Dataset columns (in the order the fields appear in each record below):
- body: string, lengths 26 to 98.2k
- body_hash: int64, -9,222,864,604,528,158,000 to 9,221,803,474B
- docstring: string, lengths 1 to 16.8k
- path: string, lengths 5 to 230
- name: string, lengths 1 to 96
- repository_name: string, lengths 7 to 89
- lang: string, 1 class
- body_without_docstring: string, lengths 20 to 98.2k
@to_tuple def _select_capabilities(remote_capabilities: Capabilities, local_capabilities: Capabilities) -> Iterable[Capability]: '\n Select the appropriate shared capabilities between local and remote.\n\n https://github.com/ethereum/devp2p/blob/master/rlpx.md#capability-messaging\n ' matching_capabilities = tuple(sorted(set(local_capabilities).intersection(remote_capabilities), key=operator.itemgetter(0))) sort_by_version = functools.partial(sorted, key=operator.itemgetter(1), reverse=True) capabilities_by_name = valmap(tuple, valmap(sort_by_version, groupby(operator.itemgetter(0), matching_capabilities))) for name in sorted(capabilities_by_name.keys()): (yield capabilities_by_name[name][0])
8,425,443,834,844,753,000
Select the appropriate shared capabilities between local and remote. https://github.com/ethereum/devp2p/blob/master/rlpx.md#capability-messaging
p2p/handshake.py
_select_capabilities
g-r-a-n-t/trinity
python
@to_tuple def _select_capabilities(remote_capabilities: Capabilities, local_capabilities: Capabilities) -> Iterable[Capability]: '\n Select the appropriate shared capabilities between local and remote.\n\n https://github.com/ethereum/devp2p/blob/master/rlpx.md#capability-messaging\n ' matching_capabilities = tuple(sorted(set(local_capabilities).intersection(remote_capabilities), key=operator.itemgetter(0))) sort_by_version = functools.partial(sorted, key=operator.itemgetter(1), reverse=True) capabilities_by_name = valmap(tuple, valmap(sort_by_version, groupby(operator.itemgetter(0), matching_capabilities))) for name in sorted(capabilities_by_name.keys()): (yield capabilities_by_name[name][0])
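Note: the _select_capabilities record above implements the RLPx rule of keeping, for each capability name offered by both peers, only the highest shared version. A minimal standard-library sketch of that selection (not the Trinity helper itself, which relies on eth_utils.to_tuple and toolz) behaves as follows:

    import itertools
    import operator

    def select_capabilities(remote, local):
        # Capabilities are (name, version) pairs; keep only those offered by both sides.
        matching = sorted(set(local) & set(remote), key=operator.itemgetter(0))
        # For each shared name, keep the highest version.
        return tuple(
            max(group, key=operator.itemgetter(1))
            for _, group in itertools.groupby(matching, key=operator.itemgetter(0))
        )

    # eth/64 is local-only and snap/1 is remote-only, so both are dropped.
    local = (('eth', 63), ('eth', 64), ('les', 2))
    remote = (('eth', 63), ('les', 2), ('snap', 1))
    assert select_capabilities(remote, local) == (('eth', 63), ('les', 2))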
async def negotiate_protocol_handshakes(transport: TransportAPI, p2p_handshake_params: DevP2PHandshakeParams, protocol_handshakers: Sequence[HandshakerAPI[ProtocolAPI]]) -> Tuple[(MultiplexerAPI, DevP2PReceipt, Tuple[(HandshakeReceiptAPI, ...)])]: '\n Negotiate the handshakes for both the base `p2p` protocol and the\n appropriate sub protocols. The basic logic follows the following steps.\n\n * perform the base `p2p` handshake.\n * using the capabilities exchanged during the `p2p` handshake, select the\n appropriate sub protocols.\n * allow each sub-protocol to perform its own handshake.\n * return the established `Multiplexer` as well as the `HandshakeReceipt`\n objects from each handshake.\n ' p2p_protocol_class = p2p_handshake_params.get_base_protocol_class() local_capabilities = tuple((handshaker.protocol_class.as_capability() for handshaker in protocol_handshakers)) duplicate_capabilities = duplicates(local_capabilities) if duplicate_capabilities: raise Exception(f'Duplicate local capabilities: {duplicate_capabilities}') ephemeral_base_protocol = p2p_protocol_class(transport, command_id_offset=0, snappy_support=False) (devp2p_receipt, base_protocol) = (await _do_p2p_handshake(transport, local_capabilities, p2p_handshake_params, ephemeral_base_protocol)) protocol_handshakers_by_capability = dict(zip(local_capabilities, protocol_handshakers)) selected_capabilities = _select_capabilities(devp2p_receipt.capabilities, local_capabilities) if (len(selected_capabilities) < 1): raise NoMatchingPeerCapabilities(f'''Found no matching capabilities between self and peer: - local : {tuple(sorted(local_capabilities))} - remote: {devp2p_receipt.capabilities}''') selected_handshakers = tuple((protocol_handshakers_by_capability[capability] for capability in selected_capabilities)) selected_protocol_types = tuple((handshaker.protocol_class for handshaker in selected_handshakers)) protocol_cmd_offsets = get_cmd_offsets(selected_protocol_types) selected_protocols = tuple((protocol_class(transport, command_id_offset, base_protocol.snappy_support) for (protocol_class, command_id_offset) in zip(selected_protocol_types, protocol_cmd_offsets))) multiplexer = Multiplexer(transport, base_protocol, selected_protocols) (await multiplexer.stream_in_background()) try: protocol_receipts = cast(Tuple[(HandshakeReceiptAPI, ...)], (await asyncio.gather(*(handshaker.do_handshake(multiplexer, protocol) for (handshaker, protocol) in zip(selected_handshakers, selected_protocols))))) except BaseException as handshake_err: multiplexer.raise_if_streaming_error() (await multiplexer.stop_streaming()) raise handshake_err else: multiplexer.raise_if_streaming_error() return (multiplexer, devp2p_receipt, protocol_receipts)
-6,508,748,553,269,420,000
Negotiate the handshakes for both the base `p2p` protocol and the appropriate sub protocols. The basic logic follows the following steps. * perform the base `p2p` handshake. * using the capabilities exchanged during the `p2p` handshake, select the appropriate sub protocols. * allow each sub-protocol to perform its own handshake. * return the established `Multiplexer` as well as the `HandshakeReceipt` objects from each handshake.
p2p/handshake.py
negotiate_protocol_handshakes
g-r-a-n-t/trinity
python
async def negotiate_protocol_handshakes(transport: TransportAPI, p2p_handshake_params: DevP2PHandshakeParams, protocol_handshakers: Sequence[HandshakerAPI[ProtocolAPI]]) -> Tuple[(MultiplexerAPI, DevP2PReceipt, Tuple[(HandshakeReceiptAPI, ...)])]: '\n Negotiate the handshakes for both the base `p2p` protocol and the\n appropriate sub protocols. The basic logic follows the following steps.\n\n * perform the base `p2p` handshake.\n * using the capabilities exchanged during the `p2p` handshake, select the\n appropriate sub protocols.\n * allow each sub-protocol to perform its own handshake.\n * return the established `Multiplexer` as well as the `HandshakeReceipt`\n objects from each handshake.\n ' p2p_protocol_class = p2p_handshake_params.get_base_protocol_class() local_capabilities = tuple((handshaker.protocol_class.as_capability() for handshaker in protocol_handshakers)) duplicate_capabilities = duplicates(local_capabilities) if duplicate_capabilities: raise Exception(f'Duplicate local capabilities: {duplicate_capabilities}') ephemeral_base_protocol = p2p_protocol_class(transport, command_id_offset=0, snappy_support=False) (devp2p_receipt, base_protocol) = (await _do_p2p_handshake(transport, local_capabilities, p2p_handshake_params, ephemeral_base_protocol)) protocol_handshakers_by_capability = dict(zip(local_capabilities, protocol_handshakers)) selected_capabilities = _select_capabilities(devp2p_receipt.capabilities, local_capabilities) if (len(selected_capabilities) < 1): raise NoMatchingPeerCapabilities(f'Found no matching capabilities between self and peer: - local : {tuple(sorted(local_capabilities))} - remote: {devp2p_receipt.capabilities}') selected_handshakers = tuple((protocol_handshakers_by_capability[capability] for capability in selected_capabilities)) selected_protocol_types = tuple((handshaker.protocol_class for handshaker in selected_handshakers)) protocol_cmd_offsets = get_cmd_offsets(selected_protocol_types) selected_protocols = tuple((protocol_class(transport, command_id_offset, base_protocol.snappy_support) for (protocol_class, command_id_offset) in zip(selected_protocol_types, protocol_cmd_offsets))) multiplexer = Multiplexer(transport, base_protocol, selected_protocols) (await multiplexer.stream_in_background()) try: protocol_receipts = cast(Tuple[(HandshakeReceiptAPI, ...)], (await asyncio.gather(*(handshaker.do_handshake(multiplexer, protocol) for (handshaker, protocol) in zip(selected_handshakers, selected_protocols))))) except BaseException as handshake_err: multiplexer.raise_if_streaming_error() (await multiplexer.stop_streaming()) raise handshake_err else: multiplexer.raise_if_streaming_error() return (multiplexer, devp2p_receipt, protocol_receipts)
async def dial_out(remote: NodeAPI, private_key: keys.PrivateKey, p2p_handshake_params: DevP2PHandshakeParams, protocol_handshakers: Sequence[HandshakerAPI[ProtocolAPI]]) -> ConnectionAPI: '\n Perform the auth and P2P handshakes with the given remote.\n\n Return a `Connection` object housing all of the negotiated sub protocols.\n\n Raises UnreachablePeer if we cannot connect to the peer or\n HandshakeFailure if the remote disconnects before completing the\n handshake or if none of the sub-protocols supported by us is also\n supported by the remote.\n ' transport = (await Transport.connect(remote, private_key)) transport.logger.debug2('Initiating p2p handshake with %s', remote) try: (multiplexer, devp2p_receipt, protocol_receipts) = (await negotiate_protocol_handshakes(transport=transport, p2p_handshake_params=p2p_handshake_params, protocol_handshakers=protocol_handshakers)) except BaseException: try: (await transport.close()) except ConnectionResetError: transport.logger.debug('Could not wait for transport to close') raise transport.logger.debug2('Completed p2p handshake with %s', remote) connection = Connection(multiplexer=multiplexer, devp2p_receipt=devp2p_receipt, protocol_receipts=protocol_receipts, is_dial_out=True) return connection
-541,882,953,397,349,950
Perform the auth and P2P handshakes with the given remote. Return a `Connection` object housing all of the negotiated sub protocols. Raises UnreachablePeer if we cannot connect to the peer or HandshakeFailure if the remote disconnects before completing the handshake or if none of the sub-protocols supported by us is also supported by the remote.
p2p/handshake.py
dial_out
g-r-a-n-t/trinity
python
async def dial_out(remote: NodeAPI, private_key: keys.PrivateKey, p2p_handshake_params: DevP2PHandshakeParams, protocol_handshakers: Sequence[HandshakerAPI[ProtocolAPI]]) -> ConnectionAPI: '\n Perform the auth and P2P handshakes with the given remote.\n\n Return a `Connection` object housing all of the negotiated sub protocols.\n\n Raises UnreachablePeer if we cannot connect to the peer or\n HandshakeFailure if the remote disconnects before completing the\n handshake or if none of the sub-protocols supported by us is also\n supported by the remote.\n ' transport = (await Transport.connect(remote, private_key)) transport.logger.debug2('Initiating p2p handshake with %s', remote) try: (multiplexer, devp2p_receipt, protocol_receipts) = (await negotiate_protocol_handshakes(transport=transport, p2p_handshake_params=p2p_handshake_params, protocol_handshakers=protocol_handshakers)) except BaseException: try: (await transport.close()) except ConnectionResetError: transport.logger.debug('Could not wait for transport to close') raise transport.logger.debug2('Completed p2p handshake with %s', remote) connection = Connection(multiplexer=multiplexer, devp2p_receipt=devp2p_receipt, protocol_receipts=protocol_receipts, is_dial_out=True) return connection
def add_conv_status(self, line_no, cmd, object_type, full_command, conv_status, avi_object=None): '\n Adds as status row in conversion status csv\n :param line_no: line number of command\n :param object_type:\n :param full_command: netscaler command\n :param conv_status: dict of conversion status\n :param avi_object: Converted objectconverted avi object\n ' row = {'Line Number': (line_no if line_no else ''), 'Netscaler Command': (cmd if cmd else ''), 'Object Name': (object_type if object_type else ''), 'Full Command': (full_command if full_command else ''), 'Status': conv_status.get('status', ''), 'Skipped settings': str(conv_status.get('skipped', '')), 'Indirect mapping': str(conv_status.get('indirect', '')), 'Not Applicable': str(conv_status.get('na_list', '')), 'User Ignored': str(conv_status.get('user_ignore', '')), 'AVI Object': (str(avi_object) if avi_object else '')} csv_writer_dict_list.append(row)
-524,440,085,604,321,150
Adds as status row in conversion status csv :param line_no: line number of command :param object_type: :param full_command: netscaler command :param conv_status: dict of conversion status :param avi_object: Converted objectconverted avi object
python/avi/migrationtools/netscaler_converter/ns_util.py
add_conv_status
avinetworks/alb-sdk
python
def add_conv_status(self, line_no, cmd, object_type, full_command, conv_status, avi_object=None): '\n Adds as status row in conversion status csv\n :param line_no: line number of command\n :param object_type:\n :param full_command: netscaler command\n :param conv_status: dict of conversion status\n :param avi_object: Converted objectconverted avi object\n ' row = {'Line Number': (line_no if line_no else ''), 'Netscaler Command': (cmd if cmd else ''), 'Object Name': (object_type if object_type else ''), 'Full Command': (full_command if full_command else ''), 'Status': conv_status.get('status', ''), 'Skipped settings': str(conv_status.get('skipped', '')), 'Indirect mapping': str(conv_status.get('indirect', '')), 'Not Applicable': str(conv_status.get('na_list', '')), 'User Ignored': str(conv_status.get('user_ignore', '')), 'AVI Object': (str(avi_object) if avi_object else '')} csv_writer_dict_list.append(row)
def add_complete_conv_status(self, ns_config, output_dir, avi_config, report_name, vs_level_status): '\n Adds as status row in conversion status csv\n :param ns_config: NS config dict\n :param output_dir: output directory\n :param avi_config: AVI config dict\n :param report_name: name of report\n :param vs_level_status: add vs level details in XL sheet\n ' global csv_writer_dict_list global progressbar_count global total_count print('Generating Report For Converted Configuration...') ptotal = len(ns_config) ppcount = 0 for config_key in ns_config: ppcount += 1 config_object = ns_config[config_key] msg = 'Generating report' self.print_progress_bar(ppcount, ptotal, msg, prefix='Progress', suffix='') for element_key in config_object: element_object_list = config_object[element_key] if isinstance(element_object_list, dict): element_object_list = [element_object_list] for element_object in element_object_list: match = [match for match in csv_writer_dict_list if (match['Line Number'] == element_object['line_no'])] if (not match): ns_complete_command = self.get_netscalar_full_command(config_key, element_object) self.add_status_row(element_object['line_no'], config_key, element_object['attrs'][0], ns_complete_command, STATUS_INCOMPLETE_CONFIGURATION) unique_line_number_list = set() row_list = [] for dict_row in csv_writer_dict_list: if (dict_row['Line Number'] not in unique_line_number_list): unique_line_number_list.add(dict_row['Line Number']) row_list.append(dict_row) else: row = [row for row in row_list if (row['Line Number'] == dict_row['Line Number'])] if str(dict_row['AVI Object']).startswith('Skipped'): continue if dict_row.get('AVI Object', None): if (str(row[0]['AVI Object']) != str(dict_row['AVI Object'])): row[0]['AVI Object'] += ('__/__%s' % dict_row['AVI Object']) for status in STATUS_LIST: status_list = [row for row in row_list if (row['Status'] == status)] print(('%s: %s' % (status, len(status_list)))) print('Writing Excel Sheet For Converted Configuration...') total_count = (total_count + len(row_list)) if vs_level_status: self.vs_per_skipped_setting_for_references(avi_config) self.correct_vs_ref(avi_config) else: self.vs_complexity_level() self.write_status_report_and_pivot_table_in_xlsx(row_list, output_dir, report_name, vs_level_status)
-1,102,158,361,569,458,000
Adds as status row in conversion status csv :param ns_config: NS config dict :param output_dir: output directory :param avi_config: AVI config dict :param report_name: name of report :param vs_level_status: add vs level details in XL sheet
python/avi/migrationtools/netscaler_converter/ns_util.py
add_complete_conv_status
avinetworks/alb-sdk
python
def add_complete_conv_status(self, ns_config, output_dir, avi_config, report_name, vs_level_status): '\n Adds as status row in conversion status csv\n :param ns_config: NS config dict\n :param output_dir: output directory\n :param avi_config: AVI config dict\n :param report_name: name of report\n :param vs_level_status: add vs level details in XL sheet\n ' global csv_writer_dict_list global progressbar_count global total_count print('Generating Report For Converted Configuration...') ptotal = len(ns_config) ppcount = 0 for config_key in ns_config: ppcount += 1 config_object = ns_config[config_key] msg = 'Generating report' self.print_progress_bar(ppcount, ptotal, msg, prefix='Progress', suffix='') for element_key in config_object: element_object_list = config_object[element_key] if isinstance(element_object_list, dict): element_object_list = [element_object_list] for element_object in element_object_list: match = [match for match in csv_writer_dict_list if (match['Line Number'] == element_object['line_no'])] if (not match): ns_complete_command = self.get_netscalar_full_command(config_key, element_object) self.add_status_row(element_object['line_no'], config_key, element_object['attrs'][0], ns_complete_command, STATUS_INCOMPLETE_CONFIGURATION) unique_line_number_list = set() row_list = [] for dict_row in csv_writer_dict_list: if (dict_row['Line Number'] not in unique_line_number_list): unique_line_number_list.add(dict_row['Line Number']) row_list.append(dict_row) else: row = [row for row in row_list if (row['Line Number'] == dict_row['Line Number'])] if str(dict_row['AVI Object']).startswith('Skipped'): continue if dict_row.get('AVI Object', None): if (str(row[0]['AVI Object']) != str(dict_row['AVI Object'])): row[0]['AVI Object'] += ('__/__%s' % dict_row['AVI Object']) for status in STATUS_LIST: status_list = [row for row in row_list if (row['Status'] == status)] print(('%s: %s' % (status, len(status_list)))) print('Writing Excel Sheet For Converted Configuration...') total_count = (total_count + len(row_list)) if vs_level_status: self.vs_per_skipped_setting_for_references(avi_config) self.correct_vs_ref(avi_config) else: self.vs_complexity_level() self.write_status_report_and_pivot_table_in_xlsx(row_list, output_dir, report_name, vs_level_status)
def add_status_row(self, line_no, cmd, object_type, full_command, status, avi_object=None): '\n Adds as status row in conversion status csv\n :param line_no:\n :param cmd: netscaler command\n :param object_type:\n :param full_command:\n :param status: conversion status\n :param avi_object:\n ' global csv_writer_dict_list row = {'Line Number': (line_no if line_no else ''), 'Netscaler Command': cmd, 'Object Name': object_type, 'Full Command': full_command, 'Status': status, 'AVI Object': (str(avi_object) if avi_object else '')} csv_writer_dict_list.append(row)
-919,691,838,391,966,800
Adds as status row in conversion status csv :param line_no: :param cmd: netscaler command :param object_type: :param full_command: :param status: conversion status :param avi_object:
python/avi/migrationtools/netscaler_converter/ns_util.py
add_status_row
avinetworks/alb-sdk
python
def add_status_row(self, line_no, cmd, object_type, full_command, status, avi_object=None): '\n Adds as status row in conversion status csv\n :param line_no:\n :param cmd: netscaler command\n :param object_type:\n :param full_command:\n :param status: conversion status\n :param avi_object:\n ' global csv_writer_dict_list row = {'Line Number': (line_no if line_no else ''), 'Netscaler Command': cmd, 'Object Name': object_type, 'Full Command': full_command, 'Status': status, 'AVI Object': (str(avi_object) if avi_object else '')} csv_writer_dict_list.append(row)
def add_csv_headers(self, csv_file): '\n Adds header line in conversion status file\n :param csv_file: File to which header is to be added\n ' global csv_writer fieldnames = ['Line Number', 'Netscaler Command', 'Object Name', 'Full Command', 'Status', 'Skipped settings', 'Indirect mapping', 'Not Applicable', 'User Ignored', 'AVI Object'] csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames, lineterminator='\n') csv_writer.writeheader()
621,174,675,854,295,200
Adds header line in conversion status file :param csv_file: File to which header is to be added
python/avi/migrationtools/netscaler_converter/ns_util.py
add_csv_headers
avinetworks/alb-sdk
python
def add_csv_headers(self, csv_file): '\n Adds header line in conversion status file\n :param csv_file: File to which header is to be added\n ' global csv_writer fieldnames = ['Line Number', 'Netscaler Command', 'Object Name', 'Full Command', 'Status', 'Skipped settings', 'Indirect mapping', 'Not Applicable', 'User Ignored', 'AVI Object'] csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames, lineterminator='\n') csv_writer.writeheader()
def get_avi_lb_algorithm(self, ns_algorithm): '\n Converts NS LB algorithm to equivalent avi LB algorithm\n :param ns_algorithm: NS algorithm name\n :return: Avi LB algorithm enum value\n ' avi_algorithm = 'LB_ALGORITHM_LEAST_CONNECTIONS' if (ns_algorithm == 'LEASTCONNECTIONS'): avi_algorithm = 'LB_ALGORITHM_LEAST_CONNECTIONS' elif (ns_algorithm == 'ROUNDROBIN'): avi_algorithm = 'LB_ALGORITHM_ROUND_ROBIN' elif (ns_algorithm in ['LEASTRESPONSETIME', 'LRTM']): avi_algorithm = 'LB_ALGORITHM_FASTEST_RESPONSE' elif (ns_algorithm == 'SOURCEIPHASH'): avi_algorithm = 'LB_ALGORITHM_CONSISTENT_HASH' elif (ns_algorithm == 'URLHASH'): avi_algorithm = 'LB_ALGORITHM_CONSISTENT_HASH_URI' return avi_algorithm
8,713,300,509,531,353,000
Converts NS LB algorithm to equivalent avi LB algorithm :param ns_algorithm: NS algorithm name :return: Avi LB algorithm enum value
python/avi/migrationtools/netscaler_converter/ns_util.py
get_avi_lb_algorithm
avinetworks/alb-sdk
python
def get_avi_lb_algorithm(self, ns_algorithm): '\n Converts NS LB algorithm to equivalent avi LB algorithm\n :param ns_algorithm: NS algorithm name\n :return: Avi LB algorithm enum value\n ' avi_algorithm = 'LB_ALGORITHM_LEAST_CONNECTIONS' if (ns_algorithm == 'LEASTCONNECTIONS'): avi_algorithm = 'LB_ALGORITHM_LEAST_CONNECTIONS' elif (ns_algorithm == 'ROUNDROBIN'): avi_algorithm = 'LB_ALGORITHM_ROUND_ROBIN' elif (ns_algorithm in ['LEASTRESPONSETIME', 'LRTM']): avi_algorithm = 'LB_ALGORITHM_FASTEST_RESPONSE' elif (ns_algorithm == 'SOURCEIPHASH'): avi_algorithm = 'LB_ALGORITHM_CONSISTENT_HASH' elif (ns_algorithm == 'URLHASH'): avi_algorithm = 'LB_ALGORITHM_CONSISTENT_HASH_URI' return avi_algorithm
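Because get_avi_lb_algorithm is a pure one-to-one translation, the same mapping can be expressed as a lookup table. An illustrative dict-driven equivalent (not part of the SDK) with the same default of least-connections for unknown inputs:

    NS_TO_AVI_LB = {
        'LEASTCONNECTIONS': 'LB_ALGORITHM_LEAST_CONNECTIONS',
        'ROUNDROBIN': 'LB_ALGORITHM_ROUND_ROBIN',
        'LEASTRESPONSETIME': 'LB_ALGORITHM_FASTEST_RESPONSE',
        'LRTM': 'LB_ALGORITHM_FASTEST_RESPONSE',
        'SOURCEIPHASH': 'LB_ALGORITHM_CONSISTENT_HASH',
        'URLHASH': 'LB_ALGORITHM_CONSISTENT_HASH_URI',
    }

    def ns_to_avi_lb_algorithm(ns_algorithm):
        # Anything unmapped falls back to least-connections, as in the method above.
        return NS_TO_AVI_LB.get(ns_algorithm, 'LB_ALGORITHM_LEAST_CONNECTIONS')

    assert ns_to_avi_lb_algorithm('URLHASH') == 'LB_ALGORITHM_CONSISTENT_HASH_URI'
    assert ns_to_avi_lb_algorithm('UNKNOWN') == 'LB_ALGORITHM_LEAST_CONNECTIONS'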
def get_avi_resp_code(self, respCode): '\n This function used for getting appropriate response code for avi.\n :param respCode: response code\n :return: returns list of unique responses.\n ' avi_resp_codes = [] codes = [] for res_code in respCode.split(' '): if ('-' in res_code): codes.extend(res_code.split('-')) else: codes.append(res_code) for code in codes: if (code and code.strip().isdigit()): code = int(code.strip()) if (code < 200): avi_resp_codes.append('HTTP_1XX') elif (code < 300): avi_resp_codes.append('HTTP_2XX') elif (code < 400): avi_resp_codes.append('HTTP_3XX') elif (code < 500): avi_resp_codes.append('HTTP_4XX') elif (code < 600): avi_resp_codes.append('HTTP_5XX') avi_resp_codes = list(set(avi_resp_codes)) if (not avi_resp_codes): avi_resp_codes = ['HTTP_ANY'] return avi_resp_codes
2,464,425,441,939,191,300
This function used for getting appropriate response code for avi. :param respCode: response code :return: returns list of unique responses.
python/avi/migrationtools/netscaler_converter/ns_util.py
get_avi_resp_code
avinetworks/alb-sdk
python
def get_avi_resp_code(self, respCode): '\n This function used for getting appropriate response code for avi.\n :param respCode: response code\n :return: returns list of unique responses.\n ' avi_resp_codes = [] codes = [] for res_code in respCode.split(' '): if ('-' in res_code): codes.extend(res_code.split('-')) else: codes.append(res_code) for code in codes: if (code and code.strip().isdigit()): code = int(code.strip()) if (code < 200): avi_resp_codes.append('HTTP_1XX') elif (code < 300): avi_resp_codes.append('HTTP_2XX') elif (code < 400): avi_resp_codes.append('HTTP_3XX') elif (code < 500): avi_resp_codes.append('HTTP_4XX') elif (code < 600): avi_resp_codes.append('HTTP_5XX') avi_resp_codes = list(set(avi_resp_codes)) if (not avi_resp_codes): avi_resp_codes = ['HTTP_ANY'] return avi_resp_codes
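get_avi_resp_code buckets space-separated NetScaler response codes (with optional ranges such as '200-299') into Avi HTTP_nXX groups and falls back to HTTP_ANY when nothing parses. A standalone sketch of essentially the same bucketing, for illustration only:

    def bucket_resp_codes(resp_code):
        # Split 'a b-c d' style input into individual numeric tokens.
        codes = []
        for part in resp_code.split(' '):
            codes.extend(part.split('-') if '-' in part else [part])
        buckets = set()
        for code in codes:
            if code.strip().isdigit():
                value = int(code.strip())
                if 100 <= value < 600:
                    buckets.add('HTTP_%dXX' % (value // 100))
        return sorted(buckets) or ['HTTP_ANY']

    assert bucket_resp_codes('200-299 404') == ['HTTP_2XX', 'HTTP_4XX']
    assert bucket_resp_codes('') == ['HTTP_ANY']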
def get_conv_status(self, ns_object, skipped_list, na_list, indirect_list, ignore_for_val=None, indirect_commands=None, user_ignore_val=[]): '\n This function used for getting status detail for command like\n skipped or indirect.\n :param ns_object: Netscaler parsed config\n :param skipped_list: list of skipped commands list.\n :param na_list: not applicable commands list.\n :param indirect_list: indirect command list\n :param ignore_for_val: optional field\n :param indirect_commands: indirect commands\n :param user_ignore_val: List of user ignore attributes\n :return: returns dict of coversion status.\n ' skipped = [attr for attr in ns_object.keys() if (attr in skipped_list)] na = [attr for attr in ns_object.keys() if (attr in na_list)] indirect = [attr for attr in ns_object.keys() if (attr in indirect_list)] user_ignore = [val for val in skipped if (val in user_ignore_val)] skipped = [attr for attr in skipped if (attr not in user_ignore_val)] if ignore_for_val: for key in ignore_for_val.keys(): if (key not in ns_object): continue ns_val = ns_object.get(key) ignore_val = ignore_for_val.get(key) if ((key in skipped) and (str(ns_val) == str(ignore_val))): skipped.remove(key) if skipped: status = STATUS_PARTIAL else: status = STATUS_SUCCESSFUL conv_status = {'skipped': skipped, 'indirect': indirect, 'na_list': na, 'status': status, 'user_ignore': user_ignore} return conv_status
4,816,010,555,973,858,000
This function used for getting status detail for command like skipped or indirect. :param ns_object: Netscaler parsed config :param skipped_list: list of skipped commands list. :param na_list: not applicable commands list. :param indirect_list: indirect command list :param ignore_for_val: optional field :param indirect_commands: indirect commands :param user_ignore_val: List of user ignore attributes :return: returns dict of coversion status.
python/avi/migrationtools/netscaler_converter/ns_util.py
get_conv_status
avinetworks/alb-sdk
python
def get_conv_status(self, ns_object, skipped_list, na_list, indirect_list, ignore_for_val=None, indirect_commands=None, user_ignore_val=[]): '\n This function used for getting status detail for command like\n skipped or indirect.\n :param ns_object: Netscaler parsed config\n :param skipped_list: list of skipped commands list.\n :param na_list: not applicable commands list.\n :param indirect_list: indirect command list\n :param ignore_for_val: optional field\n :param indirect_commands: indirect commands\n :param user_ignore_val: List of user ignore attributes\n :return: returns dict of coversion status.\n ' skipped = [attr for attr in ns_object.keys() if (attr in skipped_list)] na = [attr for attr in ns_object.keys() if (attr in na_list)] indirect = [attr for attr in ns_object.keys() if (attr in indirect_list)] user_ignore = [val for val in skipped if (val in user_ignore_val)] skipped = [attr for attr in skipped if (attr not in user_ignore_val)] if ignore_for_val: for key in ignore_for_val.keys(): if (key not in ns_object): continue ns_val = ns_object.get(key) ignore_val = ignore_for_val.get(key) if ((key in skipped) and (str(ns_val) == str(ignore_val))): skipped.remove(key) if skipped: status = STATUS_PARTIAL else: status = STATUS_SUCCESSFUL conv_status = {'skipped': skipped, 'indirect': indirect, 'na_list': na, 'status': status, 'user_ignore': user_ignore} return conv_status
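get_conv_status classifies the keys of a parsed NetScaler object into skipped, not-applicable, indirect and user-ignored buckets, then derives an overall status. A hypothetical call (assuming ns_util is an instance of this utility class and STATUS_PARTIAL is in scope) shows the shape of the result:

    ns_object = {'attrs': ['vs1'], 'persistenceType': 'SOURCEIP',
                 'timeout': '2', 'comment': 'demo'}
    conv_status = ns_util.get_conv_status(
        ns_object,
        skipped_list=['timeout', 'comment'],
        na_list=['persistenceType'],
        indirect_list=[],
        user_ignore_val=['comment'])
    # 'comment' moves from skipped to user_ignore, 'timeout' stays skipped,
    # so the overall status is partial:
    # {'skipped': ['timeout'], 'indirect': [], 'na_list': ['persistenceType'],
    #  'status': STATUS_PARTIAL, 'user_ignore': ['comment']}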
def get_key_cert_obj(self, name, key_file_name, cert_file_name, input_dir): '\n :param name:name of ssl cert.\n :param key_file_name: key file (ie.pem)\n :param cert_file_name: certificate file name\n :param input_dir: input directory for certificate file name\n :return: returns dict of ssl object\n ' folder_path = (input_dir + os.path.sep) key = self.upload_file((folder_path + key_file_name)) cert = self.upload_file((folder_path + cert_file_name)) ssl_kc_obj = None if (key and cert): cert = {'certificate': cert} ssl_kc_obj = {'name': name, 'key': key, 'certificate': cert, 'key_passphrase': ''} return ssl_kc_obj
1,050,294,291,954,220,800
:param name:name of ssl cert. :param key_file_name: key file (ie.pem) :param cert_file_name: certificate file name :param input_dir: input directory for certificate file name :return: returns dict of ssl object
python/avi/migrationtools/netscaler_converter/ns_util.py
get_key_cert_obj
avinetworks/alb-sdk
python
def get_key_cert_obj(self, name, key_file_name, cert_file_name, input_dir): '\n :param name:name of ssl cert.\n :param key_file_name: key file (ie.pem)\n :param cert_file_name: certificate file name\n :param input_dir: input directory for certificate file name\n :return: returns dict of ssl object\n ' folder_path = (input_dir + os.path.sep) key = self.upload_file((folder_path + key_file_name)) cert = self.upload_file((folder_path + cert_file_name)) ssl_kc_obj = None if (key and cert): cert = {'certificate': cert} ssl_kc_obj = {'name': name, 'key': key, 'certificate': cert, 'key_passphrase': ''} return ssl_kc_obj
def get_command_from_line(self, line): '\n This function is used for getting command and line number from conf file.\n :param line: line\n :return: returns command name and line\n ' cmd = '' line_no = 0 for member in line: if ('line_no' in member): line_no = member[1] continue if isinstance(member, str): cmd += (' %s' % member) else: cmd += (' -%s' % ' '.join(member)) return (cmd, line_no)
5,887,616,961,579,559,000
This function is used for getting command and line number from conf file. :param line: line :return: returns command name and line
python/avi/migrationtools/netscaler_converter/ns_util.py
get_command_from_line
avinetworks/alb-sdk
python
def get_command_from_line(self, line): '\n This function is used for getting command and line number from conf file.\n :param line: line\n :return: returns command name and line\n ' cmd = '' line_no = 0 for member in line: if ('line_no' in member): line_no = member[1] continue if isinstance(member, str): cmd += (' %s' % member) else: cmd += (' -%s' % ' '.join(member)) return (cmd, line_no)
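get_command_from_line rebuilds a readable command from the parser's list representation: plain string members are appended verbatim, list members become '-key value' options, and the member containing 'line_no' supplies the line number. A hypothetical parsed line and the tuple the method would return (the exact parsed shape comes from the converter's own parser; this is only an assumed illustration consistent with the code above):

    line = ['add lb vserver', 'vs1', 'HTTP',
            ['persistenceType', 'SOURCEIP'], ('line_no', 42)]
    # get_command_from_line(line) ->
    # (' add lb vserver vs1 HTTP -persistenceType SOURCEIP', 42)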
def update_status_for_skipped(self, skipped_cmds): '\n :param skipped_cmds: separation of non converted commands\n to NA, Indirect,DataScript,NotSupported\n :return: None\n ' na_cmds = ns_constants.netscalar_command_status['NotApplicableCommands'] indirect_cmds = ns_constants.netscalar_command_status['IndirectCommands'] datascript_cmds = ns_constants.netscalar_command_status['DatascriptCommands'] not_supported = ns_constants.netscalar_command_status['NotSupported'] if (not skipped_cmds): return for cmd in skipped_cmds: line_no = cmd['line_no'] cmd = cmd['cmd'] cmd = cmd.strip() for na_cmd in na_cmds: if cmd.startswith(na_cmd): self.add_status_row(line_no, na_cmd, None, cmd, STATUS_NOT_APPLICABLE) break for id_cmd in indirect_cmds: if cmd.startswith(id_cmd): self.add_status_row(line_no, id_cmd, None, cmd, STATUS_INDIRECT) break for datascript_cmd in datascript_cmds: if cmd.startswith(datascript_cmd): self.add_status_row(line_no, datascript_cmd, None, cmd, STATUS_DATASCRIPT) break for not_commands in not_supported: if cmd.startswith(not_commands): self.add_status_row(line_no, not_commands, None, cmd, STATUS_COMMAND_NOT_SUPPORTED) break
-4,603,544,928,523,227,600
:param skipped_cmds: separation of non converted commands to NA, Indirect,DataScript,NotSupported :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
update_status_for_skipped
avinetworks/alb-sdk
python
def update_status_for_skipped(self, skipped_cmds): '\n :param skipped_cmds: separation of non converted commands\n to NA, Indirect,DataScript,NotSupported\n :return: None\n ' na_cmds = ns_constants.netscalar_command_status['NotApplicableCommands'] indirect_cmds = ns_constants.netscalar_command_status['IndirectCommands'] datascript_cmds = ns_constants.netscalar_command_status['DatascriptCommands'] not_supported = ns_constants.netscalar_command_status['NotSupported'] if (not skipped_cmds): return for cmd in skipped_cmds: line_no = cmd['line_no'] cmd = cmd['cmd'] cmd = cmd.strip() for na_cmd in na_cmds: if cmd.startswith(na_cmd): self.add_status_row(line_no, na_cmd, None, cmd, STATUS_NOT_APPLICABLE) break for id_cmd in indirect_cmds: if cmd.startswith(id_cmd): self.add_status_row(line_no, id_cmd, None, cmd, STATUS_INDIRECT) break for datascript_cmd in datascript_cmds: if cmd.startswith(datascript_cmd): self.add_status_row(line_no, datascript_cmd, None, cmd, STATUS_DATASCRIPT) break for not_commands in not_supported: if cmd.startswith(not_commands): self.add_status_row(line_no, not_commands, None, cmd, STATUS_COMMAND_NOT_SUPPORTED) break
def remove_duplicate_objects(self, obj_type, obj_list): '\n Remove duplicate objects from list\n :param obj_type: Object type\n :param obj_list: list of all objects\n :return: return list which has no duplicates objects\n ' if (len(obj_list) == 1): return obj_list for source_obj in obj_list: for (index, tmp_obj) in enumerate(obj_list): if (tmp_obj['name'] == source_obj['name']): continue src_cp = copy.deepcopy(source_obj) tmp_cp = copy.deepcopy(tmp_obj) del src_cp['name'] if ('description' in src_cp): del src_cp['description'] del tmp_cp['name'] if ('description' in tmp_cp): del tmp_cp['description'] if (src_cp.items() == tmp_cp.items()): LOG.warn(('Remove duplicate %s object : %s' % (obj_type, tmp_obj['name']))) del obj_list[index] self.remove_duplicate_objects(obj_type, obj_list) return obj_list
7,003,530,370,034,506,000
Remove duplicate objects from list :param obj_type: Object type :param obj_list: list of all objects :return: return list which has no duplicates objects
python/avi/migrationtools/netscaler_converter/ns_util.py
remove_duplicate_objects
avinetworks/alb-sdk
python
def remove_duplicate_objects(self, obj_type, obj_list): '\n Remove duplicate objects from list\n :param obj_type: Object type\n :param obj_list: list of all objects\n :return: return list which has no duplicates objects\n ' if (len(obj_list) == 1): return obj_list for source_obj in obj_list: for (index, tmp_obj) in enumerate(obj_list): if (tmp_obj['name'] == source_obj['name']): continue src_cp = copy.deepcopy(source_obj) tmp_cp = copy.deepcopy(tmp_obj) del src_cp['name'] if ('description' in src_cp): del src_cp['description'] del tmp_cp['name'] if ('description' in tmp_cp): del tmp_cp['description'] if (src_cp.items() == tmp_cp.items()): LOG.warn(('Remove duplicate %s object : %s' % (obj_type, tmp_obj['name']))) del obj_list[index] self.remove_duplicate_objects(obj_type, obj_list) return obj_list
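remove_duplicate_objects treats two objects as duplicates when they are equal after ignoring 'name' and 'description', removes one of them, and recurses until the list is stable. A hypothetical input (ns_util again standing in for an instance of this class):

    pools = [
        {'name': 'pool-a', 'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN'},
        {'name': 'pool-b', 'description': 'copy of a',
         'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN'},
        {'name': 'pool-c', 'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS'},
    ]
    # pool-a and pool-b differ only in name/description, so one of them is
    # dropped and ns_util.remove_duplicate_objects('pool', pools) returns
    # two objects.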
def cleanup_config(self, config): '\n This function is used for deleting temp variables created for conversion\n :param config: dict type\n :return: None\n ' del config
-4,106,352,914,794,422,000
This function is used for deleting temp variables created for conversion :param config: dict type :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
cleanup_config
avinetworks/alb-sdk
python
def cleanup_config(self, config): '\n This function is used for deleting temp variables created for conversion\n :param config: dict type\n :return: None\n ' del config
def clone_pool(self, pool_name, cloned_for, avi_config, userprefix=None): '\n This function used for cloning shared pools in netscaler.\n :param pool_name: name of pool\n :param cloned_for: cloned for\n :param avi_config: avi config dict\n :param userprefix: prefix for objects\n :return: None\n ' pools = [pool for pool in avi_config['Pool'] if (pool['name'] == pool_name)] if pools: pool_obj = copy.deepcopy(pools[0]) pname = pool_obj['name'] pool_name = re.sub('[:]', '-', ('%s-%s' % (pname, cloned_for))) pool_obj['name'] = pool_name avi_config['Pool'].append(pool_obj) LOG.info(('Same pool reference to other object. Clone Pool %s for %s' % (pool_name, cloned_for))) return pool_obj['name'] return None
60,184,253,157,548,270
This function used for cloning shared pools in netscaler. :param pool_name: name of pool :param cloned_for: cloned for :param avi_config: avi config dict :param userprefix: prefix for objects :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
clone_pool
avinetworks/alb-sdk
python
def clone_pool(self, pool_name, cloned_for, avi_config, userprefix=None): '\n This function used for cloning shared pools in netscaler.\n :param pool_name: name of pool\n :param cloned_for: cloned for\n :param avi_config: avi config dict\n :param userprefix: prefix for objects\n :return: None\n ' pools = [pool for pool in avi_config['Pool'] if (pool['name'] == pool_name)] if pools: pool_obj = copy.deepcopy(pools[0]) pname = pool_obj['name'] pool_name = re.sub('[:]', '-', ('%s-%s' % (pname, cloned_for))) pool_obj['name'] = pool_name avi_config['Pool'].append(pool_obj) LOG.info(('Same pool reference to other object. Clone Pool %s for %s' % (pool_name, cloned_for))) return pool_obj['name'] return None
def get_vs_if_shared_vip(self, avi_config, controller_version): '\n This function checks if same vip is used for other vs\n :param avi_config: avi config dict\n :param controller_version:\n :return: None\n ' vs_list = [v for v in avi_config['VirtualService'] if ('port_range_end' in v['services'][0])] for vs in vs_list: if (parse_version(controller_version) >= parse_version('17.1')): vs_port_list = [int(v['services'][0]['port']) for v in avi_config['VirtualService'] if ((v['vsvip_ref'].split('name=')[1].split('-')[0] == vs['vsvip_ref'].split('name=')[1].split('-')[0]) and ('port_range_end' not in v['services'][0]))] else: vs_port_list = [int(v['services'][0]['port']) for v in avi_config['VirtualService'] if ((v['ip_address']['addr'] == vs['ip_address']['addr']) and ('port_range_end' not in v['services'][0]))] if vs_port_list: min_port = min(vs_port_list) max_port = max(vs_port_list) vs['services'][0]['port_range_end'] = str((min_port - 1)) service = {'enable_ssl': False, 'port': str((max_port + 1)), 'port_range_end': '65535'} vs['services'].append(service)
-5,358,874,291,766,402,000
This function checks if same vip is used for other vs :param avi_config: avi config dict :param controller_version: :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
get_vs_if_shared_vip
avinetworks/alb-sdk
python
def get_vs_if_shared_vip(self, avi_config, controller_version): '\n This function checks if same vip is used for other vs\n :param avi_config: avi config dict\n :param controller_version:\n :return: None\n ' vs_list = [v for v in avi_config['VirtualService'] if ('port_range_end' in v['services'][0])] for vs in vs_list: if (parse_version(controller_version) >= parse_version('17.1')): vs_port_list = [int(v['services'][0]['port']) for v in avi_config['VirtualService'] if ((v['vsvip_ref'].split('name=')[1].split('-')[0] == vs['vsvip_ref'].split('name=')[1].split('-')[0]) and ('port_range_end' not in v['services'][0]))] else: vs_port_list = [int(v['services'][0]['port']) for v in avi_config['VirtualService'] if ((v['ip_address']['addr'] == vs['ip_address']['addr']) and ('port_range_end' not in v['services'][0]))] if vs_port_list: min_port = min(vs_port_list) max_port = max(vs_port_list) vs['services'][0]['port_range_end'] = str((min_port - 1)) service = {'enable_ssl': False, 'port': str((max_port + 1)), 'port_range_end': '65535'} vs['services'].append(service)
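The port-range arithmetic in get_vs_if_shared_vip is easier to follow with concrete numbers: for a wildcard virtual service sharing its VIP with explicit services on ports 80 and 443, the wildcard's existing service is capped just below the lowest explicit port and a second service covers everything above the highest one (illustrative values only):

    # explicit ports on the shared VIP: [80, 443] -> min_port = 80, max_port = 443
    # wildcard service 1: keeps its port, port_range_end = '79'   (min_port - 1)
    # wildcard service 2: port = '444' (max_port + 1), port_range_end = '65535'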
def add_prop_for_http_profile(self, profile_name, avi_config, sysdict, prop_dict): '\n This method adds the additional attribute to application profile\n :param profile_name: name of application profile\n :param avi_config: avi config dict\n :param sysdict: system/baseline config dict\n :param prop_dict: property dict\n :return:\n ' profile = [p for p in (avi_config['ApplicationProfile'] + sysdict['ApplicationProfile']) if (p['name'] == profile_name)] if profile: if prop_dict.get('clttimeout'): profile[0]['client_header_timeout'] = int(prop_dict['clttimeout']) profile[0]['client_body_timeout'] = int(prop_dict['clttimeout']) if prop_dict.get('xff_enabled'): if profile[0].get('http_profile'): profile[0]['http_profile'].update({'xff_enabled': True, 'xff_alternate_name': 'X-Forwarded-For'}) else: profile[0].update({'http_profile': {'xff_enabled': True, 'xff_alternate_name': 'X-Forwarded-For'}}) if profile[0].get('http_profile'): profile[0]['http_profile'].update({'x_forwarded_proto_enabled': True, 'hsts_enabled': True, 'http_to_https': True, 'httponly_enabled': True, 'hsts_max_age': 365, 'server_side_redirect_to_https': True, 'secure_cookie_enabled': True}) else: profile[0].update({'http_profile': {'x_forwarded_proto_enabled': True, 'hsts_enabled': True, 'http_to_https': True, 'httponly_enabled': True, 'hsts_max_age': 365, 'server_side_redirect_to_https': True, 'secure_cookie_enabled': True}})
-2,461,371,585,625,357,300
This method adds the additional attribute to application profile :param profile_name: name of application profile :param avi_config: avi config dict :param sysdict: system/baseline config dict :param prop_dict: property dict :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
add_prop_for_http_profile
avinetworks/alb-sdk
python
def add_prop_for_http_profile(self, profile_name, avi_config, sysdict, prop_dict): '\n This method adds the additional attribute to application profile\n :param profile_name: name of application profile\n :param avi_config: avi config dict\n :param sysdict: system/baseline config dict\n :param prop_dict: property dict\n :return:\n ' profile = [p for p in (avi_config['ApplicationProfile'] + sysdict['ApplicationProfile']) if (p['name'] == profile_name)] if profile: if prop_dict.get('clttimeout'): profile[0]['client_header_timeout'] = int(prop_dict['clttimeout']) profile[0]['client_body_timeout'] = int(prop_dict['clttimeout']) if prop_dict.get('xff_enabled'): if profile[0].get('http_profile'): profile[0]['http_profile'].update({'xff_enabled': True, 'xff_alternate_name': 'X-Forwarded-For'}) else: profile[0].update({'http_profile': {'xff_enabled': True, 'xff_alternate_name': 'X-Forwarded-For'}}) if profile[0].get('http_profile'): profile[0]['http_profile'].update({'x_forwarded_proto_enabled': True, 'hsts_enabled': True, 'http_to_https': True, 'httponly_enabled': True, 'hsts_max_age': 365, 'server_side_redirect_to_https': True, 'secure_cookie_enabled': True}) else: profile[0].update({'http_profile': {'x_forwarded_proto_enabled': True, 'hsts_enabled': True, 'http_to_https': True, 'httponly_enabled': True, 'hsts_max_age': 365, 'server_side_redirect_to_https': True, 'secure_cookie_enabled': True}})
def object_exist(self, object_type, name, avi_config): '\n This method returns true if object exists in avi config dict else false\n :param object_type:\n :param name:\n :param avi_config:\n :return:\n ' data = avi_config[object_type] obj_list = [obj for obj in data if (obj['name'] == name)] if obj_list: return True return False
-3,865,873,546,458,717,700
This method returns true if object exists in avi config dict else false :param object_type: :param name: :param avi_config: :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
object_exist
avinetworks/alb-sdk
python
def object_exist(self, object_type, name, avi_config): '\n This method returns true if object exists in avi config dict else false\n :param object_type:\n :param name:\n :param avi_config:\n :return:\n ' data = avi_config[object_type] obj_list = [obj for obj in data if (obj['name'] == name)] if obj_list: return True return False
def is_shared_same_vip(self, vs, cs_vs_list, avi_config, tenant_name, cloud_name, tenant_ref, cloud_ref, controller_version, prefix, input_vrf=None): '\n This function check for vs sharing same vip\n :param vs: Name of vs\n :param cs_vs_list: List of vs\n :param avi_config: avi config dict\n :param tenant_name: Name of tenant\n :param cloud_name: Name of cloud\n :param tenant_ref: Reference of tenant\n :param cloud_ref: Reference of cloud\n :param controller_version: controller version\n :param prefix: prefix for objects\n :param input_vrf: VRF name input\n :return: None\n ' if (parse_version(controller_version) >= parse_version('17.1')): shared_vip = [v for v in cs_vs_list if ((v['vsvip_ref'].split('name=')[1].split('-')[0] == vs['vsvip_ref'].split('name=')[1].split('-')[0]) and (v['services'][0]['port'] == vs['services'][0]['port']))] else: shared_vip = [v for v in cs_vs_list if ((v['ip_address']['addr'] == vs['ip_address']['addr']) and (v['services'][0]['port'] == vs['services'][0]['port']))] if input_vrf: vrf_ref = self.get_object_ref(input_vrf, 'vrfcontext', cloud_name=cloud_name) else: vrf_ref = self.get_object_ref('global', 'vrfcontext', cloud_name=cloud_name) if shared_vip: return True elif (parse_version(controller_version) >= parse_version('17.1')): vsvip = vs['vsvip_ref'].split('name=')[1].split('-')[0] self.create_update_vsvip(vsvip, avi_config['VsVip'], tenant_ref, cloud_ref, prefix=prefix, vrf_ref=vrf_ref) name = (vsvip + '-vsvip') if prefix: name = (((prefix + '-') + vsvip) + '-vsvip') updated_vsvip_ref = self.get_object_ref(name, 'vsvip', tenant_name, cloud_name) vs['vsvip_ref'] = updated_vsvip_ref
6,344,653,761,546,128,000
This function check for vs sharing same vip :param vs: Name of vs :param cs_vs_list: List of vs :param avi_config: avi config dict :param tenant_name: Name of tenant :param cloud_name: Name of cloud :param tenant_ref: Reference of tenant :param cloud_ref: Reference of cloud :param controller_version: controller version :param prefix: prefix for objects :param input_vrf: VRF name input :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
is_shared_same_vip
avinetworks/alb-sdk
python
def is_shared_same_vip(self, vs, cs_vs_list, avi_config, tenant_name, cloud_name, tenant_ref, cloud_ref, controller_version, prefix, input_vrf=None): '\n This function check for vs sharing same vip\n :param vs: Name of vs\n :param cs_vs_list: List of vs\n :param avi_config: avi config dict\n :param tenant_name: Name of tenant\n :param cloud_name: Name of cloud\n :param tenant_ref: Reference of tenant\n :param cloud_ref: Reference of cloud\n :param controller_version: controller version\n :param prefix: prefix for objects\n :param input_vrf: VRF name input\n :return: None\n ' if (parse_version(controller_version) >= parse_version('17.1')): shared_vip = [v for v in cs_vs_list if ((v['vsvip_ref'].split('name=')[1].split('-')[0] == vs['vsvip_ref'].split('name=')[1].split('-')[0]) and (v['services'][0]['port'] == vs['services'][0]['port']))] else: shared_vip = [v for v in cs_vs_list if ((v['ip_address']['addr'] == vs['ip_address']['addr']) and (v['services'][0]['port'] == vs['services'][0]['port']))] if input_vrf: vrf_ref = self.get_object_ref(input_vrf, 'vrfcontext', cloud_name=cloud_name) else: vrf_ref = self.get_object_ref('global', 'vrfcontext', cloud_name=cloud_name) if shared_vip: return True elif (parse_version(controller_version) >= parse_version('17.1')): vsvip = vs['vsvip_ref'].split('name=')[1].split('-')[0] self.create_update_vsvip(vsvip, avi_config['VsVip'], tenant_ref, cloud_ref, prefix=prefix, vrf_ref=vrf_ref) name = (vsvip + '-vsvip') if prefix: name = (((prefix + '-') + vsvip) + '-vsvip') updated_vsvip_ref = self.get_object_ref(name, 'vsvip', tenant_name, cloud_name) vs['vsvip_ref'] = updated_vsvip_ref
def clone_http_policy_set(self, policy, prefix, avi_config, tenant_name, cloud_name, used_poolgrp_ref, userprefix=None): '\n This function clone pool reused in context switching rule\n :param policy: name of policy\n :param prefix: clone for\n :param avi_config: avi config dict\n :param tenant_name:\n :param cloud_name:\n :param used_poolgrp_ref:\n :param userprefix: prefix for objects\n :return:None\n ' policy_name = policy['name'] clone_policy = copy.deepcopy(policy) for rule in clone_policy['http_request_policy']['rules']: if (rule.get('switching_action', None) and rule['switching_action'].get('pool_group_ref')): pool_group_ref = rule['switching_action']['pool_group_ref'].split('&')[1].split('=')[1] if (pool_group_ref in used_poolgrp_ref): LOG.debug('Cloned the pool group for policy %s', policy_name) pool_group_ref = self.clone_pool_group(pool_group_ref, policy_name, avi_config, tenant_name, cloud_name, userprefix=userprefix) if pool_group_ref: updated_pool_group_ref = self.get_object_ref(pool_group_ref, OBJECT_TYPE_POOL_GROUP, tenant_name, cloud_name) rule['switching_action']['pool_group_ref'] = updated_pool_group_ref clone_policy['name'] += ('-%s-clone' % prefix) return clone_policy
6,982,211,706,178,502,000
This function clone pool reused in context switching rule :param policy: name of policy :param prefix: clone for :param avi_config: avi config dict :param tenant_name: :param cloud_name: :param used_poolgrp_ref: :param userprefix: prefix for objects :return:None
python/avi/migrationtools/netscaler_converter/ns_util.py
clone_http_policy_set
avinetworks/alb-sdk
python
def clone_http_policy_set(self, policy, prefix, avi_config, tenant_name, cloud_name, used_poolgrp_ref, userprefix=None): '\n This function clone pool reused in context switching rule\n :param policy: name of policy\n :param prefix: clone for\n :param avi_config: avi config dict\n :param tenant_name:\n :param cloud_name:\n :param used_poolgrp_ref:\n :param userprefix: prefix for objects\n :return:None\n ' policy_name = policy['name'] clone_policy = copy.deepcopy(policy) for rule in clone_policy['http_request_policy']['rules']: if (rule.get('switching_action', None) and rule['switching_action'].get('pool_group_ref')): pool_group_ref = rule['switching_action']['pool_group_ref'].split('&')[1].split('=')[1] if (pool_group_ref in used_poolgrp_ref): LOG.debug('Cloned the pool group for policy %s', policy_name) pool_group_ref = self.clone_pool_group(pool_group_ref, policy_name, avi_config, tenant_name, cloud_name, userprefix=userprefix) if pool_group_ref: updated_pool_group_ref = self.get_object_ref(pool_group_ref, OBJECT_TYPE_POOL_GROUP, tenant_name, cloud_name) rule['switching_action']['pool_group_ref'] = updated_pool_group_ref clone_policy['name'] += ('-%s-clone' % prefix) return clone_policy
def set_rules_index_for_http_policy_set(self, avi_config): '\n Update index as per avi protobuf requirements\n :param avi_config: avi config dict\n :return: None\n ' http_policy_sets = avi_config['HTTPPolicySet'] for http_policy_set in http_policy_sets: rules = http_policy_set['http_request_policy']['rules'] rules = sorted(rules, key=(lambda d: int(d['index']))) for (index, rule) in enumerate(rules): rule['index'] = index
1,613,698,748,301,972,700
Update index as per avi protobuf requirements :param avi_config: avi config dict :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
set_rules_index_for_http_policy_set
avinetworks/alb-sdk
python
def set_rules_index_for_http_policy_set(self, avi_config): '\n Update index as per avi protobuf requirements\n :param avi_config: avi config dict\n :return: None\n ' http_policy_sets = avi_config['HTTPPolicySet'] for http_policy_set in http_policy_sets: rules = http_policy_set['http_request_policy']['rules'] rules = sorted(rules, key=(lambda d: int(d['index']))) for (index, rule) in enumerate(rules): rule['index'] = index
def get_netscalar_full_command(self, netscalar_command, obj): '\n Generate netscaler command from the parse dict\n :param netscalar_command: name of command\n :param obj: object with attributes\n :return: Full command\n ' for attr in obj['attrs']: netscalar_command += (' %s' % attr) for key in obj: if isinstance(obj[key], list): continue if (key == 'line_no'): continue netscalar_command += (' -%s %s' % (key, obj[key])) return netscalar_command
6,395,164,331,100,777,000
Generate netscaler command from the parse dict :param netscalar_command: name of command :param obj: object with attributes :return: Full command
python/avi/migrationtools/netscaler_converter/ns_util.py
get_netscalar_full_command
avinetworks/alb-sdk
python
def get_netscalar_full_command(self, netscalar_command, obj): '\n Generate netscaler command from the parse dict\n :param netscalar_command: name of command\n :param obj: object with attributes\n :return: Full command\n ' for attr in obj['attrs']: netscalar_command += (' %s' % attr) for key in obj: if isinstance(obj[key], list): continue if (key == 'line_no'): continue netscalar_command += (' -%s %s' % (key, obj[key])) return netscalar_command
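get_netscalar_full_command reverses the parse: positional attrs are appended first, then every scalar key (other than line_no) is emitted as a '-key value' option. A hypothetical parsed object and the command it would reproduce:

    obj = {'attrs': ['vs1', 'HTTP', '10.0.0.1', '80'],
           'lbMethod': 'ROUNDROBIN', 'line_no': 7}
    # get_netscalar_full_command('add lb vserver', obj) ->
    # 'add lb vserver vs1 HTTP 10.0.0.1 80 -lbMethod ROUNDROBIN'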
def clone_pool_group(self, pg_name, cloned_for, avi_config, tenant_name, cloud_name, userprefix=None): '\n Used for cloning shared pool group.\n :param pg_name: pool group name\n :param cloned_for: clone for\n :param avi_config: avi config dict\n :param tenant_name:\n :param cloud_name:\n :param userprefix: prefix for objects\n :return: None\n ' pool_groups = [pg for pg in avi_config['PoolGroup'] if (pg['name'] == pg_name)] if pool_groups: pool_group = copy.deepcopy(pool_groups[0]) pool_group_name = re.sub('[:]', '-', ('%s-%s' % (pg_name, cloned_for))) pool_group['name'] = pool_group_name for member in pool_group.get('members', []): pool_ref = self.get_name(member['pool_ref']) pool_ref = self.clone_pool(pool_ref, cloned_for, avi_config, userprefix=userprefix) if pool_ref: updated_pool_ref = self.get_object_ref(pool_ref, OBJECT_TYPE_POOL, tenant_name, cloud_name) member['pool_ref'] = updated_pool_ref avi_config['PoolGroup'].append(pool_group) LOG.info(('Same pool group reference to other object. Clone Pool group %s for %s' % (pg_name, cloned_for))) return pool_group['name'] return None
-6,270,595,012,755,129,000
Used for cloning shared pool group. :param pg_name: pool group name :param cloned_for: clone for :param avi_config: avi config dict :param tenant_name: :param cloud_name: :param userprefix: prefix for objects :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
clone_pool_group
avinetworks/alb-sdk
python
def clone_pool_group(self, pg_name, cloned_for, avi_config, tenant_name, cloud_name, userprefix=None): '\n Used for cloning shared pool group.\n :param pg_name: pool group name\n :param cloned_for: clone for\n :param avi_config: avi config dict\n :param tenant_name:\n :param cloud_name:\n :param userprefix: prefix for objects\n :return: None\n ' pool_groups = [pg for pg in avi_config['PoolGroup'] if (pg['name'] == pg_name)] if pool_groups: pool_group = copy.deepcopy(pool_groups[0]) pool_group_name = re.sub('[:]', '-', ('%s-%s' % (pg_name, cloned_for))) pool_group['name'] = pool_group_name for member in pool_group.get('members', []): pool_ref = self.get_name(member['pool_ref']) pool_ref = self.clone_pool(pool_ref, cloned_for, avi_config, userprefix=userprefix) if pool_ref: updated_pool_ref = self.get_object_ref(pool_ref, OBJECT_TYPE_POOL, tenant_name, cloud_name) member['pool_ref'] = updated_pool_ref avi_config['PoolGroup'].append(pool_group) LOG.info(('Same pool group reference to other object. Clone Pool group %s for %s' % (pg_name, cloned_for))) return pool_group['name'] return None
def remove_http_mon_from_pool(self, avi_config, pool, sysdict): '\n This function is used for removing http type health monitor from https\n vs.\n :param avi_config: avi config dict\n :param pool: name of pool\n :param sysdict: baseline/system config dict\n :return: None\n ' if pool: hm_refs = copy.deepcopy(pool['health_monitor_refs']) for hm_ref in hm_refs: hm = [h for h in (sysdict['HealthMonitor'] + avi_config['HealthMonitor']) if (h['name'] == hm_ref)] if (hm and (hm[0]['type'] == 'HEALTH_MONITOR_HTTP')): pool['health_monitor_refs'].remove(hm_ref) LOG.warning(('Skipping %s this reference from %s pool because of health monitor type is HTTP and VS has ssl profile.' % (hm_ref, pool['name'])))
4,120,604,159,898,324,000
This function is used for removing http type health monitor from https vs. :param avi_config: avi config dict :param pool: name of pool :param sysdict: baseline/system config dict :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
remove_http_mon_from_pool
avinetworks/alb-sdk
python
def remove_http_mon_from_pool(self, avi_config, pool, sysdict): '\n This function is used for removing http type health monitor from https\n vs.\n :param avi_config: avi config dict\n :param pool: name of pool\n :param sysdict: baseline/system config dict\n :return: None\n ' if pool: hm_refs = copy.deepcopy(pool['health_monitor_refs']) for hm_ref in hm_refs: hm = [h for h in (sysdict['HealthMonitor'] + avi_config['HealthMonitor']) if (h['name'] == hm_ref)] if (hm and (hm[0]['type'] == 'HEALTH_MONITOR_HTTP')): pool['health_monitor_refs'].remove(hm_ref) LOG.warning(('Skipping %s this reference from %s pool because of health monitor type is HTTP and VS has ssl profile.' % (hm_ref, pool['name'])))
def remove_https_mon_from_pool(self, avi_config, pool, sysdict): '\n This function is used for removing https type health monitor from http\n vs.\n :param avi_config: avi config dict\n :param pool: name of pool\n :param sysdict: baseline/system config dict\n :return: None\n ' if pool: hm_refs = copy.deepcopy(pool['health_monitor_refs']) for hm_ref in hm_refs: hm = [h for h in (sysdict['HealthMonitor'] + avi_config['HealthMonitor']) if (h['name'] == hm_ref)] if (hm and (hm[0]['type'] == 'HEALTH_MONITOR_HTTPS')): pool['health_monitor_refs'].remove(hm_ref) LOG.warning(('Skipping %s this reference from %s pool because of health monitor type is HTTPS and VS has no ssl profile.' % (hm_ref, pool['name'])))
4,178,237,875,809,718,300
This function removes HTTPS-type health monitors from a pool that is referenced by an HTTP VS (a VS without an SSL profile). :param avi_config: avi config dict :param pool: pool object :param sysdict: baseline/system config dict :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
remove_https_mon_from_pool
avinetworks/alb-sdk
python
def remove_https_mon_from_pool(self, avi_config, pool, sysdict): '\n This function is used for removing https type health monitor from http\n vs.\n :param avi_config: avi config dict\n :param pool: name of pool\n :param sysdict: baseline/system config dict\n :return: None\n ' if pool: hm_refs = copy.deepcopy(pool['health_monitor_refs']) for hm_ref in hm_refs: hm = [h for h in (sysdict['HealthMonitor'] + avi_config['HealthMonitor']) if (h['name'] == hm_ref)] if (hm and (hm[0]['type'] == 'HEALTH_MONITOR_HTTPS')): pool['health_monitor_refs'].remove(hm_ref) LOG.warning(('Skipping %s this reference from %s pool because of health monitor type is HTTPS and VS has no ssl profile.' % (hm_ref, pool['name'])))
def update_application_profile(self, profile_name, pki_profile_ref, tenant_ref, name, avi_config, sysdict): '\n This functions defines to update application profile with pki profile if\n application profile exist if not create new http profile with pki profile\n :param profile_name: name of Http profile\n :param pki_profile_ref: ref of PKI profile\n :param tenant_ref: tenant ref\n :param name: name of virtual service\n :param avi_config: Dict of AVi config\n :param sysdict: baseline/system config\n :return: Http profile\n ' try: if profile_name: app_profile = [p for p in (sysdict['ApplicationProfile'] + avi_config['ApplicationProfile']) if (p['name'] == profile_name)] if app_profile: app_profile[0]['http_profile']['pki_profile_ref'] = pki_profile_ref LOG.debug(('Added PKI profile to application profile successfully : %s' % (profile_name, pki_profile_ref))) else: app_profile = dict() app_profile['name'] = (name + ('-%s-%s' % (random.randrange(0, 1000), ns_constants.PLACE_HOLDER_STR))) app_profile['tenant_ref'] = tenant_ref app_profile['type'] = 'APPLICATION_PROFILE_TYPE_HTTP' http_profile = dict() http_profile['connection_multiplexing_enabled'] = False http_profile['xff_enabled'] = False http_profile['websockets_enabled'] = False http_profile['pki_profile_ref'] = pki_profile_ref app_profile['http_profile'] = http_profile avi_config['ApplicationProfile'].append(app_profile) LOG.debug(('Conversion completed successfully for httpProfile: %s' % app_profile['name'])) return app_profile['name'] except: update_count('error') LOG.error('Error in convertion of httpProfile', exc_info=True)
8,957,387,572,475,818,000
Updates an existing application profile with the given PKI profile; if no such profile exists, creates a new HTTP application profile that carries the PKI profile. :param profile_name: name of HTTP profile :param pki_profile_ref: ref of PKI profile :param tenant_ref: tenant ref :param name: name of virtual service :param avi_config: dict of Avi config :param sysdict: baseline/system config :return: name of the HTTP application profile
python/avi/migrationtools/netscaler_converter/ns_util.py
update_application_profile
avinetworks/alb-sdk
python
def update_application_profile(self, profile_name, pki_profile_ref, tenant_ref, name, avi_config, sysdict): '\n This functions defines to update application profile with pki profile if\n application profile exist if not create new http profile with pki profile\n :param profile_name: name of Http profile\n :param pki_profile_ref: ref of PKI profile\n :param tenant_ref: tenant ref\n :param name: name of virtual service\n :param avi_config: Dict of AVi config\n :param sysdict: baseline/system config\n :return: Http profile\n ' try: if profile_name: app_profile = [p for p in (sysdict['ApplicationProfile'] + avi_config['ApplicationProfile']) if (p['name'] == profile_name)] if app_profile: app_profile[0]['http_profile']['pki_profile_ref'] = pki_profile_ref LOG.debug(('Added PKI profile to application profile successfully : %s' % (profile_name, pki_profile_ref))) else: app_profile = dict() app_profile['name'] = (name + ('-%s-%s' % (random.randrange(0, 1000), ns_constants.PLACE_HOLDER_STR))) app_profile['tenant_ref'] = tenant_ref app_profile['type'] = 'APPLICATION_PROFILE_TYPE_HTTP' http_profile = dict() http_profile['connection_multiplexing_enabled'] = False http_profile['xff_enabled'] = False http_profile['websockets_enabled'] = False http_profile['pki_profile_ref'] = pki_profile_ref app_profile['http_profile'] = http_profile avi_config['ApplicationProfile'].append(app_profile) LOG.debug(('Conversion completed successfully for httpProfile: %s' % app_profile['name'])) return app_profile['name'] except: update_count('error') LOG.error('Error in convertion of httpProfile', exc_info=True)
def convert_persistance_prof(self, vs, name, tenant_ref): '\n This function defines that it convert the persistent profile and\n return that profile\n :param vs: object of lb vs or pool\n :param name: name of application persteance profile\n :param tenant_ref: reference of tenant\n :return: application persistent profile\n ' profile = None persistenceType = vs.get('persistenceType', '') if (persistenceType == 'COOKIEINSERT'): timeout = vs.get('timeout', 2) profile = {'http_cookie_persistence_profile': {'always_send_cookie': False}, 'persistence_type': 'PERSISTENCE_TYPE_HTTP_COOKIE', 'server_hm_down_recovery': 'HM_DOWN_PICK_NEW_SERVER', 'name': name} if (int(timeout) > 0): profile['http_cookie_persistence_profile']['timeout'] = timeout elif (persistenceType == 'SOURCEIP'): timeout = vs.get('timeout', 120) timeout = (int(timeout) / 60) if (timeout < 1): timeout = 1 profile = {'server_hm_down_recovery': 'HM_DOWN_PICK_NEW_SERVER', 'persistence_type': 'PERSISTENCE_TYPE_CLIENT_IP_ADDRESS', 'ip_persistence_profile': {'ip_persistent_timeout': timeout}, 'name': name} elif (persistenceType == 'SSLSESSION'): profile = {'server_hm_down_recovery': 'HM_DOWN_PICK_NEW_SERVER', 'persistence_type': 'PERSISTENCE_TYPE_TLS', 'name': name} profile['tenant_ref'] = tenant_ref return profile
-4,700,764,150,025,277,000
Converts the NetScaler persistence settings of an lb vserver into an Avi application persistence profile and returns it. :param vs: object of lb vs or pool :param name: name of application persistence profile :param tenant_ref: reference of tenant :return: application persistence profile
python/avi/migrationtools/netscaler_converter/ns_util.py
convert_persistance_prof
avinetworks/alb-sdk
python
def convert_persistance_prof(self, vs, name, tenant_ref): '\n This function defines that it convert the persistent profile and\n return that profile\n :param vs: object of lb vs or pool\n :param name: name of application persteance profile\n :param tenant_ref: reference of tenant\n :return: application persistent profile\n ' profile = None persistenceType = vs.get('persistenceType', '') if (persistenceType == 'COOKIEINSERT'): timeout = vs.get('timeout', 2) profile = {'http_cookie_persistence_profile': {'always_send_cookie': False}, 'persistence_type': 'PERSISTENCE_TYPE_HTTP_COOKIE', 'server_hm_down_recovery': 'HM_DOWN_PICK_NEW_SERVER', 'name': name} if (int(timeout) > 0): profile['http_cookie_persistence_profile']['timeout'] = timeout elif (persistenceType == 'SOURCEIP'): timeout = vs.get('timeout', 120) timeout = (int(timeout) / 60) if (timeout < 1): timeout = 1 profile = {'server_hm_down_recovery': 'HM_DOWN_PICK_NEW_SERVER', 'persistence_type': 'PERSISTENCE_TYPE_CLIENT_IP_ADDRESS', 'ip_persistence_profile': {'ip_persistent_timeout': timeout}, 'name': name} elif (persistenceType == 'SSLSESSION'): profile = {'server_hm_down_recovery': 'HM_DOWN_PICK_NEW_SERVER', 'persistence_type': 'PERSISTENCE_TYPE_TLS', 'name': name} profile['tenant_ref'] = tenant_ref return profile
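A sketch of the SOURCEIP branch above with a hypothetical 300-second NetScaler timeout; the profile name and tenant ref are placeholders, and the integer division mirrors the seconds-to-minutes conversion (floored at 1):

timeout = int(300) // 60
if timeout < 1:
    timeout = 1
profile = {
    'server_hm_down_recovery': 'HM_DOWN_PICK_NEW_SERVER',
    'persistence_type': 'PERSISTENCE_TYPE_CLIENT_IP_ADDRESS',
    'ip_persistence_profile': {'ip_persistent_timeout': timeout},
    'name': 'source-ip-persistence',          # hypothetical profile name
    'tenant_ref': '/api/tenant/?name=admin',  # hypothetical tenant ref
}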
def update_status_target_lb_vs_to_indirect(self, larget_lb_vs): '\n This function defines that update status for the target lb vserver as\n Indirect\n :param larget_lb_vs: name of target lb vserver\n :return: None\n ' global csv_writer_dict_list row = [row for row in csv_writer_dict_list if ((row['Object Name'] == larget_lb_vs) and (row['Netscaler Command'] == 'add lb vserver'))] if row: row[0]['Status'] = STATUS_INDIRECT
-5,964,540,455,396,743,000
Updates the status of the target lb vserver to Indirect in the conversion report. :param larget_lb_vs: name of target lb vserver :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
update_status_target_lb_vs_to_indirect
avinetworks/alb-sdk
python
def update_status_target_lb_vs_to_indirect(self, larget_lb_vs): '\n This function defines that update status for the target lb vserver as\n Indirect\n :param larget_lb_vs: name of target lb vserver\n :return: None\n ' global csv_writer_dict_list row = [row for row in csv_writer_dict_list if ((row['Object Name'] == larget_lb_vs) and (row['Netscaler Command'] == 'add lb vserver'))] if row: row[0]['Status'] = STATUS_INDIRECT
def create_http_policy_set_for_redirect_url(self, vs_obj, redirect_uri, avi_config, tenant_name, tenant_ref, enable_ssl): '\n This function defines that create http policy for redirect url\n :param vs_obj: object of VS\n :param redirect_uri: redirect uri\n :param avi_config: dict of AVi\n :param tenant_name: name of tenant\n :param tenant_ref: tenant ref\n :param enable_ssl: flag for enabling ssl\n :return: None\n ' redirect_uri = str(redirect_uri).replace('"', '') action = self.build_redirect_action_dict(redirect_uri, enable_ssl) policy_obj = {'name': (vs_obj['name'] + '-redirect-policy'), 'tenant_ref': tenant_ref, 'http_request_policy': {'rules': [{'index': 0, 'name': (vs_obj['name'] + '-redirect-policy-rule-0'), 'match': {'path': {'match_case': 'INSENSITIVE', 'match_str': ['/'], 'match_criteria': 'EQUALS'}}, 'redirect_action': action}]}} updated_http_policy_ref = self.get_object_ref(policy_obj['name'], OBJECT_TYPE_HTTP_POLICY_SET, tenant_name) http_policies = {'index': 11, 'http_policy_set_ref': updated_http_policy_ref} if (not vs_obj.get('http_policies')): vs_obj['http_policies'] = [] else: ind = max([policies['index'] for policies in vs_obj['http_policies']]) http_policies['index'] = (ind + 1) vs_obj['http_policies'].append(http_policies) avi_config['HTTPPolicySet'].append(policy_obj)
2,394,880,107,976,594,400
Creates an HTTP policy set that redirects requests to the given URL and attaches it to the VS. :param vs_obj: object of VS :param redirect_uri: redirect uri :param avi_config: dict of Avi config :param tenant_name: name of tenant :param tenant_ref: tenant ref :param enable_ssl: flag for enabling ssl :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
create_http_policy_set_for_redirect_url
avinetworks/alb-sdk
python
def create_http_policy_set_for_redirect_url(self, vs_obj, redirect_uri, avi_config, tenant_name, tenant_ref, enable_ssl): '\n This function defines that create http policy for redirect url\n :param vs_obj: object of VS\n :param redirect_uri: redirect uri\n :param avi_config: dict of AVi\n :param tenant_name: name of tenant\n :param tenant_ref: tenant ref\n :param enable_ssl: flag for enabling ssl\n :return: None\n ' redirect_uri = str(redirect_uri).replace('"', '') action = self.build_redirect_action_dict(redirect_uri, enable_ssl) policy_obj = {'name': (vs_obj['name'] + '-redirect-policy'), 'tenant_ref': tenant_ref, 'http_request_policy': {'rules': [{'index': 0, 'name': (vs_obj['name'] + '-redirect-policy-rule-0'), 'match': {'path': {'match_case': 'INSENSITIVE', 'match_str': ['/'], 'match_criteria': 'EQUALS'}}, 'redirect_action': action}]}} updated_http_policy_ref = self.get_object_ref(policy_obj['name'], OBJECT_TYPE_HTTP_POLICY_SET, tenant_name) http_policies = {'index': 11, 'http_policy_set_ref': updated_http_policy_ref} if (not vs_obj.get('http_policies')): vs_obj['http_policies'] = [] else: ind = max([policies['index'] for policies in vs_obj['http_policies']]) http_policies['index'] = (ind + 1) vs_obj['http_policies'].append(http_policies) avi_config['HTTPPolicySet'].append(policy_obj)
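The index handling above can be shown in isolation; assuming a hypothetical VS dict that already carries one policy entry, the new entry receives the next free index:

vs_obj = {'name': 'vs-1', 'http_policies': [{'index': 11, 'http_policy_set_ref': 'ref-a'}]}
new_entry = {'index': 11, 'http_policy_set_ref': 'ref-b'}
if not vs_obj.get('http_policies'):
    vs_obj['http_policies'] = []
else:
    new_entry['index'] = max(p['index'] for p in vs_obj['http_policies']) + 1
vs_obj['http_policies'].append(new_entry)
# vs_obj['http_policies'] now holds entries with indices 11 and 12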
def clean_virtual_service_from_avi_config(self, avi_config, controller_version): '\n This function defines that clean up vs which has vip 0.0.0.0\n :param avi_config: dict of AVI\n :param controller_version:\n :return: None\n ' vs_list = copy.deepcopy(avi_config['VirtualService']) avi_config['VirtualService'] = [] if (parse_version(controller_version) >= parse_version('17.1')): avi_config['VirtualService'] = [vs for vs in vs_list if (vs['vsvip_ref'].split('name=')[1].split('-')[0] != '0.0.0.0')] else: avi_config['VirtualService'] = [vs for vs in vs_list if (vs['ip_address']['addr'] != '0.0.0.0')]
927,490,409,122,392,400
Removes virtual services whose VIP is 0.0.0.0 from the Avi configuration. :param avi_config: dict of Avi config :param controller_version: target controller version :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
clean_virtual_service_from_avi_config
avinetworks/alb-sdk
python
def clean_virtual_service_from_avi_config(self, avi_config, controller_version): '\n This function defines that clean up vs which has vip 0.0.0.0\n :param avi_config: dict of AVI\n :param controller_version:\n :return: None\n ' vs_list = copy.deepcopy(avi_config['VirtualService']) avi_config['VirtualService'] = [] if (parse_version(controller_version) >= parse_version('17.1')): avi_config['VirtualService'] = [vs for vs in vs_list if (vs['vsvip_ref'].split('name=')[1].split('-')[0] != '0.0.0.0')] else: avi_config['VirtualService'] = [vs for vs in vs_list if (vs['ip_address']['addr'] != '0.0.0.0')]
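A sketch of the 17.1+ branch with hypothetical VS entries; the VIP is recovered from the vsvip_ref name and any VS with a 0.0.0.0 VIP is dropped:

vs_list = [
    {'name': 'vs-ok',   'vsvip_ref': '/api/vsvip/?name=10.1.1.1-vsvip'},
    {'name': 'vs-drop', 'vsvip_ref': '/api/vsvip/?name=0.0.0.0-vsvip'},
]
kept = [vs for vs in vs_list
        if vs['vsvip_ref'].split('name=')[1].split('-')[0] != '0.0.0.0']
# kept contains only 'vs-ok'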
def parse_url(self, url): '\n This method returns the parsed url\n :param url: url that need to be parsed\n :return:\n ' parsed = urlparse(url) return parsed
1,088,853,979,099,526,800
This method returns the parsed URL. :param url: url that needs to be parsed :return: parsed URL object
python/avi/migrationtools/netscaler_converter/ns_util.py
parse_url
avinetworks/alb-sdk
python
def parse_url(self, url): '\n This method returns the parsed url\n :param url: url that need to be parsed\n :return:\n ' parsed = urlparse(url) return parsed
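parse_url is a thin wrapper around urlparse; a direct call shows what it returns (the Python 3 import path is assumed here; the converter may import urlparse differently):

from urllib.parse import urlparse

parsed = urlparse('https://www.example.com/path?x=1')
# parsed.scheme == 'https', parsed.hostname == 'www.example.com'
# parsed.path == '/path', parsed.query == 'x=1'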
def format_string_to_json(self, avi_string): '\n This function defines that it convert string into json format to\n convert into dict\n :param avi_string: string to be converted\n :return: Return converted string\n ' avi_string = avi_string.split('__/__')[0] return ast.literal_eval(avi_string)
-4,568,014,645,511,582,000
Converts the string representation of an Avi object (as stored in the report) back into a dict. :param avi_string: string to be converted :return: converted dict
python/avi/migrationtools/netscaler_converter/ns_util.py
format_string_to_json
avinetworks/alb-sdk
python
def format_string_to_json(self, avi_string): '\n This function defines that it convert string into json format to\n convert into dict\n :param avi_string: string to be converted\n :return: Return converted string\n ' avi_string = avi_string.split('__/__')[0] return ast.literal_eval(avi_string)
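A sketch of the conversion, assuming a hypothetical 'AVI Object' cell that carries a dict literal followed by a '__/__' suffix:

import ast

avi_string = "{'name': 'pool-1', 'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN'}__/__extra"
obj = ast.literal_eval(avi_string.split('__/__')[0])
# obj['name'] == 'pool-1'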
def get_csv_object_list(self, csv_writer_dict_list, command_list): '\n This method is used for getting csv object\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param command_list: List of netscaler commands\n :return: List of CSV rows\n ' csv_object = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] in command_list))] return csv_object
3,593,132,901,122,004,500
This method is used for getting csv object :param csv_writer_dict_list: CSV row of object from xlsx report :param command_list: List of netscaler commands :return: List of CSV rows
python/avi/migrationtools/netscaler_converter/ns_util.py
get_csv_object_list
avinetworks/alb-sdk
python
def get_csv_object_list(self, csv_writer_dict_list, command_list): '\n This method is used for getting csv object\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param command_list: List of netscaler commands\n :return: List of CSV rows\n ' csv_object = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] in command_list))] return csv_object
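A standalone sketch of the row selection with hypothetical report rows; the literal status strings stand in for the STATUS_PARTIAL / STATUS_SUCCESSFUL constants:

rows = [
    {'Status': 'SUCCESSFUL', 'Netscaler Command': 'add lb vserver'},
    {'Status': 'SKIPPED',    'Netscaler Command': 'add lb vserver'},
    {'Status': 'PARTIAL',    'Netscaler Command': 'add cs vserver'},
]
wanted = ['add lb vserver', 'add cs vserver']
csv_object = [r for r in rows
              if r['Status'] in ('PARTIAL', 'SUCCESSFUL')
              and r['Netscaler Command'] in wanted]
# csv_object keeps the first and third rows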
def get_csv_skipped_list(self, csv_object, name_of_object, vs_ref): '\n This method is used for getting skipped list from vs.\n :param csv_object: CSV row of object from xlsx report\n :param name_of_object: Name of Object\n :param vs_ref: Reference of VS\n :return: List of skipped settings\n ' skipped_list = [] for each_partial in csv_object: avi_object_json = self.format_string_to_json(each_partial['AVI Object']) if (avi_object_json.get('name') and (avi_object_json['name'] == name_of_object)): each_partial['VS Reference'] = vs_ref repls = (('[', ''), (']', '')) skipped_setting_csv = reduce((lambda a, kv: a.replace(*kv)), repls, each_partial['Skipped settings']) if skipped_setting_csv: skipped_list.append(skipped_setting_csv) return skipped_list
7,593,250,242,791,457,000
This method is used for getting skipped list from vs. :param csv_object: CSV row of object from xlsx report :param name_of_object: Name of Object :param vs_ref: Reference of VS :return: List of skipped settings
python/avi/migrationtools/netscaler_converter/ns_util.py
get_csv_skipped_list
avinetworks/alb-sdk
python
def get_csv_skipped_list(self, csv_object, name_of_object, vs_ref): '\n This method is used for getting skipped list from vs.\n :param csv_object: CSV row of object from xlsx report\n :param name_of_object: Name of Object\n :param vs_ref: Reference of VS\n :return: List of skipped settings\n ' skipped_list = [] for each_partial in csv_object: avi_object_json = self.format_string_to_json(each_partial['AVI Object']) if (avi_object_json.get('name') and (avi_object_json['name'] == name_of_object)): each_partial['VS Reference'] = vs_ref repls = (('[', ''), (']', '')) skipped_setting_csv = reduce((lambda a, kv: a.replace(*kv)), repls, each_partial['Skipped settings']) if skipped_setting_csv: skipped_list.append(skipped_setting_csv) return skipped_list
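The bracket-stripping step uses reduce over replacement pairs; in isolation, with a hypothetical 'Skipped settings' cell, it looks like this:

from functools import reduce

repls = (('[', ''), (']', ''))
skipped = "['persistenceBackup', 'cookieName']"
cleaned = reduce(lambda a, kv: a.replace(*kv), repls, skipped)
# cleaned == "'persistenceBackup', 'cookieName'"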
def get_ssl_key_and_cert_refs_skipped(self, csv_writer_dict_list, object_name, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param object_name: like virtual service or pool name\n :param vs_ref: Reference of VS\n :return: List of skipped settings\n ' ssl_key_cert = self.get_name(object_name['ssl_key_and_certificate_refs'][0]) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['bind ssl vserver', 'bind ssl service', 'bind ssl serviceGroup']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_key_cert, vs_ref) return (ssl_key_cert, skipped_list)
4,522,307,787,195,755,500
Returns the SSL key-and-certificate name together with its skipped settings from the CSV rows. :param csv_writer_dict_list: CSV rows of objects from xlsx report :param object_name: virtual service or pool object :param vs_ref: Reference of VS :return: tuple of object name and list of skipped settings
python/avi/migrationtools/netscaler_converter/ns_util.py
get_ssl_key_and_cert_refs_skipped
avinetworks/alb-sdk
python
def get_ssl_key_and_cert_refs_skipped(self, csv_writer_dict_list, object_name, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param object_name: like virtual service or pool name\n :param vs_ref: Reference of VS\n :return: List of skipped settings\n ' ssl_key_cert = self.get_name(object_name['ssl_key_and_certificate_refs'][0]) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['bind ssl vserver', 'bind ssl service', 'bind ssl serviceGroup']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_key_cert, vs_ref) return (ssl_key_cert, skipped_list)
def get_ssl_profile_skipped(self, csv_writer_dict_list, ssl_profile_ref, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param ssl_profile_ref: reference of ssl profile object\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' ssl_profile_name = self.get_name(ssl_profile_ref) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['set ssl vserver', 'set ssl service', 'set ssl serviceGroup']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name, vs_ref) return (ssl_profile_name, skipped_list)
-7,045,245,419,789,641,000
Returns the SSL profile name together with its skipped settings from the CSV rows. :param csv_writer_dict_list: CSV rows of objects from xlsx report :param ssl_profile_ref: reference of ssl profile object :param vs_ref: virtual service obj reference :return: tuple of profile name and list of skipped settings
python/avi/migrationtools/netscaler_converter/ns_util.py
get_ssl_profile_skipped
avinetworks/alb-sdk
python
def get_ssl_profile_skipped(self, csv_writer_dict_list, ssl_profile_ref, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param ssl_profile_ref: reference of ssl profile object\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' ssl_profile_name = self.get_name(ssl_profile_ref) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['set ssl vserver', 'set ssl service', 'set ssl serviceGroup']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name, vs_ref) return (ssl_profile_name, skipped_list)
def get_application_profile_skipped(self, csv_writer_dict_list, name_of_object, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param name_of_object: object name like pool name, etc\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' ssl_profile_name = self.get_name(name_of_object['application_profile_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add ns httpProfile']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name, vs_ref) return (ssl_profile_name, skipped_list)
-5,602,520,979,021,883,000
Returns the application profile name together with its skipped settings from the CSV rows. :param csv_writer_dict_list: CSV rows of objects from xlsx report :param name_of_object: object carrying the application profile ref (e.g. VS or pool) :param vs_ref: virtual service obj reference :return: tuple of profile name and list of skipped settings
python/avi/migrationtools/netscaler_converter/ns_util.py
get_application_profile_skipped
avinetworks/alb-sdk
python
def get_application_profile_skipped(self, csv_writer_dict_list, name_of_object, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: CSV row of object from xlsx report\n :param name_of_object: object name like pool name, etc\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' ssl_profile_name = self.get_name(name_of_object['application_profile_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add ns httpProfile']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name, vs_ref) return (ssl_profile_name, skipped_list)
def get_network_profile_skipped(self, csv_writer_dict_list, name_of_object, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list:List of add ns tcpProfile netscaler command rows\n :param name_of_object: object name like pool name, etc\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' ssl_profile_name = self.get_name(name_of_object['network_profile_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add ns tcpProfile']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name, vs_ref) return (ssl_profile_name, skipped_list)
-5,462,502,248,008,268,000
Returns the network profile name together with its skipped settings from the CSV rows. :param csv_writer_dict_list: list of 'add ns tcpProfile' netscaler command rows :param name_of_object: object carrying the network profile ref :param vs_ref: virtual service obj reference :return: tuple of profile name and list of skipped settings
python/avi/migrationtools/netscaler_converter/ns_util.py
get_network_profile_skipped
avinetworks/alb-sdk
python
def get_network_profile_skipped(self, csv_writer_dict_list, name_of_object, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list:List of add ns tcpProfile netscaler command rows\n :param name_of_object: object name like pool name, etc\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' ssl_profile_name = self.get_name(name_of_object['network_profile_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add ns tcpProfile']) skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name, vs_ref) return (ssl_profile_name, skipped_list)
def get_app_persistence_profile_skipped(self, csv_writer_dict_list, name_of_object, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: List of set lb group netscaler command rows\n :param name_of_object: object name like pool name, etc\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' app_persistence_profile_name = self.get_name(name_of_object['ssl_profile_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['set lb group']) skipped_list = self.get_csv_skipped_list(csv_object, app_persistence_profile_name, vs_ref) return (app_persistence_profile_name, skipped_list)
-8,380,088,319,087,735,000
Returns the application persistence profile name together with its skipped settings from the CSV rows. :param csv_writer_dict_list: list of 'set lb group' netscaler command rows :param name_of_object: object carrying the persistence profile ref :param vs_ref: virtual service obj reference :return: tuple of profile name and list of skipped settings
python/avi/migrationtools/netscaler_converter/ns_util.py
get_app_persistence_profile_skipped
avinetworks/alb-sdk
python
def get_app_persistence_profile_skipped(self, csv_writer_dict_list, name_of_object, vs_ref): '\n This functions defines that get the skipped list of CSV row\n :param csv_writer_dict_list: List of set lb group netscaler command rows\n :param name_of_object: object name like pool name, etc\n :param vs_ref: virtual service obj reference.\n :return: List of skipped settings\n ' app_persistence_profile_name = self.get_name(name_of_object['ssl_profile_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['set lb group']) skipped_list = self.get_csv_skipped_list(csv_object, app_persistence_profile_name, vs_ref) return (app_persistence_profile_name, skipped_list)
def get_pool_skipped_list(self, avi_config, pool_group_name, skipped_setting, csv_object, obj_name, csv_writer_dict_list, vs_ref): '\n This method is used for getting pool skipped list.\n :param avi_config: AVI dict\n :param pool_group_name: Name of Pool group\n :param skipped_setting: List of skipped settings\n :param csv_object: CSV row\n :param obj_name: Name of Object\n :param csv_writer_dict_list: List of bind lb vserver netscaler command\n rows\n :param vs_ref: vs object reference\n :return: List of skipped settings\n ' pool_group_object_ref = [pool_group_object_ref for pool_group_object_ref in avi_config['PoolGroup'] if (pool_group_object_ref['name'] == pool_group_name)] for pool_group in pool_group_object_ref: if ('members' in pool_group): for each_pool_ref in pool_group['members']: pool_name = self.get_name(each_pool_ref['pool_ref']) skipped_list = self.get_csv_skipped_list(csv_object, pool_name, vs_ref) if (len(skipped_list) > 0): skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['pool_skipped_list'] = skipped_list for pool_partial in csv_object: avi_object_json = self.format_string_to_json(pool_partial['AVI Object']) if (avi_object_json['name'] == pool_name): if (('health_monitor_refs' in avi_object_json) and avi_object_json['health_monitor_refs']): monitor_refs = avi_object_json['health_monitor_refs'] for monitor_ref in monitor_refs: monitor_ref = self.get_name(monitor_ref) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add lb monitor']) skipped_list = self.get_csv_skipped_list(csv_object, monitor_ref, vs_ref) if skipped_list: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['health monitor'] = {} skipped_setting[obj_name]['pool']['health monitor']['name'] = monitor_ref skipped_setting[obj_name]['pool']['health monitor']['skipped_list'] = skipped_list if ('ssl_key_and_certificate_refs' in avi_object_json): (name, skipped) = self.get_ssl_key_and_cert_refs_skipped(csv_writer_dict_list, avi_object_json, vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['ssl key and cert'] = {} skipped_setting[obj_name]['pool']['ssl key and cert']['name'] = name skipped_setting[obj_name]['pool']['ssl key and cert']['skipped_list'] = skipped if ('ssl_profile_ref' in avi_object_json): (name, skipped) = self.get_ssl_profile_skipped(csv_writer_dict_list, avi_object_json['ssl_profile_ref'], vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['ssl profile'] = {} skipped_setting[obj_name]['pool']['ssl profile']['name'] = name skipped_setting[obj_name]['pool']['ssl profile']['skipped_list'] = skipped if ('application_persistence_profile_ref' in avi_object_json): (name, skipped) = self.get_app_persistence_profile_skipped(csv_writer_dict_list, avi_object_json, vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['Application Persistence profile'] = {} skipped_setting[obj_name]['pool']['Application Persistence profile']['name'] = name skipped_setting[obj_name]['pool']['Application 
Persistence profile']['skipped_list'] = skipped if ('application_persistence_profile_ref' in avi_object_json): (name, skipped) = self.get_app_persistence_profile_skipped(csv_writer_dict_list, avi_object_json, vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['Application Persistence profile'] = {} skipped_setting[obj_name]['pool']['Application Persistence profile']['name'] = name skipped_setting[obj_name]['pool']['Application Persistence profile']['skipped_list'] = skipped
8,853,895,342,330,683,000
This method is used for getting pool skipped list. :param avi_config: AVI dict :param pool_group_name: Name of Pool group :param skipped_setting: List of skipped settings :param csv_object: CSV row :param obj_name: Name of Object :param csv_writer_dict_list: List of bind lb vserver netscaler command rows :param vs_ref: vs object reference :return: List of skipped settings
python/avi/migrationtools/netscaler_converter/ns_util.py
get_pool_skipped_list
avinetworks/alb-sdk
python
def get_pool_skipped_list(self, avi_config, pool_group_name, skipped_setting, csv_object, obj_name, csv_writer_dict_list, vs_ref): '\n This method is used for getting pool skipped list.\n :param avi_config: AVI dict\n :param pool_group_name: Name of Pool group\n :param skipped_setting: List of skipped settings\n :param csv_object: CSV row\n :param obj_name: Name of Object\n :param csv_writer_dict_list: List of bind lb vserver netscaler command\n rows\n :param vs_ref: vs object reference\n :return: List of skipped settings\n ' pool_group_object_ref = [pool_group_object_ref for pool_group_object_ref in avi_config['PoolGroup'] if (pool_group_object_ref['name'] == pool_group_name)] for pool_group in pool_group_object_ref: if ('members' in pool_group): for each_pool_ref in pool_group['members']: pool_name = self.get_name(each_pool_ref['pool_ref']) skipped_list = self.get_csv_skipped_list(csv_object, pool_name, vs_ref) if (len(skipped_list) > 0): skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['pool_skipped_list'] = skipped_list for pool_partial in csv_object: avi_object_json = self.format_string_to_json(pool_partial['AVI Object']) if (avi_object_json['name'] == pool_name): if (('health_monitor_refs' in avi_object_json) and avi_object_json['health_monitor_refs']): monitor_refs = avi_object_json['health_monitor_refs'] for monitor_ref in monitor_refs: monitor_ref = self.get_name(monitor_ref) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add lb monitor']) skipped_list = self.get_csv_skipped_list(csv_object, monitor_ref, vs_ref) if skipped_list: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['health monitor'] = {} skipped_setting[obj_name]['pool']['health monitor']['name'] = monitor_ref skipped_setting[obj_name]['pool']['health monitor']['skipped_list'] = skipped_list if ('ssl_key_and_certificate_refs' in avi_object_json): (name, skipped) = self.get_ssl_key_and_cert_refs_skipped(csv_writer_dict_list, avi_object_json, vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['ssl key and cert'] = {} skipped_setting[obj_name]['pool']['ssl key and cert']['name'] = name skipped_setting[obj_name]['pool']['ssl key and cert']['skipped_list'] = skipped if ('ssl_profile_ref' in avi_object_json): (name, skipped) = self.get_ssl_profile_skipped(csv_writer_dict_list, avi_object_json['ssl_profile_ref'], vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['ssl profile'] = {} skipped_setting[obj_name]['pool']['ssl profile']['name'] = name skipped_setting[obj_name]['pool']['ssl profile']['skipped_list'] = skipped if ('application_persistence_profile_ref' in avi_object_json): (name, skipped) = self.get_app_persistence_profile_skipped(csv_writer_dict_list, avi_object_json, vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['Application Persistence profile'] = {} skipped_setting[obj_name]['pool']['Application Persistence profile']['name'] = name skipped_setting[obj_name]['pool']['Application 
Persistence profile']['skipped_list'] = skipped if ('application_persistence_profile_ref' in avi_object_json): (name, skipped) = self.get_app_persistence_profile_skipped(csv_writer_dict_list, avi_object_json, vs_ref) if skipped: skipped_setting[obj_name] = {} skipped_setting[obj_name]['pool'] = {} skipped_setting[obj_name]['pool']['pool_name'] = pool_name skipped_setting[obj_name]['pool']['Application Persistence profile'] = {} skipped_setting[obj_name]['pool']['Application Persistence profile']['name'] = name skipped_setting[obj_name]['pool']['Application Persistence profile']['skipped_list'] = skipped
def vs_complexity_level(self): '\n This method calculate complexity of vs.\n :return:\n ' vs_csv_objects = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] in ['add cs vserver', 'add lb vserver']))] for vs_csv_object in vs_csv_objects: virtual_service = self.format_string_to_json(vs_csv_object['AVI Object']) self.update_vs_complexity_level(vs_csv_object, virtual_service)
-5,497,695,561,299,338,000
This method calculates the complexity level of each converted VS. :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
vs_complexity_level
avinetworks/alb-sdk
python
def vs_complexity_level(self): '\n This method calculate complexity of vs.\n :return:\n ' vs_csv_objects = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] in ['add cs vserver', 'add lb vserver']))] for vs_csv_object in vs_csv_objects: virtual_service = self.format_string_to_json(vs_csv_object['AVI Object']) self.update_vs_complexity_level(vs_csv_object, virtual_service)
def vs_per_skipped_setting_for_references(self, avi_config): '\n This functions defines that Add the skipped setting per VS CSV row\n :param avi_config: this methode use avi_config for checking vs skipped\n :return: None\n ' global fully_migrated global total_count global progressbar_count fully_migrated = 0 vs_csv_objects = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] in ['add cs vserver', 'add lb vserver']))] total_count = (total_count + len(vs_csv_objects)) for vs_csv_object in vs_csv_objects: progressbar_count += 1 skipped_setting = {} virtual_service = self.format_string_to_json(vs_csv_object['AVI Object']) self.update_vs_complexity_level(vs_csv_object, virtual_service) vs_ref = virtual_service['name'] repls = (('[', ''), (']', '')) skipped_setting_csv = reduce((lambda a, kv: a.replace(*kv)), repls, vs_csv_object['Skipped settings']) if skipped_setting_csv: skipped_setting['virtual_service'] = [skipped_setting_csv] if ('ssl_key_and_certificate_refs' in virtual_service): (name, skipped) = self.get_ssl_key_and_cert_refs_skipped(csv_writer_dict_list, virtual_service, vs_ref) if skipped: skipped_setting['ssl key and cert'] = {} skipped_setting['ssl key and cert']['name'] = name skipped_setting['ssl key and cert']['skipped_list'] = skipped if ('ssl_profile_ref' in virtual_service): (name, skipped) = self.get_ssl_profile_skipped(csv_writer_dict_list, virtual_service['ssl_profile_ref'], vs_ref) if skipped: skipped_setting['ssl profile'] = {} skipped_setting['ssl profile']['name'] = name skipped_setting['ssl profile']['skipped_list'] = skipped if ('pool_group_ref' in virtual_service): pool_group_name = self.get_name(virtual_service['pool_group_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['bind lb vserver']) self.get_pool_skipped_list(avi_config, pool_group_name, skipped_setting, csv_object, 'pool group', csv_writer_dict_list, vs_ref) if ('http_policies' in virtual_service): csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add cs policy', 'add responder policy', 'add rewrite policy']) for http_ref in virtual_service['http_policies']: http_name = self.get_name(http_ref['http_policy_set_ref']) skipped_list = self.get_csv_skipped_list(csv_object, http_name, vs_ref) if skipped_list: skipped_setting['Httppolicy'] = {} skipped_setting['Httppolicy']['name'] = http_name skipped_setting['Httppolicy']['skipped_list'] = skipped_list for each_http_policy in avi_config['HTTPPolicySet']: if (each_http_policy['name'] == http_name): for http_req in each_http_policy['http_request_policy']['rules']: if (http_req.get('switching_action', None) and http_req['switching_action'].get('pool_group_ref', None)): pool_group_name = self.get_name(http_req['switching_action']['pool_group_ref']) self.get_pool_skipped_list(avi_config, pool_group_name, skipped_setting, csv_object, 'Httppolicy', csv_writer_dict_list, vs_ref) if (('application_profile_ref' in virtual_service) and ('admin:System' not in virtual_service['application_profile_ref'])): (name, skipped) = self.get_application_profile_skipped(csv_writer_dict_list, virtual_service, vs_ref) if skipped: skipped_setting['Application profile'] = {} skipped_setting['Application profile']['name'] = name skipped_setting['Application profile']['skipped_list'] = skipped if (('network_profile_ref' in virtual_service) and ('admin:System' not in virtual_service['network_profile_ref'])): (name, skipped) = self.get_network_profile_skipped(csv_writer_dict_list, 
virtual_service, vs_ref) if skipped: skipped_setting['Network profile'] = {} skipped_setting['Network profile']['name'] = name skipped_setting['Network profile']['skipped_list'] = skipped if skipped_setting: vs_csv_object.update({'Overall skipped settings': str(skipped_setting)}) else: vs_csv_object.update({'Overall skipped settings': 'FULLY MIGRATION'}) fully_migrated += 1 msg = 'Writing excel sheet started...' self.print_progress_bar(progressbar_count, total_count, msg, prefix='Progress', suffix='') csv_objects = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] not in ['add cs vserver', 'add lb vserver']) and (('VS Reference' not in row) or (not row['VS Reference'])))] for csv_object in csv_objects: csv_object['VS Reference'] = STATUS_NOT_IN_USE
-5,424,762,615,369,725,000
Adds the overall skipped settings (collected across referenced objects) to each VS row in the CSV report. :param avi_config: avi config dict used to resolve VS references :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
vs_per_skipped_setting_for_references
avinetworks/alb-sdk
python
def vs_per_skipped_setting_for_references(self, avi_config): '\n This functions defines that Add the skipped setting per VS CSV row\n :param avi_config: this methode use avi_config for checking vs skipped\n :return: None\n ' global fully_migrated global total_count global progressbar_count fully_migrated = 0 vs_csv_objects = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] in ['add cs vserver', 'add lb vserver']))] total_count = (total_count + len(vs_csv_objects)) for vs_csv_object in vs_csv_objects: progressbar_count += 1 skipped_setting = {} virtual_service = self.format_string_to_json(vs_csv_object['AVI Object']) self.update_vs_complexity_level(vs_csv_object, virtual_service) vs_ref = virtual_service['name'] repls = (('[', ), (']', )) skipped_setting_csv = reduce((lambda a, kv: a.replace(*kv)), repls, vs_csv_object['Skipped settings']) if skipped_setting_csv: skipped_setting['virtual_service'] = [skipped_setting_csv] if ('ssl_key_and_certificate_refs' in virtual_service): (name, skipped) = self.get_ssl_key_and_cert_refs_skipped(csv_writer_dict_list, virtual_service, vs_ref) if skipped: skipped_setting['ssl key and cert'] = {} skipped_setting['ssl key and cert']['name'] = name skipped_setting['ssl key and cert']['skipped_list'] = skipped if ('ssl_profile_ref' in virtual_service): (name, skipped) = self.get_ssl_profile_skipped(csv_writer_dict_list, virtual_service['ssl_profile_ref'], vs_ref) if skipped: skipped_setting['ssl profile'] = {} skipped_setting['ssl profile']['name'] = name skipped_setting['ssl profile']['skipped_list'] = skipped if ('pool_group_ref' in virtual_service): pool_group_name = self.get_name(virtual_service['pool_group_ref']) csv_object = self.get_csv_object_list(csv_writer_dict_list, ['bind lb vserver']) self.get_pool_skipped_list(avi_config, pool_group_name, skipped_setting, csv_object, 'pool group', csv_writer_dict_list, vs_ref) if ('http_policies' in virtual_service): csv_object = self.get_csv_object_list(csv_writer_dict_list, ['add cs policy', 'add responder policy', 'add rewrite policy']) for http_ref in virtual_service['http_policies']: http_name = self.get_name(http_ref['http_policy_set_ref']) skipped_list = self.get_csv_skipped_list(csv_object, http_name, vs_ref) if skipped_list: skipped_setting['Httppolicy'] = {} skipped_setting['Httppolicy']['name'] = http_name skipped_setting['Httppolicy']['skipped_list'] = skipped_list for each_http_policy in avi_config['HTTPPolicySet']: if (each_http_policy['name'] == http_name): for http_req in each_http_policy['http_request_policy']['rules']: if (http_req.get('switching_action', None) and http_req['switching_action'].get('pool_group_ref', None)): pool_group_name = self.get_name(http_req['switching_action']['pool_group_ref']) self.get_pool_skipped_list(avi_config, pool_group_name, skipped_setting, csv_object, 'Httppolicy', csv_writer_dict_list, vs_ref) if (('application_profile_ref' in virtual_service) and ('admin:System' not in virtual_service['application_profile_ref'])): (name, skipped) = self.get_application_profile_skipped(csv_writer_dict_list, virtual_service, vs_ref) if skipped: skipped_setting['Application profile'] = {} skipped_setting['Application profile']['name'] = name skipped_setting['Application profile']['skipped_list'] = skipped if (('network_profile_ref' in virtual_service) and ('admin:System' not in virtual_service['network_profile_ref'])): (name, skipped) = self.get_network_profile_skipped(csv_writer_dict_list, 
virtual_service, vs_ref) if skipped: skipped_setting['Network profile'] = {} skipped_setting['Network profile']['name'] = name skipped_setting['Network profile']['skipped_list'] = skipped if skipped_setting: vs_csv_object.update({'Overall skipped settings': str(skipped_setting)}) else: vs_csv_object.update({'Overall skipped settings': 'FULLY MIGRATION'}) fully_migrated += 1 msg = 'Writing excel sheet started...' self.print_progress_bar(progressbar_count, total_count, msg, prefix='Progress', suffix='') csv_objects = [row for row in csv_writer_dict_list if ((row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]) and (row['Netscaler Command'] not in ['add cs vserver', 'add lb vserver']) and (('VS Reference' not in row) or (not row['VS Reference'])))] for csv_object in csv_objects: csv_object['VS Reference'] = STATUS_NOT_IN_USE
def write_status_report_and_pivot_table_in_xlsx(self, row_list, output_dir, report_name, vs_level_status): '\n This method writes the status and make pivot table in excel sheet\n :param row_list:\n :param output_dir:\n :param report_name:\n :param vs_level_status:\n :return:\n ' global total_count global progressbar_count if vs_level_status: fieldnames = ['Line Number', 'Netscaler Command', 'Object Name', 'Full Command', 'Status', 'Skipped settings', 'Indirect mapping', 'Not Applicable', 'User Ignored', 'Overall skipped settings', 'Complexity Level', 'VS Reference', 'AVI Object'] else: fieldnames = ['Line Number', 'Netscaler Command', 'Object Name', 'Full Command', 'Status', 'Skipped settings', 'Indirect mapping', 'Not Applicable', 'User Ignored', 'Complexity Level', 'AVI Object'] xlsx_report = ((output_dir + os.path.sep) + ('%s-ConversionStatus.xlsx' % report_name)) status_wb = Workbook(xlsx_report) status_ws = status_wb.add_worksheet('Status Sheet') status_ws.freeze_panes(1, 0) first_row = 0 for header in fieldnames: col = fieldnames.index(header) status_ws.write(first_row, col, header) row = 1 for row_data in row_list: progressbar_count += 1 for (_key, _value) in row_data.items(): if (_key in fieldnames): col = fieldnames.index(_key) status_ws.write(row, col, _value) msg = 'Writing excel sheet started...' self.print_progress_bar(progressbar_count, total_count, msg, prefix='Progress', suffix='') row += 1 status_wb.close() df = pandas.DataFrame(row_list, columns=fieldnames) pivot_table = pandas.pivot_table(df, index=['Status', 'Netscaler Command'], values=[], aggfunc=[len], fill_value=0) pivot_df = pandas.DataFrame(pivot_table) master_book = load_workbook(xlsx_report) master_writer = pandas.ExcelWriter(xlsx_report, engine='openpyxl') master_writer.book = master_book pivot_df.to_excel(master_writer, 'Pivot Sheet') master_writer.save()
-2,934,899,319,914,298,000
This method writes the conversion status rows and a pivot table into an excel sheet. :param row_list: list of report rows :param output_dir: output directory :param report_name: report file name prefix :param vs_level_status: whether VS-level columns are included :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
write_status_report_and_pivot_table_in_xlsx
avinetworks/alb-sdk
python
def write_status_report_and_pivot_table_in_xlsx(self, row_list, output_dir, report_name, vs_level_status): '\n This method writes the status and make pivot table in excel sheet\n :param row_list:\n :param output_dir:\n :param report_name:\n :param vs_level_status:\n :return:\n ' global total_count global progressbar_count if vs_level_status: fieldnames = ['Line Number', 'Netscaler Command', 'Object Name', 'Full Command', 'Status', 'Skipped settings', 'Indirect mapping', 'Not Applicable', 'User Ignored', 'Overall skipped settings', 'Complexity Level', 'VS Reference', 'AVI Object'] else: fieldnames = ['Line Number', 'Netscaler Command', 'Object Name', 'Full Command', 'Status', 'Skipped settings', 'Indirect mapping', 'Not Applicable', 'User Ignored', 'Complexity Level', 'AVI Object'] xlsx_report = ((output_dir + os.path.sep) + ('%s-ConversionStatus.xlsx' % report_name)) status_wb = Workbook(xlsx_report) status_ws = status_wb.add_worksheet('Status Sheet') status_ws.freeze_panes(1, 0) first_row = 0 for header in fieldnames: col = fieldnames.index(header) status_ws.write(first_row, col, header) row = 1 for row_data in row_list: progressbar_count += 1 for (_key, _value) in row_data.items(): if (_key in fieldnames): col = fieldnames.index(_key) status_ws.write(row, col, _value) msg = 'Writing excel sheet started...' self.print_progress_bar(progressbar_count, total_count, msg, prefix='Progress', suffix='') row += 1 status_wb.close() df = pandas.DataFrame(row_list, columns=fieldnames) pivot_table = pandas.pivot_table(df, index=['Status', 'Netscaler Command'], values=[], aggfunc=[len], fill_value=0) pivot_df = pandas.DataFrame(pivot_table) master_book = load_workbook(xlsx_report) master_writer = pandas.ExcelWriter(xlsx_report, engine='openpyxl') master_writer.book = master_book pivot_df.to_excel(master_writer, 'Pivot Sheet') master_writer.save()
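The pivot step essentially counts rows per (Status, Netscaler Command) pair; a minimal pandas sketch with hypothetical rows gives the same counts via groupby:

import pandas

rows = [
    {'Status': 'SUCCESSFUL', 'Netscaler Command': 'add lb vserver'},
    {'Status': 'SUCCESSFUL', 'Netscaler Command': 'add lb vserver'},
    {'Status': 'SKIPPED',    'Netscaler Command': 'add lb monitor'},
]
df = pandas.DataFrame(rows)
counts = df.groupby(['Status', 'Netscaler Command']).size()
# counts is a Series indexed by (Status, Netscaler Command) with the row counts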
def update_skip_duplicates(self, obj, obj_list, obj_type, merge_object_mapping, name, ent_type, prefix, syslist): '\n This method merge duplicate objects\n :param obj: Source object to find duplicates for\n :param obj_list: List of object to search duplicates in\n :param obj_type: Type of object to add in converted_objs status\n :param converted_objs: Converted avi object or merged object name\n :param name: Name of the object\n :param default_profile_name : Name of root parent default profile\n :return:\n ' dup_of = None merge_object_mapping[obj_type].update({name: name}) (dup_of, old_name) = self.check_for_duplicates(obj, obj_list, obj_type, merge_object_mapping, ent_type, prefix, syslist) if dup_of: LOG.info(('Duplicate profiles: %s merged in %s' % (obj['name'], dup_of))) if (old_name in merge_object_mapping[obj_type].keys()): merge_object_mapping[obj_type].update({old_name: dup_of}) merge_object_mapping[obj_type].update({name: dup_of}) return True return False
642,166,565,262,819,300
This method merges duplicate objects and records the mapping from original names to merged names. :param obj: source object to find duplicates for :param obj_list: list of objects to search duplicates in :param obj_type: type of the object :param merge_object_mapping: mapping of original object names to merged names :param name: name of the object :param ent_type: entity type used for duplicate checking :param prefix: prefix for objects :param syslist: baseline/system object list :return: True if the object was merged into a duplicate, else False
python/avi/migrationtools/netscaler_converter/ns_util.py
update_skip_duplicates
avinetworks/alb-sdk
python
def update_skip_duplicates(self, obj, obj_list, obj_type, merge_object_mapping, name, ent_type, prefix, syslist): '\n This method merge duplicate objects\n :param obj: Source object to find duplicates for\n :param obj_list: List of object to search duplicates in\n :param obj_type: Type of object to add in converted_objs status\n :param converted_objs: Converted avi object or merged object name\n :param name: Name of the object\n :param default_profile_name : Name of root parent default profile\n :return:\n ' dup_of = None merge_object_mapping[obj_type].update({name: name}) (dup_of, old_name) = self.check_for_duplicates(obj, obj_list, obj_type, merge_object_mapping, ent_type, prefix, syslist) if dup_of: LOG.info(('Duplicate profiles: %s merged in %s' % (obj['name'], dup_of))) if (old_name in merge_object_mapping[obj_type].keys()): merge_object_mapping[obj_type].update({old_name: dup_of}) merge_object_mapping[obj_type].update({name: dup_of}) return True return False
def create_update_vsvip(self, vip, vsvip_config, tenant_ref, cloud_ref, prefix=None, vrf_ref=None): '\n This functions defines that create or update VSVIP object.\n :param vip: vip of VS\n :param vsvip_config: List of vs object\n :param tenant_ref: tenant reference\n :param cloud_ref: cloud reference\n :param prefix: prefix for objects\n :param vrf_ref: VRF ref to be added in VIP object\n :return: None\n ' name = (vip + '-vsvip') if prefix: name = ((prefix + '-') + name) vsvip = [vip_obj for vip_obj in vsvip_config if (vip_obj['name'] == name)] if vsvip: diff_ten = [vips for vips in vsvip if (vips['tenant_ref'] != tenant_ref)] if diff_ten: LOG.debug('VsVip %s is repeated with vrf %s but different tenant %s', name, (self.get_name(vrf_ref) if vrf_ref else 'None'), self.get_name(tenant_ref)) name = '' else: vsvip_object = {'name': name, 'tenant_ref': tenant_ref, 'cloud_ref': cloud_ref, 'vip': [{'vip_id': '0', 'ip_address': {'type': 'V4', 'addr': vip}}]} if vrf_ref: vsvip_object['vrf_context_ref'] = vrf_ref vsvip_config.append(vsvip_object)
-3,150,883,588,053,014,500
Creates a VSVIP object for the given VIP, or skips creation when one with the same name already exists. :param vip: vip of VS :param vsvip_config: list of vsvip objects :param tenant_ref: tenant reference :param cloud_ref: cloud reference :param prefix: prefix for objects :param vrf_ref: VRF ref to be added in VIP object :return: None
python/avi/migrationtools/netscaler_converter/ns_util.py
create_update_vsvip
avinetworks/alb-sdk
python
def create_update_vsvip(self, vip, vsvip_config, tenant_ref, cloud_ref, prefix=None, vrf_ref=None): '\n This functions defines that create or update VSVIP object.\n :param vip: vip of VS\n :param vsvip_config: List of vs object\n :param tenant_ref: tenant reference\n :param cloud_ref: cloud reference\n :param prefix: prefix for objects\n :param vrf_ref: VRF ref to be added in VIP object\n :return: None\n ' name = (vip + '-vsvip') if prefix: name = ((prefix + '-') + name) vsvip = [vip_obj for vip_obj in vsvip_config if (vip_obj['name'] == name)] if vsvip: diff_ten = [vips for vips in vsvip if (vips['tenant_ref'] != tenant_ref)] if diff_ten: LOG.debug('VsVip %s is repeated with vrf %s but different tenant %s', name, (self.get_name(vrf_ref) if vrf_ref else 'None'), self.get_name(tenant_ref)) name = '' else: vsvip_object = {'name': name, 'tenant_ref': tenant_ref, 'cloud_ref': cloud_ref, 'vip': [{'vip_id': '0', 'ip_address': {'type': 'V4', 'addr': vip}}]} if vrf_ref: vsvip_object['vrf_context_ref'] = vrf_ref vsvip_config.append(vsvip_object)
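The object the function appends has this shape (the refs shown are hypothetical examples, and the name gains a prefix when one is supplied):

vsvip_object = {
    'name': '10.1.1.1-vsvip',                       # or '<prefix>-10.1.1.1-vsvip'
    'tenant_ref': '/api/tenant/?name=admin',        # hypothetical tenant ref
    'cloud_ref': '/api/cloud/?name=Default-Cloud',  # hypothetical cloud ref
    'vip': [{'vip_id': '0',
             'ip_address': {'type': 'V4', 'addr': '10.1.1.1'}}],
    # 'vrf_context_ref' is added only when a VRF ref is passed in
}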
def get_redirect_fail_action(self, url): '\n This method returns the fail action dict\n :param url: url\n :return:\n ' parsed = urlparse(url) redirect_fail_action = {'fail_action': {'redirect': {'host': parsed.hostname, 'protocol': str(parsed.scheme).upper(), 'status_code': 'HTTP_REDIRECT_STATUS_CODE_302'}, 'type': 'FAIL_ACTION_HTTP_REDIRECT'}} if parsed.path: redirect_fail_action['fail_action']['redirect']['path'] = str(parsed.path).replace('"', '') if parsed.query: redirect_fail_action['fail_action']['redirect']['query'] = parsed.query return redirect_fail_action
5,927,314,944,786,889,000
This method returns the fail action dict :param url: url :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
get_redirect_fail_action
avinetworks/alb-sdk
python
def get_redirect_fail_action(self, url): '\n This method returns the fail action dict\n :param url: url\n :return:\n ' parsed = urlparse(url) redirect_fail_action = {'fail_action': {'redirect': {'host': parsed.hostname, 'protocol': str(parsed.scheme).upper(), 'status_code': 'HTTP_REDIRECT_STATUS_CODE_302'}, 'type': 'FAIL_ACTION_HTTP_REDIRECT'}} if parsed.path: redirect_fail_action['fail_action']['redirect']['path'] = str(parsed.path).replace('"', '') if parsed.query: redirect_fail_action['fail_action']['redirect']['query'] = parsed.query return redirect_fail_action
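A sketch of the resulting fail action for a hypothetical backup URL; only the host, scheme, path and query are taken from the parsed URL:

from urllib.parse import urlparse

parsed = urlparse('https://backup.example.com/maintenance')
redirect_fail_action = {
    'fail_action': {
        'redirect': {
            'host': parsed.hostname,                  # 'backup.example.com'
            'protocol': str(parsed.scheme).upper(),   # 'HTTPS'
            'status_code': 'HTTP_REDIRECT_STATUS_CODE_302',
            'path': parsed.path.replace('"', ''),     # '/maintenance'
        },
        'type': 'FAIL_ACTION_HTTP_REDIRECT',
    }
}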
def cleanup_dupof(self, avi_config): '\n This method is used to clean up dup_of key from different AVI objects\n :param avi_config:\n :return:\n ' self.remove_dup_key(avi_config['ApplicationProfile']) self.remove_dup_key(avi_config['NetworkProfile']) self.remove_dup_key(avi_config['SSLProfile']) self.remove_dup_key(avi_config['PKIProfile']) self.remove_dup_key(avi_config['ApplicationPersistenceProfile']) self.remove_dup_key(avi_config['HealthMonitor'])
6,673,110,791,954,695,000
This method is used to clean up dup_of key from different AVI objects :param avi_config: :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
cleanup_dupof
avinetworks/alb-sdk
python
def cleanup_dupof(self, avi_config): '\n This method is used to clean up dup_of key from different AVI objects\n :param avi_config:\n :return:\n ' self.remove_dup_key(avi_config['ApplicationProfile']) self.remove_dup_key(avi_config['NetworkProfile']) self.remove_dup_key(avi_config['SSLProfile']) self.remove_dup_key(avi_config['PKIProfile']) self.remove_dup_key(avi_config['ApplicationPersistenceProfile']) self.remove_dup_key(avi_config['HealthMonitor'])
def update_profile_ref(self, ref, avi_obj, merge_obj_list): '\n This method is used to update the profile references which was\n attached at the time of creation\n :param ref:\n :param avi_obj:\n :param merge_obj_list:\n :return:\n ' for obj in avi_obj: obj_ref = obj.get(ref) tenant_ref = obj.get('tenant_ref') if obj_ref: name = self.get_name(obj_ref) tenant = self.get_name(tenant_ref) if (name in merge_obj_list): updated_name = merge_obj_list[name] if (ref == 'application_persistence_profile_ref'): type_cons = OBJECT_TYPE_APPLICATION_PERSISTENCE_PROFILE if (ref == 'application_profile_ref'): type_cons = OBJECT_TYPE_APPLICATION_PROFILE obj[ref] = self.get_object_ref(updated_name, type_cons, tenant)
7,339,447,582,846,898,000
This method is used to update the profile references which was attached at the time of creation :param ref: :param avi_obj: :param merge_obj_list: :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
update_profile_ref
avinetworks/alb-sdk
python
def update_profile_ref(self, ref, avi_obj, merge_obj_list): '\n This method is used to update the profile references which was\n attached at the time of creation\n :param ref:\n :param avi_obj:\n :param merge_obj_list:\n :return:\n ' for obj in avi_obj: obj_ref = obj.get(ref) tenant_ref = obj.get('tenant_ref') if obj_ref: name = self.get_name(obj_ref) tenant = self.get_name(tenant_ref) if (name in merge_obj_list): updated_name = merge_obj_list[name] if (ref == 'application_persistence_profile_ref'): type_cons = OBJECT_TYPE_APPLICATION_PERSISTENCE_PROFILE if (ref == 'application_profile_ref'): type_cons = OBJECT_TYPE_APPLICATION_PROFILE obj[ref] = self.get_object_ref(updated_name, type_cons, tenant)
def vs_redirect_http_to_https(self, avi_config, sysdict): '\n Removes the VS which is redirected to another VS amd update the\n status and avi object for that VS\n :param avi_config: avi configuration after all conversion\n :param sysdict: system configuration\n :return:\n ' vsrem = {} LOG.debug('Check started for redirect from HTTP VS to HTTPS VS with no pool') for vs in avi_config['VirtualService']: if ((not vs.get('pool_group_ref')) and (not vs.get('application_profile_ref')) and vs.get('services', []) and (not all([s.get('enable_ssl', True) for s in vs['services']])) and vs.get('http_policies', []) and vs['http_policies'][0].get('http_policy_set_ref')): polname = self.get_name(vs['http_policies'][0]['http_policy_set_ref']) pol = [pl for pl in avi_config['HTTPPolicySet'] if (pl['name'] == polname)] if (pol and pol[0].get('http_request_policy', {}).get('rules', []) and pol[0]['http_request_policy']['rules'][0].get('redirect_action')): iplist = ([ip['ip_address']['addr'] for ip in vs.get('vip', []) if ip.get('ip_address', {}).get('addr')] or ([vs['ip_address']['addr']] if vs.get('ip_address', {}).get('addr') else [])) if iplist: for nvs in avi_config['VirtualService']: if ((vs['name'] != nvs['name']) and [ip for ip in iplist if (ip in ([nip['ip_address']['addr'] for nip in nvs.get('vip', []) if nip.get('ip_address', {}).get('addr')] or [(nvs['ip_address']['addr'] if nvs.get('ip_address', {}).get('addr') else [])]))]): appname = (self.get_name(nvs['application_profile_ref']) if nvs.get('application_profile_ref') else None) if (appname == 'ns-migrate-http'): LOG.debug(('%s has redirect to %s, hence removing %s' % (vs['name'], nvs['name'], vs['name']))) vsrem[vs['name']] = nvs['name'] appprof = [pr for pr in (avi_config['ApplicationProfile'] + sysdict['ApplicationProfile']) if (pr['name'] == appname)] if (appprof and (appprof[0]['type'] == 'APPLICATION_PROFILE_TYPE_HTTP')): if appprof[0].get('http_profile'): appprof[0]['http_profile']['http_to_https'] = True else: appprof[0]['http_profile'] = {'http_to_https': True} LOG.debug(("%s has redirect to %s, hence setting 'http_to_https' as true and removing %s" % (vs['name'], nvs['name'], vs['name']))) vsrem[vs['name']] = nvs['name'] if ([True for ssl in nvs['services'] if ssl['enable_ssl']] and [True for ssl_vs in vs['services'] if (not ssl_vs['enable_ssl'])]): nvs['services'].append(vs['services'][0]) vsrem[vs['name']] = nvs['name'] LOG.debug('Check completed for redirect from HTTP VS to HTTPS VS with no pool') if vsrem: avi_config['VirtualService'] = [v for v in avi_config['VirtualService'] if (v['name'] not in vsrem.keys())] LOG.debug(('%s VS got removed from AVI configuration' % str(len(vsrem)))) for cl in csv_writer_dict_list: if ((cl['Object Name'] in vsrem.keys()) and (cl['Netscaler Command'] in ['add lb vserver', 'add cs vserver'])): cl['Status'] = STATUS_INDIRECT cl['AVI Object'] = ('Redirected to %s' % vsrem[cl['Object Name']])
-1,752,131,683,458,171,000
Removes the VS which is redirected to another VS amd update the status and avi object for that VS :param avi_config: avi configuration after all conversion :param sysdict: system configuration :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
vs_redirect_http_to_https
avinetworks/alb-sdk
python
def vs_redirect_http_to_https(self, avi_config, sysdict): '\n Removes the VS which is redirected to another VS amd update the\n status and avi object for that VS\n :param avi_config: avi configuration after all conversion\n :param sysdict: system configuration\n :return:\n ' vsrem = {} LOG.debug('Check started for redirect from HTTP VS to HTTPS VS with no pool') for vs in avi_config['VirtualService']: if ((not vs.get('pool_group_ref')) and (not vs.get('application_profile_ref')) and vs.get('services', []) and (not all([s.get('enable_ssl', True) for s in vs['services']])) and vs.get('http_policies', []) and vs['http_policies'][0].get('http_policy_set_ref')): polname = self.get_name(vs['http_policies'][0]['http_policy_set_ref']) pol = [pl for pl in avi_config['HTTPPolicySet'] if (pl['name'] == polname)] if (pol and pol[0].get('http_request_policy', {}).get('rules', []) and pol[0]['http_request_policy']['rules'][0].get('redirect_action')): iplist = ([ip['ip_address']['addr'] for ip in vs.get('vip', []) if ip.get('ip_address', {}).get('addr')] or ([vs['ip_address']['addr']] if vs.get('ip_address', {}).get('addr') else [])) if iplist: for nvs in avi_config['VirtualService']: if ((vs['name'] != nvs['name']) and [ip for ip in iplist if (ip in ([nip['ip_address']['addr'] for nip in nvs.get('vip', []) if nip.get('ip_address', {}).get('addr')] or [(nvs['ip_address']['addr'] if nvs.get('ip_address', {}).get('addr') else [])]))]): appname = (self.get_name(nvs['application_profile_ref']) if nvs.get('application_profile_ref') else None) if (appname == 'ns-migrate-http'): LOG.debug(('%s has redirect to %s, hence removing %s' % (vs['name'], nvs['name'], vs['name']))) vsrem[vs['name']] = nvs['name'] appprof = [pr for pr in (avi_config['ApplicationProfile'] + sysdict['ApplicationProfile']) if (pr['name'] == appname)] if (appprof and (appprof[0]['type'] == 'APPLICATION_PROFILE_TYPE_HTTP')): if appprof[0].get('http_profile'): appprof[0]['http_profile']['http_to_https'] = True else: appprof[0]['http_profile'] = {'http_to_https': True} LOG.debug(("%s has redirect to %s, hence setting 'http_to_https' as true and removing %s" % (vs['name'], nvs['name'], vs['name']))) vsrem[vs['name']] = nvs['name'] if ([True for ssl in nvs['services'] if ssl['enable_ssl']] and [True for ssl_vs in vs['services'] if (not ssl_vs['enable_ssl'])]): nvs['services'].append(vs['services'][0]) vsrem[vs['name']] = nvs['name'] LOG.debug('Check completed for redirect from HTTP VS to HTTPS VS with no pool') if vsrem: avi_config['VirtualService'] = [v for v in avi_config['VirtualService'] if (v['name'] not in vsrem.keys())] LOG.debug(('%s VS got removed from AVI configuration' % str(len(vsrem)))) for cl in csv_writer_dict_list: if ((cl['Object Name'] in vsrem.keys()) and (cl['Netscaler Command'] in ['add lb vserver', 'add cs vserver'])): cl['Status'] = STATUS_INDIRECT cl['AVI Object'] = ('Redirected to %s' % vsrem[cl['Object Name']])
def merge_pool(self, avi_config): '\n This method merge the pools in AVI if HM is same\n :param avi_config:\n :return:\n ' mergelist = [] for poolgrp in avi_config['PoolGroup']: if (poolgrp['name'] == 'lb-depoed1cdb.qai-pri-5984-poolgroup'): print('found') pool_member = [obj for obj in poolgrp['members'] if (not (obj.get('priority_label', '10') == '2'))] length = len(pool_member) for count in range(length): pool_name = pool_member[count]['pool_ref'].split('&')[1].split('=')[1] if (pool_name in mergelist): continue pool = [pl for pl in avi_config['Pool'] if (pl['name'] == pool_name)] if (not pool): LOG.debug(("'%s' not present" % pool_name)) continue for count2 in range((count + 1), length): pname = pool_member[count2]['pool_ref'].split('&')[1].split('=')[1] nextpool = [pol for pol in avi_config['Pool'] if (pol['name'] == pname)] if (not nextpool): LOG.debug(("'%s' not present" % pname)) continue if (pool[0]['health_monitor_refs'].sort() == nextpool[0]['health_monitor_refs'].sort()): LOG.debug(("Merging pool '%s' in '%s'" % (nextpool[0]['name'], pool[0]['name']))) ip_port = set() for ser in pool[0]['servers']: ip_port.add(((str(ser['ip']['addr']) + ':') + str(ser['port']))) for server in nextpool[0]['servers']: ipport = ((str(server['ip']['addr']) + ':') + str(server['port'])) if (ipport not in list(ip_port)): pool[0]['servers'].append(server) for cl in csv_writer_dict_list: if ((cl['Object Name'] == nextpool[0]['name'].replace('-pool', '')) and (cl['Netscaler Command'] in ['add service', 'add serviceGroup'])): cl['AVI Object'] = ('Merged to %s' % pool[0]['name']) mergelist.append(nextpool[0]['name']) for plg in avi_config['PoolGroup']: plg['members'] = [member for member in plg['members'] if (member['pool_ref'].split('&')[1].split('=')[1] not in mergelist)] avi_config['Pool'] = [pools for pools in avi_config['Pool'] if (pools['name'] not in mergelist)]
4,898,384,470,949,158,000
This method merge the pools in AVI if HM is same :param avi_config: :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
merge_pool
avinetworks/alb-sdk
python
def merge_pool(self, avi_config): '\n This method merge the pools in AVI if HM is same\n :param avi_config:\n :return:\n ' mergelist = [] for poolgrp in avi_config['PoolGroup']: if (poolgrp['name'] == 'lb-depoed1cdb.qai-pri-5984-poolgroup'): print('found') pool_member = [obj for obj in poolgrp['members'] if (not (obj.get('priority_label', '10') == '2'))] length = len(pool_member) for count in range(length): pool_name = pool_member[count]['pool_ref'].split('&')[1].split('=')[1] if (pool_name in mergelist): continue pool = [pl for pl in avi_config['Pool'] if (pl['name'] == pool_name)] if (not pool): LOG.debug(("'%s' not present" % pool_name)) continue for count2 in range((count + 1), length): pname = pool_member[count2]['pool_ref'].split('&')[1].split('=')[1] nextpool = [pol for pol in avi_config['Pool'] if (pol['name'] == pname)] if (not nextpool): LOG.debug(("'%s' not present" % pname)) continue if (pool[0]['health_monitor_refs'].sort() == nextpool[0]['health_monitor_refs'].sort()): LOG.debug(("Merging pool '%s' in '%s'" % (nextpool[0]['name'], pool[0]['name']))) ip_port = set() for ser in pool[0]['servers']: ip_port.add(((str(ser['ip']['addr']) + ':') + str(ser['port']))) for server in nextpool[0]['servers']: ipport = ((str(server['ip']['addr']) + ':') + str(server['port'])) if (ipport not in list(ip_port)): pool[0]['servers'].append(server) for cl in csv_writer_dict_list: if ((cl['Object Name'] == nextpool[0]['name'].replace('-pool', '')) and (cl['Netscaler Command'] in ['add service', 'add serviceGroup'])): cl['AVI Object'] = ('Merged to %s' % pool[0]['name']) mergelist.append(nextpool[0]['name']) for plg in avi_config['PoolGroup']: plg['members'] = [member for member in plg['members'] if (member['pool_ref'].split('&')[1].split('=')[1] not in mergelist)] avi_config['Pool'] = [pools for pools in avi_config['Pool'] if (pools['name'] not in mergelist)]
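The server-merge step in merge_pool reduces to deduplicating backend members by their ip:port pair before appending; a small self-contained sketch of that idea (the two pool dicts are invented examples, not converter output):

pool_a = {'name': 'pool-a', 'servers': [{'ip': {'addr': '10.0.0.1'}, 'port': 80}]}
pool_b = {'name': 'pool-b', 'servers': [{'ip': {'addr': '10.0.0.1'}, 'port': 80},
                                        {'ip': {'addr': '10.0.0.2'}, 'port': 80}]}

seen = {'%s:%s' % (s['ip']['addr'], s['port']) for s in pool_a['servers']}
for server in pool_b['servers']:
    key = '%s:%s' % (server['ip']['addr'], server['port'])
    if key not in seen:
        pool_a['servers'].append(server)   # only the member not already present is merged in
        seen.add(key)

print(pool_a['servers'])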
def add_policy(self, policy, updated_vs_name, avi_config, tmp_policy_ref, vs_obj, tenant_name, cloud_name, prefix, used_poolgrp_ref): '\n This method is used to add policy objects to AVI and also add\n reference in VS\n :param policy: policy object\n :param updated_vs_name: vs name\n :param avi_config: avi config dict\n :param tmp_policy_ref: list of policy ref which are already used\n :param vs_obj: vs object\n :param tenant_name: name of tenant\n :param cloud_name: name of cloud\n :param prefix: prefix\n :param used_poolgrp_ref: list of used pool group ref\n :return:\n ' if (policy['name'] in tmp_policy_ref): policy = self.clone_http_policy_set(policy, updated_vs_name, avi_config, tenant_name, cloud_name, used_poolgrp_ref, userprefix=prefix) updated_http_policy_ref = self.get_object_ref(policy['name'], OBJECT_TYPE_HTTP_POLICY_SET, tenant_name) tmp_policy_ref.append(policy['name']) http_policies = {'index': 11, 'http_policy_set_ref': updated_http_policy_ref} if (not vs_obj.get('http_policies')): vs_obj['http_policies'] = [] else: ind = max([policies['index'] for policies in vs_obj['http_policies']]) http_policies['index'] = (ind + 1) vs_obj['http_policies'].append(http_policies) avi_config['HTTPPolicySet'].append(policy)
3,358,375,780,726,247,000
This method is used to add policy objects to AVI and also add reference in VS :param policy: policy object :param updated_vs_name: vs name :param avi_config: avi config dict :param tmp_policy_ref: list of policy ref which are already used :param vs_obj: vs object :param tenant_name: name of tenant :param cloud_name: name of cloud :param prefix: prefix :param used_poolgrp_ref: list of used pool group ref :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
add_policy
avinetworks/alb-sdk
python
def add_policy(self, policy, updated_vs_name, avi_config, tmp_policy_ref, vs_obj, tenant_name, cloud_name, prefix, used_poolgrp_ref): '\n This method is used to add policy objects to AVI and also add\n reference in VS\n :param policy: policy object\n :param updated_vs_name: vs name\n :param avi_config: avi config dict\n :param tmp_policy_ref: list of policy ref which are already used\n :param vs_obj: vs object\n :param tenant_name: name of tenant\n :param cloud_name: name of cloud\n :param prefix: prefix\n :param used_poolgrp_ref: list of used pool group ref\n :return:\n ' if (policy['name'] in tmp_policy_ref): policy = self.clone_http_policy_set(policy, updated_vs_name, avi_config, tenant_name, cloud_name, used_poolgrp_ref, userprefix=prefix) updated_http_policy_ref = self.get_object_ref(policy['name'], OBJECT_TYPE_HTTP_POLICY_SET, tenant_name) tmp_policy_ref.append(policy['name']) http_policies = {'index': 11, 'http_policy_set_ref': updated_http_policy_ref} if (not vs_obj.get('http_policies')): vs_obj['http_policies'] = [] else: ind = max([policies['index'] for policies in vs_obj['http_policies']]) http_policies['index'] = (ind + 1) vs_obj['http_policies'].append(http_policies) avi_config['HTTPPolicySet'].append(policy)
def build_redirect_action_dict(self, redirect_url, enable_ssl): '\n This method returns a redirect action dict\n :param redirect_url: redirect url\n :param enable_ssl: flag for ssl enable\n :return:\n ' redirect_url = self.parse_url(redirect_url) protocol = str(redirect_url.scheme).upper() hostname = str(redirect_url.hostname) pathstring = str(redirect_url.path) querystring = str(redirect_url.query) full_path = (('%s?%s' % (pathstring, querystring)) if (pathstring and querystring) else pathstring) protocol = (((enable_ssl and 'HTTPS') or 'HTTP') if (not protocol) else protocol) action = {'protocol': protocol} if hostname: action.update({'host': {'type': 'URI_PARAM_TYPE_TOKENIZED', 'tokens': [{'type': 'URI_TOKEN_TYPE_STRING', 'str_value': hostname, 'start_index': '0', 'end_index': '65535'}]}}) if full_path: action.update({'path': {'type': 'URI_PARAM_TYPE_TOKENIZED', 'tokens': [{'type': 'URI_TOKEN_TYPE_STRING', 'str_value': full_path, 'start_index': '0', 'end_index': '65535'}]}}) return action
-4,270,356,411,581,813,000
This method returns a redirect action dict :param redirect_url: redirect url :param enable_ssl: flag for ssl enable :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
build_redirect_action_dict
avinetworks/alb-sdk
python
def build_redirect_action_dict(self, redirect_url, enable_ssl): '\n This method returns a redirect action dict\n :param redirect_url: redirect url\n :param enable_ssl: flag for ssl enable\n :return:\n ' redirect_url = self.parse_url(redirect_url) protocol = str(redirect_url.scheme).upper() hostname = str(redirect_url.hostname) pathstring = str(redirect_url.path) querystring = str(redirect_url.query) full_path = (('%s?%s' % (pathstring, querystring)) if (pathstring and querystring) else pathstring) protocol = (((enable_ssl and 'HTTPS') or 'HTTP') if (not protocol) else protocol) action = {'protocol': protocol} if hostname: action.update({'host': {'type': 'URI_PARAM_TYPE_TOKENIZED', 'tokens': [{'type': 'URI_TOKEN_TYPE_STRING', 'str_value': hostname, 'start_index': '0', 'end_index': '65535'}]}}) if full_path: action.update({'path': {'type': 'URI_PARAM_TYPE_TOKENIZED', 'tokens': [{'type': 'URI_TOKEN_TYPE_STRING', 'str_value': full_path, 'start_index': '0', 'end_index': '65535'}]}}) return action
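To see the tokenized shape this method produces, here is the action dict built by hand for one hypothetical redirect URL (no NetScaler input involved; the token layout simply mirrors the code above):

from urllib.parse import urlparse

u = urlparse('https://www.example.com/maintenance')
action = {'protocol': u.scheme.upper(),
          'host': {'type': 'URI_PARAM_TYPE_TOKENIZED',
                   'tokens': [{'type': 'URI_TOKEN_TYPE_STRING', 'str_value': u.hostname,
                               'start_index': '0', 'end_index': '65535'}]},
          'path': {'type': 'URI_PARAM_TYPE_TOKENIZED',
                   'tokens': [{'type': 'URI_TOKEN_TYPE_STRING', 'str_value': u.path,
                               'start_index': '0', 'end_index': '65535'}]}}
print(action)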
def create_http_to_https_custom_profile(self): '\n\n :return: custom application profile dict\n ' return {'name': 'ns-migrate-http', 'type': 'APPLICATION_PROFILE_TYPE_HTTP', 'tenant_ref': '/api/tenant/?name=admin', 'preserve_client_ip': False, 'http_profile': {'max_rps_uri': 0, 'keepalive_header': False, 'max_rps_cip_uri': 0, 'x_forwarded_proto_enabled': False, 'connection_multiplexing_enabled': True, 'websockets_enabled': True, 'enable_request_body_buffering': False, 'hsts_enabled': False, 'xff_enabled': True, 'disable_keepalive_posts_msie6': True, 'keepalive_timeout': 30000, 'ssl_client_certificate_mode': 'SSL_CLIENT_CERTIFICATE_NONE', 'http_to_https': True, 'max_bad_rps_cip_uri': 0, 'client_body_timeout': 30000, 'httponly_enabled': False, 'hsts_max_age': 365, 'max_bad_rps_cip': 0, 'server_side_redirect_to_https': False, 'client_max_header_size': 12, 'client_max_request_size': 48, 'max_rps_unknown_uri': 0, 'post_accept_timeout': 30000, 'client_header_timeout': 10000, 'secure_cookie_enabled': False, 'xff_alternate_name': 'X-Forwarded-For', 'max_rps_cip': 0, 'client_max_body_size': 0, 'max_rps_unknown_cip': 0, 'allow_dots_in_header_name': False, 'max_bad_rps_uri': 0, 'use_app_keepalive_timeout': False}, 'dos_rl_profile': {'rl_profile': {'client_ip_connections_rate_limit': {'explicit_tracking': False, 'action': {'status_code': 'HTTP_LOCAL_RESPONSE_STATUS_CODE_429', 'type': 'RL_ACTION_NONE'}, 'fine_grain': False}}, 'dos_profile': {'thresh_period': 5}}}
5,881,756,311,370,102,000
:return: custom application profile dict
python/avi/migrationtools/netscaler_converter/ns_util.py
create_http_to_https_custom_profile
avinetworks/alb-sdk
python
def create_http_to_https_custom_profile(self): '\n\n \n ' return {'name': 'ns-migrate-http', 'type': 'APPLICATION_PROFILE_TYPE_HTTP', 'tenant_ref': '/api/tenant/?name=admin', 'preserve_client_ip': False, 'http_profile': {'max_rps_uri': 0, 'keepalive_header': False, 'max_rps_cip_uri': 0, 'x_forwarded_proto_enabled': False, 'connection_multiplexing_enabled': True, 'websockets_enabled': True, 'enable_request_body_buffering': False, 'hsts_enabled': False, 'xff_enabled': True, 'disable_keepalive_posts_msie6': True, 'keepalive_timeout': 30000, 'ssl_client_certificate_mode': 'SSL_CLIENT_CERTIFICATE_NONE', 'http_to_https': True, 'max_bad_rps_cip_uri': 0, 'client_body_timeout': 30000, 'httponly_enabled': False, 'hsts_max_age': 365, 'max_bad_rps_cip': 0, 'server_side_redirect_to_https': False, 'client_max_header_size': 12, 'client_max_request_size': 48, 'max_rps_unknown_uri': 0, 'post_accept_timeout': 30000, 'client_header_timeout': 10000, 'secure_cookie_enabled': False, 'xff_alternate_name': 'X-Forwarded-For', 'max_rps_cip': 0, 'client_max_body_size': 0, 'max_rps_unknown_cip': 0, 'allow_dots_in_header_name': False, 'max_bad_rps_uri': 0, 'use_app_keepalive_timeout': False}, 'dos_rl_profile': {'rl_profile': {'client_ip_connections_rate_limit': {'explicit_tracking': False, 'action': {'status_code': 'HTTP_LOCAL_RESPONSE_STATUS_CODE_429', 'type': 'RL_ACTION_NONE'}, 'fine_grain': False}}, 'dos_profile': {'thresh_period': 5}}}
def correct_vs_ref(self, avi_config): '\n This method corrects the reference of VS to different objects\n :param avi_config: avi configuration dict\n :return:\n ' global csv_writer_dict_list avi_graph = self.make_graph(avi_config) csv_dict_sub = [row for row in csv_writer_dict_list if ((row['Netscaler Command'] not in ('add lb vserver', 'add cs vserver')) and (row['Status'] in (STATUS_PARTIAL, STATUS_SUCCESSFUL)))] for dict_row in csv_dict_sub: obj = dict_row['AVI Object'] if (isinstance(obj, str) and obj.startswith('{')): vs = [] if ('__/__' in obj): for dataobj in obj.split('__/__'): obj = eval(dataobj) self.add_vs_ref(obj, avi_graph, vs) else: obj = eval(obj) self.add_vs_ref(obj, avi_graph, vs) if vs: dict_row['VS Reference'] = str(list(set(vs))) else: dict_row['VS Reference'] = STATUS_NOT_IN_USE
-3,131,435,864,647,046,000
This method corrects the reference of VS to different objects :param avi_config: avi configuration dict :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
correct_vs_ref
avinetworks/alb-sdk
python
def correct_vs_ref(self, avi_config): '\n This method corrects the reference of VS to different objects\n :param avi_config: avi configuration dict\n :return:\n ' global csv_writer_dict_list avi_graph = self.make_graph(avi_config) csv_dict_sub = [row for row in csv_writer_dict_list if ((row['Netscaler Command'] not in ('add lb vserver', 'add cs vserver')) and (row['Status'] in (STATUS_PARTIAL, STATUS_SUCCESSFUL)))] for dict_row in csv_dict_sub: obj = dict_row['AVI Object'] if (isinstance(obj, str) and obj.startswith('{')): vs = [] if ('__/__' in obj): for dataobj in obj.split('__/__'): obj = eval(dataobj) self.add_vs_ref(obj, avi_graph, vs) else: obj = eval(obj) self.add_vs_ref(obj, avi_graph, vs) if vs: dict_row['VS Reference'] = str(list(set(vs))) else: dict_row['VS Reference'] = STATUS_NOT_IN_USE
def add_vs_ref(self, obj, avi_graph, vs): '\n Helper method for adding vs ref\n :param obj: object\n :param avi_graph: avi graph\n :param vs: VS list\n :return:\n ' obj_name = obj.get('name', obj.get('hostname')) if obj_name: if avi_graph.has_node(obj_name): LOG.debug('Checked predecessor for %s', obj_name) predecessor = list(avi_graph.predecessors(obj_name)) if predecessor: self.get_predecessor(predecessor, avi_graph, vs) else: LOG.debug('Object %s may be merged or orphaned', obj_name)
-5,466,778,583,118,322,000
Helper method for adding vs ref :param obj: object :param avi_graph: avi graph :param vs: VS list :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
add_vs_ref
avinetworks/alb-sdk
python
def add_vs_ref(self, obj, avi_graph, vs): '\n Helper method for adding vs ref\n :param obj: object\n :param avi_graph: avi graph\n :param vs: VS list\n :return:\n ' obj_name = obj.get('name', obj.get('hostname')) if obj_name: if avi_graph.has_node(obj_name): LOG.debug('Checked predecessor for %s', obj_name) predecessor = list(avi_graph.predecessors(obj_name)) if predecessor: self.get_predecessor(predecessor, avi_graph, vs) else: LOG.debug('Object %s may be merged or orphaned', obj_name)
def get_predecessor(self, predecessor, avi_graph, vs): '\n This method gets the predecessor of the object\n :param predecessor: predecessor list\n :param avi_graph: avi graph\n :param vs: VS list\n :return:\n ' if (len(predecessor) > 1): for node in predecessor: nodelist = [node] self.get_predecessor(nodelist, avi_graph, vs) elif len(predecessor): node_obj = [nod for nod in list(avi_graph.nodes().data()) if (nod[0] == predecessor[0])] if (node_obj and ((node_obj[0][1]['type'] == 'VS') or ('VS' in node_obj[0][1]['type']))): LOG.debug('Predecessor %s found', predecessor[0]) vs.extend(predecessor) else: LOG.debug('Checked predecessor for %s', predecessor[0]) nodelist = list(avi_graph.predecessors(predecessor[0])) self.get_predecessor(nodelist, avi_graph, vs) else: LOG.debug('No more predecessor')
-3,511,810,973,429,969,400
This method gets the predecessor of the object :param predecessor: predecessor list :param avi_graph: avi graph :param vs: VS list :return:
python/avi/migrationtools/netscaler_converter/ns_util.py
get_predecessor
avinetworks/alb-sdk
python
def get_predecessor(self, predecessor, avi_graph, vs): '\n This method gets the predecessor of the object\n :param predecessor: predecessor list\n :param avi_graph: avi graph\n :param vs: VS list\n :return:\n ' if (len(predecessor) > 1): for node in predecessor: nodelist = [node] self.get_predecessor(nodelist, avi_graph, vs) elif len(predecessor): node_obj = [nod for nod in list(avi_graph.nodes().data()) if (nod[0] == predecessor[0])] if (node_obj and ((node_obj[0][1]['type'] == 'VS') or ('VS' in node_obj[0][1]['type']))): LOG.debug('Predecessor %s found', predecessor[0]) vs.extend(predecessor) else: LOG.debug('Checked predecessor for %s', predecessor[0]) nodelist = list(avi_graph.predecessors(predecessor[0])) self.get_predecessor(nodelist, avi_graph, vs) else: LOG.debug('No more predecessor')
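get_predecessor is a reverse-reachability walk over the object graph; assuming make_graph builds a networkx DiGraph with a 'type' attribute on each node (which is what the predecessors()/nodes().data() calls suggest), a minimal sketch of the same traversal on a toy graph:

import networkx as nx

g = nx.DiGraph()
g.add_node('vs-1', type='VS')          # hypothetical virtual service
g.add_node('pg-1', type='PoolGroup')   # referenced by vs-1
g.add_node('pool-1', type='Pool')      # referenced by pg-1
g.add_edge('vs-1', 'pg-1')
g.add_edge('pg-1', 'pool-1')

def find_vs(node, graph, found):
    # Walk predecessors upward until a node typed 'VS' is reached.
    for pred in graph.predecessors(node):
        if 'VS' in graph.nodes[pred].get('type', ''):
            found.append(pred)
        else:
            find_vs(pred, graph, found)

vs_refs = []
find_vs('pool-1', g, vs_refs)
print(vs_refs)   # ['vs-1']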
def __init__(self, base_url=None, symbol=None, apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_', shouldWSAuth=True, postOnly=False, timeout=7): 'Init connector.' self.base_url = base_url self.symbol = symbol self.postOnly = postOnly self.shouldWSAuth = shouldWSAuth if (apiKey is None): raise Exception(('Please set an API key and Secret to get started. See ' + 'https://github.com/BitMEX/sample-market-maker/#getting-started for more information.')) self.apiKey = apiKey self.apiSecret = apiSecret if (len(orderIDPrefix) > 13): raise ValueError('settings.ORDERID_PREFIX must be at most 13 characters long!') self.orderIDPrefix = orderIDPrefix self.retries = 0 self.session = requests.Session() self.session.headers.update({'user-agent': ('liquidbot-' + constants.VERSION)}) self.session.headers.update({'content-type': 'application/json'}) self.session.headers.update({'accept': 'application/json'}) self.ws = BitMEXWebsocket() self.ws.connect(base_url, symbol, shouldAuth=shouldWSAuth) self.__check_ws_alive() self.timeout = timeout
-8,712,447,931,383,142,000
Init connector.
market_maker/bitmex.py
__init__
veskokaradzhov/sample-market-maker
python
def __init__(self, base_url=None, symbol=None, apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_', shouldWSAuth=True, postOnly=False, timeout=7): self.base_url = base_url self.symbol = symbol self.postOnly = postOnly self.shouldWSAuth = shouldWSAuth if (apiKey is None): raise Exception(('Please set an API key and Secret to get started. See ' + 'https://github.com/BitMEX/sample-market-maker/#getting-started for more information.')) self.apiKey = apiKey self.apiSecret = apiSecret if (len(orderIDPrefix) > 13): raise ValueError('settings.ORDERID_PREFIX must be at most 13 characters long!') self.orderIDPrefix = orderIDPrefix self.retries = 0 self.session = requests.Session() self.session.headers.update({'user-agent': ('liquidbot-' + constants.VERSION)}) self.session.headers.update({'content-type': 'application/json'}) self.session.headers.update({'accept': 'application/json'}) self.ws = BitMEXWebsocket() self.ws.connect(base_url, symbol, shouldAuth=shouldWSAuth) self.__check_ws_alive() self.timeout = timeout
def ticker_data(self, symbol=None): 'Get ticker data.' if (symbol is None): symbol = self.symbol return self.ws.get_ticker(symbol)
1,730,474,277,475,240,000
Get ticker data.
market_maker/bitmex.py
ticker_data
veskokaradzhov/sample-market-maker
python
def ticker_data(self, symbol=None): if (symbol is None): symbol = self.symbol return self.ws.get_ticker(symbol)
def instrument(self, symbol): "Get an instrument's details." return self.ws.get_instrument(symbol)
2,904,046,683,405,473,300
Get an instrument's details.
market_maker/bitmex.py
instrument
veskokaradzhov/sample-market-maker
python
def instrument(self, symbol): return self.ws.get_instrument(symbol)
def market_depth(self): 'Get market depth / orderbook.' return self.ws.market_depth()
6,477,251,214,908,190,000
Get market depth / orderbook.
market_maker/bitmex.py
market_depth
veskokaradzhov/sample-market-maker
python
def market_depth(self): return self.ws.market_depth()
def recent_trades(self): "Get recent trades.\n\n Returns\n -------\n A list of dicts:\n {u'amount': 60,\n u'date': 1306775375,\n u'price': 8.7401099999999996,\n u'tid': u'93842'},\n\n " return self.ws.recent_trades()
5,726,568,376,258,028,000
Get recent trades. Returns ------- A list of dicts: {u'amount': 60, u'date': 1306775375, u'price': 8.7401099999999996, u'tid': u'93842'},
market_maker/bitmex.py
recent_trades
veskokaradzhov/sample-market-maker
python
def recent_trades(self): "Get recent trades.\n\n Returns\n -------\n A list of dicts:\n {u'amount': 60,\n u'date': 1306775375,\n u'price': 8.7401099999999996,\n u'tid': u'93842'},\n\n " return self.ws.recent_trades()
def authentication_required(fn): 'Annotation for methods that require auth.' def wrapped(self, *args, **kwargs): if (not self.apiKey): msg = 'You must be authenticated to use this method' raise errors.AuthenticationError(msg) else: return fn(self, *args, **kwargs) return wrapped
-3,795,446,689,248,801,300
Annotation for methods that require auth.
market_maker/bitmex.py
authentication_required
veskokaradzhov/sample-market-maker
python
def authentication_required(fn): def wrapped(self, *args, **kwargs): if (not self.apiKey): msg = 'You must be authenticated to use this method' raise errors.AuthenticationError(msg) else: return fn(self, *args, **kwargs) return wrapped
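The decorator simply gates a method on self.apiKey being set; a self-contained illustration of the pattern (Client and RuntimeError stand in for the connector class and its AuthenticationError):

def authentication_required(fn):
    def wrapped(self, *args, **kwargs):
        if not self.apiKey:
            raise RuntimeError('You must be authenticated to use this method')
        return fn(self, *args, **kwargs)
    return wrapped

class Client:
    def __init__(self, apiKey=None):
        self.apiKey = apiKey

    @authentication_required
    def funds(self):
        return {'balance': 0}

print(Client(apiKey='key').funds())      # passes the gate
try:
    Client().funds()                     # no key, so the wrapper raises
except RuntimeError as err:
    print(err)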
@authentication_required def funds(self): 'Get your current balance.' return self.ws.funds()
-2,661,775,430,091,995,000
Get your current balance.
market_maker/bitmex.py
funds
veskokaradzhov/sample-market-maker
python
@authentication_required def funds(self): return self.ws.funds()
@authentication_required def position(self, symbol): 'Get your open position.' return self.ws.position(symbol)
-5,280,714,735,572,079,000
Get your open position.
market_maker/bitmex.py
position
veskokaradzhov/sample-market-maker
python
@authentication_required def position(self, symbol): return self.ws.position(symbol)
@authentication_required def isolate_margin(self, symbol, leverage, rethrow_errors=False): 'Set the leverage on an isolated margin position' path = 'position/leverage' postdict = {'symbol': symbol, 'leverage': leverage} return self._curl_bitmex(path=path, postdict=postdict, verb='POST', rethrow_errors=rethrow_errors)
-6,562,018,996,676,132,000
Set the leverage on an isolated margin position
market_maker/bitmex.py
isolate_margin
veskokaradzhov/sample-market-maker
python
@authentication_required def isolate_margin(self, symbol, leverage, rethrow_errors=False): path = 'position/leverage' postdict = {'symbol': symbol, 'leverage': leverage} return self._curl_bitmex(path=path, postdict=postdict, verb='POST', rethrow_errors=rethrow_errors)
@authentication_required def buy(self, quantity, price): 'Place a buy order.\n\n Returns order object. ID: orderID\n ' return self.place_order(quantity, price)
1,627,606,856,503,511,000
Place a buy order. Returns order object. ID: orderID
market_maker/bitmex.py
buy
veskokaradzhov/sample-market-maker
python
@authentication_required def buy(self, quantity, price): 'Place a buy order.\n\n Returns order object. ID: orderID\n ' return self.place_order(quantity, price)
@authentication_required def sell(self, quantity, price): 'Place a sell order.\n\n Returns order object. ID: orderID\n ' return self.place_order((- quantity), price)
7,339,910,726,446,182,000
Place a sell order. Returns order object. ID: orderID
market_maker/bitmex.py
sell
veskokaradzhov/sample-market-maker
python
@authentication_required def sell(self, quantity, price): 'Place a sell order.\n\n Returns order object. ID: orderID\n ' return self.place_order((- quantity), price)
@authentication_required def place_order(self, quantity, price): 'Place an order.' if (price < 0): raise Exception('Price must be positive.') endpoint = 'order' clOrdID = (self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('utf8').rstrip('=\n')) postdict = {'symbol': self.symbol, 'orderQty': quantity, 'price': price, 'clOrdID': clOrdID} return self._curl_bitmex(path=endpoint, postdict=postdict, verb='POST')
2,789,380,601,364,919,300
Place an order.
market_maker/bitmex.py
place_order
veskokaradzhov/sample-market-maker
python
@authentication_required def place_order(self, quantity, price): if (price < 0): raise Exception('Price must be positive.') endpoint = 'order' clOrdID = (self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('utf8').rstrip('=\n')) postdict = {'symbol': self.symbol, 'orderQty': quantity, 'price': price, 'clOrdID': clOrdID} return self._curl_bitmex(path=endpoint, postdict=postdict, verb='POST')
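The clOrdID built here is just the configured prefix plus a base64-encoded random UUID with the padding stripped; the construction in isolation:

import base64
import uuid

prefix = 'mm_bitmex_'
clOrdID = prefix + base64.b64encode(uuid.uuid4().bytes).decode('utf8').rstrip('=\n')
print(clOrdID, len(clOrdID))   # 16 random bytes -> 22 base64 chars, 32 with the prefix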
@authentication_required def amend_bulk_orders(self, orders): 'Amend multiple orders.' return self._curl_bitmex(path='order/bulk', postdict={'orders': orders}, verb='PUT', rethrow_errors=True)
3,046,872,120,849,604,600
Amend multiple orders.
market_maker/bitmex.py
amend_bulk_orders
veskokaradzhov/sample-market-maker
python
@authentication_required def amend_bulk_orders(self, orders): return self._curl_bitmex(path='order/bulk', postdict={'orders': orders}, verb='PUT', rethrow_errors=True)
@authentication_required def create_bulk_orders(self, orders): 'Create multiple orders.' for order in orders: order['clOrdID'] = (self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('utf8').rstrip('=\n')) order['symbol'] = self.symbol if self.postOnly: order['execInst'] = 'ParticipateDoNotInitiate' return self._curl_bitmex(path='order/bulk', postdict={'orders': orders}, verb='POST')
6,811,979,488,187,572,000
Create multiple orders.
market_maker/bitmex.py
create_bulk_orders
veskokaradzhov/sample-market-maker
python
@authentication_required def create_bulk_orders(self, orders): for order in orders: order['clOrdID'] = (self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('utf8').rstrip('=\n')) order['symbol'] = self.symbol if self.postOnly: order['execInst'] = 'ParticipateDoNotInitiate' return self._curl_bitmex(path='order/bulk', postdict={'orders': orders}, verb='POST')
@authentication_required def open_orders(self): 'Get open orders.' return self.ws.open_orders(self.orderIDPrefix)
-8,255,759,037,361,243,000
Get open orders.
market_maker/bitmex.py
open_orders
veskokaradzhov/sample-market-maker
python
@authentication_required def open_orders(self): return self.ws.open_orders(self.orderIDPrefix)
@authentication_required def http_open_orders(self): 'Get open orders via HTTP. Used on close to ensure we catch them all.' path = 'order' orders = self._curl_bitmex(path=path, query={'filter': json.dumps({'ordStatus.isTerminated': False, 'symbol': self.symbol}), 'count': 500}, verb='GET') return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
7,783,532,974,160,712,000
Get open orders via HTTP. Used on close to ensure we catch them all.
market_maker/bitmex.py
http_open_orders
veskokaradzhov/sample-market-maker
python
@authentication_required def http_open_orders(self): path = 'order' orders = self._curl_bitmex(path=path, query={'filter': json.dumps({'ordStatus.isTerminated': False, 'symbol': self.symbol}), 'count': 500}, verb='GET') return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
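The open-order filter is nothing more than a JSON-encoded dict passed as a query parameter; a quick look at what actually goes on the wire (the symbol is illustrative):

import json
from urllib.parse import urlencode

query = {'filter': json.dumps({'ordStatus.isTerminated': False, 'symbol': 'XBTUSD'}),
         'count': 500}
print(urlencode(query))   # the filter JSON is percent-encoded alongside count=500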
@authentication_required def cancel(self, orderID): 'Cancel an existing order.' path = 'order' postdict = {'orderID': orderID} return self._curl_bitmex(path=path, postdict=postdict, verb='DELETE')
-3,841,336,432,020,093,000
Cancel an existing order.
market_maker/bitmex.py
cancel
veskokaradzhov/sample-market-maker
python
@authentication_required def cancel(self, orderID): path = 'order' postdict = {'orderID': orderID} return self._curl_bitmex(path=path, postdict=postdict, verb='DELETE')
def _curl_bitmex(self, path, query=None, postdict=None, timeout=None, verb=None, rethrow_errors=False, max_retries=None): 'Send a request to BitMEX Servers.' url = (self.base_url + path) if (timeout is None): timeout = self.timeout if (not verb): verb = ('POST' if postdict else 'GET') if (max_retries is None): max_retries = (0 if (verb in ['POST', 'PUT']) else 3) auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret) def exit_or_throw(e): if rethrow_errors: raise e else: exit(1) def retry(): self.retries += 1 if (self.retries > max_retries): raise Exception(('Max retries on %s (%s) hit, raising.' % (path, json.dumps((postdict or ''))))) return self._curl_bitmex(path, query, postdict, timeout, verb, rethrow_errors, max_retries) response = None try: logger.info(('sending req to %s: %s' % (url, json.dumps((postdict or query or ''))))) req = requests.Request(verb, url, json=postdict, auth=auth, params=query) prepped = self.session.prepare_request(req) response = self.session.send(prepped, timeout=timeout) response.raise_for_status() except requests.exceptions.HTTPError as e: if (response is None): raise e if (response.status_code == 401): logger.error('API Key or Secret incorrect, please check and restart.') logger.error(('Error: ' + response.text)) if postdict: logger.error(postdict) exit(1) elif (response.status_code == 404): if (verb == 'DELETE'): logger.error(('Order not found: %s' % postdict['orderID'])) return logger.error(('Unable to contact the BitMEX API (404). ' + ('Request: %s \n %s' % (url, json.dumps(postdict))))) exit_or_throw(e) elif (response.status_code == 429): logger.error((('Ratelimited on current request. Sleeping, then trying again. Try fewer ' + 'order pairs or contact example@example.com to raise your limits. ') + ('Request: %s \n %s' % (url, json.dumps(postdict))))) ratelimit_reset = response.headers['X-RateLimit-Reset'] to_sleep = (int(ratelimit_reset) - int(time.time())) reset_str = datetime.datetime.fromtimestamp(int(ratelimit_reset)).strftime('%X') logger.warning('Canceling all known orders in the meantime.') self.cancel([o['orderID'] for o in self.open_orders()]) logger.error(('Your ratelimit will reset at %s. Sleeping for %d seconds.' % (reset_str, to_sleep))) time.sleep(to_sleep) return retry() elif (response.status_code == 503): logger.warning(('Unable to contact the BitMEX API (503), retrying. ' + ('Request: %s \n %s' % (url, json.dumps(postdict))))) time.sleep(3) return retry() elif (response.status_code == 400): error = response.json()['error'] message = (error['message'].lower() if error else '') if ('duplicate clordid' in message): orders = (postdict['orders'] if ('orders' in postdict) else postdict) IDs = json.dumps({'clOrdID': [order['clOrdID'] for order in orders]}) orderResults = self._curl_bitmex('/order', query={'filter': IDs}, verb='GET') for (i, order) in enumerate(orderResults): if ((order['orderQty'] != abs(postdict['orderQty'])) or (order['side'] != ('Buy' if (postdict['orderQty'] > 0) else 'Sell')) or (order['price'] != postdict['price']) or (order['symbol'] != postdict['symbol'])): raise Exception(('Attempted to recover from duplicate clOrdID, but order returned from API ' + ('did not match POST.\nPOST data: %s\nReturned order: %s' % (json.dumps(orders[i]), json.dumps(order))))) return orderResults elif ('insufficient available balance' in message): logger.error(('Account out of funds. The message: %s' % error['message'])) exit_or_throw(Exception('Insufficient Funds')) logger.error(('Unhandled Error: %s: %s' % (e, response.text))) logger.error(('Endpoint was: %s %s: %s' % (verb, path, json.dumps(postdict)))) exit_or_throw(e) except requests.exceptions.Timeout as e: logger.warning(('Timed out on request: %s (%s), retrying...' % (path, json.dumps((postdict or ''))))) return retry() except requests.exceptions.ConnectionError as e: logger.warning(('Unable to contact the BitMEX API (%s). Please check the URL. Retrying. ' + ('Request: %s %s \n %s' % (e, url, json.dumps(postdict))))) time.sleep(1) return retry() self.retries = 0 return response.json()
-4,221,852,213,426,500,600
Send a request to BitMEX Servers.
market_maker/bitmex.py
_curl_bitmex
veskokaradzhov/sample-market-maker
python
def _curl_bitmex(self, path, query=None, postdict=None, timeout=None, verb=None, rethrow_errors=False, max_retries=None): url = (self.base_url + path) if (timeout is None): timeout = self.timeout if (not verb): verb = ('POST' if postdict else 'GET') if (max_retries is None): max_retries = (0 if (verb in ['POST', 'PUT']) else 3) auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret) def exit_or_throw(e): if rethrow_errors: raise e else: exit(1) def retry(): self.retries += 1 if (self.retries > max_retries): raise Exception(('Max retries on %s (%s) hit, raising.' % (path, json.dumps((postdict or ''))))) return self._curl_bitmex(path, query, postdict, timeout, verb, rethrow_errors, max_retries) response = None try: logger.info(('sending req to %s: %s' % (url, json.dumps((postdict or query or ''))))) req = requests.Request(verb, url, json=postdict, auth=auth, params=query) prepped = self.session.prepare_request(req) response = self.session.send(prepped, timeout=timeout) response.raise_for_status() except requests.exceptions.HTTPError as e: if (response is None): raise e if (response.status_code == 401): logger.error('API Key or Secret incorrect, please check and restart.') logger.error(('Error: ' + response.text)) if postdict: logger.error(postdict) exit(1) elif (response.status_code == 404): if (verb == 'DELETE'): logger.error(('Order not found: %s' % postdict['orderID'])) return logger.error(('Unable to contact the BitMEX API (404). ' + ('Request: %s \n %s' % (url, json.dumps(postdict))))) exit_or_throw(e) elif (response.status_code == 429): logger.error((('Ratelimited on current request. Sleeping, then trying again. Try fewer ' + 'order pairs or contact example@example.com to raise your limits. ') + ('Request: %s \n %s' % (url, json.dumps(postdict))))) ratelimit_reset = response.headers['X-RateLimit-Reset'] to_sleep = (int(ratelimit_reset) - int(time.time())) reset_str = datetime.datetime.fromtimestamp(int(ratelimit_reset)).strftime('%X') logger.warning('Canceling all known orders in the meantime.') self.cancel([o['orderID'] for o in self.open_orders()]) logger.error(('Your ratelimit will reset at %s. Sleeping for %d seconds.' % (reset_str, to_sleep))) time.sleep(to_sleep) return retry() elif (response.status_code == 503): logger.warning(('Unable to contact the BitMEX API (503), retrying. ' + ('Request: %s \n %s' % (url, json.dumps(postdict))))) time.sleep(3) return retry() elif (response.status_code == 400): error = response.json()['error'] message = (error['message'].lower() if error else '') if ('duplicate clordid' in message): orders = (postdict['orders'] if ('orders' in postdict) else postdict) IDs = json.dumps({'clOrdID': [order['clOrdID'] for order in orders]}) orderResults = self._curl_bitmex('/order', query={'filter': IDs}, verb='GET') for (i, order) in enumerate(orderResults): if ((order['orderQty'] != abs(postdict['orderQty'])) or (order['side'] != ('Buy' if (postdict['orderQty'] > 0) else 'Sell')) or (order['price'] != postdict['price']) or (order['symbol'] != postdict['symbol'])): raise Exception(('Attempted to recover from duplicate clOrdID, but order returned from API ' + ('did not match POST.\nPOST data: %s\nReturned order: %s' % (json.dumps(orders[i]), json.dumps(order))))) return orderResults elif ('insufficient available balance' in message): logger.error(('Account out of funds. The message: %s' % error['message'])) exit_or_throw(Exception('Insufficient Funds')) logger.error(('Unhandled Error: %s: %s' % (e, response.text))) logger.error(('Endpoint was: %s %s: %s' % (verb, path, json.dumps(postdict)))) exit_or_throw(e) except requests.exceptions.Timeout as e: logger.warning(('Timed out on request: %s (%s), retrying...' % (path, json.dumps((postdict or ''))))) return retry() except requests.exceptions.ConnectionError as e: logger.warning(('Unable to contact the BitMEX API (%s). Please check the URL. Retrying. ' + ('Request: %s %s \n %s' % (e, url, json.dumps(postdict))))) time.sleep(1) return retry() self.retries = 0 return response.json()
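The 429 branch above sleeps until the epoch second reported in the X-RateLimit-Reset header; the arithmetic on its own, with a made-up header value standing in for the real response:

import datetime
import time

ratelimit_reset = int(time.time()) + 42   # stand-in for int(response.headers['X-RateLimit-Reset'])
to_sleep = ratelimit_reset - int(time.time())
reset_str = datetime.datetime.fromtimestamp(ratelimit_reset).strftime('%X')
print('sleeping %d seconds, ratelimit resets at %s' % (to_sleep, reset_str))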
def create_annotation_class(project, name, color, attribute_groups=None): 'Create annotation class in project\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param name: name for the class\n :type name: str\n :param color: RGB hex color value, e.g., "#FFFFAA"\n :type color: str\n :param attribute_groups: example:\n [ { "name": "tall", "is_multiselect": 0, "attributes": [ { "name": "yes" }, { "name": "no" } ] },\n { "name": "age", "is_multiselect": 0, "attributes": [ { "name": "young" }, { "name": "old" } ] } ]\n :type attribute_groups: list of dicts\n\n :return: new class metadata\n :rtype: dict\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) try: get_annotation_class_metadata(project, name) except SANonExistingAnnotationClassNameException: pass else: logger.warning('Annotation class %s already in project. Skipping.', name) return None (team_id, project_id) = (project['team_id'], project['id']) logger.info('Creating annotation class in project %s with name %s', project['name'], name) params = {'team_id': team_id, 'project_id': project_id} data = {'classes': [{'name': name, 'color': color, 'attribute_groups': (attribute_groups if (attribute_groups is not None) else [])}]} response = _api.send_request(req_type='POST', path='/classes', params=params, json_req=data) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't create class " + response.text)) res = response.json() new_class = res[0] return new_class
-5,240,219,407,293,144,000
Create annotation class in project :param project: project name or metadata of the project :type project: str or dict :param name: name for the class :type name: str :param color: RGB hex color value, e.g., "#FFFFAA" :type color: str :param attribute_groups: example: [ { "name": "tall", "is_multiselect": 0, "attributes": [ { "name": "yes" }, { "name": "no" } ] }, { "name": "age", "is_multiselect": 0, "attributes": [ { "name": "young" }, { "name": "old" } ] } ] :type attribute_groups: list of dicts :return: new class metadata :rtype: dict
superannotate/db/annotation_classes.py
create_annotation_class
yuki-inaho/superannotate-python-sdk
python
def create_annotation_class(project, name, color, attribute_groups=None): 'Create annotation class in project\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param name: name for the class\n :type name: str\n :param color: RGB hex color value, e.g., "#FFFFAA"\n :type color: str\n :param attribute_groups: example:\n [ { "name": "tall", "is_multiselect": 0, "attributes": [ { "name": "yes" }, { "name": "no" } ] },\n { "name": "age", "is_multiselect": 0, "attributes": [ { "name": "young" }, { "name": "old" } ] } ]\n :type attribute_groups: list of dicts\n\n :return: new class metadata\n :rtype: dict\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) try: get_annotation_class_metadata(project, name) except SANonExistingAnnotationClassNameException: pass else: logger.warning('Annotation class %s already in project. Skipping.', name) return None (team_id, project_id) = (project['team_id'], project['id']) logger.info('Creating annotation class in project %s with name %s', project['name'], name) params = {'team_id': team_id, 'project_id': project_id} data = {'classes': [{'name': name, 'color': color, 'attribute_groups': (attribute_groups if (attribute_groups is not None) else [])}]} response = _api.send_request(req_type='POST', path='/classes', params=params, json_req=data) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't create class " + response.text)) res = response.json() new_class = res[0] return new_class
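Going by the signature and the attribute_groups example embedded in the docstring, a call might look roughly like this (project and class names are placeholders, and an already-authenticated SDK session is assumed to be configured):

new_class = create_annotation_class(
    'Example Project', 'Person', '#FF0000',
    attribute_groups=[{'name': 'tall', 'is_multiselect': 0,
                       'attributes': [{'name': 'yes'}, {'name': 'no'}]}])
print(new_class)   # None if a class with that name already existed in the project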
def delete_annotation_class(project, annotation_class): 'Deletes annotation class from project\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param project: annotation class name or metadata\n :type project: str or dict\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) if (not isinstance(annotation_class, dict)): annotation_class = get_annotation_class_metadata(project, annotation_class) (team_id, project_id, name, class_id) = (_api.team_id, annotation_class['project_id'], annotation_class['name'], annotation_class['id']) logger.info('Deleting annotation class from project %s with name %s', project['name'], name) params = {'team_id': team_id, 'project_id': project_id} response = _api.send_request(req_type='DELETE', path=f'/class/{class_id}', params=params) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't delete annotation class " + response.text))
-7,986,916,238,161,438,000
Deletes annotation class from project :param project: project name or metadata of the project :type project: str or dict :param project: annotation class name or metadata :type project: str or dict
superannotate/db/annotation_classes.py
delete_annotation_class
yuki-inaho/superannotate-python-sdk
python
def delete_annotation_class(project, annotation_class): 'Deletes annotation class from project\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param project: annotation class name or metadata\n :type project: str or dict\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) if (not isinstance(annotation_class, dict)): annotation_class = get_annotation_class_metadata(project, annotation_class) (team_id, project_id, name, class_id) = (_api.team_id, annotation_class['project_id'], annotation_class['name'], annotation_class['id']) logger.info('Deleting annotation class from project %s with name %s', project['name'], name) params = {'team_id': team_id, 'project_id': project_id} response = _api.send_request(req_type='DELETE', path=f'/class/{class_id}', params=params) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't delete annotation class " + response.text))
def create_annotation_classes_from_classes_json(project, classes_json, from_s3_bucket=None): 'Creates annotation classes in project from a SuperAnnotate format\n annotation classes.json.\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param classes_json: JSON itself or path to the JSON file\n :type classes_json: list or Pathlike (str or Path)\n :param from_s3_bucket: AWS S3 bucket to use. If None then classes_json is in local filesystem\n :type from_s3_bucket: str\n\n :return: list of created annotation class metadatas\n :rtype: list of dicts\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) (team_id, project_id) = (project['team_id'], project['id']) if (not isinstance(classes_json, list)): logger.info('Creating annotation classes in project %s from %s.', project['name'], classes_json) if (from_s3_bucket is None): classes = json.load(open(classes_json)) else: from_session = boto3.Session() from_s3 = from_session.resource('s3') file = io.BytesIO() from_s3_object = from_s3.Object(from_s3_bucket, classes_json) from_s3_object.download_fileobj(file) file.seek(0) classes = json.load(file) else: classes = classes_json existing_classes = search_annotation_classes(project) new_classes = [] for cs in classes: if (cs['name'] in existing_classes): logger.warning('Annotation class %s already in project. Skipping.', cs['name']) else: new_classes.append(cs) res = [] def del_unn(d): for s in ['updatedAt', 'createdAt', 'id', 'project_id', 'group_id', 'class_id', 'count']: if (s in d): del d[s] for annotation_class in new_classes: del_unn(annotation_class) for attribute_group in annotation_class['attribute_groups']: del_unn(attribute_group) for attribute in attribute_group['attributes']: del_unn(attribute) CHUNK_SIZE = 2000 for i in range(0, len(new_classes), CHUNK_SIZE): params = {'team_id': team_id, 'project_id': project_id} data = {'classes': new_classes[i:(i + CHUNK_SIZE)]} response = _api.send_request(req_type='POST', path='/classes', params=params, json_req=data) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't create classes " + response.text)) res += response.json() assert (len(res) == len(new_classes)) return res
2,071,405,053,133,175,600
Creates annotation classes in project from a SuperAnnotate format annotation classes.json. :param project: project name or metadata of the project :type project: str or dict :param classes_json: JSON itself or path to the JSON file :type classes_json: list or Pathlike (str or Path) :param from_s3_bucket: AWS S3 bucket to use. If None then classes_json is in local filesystem :type from_s3_bucket: str :return: list of created annotation class metadatas :rtype: list of dicts
superannotate/db/annotation_classes.py
create_annotation_classes_from_classes_json
yuki-inaho/superannotate-python-sdk
python
def create_annotation_classes_from_classes_json(project, classes_json, from_s3_bucket=None): 'Creates annotation classes in project from a SuperAnnotate format\n annotation classes.json.\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param classes_json: JSON itself or path to the JSON file\n :type classes_json: list or Pathlike (str or Path)\n :param from_s3_bucket: AWS S3 bucket to use. If None then classes_json is in local filesystem\n :type from_s3_bucket: str\n\n :return: list of created annotation class metadatas\n :rtype: list of dicts\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) (team_id, project_id) = (project['team_id'], project['id']) if (not isinstance(classes_json, list)): logger.info('Creating annotation classes in project %s from %s.', project['name'], classes_json) if (from_s3_bucket is None): classes = json.load(open(classes_json)) else: from_session = boto3.Session() from_s3 = from_session.resource('s3') file = io.BytesIO() from_s3_object = from_s3.Object(from_s3_bucket, classes_json) from_s3_object.download_fileobj(file) file.seek(0) classes = json.load(file) else: classes = classes_json existing_classes = search_annotation_classes(project) new_classes = [] for cs in classes: if (cs['name'] in existing_classes): logger.warning('Annotation class %s already in project. Skipping.', cs['name']) else: new_classes.append(cs) res = [] def del_unn(d): for s in ['updatedAt', 'createdAt', 'id', 'project_id', 'group_id', 'class_id', 'count']: if (s in d): del d[s] for annotation_class in new_classes: del_unn(annotation_class) for attribute_group in annotation_class['attribute_groups']: del_unn(attribute_group) for attribute in attribute_group['attributes']: del_unn(attribute) CHUNK_SIZE = 2000 for i in range(0, len(new_classes), CHUNK_SIZE): params = {'team_id': team_id, 'project_id': project_id} data = {'classes': new_classes[i:(i + CHUNK_SIZE)]} response = _api.send_request(req_type='POST', path='/classes', params=params, json_req=data) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't create classes " + response.text)) res += response.json() assert (len(res) == len(new_classes)) return res
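A classes.json accepted by this function is simply a list of class dicts in the same shape create_annotation_class posts; a minimal hand-written example (the class values are illustrative):

import json

classes = [{'name': 'Vehicle', 'color': '#00FF00', 'attribute_groups': []}]
with open('classes.json', 'w') as f:
    json.dump(classes, f, indent=4)

# created = create_annotation_classes_from_classes_json('Example Project', 'classes.json')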
def search_annotation_classes(project, name_prefix=None, return_metadata=False): 'Searches annotation classes by name_prefix (case-insensitive)\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param name_prefix: name prefix for search. If None all annotation classes\n will be returned\n :type name_prefix: str\n\n :return: annotation classes of the project\n :rtype: list of dicts\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) result_list = [] (team_id, project_id) = (project['team_id'], project['id']) params = {'team_id': team_id, 'project_id': project_id, 'offset': 0} if (name_prefix is not None): params['name'] = name_prefix while True: response = _api.send_request(req_type='GET', path='/classes', params=params) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't search classes " + response.text)) res = response.json() result_list += res['data'] new_len = len(result_list) if (res['count'] <= new_len): break params['offset'] = new_len if return_metadata: return result_list else: return [x['name'] for x in result_list]
1,149,609,391,371,542,300
Searches annotation classes by name_prefix (case-insensitive) :param project: project name or metadata of the project :type project: str or dict :param name_prefix: name prefix for search. If None all annotation classes will be returned :type name_prefix: str :return: annotation classes of the project :rtype: list of dicts
superannotate/db/annotation_classes.py
search_annotation_classes
yuki-inaho/superannotate-python-sdk
python
def search_annotation_classes(project, name_prefix=None, return_metadata=False): 'Searches annotation classes by name_prefix (case-insensitive)\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param name_prefix: name prefix for search. If None all annotation classes\n will be returned\n :type name_prefix: str\n\n :return: annotation classes of the project\n :rtype: list of dicts\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) result_list = [] (team_id, project_id) = (project['team_id'], project['id']) params = {'team_id': team_id, 'project_id': project_id, 'offset': 0} if (name_prefix is not None): params['name'] = name_prefix while True: response = _api.send_request(req_type='GET', path='/classes', params=params) if (not response.ok): raise SABaseException(response.status_code, ("Couldn't search classes " + response.text)) res = response.json() result_list += res['data'] new_len = len(result_list) if (res['count'] <= new_len): break params['offset'] = new_len if return_metadata: return result_list else: return [x['name'] for x in result_list]
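A minimal usage sketch for the search helper above; the project name and prefix are illustrative placeholders, not values taken from the source.

# Hypothetical usage; "Example Project" and the "car" prefix are assumptions.
class_names = search_annotation_classes("Example Project", name_prefix="car")
full_metadata = search_annotation_classes("Example Project", return_metadata=True)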
def get_annotation_class_metadata(project, annotation_class_name): 'Returns annotation class metadata\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param annotation_class_name: annotation class name\n :type annotation_class_name: str\n\n :return: metadata of annotation class\n :rtype: dict\n ' annotation_classes = search_annotation_classes(project, annotation_class_name, return_metadata=True) results = [] for annotation_class in annotation_classes: if (annotation_class['name'] == annotation_class_name): results.append(annotation_class) if (len(results) > 1): raise SAExistingAnnotationClassNameException(0, (('Annotation class name ' + annotation_class_name) + ' is not unique. To use SDK please make annotation class names unique.')) elif (len(results) == 1): return results[0] else: raise SANonExistingAnnotationClassNameException(0, (('Annotation class with name ' + annotation_class_name) + " doesn't exist."))
-3,052,328,036,060,555,000
Returns annotation class metadata :param project: project name or metadata of the project :type project: str or dict :param annotation_class_name: annotation class name :type annotation_class_name: str :return: metadata of annotation class :rtype: dict
superannotate/db/annotation_classes.py
get_annotation_class_metadata
yuki-inaho/superannotate-python-sdk
python
def get_annotation_class_metadata(project, annotation_class_name): 'Returns annotation class metadata\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param annotation_class_name: annotation class name\n :type annotation_class_name: str\n\n :return: metadata of annotation class\n :rtype: dict\n ' annotation_classes = search_annotation_classes(project, annotation_class_name, return_metadata=True) results = [] for annotation_class in annotation_classes: if (annotation_class['name'] == annotation_class_name): results.append(annotation_class) if (len(results) > 1): raise SAExistingAnnotationClassNameException(0, (('Annotation class name ' + annotation_class_name) + ' is not unique. To use SDK please make annotation class names unique.')) elif (len(results) == 1): return results[0] else: raise SANonExistingAnnotationClassNameException(0, (('Annotation class with name ' + annotation_class_name) + " doesn't exist."))
def download_annotation_classes_json(project, folder): 'Downloads project classes.json to folder\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param folder: folder to download to\n :type folder: Pathlike (str or Path)\n\n :return: path of the download file\n :rtype: str\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) logger.info('Downloading classes.json from project %s to folder %s.', project['name'], folder) clss = search_annotation_classes(project, return_metadata=True) filepath = (Path(folder) / 'classes.json') json.dump(clss, open(filepath, 'w'), indent=4) return str(filepath)
1,210,275,893,233,714,000
Downloads project classes.json to folder :param project: project name or metadata of the project :type project: str or dict :param folder: folder to download to :type folder: Pathlike (str or Path) :return: path of the download file :rtype: str
superannotate/db/annotation_classes.py
download_annotation_classes_json
yuki-inaho/superannotate-python-sdk
python
def download_annotation_classes_json(project, folder): 'Downloads project classes.json to folder\n\n :param project: project name or metadata of the project\n :type project: str or dict\n :param folder: folder to download to\n :type folder: Pathlike (str or Path)\n\n :return: path of the download file\n :rtype: str\n ' if (not isinstance(project, dict)): project = get_project_metadata(project) logger.info('Downloading classes.json from project %s to folder %s.', project['name'], folder) clss = search_annotation_classes(project, return_metadata=True) filepath = (Path(folder) / 'classes.json') json.dump(clss, open(filepath, 'w'), indent=4) return str(filepath)
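A hedged sketch showing how the download helper above can be chained with create_annotation_classes_from_classes_json (shown earlier in this file) to copy classes between projects; the project names and the target folder are assumptions.

# Copy annotation classes from one project to another (names and folder are placeholders).
classes_path = download_annotation_classes_json("Source Project", "/tmp")
create_annotation_classes_from_classes_json("Target Project", classes_path)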
def __init__(self, img_path) -> None: '\n Read the image.\n ' self.img = cv2.imread(img_path)
-2,976,448,287,938,725,000
Read the image.
Photo.py
__init__
Rougnt/ArkNightAutoClick
python
def __init__(self, img_path) -> None: '\n \n ' self.img = cv2.imread(img_path)
@staticmethod def calcFangcha(img): '\n Compute the variance of the image region 938:1035, 1935:2247, used to compare similarity between images.\n Procedure: average the pixels of each row of the image, then take the variance of those row averages.\n return (int)\n ' img_new = img[938:1013, 1935:2247] img_avg = np.mean(img_new, axis=(0, 2)) return np.var(img_avg)
-8,420,559,267,574,300,000
Compute the variance of the image region 938:1035, 1935:2247, used to compare similarity between images. Procedure: average the pixels of each row of the image, then take the variance of those row averages. return (int)
Photo.py
calcFangcha
Rougnt/ArkNightAutoClick
python
@staticmethod def calcFangcha(img): '\n Compute the variance of the image region 938:1035, 1935:2247, used to compare similarity between images.\n Procedure: average the pixels of each row of the image, then take the variance of those row averages.\n return (int)\n ' img_new = img[938:1013, 1935:2247] img_avg = np.mean(img_new, axis=(0, 2)) return np.var(img_avg)
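A small sketch of how the two Photo.py helpers above might be combined to compare two screenshots; the class name Photo is inferred from the file name, and the paths and threshold are assumptions.

# Assuming the methods above belong to a class named Photo (per Photo.py),
# and that the images are large enough to cover the sampled region.
frame_a = Photo("frame_a.png")  # placeholder path
frame_b = Photo("frame_b.png")  # placeholder path
diff = abs(Photo.calcFangcha(frame_a.img) - Photo.calcFangcha(frame_b.img))
if diff < 1.0:  # threshold chosen for illustration only
    print("The sampled regions look similar")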
def rebase_file(relative_path_in_file, file_path_resides_in): '\n In config files, you often put file paths that are meant to be relative\n to the location of that config file. This does that calculation.\n ' check.str_param(relative_path_in_file, 'relative_path_in_file') check.str_param(file_path_resides_in, 'file_path_resides_in') return os.path.join(os.path.dirname(os.path.abspath(file_path_resides_in)), relative_path_in_file)
2,218,992,647,814,451,200
In config files, you often put file paths that are meant to be relative to the location of that config file. This does that calculation.
python_modules/dagster/dagster/core/code_pointer.py
rebase_file
idjevm/dagster
python
def rebase_file(relative_path_in_file, file_path_resides_in): '\n In config files, you often put file paths that are meant to be relative\n to the location of that config file. This does that calculation.\n ' check.str_param(relative_path_in_file, 'relative_path_in_file') check.str_param(file_path_resides_in, 'file_path_resides_in') return os.path.join(os.path.dirname(os.path.abspath(file_path_resides_in)), relative_path_in_file)
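A quick illustration of the path calculation above; the paths are made up.

# A config at /home/user/project/config.yaml that references "data/input.csv":
rebase_file("data/input.csv", "/home/user/project/config.yaml")
# -> "/home/user/project/data/input.csv"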
def load_python_file(python_file, working_directory): '\n Takes a path to a python file and returns a loaded module\n ' check.str_param(python_file, 'python_file') module_name = os.path.splitext(os.path.basename(python_file))[0] cwd = sys.path[0] if working_directory: with alter_sys_path(to_add=[working_directory], to_remove=[cwd]): return import_module_from_path(module_name, python_file) error = None sys_modules = {k: v for (k, v) in sys.modules.items()} with alter_sys_path(to_add=[], to_remove=[cwd]): try: module = import_module_from_path(module_name, python_file) except ImportError as ie: to_delete = (set(sys.modules) - set(sys_modules)) for key in to_delete: del sys.modules[key] error = ie if (not error): return module try: module = import_module_from_path(module_name, python_file) warnings.warn('Module `{module}` was resolved using the working directory. The ability to implicitly load modules from the working directory is deprecated and will be removed in a future release. Please explicitly specify the `working_directory` config option in your workspace.yaml or install `{module}` to your python environment.'.format(module=(error.name if hasattr(error, 'name') else module_name))) return module except ImportError: raise error
4,991,760,834,715,504,000
Takes a path to a python file and returns a loaded module
python_modules/dagster/dagster/core/code_pointer.py
load_python_file
idjevm/dagster
python
def load_python_file(python_file, working_directory): '\n \n ' check.str_param(python_file, 'python_file') module_name = os.path.splitext(os.path.basename(python_file))[0] cwd = sys.path[0] if working_directory: with alter_sys_path(to_add=[working_directory], to_remove=[cwd]): return import_module_from_path(module_name, python_file) error = None sys_modules = {k: v for (k, v) in sys.modules.items()} with alter_sys_path(to_add=[], to_remove=[cwd]): try: module = import_module_from_path(module_name, python_file) except ImportError as ie: to_delete = (set(sys.modules) - set(sys_modules)) for key in to_delete: del sys.modules[key] error = ie if (not error): return module try: module = import_module_from_path(module_name, python_file) warnings.warn('Module `{module}` was resolved using the working directory. The ability to implicitly load modules from the working directory is deprecated and will be removed in a future release. Please explicitly specify the `working_directory` config option in your workspace.yaml or install `{module}` to your python environment.'.format(module=(error.name if hasattr(error, 'name') else module_name))) return module except ImportError: raise error
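A hedged usage sketch for load_python_file; the file path, working directory, and attribute name are placeholders.

# Load a module from a file path (all names below are illustrative).
module = load_python_file("/path/to/repo.py", working_directory="/path/to")
pipeline = getattr(module, "my_pipeline", None)  # attribute name is an assumption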
def get_python_file_from_previous_stack_frame(): 'inspect.stack() lets us introspect the call stack; inspect.stack()[1] is the previous\n stack frame.\n\n In Python < 3.5, this is just a tuple, of which the python file of the previous frame is the 1st\n element.\n\n In Python 3.5+, this is a FrameInfo namedtuple instance; the python file of the previous frame\n remains the 1st element.\n ' previous_stack_frame = inspect.stack(0)[2] if ((sys.version_info.major == 3) and (sys.version_info.minor >= 5)): check.inst(previous_stack_frame, inspect.FrameInfo) else: check.inst(previous_stack_frame, tuple) python_file = previous_stack_frame[1] return os.path.abspath(python_file)
-6,616,463,174,597,527,000
inspect.stack() lets us introspect the call stack; inspect.stack()[1] is the previous stack frame. In Python < 3.5, this is just a tuple, of which the python file of the previous frame is the 1st element. In Python 3.5+, this is a FrameInfo namedtuple instance; the python file of the previous frame remains the 1st element.
python_modules/dagster/dagster/core/code_pointer.py
get_python_file_from_previous_stack_frame
idjevm/dagster
python
def get_python_file_from_previous_stack_frame(): 'inspect.stack() lets us introspect the call stack; inspect.stack()[1] is the previous\n stack frame.\n\n In Python < 3.5, this is just a tuple, of which the python file of the previous frame is the 1st\n element.\n\n In Python 3.5+, this is a FrameInfo namedtuple instance; the python file of the previous frame\n remains the 1st element.\n ' previous_stack_frame = inspect.stack(0)[2] if ((sys.version_info.major == 3) and (sys.version_info.minor >= 5)): check.inst(previous_stack_frame, inspect.FrameInfo) else: check.inst(previous_stack_frame, tuple) python_file = previous_stack_frame[1] return os.path.abspath(python_file)
def tarjan(sequence): '\n Tarjan algorithm for finding Strongly Connected Components (SCCs) of a graph.\n\n Args:\n sequence (list):\n List of head indices.\n\n Yields:\n A list of indices making up a SCC. All self-loops are ignored.\n\n Examples:\n >>> next(tarjan([2, 5, 0, 3, 1])) # (1 -> 5 -> 2 -> 1) is a cycle\n [2, 5, 1]\n ' sequence = ([(- 1)] + sequence) dfn = ([(- 1)] * len(sequence)) low = ([(- 1)] * len(sequence)) (stack, onstack) = ([], ([False] * len(sequence))) def connect(i, timestep): dfn[i] = low[i] = timestep[0] timestep[0] += 1 stack.append(i) onstack[i] = True for (j, head) in enumerate(sequence): if (head != i): continue if (dfn[j] == (- 1)): (yield from connect(j, timestep)) low[i] = min(low[i], low[j]) elif onstack[j]: low[i] = min(low[i], dfn[j]) if (low[i] == dfn[i]): cycle = [stack.pop()] while (cycle[(- 1)] != i): onstack[cycle[(- 1)]] = False cycle.append(stack.pop()) onstack[i] = False if (len(cycle) > 1): (yield cycle) timestep = [0] for i in range(len(sequence)): if (dfn[i] == (- 1)): (yield from connect(i, timestep))
-2,976,157,928,269,709,300
Tarjan algorithm for finding Strongly Connected Components (SCCs) of a graph. Args: sequence (list): List of head indices. Yields: A list of indices making up a SCC. All self-loops are ignored. Examples: >>> next(tarjan([2, 5, 0, 3, 1])) # (1 -> 5 -> 2 -> 1) is a cycle [2, 5, 1]
supar/structs/fn.py
tarjan
zysite/parser
python
def tarjan(sequence): '\n Tarjan algorithm for finding Strongly Connected Components (SCCs) of a graph.\n\n Args:\n sequence (list):\n List of head indices.\n\n Yields:\n A list of indices making up a SCC. All self-loops are ignored.\n\n Examples:\n >>> next(tarjan([2, 5, 0, 3, 1])) # (1 -> 5 -> 2 -> 1) is a cycle\n [2, 5, 1]\n ' sequence = ([(- 1)] + sequence) dfn = ([(- 1)] * len(sequence)) low = ([(- 1)] * len(sequence)) (stack, onstack) = ([], ([False] * len(sequence))) def connect(i, timestep): dfn[i] = low[i] = timestep[0] timestep[0] += 1 stack.append(i) onstack[i] = True for (j, head) in enumerate(sequence): if (head != i): continue if (dfn[j] == (- 1)): (yield from connect(j, timestep)) low[i] = min(low[i], low[j]) elif onstack[j]: low[i] = min(low[i], dfn[j]) if (low[i] == dfn[i]): cycle = [stack.pop()] while (cycle[(- 1)] != i): onstack[cycle[(- 1)]] = False cycle.append(stack.pop()) onstack[i] = False if (len(cycle) > 1): (yield cycle) timestep = [0] for i in range(len(sequence)): if (dfn[i] == (- 1)): (yield from connect(i, timestep))
def chuliu_edmonds(s): "\n ChuLiu/Edmonds algorithm for non-projective decoding :cite:`mcdonald-etal-2005-non`.\n\n Some code is borrowed from `tdozat's implementation`_.\n Descriptions of notations and formulas can be found in :cite:`mcdonald-etal-2005-non`.\n\n Notes:\n The algorithm does not guarantee to parse a single-root tree.\n\n Args:\n s (~torch.Tensor): ``[seq_len, seq_len]``.\n Scores of all dependent-head pairs.\n\n Returns:\n ~torch.Tensor:\n A tensor with shape ``[seq_len]`` for the resulting non-projective parse tree.\n\n .. _tdozat's implementation:\n https://github.com/tdozat/Parser-v3\n " s[0, 1:] = MIN s.diagonal()[1:].fill_(MIN) tree = s.argmax((- 1)) cycle = next(tarjan(tree.tolist()[1:]), None) if (not cycle): return tree cycle = torch.tensor(cycle) noncycle = torch.ones(len(s)).index_fill_(0, cycle, 0) noncycle = torch.where(noncycle.gt(0))[0] def contract(s): cycle_heads = tree[cycle] s_cycle = s[(cycle, cycle_heads)] s_dep = s[noncycle][:, cycle] deps = s_dep.argmax(1) s_head = ((s[cycle][:, noncycle] - s_cycle.view((- 1), 1)) + s_cycle.sum()) heads = s_head.argmax(0) contracted = torch.cat((noncycle, torch.tensor([(- 1)]))) s = s[contracted][:, contracted] s[:(- 1), (- 1)] = s_dep[(range(len(deps)), deps)] s[(- 1), :(- 1)] = s_head[(heads, range(len(heads)))] return (s, heads, deps) (s, heads, deps) = contract(s) y = chuliu_edmonds(s) (y, cycle_head) = (y[:(- 1)], y[(- 1)]) subtree = (y < len(y)) tree[noncycle[subtree]] = noncycle[y[subtree]] subtree = (~ subtree) tree[noncycle[subtree]] = cycle[deps[subtree]] cycle_root = heads[cycle_head] tree[cycle[cycle_root]] = noncycle[cycle_head] return tree
-2,960,571,905,903,595,000
ChuLiu/Edmonds algorithm for non-projective decoding :cite:`mcdonald-etal-2005-non`. Some code is borrowed from `tdozat's implementation`_. Descriptions of notations and formulas can be found in :cite:`mcdonald-etal-2005-non`. Notes: The algorithm does not guarantee to parse a single-root tree. Args: s (~torch.Tensor): ``[seq_len, seq_len]``. Scores of all dependent-head pairs. Returns: ~torch.Tensor: A tensor with shape ``[seq_len]`` for the resulting non-projective parse tree. .. _tdozat's implementation: https://github.com/tdozat/Parser-v3
supar/structs/fn.py
chuliu_edmonds
zysite/parser
python
def chuliu_edmonds(s): "\n ChuLiu/Edmonds algorithm for non-projective decoding :cite:`mcdonald-etal-2005-non`.\n\n Some code is borrowed from `tdozat's implementation`_.\n Descriptions of notations and formulas can be found in :cite:`mcdonald-etal-2005-non`.\n\n Notes:\n The algorithm does not guarantee to parse a single-root tree.\n\n Args:\n s (~torch.Tensor): ``[seq_len, seq_len]``.\n Scores of all dependent-head pairs.\n\n Returns:\n ~torch.Tensor:\n A tensor with shape ``[seq_len]`` for the resulting non-projective parse tree.\n\n .. _tdozat's implementation:\n https://github.com/tdozat/Parser-v3\n " s[0, 1:] = MIN s.diagonal()[1:].fill_(MIN) tree = s.argmax((- 1)) cycle = next(tarjan(tree.tolist()[1:]), None) if (not cycle): return tree cycle = torch.tensor(cycle) noncycle = torch.ones(len(s)).index_fill_(0, cycle, 0) noncycle = torch.where(noncycle.gt(0))[0] def contract(s): cycle_heads = tree[cycle] s_cycle = s[(cycle, cycle_heads)] s_dep = s[noncycle][:, cycle] deps = s_dep.argmax(1) s_head = ((s[cycle][:, noncycle] - s_cycle.view((- 1), 1)) + s_cycle.sum()) heads = s_head.argmax(0) contracted = torch.cat((noncycle, torch.tensor([(- 1)]))) s = s[contracted][:, contracted] s[:(- 1), (- 1)] = s_dep[(range(len(deps)), deps)] s[(- 1), :(- 1)] = s_head[(heads, range(len(heads)))] return (s, heads, deps) (s, heads, deps) = contract(s) y = chuliu_edmonds(s) (y, cycle_head) = (y[:(- 1)], y[(- 1)]) subtree = (y < len(y)) tree[noncycle[subtree]] = noncycle[y[subtree]] subtree = (~ subtree) tree[noncycle[subtree]] = cycle[deps[subtree]] cycle_root = heads[cycle_head] tree[cycle[cycle_root]] = noncycle[cycle_head] return tree
def mst(scores, mask, multiroot=False): '\n MST algorithm for decoding non-projective trees.\n This is a wrapper for ChuLiu/Edmonds algorithm.\n\n The algorithm first runs ChuLiu/Edmonds to parse a tree and then have a check of multi-roots,\n If ``multiroot=True`` and there indeed exist multi-roots, the algorithm seeks to find\n best single-root trees by iterating all possible single-root trees parsed by ChuLiu/Edmonds.\n Otherwise the resulting trees are directly taken as the final outputs.\n\n Args:\n scores (~torch.Tensor): ``[batch_size, seq_len, seq_len]``.\n Scores of all dependent-head pairs.\n mask (~torch.BoolTensor): ``[batch_size, seq_len]``.\n The mask to avoid parsing over padding tokens.\n The first column serving as pseudo words for roots should be ``False``.\n multiroot (bool):\n Ensures to parse a single-root tree If ``False``.\n\n Returns:\n ~torch.Tensor:\n A tensor with shape ``[batch_size, seq_len]`` for the resulting non-projective parse trees.\n\n Examples:\n >>> scores = torch.tensor([[[-11.9436, -13.1464, -6.4789, -13.8917],\n [-60.6957, -60.2866, -48.6457, -63.8125],\n [-38.1747, -49.9296, -45.2733, -49.5571],\n [-19.7504, -23.9066, -9.9139, -16.2088]]])\n >>> scores[:, 0, 1:] = MIN\n >>> scores.diagonal(0, 1, 2)[1:].fill_(MIN)\n >>> mask = torch.tensor([[False, True, True, True]])\n >>> mst(scores, mask)\n tensor([[0, 2, 0, 2]])\n ' (batch_size, seq_len, _) = scores.shape scores = scores.cpu().unbind() preds = [] for (i, length) in enumerate(mask.sum(1).tolist()): s = scores[i][:(length + 1), :(length + 1)] tree = chuliu_edmonds(s) roots = (torch.where(tree[1:].eq(0))[0] + 1) if ((not multiroot) and (len(roots) > 1)): s_root = s[:, 0] s_best = MIN s = s.index_fill(1, torch.tensor(0), MIN) for root in roots: s[:, 0] = MIN s[(root, 0)] = s_root[root] t = chuliu_edmonds(s) s_tree = s[1:].gather(1, t[1:].unsqueeze((- 1))).sum() if (s_tree > s_best): (s_best, tree) = (s_tree, t) preds.append(tree) return pad(preds, total_length=seq_len).to(mask.device)
1,458,930,332,535,327,000
MST algorithm for decoding non-projective trees. This is a wrapper for ChuLiu/Edmonds algorithm. The algorithm first runs ChuLiu/Edmonds to parse a tree and then have a check of multi-roots, If ``multiroot=True`` and there indeed exist multi-roots, the algorithm seeks to find best single-root trees by iterating all possible single-root trees parsed by ChuLiu/Edmonds. Otherwise the resulting trees are directly taken as the final outputs. Args: scores (~torch.Tensor): ``[batch_size, seq_len, seq_len]``. Scores of all dependent-head pairs. mask (~torch.BoolTensor): ``[batch_size, seq_len]``. The mask to avoid parsing over padding tokens. The first column serving as pseudo words for roots should be ``False``. multiroot (bool): Ensures to parse a single-root tree If ``False``. Returns: ~torch.Tensor: A tensor with shape ``[batch_size, seq_len]`` for the resulting non-projective parse trees. Examples: >>> scores = torch.tensor([[[-11.9436, -13.1464, -6.4789, -13.8917], [-60.6957, -60.2866, -48.6457, -63.8125], [-38.1747, -49.9296, -45.2733, -49.5571], [-19.7504, -23.9066, -9.9139, -16.2088]]]) >>> scores[:, 0, 1:] = MIN >>> scores.diagonal(0, 1, 2)[1:].fill_(MIN) >>> mask = torch.tensor([[False, True, True, True]]) >>> mst(scores, mask) tensor([[0, 2, 0, 2]])
supar/structs/fn.py
mst
zysite/parser
python
def mst(scores, mask, multiroot=False): '\n MST algorithm for decoding non-projective trees.\n This is a wrapper for ChuLiu/Edmonds algorithm.\n\n The algorithm first runs ChuLiu/Edmonds to parse a tree and then have a check of multi-roots,\n If ``multiroot=True`` and there indeed exist multi-roots, the algorithm seeks to find\n best single-root trees by iterating all possible single-root trees parsed by ChuLiu/Edmonds.\n Otherwise the resulting trees are directly taken as the final outputs.\n\n Args:\n scores (~torch.Tensor): ``[batch_size, seq_len, seq_len]``.\n Scores of all dependent-head pairs.\n mask (~torch.BoolTensor): ``[batch_size, seq_len]``.\n The mask to avoid parsing over padding tokens.\n The first column serving as pseudo words for roots should be ``False``.\n multiroot (bool):\n Ensures to parse a single-root tree If ``False``.\n\n Returns:\n ~torch.Tensor:\n A tensor with shape ``[batch_size, seq_len]`` for the resulting non-projective parse trees.\n\n Examples:\n >>> scores = torch.tensor([[[-11.9436, -13.1464, -6.4789, -13.8917],\n [-60.6957, -60.2866, -48.6457, -63.8125],\n [-38.1747, -49.9296, -45.2733, -49.5571],\n [-19.7504, -23.9066, -9.9139, -16.2088]]])\n >>> scores[:, 0, 1:] = MIN\n >>> scores.diagonal(0, 1, 2)[1:].fill_(MIN)\n >>> mask = torch.tensor([[False, True, True, True]])\n >>> mst(scores, mask)\n tensor([[0, 2, 0, 2]])\n ' (batch_size, seq_len, _) = scores.shape scores = scores.cpu().unbind() preds = [] for (i, length) in enumerate(mask.sum(1).tolist()): s = scores[i][:(length + 1), :(length + 1)] tree = chuliu_edmonds(s) roots = (torch.where(tree[1:].eq(0))[0] + 1) if ((not multiroot) and (len(roots) > 1)): s_root = s[:, 0] s_best = MIN s = s.index_fill(1, torch.tensor(0), MIN) for root in roots: s[:, 0] = MIN s[(root, 0)] = s_root[root] t = chuliu_edmonds(s) s_tree = s[1:].gather(1, t[1:].unsqueeze((- 1))).sum() if (s_tree > s_best): (s_best, tree) = (s_tree, t) preds.append(tree) return pad(preds, total_length=seq_len).to(mask.device)
def make_mainlib_replicates_train_test_split(mainlib_mol_list, replicates_mol_list, splitting_type, mainlib_fractions, replicates_fractions, mainlib_maximum_num_molecules_to_use=None, replicates_maximum_num_molecules_to_use=None, rseed=42): "Makes train/validation/test inchikey lists from two lists of rdkit.Mol.\n\n Args:\n mainlib_mol_list : list of molecules from main library\n replicates_mol_list : list of molecules from replicates library\n splitting_type : type of splitting to use for validation splits.\n mainlib_fractions : TrainValTestFractions namedtuple\n holding desired fractions for train/val/test split of mainlib\n replicates_fractions : TrainValTestFractions namedtuple\n holding desired fractions for train/val/test split of replicates.\n For the replicates set, the train fraction should be set to 0.\n mainlib_maximum_num_molecules_to_use : Largest number of molecules to use\n when making datasets from mainlib\n replicates_maximum_num_molecules_to_use : Largest number of molecules to use\n when making datasets from replicates\n rseed : random seed for shuffling\n\n Returns:\n main_inchikey_dict : Dict that is keyed by inchikey, containing a list of\n rdkit.Mol objects corresponding to that inchikey from the mainlib\n replicates_inchikey_dict : Dict that is keyed by inchikey, containing a list\n of rdkit.Mol objects corresponding to that inchikey from the replicates\n library\n main_replicates_split_inchikey_lists_dict : dict with keys :\n 'mainlib_train', 'mainlib_validation', 'mainlib_test',\n 'replicates_train', 'replicates_validation', 'replicates_test'\n Values are lists of inchikeys corresponding to each dataset.\n\n " random.seed(rseed) main_inchikey_dict = train_test_split_utils.make_inchikey_dict(mainlib_mol_list) main_inchikey_list = main_inchikey_dict.keys() if six.PY3: main_inchikey_list = list(main_inchikey_list) if (mainlib_maximum_num_molecules_to_use is not None): main_inchikey_list = random.sample(main_inchikey_list, mainlib_maximum_num_molecules_to_use) replicates_inchikey_dict = train_test_split_utils.make_inchikey_dict(replicates_mol_list) replicates_inchikey_list = replicates_inchikey_dict.keys() if six.PY3: replicates_inchikey_list = list(replicates_inchikey_list) if (replicates_maximum_num_molecules_to_use is not None): replicates_inchikey_list = random.sample(replicates_inchikey_list, replicates_maximum_num_molecules_to_use) main_train_validation_test_inchikeys = train_test_split_utils.make_train_val_test_split_inchikey_lists(main_inchikey_list, main_inchikey_dict, mainlib_fractions, holdout_inchikey_list=replicates_inchikey_list, splitting_type=splitting_type) replicates_validation_test_inchikeys = train_test_split_utils.make_train_val_test_split_inchikey_lists(replicates_inchikey_list, replicates_inchikey_dict, replicates_fractions, splitting_type=splitting_type) component_inchikey_dict = {ds_constants.MAINLIB_TRAIN_BASENAME: main_train_validation_test_inchikeys.train, ds_constants.MAINLIB_VALIDATION_BASENAME: main_train_validation_test_inchikeys.validation, ds_constants.MAINLIB_TEST_BASENAME: main_train_validation_test_inchikeys.test, ds_constants.REPLICATES_TRAIN_BASENAME: replicates_validation_test_inchikeys.train, ds_constants.REPLICATES_VALIDATION_BASENAME: replicates_validation_test_inchikeys.validation, ds_constants.REPLICATES_TEST_BASENAME: replicates_validation_test_inchikeys.test} train_test_split_utils.assert_all_lists_mutally_exclusive(list(component_inchikey_dict.values())) all_inchikeys_in_components = [] for ikey_list in list(component_inchikey_dict.values()): for ikey in ikey_list: all_inchikeys_in_components.append(ikey) assert (set((main_inchikey_list + replicates_inchikey_list)) == set(all_inchikeys_in_components)), 'The inchikeys in the original inchikey dictionary are not all included in the train/val/test component libraries' return (main_inchikey_dict, replicates_inchikey_dict, component_inchikey_dict)
-8,021,216,745,129,914,000
Makes train/validation/test inchikey lists from two lists of rdkit.Mol. Args: mainlib_mol_list : list of molecules from main library replicates_mol_list : list of molecules from replicates library splitting_type : type of splitting to use for validation splits. mainlib_fractions : TrainValTestFractions namedtuple holding desired fractions for train/val/test split of mainlib replicates_fractions : TrainValTestFractions namedtuple holding desired fractions for train/val/test split of replicates. For the replicates set, the train fraction should be set to 0. mainlib_maximum_num_molecules_to_use : Largest number of molecules to use when making datasets from mainlib replicates_maximum_num_molecules_to_use : Largest number of molecules to use when making datasets from replicates rseed : random seed for shuffling Returns: main_inchikey_dict : Dict that is keyed by inchikey, containing a list of rdkit.Mol objects corresponding to that inchikey from the mainlib replicates_inchikey_dict : Dict that is keyed by inchikey, containing a list of rdkit.Mol objects corresponding to that inchikey from the replicates library main_replicates_split_inchikey_lists_dict : dict with keys : 'mainlib_train', 'mainlib_validation', 'mainlib_test', 'replicates_train', 'replicates_validation', 'replicates_test' Values are lists of inchikeys corresponding to each dataset.
make_train_test_split.py
make_mainlib_replicates_train_test_split
berlinguyinca/deep-molecular-massspec
python
def make_mainlib_replicates_train_test_split(mainlib_mol_list, replicates_mol_list, splitting_type, mainlib_fractions, replicates_fractions, mainlib_maximum_num_molecules_to_use=None, replicates_maximum_num_molecules_to_use=None, rseed=42): "Makes train/validation/test inchikey lists from two lists of rdkit.Mol.\n\n Args:\n mainlib_mol_list : list of molecules from main library\n replicates_mol_list : list of molecules from replicates library\n splitting_type : type of splitting to use for validation splits.\n mainlib_fractions : TrainValTestFractions namedtuple\n holding desired fractions for train/val/test split of mainlib\n replicates_fractions : TrainValTestFractions namedtuple\n holding desired fractions for train/val/test split of replicates.\n For the replicates set, the train fraction should be set to 0.\n mainlib_maximum_num_molecules_to_use : Largest number of molecules to use\n when making datasets from mainlib\n replicates_maximum_num_molecules_to_use : Largest number of molecules to use\n when making datasets from replicates\n rseed : random seed for shuffling\n\n Returns:\n main_inchikey_dict : Dict that is keyed by inchikey, containing a list of\n rdkit.Mol objects corresponding to that inchikey from the mainlib\n replicates_inchikey_dict : Dict that is keyed by inchikey, containing a list\n of rdkit.Mol objects corresponding to that inchikey from the replicates\n library\n main_replicates_split_inchikey_lists_dict : dict with keys :\n 'mainlib_train', 'mainlib_validation', 'mainlib_test',\n 'replicates_train', 'replicates_validation', 'replicates_test'\n Values are lists of inchikeys corresponding to each dataset.\n\n " random.seed(rseed) main_inchikey_dict = train_test_split_utils.make_inchikey_dict(mainlib_mol_list) main_inchikey_list = main_inchikey_dict.keys() if six.PY3: main_inchikey_list = list(main_inchikey_list) if (mainlib_maximum_num_molecules_to_use is not None): main_inchikey_list = random.sample(main_inchikey_list, mainlib_maximum_num_molecules_to_use) replicates_inchikey_dict = train_test_split_utils.make_inchikey_dict(replicates_mol_list) replicates_inchikey_list = replicates_inchikey_dict.keys() if six.PY3: replicates_inchikey_list = list(replicates_inchikey_list) if (replicates_maximum_num_molecules_to_use is not None): replicates_inchikey_list = random.sample(replicates_inchikey_list, replicates_maximum_num_molecules_to_use) main_train_validation_test_inchikeys = train_test_split_utils.make_train_val_test_split_inchikey_lists(main_inchikey_list, main_inchikey_dict, mainlib_fractions, holdout_inchikey_list=replicates_inchikey_list, splitting_type=splitting_type) replicates_validation_test_inchikeys = train_test_split_utils.make_train_val_test_split_inchikey_lists(replicates_inchikey_list, replicates_inchikey_dict, replicates_fractions, splitting_type=splitting_type) component_inchikey_dict = {ds_constants.MAINLIB_TRAIN_BASENAME: main_train_validation_test_inchikeys.train, ds_constants.MAINLIB_VALIDATION_BASENAME: main_train_validation_test_inchikeys.validation, ds_constants.MAINLIB_TEST_BASENAME: main_train_validation_test_inchikeys.test, ds_constants.REPLICATES_TRAIN_BASENAME: replicates_validation_test_inchikeys.train, ds_constants.REPLICATES_VALIDATION_BASENAME: replicates_validation_test_inchikeys.validation, ds_constants.REPLICATES_TEST_BASENAME: replicates_validation_test_inchikeys.test} train_test_split_utils.assert_all_lists_mutally_exclusive(list(component_inchikey_dict.values())) all_inchikeys_in_components = [] for ikey_list in list(component_inchikey_dict.values()): for ikey in ikey_list: all_inchikeys_in_components.append(ikey) assert (set((main_inchikey_list + replicates_inchikey_list)) == set(all_inchikeys_in_components)), 'The inchikeys in the original inchikey dictionary are not all included in the train/val/test component libraries' return (main_inchikey_dict, replicates_inchikey_dict, component_inchikey_dict)
def write_list_of_inchikeys(inchikey_list, base_name, output_dir): 'Write list of inchikeys as a text file.' inchikey_list_name = (base_name + INCHIKEY_FILENAME_END) with tf.gfile.Open(os.path.join(output_dir, inchikey_list_name), 'w') as writer: for inchikey in inchikey_list: writer.write(('%s\n' % inchikey))
-5,325,643,015,259,984,000
Write list of inchikeys as a text file.
make_train_test_split.py
write_list_of_inchikeys
berlinguyinca/deep-molecular-massspec
python
def write_list_of_inchikeys(inchikey_list, base_name, output_dir): inchikey_list_name = (base_name + INCHIKEY_FILENAME_END) with tf.gfile.Open(os.path.join(output_dir, inchikey_list_name), 'w') as writer: for inchikey in inchikey_list: writer.write(('%s\n' % inchikey))
def write_all_dataset_files(inchikey_dict, inchikey_list, base_name, output_dir, max_atoms, max_mass_spec_peak_loc, make_library_array=False): 'Helper function for writing all the files associated with a TFRecord.\n\n Args:\n inchikey_dict : Full dictionary keyed by inchikey containing lists of\n rdkit.Mol objects\n inchikey_list : List of inchikeys to include in dataset\n base_name : Base name for the dataset\n output_dir : Path for saving all TFRecord files\n max_atoms : Maximum number of atoms to include for a given molecule\n max_mass_spec_peak_loc : Largest m/z peak to include in a spectra.\n make_library_array : Flag for whether to make library array\n Returns:\n Saves 3 files:\n basename.tfrecord : a TFRecord file,\n basename.inchikey.txt : a text file with all the inchikeys in the dataset\n basename.tfrecord.info: a text file with one line describing\n the length of the TFRecord file.\n Also saves if make_library_array is set:\n basename.npz : see parse_sdf_utils.write_dicts_to_example\n ' record_name = (base_name + TFRECORD_FILENAME_END) mol_list = train_test_split_utils.make_mol_list_from_inchikey_dict(inchikey_dict, inchikey_list) if make_library_array: library_array_pathname = (base_name + NP_LIBRARY_ARRAY_END) parse_sdf_utils.write_dicts_to_example(mol_list, os.path.join(output_dir, record_name), max_atoms, max_mass_spec_peak_loc, os.path.join(output_dir, library_array_pathname)) else: parse_sdf_utils.write_dicts_to_example(mol_list, os.path.join(output_dir, record_name), max_atoms, max_mass_spec_peak_loc) write_list_of_inchikeys(inchikey_list, base_name, output_dir) parse_sdf_utils.write_info_file(mol_list, os.path.join(output_dir, record_name))
-7,087,730,737,864,763,000
Helper function for writing all the files associated with a TFRecord. Args: inchikey_dict : Full dictionary keyed by inchikey containing lists of rdkit.Mol objects inchikey_list : List of inchikeys to include in dataset base_name : Base name for the dataset output_dir : Path for saving all TFRecord files max_atoms : Maximum number of atoms to include for a given molecule max_mass_spec_peak_loc : Largest m/z peak to include in a spectra. make_library_array : Flag for whether to make library array Returns: Saves 3 files: basename.tfrecord : a TFRecord file, basename.inchikey.txt : a text file with all the inchikeys in the dataset basename.tfrecord.info: a text file with one line describing the length of the TFRecord file. Also saves if make_library_array is set: basename.npz : see parse_sdf_utils.write_dicts_to_example
make_train_test_split.py
write_all_dataset_files
berlinguyinca/deep-molecular-massspec
python
def write_all_dataset_files(inchikey_dict, inchikey_list, base_name, output_dir, max_atoms, max_mass_spec_peak_loc, make_library_array=False): 'Helper function for writing all the files associated with a TFRecord.\n\n Args:\n inchikey_dict : Full dictionary keyed by inchikey containing lists of\n rdkit.Mol objects\n inchikey_list : List of inchikeys to include in dataset\n base_name : Base name for the dataset\n output_dir : Path for saving all TFRecord files\n max_atoms : Maximum number of atoms to include for a given molecule\n max_mass_spec_peak_loc : Largest m/z peak to include in a spectra.\n make_library_array : Flag for whether to make library array\n Returns:\n Saves 3 files:\n basename.tfrecord : a TFRecord file,\n basename.inchikey.txt : a text file with all the inchikeys in the dataset\n basename.tfrecord.info: a text file with one line describing\n the length of the TFRecord file.\n Also saves if make_library_array is set:\n basename.npz : see parse_sdf_utils.write_dicts_to_example\n ' record_name = (base_name + TFRECORD_FILENAME_END) mol_list = train_test_split_utils.make_mol_list_from_inchikey_dict(inchikey_dict, inchikey_list) if make_library_array: library_array_pathname = (base_name + NP_LIBRARY_ARRAY_END) parse_sdf_utils.write_dicts_to_example(mol_list, os.path.join(output_dir, record_name), max_atoms, max_mass_spec_peak_loc, os.path.join(output_dir, library_array_pathname)) else: parse_sdf_utils.write_dicts_to_example(mol_list, os.path.join(output_dir, record_name), max_atoms, max_mass_spec_peak_loc) write_list_of_inchikeys(inchikey_list, base_name, output_dir) parse_sdf_utils.write_info_file(mol_list, os.path.join(output_dir, record_name))
def write_mainlib_split_datasets(component_inchikey_dict, mainlib_inchikey_dict, output_dir, max_atoms, max_mass_spec_peak_loc): 'Write all train/val/test set TFRecords from main NIST sdf file.' for component_kwarg in component_inchikey_dict.keys(): component_mainlib_filename = (component_kwarg + FROM_MAINLIB_FILENAME_MODIFIER) if (component_kwarg == ds_constants.MAINLIB_TRAIN_BASENAME): write_all_dataset_files(mainlib_inchikey_dict, component_inchikey_dict[component_kwarg], component_mainlib_filename, output_dir, max_atoms, max_mass_spec_peak_loc, make_library_array=True) else: write_all_dataset_files(mainlib_inchikey_dict, component_inchikey_dict[component_kwarg], component_mainlib_filename, output_dir, max_atoms, max_mass_spec_peak_loc)
-3,808,761,830,725,798,000
Write all train/val/test set TFRecords from main NIST sdf file.
make_train_test_split.py
write_mainlib_split_datasets
berlinguyinca/deep-molecular-massspec
python
def write_mainlib_split_datasets(component_inchikey_dict, mainlib_inchikey_dict, output_dir, max_atoms, max_mass_spec_peak_loc): for component_kwarg in component_inchikey_dict.keys(): component_mainlib_filename = (component_kwarg + FROM_MAINLIB_FILENAME_MODIFIER) if (component_kwarg == ds_constants.MAINLIB_TRAIN_BASENAME): write_all_dataset_files(mainlib_inchikey_dict, component_inchikey_dict[component_kwarg], component_mainlib_filename, output_dir, max_atoms, max_mass_spec_peak_loc, make_library_array=True) else: write_all_dataset_files(mainlib_inchikey_dict, component_inchikey_dict[component_kwarg], component_mainlib_filename, output_dir, max_atoms, max_mass_spec_peak_loc)
def write_replicates_split_datasets(component_inchikey_dict, replicates_inchikey_dict, output_dir, max_atoms, max_mass_spec_peak_loc): 'Write replicates val/test set TFRecords from replicates sdf file.' for component_kwarg in [ds_constants.REPLICATES_VALIDATION_BASENAME, ds_constants.REPLICATES_TEST_BASENAME]: component_replicates_filename = (component_kwarg + FROM_REPLICATES_FILENAME_MODIFIER) write_all_dataset_files(replicates_inchikey_dict, component_inchikey_dict[component_kwarg], component_replicates_filename, output_dir, max_atoms, max_mass_spec_peak_loc)
-3,912,315,331,998,454,300
Write replicates val/test set TFRecords from replicates sdf file.
make_train_test_split.py
write_replicates_split_datasets
berlinguyinca/deep-molecular-massspec
python
def write_replicates_split_datasets(component_inchikey_dict, replicates_inchikey_dict, output_dir, max_atoms, max_mass_spec_peak_loc): for component_kwarg in [ds_constants.REPLICATES_VALIDATION_BASENAME, ds_constants.REPLICATES_TEST_BASENAME]: component_replicates_filename = (component_kwarg + FROM_REPLICATES_FILENAME_MODIFIER) write_all_dataset_files(replicates_inchikey_dict, component_inchikey_dict[component_kwarg], component_replicates_filename, output_dir, max_atoms, max_mass_spec_peak_loc)
def combine_inchikey_sets(dataset_subdivision_list, dataset_split_dict): 'A function to combine lists of inchikeys that are values from a dict.\n\n Args:\n dataset_subdivision_list: List of keys in dataset_split_dict to combine\n into one list\n dataset_split_dict: dict containing keys in dataset_subdivision_list, with\n lists of inchikeys as values.\n Returns:\n A list of inchikeys.\n ' dataset_inchikey_list = [] for dataset_subdivision_name in dataset_subdivision_list: dataset_inchikey_list.extend(dataset_split_dict[dataset_subdivision_name]) return dataset_inchikey_list
2,988,018,329,595,237,000
A function to combine lists of inchikeys that are values from a dict. Args: dataset_subdivision_list: List of keys in dataset_split_dict to combine into one list dataset_split_dict: dict containing keys in dataset_subdivision_list, with lists of inchikeys as values. Returns: A list of inchikeys.
make_train_test_split.py
combine_inchikey_sets
berlinguyinca/deep-molecular-massspec
python
def combine_inchikey_sets(dataset_subdivision_list, dataset_split_dict): 'A function to combine lists of inchikeys that are values from a dict.\n\n Args:\n dataset_subdivision_list: List of keys in dataset_split_dict to combine\n into one list\n dataset_split_dict: dict containing keys in dataset_subdivision_list, with\n lists of inchikeys as values.\n Returns:\n A list of inchikeys.\n ' dataset_inchikey_list = [] for dataset_subdivision_name in dataset_subdivision_list: dataset_inchikey_list.extend(dataset_split_dict[dataset_subdivision_name]) return dataset_inchikey_list
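A tiny worked example for combine_inchikey_sets; the keys and inchikeys are made up.

# Illustrative values only.
split_dict = {"train": ["IKEY1", "IKEY2"], "validation": ["IKEY3"]}
combine_inchikey_sets(["train", "validation"], split_dict)
# -> ["IKEY1", "IKEY2", "IKEY3"]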
def check_experiment_setup(experiment_setup_dict, component_inchikey_dict): 'Validates experiment setup for given lists of inchikeys.' all_inchikeys_in_library = (combine_inchikey_sets(experiment_setup_dict[ds_constants.LIBRARY_MATCHING_OBSERVED_KEY], component_inchikey_dict) + combine_inchikey_sets(experiment_setup_dict[ds_constants.LIBRARY_MATCHING_PREDICTED_KEY], component_inchikey_dict)) all_inchikeys_in_use = [] for kwarg in component_inchikey_dict.keys(): all_inchikeys_in_use.extend(component_inchikey_dict[kwarg]) assert (set(all_inchikeys_in_use) == set(all_inchikeys_in_library)), 'Inchikeys in library for library matching does not match full dataset.' assert set(combine_inchikey_sets(experiment_setup_dict[ds_constants.LIBRARY_MATCHING_QUERY_KEY], component_inchikey_dict)).issubset(set(all_inchikeys_in_library)), 'Inchikeys in query set for library matching notfound in library.'
-3,406,558,791,919,901,000
Validates experiment setup for given lists of inchikeys.
make_train_test_split.py
check_experiment_setup
berlinguyinca/deep-molecular-massspec
python
def check_experiment_setup(experiment_setup_dict, component_inchikey_dict): all_inchikeys_in_library = (combine_inchikey_sets(experiment_setup_dict[ds_constants.LIBRARY_MATCHING_OBSERVED_KEY], component_inchikey_dict) + combine_inchikey_sets(experiment_setup_dict[ds_constants.LIBRARY_MATCHING_PREDICTED_KEY], component_inchikey_dict)) all_inchikeys_in_use = [] for kwarg in component_inchikey_dict.keys(): all_inchikeys_in_use.extend(component_inchikey_dict[kwarg]) assert (set(all_inchikeys_in_use) == set(all_inchikeys_in_library)), 'Inchikeys in library for library matching does not match full dataset.' assert set(combine_inchikey_sets(experiment_setup_dict[ds_constants.LIBRARY_MATCHING_QUERY_KEY], component_inchikey_dict)).issubset(set(all_inchikeys_in_library)), 'Inchikeys in query set for library matching notfound in library.'
def write_json_for_experiment(experiment_setup, output_dir): 'Writes json for experiment, recording relevant files for each component.\n\n Writes a json containing a list of TFRecord file names to read\n for each experiment component, i.e. spectrum_prediction, library_matching.\n\n Args:\n experiment_setup: A dataset_setup_constants.ExperimentSetup tuple\n output_dir: directory to write json\n Returns:\n Writes json recording which files to load for each component\n of the experiment\n Raises:\n ValueError: if the experiment component is not specified to be taken from\n either the main NIST library or the replicates library.\n\n ' experiment_json_dict = {} for dataset_kwarg in experiment_setup.experiment_setup_dataset_dict: if (dataset_kwarg in experiment_setup.data_to_get_from_mainlib): experiment_json_dict[dataset_kwarg] = [((component_basename + FROM_MAINLIB_FILENAME_MODIFIER) + TFRECORD_FILENAME_END) for component_basename in experiment_setup.experiment_setup_dataset_dict[dataset_kwarg]] elif (dataset_kwarg in experiment_setup.data_to_get_from_replicates): experiment_json_dict[dataset_kwarg] = [((component_basename + FROM_REPLICATES_FILENAME_MODIFIER) + TFRECORD_FILENAME_END) for component_basename in experiment_setup.experiment_setup_dataset_dict[dataset_kwarg]] else: raise ValueError('Did not specify origin for {}.'.format(dataset_kwarg)) training_spectra_filename = ((ds_constants.MAINLIB_TRAIN_BASENAME + FROM_MAINLIB_FILENAME_MODIFIER) + NP_LIBRARY_ARRAY_END) experiment_json_dict[ds_constants.TRAINING_SPECTRA_ARRAY_KEY] = training_spectra_filename with tf.gfile.Open(os.path.join(output_dir, experiment_setup.json_name), 'w') as writer: experiment_json = json.dumps(experiment_json_dict) writer.write(experiment_json)
8,856,359,304,804,744,000
Writes json for experiment, recording relevant files for each component. Writes a json containing a list of TFRecord file names to read for each experiment component, i.e. spectrum_prediction, library_matching. Args: experiment_setup: A dataset_setup_constants.ExperimentSetup tuple output_dir: directory to write json Returns: Writes json recording which files to load for each component of the experiment Raises: ValueError: if the experiment component is not specified to be taken from either the main NIST library or the replicates library.
make_train_test_split.py
write_json_for_experiment
berlinguyinca/deep-molecular-massspec
python
def write_json_for_experiment(experiment_setup, output_dir): 'Writes json for experiment, recording relevant files for each component.\n\n Writes a json containing a list of TFRecord file names to read\n for each experiment component, i.e. spectrum_prediction, library_matching.\n\n Args:\n experiment_setup: A dataset_setup_constants.ExperimentSetup tuple\n output_dir: directory to write json\n Returns:\n Writes json recording which files to load for each component\n of the experiment\n Raises:\n ValueError: if the experiment component is not specified to be taken from\n either the main NIST library or the replicates library.\n\n ' experiment_json_dict = {} for dataset_kwarg in experiment_setup.experiment_setup_dataset_dict: if (dataset_kwarg in experiment_setup.data_to_get_from_mainlib): experiment_json_dict[dataset_kwarg] = [((component_basename + FROM_MAINLIB_FILENAME_MODIFIER) + TFRECORD_FILENAME_END) for component_basename in experiment_setup.experiment_setup_dataset_dict[dataset_kwarg]] elif (dataset_kwarg in experiment_setup.data_to_get_from_replicates): experiment_json_dict[dataset_kwarg] = [((component_basename + FROM_REPLICATES_FILENAME_MODIFIER) + TFRECORD_FILENAME_END) for component_basename in experiment_setup.experiment_setup_dataset_dict[dataset_kwarg]] else: raise ValueError('Did not specify origin for {}.'.format(dataset_kwarg)) training_spectra_filename = ((ds_constants.MAINLIB_TRAIN_BASENAME + FROM_MAINLIB_FILENAME_MODIFIER) + NP_LIBRARY_ARRAY_END) experiment_json_dict[ds_constants.TRAINING_SPECTRA_ARRAY_KEY] = training_spectra_filename with tf.gfile.Open(os.path.join(output_dir, experiment_setup.json_name), 'w') as writer: experiment_json = json.dumps(experiment_json_dict) writer.write(experiment_json)
def combinationSum(self, candidates, target): '\n :type candidates: List[int]\n :type target: int\n :rtype: List[List[int]]\n ' def recurhelper(nums, res, path, target, start): if (target == 0): res.append(path) return if (target < 0): return if (target > 0): for i in xrange(start, len(nums)): if (nums[i] <= target): recurhelper(nums, res, (path + [nums[i]]), (target - nums[i]), i) res = [] candidates.sort() recurhelper(candidates, res, [], target, 0) return res
-1,772,554,806,074,169,600
:type candidates: List[int] :type target: int :rtype: List[List[int]]
39-Combination-Sum/solution.py
combinationSum
Tanych/CodeTracking
python
def combinationSum(self, candidates, target): '\n :type candidates: List[int]\n :type target: int\n :rtype: List[List[int]]\n ' def recurhelper(nums, res, path, target, start): if (target == 0): res.append(path) return if (target < 0): return if (target > 0): for i in xrange(start, len(nums)): if (nums[i] <= target): recurhelper(nums, res, (path + [nums[i]]), (target - nums[i]), i) res = [] candidates.sort() recurhelper(candidates, res, [], target, 0) return res
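A usage sketch for the combination-sum solution above. Note that it uses xrange, so as written it targets Python 2, and the enclosing Solution class name is assumed (the usual LeetCode convention).

# Assuming the method lives on a class named Solution (LeetCode convention).
Solution().combinationSum([2, 3, 6, 7], 7)
# -> [[2, 2, 3], [7]]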
def force_console_input(query: str, allowable, onfail: str='Input not recognised, please try again.\n', case_sensitive=False): 'Get an input from the user matching some string in allowable.\n\n Args:\n query (str): The query to issue the user with.\n allowable (str or container): The options which the user is allowed to submit.\n If this is a string, acceptable answers will be substrings.\n For containers acceptable answers will be elements of the container.\n \n Returns:\n The correct input returned\n \n Raises:\n IOError: A request to quit was submitted.\n ' if (not allowable): raise ValueError('At least one entry must be allowable.') submission = input(query) while True: if (not case_sensitive): submission = submission.lower() if (submission in ('quit', 'exit')): raise IOError('Exit command received.') if (submission in allowable): return submission submission = input(onfail)
8,692,285,292,826,950,000
Get an input from the user matching some string in allowable. Args: query (str): The query to issue the user with. allowable (str or container): The options which the user is allowed to submit. If this is a string, acceptable answers will be substrings. For containers acceptable answers will be elements of the container. Returns: The correct input returned Raises: IOError: A request to quit was submitted.
deutscheflash.py
force_console_input
n-Holmes/deutscheflash
python
def force_console_input(query: str, allowable, onfail: str='Input not recognised, please try again.\n', case_sensitive=False): 'Get an input from the user matching some string in allowable.\n\n Args:\n query (str): The query to issue the user with.\n allowable (str or container): The options which the user is allowed to submit.\n If this is a string, acceptable answers will be substrings.\n For containers acceptable answers will be elements of the container.\n \n Returns:\n The correct input returned\n \n Raises:\n IOError: A request to quit was submitted.\n ' if (not allowable): raise ValueError('At least one entry must be allowable.') submission = input(query) while True: if (not case_sensitive): submission = submission.lower() if (submission in ('quit', 'exit')): raise IOError('Exit command received.') if (submission in allowable): return submission submission = input(onfail)
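A short sketch of calling force_console_input; the prompt and allowed answers are placeholders. Per the code above, any answer outside the allowed set re-prompts, and typing quit or exit raises IOError.

# Hypothetical prompt and options.
answer = force_console_input("Continue? [y/n] ", ("y", "n"))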
def get_languages(): 'Gets the language: genders dictionary.' with open('genders.json', 'r') as f: return json.loads(f.read())
-1,632,231,346,404,707,800
Gets the language: genders dictionary.
deutscheflash.py
get_languages
n-Holmes/deutscheflash
python
def get_languages(): with open('genders.json', 'r') as f: return json.loads(f.read())
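A hedged sketch of calling get_languages; the shape of genders.json is an assumption based on the docstring ("language: genders dictionary").

# Expects a genders.json next to the script, e.g. {"german": ["der", "die", "das"]} (contents assumed).
languages = get_languages()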