_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q32500
AzInteractiveShell.example_repl
train
def example_repl(self, text, example, start_index, continue_flag):
    """ REPL for interactive tutorials.

    Walks the user through an example command one token at a time: the full
    example is shown in the 'example_line' buffer, and the default buffer is
    pre-filled with the portion of the command typed so far.

    :param text: the full example command text
    :param example: the example description shown above the prompt
    :param start_index: token index at which the interactive walk begins
    :param continue_flag: flag passed through and returned to the caller
    :return: (cmd, continue_flag) -- the command assembled so far
    """
    if start_index:
        # include the token at start_index itself
        start_index = start_index + 1
        cmd = ' '.join(text.split()[:start_index])
        # a dedicated CLI instance so the tutorial has its own event loop/layout
        example_cli = CommandLineInterface(
            application=self.create_application(
                full_layout=False),
            eventloop=create_eventloop())
        example_cli.buffers['example_line'].reset(
            initial_document=Document(u'{}\n'.format(
                add_new_lines(example)))
        )
        while start_index < len(text.split()):
            if self.default_command:
                # strip the scoped default so it isn't shown twice
                cmd = cmd.replace(self.default_command + ' ', '')
            # pre-fill the prompt with the command assembled so far
            example_cli.buffers[DEFAULT_BUFFER].reset(
                initial_document=Document(
                    u'{}'.format(cmd),
                    cursor_position=len(cmd)))
            example_cli.request_redraw()
            answer = example_cli.run()
            if not answer:
                # NOTE(review): returns without example_cli.exit() -- presumably
                # acceptable because the caller aborts the tutorial here; confirm.
                return "", True
            answer = answer.text
            if answer.strip('\n') == cmd.strip('\n'):
                # user pressed enter without adding anything: re-prompt
                continue
            else:
                if len(answer.split()) > 1:
                    # user typed the next token: advance and append it plus
                    # the following token from the example text
                    start_index += 1
                    cmd += " " + answer.split()[-1] + " " +\
                        u' '.join(text.split()[start_index:start_index + 1])
        example_cli.exit()
        del example_cli
    else:
        # no starting index: run the example text as-is
        cmd = text
    return cmd, continue_flag
python
{ "resource": "" }
q32501
AzInteractiveShell.handle_jmespath_query
train
def handle_jmespath_query(self, args):
    """ Handle the jmespath query gesture: either print the previous result
    (optionally filtered) or inject query results into the next command.

    :param args: the tokenized user input
    :return: continue_flag -- True when the input was consumed here
    """
    continue_flag = False
    query_symbol = SELECT_SYMBOL['query']
    symbol_len = len(query_symbol)
    try:
        if len(args) == 1:
            # if arguments start with query_symbol, just print query result
            if args[0] == query_symbol:
                # bare symbol: show the whole previous result
                result = self.last.result
            elif args[0].startswith(query_symbol):
                # symbol followed by a jmespath expression: filter the result
                result = jmespath.search(args[0][symbol_len:], self.last.result)
            # NOTE(review): if args[0] neither equals nor starts with the
            # symbol, 'result' is unbound here -- presumably this handler is
            # only invoked when the symbol is present; confirm at the caller.
            print(json.dumps(result, sort_keys=True, indent=2), file=self.output)
        elif args[0].startswith(query_symbol):
            # print error message, user unsure of query shortcut usage
            print(("Usage Error: " + os.linesep +
                   "1. Use {0} stand-alone to display previous result with optional filtering "
                   "(Ex: {0}[jmespath query])" +
                   os.linesep + "OR:" + os.linesep +
                   "2. Use {0} to query the previous result for argument values "
                   "(Ex: group show --name {0}[jmespath query])").format(query_symbol), file=self.output)
        else:
            # query, inject into cmd
            def jmespath_query(match):
                # replace each query occurrence with its evaluated value
                if match.group(0) == query_symbol:
                    return str(self.last.result)
                query_result = jmespath.search(match.group(0)[symbol_len:], self.last.result)
                return str(query_result)

            def sub_result(arg):
                escaped_symbol = re.escape(query_symbol)
                # regex captures query symbol and all characters following it in the argument
                return json.dumps(re.sub(r'%s.*' % escaped_symbol, jmespath_query, arg))

            cmd_base = ' '.join(map(sub_result, args))
            self.cli_execute(cmd_base)
            continue_flag = True
    except (jmespath.exceptions.ParseError, CLIError) as e:
        print("Invalid Query Input: " + str(e), file=self.output)
        continue_flag = True
    return continue_flag
python
{ "resource": "" }
q32502
AzInteractiveShell.handle_scoping_input
train
def handle_scoping_input(self, continue_flag, cmd, text):
    """ Handle a scoping gesture: set, extend, or remove the default command scope.

    :param continue_flag: flag returned to the caller (always set True here)
    :param cmd: the command text, stripped of scope symbols as a side effect
    :param text: the raw user input containing the scope symbol
    :return: (continue_flag, cmd)
    """
    # everything after the scope symbol, split into candidate scope tokens
    default_split = text.partition(SELECT_SYMBOL['scope'])[2].split()
    cmd = cmd.replace(SELECT_SYMBOL['scope'], '')
    continue_flag = True

    if not default_split:
        # bare scope symbol: clear all scoping
        self.default_command = ""
        print('unscoping all', file=self.output)
        return continue_flag, cmd

    while default_split:
        if not text:
            value = ''
        else:
            value = default_split[0]

        # candidate scope = current default command plus the new token
        tree_path = self.default_command.split()
        tree_path.append(value)

        if self.completer.command_tree.in_tree(tree_path):
            # valid command path: extend the scope
            self.set_scope(value)
            print("defaulting: " + value, file=self.output)
            cmd = cmd.replace(SELECT_SYMBOL['scope'], '')
        elif SELECT_SYMBOL['unscope'] == default_split[0] and self.default_command.split():
            # unscope gesture: pop the last token off the default command
            value = self.default_command.split()[-1]
            self.default_command = ' ' + ' '.join(self.default_command.split()[:-1])
            if not self.default_command.strip():
                self.default_command = self.default_command.strip()
            print('unscoping: ' + value, file=self.output)
        elif SELECT_SYMBOL['unscope'] not in text:
            print("Scope must be a valid command", file=self.output)

        default_split = default_split[1:]
    return continue_flag, cmd
python
{ "resource": "" }
q32503
AzInteractiveShell.cli_execute
train
def cli_execute(self, cmd):
    """ Send the command to the CLI to be executed.

    Loads profile/config/session state, builds an invocation, and either runs
    it synchronously or -- when '--progress' is passed -- on a daemon thread
    alongside the progress view. Records the exit code in self.last_exit and
    the result in self.last.
    """
    try:
        args = parse_quotes(cmd)

        # 'feedback' opts the user out of future feedback prompts
        if args and args[0] == 'feedback':
            self.config.set_feedback('yes')
            self.user_feedback = False

        # refresh account/config/session state from the azure config folder
        azure_folder = get_config_dir()
        if not os.path.exists(azure_folder):
            os.makedirs(azure_folder)
        ACCOUNT.load(os.path.join(azure_folder, 'azureProfile.json'))
        CONFIG.load(os.path.join(azure_folder, 'az.json'))
        SESSION.load(os.path.join(azure_folder, 'az.sess'), max_age=3600)

        invocation = self.cli_ctx.invocation_cls(cli_ctx=self.cli_ctx,
                                                 parser_cls=self.cli_ctx.parser_cls,
                                                 commands_loader_cls=self.cli_ctx.commands_loader_cls,
                                                 help_cls=self.cli_ctx.help_cls)

        if '--progress' in args:
            # run the command and the progress view on background daemon threads
            args.remove('--progress')
            execute_args = [args]
            thread = Thread(target=invocation.execute, args=execute_args)
            thread.daemon = True
            thread.start()
            self.threads.append(thread)
            self.curr_thread = thread

            progress_args = [self]
            thread = Thread(target=progress_view, args=progress_args)
            thread.daemon = True
            thread.start()
            self.threads.append(thread)
            result = None
        else:
            result = invocation.execute(args)

        self.last_exit = 0
        if result and result.result is not None:
            if self.output:
                self.output.write(result)
                self.output.flush()
            else:
                formatter = self.cli_ctx.output.get_formatter(self.cli_ctx.invocation.data['output'])
                self.cli_ctx.output.out(result, formatter=formatter, out_file=sys.stdout)
                self.last = result
    except Exception as ex:  # pylint: disable=broad-except
        self.last_exit = handle_exception(ex)
    except SystemExit as ex:
        # SystemExit is not a subclass of Exception, so this clause is reachable.
        # NOTE(review): ex.code can be None, which would make int() raise -- confirm.
        self.last_exit = int(ex.code)
python
{ "resource": "" }
q32504
AzInteractiveShell.progress_patch
train
def progress_patch(self, _=False):
    """ Force the CLI context to use the shell's own progress view. """
    from .progress import ShellProgressView
    controller = self.cli_ctx.progress_controller
    controller.init_progress(ShellProgressView())
    return controller
python
{ "resource": "" }
q32505
AzInteractiveShell.run
train
def run(self):
    """ starts the REPL """
    # install the shell's progress view and patch future controller lookups
    from .progress import ShellProgressView
    self.cli_ctx.get_progress_controller().init_progress(ShellProgressView())
    self.cli_ctx.get_progress_controller = self.progress_patch

    # load the command table in the background so the prompt appears quickly
    self.command_table_thread = LoadCommandTableThread(self.restart_completer, self)
    self.command_table_thread.start()

    from .configuration import SHELL_HELP
    self.cli.buffers['symbols'].reset(
        initial_document=Document(u'{}'.format(SHELL_HELP)))
    # flush telemetry for new commands and send successful interactive mode entry event
    telemetry.set_success()
    telemetry.flush()

    while True:
        try:
            document = self.cli.run(reset_current_buffer=True)
            text = document.text
            if not text:
                # not input
                self.set_prompt()
                continue
            cmd = text
            outside = False
        except AttributeError:
            # when the user pressed Control D
            break
        except (KeyboardInterrupt, ValueError):
            # CTRL C
            self.set_prompt()
            continue
        else:
            self.history.append(text)
            # shell gestures (scoping, queries, '#' etc.) are handled first
            b_flag, c_flag, outside, cmd = self._special_cases(cmd, outside)

            if b_flag:
                break
            if c_flag:
                self.set_prompt()
                continue

            self.set_prompt()

            if outside:
                # '#' gesture: run in the OS shell rather than the CLI
                subprocess.Popen(cmd, shell=True).communicate()
            else:
                telemetry.start()
                self.cli_execute(cmd)
                # log the success or failure of the command
                if self.last_exit and self.last_exit != 0:
                    telemetry.set_failure()
                else:
                    telemetry.set_success()
                telemetry.flush()
    telemetry.conclude()
python
{ "resource": "" }
q32506
progress_view
train
def progress_view(shell):
    """ Update the shell's 'progress' buffer until the operation reports done.

    Renders either a determinate progress bar (when the reporter supplies one)
    or an animated "heart beat" bar sized to the terminal width.

    :param shell: the interactive shell whose cli buffers are updated
    :return: True when the view loop has finished
    """
    while not ShellProgressView.done:
        _, col = get_window_dim()
        col = int(col)
        progress = get_progress_message()
        # only the last line of a multi-line message counts toward the width
        if '\n' in progress:
            prog_list = progress.split('\n')
            prog_val = len(prog_list[-1])
        else:
            prog_val = len(progress)
        # remaining columns available for the animated bar (4 = padding/separator)
        buffer_size = col - prog_val - 4

        if ShellProgressView.progress_bar:
            # determinate progress: show the reporter-provided bar and reset animation state
            doc = u'{}:{}'.format(progress, ShellProgressView.progress_bar)
            shell.spin_val = -1
            counter = 0
            ShellProgressView.heart_bar = ''
        else:
            if progress and not ShellProgressView.done:
                heart_bar = ShellProgressView.heart_bar
                if shell.spin_val >= 0:
                    # animation already running: scroll by one beat and re-fit to width
                    beat = ShellProgressView.heart_beat_values[_get_heart_frequency()]
                    heart_bar += beat
                    heart_bar = heart_bar[len(beat):]
                    len_beat = len(heart_bar)
                    if len_beat > buffer_size:
                        heart_bar = heart_bar[len_beat - buffer_size:]
                    while len(heart_bar) < buffer_size:
                        beat = ShellProgressView.heart_beat_values[_get_heart_frequency()]
                        heart_bar += beat
                else:
                    # first frame: fill the whole width with beats
                    shell.spin_val = 0
                    counter = 0
                    while counter < buffer_size:
                        beat = ShellProgressView.heart_beat_values[_get_heart_frequency()]
                        heart_bar += beat
                        counter += len(beat)
                ShellProgressView.heart_bar = heart_bar
            doc = u'{}:{}'.format(progress, ShellProgressView.heart_bar)
        shell.cli.buffers['progress'].reset(
            initial_document=Document(doc))
        shell.cli.request_redraw()
        sleep(shell.intermediate_sleep)

    # reset shared state for the next operation
    ShellProgressView.done = False
    ShellProgressView.progress_bar = ''
    shell.spin_val = -1
    sleep(shell.final_sleep)
    return True
python
{ "resource": "" }
q32507
ShellProgressView.write
train
def write(self, args):  # pylint: disable=no-self-use
    """ Record a progress update from the CLI's progress reporter.

    :param args: dict with optional 'message' (str) and 'percent' keys
    """
    ShellProgressView.done = False
    message = args.get('message', '')
    percent = args.get('percent', None)
    if percent:
        # NOTE(review): percent looks like a 0..1 fraction -- int(percent) == 1
        # marks completion. A percent of exactly 0 skips this branch; confirm
        # that is intended.
        ShellProgressView.progress_bar = _format_value(message, percent)
        if int(percent) == 1:
            ShellProgressView.progress_bar = None
    ShellProgressView.progress = message
python
{ "resource": "" }
q32508
sqlvm_list
train
def sqlvm_list(
        client, resource_group_name=None):
    '''
    Lists all SQL virtual machines in a resource group or, when no group is
    given, across the whole subscription.
    '''
    if not resource_group_name:
        # no scope supplied: enumerate subscription-wide
        return client.list()
    # enumerate only within the requested resource group
    return client.list_by_resource_group(resource_group_name=resource_group_name)
python
{ "resource": "" }
q32509
sqlvm_group_list
train
def sqlvm_group_list(
        client, resource_group_name=None):
    '''
    Lists all SQL virtual machine groups in a resource group or, when no
    group is given, across the whole subscription.
    '''
    if not resource_group_name:
        # no scope supplied: enumerate subscription-wide
        return client.list()
    # enumerate only within the requested resource group
    return client.list_by_resource_group(resource_group_name=resource_group_name)
python
{ "resource": "" }
q32510
sqlvm_group_create
train
def sqlvm_group_create(client, cmd, sql_virtual_machine_group_name, resource_group_name, location,
                       sql_image_offer, sql_image_sku, domain_fqdn, cluster_operator_account,
                       sql_service_account, storage_account_url, storage_account_key,
                       cluster_bootstrap_account=None, file_share_witness_path=None, ou_path=None,
                       tags=None):
    '''
    Creates a SQL virtual machine group.

    Builds the WSFC domain profile and group model, waits for the
    create-or-update operation to finish, then returns the created group.
    '''
    tags = tags or {}

    # Create the windows server failover cluster domain profile object.
    # Note the SDK names the storage key 'storage_account_primary_key'.
    wsfc_domain_profile_object = WsfcDomainProfile(domain_fqdn=domain_fqdn,
                                                   ou_path=ou_path,
                                                   cluster_bootstrap_account=cluster_bootstrap_account,
                                                   cluster_operator_account=cluster_operator_account,
                                                   sql_service_account=sql_service_account,
                                                   file_share_witness_path=file_share_witness_path,
                                                   storage_account_url=storage_account_url,
                                                   storage_account_primary_key=storage_account_key)

    sqlvm_group_object = SqlVirtualMachineGroup(sql_image_offer=sql_image_offer,
                                                sql_image_sku=sql_image_sku,
                                                wsfc_domain_profile=wsfc_domain_profile_object,
                                                location=location,
                                                tags=tags)

    # Since it's a running operation, we will do the put and then the get to display the instance.
    LongRunningOperation(cmd.cli_ctx)(sdk_no_wait(False, client.create_or_update, resource_group_name,
                                                  sql_virtual_machine_group_name, sqlvm_group_object))
    return client.get(resource_group_name, sql_virtual_machine_group_name)
python
{ "resource": "" }
q32511
sqlvm_group_update
train
def sqlvm_group_update(instance, domain_fqdn=None, sql_image_sku=None, sql_image_offer=None,
                       cluster_operator_account=None, sql_service_account=None, storage_account_url=None,
                       storage_account_key=None, cluster_bootstrap_account=None, file_share_witness_path=None,
                       ou_path=None, tags=None):
    '''
    Updates a SQL virtual machine group: any argument left as None is
    preserved on the existing instance.
    '''
    # top-level image properties
    for attr, value in (('sql_image_sku', sql_image_sku),
                        ('sql_image_offer', sql_image_offer)):
        if value is not None:
            setattr(instance, attr, value)

    # WSFC domain profile properties; note --storage-account-key maps onto
    # the profile's 'storage_access_key' attribute.
    profile_updates = {
        'domain_fqdn': domain_fqdn,
        'cluster_operator_account': cluster_operator_account,
        'cluster_bootstrap_account': cluster_bootstrap_account,
        'sql_service_account': sql_service_account,
        'storage_account_url': storage_account_url,
        'storage_access_key': storage_account_key,
        'file_share_witness_path': file_share_witness_path,
        'ou_path': ou_path,
    }
    for attr, value in profile_updates.items():
        if value is not None:
            setattr(instance.wsfc_domain_profile, attr, value)

    if tags is not None:
        instance.tags = tags

    return instance
python
{ "resource": "" }
q32512
sqlvm_aglistener_create
train
def sqlvm_aglistener_create(client, cmd, availability_group_listener_name, sql_virtual_machine_group_name,
                            resource_group_name, availability_group_name, ip_address, subnet_resource_id,
                            load_balancer_resource_id, probe_port, sql_virtual_machine_instances, port=1433,
                            public_ip_address_resource_id=None):
    '''
    Creates an availability group listener.

    Validates all resource ids up front, builds the private IP / load
    balancer / listener models, waits for creation, and returns the listener.
    '''
    # fail fast on malformed ARM resource ids
    if not is_valid_resource_id(subnet_resource_id):
        raise CLIError("Invalid subnet resource id.")

    if not is_valid_resource_id(load_balancer_resource_id):
        raise CLIError("Invalid load balancer resource id.")

    if public_ip_address_resource_id and not is_valid_resource_id(public_ip_address_resource_id):
        raise CLIError("Invalid public IP address resource id.")

    for sqlvm in sql_virtual_machine_instances:
        if not is_valid_resource_id(sqlvm):
            raise CLIError("Invalid SQL virtual machine resource id.")

    # Create the private ip address
    # (the conditional re-check is redundant after the validation above)
    private_ip_object = PrivateIPAddress(ip_address=ip_address,
                                         subnet_resource_id=subnet_resource_id if is_valid_resource_id(subnet_resource_id) else None)

    # Create the load balancer configurations
    load_balancer_object = LoadBalancerConfiguration(private_ip_address=private_ip_object,
                                                     public_ip_address_resource_id=public_ip_address_resource_id,
                                                     load_balancer_resource_id=load_balancer_resource_id,
                                                     probe_port=probe_port,
                                                     sql_virtual_machine_instances=sql_virtual_machine_instances)

    # Create the availability group listener object
    ag_listener_object = AvailabilityGroupListener(availability_group_name=availability_group_name,
                                                   load_balancer_configurations=load_balancer_object,
                                                   port=port)

    # long-running operation: put, then get to display the created instance
    LongRunningOperation(cmd.cli_ctx)(sdk_no_wait(False, client.create_or_update, resource_group_name,
                                                  sql_virtual_machine_group_name, availability_group_listener_name,
                                                  ag_listener_object))
    return client.get(resource_group_name, sql_virtual_machine_group_name, availability_group_listener_name)
python
{ "resource": "" }
q32513
sqlvm_update
train
def sqlvm_update(instance, sql_server_license_type=None, enable_auto_patching=None, day_of_week=None,
                 maintenance_window_starting_hour=None, maintenance_window_duration=None,
                 enable_auto_backup=None, enable_encryption=False, retention_period=None,
                 storage_account_url=None, storage_access_key=None, backup_password=None,
                 backup_system_dbs=False, backup_schedule_type=None, full_backup_frequency=None,
                 full_backup_start_time=None, full_backup_window_hours=None, log_backup_frequency=None,
                 enable_key_vault_credential=None, credential_name=None, azure_key_vault_url=None,
                 service_principal_name=None, service_principal_secret=None, connectivity_type=None,
                 port=None, sql_workload_type=None, enable_r_services=None, tags=None):
    '''
    Updates a SQL virtual machine.

    Each settings group (auto-patching, auto-backup, key vault credential,
    server configuration) is rebuilt whenever any of its arguments is given;
    supplying any member of a group implicitly enables it unless its enable
    flag is explicitly False.
    '''
    if tags is not None:
        instance.tags = tags
    if sql_server_license_type is not None:
        instance.sql_server_license_type = sql_server_license_type

    # --- auto patching: touching any member of the group rebuilds it ---
    if (enable_auto_patching is not None or day_of_week is not None or
            maintenance_window_starting_hour is not None or maintenance_window_duration is not None):
        # implicit enable unless explicitly disabled
        enable_auto_patching = enable_auto_patching if enable_auto_patching is False else True
        instance.auto_patching_settings = AutoPatchingSettings(enable=enable_auto_patching,
                                                               day_of_week=day_of_week,
                                                               maintenance_window_starting_hour=maintenance_window_starting_hour,
                                                               maintenance_window_duration=maintenance_window_duration)

    # --- auto backup: same implicit-enable pattern ---
    if (enable_auto_backup is not None or enable_encryption or retention_period is not None or
            storage_account_url is not None or storage_access_key is not None or
            backup_password is not None or backup_system_dbs or backup_schedule_type is not None or
            full_backup_frequency is not None or full_backup_start_time is not None or
            full_backup_window_hours is not None or log_backup_frequency is not None):
        enable_auto_backup = enable_auto_backup if enable_auto_backup is False else True
        # encryption and system-db flags are only meaningful while enabled
        instance.auto_backup_settings = AutoBackupSettings(enable=enable_auto_backup,
                                                           enable_encryption=enable_encryption if enable_auto_backup else None,
                                                           retention_period=retention_period,
                                                           storage_account_url=storage_account_url,
                                                           storage_access_key=storage_access_key,
                                                           password=backup_password,
                                                           backup_system_dbs=backup_system_dbs if enable_auto_backup else None,
                                                           backup_schedule_type=backup_schedule_type,
                                                           full_backup_frequency=full_backup_frequency,
                                                           full_backup_start_time=full_backup_start_time,
                                                           full_backup_window_hours=full_backup_window_hours,
                                                           log_backup_frequency=log_backup_frequency)

    # --- key vault credential: same implicit-enable pattern ---
    if (enable_key_vault_credential is not None or credential_name is not None or
            azure_key_vault_url is not None or service_principal_name is not None or
            service_principal_secret is not None):
        enable_key_vault_credential = enable_key_vault_credential if enable_key_vault_credential is False else True
        instance.key_vault_credential_settings = KeyVaultCredentialSettings(enable=enable_key_vault_credential,
                                                                            credential_name=credential_name,
                                                                            service_principal_name=service_principal_name,
                                                                            service_principal_secret=service_principal_secret,
                                                                            azure_key_vault_url=azure_key_vault_url)

    # --- server configuration: start empty, fill in whatever was supplied ---
    instance.server_configurations_management_settings = ServerConfigurationsManagementSettings()

    if (connectivity_type is not None or port is not None):
        instance.server_configurations_management_settings.sql_connectivity_update_settings = SqlConnectivityUpdateSettings(connectivity_type=connectivity_type,
                                                                                                                            port=port)

    if sql_workload_type is not None:
        instance.server_configurations_management_settings.sql_workload_type_update_settings = SqlWorkloadTypeUpdateSettings(sql_workload_type=sql_workload_type)

    if enable_r_services is not None:
        instance.server_configurations_management_settings.additional_features_server_configurations = AdditionalFeaturesServerConfigurations(is_rservices_enabled=enable_r_services)

    # If none of the settings was modified, reset server_configurations_management_settings to be null
    if (instance.server_configurations_management_settings.sql_connectivity_update_settings is None and
            instance.server_configurations_management_settings.sql_workload_type_update_settings is None and
            instance.server_configurations_management_settings.sql_storage_update_settings is None and
            instance.server_configurations_management_settings.additional_features_server_configurations is None):
        instance.server_configurations_management_settings = None

    return instance
instance.server_configurations_management_settings.sql_workload_type_update_settings is None and instance.server_configurations_management_settings.sql_storage_update_settings is None and instance.server_configurations_management_settings.additional_features_server_configurations is None): instance.server_configurations_management_settings = None return instance
python
{ "resource": "" }
q32514
add_sqlvm_to_group
train
def add_sqlvm_to_group(instance, sql_virtual_machine_group_resource_id, sql_service_account_password,
                       cluster_operator_account_password, cluster_bootstrap_account_password=None):
    '''
    Add a SQL virtual machine to a SQL virtual machine group by attaching the
    group resource id and the WSFC domain credentials to the instance.
    '''
    # refuse anything that is not a well-formed ARM resource id
    if not is_valid_resource_id(sql_virtual_machine_group_resource_id):
        raise CLIError("Invalid SQL virtual machine group resource id.")

    credentials = WsfcDomainCredentials(
        cluster_bootstrap_account_password=cluster_bootstrap_account_password,
        cluster_operator_account_password=cluster_operator_account_password,
        sql_service_account_password=sql_service_account_password)

    instance.sql_virtual_machine_group_resource_id = sql_virtual_machine_group_resource_id
    instance.wsfc_domain_credentials = credentials
    return instance
python
{ "resource": "" }
q32515
add_sqlvm_to_aglistener
train
def add_sqlvm_to_aglistener(instance, sqlvm_resource_id):
    '''
    Add a SQL virtual machine to an availability group listener (idempotent:
    an id already present is not appended twice).
    '''
    if not is_valid_resource_id(sqlvm_resource_id):
        raise CLIError("Invalid SQL virtual machine resource id.")

    # the vm list is kept on the first load balancer configuration
    instances = instance.load_balancer_configurations[0].sql_virtual_machine_instances
    if sqlvm_resource_id not in instances:
        instances.append(sqlvm_resource_id)

    return instance
python
{ "resource": "" }
q32516
remove_sqlvm_from_aglistener
train
def remove_sqlvm_from_aglistener(instance, sqlvm_resource_id):
    '''
    Remove a SQL virtual machine from an availability group listener
    (no-op when the id is not present).
    '''
    if not is_valid_resource_id(sqlvm_resource_id):
        raise CLIError("Invalid SQL virtual machine resource id.")

    # the vm list is kept on the first load balancer configuration
    instances = instance.load_balancer_configurations[0].sql_virtual_machine_instances
    if sqlvm_resource_id in instances:
        instances.remove(sqlvm_resource_id)

    return instance
python
{ "resource": "" }
q32517
publish_app
train
def publish_app(cmd, client, resource_group_name, resource_name, code_dir=None, proj_name=None, version='v3'):
    """Publish local bot code to Azure. This method is directly called via "bot publish"

    :param cmd: the CLI command context
    :param client: the bot service management client
    :param resource_group_name: resource group containing the bot
    :param resource_name: name of the bot resource
    :param code_dir: local directory with the bot source; defaults to the CWD
    :param proj_name: project name used when preparing a v4 publish
    :param version: 'v3' delegates to publish_appv3; anything else uses the v4 path
    :return: the kudu deployment output dict
    """
    if version == 'v3':
        return publish_appv3(cmd, client, resource_group_name, resource_name, code_dir)

    # Get the bot information and ensure it's not only a registration bot.
    bot = client.bots.get(
        resource_group_name=resource_group_name,
        resource_name=resource_name
    )
    if bot.kind == 'bot':
        raise CLIError('Bot kind is \'bot\', meaning it is a registration bot. '
                       'Source publish is not supported for registration only bots.')

    # If the user does not pass in a path to the local bot project, get the current working directory.
    if not code_dir:
        code_dir = os.getcwd()

        logger.info('Parameter --code-dir not provided, defaulting to current working directory, %s. '
                    'For more information, run \'az bot publish -h\'', code_dir)

    if not os.path.isdir(code_dir):
        raise CLIError('The path %s is not a valid directory. '
                       'Please supply a valid directory path containing your source code.' % code_dir)

    # Ensure that the directory contains appropriate post deploy scripts folder
    if 'PostDeployScripts' not in os.listdir(code_dir):
        BotPublishPrep.prepare_publish_v4(logger, code_dir, proj_name)

    logger.info('Creating upload zip file.')
    zip_filepath = BotPublishPrep.create_upload_zip(logger, code_dir, include_node_modules=False)
    logger.info('Zip file path created, at %s.', zip_filepath)

    kudu_client = KuduClient(cmd, resource_group_name, resource_name, bot)
    output = kudu_client.publish(zip_filepath)

    logger.info('Bot source published. Preparing bot application to run the new source.')
    # the zip is created in the working directory by create_upload_zip
    os.remove('upload.zip')

    if os.path.exists(os.path.join('.', 'package.json')):
        logger.info('Detected language javascript. Installing node dependencies in remote bot.')
        __install_node_dependencies(kudu_client)

    if output.get('active'):
        logger.info('Deployment successful!')

    if not output.get('active'):
        scm_url = output.get('url')
        deployment_id = output.get('id')

        # Instead of replacing "latest", which would could be in the bot name, we replace "deployments/latest"
        deployment_url = scm_url.replace('deployments/latest', 'deployments/%s' % deployment_id)

        logger.error('Deployment failed. To find out more information about this deployment, please visit %s.'
                     % deployment_url)
    return output
python
{ "resource": "" }
q32518
CommandTree.get_child
train
def get_child(self, child_name):  # pylint: disable=no-self-use
    """ Return the child node stored under ``child_name``.

    :raises ValueError: when no (truthy) child by that name exists
    """
    node = self.children.get(child_name)
    if not node:
        raise ValueError("Value {} not in this tree".format(child_name))
    return node
python
{ "resource": "" }
q32519
CommandTree.in_tree
train
def in_tree(self, cmd_args):
    """ Return True when the token sequence names a path that exists in this tree.

    An empty sequence is trivially in the tree.
    """
    node = self
    for token in cmd_args or ():
        try:
            node = node.get_child(token)
        except ValueError:
            return False
    return True
python
{ "resource": "" }
q32520
AliasExtensionTelemetrySession.generate_payload
train
def generate_payload(self):
    """ Generate a list of telemetry events as payload.

    The first event describes the alias transformation (merged with the
    session's base properties); one additional fault event is emitted per
    exception recorded during the session.
    """
    events = []
    transformation_task = self._get_alias_transformation_properties()
    transformation_task.update(self._get_based_properties())
    events.append(transformation_task)

    for exception in self.exceptions:
        # one fault event per collected exception
        properties = {
            'Reserved.DataModel.Fault.TypeString': exception.__class__.__name__,
            'Reserved.DataModel.Fault.Exception.Message': self.get_exception_message(exception),
            'Reserved.DataModel.Fault.Exception.StackTrace': _get_stack_trace(),
        }
        self.set_custom_properties(properties, 'ActionType', 'Exception')
        self.set_custom_properties(properties, 'Version', VERSION)
        events.append(properties)

    return events
python
{ "resource": "" }
q32521
sort_completions
train
def sort_completions(completions_gen):
    """ Sort completions so required items come first, then lexicographically. """
    from knack.help import REQUIRED_TAG

    def _weight(completion):
        # a leading space has the lowest ordinance, floating required items to the top
        required = completion.display_meta and completion.display_meta.startswith(REQUIRED_TAG)
        return (' ' if required else '') + completion.text

    return sorted(completions_gen, key=_weight)
python
{ "resource": "" }
q32522
AzCompleter.validate_param_completion
train
def validate_param_completion(self, param, leftover_args):
    """ Decide whether *param* should be offered as a completion.

    The parameter must extend the word being typed, must not already appear
    on the line (under any alias), and must match one of: a started long
    option, a started short option, or a fresh long option on an empty line.
    """
    if not self.validate_completion(param):
        return False

    # reject when the parameter or one of its aliases is already on the line
    aliases = self.command_param_info.get(self.current_command, {}).get(param, [])
    if any(alias in leftover_args for alias in aliases):
        return False

    word = self.unfinished_word
    long_started = word.startswith("--") and param.startswith("--")
    short_started = word.startswith("-") and not param.startswith("--")
    fresh_long = not word and not leftover_args and param.startswith("--")
    return long_started or short_started or fresh_long
python
{ "resource": "" }
q32523
AzCompleter.process_dynamic_completion
train
def process_dynamic_completion(self, completion):
    """ Validate a dynamically-generated value and yield it as a Completion. """
    # multi-word values must be quoted so they survive shell-style splitting
    if len(completion.split()) > 1:
        completion = '"{}"'.format(completion)

    if self.validate_completion(completion):
        yield Completion(completion, -len(self.unfinished_word))
python
{ "resource": "" }
q32524
AzCompleter.gen_enum_completions
train
def gen_enum_completions(self, arg_name):
    """ Yield completions for an argument's declared enum choices, if any. """
    try:
        choices = self.cmdtab[self.current_command].arguments[arg_name].choices
        for option in choices:
            if self.validate_completion(option):
                yield Completion(option, -len(self.unfinished_word))
    except TypeError:
        # the argument has no choices configured (choices is None -> not iterable)
        pass
python
{ "resource": "" }
q32525
AzCompleter.get_arg_name
train
def get_arg_name(self, param):
    """ Map a CLI option string (e.g. '-n') to its command-table argument name,
    or None when the command or option is unknown. """
    if self.current_command not in self.cmdtab:
        return None

    arguments = self.cmdtab[self.current_command].arguments
    for arg_name, argument in arguments.items():
        # options_list holds every spelling of the option (e.g. '--name', '-n')
        if param in argument.options_list:
            return arg_name
    return None
python
{ "resource": "" }
q32526
AzCompleter.mute_parse_args
train
def mute_parse_args(self, text):
    """ Mute parser errors while computing parsed args, then restore them.

    Temporarily patches ``AzCliCommandParser.error`` and ``_check_value`` so
    that completer parsing never exits or prints, and restores the originals
    afterwards.

    :param text: the raw command line being completed
    :return: the parsed args namespace, or None when parsing failed
    """
    error = AzCliCommandParser.error
    _check_value = AzCliCommandParser._check_value

    AzCliCommandParser.error = error_pass
    AzCliCommandParser._check_value = _check_value_muted

    # BUG FIX: 'parse_args' was previously unbound when get_parsed_args raised,
    # causing an UnboundLocalError at the return statement.
    parse_args = None
    # No exception is expected. However, we add this try-catch block, as this may have far-reaching effects.
    try:
        parse_args = self.argsfinder.get_parsed_args(parse_quotes(text, quotes=False, string=False))
    except Exception:  # pylint: disable=broad-except
        pass
    finally:
        # restore the patched methods even if something unexpected propagates
        AzCliCommandParser.error = error
        AzCliCommandParser._check_value = _check_value

    return parse_args
python
{ "resource": "" }
q32527
AzCompleter.gen_dynamic_completions
train
def gen_dynamic_completions(self, text):
    """ generates the dynamic values, like the names of resource groups """
    try:  # pylint: disable=too-many-nested-blocks
        # the parameter currently being completed is the last leftover token
        param = self.leftover_args[-1]
        # command table specific name
        arg_name = self.get_arg_name(param)

        # static enum choices first
        for comp in self.gen_enum_completions(arg_name):
            yield comp

        parsed_args = self.mute_parse_args(text)

        # there are 3 formats for completers the cli uses
        # this try catches which format it is
        if self.cmdtab[self.current_command].arguments[arg_name].completer:
            completions = []
            try:
                # full signature: prefix + action + parsed_args
                completions = self.cmdtab[self.current_command].arguments[arg_name].completer(
                    prefix=self.unfinished_word, action=None, parsed_args=parsed_args)
            except TypeError:
                try:
                    # prefix-only signature
                    completions = self.cmdtab[self.current_command].arguments[arg_name].completer(
                        prefix=self.unfinished_word)
                except TypeError:
                    try:
                        # zero-argument signature
                        completions = self.cmdtab[self.current_command].arguments[arg_name].completer()
                    except TypeError:
                        pass  # other completion method used

            for comp in completions:
                for completion in self.process_dynamic_completion(comp):
                    yield completion

    # if the user isn't logged in
    except Exception:  # pylint: disable=broad-except
        pass
python
{ "resource": "" }
q32528
AzCompleter.yield_param_completion
train
def yield_param_completion(self, param, last_word):
    """ Build a Completion for a parameter, using its stored one-line
    description (newlines removed) as the display meta. """
    description_key = self.current_command + " " + str(param)
    meta = self.param_description.get(description_key, '').replace(os.linesep, '')
    return Completion(param, -len(last_word), display_meta=meta)
python
{ "resource": "" }
q32529
AzCompleter.gen_cmd_and_param_completions
train
def gen_cmd_and_param_completions(self):
    """ Yield parameter completions once a command is complete, otherwise
    yield child-command completions while walking the command tree. """
    if self.complete_command:
        # command fully typed: offer its parameters
        candidates = self.command_param_info.get(self.current_command, [])
        for candidate in candidates:
            if self.validate_param_completion(candidate, self.leftover_args):
                yield self.yield_param_completion(candidate, self.unfinished_word)
    elif not self.leftover_args:
        # still inside the command tree: offer subcommands
        for subcommand in self.subtree.children:
            if self.validate_completion(subcommand):
                yield Completion(subcommand, -len(self.unfinished_word))
python
{ "resource": "" }
q32530
AzCompleter.has_description
train
def has_description(self, param):
    """ Return True when a stored description exists for *param* and is not
    purely whitespace. """
    if param not in self.param_description:
        return False
    return not self.param_description[param].isspace()
python
{ "resource": "" }
q32531
AzCompleter.reformat_cmd
train
def reformat_cmd(self, text):
    """ Reformat the typed text, stripping noise before completion analysis.

    Removes a leading 'az' program token and any scope-default symbols, then
    prefixes the active default scope so completions are computed against the
    full command path.
    """
    # BUG FIX: drop only a leading 'az' token -- the previous
    # text.replace('az', '') removed the substring anywhere, mangling
    # words such as 'lazy' or resource names containing 'az'.
    stripped = text.lstrip()
    if stripped == 'az':
        text = ''
    elif stripped.startswith('az '):
        text = stripped[3:]

    # disregard defaulting symbols
    if text and SELECT_SYMBOL['scope'] == text[0:2]:
        text = text.replace(SELECT_SYMBOL['scope'], "")

    if self.shell_ctx.default_command:
        text = self.shell_ctx.default_command + ' ' + text

    return text
python
{ "resource": "" }
q32532
_remove_nulls
train
def _remove_nulls(managed_clusters): """ Remove some often-empty fields from a list of ManagedClusters, so the JSON representation doesn't contain distracting null fields. This works around a quirk of the SDK for python behavior. These fields are not sent by the server, but get recreated by the CLI's own "to_dict" serialization. """ attrs = ['tags'] ap_attrs = ['os_disk_size_gb', 'vnet_subnet_id'] sp_attrs = ['secret'] for managed_cluster in managed_clusters: for attr in attrs: if getattr(managed_cluster, attr, None) is None: delattr(managed_cluster, attr) for ap_profile in managed_cluster.agent_pool_profiles: for attr in ap_attrs: if getattr(ap_profile, attr, None) is None: delattr(ap_profile, attr) for attr in sp_attrs: if getattr(managed_cluster.service_principal_profile, attr, None) is None: delattr(managed_cluster.service_principal_profile, attr) return managed_clusters
python
{ "resource": "" }
q32533
ArgsFinder.get_parsed_args
train
def get_parsed_args(self, comp_words):
    """Parse *comp_words* with the patched parser, never letting a parse failure escape."""
    active_parsers = self._patch_argument_parser()
    namespace = argparse.Namespace()
    self.completing = True
    if USING_PYTHON2:
        # argparse on Python 2 only handles byte strings reliably.
        comp_words = [ensure_bytes(word) for word in comp_words]
    try:
        active_parsers[0].parse_known_args(comp_words, namespace=namespace)
    except BaseException:  # pylint: disable=broad-except
        # Completion must never crash on a half-typed command line.
        pass
    self.completing = False
    return namespace
python
{ "resource": "" }
q32534
get_alias_table
train
def get_alias_table():
    """Load and return the current alias table, or an empty one on any failure."""
    try:
        table = get_config_parser()
        table.read(azext_alias.alias.GLOBAL_ALIAS_PATH)
    except Exception:  # pylint: disable=broad-except
        # Corrupt or unreadable alias file: fall back to an empty table.
        return get_config_parser()
    return table
python
{ "resource": "" }
q32535
is_alias_command
train
def is_alias_command(subcommands, args):
    """
    Return True when the CLI arguments invoke ``az alias <subcommand>`` for
    any of the given subcommands.

    Args:
        subcommands: The list of subcommands to check through.
        args: The CLI arguments to process.
    """
    if not args:
        return False
    head = args[:2]
    return any(head == ['alias', subcommand] for subcommand in subcommands)
python
{ "resource": "" }
q32536
remove_pos_arg_placeholders
train
def remove_pos_arg_placeholders(alias_command):
    """
    Return *alias_command* lower-cased and truncated at the first
    positional/named argument placeholder.
    """
    words = shlex.split(alias_command)
    # The command part ends at the first token that is not a plain word,
    # or once the collision-check depth has been exceeded.
    boundary = len(words)
    for index, word in enumerate(words):
        if index > COLLISION_CHECK_LEVEL_DEPTH or not re.match('^[a-z]', word.lower()):
            boundary = index
            break
    return ' '.join(words[:boundary]).lower()
python
{ "resource": "" }
q32537
filter_aliases
train
def filter_aliases(alias_table):
    """
    Yield (first word of alias, truncated target command) for every section
    of the alias table that defines a 'command' option.
    """
    for section in alias_table.sections():
        if not alias_table.has_option(section, 'command'):
            continue
        command = remove_pos_arg_placeholders(alias_table.get(section, 'command'))
        yield (section.split()[0], command)
python
{ "resource": "" }
q32538
reduce_alias_table
train
def reduce_alias_table(alias_table):
    """
    Yield (alias, command) pairs for every section of the alias table that
    defines a 'command' option.
    """
    for section in alias_table.sections():
        if alias_table.has_option(section, 'command'):
            command = alias_table.get(section, 'command')
            yield (section, command)
python
{ "resource": "" }
q32539
retrieve_file_from_url
train
def retrieve_file_from_url(url):
    """
    Download *url* to a temporary file and return its absolute path.

    Raises CLIError when the download fails, or when (on Python 2, where
    urllib does not raise HTTPError) the downloaded body is an error page.
    """
    try:
        alias_source, _ = urlretrieve(url)
        # Python 2 error pages start with the numeric HTTP status code.
        with open(alias_source, 'r') as downloaded:
            content = downloaded.read()
        if content[:3].isdigit():
            raise CLIError(ALIAS_FILE_URL_ERROR.format(url, content.strip()))
    except CLIError:
        raise
    except Exception as exception:  # Python 3.x network/IO failures
        raise CLIError(ALIAS_FILE_URL_ERROR.format(url, exception))
    return alias_source
python
{ "resource": "" }
q32540
filter_alias_create_namespace
train
def filter_alias_create_namespace(namespace):
    """
    Collapse excess whitespace in the alias name and alias command of the
    'alias create' namespace, returning the modified namespace.
    """
    def squash(text):
        # split() with no args drops whitespace runs; rejoin with single spaces.
        return ' '.join(text.strip().split())

    namespace.alias_name = squash(namespace.alias_name)
    namespace.alias_command = squash(namespace.alias_command)
    return namespace
python
{ "resource": "" }
q32541
get_lexers
train
def get_lexers(main_lex, exam_lex, tool_lex):
    """Build the three lexer wrappers used by the shell layout."""
    if not main_lex:
        return None, None, None
    # Main lexer: prompt_toolkit lexers pass through, pygments lexers get adapted,
    # anything else is dropped.
    lexer = None
    if issubclass(main_lex, PromptLex):
        lexer = main_lex
    elif issubclass(main_lex, PygLex):
        lexer = PygmentsLexer(main_lex)
    # Example and toolbar lexers only ever need the pygments adapter.
    if exam_lex and issubclass(exam_lex, PygLex):
        exam_lex = PygmentsLexer(exam_lex)
    if tool_lex and issubclass(tool_lex, PygLex):
        tool_lex = PygmentsLexer(tool_lex)
    return lexer, exam_lex, tool_lex
python
{ "resource": "" }
q32542
get_anyhline
train
def get_anyhline(config):
    """Return a separator line if any description pane is enabled, else an empty window."""
    states = config.BOOLEAN_STATES
    settings = config.config
    show_commands = states[settings.get('Layout', 'command_description')]
    show_params = states[settings.get('Layout', 'param_description')]
    if show_commands or show_params:
        return Window(
            width=LayoutDimension.exact(1),
            height=LayoutDimension.exact(1),
            content=FillControl('-', token=Token.Line))
    return get_empty()
python
{ "resource": "" }
q32543
get_example
train
def get_example(config, exam_lex):
    """Window showing command examples, when enabled in the layout config."""
    enabled = config.BOOLEAN_STATES[config.config.get('Layout', 'examples')]
    if not enabled:
        return get_empty()
    return Window(content=BufferControl(buffer_name="examples", lexer=exam_lex))
python
{ "resource": "" }
q32544
get_hline
train
def get_hline():
    """Return a one-character horizontal separator line."""
    width = LayoutDimension.exact(1)
    height = LayoutDimension.exact(1)
    return Window(width=width, height=height, content=FillControl('-', token=Token.Line))
python
{ "resource": "" }
q32545
get_descriptions
train
def get_descriptions(config, exam_lex, lexer):
    """Choose which description panes to include based on the layout configuration."""
    states = config.BOOLEAN_STATES
    settings = config.config
    show_command = states[settings.get('Layout', 'command_description')]
    show_param = states[settings.get('Layout', 'param_description')]
    if show_command and show_param:
        # Both panes, side by side with a divider.
        return VSplit([get_descript(exam_lex), get_vline(), get_param(lexer)])
    if show_command:
        return get_descript(exam_lex)
    if show_param:
        return get_param(lexer)
    return get_empty()
python
{ "resource": "" }
q32546
LayoutManager.get_prompt_tokens
train
def get_prompt_tokens(self, _):
    """Return the prompt tokens, including the scoped default command when set."""
    default = self.shell_ctx.default_command
    prompt = 'az {}>> '.format(default) if default else 'az>> '
    return [(Token.Az, prompt)]
python
{ "resource": "" }
q32547
LayoutManager.create_tutorial_layout
train
def create_tutorial_layout(self):
    """ layout for example tutorial """
    # Only the main lexer is needed for the tutorial; example/toolbar lexers unused.
    lexer, _, _ = get_lexers(self.shell_ctx.lexer, None, None)
    layout_full = HSplit([
        # Top: the input line with a floating completions menu.
        FloatContainer(
            Window(
                BufferControl(
                    input_processors=self.input_processors,
                    lexer=lexer,
                    preview_search=Always()),
                get_height=get_height),
            [
                Float(xcursor=True,
                      ycursor=True,
                      content=CompletionsMenu(
                          max_height=MAX_COMPLETION,
                          scroll_offset=1,
                          extra_filter=(HasFocus(DEFAULT_BUFFER))))]),
        # Bottom: parameter help, the example line, and the tutorial toolbar.
        # Hidden once the CLI is done or while the renderer height is unknown.
        ConditionalContainer(
            HSplit([
                get_hline(),
                get_param(lexer),
                get_hline(),
                Window(
                    content=BufferControl(
                        buffer_name='example_line',
                        lexer=lexer
                    ),
                ),
                Window(
                    TokenListControl(
                        get_tutorial_tokens,
                        default_char=Char(' ', Token.Toolbar)),
                    height=LayoutDimension.exact(1)),
            ]),
            filter=~IsDone() & RendererHeightIsKnown()
        )
    ])
    return layout_full
python
{ "resource": "" }
q32548
LayoutManager.create_layout
train
def create_layout(self, exam_lex, toolbar_lex):
    """ creates the layout """
    lexer, exam_lex, toolbar_lex = get_lexers(self.shell_ctx.lexer, exam_lex, toolbar_lex)
    # Ensure exactly one DefaultPrompt processor is installed.
    if not any(isinstance(processor, DefaultPrompt) for processor in self.input_processors):
        self.input_processors.append(DefaultPrompt(self.get_prompt_tokens))
    # Lower pane: descriptions, examples, and the togglable default/symbol/
    # progress windows; hidden when done or while renderer height is unknown.
    layout_lower = ConditionalContainer(
        HSplit([
            get_anyhline(self.shell_ctx.config),
            get_descriptions(self.shell_ctx.config, exam_lex, lexer),
            get_examplehline(self.shell_ctx.config),
            get_example(self.shell_ctx.config, exam_lex),
            # Divider shown when either the defaults or symbols pane is visible.
            ConditionalContainer(
                get_hline(),
                filter=self.show_default | self.show_symbol
            ),
            ConditionalContainer(
                Window(
                    content=BufferControl(
                        buffer_name='default_values',
                        lexer=lexer
                    )
                ),
                filter=self.show_default
            ),
            # Extra divider only when both panes are visible.
            ConditionalContainer(
                get_hline(),
                filter=self.show_default & self.show_symbol
            ),
            ConditionalContainer(
                Window(
                    content=BufferControl(
                        buffer_name='symbols',
                        lexer=exam_lex
                    )
                ),
                filter=self.show_symbol
            ),
            ConditionalContainer(
                Window(
                    content=BufferControl(
                        buffer_name='progress',
                        lexer=lexer
                    )
                ),
                filter=self.show_progress
            ),
            Window(
                content=BufferControl(
                    buffer_name='bottom_toolbar',
                    lexer=toolbar_lex
                ),
            ),
        ]),
        filter=~IsDone() & RendererHeightIsKnown()
    )
    # Upper pane: the input line with a floating completions menu.
    layout_full = HSplit([
        FloatContainer(
            Window(
                BufferControl(
                    input_processors=self.input_processors,
                    lexer=lexer,
                    preview_search=Always()),
                get_height=get_height,
            ),
            [
                Float(xcursor=True,
                      ycursor=True,
                      content=CompletionsMenu(
                          max_height=MAX_COMPLETION,
                          scroll_offset=1,
                          extra_filter=(HasFocus(DEFAULT_BUFFER))))]),
        layout_lower
    ])
    return layout_full
python
{ "resource": "" }
q32549
ads_use_dev_spaces
train
def ads_use_dev_spaces(cluster_name, resource_group_name, update=False, space_name=None, do_not_prompt=False):
    """
    Use Azure Dev Spaces with a managed Kubernetes cluster.

    :param cluster_name: Name of the managed cluster.
    :param resource_group_name: Name of resource group (configurable via
        'az configure --defaults group=<name>').
    :param update: Update to the latest Azure Dev Spaces client components.
    :param space_name: Name of the new or existing dev space to select;
        defaults to an interactive selection experience.
    :param do_not_prompt: Do not prompt for confirmation. Requires --space.
    """
    azds_cli = _install_dev_spaces_cli(update)
    command = [azds_cli, 'use',
               '--name', cluster_name,
               '--resource-group', resource_group_name]
    if space_name is not None:
        command.extend(['--space', space_name])
    if do_not_prompt:
        command.append('-y')
    subprocess.call(command, universal_newlines=True)
python
{ "resource": "" }
q32550
ads_remove_dev_spaces
train
def ads_remove_dev_spaces(cluster_name, resource_group_name, do_not_prompt=False):
    """
    Remove Azure Dev Spaces from a managed Kubernetes cluster.

    :param cluster_name: Name of the managed cluster.
    :param resource_group_name: Name of resource group (configurable via
        'az configure --defaults group=<name>').
    :param do_not_prompt: Do not prompt for confirmation.
    """
    azds_cli = _install_dev_spaces_cli(False)
    command = [azds_cli, 'remove',
               '--name', cluster_name,
               '--resource-group', resource_group_name]
    if do_not_prompt:
        command.append('-y')
    subprocess.call(command, universal_newlines=True)
python
{ "resource": "" }
q32551
get_query_targets
train
def get_query_targets(cli_ctx, apps, resource_group):
    """Produce a list of uniform GUIDs representing applications to query."""
    if isinstance(apps, list):
        if resource_group:
            # A resource group scopes the lookup to the first app only.
            return [get_id_from_azure_resource(cli_ctx, apps[0], resource_group)]
        return [get_id_from_azure_resource(cli_ctx, app) for app in apps]
    if resource_group:
        return [get_id_from_azure_resource(cli_ctx, apps, resource_group)]
    # NOTE(review): a bare value with no resource group is returned as-is,
    # not wrapped in a list -- confirm callers rely on this asymmetry.
    return apps
python
{ "resource": "" }
q32552
get_linked_properties
train
def get_linked_properties(cli_ctx, app, resource_group, read_properties=None, write_properties=None):
    """Map user-facing role names to the strings identifying them on resources."""
    roles = {
        "ReadTelemetry": "api",
        "WriteAnnotations": "annotations",
        "AuthenticateSDKControlChannel": "agentconfig"
    }
    sub_id = get_subscription_id(cli_ctx)
    tmpl = '/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/components/{}'.format(
        sub_id, resource_group, app
    )

    def to_links(props):
        # Accept either a single role name or a list of them.
        names = props if isinstance(props, list) else [props]
        return ['{}/{}'.format(tmpl, roles[name]) for name in names]

    return to_links(read_properties), to_links(write_properties)
python
{ "resource": "" }
q32553
transform_aglistener_output
train
def transform_aglistener_output(result):
    '''
    Trims the Availability Group Listener result down to the relevant fields.
    '''
    from collections import OrderedDict
    from msrestazure.tools import parse_resource_id
    try:
        resource_group = getattr(result, 'resource_group', None) or parse_resource_id(result.id)['resource_group']
        output = OrderedDict()
        output['id'] = result.id
        output['name'] = result.name
        output['provisioningState'] = result.provisioning_state
        output['port'] = result.port
        output['resourceGroup'] = resource_group
        # Note, wsfcDomainCredentials will not display
        if result.load_balancer_configurations is not None:
            output['loadBalancerConfigurations'] = format_load_balancer_configuration_list(
                result.load_balancer_configurations)
        return output
    except AttributeError:
        # Return the raw response object if the formatting fails
        return result
python
{ "resource": "" }
q32554
format_wsfc_domain_profile
train
def format_wsfc_domain_profile(result):
    '''
    Formats the WSFCDomainProfile object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('clusterBootstrapAccount', result.cluster_bootstrap_account),
             ('domainFqdn', result.domain_fqdn),
             ('ouPath', result.ou_path),
             ('clusterOperatorAccount', result.cluster_operator_account),
             ('fileShareWitnessPath', result.file_share_witness_path),
             ('sqlServiceAccount', result.sql_service_account),
             ('storageAccountUrl', result.storage_account_url)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32555
format_additional_features_server_configurations
train
def format_additional_features_server_configurations(result):
    '''
    Formats the AdditionalFeaturesServerConfigurations object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('isRServicesEnabled', result.is_rservices_enabled),
             ('backupPermissionsForAzureBackupSvc', result.backup_permissions_for_azure_backup_svc)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32556
format_auto_backup_settings
train
def format_auto_backup_settings(result):
    '''
    Formats the AutoBackupSettings object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('enable', result.enable),
             ('enableEncryption', result.enable_encryption),
             ('retentionPeriod', result.retention_period),
             ('storageAccountUrl', result.storage_account_url),
             ('backupSystemDbs', result.backup_system_dbs),
             ('backupScheduleType', result.backup_schedule_type),
             ('fullBackupFrequency', result.full_backup_frequency),
             ('fullBackupStartTime', result.full_backup_start_time),
             ('fullBackupWindowHours', result.full_backup_window_hours),
             ('logBackupFrequency', result.log_backup_frequency)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32557
format_auto_patching_settings
train
def format_auto_patching_settings(result):
    '''
    Formats the AutoPatchingSettings object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('enable', result.enable),
             ('dayOfWeek', result.day_of_week),
             ('maintenanceWindowStartingHour', result.maintenance_window_starting_hour),
             ('maintenanceWindowDuration', result.maintenance_window_duration)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32558
format_key_vault_credential_settings
train
def format_key_vault_credential_settings(result):
    '''
    Formats the KeyVaultCredentialSettings object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('enable', result.enable),
             ('credentialName', result.credential_name),
             ('azureKeyVaultUrl', result.azure_key_vault_url)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32559
format_load_balancer_configuration
train
def format_load_balancer_configuration(result):
    '''
    Formats the LoadBalancerConfiguration object removing arguments that are empty
    '''
    from collections import OrderedDict
    output = OrderedDict()
    if result.private_ip_address is not None:
        # Private IPs get their own nested formatting pass.
        output['privateIpAddress'] = format_private_ip_address(result.private_ip_address)
    pairs = [('publicIpAddressResourceId', result.public_ip_address_resource_id),
             ('loadBalancerResourceId', result.load_balancer_resource_id),
             ('probePort', result.probe_port),
             ('sqlVirtualMachineInstances', result.sql_virtual_machine_instances)]
    for key, value in pairs:
        if value is not None:
            output[key] = value
    return output
python
{ "resource": "" }
q32560
format_private_ip_address
train
def format_private_ip_address(result):
    '''
    Formats the PrivateIPAddress object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('ipAddress', result.ip_address),
             ('subnetResourceId', result.subnet_resource_id)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32561
format_server_configuration_management_settings
train
def format_server_configuration_management_settings(result):
    '''
    Formats the ServerConfigurationsManagementSettings object removing arguments that are empty
    '''
    from collections import OrderedDict
    output = OrderedDict()
    # Each nested settings object has its own formatting pass.
    output['sqlConnectivityUpdateSettings'] = format_sql_connectivity_update_settings(
        result.sql_connectivity_update_settings)
    output['sqlWorkloadTypeUpdateSettings'] = format_sql_workload_type_update_settings(
        result.sql_workload_type_update_settings)
    output['sqlStorageUpdateSettings'] = format_sql_storage_update_settings(
        result.sql_storage_update_settings)
    output['additionalFeaturesServerConfigurations'] = format_additional_features_server_configurations(
        result.additional_features_server_configurations)
    return output
python
{ "resource": "" }
q32562
format_sql_connectivity_update_settings
train
def format_sql_connectivity_update_settings(result):
    '''
    Formats the SqlConnectivityUpdateSettings object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('connectivityType', result.connectivity_type),
             ('port', result.port),
             ('sqlAuthUpdateUserName', result.sql_auth_update_user_name)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32563
format_sql_storage_update_settings
train
def format_sql_storage_update_settings(result):
    '''
    Formats the SqlStorageUpdateSettings object removing arguments that are empty
    '''
    from collections import OrderedDict
    pairs = [('diskCount', result.disk_count),
             ('diskConfigurationType', result.disk_configuration_type)]
    # Keep only populated fields, preserving the display order above.
    return OrderedDict((key, value) for key, value in pairs if value is not None)
python
{ "resource": "" }
q32564
format_sql_workload_type_update_settings
train
def format_sql_workload_type_update_settings(result):
    '''
    Formats the SqlWorkloadTypeUpdateSettings object removing arguments that are empty
    '''
    from collections import OrderedDict
    output = OrderedDict()
    workload_type = result.sql_workload_type
    if workload_type is not None:
        output['sqlWorkloadType'] = workload_type
    return output
python
{ "resource": "" }
q32565
aks_upgrades_table_format
train
def aks_upgrades_table_format(result):
    """Format get-upgrades results as a summary for display with "-o table"."""
    # pylint: disable=import-error
    from jmespath import compile as compile_jmes, Options
    # This expression assumes there is one node pool, and that the master and nodes upgrade in lockstep.
    summary = compile_jmes("""{ name: name, resourceGroup: resourceGroup, masterVersion: controlPlaneProfile.kubernetesVersion || `unknown`, nodePoolVersion: agentPoolProfiles[0].kubernetesVersion || `unknown`, upgrades: controlPlaneProfile.upgrades || [`None available`] | sort_versions(@) | join(`, `, @) }""")
    # OrderedDict keeps the table headers in a predictable order.
    return summary.search(result, Options(dict_cls=OrderedDict, custom_functions=_custom_functions()))
python
{ "resource": "" }
q32566
aks_versions_table_format
train
def aks_versions_table_format(result):
    """Format get-versions results as a summary for display with "-o table"."""
    # pylint: disable=import-error
    from jmespath import compile as compile_jmes, Options
    query = compile_jmes("""orchestrators[].{ kubernetesVersion: orchestratorVersion, upgrades: upgrades[].orchestratorVersion || [`None available`] | sort_versions(@) | join(`, `, @) }""")
    # OrderedDict keeps the table headers in a predictable order.
    rows = query.search(result, Options(dict_cls=OrderedDict, custom_functions=_custom_functions()))
    # Newest Kubernetes versions first.
    return sorted(rows, key=lambda row: version_to_tuple(row.get('kubernetesVersion')), reverse=True)
python
{ "resource": "" }
q32567
process_alias_create_namespace
train
def process_alias_create_namespace(namespace):
    """
    Validate input arguments when the user invokes 'az alias create'.

    Args:
        namespace: argparse namespace object.
    """
    namespace = filter_alias_create_namespace(namespace)
    name, command = namespace.alias_name, namespace.alias_command
    _validate_alias_name(name)
    _validate_alias_command(command)
    _validate_alias_command_level(name, command)
    _validate_pos_args_syntax(name, command)
python
{ "resource": "" }
q32568
process_alias_import_namespace
train
def process_alias_import_namespace(namespace):
    """
    Validate input arguments when the user invokes 'az alias import'.

    Args:
        namespace: argparse namespace object.
    """
    source = namespace.alias_source
    if is_url(source):
        # Remote file: download first, then validate against the original URL.
        downloaded = retrieve_file_from_url(source)
        _validate_alias_file_content(downloaded, url=source)
    else:
        namespace.alias_source = os.path.abspath(source)
        _validate_alias_file_path(namespace.alias_source)
        _validate_alias_file_content(namespace.alias_source)
python
{ "resource": "" }
q32569
process_alias_export_namespace
train
def process_alias_export_namespace(namespace):
    """
    Validate input arguments when the user invokes 'az alias export'.

    Args:
        namespace: argparse namespace object.
    """
    export_path = os.path.abspath(namespace.export_path)
    namespace.export_path = export_path
    if os.path.isfile(export_path):
        raise CLIError(FILE_ALREADY_EXISTS_ERROR.format(export_path))
    parent_dir = os.path.dirname(export_path)
    if not os.path.isdir(parent_dir):
        os.makedirs(parent_dir)
    # Exporting to a directory: write the default alias file name inside it.
    if os.path.isdir(export_path):
        namespace.export_path = os.path.join(export_path, ALIAS_FILE_NAME)
python
{ "resource": "" }
q32570
_validate_alias_name
train
def _validate_alias_name(alias_name):
    """
    Reject empty alias names or names that do not start with a letter.

    Args:
        alias_name: The name of the alias to validate.
    """
    if not alias_name:
        raise CLIError(EMPTY_ALIAS_ERROR)
    if not re.match('^[a-zA-Z]', alias_name):
        raise CLIError(INVALID_STARTING_CHAR_ERROR.format(alias_name[0]))
python
{ "resource": "" }
q32571
_validate_alias_command
train
def _validate_alias_command(alias_command):
    """
    Validate the command an alias points to against the reserved command table.

    Args:
        alias_command: The command to validate.
    """
    if not alias_command:
        raise CLIError(EMPTY_ALIAS_ERROR)
    tokens = shlex.split(alias_command)
    # The CLI command part ends at the first token that is not a plain word,
    # or once the collision-check depth has been exceeded.
    boundary_index = len(tokens)
    for index, token in enumerate(tokens):
        if index > COLLISION_CHECK_LEVEL_DEPTH or not re.match('^[a-z]', token.lower()):
            boundary_index = index
            break
    command_to_validate = ' '.join(tokens[:boundary_index]).lower()
    # Accept the command as soon as any reserved command matches it.
    for reserved in azext_alias.cached_reserved_commands:
        if re.match(r'([a-z\-]*\s)*{}($|\s)'.format(command_to_validate), reserved):
            return
    _validate_positional_arguments(shlex.split(alias_command))
python
{ "resource": "" }
q32572
_validate_pos_args_syntax
train
def _validate_pos_args_syntax(alias_name, alias_command):
    """
    Ensure the same positional-argument placeholders appear in both the alias
    name and the command it expands to.

    Args:
        alias_name: The name of the alias to validate.
        alias_command: The command to validate.
    """
    args_in_alias = set(get_placeholders(alias_name))
    # Strip Jinja filters ('arg | upper') and attribute/function access
    # ('arg.split()[0]') to recover the bare placeholder names.
    args_in_command = {placeholder.split('|')[0].split('.')[0].strip()
                       for placeholder in get_placeholders(alias_command)}
    if args_in_alias != args_in_command:
        arg_diff = args_in_alias ^ args_in_command
        plural = len(arg_diff) != 1
        raise CLIError(INCONSISTENT_ARG_ERROR.format('s' if plural else '',
                                                     arg_diff,
                                                     'are' if plural else 'is'))
python
{ "resource": "" }
q32573
_validate_alias_command_level
train
def _validate_alias_command_level(alias, command):
    """
    Reject an alias whose reserved-command level collides with the target
    command's level (e.g. 'list' -> 'show' both sit at level 2, whereas
    'dns' -> 'network dns' is fine).

    Args:
        alias: The name of the alias.
        command: The command that the alias points to.
    """
    alias_collision_table = AliasManager.build_collision_table([alias])
    if not alias_collision_table:
        # The alias is not a reserved command, so it may point anywhere.
        return
    command_collision_table = AliasManager.build_collision_table([command])
    alias_levels = set(alias_collision_table.get(alias.split()[0], []))
    command_levels = set(command_collision_table.get(command.split()[0], []))
    if alias_levels & command_levels:
        raise CLIError(COMMAND_LVL_ERROR.format(alias, command))
python
{ "resource": "" }
q32574
_validate_alias_file_path
train
def _validate_alias_file_path(alias_file_path):
    """
    Reject alias file paths that do not exist or that point at a directory.

    Args:
        alias_file_path: The alias file path to import aliases from.
    """
    if not os.path.exists(alias_file_path):
        raise CLIError(ALIAS_FILE_NOT_FOUND_ERROR)
    if os.path.isdir(alias_file_path):
        raise CLIError(ALIAS_FILE_DIR_ERROR.format(alias_file_path))
python
{ "resource": "" }
q32575
_validate_alias_file_content
train
def _validate_alias_file_content(alias_file_path, url=''):
    """
    Validate every alias name / alias command pair found in the alias file.

    Args:
        alias_file_path: The alias file path to import aliases from.
        url: Original URL when the file was downloaded (used in error text).
    """
    alias_table = get_config_parser()
    try:
        alias_table.read(alias_file_path)
        for alias_name, alias_command in reduce_alias_table(alias_table):
            _validate_alias_name(alias_name)
            _validate_alias_command(alias_command)
            _validate_alias_command_level(alias_name, alias_command)
            _validate_pos_args_syntax(alias_name, alias_command)
    except Exception as exception:  # pylint: disable=broad-except
        # Report the URL (for remote imports) rather than the temp file path.
        message = CONFIG_PARSING_ERROR % AliasManager.process_exception_message(exception)
        message = message.replace(alias_file_path, url or alias_file_path)
        raise CLIError(message)
python
{ "resource": "" }
q32576
execute_query
train
def execute_query(cmd, client, application, analytics_query, start_time=None, end_time=None, offset='1h', resource_group_name=None):
    """Execute a query against the provided Application Insights application(s)."""
    from .vendored_sdks.applicationinsights.models import QueryBody
    targets = get_query_targets(cmd.cli_ctx, application, resource_group_name)
    timespan = get_timespan(cmd.cli_ctx, start_time, end_time, offset)
    # The first target is the primary app; any remaining ones ride along in the body.
    body = QueryBody(query=analytics_query, timespan=timespan, applications=targets[1:])
    return client.query.execute(targets[0], body)
python
{ "resource": "" }
q32577
add_new_lines
train
def add_new_lines(long_phrase, line_min=None, tolerance=TOLERANCE):
    """Wrap *long_phrase* with newlines so it fits the available width.

    line_min: target characters per line; defaults to roughly half the
        terminal width minus a margin.
    tolerance: how many extra characters a line may run past line_min in
        order to avoid splitting a word.
    Returns the wrapped text with a trailing newline, or None unchanged.
    """
    if line_min is None:
        line_min = math.floor(int(_get_window_columns()) / 2 - 15)
    if long_phrase is None:
        return long_phrase
    line_min = int(line_min)
    nl_loc = []  # character positions at which a newline will be inserted

    # Walk the phrase in line_min-sized steps, pushing each break point
    # forward (up to `tolerance` extra chars) so words are not split.
    skip = False
    index = 0
    if len(long_phrase) > line_min:
        for _ in range(int(math.floor(len(long_phrase) / line_min))):
            previous = index
            index += line_min
            if skip:
                # The previous break landed on a space that was skipped over;
                # compensate by advancing one extra character.
                index += 1
                skip = False
            while index < len(long_phrase) and \
                    not long_phrase[index].isspace() and \
                    index < tolerance + previous + line_min:
                index += 1
            if index < len(long_phrase):
                if long_phrase[index].isspace():
                    index += 1
                    skip = True
                nl_loc.append(index)

    # Insert the newlines; `counter` offsets for the string growing by one
    # character per insertion.
    counter = 0
    for loc in nl_loc:
        long_phrase = long_phrase[:loc + counter] + '\n' + long_phrase[loc + counter:]
        counter += 1
    return long_phrase + "\n"
python
{ "resource": "" }
q32578
GatherCommands.add_exit
train
def add_exit(self):
    """Register the 'quit' and 'exit' commands in every completion structure."""
    for exit_word in ('quit', 'exit'):
        self.completable.append(exit_word)
        self.descrip[exit_word] = "Exits the program"
        self.command_tree.add_child(CommandBranch(exit_word))
        self.command_param[exit_word] = ""
python
{ "resource": "" }
q32579
GatherCommands._gather_from_files
train
def _gather_from_files(self, config):
    """ gathers from the files in a way that is convenient to use

    Loads the cached help JSON and populates the completion structures:
    the command tree, completable words, descriptions, examples and
    per-parameter metadata. Long text is re-wrapped to the current
    terminal width.
    """
    command_file = config.get_help_files()
    cache_path = os.path.join(config.get_config_dir(), 'cache')
    cols = _get_window_columns()
    with open(os.path.join(cache_path, command_file), 'r') as help_file:
        data = json.load(help_file)

    # 'quit'/'exit' are always available regardless of the help cache.
    self.add_exit()
    commands = data.keys()

    for command in commands:
        # Walk/extend the command tree one word at a time.
        branch = self.command_tree
        for word in command.split():
            if word not in self.completable:
                self.completable.append(word)
            if not branch.has_child(word):
                branch.add_child(CommandBranch(word))
            branch = branch.get_child(word)

        description = data[command]['help']
        # Wrap to terminal width with a margin on both sides.
        self.descrip[command] = add_new_lines(description, line_min=int(cols) - 2 * TOLERANCE)

        if 'examples' in data[command]:
            # Each example is a [description, command] pair; wrap both.
            examples = []
            for example in data[command]['examples']:
                examples.append([
                    add_new_lines(example[0], line_min=int(cols) - 2 * TOLERANCE),
                    add_new_lines(example[1], line_min=int(cols) - 2 * TOLERANCE)])
            self.command_example[command] = examples

        command_params = data[command].get('parameters', {})
        for param in command_params:
            # Hidden parameters are marked with ==SUPPRESS== in their help.
            if '==SUPPRESS==' not in command_params[param]['help']:
                param_aliases = set()
                for par in command_params[param]['name']:
                    param_aliases.add(par)
                    self.param_descript[command + " " + par] = \
                        add_new_lines(
                            command_params[param]['required'] + " " + command_params[param]['help'],
                            line_min=int(cols) - 2 * TOLERANCE)
                    if par not in self.completable_param:
                        self.completable_param.append(par)

                # Map every alias of this parameter to the full alias set
                # (e.g. both '-n' and '--name' point at {'-n', '--name'}).
                param_doubles = self.command_param_info.get(command, {})
                for alias in param_aliases:
                    param_doubles[alias] = param_aliases
                self.command_param_info[command] = param_doubles
python
{ "resource": "" }
q32580
GatherCommands.get_all_subcommands
train
def get_all_subcommands(self):
    """ returns all the subcommands

    Collects every word appearing in known command descriptions that is
    not a top-level command, de-duplicated, in first-seen order.
    """
    subcommands = []
    for command in self.descrip:
        for word in command.split():
            for kid in self.command_tree.children:
                # NOTE(review): `word` is a str and `kid` is a tree node;
                # this comparison presumably relies on the node type's
                # equality with its name string (intent looks like
                # `word != kid.data`) — confirm against CommandBranch.
                if word != kid and word not in subcommands:
                    subcommands.append(word)
    return subcommands
python
{ "resource": "" }
q32581
create_alias
train
def create_alias(alias_name, alias_command):
    """
    Create an alias.

    Args:
        alias_name: The name of the alias.
        alias_command: The command that the alias points to.
    """
    name = alias_name.strip()
    command = alias_command.strip()
    table = get_alias_table()
    # Reuse an existing section so re-creating an alias overwrites it.
    if name not in table.sections():
        table.add_section(name)
    table.set(name, 'command', command)
    _commit_change(table)
python
{ "resource": "" }
q32582
export_aliases
train
def export_aliases(export_path=None, exclusions=None):
    """
    Export all registered aliases to a given path, as an INI configuration file.

    Args:
        export_path: The path of the alias configuration file to export to.
        exclusions: Space-separated aliases excluded from export.
    """
    target_path = export_path if export_path else os.path.abspath(ALIAS_FILE_NAME)
    alias_table = get_alias_table()
    # Drop each excluded alias; an unknown name is an error.
    for excluded in exclusions or []:
        if excluded not in alias_table.sections():
            raise CLIError(ALIAS_NOT_FOUND_ERROR.format(excluded))
        alias_table.remove_section(excluded)
    # post_commit=False: don't rewrite the hash/collision caches for an export.
    _commit_change(alias_table, export_path=target_path, post_commit=False)
    logger.warning(POST_EXPORT_ALIAS_MSG, target_path)
python
{ "resource": "" }
q32583
import_aliases
train
def import_aliases(alias_source):
    """
    Import aliases from a file or an URL.

    Args:
        alias_source: The source of the alias. It can be a filepath or an URL.
    """
    alias_table = get_alias_table()
    from_url = is_url(alias_source)
    if from_url:
        # Download to a temporary local file first.
        alias_source = retrieve_file_from_url(alias_source)
    alias_table.read(alias_source)
    if from_url:
        # Clean up the downloaded copy once it has been read.
        os.remove(alias_source)
    _commit_change(alias_table)
python
{ "resource": "" }
q32584
list_alias
train
def list_alias():
    """
    List all registered aliases.

    Returns:
        An array of dictionary containing the alias and the command that it points to.
    """
    table = get_alias_table()
    # Skip malformed sections that have no 'command' option; collapse
    # runs of whitespace in the stored command.
    return [
        {
            'alias': section,
            'command': ' '.join(table.get(section, 'command').split())
        }
        for section in table.sections()
        if table.has_option(section, 'command')
    ]
python
{ "resource": "" }
q32585
remove_alias
train
def remove_alias(alias_names):
    """
    Remove one or more aliases.

    Args:
        alias_names: The names of the aliases to be removed.
    """
    table = get_alias_table()
    for name in alias_names:
        if name not in table.sections():
            # Abort before committing; nothing is persisted on error.
            raise CLIError(ALIAS_NOT_FOUND_ERROR.format(name))
        table.remove_section(name)
    _commit_change(table)
python
{ "resource": "" }
q32586
_commit_change
train
def _commit_change(alias_table, export_path=None, post_commit=True):
    """
    Record changes to the alias table. Also write new alias config hash and collided alias, if any.

    Args:
        alias_table: The alias table to commit.
        export_path: The path to export the aliases to. Default: GLOBAL_ALIAS_PATH.
        post_commit: True if we want to perform some extra actions after writing alias to file.
    """
    target = export_path or GLOBAL_ALIAS_PATH
    # w+ so the freshly written content can be read back for hashing.
    with open(target, 'w+') as alias_config_file:
        alias_table.write(alias_config_file)
        if post_commit:
            # Hash exactly what was written so later runs can detect edits.
            alias_config_file.seek(0)
            content_hash = hashlib.sha1(alias_config_file.read().encode('utf-8')).hexdigest()
            AliasManager.write_alias_config_hash(content_hash)
            collided = AliasManager.build_collision_table(alias_table.sections())
            AliasManager.write_collided_alias(collided)
            build_tab_completion_table(alias_table)
python
{ "resource": "" }
q32587
applicationinsights_mgmt_plane_client
train
def applicationinsights_mgmt_plane_client(cli_ctx, _, subscription=None):
    """Initialize Log Analytics mgmt client for use with CLI.

    Prefers an explicitly supplied subscription; otherwise falls back to
    the subscription from the current login.
    """
    from .vendored_sdks.mgmt_applicationinsights import ApplicationInsightsManagementClient
    from azure.cli.core._profile import Profile
    profile = Profile(cli_ctx=cli_ctx)
    if subscription:
        # Use subscription from resource_id where possible.
        cred, _, _ = profile.get_login_credentials(subscription_id=subscription)
        return ApplicationInsightsManagementClient(cred, subscription)
    cred, sub_id, _ = profile.get_login_credentials()
    return ApplicationInsightsManagementClient(cred, sub_id)
python
{ "resource": "" }
q32588
alias_event_handler
train
def alias_event_handler(_, **kwargs):
    """ An event handler for alias transformation when EVENT_INVOKER_PRE_TRUNCATE_CMD_TBL event is invoked. """
    try:
        telemetry.start()

        start = timeit.default_timer()
        args = kwargs.get('args')
        manager = AliasManager(**kwargs)
        # Mutate the caller's list in place so the invoker sees the expansion.
        args[:] = manager.transform(args)

        if is_alias_command(['create', 'import'], args):
            # Refresh the reserved-command cache when aliases may change.
            loader = kwargs.get('load_cmd_tbl_func', lambda _: {})
            cache_reserved_commands(loader)

        elapsed_ms = (timeit.default_timer() - start) * 1000
        logger.debug(DEBUG_MSG_WITH_TIMING, args, elapsed_ms)
        telemetry.set_execution_time(round(elapsed_ms, 2))
    except Exception as client_exception:  # pylint: disable=broad-except
        telemetry.set_exception(client_exception)
        raise
    finally:
        telemetry.conclude()
python
{ "resource": "" }
q32589
enable_aliases_autocomplete
train
def enable_aliases_autocomplete(_, **kwargs):
    """
    Enable aliases autocomplete by injecting aliases into Azure CLI tab completion list.
    """
    # Completion candidates collected by argcomplete; extended in place.
    external_completions = kwargs.get('external_completions', [])
    # The partial word currently being completed.
    prefix = kwargs.get('cword_prefix', [])
    # Words already typed on the command line.
    cur_commands = kwargs.get('comp_words', [])
    alias_table = get_alias_table()
    # Transform aliases if they are in current commands,
    # so parser can get the correct subparser when chaining aliases
    _transform_cur_commands(cur_commands, alias_table=alias_table)

    for alias, alias_command in filter_aliases(alias_table):
        # Offer the alias only when it matches the prefix, is not already
        # fully typed, and its expansion is valid at this command position.
        if alias.startswith(prefix) and alias.strip() != prefix and _is_autocomplete_valid(cur_commands, alias_command):
            # Only autocomplete the first word because alias is space-delimited
            external_completions.append(alias)

    # Append spaces if necessary (https://github.com/kislyuk/argcomplete/blob/master/argcomplete/__init__.py#L552-L559)
    prequote = kwargs.get('cword_prequote', '')
    continuation_chars = "=/:"
    if len(external_completions) == 1 and external_completions[0][-1] not in continuation_chars and not prequote:
        external_completions[0] += ' '
python
{ "resource": "" }
q32590
transform_cur_commands_interactive
train
def transform_cur_commands_interactive(_, **kwargs):
    """ Transform any aliases in current commands in interactive into their respective commands. """
    payload = kwargs.get('event_payload', {})
    # 'text' holds what has been typed in the interactive shell so far.
    words = payload.get('text', '').split(' ')
    _transform_cur_commands(words)
    payload.update({'text': ' '.join(words)})
python
{ "resource": "" }
q32591
enable_aliases_autocomplete_interactive
train
def enable_aliases_autocomplete_interactive(_, **kwargs):
    """ Enable aliases autocomplete on interactive mode by injecting aliases in the command tree. """
    tree = kwargs.get('subtree', None)
    # Bail out quietly when no usable command tree was supplied.
    if not tree or not hasattr(tree, 'children'):
        return
    for alias, command in filter_aliases(get_alias_table()):
        # Only autocomplete the first word because alias is space-delimited
        if tree.in_tree(command.split()):
            tree.add_child(CommandBranch(alias))
python
{ "resource": "" }
q32592
_is_autocomplete_valid
train
def _is_autocomplete_valid(cur_commands, alias_command):
    """
    Determine whether autocomplete can be performed at the current state.

    Args:
        cur_commands: The current commands typed in the console.
        alias_command: The alias command.

    Returns:
        True if autocomplete can be performed.
    """
    parent_command = ' '.join(cur_commands[1:])
    # The open() must be inside the try: if the tab-completion table is
    # missing or unreadable, autocomplete should silently degrade to
    # "not valid" rather than crash the shell's completion hook.
    try:
        with open(GLOBAL_ALIAS_TAB_COMP_TABLE_PATH, 'r') as tab_completion_table_file:
            tab_completion_table = json.loads(tab_completion_table_file.read())
        return alias_command in tab_completion_table and parent_command in tab_completion_table[alias_command]
    except Exception:  # pylint: disable=broad-except
        return False
python
{ "resource": "" }
q32593
_transform_cur_commands
train
def _transform_cur_commands(cur_commands, alias_table=None): """ Transform any aliases in cur_commands into their respective commands. Args: alias_table: The alias table. cur_commands: current commands typed in the console. """ transformed = [] alias_table = alias_table if alias_table else get_alias_table() for cmd in cur_commands: if cmd in alias_table.sections() and alias_table.has_option(cmd, 'command'): transformed += alias_table.get(cmd, 'command').split() else: transformed.append(cmd) cur_commands[:] = transformed
python
{ "resource": "" }
q32594
help_text
train
def help_text(values):
    """ reformats the help text """
    lines = []
    for key in values:
        # The original padded with ' '.join of empty strings, which yields
        # one space fewer than (GESTURE_LENGTH - len(key)); keep that exact
        # width. NOTE(review): possibly an off-by-one in the padding —
        # confirm intended column alignment before changing.
        padding = ' ' * max(GESTURE_LENGTH - len(key) - 1, 0)
        lines.append(key + padding + ': ' + values[key] + '\n')
    return ''.join(lines)
python
{ "resource": "" }
q32595
ask_user_for_telemetry
train
def ask_user_for_telemetry():
    """ asks the user for if we can collect telemetry """
    answer = " "
    # Keep prompting until an explicit yes/no; empty input defaults to yes.
    while answer.lower() not in ('yes', 'no'):
        answer = prompt(u'\nDo you agree to sending telemetry (yes/no)? Default answer is yes: ')
        if answer == '':
            answer = 'yes'
    return answer
python
{ "resource": "" }
q32596
Configuration.firsttime
train
def firsttime(self):
    """ sets it as already done"""
    # Mark the first-run flag so this setup path is not entered again.
    self.config.set('DEFAULT', 'firsttime', 'no')
    if self.cli_config.getboolean('core', 'collect_telemetry', fallback=False):
        # Telemetry already opted in at the CLI level: just show the statement.
        print(PRIVACY_STATEMENT)
    else:
        # Otherwise ask the user and persist their yes/no answer.
        self.cli_config.set_value('core', 'collect_telemetry', ask_user_for_telemetry())
    self.update()
python
{ "resource": "" }
q32597
Configuration.set_val
train
def set_val(self, direct, section, val):
    """ set the config values

    Writes val under (direct, section) and persists the file; a None value
    is ignored entirely.
    """
    if val is None:
        return
    self.config.set(direct, section, val)
    self.update()
python
{ "resource": "" }
q32598
Configuration.update
train
def update(self):
    """ updates the configuration settings """
    # Rewrite the whole config file with the in-memory parser state.
    config_path = os.path.join(self.config_dir, CONFIG_FILE_NAME)
    with open(config_path, 'w') as config_file:
        self.config.write(config_file)
python
{ "resource": "" }
q32599
execute_query
train
def execute_query(client, workspace, analytics_query, timespan=None, workspaces=None):
    """Executes a query against the provided Log Analytics workspace."""
    from .vendored_sdks.loganalytics.models import QueryBody
    body = QueryBody(
        query=analytics_query,
        timespan=timespan,
        workspaces=workspaces)
    return client.query(workspace, body)
python
{ "resource": "" }