Dataset schema (field, type, size):

  query             string   lengths 9 to 3.4k
  document          string   lengths 9 to 87.4k
  metadata          dict
  negatives         list     lengths 4 to 101
  negative_scores   list     lengths 4 to 101
  document_score    string   lengths 3 to 10
  document_rank     string   102 classes
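For orientation, here is a minimal sketch of loading a dataset with this schema through the Hugging Face datasets library; the dataset path below is a placeholder, since the source does not name one.

from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train")  # placeholder path
row = ds[0]
print(row["query"])                              # natural-language docstring query
print(len(row["negatives"]))                     # 4 to 101 hard-negative code snippets
print(row["document_score"], row["document_rank"])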
Deletes the specified policy. The deletion typically takes effect within 10 seconds.
def delete_policy(self, policy_id, **kwargs):
    resource_path = "/policies/{policyId}"
    method = "DELETE"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "if_match"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "delete_policy got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "policyId": policy_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "if-match": kwargs.get("if_match", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params)
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params)
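For context, a minimal usage sketch of this positive document follows. It assumes the method lives on an OCI-style identity client, as the resource path and base_client plumbing suggest; the config file and policy OCID shown are placeholders, not values from the dataset.

# Hypothetical usage sketch; the OCID is a placeholder.
import oci

config = oci.config.from_file()  # reads ~/.oci/config by default
identity = oci.identity.IdentityClient(config)

# Optimistic concurrency: pass the resource's last-seen ETag via if_match
# so the delete fails if the policy changed since it was fetched.
identity.delete_policy("ocid1.policy.oc1..example", if_match='"etag-value"')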
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def policy_delete(request, policy_id):\n neutronclient(request).delete_qos_policy(policy_id)", "def delete(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n # Actually run vault\n logging.info(\"Deleting the policy: %s\", address)\n self.vault.requests_request(\"DELETE\", address, headers=self.vault.token_header)", "def delete_policy(policy_id):\n policy = PolicyService.get_policy_by_id(policy_id)\n if policy is None:\n abort(404)\n\n policy.delete()\n\n return {}", "def delete_policy(self, policy_ref: str) -> None:\n self.batch_write(\n [self.batch_detach_policy(policy_ref, obj_ref) for obj_ref in self.list_policy_attachments(\n policy_ref,\n ConsistencyLevel=ConsistencyLevel.SERIALIZABLE.name)])\n self.batch_write(\n [self.batch_detach_object(parent_ref, link_name) for parent_ref, link_name in self.list_object_parents(\n policy_ref,\n ConsistencyLevel=ConsistencyLevel.SERIALIZABLE.name)])\n retry(**cd_read_retry_parameters)(cd_client.delete_object)(\n DirectoryArn=self._dir_arn,\n ObjectReference={'Selector': policy_ref})", "def delete_policy(self, policy_name):\r\n return self.connection.delete_lb_policy(self.name, policy_name)", "def delete_policy(policystore_url, policy_credentials, verbose):\n\n if verbose:\n logging.info('Deleting policy')\n pprint.pprint(policy_credentials)\n\n delete_url = policystore_url + POLICYSTORE_PREFIX + 'DeleteEntitlementPolicy'\n\n r = requests.post(delete_url, headers=headers(), json=policy_credentials)\n if r.status_code != 200:\n logging.error(f'ERROR: Unexpected response: {r.status_code}')\n pprint.pprint(r.json())\n sys.exit('Failed to delete policy')\n\n logging.info('SUCCESS: Deleted policy')", "def rbac_policy_delete(request, policy_id):\n neutronclient(request).delete_rbac_policy(policy_id)", "def delete_ikepolicy(self, ikepolicy):\r\n return self.delete(self.ikepolicy_path % (ikepolicy))", "def Delete(self, fp_id=None, batch_mode=False, only_generate_request=False):\n\n if batch_mode:\n requests = [self._MakeDeleteRequestTuple(fp_id=fp_id)]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.Delete(self._MakeDeleteRequestTuple(fp_id=fp_id)[2])\n operation_poller = DeletePoller(self._service, self.ref)\n return self.WaitOperation(\n op_res,\n operation_poller=operation_poller,\n message='Deleting the organization firewall policy.')", "def delete_firewall_policy(self, firewall_policy):\r\n return self.delete(self.firewall_policy_path % (firewall_policy))", "def delete_metric_policy(ContainerName=None):\n pass", "def delete_group_policy(self, group_name, policy_name):\r\n params = {'GroupName' : group_name,\r\n 'PolicyName' : policy_name}\r\n return self.get_response('DeleteGroupPolicy', params, verb='POST')", "def test_delete_namespaced_policy(self):\n pass", "def delete_ipsecpolicy(self, ipsecpolicy):\r\n return self.delete(self.ipsecpolicy_path % (ipsecpolicy))", "def delete_bucket_policy(Bucket=None):\n pass", "def deletion_policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"deletion_policy\")", "def deletion_policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"deletion_policy\")", "def delete_policies():\n if PoliciesOutput.POLICIES_EVENT not in ctx.instance.runtime_properties:\n return\n\n service_component_name = ctx.instance.runtime_properties.get(\n PoliciesOutput.SERVICE_COMPONENT_NAME\n )\n if not service_component_name:\n 
ctx.logger.warn(\"failed to find service_component_name to delete_policies in consul-kv\")\n return\n\n delete_policies = [\n PoliciesOutput._gen_txn_operation(\n PoliciesOutput.OPERATION_DELETE_FOLDER, service_component_name\n )\n ]\n PoliciesOutput._run_transaction(\"delete_policies\", delete_policies)", "def delete_retention_policy(self) -> pulumi.Output[Optional['outputs.DeleteRetentionPolicyResponse']]:\n return pulumi.get(self, \"delete_retention_policy\")", "def pre_network_policy_delete(self, resource_id):\n pass", "def post_network_policy_delete(self, resource_id, resource_dict):\n pass", "def policy_delete_all(session, domain, path=\"/\"):\n client = session.client('iam')\n resp = client.list_policies(Scope='Local', PathPrefix=path)\n\n prefix = domain.replace('.', '-')\n for policy in resp.get('Policies', []):\n if policy['PolicyName'].startswith(prefix):\n ARN = policy['Arn']\n if policy['AttachmentCount'] > 0:\n # cannot delete a policy if it is still in use\n attached = client.list_entities_for_policy(PolicyArn=ARN)\n for group in attached.get('PolicyGroups', []):\n client.detach_group_policy(GroupName=group['GroupName'], PolicyArn=ARN)\n for user in attached.get('PolicyUsers', []):\n client.detach_user_policy(UserName=user['UserName'], PolicyArn=ARN)\n for role in attached.get('PolicyRoles', []):\n client.detach_role_policy(RoleName=role['RoleName'], PolicyArn=ARN)\n client.delete_policy(PolicyArn=ARN)", "def detach_policy(\n role,\n policy,\n profile=None,\n access_key_id=None,\n access_key_secret=None):\n aws_profile = utils.get_profile(profile, access_key_id, access_key_secret)\n\n try:\n role_jobs.detach(aws_profile, role, policy)\n except PermissionDenied:\n msg = \"You don't have permission to detach policies.\"\n raise click.ClickException(msg)\n except (MissingKey, Non200Response) as error:\n raise click.ClickException(str(error))\n except AwsError as error:\n raise click.ClickException(str(error))\n except (ResourceDoesNotExist, ResourceNotDeleted) as error:\n raise click.ClickException(str(error))", "def delete_user_policy(self, user_name, policy_name):\r\n params = {'UserName' : user_name,\r\n 'PolicyName' : policy_name}\r\n return self.get_response('DeleteUserPolicy', params, verb='POST')", "def deletion_policy(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"deletion_policy\")", "def delete(self, schedule_policy_name):\n\n if schedule_policy_name and not isinstance(schedule_policy_name, basestring):\n raise SDKException('Schedules', '102')\n\n schedule_policy_name = schedule_policy_name.lower()\n schedule_policy_id = self.all_schedule_policies.get(schedule_policy_name)\n\n if schedule_policy_id:\n request_json = {\n \"TMMsg_TaskOperationReq\":\n {\n \"opType\": 3,\n \"taskEntities\":\n [\n {\n \"_type_\": 69,\n \"taskId\": schedule_policy_id\n }\n ]\n }\n }\n\n modify_schedule = self._commcell_object._services['EXECUTE_QCOMMAND']\n\n flag, response = self._commcell_object._cvpysdk_object.make_request(\n 'POST', modify_schedule, request_json)\n\n if flag:\n if response.json():\n if 'errorCode' in response.json():\n if response.json()['errorCode'] == 0:\n self.refresh()\n else:\n raise SDKException(\n 'Schedules', '102', response.json()['errorMessage'])\n else:\n raise SDKException('Response', '102')\n else:\n response_string = self._commcell_object._update_response_(\n response.text)\n exception_message = 'Failed to delete schedule policy\\nError: \"{0}\"'.format(\n response_string)\n\n raise SDKException('Schedules', '102', exception_message)\n 
else:\n raise SDKException(\n 'Schedules', '102', 'No schedule policy exists for: {0}'.format(\n schedule_policy_id)\n )", "def remove_policy(self, sec, ptype, rule):\n line = self.convert_to_item(ptype, rule)\n\n _id = line['id']['S']\n\n self.dynamodb.delete_item(\n Key={\n 'id': {\n 'S': _id,\n }\n },\n TableName=self.table_name,\n )\n\n return True", "def delete(nitro, policypatset):\r\n __policypatset = NSPatset()\r\n __policypatset.set_name(policypatset.get_name())\r\n return __policypatset.delete_resource(nitro)", "def delete_unused(self, mode):\n self.policies = self.list_policies()\n for policy in self.policies['Policies']:\n if policy['AttachmentCount'] < 1:\n self.policy_versions = self.con.list_policy_versions(\n PolicyArn=policy['Arn']\n )\n for version in self.policy_versions['Versions']:\n if not version['IsDefaultVersion']:\n if not mode:\n self.con.delete_policy_version(\n PolicyArn=policy['Arn'],\n VersionId=version['VersionId']\n )\n print policy['Arn'] + \" - \" + version['VersionId'] + \\\n \" DELETED\"\n if not mode:\n self.con.delete_policy(\n PolicyArn=policy['Arn']\n )\n print policy['PolicyName'] + \" DELETED\"", "def delete_container_policy(ContainerName=None):\n pass", "def delete(self, consumer_key, rid):\n policy = Policy.query.filter(\n Policy.consumer_key == consumer_key,\n Policy.rid == rid\n ).first_or_404()\n\n policy.remove()\n return '', 204", "def delete(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_l7_policy,\n l7_policy_id,\n load_balancer_getter=l7_policy_get_load_balancer_id,\n resource_id=l7_policy_id)", "def delete_qos_policy(self, name_or_id):\n if not self._has_neutron_extension('qos'):\n raise exc.OpenStackCloudUnavailableExtension(\n 'QoS extension is not available on target cloud'\n )\n policy = self.network.find_qos_policy(name_or_id)\n if not policy:\n self.log.debug(\"QoS policy %s not found for deleting\", name_or_id)\n return False\n\n self.network.delete_qos_policy(policy)\n\n return True", "def qos_policy_group_delete(self, policy_group):\n return self.request( \"qos-policy-group-delete\", {\n 'policy_group': [ policy_group, 'policy-group', [ basestring, 'None' ], False ],\n }, {\n } )", "def test_delete_policy_during_execution(self):\n execute_policy_up = self.autoscale_client.execute_policy(\n self.group.id,\n self.policy_up['policy_id'])\n delete_policy = self.autoscale_client.delete_scaling_policy(\n self.group.id,\n self.policy_up['policy_id'])\n self.assertEquals(\n delete_policy.status_code, 204,\n msg='Deleting the scaling policy while its executing failed {0}'\n ' for group {1}'\n .format(delete_policy.status_code, self.group.id))\n self.assertEquals(\n execute_policy_up.status_code, 202,\n msg='Scale up policy failed for group {0} cause policy was deleted'\n ' during execution: {1}'\n .format(self.group.id, execute_policy_up.status_code))\n self.check_for_expected_number_of_building_servers(\n group_id=self.group.id,\n expected_servers=self.group.groupConfiguration.minEntities +\n self.policy_up_data['change'])", "def delete_retention_policy(self) -> Optional[pulumi.Input['DeleteRetentionPolicyArgs']]:\n return pulumi.get(self, \"delete_retention_policy\")", "def detach(profile, role, policy):\n # Make sure the role exists.\n if not exists(profile, role):\n msg = \"No role '\" + str(role) + \"'.\"\n raise ResourceDoesNotExist(msg)\n\n # Make sure the policy exists.\n policy_data = policy_jobs.fetch_by_name(profile, policy)\n if not policy_data:\n msg 
= \"No policy '\" + str(policy) + \"'.\"\n raise ResourceDoesNotExist(msg)\n\n # Get the policy's ARN.\n policy_arn = policy_data[0][\"Arn\"]\n\n # Detach the policy.\n params = {}\n params[\"profile\"] = profile\n params[\"role\"] = role\n params[\"policy\"] = policy_arn\n utils.do_request(role_lib, \"detach_policy\", params)", "def delete(self, params=None):\n self.logger.debug('Deleting %s with parameters: %s'\n % (self.type_name, params))\n return self.client.delete_load_balancer_policy(**params)", "def remove_policy(self, sec, ptype, rule):\r\n deleted_count = self._delete_policy_lines(ptype, rule)\r\n return deleted_count > 0", "def test_delete_success(self, mock_delete):\n\n self.policies.delete(id=self.policy_single_response['policy']['id'])\n\n mock_delete.assert_called_once_with(\n url='https://api.newrelic.com/v2/alerts_policies/{0}.json'.format(\n self.policy_single_response['policy']['id']\n ),\n headers=self.policies.headers\n )", "def test_delete_cluster_policy(self):\n pass", "def delete_alert_policy(\n self,\n name: str,\n retry: Retry | _MethodDefault = DEFAULT,\n timeout: float | None = None,\n metadata: Sequence[tuple[str, str]] = (),\n ) -> None:\n policy_client = self._get_policy_client()\n try:\n policy_client.delete_alert_policy(\n request={\"name\": name}, retry=retry, timeout=timeout, metadata=metadata or ()\n )\n except HttpError as err:\n raise AirflowException(f\"Delete alerting policy failed. Error was {err.content}\")", "def dscp_marking_rule_delete(request, policy_id, rule_id):\n\n neutronclient(request).delete_dscp_marking_rule(rule_id, policy_id)", "def delete_lifecycle_policy(ContainerName=None):\n pass", "def minimum_packet_rate_rule_delete(request, policy_id, rule_id):\n neutronclient(request).delete_minimum_packet_rate_rule(rule_id, policy_id)", "def remove_policy(\n self,\n policy_id: PolicyID = DEFAULT_POLICY_ID,\n *,\n policy_mapping_fn: Optional[Callable[[AgentID], PolicyID]] = None,\n policies_to_train: Optional[\n Union[\n Container[PolicyID],\n Callable[[PolicyID, Optional[SampleBatchType]], bool],\n ]\n ] = None,\n evaluation_workers: bool = True,\n ) -> None:\n\n def fn(worker):\n worker.remove_policy(\n policy_id=policy_id,\n policy_mapping_fn=policy_mapping_fn,\n policies_to_train=policies_to_train,\n )\n\n self.workers.foreach_worker(fn, local_worker=True, healthy_only=True)\n if evaluation_workers and self.evaluation_workers is not None:\n self.evaluation_workers.foreach_worker(\n fn,\n local_worker=True,\n healthy_only=True,\n )", "def delete_namespaced_policy(self, body, namespace, name, **kwargs):\n\n all_params = ['body', 'namespace', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_namespaced_policy\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `delete_namespaced_policy`\")\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `delete_namespaced_policy`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when 
calling `delete_namespaced_policy`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/policies/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def Delete(self,\n priority=None,\n firewall_policy_id=None,\n batch_mode=False,\n only_generate_request=False):\n\n if batch_mode:\n requests = [\n self._MakeDeleteRuleRequestTuple(\n priority=priority, firewall_policy=firewall_policy_id)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.RemoveRule(\n self._MakeDeleteRuleRequestTuple(\n priority=priority, firewall_policy=firewall_policy_id)[2])\n return self.WaitOperation(\n op_res,\n message='Deleting a rule from the organization firewall policy.')", "def __cleanup(self):\n \n self.logger.debug(\"Deleting assignment and definition of policy\")\n self.interactor.delete_policy_assignment(self.assignment_id)\n self.interactor.delete_policy_definition(self.policy_id)", "def delete_lb_health_check_policy(self, policy_id): \n params = {'command':'deleteLBHealthCheckPolicy',\n 'id':policy_id} \n\n try:\n response = self.send_request(params)\n res = json.loads(response)\n clsk_job_id = res['deletelbhealthcheckpolicyresponse']['jobid']\n self.logger.debug('Start job - deleteLBHealthCheckPolicy: %s' % res)\n return clsk_job_id\n except KeyError as ex:\n raise ClskError('Error parsing json data: %s' % ex)\n except ApiError as ex:\n raise ClskError(ex)", "def DeleteAssociation(self,\n firewall_policy_id=None,\n batch_mode=False,\n only_generate_request=False):\n\n if batch_mode:\n requests = [self._MakeDeleteAssociationRequestTuple(firewall_policy_id)]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.RemoveAssociation(\n self._MakeDeleteAssociationRequestTuple(firewall_policy_id)[2])\n return self.WaitOperation(\n op_res,\n message='Deleting the association for the organization firewall policy.'\n )", "def bandwidth_limit_rule_delete(request, policy_id, rule_id):\n neutronclient(request).delete_bandwidth_limit_rule(rule_id, policy_id)", "def dynamic_vnic_conn_policy_delete(handle, name, parent_dn=\"org-root\"):\n mo = dynamic_vnic_conn_policy_get(handle, name, parent_dn)\n if not mo:\n raise UcscOperationError(\"dynamic_vnic_conn_policy_delete\",\n \"Dynamic vNIC Connectivity Policy \"\n \"does not exist\")\n handle.remove_mo(mo)\n handle.commit()", "def 
test_delete_namespaced_policy_binding(self):\n pass", "def deleteOrDelay(self):\n self.delete()", "def delete_cors_policy(ContainerName=None):\n pass", "def minimum_bandwidth_rule_delete(request, policy_id, rule_id):\n\n neutronclient(request).delete_minimum_bandwidth_rule(rule_id, policy_id)", "def deleteMergePolicy(self, mergePolicyId: str = None) -> str:\n if mergePolicyId is None:\n raise ValueError(\"Require a mergePolicyId\")\n if self.loggingEnabled:\n self.logger.debug(f\"Starting deleteMergePolicy\")\n path = f\"/config/mergePolicies/{mergePolicyId}\"\n res = self.connector.deleteData(self.endpoint + path)\n return res", "def test_delete_bios_policy(self):\n pass", "def cleanup_policy_create(ctx: click.Context, **kwargs):\n # TODO: use a click type for this check?\n criteria_keys = {'downloaded', 'updated', 'regex'}\n util.move_to_key(kwargs, 'criteria', criteria_keys)\n\n util.rename_keys(kwargs['criteria'], {\n 'downloaded': 'lastDownloaded',\n 'updated': 'lastBlobUpdated',\n })\n\n subcommand_cleanup_policy.cmd_create(ctx.obj, **kwargs)", "def container_delete_retention_policy(self) -> pulumi.Output[Optional['outputs.DeleteRetentionPolicyResponse']]:\n return pulumi.get(self, \"container_delete_retention_policy\")", "def test_delete_hyperflex_proxy_setting_policy(self):\n pass", "def delete_app_policy_group(self, id):\n resp, body = self.delete(self.get_uri(self.resource, id))\n self.expected_success(http_client.NO_CONTENT, resp.status)\n return rest_client.ResponseBody(resp, body)", "def delete_lb_stickiness_policy(self, policy_id): \n params = {'command':'deleteLBStickinessPolicy',\n 'id':policy_id} \n\n try:\n response = self.send_request(params)\n res = json.loads(response)\n clsk_job_id = res['deleteLBstickinessrruleresponse']['jobid']\n self.logger.debug('Start job - deleteLBStickinessPolicy: %s' % res)\n return clsk_job_id\n except KeyError as ex:\n raise ClskError('Error parsing json data: %s' % ex)\n except ApiError as ex:\n raise ClskError(ex)", "def test_delete_hyperflex_cluster_storage_policy(self):\n pass", "def delete(nitro, csvserver_responderpolicy_binding):\n __csvserver_responderpolicy_binding = NSCSVServerResponderPolicyBinding()\n __csvserver_responderpolicy_binding.set_name(csvserver_responderpolicy_binding.get_name())\n __csvserver_responderpolicy_binding.set_policyname(csvserver_responderpolicy_binding.get_policyname())\n __csvserver_responderpolicy_binding.set_priority(csvserver_responderpolicy_binding.get_priority())\n __csvserver_responderpolicy_binding.set_bindpoint(csvserver_responderpolicy_binding.get_bindpoint())\n nsresponse = __csvserver_responderpolicy_binding.delete_resource(nitro)\n return nsresponse", "def delete_resource(\n self,\n namespace: str = None,\n propagation_policy: str = \"Foreground\",\n grace_period_seconds: int = 10,\n ):\n names = [\n \"delete_namespaced_csistorage_capacity\",\n \"delete_csistorage_capacity\",\n ]\n\n body = client.V1DeleteOptions(\n propagation_policy=propagation_policy,\n grace_period_seconds=grace_period_seconds,\n )\n\n _kube_api.execute(\n action=\"delete\",\n resource=self,\n names=names,\n namespace=namespace,\n api_client=None,\n api_args={\"name\": self.metadata.name, \"body\": body},\n )", "def delete_resource(\n self,\n namespace: str = None,\n propagation_policy: str = \"Foreground\",\n grace_period_seconds: int = 10,\n ):\n names = [\n \"delete_namespaced_volume_attachment\",\n \"delete_volume_attachment\",\n ]\n\n body = client.V1DeleteOptions(\n propagation_policy=propagation_policy,\n 
grace_period_seconds=grace_period_seconds,\n )\n\n _kube_api.execute(\n action=\"delete\",\n resource=self,\n names=names,\n namespace=namespace,\n api_client=None,\n api_args={\"name\": self.metadata.name, \"body\": body},\n )", "def DeleteSecurityPolicy(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DeleteSecurityPolicy\", params, headers=headers)\n response = json.loads(body)\n model = models.DeleteSecurityPolicyResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def test_delete_namespaced_egress_network_policy(self):\n pass", "def test_delete_cluster_policy_binding(self):\n pass", "def delete_station_resource_policy(self, station_id):\n return self._stations_service.delete_station_resource_policy(station_id)", "def test_delete_collection_namespaced_policy_binding(self):\n pass", "def test_delete_hyperflex_cluster_network_policy(self):\n pass", "def test_delete_hyperflex_ext_fc_storage_policy(self):\n pass", "def test_delete_hyperflex_vcenter_config_policy(self):\n pass", "def test_delete_collection_namespaced_policy(self):\n pass", "def test_create_update_delete_firewall_policy(self):\n with self.firewall_policy(do_delete=False) as fwp:\n fwp_id = fwp['firewall_policy']['id']\n # Create Firewall Policy\n crd_policy = {'firewall_policy': fwp}\n self.clnt.create_firewall_policy.assert_called_once_with(fwp)\n # Update Firewall Policy\n data = {'firewall_policy': {'name': 'updated-name'}}\n fwp = self.plugin.update_firewall_policy(self.ctx, fwp_id, data)\n crd_policy = {'firewall_policy': fwp}\n self.clnt.update_firewall_policy.assert_called_once_with(\n fwp_id,\n crd_policy)\n # Delete Firewall Policy\n self.plugin.delete_firewall_policy(self.ctx, fwp_id)\n self.clnt.delete_firewall_policy.assert_called_once_with(fwp_id)", "def delete_network_profile(arn=None):\n pass", "def setKeepPolicy(self, policy):\n if not self.__loaded:\n self.__load()\n \n if policy > self.KeepMax:\n return\n if policy == self.__keepCookies:\n return\n \n self.__keepCookies = policy\n self.__saveTimer.changeOccurred()", "def get_deletion_policy() -> base_models.DELETION_POLICY:\n return base_models.DELETION_POLICY.NOT_APPLICABLE", "def get_deletion_policy() -> base_models.DELETION_POLICY:\n return base_models.DELETION_POLICY.NOT_APPLICABLE", "def get_deletion_policy() -> base_models.DELETION_POLICY:\n return base_models.DELETION_POLICY.NOT_APPLICABLE", "def get_deletion_policy() -> base_models.DELETION_POLICY:\n return base_models.DELETION_POLICY.NOT_APPLICABLE", "def delete(self, request, l7_rule_id, l7_policy_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_l7_rule,\n l7_rule_id, l7_policy_id,\n load_balancer_getter=l7_policy_get_load_balancer_id,\n resource_id=l7_policy_id)", "def delete_resource_with_cleanup_policy(self, namespaces, cleanup_policy, method, resource_name):\n responses = []\n for namespace in namespaces:\n self.logger.debug(\"Deleting %s in namespace %s with cleanup policy %s\",\n resource_name, namespace, cleanup_policy)\n try:\n resp = method(namespace, label_selector=labels_to_string({CLEANUP_LABEL: cleanup_policy}))\n responses.append(resp)\n except kubernetes.client.rest.ApiException:\n self.logger.error(\"An error occured trying to delete the marked resource.\")\n return responses", "def 
test_delete_hyperflex_node_config_policy(self):\n pass", "def delete(self,\n dpd_profile_id,\n ):\n return self._invoke('delete',\n {\n 'dpd_profile_id': dpd_profile_id,\n })", "def delete(self):\r\n self.connection.delete_distribution(self.id, self.etag)", "def delete_payment(self):\r\n return delete_payment_by_id(self.__payment_id__)", "def container_delete_retention_policy(self) -> Optional[pulumi.Input['DeleteRetentionPolicyArgs']]:\n return pulumi.get(self, \"container_delete_retention_policy\")", "def deletecollection_namespaced_policy(self, namespace, **kwargs):\n\n all_params = ['namespace', 'pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method deletecollection_namespaced_policy\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `deletecollection_namespaced_policy`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/policies'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def cancel_policy(self, cancellation_cause=None, date_cursor=None):\n if not date_cursor:\n date_cursor = datetime.now().date()\n if not cancellation_cause:\n cancellation_cause = \"Policy was cancelled on demand\"\n self.policy.status = u'Canceled'\n self.policy.cancellation_date = date_cursor\n self.policy.status_info = cancellation_cause\n\n # mark all policy's invoices deleted ??\n\n db.session.commit()", "def test_delete_hyperflex_ucsm_config_policy(self):\n pass", "def test_delete_collection_cluster_policy_binding(self):\n pass", "def delete_firewall_policy(self, name_or_id, filters=None):\n if not filters:\n filters = {}\n try:\n firewall_policy = self.network.find_firewall_policy(\n name_or_id, ignore_missing=False, **filters\n )\n self.network.delete_firewall_policy(\n firewall_policy, 
ignore_missing=False\n )\n except exceptions.ResourceNotFound:\n self.log.debug(\n 'Firewall policy %s not found for deleting', name_or_id\n )\n return False\n return True", "async def test_delete(self):\n rsps = respx.delete(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id') \\\n .mock(return_value=Response(200))\n await provisioning_client.delete_provisioning_profile('id')\n assert rsps.calls[0].request.url == \\\n f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'", "def delete(self,\n provider_id,\n l3vpn_id,\n ):\n return self._invoke('delete',\n {\n 'provider_id': provider_id,\n 'l3vpn_id': l3vpn_id,\n })", "def test_delete_hyperflex_software_version_policy(self):\n pass", "def delete(profile, name):\n client = boto3client.get(\"iam\", profile)\n params = {}\n params[\"InstanceProfileName\"] = name\n return client.delete_instance_profile(**params)" ]
[ "0.8218596", "0.77184653", "0.76911354", "0.7670649", "0.74277234", "0.7140506", "0.6988987", "0.6940251", "0.67815155", "0.665882", "0.65701747", "0.64927775", "0.63801694", "0.6353876", "0.6351143", "0.6344114", "0.6344114", "0.63357604", "0.6287126", "0.6283431", "0.6269533", "0.6256867", "0.62481135", "0.624738", "0.6238893", "0.6227728", "0.61749774", "0.61431044", "0.60832834", "0.6036328", "0.602169", "0.5989219", "0.59637904", "0.59621346", "0.5939107", "0.59381986", "0.5910824", "0.58909136", "0.58493984", "0.58229667", "0.58095807", "0.58081573", "0.57980376", "0.5740395", "0.5735868", "0.5723837", "0.56830156", "0.56683195", "0.5666852", "0.5620154", "0.56074643", "0.5576657", "0.5573313", "0.55667305", "0.55657935", "0.5514387", "0.54501027", "0.5447547", "0.5424814", "0.5424231", "0.53794163", "0.537257", "0.53644824", "0.53388697", "0.5329033", "0.5321659", "0.53202367", "0.5293931", "0.52650195", "0.52392006", "0.52351844", "0.521393", "0.5211217", "0.51924413", "0.5185858", "0.51772064", "0.5170811", "0.5132799", "0.51131094", "0.5106781", "0.5090416", "0.5090416", "0.5090416", "0.5090416", "0.508997", "0.5083601", "0.50750583", "0.5048555", "0.5047011", "0.50340956", "0.5026218", "0.5023879", "0.49985272", "0.49841163", "0.4973284", "0.49694255", "0.4962205", "0.49568903", "0.49534917", "0.494706" ]
0.6445359
12
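The two scalar fields above are the positive document's score (0.6445359) and its rank (12). In this row, exactly twelve entries of negative_scores exceed the document's score, which suggests document_rank counts the negatives that outscore the positive; that reading is inferred from the data, not documented. A sketch under that assumption:

def infer_document_rank(document_score, negative_scores):
    # Number of negatives scoring strictly higher than the positive document;
    # 0 would mean the positive ranks first among all candidates.
    return sum(score > document_score for score in negative_scores)

# For the row above, this returns 12, matching the stored document_rank.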
Deletes the specified SMTP credential for the specified user.
def delete_smtp_credential(self, user_id, smtp_credential_id, **kwargs):
    resource_path = "/users/{userId}/smtpCredentials/{smtpCredentialId}"
    method = "DELETE"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "if_match"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "delete_smtp_credential got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "userId": user_id,
        "smtpCredentialId": smtp_credential_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "if-match": kwargs.get("if_match", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params)
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params)
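As with the first row, a minimal usage sketch of this positive document, assuming an OCI-style identity client; both OCIDs are placeholders.

# Hypothetical usage sketch; both OCIDs are placeholders.
import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Deletes one SMTP credential belonging to the given user.
identity.delete_smtp_credential(
    user_id="ocid1.user.oc1..example",
    smtp_credential_id="ocid1.credential.oc1..example",
)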
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_credential(self, credential):\r\n return self.delete(self.credential_path % (credential))", "def delete_credential(credentials):\n credentials.delete_credentials()", "def delete_credential(self):\n Credentials.credentials_list.remove(self)", "def delete_credential(name: str):\n # first load any existing credentials\n try:\n creds = load_auth()\n except FileNotFoundError:\n # if no auth file exists we can just treat that as there being no credentials\n creds = []\n\n if '@' in name:\n username, hostname = name.split('@')\n else:\n username = name\n hostname = None\n\n # next, try to figure out which one we're supposed to remove\n matches = []\n match_indices = []\n\n for idx, cred in enumerate(creds):\n # the username must match\n if cred.username != username:\n continue\n # if specified, the hostname must match\n if hostname is not None and cred.hostname != hostname:\n continue\n\n matches.append(cred)\n match_indices.append(idx)\n\n if len(matches) == 0:\n err = f\"No matching credential found with username '{username}'\"\n if hostname is not None:\n err += f\" with hostname '{hostname}'\"\n raise RuntimeError(err)\n elif len(matches) > 1:\n raise RuntimeError(_construct_ambiguous_deletion_message(username, hostname, matches))\n\n # At this point we should have exactly one match, which we can delete\n del creds[match_indices[0]]\n write_auth_data(configure.get_config_path(\"auth\"), creds)\n prune_outdated_auth()", "def delete_credential(self):\n\n Credential.credential_list.remove(self)", "def delete_credential(self, context, id):\n return remove_credential(id)", "def delete_credential(self):\n Credential.credential_list.remove(self)", "def remove_credential(self, authenticator_id, credential_id):\n pass", "def remove_credentials(service: str) -> None:\n\n # SQL query to remove the user servise credentials from the database\n query = f\"DELETE FROM {service}_credentials WHERE user_id=?;\"\n\n # Execute the query\n with connect(DATABASE) as db:\n db.execute(query, (session[\"user_id\"],))\n db.commit()", "def delete_user_account(connection,user):\r\n with connection:\r\n connection.execute(DELETE_SPECIFIC_USER,(user,))", "def delete_credentials(self):\n Credentials.credential_list.remove(self)", "def unset_credentials(ctx, user, store):\n try:\n logger.debug(\"store={store}, user={user}\".format(store=store, user=user))\n _pycred.unset_credentials(store, user)\n except Exception as e:\n logger.debug(e, exc_info=True)\n print('Error: {msg}'.format(msg=str(e)), file=sys.stderr)\n sys.exit(1)", "def delete_user(self, user):\n self.delete(user)", "def delete(bot, update):\n chatID = update.message.chat_id\n username = get_user_info(chatID)['PID']\n logger.info(\"Deleting user credentials for {}!\".format(username))\n Chat.query.filter(Chat.chatID == chatID).delete() # Delete the user's record referenced by their ChatID\n Misc.query.filter(Misc.chatID == chatID).delete()\n db_session.commit()\n messageContent = \"Your credentials have been deleted, {}\\nHope to see you back soon!\".format(username[3:-4].title())\n bot.sendMessage(chat_id=update.message.chat_id, text=messageContent)\n \n mp.track(username, 'User Left')\n mp.people_set(username, {'active': False })", "async def del_user(conn: LDAPConnection, user: dict, mailman: Client) -> None:\n await conn.delete(user[\"dn\"])\n uid = user[\"attributes\"][\"uid\"][0]\n rmtree(user[\"attributes\"][\"homeDirectory\"][0])\n rmtree(f\"/webtree/{uid[:1]}/{uid}\")\n mailing_list = mailman.get_list(\"announce-redbrick\")\n 
mailing_list.unsubscribe(f\"{uid}@redbrick.dcu.ie\")", "def delete_user(self, user):\n # noinspection PyUnresolvedReferences\n self.delete(user)", "def delete_user(self, user):\n try:\n with dbm.open(self.dbm_path, 'c', 0o600) as db:\n del db[user.name]\n except KeyError as k:\n pass", "def delete_credentials(self):\n Credentials.credentials_list.remove(self)", "def delete_credentials(self):\n Credentials.credentials_list.remove(self)", "def delete_credentials(self):\n Credentials.credentials_list.remove(self)", "def deleteCredential(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def userdel(pwfile, user):\n return __salt__[\"webutil.userdel\"](pwfile, user)", "def _delete_credential(self, key):\n try:\n del self._data[key]\n except KeyError:\n pass\n self._write()", "def revoke(self):\n # Removes credentialing from the user\n with transaction.atomic():\n self.revoked_datetime = timezone.now()\n\n self.migrated_user.is_credentialed = False\n self.migrated_user.credential_datetime = None\n\n self.migrated_user.save()\n self.save()\n\n logger.info('Credentialing for user {0} has been removed.'.format(\n self.migrated_user.email))", "def delete_user(UserName=None, AuthenticationType=None):\n pass", "def delete_user(self, user):\n name = utils.get_name(user)\n self._user_manager.delete(name)", "def delete(self, user):\n q = \"DELETE FROM profiles WHERE user=?\"\n try:\n self._query(q, (user,), fetch='none')\n except Exception as e:\n raise e", "def delete_user(BrokerId=None, Username=None):\n pass", "def delete_user(self, user_id):\n if self.database is None:\n raise Exception(\"No database.\")\n if user_id is None or len(user_id) == 0:\n raise Exception(\"Bad parameter.\")\n return self.database.delete_user(user_id)", "def delete_user_credentials(connection, api_url):\n\n body = {\n 'endpoint': api_url,\n 'user': '',\n 'password': '',\n 'token': '',\n 'type': 'none'\n }\n\n connection.post_obj_as_json('user/credentials', body)", "def delete(ctx, query, force, password, remember):\n\n _init_session(ctx, password, remember)\n session = ctx.obj[\"session\"]\n creds = session.list_credentials()\n hits = _search(creds, query, True)\n if len(hits) == 0:\n click.echo(\"No matches, nothing to be done.\")\n elif len(hits) == 1:\n cred = hits[0]\n if force or (\n click.confirm(\n f\"Delete account: {_string_id(cred)} ?\",\n default=False,\n err=True,\n )\n ):\n session.delete_credential(cred.id)\n click.echo(f\"Deleted {_string_id(cred)}.\")\n else:\n click.echo(\"Deletion aborted by user.\")\n\n else:\n _error_multiple_hits(ctx, hits)", "def delete_user(self, user):\n self.execute(TABELLE['id_users'][\"delete\"], user[\"id\"])", "def deleteUser(self, uID):\n\n cursor = self.conn.cursor()\n query = \"DELETE FROM Users CASCADE \" \\\n \"WHERE uID= %s RETURNING cID; \"\n cursor.execute(query, (uID,))\n cID = cursor.fetchone()[0]\n\n query = \"DELETE FROM Credential \" \\\n \"WHERE cID= %s; \"\n cursor.execute(query, (cID,))\n\n self.conn.commit()\n return", "def delete(ctx, query, force):\n\n ensure_validated(ctx)\n controller = ctx.obj['controller']\n creds = controller.list()\n hits = _search(creds, query)\n if len(hits) == 0:\n click.echo('No matches, nothing to be done.')\n elif len(hits) == 1:\n cred = hits[0]\n if force or (click.confirm(\n u'Delete credential: {} ?'.format(cred.printable_key),\n default=False, err=True\n )):\n controller.delete(cred)\n 
click.echo(u'Deleted {}.'.format(cred.printable_key))\n else:\n click.echo('Deletion aborted by user.')\n\n else:\n _error_multiple_hits(ctx, hits)", "def delete(self, request, user_id=None):\n data = json.loads(request.body.decode())\n authenticated = Account.check_credentials(request, data['email'], data['password'])\n user = {}\n user['account_id'] = authenticated.id\n\n if authenticated.check_admin(request, user):\n NLTKOutput.remove(request=request, pk=user_id)\n Account.remove(request=request, pk=user_id)\n return Response(json='Account and content deleted', status=204)\n\n return Response(json='Not Authorized', status=401)", "def delete_user(self):\n raise NotImplementedError(\"Function not yet implemented contact package creator\")", "def delete_user(self, user_name):\n user = self.get_user(user_name)\n return self.client.delete_resource(user.get('href'))", "def deleteAgentCredential(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def del_user(self, username):\n pass", "def delete_user(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload=user, request_type=self.REQUEST_DELETE, version=\"v1\")", "def delete_user(self, user_name):\r\n params = {'UserName' : user_name}\r\n return self.get_response('DeleteUser', params)", "def del_user(user_id):\n log = current_app.log\n db = request.db\n Site = db.tables.Site\n Cred = db.tables.Cred\n auth_user_id = SiteService.get_current_uid()\n # Check the user is deleting their own items\n if auth_user_id != user_id:\n log.warn(\"User %u tried to delete sites belonging to user %u.\",\n auth_user_id, user_id)\n abort(404)\n sites = Site.query.filter_by(site_owner=auth_user_id).all()\n num_sites = len(sites)\n creds = Cred.query.filter_by(cred_owner=auth_user_id).all()\n num_creds = len(creds)\n with managed_session(request,\n message=\"Database error while deleting sites\",\n http_error_code=500) as session:\n for cred in creds:\n session.delete(cred)\n for site in sites:\n session.delete(site)\n log.info(\"Deleted all sites for user %u (%u sites, %u creds deleted).\",\n auth_user_id, num_sites, num_creds)\n return \"\"", "def revoke(self):\n # Set the application as unsucessful with the current datetime\n self.status = self.Status.REVOKED\n self.revoked_datetime = timezone.now()\n\n # Removes credentialing from the user\n self.user.is_credentialed = False\n self.user.credential_datetime = None\n\n with transaction.atomic():\n self.user.save()\n self.save()\n\n logger.info('Credentialing for user {0} has been removed.'.format(\n self.user.email))", "def delete_account(self):\n Credential.account_list.remove(self)", "def record_destroy_for_user(project_id, user_id):\n session = get_session()\n with session.begin():\n session.query(models.UserAccountRecord).\\\n filter_by(project_id=project_id).\\\n filter_by(user_id=user_id).\\\n update({'deleted': True,\n 'deleted_at': datetime.datetime.utcnow(),\n 'updated_at': datetime.datetime.utcnow()})", "def delete_user(self, user_id):\n return self._delete('/users/{0}'.format(user_id))", "def remove_credentials(self, conjurrc: ConjurrcData):\n self.credentials_provider.remove_credentials(conjurrc)", "def delete_user_by_email(tx: Transaction, user_email: str) -> BoltStatementResult:\n query = f\"\"\"\n MATCH(n: Person {{email: '{user_email}'}})\n OPTIONAL 
MATCH(n)--(p: Post)\n DETACH DELETE n, p\"\"\"\n return tx.run(query)", "def delete(self, user_id):\n return delete_user(user_id)", "def delete(self, user: 'UserCondensed'):\n self._delete(entity=user)", "def deleteUser(self, password, feedback=\"\"):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\tpayload = {'password': password, 'feedback': feedback}\n\t\treturn(deleteUrl(url, self.credentials, payload))", "def delete_user_entitlement(self, user_id):\n route_values = {}\n if user_id is not None:\n route_values['userId'] = self._serialize.url('user_id', user_id, 'str')\n self._send(http_method='DELETE',\n location_id='8480c6eb-ce60-47e9-88df-eca3c801638b',\n version='6.0-preview.3',\n route_values=route_values)", "def remove(self, user):\r\n url = '{0}/{1}'.format(self.get_url(), user)\r\n\r\n return http.Request('DELETE', url), parsers.parse_empty", "def delete_proj_user(self, user_id):\n conn = pyone.OneServer(\n self.auth_url,\n session=\"{0}:{1}\".format(self.username, self.password)\n )\n try:\n user = conn.user.info(user_id)\n group = user.get_GROUPS().ID[0]\n # delete group\n conn.group.delete(group)\n # delete user\n return conn.user.delete(user.get_ID())\n except pyone.OneNoExistsException as e:\n logger.exception(\"Failed. User trying to delete, doesn't exist: \", user_id)\n except Exception as e:\n logger.exception(\"Failed. User trying to delete, group doesn't exist: \", user_id)", "def remove_client_credentials(self):\n if self._dry_run:\n return\n os.unlink(self._store_pathname)", "def delete_user(self, user):\n\n if self.sql_read_only:\n return False\n\n if not self.check_prereqs():\n return False\n\n if not self.has_user(user):\n return False\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_delete_user_query,{'username_field':self.sql_username_field,'username':user})\n self.log.debug(\"sqlflexibleauthstore: delete_user: %s\" % (query,))\n cursor.execute(query)\n\n db.commit()\n del_user_attribute(self.env,username=user)\n return True", "def delete_user_with_email_confirmation(self, userid, deleteuserlink, template=''):\n payload = {'appkey': self._lr_object._get_api_key(), 'appsecret': self._lr_object._get_api_secret(),\n 'userid':userid, 'deleteuserlink':deleteuserlink,'template':template}\n url = SECURE_API_URL + \"raas/v1/user/deleteuseremail\"\n return self._lr_object._get_json(url, payload)", "def delete_user():", "def delete_user(cls, user_email):\n\n User.query.filter_by(email=user_email).delete()\n\n db.session.commit()\n\n print \"Successfully deleted user with the email: %s!\" % user_email", "def delete(user_id):\n assert isinstance(user_id, ObjectId)\n\n User.objects(id=user_id).delete()", "def DeleteUser(self, row):\n try:\n self.gd_client.DeleteUser(row['user_name'])\n row['status'] = 'success'\n except gdata.apps.service.AppsForYourDomainException, e:\n row['status'] = (\n 'fail gdata error code: %s %s' %\n (e.error_code, ERROR_DICT[str(e.error_code)]))\n except KeyError:\n print 'error - user_name is a required header'\n sys.exit()", "def delete_password_in_keyring(username):\n return keyring.delete_password(KEYRING_SYSTEM, username,)", "def delete_o_auth_client_credential(self, user_id, oauth2_client_credential_id, **kwargs):\n resource_path = \"/users/{userId}/oauth2ClientCredentials/{oauth2ClientCredentialId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in 
expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_o_auth_client_credential got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"oauth2ClientCredentialId\": oauth2_client_credential_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def deleteCredential(self, credentialName):\n try:\n utility.execLog(\"Deleting Credential: %s\" % credentialName)\n self.browserObject, status, result = self.selectCredential(credentialName)\n if not status:\n return self.browserObject, False, result\n # Checking for Default Credentials - 'Delete' will be Disabled\n disabled = self.handleEvent(EC.presence_of_element_located((By.ID, self.CredentialsObjects('deleteCredentials'))), action=\"GET_ATTRIBUTE_VALUE\", attributeName=\"disabled\")\n if \"true\" in disabled:\n return self.browserObject, False, \"Unable to Delete Default Credential: %s\" % credentialName\n # Clicking on Delete\n self.handleEvent(EC.element_to_be_clickable((By.ID, self.CredentialsObjects('deleteCredentials'))), action=\"CLICK\")\n utility.execLog(\"Checking for Confirm Box...\")\n try:\n currentTitle = self.handleEvent(EC.element_to_be_clickable((By.XPATH, self.CommonObjects('GetFormTitle'))), action=\"GET_TEXT\")\n except:\n return self.browserObject, False, \"Unable to Load Confirm Box To Delete Credential\"\n if \"Confirm\" in currentTitle:\n utility.execLog(\"Confirm Box Loaded...Confirming to Delete Credential: '%s'\" % credentialName)\n self.handleEvent(EC.element_to_be_clickable((By.ID, self.CommonObjects('ConfirmYes'))), action=\"CLICK\")\n else:\n utility.execLog(\"Failed to Verify Confirm Delete Box :: Actual --> '%s' :: Expected --> '%s'\" % (currentTitle, \"Confirm\"))\n return self.browserObject, False, \"Failed to Verify Confirm Delete Box :: Actual --> '%s' :: Expected --> '%s'\" % (currentTitle, \"Confirm\")\n # Checking for Error Deleting a Credential\n try:\n errorRedBox = self.handleEvent(EC.visibility_of_element_located((By.XPATH, self.CommonObjects('RedBoxError'))), wait_time=10)\n if errorRedBox:\n errorMessage = self.handleEvent(EC.element_to_be_clickable((By.XPATH, self.CommonObjects('RedBoxErrorMessages'))),action=\"GET_TEXT\")\n return self.browserObject, False, \"Failed to Delete Credential :: '%s' :: Error -> %s\" % (credentialName, errorMessage)\n except:\n # Refresh Table\n self.handleEvent(EC.element_to_be_clickable((By.ID, self.CredentialsObjects('credentialsRefresh'))), action=\"CLICK\")\n time.sleep(3)\n # VALIDATION: Selecting deleted Credential\n self.browserObject, status, 
result = self.selectCredential(credentialName)\n if status:\n return self.browserObject, False, \"Failed to Delete Credential :: '%s' :: Error -> %s\" % (credentialName, \"Validation Error\")\n else:\n return self.browserObject, True, \"Successfully Deleted Credential: '%s'\" % credentialName\n except Exception as e:\n return self.browserObject, False, \"Exception while Deleting Credential :: '%s' :: Error -> %s\" % (credentialName, str(e) + format_exc())", "def delete(self, user_id):\r\n return delete_user(request, user_id)", "def delete_user(self, user_id):\n\n # ask the model to delete the user\n um = User(self.settings)\n status = um.delete(user_id)\n\n # return\n return status", "def delete(cls):\n user = user_schema.load(request.get_json(), partial=(\"email\",))\n\n current_identity = get_jwt_identity()\n db_user = UserModel.find_by_id(current_identity)\n logging.info(\n f\"Delete called by {db_user.id}: {db_user.username} with data: {user['username']}\"\n )\n if db_user.username == user['username']:\n if is_correct_password(db_user.pw_salt, db_user.pw_hash, user['password']):\n db_user.delete_from_db()\n return {\"message\": msgs.DELETED.format(db_user.username)}, 200\n else:\n return {\"error\": msgs.INVALID_PASSWORD}, 401\n return {\"error\": msgs.OWN_RECORD_ONLY}, 401", "def delete_by_email(\n user_to_delete: schemas.UserDelete,\n db_session: Session = Depends(get_db),\n current_user: models.User = Depends(get_current_admin_user)\n):\n db_user = crud.get_by_email(db_session, user_to_delete.email)\n\n if not db_user:\n raise HTTPException(\n status_code=status.HTTP_404_NOT_FOUND,\n detail=f'User with email \"{user_to_delete.email}\" not found.'\n )\n\n crud.remove(db_session, db_user)", "def delete_account_with_email_confirmation(self, accountId, deleteuserlink, template=''):\n payload = {'appkey': self._lr_object._get_api_key(), 'appsecret': self._lr_object._get_api_secret(),\n 'accountId':accountId, 'deleteuserlink':deleteuserlink,'template':template}\n url = SECURE_API_URL + \"raas/v1/account/deleteuseremail\"\n return self._lr_object._get_json(url, payload)", "def delete_user():\n del globalopts.appdata[request.user]\n del globalopts.users[request.user]\n return \"\", 200", "async def red_delete_data_for_user(self, *, requester, user_id):\n\t\tawait self.config.user_from_id(user_id).clear()", "def del_user(self, server, username, quiet=False):\n self._op_user(\"del\", server, {\"username\": username}, quiet)", "def delete_user(self) -> None:\n table_dictionary = {\n 'Apple': {\n 'table': 'AppleReceipts',\n 'user_id': 'User_id'\n },\n 'ESL': {\n 'table': 'ESLReceipts',\n 'user_id': 'User_id'\n },\n 'Transactions': {\n 'table': 'Transactions',\n 'user_id': 'User_id'\n },\n 'Users': {\n 'table': 'Users',\n 'user_id': 'id'\n },\n }\n\n # delete the current user's information from the db.\n for key in table_dictionary:\n query = f\"\"\"\n DELETE\n FROM {table_dictionary[key]['table']}\n WHERE {table_dictionary[key]['user_id']}=?;\n \"\"\"\n self.db.commit(query, values=(self.id,))\n\n # perform a sign out\n self.sign_out()\n\n log(f\"User:{self.id} has deleted their account.\")", "def delete_user(self, _id):\n return self.make_request(\"DELETE\", \"users/\"+_id, {})", "def request_account_deletion(user):\n account = accounts.getCurrentAccount(normalize=False)\n sender = system.getApplicationNoReplyEmail()\n\n subject = ADMIN_REQUEST_EMAIL_SUBJEST % {\n 'url_id': user.url_id\n }\n body = ADMIN_REQUEST_EMAIL_BODY % {\n 'name': user.name,\n 'email': account.email(),\n 'url_id': 
user.url_id,\n }\n\n mail.send_mail_to_admins(sender, subject, body)", "def delete_user(self, user_id):\n sql = 'update account_user set is_deleted = 1 where id = %s'\n with connection.cursor() as cursor:\n cursor.execute(sql, [user_id])\n row = cursor.fetchone()\n\n return row", "def do_deluser(self, line):\n\t\tif isinstance(self.cl, Book):\n\t\t\ttry:\n\t\t\t\tself.cl.del_contact(line)\n\t\t\texcept ValueError:\n\t\t\t\tprint(\"Wrong syntax! Type 'help delete'\")\n\t\telse:\n\t\t\tprint(\"To delete contacts you need to open or create a book.\")", "def delete_user(self,userid, cursor):\n sql=\"DELETE FROM users WHERE userid = %s\"\n cursor.execute(sql,(userid))", "def delete(self):\n\n user_id = get_jwt_identity()\n user = user_crud.get(user_id)\n if not user:\n abort(404, message=\"User not Found\")\n all_tokens = auth_crud.get_user_tokens(user_id)\n tokens = [token.to_dict() for token in all_tokens]\n for token in tokens:\n auth_crud.revoke_token(token['id'], user_id)\n user = user_crud.remove(user_id)\n\n return {'msg': 'User Removed'}", "def delete_user(cls, user_id=None, email=None):\n params = {\n 'email': email,\n 'user_id': user_id\n }\n user_dict = cls._do_call(\n 'DELETE', cls.api_endpoint + 'users', params)\n return user_dict", "def delete_user(user_id):\n\n user = User.query.get(user_id)\n db.session.delete(user)\n db.session.commit()\n return", "def del_user(item, username, passw):\n user = User.load_user_by_username(item, username)\n if not user:\n print(\"User does not exist!\")\n elif check_password(passw, user.hashed_password):\n user.delete(item)\n print(\"User deleted.\")\n else:\n print(\"Incorrect password!\")", "def delete_user_by_id(user_id):\n return woo_request_helper().delete_details(wc_endpoint='customers/{}'.format(user_id))", "def delete_user():\n #TODO user delete\n pass", "def locked_delete(self):\n self._multistore._delete_credential(self._key)", "def delete_user(username, user_id):\r\n global sql_cursor\r\n global database\r\n\r\n print(\"Are you absolutely sure that you want to delete your account.\")\r\n conf_del = input(\"(y/n) : \").lower()\r\n\r\n if conf_del == \"y\":\r\n\r\n print(\"Deleting...\")\r\n\r\n sql_cursor.execute(f\"DELETE FROM passwords WHERE user_id={user_id};\")\r\n sql_cursor.execute(f'DELETE FROM users WHERE username=\"{username}\";')\r\n database.commit()\r\n\r\n print(\"Account successfully deleted\")\r\n print(\"You need to start the program again\")\r\n print(\"Exiting now\")\r\n sleep(5)\r\n quit()\r\n\r\n else:\r\n print(\"Cancelling deletion ...\")\r\n return", "def test_delete_user(self) :\n self.new_credential.save_credential()\n test_credential = Credential(\"peter\", \"Peter\", \"Peter003\") # new user\n test_credential.save_credential()\n self.assertEqual(len(Credential.credential_list),2)", "def create_smtp_credential(self, create_smtp_credential_details, user_id, **kwargs):\n resource_path = \"/users/{userId}/smtpCredentials\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_smtp_credential got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise 
ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_smtp_credential_details,\n response_type=\"SmtpCredential\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_smtp_credential_details,\n response_type=\"SmtpCredential\")", "def deleteUser(self,name):\n raise BorkedDeleteUser", "def user_audit_delete(sender, user, request, **kwargs):\n\n try:\n UserAudit.objects.get(audit_key=request.session[constants.USERWARE_AUDIT_KEY]).delete()\n except:\n pass\n logger.info(_('User {} logged out'.format(request.user.username)))", "def delete_account(username):\n set_user_group(username, \"basic\")\n password = generate_password_hash(\"deleted\")\n set_password(username, password)\n sql = \"UPDATE users \" \\\n \"SET is_active=FALSE, username='[deleted]' \" \\\n \"WHERE username=:username\"\n db.session.execute(sql, {\"username\": username})\n db.session.commit()", "def delete_user_async(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload=user, request_type=self.REQUEST_DELETE, version=\"v2\")", "def delete_account(self, account):\n \n pass", "def sipserver_user_remove(self, user: str) -> None:\n self.remove_endpoint_from_sipserver(endpoint=user)", "def list_smtp_credentials(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/smtpCredentials\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_smtp_credentials got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[SmtpCredentialSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[SmtpCredentialSummary]\")", "async 
def delete_user(user_id):\n \n user = User.select().where(User.id == user_id).first()\n\n if not user:\n return HTTPException(404, 'User not found')\n else:\n user.delete_instance()\n\n return f\"User {user.username} deleted successfully\"", "def delete_keystone_v3_user(self, user_id):\n LOG_OBJ.debug(\"Disable the user.\")\n kwargs = {\"user_id\": user_id, \"enabled\": False}\n self.set_keystone_v3_user(**kwargs)\n\n LOG_OBJ.debug(\"Deleting the user.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/users/\" + str(user_id)\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n response = self.request(\"DELETE\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while deleting the user\")\n print (\"No response from Server while deleting the user\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\" Deleting user Failed with status %s \"\n \"and error : %s\" % (response.status, response.data))\n print (\" Deleting user Failed with status %s and error : %s\" %\n (response.status, response.data))\n return response.status\n\n return True", "def delete_user(self):\n User.user_list.remove(self)", "def delete_user(self):\n User.user_list.remove(self)", "def delete_user(self):\n User.user_list.remove(self)" ]
[ "0.7331302", "0.72613764", "0.670908", "0.661661", "0.65503716", "0.6531281", "0.65208536", "0.6449064", "0.6350622", "0.63303417", "0.63224506", "0.6295059", "0.62919253", "0.6242662", "0.6233042", "0.61947227", "0.6164052", "0.6137985", "0.6137985", "0.6137985", "0.6135059", "0.6127839", "0.6065958", "0.6057861", "0.6005234", "0.59498906", "0.58769244", "0.5837494", "0.58205163", "0.5817403", "0.58159953", "0.5815599", "0.58130133", "0.5795656", "0.57875645", "0.57785016", "0.577822", "0.57523084", "0.5737439", "0.57332283", "0.57028633", "0.56970084", "0.56732804", "0.5672705", "0.56538963", "0.5650145", "0.56466144", "0.5638621", "0.5632244", "0.56298006", "0.5607723", "0.5598308", "0.5593576", "0.5562678", "0.5560583", "0.55527157", "0.5542146", "0.5535685", "0.5531252", "0.5498404", "0.5484572", "0.5481297", "0.5477506", "0.5474591", "0.5466852", "0.54644066", "0.5459298", "0.54557127", "0.5448773", "0.5448178", "0.5440436", "0.5434966", "0.5424033", "0.54104817", "0.5391413", "0.5370477", "0.5359529", "0.534871", "0.53385997", "0.53276235", "0.53091997", "0.5305011", "0.53008235", "0.52909744", "0.5289028", "0.52796763", "0.525907", "0.52550894", "0.52516747", "0.5247501", "0.5247257", "0.5240344", "0.5236228", "0.52333987", "0.52277696", "0.5225831", "0.5222391", "0.52165633", "0.52165633", "0.52165633" ]
0.72640246
1
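Editor's note: the SDK documents in the rows below all share one request-building skeleton — validate kwargs against an allow-list, require non-blank path parameters, then dispatch through an optional retry strategy. A minimal sketch of that skeleton follows; the helper name is hypothetical, not part of the dataset.

import six

def _validate_call(path_params, kwargs, expected_kwargs):
    # Reject any keyword argument the operation does not document.
    extra = [k for k in six.iterkeys(kwargs) if k not in expected_kwargs]
    if extra:
        raise ValueError("got unknown kwargs: {!r}".format(extra))
    # Path parameters must be present and non-blank.
    for k, v in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError("Parameter {} cannot be None, whitespace or empty string".format(k))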
Deletes the specified tag definition. This operation triggers a process that removes the tag from all resources in your tenancy.
def delete_tag(self, tag_namespace_id, tag_name, **kwargs): resource_path = "/tagNamespaces/{tagNamespaceId}/tags/{tagName}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_tag got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagNamespaceId": tag_namespace_id, "tagName": tag_name } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete(self):\n request = self.tags_service.delete(path=self._path)\n request.execute()", "def delete_tag(tag):\n tag.destroy()", "def delete(self, tag, params={}, **options):\n path = \"/tags/%s\" % (tag)\n return self.client.delete(path, params, **options)", "def delete_tag(self, tag):\n return self.__datacatalog.delete_tag(name=tag.name)", "def delete_tag(self, session, tag):\n self._tag(session.delete, key=tag, delete=True, session=session)", "def delete_tag(self,tag):\r\n\r\n # with shelf\r\n if self.using_shelf:\r\n del self.tag_dict[tag]", "def delete_tag(self, *tags: TagReference) -> None:\n return TagReference.delete(self, *tags)", "def delete_tag(tag, directory=None):\n execute_command('git tag -d {0}'.format(tag), shell=True, cwd=directory)", "async def slashtag_remove(self, ctx: commands.Context, *, tag: GuildTagConverter):\n await ctx.send(await tag.delete())", "def delete_tags(self, session):\n self._tag(session.delete, delete=True, session=session)", "def _delete_tag_request():\n key = helpers.get('Tag.1.Key')\n resource_id = helpers.get('ResourceId.1')\n\n if resource_id in current_app.config['RESOURCE_TYPE_MAP']:\n resource_type = current_app.config['RESOURCE_TYPE_MAP'][resource_id]\n else:\n errors.invalid_request(\n str(resource_id) + \" not found in configuration\")\n\n args = {\n 'command': 'deleteTags',\n 'resourceids': resource_id,\n 'resourcetype': resource_type,\n 'tags[0].key': key\n }\n\n response = requester.make_request_async(args)\n\n return response", "def delete_tag(filename, tag_name):\n storeapps = APP.config[\"storage\"]\n filename = filename.encode(\"utf-8\")\n\n try:\n application = list(nativeapps.io.ls(storeapps, r\".*\" + filename + \"$\"))[0]\n meta_path = os.path.join(os.path.dirname(application), \"metadata.json\")\n metadata = json.loads(nativeapps.io.readfile(meta_path))\n tags = metadata.get(\"tags\", [])\n if tag_name in tags:\n tags.remove(tag_name)\n metadata[\"tags\"] = tags\n nativeapps.io.writefile(meta_path, json.dumps(metadata))\n except IndexError:\n return \"Unknown application: %s\" % (application), 404\n\n return \"removed\", 200", "def DeleteForTag(cls, tag):\n parent_key = cls._GetParentKeyFromTag(tag)\n frontend_job = cls.query(ancestor=parent_key).get(keys_only=True)\n if frontend_job:\n frontend_job.delete()", "def delete_tag(request):\n try:\n tags = request.POST.getlist('tag_id', 0)\n tag = Tag.objects.filter(pk__in=tags).delete()\n ActionLogger().log(request.user, \"deleted\", \"Knowledgebase Tag %s\" % tags)\n return format_ajax_response(True, \"Knoweldgebase tag deleted successfully.\")\n except Exception as ex:\n logger.error(\"Failed to delete_tag: %s\" % ex)\n return format_ajax_response(False, \"There was an error deleting the specified knowledgebase tag.\")", "def delete(self, uuid):\n\n\t\treturn self._delete(\"/tag/%s\" % base.getid(uuid), \"tag\")", "def remove_tag(self, tag):\n _tag_entity('task', self.task_id, tag, untag=True)", "def delete_tag_template(self, name):\n self.__datacatalog.delete_tag_template(name=name, force=True)\n logging.info('Tag Template deleted: %s', name)", "def delete_tags(configurationIds=None, tags=None):\n pass", "def delete_tag(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(\"/tags\")", "def delete_tag(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n\n db.session.delete(tag)\n db.session.commit()\n\n return redirect('/tags')", "def delete_tag(tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n 
db.session.commit()\n\n return redirect(\"/tags\")", "def delete_a_tag(self, contact_tag_id):\n return self.client._delete(f\"/contactTags/{str(contact_tag_id)}\")", "def delete_tag(tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(f\"/tags\")", "def delete_taggit_tags(apps, schema_editor):\n TaggitTag = apps.get_model('taggit', 'Tag')\n TaggitTag.objects.all().delete()", "def remove_tag(self, tag: str) -> None:\n tags = self.get_tag_index()\n tags.remove(tag)\n self.write_tag_index(list(set(tags)))", "def delete(self, model_definition_uid):\n ##For CP4D, check if either spce or project ID is set\n self._client._check_if_either_is_set()\n model_definition_uid = str_type_conv(model_definition_uid)\n ModelDefinition._validate_type(model_definition_uid, u'model_definition_uid', STR_TYPE, True)\n paramvalue = self._client._params()\n\n model_definition_endpoint = self._href_definitions.get_model_definition_assets_href() + \"/\" + model_definition_uid\n if not self._ICP:\n response_delete = requests.delete(model_definition_endpoint, params=paramvalue, headers=self._client._get_headers())\n else:\n response_delete = requests.delete(model_definition_endpoint, params=paramvalue, headers=self._client._get_headers(), verify=False)\n\n return self._handle_response(204, u'Model definition deletion', response_delete, False)", "def _delete(self, model_definition_uid):\n ##For CP4D, check if either spce or project ID is set\n model_definition_uid = str_type_conv(model_definition_uid)\n ModelDefinition._validate_type(model_definition_uid, u'model_definition_uid', STR_TYPE, True)\n paramvalue = self._client._params()\n model_definition_endpoint = self._href_definitions.get_model_definition_assets_href() + \"/\" + model_definition_uid\n if not self._ICP:\n response_delete = requests.delete(model_definition_endpoint, params=paramvalue, headers=self._client._get_headers())\n else:\n response_delete = requests.delete(model_definition_endpoint, params=paramvalue, headers=self._client._get_headers(), verify=False)", "def bulk_delete(self, **kwargs: Any) -> Response:\n tags = kwargs[\"rison\"]\n try:\n DeleteTagsCommand(tags).run()\n return self.response(200, message=f\"Deleted {len(tags)} tags\")\n except TagNotFoundError:\n return self.response_404()\n except TagInvalidError as ex:\n return self.response(422, message=f\"Invalid tag parameters: {tags}. 
{ex}\")\n except TagDeleteFailedError as ex:\n return self.response_422(message=str(ex))", "def untag():\n version = git.prompt_tag('Which tag to delete?')\n if not version:\n abort('No available version tag')\n git.delete_tag(version)", "def delete(self, **kwargs):\n url_str = self.base_url + \"/%s\" % kwargs['definition_id']\n newheaders = self.get_headers()\n resp, body = self.client.json_request('DELETE', url_str,\n headers=newheaders)\n return resp", "def remove_tag(self, tag):\n for task in self._tasks:\n task.remove_tag(tag)\n\n return self", "def destroy(self, request, *args, **kwargs):\n self._process_settings(request)\n id_ = args[0]\n tag = kwargs.get('tag', 'v1')\n # TODO: check that tag and user allows getting content\n es_response_raw = req_session.delete(\n '{}/{}/_{document_type}/{id}'.format(\n settings.ELASTIC_SEARCH_HOST,\n '{site}__{app}'.format(site=settings.SITE, app=self.app),\n document_type=self.document_type,\n id=id_))\n if es_response_raw.status_code != 200:\n exceptions.XimpiaAPIException(_(u'Could not delete document'))\n es_response = es_response_raw.json()\n logger.info(u'DocumentViewSet.destroy :: deleted document \"{document_type}\" id_:{id}'.format(\n id=es_response.get('_id', ''),\n document_type=self.document_type\n ))\n return Response(es_response)", "async def removetags(self, ctx, tag=None):\r\n\t\tTag = self.settings.ServerConfig(ctx.guild.id, 'Tags')\r\n\t\tif not tag in Tag:\r\n\t\t\treturn await ctx.send('Can\\'t find Tag: '.format(tag))\t\r\n\r\n\t\tdel Tag[tag]\r\n\t\tself.settings.ServerConfig(ctx.guild.id, 'Tags', Tag)\r\n\r\n\t\tawait ctx.send('Removed Tag: '.format(tag))", "def remove_definition(self, definition):\n assert definition.library == self, \"definition is not included in library\"\n self._remove_definition(definition)\n self._definitions.remove(definition)", "def delete_tag_by_id(self,\r\n access_token,\r\n tag_id):\r\n\r\n # Prepare query URL\r\n _url_path = '/tags/{tag_id}'\r\n _url_path = APIHelper.append_url_with_template_parameters(_url_path, { \r\n 'tag_id': tag_id\r\n })\r\n _query_builder = Configuration.base_uri\r\n _query_builder += _url_path\r\n _query_parameters = {\r\n 'access_token': access_token\r\n }\r\n _query_builder = APIHelper.append_url_with_query_parameters(_query_builder,\r\n _query_parameters, Configuration.array_serialization)\r\n _query_url = APIHelper.clean_url(_query_builder)\r\n\r\n # Prepare and execute request\r\n _request = self.http_client.delete(_query_url)\r\n CustomQueryAuth.apply(_request)\r\n _context = self.execute_request(_request)\r\n\r\n # Endpoint and global error handling using HTTP status codes.\r\n if _context.response.status_code == 0:\r\n raise APIException('Unexpected error.', _context)\r\n self.validate_response(_context)", "def tag_remove(self, remote_path, corpus_id, tag, storage_id=None):\n client, remote_path = self._get_storage(remote_path, storage_id=storage_id)\n return client.tag_remove(corpus_id, tag)", "def delete_tag(user_id, tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(f'/users/{user_id}')", "def tags_delete(tag_id):\n\n tags = Tag.query.get_or_404(tag_id)\n\n db.session.delete(tags)\n db.session.commit()\n\n flash(f\"'{tags.name}' tag is deleted.\")\n\n return redirect(\"/tags\")", "def delete_deployment(request, deployment, **_kwargs):\n pass", "def do_DELETE(self): # pylint: disable=missing-docstring\n self._set_headers(204)\n (resource, id) = self.parse_url(self.path)\n\n if resource == 
\"comments\":\n delete_comment(id)\n elif resource == \"posts\":\n delete_post(id)\n elif resource == \"categories\":\n delete_category(id)\n elif resource == \"posttags\":\n remove_tag(id)\n \n self.wfile.write(\"\".encode())", "async def delete_concurrency_limit_by_tag(\n self,\n tag: str,\n ):\n try:\n await self._client.delete(\n f\"/concurrency_limits/tag/{tag}\",\n )\n except httpx.HTTPStatusError as e:\n if e.response.status_code == status.HTTP_404_NOT_FOUND:\n raise prefect.exceptions.ObjectNotFound(http_exc=e) from e\n else:\n raise", "def delete_tags(ResourceArn=None, TagKeys=None):\n pass", "def delete_tags(ResourceArn=None, TagKeys=None):\n pass", "def remove_tag(self, dataset: \"Dataset\", tag: \"DatasetTag\"):\n raise NotImplementedError", "def remove_tag(tag_id):\n tag = Tags.query.get(tag_id)\n db_session.delete(tag)\n db_session.commit()\n return 'Tag #%s (%s) has been deleted.' % (tag_id, tag.tag), 'success'", "def test_remove_single_tag(self):\n p = self.load_policy({\n 'name': 'test-azure-remove-single-tag',\n 'resource': 'azure.vm',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'cctestvm'}\n ],\n 'actions': [\n {'type': 'tag',\n 'tag': 'tag1',\n 'value': 'to-delete'}\n ],\n })\n p.run()\n\n # verify the initial tag set\n s = Session()\n client = s.client('azure.mgmt.compute.ComputeManagementClient')\n vm = client.virtual_machines.get('test_vm', 'cctestvm')\n self.assertEqual(vm.tags, {'tag1': 'to-delete', 'testtag': 'testvalue'})\n\n p = self.load_policy({\n 'name': 'test-azure-tag',\n 'resource': 'azure.vm',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'cctestvm'}\n ],\n 'actions': [\n {'type': 'untag',\n 'tags': ['tag1']}\n ],\n })\n p.run()\n\n # verify that the a tag is deleted without modifying existing tags\n vm = client.virtual_machines.get('test_vm', 'cctestvm')\n self.assertEqual(vm.tags, {'testtag': 'testvalue'})", "def delete(self, keyword, definitions):\r\n print(keyword,definitions)\r\n if self.query(term1='kd',term2=keyword,action='in'):\r\n self.query('kd',term2=keyword,term3=definitions,action='delete')", "async def delete(self, ctx: \"IceTeaContext\", *, otag: TagConverter):\n tag: models.Tag = otag\n if tag.alias:\n if ctx.author.guild_permissions.administrator or tag.author == ctx.author.id:\n try:\n await tag.delete()\n await ctx.send(\"aliases deleted\")\n except:\n await ctx.send(\"Alias unsuccessfully deleted\")\n elif not tag.alias:\n if ctx.author.guild_permissions.administrator or tag.author == ctx.author.id:\n try:\n await tag.delete()\n await ctx.send(\"Tag and all aliases deleted\")\n except:\n await ctx.send(\"Tag unsuccessfully deleted\")\n else:\n await ctx.send(\"No Tag with that name found\")", "def delete_tags(self, resource_ids, tags):\r\n if isinstance(tags, list):\r\n tags = {}.fromkeys(tags, None)\r\n params = {}\r\n self.build_list_params(params, resource_ids, 'ResourceId')\r\n self.build_tag_param_list(params, tags)\r\n return self.get_status('DeleteTags', params, verb='POST')", "def delete_tags(self, entry, tags, tag_template_name):\n persisted_tags = self.list_tags(entry.name)\n\n # Fetch GRPCIterator.\n persisted_tags = [tag for tag in persisted_tags]\n\n for persisted_tag in persisted_tags:\n logging.info('Processing Tag from Template: %s ...',\n persisted_tag.template)\n tag_to_delete = None\n\n if tag_template_name in persisted_tag.template:\n tag_to_delete = persisted_tag\n for tag in tags:\n if 
tag.template == persisted_tag.template and \\\n tag.column == persisted_tag.column:\n tag_to_delete = None\n break\n\n if tag_to_delete:\n self.delete_tag(tag_to_delete)\n logging.info('Tag deleted: %s', tag_to_delete.name)\n else:\n logging.info('Tag is up-to-date: %s', persisted_tag.name)", "def delete_tag(self, *,\n id: str,\n tag: str,\n tag_type: str = 'default',\n resource_type: ResourceType = ResourceType.Table) -> None:\n LOGGER.info(f'Delete tag {tag} for {id} with type {tag_type} and resource_type: {resource_type.name}')\n\n resource_table = f'{resource_type.name.lower()}_tag'\n resource_model = self._get_model_from_table_name(resource_table)\n if not resource_model:\n raise NotImplementedError(f'{resource_type.name} is not defined!')\n\n resource_key = f'{resource_type.name.lower()}_rk'\n resource_attr = getattr(resource_model, resource_key)\n tag_attr = getattr(resource_model, 'tag_rk')\n try:\n with self.client.create_session() as session:\n session.query(resource_model).filter(resource_attr == id, tag_attr == tag).delete()\n session.commit()\n except Exception as e:\n LOGGER.exception(f'Failed to delete tag {tag} for {id}')\n raise e", "def remove_tag(tag):\n check_call(['git', 'tag', '-d', tag])", "def delete(self, id):\n return self._post(\n request=ApiActions.DELETE.value,\n uri=ApiUri.TAGS.value,\n params={'id': id}\n )", "def remove_tag(self, tag):\n if tag in self._tag:\n self._tag.remove(tag)\n\n return self", "def delete_bucket_tagging(Bucket=None):\n pass", "def test_delete_tag(self):\r\n\r\n with app.test_client() as client:\r\n resp = client.post(f\"/tags/{self.tag.id}/delete\", follow_redirects=True)\r\n html = resp.get_data(as_text=True)\r\n\r\n self.assertEqual(resp.status_code, 200)\r\n self.assertNotIn(\"Marvel\", html)", "def tag_post_delete(sender, instance, **kwargs):\n instance.url.delete(False)", "def test_remove_defined_tag(self, test, object_storage):\n namespace_name, bucket_name = self._get_bucket_details(object_storage)\n session_factory = test.oci_session_factory()\n policy = test.load_policy(\n {\n \"name\": \"bucket-remove-tag\",\n \"resource\": \"oci.bucket\",\n \"filters\": [\n {\"type\": \"value\", \"key\": \"name\", \"value\": bucket_name},\n ],\n \"actions\": [\n {\n \"type\": \"remove-tag\",\n \"defined_tags\": [\"cloud-custodian-test.mark-for-resize\"],\n },\n ],\n },\n session_factory=session_factory,\n )\n policy.run()\n resource = self._fetch_bucket_validation_data(\n policy.resource_manager, namespace_name, bucket_name\n )\n test.assertEqual(resource[\"name\"], bucket_name)\n test.assertEqual(self.get_defined_tag_value(resource[\"defined_tags\"]), None)", "def delete_loadbalancer(self, context, lb):\n deployment_model = self._get_setting(\n lb.tenant_id, \"lbaas_settings\", \"deployment_model\"\n )\n hostnames = self._get_hostname(lb)\n if deployment_model in [\"PER_TENANT\", \"PER_SUBNET\"]:\n vapv = self._get_vapv(hostnames)\n if not vapv.tip_group.list():\n self._destroy_vapv(hostnames, lb)\n elif deployment_model == \"PER_TENANT\":\n # Delete subnet ports if no longer required\n if self.openstack_connector.subnet_in_use(lb) is False:\n self._detach_subnet_port(vapv, hostnames, lb)\n for hostname in hostnames:\n port_ids = self.openstack_connector.get_server_port_ids(\n hostname\n )\n self.openstack_connector.delete_ip_from_ports(\n lb.vip_address, port_ids\n )\n elif deployment_model == \"PER_LOADBALANCER\":\n self._destroy_vapv(hostnames, lb)", "def delete_object_tagging(Bucket=None, Key=None, VersionId=None):\n pass", "def 
delete(self, request, flavor_id):\n conn = get_sdk_connection(request)\n conn.load_balancer.delete_flavor(flavor_id,\n ignore_missing=True)", "def remove_tag(self, tag):\n if tag in self.tags:\n index = self.tags.index(tag)\n self.tags[index:index + 1] = []\n self.stop_times[index:index + 1] = []", "def remove_tag(convo_ID, tag_ID):\n # Make API request\n url = \"https://api2.frontapp.com/conversations/\" + convo_ID + \"/tags\"\n payload = json.dumps({\"tag_ids\": [tag_ID]})\n headers = {\"Authorization\": BEARER_TOKEN, \"Content-Type\": \"application/json\"}\n requests.request(\"DELETE\", url, headers=headers, data=payload)", "def ex_delete_tags(self, node, tags):\n if not tags:\n return\n\n params = { 'Action': 'DeleteTags',\n 'ResourceId.0': node.id }\n for i, key in enumerate(tags):\n params['Tag.%d.Key' % i] = key\n params['Tag.%d.Value' % i] = tags[key]\n\n self.connection.request(self.path,\n params=params.copy()).object", "def remove(self, keyword):\n tag = self._find(keyword)\n if tag is not None:\n self.meta.remove(tag)", "def test_remove_tag(self):\n fc = self.read_feature(region='Adriatic_Sea')\n\n fc.tag(tags=['Mediterranean_Basin', 'tag1'], remove=True)\n assert (fc.features[0]['properties']['tags'] == 'Adriatic_Sea')\n\n self.check_feature(fc.features[0])", "def remove_tag(self, key, value=None):\r\n if value:\r\n tags = {key : value}\r\n else:\r\n tags = [key]\r\n status = self.connection.delete_tags([self.id], tags)\r\n if key in self.tags:\r\n del self.tags[key]", "def _remove_definition(self, definition):\n global_callback._call_library_remove_definition(self, definition)\n definition._library = None", "def untag(self, tag):\n if isinstance(tag, six.integer_types):\n try:\n tag = Tag.objects.get(pk=tag, owner=self.owner)\n except Tag.DoesNotExist:\n return\n \n if isinstance(tag, six.string_types):\n try:\n tag = Tag.objects.get(slug=makeslug(tag), owner=self.owner)\n except Tag.DoesNotExist:\n return\n \n self.tags.remove(tag)", "def delete_tag_with_http_info(self, tag_id, **kwargs):\n\n all_params = ['tag_id']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_tag\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'tag_id' is set\n if ('tag_id' not in params) or (params['tag_id'] is None):\n raise ValueError(\"Missing the required parameter `tag_id` when calling `delete_tag`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'tag_id' in params:\n path_params['tag_id'] = params['tag_id']\n\n query_params = []\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n\n # Authentication setting\n auth_settings = ['X-Token']\n\n return self.api_client.call_api('/tags/{tag_id}', 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type=None,\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n 
collection_formats=collection_formats)", "def delete_tag_meta(self, tag_name: str) -> ProjectMeta:\n return self.delete_tag_metas([tag_name])", "def delete_taggit_taggeditems(apps, schema_editor):\n TaggitTaggedItem = apps.get_model('taggit', 'TaggedItem')\n TaggitTaggedItem.objects.all().delete()", "def remove_tag(self, tag):\n cp = self.copy()\n cp.tags.remove(tag)\n return cp", "def delete(self, filename):\n pass", "def delete(\n self,\n from_object: str,\n to_object: str,\n definition: Union[Definitions, int],\n **options,\n ):\n definition_id = definition if isinstance(definition, int) else definition.value\n\n return self._call(\n \"associations/delete\",\n method=\"PUT\",\n data={\n \"fromObjectId\": from_object,\n \"toObjectId\": to_object,\n \"category\": \"HUBSPOT_DEFINED\",\n \"definitionId\": definition_id,\n },\n **options,\n )", "def trigger_delete(cls, instance):\n es_client.delete(instance.blog.index_name(), 'blog_post_index', instance.id)", "def delete_remote_tag(tag, remote='origin', directory=None):\n execute_command('git push {0} :{1}'.format(remote, tag), shell=True,\n cwd=directory)", "def delete(self):\n self._instance.delete()\n self._instance = None\n self._data_defs = []", "def test_remove_tags(self):\n p = self.load_policy({\n 'name': 'test-azure-tag',\n 'resource': 'azure.resourcegroup',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'test_vm'}\n ],\n 'actions': [\n {'type': 'tag',\n 'tags': {'pre-existing-1': 'to-keep', 'pre-existing-2': 'to-keep',\n 'added-1': 'to-delete', 'added-2': 'to-delete'}},\n ],\n })\n p.run()\n\n # verify initial tag set\n s = Session()\n client = s.client('azure.mgmt.resource.ResourceManagementClient')\n rg = [rg for rg in client.resource_groups.list() if rg.name == 'test_vm'][0]\n self.assertEqual(rg.tags,\n {'pre-existing-1': 'to-keep', 'pre-existing-2': 'to-keep',\n 'added-1': 'to-delete', 'added-2': 'to-delete'})\n\n p = self.load_policy({\n 'name': 'test-azure-remove-tag',\n 'resource': 'azure.resourcegroup',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'test_vm'}\n ],\n 'actions': [\n {'type': 'untag',\n 'tags': ['added-1', 'added-2']}\n ],\n })\n p.run()\n\n # verify tags removed and pre-existing tags not removed\n rg = [rg for rg in client.resource_groups.list() if rg.name == 'test_vm'][0]\n self.assertEqual(rg.tags,\n {'pre-existing-1': 'to-keep', 'pre-existing-2': 'to-keep'})", "def delete(self, session, params=None, has_body=False):\n if not self.allow_delete:\n raise exceptions.MethodNotSupported(self, \"delete\")\n # delete tag do not need requires_id\n request = self._prepare_request(requires_id=False)\n\n endpoint_override = self.service.get_endpoint_override()\n response = session.delete(request.uri, endpoint_filter=self.service,\n endpoint_override=endpoint_override,\n headers={\"Accept\": \"\"},\n params=params)\n self._translate_response(response, has_body=has_body)\n return self", "def delete_template(self):\n try:\n os.remove(self.path)\n except Exception:\n pass", "def remove_tag(args):", "def remove(self, tag_name: str, category: ty.Optional[str] = None):\n tags = self.__holder.db_tags.filter(lambda t: t.name == tag_name)\n if category is not None:\n tags = tags.filter(category=category)\n\n tag = tags.first()\n if tag:\n self.__holder.db_tags.remove(tag)", "def subject_tag_delete(context, subject_id, value, session=None):\n _check_subject_id(subject_id)\n session = session or get_session()\n 
query = session.query(models.SubjectTag).filter_by(\n subject_id=subject_id).filter_by(\n value=value).filter_by(deleted=False)\n try:\n tag_ref = query.one()\n except sa_orm.exc.NoResultFound:\n raise exception.NotFound()\n\n tag_ref.delete(session=session)", "def delete(self, filename):\n raise NotImplementedError", "async def delete(message: discord.Message, tag: Annotate.Content):\n tag = tag_arg(tag)\n assert tag in time_cfg.data[\"countdown\"], \"Countdown with tag `{}` does not exist.\".format(tag)\n\n author_id = time_cfg.data[\"countdown\"][tag][\"author\"]\n assert message.author.id == author_id, \"You are not the author of this tag ({}).\".format(\n getattr(discord.utils.get(client.get_all_members(), id=author_id), \"name\", None) or \"~~Unknown~~\")\n\n del time_cfg.data[\"countdown\"][tag]\n time_cfg.save()\n await client.say(message, \"Countdown with tag `{}` removed.\".format(tag))", "def delTags(self):\r\n for tag in self.tags:\r\n self.canvasCirkt.delete(tag)\r\n self.canvasCirkt.update()", "def delete(self, filename, **kw):\n\n file_path = os.path.join(self.storage_path, filename)\n\n try:\n os.remove(file_path)\n except OSError:\n pass", "def delete_template(self, filename):\n if self.template_exists(filename):\n self.client.service.DeleteTemplate(filename=filename)\n else:\n raise LiveDocxError('Template \"%s\" not exists and it cannot be deleted' % filename)", "def delete_job(self, filename):\n job = Jobs.get(Jobs.filename == filename)\n job.delete_instance()", "def delete(self, api_path, *args, **kwargs):\n\n return self._do_operation(u'delete', api_path, *args, **kwargs)", "def delete(self):\r\n self.connection.delete_distribution(self.id, self.etag)", "def remove_tag(self, index):\n\n model_index = self.GetItemData(index)\n self.DeleteItem(model_index)\n del self._clientData[model_index]", "def delete_upload(arn=None):\n pass", "def delete(self, api_path, *args, **kwargs):\n\n\t\treturn self._do_operation(u'delete', api_path, *args, **kwargs)", "def delete_task_definitions(\n self,\n print_task_definition_id, # type: str\n if_match=None, # type: Optional[str]\n **kwargs # type: Any\n ):\n # type: (...) 
-> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.delete_task_definitions.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printTaskDefinition-id': self._serialize.url(\"print_task_definition_id\", print_task_definition_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n if if_match is not None:\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.delete(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def cli(env, dry_run):\n\n tag_manager = TagManager(env.client)\n empty_tags = tag_manager.get_unattached_tags()\n\n for tag in empty_tags:\n if dry_run:\n click.secho(f\"(Dry Run) Removing {tag.get('name')}\", fg='yellow')\n else:\n result = tag_manager.delete_tag(tag.get('name'))\n color = 'green' if result else 'red'\n click.secho(f\"Removing {tag.get('name')}\", fg=color)", "def delete_tag(delete_timestamps):\n\n ctx = dash.callback_context\n triggered_id, triggered_prop, triggered_value = utils.ctx_triggered_info(ctx)\n\n # When the button is initially added, it fires a callback.\n # We want to prevent this callback from making changes to the update signal.\n if triggered_value is None:\n raise PreventUpdate\n\n # Unfortunately, we have to convert the stringified dict back to a dict.\n # Dash doesn't provide us any other method to see which element triggered the callback.\n # This isn't very elegant, but I don't see any other way to proceed.\n id_dict = utils.string_to_dict(triggered_id)\n tag_idx = id_dict[\"index\"]\n state.delete_tag(tag_idx)\n\n return constants.OK_SIGNAL", "def delete_tag(self, tag_id, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.delete_tag_with_http_info(tag_id, **kwargs)\n else:\n (data) = self.delete_tag_with_http_info(tag_id, **kwargs)\n return data", "def DeleteTarget(self, target_instance_id):" ]
[ "0.7041559", "0.7017274", "0.68210644", "0.67219794", "0.6699245", "0.6619593", "0.6408356", "0.6379325", "0.63487124", "0.62727845", "0.62387455", "0.61683017", "0.60791034", "0.59661263", "0.596435", "0.592325", "0.58542705", "0.58466", "0.5823866", "0.58166766", "0.58107233", "0.57897544", "0.576099", "0.57526094", "0.574979", "0.57068133", "0.569857", "0.5677329", "0.5665213", "0.5593062", "0.5589436", "0.5589002", "0.5580494", "0.55399984", "0.5529753", "0.55147976", "0.5493157", "0.5488724", "0.54769963", "0.54438376", "0.54361546", "0.54344034", "0.54344034", "0.5417662", "0.5394705", "0.5389839", "0.5384105", "0.5370318", "0.5366653", "0.53600544", "0.5340248", "0.5317124", "0.53127", "0.53041697", "0.529698", "0.52824354", "0.5263676", "0.523833", "0.52380306", "0.5235197", "0.5221683", "0.52216357", "0.52095056", "0.5208641", "0.52072775", "0.52043337", "0.5185447", "0.51708114", "0.51688564", "0.5156987", "0.5141353", "0.51382387", "0.5126919", "0.51193166", "0.5117945", "0.51109266", "0.510702", "0.509983", "0.50919086", "0.5080482", "0.5034899", "0.50341237", "0.503311", "0.502328", "0.5022442", "0.50127184", "0.5009715", "0.5008821", "0.50007373", "0.49990344", "0.49877205", "0.4981233", "0.4973524", "0.49594325", "0.49580976", "0.49537653", "0.49525592", "0.49502936", "0.49150106", "0.49146023" ]
0.4964765
93
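Editor's note: the delete_tag document above follows the standard OCI Python SDK client pattern. A minimal usage sketch, assuming a valid ~/.oci/config profile; the namespace OCID and tag name below are illustrative placeholders, not values from the dataset.

import oci

# Load credentials from the default OCI config file (assumption: a valid profile exists).
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Hypothetical identifiers; substitute real values from your tenancy.
tag_namespace_id = "ocid1.tagnamespace.oc1..exampleuniqueID"
tag_name = "CostCenter"

# Triggers the asynchronous removal of the tag from all resources in the tenancy.
identity.delete_tag(tag_namespace_id, tag_name)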
Deletes the specified tag default.
def delete_tag_default(self, tag_default_id, **kwargs): resource_path = "/tagDefaults/{tagDefaultId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_request_id", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_tag_default got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagDefaultId": tag_default_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-request-id": kwargs.get("opc_request_id", missing), "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_default(self):\n if self.default_present:\n self.removeItem(0)\n self.default_present = False", "def delete_tag(tag):\n tag.destroy()", "def delete_tag(self,tag):\r\n\r\n # with shelf\r\n if self.using_shelf:\r\n del self.tag_dict[tag]", "def clear_default(self, name, group=None):\n opt_info = self._get_opt_info(name, group)\n opt_info.pop('default', None)", "def delete_tag(self, *tags: TagReference) -> None:\n return TagReference.delete(self, *tags)", "def delete(self, tag, params={}, **options):\n path = \"/tags/%s\" % (tag)\n return self.client.delete(path, params, **options)", "def untag():\n version = git.prompt_tag('Which tag to delete?')\n if not version:\n abort('No available version tag')\n git.delete_tag(version)", "def delete_tag(self, tag):\n return self.__datacatalog.delete_tag(name=tag.name)", "def delete_tag(self, session, tag):\n self._tag(session.delete, key=tag, delete=True, session=session)", "def delete(self):\n request = self.tags_service.delete(path=self._path)\n request.execute()", "def delete_tag(tag, directory=None):\n execute_command('git tag -d {0}'.format(tag), shell=True, cwd=directory)", "def delete(self, uuid):\n\n\t\treturn self._delete(\"/tag/%s\" % base.getid(uuid), \"tag\")", "async def delete(self, ctx: \"IceTeaContext\", *, otag: TagConverter):\n tag: models.Tag = otag\n if tag.alias:\n if ctx.author.guild_permissions.administrator or tag.author == ctx.author.id:\n try:\n await tag.delete()\n await ctx.send(\"aliases deleted\")\n except:\n await ctx.send(\"Alias unsuccessfully deleted\")\n elif not tag.alias:\n if ctx.author.guild_permissions.administrator or tag.author == ctx.author.id:\n try:\n await tag.delete()\n await ctx.send(\"Tag and all aliases deleted\")\n except:\n await ctx.send(\"Tag unsuccessfully deleted\")\n else:\n await ctx.send(\"No Tag with that name found\")", "def delete_tag_template(self, name):\n self.__datacatalog.delete_tag_template(name=name, force=True)\n logging.info('Tag Template deleted: %s', name)", "def delete_tag(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n\n db.session.delete(tag)\n db.session.commit()\n\n return redirect('/tags')", "def delete_tag(request):\n try:\n tags = request.POST.getlist('tag_id', 0)\n tag = Tag.objects.filter(pk__in=tags).delete()\n ActionLogger().log(request.user, \"deleted\", \"Knowledgebase Tag %s\" % tags)\n return format_ajax_response(True, \"Knoweldgebase tag deleted successfully.\")\n except Exception as ex:\n logger.error(\"Failed to delete_tag: %s\" % ex)\n return format_ajax_response(False, \"There was an error deleting the specified knowledgebase tag.\")", "def delete_tag(tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(\"/tags\")", "def delete_tag(tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(f\"/tags\")", "def delete_default_content(site):\n logger.info(u'Apagando conteúdo padrão do Plone')\n for item in DEFAULT_CONTENT:\n if hasattr(site, item):\n api.content.delete(site[item])\n logger.debug(u' {0} apagado'.format(item))", "def delete_tag(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(\"/tags\")", "def __delete__(self, instance):\n raise AttributeError(\"A Default Property cannot be deleted\")", "def delete_tags(configurationIds=None, tags=None):\n pass", "def _delete_tag_request():\n key = helpers.get('Tag.1.Key')\n resource_id = helpers.get('ResourceId.1')\n\n if 
resource_id in current_app.config['RESOURCE_TYPE_MAP']:\n resource_type = current_app.config['RESOURCE_TYPE_MAP'][resource_id]\n else:\n errors.invalid_request(\n str(resource_id) + \" not found in configuration\")\n\n args = {\n 'command': 'deleteTags',\n 'resourceids': resource_id,\n 'resourcetype': resource_type,\n 'tags[0].key': key\n }\n\n response = requester.make_request_async(args)\n\n return response", "def delete_tags(self, session):\n self._tag(session.delete, delete=True, session=session)", "def delete_default_vpc(session, vpc_id):\n try:\n session.delete_vpc(\n VpcId=vpc_id\n )\n except Exception as e:\n print('Exception: ' + str(e))\n else:\n print(\"Successfully deleted VPC\")", "def tag_post_delete(sender, instance, **kwargs):\n instance.url.delete(False)", "def delete_tag(user_id, tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(f'/users/{user_id}')", "def delete_tag_meta(self, tag_name: str) -> ProjectMeta:\n return self.delete_tag_metas([tag_name])", "def tags_delete(tag_id):\n\n tags = Tag.query.get_or_404(tag_id)\n\n db.session.delete(tags)\n db.session.commit()\n\n flash(f\"'{tags.name}' tag is deleted.\")\n\n return redirect(\"/tags\")", "def setdefault(self, key, default=None):\r\n return self.data.setdefault(ref(key, self._remove),default)", "def __delitem__(self, key):\n del self.default_dataset[key]", "def DeleteForTag(cls, tag):\n parent_key = cls._GetParentKeyFromTag(tag)\n frontend_job = cls.query(ancestor=parent_key).get(keys_only=True)\n if frontend_job:\n frontend_job.delete()", "def delete_tag(delete_timestamps):\n\n ctx = dash.callback_context\n triggered_id, triggered_prop, triggered_value = utils.ctx_triggered_info(ctx)\n\n # When the button is initially added, it fires a callback.\n # We want to prevent this callback from making changes to the update signal.\n if triggered_value is None:\n raise PreventUpdate\n\n # Unfortunately, we have to convert the stringified dict back to a dict.\n # Dash doesn't provide us any other method to see which element triggered the callback.\n # This isn't very elegant, but I don't see any other way to proceed.\n id_dict = utils.string_to_dict(triggered_id)\n tag_idx = id_dict[\"index\"]\n state.delete_tag(tag_idx)\n\n return constants.OK_SIGNAL", "def remove_tag(args):", "def delete(self, name):\n\n pass", "def delete():", "def setdefault(self, key, default=None):\r\n return self.data.setdefault(IdentityRef(key, self._remove),default)", "def Deleted(self, default=None):\n return self.data.get('metadata', {}).get('deleted', default)", "def _dpop(dictionary, key, default=None):\n try:\n ret = dictionary[key]\n del dictionary[key]\n except KeyError:\n ret = default\n\n return ret", "def remove_tag(self, key, value=None):\r\n if value:\r\n tags = {key : value}\r\n else:\r\n tags = [key]\r\n status = self.connection.delete_tags([self.id], tags)\r\n if key in self.tags:\r\n del self.tags[key]", "def delete_object_tagging(Bucket=None, Key=None, VersionId=None):\n pass", "def replace_defaults(d):\n\n # remove the defaults section\n defaults = d.pop('.defaults')\n\n # look for default tags and replace them\n for k, v in defaults.items():\n recursive_search_replace(d, '!' 
+ k + '!', v)", "def delete_bucket_tagging(Bucket=None):\n pass", "def delete_taggit_tags(apps, schema_editor):\n TaggitTag = apps.get_model('taggit', 'Tag')\n TaggitTag.objects.all().delete()", "def delete_entry(title):\n filename = f\"entries/{title}.md\"\n if default_storage.exists(filename):\n default_storage.delete(filename)", "def deletePreviewShape(LIST_SHAPE_NAME,activeStudy, previewShapeEntry,DEFAULT_SHAPE_NAME):\n for i in range(len(LIST_SHAPE_NAME)):\n if LIST_SHAPE_NAME[i] == DEFAULT_SHAPE_NAME:\n xalome.deleteShape(activeStudy, previewShapeEntry[i])\n previewShapeEntry[i] = None", "def remove_tag(self, dataset: \"Dataset\", tag: \"DatasetTag\"):\n raise NotImplementedError", "async def delete(message: discord.Message, tag: Annotate.Content):\n tag = tag_arg(tag)\n assert tag in time_cfg.data[\"countdown\"], \"Countdown with tag `{}` does not exist.\".format(tag)\n\n author_id = time_cfg.data[\"countdown\"][tag][\"author\"]\n assert message.author.id == author_id, \"You are not the author of this tag ({}).\".format(\n getattr(discord.utils.get(client.get_all_members(), id=author_id), \"name\", None) or \"~~Unknown~~\")\n\n del time_cfg.data[\"countdown\"][tag]\n time_cfg.save()\n await client.say(message, \"Countdown with tag `{}` removed.\".format(tag))", "def on_delete_clicked(self, obj):\n store, node = self.list.selection.get_selected()\n if not node:\n return\n name = cuni(self.list.model.get_value(node, 0))\n if name == _('default'): # the default style cannot be removed\n return\n self.sheetlist.delete_style_sheet(name)\n self.redraw()", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def auto_remove_style_from_disk_on_delete(sender, instance, **kwargs):\n if instance.default:\n #deleted style is the default style, reset the default style\n instance.record.setup_default_styles(instance.format)\n\n if instance.content:\n if os.path.isfile(instance.content.path):\n os.remove(instance.content.path)", "def DeleteCustomDefaults(self, *args, **kwargs):\n # type: (*Any, **Any) -> None\n payload = { \"Arg1\": self }\n for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]\n for item in kwargs.items(): payload[item[0]] = item[1]\n return self._execute('deleteCustomDefaults', payload=payload, response_object=None)", "def delete(self, value):\n pass", "async def slashtag_remove(self, ctx: commands.Context, *, tag: GuildTagConverter):\n await ctx.send(await tag.delete())", "def remove_tag(self, tag: str) -> None:\n tags = self.get_tag_index()\n tags.remove(tag)\n self.write_tag_index(list(set(tags)))", "def delete_tags(ResourceArn=None, TagKeys=None):\n pass", "def delete_tags(ResourceArn=None, TagKeys=None):\n pass", "def delete_command():\n global selected_tuple\n backend.delete(selected_tuple[0])", "def delete(self, name=None):\n raise NotImplementedError", "def delete_tag(filename, tag_name):\n storeapps = APP.config[\"storage\"]\n filename = filename.encode(\"utf-8\")\n\n try:\n application = list(nativeapps.io.ls(storeapps, r\".*\" + filename + \"$\"))[0]\n meta_path = os.path.join(os.path.dirname(application), \"metadata.json\")\n metadata = json.loads(nativeapps.io.readfile(meta_path))\n tags = metadata.get(\"tags\", [])\n if tag_name in tags:\n tags.remove(tag_name)\n metadata[\"tags\"] = tags\n nativeapps.io.writefile(meta_path, json.dumps(metadata))\n except IndexError:\n return \"Unknown application: %s\" % (application), 404\n\n return \"removed\", 200", "def delete_template(self):\n try:\n 
os.remove(self.path)\n except Exception:\n pass", "def test_remove_single_tag(self):\n p = self.load_policy({\n 'name': 'test-azure-remove-single-tag',\n 'resource': 'azure.vm',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'cctestvm'}\n ],\n 'actions': [\n {'type': 'tag',\n 'tag': 'tag1',\n 'value': 'to-delete'}\n ],\n })\n p.run()\n\n # verify the initial tag set\n s = Session()\n client = s.client('azure.mgmt.compute.ComputeManagementClient')\n vm = client.virtual_machines.get('test_vm', 'cctestvm')\n self.assertEqual(vm.tags, {'tag1': 'to-delete', 'testtag': 'testvalue'})\n\n p = self.load_policy({\n 'name': 'test-azure-tag',\n 'resource': 'azure.vm',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'cctestvm'}\n ],\n 'actions': [\n {'type': 'untag',\n 'tags': ['tag1']}\n ],\n })\n p.run()\n\n # verify that the a tag is deleted without modifying existing tags\n vm = client.virtual_machines.get('test_vm', 'cctestvm')\n self.assertEqual(vm.tags, {'testtag': 'testvalue'})", "def remove_tag(self, index):\n\n model_index = self.GetItemData(index)\n self.DeleteItem(model_index)\n del self._clientData[model_index]", "def tags_clear(self, item, tags):\n self._createTagAction(item, \"tags_clear\", tags)", "def delete_taggit_taggeditems(apps, schema_editor):\n TaggitTaggedItem = apps.get_model('taggit', 'TaggedItem')\n TaggitTaggedItem.objects.all().delete()", "def delete_version(self):\n pass", "def _DeleteAllFromDefaultNamespace(self):\n run_cmd = [\n 'delete', 'all', '--all', '-n', 'default'\n ]\n RunKubectlCommand(run_cmd)\n\n run_cmd = [\n 'delete', 'pvc', '--all', '-n', 'default'\n ]\n RunKubectlCommand(run_cmd)\n # There maybe a slight race if resources are cleaned up in the background\n # where deleting the cluster immediately prevents the PVCs from being\n # deleted.\n logging.info('Sleeping for %s seconds to give resources time to delete.',\n RESOURCE_DELETE_SLEEP_SECONDS)\n time.sleep(RESOURCE_DELETE_SLEEP_SECONDS)", "def delete(self):\n\t\tself.canvas.delete('node_'+self.identifier)\n\t\tself.canvas.tag_unbind('node_'+self.identifier,\"<Any>\")", "def get_default_tag(self, tags):\n tags_counter = Counter()\n for tag in tags:\n tags_counter[tag] += 1\n\n if len(tags_counter) == 2 and list(tags_counter.values())[0] == list(tags_counter.values())[1]:\n return ut.find_positive_tag(tags_counter.keys())\n\n return tags_counter.most_common(1)[0][0]", "def update_tag_default(self, tag_default_id, update_tag_default_details, **kwargs):\n resource_path = \"/tagDefaults/{tagDefaultId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\",\n \"opc_request_id\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_tag_default got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagDefaultId\": tag_default_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing),\n \"opc-request-id\": kwargs.get(\"opc_request_id\", missing)\n 
}\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_tag_default_details,\n response_type=\"TagDefault\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_tag_default_details,\n response_type=\"TagDefault\")", "def delete(self, *args, **kwargs):\n pass", "def delete(self, *args, **kwargs):\n pass", "def delete(self):\n os.system(\"rm \"+self._name)", "def delete(self, section, name):\n section = self._getSettingName(section)\n self._config.remove_option(section, name)\n self.save()", "def delete(self, name):\n path = self.directory / f\"{name}.yaml\"\n if path.exists():\n path.unlink()", "def reset(self, default=False):\n self.local.clear()\n if default:\n for group in self.Group:\n self.system.remove(group.value)", "def delete_default_vpc(role, accounts_list):\n print(\"\\nDeleting all default VPCs\")\n print(\"==============================\")\n\n completed_accounts = []\n bar = IncrementalBar('Processing Accounts:', max = len(accounts_list))\n regions = aws_list_helper.organizations_list_service_regions('ec2')\n for account in accounts_list:\n print(f'\\nRemoving Default VPCs from: {account[\"Name\"]}')\n\n sts_credentials = aws_creds_helper.generate_cross_account_credentials(account['AccountId'], role)\n\n for region in regions:\n try:\n regional_session = aws_creds_helper.generate_cross_account_session(service='ec2',\\\n credentials=sts_credentials, region=region)\n\n awsVPCs = regional_session.describe_vpcs(Filters = [{'Name': 'isDefault','Values': ['true',]},])\n igw = regional_session.describe_internet_gateways()\n subnets = regional_session.describe_subnets()\n\n # iterate over VPC to find Default VPC\n if awsVPCs['Vpcs']:\n # iterate over VPC to find Default VPC\n for vpcAttributes in awsVPCs['Vpcs']:\n # check if VPC is Default VPC\n if vpcAttributes['IsDefault']:\n ec2 = boto3.resource('ec2',\n region_name=region,\n aws_access_key_id=sts_credentials[0],\n aws_secret_access_key=sts_credentials[1],\n aws_session_token=sts_credentials[2])\n\n # Checks if there are any subnets\n if subnets['Subnets']:\n # Delete subnets in Default VPC\n for subnet in subnets['Subnets']:\n # check if Subnets belong to Default VPC\n if subnet['VpcId'] == vpcAttributes['VpcId']:\n subnetId = ec2.Subnet(subnet['SubnetId'])\n print(\"\\tAbout to delete Subnet Id: \" + str(subnetId))\n subnetId.delete()\n\n # detach and delete IGW (Internet Gateway) in Default VPC\n # check if IGW exists\n for _ in igw['InternetGateways']:\n try:\n IGWid = igw['InternetGateways'][0]['InternetGatewayId']\n IGWisAttached = igw['InternetGateways'][0]['Attachments'][0]['State']\n IGWattachedToVPC = igw['InternetGateways'][0]['Attachments'][0]['VpcId']\n except Exception as e:\n IGWisAttached = ''\n IGWattachedToVPC = ''\n # check if IGW atatched to VPC and VPC is Default VPC\n if IGWisAttached == 'available' and IGWattachedToVPC == vpcAttributes['VpcId']:\n internet_gateway = ec2.InternetGateway(\n igw['InternetGateways'][0]['InternetGatewayId'])\n print(\"\\tAbout to detach and delete internet_gateway =\" + 
str(internet_gateway))\n internet_gateway.detach_from_vpc(VpcId=vpcAttributes['VpcId'])\n internet_gateway.delete()\n\n # delete Default VPC\n regional_session.delete_vpc(VpcId=vpcAttributes['VpcId'], DryRun=False)\n print(\"\\t The detault VPC ID = \" + str(vpcAttributes['VpcId']) + \" is being deleted in \" + str(\n region))\n except botocore.exceptions.ClientError as err:\n if err.response['Error']['Code'] == 'AuthFailure':\n print(f'\\tThe region: {region} is not enabled')\n except Exception as e:\n print(f'\\nException: {str(e)}\\n')", "def delete(self, filename):\n pass", "def delete(self):\n ...", "def __delete__(self, instance):\r\n self._set_instance_tag_cache(instance, '')", "def delete_tag(self, *,\n id: str,\n tag: str,\n tag_type: str = 'default',\n resource_type: ResourceType = ResourceType.Table) -> None:\n LOGGER.info(f'Delete tag {tag} for {id} with type {tag_type} and resource_type: {resource_type.name}')\n\n resource_table = f'{resource_type.name.lower()}_tag'\n resource_model = self._get_model_from_table_name(resource_table)\n if not resource_model:\n raise NotImplementedError(f'{resource_type.name} is not defined!')\n\n resource_key = f'{resource_type.name.lower()}_rk'\n resource_attr = getattr(resource_model, resource_key)\n tag_attr = getattr(resource_model, 'tag_rk')\n try:\n with self.client.create_session() as session:\n session.query(resource_model).filter(resource_attr == id, tag_attr == tag).delete()\n session.commit()\n except Exception as e:\n LOGGER.exception(f'Failed to delete tag {tag} for {id}')\n raise e", "def delete(self, obj=None):\n pass", "def remove(self, tag_name: str, category: ty.Optional[str] = None):\n tags = self.__holder.db_tags.filter(lambda t: t.name == tag_name)\n if category is not None:\n tags = tags.filter(category=category)\n\n tag = tags.first()\n if tag:\n self.__holder.db_tags.remove(tag)", "def delete_suggester(DomainName=None, SuggesterName=None):\n pass", "def _default(self, section, option, default):\r\n if not self.has_section(section):\r\n self.add_section(section)\r\n if not self.has_option(section, option):\r\n self.set(section, option, default)\r\n self.save()", "def delete_a_tag(self, contact_tag_id):\n return self.client._delete(f\"/contactTags/{str(contact_tag_id)}\")", "def delete_image(Name=None):\n pass", "def pop_default(self, option: str) -> Optional[Any]:\n index = self._get_index(option)\n assert index is not None\n value = self._options[index]\n del self._options[index]\n default = value[1] if isinstance(value, tuple) else None\n return default", "def delete_thumbnail(self, thumbnail_name):", "def delete(self, path):\n head = path[:-1]\n key = str(path[-1])\n if len(head):\n pth = self._path[:]\n pth.extend(stringify_keys(head))\n del get_nested_default(self._request.session, pth)[key]\n else:\n del get_nested_default(self._request.session, self._path)[key]\n self.save()", "def delete(self, keyword, key):", "def delete_model(ModelName=None):\n pass", "def delete_upload(arn=None):\n pass", "def setdefault(self, k, d=None): # real signature unknown; restored from __doc__\n pass", "def pop(name, default=None):", "def ex_delete_tags(self, node, tags):\n if not tags:\n return\n\n params = { 'Action': 'DeleteTags',\n 'ResourceId.0': node.id }\n for i, key in enumerate(tags):\n params['Tag.%d.Key' % i] = key\n params['Tag.%d.Value' % i] = tags[key]\n\n self.connection.request(self.path,\n params=params.copy()).object", "def key_delete(self, name=None):\n raise NotImplementedError" ]
[ "0.6711261", "0.6618538", "0.62383074", "0.61715645", "0.5984171", "0.5975831", "0.5970485", "0.585169", "0.58504015", "0.5791107", "0.57085663", "0.55927753", "0.55772495", "0.5562356", "0.5456258", "0.5449171", "0.5441209", "0.5433733", "0.5428394", "0.5425312", "0.54240113", "0.54216796", "0.53936046", "0.53900605", "0.5386305", "0.5369236", "0.5367158", "0.5355882", "0.53492415", "0.52725637", "0.5267662", "0.5248241", "0.52434707", "0.52351636", "0.5225814", "0.522494", "0.5219001", "0.517788", "0.51635224", "0.51474017", "0.5144058", "0.513862", "0.51377225", "0.5122908", "0.5116375", "0.51163024", "0.5113162", "0.5110412", "0.510612", "0.5105899", "0.5105899", "0.5105899", "0.5105899", "0.5105784", "0.5096034", "0.5092228", "0.50840265", "0.5076368", "0.5069566", "0.5069566", "0.50676894", "0.50519896", "0.50504786", "0.50469685", "0.50460804", "0.50350827", "0.5031433", "0.50249434", "0.501735", "0.5008552", "0.50023437", "0.5002051", "0.5001378", "0.499772", "0.499772", "0.4982873", "0.4968259", "0.49604812", "0.49596763", "0.4951929", "0.49500299", "0.49413204", "0.4930333", "0.49283376", "0.4926817", "0.49128914", "0.49090225", "0.48923942", "0.48921335", "0.48769018", "0.48737416", "0.48735747", "0.48684922", "0.48589846", "0.48570213", "0.48496005", "0.48475674", "0.48457217", "0.4839151", "0.48373127" ]
0.6901326
0
Deletes the specified tag namespace. Only an empty tag namespace can be deleted. To delete a tag namespace, first delete all its tag definitions.
def delete_tag_namespace(self, tag_namespace_id, **kwargs): resource_path = "/tagNamespaces/{tagNamespaceId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match", "opc_request_id" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_tag_namespace got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagNamespaceId": tag_namespace_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing), "opc-request-id": kwargs.get("opc_request_id", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_namespace(self, namespace):\n return self.core_client.delete_namespace(namespace)", "def delete_namespace_content(context, namespace_id, session):\n count = 0\n query = (session.query(models.MetadefTag).filter_by(\n namespace_id=namespace_id))\n count = query.delete(synchronize_session='fetch')\n return count", "def remove_namespace(self, doc, namespace):\r\n ns = u'{%s}' % namespace\r\n nsl = len(ns)\r\n for elem in doc.getiterator():\r\n if elem.tag.startswith(ns):\r\n elem.tag = elem.tag[nsl:]\r\n else:\r\n pass", "def delete_namespace(node, namespace):\n cmd_timeout = 5\n cmd = f\"ip netns delete {namespace}\"\n (ret_code, _, delete_errmsg) = \\\n exec_cmd(node, cmd, timeout=cmd_timeout, sudo=True)\n if ret_code != 0:\n cmd = f\"ip netns list {namespace}\"\n (stdout, _) = \\\n exec_cmd_no_error(node, cmd, timeout=cmd_timeout, sudo=True)\n if stdout == namespace:\n raise RuntimeError(f\"Could not delete namespace \"\n f\"({namespace}): {delete_errmsg}\")\n try:\n Namespaces.__namespaces.remove(namespace)\n except ValueError:\n pass", "def clean_up_namespaces(node, namespace=None):\n if namespace is not None:\n Namespaces.delete_namespace(node, namespace)\n return\n\n namespace_copy = deepcopy(Namespaces.__namespaces)\n for namespace_name in namespace_copy:\n Namespaces.delete_namespace(node, namespace_name)", "def delete_tags(self, session):\n self._tag(session.delete, delete=True, session=session)", "def namespace_delete(cursor, namespace_id):\n haystack = (namespace_id,)\n query = \"DELETE FROM namespaces WHERE _id=?\"\n try:\n cursor.execute(query, haystack)\n except Exception as e:\n on_error(e)\n else:\n cursor.connection.commit()\n raise Return((True, None))", "def delete_namespace_content(context, namespace_id, session):\n\n count = 0\n query = session.query(models.MetadefNamespaceResourceType).filter_by(\n namespace_id=namespace_id)\n count = query.delete(synchronize_session='fetch')\n return count", "def removeNamespace(self, *args):\n return _libsbml.XMLToken_removeNamespace(self, *args)", "def delete_tag(tag):\n tag.destroy()", "def remove_namespace(doc, namespace=u\"{http://www.EcoInvent.org/EcoSpold02}\"):\n ns = u'{}'.format(namespace)\n nsl = len(ns)\n for elem in doc.getiterator():\n if elem.tag.startswith(ns):\n elem.tag = elem.tag[nsl:]", "def delete_tag(self, session, tag):\n self._tag(session.delete, key=tag, delete=True, session=session)", "def _delete_ns(self, ns_name):\n\n if self.allowed_ns and ns_name not in self.allowed_ns:\n raise PermissionError(\"Namespace %s not in allowed list\" % ns_name)\n\n ns_dir = self._get_ns_dir(ns_name)\n if ns_dir:\n if sum(1 for app_dirs in ns_dir.iterdir()) == 0:\n shutil.rmtree(ns_dir)", "def remove(self, *args):\n return _libsbml.XMLNamespaces_remove(self, *args)", "def removeNamespace(self, *args):\n return _libsbml.SBMLNamespaces_removeNamespace(self, *args)", "def delete_namespaced_namespace(self, body, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.delete_namespaced_namespace_with_http_info(body, name, **kwargs)\n else:\n (data) = self.delete_namespaced_namespace_with_http_info(body, name, **kwargs)\n return data", "def delete(self):\n request = self.tags_service.delete(path=self._path)\n request.execute()", "def destroyNamespace(self, remoteNamespace):\r\n for namespace in self._namespaces:\r\n if namespace.destroyExternal(remoteNamespace):\r\n break", "def delete_tag(self, tag):\n return self.__datacatalog.delete_tag(name=tag.name)", "def 
delete_namespaces(self, ifaces=None):\n if not ifaces:\n ifaces = list(self.namespaces.keys())\n for iface in ifaces:\n self._lhost.ui.delete_namespace(self.namespaces[iface])\n del self.namespaces[iface]", "def clearNamespaces(self):\n return _libsbml.XMLToken_clearNamespaces(self)", "def post_namespace_delete(self, resource_id, resource_dict):\n pass", "def removePackageNamespace(self, *args):\n return _libsbml.SBMLNamespaces_removePackageNamespace(self, *args)", "async def remove_namespace(self, namespace: str) -> Any:\n if namespace == self.get_namespace(): # if it belongs to this app's namespace\n raise ValueError(\"Cannot remove namespace with the same name as operating namespace\")\n\n return await self.AD.state.remove_namespace(namespace)", "def delete(self, tag, params={}, **options):\n path = \"/tags/%s\" % (tag)\n return self.client.delete(path, params, **options)", "def delete_taggit_tags(apps, schema_editor):\n TaggitTag = apps.get_model('taggit', 'Tag')\n TaggitTag.objects.all().delete()", "def clear(self):\n return _libsbml.XMLNamespaces_clear(self)", "def delete_namespaced_net_namespace(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_namespaced_net_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `delete_namespaced_net_namespace`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `delete_namespaced_net_namespace`\")\n\n resource_path = '/oapi/v1/netnamespaces/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def destroy(self):\r\n for interface in self._interfaces.copy():\r\n interface.destroy()\r\n\r\n assert len(self._interfaces) == 0\r\n\r\n self._endpoint.unregisterNamespace(self)\r\n self._endpoint = None\r\n\r\n super(Namespace, self).destroy()", "def delete_namespaced_image_stream_tag(self, namespace, name, **kwargs):\n\n all_params = ['namespace', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword 
argument '%s'\"\n \" to method delete_namespaced_image_stream_tag\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `delete_namespaced_image_stream_tag`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `delete_namespaced_image_stream_tag`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/imagestreamtags/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def test_delete_net_namespace(self):\n pass", "def delete_tag(self, *tags: TagReference) -> None:\n return TagReference.delete(self, *tags)", "def deletecollection_namespaced_namespace(self, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.deletecollection_namespaced_namespace_with_http_info(**kwargs)\n else:\n (data) = self.deletecollection_namespaced_namespace_with_http_info(**kwargs)\n return data", "def delete_tag(filename, tag_name):\n storeapps = APP.config[\"storage\"]\n filename = filename.encode(\"utf-8\")\n\n try:\n application = list(nativeapps.io.ls(storeapps, r\".*\" + filename + \"$\"))[0]\n meta_path = os.path.join(os.path.dirname(application), \"metadata.json\")\n metadata = json.loads(nativeapps.io.readfile(meta_path))\n tags = metadata.get(\"tags\", [])\n if tag_name in tags:\n tags.remove(tag_name)\n metadata[\"tags\"] = tags\n nativeapps.io.writefile(meta_path, json.dumps(metadata))\n except IndexError:\n return \"Unknown application: %s\" % (application), 404\n\n return \"removed\", 200", "def delete_tag(self,tag):\r\n\r\n # with shelf\r\n if self.using_shelf:\r\n del self.tag_dict[tag]", "def delete_namespaces_with_label(self, label_selector):\n return [\n self.core_client.delete_namespace(namespace.metadata.name)\n for namespace in self.get_namespaces(label_selector=label_selector).items\n ]", "def delete_tags(configurationIds=None, tags=None):\n pass", "def untag():\n version = git.prompt_tag('Which tag to delete?')\n if not version:\n abort('No available version tag')\n git.delete_tag(version)", "def delete_namespace(key):\n\n\tif key not in REGISTRY:\n\t\traise KeyError(\"key:{0} does not exist\".format(key))\n\n\tREGISTRY.pop(key, None)", "def deletecollection_namespaced_net_namespace(self, **kwargs):\n\n all_params = ['pretty', 
'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method deletecollection_namespaced_net_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/netnamespaces'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def delete_tag_template(self, name):\n self.__datacatalog.delete_tag_template(name=name, force=True)\n logging.info('Tag Template deleted: %s', name)", "def delete_tag(tag, directory=None):\n execute_command('git tag -d {0}'.format(tag), shell=True, cwd=directory)", "def delete_pod_in_a_namespace(self, namespace, name, label_selector=\"\"):\n api_response = None\n try:\n api_response = self.ocp_pods.delete(namespace=namespace, name=name, label_selector=label_selector)\n except ApiException as e:\n logger.error(\"Exception deleting pod: %s\\n\", e)\n return api_response", "def delete_collection_namespaced_service(namespace, label_selector=None):\n if label_selector is None:\n label_selector = labels_to_string({CLEANUP_LABEL: cleanup_policy})\n responses = []\n svcs = self.core_api.list_namespaced_service(namespace, label_selector=label_selector)\n for svc in svcs.items:\n responses.append(self.core_api.delete_namespaced_service(svc.metadata.name, namespace))\n return responses", "def delete_all_spaces(self, region_tag: str):\n self.s3_client = self.init_space_connection(region_tag)\n for space in self.get_space_list():\n self.delete_space(space)\n del self.s3_client", "def delete_tag(self, tag_namespace_id, tag_name, **kwargs):\n resource_path = \"/tagNamespaces/{tagNamespaceId}/tags/{tagName}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_tag got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagNamespaceId\": tag_namespace_id,\n \"tagName\": tag_name\n }\n\n path_params = {k: v for (k, v) in 
six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def delete_bucket_tagging(Bucket=None):\n pass", "def delete_namespace(self, name, status_wait=True):\n self.v1_client.delete_namespace(name=name,\n body=k8s_config.V1DeleteOptions())\n\n if status_wait:\n with atomic.ActionTimer(self,\n \"kubernetes.wait_namespace_termination\"):\n wait_for_not_found(name,\n read_method=self.get_namespace)", "def freeSBMLNamespaces(*args):\n return _libsbml.SBMLNamespaces_freeSBMLNamespaces(*args)", "def remove(self, document_id, namespace, timestamp):\n index, doc_type = self._index_and_mapping(namespace)\n\n action = {\n '_op_type': 'delete',\n '_index': index,\n '_type': doc_type,\n '_id': u(document_id)\n }\n\n meta_action = {\n '_op_type': 'delete',\n '_index': self.meta_index_name,\n '_type': self.meta_type,\n '_id': u(document_id)\n }\n\n self.index(action, meta_action)", "def tag_remove(self, remote_path, corpus_id, tag, storage_id=None):\n client, remote_path = self._get_storage(remote_path, storage_id=storage_id)\n return client.tag_remove(corpus_id, tag)", "def clean_up_pods_in_namespaces(self, namespaces, cleanup_policy):\n return self.delete_resource_with_cleanup_policy(namespaces, cleanup_policy,\n self.core_api.delete_collection_namespaced_pod, \"pod\")", "def delete_namespaced_namespace_with_http_info(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_namespaced_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `delete_namespaced_namespace`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `delete_namespaced_namespace`\")\n\n resource_path = '/api/v1/namespaces/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = 
self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n return self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'))", "def destroy(self):\r\n self._namespace.unregisterNode(self)\r\n self._namespace = None\r\n\r\n super(Node, self).destroy()", "def DeleteForTag(cls, tag):\n parent_key = cls._GetParentKeyFromTag(tag)\n frontend_job = cls.query(ancestor=parent_key).get(keys_only=True)\n if frontend_job:\n frontend_job.delete()", "def _remove_namespace(self, xml):\n response = self.re_ns_decl.sub('', xml) # Remove namespace declarations\n response = self.re_ns_open.sub('<', response) # Remove namespaces in opening tags\n response = self.re_ns_close.sub('/', response) # Remove namespaces in closing tags\n return response", "def _remove_namespace(self, xml):\n response = self.re_ns_decl.sub('', xml) # Remove namespace declarations\n response = self.re_ns_open.sub('<', response) # Remove namespaces in opening tags\n response = self.re_ns_close.sub('/', response) # Remove namespaces in closing tags\n return response", "def wait_for_namespace_deletion(self, namespace_name, timeout=None,\n number_of_events=None):\n timeout = timeout or WAIT_TIMEOUT\n number_of_events = number_of_events or 10\n watcher = Watch()\n for event in watcher.stream(self.client_core.list_namespace,\n timeout_seconds=timeout):\n logger.debug(f\"Event: {event['type']} Namespace: \"\n f\"{event['object'].metadata.name}\")\n number_of_events -= 1\n if not number_of_events:\n watcher.stop()\n elif namespace_name in event[\n 'object'].metadata.name and \"DELETED\" in event['type']:\n watcher.stop()\n return True\n logger.error(f\"Timeout! Failed to Delete Namespace {namespace_name}\")\n raise K8sResourceTimeout(\n message=f\"Timeout! 
Failed to Delete Namespace {namespace_name}\")", "def delete_all(self):\n for tag in self._segments['APP1'].get_tag_list():\n try:\n self.__delattr__(tag)\n except AttributeError:\n warnings.warn(\"could not delete tag \" + tag, RuntimeWarning)", "def delete_client_by_namespace(\n client_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteClientByNamespace.create(\n client_id=client_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def delete_action(self, namespace_id):\n logger.info(f\"deleting action on namespace: {namespace_id}\")\n res = requests.delete(\n f\"{self.cf_namespaces_url}/{namespace_id}/actions/{self.action_name}?blocking=true\",\n headers=self.get_headers(),\n )\n if res.status_code != 200:\n logger.warn(res.text)\n return json.loads(res.text)", "def removePkgNamespace(self, *args):\n return _libsbml.SBMLNamespaces_removePkgNamespace(self, *args)", "def delete_tags(ResourceArn=None, TagKeys=None):\n pass", "def delete_tags(ResourceArn=None, TagKeys=None):\n pass", "def remove(self, camera, namespace='*'):\n with self.lock:\n if namespace == '*':\n for cur_namespace in self.camera_namespaces:\n self.camera_namespaces[cur_namespace].remove(camera)\n else:\n self.camera_namespaces[namespace].remove(camera)", "def DeleteClusterTags(self, tags, dry_run=False, reason=None):\n query = [(\"tag\", t) for t in tags]\n _AppendDryRunIf(query, dry_run)\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_DELETE, \"/%s/tags\" % GANETI_RAPI_VERSION,\n query, None)", "async def slashtag_remove(self, ctx: commands.Context, *, tag: GuildTagConverter):\n await ctx.send(await tag.delete())", "def _delete_tag_request():\n key = helpers.get('Tag.1.Key')\n resource_id = helpers.get('ResourceId.1')\n\n if resource_id in current_app.config['RESOURCE_TYPE_MAP']:\n resource_type = current_app.config['RESOURCE_TYPE_MAP'][resource_id]\n else:\n errors.invalid_request(\n str(resource_id) + \" not found in configuration\")\n\n args = {\n 'command': 'deleteTags',\n 'resourceids': resource_id,\n 'resourcetype': resource_type,\n 'tags[0].key': key\n }\n\n response = requester.make_request_async(args)\n\n return response", "def delete_namespaced_template(self, body, namespace, name, **kwargs):\n\n all_params = ['body', 'namespace', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_namespaced_template\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `delete_namespaced_template`\")\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `delete_namespaced_template`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `delete_namespaced_template`\")\n\n resource_path = 
'/oapi/v1/namespaces/{namespace}/templates/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def deletecollection_namespaced_namespace_with_http_info(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method deletecollection_namespaced_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n\n resource_path = '/api/v1/namespaces'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n return self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'))", "def delete_taggit_taggeditems(apps, schema_editor):\n TaggitTaggedItem = apps.get_model('taggit', 'TaggedItem')\n TaggitTaggedItem.objects.all().delete()", "def pre_namespace_delete(self, resource_id):\n pass", "def test_remove_defined_tag(self, test, object_storage):\n namespace_name, bucket_name = self._get_bucket_details(object_storage)\n session_factory = test.oci_session_factory()\n policy = test.load_policy(\n {\n 
\"name\": \"bucket-remove-tag\",\n \"resource\": \"oci.bucket\",\n \"filters\": [\n {\"type\": \"value\", \"key\": \"name\", \"value\": bucket_name},\n ],\n \"actions\": [\n {\n \"type\": \"remove-tag\",\n \"defined_tags\": [\"cloud-custodian-test.mark-for-resize\"],\n },\n ],\n },\n session_factory=session_factory,\n )\n policy.run()\n resource = self._fetch_bucket_validation_data(\n policy.resource_manager, namespace_name, bucket_name\n )\n test.assertEqual(resource[\"name\"], bucket_name)\n test.assertEqual(self.get_defined_tag_value(resource[\"defined_tags\"]), None)", "def delete(self, name, namespace=''):\n acl.enforce('workbooks:delete', context.ctx())\n\n LOG.debug(\"Delete workbook [name=%s, namespace=%s]\", name, namespace)\n\n rest_utils.rest_retry_on_db_error(db_api.delete_workbook)(\n name,\n namespace\n )", "def delete_space(self, space_name: str):\n self.delete_space_content(space_name)\n try:\n self.s3_client.delete_bucket(Bucket=space_name)\n except boto3.exceptions.S3UploadFailedError:\n pass", "def delete_a_tag(self, contact_tag_id):\n return self.client._delete(f\"/contactTags/{str(contact_tag_id)}\")", "def delete_namespaced_service(self, namespace, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.delete_namespaced_service_with_http_info(namespace, name, **kwargs)\n else:\n (data) = self.delete_namespaced_service_with_http_info(namespace, name, **kwargs)\n return data", "def remove(self, *args):\n return _libsbml.ListWrapperSBMLNamespaces_remove(self, *args)", "def delete_tag(request):\n try:\n tags = request.POST.getlist('tag_id', 0)\n tag = Tag.objects.filter(pk__in=tags).delete()\n ActionLogger().log(request.user, \"deleted\", \"Knowledgebase Tag %s\" % tags)\n return format_ajax_response(True, \"Knoweldgebase tag deleted successfully.\")\n except Exception as ex:\n logger.error(\"Failed to delete_tag: %s\" % ex)\n return format_ajax_response(False, \"There was an error deleting the specified knowledgebase tag.\")", "def delete_empty_namespaces(progress_controller=None):\n if progress_controller is None:\n progress_controller = ProgressControllerBase()\n\n # only allow namespaces with DAG objects in it and no child namespaces\n empty_namespaces = []\n\n all_namespaces = (\n mc.namespaceInfo(recurse=True, listOnlyNamespaces=True, internal=False) or []\n )\n\n progress_controller.maximum = len(all_namespaces)\n for ns in all_namespaces:\n if ns not in [\"UI\", \"shared\"]:\n child_namespaces = mc.namespaceInfo(ns, listNamespace=True)\n if (\n not child_namespaces\n and len(\n mc.ls(\n mc.namespaceInfo(ns, listOnlyDependencyNodes=True),\n dag=True,\n mat=True,\n )\n )\n == 0\n ):\n empty_namespaces.append(ns)\n progress_controller.increment()\n\n for ns in empty_namespaces:\n mc.namespace(rm=ns, mnr=True)\n progress_controller.complete()", "def delete(context, namespace_name, resource_type_name, session):\n\n namespace = namespace_api.get(\n context, namespace_name, session)\n\n resource_type = resource_type_api.get(\n context, resource_type_name, session)\n\n deleted = _delete(context, namespace_name, resource_type_name,\n namespace['id'], resource_type['id'], session)\n\n return _to_model_dict(resource_type_name, deleted)", "def delete(self, uuid):\n\n\t\treturn self._delete(\"/tag/%s\" % base.getid(uuid), \"tag\")", "async def delete_client_by_namespace_async(\n client_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n 
namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteClientByNamespace.create(\n client_id=client_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def SBMLNamespaces_freeSBMLNamespaces(*args):\n return _libsbml.SBMLNamespaces_freeSBMLNamespaces(*args)", "def removeNode(self, nTag):\r\n try:\r\n self._nodes.pop(nTag).destroy()\r\n except KeyError:\r\n raise InvalidRequest('Can not remove a non existent node '\r\n \"'{0}' from the container.\".format(nTag))", "def test_delete_namespaced_template(self):\n pass", "def delete_namespaced_pod_template(self, body, namespace, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.delete_namespaced_pod_template_with_http_info(body, namespace, name, **kwargs)\n else:\n (data) = self.delete_namespaced_pod_template_with_http_info(body, namespace, name, **kwargs)\n return data", "async def _clear(self, namespace=None):\n if namespace:\n for key in list(SimpleMemoryBackend._cache):\n if key.startswith(namespace):\n self.__delete(key)\n else:\n SimpleMemoryBackend._cache = {}\n SimpleMemoryBackend._handlers = {}\n return True", "def delete_tag(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n\n db.session.delete(tag)\n db.session.commit()\n\n return redirect('/tags')", "async def removetags(self, ctx, tag=None):\r\n\t\tTag = self.settings.ServerConfig(ctx.guild.id, 'Tags')\r\n\t\tif not tag in Tag:\r\n\t\t\treturn await ctx.send('Can\\'t find Tag: '.format(tag))\t\r\n\r\n\t\tdel Tag[tag]\r\n\t\tself.settings.ServerConfig(ctx.guild.id, 'Tags', Tag)\r\n\r\n\t\tawait ctx.send('Removed Tag: '.format(tag))", "def delete_namespaced_pod(self, body, namespace, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.delete_namespaced_pod_with_http_info(body, namespace, name, **kwargs)\n else:\n (data) = self.delete_namespaced_pod_with_http_info(body, namespace, name, **kwargs)\n return data", "def delete(self, force_delete=False):\r\n return self.connection.delete_auto_scaling_group(self.name, force_delete)", "def delete_tag(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(\"/tags\")", "def clean_up_service_accounts_in_namespaces_with_cleanup_policy(self, namespaces, cleanup_policy):\n return self.delete_resource_with_cleanup_policy(namespaces, cleanup_policy,\n self.core_api.delete_collection_namespaced_service_account,\n \"SA\")", "def test_delete_namespaced_policy_binding(self):\n pass", "def remove_from_xml_tree(the_config, tag):\n for elt in the_config.iterdescendants():\n if tag in elt.tag:\n elt.getparent().remove(elt)", "def proxy_delete_namespaced_pod_0(self, namespace, name, path, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.proxy_delete_namespaced_pod_0_with_http_info(namespace, name, path, **kwargs)\n else:\n (data) = self.proxy_delete_namespaced_pod_0_with_http_info(namespace, name, path, **kwargs)\n return data", "def delete_tag(tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n db.session.delete(tag)\n db.session.commit()\n\n return redirect(\"/tags\")", "def clean_up_services_in_namespaces(self, namespaces, cleanup_policy):\n def delete_collection_namespaced_service(namespace, label_selector=None):\n \"\"\"\n Wrapper Method for deleting namespaced services by label,\n as delete_namespaced_service does not provide this 
feature.\n \"\"\"\n if label_selector is None:\n label_selector = labels_to_string({CLEANUP_LABEL: cleanup_policy})\n responses = []\n svcs = self.core_api.list_namespaced_service(namespace, label_selector=label_selector)\n for svc in svcs.items:\n responses.append(self.core_api.delete_namespaced_service(svc.metadata.name, namespace))\n return responses\n\n return self.delete_resource_with_cleanup_policy(namespaces, cleanup_policy,\n delete_collection_namespaced_service, \"svc\")", "def destroyContainer(tag): #@NoSelf" ]
[ "0.6721054", "0.66566586", "0.62986535", "0.62826926", "0.6097214", "0.6018456", "0.6014207", "0.600658", "0.59086114", "0.5848785", "0.58461416", "0.57414937", "0.5721018", "0.5693057", "0.56371254", "0.55568355", "0.55374837", "0.5530103", "0.5464297", "0.5459409", "0.54590714", "0.54328877", "0.54091394", "0.53905284", "0.5387461", "0.5351305", "0.53393364", "0.5261566", "0.5242784", "0.5226691", "0.5209003", "0.51849014", "0.51214474", "0.51016635", "0.50876933", "0.50872046", "0.50782377", "0.5037703", "0.5029942", "0.5017186", "0.50161535", "0.5009668", "0.4986238", "0.4959133", "0.49575382", "0.49486384", "0.4906451", "0.48708397", "0.4867449", "0.48518148", "0.4847969", "0.48207286", "0.4818701", "0.48144084", "0.48124456", "0.4809913", "0.4809913", "0.4807759", "0.47989655", "0.47839972", "0.47810876", "0.47786087", "0.4770643", "0.4770643", "0.4767072", "0.47593826", "0.4758149", "0.4756469", "0.47454727", "0.4740309", "0.47338602", "0.47282508", "0.47266695", "0.4724949", "0.47144535", "0.47129574", "0.47077072", "0.47052813", "0.46971872", "0.4684224", "0.46767622", "0.46742", "0.46553463", "0.46507755", "0.46481803", "0.4636385", "0.46189922", "0.46145213", "0.46051344", "0.46009746", "0.45956194", "0.4591032", "0.4590341", "0.4588139", "0.45870078", "0.4581294", "0.45770502", "0.45661965", "0.45570004", "0.45372486" ]
0.63382393
2
Deletes the specified user. The user must not be in any groups.
def delete_user(self, user_id, **kwargs): resource_path = "/users/{userId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_user got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_user(self, user):\n self.delete(user)", "def delete_user(self, user):\n # noinspection PyUnresolvedReferences\n self.delete(user)", "def delete_user(self, user):\n self.execute(TABELLE['id_users'][\"delete\"], user[\"id\"])", "def delete_user(self, user):\n name = utils.get_name(user)\n self._user_manager.delete(name)", "def delete_user(self, user_id):\n return self._delete('/users/{0}'.format(user_id))", "def delete_user(self, user_name):\n user = self.get_user(user_name)\n return self.client.delete_resource(user.get('href'))", "def delete(self, user_id):\n return delete_user(user_id)", "def delete_user(self, user_id):\n if self.database is None:\n raise Exception(\"No database.\")\n if user_id is None or len(user_id) == 0:\n raise Exception(\"Bad parameter.\")\n return self.database.delete_user(user_id)", "def delete_user(self, user_id):\n\n # ask the model to delete the user\n um = User(self.settings)\n status = um.delete(user_id)\n\n # return\n return status", "def delete_user(request, user):\n\n if models.Group.created_by(user).count() > 0:\n raise UserDeletionError('Cannot delete user who is a group creator.')\n\n user.groups = []\n\n query = _all_user_annotations_query(request, user)\n annotations = es_helpers.scan(client=request.es.conn, query={'query': query})\n for annotation in annotations:\n storage.delete_annotation(request, annotation['_id'])\n\n request.db.delete(user)", "def delete(self, user_id):\r\n return delete_user(request, user_id)", "def delete_user():", "def delete(user_id):\n assert isinstance(user_id, ObjectId)\n\n User.objects(id=user_id).delete()", "def delete_user(self, user_name):\r\n params = {'UserName' : user_name}\r\n return self.get_response('DeleteUser', params)", "def delete_user(self, user):\n try:\n with dbm.open(self.dbm_path, 'c', 0o600) as db:\n del db[user.name]\n except KeyError as k:\n pass", "def delete_user(user_id):\n\n user = User.query.get(user_id)\n db.session.delete(user)\n db.session.commit()\n return", "def delete_user():\n #TODO user delete\n pass", "def delete_user(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload=user, request_type=self.REQUEST_DELETE, version=\"v1\")", "def delete_user(self, user):\n\n if self.sql_read_only:\n return False\n\n if not self.check_prereqs():\n return False\n\n if not self.has_user(user):\n return False\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_delete_user_query,{'username_field':self.sql_username_field,'username':user})\n self.log.debug(\"sqlflexibleauthstore: delete_user: %s\" % (query,))\n cursor.execute(query)\n\n db.commit()\n del_user_attribute(self.env,username=user)\n return True", "def delete_user(self):\n User.user_list.remove(self)", "def delete_user(self):\n User.user_list.remove(self)", "def delete_user(self):\n User.user_list.remove(self)", "def delete(user_id: int):\n usr = get_by_id(user_id)\n if not usr:\n raise UserNotFound\n\n db.session.delete(usr)\n db.session.commit()", "def delete_user(self):\n\n User.user_list.remove(self)", "def _delete_user(self, user):\n if User.delete_user(user):\n self.session.output({'deleted': 'user {} and their related accounts'.format(user)})\n return True\n else:\n self.session.output({'invalid_user': 'please enter valid user ID!\\n'}, '[ Fail to delete user ]')\n return False", "def delete(self, user: 'UserCondensed'):\n self._delete(entity=user)", "def delete(self, 
user_id):\n user = User.query.get(user_id)\n \n if user is None:\n return abort(422, message=\"User does not exist\")\n \n # check if the user is an admin and is the only one\n admins = User.query.filter_by(admin=True).all()\n if user.id == get_jwt_identity() and len(admins) == 1:\n return abort(422, message=\"User is the only admin, there must be at least one admin in the system\")\n \n user.delete()\n \n return { 'message': \"User '{}' has been deleted\".format(user.id) }", "def deleteUser(user):\n delete_user(user)\n return redirect(url_for('login'))", "def delete(self):\n\n user_id = get_jwt_identity()\n user = user_crud.get(user_id)\n if not user:\n abort(404, message=\"User not Found\")\n all_tokens = auth_crud.get_user_tokens(user_id)\n tokens = [token.to_dict() for token in all_tokens]\n for token in tokens:\n auth_crud.revoke_token(token['id'], user_id)\n user = user_crud.remove(user_id)\n\n return {'msg': 'User Removed'}", "def delete(self, user_id):\n user = User.query.get(user_id)\n\n if user is None:\n return mk_response(\"User does not exist\", 422)\n\n # check if the user is an admin and is the only one\n admins = User.query.filter_by(admin=True).all()\n if user.id == get_jwt_identity() and len(admins) == 1:\n return mk_response(\"User is the only admin, there must \" +\n \"be at least one admin in the system\", 422)\n\n user.delete()\n\n return mk_response(\"User '{}' has been deleted\".format(user.id))", "def delete_user(user_id):\n usr = storage.get(User, user_id)\n if usr:\n usr.delete(), storage.save()\n return {}\n else:\n abort(404)", "def delete_user(self, _id):\n return self.make_request(\"DELETE\", \"users/\"+_id, {})", "def delete_user(user_id):\n netAdminToolDB = app.config['DATABASE']\n user = netAdminToolDB.get_user(user_id)\n\n if user == None:\n return jsonify({'error': 'User_id not found'}), 404\n\n netAdminToolDB.delete_user(user_id)\n return jsonify({'result': True})", "def user_delete(user_id):\n user = storage.get('User', user_id)\n if user is None:\n abort(404)\n user.delete()\n storage.save()\n return jsonify({}), 200", "def delete_user(user_id):\n user = storage.get(User, user_id)\n if user is None:\n abort(404)\n storage.delete(user)\n storage.save()\n return jsonify({}), 200", "def delete_user_account(connection,user):\r\n with connection:\r\n connection.execute(DELETE_SPECIFIC_USER,(user,))", "def delete_user(self, username):\n Log.info(\"Start to delete user.\")\n if self.check_if_user_exists(username):\n self.click((\"xpath\", self.user_checkbox_xpath.format(username)))\n self.click(self.user_del_btn)\n self.click(self.dialog_del_btn)\n self.wait_unit_el_present(self.user_table)\n Log.info(\"Use is deleted.\")\n else:\n Log.info(\"User \" + username + \" is not in the user list, not delete.\")", "def delete_user(user_id):\n user = User.query.get_or_404(user_id)\n db.session.delete(user)\n db.session.commit()\n\n return redirect(\"/users\")", "async def delete_user(user_id):\n \n user = User.select().where(User.id == user_id).first()\n\n if not user:\n return HTTPException(404, 'User not found')\n else:\n user.delete_instance()\n\n return f\"User {user.username} deleted successfully\"", "def remove(self, user):\r\n url = '{0}/{1}'.format(self.get_url(), user)\r\n\r\n return http.Request('DELETE', url), parsers.parse_empty", "def delete_user(user_id=None):\n obj = storage.get('User', user_id)\n if obj is None:\n abort(404)\n else:\n storage.delete(obj)\n storage.save()\n return jsonify({}), 200", "def del_user(self, username):\n pass", "def 
delete_user(user_id):\n user = User.query.get_or_404(user_id)\n db.session.delete(user)\n db.session.commit()\n\n return redirect('/')", "def user_id_delete(user_id):\n user = storage.get(\"User\", user_id)\n\n if user is None:\n abort(404)\n user.delete()\n del user\n return make_response(jsonify({}), 200)", "def delete_user(user_id=None):\n\n user = storage.get(\"User\", user_id)\n if user is None:\n abort(404)\n else:\n storage.delete(user)\n storage.save()\n return jsonify({}), 200", "def delete_user(user_id):\n\n user = User.query.get_or_404(user_id)\n db.session.delete(user)\n db.session.commit()\n\n return redirect(\"/users\")", "def delete(self, user):\n q = \"DELETE FROM profiles WHERE user=?\"\n try:\n self._query(q, (user,), fetch='none')\n except Exception as e:\n raise e", "def user_delete(user_id=None):\n obj = storage.get(\"User\", user_id)\n if obj is None:\n abort(404)\n storage.delete(obj)\n storage.save()\n return jsonify({}), 200", "def delete_user(user_id):\n temp = models.storage.get('User', user_id)\n if temp is None:\n abort(404)\n temp.delete()\n models.storage.save()\n return jsonify({})", "def delete_user_process(user_id):\n\n db_user = User.query.get_or_404(user_id)\n\n db.session.delete(db_user)\n db.session.commit()\n\n return redirect(\"/users\")", "def delete_user():\n del globalopts.appdata[request.user]\n del globalopts.users[request.user]\n return \"\", 200", "def delete_user(user_id):\n user_obj = storage.get(\"User\", user_id)\n if user_obj:\n storage.delete(user_obj)\n storage.save()\n return jsonify({}), 200\n else:\n abort(404)", "def delete_user(id):\n pass", "def delete(user_id):\n # Get the user requested\n user = User.query.filter(User.user_id == user_id).one_or_none()\n\n if user is not None:\n db.session.delete(user)\n db.session.commit()\n return (\n \"User {user_id} deleted\".format(user_id=user_id), 200\n )\n\n else:\n abort(\n 404,\n \"Person not found for Id: {user_id}\".format(user_id=user_id),\n )", "def delete_proj_user(self, user_id):\n conn = pyone.OneServer(\n self.auth_url,\n session=\"{0}:{1}\".format(self.username, self.password)\n )\n try:\n user = conn.user.info(user_id)\n group = user.get_GROUPS().ID[0]\n # delete group\n conn.group.delete(group)\n # delete user\n return conn.user.delete(user.get_ID())\n except pyone.OneNoExistsException as e:\n logger.exception(\"Failed. User trying to delete, doesn't exist: \", user_id)\n except Exception as e:\n logger.exception(\"Failed. 
User trying to delete, group doesn't exist: \", user_id)", "def delete_user(UserName=None, AuthenticationType=None):\n pass", "def delete_user():\r\n raise NotImplementedError()", "def delete_user(self, user_id):\n sql = 'update account_user set is_deleted = 1 where id = %s'\n with connection.cursor() as cursor:\n cursor.execute(sql, [user_id])\n row = cursor.fetchone()\n\n return row", "def delete_user(self) -> 'outputs.ActingUserResponse':\n return pulumi.get(self, \"delete_user\")", "def delete(self, user_id):\n res = self._user.delete_user(user_id)\n\n if res:\n return {\n \"status\": 200,\n \"data\": [{\n \"id\": res[\"id\"],\n \"message\": \"user record has been deleted\"\n }]\n }, 200\n else:\n return {\n \"status\": 404,\n \"error\": \"Not found for id {}\".format(user_id)\n }, 404", "def delete_user(id):\n user_repo = UserRepository(db)\n base_repo = BaseRepository(db, User)\n u = base_repo.get_by_id(id)\n if not u:\n click.echo(\"User with specified id does not exists.\")\n return ERROR_USER_DOES_NOT_EXIST\n user_repo.delete_user(u)\n click.echo(\"User with id \" + str(id) + \" has been deleted.\")", "def delete_user(cls, user_email):\n\n User.query.filter_by(email=user_email).delete()\n\n db.session.commit()\n\n print \"Successfully deleted user with the email: %s!\" % user_email", "def delete_user(self) -> None:\n table_dictionary = {\n 'Apple': {\n 'table': 'AppleReceipts',\n 'user_id': 'User_id'\n },\n 'ESL': {\n 'table': 'ESLReceipts',\n 'user_id': 'User_id'\n },\n 'Transactions': {\n 'table': 'Transactions',\n 'user_id': 'User_id'\n },\n 'Users': {\n 'table': 'Users',\n 'user_id': 'id'\n },\n }\n\n # delete the current user's information from the db.\n for key in table_dictionary:\n query = f\"\"\"\n DELETE\n FROM {table_dictionary[key]['table']}\n WHERE {table_dictionary[key]['user_id']}=?;\n \"\"\"\n self.db.commit(query, values=(self.id,))\n\n # perform a sign out\n self.sign_out()\n\n log(f\"User:{self.id} has deleted their account.\")", "def delete(self, user_id):\n\n user = User.objects.get_or_404(public_id=user_id)\n return user.delete()", "def delete(self, user_id):\n\n try:\n self.get(user_id)\n url = \"{0}/users/{1}\".format(self.base_url, user_id)\n url = self._add_token_to_url(url)\n self.session.headers.update({\"Content-Type\": \"application/x-www-form-urlencoded\"})\n self.logger.debug(\"Deleting user with ID: <{0}>\".format(user_id))\n response = self.session.delete(url)\n self.logger.debug(\"Received response code {0} with reason {1}\"\n .format(response.status_code, response.reason))\n if response.status_code == 200:\n self.logger.debug(\"User successfully deleted\")\n else:\n raise InvalidResponseCodeException(\"Response code invalid, the expected response code is {0}, \"\n \"the actual response code is {1}\".format(200, response.status_code))\n return None\n except UserNotFoundException as err:\n self.logger.debug(\"User not found, error {0}\".format(err))", "def deleteUser(self,name):\n raise BorkedDeleteUser", "def delete_user(self):\n db_acces = DbMethods()\n username = self.result_table.currentItem().text()\n response = db_acces.delete_user(username)\n\n if response == True:\n self.populate_table()\n else:\n message = Message(\n self.language[\"error\"], self.language[\"inf_error\"])\n warning_message = message.create_iw_message(\n self.language[\"ok\"], \"warning\")\n warning_message.exec()", "def delete_user(self, instance, name):\n return instance.delete_user(name)", "def delete_user(self, userId):\n\n try:\n query = \"delete from user where 
userId = {}\".format(userId)\n print(query)\n cur = self.con.cursor()\n cur.execute(query)\n self.con.commit()\n\n logger.info(\"Deleted\")\n except Exception as e:\n logger.error(\"Error occured at data deletion..\", e)", "def test_080_user_delete(self):\n\n testflow.step(RMV_USR_MSG, TEST_GROUP_DELETE)\n assert USER_CLI.run('delete', TEST_USER_DELETE)[0]", "def del_user(request):\r\n mdict = request.matchdict\r\n\r\n # Submit a username.\r\n del_username = mdict.get('username', None)\r\n\r\n if del_username is None:\r\n LOG.error('No username to remove.')\r\n request.response.status_int = 400\r\n return _api_response(request, {\r\n 'error': 'Bad Request: No username to remove.',\r\n })\r\n\r\n u = UserMgr.get(username=del_username)\r\n\r\n if not u:\r\n LOG.error('Username not found.')\r\n request.response.status_int = 404\r\n return _api_response(request, {\r\n 'error': 'User not found.',\r\n })\r\n\r\n try:\r\n # First delete all the tag references for this user's bookmarks.\r\n res = DBSession.query(Bmark.bid).filter(Bmark.username == u.username)\r\n bids = [b[0] for b in res]\r\n\r\n qry = bmarks_tags.delete(bmarks_tags.c.bmark_id.in_(bids))\r\n qry.execute()\r\n\r\n # Delete all of the bmarks for this year.\r\n Bmark.query.filter(Bmark.username == u.username).delete()\r\n DBSession.delete(u)\r\n return _api_response(request, {\r\n 'success': True,\r\n 'message': 'Removed user: ' + del_username\r\n })\r\n except Exception, exc:\r\n # There might be cascade issues or something that causes us to fail in\r\n # removing.\r\n LOG.error(exc)\r\n request.response.status_int = 500\r\n return _api_response(request, {\r\n 'error': 'Bad Request: ' + str(exc)\r\n })", "def delete_user(user_id):\n current_user = get_jwt_identity()\n\n if not current_user:\n print('uri=/login error=\"Missing user\"', flush=True)\n return jsonify(message=\"Missing user\"), 400\n\n if not Administrator.is_administrator(current_user):\n print('non-admin user error', flush=True)\n return jsonify(message=\"You are not allowed to delete other users\"), 403\n\n if user_id == current_user:\n return jsonify(message=\"You are not allowed to delete yourself\"), 403\n\n try:\n User.delete(user_id)\n return jsonify(message=\"Delete succeeded\"), 200\n\n except Exception as e:\n print(e, flush=True)\n return jsonify(message='{}'.format(e)), 501", "def delete_user(payload, user_id):\n user = User.query.get(user_id)\n # exception for non existing id\n if user is None:\n abort(404)\n # set error status\n error = False\n # delete the user\n try:\n user.delete()\n except Exception:\n user.rollback()\n error = True\n print(sys.exc_info())\n finally:\n user.close_session()\n\n if error:\n abort(422)\n\n return jsonify({\n 'success': True,\n 'deleted': user_id\n })", "def delete_group_user(self, group_id, user_id):\n resp, body = self.delete('groups/%s/users/%s' % (group_id, user_id))\n self.expected_success(204, resp.status)\n return rest_client.ResponseBody(resp, body)", "def delete_group_user(self, group_id, user_id):\n resp, body = self.delete('groups/%s/users/%s' % (group_id, user_id))\n self.expected_success(204, resp.status)\n return service_client.ResponseBody(resp, body)", "def delete_user(request):\n user_id = request.POST.get('user_id')\n User.objects.filter(id=user_id).delete()\n response = {'status': 1, 'status_message': 'Success'}\n return HttpResponse(json.dumps(response))", "def deleteUser(self):\r\n #Find name and ID column\r\n userData = self.getCurrentUserData()\r\n\r\n #Prompt for confirmation\r\n deleteChoice = 
QMessageBox.question(self.view, 'Confirm user deletion', \r\n 'Are you sure you want to delete user ' \r\n + userData['Name'] + \" with ID \" + userData['User_ID'] + \r\n \" from database permanently?\", \r\n QMessageBox.Yes | QMessageBox.No)\r\n \r\n if (deleteChoice == QMessageBox.Yes):\r\n DBController().deleteUser(userData['User_ID'] )\r\n self.updateUserTable() #Re-fill table\r", "def delete(self, id):\n\t\ttry:\n\t\t\tuser_service.delete(id)\n\t\texcept AssertionError as e:\n\t\t\tuser_space.abort(400, e.args[0], status = \"Could not delete user\", statusCode = \"400\")\n\t\texcept Exception as e:\n\t\t\tuser_space.abort(500, e.args[0], status = \"Could not delete user\", statusCode = \"500\")", "def delete_user(id):\n user = Users.query.filter_by(id=id).first()\n user.delete()\n if not user:\n return send_msg(404, 'Not Found')\n return send_msg(204, \"No data\")", "def delete_user(self, user_id):\n if user_id is None:\n self.log_error(MongoDatabase.delete_user.__name__ + \"Unexpected empty object: user_id\")\n return False\n\n try:\n user_id_obj = ObjectId(user_id)\n user = self.users_collection.delete_one({\"_id\": user_id_obj})\n if user is not None:\n return True\n except:\n traceback.print_exc(file=sys.stdout)\n self.log_error(sys.exc_info()[0])\n return False", "def deleteUser(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def delete_user(id_user: int):\n mycursor.execute(f\"\"\"DELETE FROM User\n WHERE id_user = {id_user}\"\"\")\n mydb.commit()\n return f\"L'utilisateur {id_user} a été supprimé\"", "def delete_user(self):\n raise NotImplementedError(\"Function not yet implemented contact package creator\")", "def delete_user(self,userid, cursor):\n sql=\"DELETE FROM users WHERE userid = %s\"\n cursor.execute(sql,(userid))", "def delete(self):\n data = UserRegister.parser.parse_args()\n user = UserModel.find_by_username(data['username'])\n\n if user:\n user.delete_from_db()\n else :\n return {'message': 'User not found!'} , 204\n\n return {'message': 'User deleted'},202", "def DeleteUser(self, row):\n try:\n self.gd_client.DeleteUser(row['user_name'])\n row['status'] = 'success'\n except gdata.apps.service.AppsForYourDomainException, e:\n row['status'] = (\n 'fail gdata error code: %s %s' %\n (e.error_code, ERROR_DICT[str(e.error_code)]))\n except KeyError:\n print 'error - user_name is a required header'\n sys.exit()", "async def del_user(conn: LDAPConnection, user: dict, mailman: Client) -> None:\n await conn.delete(user[\"dn\"])\n uid = user[\"attributes\"][\"uid\"][0]\n rmtree(user[\"attributes\"][\"homeDirectory\"][0])\n rmtree(f\"/webtree/{uid[:1]}/{uid}\")\n mailing_list = mailman.get_list(\"announce-redbrick\")\n mailing_list.unsubscribe(f\"{uid}@redbrick.dcu.ie\")", "def delete_user(BrokerId=None, Username=None):\n pass", "def delete_user_by_xng_id(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/xngId/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['xngId'])\n return self.__create_request(payload=user, request_type=self.REQUEST_DELETE, version=\"v1\")", "def del_user_id(user_id):\r\n obj = storage.get(User, user_id)\r\n if obj is None:\r\n abort(404)\r\n obj.delete()\r\n storage.save()\r\n return jsonify({}), 200", "def delete(self, new_data, user_id):\n print(new_data)\n request_id = get_jwt_identity()\n user = user_crud.get(request_id)\n if not user.is_superuser:\n abort(401,\n message=\"You do not have 
permission to view this endpoint\")\n all_tokens = auth_crud.get_user_tokens(user_id)\n tokens = [token.to_dict() for token in all_tokens]\n for token in tokens:\n auth_crud.revoke_token(token['id'], user_id)\n user = user_crud.remove(user_id)\n\n return {'msg': 'User Removed'}", "def delete_user():\n\n request_data = request.get_json(silent=True)\n\n if not request_data:\n logger.warning('Delete_user failed - data missing')\n result = {'message': 'Deletion failed - data missing'}\n return jsonify(result), 400\n\n password = request_data.get('password')\n\n if password is None:\n logger.error('Delete_user failed - no password provided')\n result = {'message': 'Deletion failed - no password provided'}\n return jsonify(result), 400\n\n if not current_user.check_password(password):\n logger.error('Delete_user failed - wrong password provided')\n result = {'message': 'Deletion failed - password incorrect'}\n return jsonify(result), 401\n\n # TODO: make sure to not delete the dashboard if it is shared between users\n for dash in dashboard.dashboards_of_user(current_user.id):\n try:\n dashboard.remove_from_repository(dash)\n\n periodic_tasks.remove_task(('dashboard', dash.id, 'historic_fetching'))\n periodic_tasks.remove_task(('dashboard', dash.id, 'fetching'))\n periodic_tasks.remove_task(('dashboard', dash.id, 'pinging'))\n except KeyError:\n logger.warning(f'Dashboard {dash} from user {current_user} has already been removed.')\n\n try:\n user.remove_from_repository(current_user.id)\n except KeyError:\n result = {'message': 'User not found in database.'}\n logger.warning(f'Delete_user failed - {current_user} was not found in the database.')\n return jsonify(result), 500\n\n logger.info(f'{current_user} deleted themselves successfully.')\n\n result = {'message': f'User {current_user} successfully deleted themselves.'}\n return jsonify(result), 200", "def remove_user(user_id):\n user = Users.query.get(user_id)\n if user_id in [0, 1]:\n return 'Removal of default User #%s (%s) is forbidden.' % (user_id, user.login), 'warning'\n db_session.delete(user)\n db_session.commit()\n return 'User #%s (%s) has been deleted.' 
% (user_id, user.login), 'success'", "def delete_user_by_id(user_id):\n return woo_request_helper().delete_details(wc_endpoint='customers/{}'.format(user_id))", "def remove(self, user_id):\n pass", "def delete(self):\n return self._router_request(\n self._make_request_data(\n 'deleteUserCommand',\n data=dict(\n uid=self.parent,\n id=self.id\n )\n )\n )", "def delete_by(self, user):\n if user.is_superuser or user is self.added_by:\n self.delete()", "def view_remove_user(self, user, username):\r\n user.realm._checker.removeUser(username)", "def delete_user():\n user_id = validate_id(request.args.get(\"id\"))\n config = config_service.get_one({\"user\": str(user_id)})\n config_service.delete(str(config[\"_id\"]))\n if user_service.delete_user(user_id) != user_id:\n response = {\n \"status\": False,\n \"message\": f\"No se pudo eliminar el usuario: {str(user_id)}\",\n }\n resp = make_response(jsonify(response), 404)\n else:\n response = {\n \"status\": True,\n \"message\": f\"Se elimino corretamente el usuario: {str(user_id)}\",\n }\n resp = make_response(jsonify(response), 200)\n resp.headers[\"Content-Type\"] = \"application/json\"\n return resp", "def orm_delete_user(session: Session, user: User):\n try:\n session.delete(user) # Delete the user\n session.commit() # Commit the change\n LOGGER.success(f\"Deleted user: {user}\")\n except IntegrityError as e:\n LOGGER.error(e.orig)\n raise e.orig\n except SQLAlchemyError as e:\n LOGGER.error(f\"Unexpected error when deleting user: {e}\")\n raise e", "def remove_user(user_data: UserAuthenticationRequest,\n session=Depends(transaction)):\n if not is_valid_email(user_data.email):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST,\n detail=\"Email is invalid.\")\n\n if not delete_user(session, user_data.email, user_data.password):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST,\n detail=\"User is not exists.\")" ]
[ "0.89206994", "0.8815847", "0.8651233", "0.86132264", "0.8323322", "0.83127373", "0.8241263", "0.82311684", "0.82144606", "0.8204657", "0.8158758", "0.80740803", "0.80130297", "0.80094916", "0.79904824", "0.79593945", "0.79579115", "0.79529476", "0.7919857", "0.79124516", "0.79124516", "0.79124516", "0.7908725", "0.7898205", "0.77768534", "0.77625763", "0.77585536", "0.77435416", "0.77245814", "0.77245754", "0.77020144", "0.7676846", "0.76763195", "0.7663246", "0.76608735", "0.765905", "0.76534164", "0.76467323", "0.76330525", "0.7627029", "0.7625628", "0.762467", "0.76186025", "0.7616481", "0.76160425", "0.76121604", "0.7586356", "0.75629914", "0.75344205", "0.7531823", "0.75287306", "0.7516393", "0.7493262", "0.7490823", "0.7462811", "0.7461686", "0.74543077", "0.74505174", "0.7447261", "0.74411213", "0.74341977", "0.74320257", "0.7402907", "0.7400934", "0.7383685", "0.73764914", "0.7371625", "0.7365682", "0.7349441", "0.7339252", "0.73356706", "0.73098475", "0.7307134", "0.7296806", "0.7288112", "0.7255378", "0.7241226", "0.7235872", "0.7230217", "0.7227753", "0.7225023", "0.72216624", "0.7220878", "0.7220715", "0.7214778", "0.71934265", "0.7162708", "0.7161822", "0.7156456", "0.7149762", "0.7146989", "0.7136801", "0.7129621", "0.7101766", "0.70949835", "0.70915633", "0.70833254", "0.7078584", "0.7064123", "0.7054987", "0.7054233" ]
0.0
-1
Generate seed for the MFA TOTP device.
def generate_totp_seed(self, user_id, mfa_totp_device_id, **kwargs): resource_path = "/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}/actions/generateSeed" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "generate_totp_seed got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "mfaTotpDeviceId": mfa_totp_device_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="MfaTotpDevice") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="MfaTotpDevice")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createOTP():\n\t code = []\n\t for i in range(6):\n\t\t code.append(random.randint(0,9))\n\t return \"\".join(str(code) for c in code)", "def seed():", "def passwordGen() :\n\treturn __randomString(12)", "def genSeed():\n\tseed_length = int(''.join(random.SystemRandom().choice(string.digits) for _ in range(0, 3)))\n\tseed = os.urandom(seed_length)\n\thashing_algorithm = hashlib.shake_128()\n\thashing_algorithm.update(seed)\n\t# 2200 bytes from SHAKE-128 function is enough data to get 1024 coefficients\n\t# smaller than 5q, from Alkim, Ducas, Pöppelmann, Schwabe section 7:\n\tseed_hash = hashing_algorithm.digest(100)\n\treturn seed, seed_hash", "def gensalt():\n return hexlify(os.urandom(24)).decode()", "def local_seed(self) -> str:\n assert self.definition.settings.sp_root_dir\n seed_file = self.definition.settings.sp_root_dir.joinpath(\"seed.txt\")\n if not seed_file.exists():\n seed = str(encode_hex(bytes(random.randint(0, 255) for _ in range(20))))\n seed_file.write_text(seed)\n else:\n seed = seed_file.read_text().strip()\n return seed", "def tracking_generation_seed():\n return 112", "def generate_token():\n return uuid4()", "def seed():\n pass", "def seed():\n pass", "def generate_seed(self):\n int_info = np.iinfo(np.int64)\n \n return self.rng.randint(int_info.max)", "def tubeid():\n return binascii.hexlify(os.urandom(12))", "def create_challenge():\n\treturn os.urandom(12)", "def generate_seed():\n global seed\n seed = []\n\n for char_id in range(0, len(printable)):\n while True:\n char_sequence = [printable[randint(0, len(printable)-1)], printable[randint(0, len(printable)-1)]]\n if char_sequence not in seed:\n break\n seed.append(char_sequence)", "def generate_token(self):\n token = randint(100000000000000000, 999999999999999999)\n return str(token)", "def random_seed(self) -> None:\n self.seed = random.SeedSequence().entropy", "def random():\n np.random.seed(1939)", "def generate_seed(input):\n return hashlib.md5(input.encode(\"utf8\")).hexdigest()", "def __init__(self, seed):\n self.mt = MT19937(seed & 0xFFFF)\n self.keystream = []", "def generate_seed(*args):\n\n return md5(\"\".join(args).encode(\"utf-8\")).hexdigest()", "def GetRandomSeed():\n return option['random_seed']", "def generate(self):\n\n four_digits = random.choice(string.ascii_uppercase) + random.choice(string.ascii_lowercase) + \\\n random.choice(string.digits) + random.choice(string.punctuation)\n\n if self.pass_length == 4:\n\n # if password is 4 letter long\n self.shuffle_pass(four_digits)\n else:\n\n # if password length is higher than 4 it add some printable letter and add to the four_digit variable\n diff = self.pass_length - 4\n password_long = ''\n i = 1\n while i <= diff:\n i += 1\n p = random.choice(string.printable)\n password_long += p\n self.shuffle_pass(four_digits + password_long)", "def _generate_raw_environments(self, num, seed):", "def seed(seed: int) -> None:\n ...", "def generate_code(self):\n seeds = \"1234567890\"\n random_str = []\n for i in range(4):\n random_str.append(choice(seeds))\n\n return \"\".join(random_str)", "def seed(self, seed: Optional[int]) -> None:\n ...", "def __initialize_totp(self) -> pyotp.totp.TOTP:\n return pyotp.totp.TOTP(self.user.totp_secret)", "def generate():\n s = random_data.random_bytes(100)\n return generate_from_string(s)", "def Randomize(seed=None):\n random.seed()", "def seed(self) -> int:\n return self._seed # type: ignore", "def __init__(self, seed=__default):\n\n seed = self.__default if seed == 0 else seed\n self.__mt[0] = seed & self.__genm\n 
for i in range(1, self.__n):\n self.__mt[i] = (self.__genp * self.__mt[i-1]) & self.__genm", "def generate_master_key(self):\n return utils.random(secret.SecretBox.KEY_SIZE)", "def generate_raiz():\n\treturn os.urandom(12)", "def __generate_session_token(self):\n\n return get_random_string(length=32)", "async def password_generate_complex(self, ctx):\n await ctx.send(\n \"\".join(\n random.choice(string.ascii_letters[:94]) for i in range(random.randint(20, 35))\n )\n )", "def generate_random_password(self):\r\n self.symbols = self.__set_symbol_dict() # set new symbol subset dict\r\n self.i = randrange(len(self.symbols)) # set new dict key pointer\r\n return \"\".join(self.__get_random_symbol() for _ in range(self.pw_len))", "def generate_token():\n chars = ('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')\n rand = random.SystemRandom()\n random_string = ''.join(rand.choice(chars) for _ in range(40))\n return hmac.new(\n config.SECRET_KEY,\n random_string,\n hashlib.sha256\n ).hexdigest()", "def generate_key():\n return get_token_generator().generate_token()", "def generate_random_MT():\n # Generate 6 indepdendent normal deviates:\n six_MT_unnormalised = np.array([np.random.normal(loc=0.0, scale=1.0), np.random.normal(loc=0.0, scale=1.0), np.random.normal(loc=0.0, scale=1.0), np.random.normal(loc=0.0, scale=1.0), np.random.normal(loc=0.0, scale=1.0), np.random.normal(loc=0.0, scale=1.0)], dtype=float)\n # Normallise sample onto unit 6-sphere:\n six_MT_normalised = six_MT_unnormalised/(np.sum(six_MT_unnormalised**2)**-0.5) # As in Muller (1959)\n # And normallise so that moment tensor magnitude = 1:\n six_MT_normalised = six_MT_normalised/((np.sum(six_MT_normalised**2))**0.5)\n # And set to correct dimensions (so matrix multiplication in forward model works correctly):\n six_MT_normalised = np.reshape(six_MT_normalised, (6, 1))\n return six_MT_normalised", "def seed(self):\n return self._seed", "def seed(self):\n return self._seed", "def seed(self):\n return self._seed", "def generate() -> int:\n return randint(0, 1000000000)", "def generate_random_mac_addr(self):\n return \"02:00:00:%02x:%02x:%02x\" % (random.randint(0, 255),\n random.randint(0, 255),\n random.randint(0, 255))", "def gen_passphrase(self):\n return ''.join(\n random.sample(map(str, range(0,10)) +\n map(chr, range(ord('a'), ord('z') + 1)) +\n map(chr, range(ord('A'), ord('Z') + 1)), self.passphraselen))", "def NewRndSeed(ss):\n ss.RndSeed = int(datetime.now(timezone.utc).timestamp())", "def seed(self, seed=None):\n raise self.gym.seed(seed)", "def random_ticket():\n ts = time.time()\n return \"%s_%s\" % (ts, random_str(6, string.digits))", "def randkey():\n return binascii.b2a_hex(os.urandom(15))", "def generate_new_token(self):\n self.access_token = random_auth_key()", "def get_random_seed(self):\n return self.random_seed", "def _generate_id() -> str:\n return \"\".join(sample(\"abcdefghjkmopqrstuvqxyz\", 16))", "def seed(val):\n _CAPI_SetSeed(val)", "def random_seed(i): # -> None:\n ...", "def _random_password(self):\n return ''.join([\n random.choice(string.ascii_letters + string.digits)\n for _ in range(12)\n ])", "def create_token(self,uid):\n token_str = self.get_random(5) + str(uid) + str(int(time.time()))\n m = hashlib.md5()\n m.update(token_str)\n return m.hexdigest()", "def MT19937_keystream_generator(seed: int) -> bytes:\n # Verify that the seed is atmost 16 bit long.\n assert math.log2(seed) <= 16\n \n prng = MT19937(seed)\n while True:\n number = prng.extract_number()\n yield from 
number.to_bytes(4, \"big\")", "def set_seed():\n np.random.seed(1423)", "def random():\n np.random.seed(0)", "def seed_random():\n random.seed(0)", "def get_random(cls):\n\n\t\tnum = randint(0, 6)\n\n\t\treturn Tetromino(num)", "def generate_password():\n return urlsafe_b64encode(urandom(32)).decode('utf-8')", "def make_random_passphrase():\n import random\n prng = random.SystemRandom()\n templates = ['aababbab', 'aabbabab', 'aabbabba', 'abaabbab', 'abababab',\n 'abababba', 'ababbaab', 'ababbaba', 'abbaabab', 'abbaabba',\n 'abbabaab', 'abbababa', 'abbabbaa', 'baababab', 'baababba',\n 'baabbaab', 'baabbaba', 'babaabab', 'babaabba', 'bababaab',\n 'babababa', 'bababbaa', 'babbaaba', 'babbabaa']\n alphabet = {'a':\"aeiou\", 'b':list(\"bcdfghjklmnprsvwxyz\") + [\"ch\",\"ph\",\"st\"]}\n for n in (1,2,3):\n template = prng.choice(templates)\n password = \"\".join([prng.choice(alphabet[c]) for c in template])\n print password.capitalize() + prng.choice(\"0123456789\"),\n return 0", "def seed(self, seed=None):\n raise NotImplementedError()", "def seed(self, seed=None):\n raise NotImplementedError()", "def get_temperature(self):\n rand_number = randint(18, 30)\n return rand_number", "def __generate_pin(cls) -> str:\n return str(randbelow(10 ** cls.PIN_DIGITS)).zfill(cls.PIN_DIGITS)", "def generate_seeds_and_models(args, synced_model, env):\n np.random.seed()\n random_seed = np.random.randint(2**30)\n two_models = perturb_model(args, synced_model, random_seed, env)\n return random_seed, two_models", "def generateToken():\n length = random.randint(8, 32)\n rdmtoken = ''.join(random.choice(string.printable) for i in range(length))\n return f'{rdmtoken}'", "def generate_random_key(self):\n self.key = ''.join(choice(ascii_letters + digits) for i in range(300))", "def gen_key():\n key = os.urandom(32) # 256 bit\n return base64.b64encode(key).rstrip('=') # strip off padding", "def generate_key(self)->bytes:\n return os.urandom(32)", "def testrandom(self):\n for i in range(100):\n AmuletAbility()", "def get_salt():\n return os.urandom(32)", "def generate_session_id():\n return utils.get_32bit_random_num()", "def generate_secret_number(self):\n self.secret_number = randint(0, self.difficulty)\n # print(f\"secret_number = {self.secret_number}\")", "def generate():", "def generate_id():\n return str(uuid.uuid4())[:5].replace('e','a')", "def seed(self, seed=None):\n # to have a different environment at each time (resolve python random problem)\n self.np_random, seed1 = seeding.np_random(seed)\n seed2 = seeding.hash_seed(seed1 + 1) % 2 ** 31\n return [seed1, seed2]", "def generate_password():\n chars = string.ascii_letters + string.digits\n key = random.sample(chars, 10)\n keys = \"\".join(key)\n return keys", "def generate_new_token(uid):\n random_token = uuid.uuid4()\n token = TokenAuth(user_id=uid, token=random_token)\n token.save()\n return random_token", "def genKey(length=32):\r\n return os.urandom(length)", "def __generate_random_string():\n return uuid4().hex[:6].upper()", "def _generateSecretKey():\n return ''.join(SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(20))", "def generate_key():\n key = list(Fleissner.default)\n random.shuffle(key)\n done = False\n while not done:\n try:\n Fleissner(key=\"\".join(key))\n done = True\n except:\n random.shuffle(key)\n return \"\".join(key)", "def generate(self, seed_text, next_words=20, T=0.9):\n\n index_to_word = {index: word for word, index in self.tokenizer.word_index.items()}\n\n for _ in range(next_words):\n token_list = 
self.tokenizer.texts_to_sequences([seed_text])[0]\n token_list = pad_sequences([token_list], maxlen=self.max_sequence_len, padding='pre')\n\n probas = self.model.predict(token_list, verbose=0)\n probas = np.array(probas[0][1:])\n probas = probas ** (1.0 / T)\n probas /= np.sum(probas)\n predicted = np.random.choice(range(1,self.total_words), p=probas)\n \n seed_text += \" \" + (index_to_word[predicted] if predicted != 0 else '')\n\n return seed_text", "def single_temp() -> str:\n return '36.' + str(random.randint(1, 5))", "def initialize_randomness(seed):", "def util_entropy():\n call('/usr/bin/sudo /usr/sbin/rngd -f -r /dev/urandom', shell=True)", "def run_seed(self):\n user = User(username='administrator')\n user.set_password('123456123a')\n user.save()", "def get_generate_root_otp():\n if vault_version_ge(\"1.10.0\"):\n test_otp = \"BMjzW3wAsEzINXCM05Wbas3u9zSl\"\n elif vault_version_ge(\"1.0.0\"):\n test_otp = \"ygs0vL8GIxu0AjRVEmJ5jLCVq8\"\n else:\n test_otp = \"RSMGkAqBH5WnVLrDTbZ+UQ==\"\n return test_otp", "def initialize_random_number_generator(self,question_type):\n\t\tself.generator.seed(self.generate_index(self.magic, self.level, self.problem_id, question_type))", "def run(seed, ModelClass=Model):\n model = ModelClass(random_seed=seed)\n return model.one_trial(1, 10)", "def generateRegistrationId():\n regId = KeyHelper.getRandomSequence()\n return regId", "def __init__(self, seed):\n # MT 19937 constants\n self.state_size = 624\n self.mt_const0 = 1812433253\n self.mt_const1 = 2636928640\n self.mt_const2 = 4022730752\n self.mt_const3 = 0x80000000\n self.mt_const4 = 0x7fffffff\n\n # use this to maintain state for getting a single byte every time\n self.num = None\n self.count = 0\n \n self.index = 0\n self.mt = [0] * self.state_size\n self.mt[0] = seed\n for i in range(1, self.state_size):\n self.mt[i] = self.get_lsb(self.mt_const0 * (self.mt[i - 1] ^ self.mt[i - 1] >> 30) + i, 32)", "def gen_random_fightID():\n pass", "def generate_session_key(self):\n return ''.join(random.choice(string.digits + string.letters) for _ in range(16))", "def generate_otp(email):\n\tprint \"generate_otp\"\n\totp_key = pyotp.random_base32()\n\ttotp = pyotp.TOTP(otp_key)\n\n\t# Data for generating user specific QR code\n\tqrcode_data = totp.provisioning_uri(email)\n\tprint \"otp_key = \", otp_key\n\tprint \"qrcode_data = \", qrcode_data\n\n\treturn (otp_key, qrcode_data)", "def challenge23():\n seed = random.randint(1, 2 ** 31)\n orig_mt = MersenneTwister(seed)\n copy_mt = MersenneTwister(0)\n for index, p in enumerate(orig_mt.generate(624)):\n copy_mt.y[index] = untempering(p)\n return orig_mt, copy_mt", "def generate_random_key():\n return '%030x' % (random.randrange(256**15),)" ]
[ "0.6439089", "0.6343524", "0.631821", "0.629884", "0.6196717", "0.6172871", "0.6148124", "0.6079628", "0.6045656", "0.6045656", "0.6012038", "0.5985964", "0.58983946", "0.5868619", "0.5857218", "0.5822443", "0.5819638", "0.58043975", "0.5787715", "0.5729017", "0.5728478", "0.5696593", "0.5684027", "0.56737834", "0.56689864", "0.5663572", "0.56358993", "0.5633749", "0.5632289", "0.56223685", "0.56038207", "0.559072", "0.557883", "0.5574247", "0.5571151", "0.5563691", "0.55561715", "0.5552529", "0.55500275", "0.5542173", "0.5542173", "0.5542173", "0.5532734", "0.5532148", "0.5515433", "0.5511651", "0.5504965", "0.5503361", "0.55032784", "0.55000025", "0.5498553", "0.54962206", "0.5495858", "0.54907644", "0.54905313", "0.5486445", "0.5481278", "0.5476469", "0.54741114", "0.54679185", "0.54622656", "0.5455383", "0.5447741", "0.5445098", "0.5445098", "0.54445434", "0.5434268", "0.5433361", "0.5427394", "0.54255027", "0.5424126", "0.54219395", "0.5421204", "0.54043204", "0.5403013", "0.5398859", "0.53947306", "0.53911567", "0.53824776", "0.53793925", "0.53780264", "0.53755075", "0.53645414", "0.5358848", "0.5354348", "0.53480726", "0.5345259", "0.5341433", "0.5339868", "0.5338342", "0.5325961", "0.5325874", "0.53258693", "0.5324682", "0.5322154", "0.53193504", "0.53147525", "0.531266", "0.5311004", "0.5309471" ]
0.6281103
4
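
A minimal usage sketch for the document above (assuming an authenticated `oci.identity.IdentityClient` named `identity_client` and placeholder OCIDs; both names are illustrative and not part of the sample itself):

    import oci

    # Build the client from the default config file (~/.oci/config).
    config = oci.config.from_file()
    identity_client = oci.identity.IdentityClient(config)

    # Regenerate the seed for an existing MFA TOTP device; per the
    # response_type above, response.data is an MfaTotpDevice model.
    response = identity_client.generate_totp_seed(
        user_id="ocid1.user.oc1..example",
        mfa_totp_device_id="ocid1.credential.oc1..example",
    )
    print(response.data)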
Gets the authentication policy for the given tenancy. You must specify your tenant's OCID as the value for the compartment ID (remember that the tenancy is simply the root compartment).
def get_authentication_policy(self, compartment_id, **kwargs): resource_path = "/authenticationPolicies/{compartmentId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_authentication_policy got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "compartmentId": compartment_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="AuthenticationPolicy") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="AuthenticationPolicy")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def authentication_policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"authentication_policy\")", "def authentication_policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"authentication_policy\")", "def authentication_policy(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"authentication_policy\")", "def _get_tenant_ocid(self):\n if isinstance(self._provider, oci.signer.Signer):\n return self._provider.api_key.split('/')[0]", "def get_tenant_keyring(self) -> Optional[ImaKeyring]:\n return self.keyrings.get(\"tenant_keyring\")", "def rbac_policy_get(request, policy_id, **kwargs):\n policy = neutronclient(request).show_rbac_policy(\n policy_id, **kwargs).get('rbac_policy')\n return RBACPolicy(policy)", "def client_access_policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"client_access_policy\")", "def get_tenant_id(self, **kwargs):\n if self.authenticate() == 200:\n return self.tenant_id\n else:\n return None", "def policies(self, request):\n policies = OtterPolicies(self.store, self.tenant_id, self.group_id,\n self.dispatcher)\n return policies.app.resource()", "def aad_tenant_id(self) -> Optional[str]:\n return pulumi.get(self, \"aad_tenant_id\")", "def get_org_policy(self, resource, constraint, fields=None,\n verb='getOrgPolicy', **kwargs):\n arguments = {'resource': resource, 'fields': fields,\n 'body': {'constraint': constraint}}\n if kwargs:\n arguments.update(kwargs)\n return self.execute_query(\n verb=verb,\n verb_arguments=arguments,\n )", "def get(self):\n policy_number = reqparse.request.args.get('policy_number')\n category = reqparse.request.args.get('category')\n\n dao = ClaimDao()\n return dao.get(policy_number=policy_number, category=category)", "def get_tenant_config(tenant_id):\n for tenant in tenants:\n if tenant['tenant_id'] == tenant_id:\n return tenant\n raise errors.BaseTapisError(\"invalid tenant id.\")", "def get_tenant_by_id(tenant_id):\n tenant = identity.Tenant.query.filter_by(id=tenant_id).first()\n if tenant:\n return tenant\n abort(404, f\"Unable to find tenant with id: {tenant_id}\")", "def get_policy(usage_id):\r\n return policy.get(policy_key(usage_id), {})", "def tenant_access(self) -> Optional[pulumi.Input['ServiceTenantAccessArgs']]:\n return pulumi.get(self, \"tenant_access\")", "def tenant_access(self) -> Optional[pulumi.Input['ServiceTenantAccessArgs']]:\n return pulumi.get(self, \"tenant_access\")", "def get_tenant(key, tenant_name):\n for tenant in key.tenants.list():\n if tenant.name == tenant_name:\n return tenant\n\n return None", "def get_aad_tenant_id(self) -> Union[str, None]:\n return self._get_aad_tenant_id(enable_validation=True)", "def get_access_policy(access_policy_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAccessPolicyResult:\n __args__ = dict()\n __args__['accessPolicyId'] = access_policy_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('google-native:accesscontextmanager/v1:getAccessPolicy', __args__, opts=opts, typ=GetAccessPolicyResult).value\n\n return AwaitableGetAccessPolicyResult(\n etag=pulumi.get(__ret__, 'etag'),\n name=pulumi.get(__ret__, 'name'),\n parent=pulumi.get(__ret__, 'parent'),\n scopes=pulumi.get(__ret__, 'scopes'),\n title=pulumi.get(__ret__, 'title'))", "def tenant_access(self) -> pulumi.Output['outputs.ServiceTenantAccess']:\n return pulumi.get(self, \"tenant_access\")", "def get_account_for_tenant(test_auth, 
tenant_id):\n return '%s%s' % (test_auth.reseller_prefixes[0], tenant_id)", "def get_quotas_tenant(self, **_params):\r\n return self.get(self.quota_path % 'tenant', params=_params)", "def get_key_ring_policy(project_id, location_id, key_ring_id):\n\n # Creates an API client for the KMS API.\n kms_client = googleapiclient.discovery.build('cloudkms', 'v1')\n\n # The resource name of the KeyRing.\n parent = 'projects/{}/locations/{}/keyRings/{}'.format(\n project_id, location_id, key_ring_id)\n\n # Get the current IAM policy.\n request = kms_client.projects().locations().keyRings().getIamPolicy(\n resource=parent)\n response = request.execute()\n\n if 'bindings' in response.keys():\n print('Printing IAM policy for resource {}:'.format(parent))\n for binding in response['bindings']:\n print('')\n print('Role: {}'.format(binding['role']))\n print('Members:')\n for member in binding['members']:\n print(member)\n print('')\n else:\n print('No roles found for resource {}.'.format(parent))", "def acquire_token_func():\n\tconf = get_conf_from_json()\n\ttenant_name = conf['tenant_name']\n\tauthority_url = f'https://login.microsoftonline.com/{tenant_name}'\n\tapp = msal.ConfidentialClientApplication(\n\t\tauthority=authority_url,\n\t\tclient_id=conf['client_id'],\n\t\tclient_credential=conf['client_secret']\n\t)\n\ttoken = app.acquire_token_for_client(scopes=[\"https://graph.microsoft.com/.default\"])\n\treturn token", "def check_tenant_authorization(tenant_id, override_permission=None):\n claims = get_jwt_claims()\n if \"id\" in list(claims.keys()):\n tenant_user = identity.TenantUser.query.filter_by(id=claims[\"id\"]).first()\n if (\n tenant_user.tenant_id == tenant_id\n or override_permission in tenant_user.permissions\n ):\n return\n abort(403, \"Unauthorized Tenant\")", "def get_authentication(connection=\"oracle://ATLAS_COOLPROD/ATLAS_COOLONL_GLOBAL\"):\n \n from os import environ\n from os.path import join as pjoin\n assert \"CORAL_AUTH_PATH\" in environ, \"CORAL_AUTH_PATH environment var not set\"\n \n auth_paths = environ[\"CORAL_AUTH_PATH\"].split(\":\")\n \n for auth_path in auth_paths + [\".\"]:\n file_name = pjoin(auth_path, \"authentication.xml\")\n if exists(file_name):\n authentication = parse_auth_file(file_name, connection)\n if authentication:\n return authentication\n \n raise RuntimeError(\"Unable to locate credentials for %s.\" \n % connection)", "def get_policy_id(token_name, utxo):\n assets_id = [k.split('.') for k in utxo['balances'].keys() if len(k.split('.')) == 2 and k.split('.')[1] == token_name]\n if len(assets_id) == 1:\n policy_id = assets_id[0][0]\n else:\n policy_id = None\n return policy_id", "def get_current_tenant():\n return getattr(_thread_locals, \"tenant\", None)", "def getOrganizationBrandingPolicy(self, organizationId: str, brandingPolicyId: str):\n\n metadata = {\n 'tags': ['Dashboard branding policies'],\n 'operation': 'getOrganizationBrandingPolicy',\n }\n resource = f'/organizations/{organizationId}/brandingPolicies/{brandingPolicyId}'\n\n return self._session.get(metadata, resource)", "def get_policies():\r\n policy = policies.values()\r\n return policy", "def client_authority(request):\n for principal in request.effective_principals:\n match = re.match(r\"^client_authority:(.+)$\", principal)\n if match and match.group(1):\n return match.group(1)\n\n return None", "def get_acl_policy(client, container_name, policy_name, **kwargs):\n acl = _get_acl(client, container_name, **kwargs)\n return acl.get(policy_name)", "def tenant(self):\n return 
self._tenant", "def authentication_strategy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"authentication_strategy\")", "def authentication_strategy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"authentication_strategy\")", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def get_policy(self):\n return self.agent.get_policy()", "def get_key_vault_credentials():\n if \"APPSETTING_WEBSITE_SITE_NAME\" in os.environ:\n return MSIAuthentication(\n resource='https://vault.azure.net'\n )\n else:\n return ServicePrincipalCredentials(\n client_id=os.environ['AZURE_CLIENT_ID'],\n secret=os.environ['AZURE_CLIENT_SECRET'],\n tenant=os.environ['AZURE_TENANT_ID'],\n resource='https://vault.azure.net'\n )", "def GetPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('GetPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def get_tenancy(self, tenancy_id, **kwargs):\n resource_path = \"/tenancies/{tenancyId}\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"get_tenancy got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tenancyId\": tenancy_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Tenancy\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Tenancy\")", "def get_access_policy_output(access_policy_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAccessPolicyResult]:\n ...", "def storage_account_tenant_id(self) -> Optional[str]:\n return pulumi.get(self, \"storage_account_tenant_id\")", "def policy(self) -> Optional[str]:\n return pulumi.get(self, \"policy\")", "def authorization_strategy(self) -> pulumi.Input['FhirDatastoreIdentityProviderConfigurationAuthorizationStrategy']:\n return pulumi.get(self, \"authorization_strategy\")", "def get_sp_policy(self, context, id):\n # handling policy method in RPC\n response = self.dns_manager.get_sp_policy(context, id)\n return response", "def policy_get(request, policy_id, **kwargs):\n policy = neutronclient(request).show_qos_policy(\n policy_id, **kwargs).get('policy')\n return QoSPolicy(policy)", "def GetPolicies(self):\n policy = {}\n if json is None:\n logging.error('No JSON module, cannot parse policy information')\n else :\n try:\n policy = json.loads(open(self.policy_path).read(), strict=False)\n except IOError:\n logging.error('Failed to load policies from %s' % 
self.policy_path)\n return policy", "def get_by_id(tenant_id):\n tenant = Tenant.find_by_id(tenant_id)\n if tenant:\n tenant_schema = TenantSchema()\n return tenant_schema.dump(tenant)\n\n raise BusinessException(\"Invalid tenant\", HTTPStatus.BAD_REQUEST)", "def callback__authenticate_get(req, test_env=test_env):\n assert req.url.startswith(OAUTH1__URL_AUTHORITY_AUTHENTICATE)\n qs = req.url.split(\"?\")[1]\n qs = dict(parse_qsl(qs))\n\n testapp = test_env[\"testapp_authority\"]\n res = testapp.get(\n \"/authority/oauth1/authorize?oauth_token=%s\" % qs[\"oauth_token\"],\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)", "def authentication_strategy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"authentication_strategy\")", "def account_deletion_policy(self):\n if \"accountDeletionPolicy\" in self._prop_dict:\n if isinstance(self._prop_dict[\"accountDeletionPolicy\"], OneDriveObjectBase):\n return self._prop_dict[\"accountDeletionPolicy\"]\n else :\n self._prop_dict[\"accountDeletionPolicy\"] = SharedPCAccountDeletionPolicyType(self._prop_dict[\"accountDeletionPolicy\"])\n return self._prop_dict[\"accountDeletionPolicy\"]\n\n return None", "def GetToken(self):\n if self.auth_token_:\n return self.auth_token_\n raise RuntimeError('ClientLoginAuthPolicy is not logged in.')", "def tenants(self):\n # print \"tenant list is %s\" % self.auth.tenants.list()\n if not self._tenancy:\n self._tenancy = {}\n for tenant in self.auth.tenants.list():\n t = Tenant(tenant, self)\n self._tenancy[t[\"name\"]] = t\n return self._tenancy", "def get_policy_for(self, mail_domain):\n policy = self.policies.get(mail_domain)\n if policy.policy_alias is not None:\n return self.policy_aliases[policy.policy_alias]\n return policy", "def tenant_id(self) -> Optional[str]:\n return pulumi.get(self, \"tenant_id\")", "def _get_policy_by_sha256(self, sha256):\n params = {\n \"hash\": [sha256],\n \"alg\": [1]\n }\n\n LOG.debug(\"_get_policy_by_sha256: %s\", params)\n return self.api_call(\"GET\", POLICIES_URI, params)", "def get_access_control_allow_credentials(self):\n return self.access_control_allow_credentials", "def read(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.debug(\"Reading the policy: %s\", address)\n response = self.vault.requests_request(\n \"GET\", address, headers=self.vault.token_header\n )\n policy_details = response.json()[\"data\"][\"policy\"]\n return policy_details", "def auth(secrets: Dict) -> ClientSecretCredential:\n\n try:\n credential = ClientSecretCredential(\n tenant_id=secrets.get('tenant_id'),\n client_id=secrets.get('client_id'),\n client_secret=secrets.get('client_secret'),\n authority=urlparse(secrets.get('cloud').endpoints.active_directory).hostname\n )\n except ValueError as e:\n raise InterruptExecution(str(e))\n yield credential", "def get_effective_org_policy(self, resource, constraint, fields=None,\n verb='getEffectiveOrgPolicy', **kwargs):\n arguments = {'resource': resource, 'fields': fields,\n 'body': {'constraint': constraint}}\n if kwargs:\n arguments.update(kwargs)\n return self.execute_query(\n verb=verb,\n verb_arguments=arguments,\n )", "def 
_get_asset_category_policy(self, cr, uid, account, context=None):\n return account.user_type.asset_policy", "def policy(self) -> pulumi.Output['outputs.ServicePolicy']:\n return pulumi.get(self, \"policy\")", "def get_tenants():\n # these are the tenant_id strings configured for the service -\n tenants_strings = conf.tenants\n result = []\n # the tenants service is a special case, as it must be a) configured to serve all tenants and b) actually maintains\n # the list of tenants in its own DB. in this case, we return the empty list since the tenants service will use direct\n # db access to get necessary data.\n if conf.service_name == 'tenants' and tenants_strings[0] == '*':\n return result\n\n # in dev mode, services can be configured to not use the security kernel, in which case we must get\n # configuration for a \"dev\" tenant directly from the service configs:\n if not conf.use_sk:\n for tenant in tenants_strings:\n t = {'tenant_id': tenant,\n 'iss': conf.dev_iss,\n 'public_key': conf.dev_jwt_public_key,\n 'default_access_token_ttl': conf.dev_default_access_token_ttl,\n 'default_refresh_token_ttl': conf.dev_default_refresh_token_ttl,\n }\n result.append(t)\n\n else:\n # TODO -- look up tenants in the tenants API, get the associated parameters (including sk location)\n pass\n return result", "def list_policies(self):\n client = self.connect(VAULT_TOKEN)\n return client.list_policies()", "def get_auth(self):\n return self._auth", "def get_policy(self):\n\n return", "def provision(self, policy):\n client = self.connect(VAULT_TOKEN)\n token = client.create_token(policies = [policy])\n return token[\"auth\"][\"client_token\"]", "def authorization(self, authorization_id):\r\n return authorizations.Authorization(self, authorization_id)", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def get_object_tenant(instance):\n field = get_tenant_field(instance)\n\n if field.primary_key:\n return instance\n\n return getattr(instance, field.name, None)", "def get_workload_policy(self, workload_policy_id):\n url = \"get_workload_policy/%s\" % workload_policy_id\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBody(resp, body[\"workload_policy\"])", "def _get_policy_id(name):\n cohesity_client = _get_client()\n log.info(\"Getting policy with name %s\", name)\n resp = cohesity_client.protection_policies.get_protection_policies(\n names=name)\n if resp:\n return resp[0].id", "def policyid(self):\n return self._policyid", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def authorize(\n context: PolicyContext, resource: str, operation: str, selector: str = \"\",\n) -> Scope:\n request = AccessRequest(\n resource=resource, operation=operation, selector=selector\n )\n scope, authorized = check_permission(context, request)\n if not authorized:\n raise NotEnoughPrivilegesErr(request)\n return scope", "def managed_by_tenant_id(self) -> Optional[str]:\n return pulumi.get(self, \"managed_by_tenant_id\")", "def _get_policies(self):\n flag, response = 
self._commcell_object._cvpysdk_object.make_request('GET', self._POLICY)\n\n if flag:\n if response.json() and 'taskDetail' in response.json():\n policies = response.json()['taskDetail']\n policies_dict = {}\n\n for policy in policies:\n temp_name = policy['task']['taskName'].lower()\n temp_id = str(policy['task']['taskId']).lower()\n policies_dict[temp_name] = temp_id\n\n return policies_dict\n else:\n raise SDKException('Response', '102')\n else:\n response_string = self._commcell_object._update_response_(response.text)\n raise SDKException('Response', '101', response_string)", "def password_policies(self):\n if \"passwordPolicies\" in self._prop_dict:\n return self._prop_dict[\"passwordPolicies\"]\n else:\n return None", "def password_policies(self):\n if \"passwordPolicies\" in self._prop_dict:\n return self._prop_dict[\"passwordPolicies\"]\n else:\n return None", "def get_one(self, nodepool_policy_ident):\n context = pecan.request.context\n nodepool_policy = api_utils.get_resource('NodePoolPolicy', nodepool_policy_ident)\n # policy.enforce(context, 'nodepool_policy:get', nodepool_policy,\n # action='nodepool_policy:get')\n\n return NodePoolPolicy.convert_with_links(nodepool_policy)", "def list_policies(self, compartment_id, **kwargs):\n resource_path = \"/policies\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_policies got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Policy]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Policy]\")", "def policy(self) -> typing.Optional[\"BucketPolicy\"]:\n return jsii.get(self, \"policy\")", "def policy(self) -> typing.Optional[\"BucketPolicy\"]:\n return jsii.get(self, \"policy\")", "def policy_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy_id\")", "def _GetAuthenticator(scope):\n\n if FLAGS.gcloud_authentication_method == 'client_secrets':\n return _ClientSecretsAuthenticator(scope)\n elif FLAGS.gcloud_authentication_method == 'service_account':\n return _ServiceAccountAuthenticator(scope)\n else:\n raise _AuthenticatorError(\n 'Invalid value \"%s\" for --gcloud_authentication_method. Why wasn''t '\n 'this properly validated?' 
% FLAGS.gcloud_authentication_method)", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tenant_id\")" ]
[ "0.5763793", "0.5763793", "0.56437725", "0.52095336", "0.51566327", "0.5049942", "0.49517995", "0.49281493", "0.4919025", "0.48551175", "0.48236695", "0.48195675", "0.48187912", "0.47907704", "0.47624832", "0.4753156", "0.4753156", "0.47355378", "0.47262105", "0.4723426", "0.47186", "0.4705896", "0.46974677", "0.46798822", "0.4659315", "0.46489128", "0.4631749", "0.46225977", "0.46120504", "0.45981133", "0.45921606", "0.4591172", "0.45838788", "0.45797306", "0.4553498", "0.4553498", "0.45496055", "0.45496055", "0.45354322", "0.45256063", "0.452253", "0.45174035", "0.45158207", "0.45062372", "0.4505088", "0.44915816", "0.44876948", "0.44713917", "0.4433578", "0.44308582", "0.44291356", "0.44193414", "0.44143045", "0.44058156", "0.44000185", "0.43820995", "0.43790275", "0.437594", "0.43720827", "0.43719608", "0.43463346", "0.43438417", "0.4342928", "0.43426356", "0.432331", "0.43069583", "0.43038923", "0.42994598", "0.42940405", "0.42806843", "0.4276485", "0.4276485", "0.4276485", "0.4276485", "0.42756367", "0.42705545", "0.42663354", "0.42563042", "0.42546389", "0.42546389", "0.42546389", "0.42546278", "0.42473963", "0.4244147", "0.42413694", "0.42413694", "0.42408293", "0.4233215", "0.4231711", "0.4231711", "0.42314515", "0.42290136", "0.42186633", "0.42186633", "0.42186633", "0.42186633", "0.42186633", "0.42186633", "0.42186633", "0.42186633" ]
0.6030179
0
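
Similarly, a hedged sketch of calling the authentication-policy getter shown in this sample (again assuming an `IdentityClient` named `identity_client` as above; the tenancy OCID is a placeholder):

    # The compartment ID must be the tenancy OCID, since the tenancy
    # is simply the root compartment.
    policy = identity_client.get_authentication_policy(
        compartment_id="ocid1.tenancy.oc1..example",
    )
    # response_type is AuthenticationPolicy; e.g. inspect the password policy.
    print(policy.data.password_policy)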
Gets the specified compartment's information. This operation does not return a list of all the resources inside the compartment. There is no single API operation that does that. Compartments can contain multiple types of resources (instances, block storage volumes, etc.). To find out what's in a compartment, you must call the "List" operation for each resource type and specify the compartment's OCID as a query parameter in the request. For example, call the ListInstances operation in the Compute service or the ListVolumes operation in the Block Volume service.
def get_compartment(self, compartment_id, **kwargs): resource_path = "/compartments/{compartmentId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_compartment got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "compartmentId": compartment_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="Compartment") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="Compartment")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getCompartment(self, *args):\n return _libsbml.Model_getCompartment(self, *args)", "def getCompartment(self):\n return _libsbml.CompartmentReference_getCompartment(self)", "def list_compartments(self, compartment_id, **kwargs):\n resource_path = \"/compartments\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"access_level\",\n \"compartment_id_in_subtree\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_compartments got unknown kwargs: {!r}\".format(extra_kwargs))\n\n if 'access_level' in kwargs:\n access_level_allowed_values = [\"ANY\", \"ACCESSIBLE\"]\n if kwargs['access_level'] not in access_level_allowed_values:\n raise ValueError(\n \"Invalid value for `access_level`, must be one of {0}\".format(access_level_allowed_values)\n )\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"accessLevel\": kwargs.get(\"access_level\", missing),\n \"compartmentIdInSubtree\": kwargs.get(\"compartment_id_in_subtree\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")", "def getCompartment(self):\n return _libsbml.Species_getCompartment(self)", "def get(self, *args):\n return _libsbml.ListOfCompartmentTypes_get(self, *args)", "def getCompartmentType(self):\n return _libsbml.Compartment_getCompartmentType(self)", "def getCompartment(self):\n return _libsbml.Reaction_getCompartment(self)", "def getCompartment(self):\n return _libsbml.QualitativeSpecies_getCompartment(self)", "def get(self, *args):\n return _libsbml.ListOfCompartmentReferences_get(self, *args)", "def getCompartmentType(self):\n return _libsbml.MultiCompartmentPlugin_getCompartmentType(self)", "def getCompartmentType(self, *args):\n return _libsbml.Model_getCompartmentType(self, *args)", "def getCompartment(self):\n return _libsbml.MultiSpeciesType_getCompartment(self)", "def getListOfCompartmentTypes(self, *args):\n return _libsbml.Model_getListOfCompartmentTypes(self, *args)", "def compartment_id(self):\n return self._compartment_id", "def compartment_id(self):\n return self._compartment_id", "def getName(self):\n return _libsbml.CompartmentType_getName(self)", "def getName(self):\n return _libsbml.Compartment_getName(self)", "def getCompartmentReference(self, *args):\n return _libsbml.MultiCompartmentPlugin_getCompartmentReference(self, *args)", "async def getCollectionDetail(self, slug=None):\n payload = {}\n \n if slug:\n payload[\"slug\"] = slug\n \n\n # Parameter validation\n schema = CatalogValidator.getCollectionDetail()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(self._conf.domain, 
f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{slug}/\", \"\"\"{\"required\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"slug\",\"description\":\"A `slug` is a human readable, URL friendly unique identifier of an object. Pass the `slug` of the collection which you want to retrieve.\",\"schema\":{\"type\":\"string\"},\"required\":true}],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"slug\",\"description\":\"A `slug` is a human readable, URL friendly unique identifier of an object. Pass the `slug` of the collection which you want to retrieve.\",\"schema\":{\"type\":\"string\"},\"required\":true}]}\"\"\", slug=slug)\n query_string = await create_query_string(slug=slug)\n headers = {\n \"Authorization\": \"Bearer \" + await self._conf.getAccessToken()\n }\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(self._conf.domain, \"get\", await create_url_without_domain(f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{slug}/\", slug=slug), query_string, headers, \"\", exclude_headers=exclude_headers), data=\"\")", "def getId(self):\n return _libsbml.Compartment_getId(self)", "def get(self, *args):\n return _libsbml.ListOfCompartments_get(self, *args)", "def getCompartmentReference(self):\n return _libsbml.MultiSimpleSpeciesReferencePlugin_getCompartmentReference(self)", "def getCompartmentReference(self):\n return _libsbml.SpeciesTypeInstance_getCompartmentReference(self)", "def get(self):\n return GenericGet().get_catalogs()", "def getId(self):\n return _libsbml.CompartmentType_getId(self)", "def get_compliment():\n name = request.args.get('name')\n show_compliments = request.args.get('show_compliments')\n compliments_to_show = sample(compliments, 3)\n\n return render_template(\n 'compliments.html',\n name=name,\n show_compliments=show_compliments,\n compliments=compliments_to_show)", "def getCompartmentId(self):\n return _libsbml.CompartmentGlyph_getCompartmentId(self)", "def createCompartmentType(self):\n return _libsbml.Model_createCompartmentType(self)", "def get_catalog():\n return jsonify(getCatalog())", "def _get_catalog_object(self):\n return self.cluster.catalogd.service.read_debug_webpage(\n \"catalog_object?object_type=TABLE&object_name=functional.alltypes\")", "def get_components(self, req):\n request_name = req.request\n\n names = []\n if(request_name == \"\"):\n comps = self.rt_proxy.get_available_components() # get all\n else:\n comps = 
self.rt_proxy.get_available_components(request_name)\n\n for c in comps:\n names.append(str(c))\n\n resp = ListComponentsResponse(names)\n\n return resp", "def list_components(self) -> Dict[str, Any]:\n return self._manager.list_components()", "def get_catalog(self) -> Dict[str, str]:\n return self.catalog", "def _get_compartment_name(self, key):\n model = self.model.find(xmlns + 'Model')\n compartments = model.find(xmlns + 'ListOfCompartments')\n for compartment in compartments:\n if compartment.attrib['key'] == key:\n name = compartment.attrib['name']\n break\n assert name != None\n return name", "def get_apartment_info(experiment_config_path, output_dir, keys):\n if isinstance(keys, str):\n keys = [keys]\n\n store = cytometry.get_readonly_datastore(output_dir)\n config = experiment_config.ExperimentConfig(celldom.read_config(experiment_config_path))\n\n df = store.get('apartment').reset_index(drop=True)\n raw_files_map = store.get('acquisition').set_index('acq_id')['raw_image_path']\n\n key_fields = config.experimental_condition_fields + ['apt_num', 'st_num']\n df['key'] = df[key_fields].apply(lambda r: ':'.join(r.values.astype(str)), axis=1)\n df['raw_image_path'] = df['acq_id'].map(raw_files_map)\n\n return df[df['key'].isin(keys)].sort_values(['key', 'acq_datetime'])", "def getCompartmentGlyph(self, *args):\n return _libsbml.Layout_getCompartmentGlyph(self, *args)", "def catalog(self) -> str:\n return pulumi.get(self, \"catalog\")", "def get(self, entity):\n\t\treturn entity.get_component(self.component_type)", "def getListOfCompartments(self):\n return self.model.getListOfCompartments()", "def get_amp_computers(\n host=env.AMP.get(\"host\"),\n client_id=env.AMP_CLIENT_ID,\n api_key=env.AMP_API_KEY\n):\n print(\"\\n==> Computers from AMP\")\n url = f\"https://{client_id}:{api_key}@{host}/v1/computers\"\n response = requests.get(url, verify=False)\n response.raise_for_status()\n computers_list = response.json()[\"data\"] \n return computers_list", "def createCompartment(self):\n return _libsbml.Model_createCompartment(self)", "def getListOfCompartmentReferences(self, *args):\n return _libsbml.MultiCompartmentPlugin_getListOfCompartmentReferences(self, *args)", "def compService():\n return jsonify(compileService.getItems())", "def createCompartmentReference(self):\n return _libsbml.MultiCompartmentPlugin_createCompartmentReference(self)", "def getElementName(self):\n return _libsbml.CompartmentType_getElementName(self)", "def getName(self):\n return _libsbml.CompartmentReference_getName(self)", "def get_compounds(self, ctx, params):\n # ctx is the context object\n # return variables are: out_compounds\n #BEGIN get_compounds\n self._check_param(params, ['compounds'])\n out_compounds = []\n for x in params['compounds']:\n id = x.split('/')[-1]\n comp = self.compounds.get(id, None)\n if comp:\n comp['aliases'] = self.comp_aliases.get(id, '')\n out_compounds.append(comp)\n #END get_compounds\n\n # At some point might do deeper type checking...\n if not isinstance(out_compounds, list):\n raise ValueError('Method get_compounds return value ' +\n 'out_compounds is not type list as required.')\n # return the results\n return [out_compounds]", "def components(self, predicate=None):\n \n if predicate is None:\n return self._get(\"components\").json()\n else:\n return self._get(\"components/search\", params={\"predicate\":predicate}).json()", "def _getComponentsInfo(self):\n result = {}\n et = ElementTree()\n components = self.agentCompleteConfig.listComponents_() + \\\n 
self.agentCompleteConfig.listWebapps_()\n for comp in components:\n compConfig = getattr(self.agentCompleteConfig, comp)\n daemonXml = os.path.join(compConfig.componentDir, \"Daemon.xml\")\n if not os.path.exists(daemonXml):\n logging.warn(\"%s: can't read file '%s' of component '%s', ignored.\" %\n (self.__class__.__name__, daemonXml, comp))\n continue\n tree = et.parse(daemonXml)\n pid = None\n for child in tree.getchildren():\n if child.tag == \"ProcessID\":\n pid = child.get(\"Value\")\n if pid:\n result[comp] = pid # componentName, componentPID\n return result", "def get_companies(self, **kwargs):\n return self.get('companies.json', **kwargs)", "def getSize(self):\n return _libsbml.Compartment_getSize(self)", "def get_all(self):\n\n url = 'equipment/all'\n\n code, xml = self.submit(None, 'GET', url)\n\n return self.response(code, xml)", "def get(self):\n\n return self.api.query(None, None, \"\"\"\n select s.name site_name, r.fqdn ce, pr.release, pr.arch\n from site s\n join resource_element r on r.site = s.id\n join pinned_releases pr on pr.ce_id = r.id\n where r.type = 'CE'\n \"\"\")", "def get_oem_details(self, oem):\n return self.get('vehicles/GetManufacturerDetails/{}'.format(oem))", "def get_companies(self):\n response = self.do_request('/management/companies/export/json')\n if response:\n return response.json()", "def list(self,\n component_type=None,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n summary=None,\n sync=None,\n ):\n return self._invoke('list',\n {\n 'component_type': component_type,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n 'summary': summary,\n 'sync': sync,\n })", "def comics_get(label=None, page=None, per_page=None): # noqa: E501\n\n\n return query_manager.get_resource(\n label=label,\n page=page,\n per_page=per_page,\n rdf_type_uri=COMIC_TYPE_URI,\n rdf_type_name=COMIC_TYPE_NAME, \n kls=Comic)", "def catalog_comp_info_dict(self, catkey):\n return self._catalog_comp_info_dicts[catkey]", "def getEnvironments(request):\n environments = Environment.objects.all()\n serializer = environmentSerializer(environments, many=True)\n result = {'data':serializer.data, 'code':HTTP_200_OK, 'message':OK}\n return result", "def get_details(codetoget):\n\tTrainingComponentDetailsRequest= client.factory.create('TrainingComponentDetailsRequest')\n\tTrainingComponentDetailsRequest.Code=codetoget\n\tTrainingComponentInformationRequested=client.factory.create('TrainingComponentInformationRequested')\n\tTrainingComponentInformationRequested.ShowReleases=True\n\tTrainingComponentInformationRequested.ShowUnitGrid=True\n\tTrainingComponentInformationRequested.ShowComponents=True\n\tTrainingComponentDetailsRequest.InformationRequest=TrainingComponentInformationRequested\n\treturn client.service.GetDetails(TrainingComponentDetailsRequest)", "def GetCompleter(self):\n return self._code['compsvc']", "def info(self):\n return self._fetch_json('/api/info')", "def get_infores_catalog(self):\n return self._infores_catalog", "def get_company_info(company_no):\n in_ = 'curl -s -X GET -u yLwgnyHvwlYxkbOBAoLEwsaEfVQ_a7kAuCUTNtSt: https://api.companieshouse.gov.uk/company/{}/officers?q=Officers&items_per_page=100&start_index=0'.format(company_no).split()\n\n out = subprocess.check_output(in_)\n res = json.loads(out.decode('utf8'))\n ret = res['items']\n \n return ret", "def getId(self):\n return _libsbml.CompartmentReference_getId(self)", "def 
query_cs(charm):\n try:\n series, charm = charm.split('/')\n except ValueError:\n series = 'trusty'\n charm_store_url = 'https://manage.jujucharms.com/api/3/charm'\n url = path.join(charm_store_url, series, charm)\n r = requests.get(url)\n if r.status_code != 200:\n log.error(\"could not find charm store URL for charm '{}'\".format(url))\n rj = r.json()\n raise CharmNotFoundError(\"{type} {charm_id}\".format(**rj))\n\n return r.json()", "def contact_details(self) -> 'outputs.ContactDetailsResponse':\n return pulumi.get(self, \"contact_details\")", "def contact_details(self) -> 'outputs.ContactDetailsResponse':\n return pulumi.get(self, \"contact_details\")", "def contact_details(self) -> 'outputs.ContactDetailsResponse':\n return pulumi.get(self, \"contact_details\")", "def get_companies(self):\n url = 'companies'\n result = self.get(url)\n return result['companies']", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def get_catalog(self) -> Catalog:\n params: Dict[str, Any] = self._status.get_status_info()\n\n response = self._client.open_api_do(\n \"GET\", \"labels/catalogs\", self.dataset_id, params=params\n ).json()\n return Catalog.loads(response[\"catalog\"])", "def compute_api(self):\n path = self.compute.path = '/'\n res = self.compute.call('', 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack compute services: %s' % truncate(res))\n self.compute.path = path\n return res[0]", "def getChemCompVars(self):\n dataDict = self.__dict__\n result = self.specificChemCompVars\n if not result:\n result = self.getByNavigation('namingSystem', 'chemComp', 'chemCompVars')\n return result", "def info(self):\n path = self._get_path('info')\n \n response = self._GET(path)\n self._set_attrs_to_values(response)\n return response", "def get(self, request, p_name, conn_name):\n project = Project.objects.get(name=p_name)\n connector = project.connector_set.filter(name=conn_name)\n serializer = ConnectorSerializer(connector[0], many=False)\n # Not modifying this as it works in tandem with the Thingworx app.\n return Response(serializer.data)", "def isSetCompartment(self):\n return _libsbml.CompartmentReference_isSetCompartment(self)", "def list_components(self, request, context):\n response = ListComponentsResponse()\n for component in self._delegator.list_components():\n response.components.append(component)\n return response", "def get(self, FriendlyName=None, Status=None,\r\n Page=None, PageSize=None, AfterSid=None):\r\n params = resource.get_params(None, locals())\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get(self):\n return Car.browse()", "def getElementName(self):\n return _libsbml.ListOfCompartmentTypes_getElementName(self)", "def getListOfCompartments(self, *args):\n return _libsbml.Model_getListOfCompartments(self, *args)", "def get(self):\n args = GET_PARSER.parse_args()\n print(f'args={args}')\n\n return Contacts().get_all(\n args[\"phonetypeOne\"],\n args[\"phonetypeTwo\"],\n args[\"phonetypeThree\"],\n args[\"firstName\"],\n args[\"lastName\"],)", "def get_companies(self):\n response = self.do_request('/undertaking/list')\n if response:\n return response.json()", "def details(self, identifier):\n return self.client.request_with_method(Methods.GET % (self.name, identifier,))", "def findcomps():\n try:\n appuser, _ = util.authenticate()\n tlid = dbacc.reqarg(\"tlid\", \"dbid\", required=True)\n where = (\"WHERE tlid = \" + tlid + \" AND userid != \" + 
appuser[\"dsId\"] +\n \" ORDER BY modified DESC LIMIT 50\")\n tlcs = dbacc.query_entity(\"TLComp\", where)\n except ValueError as e:\n return util.serve_value_error(e)\n return util.respJSON(tlcs)", "def get_component(self):\n component = []\n component = [self.component_type, self.component_value, self.spot]\n\n if component[2] != None:\n print component\n return component", "def getInfoContainer(data):\n\tAPI_URL = 'https://gps.cs.etc.vn:15443/etcaccr-ecargo-api/swagger-resources/request-object'\n\turl_data = urlencode(data)\n\turl = API_URL + \"?\" + url_data\n\n\tcurl = pycurl.Curl()\n\tcurl.setopt(curl.SSL_VERIFYPEER, 0)\n\tcurl.setopt(pycurl.URL, url)\n\tcurl.setopt(pycurl.HTTPHEADER, ['Accept: application/json',\n\t 'Content-Type: application/json'])\n\n\tbuffer = BytesIO()\n\n\t# prepare and send. See also: pycurl.READFUNCTION to pass function instead\n\tcurl.setopt(pycurl.WRITEFUNCTION, buffer.write)\n\tcurl.perform()\n\n\tstatus_code = curl.getinfo(pycurl.RESPONSE_CODE)\n\n\treturn status_code, buffer.getvalue().decode('utf8')", "def info_equipment_get():\n equipment = _equipment_by_group()\n return equipment, 200", "def recover_compartment(self, compartment_id, **kwargs):\n resource_path = \"/compartments/{compartmentId}/actions/recoverCompartment\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\",\n \"opc_request_id\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"recover_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing),\n \"opc-request-id\": kwargs.get(\"opc_request_id\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")", "def list_identity_providers(self, protocol, compartment_id, **kwargs):\n resource_path = \"/identityProviders\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_identity_providers got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"protocol\": protocol,\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is 
not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[IdentityProvider]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[IdentityProvider]\")", "def get(self, request, pk, format=None):\n settings.LOGGER.info(\n \"JobCatalogDetailViewList >> get >> request {}, pk: {}\".format(\n request.GET, pk))\n try:\n program_id = request.META.get('HTTP_X_SVMS_PROGRAM_ID')\n catalog_obj = self.get_object(pk)\n context = {'program_id': program_id, \"request_obj\": self.request}\n serializer = JobCatalogSerializer(catalog_obj, context=context)\n settings.LOGGER.info(\n \"JobCatalogDetailViewList >> get >> pk: {}, Response {}\".format(\n pk, serializer.data))\n return Response(\n serializer.data,\n status=status.HTTP_200_OK\n )\n except Exception as e:\n settings.LOGGER.info(\n \"JobCatalogDetailViewList >> get >> pk: {}, error {}\".format(pk,\n e))\n return Response({\"error\": \"{}\".format(e)},\n status=status.HTTP_400_BAD_REQUEST)", "async def get_account_info(self, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements\n error_map = {\n 401: ClientAuthenticationError,\n 404: ResourceNotFoundError,\n 409: ResourceExistsError,\n 304: ResourceNotModifiedError,\n }\n error_map.update(kwargs.pop(\"error_map\", {}) or {})\n\n _headers = kwargs.pop(\"headers\", {}) or {}\n _params = case_insensitive_dict(kwargs.pop(\"params\", {}) or {})\n\n restype: Literal[\"account\"] = kwargs.pop(\"restype\", _params.pop(\"restype\", \"account\"))\n comp: Literal[\"properties\"] = kwargs.pop(\"comp\", _params.pop(\"comp\", \"properties\"))\n cls: ClsType[None] = kwargs.pop(\"cls\", None)\n\n request = build_get_account_info_request(\n url=self._config.url,\n restype=restype,\n comp=comp,\n version=self._config.version,\n template_url=self.get_account_info.metadata[\"url\"],\n headers=_headers,\n params=_params,\n )\n request = _convert_request(request)\n request.url = self._client.format_url(request.url)\n\n _stream = False\n pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access\n request, stream=_stream, **kwargs\n )\n\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)\n raise HttpResponseError(response=response, model=error)\n\n response_headers = {}\n response_headers[\"x-ms-client-request-id\"] = self._deserialize(\n \"str\", response.headers.get(\"x-ms-client-request-id\")\n )\n response_headers[\"x-ms-request-id\"] = self._deserialize(\"str\", response.headers.get(\"x-ms-request-id\"))\n response_headers[\"x-ms-version\"] = self._deserialize(\"str\", response.headers.get(\"x-ms-version\"))\n response_headers[\"Date\"] = self._deserialize(\"rfc-1123\", response.headers.get(\"Date\"))\n response_headers[\"x-ms-sku-name\"] = self._deserialize(\"str\", response.headers.get(\"x-ms-sku-name\"))\n 
response_headers[\"x-ms-account-kind\"] = self._deserialize(\"str\", response.headers.get(\"x-ms-account-kind\"))\n\n if cls:\n return cls(pipeline_response, None, response_headers)", "def getNumCompartmentTypes(self):\n return _libsbml.Model_getNumCompartmentTypes(self)", "def compartment(self):\n return \"_embedded\"", "def getCatalogs():", "def get(self, FriendlyName=None, Page=None, PageSize=None, AfterSid=None):\r\n params = resource.get_params(None, locals())\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def GET(self, uri):\n content_type = negotiated_content_type(self.supported_types, self.default_content_type)\n def body(conn, cur):\n self.enforce_right('enumerate', uri)\n return web.ctx.ermrest_catalog_model.acls\n\n def post_commit(acls):\n self.set_http_etag( web.ctx.ermrest_catalog_model.etag() )\n self.http_check_preconditions()\n self.emit_headers()\n web.header('Content-Type', content_type)\n web.ctx.ermrest_request_content_type = content_type\n\n meta = _acls_to_meta(acls)\n\n if self.key is not None:\n # project out single ACL from ACL set\n try:\n meta = meta[self.key]\n except KeyError:\n raise exception.rest.NotFound(uri)\n\n response = json.dumps(meta) + '\\n'\n web.header('Content-Length', len(response))\n return response\n\n return self.perform(body, post_commit)", "def comics_id_get(id): # noqa: E501\n\n\n return query_manager.get_resource(id=id,\n rdf_type_uri=COMIC_TYPE_URI,\n rdf_type_name=COMIC_TYPE_NAME, \n kls=Comic)", "def get_customer_information(self):\n return self._request_json(\"/api/consumer\")[\"consumer\"]" ]
[ "0.651724", "0.6153286", "0.6037744", "0.58690524", "0.57859063", "0.5781716", "0.5763131", "0.576079", "0.5643876", "0.5598771", "0.5592769", "0.55235153", "0.5381384", "0.5341171", "0.5341171", "0.5283982", "0.5278441", "0.5222629", "0.5219247", "0.5206948", "0.5200719", "0.51894313", "0.51697594", "0.5166381", "0.516513", "0.51017845", "0.5092011", "0.506716", "0.5047575", "0.5045076", "0.50330055", "0.5032416", "0.50315946", "0.50156325", "0.49972835", "0.49546432", "0.49439666", "0.49428022", "0.4940055", "0.4939712", "0.49251142", "0.49150974", "0.49094692", "0.49092487", "0.48974252", "0.48759666", "0.48731336", "0.48515302", "0.48478064", "0.48472118", "0.48159224", "0.48123276", "0.47792915", "0.47790015", "0.47643864", "0.47639692", "0.47568515", "0.47567174", "0.4729175", "0.47265932", "0.47259742", "0.4719172", "0.47100443", "0.47024179", "0.46868837", "0.46847424", "0.46764827", "0.46764827", "0.46764827", "0.46757752", "0.4666891", "0.466426", "0.466363", "0.46423942", "0.4639307", "0.46384948", "0.46232063", "0.46223247", "0.46132764", "0.46087047", "0.46044078", "0.4601493", "0.4600553", "0.4598949", "0.45972225", "0.4589007", "0.45854467", "0.4579106", "0.4577802", "0.45766118", "0.45719984", "0.45671567", "0.4563531", "0.45594925", "0.45520973", "0.45340934", "0.45317888", "0.45313415", "0.45298964", "0.45246843" ]
0.6193012
1
Gets the specified dynamic group's information.
def get_dynamic_group(self, dynamic_group_id, **kwargs):
    resource_path = "/dynamicGroups/{dynamicGroupId}"
    method = "GET"

    expected_kwargs = ["retry_strategy"]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "get_dynamic_group got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "dynamicGroupId": dynamic_group_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="DynamicGroup")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="DynamicGroup")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_group_info(groupname):\n return jsonify(admin.get_group_info(current_app.scoped_session(), groupname))", "def get_group_details(self, group_id):\n url = self.groups_url + \"/\" + group_id\n return requests.get(url, headers=self.headers)", "def customer_group_get(group_id=None):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n query = \"\"\"\n SELECT \n `group_id`,\n `group_name`,\n `description`,\n `timestamp`,\n `created_by`,\n `creation_time`,\n `is_deleted`,\n `updated_by`,\n `role_id`,\n `is_default`,\n `is_customer`,\n `company_name`,\n `company_address`,\n `company_telephone`,\n `company_fax`,\n `company_website`,\n `company_sales_contact`,\n `company_purchase_contact`,\n `company_business`,\n `company_business_type`,\n `company_sales_email`,\n `company_purchase_email`,\n `company_reg_number`,\n `company_vat_number` \n FROM `groups` \n WHERE `is_customer` = 1\n \"\"\"\n\n if group_id:\n query += \"\"\"\n AND `group_id` = \\\"%s\\\"\n \"\"\" % (group_id)\n\n group_details = None\n cursor = db.cursor()\n\n if cursor.execute(query) != 0:\n group_details = cursor.fetchall()\n\n cursor.close()\n db.close()\n\n return group_details", "def getGroupInfo(groupId):\n url = f\"https://groups.roblox.com/v1/groups/{groupId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j", "def get_group(self, group_name):\n\n return self._group[group_name]", "def get_group(self):\n\t\treturn self.variables.get('group')", "def getGroup(self):\n\t\treturn self.Group", "def get_group(self, group_id):\n return self.root.get(group_id)", "def get_group_info(self, data):\n return self.__form_call('channels.info', data)", "def get_group(self, group, intg):\n return self.data[intg, group, :, :]", "def customer_group_get_related(group_id):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n query = \"\"\"\n SELECT \n `group_id`,\n `group_name`,\n `description`,\n `timestamp`,\n `created_by`,\n `creation_time`,\n `is_deleted`,\n `updated_by`,\n `role_id`,\n `is_default`,\n `is_customer`,\n `company_name`,\n `company_address`,\n `company_telephone`,\n `company_fax`,\n `company_website`,\n `company_sales_contact`,\n `company_purchase_contact`,\n `company_business`,\n `company_business_type`,\n `company_sales_email`,\n `company_purchase_email`,\n `company_reg_number`,\n `company_vat_number` \n FROM `groups` \n WHERE `groups`.`company_name` = (\n SELECT `asshole`.`company_name` \n FROM \n (\n SELECT * \n FROM `groups` \n WHERE `group_id` = \"%s\"\n ) AS `asshole`\n )\n \"\"\" %(group_id)\n \n group_details = None\n cursor = db.cursor()\n\n if cursor.execute(query) != 0:\n group_details = cursor.fetchall()\n\n cursor.close()\n db.close()\n\n return group_details", "def get_group(self, group_id: str) -> dict:\n group = self.ms_client.http_request(method='GET', url_suffix=f'groups/{group_id}')\n return group", "def getAGroupInfo(group_id):\r\n return Group.getAGroupInfo(group_id)", "def get_group(self, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/\"))", "def get_GroupInfo(test_case, # type: AnyMagpieTestCaseType\n override_body=None, # type: JSON\n override_group_name=null, # type: Optional[Str]\n override_version=null, # type: Optional[Str]\n override_headers=null, # type: Optional[HeadersType]\n override_cookies=null, # type: Optional[CookiesType]\n ): # type: (...) 
-> JSON\n version = override_version if override_version is not null else TestSetup.get_Version(test_case)\n grp_name = override_group_name if override_group_name is not null else test_case.test_group_name\n if TestVersion(version) < TestVersion(\"0.6.4\"): # route did not exist before that\n if override_body and \"group\" in override_body:\n return override_body[\"group\"]\n if override_body and \"group_name\" in override_body:\n return override_body\n return {\"group_name\": grp_name or {}}\n if override_body:\n if override_body and \"group\" in override_body:\n return override_body[\"group\"]\n if override_body and \"group_name\" in override_body:\n return override_body\n resp = test_request(test_case, \"GET\", \"/groups/{}\".format(grp_name),\n headers=override_headers if override_headers is not null else test_case.json_headers,\n cookies=override_cookies if override_cookies is not null else test_case.cookies)\n body = check_response_basic_info(resp)\n check_val_is_in(\"group\", body)\n return body[\"group\"] or {}", "def get_group_info(self, control_id: str) -> Tuple[str, str, str]:\n return (\n self._control_dict[control_id].group_id,\n self._control_dict[control_id].group_title,\n self._control_dict[control_id].group_class\n )", "def get(person_group_id):\n url = 'persongroups/{}'.format(person_group_id)\n\n return util.request('GET', url)", "def get_group_command(client: MsGraphClient, args: dict) -> tuple[str, dict, dict]:\n group_id = str(args.get('group_id'))\n group = client.get_group(group_id)\n\n group_readable, group_outputs = parse_outputs(group)\n human_readable = tableToMarkdown(name=\"Groups:\", t=group_readable,\n headers=['ID', 'Display Name', 'Description', 'Created Date Time', 'Mail',\n 'Security Enabled', 'Visibility'],\n removeNull=True)\n entry_context = {f'{INTEGRATION_CONTEXT_NAME}(obj.ID === {group_id})': group_outputs}\n return human_readable, entry_context, group", "def get(self):\n self._group = self._client.get(\n url=self._client.get_full_url(\n self.get_path(\n 'single', realm=self._realm_name, group_id=self._group_id\n )\n )\n )\n self._group_id = self._group[\"id\"]\n return self._group", "def get_group(self, group_path=None):\n if group_path is not None:\n path = '/group/' + group_path\n else:\n path = '/group/%2F'\n try:\n response = self.__session.get(self.__api_base_url + path)\n response.raise_for_status()\n response = response.json()\n except (requests.HTTPError, requests.ConnectionError), error:\n raise Exception(error.message)\n\n return response", "def get_group(group):\n\n return ldapi.lookup(ld, 'cn', group, cfg['ldap_groups_base'])", "def get_groups_details(self, groups):\n assert isinstance(groups, list)\n # It may be require we request the API by splitting the names list\n # If the list is too long to be handled by the Gerrit server (URI)\n query_args = \"?%s\" % \"&\".join([\"q=%s\" % g for g in groups])\n query_args += \"&o=MEMBERS\" if groups else \"o=MEMBERS\"\n\n try:\n ret = self.g.get('groups/%s' % query_args)\n except HTTPError as e:\n return self._manage_errors(e)\n\n return ret", "def getGroupData(service, groupName, attList):\n # import IPython ; IPython.embed() ; exit(); \n groupsDataList = service.contactGroups().list().execute()[\"contactGroups\"]\n for group in groupsDataList:\n if group[\"name\"] == groupName:\n groupData = []\n for att in attList:\n groupData.append(group[att])\n return groupData", "def get_definition(self):\n return self.client._perform_json(\n \"GET\", \"/admin/groups/%s\" % self.name)", "def 
getGroup(self, group_id: int) -> 'Group':\n return self.sObj.getGroup(group_id)", "def get_group(tkn: Token = Depends(from_authotization_header_nondyn),):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return [\n GetGroupShortOut(group_id=str(grp.pk), group_name=grp.group_name)\n for grp in Group.objects().order_by(\"group_name\")\n ]", "def get_group_member(self, group):\n fake_group_obj = SimpleNamespace(id=group[\"id\"])\n current_identity = self.context[\"identity\"]\n avatar = current_groups_service.links_item_tpl.expand(\n current_identity, fake_group_obj\n )[\"avatar\"]\n return {\n \"type\": \"group\",\n \"id\": group[\"id\"],\n \"name\": group.get(\"name\") or group[\"id\"],\n \"description\": group.get(\"description\", \"\"),\n \"avatar\": avatar,\n }", "def get(ctx):\n user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),\n ctx.obj.get('group'))\n try:\n response = PolyaxonClient().experiment_group.get_experiment_group(\n user, project_name, _group)\n cache.cache(config_manager=GroupManager, response=response)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not get experiment group `{}`.'.format(_group))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n get_group_details(response)", "def test_get_group_v2(self):\n response = self.client.get_group(\"ABC123\", api_version=2)\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v2/groups/ABC123\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def get_adgroup(self, adgroup_id, fields=None, batch=False):\n path = '%s' % adgroup_id\n args = {'fields': fields} if fields else {}\n return self.make_request(path, 'GET', args, batch=batch)", "def get_group(self, wanted_group):\n if self.group_file:\n return self._get_group_from_file(wanted_group)\n return self._get_group_from_host(wanted_group)", "def cli(ctx, group_id):\n return ctx.gi.groups.show_group(group_id)", "def get(cls, group_id, db_session=None):\n db_session = get_db_session(db_session)\n return db_session.query(cls.model).get(group_id)", "async def get_group_info(self, group_id: int) -> models.Group:\n results = await self._api.call('group', 'get_group_info', gid=group_id)\n return models.Group(results.payload)", "def get(self, group) -> Optional[OrderedDict]:\n return self._queue.get(group)", "def test_get_group(self):\n pass", "def get_group_by_name_get(self, groupName, groupType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/Name/{groupName}/{groupType}/\"))", "def _get_endpoint_group(self, group_name):\n params = {\n \"name\": group_name\n }\n\n response, err_msg = self.api_call(\"GET\", ENDPOINT_GROUP_URI, params)\n if not err_msg:\n result = response.json()\n if result.get(\"nextPage\"):\n response_next = self.get_next_page(result.get(\"nextPage\"))\n\n return response, err_msg", "def get(self, name_group, key, format=str):\n self.psettings.beginGroup(name_group)\n value = self.psettings.value(key, type=format)\n self.closeGroup()\n return value", "def get_identity_group(self, group):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.identitygroup.1.0+xml'})\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tresp = 
self.ise.get('{0}/config/identitygroup?filter=name.EQ.{1}'.format(self.url_base, group))\n\t\tfound_group = ERS._to_json(resp.text)\n\n\t\tif found_group['ns3:searchResult']['@total'] == '1':\n\t\t\tresp = self.ise.get('{0}/config/identitygroup/{1}'.format(\n\t\t\t\t\tself.url_base, found_group['ns3:searchResult']['ns3:resources']['ns5:resource']['@id']))\n\t\t\tif resp.status_code == 200:\n\t\t\t\tresult['success'] = True\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns4:identitygroup']\n\t\t\t\treturn result\n\t\t\telif resp.status_code == 404:\n\t\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\t\telse:\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\telif found_group['ns3:searchResult']['@total'] == '0':\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = 404\n\t\t\treturn result\n\n\t\telse:\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def get(self):\n status = ErrorCode.SUCCESS\n try:\n res = []\n cid = self.get_argument('cid', None)\n if not (cid is None):\n res = QueryHelper.get_groups_by_cid(cid, self.db)\n self.write_ret(status,\n dict_=DotDict(res=res))\n except Exception as e:\n logging.exception(\"[UWEB] Get groups failed. Exception: %s\",\n e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def get(isamAppliance, id, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving group\", \"/sysaccount/groups/{0}/v1\".format(id))", "def get_group_output(group_id: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetGroupResult]:\n ...", "def _get_group(self):\n if self.resource.group is not None:\n try:\n return grp.getgrnam(self.resource.group).gr_gid\n except KeyError:\n raise error.InvalidGroup()", "def product_group_get(obj, name):\n client = get_client(obj)\n\n pgs = client.product_group_list(name)\n if not pgs:\n fatal_error('Product group {} does not exist'.format(name))\n\n print(json.dumps(pgs[0], indent=4))", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def get(self, platform_group):\n return platform_group", "def show_group(self, group_id):\n\n return Client._get(self, id=group_id)", "def get_group(self):\n return self._group", "def group(self):\n return self.properties.get('Group', None)", "def get_config(group):\n config = toml.load('./config.toml')\n return config[group]", "def get_group_metadata(self):\n # Implemented from template for osid.resource.ResourceForm.get_group_metadata_template\n metadata = dict(self._group_metadata)\n metadata.update({'existing_group_values': self._my_map['group']})\n return Metadata(**metadata)", "def getGroup(self, *args):\n return _libsbml.GroupsModelPlugin_getGroup(self, *args)", "def test_groups_group_id_get(self):\n pass", "def _get_group_example_data(self, data_group_id: str) -> Dict[\n str, dict\n ]:\n return {\n e['example_id']: 
self._get_example_data(e['example_id'])\n for e in self.tasks['data_groups'][data_group_id]\n }", "def get_service_group(servicegroup=None, vsys=\"1\"):\n query = {\n \"type\": \"config\",\n \"action\": \"get\",\n \"xpath\": (\n \"/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys{}']/\"\n \"service-group/entry[@name='{}']\".format(vsys, servicegroup)\n ),\n }\n\n return __proxy__[\"panos.call\"](query)", "def list_group():\n data, code, message = FIELD_SERVICE.list_group()\n return __result(data, code, message)", "def what_is(self, _id):\n for g in self.groups:\n if _id in self.h_group_ids[g]:\n return g\n return None", "def get_description(self):\n return self['contactgroup_name']", "def get_group(self, group_id: str):\n\n return self._get(f\"cloudConnectorGroups/{group_id}\")", "def group_info(args):\n\n args.suppress_verify_output = True\n if verify(args) != 0:\n # restore stdout\n sys.stdout = sys.__stdout__\n print(\"Config file not valid, please use the verify function to debug\")\n return 1\n\n with open(args.file, \"r\") as f:\n config_json = json.load(f)\n\n for group in config_json[\"groups\"]:\n if group[\"name\"] == args.group:\n print(json.dumps(group, indent=4))\n return 0\n\n print(\"No group matching {} found\".format(args.group))\n return 1", "def get_group(group_data,path):\n path_name = path.label()\n group_name = re.sub(r'[0-9]+','',path_name)\n if group_name not in group_data:\n group_data[group_name] = len(group_data.keys())\n return group_data[group_name]", "def get_group_id_and_members(self, group_name, group_type=\"exclusivity\"):\n group_details = self.check_group_exists(group_name)\n\n try:\n if group_details is None:\n GeneralLogger.log_info(\"Creating group\")\n create_response = self.create_group(group_name, group_type)\n return create_response.json()[\"id\"], \\\n create_response.json()[\"members\"]\n else:\n GeneralLogger.log_info(\"Group exists\")\n\n return group_details\n except Exception:\n GeneralLogger.log_error(traceback.format_exc())", "def GetGroupMembers(self, group):\n return []", "def getElementByMetaId(self, *args):\n return _libsbml.GroupsModelPlugin_getElementByMetaId(self, *args)", "def get_group_name(\n group_id: BSONObjectId,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return GetGroupOut.from_record(Group.objects(pk=group_id).get())", "def test_IGroupIntrospection_getGroupById(self):\n from Products.PlonePAS.plugins.group import PloneGroup\n\n self.assertIsInstance(self.ldap.getGroupById(\"group0\"), PloneGroup)\n self.assertEqual(self.ldap.getGroupById(\"group0\").getId(), \"group0\")\n self.assertIsNone(self.ldap.getGroupById(\"non-existent\"))", "def get_user_group_details(connection_obj, device=\"server\"):\n command = \"id\"\n if device == \"server\":\n output = utils_obj.remove_last_line_from_string(conn_obj.execute_command(connection_obj, command))\n else:\n output = utils_obj.remove_last_line_from_string(st.show(connection_obj, command, skip_tmpl=True, type=\"click\"))\n return output", "def GetGroup(self, group, reason=None):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_GET,\n \"/%s/groups/%s\" % (GANETI_RAPI_VERSION, group),\n query, None)", "def get_sgd(self, id, name):\n # check if id exists in group definition\n if id in self.mstats.keys() and 'df' in self.mstats[id].keys():\n # print \"id %s in mstats\" % id\n type = 'group' if id.endswith('/') else 'dataset'\n sgd = {'id': id, 'type': type, 
'ns':self.sdef['ns'], 'df': self.mstats[id]['df'],}\n # print \"found definition for %s in mstats, mstats=\" % id\n # pp.pprint(self.mstats)\n return sgd\n else:\n # see if parent group is specified in locations; if so, check for id in \n # locations list of members of parent group. Example for nwb format is are\n # \"UnitTimes/\" inside <module>/. <module> is parent group\n pid = self.sdef['id'] # parent id, e.g. \"<module>\"\n ns = self.sdef['ns']\n if pid in self.file.ddef[ns]['locations']:\n if id in self.file.ddef[ns]['locations'][pid]:\n type = 'group' if id.endswith('/') else 'dataset'\n # add id to mstats so can register creation of group\n self.mstats[id] = {'ns':ns, 'created': [], 'qty': '+', \n 'type': type} # todo: jeff, need to check df\n sgd = self.file.get_sdef(id, ns, \"referenced in make_subgroup\")\n # print \"id %s in %s location ns %s structures\" % (id, pid, ns)\n # example output: id UnitTimes/ in <module>/ location ns core structures\n # traceback.print_stack()\n return sgd\n else:\n print \"found parent %s in locations, but %s not inside\" % (pid, id)\n print \"locations contains:\"\n pp.pprint(self.file.ddef[ns]['locations'][pid])\n else:\n print \"did not find parent %s in locations for namespace %s\" % (pid, ns)\n print \"** Error, attempting to create '%s' (name='%s') inside group:\" % (id, name)\n print self.full_path\n print \"But '%s' is not a member of the structure for the group\" % id\n print \"Valid options are:\", self.mstats.keys()\n # print \"Extra information (for debugging): Unable to find definition for node %s\" % id\n # print \"mstats=\"\n # pp.pprint(self.mstats)\n traceback.print_stack()\n sys.exit(1)", "def list_group(group):\n\n members = group_members(group)\n ret = {}\n if members:\n for member in members:\n info = get(member)\n if info:\n ret[uid2dn(member)] = info\n return ret", "def __getitem__(self, index):\n return self.group_list[index]", "def get_group(self) -> Optional[str]:\n return self.group", "def get(self, group_id):\n return self._get(\"/consistencygroups/%s\" % group_id,\n \"consistencygroup\")", "def get(self, machinegroup_id):\n row = g.db.query(MachineGroup).get(machinegroup_id)\n if not row:\n log.warning(\"Requested a non-existant machine group %s\", machinegroup_id)\n abort(http_client.NOT_FOUND, description=\"Machine Group not found\")\n record = row.as_dict()\n record[\"url\"] = url_for(\"machinegroups.entry\", machinegroup_id=machinegroup_id,\n _external=True)\n\n log.info(\"Returning info for run config %s\", machinegroup_id)\n\n return jsonify(record)", "def _get_group():\n bus = dbus.SystemBus()\n server = dbus.Interface(\n bus.get_object('org.freedesktop.Avahi', '/'),\n 'org.freedesktop.Avahi.Server',\n )\n\n return dbus.Interface(\n bus.get_object('org.freedesktop.Avahi', server.EntryGroupNew()),\n 'org.freedesktop.Avahi.EntryGroup',\n )", "def get_group_info(\n self, tenant: str, group_config: dict, authnz_status: dict\n ) -> tuple:\n if not group_config[\"enabled\"]:\n return (\n group_config[\"default_url_group\"],\n group_config[\"default_memberships\"],\n )\n try:\n group_name = url_unescape(self.get_query_argument(\"group\"))\n except HTTPError as e:\n # first check if it is in the url\n found = re.sub(\n r\"/v1/.+/(p[0-9]+-[a-zA-Z0-9-]+-group).*\", r\"\\1\", self.request.uri\n )\n if found == self.request.uri:\n # then it is not there, so we need to use the default\n default_url_group = group_config[\"default_url_group\"]\n if options.tenant_string_pattern in default_url_group:\n group_name = 
default_url_group.replace(\n options.tenant_string_pattern, tenant\n )\n else:\n group_name = found\n try:\n group_memberships = authnz_status[\"claims\"][\"groups\"]\n except Exception as e:\n logging.info(\n \"Could not get group memberships - choosing default memberships\"\n )\n default_membership = group_config[\"default_memberships\"]\n group_memberships = []\n for group in default_membership:\n if options.tenant_string_pattern in group:\n new = group.replace(options.tenant_string_pattern, tenant)\n else:\n new = group\n group_memberships.append(new)\n return group_name, group_memberships", "def group_describe(self, group):\n mapped = self.map_vects(datanorm)\n mappednp= np.array(mapped)\n \n groups= mappednp[:,0]\n data['Group'] = pd.Series(groups, index=data.index)\n print(data[data['Group']==group].describe())", "def customer_group_get_one(group_id):\n return customer_group_get(group_id)", "def customer_group_get_all():\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n query = \"\"\"\n SELECT \n `group_id`, \n `group_name`, \n `description`, \n `timestamp`, \n `created_by`, \n `creation_time`, \n `is_deleted`, \n `updated_by`, \n `role_id`, \n `is_default`, \n `is_customer` \n FROM `groups` \n WHERE `is_customer` = 1\n \"\"\"\n user_group_details = None\n cursor = db.cursor()\n if cursor.execute(query) != 0:\n user_group_details = cursor.fetchall()\n cursor.close()\n db.close()\n return user_group_details", "def get_target_group_info(self, short_name):\n try:\n response = self.client.describe_target_groups(\n Names=[self.get_target_group_name(short_name)],\n )\n assert response['ResponseMetadata']['HTTPStatusCode'] == 200\n\n return response['TargetGroups'][0]\n except ClientError:\n self.logger.debug('Unable to find load balancer {balancer} target group {group}.'.format(\n balancer=self.get_balancer_name(),\n group=self.get_target_group_name(short_name)\n ))\n return None", "def get_endpoint_group(self, group):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.endpointgroup.1.0+xml'})\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tresp = self.ise.get('{0}/config/endpointgroup?filter=name.EQ.{1}'.format(self.url_base, group))\n\t\tfound_group = ERS._to_json(resp.text)\n\n\t\tif found_group['ns3:searchResult']['@total'] == '1':\n\t\t\tresp = self.ise.get('{0}/config/endpointgroup/{1}'.format(\n\t\t\t\t\tself.url_base, found_group['ns3:searchResult']['ns3:resources']['ns5:resource']['@id']))\n\t\t\tif resp.status_code == 200:\n\t\t\t\tresult['success'] = True\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns4:endpointgroup']\n\t\t\t\treturn result\n\t\t\telif resp.status_code == 404:\n\t\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\t\telse:\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\telif found_group['ns3:searchResult']['@total'] == '0':\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = 404\n\t\t\treturn result\n\n\t\telse:\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def getSetting(self, group, key):\n theValue = None\n\n if not isinstance(group, str):\n raise ValueError(\"'group'' must be a string\")\n if not isinstance(key, str):\n raise ValueError(\"'key'' must be a 
string\")\n\n for aRoot in [\"USER\", \"MET\", \"MAIN\"]:\n if aRoot in self._confData:\n if group in self._confData[aRoot][\"config\"]:\n if key in self._confData[aRoot][\"config\"][group]:\n return self._confData[aRoot][\"config\"][group][key]\n\n logger.error(\"No config entry found matching %s/%s\" % (group, key))\n\n return theValue", "def get_description(self):\n return self['hostgroup_name']", "def group(self) -> str:\n return pulumi.get(self, \"group\")", "def show( self, trans, encoded_id, **kwd ):\n decoded_id = trans.security.decode_id( encoded_id )\n group = self.group_manager.get( trans, decoded_id )\n if group is None:\n raise ObjectNotFound( 'Unable to locate group record for id %s.' % ( str( encoded_id ) ) )\n return self._populate( trans, group )", "def _get_group_from_file(self, wanted_group):\n wanted_gid = \"\"\n if (isinstance(wanted_group, int) or\n re.match(\"^\\\\d+$\", wanted_group)):\n wanted_gid = str(wanted_group)\n wanted_group = \"\"\n try:\n ingroup = open(self.group_file)\n except (IOError, OSError):\n return (\"\", \"\", \"\")\n else:\n for line in ingroup:\n (group, dummy, gid, users) = line.strip().split(':')\n if wanted_group and group == wanted_group:\n return (group, gid, users)\n if wanted_gid and gid == wanted_gid:\n return (group, gid, users)\n ingroup.close()\n return (\"\", \"\", \"\")", "def _get_group_from_host(self, wanted_group):\n wanted_gid = \"\"\n if (isinstance(wanted_group, int) or\n re.match(\"^\\\\d+$\", wanted_group)):\n wanted_gid = str(wanted_group)\n wanted_group = \"\"\n if wanted_gid:\n try:\n hgr = grp.getgrgid(int(wanted_gid))\n except (IOError, OSError, KeyError):\n return (\"\", \"\", \"\")\n return (str(hgr.gr_name), str(hgr.gr_gid), str(hgr.gr_mem))\n\n try:\n hgr = grp.getgrnam(wanted_group)\n except (IOError, OSError, KeyError):\n return (\"\", \"\", \"\")\n return (str(hgr.gr_name), str(hgr.gr_gid), str(hgr.gr_mem))", "def get(self, group_id=None):\n utils.check_group_id_in_req(group_id)\n #group_id = int(group_id)\n _check_group_exists(group_id)\n (process_id, ip_addr) = GID_COORD_DICT[group_id]\n response = {constants.COORD_PID_KEY: process_id,\n constants.COORD_IP_KEY: ip_addr}\n return response", "def getGroup(self, resource):\n\n if isinstance(resource, int):\n resource = 'groups/{0}'.format(resource)\n\n res = self.getRequest(resource)\n\n if res:\n return vsdModels.Group(**res)\n else:\n return None", "def group(self) -> Optional[str]:\n return pulumi.get(self, \"group\")", "def test_get_scaling_group_info(self):\n def view_manifest(with_policies, with_webhooks, get_deleting):\n self.assertEqual(with_policies, False)\n self.assertEqual(with_webhooks, False)\n self.assertEqual(get_deleting, True)\n return succeed(manifest)\n\n manifest = {}\n self.group.view_manifest.side_effect = view_manifest\n info = self.perform_with_group(\n Effect(GetScalingGroupInfo(tenant_id='00', group_id='g1')),\n (self.log, '00', 'g1'), self.group)\n self.assertEqual(info, (self.group, manifest))", "def getGroupById(self, id):\n for group in self.groups:\n if group.id == id:\n return group\n\n return None", "def get_device_group(self, device_group_oid):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.network.networkdevicegroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/networkdevicegroup/{1}'.format(self.url_base, device_group_oid))\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = 
True\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns4:networkdevicegroup']\n\t\t\treturn result\n\t\telif resp.status_code == 404:\n\t\t\tresult['response'] = '{0} not found'.format(device_group_oid)\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def parse_group( self, group ):\n return group", "def get_info(obj):\n res = {}\n res['vserver_group_id'] = obj.vserver_group_id\n if hasattr(obj, 'backend_servers'):\n res['backend_servers'] = obj.backend_servers\n if hasattr(obj, 'vserver_group_name'):\n res['vserver_group_name'] = obj.vserver_group_name\n return res", "def group(self):\n return self._group", "def group(self):\n return self._group", "def group(self):\n return self._group" ]
[ "0.7188056", "0.6950326", "0.6684029", "0.66321135", "0.6617548", "0.6559893", "0.6399865", "0.6395958", "0.63655305", "0.6357235", "0.6337324", "0.6310174", "0.63096356", "0.6253531", "0.6217098", "0.6209635", "0.620371", "0.6186933", "0.618276", "0.6155897", "0.6151408", "0.6140479", "0.61028534", "0.6054149", "0.60314757", "0.6030202", "0.60075194", "0.6005347", "0.59986573", "0.59743685", "0.5966302", "0.5952729", "0.59481394", "0.59079564", "0.58853537", "0.5884452", "0.5834124", "0.5824365", "0.58203316", "0.5817163", "0.5815068", "0.58105373", "0.5809822", "0.5806278", "0.5805615", "0.5803407", "0.5781871", "0.5780169", "0.57439804", "0.5735893", "0.57288367", "0.5717313", "0.5706371", "0.57006776", "0.569776", "0.56950915", "0.56910557", "0.568611", "0.56610733", "0.5655883", "0.565565", "0.56493217", "0.5647667", "0.56444365", "0.5637781", "0.56284624", "0.56265104", "0.5622615", "0.56054956", "0.5594512", "0.55941087", "0.5591721", "0.5588921", "0.5588584", "0.558418", "0.55757403", "0.55647004", "0.5563058", "0.55611724", "0.55550677", "0.55549294", "0.5554815", "0.555367", "0.5539812", "0.55393994", "0.55371165", "0.5525107", "0.55241174", "0.55228513", "0.55146027", "0.5499484", "0.54945165", "0.54844177", "0.54768634", "0.5467152", "0.54656184", "0.5459233", "0.5451041", "0.5451041", "0.5451041" ]
0.6638582
3
Gets the specified group's information. This operation does not return a list of all the users in the group. To do that, use ListUserGroupMemberships and provide the group's OCID as a query parameter in the request.
def get_group(self, group_id, **kwargs):
    resource_path = "/groups/{groupId}"
    method = "GET"

    expected_kwargs = ["retry_strategy"]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "get_group got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "groupId": group_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="Group")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="Group")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_group_info(groupname):\n return jsonify(admin.get_group_info(current_app.scoped_session(), groupname))", "def get_group_details(self, group_id):\n url = self.groups_url + \"/\" + group_id\n return requests.get(url, headers=self.headers)", "def get_group_users(groupname):\n return jsonify(admin.get_group_users(current_app.scoped_session(), groupname))", "def getGroup(group: int, name=\"\") -> list:\n groups = mongo.db.groups.find({'id':group},{'_id':0})\n userID_list = []\n user_list = []\n for entry in groups:\n if entry[\"id\"] == group:\n userID_list = userID_list + entry[\"members\"]\n if len(userID_list) != 0:\n for entry in userID_list:\n x = fetchUser(userId=entry)\n user_list = user_list + x\n return user_list", "def getGroupInfo(groupId):\n url = f\"https://groups.roblox.com/v1/groups/{groupId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j", "def get(self, id):\r\n return UserGroupService.getUserGroup(self, id)", "def get_groups_details(self, groups):\n assert isinstance(groups, list)\n # It may be require we request the API by splitting the names list\n # If the list is too long to be handled by the Gerrit server (URI)\n query_args = \"?%s\" % \"&\".join([\"q=%s\" % g for g in groups])\n query_args += \"&o=MEMBERS\" if groups else \"o=MEMBERS\"\n\n try:\n ret = self.g.get('groups/%s' % query_args)\n except HTTPError as e:\n return self._manage_errors(e)\n\n return ret", "async def get_group_info(self, group_id: int) -> models.Group:\n results = await self._api.call('group', 'get_group_info', gid=group_id)\n return models.Group(results.payload)", "def customer_group_get(group_id=None):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n query = \"\"\"\n SELECT \n `group_id`,\n `group_name`,\n `description`,\n `timestamp`,\n `created_by`,\n `creation_time`,\n `is_deleted`,\n `updated_by`,\n `role_id`,\n `is_default`,\n `is_customer`,\n `company_name`,\n `company_address`,\n `company_telephone`,\n `company_fax`,\n `company_website`,\n `company_sales_contact`,\n `company_purchase_contact`,\n `company_business`,\n `company_business_type`,\n `company_sales_email`,\n `company_purchase_email`,\n `company_reg_number`,\n `company_vat_number` \n FROM `groups` \n WHERE `is_customer` = 1\n \"\"\"\n\n if group_id:\n query += \"\"\"\n AND `group_id` = \\\"%s\\\"\n \"\"\" % (group_id)\n\n group_details = None\n cursor = db.cursor()\n\n if cursor.execute(query) != 0:\n group_details = cursor.fetchall()\n\n cursor.close()\n db.close()\n\n return group_details", "def view_group(request, group_id):\n users = models.UserProfile.all().order('email')\n if group_id:\n group = models.UserGroup.get_by_id(int(group_id))\n if group.users:\n users = models.UserProfile.get(group.users)\n else:\n users = []\n return utility.respond(request, 'admin/view_group', {'users': users})", "def get_group(self, group_name, marker=None, max_items=None):\r\n params = {'GroupName' : group_name}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('GetGroup', params, list_marker='Users')", "def list_group_users(self, group_id, **params):\n url = 'groups/%s/users' % group_id\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def get_group_group_members(self, group_id):\n try:\n group_id = self.quote(group_id)\n return self.g.get('groups/%s/groups/' % group_id)\n except HTTPError 
as e:\n return self._manage_errors(e)", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def show_group(self, group_id):\n\n return Client._get(self, id=group_id)", "def getGroup(self, group_id: int) -> 'Group':\n return self.sObj.getGroup(group_id)", "def groups_get(self, mar, request):\n if not mar.viewed_user_auth:\n raise exceptions.NoSuchUserException(request.groupName)\n group_id = mar.viewed_user_auth.user_id\n group_settings = self._services.usergroup.GetGroupSettings(\n mar.cnxn, group_id)\n member_ids, owner_ids = self._services.usergroup.LookupAllMembers(\n mar.cnxn, [group_id])\n (owned_project_ids, membered_project_ids,\n contrib_project_ids) = self._services.project.GetUserRolesInAllProjects(\n mar.cnxn, mar.auth.effective_ids)\n project_ids = owned_project_ids.union(\n membered_project_ids).union(contrib_project_ids)\n if not permissions.CanViewGroupMembers(\n mar.perms, mar.auth.effective_ids, group_settings, member_ids[group_id],\n owner_ids[group_id], project_ids):\n raise permissions.PermissionException(\n 'The user is not allowed to view this group.')\n\n member_ids, owner_ids = self._services.usergroup.LookupMembers(\n mar.cnxn, [group_id])\n\n member_emails = list(self._services.user.LookupUserEmails(\n mar.cnxn, member_ids[group_id]).values())\n owner_emails = list(self._services.user.LookupUserEmails(\n mar.cnxn, owner_ids[group_id]).values())\n\n return api_pb2_v1.GroupsGetResponse(\n groupID=group_id,\n groupSettings=api_pb2_v1_helpers.convert_group_settings(\n request.groupName, group_settings),\n groupOwners=owner_emails,\n groupMembers=member_emails)", "def locate_group_users(self, group):\n return self.ldap_connection.search_s(\"ou=Groups,dc=redhat,dc=com\",\n ldap.SCOPE_SUBTREE, 'cn={0}'.format(group))", "def get_group(group):\n\n return ldapi.lookup(ld, 'cn', group, cfg['ldap_groups_base'])", "def get_group_member(self, group):\n fake_group_obj = SimpleNamespace(id=group[\"id\"])\n current_identity = self.context[\"identity\"]\n avatar = current_groups_service.links_item_tpl.expand(\n current_identity, fake_group_obj\n )[\"avatar\"]\n return {\n \"type\": \"group\",\n \"id\": group[\"id\"],\n \"name\": group.get(\"name\") or group[\"id\"],\n \"description\": group.get(\"description\", \"\"),\n \"avatar\": avatar,\n }", "def list_group_users(self, group_id):\n resp, body = self.get('groups/%s/users' % group_id)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBodyList(resp, body['users'])", "def get_group_members(self, group_key):\n try:\n paged_results = self.repository.members.list(group_key)\n result = api_helpers.flatten_list_results(paged_results, 'members')\n LOGGER.debug('Getting all the members for group_key = %s,'\n ' result = %s', group_key, result)\n return result\n except (errors.HttpError, HttpLib2Error) as e:\n raise api_errors.ApiExecutionError(group_key, e)", "def get(self):\n status = ErrorCode.SUCCESS\n try:\n res = []\n cid = self.get_argument('cid', None)\n if not (cid is None):\n res = QueryHelper.get_groups_by_cid(cid, self.db)\n self.write_ret(status,\n dict_=DotDict(res=res))\n except Exception as e:\n logging.exception(\"[UWEB] Get groups failed. 
Exception: %s\",\n e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def cli(ctx, group_id):\n return ctx.gi.groups.show_group(group_id)", "def GetGroup(self, group, reason=None):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_GET,\n \"/%s/groups/%s\" % (GANETI_RAPI_VERSION, group),\n query, None)", "def GetGroupMembers(self, group):\n return []", "def fetch_their_members(our_group):\n\tgroup_id = our_group[\"groupId\"]\n\turl = f'{BASE_URL}/groups/{group_id}/members'\n\tparams = {'$select': 'userPrincipalName,id'}\n\treturn call_api(url, params)", "def getGroupMembers(group_id):\r\n return Group.getGroupMembers(group_id)", "def get_group(self, group_id: str) -> dict:\n group = self.ms_client.http_request(method='GET', url_suffix=f'groups/{group_id}')\n return group", "def customer_group_get_all():\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n query = \"\"\"\n SELECT \n `group_id`, \n `group_name`, \n `description`, \n `timestamp`, \n `created_by`, \n `creation_time`, \n `is_deleted`, \n `updated_by`, \n `role_id`, \n `is_default`, \n `is_customer` \n FROM `groups` \n WHERE `is_customer` = 1\n \"\"\"\n user_group_details = None\n cursor = db.cursor()\n if cursor.execute(query) != 0:\n user_group_details = cursor.fetchall()\n cursor.close()\n db.close()\n return user_group_details", "def get_group(self, group_path=None):\n if group_path is not None:\n path = '/group/' + group_path\n else:\n path = '/group/%2F'\n try:\n response = self.__session.get(self.__api_base_url + path)\n response.raise_for_status()\n response = response.json()\n except (requests.HTTPError, requests.ConnectionError), error:\n raise Exception(error.message)\n\n return response", "def get(person_group_id):\n url = 'persongroups/{}'.format(person_group_id)\n\n return util.request('GET', url)", "def list_group(group):\n\n members = group_members(group)\n ret = {}\n if members:\n for member in members:\n info = get(member)\n if info:\n ret[uid2dn(member)] = info\n return ret", "def get(self):\n self._group = self._client.get(\n url=self._client.get_full_url(\n self.get_path(\n 'single', realm=self._realm_name, group_id=self._group_id\n )\n )\n )\n self._group_id = self._group[\"id\"]\n return self._group", "def get_membersof(self, kwargs):\n group = kwargs[\"group\"]\n verbose = kwargs.get(\"verbose\", False)\n\n results = list(self.engine.query(self.engine.GROUP_DN_FILTER(group), [\"distinguishedName\", \"objectSid\"]))\n if results:\n group_dn = results[0][\"distinguishedName\"]\n else:\n error(\"Group {group} does not exists\".format(group=group))\n\n primary_group_id = results[0][\"objectSid\"].split('-')[-1]\n results = self.engine.query(self.engine.ACCOUNTS_IN_GROUP_FILTER(primary_group_id, group_dn))\n self.display(results, verbose)", "def list_group(self, groupname):\n return self.get_admin(\"groups/{}\".format(groupname))", "def get_identity_group(self, group):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.identitygroup.1.0+xml'})\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tresp = self.ise.get('{0}/config/identitygroup?filter=name.EQ.{1}'.format(self.url_base, group))\n\t\tfound_group = ERS._to_json(resp.text)\n\n\t\tif found_group['ns3:searchResult']['@total'] == '1':\n\t\t\tresp = self.ise.get('{0}/config/identitygroup/{1}'.format(\n\t\t\t\t\tself.url_base, found_group['ns3:searchResult']['ns3:resources']['ns5:resource']['@id']))\n\t\t\tif resp.status_code == 
200:\n\t\t\t\tresult['success'] = True\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns4:identitygroup']\n\t\t\t\treturn result\n\t\t\telif resp.status_code == 404:\n\t\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\t\telse:\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\telif found_group['ns3:searchResult']['@total'] == '0':\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = 404\n\t\t\treturn result\n\n\t\telse:\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def list_members(self, group_name):\n METHOD = 'GET'\n API_PATH = '/groups/list-members'\n\n data = {'group_name': group_name}\n\n # Make REST call\n resp = self._rest_call[METHOD](API_PATH, data=data)\n\n if resp.status_code == 200:\n return resp.json().get('members')\n\n elif resp.status_code == 403:\n raise AuthorizationError(\"User is not authorized or token is incorrect.\")\n\n else:\n if resp.json().get(\"error_code\") in ERROR_CODES:\n raise ERROR_CODES[resp.json().get('error_code')](resp.json().get('message'))\n else:\n raise APIError(\"Response code {0}: {1} {2}\".format(resp.status_code,\n resp.json().get('error_code'),\n resp.json().get('message')))", "def get_group(self, group_name):\n\n return self._group[group_name]", "def get(cls, group_id, db_session=None):\n db_session = get_db_session(db_session)\n return db_session.query(cls.model).get(group_id)", "def get_group_members(self, group):\n members = []\n result = self.search('ou=groups,dc=mozilla',\n filterstr='cn=%s' % (group))\n if result == False:\n raise self.SearchError\n elif result == []:\n return []\n for group in result[1]:\n members = list(set(members) | set(group[1]['memberUid']))\n return members", "def get_all_ldap_group_users(group_id):\n\n endpoint = f\"/identities/groups/{group_id}/userMembers?depth=-1\" # recurse to bottom of LDAP group's member tree\n http_response = call_rest_api(endpoint, \"get\", **config.DEFAULT_REST_KWARGS)\n if http_response.status_code != 200: # 200 = 'OK'\n raise ValueError(http_response.text)\n users = http_response.json()[\"items\"]\n return users", "def getAGroupInfo(group_id):\r\n return Group.getAGroupInfo(group_id)", "def list_group_members(self, token, userGroup):\n requestUser = self.get_username_from_token(token)\n dataBase = self.read_database()\n if userGroup not in dataBase['userGroups']:\n raise GroupDoesNotExistException(\"User group does not exist\")\n\n if requestUser not in dataBase['userGroups'][userGroup]['owners']:\n raise UserPermissionException(\"User is not an owner of this group\")\n owners = dataBase['userGroups'][userGroup]['owners']\n members = dataBase['userGroups'][userGroup]['members']\n return {'owners':owners, 'members':members}", "def group_get_members(self,groupname):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_group_get_members_query+\" ORDER BY $username_field$\",{'groupname':groupname,'username_field':self.sql_username_field,'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: group_get_members: %s\" % (query,))\n\n cursor.execute(query)\n desc=[i[0] for i in cursor.description]\n for row in cursor:\n dictrow=dict(zip(desc,row))\n yield 
dictrow[self.sql_username_field]", "def get_group(self, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/\"))", "def group(self, group_id):\r\n return users.Group(self, group_id)", "def get_group(tkn: Token = Depends(from_authotization_header_nondyn),):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return [\n GetGroupShortOut(group_id=str(grp.pk), group_name=grp.group_name)\n for grp in Group.objects().order_by(\"group_name\")\n ]", "def test_get_group_users(self):\n response = self.client.get_group_users(\"ABC123\")\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v2/groups/ABC123/users\")\n self.assertEqual(\n util.params_to_dict(args),\n {\n \"account_id\": [self.client.account_id],\n \"limit\": [\"100\"],\n \"offset\": [\"0\"],\n },\n )", "def get_group(self, group_id):\n return self.root.get(group_id)", "def get_group(self, group_id: str):\n\n return self._get(f\"cloudConnectorGroups/{group_id}\")", "def users_in_group(self, group_id):\n users = []\n users = self._get(('user', 'group', str(group_id)))\n for user in users:\n if 'dreamdiary.diary.user' in user['saml_permissions']:\n users.append(user)\n return users", "def customer_group_get_related(group_id):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n query = \"\"\"\n SELECT \n `group_id`,\n `group_name`,\n `description`,\n `timestamp`,\n `created_by`,\n `creation_time`,\n `is_deleted`,\n `updated_by`,\n `role_id`,\n `is_default`,\n `is_customer`,\n `company_name`,\n `company_address`,\n `company_telephone`,\n `company_fax`,\n `company_website`,\n `company_sales_contact`,\n `company_purchase_contact`,\n `company_business`,\n `company_business_type`,\n `company_sales_email`,\n `company_purchase_email`,\n `company_reg_number`,\n `company_vat_number` \n FROM `groups` \n WHERE `groups`.`company_name` = (\n SELECT `asshole`.`company_name` \n FROM \n (\n SELECT * \n FROM `groups` \n WHERE `group_id` = \"%s\"\n ) AS `asshole`\n )\n \"\"\" %(group_id)\n \n group_details = None\n cursor = db.cursor()\n\n if cursor.execute(query) != 0:\n group_details = cursor.fetchall()\n\n cursor.close()\n db.close()\n\n return group_details", "def get(self):\n usergroup_node = graph.find_one(\"Usergroup\",\n property_key='id',\n property_value=self.id)\n return usergroup_node", "def get_group_command(client: MsGraphClient, args: dict) -> tuple[str, dict, dict]:\n group_id = str(args.get('group_id'))\n group = client.get_group(group_id)\n\n group_readable, group_outputs = parse_outputs(group)\n human_readable = tableToMarkdown(name=\"Groups:\", t=group_readable,\n headers=['ID', 'Display Name', 'Description', 'Created Date Time', 'Mail',\n 'Security Enabled', 'Visibility'],\n removeNull=True)\n entry_context = {f'{INTEGRATION_CONTEXT_NAME}(obj.ID === {group_id})': group_outputs}\n return human_readable, entry_context, group", "def get(self):\r\n return UserGroupService.getAllUserGroups(self)", "def get_group_members(self, group_id: int, page_size=default_members_page_size) -> List[dict]:\n try:\n chat = self.call_method('getChat', chat_id=group_id) # offline request\n except errors.ObjectNotFound:\n self.get_all_chats()\n chat = self.call_method('getChat', chat_id=group_id) # offline request\n\n if chat['type']['@type'] == 'chatTypeBasicGroup':\n members = self.call_method('getBasicGroupFullInfo',\n 
basic_group_id=chat['type']['basic_group_id'])['members']\n\n elif chat['type']['@type'] == 'chatTypeSupergroup':\n members = self._get_super_group_members(chat, page_size)\n\n else:\n raise errors.TDLibError('Unknown group type: %s' % chat['type']['@type'])\n\n return members", "def get_adgroup(self, adgroup_id, fields=None, batch=False):\n path = '%s' % adgroup_id\n args = {'fields': fields} if fields else {}\n return self.make_request(path, 'GET', args, batch=batch)", "def get(self, filter=None, private=None):\r\n params = base.get_params(('filter', 'private'), locals())\r\n url = '{0}/group-privileges/{1}/'.format(self.parent.parent.get_url(),\r\n self.user)\r\n\r\n return http.Request('GET', url, params), parsers.parse_json", "def get(ctx):\n user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),\n ctx.obj.get('group'))\n try:\n response = PolyaxonClient().experiment_group.get_experiment_group(\n user, project_name, _group)\n cache.cache(config_manager=GroupManager, response=response)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not get experiment group `{}`.'.format(_group))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n get_group_details(response)", "def _get_endpoint_group(self, group_name):\n params = {\n \"name\": group_name\n }\n\n response, err_msg = self.api_call(\"GET\", ENDPOINT_GROUP_URI, params)\n if not err_msg:\n result = response.json()\n if result.get(\"nextPage\"):\n response_next = self.get_next_page(result.get(\"nextPage\"))\n\n return response, err_msg", "def list_group():\n data, code, message = FIELD_SERVICE.list_group()\n return __result(data, code, message)", "def get_group_access(self, group):\n return self._access_lists.get_group_access(group)", "def get_users_in_group(self, group_id):\n members = self.vk.groups.getMembers(group_id=group_id, count=1)\n peoples = members['count']\n ids = []\n while len(ids) < peoples:\n members = self.vk.groups.getMembers(group_id=group_id, offset=len(ids))\n ids += members['items']\n\n return ids", "def printUsersInGroup(group) -> None:\n click.echo(tabulate(listUsersInDict(group), headers=\"keys\", tablefmt=\"grid\"))", "def getGroup(self):\n\t\treturn self.Group", "def getGroupData(service, groupName, attList):\n # import IPython ; IPython.embed() ; exit(); \n groupsDataList = service.contactGroups().list().execute()[\"contactGroups\"]\n for group in groupsDataList:\n if group[\"name\"] == groupName:\n groupData = []\n for att in attList:\n groupData.append(group[att])\n return groupData", "def get_user_group_details(connection_obj, device=\"server\"):\n command = \"id\"\n if device == \"server\":\n output = utils_obj.remove_last_line_from_string(conn_obj.execute_command(connection_obj, command))\n else:\n output = utils_obj.remove_last_line_from_string(st.show(connection_obj, command, skip_tmpl=True, type=\"click\"))\n return output", "def get_members_of_group_get(self, currentpage, groupId, memberType, nameSearch):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/Members/\"))", "def get_group_info(self, data):\n return self.__form_call('channels.info', data)", "def list(self):\n METHOD = 'GET'\n API_PATH = '/groups/list'\n\n # Make REST call\n resp = self._rest_call[METHOD](API_PATH)\n\n if resp.status_code == 200:\n return resp.json().get('group_names')\n\n elif 
resp.status_code == 403:\n raise AuthorizationError(\"User is not authorized or token is incorrect.\")\n\n else:\n if resp.json().get(\"error_code\") in ERROR_CODES:\n raise ERROR_CODES[resp.json().get('error_code')](resp.json().get('message'))\n else:\n raise APIError(\"Response code {0}: {1} {2}\".format(resp.status_code,\n resp.json().get('error_code'),\n resp.json().get('message')))", "def test_get_group_v2(self):\n response = self.client.get_group(\"ABC123\", api_version=2)\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v2/groups/ABC123\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def get_admins_and_founder_of_group_get(self, currentpage, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/AdminsAndFounder/\"))", "def check_group_exists(self, group_name):\n for grp in self.get_list_groups():\n if grp[\"name\"] == group_name:\n return grp[\"id\"], grp[\"members\"]\n\n return None", "def get_group_details(self, event):\n body = event['body']\n body = json.loads(body)\n\n required_fields = ['group_id']\n for f in required_fields:\n if f not in body:\n return get_bad_request('POST body missing field {}'.format(f))\n\n group_id = body['group_id']\n \n user = self.mealShareUsers.get_user_cognito_data(event)\n current_user = user['user_id']\n \n # Requesting user must already be a member\n if not self.mealShareGroups.is_user_in_group(current_user, str(group_id)):\n return {\n 'statusCode': 401,\n 'statusMessage': 'User {} is not a member of the group ID {}'.format(current_user, group_id),\n 'group_id': group_id\n }\n \n # Check if adding was successful\n group_data = self.mealShareGroups.get_group_details(group_id)\n if group_data:\n return {\n 'statusCode': 200,\n 'statusMessage': 'SUCCESS!',\n 'group_id': group_id,\n 'group_data': group_data\n }\n else:\n return {\n 'statusCode': 500,\n 'statusMessage': 'FAILED to get group details',\n 'group_id': group_id,\n 'group_data': {}\n }", "def get_groups(self):\n result = self.conn.usergroup.get(status=0, output='extend', selectUsers=\"extend\")\n groups = {group[\"name\"]: Group(\n name=group[\"name\"],\n id=group[\"usrgrpid\"],\n members=group[\"users\"],\n ) for group in result}\n return groups", "def test_get_groups_users(self):\n api.user.create(\n username='chuck',\n email='chuck@norris.org',\n password='secret',\n )\n api.group.create(groupname='staff')\n api.group.add_user(username='chuck', groupname='staff')\n\n users = api.user.get_users(groupname='staff')\n usernames = [user.getUserName() for user in users]\n\n self.assertEqual(usernames, ['chuck'])", "def get_banned_members_of_group_get(self, currentpage, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/Banned/\"))", "def get_groups():\n\n # FUTURE: Properly reutrn error, Mongo is giving it's own\n if current_user.groups:\n return Response(response=json.dumps([g.to_dict() for g in current_user.groups]), status=200, mimetype=\"application/json\")\n else:\n return return_json_error('No groups assigned to', 500)", "def customer_group_get_one(group_id):\n return customer_group_get(group_id)", "def groupfinder(user_id, request):\n ret = 
DBSession.query(User).filter_by(user_id=user_id).all()\n if len(ret) == 0:\n return None\n user = ret[0]\n groups = [x.group_name for x in user.groups]\n return groups", "def get_group_id_and_members(self, group_name, group_type=\"exclusivity\"):\n group_details = self.check_group_exists(group_name)\n\n try:\n if group_details is None:\n GeneralLogger.log_info(\"Creating group\")\n create_response = self.create_group(group_name, group_type)\n return create_response.json()[\"id\"], \\\n create_response.json()[\"members\"]\n else:\n GeneralLogger.log_info(\"Group exists\")\n\n return group_details\n except Exception:\n GeneralLogger.log_error(traceback.format_exc())", "def get_group_members(self, group_id, max_results=None, paging_token=None):\n route_values = {}\n if group_id is not None:\n route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')\n query_parameters = {}\n if max_results is not None:\n query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')\n if paging_token is not None:\n query_parameters['pagingToken'] = self._serialize.query('paging_token', paging_token, 'str')\n response = self._send(http_method='GET',\n location_id='45a36e53-5286-4518-aa72-2d29f7acc5d8',\n version='6.0-preview.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('PagedGraphMemberList', response)", "def getMember(self, *args):\n return _libsbml.Group_getMember(self, *args)", "def get_group_by_name_get(self, groupName, groupType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/Name/{groupName}/{groupType}/\"))", "def get(self, group_id):\n return self._get(\"/consistencygroups/%s\" % group_id,\n \"consistencygroup\")", "def search_users_belong_to_group(self, group_name, q=None, selected=\"selected\"):\n params = {\n 'name': group_name,\n 'selected': selected\n }\n page_num = 1\n page_size = 1\n total = 2\n\n if q:\n params.update({'q': q})\n\n while page_num * page_size < total:\n resp = self.sonarqube.make_call('get', API_USER_GROUPS_USERS_ENDPOINT, **params)\n response = resp.json()\n\n page_num = response['p']\n page_size = response['ps']\n total = response['total']\n\n params['p'] = page_num + 1\n\n for user in response['users']:\n yield user", "def retrieve(self, request, *args, **kwargs):\n queryset = Group.objects.get(pk=request.GET['pk'])\n serializer = GroupReadSerializer(queryset, many=False)\n return Response(serializer.data)", "def get_group(self, bucket_id, group_id, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.get_group_with_http_info(bucket_id, group_id, **kwargs)\n else:\n (data) = self.get_group_with_http_info(bucket_id, group_id, **kwargs)\n return data", "def get_group_name(\n group_id: BSONObjectId,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return GetGroupOut.from_record(Group.objects(pk=group_id).get())", "def get_device_group(self, device_group_oid):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.network.networkdevicegroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/networkdevicegroup/{1}'.format(self.url_base, device_group_oid))\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = 
True\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns4:networkdevicegroup']\n\t\t\treturn result\n\t\telif resp.status_code == 404:\n\t\t\tresult['response'] = '{0} not found'.format(device_group_oid)\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def from_record(grp: Group) -> \"GetGroupOut\":\n return GetGroupOut(\n authorized_to_login=grp.authorized_to_login,\n created_on=grp.created_on,\n description=grp.description,\n group_id=str(grp.pk),\n group_name=grp.group_name,\n is_autogroup=grp.is_autogroup,\n members=[\n GetUserShortOut.from_record(member) for member in grp.members\n ],\n owner=GetUserShortOut.from_record(grp.owner)\n if grp.owner is not None\n else None,\n updated_on=grp.updated_on,\n )", "def get_members(self):\r\n database = main.connect_to_cloudsql()\r\n cursor = database.cursor()\r\n query = (\"SELECT username from \" + ENV_DB + \".Groups WHERE gid='{}'\").format(self.g_id)\r\n cursor.execute(query)\r\n data = cursor.fetchall()\r\n database.close()\r\n return list(i[0] for i in data)", "def get_queryset(self):\n user = self.request.user\n return user.group_set.all()", "def group_members(self) -> pulumi.Output[Optional[Sequence['outputs.GroupMembersItemResponse']]]:\n return pulumi.get(self, \"group_members\")", "def get_user_group(self, user_id):\n user_sn = self.id_to_sn(user_id)\n self.send_command(cmd=DEFS.CMD_USERGRP_RRQ,\n data=struct.pack('<I', user_sn))\n self.recv_reply()\n return self.last_payload_data[0]", "def getGroup(self, *args):\n return _libsbml.GroupsModelPlugin_getGroup(self, *args)", "def get_group(self, group, intg):\n return self.data[intg, group, :, :]", "def get(isamAppliance, id, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving group\", \"/sysaccount/groups/{0}/v1\".format(id))", "def group_members(group):\n\n group = ldapi.lookup(ld, 'cn', group, cfg['ldap_groups_base'])\n\n if group and 'uniqueMember' in group:\n r = re.compile('^uid=([^,]*)')\n return map(lambda x: r.match(x).group(1), group['uniqueMember'])\n return []", "def get_contactgroup(self, object_name, user_key = None):\n\t\treturn self.get_object('contactgroup',object_name, user_key = user_key)" ]
[ "0.72617346", "0.7245011", "0.7240115", "0.71307135", "0.7093996", "0.7082136", "0.7026646", "0.7011292", "0.6943717", "0.69318485", "0.6926901", "0.68956447", "0.68776196", "0.68700737", "0.68466604", "0.68005854", "0.6799346", "0.6788357", "0.6776635", "0.67737633", "0.6730012", "0.67237824", "0.67001456", "0.66942024", "0.66928077", "0.6691698", "0.6685921", "0.6680963", "0.6667302", "0.66654557", "0.66436666", "0.6634021", "0.65951747", "0.6574081", "0.65738857", "0.65030015", "0.65022326", "0.647247", "0.6455441", "0.6432687", "0.6424268", "0.6383735", "0.63827854", "0.6375808", "0.63634795", "0.6359028", "0.6348828", "0.6334156", "0.63034546", "0.6296041", "0.6285469", "0.62527865", "0.6240623", "0.6235473", "0.6198602", "0.6185879", "0.6179086", "0.61750937", "0.6164191", "0.6149447", "0.6116049", "0.6115731", "0.60888237", "0.6085627", "0.6065247", "0.6042909", "0.60352015", "0.603351", "0.6018075", "0.6015978", "0.60100603", "0.5987288", "0.598404", "0.59488475", "0.59446937", "0.59415066", "0.5924023", "0.59011304", "0.58991957", "0.58879536", "0.588028", "0.58760333", "0.5866494", "0.5866094", "0.58593035", "0.5857885", "0.58517313", "0.5835416", "0.58326745", "0.5824148", "0.5809476", "0.5803041", "0.57846975", "0.57730585", "0.5766323", "0.574951", "0.57466406", "0.5743318", "0.57307595", "0.5727339", "0.57114667" ]
0.0
-1
Gets the specified identity provider's information.
def get_identity_provider(self, identity_provider_id, **kwargs):
        resource_path = "/identityProviders/{identityProviderId}"
        method = "GET"

        expected_kwargs = ["retry_strategy"]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "get_identity_provider got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "identityProviderId": identity_provider_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json"
        }

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="IdentityProvider")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="IdentityProvider")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_consumer_info_for(self, provider):\n return secrets.AUTH_CONFIG[provider]", "def identity_provider_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"identity_provider_name\")", "def get(self,\n provider_id,\n ):\n return self._invoke('get',\n {\n 'provider_id': provider_id,\n })", "def get(self,\n provider_id,\n ):\n return self._invoke('get',\n {\n 'provider_id': provider_id,\n })", "def identity_provider(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider\")", "def _get_provider_details(self, project_id, service_id):\n try:\n provider_details = self.storage_controller.get_provider_details(\n project_id,\n service_id)\n except Exception:\n raise LookupError(u'Service {0} does not exist'.format(\n service_id))\n return provider_details", "def get_provider(self):\n return self.provider", "def identity(self) -> pulumi.Output[Optional['outputs.IdentityInfoResponse']]:\n return pulumi.get(self, \"identity\")", "def identity_provider_type(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def identity_provider_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_name\")", "def get_provider_config(self, provider_id):\n try:\n result = self.client.get('{providers_url}/{id}/?attributes=endpoints'.format(providers_url=self.providers_url, id=provider_id))\n return result\n except Exception as e:\n self.module.fail_json(msg=\"Failed to get provider data. Error: {!r}\".format(e))", "def provider(self) -> str:\n return pulumi.get(self, \"provider\")", "def provider(self) -> str:\n return pulumi.get(self, \"provider\")", "def identity_provider_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"identity_provider_name\")", "def provider_id(self):\n raise NotImplementedError", "def identity(self) -> Optional[pulumi.Input['IdentityInfoArgs']]:\n return pulumi.get(self, \"identity\")", "def provider_id(self):\n return self.get('_id')", "def get_service_provider_details(self,\n headers=None,\n **request_parameters):\n check_type(headers, dict)\n if headers is not None:\n if 'X-Auth-Token' in headers:\n check_type(headers.get('X-Auth-Token'),\n basestring, may_be_none=False)\n\n _params = {\n }\n _params.update(request_parameters)\n _params = dict_from_items_with_values(_params)\n\n path_params = {\n }\n\n with_custom_headers = False\n _headers = self._session.headers or {}\n if headers:\n _headers.update(dict_of_str(headers))\n with_custom_headers = True\n\n e_url = ('/dna/intent/api/v1/service-provider')\n endpoint_full_url = apply_path_params(e_url, path_params)\n if with_custom_headers:\n json_data = self._session.get(endpoint_full_url, params=_params,\n headers=_headers)\n else:\n json_data = self._session.get(endpoint_full_url, params=_params)\n\n return self._object_factory('bpm_dda850a0675b888048adf8d488aec1_v2_2_1', json_data)", "def get_user_info(self) -> str:\n return self._searcher.get_user_info()", "def GetUserInformation(self):\n if _USER_INFO_KEY in self._session:\n return self._session[_USER_INFO_KEY]\n return self._ReCreateUserInfo()", "def provider_entity(provider):\n try:\n return NUTEntity.objects.get(id=provider.first_access().target.id)\n except:\n return None", "def user_info(self):\n response = self.query('user_info')\n return response", "def provider_identity(request):\r\n\r\n response = render_to_response('identity.xml',\r\n {'url': get_xrds_url('login', request)},\r\n mimetype='text/xml')\r\n\r\n # custom XRDS header necessary for 
discovery process\r\n response['X-XRDS-Location'] = get_xrds_url('identity', request)\r\n return response", "def get_user_info(self):\n user_info = self.data_source.get_user_info(self.user_id)\n\n return user_info", "def get_user_info(credentials):\r\n user_info_service = build(\r\n serviceName='oauth2', version='v2',\r\n http=credentials.authorize(httplib2.Http()))\r\n user_info = None\r\n try:\r\n user_info = user_info_service.userinfo().get().execute()\r\n except errors.HttpError, e:\r\n logging.error('An error occurred: %s', e)\r\n if user_info and user_info.get('id'):\r\n return user_info\r\n else:\r\n raise NoUserIdException()", "def identity(self) -> Optional['outputs.IdentityPropertiesResponse']:\n return pulumi.get(self, \"identity\")", "def getUserInfo(self):\r\n userJson = self.httpGet(ReaderUrl.USER_INFO_URL)\r\n result = json.loads(userJson, strict=False)\r\n self.userId = result['userId']\r\n return result", "def get_user_info(credentials):\n user_info_service = build(\n serviceName='oauth2', version='v2',\n http=credentials.authorize(httplib2.Http()))\n user_info = None\n try:\n user_info = user_info_service.userinfo().get().execute()\n except errors.HttpError, e:\n logging.error('An error occurred: %s', e)\n if user_info and user_info.get('id'):\n return user_info\n else:\n raise NoUserIdException()", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def get_user_info(self):\n\n if self._access_token is None:\n raise RequiresAccessTokenError()\n response = self.__make_oauth_request(USER_INFO_URL, token=self._access_token, signed=True)\n return simplejson.loads(response.read())", "def get_userinfo_from_access_token(self) -> dict:\n pass", "def get_cloud_provider(providername):\n return jsonify(admin.get_provider(current_app.scoped_session(), providername))", "def get_info(self, token):\n\n openid_resp = get_remote(get_config(\"login.qq.openid_url\") + token)\n self.log.debug(\"get access_token from qq:\" + token)\n info = json.loads(openid_resp[10:-4])\n\n if info.get(\"error\") is not None:\n raise Exception(info)\n\n return info", "def userinfo(self, **kwargs):\n metadata = self.load_server_metadata()\n resp = self.get(metadata['userinfo_endpoint'], **kwargs)\n resp.raise_for_status()\n data = resp.json()\n return UserInfo(data)", "def get(self,\n provider_id,\n service_instance_id,\n ):\n return self._invoke('get',\n {\n 'provider_id': provider_id,\n 'service_instance_id': service_instance_id,\n })", "def get_provider(self):\r\n if self.provided_by:\r\n return list(self.provided_by)[0]", "def user_info(self):\n return self.auth.get_user_by_session()", "def get_info(self):\n pass", "def get_info(self):\n pass", "def provider(self) -> str:\n return self._provider", "def identity(self) -> pulumi.Output[Optional['outputs.IdentityResponse']]:\n return pulumi.get(self, \"identity\")", "def getUserInfo(userId):\n if(not searchForUser(userId)):\n raise RuntimeError('The user id not exist, the user id=> ' + userId)\n else:\n return client.service.getUser(userid=userId)['return']['user']", "def user_info(self):\n \n return self.auth.get_user_by_session()", "def userinfo(self, access_token: str) -> dict[str, Any]:\n data: dict[str, Any] = self.client.get(\n url=f\"{self.protocol}://{self.domain}/userinfo\",\n headers={\"Authorization\": f\"Bearer {access_token}\"},\n )\n return data", "def 
get_users_info(): \n \n data = user_obj.get_users_info()\n return data", "def get_provider_info_from_provider_yaml(provider_package_id: str) -> dict[str, Any]:\n provider_yaml_dict = get_provider_yaml(provider_package_id=provider_package_id)\n validate_provider_info_with_runtime_schema(provider_yaml_dict)\n return provider_yaml_dict", "def show_provider(cls, args, config):\n # print \"MOLNSProvider.show_provider(args={0}, config={1})\".format(args, config)\n if len(args) == 0:\n print \"USAGE: molns provider show name\"\n return\n print config.get_object(name=args[0], kind='Provider')", "def access_info_get(context, storage_id):\n return _access_info_get(context, storage_id)", "def healthcare_provider_id(self):\n return self._healthcare_provider_id", "def provider_display_name(self) -> Optional[str]:\n return pulumi.get(self, \"provider_display_name\")", "def current_user_info():\n\n return current_user", "def info(self):\n return self.__dict__[self.sid]", "def get_user_info(self, access_token, openid):\n url = get_config(\"login.wechat.user_info_url\") % (access_token, openid)\n return self._access_wxapi_or_raise(url)", "def identity(self) -> Optional['outputs.ResourceIdentityResponse']:\n return pulumi.get(self, \"identity\")", "def get_identity(self):\n return GetIdentity(*self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_GET_IDENTITY, (), '', 33, '8s 8s c 3B 3B H'))", "def userinfo(self):\n return self._userinfo", "def get(self,\n provider_id,\n provider_deployment_map_id,\n ):\n return self._invoke('get',\n {\n 'provider_id': provider_id,\n 'provider_deployment_map_id': provider_deployment_map_id,\n })", "def getInfo():", "def get_personal_info(self):\n self.get(\"INFO\",\"GetPersonalInfo\")\n response = self.send()\n return response", "def search_provider(request: Request, provider_id:UUID = Form(...)):\n provider_data = open_for_reading()\n if provider_data:\n try :\n data = provider_data[str(provider_id)]\n except KeyError:\n return {\"message\": \"invalid provider ID\"}\n return templates.TemplateResponse(\"provider_details.html\", {\n \"request\": request,\n \"provider_id\": provider_id,\n \"provider_data\": data\n })\n else: \n return {\"message\": \"database error\"}", "def get_info(self) -> str:\n return self.info", "def identity(self) -> Optional[pulumi.Input['ServiceIdentityArgs']]:\n return pulumi.get(self, \"identity\")", "def identity(self) -> Optional[pulumi.Input['ServiceIdentityArgs']]:\n return pulumi.get(self, \"identity\")", "def provider_params(self):\n return self.tree.get('provider_params', {})", "def get_info() -> str:\n req = Request(URL + '/info')\n context = ssl._create_unverified_context()\n with urlopen(req, context=context) as response:\n return response.read().decode('utf-8')", "def get_provider_credentials(provider):\n logging.info('Getting provider credentials for {}'.format(provider))\n uppercase_provider = provider.upper()\n username_variable = '{}_USERNAME'.format(uppercase_provider)\n authentication_variable = '{}_AUTHENTICATION'.format(uppercase_provider)\n username = os.environ.get(username_variable, '')\n authentication = os.environ[authentication_variable]\n return authentication, username", "def getInfo(self):\n return self.info", "def get_info(self) -> Optional[Dict[str, Any]]:", "def owner_info(self) -> pulumi.Output['outputs.UserInfoResponse']:\n return pulumi.get(self, \"owner_info\")", "def get_info(self) -> str:\n raise NotImplementedError()", "def getInfo(self):\n return self._info", "def get(self):\n return 
get_all_provider()", "def handle_callback(self, response):\n client = FigshareClient(response['access_token'])\n about = client.userinfo()\n\n return {\n 'provider_id': about['id'],\n 'display_name': u'{} {}'.format(about['first_name'], about.get('last_name')),\n }", "def get_info(self):\n return None", "def cloud_provider_profile(self) -> Optional[pulumi.Input['CloudProviderProfileArgs']]:\n return pulumi.get(self, \"cloud_provider_profile\")", "def cloud_provider_profile(self) -> Optional[pulumi.Input['CloudProviderProfileArgs']]:\n return pulumi.get(self, \"cloud_provider_profile\")", "def cloud_provider_profile(self) -> Optional[pulumi.Input['CloudProviderProfileArgs']]:\n return pulumi.get(self, \"cloud_provider_profile\")", "def cloud_provider_profile(self) -> Optional[pulumi.Input['CloudProviderProfileArgs']]:\n return pulumi.get(self, \"cloud_provider_profile\")", "def identity(self) -> Optional['outputs.DataCollectionEndpointResourceResponseIdentity']:\n return pulumi.get(self, \"identity\")", "def get_provider_properties_dict(self):\n pass", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def provider_display_name(self) -> str:\n return self._provider_display_name", "def info(ctx: CLIContext) -> None:\n fields = [\n keypair_fields['user_id'],\n keypair_fields['full_name'],\n keypair_fields['access_key'],\n keypair_fields['secret_key'],\n keypair_fields['is_active'],\n keypair_fields['is_admin'],\n keypair_fields['created_at'],\n keypair_fields['last_used'],\n keypair_fields['resource_policy'],\n keypair_fields['rate_limit'],\n keypair_fields['concurrency_used'],\n ]\n with Session() as session:\n try:\n kp = session.KeyPair(session.config.access_key)\n item = kp.info(fields=fields)\n ctx.output.print_item(item, fields)\n except Exception as e:\n ctx.output.print_error(e)\n sys.exit(1)", "def user_info(user_id):\n return User.query.filter_by(id=user_id).first()", "def get(self) -> Info:\n return InfoService.get()", "def fetch_customer_info_identities(self, client_id):\n\n try:\n return self._make_private_api_request(\n method=PyttributionIo.GET_REQUEST,\n endpoint='customers',\n subject_id=client_id,\n show_identities='true'\n ).get('customer')\n except RequestException as e:\n logger.error('Pyttribution.io: Retrieval of customer identities failed with HTTP status {exception}'.format(\n exception=e))", "def get_metadata(self):\n try:\n r = requests.get('https://login.mailchimp.com/oauth2/metadata', auth=self)\n except requests.exceptions.RequestException as e:\n raise e\n else:\n r.raise_for_status()\n output = r.json()\n if 'error' in output:\n raise requests.exceptions.RequestException(output['error'])\n return output", "def get_principal(self, principal_id, info=True):", "def details(self, identifier):\n return self.client.request_with_method(Methods.GET % (self.name, identifier,))", "def get_account_info(self):\n resp = requests.get(\n self.URL + 'info/',\n headers={'Authorization': 'Token ' + self.api_key}\n )\n\n return self.__handle_response(resp)", "def info():\n if g.party_id is None:\n # No party is configured for the current site.\n abort(404)\n\n party = party_service.get_party(g.party_id)\n\n return {\n 'party': party,\n }", "def provider_name(self):\n raise NotImplementedError", "def get_provider(self, provider):\n\n if provider is None:\n if self.default_provider is None:\n log.error('a provider must be specified')\n return\n else:\n provider = self.default_provider\n else:\n check = self.check_provider(provider)\n if check is None:\n return 
None\n return provider", "def fusion_api_get_provider(self, param='', api=None, headers=None):\n return self.provider.get(api=api, headers=headers, param=param)", "def get_account_details(self):\n pass", "def get_profile_details(self):\n cursor = self.__connection.cursor()\n cursor.execute(\n \"select first_name, last_name, purchased_products from neutron_buyer where buyer_id=%s\",\n (self.__buyer_id,)\n )\n result = cursor.fetchone()\n if result:\n return result\n raise IDNotFoundException", "def get_user_info(self, token):\n user_info_url = get_config('login.github.user_info_url')\n headers = {\n \"Authorization\": \"token %s\" % token,\n \"Accept\": \"application/json\"\n }\n user_info_resp = get_remote(user_info_url, headers)\n\n user_info = json.loads(user_info_resp)\n if user_info.get(\"message\") is not None:\n raise Exception(user_info)\n\n return user_info", "def _get_auth_data(self, storage_type, provider_id='default'):\n if storage_type == 'S3':\n return self.s3_auth.get(provider_id, None)\n elif storage_type == 'MINIO':\n return self.minio_auth.get(provider_id, None)\n elif storage_type == 'ONEDATA':\n return self.onedata_auth.get(provider_id, None)\n elif storage_type == 'WEBDAV':\n return self.webdav_auth.get(provider_id, None)\n return None", "def get_identity(self, uid):\n self.setQuery(\"\"\"\n Select ?last ?first ?email where {\n ?who <http://vivo.dartmouth.edu/ontology/geiselId> \\\"\"\"\" + str(uid) + \"\"\"\"^^<http://www.w3.org/2001/XMLSchema#int> .\n ?who <http://purl.obolibrary.org/obo/ARG_2000028> ?id .\n ?id <http://www.w3.org/2006/vcard/ns#hasName> ?name .\n ?name <http://www.w3.org/2006/vcard/ns#familyName> ?last .\n ?name <http://www.w3.org/2006/vcard/ns#givenName> ?first .\n optional {\n ?id <http://www.w3.org/2006/vcard/ns#hasEmail> ?mail .\n ?mail <http://www.w3.org/2006/vcard/ns#email> ?email .\n }\n }\"\"\")\n rval = {'last_name': 'LAST',\n 'first_name': 'FIRST',\n 'email': 'EMAIL'\n }\n try:\n qval = self.query()\n try:\n g = qval.convert()\n except:\n pass\n rval['last_name'] = g['results']['bindings'][0]['last']['value']\n rval['first_name'] = g['results']['bindings'][0]['first']['value']\n rval['email'] = g['results']['bindings'][0]['email']['value']\n except:\n pass\n return rval" ]
[ "0.67674404", "0.6680759", "0.66144663", "0.66144663", "0.6562782", "0.6389836", "0.6265224", "0.62433296", "0.61461306", "0.61224926", "0.6072356", "0.60681295", "0.60681295", "0.6066393", "0.60295826", "0.5999033", "0.59945655", "0.59081084", "0.590395", "0.58972937", "0.58887947", "0.58834547", "0.5867754", "0.58442885", "0.5828102", "0.58108354", "0.58063614", "0.58037025", "0.57747364", "0.57747364", "0.5731713", "0.5730497", "0.5730158", "0.572619", "0.57261616", "0.57258326", "0.56825477", "0.5652494", "0.5648566", "0.5648566", "0.5641544", "0.56352943", "0.5615545", "0.5592594", "0.5583806", "0.55749667", "0.5564594", "0.55452496", "0.554486", "0.55416685", "0.5522663", "0.5521479", "0.5518724", "0.5509716", "0.55090207", "0.5508561", "0.5504096", "0.5503993", "0.5503276", "0.55027854", "0.5498776", "0.5496394", "0.5474817", "0.5474817", "0.5473503", "0.5472202", "0.54714906", "0.54635876", "0.5452599", "0.54468673", "0.54415756", "0.5437558", "0.54355973", "0.5428758", "0.5409654", "0.5394588", "0.5394588", "0.5394588", "0.5394588", "0.5392789", "0.5389805", "0.5389368", "0.5384829", "0.53765607", "0.53743446", "0.5373971", "0.53674495", "0.5364012", "0.53618515", "0.5349792", "0.5346637", "0.5332176", "0.53286207", "0.53233147", "0.5321577", "0.53212804", "0.53107303", "0.53037846", "0.53032565", "0.5301636" ]
0.5865107
23
Gets the specified group mapping.
def get_idp_group_mapping(self, identity_provider_id, mapping_id, **kwargs):
        resource_path = "/identityProviders/{identityProviderId}/groupMappings/{mappingId}"
        method = "GET"

        expected_kwargs = ["retry_strategy"]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "get_idp_group_mapping got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "identityProviderId": identity_provider_id,
            "mappingId": mapping_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json"
        }

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="IdpGroupMapping")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="IdpGroupMapping")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_group(self, group_name):\n\n return self._group[group_name]", "def resolv_map(self, group):\n if group not in self._cache_map:\n self._cache_map[group] = self._upcall_read('map', dict(GROUP=group))\n\n return self._cache_map[group]", "def get_group(group):\n\n return ldapi.lookup(ld, 'cn', group, cfg['ldap_groups_base'])", "def get_group(self, group, intg):\n return self.data[intg, group, :, :]", "def get_group(self, group_id):\n return self.root.get(group_id)", "def map(self):\n return self.map_digis(self.group)", "def get_group(self, group_id: str) -> dict:\n group = self.ms_client.http_request(method='GET', url_suffix=f'groups/{group_id}')\n return group", "def getGroup(self, group_id: int) -> 'Group':\n return self.sObj.getGroup(group_id)", "def get_group(self, wanted_group):\n if self.group_file:\n return self._get_group_from_file(wanted_group)\n return self._get_group_from_host(wanted_group)", "def get_group(self, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/\"))", "def get_group(self, address):\n return self.groups[address]", "def get_address_group(self, group=None):\n return self.__get_addr_grp('address-group', group)", "def give_group(self, key):\n return self._grps[key]", "def get_map_groups(user_map):\n pg = user_map.permission_group_user_map.all()\n gids = list(pg.values_list('group', flat=True))\n if len(gids) > 0:\n return Group.objects.filter(id__in=gids)\n\n return Group.objects.filter(name=DEFAULT_GROUP)", "def getGroup(self):\n\t\treturn self.Group", "def get_group(self):\n\t\treturn self.variables.get('group')", "def _get_group(self):\n if self.resource.group is not None:\n try:\n return grp.getgrnam(self.resource.group).gr_gid\n except KeyError:\n raise error.InvalidGroup()", "def getGroup(self, resname, atomname):\n group = \"\"\n if resname in self.map:\n resid = self.map[resname]\n if resid.hasAtom(atomname):\n atom = resid.atoms[atomname]\n group = atom.group\n return group", "def getGroup(self, index):\n index = int(index)\n if index < 0:\n return self.top_group1\n elif index > (self.layers - 1):\n index = (self.layers - 1)\n return self.groups[index]", "def get_group(self):\n return self._group", "def _get_gid_map(self, level=0):\n fof_rdd = self.fof_rdd\n sc = self.sc\n\n nPartitions = sc.defaultParallelism*5\n\n groups_map = (fof_rdd.flatMap(lambda p: p[np.where(p['is_ghost'])[0]])\n .map(pid_gid)\n .groupByKey(nPartitions)\n .values()\n .filter(lambda x: len(x)>1)\n .map(lambda x: sorted(x))\n .flatMap(lambda gs: [(g, gs[0]) for g in gs[1:]]))\n\n return groups_map", "def get(cls, group_id, db_session=None):\n db_session = get_db_session(db_session)\n return db_session.query(cls.model).get(group_id)", "def get(self):\n self._group = self._client.get(\n url=self._client.get_full_url(\n self.get_path(\n 'single', realm=self._realm_name, group_id=self._group_id\n )\n )\n )\n self._group_id = self._group[\"id\"]\n return self._group", "def get_groups(self, group_name):\r\n assert group_name in self.groups.keys(), group_name\r\n try:\r\n group_list = self.groups[group_name]\r\n except KeyError:\r\n raise GroupKeyError()\r\n return group_list", "def group(self):\n if iswin32:\n raise NotImplementedError(\"XXX win32\")\n import grp\n\n entry = error.checked_call(grp.getgrgid, self.gid) # type:ignore[attr-defined]\n return entry[0]", "def group_nodes(self, group, namespace=None):\n source = 
self._source(namespace)\n return self._list(source, 'map', group)", "def get_group(self, group_id: str):\n\n return self._get(f\"cloudConnectorGroups/{group_id}\")", "def get(self, group_id=None):\n utils.check_group_id_in_req(group_id)\n #group_id = int(group_id)\n _check_group_exists(group_id)\n (process_id, ip_addr) = GID_COORD_DICT[group_id]\n response = {constants.COORD_PID_KEY: process_id,\n constants.COORD_IP_KEY: ip_addr}\n return response", "def get_group(self, group_id):\n\t\treturn Group(group_id, self.user_id, self.site_id)", "def get_group(self, group_name, marker=None, max_items=None):\r\n params = {'GroupName' : group_name}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('GetGroup', params, list_marker='Users')", "def get_group(self, group_path=None):\n if group_path is not None:\n path = '/group/' + group_path\n else:\n path = '/group/%2F'\n try:\n response = self.__session.get(self.__api_base_url + path)\n response.raise_for_status()\n response = response.json()\n except (requests.HTTPError, requests.ConnectionError), error:\n raise Exception(error.message)\n\n return response", "def getGroupById(self, id):\n for group in self.groups:\n if group.id == id:\n return group\n\n return None", "def _get_group_from_file(self, wanted_group):\n wanted_gid = \"\"\n if (isinstance(wanted_group, int) or\n re.match(\"^\\\\d+$\", wanted_group)):\n wanted_gid = str(wanted_group)\n wanted_group = \"\"\n try:\n ingroup = open(self.group_file)\n except (IOError, OSError):\n return (\"\", \"\", \"\")\n else:\n for line in ingroup:\n (group, dummy, gid, users) = line.strip().split(':')\n if wanted_group and group == wanted_group:\n return (group, gid, users)\n if wanted_gid and gid == wanted_gid:\n return (group, gid, users)\n ingroup.close()\n return (\"\", \"\", \"\")", "def get_group(self) -> Optional[str]:\n return self.group", "def get(person_group_id):\n url = 'persongroups/{}'.format(person_group_id)\n\n return util.request('GET', url)", "def get_map(self, name, return_type='image'):\n m = self.maps.get(name)\n if m is None:\n raise ValueError(\"No map with name '{}' found.\".format(name))\n return self.masker.inverse_transform(m) if return_type == 'image' else m", "def GetGroup(self, group, reason=None):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_GET,\n \"/%s/groups/%s\" % (GANETI_RAPI_VERSION, group),\n query, None)", "def getMatchGroupDicByGlyph(inputGlyph, groupDict):\n\tif inputGlyph in groupDict.keys():\n\t\treturn groupDict\n\t\n\treturn None", "def give_group_key(self, mesh, grp_name):\n grps = self._grps\n for key in grps:\n if grp_name in grps[key].find_groups(mesh):\n return key\n else:\n mess = \"Group '%s' not found on the mesh '%s'\"\n raise ValueError(mess % (grp_name, mesh.read_name()))", "def getGroupInfo(groupId):\n url = f\"https://groups.roblox.com/v1/groups/{groupId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j", "def get_map_from_id(self, id_m):\n return self.id_to_map_dict[id_m]", "def get_mapping(self):\n if self.role:\n return self.role.get_mapping(self.mapping)\n\n return self.mapping", "def getGroup(self, *args):\n return _libsbml.GroupsModelPlugin_getGroup(self, *args)", "def fusion_api_get_group_role_assignment(self, uri=None, param='', api=None, headers=None):\n return self.LoginDomainsGroupToRoleMapping.get(uri=uri, api=api, headers=headers, param=param)", "def get(self, *args):\n return _libsbml.ListOfGroups_get(self, 
*args)", "def get_group_member(self, group):\n fake_group_obj = SimpleNamespace(id=group[\"id\"])\n current_identity = self.context[\"identity\"]\n avatar = current_groups_service.links_item_tpl.expand(\n current_identity, fake_group_obj\n )[\"avatar\"]\n return {\n \"type\": \"group\",\n \"id\": group[\"id\"],\n \"name\": group.get(\"name\") or group[\"id\"],\n \"description\": group.get(\"description\", \"\"),\n \"avatar\": avatar,\n }", "def mapping(self, release_id, grouping):\n\n helper = Known(self.config, self.session)\n classes = [g['name']['full'] for g in grouping]\n return helper.mapping(release_id, classes)", "def get_group(tkn: Token = Depends(from_authotization_header_nondyn),):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return [\n GetGroupShortOut(group_id=str(grp.pk), group_name=grp.group_name)\n for grp in Group.objects().order_by(\"group_name\")\n ]", "def get_group_output(group_id: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetGroupResult]:\n ...", "def get_group_access(self, group):\n return self._access_lists.get_group_access(group)", "def _get_group_from_host(self, wanted_group):\n wanted_gid = \"\"\n if (isinstance(wanted_group, int) or\n re.match(\"^\\\\d+$\", wanted_group)):\n wanted_gid = str(wanted_group)\n wanted_group = \"\"\n if wanted_gid:\n try:\n hgr = grp.getgrgid(int(wanted_gid))\n except (IOError, OSError, KeyError):\n return (\"\", \"\", \"\")\n return (str(hgr.gr_name), str(hgr.gr_gid), str(hgr.gr_mem))\n\n try:\n hgr = grp.getgrnam(wanted_group)\n except (IOError, OSError, KeyError):\n return (\"\", \"\", \"\")\n return (str(hgr.gr_name), str(hgr.gr_gid), str(hgr.gr_mem))", "def group(self):\n return self.properties.get('Group', None)", "def get_group_by_name_get(self, groupName, groupType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/Name/{groupName}/{groupType}/\"))", "def get(self, group) -> Optional[OrderedDict]:\n return self._queue.get(group)", "def get_group(username: str) -> Group:\n return grp.getgrnam(username)", "def list_group(group):\n\n members = group_members(group)\n ret = {}\n if members:\n for member in members:\n info = get(member)\n if info:\n ret[uid2dn(member)] = info\n return ret", "def _get_group(self, group_or_name, autocreate=False):\n group = group_or_name if isinstance(group_or_name, OptGroup) else None\n group_name = group.name if group else group_or_name\n\n if group_name not in self._groups:\n if not autocreate:\n raise NoSuchGroupError(group_name)\n\n self.register_group(group or OptGroup(name=group_name))\n\n return self._groups[group_name]", "def getStrGroup(self, groupname, default=None):\n \n try:\n value = self.match.group(groupname)\n if value is None: return default\n return value\n except: return default", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def getAGroupInfo(group_id):\r\n return Group.getAGroupInfo(group_id)", "def get_destination_group(self):\n sg = self.source_group\n dd = self.destination_directory\n\n while True:\n try:\n matches = dd.groups.search({'name': sg.name})\n return matches[0] if len(matches) > 0 else None\n except StormpathError as err:\n logger.error('Failed to 
search for Group: {} in Directory: {} ({})'.format(sg.name.encode('utf-8'), dd.name.encode('utf-8'), err))", "def getGroupByName(self, name):\n for group in self.groups:\n if name == group.name:\n return group\n\n return None", "def get_group(group_id: Optional[str] = None,\n location: Optional[str] = None,\n project: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult:\n __args__ = dict()\n __args__['groupId'] = group_id\n __args__['location'] = location\n __args__['project'] = project\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('google-native:vmmigration/v1alpha1:getGroup', __args__, opts=opts, typ=GetGroupResult).value\n\n return AwaitableGetGroupResult(\n create_time=pulumi.get(__ret__, 'create_time'),\n description=pulumi.get(__ret__, 'description'),\n display_name=pulumi.get(__ret__, 'display_name'),\n name=pulumi.get(__ret__, 'name'),\n update_time=pulumi.get(__ret__, 'update_time'))", "def group(self):\n return self._group", "def group(self):\n return self._group", "def group(self):\n return self._group", "def group(self, group_id):\r\n return resources.Group(self, group_id)", "def group(self) -> Optional[str]:\n return pulumi.get(self, \"group\")", "def get_groups(self) -> dict:\n return dict(self._groups)", "def get_group_info(groupname):\n return jsonify(admin.get_group_info(current_app.scoped_session(), groupname))", "def get_config(group):\n config = toml.load('./config.toml')\n return config[group]", "def what_is(self, _id):\n for g in self.groups:\n if _id in self.h_group_ids[g]:\n return g\n return None", "def get_mapping(self, ksf: str) -> InfoResMapping:\n irm = self.InfoResMapping(self, ksf)\n return irm", "def GroupId(self):\n\t\treturn self._get_attribute('groupId')", "def from_record(grp: Group) -> \"GetGroupOut\":\n return GetGroupOut(\n authorized_to_login=grp.authorized_to_login,\n created_on=grp.created_on,\n description=grp.description,\n group_id=str(grp.pk),\n group_name=grp.group_name,\n is_autogroup=grp.is_autogroup,\n members=[\n GetUserShortOut.from_record(member) for member in grp.members\n ],\n owner=GetUserShortOut.from_record(grp.owner)\n if grp.owner is not None\n else None,\n updated_on=grp.updated_on,\n )", "def getGroupId(groupName):\r\n return Group.getGroupId(str(groupName))", "def get_map(self):\n return self.map", "def get_group(group_data,path):\n path_name = path.label()\n group_name = re.sub(r'[0-9]+','',path_name)\n if group_name not in group_data:\n group_data[group_name] = len(group_data.keys())\n return group_data[group_name]", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def getIntGroup(self, groupname, default=None):\n try: return int(self.match.group(groupname))\n except: return default", "def group(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"group\")", "def group(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"group\")", "def group(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"group\")", "def getMapping(self):\n self._process()\n return self._mapping", "def info_materials_groups_get():\n session = info_map.Session()\n\n mat = aliased(info_map.Material)\n grp = aliased(info_map.Group)\n\n q = session.query(mat.group_id,grp.name).join(grp).distinct()\n groups = [Group(group=row.group_id,name=row.name) for row in q.all()]\n return groups, 200", "def get_group(self, x, 
y):\n if self[x, y] not in self.TURNS:\n raise BoardError('Can only get group for black or white location')\n\n return self._get_group(x, y, set())", "def get_or_create_group(fw, group_id, group_label):\n\n groups = fw.groups.find(f\"label={group_label}\")\n if len(groups) > 0:\n print(f\"Found group.\")\n group = groups[0]\n print(f\"group.label {group.label}\")\n print(f\"group.id {group.id}\")\n else:\n print(\"Group not found - Creating it.\")\n group_id = fw.add_group(flywheel.Group(group_id, group_label))\n group = fw.get_group(group_id)\n print(f\"group.label {group.label}\")\n print(f\"group.id {group.id}\")\n return group", "def getGroups(self):\n return [g[0] for g in grp.getgrall()]", "def group_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"group_id\")", "def get_group_selector(*args):\n return _ida_segment.get_group_selector(*args)", "def findMappedModeRequestRef(self, modeDeclarationGroupRef):\r\n return self.modeRequestMap.get(modeDeclarationGroupRef, None)", "def __getitem__(self, index):\n return self.group_list[index]", "def get_identity_group(self, group):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.identitygroup.1.0+xml'})\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tresp = self.ise.get('{0}/config/identitygroup?filter=name.EQ.{1}'.format(self.url_base, group))\n\t\tfound_group = ERS._to_json(resp.text)\n\n\t\tif found_group['ns3:searchResult']['@total'] == '1':\n\t\t\tresp = self.ise.get('{0}/config/identitygroup/{1}'.format(\n\t\t\t\t\tself.url_base, found_group['ns3:searchResult']['ns3:resources']['ns5:resource']['@id']))\n\t\t\tif resp.status_code == 200:\n\t\t\t\tresult['success'] = True\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns4:identitygroup']\n\t\t\t\treturn result\n\t\t\telif resp.status_code == 404:\n\t\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\t\telse:\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\telif found_group['ns3:searchResult']['@total'] == '0':\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = 404\n\t\t\treturn result\n\n\t\telse:\n\t\t\tresult['response'] = '{0} not found'.format(group)\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def groupfinder(name, request):\n #FIXME: Implement\n return ()\n return request.context.get_groups(name)", "def customer_group_get(group_id=None):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n query = \"\"\"\n SELECT \n `group_id`,\n `group_name`,\n `description`,\n `timestamp`,\n `created_by`,\n `creation_time`,\n `is_deleted`,\n `updated_by`,\n `role_id`,\n `is_default`,\n `is_customer`,\n `company_name`,\n `company_address`,\n `company_telephone`,\n `company_fax`,\n `company_website`,\n `company_sales_contact`,\n `company_purchase_contact`,\n `company_business`,\n `company_business_type`,\n `company_sales_email`,\n `company_purchase_email`,\n `company_reg_number`,\n `company_vat_number` \n FROM `groups` \n WHERE `is_customer` = 1\n \"\"\"\n\n if group_id:\n query += \"\"\"\n AND `group_id` = \\\"%s\\\"\n \"\"\" % (group_id)\n\n group_details = None\n cursor = db.cursor()\n\n if cursor.execute(query) != 0:\n group_details = cursor.fetchall()\n\n cursor.close()\n db.close()\n\n return group_details", "def get_group(group, 
number):\n return '-'.join([get_random_2(number) for i in xrange(group)])", "def _map_group_to_other_column(params, column_name):\n to_plot = params[~params[\"group\"].isin([None, False, np.nan, \"\"])]\n group_to_indices = to_plot.groupby(\"group\").groups\n group_to_values = {}\n for group, loc in group_to_indices.items():\n group_to_values[group] = to_plot[column_name].loc[loc].tolist()\n return group_to_values", "def _get_gid(name):\n if getgrnam is None or name is None:\n return None\n try:\n result = getgrnam(name)\n except KeyError:\n result = None\n if result is not None:\n return result[2]\n return None", "async def get_group_info(self, group_id: int) -> models.Group:\n results = await self._api.call('group', 'get_group_info', gid=group_id)\n return models.Group(results.payload)", "def get_group_name(\n group_id: BSONObjectId,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return GetGroupOut.from_record(Group.objects(pk=group_id).get())" ]
[ "0.70288837", "0.6876679", "0.68675697", "0.6823533", "0.6671686", "0.64943933", "0.6347924", "0.6328841", "0.63257825", "0.6268732", "0.6233412", "0.6212056", "0.61379856", "0.6134247", "0.60706836", "0.60260206", "0.60117376", "0.5994067", "0.5948818", "0.5932824", "0.590336", "0.59008014", "0.5890851", "0.5855379", "0.58528274", "0.58380425", "0.582541", "0.58222586", "0.58202106", "0.5788551", "0.5787997", "0.5782731", "0.5766401", "0.5765812", "0.57596594", "0.5756455", "0.57522506", "0.5731029", "0.57215184", "0.5720401", "0.57193327", "0.5718228", "0.5707055", "0.5674623", "0.5671901", "0.56714046", "0.56684303", "0.56661516", "0.5664253", "0.56588817", "0.5654932", "0.5632963", "0.56274956", "0.5625044", "0.5619704", "0.56145716", "0.56091577", "0.56086516", "0.5599096", "0.5584641", "0.55815583", "0.55610996", "0.5537132", "0.55293477", "0.55293477", "0.55293477", "0.5524706", "0.5516328", "0.55038744", "0.54998434", "0.54804045", "0.54652536", "0.54560184", "0.5453657", "0.54533273", "0.54464066", "0.5432706", "0.5424707", "0.54241645", "0.54163283", "0.5403615", "0.5403615", "0.5403615", "0.5396217", "0.5394937", "0.53928685", "0.5386078", "0.538594", "0.53828007", "0.53808093", "0.5379992", "0.53736764", "0.5371928", "0.53684586", "0.53662753", "0.5366233", "0.53658926", "0.53577816", "0.5353239", "0.535316" ]
0.54366267
76
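The rank value above closes one retrieval row. Before the next row begins, here is a minimal sketch (not part of the dataset) of how a row like this might be consumed for contrastive training, assuming each parsed row is a dict carrying the query string, its positive document, and the negatives list named by the triplet objective in the row metadata below; the `make_triplets` helper, the `rows` input, and the `max_negatives` cap are hypothetical names introduced for illustration.

# Sketch: forming (query, positive, negative) training triplets from one
# parsed row. Assumes a row is a dict with "query", "document", and
# "negatives" keys, matching the triplet objective listed in the metadata.
def make_triplets(row, max_negatives=4):
    """Pair the row's query with its positive document and each hard negative."""
    query = row["query"]
    positive = row["document"]
    return [(query, positive, neg) for neg in row["negatives"][:max_negatives]]

# Usage with a toy row:
# make_triplets({"query": "q", "document": "d+", "negatives": ["d-1", "d-2"]})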
Get the specified MFA TOTP device for the specified user.
def get_mfa_totp_device(self, user_id, mfa_totp_device_id, **kwargs): resource_path = "/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_mfa_totp_device got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "mfaTotpDeviceId": mfa_totp_device_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="MfaTotpDeviceSummary") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="MfaTotpDeviceSummary")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_mfa_totp_device(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_mfa_totp_device got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")", "def delete_mfa_totp_device(self, user_id, mfa_totp_device_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_mfa_totp_device got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"mfaTotpDeviceId\": mfa_totp_device_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def list_mfa_totp_devices(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n 
\"sort_by\",\n \"sort_order\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_mfa_totp_devices got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n if 'sort_by' in kwargs:\n sort_by_allowed_values = [\"TIMECREATED\", \"NAME\"]\n if kwargs['sort_by'] not in sort_by_allowed_values:\n raise ValueError(\n \"Invalid value for `sort_by`, must be one of {0}\".format(sort_by_allowed_values)\n )\n\n if 'sort_order' in kwargs:\n sort_order_allowed_values = [\"ASC\", \"DESC\"]\n if kwargs['sort_order'] not in sort_order_allowed_values:\n raise ValueError(\n \"Invalid value for `sort_order`, must be one of {0}\".format(sort_order_allowed_values)\n )\n\n query_params = {\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"sortBy\": kwargs.get(\"sort_by\", missing),\n \"sortOrder\": kwargs.get(\"sort_order\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[MfaTotpDeviceSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[MfaTotpDeviceSummary]\")", "def __retrieve_rt_token(user_id):\n\n slack_user = user_profile(user_id)\n if slack_user['ok']:\n username = slack_user['user']['profile'].get('email', '').split('@')[0]\n user = get_user_model().objects.filter(username=username).first()\n if user:\n prefs = UserPreferences.objects.filter(user=user).first()\n if prefs:\n if prefs.rt_token:\n cipher_suite = Fernet(settings.CRYPTO_KEY)\n return cipher_suite.decrypt(prefs.rt_token.encode('utf-8')).decode('utf-8')\n return None", "def retrieve_user_devices(self, user_id):\n if user_id is None:\n self.log_error(MongoDatabase.retrieve_user_devices.__name__ + \"Unexpected empty object: user_id\")\n return None\n\n try:\n user_id_obj = ObjectId(user_id)\n user = self.users_collection.find_one({\"_id\": user_id_obj})\n if user is not None:\n if 'devices' in user:\n return user['devices']\n except:\n traceback.print_exc(file=sys.stdout)\n self.log_error(sys.exc_info()[0])\n return None", "def get_custom_jwt(user, device):\n jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER\n payload = jwt_otp_payload(user, device)\n return jwt_encode_handler(payload)", "def generate_totp_seed(self, user_id, mfa_totp_device_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}/actions/generateSeed\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for 
_key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"generate_totp_seed got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"mfaTotpDeviceId\": mfa_totp_device_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")", "def activate_mfa_totp_device(self, user_id, mfa_totp_device_id, mfa_totp_token, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}/actions/activate\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"activate_mfa_totp_device got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"mfaTotpDeviceId\": mfa_totp_device_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing),\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=mfa_totp_token,\n response_type=\"MfaTotpDeviceSummary\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=mfa_totp_token,\n response_type=\"MfaTotpDeviceSummary\")", "def fetch_token(self, user_id, password):\n url = buildCommandUrl(self.server, \"/as/user/token\")\n result = json_request(\"POST\", url, {\n \"userId\": user_id,\n \"password\": password\n })\n return 
result[\"token\"]", "def get_token(self, user_id, token_id):\n query = \"\"\"SELECT yubikeys.attribute_association_id AS yubikeys_attribute_association_id,\n yubikeys.id AS yubikeys_id,\n yubikeys.prefix AS yubikeys_prefix,\n yubikeys.enabled AS yubikeys_enabled\n FROM yubikeys, user_yubikeys\n WHERE user_yubikeys.user_id = %s\n AND yubikeys.prefix = %s\n AND yubikeys.id = user_yubikeys.yubikey_id\n ORDER BY yubikeys.prefix\"\"\"\n self._execute(query, (user_id, token_id))\n return self._dictfetchone()", "def get_user(self, user_id=None):\n raise NotImplementedError", "def get(user):\n if user:\n return Member.get_by_key_name(user.user_id())", "def get_token(self, user_id, token_id):\n query = \"\"\"SELECT yubikeys.attribute_association_id AS yubikeys_attribute_association_id,\n yubikeys.id AS yubikeys_id,\n yubikeys.prefix AS yubikeys_prefix,\n yubikeys.enabled AS yubikeys_enabled\n FROM yubikeys\n INNER JOIN user_yubikeys\n ON user_yubikeys.yubikey_id = yubikeys.id\n WHERE user_yubikeys.user_id = %s\n AND yubikeys.prefix = %s\"\"\"\n self._execute(query, (user_id, token_id))\n return self._dictfetchone()", "def get_token(self, user):\n\n jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER\n jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER\n payload = jwt_payload_handler(user)\n token = jwt_encode_handler(payload)\n return token", "def for_user(cls, user):\n\n token = super().for_user(user)\n\n TokenMeta.objects.get_or_create(\n jti=token['jti'],\n token=str(token),\n )\n\n return token", "def get_permission_user(self, user_id):\n return self.execute(TABELLE[\"id_users\"]['select']['from_id'], (user_id,))", "def _get_device(self, dev_id):\n tuya = self.hass.data[DOMAIN][TUYA_DATA]\n return tuya.get_device_by_id(dev_id)", "def retrieve_user_devices(self, user_id):\n if self.database is None:\n raise Exception(\"No database.\")\n if user_id is None or len(user_id) == 0:\n raise Exception(\"Bad parameter.\")\n devices = self.database.retrieve_user_devices(user_id)\n if devices is not None:\n devices = list(set(devices)) # De-duplicate\n return devices", "def get_user(self, user_id):\n return None # noqa: WPS324", "def jwt_otp_payload(user, device=None):\n # username_field = get_username_field()\n username = get_username(user)\n\n payload = {\n 'user_id': user.pk,\n 'username': username,\n 'exp': datetime.utcnow() + api_settings.JWT_EXPIRATION_DELTA\n }\n\n # Include original issued at time for a brand new token,\n # to allow token refresh\n if api_settings.JWT_ALLOW_REFRESH:\n payload['orig_iat'] = timegm(\n datetime.utcnow().utctimetuple()\n )\n\n if api_settings.JWT_AUDIENCE is not None:\n payload['aud'] = api_settings.JWT_AUDIENCE\n\n if api_settings.JWT_ISSUER is not None:\n payload['iss'] = api_settings.JWT_ISSUER\n\n # custom additions\n is_user_and_device = user is not None and device is not None\n is_users_device = is_user_and_device and device.user_id == user.id\n is_device_confirmed = is_users_device and device.confirmed is True\n if is_device_confirmed:\n payload['otp_device_id'] = device.persistent_id\n else:\n payload['otp_device_id'] = None\n\n return payload", "def find_token_by_user_id(session, user_id):\n return session.query(Token).filter(Token.user_id == user_id).one_or_none()", "def getUser(self, user_uuid):\n if user_uuid in self.users.keys():\n return self.users[user_uuid]\n else:\n return None", "def get_token(user, password):\n url = urljoin(PivotalTrackerService.URI, \"me\")\n auth = (user, password)\n response = PivotalTrackerService.get_response(\"get\", 
url, auth=auth)\n\n try:\n response.raise_for_status()\n data = response.json()\n ret_val = data[\"api_token\"]\n except RequestException:\n ret_val = None\n\n return ret_val", "def get_user(self, token: str) -> Optional[User]:", "def get_user(self, token: str) -> Optional[User]:", "def get_user(self, user_id):\n oauth_user = OAuthioUser.objects.filter(user__id=user_id)\n if oauth_user.exists():\n return oauth_user.get().user", "def get(self, user_id):\n user = UserServices(public_id=user_id).get_an_item()\n if not user:\n api.abort(404)\n else:\n return user", "def get_user(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload={}, request_type=self.REQUEST_GET, version=\"v2\")", "def get_user(self, user_id):\n uri = 'users/' + user_id\n return self.make_request(uri)", "def get_user(self, user, instance=None):\n instance = self._get_resource(_instance.Instance, instance)\n return self._get(_user.User, user)", "def get_customer(self, user):\n if not user.stripe_customer_id:\n return None\n\n return stripe.Customer.retrieve(user.stripe_customer_id)", "def get_token(cls, user, full_result=False):\n if user is None:\n return EMPTY_KNOX_TOKEN\n result = AuthToken.objects.create(user=user)\n return result if full_result else result[1]", "def get(self, user_id):\n return User.get(user_id)", "def get_user(self, user_id):\n return UserModel._default_manager.get(pk=user_id)", "def get_all_mfa_devices(self, user_name, marker=None, max_items=None):\r\n params = {'UserName' : user_name}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('ListMFADevices',\r\n params, list_marker='MFADevices')", "def get_user_from_jwt_token(user_from_token):\n user_id = user_from_token.get('id')\n user_address = user_from_token.get('address')\n\n if user_id and user_address:\n user = User.get_by_id(user_id)\n\n if user and user.id == user_id and user.address == user_address:\n return user\n\n return None", "def login_user(user_id):\n session_service.get_session_token(user_id)", "def devices_for_user(self, user, confirmed=None):\n devices = self.model.objects.filter(user=user)\n if confirmed is not None:\n devices = devices.filter(confirmed=bool(confirmed))\n\n return devices", "def get_device(token, device_id, sensitive_data=False):\n\n tenant = init_tenant_context(token, db)\n orm_device = assert_device_exists(device_id)\n return serialize_full_device(orm_device, tenant, sensitive_data)", "def list_tokens(user):\n return AppSpecificAuthToken.select().where(AppSpecificAuthToken.user == user)", "def login(self, *, app, user):\n method = 'POST'\n path = self.path('login')\n app = extract_id(app)\n user = extract_name(user)\n data = {'app_id': app,\n 'user_id': user}\n\n token = yield from authenticate(self.req_handler,\n method,\n path,\n json=data)\n return token", "def get_object_with_user(self, user):\n try:\n uid = int(user)\n except TypeError:\n try:\n uid = user.id \n except:\n return None\n try:\n return self.get(db_player__user__id=uid)\n except Exception:\n return None", "def get_user_phone(cls, userid):\n\n user = User.query.filter_by(user_id=userid).one()\n\n user_phone = user.mobile_phone\n\n return user_phone", "def get_device_by_uuid(self, device_id: str):\n return get_device_by_uuid(self.api_key, device_id)", "def get_session_by_user(user):\n if user:\n return manager.get_session(user)\n else:\n return 
manager.get_default_session()", "def getUser():\n\t\tuser = users.get_current_user()\n\t\tuserList = db.GqlQuery(\"SELECT * FROM AppUser WHERE id = :1 LIMIT 1\",\n\t\t\t\t\t\t\tuser).fetch(1)\n\t\tif userList == []:\t\t# Wasn't found\n\t\t\treturn AppUser.registerUser()\n\t\treturn userList[0]", "def me_get(): # noqa: E501\n s = base.check_session()\n return _cleanuser(s['user'])", "def retrieve_user_from_device(self, device_str):\n if self.database is None:\n raise Exception(\"No database.\")\n if device_str is None or len(device_str) == 0:\n raise Exception(\"Device string not provided.\")\n return self.database.retrieve_user_from_device(device_str)", "def _get_token(self):\n return user.get_token()", "def _get_device():\n return context.get_context('device_target')", "def get_device_by_uuid(cls, device_id):\n return cls.dbdriver.get_device_by_uuid(device_id)", "def get_device(self, dev_id):\n return self.api_request('GET', self.url + '/device/' + str(dev_id), {})", "def get_user(self, user_id):\n User = get_user_model()\n try:\n return User.objects.get(pk=user_id)\n except User.DoesNotExist:\n return None", "def get_user(self, user_id):\n try:\n User = get_user_model()\n return User.objects.get(pk=user_id)\n except User.DoesNotExist:\n return None", "def get_user(self):\n\n user_session = self.get()\n if not user_session:\n return None\n\n us = ServiceLocator.resolve(ServiceLocator.USERS)\n return us.single(user_session.login)", "def get_user(self, user_id: int) -> dict:\n user = self.call_method('getUser', user_id=user_id)\n return user", "def device(self):\n return self.broker.device(**{\"DeviceRouteID\": self.DeviceRouteID})", "def get_user(user_id):\n try:\n return UserModel.objects.get(id=user_id)\n except UserModel.DoesNotExist:\n return None", "def device_from_request(request):\n from mezzanine.conf import settings\n try:\n # If a device was set via cookie, match available devices.\n for (device, _) in settings.DEVICE_USER_AGENTS:\n if device == request.COOKIES[\"mezzanine-device\"]:\n return device\n except KeyError:\n # If a device wasn't set via cookie, match user agent.\n try:\n user_agent = request.META[\"HTTP_USER_AGENT\"]\n except KeyError:\n pass\n else:\n for (device, ua_strings) in settings.DEVICE_USER_AGENTS:\n for ua_string in ua_strings:\n if ua_string in user_agent:\n return device\n return \"\"", "async def get_user(self, user_target: str) -> Optional[User]:\n if len(user_target) >= 17:\n try:\n user_id = int(user_target)\n except ValueError:\n pass\n else:\n user: User = self.bot.get_user(user_id)\n if user is None:\n try:\n user = await self.bot.fetch_user(user_id)\n except NotFound:\n return None\n return user\n return None", "def get_user(cls, user_id):\n try:\n return User.objects.get(pk=user_id)\n except User.DoesNotExist:\n return None", "def getUser(self, resource):\n if isinstance(resource, int):\n resource = 'users/{0}'.format(resource)\n\n res = self.getRequest(resource)\n\n if res:\n user = vsdModels.User(**res)\n return user\n else:\n return None", "def get_user(self, user_id):\n return self.my_get_user(self.get_all_dbusers(), user_id)", "async def pm(self, user_id: str) -> Tuple[str, str]:\n\n data = {\"user_id\": user_id}\n\n reply = await self._connection.send(\"pm-initiate\", data)\n data = self._extract_data(reply)\n\n pm_id = data[\"pm_id\"]\n to_nick = data[\"to_nick\"]\n return pm_id, to_nick", "def user(self, user_token, user_device=None):\n self.set('user', user_token)\n self.set('device', user_device)", "def get_actor(self, user_id=None):\n 
if user_id is None:\n user_id = 'me'\n return self.user_to_actor(json.loads(\n self.urlopen(API_OBJECT_URL % user_id).read()))", "def get_user(self, user_id):\n try:\n return Account.objects.get(pk=user_id)\n except Account.DoesNotExist:\n return None", "def user(self):\n u = self.user_info\n return self.user_model.get_by_id(u['user_id']) if u else None", "def get_user(id):\n pass", "def verify_two_factor_token(username, token):\n device = TOTPDevice.objects.device_for_user(username)\n if device:\n return device.verify_token(token)", "def deactivate_mfa_device(self, user_name, serial_number):\r\n params = {'UserName' : user_name,\r\n 'SerialNumber' : serial_number}\r\n return self.get_response('DeactivateMFADevice', params)", "def get_user(self):\n if \"user\" not in self._data:\n self._data[\"user\"] = User.objects.get(pk=self.kwargs[\"user_id\"])\n return self._data[\"user\"]", "def get_person(self, user_id):\n endpoint = '/user/{}'.format(user_id)\n return self.get_request(endpoint)", "def get_user(self):\n raise NotImplementedError", "def get_one_user():", "def get_device(self, field):\n return self._devices[field]", "def getuser(gh, user):\n return gh.users(user).get()", "def get_user(self, object_id):\n return self.get_object(\"user\", object_id)", "def user(self):\n u = self.user_info\n return self.user_model.get_by_id(u['user_id']) if u else None", "def get_device(arn=None):\n pass", "def get_user(self, user_reference):\n url = 'users/{0}'.format(user_reference)\n result = self.get(url)\n return result.get('user', result)", "def user(self):\n return self.getattr('user')", "async def get(self, id: UUID4) -> Optional[UD]:\n user = await self.async_deta_base.get(key=id)\n return self.user_db_model(**user) if user else None", "def get_user_from_id(user_id):\n return Users.query.filter_by(id=user_id).first()", "def getUserInfo(self, user):\n return pwd.getpwnam(user)[2:4]", "def get_task_user():\n from olympia.users.models import UserProfile\n\n return UserProfile.objects.get(pk=settings.TASK_USER_ID)", "def user_drf_client(user):\n client = APIClient()\n client.force_authenticate(user=user)\n return client", "def test_get_device_token(self):\n pass", "def get_user(user: User) -> User:\n if user.is_authenticated:\n return user\n else:\n return get_anonymous_user()", "def member(self, user):\n return self.search(uid=user)[0][1]", "def get_user(self):\n return self.user", "def get_user(self):\n return self.user", "def get_user(self, wanted_user):\n if self.passwd_file:\n return self._get_user_from_file(wanted_user)\n return self._get_user_from_host(wanted_user)", "def peer_device(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"peer_device\"), kwargs)", "def device_token(self):\n return self._device_token", "def get_user_from_id(\n self, user_id: Union[str, int], *, params: Optional[dict] = None\n ) -> \"resource_types.User\":\n\n return communicator.User(self.__requester).from_user_id(\n user_id=user_id, parameters=params\n )", "def init_user(device, user_id=None):\n logging.debug(\"Initializing user: device={0} user_id={1}\".format(device, user_id))\n if user_id:\n data = {\n \"device\": {\n \"id\": device,\n \"platform\": \"other\"\n },\n \"userId\": user_id\n }\n else:\n data = {\n \"device\": {\n \"id\": device,\n \"platform\": \"other\"\n }\n }\n return ask('init', data, 'post')", "def UserToken(self) -> object:", "def get_user_model():\n try:\n from django.contrib.auth import get_user_model\n User = get_user_model()\n except ImportError:\n from 
django.contrib.auth.models import User\n return User", "def user(self, user_token, user_device=None):\n\n self.user_token = user_token\n self.user_device = user_device" ]
[ "0.6745442", "0.60613525", "0.59726477", "0.5702831", "0.55748624", "0.5469353", "0.54422414", "0.53837264", "0.53459823", "0.53429735", "0.5318181", "0.5315078", "0.5313801", "0.5306907", "0.5290798", "0.52894306", "0.52770966", "0.5263118", "0.5261724", "0.52087736", "0.5182853", "0.5177443", "0.5173183", "0.5163499", "0.5163499", "0.5140894", "0.5122385", "0.51194084", "0.51137084", "0.51085293", "0.50996125", "0.50842184", "0.507212", "0.50702584", "0.5059949", "0.50163203", "0.5014988", "0.50112915", "0.50075525", "0.49983963", "0.49912077", "0.4988646", "0.49847278", "0.49694562", "0.49691886", "0.49560767", "0.49476817", "0.4943018", "0.49248636", "0.49216825", "0.49025044", "0.48992217", "0.48946226", "0.48943642", "0.4892523", "0.489066", "0.48816237", "0.48749334", "0.48720515", "0.48680106", "0.48588187", "0.48552963", "0.48488006", "0.4838598", "0.48322532", "0.48289457", "0.48278344", "0.48184127", "0.4815868", "0.48116967", "0.47990674", "0.47943613", "0.47874817", "0.478004", "0.4775791", "0.47704923", "0.4768091", "0.47676343", "0.4766226", "0.475575", "0.47501814", "0.47370753", "0.47370014", "0.47358367", "0.47349197", "0.4734464", "0.4717244", "0.4713761", "0.47130528", "0.47086686", "0.4706669", "0.4706669", "0.47040328", "0.46991184", "0.46974173", "0.4694987", "0.46885374", "0.4686253", "0.4679716", "0.46789002" ]
0.7053731
0
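The row above pairs the MFA TOTP query with an OCI-SDK-style client method. As a usage illustration only, here is a minimal sketch assuming that method is exposed on the real OCI Python SDK's identity client, which carries a method of the same name and signature; the config file location and both OCIDs are placeholder values, not real resources.

import oci

# Minimal usage sketch for the positive document above. Assumes the method is
# available on oci.identity.IdentityClient; the OCIDs are placeholders.
config = oci.config.from_file()          # reads ~/.oci/config by default
identity = oci.identity.IdentityClient(config)

response = identity.get_mfa_totp_device(
    user_id="ocid1.user.oc1..exampleuniqueID",
    mfa_totp_device_id="ocid1.credential.oc1..exampleuniqueID",
)
print(response.data)  # an MfaTotpDeviceSummary, per the response_type above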
Gets the specified network source's information.
def get_network_source(self, network_source_id, **kwargs): resource_path = "/networkSources/{networkSourceId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_network_source got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "networkSourceId": network_source_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="NetworkSources") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="NetworkSources")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def data_source_info(self) -> 'outputs.DatasourceResponse':\n return pulumi.get(self, \"data_source_info\")", "def source(self):\n if self._source not in ['Idle', 'Network']:\n return self._source\n else:\n return None", "def get_metadata(self, source, graph):\n return self.server.get_metadata(source, self.graphs.get(graph))", "def getNetworksDetails(network_id):\n logger.debug('Start.')\n code, res = rest_requests.get(networks_url + \"/\" + network_id)\n if code != requests.codes.ok:\n logger.error((code, res))\n return None\n return res[\"network\"]", "def Source(self):\r\n\t\treturn self._get_attribute('source')", "def source_connections_info(self) -> pulumi.Output[Sequence['outputs.MapperSourceConnectionsInfoResponse']]:\n return pulumi.get(self, \"source_connections_info\")", "def get_source(self):\n\t\treturn self.source.get_source()", "def get_traffic(self):\n req = requests.get(self.source_url)\n text = req.text\n interface, rv = self.parse_traffic(text)\n host = \", \".join([self.source_label, interface])\n return host, rv", "def get_source(self):\n return self.source", "def getSource(self):\n return self.source", "def get_network(self):\n return self.get_ip_network()[-1]", "def get_network_info(self, network_id):\n return DictModel(self.call(self.context,\n self.make_msg('get_network_info',\n network_id=network_id,\n host=self.host),\n topic=self.topic))", "def data_source_info(self) -> pulumi.Input['DatasourceArgs']:\n return pulumi.get(self, \"data_source_info\")", "def get_network_profile(arn=None):\n pass", "def get_network(self):\n return self._network", "def source_connections_info(self) -> pulumi.Input[Sequence[pulumi.Input['MapperSourceConnectionsInfoArgs']]]:\n return pulumi.get(self, \"source_connections_info\")", "def getSource(self, url):\n try:\n f = urllib2.urlopen(url)\n source = f.read()\n f.close()\n return source\n except urllib2.URLError:\n raise HNException(\"Error getting source from \" + url + \". 
Your internet connection may have something funny going on, or you could be behind a proxy.\")", "def network_info(self) -> dict:\n \n network_info_url = self.network + bf_network_informations_url\n \n response = query_blockfrost(network_info_url, self.api_key, self.proxies)\n \n return response", "def source(self):\n for source in self.coordinator.data.sources:\n if source.SourceID == self.zone.SourceID:\n return source.Name\n return None", "def get_net_details(self, net_name=\"dummy_net\", net_id=None):\n _url = \"http://\" + self.host_ip + \":9696/v2.0/networks\"\n _headers = {'x-auth-token': self.project_info[\"token_project\"]}\n _body = None\n\n result = self.request(\"GET\", _url, _headers, _body)\n if result is None:\n LOG_OBJ.error(\"No response from Server while listing the nets\")\n return result.status\n if result.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get network Failed with status %s \" % result.status)\n return result.status\n output = json.loads(result.data)\n\n for nets in output['networks']:\n if (net_id is not None and (nets['id'] == net_id)) or \\\n nets['name'].lower() == net_name.lower():\n LOG_OBJ.debug(\"Net details : %s \" % nets)\n return nets\n\n LOG_OBJ.debug(\"Network with name:%s or with ID:%s is Not Found\" %\n (net_name, net_id))", "def show_network(self, network, **_params):\r\n return self.get(self.network_path % (network), params=_params)", "def get_source(self, name):\n return self._sources[name]", "def get_sources():\n url = base_url + \"sources\"\n params = {\"language\": \"en\"}\n resp = requests.get(url, params=params)\n data = resp.json()\n sources = [src['id'].strip() for src in data['sources']]\n print(\"all the sources:\")\n print(sources)\n return sources", "def get_sources():\n url = base_url + \"sources\"\n params = {\"language\": \"en\"}\n resp = requests.get(url, params=params)\n data = resp.json()\n sources = [src['id'].strip() for src in data['sources']]\n print(\"all the sources\")\n print(sources)\n return sources", "def get_source(self, source, driver_name=None):\n if not driver_name:\n driver_name = self.driver_name\n driver = ogr.GetDriverByName(driver_name)\n return driver.Open(source, 0)", "def get_network(session, network):\n # type: (Session, str) -> Dict[str, Any]\n url_tail = f\"/{CoordConstsV2.RSC_NETWORKS}/{network}\"\n return _get_dict(session, url_tail)", "def FlowStatEthernetSource(self):\n\t\treturn self._get_attribute('flowStatEthernetSource')", "def GetNetwork(self, network, reason=None):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_GET,\n \"/%s/networks/%s\" % (GANETI_RAPI_VERSION, network),\n query, None)", "def getSource(self):\n return self.__source", "def FlowAggregatedStatEthernetSource(self):\n\t\treturn self._get_attribute('flowAggregatedStatEthernetSource')", "def fusion_api_get_network_set(self, uri=None, param='', api=None, headers=None):\n return self.network_set.get(uri=uri, api=api, headers=headers, param=param)", "def get_network(self, network_id):\n url = '%s/v2.0/networks/%s' % (self.catalog['network'], network_id)\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['network']\n else:\n LOG.error('Get network failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)", "def FlowStatIpv4Source(self):\n\t\treturn self._get_attribute('flowStatIpv4Source')", "def attributes_for_source(self, source):\n\n if source not in self:\n self[source] = {}\n\n return self.get(source)", "def 
FlowStatTransportSource(self):\n\t\treturn self._get_attribute('flowStatTransportSource')", "def get(self, source, graph, dest):\n return self.get_all([(source, graph, dest)])[0]", "def getSource(self):\n return urllib2.urlopen(Parser.SOURCE_URL)", "def _fetch_source_objects(source_objects, source_type, name=None):\n try:\n nodes = source_objects[0].nodes\n for node in nodes:\n if node.get(\"nodes\", []):\n nodes.extend(node[\"nodes\"])\n else:\n if node[\"protectionSource\"][\"vmWareProtectionSource\"][\n \"type\"] == source_type:\n obj_name = node[\"protectionSource\"][\"name\"]\n if not name:\n return node[\"protectionSource\"][\"id\"]\n elif name and name == obj_name:\n return node[\"protectionSource\"][\"id\"]\n except APIException as err:\n return str(err)", "def get_source(source, data):\n\n # source = 'NCv1.143'\n z = data[source]['z']\n line_width = data[source]['line_width']\n delta_v = 1 * kms # do not care actually, fully degenerate with\n # the column density\n\n # selecting only CO lines\n keys = [key for key in data[source].keys()\n if 'CO' in key and 'eCO' not in key]\n CO_data = Table(np.asarray([(Jlow + 1, data[source][key], data[source]['e' + key])\n for Jlow, key in enumerate(keys)\n if np.isfinite(data[source][key])]),\n names=['Jup', 'flux', 'eflux'],\n dtype=[int, float, float])\n\n Jup = CO_data['Jup'].data\n flux = CO_data['flux'].data * Jykms\n eflux = CO_data['eflux'].data * Jykms\n\n return z, line_width, Jup, flux, eflux", "def get_source(self):", "def show_sources_all():\n response = requests.get(SOURCE_URL)\n json = response.json()\n for source in json['sources']:\n print(u\"{0}: <{1}> {2}\".format(\"News Code\", source['id'], source['name']))", "def getAddressSourceInfo(self, address: ghidra.program.model.address.Address) -> ghidra.program.database.mem.AddressSourceInfo:\n ...", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def network(self):\n return self._network", "def network(self):\n return self._network", "def network(self):\n return self._network", "def get_network(self) -> Optional[str]:\n return self.get_value(self._network_attribute)", "def get_netlist_info(self) -> SimNetlistInfo:\n pass", "def networkInfo(self):\n assert False, \"Deriving class must implement\"", "def getSourceIp(self):\n return self.__sourceIp", "def getSource():", "def get_node(self, name):\n return self.source_net.nodes[name]", "def source_reader_info(self) -> dict:\n return self._source_reader_info", "def network(self):\n return self.__network", "def data_source_set_info(self) -> Optional['outputs.DatasourceSetResponse']:\n return pulumi.get(self, \"data_source_set_info\")", "def get_source_details(env, source_id, upload_id, api_headers, cookies):\n try:\n source_api_endpoint = f\"{common_lib.get_source_api_url(env)}/sources/{source_id}\"\n logging.info(f\"Requesting source configuration from {source_api_endpoint}\")\n r = requests.get(source_api_endpoint,\n headers=api_headers, cookies=cookies)\n if r and r.status_code == 200:\n api_json = r.json()\n logging.info(f\"Received source API response: {api_json}\")\n return api_json[\"origin\"][\"url\"], api_json[\"format\"], api_json.get(\n \"automation\", {}).get(\n \"parser\", 
{}).get(\n \"awsLambdaArn\", \"\"), api_json.get(\n \"dateFilter\", {}), api_json.get(\n \"hasStableIdentifiers\", False), api_json.get(\n \"uploads\", {})\n upload_error = (\n common_lib.UploadError.SOURCE_CONFIGURATION_NOT_FOUND\n if r.status_code == 404 else common_lib.UploadError.INTERNAL_ERROR)\n e = RuntimeError(\n f\"Error retrieving source details, status={r.status_code}, response={r.text}\")\n common_lib.complete_with_error(\n e, env, upload_error, source_id, upload_id,\n api_headers, cookies)\n except ValueError as e:\n common_lib.complete_with_error(\n e, env, common_lib.UploadError.INTERNAL_ERROR, source_id, upload_id,\n api_headers, cookies)", "def source():\n\n source = models.Source(name=u\"Joe's Funerals.com\", url=u\"http://www.joesfunerals.com\")\n return source", "def network(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"network\")", "def test_get_network(self):\n pass", "def FlowAggregatedStatIpv4Source(self):\n\t\treturn self._get_attribute('flowAggregatedStatIpv4Source')", "def getSrc(self):\n xml = open(self.model, 'r')\n keywd1 = ['RA', 'DEC', 'PointSource']\n ra = []\n dec = []\n nam = []\n sep = []\n target = SkyCoord(ra=self.ra*u.degree, dec=self.dec*u.degree, frame='icrs') \n for line in xml :\n if keywd1[0] in line:\n ra.append( float(line.split('\"')[-2]) )\n if keywd1[1] in line:\n dec.append( float(line.split('\"')[-2]) )\n s = SkyCoord(ra=ra[-1]*u.degree, dec=dec[-1]*u.degree, frame='icrs')\n sep.append(target.separation(s).deg)\n if keywd1[2] in line:\n nam.append( line.split('\"')[3].split()[-1] ) # no '3FGL'\n xml.close()\n\n if self.csys == 'GAL':\n srcPos = SkyCoord(np.array(ra)*u.degree, np.array(dec)*u.degree, frame='icrs')\n ra, dec = srcPos.galactic.l.deg, srcPos.galactic.b.deg\n\n srcs = Table([ra, dec, nam, sep], names=('RA', 'DEC', 'Name', 'Separation'))\n return srcs", "def source(self) -> Optional[str]:\n return pulumi.get(self, \"source\")", "def source(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"source\")", "def get_network_on_vc(options):\n datacenter = get_datacenter(options)\n networks = datacenter.network\n\n name = get_network_name(options)\n for network in networks:\n if re.search(name, network.name):\n return network", "def get_source(self) -> Optional[str]:\n return self._source", "def network(self) -> str:\n return pulumi.get(self, \"network\")", "def get_source_info_list(self):\n self._get_source_info_list = pa_source_info_cb_t(self._source_info_cb)\n pa_context_get_source_info_list(self._context,\n self._get_source_info_list,\n None)", "def listsources():\n\tmain_url = \" https://newsapi.org/v2/sources?apiKey=5f81b593f35d42a8980313250c03d7e7\"\n\n\t# fetching data in json format \n\topen_source = requests.get(main_url).json() \n\n\t# getting all articles in a string sources\n\tsource = open_source[\"sources\"] \n\n\t# empty list which will \n\t# contain all trending newssources \n\tresults = [] \n\t\n\tfor k in source: \n results.append(k[\"id\"])\n \n \t\n\tfor w in results[0:4]:\n print(w)", "def getSourceURL(self):\n return self.SourceURL", "def get_source_properties():\n\n print(\"-> start source properties\")\n tstart = time.time()\n proc = sbp.run([\"curl\",\n \"--silent\",\n \"--request\", \"POST\",\n \"--location\",\n \"--data\", \"REQUEST=doQuery\",\n \"--data\", \"PHASE=RUN\",\n \"--data\", \"FORMAT=votable\",\n \"--data\", \"LANG=ADQL\",\n \"--data\", \"QUERY=SELECT DISTINCT 
m.name,m.ra,m.dec,m.err_ellipse_r0,m.err_ellipse_r1,m.err_ellipse_ang,m.conf_flag,m.sat_src_flag,m.acis_num,m.hrc_num,m.var_flag,m.significance,m.flux_aper_b,m.flux_aper_lolim_b,m.flux_aper_hilim_b,m.flux_aper_w,m.flux_aper_lolim_w,m.flux_aper_hilim_w,m.nh_gal,m.hard_hm,m.hard_hm_lolim,m.hard_hm_hilim,m.hard_ms,m.hard_ms_lolim,m.hard_ms_hilim FROM csc21_snapshot.master_source m ORDER BY name ASC\",\n \"https://cda.cfa.harvard.edu/csc21_snapshot_tap/sync\"],\n check=True, stdout=sbp.PIPE)\n\n tend = time.time()\n print(f\"<- took {tend - tstart:.1f} seconds\")\n\n return srclist_process_votable(proc.stdout.decode())", "def read(self, source):\n _source = self._source_prefix+source\n return self.cache[_source]", "def getTroveSource(self):\n return self.source", "def getLcInfo(id, classname='unknown'):\n id = str(id)\n isError = False\n if(\"http\" in id):\n url = id\n elif id.isdigit():\n url = \"http://dotastro.org/lightcurves/vosource.php?Source_ID=\" + id\n try:\n lc = urllib.request.urlopen(url).read()\n if lc.find(\"<TD>\") == -1:\n raise urllib.error.URLError('No data for specified source ID.')\n\n except (IOError, urllib.error.URLError) as error:\n print(\"Could not read specified file.\", id, error)\n isError = True\n return False\n except Exception as error:\n print(\"Error encountered.\", id, error)\n isError = True\n return False\n\n if not isError:\n lcs = dotAstroLc(lc, id, classname)\n newSource = Source(id, lcs, classname)\n # print len(lcs), \"light curves processed for source\", id\n return newSource\n\n return", "def source(self) -> Dict:\n return self._db_data.metadata[\"_source\"]", "def _source(self, namespace):\n if not namespace:\n source = self._default_source\n else:\n source = self._sources.get(namespace)\n if not source:\n raise GroupResolverSourceError(namespace or \"<default>\")\n return source", "def _get_infores(source: str) -> str:\n if source in self.context.catalog:\n return self.context.catalog[source]\n else:\n infores: str = _process_infores(source)\n if infores:\n self.context.catalog[source] = infores\n return infores\n else:\n return \"\"", "def source(self) -> Station:\n return self._source", "def getsources(self,\n category='general',\n language=None,\n country='us',\n apiKey=None,\n version=None):\n\n if self.version != 2:\n\n request_params = {\n \"category\":category,\n \"language\": language,\n \"country\":country,\n \"apiKey\": self._api_key,\n }\n\n # retrive the api key if set; otherwise, error\n if not self._api_key:\n raise ValueError(\n 'You must use use an API key; to get a key visit https://news'\n 'api.org/. 
If you have an API key, set it using the '\n 'Api.SetCredentials method.')\n\n # if api key is there, set the params\n else:\n request_params = {\n \"category\": category,\n 'language': language,\n \"country\": country,\n \"apiKey\": self._api_key,\n }\n\n\n # build the url\n url = self.base_url + self.__endpoints['source']\n\n # make the request\n r = requests.get(url,params=request_params,timeout=self._timeout)\n\n\n # return the json\n return r.json()", "def show_network_profile(self, profile, **params):\r\n return self.get(self.network_profile_path % (profile), params=params)", "def returnNetworkNode(self):\n\n networkNodes = cmds.ls(type=\"network\")\n for node in networkNodes:\n attrs = cmds.listAttr(node)\n if \"moduleName\" in attrs:\n if cmds.getAttr(node + \".moduleName\") == self.name:\n networkNode = node\n\n return networkNode", "def FlowAggregatedStatTransportSource(self):\n\t\treturn self._get_attribute('flowAggregatedStatTransportSource')", "def source_list(self):\n source_list = self._source_list.copy()\n if 'wifi' in source_list:\n del source_list['wifi']\n\n if len(self._source_list) > 0:\n return list(source_list.values())\n else:\n return None", "def show_networks():\n return get_networks()", "def source(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"source\")", "def show_sources_category(category):\n if category not in NEWS_CATEGORIES:\n print(\"Invalid category\")\n sys.exit(1)\n\n url = \"?category={category_type}\"\n response = requests.get((SOURCE_URL+url).format(category_type=category))\n json = response.json()\n for source in json['sources']:\n print(u\"{0}: <{1}> {2}\".format(\"News Code\", source['id'], source['name']))", "def Sources():\n return _sources", "def getSourcesFromVehicle(vehicleName):\n pass", "def get_source_metadata(self, src_name: Union[str, SourceName]) -> Dict:\n if isinstance(src_name, SourceName):\n src_name = src_name.value\n if src_name in self._cached_sources:\n return self._cached_sources[src_name]\n else:\n metadata = self.metadata.get_item(Key={\"src_name\": src_name}).get(\"Item\")\n self._cached_sources[src_name] = metadata\n return metadata", "def data_source_set_info(self) -> Optional[pulumi.Input['DatasourceSetArgs']]:\n return pulumi.get(self, \"data_source_set_info\")", "def source(self):\n return self._source", "def source(self):\n return self._source", "def source(self):\n return self._source", "def source(self):\n return self._source", "def source(self):\n return self._source", "def getNodeNetworks(self,node):\n data = self.connect('get','nodes/%s/network' % (node),None)\n return data" ]
[ "0.6463655", "0.6205295", "0.6124874", "0.609256", "0.60485286", "0.60455465", "0.6035504", "0.59897584", "0.5891964", "0.58896935", "0.58769464", "0.5848994", "0.58475167", "0.58382004", "0.5824984", "0.58154297", "0.5785791", "0.57799673", "0.5767593", "0.57521796", "0.571923", "0.5713581", "0.57122695", "0.5702285", "0.56925523", "0.5687103", "0.568079", "0.5676025", "0.56699574", "0.56563646", "0.5655044", "0.56345886", "0.56195265", "0.5616954", "0.56070197", "0.56030035", "0.56020975", "0.5574226", "0.5573221", "0.5570933", "0.5562432", "0.55607253", "0.55598915", "0.55598915", "0.55598915", "0.55598915", "0.55598915", "0.55598915", "0.5541651", "0.5541651", "0.5541651", "0.5541549", "0.55397224", "0.55230516", "0.55159473", "0.5513974", "0.55136275", "0.5511789", "0.55059314", "0.55043054", "0.5503475", "0.54996836", "0.5497533", "0.54236084", "0.54204464", "0.54152626", "0.5400473", "0.53913623", "0.53850466", "0.53783655", "0.53709924", "0.5356705", "0.5353483", "0.5352591", "0.5344774", "0.5336503", "0.533463", "0.53252953", "0.53211045", "0.5312416", "0.5310147", "0.53047025", "0.52963704", "0.5294608", "0.5290582", "0.52817696", "0.5279907", "0.5273177", "0.5265945", "0.52650774", "0.5262244", "0.52601904", "0.5233436", "0.5230405", "0.522783", "0.522783", "0.522783", "0.522783", "0.522783", "0.52259225" ]
0.6018748
7
Gets the specified policy's information.
def get_policy(self, policy_id, **kwargs):
        resource_path = "/policies/{policyId}"
        method = "GET"

        expected_kwargs = ["retry_strategy"]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "get_policy got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "policyId": policy_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json"
        }

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="Policy")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="Policy")
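A minimal usage sketch for the document above, assuming the standard `oci` Python SDK where this method is exposed on `oci.identity.IdentityClient`; the config file path, profile name, and the policy OCID are placeholders, not values taken from the record:

import oci

# Build a client from the default config file; path and profile are assumptions.
config = oci.config.from_file("~/.oci/config", "DEFAULT")
identity = oci.identity.IdentityClient(config)

# The OCID below is a hypothetical placeholder.
response = identity.get_policy("ocid1.policy.oc1..exampleuniqueID")
policy = response.data  # a Policy model, with fields such as name and statements
print(policy.name)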
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def policy_info(self) -> 'outputs.PolicyInfoResponse':\n return pulumi.get(self, \"policy_info\")", "def policy_info(self) -> pulumi.Input['PolicyInfoArgs']:\n return pulumi.get(self, \"policy_info\")", "def read(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.debug(\"Reading the policy: %s\", address)\n response = self.vault.requests_request(\n \"GET\", address, headers=self.vault.token_header\n )\n policy_details = response.json()[\"data\"][\"policy\"]\n return policy_details", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy_data(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy_data\")", "def policy_data(self) -> str:\n return pulumi.get(self, \"policy_data\")", "def policy_data(self) -> str:\n return pulumi.get(self, \"policy_data\")", "def policy_data(self) -> str:\n return pulumi.get(self, \"policy_data\")", "def get_policy(usage_id):\r\n return policy.get(policy_key(usage_id), {})", "def get_policy(self):\n return self.agent.get_policy()", "def policy(self) -> Optional[str]:\n return pulumi.get(self, \"policy\")", "def get_sp_policy(self, context, id):\n # handling policy method in RPC\n response = self.dns_manager.get_sp_policy(context, id)\n return response", "def get_policy(client, policy_name):\n response = client.describe_firewall_policy(\n FirewallPolicyName=policy_name,\n )\n return response", "def describe(cls, policy):\n return cls.descriptions[policy] if cls.validates(policy) else None", "def get_policies():\r\n policy = policies.values()\r\n return policy", "def policy(self) -> pulumi.Output['outputs.ServicePolicy']:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def getPolicy(self, state):\n return self.policy[state]", "def get_policy_info(self):\n policy_info = []\n for pol in self:\n # delete from /info if deprecated\n if pol.is_deprecated:\n continue\n policy_entry = {}\n policy_entry['name'] = pol.name\n if pol.is_default:\n policy_entry['default'] = pol.is_default\n policy_info.append(policy_entry)\n return policy_info", "def GetPolicies(self):\n policy = {}\n if json is None:\n logging.error('No JSON module, cannot parse policy information')\n else :\n try:\n policy = json.loads(open(self.policy_path).read(), strict=False)\n except IOError:\n logging.error('Failed to load policies from %s' % self.policy_path)\n return policy", "def get_policy(self, *args, **kwargs):\r\n pass", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy_get(request, policy_id, **kwargs):\n policy = neutronclient(request).show_qos_policy(\n policy_id, **kwargs).get('policy')\n return QoSPolicy(policy)", "def getPolicy(self, state):\n util.raiseNotDefined()", "def getPolicy(self, state):\n util.raiseNotDefined()", "def policy(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Input[str]:\n return 
pulumi.get(self, \"policy\")", "def get_policy(self):\n\n return", "def GetPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('GetPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def policy_data(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_data\")", "def policy_data(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy_data\")", "def show_policy_profile(self, profile, **params):\r\n return self.get(self.policy_profile_path % (profile), params=params)", "def get_policy(self, policy_id: PolicyID = DEFAULT_POLICY_ID) -> Policy:\n return self.workers.local_worker().get_policy(policy_id)", "def policies(self):\n return self._data.get('policies')", "def policy_parameters(self) -> Optional['outputs.PolicyParametersResponse']:\n return pulumi.get(self, \"policy_parameters\")", "def get_policy(self):\n try:\n LOG.debug(\"Searching for retention_policy in K2.\")\n return self.client.search(\"retention_policies\",\n name=\"Best_Effort_Retention\").hits[0]\n except Exception as ex:\n LOG.exception(\"Retention policy search failed in K2.\")\n raise KaminarioCinderDriverException(reason=ex)", "def _get_policies(self):\n flag, response = self._commcell_object._cvpysdk_object.make_request('GET', self._POLICY)\n\n if flag:\n if response.json() and 'taskDetail' in response.json():\n policies = response.json()['taskDetail']\n policies_dict = {}\n\n for policy in policies:\n temp_name = policy['task']['taskName'].lower()\n temp_id = str(policy['task']['taskId']).lower()\n policies_dict[temp_name] = temp_id\n\n return policies_dict\n else:\n raise SDKException('Response', '102')\n else:\n response_string = self._commcell_object._update_response_(response.text)\n raise SDKException('Response', '101', response_string)", "def policy_document(self) -> aws_cdk.aws_iam.PolicyDocument:\n return jsii.get(self, \"policyDocument\")", "def get_policyname(self):\n return self.options[\"policyname\"]", "def policy_name(self) -> Optional[str]:\n return pulumi.get(self, \"policy_name\")", "def policy_name(self) -> Optional[str]:\n return pulumi.get(self, \"policy_name\")", "def get(self):\n policy_number = reqparse.request.args.get('policy_number')\n category = reqparse.request.args.get('category')\n\n dao = ClaimDao()\n return dao.get(policy_number=policy_number, category=category)", "def policy_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"policy_id\")", "def get_policyname(self):\n return self.options['policyname']", "def RunWithArgs(self):\n client = GetClientFromFlags()\n global_params = GetGlobalParamsFromFlags()\n request = messages.GetPolicyDetailsRequest(\n )\n if FLAGS['fullResourcePath'].present:\n request.fullResourcePath = FLAGS.fullResourcePath.decode('utf8')\n if FLAGS['pageSize'].present:\n request.pageSize = FLAGS.pageSize\n if FLAGS['pageToken'].present:\n request.pageToken = FLAGS.pageToken.decode('utf8')\n result = client.iamPolicies.GetPolicyDetails(\n request, global_params=global_params)\n print apitools_base_cli.FormatOutput(result)", "async def _parse_policy(self, raw_policy):\n if raw_policy.get('AttachmentCount') > 0:\n policy_dict = {}\n policy_dict['id'] = policy_dict['name'] = raw_policy.get('PolicyName')\n policy_dict['description'] = raw_policy.get('Description')\n policy_dict['create_date'] = raw_policy.get('CreateDate')\n policy_dict['update_date'] = raw_policy.get('UpdateDate')\n policy_dict['attachment_count'] = raw_policy.get('AttachmentCount')\n policy_dict['type'] = 
raw_policy.get('PolicyType')\n policy_dict['default_version'] = raw_policy.get('DefaultVersion')\n\n policy_version = await self.facade.ram.get_policy_version(policy_dict['name'],\n policy_dict['type'],\n policy_dict['default_version'])\n policy_version['PolicyDocument'] = json.loads(policy_version['PolicyDocument'])\n # policy_dict['policy_document'] = policy_version['PolicyDocument']\n policy_dict['policy_document'] = policy_version\n\n policy_entities = await self.facade.ram.get_policy_entities(policy_dict['name'],\n policy_dict['type'])\n policy_dict['entities'] = {}\n if policy_entities['Users']['User']:\n policy_dict['entities']['users'] = []\n for user in policy_entities['Users']['User']:\n policy_dict['entities']['users'].append(user['UserName'])\n if policy_entities['Groups']['Group']:\n policy_dict['entities']['groups'] = []\n for group in policy_entities['Groups']['Group']:\n policy_dict['entities']['groups'].append(group['GroupName'])\n if policy_entities['Roles']['Role']:\n policy_dict['entities']['roles'] = []\n for role in policy_entities['Roles']['Role']:\n policy_dict['entities']['roles'].append(role['RoleName'])\n\n return policy_dict['id'], policy_dict\n else:\n return None, None", "def get_policy_by_id(self, id):\n for service, policy_list in self.remote_store.get_policy_list().items():\n for policy in policy_list:\n if policy.id == id:\n return policy", "def policyid(self):\n return self._policyid", "def policy(cls):\n return relationship.many_to_one(cls, 'policy')", "def policy(self) -> typing.Optional[\"BucketPolicy\"]:\n return jsii.get(self, \"policy\")", "def policy(self) -> typing.Optional[\"BucketPolicy\"]:\n return jsii.get(self, \"policy\")", "def policy_name(self):\n return self._policy_name", "def policy(self) -> HwPolicy:\n return self._policy", "def policy_repr(self, policy):\n return policy.__repr__()", "def show_ikepolicy(self, ikepolicy, **_params):\r\n return self.get(self.ikepolicy_path % (ikepolicy), params=_params)", "def policy_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy_type\")", "def policy_parameters(self) -> Optional[pulumi.Input['PolicyParametersArgs']]:\n return pulumi.get(self, \"policy_parameters\")", "def policy_parameters(self) -> Optional[pulumi.Input['PolicyParametersArgs']]:\n return pulumi.get(self, \"policy_parameters\")", "def get_policy_data(policy_id):\n policy = PolicyService.get_policy_by_id(policy_id)\n if policy is None:\n abort(404)\n\n policy_json = json.loads(POLICY_JSON)\n policy_json[ID_KEY] = str(policy.id)\n policy_json[PolicyKeys.POLICY_NAME_KEY] = policy[PolicyKeys.POLICY_NAME_KEY]\n policy_json[PolicyKeys.NUMBER_OF_RULES_KEY] = policy[PolicyKeys.NUMBER_OF_RULES_KEY]\n policy_json[PolicyKeys.RULES_KEY] = policy[PolicyKeys.RULES_KEY]\n policy_json[PolicyKeys.UPDATE_KEY] = policy[PolicyKeys.UPDATE_KEY]\n\n return policy_json", "def policy_id(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"policy_id\")", "def get_workload_policy(self, workload_policy_id):\n url = \"get_workload_policy/%s\" % workload_policy_id\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBody(resp, body[\"workload_policy\"])", "def get_policy_string(base, policy_index):\n if POLICIES.get_by_index(policy_index) is None:\n raise PolicyError(\"No policy with index %r\" % policy_index)\n return _get_policy_string(base, policy_index)", "def policy_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_name\")", "def 
policy_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_name\")", "def policy_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_name\")", "def policy_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_name\")", "def policy_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_name\")", "def policy_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_name\")", "def policy_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy_name\")", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n policy: Optional[pulumi.Input[str]] = None,\n resource_arn: Optional[pulumi.Input[str]] = None) -> 'Policy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _PolicyState.__new__(_PolicyState)\n\n __props__.__dict__[\"policy\"] = policy\n __props__.__dict__[\"resource_arn\"] = resource_arn\n return Policy(resource_name, opts=opts, __props__=__props__)", "def get_user_policy(self, user_name, policy_name):\r\n params = {'UserName' : user_name,\r\n 'PolicyName' : policy_name}\r\n return self.get_response('GetUserPolicy', params, verb='POST')", "def selected_policy(self):\n return self._selected_policy", "def policy_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy_id\")", "def policy_uri(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"policy_uri\")", "def policy_uri(self) -> str:\n return pulumi.get(self, \"policy_uri\")", "def policies(self):\n return self._policies", "def show_firewall_policy(self, firewall_policy, **_params):\r\n return self.get(self.firewall_policy_path % (firewall_policy),\r\n params=_params)", "def policy_id(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"policy_id\")", "def policy_id(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"policy_id\")", "def _get_policy_id(name):\n cohesity_client = _get_client()\n log.info(\"Getting policy with name %s\", name)\n resp = cohesity_client.protection_policies.get_protection_policies(\n names=name)\n if resp:\n return resp[0].id", "def policy_str(self): # -> str:\n ...", "def network_policy(self) -> 'outputs.NetworkPolicyResponse':\n return pulumi.get(self, \"network_policy\")", "def load_policy(self, policy_path, tracker):\r\n if not os.path.exists(policy_path):\r\n return {}\r\n try:\r\n with open(policy_path) as f:\r\n return json.load(f)\r\n except (IOError, ValueError) as err:\r\n msg = \"ERROR: loading course policy from {0}\".format(policy_path)\r\n tracker(msg)\r\n log.warning(msg + \" \" + str(err))\r\n return {}", "def get_access_policy_output(access_policy_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAccessPolicyResult]:\n ...", "def policies(self, request):\n policies = OtterPolicies(self.store, self.tenant_id, self.group_id,\n self.dispatcher)\n return policies.app.resource()", "def policy_type(self):\n return (\n list(\n SchedulePolicies.policy_types.keys())[\n list(\n SchedulePolicies.policy_types.values()).index(self._task_json['policyType'])])", "def base_policy(self) -> Optional['outputs.SubResourceResponse']:\n return pulumi.get(self, \"base_policy\")", "def getMergePolicy(self, policy_id: str = None) -> dict:\n if policy_id is None:\n raise Exception(\"Missing the policy id\")\n if self.loggingEnabled:\n self.logger.debug(f\"Starting getMergePolicy\")\n path = f\"/config/mergePolicies/{policy_id}\"\n res = 
self.connector.getData(self.endpoint + path, headers=self.header)\n return res", "def storage_policies(self, **kwargs):\n self.logger.debug(f\"Get storage policies data\")\n url_path = 'storage/policies'\n body = self._make_body(kwargs)\n return self._common_get(request_path=url_path, parameters=body)", "def get_acl_policy(client, container_name, policy_name, **kwargs):\n acl = _get_acl(client, container_name, **kwargs)\n return acl.get(policy_name)", "def get_security_policy_output(name: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input['SecurityPolicyType']] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSecurityPolicyResult]:\n ...", "def get(self, master_id):\n result = MasterController.fetch_master_policy(master_id)\n if result:\n return make_response(helper.make_rest_success_response(\"Successfully fetched\",\n result), 200)\n\n return make_response(helper.make_rest_fail_response(\"Policy was not found\"), 404)", "def _get_policies(self, cr, uid, context=None):\n return [('optional', _('Optional')),\n ('always', _('Always')),\n ('never', _('Never'))]", "def get_one(self, nodepool_policy_ident):\n context = pecan.request.context\n nodepool_policy = api_utils.get_resource('NodePoolPolicy', nodepool_policy_ident)\n # policy.enforce(context, 'nodepool_policy:get', nodepool_policy,\n # action='nodepool_policy:get')\n\n return NodePoolPolicy.convert_with_links(nodepool_policy)", "def policy_variables(self):\n\n return self.pi_model.variables" ]
[ "0.83910096", "0.7993574", "0.74160916", "0.7270525", "0.7270525", "0.7270525", "0.7178607", "0.71649534", "0.71649534", "0.71649534", "0.71506953", "0.70812213", "0.7078061", "0.70566046", "0.7050848", "0.7042278", "0.6983816", "0.6946659", "0.6928475", "0.6928475", "0.6916226", "0.6898227", "0.68970644", "0.6871987", "0.6804914", "0.6804914", "0.6804914", "0.6795135", "0.6779249", "0.6779249", "0.67643255", "0.67643255", "0.67531514", "0.6730059", "0.66917104", "0.66785663", "0.66530395", "0.6502919", "0.6448101", "0.6410055", "0.64094985", "0.6393442", "0.62900376", "0.6250011", "0.62294984", "0.62294984", "0.62229574", "0.6190349", "0.61865276", "0.61849797", "0.61737174", "0.6159815", "0.6158519", "0.6133095", "0.6129924", "0.6129924", "0.61152655", "0.6113187", "0.6095952", "0.6094134", "0.60699666", "0.60689735", "0.60689735", "0.6064397", "0.60501504", "0.59940636", "0.5990487", "0.596734", "0.596734", "0.596734", "0.596734", "0.596734", "0.596734", "0.596734", "0.5965638", "0.59622467", "0.59564054", "0.5921623", "0.5873561", "0.5867439", "0.58514", "0.5843111", "0.5835591", "0.5814539", "0.5792653", "0.5729544", "0.5702678", "0.5694951", "0.56816536", "0.56655216", "0.56404495", "0.5625214", "0.5605255", "0.5598741", "0.5539909", "0.5538918", "0.5532372", "0.5532142", "0.5500692", "0.54998916" ]
0.5592181
94
Gets the specified tag's information.
def get_tag(self, tag_namespace_id, tag_name, **kwargs):
        resource_path = "/tagNamespaces/{tagNamespaceId}/tags/{tagName}"
        method = "GET"

        expected_kwargs = ["retry_strategy"]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "get_tag got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "tagNamespaceId": tag_namespace_id,
            "tagName": tag_name
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json"
        }

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="Tag")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="Tag")
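A hedged sketch of calling this method, again assuming `oci.identity.IdentityClient` from the standard SDK; it also illustrates the client-side empty-string validation the method performs on its path parameters. Both identifiers are placeholders:

import oci

config = oci.config.from_file()  # default config location; an assumption
identity = oci.identity.IdentityClient(config)

tag = identity.get_tag(
    tag_namespace_id="ocid1.tagnamespace.oc1..exampleuniqueID",  # placeholder
    tag_name="CostCenter",  # placeholder
).data

# A blank path parameter fails locally, before any HTTP request is made:
try:
    identity.get_tag(" ", "CostCenter")
except ValueError as e:
    print(e)  # Parameter tagNamespaceId cannot be None, whitespace or empty string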
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def info(self, tag):\n\n url = \"https://api.instagram.com/v1/tags/{0}?access_token={1}\".format(tag, self.access_token)\n request = requests.get(url)\n print(request.headers)\n return request.json()", "def get_tag(self, tag):\n resp = self.get(_u.build_uri(\"tag\", domain=self.domain),\n data={'tag': tag})\n return utils.handle_response(resp)", "def get_tag(self, tag, filename):\n return self.get_tag_batch(tag, [filename])[0]", "def get(self):\n return self.tag.get()", "def __getitem__(self, tag):\n return self.get(tag)", "def get_info(self, name):\n return self.info[name]", "async def info(self, ctx, *, tag):\n try:\n self.fetch_tag(ctx, tag)\n except Exception as error:\n return await ctx.send(error)\n data = self._tag_dict[ctx.guild.id][tag]\n author = self.bot.get_user(data['author']) or await self.bot.fetch_user(data['author'])\n embed = discord.Embed(colour=self.bot.colour)\n embed.title = tag\n embed.description = f\"<:author:734991429843157042> **{author}**\\n\"\n embed.description += f\"Uses: **{data['uses']}**\\n\"\n embed.description += f\"ID: **{data['id']}**\"\n embed.set_author(name=str(author), icon_url=author.avatar_url)\n await ctx.send(embed=embed)", "def _get_tag(self):\n return self.__tag", "def get_tag_stats(self, tag):\n resp = self.get(_u.build_uri(\"tag.stats\", domain=self.domain),\n data={'tag': tag})\n return utils.handle_response(resp)", "def get_tag(self, scope, key):\r\n print 'GETTING', scope, key, self._tags\r\n return self._tags[scope].get(key)", "def get_tag(self):\n return self.tag", "def get_tag(self, key):\n return self._entries[key]", "async def get_tag_command(self, ctx):\n await self.get_tag(ctx)", "def getTag(self, authenticationToken, guid):\r\n self.send_getTag(authenticationToken, guid)\r\n return self.recv_getTag()", "def GetFromTag(cls, tag):\n parent_key = cls._GetParentKeyFromTag(tag)\n return cls.query(ancestor=parent_key).get()", "def get_tag(self, tag_name):\n tag_data = self.db.make_query(\n '''\n select tag_name from tag where tag_name = \"{}\"\n '''.format(tag_name)\n )\n\n if len(tag_data) > 0:\n tag_name = tag_data[0][0]\n human_readable_tag = name_util.make_decoded(tag_data[0][0])\n\n rtn_dict = {\n 'tag_name': tag_name,\n 'human_readable_name': human_readable_tag\n }\n\n return rtn_dict", "async def info(self, ctx: \"IceTeaContext\", *, otag: TagConverter):\n tag: models.Tag = otag\n if not tag.alias:\n embed = discord.Embed(description=f\"{ctx.message.guild.name} ``{tag.title}`` tag information\")\n user = ctx.guild.get_member(tag.author)\n embed.set_author(name=user.display_name, icon_url=user.avatar_url)\n embed.add_field(name=\"Tag name\", value=tag.title)\n embed.add_field(name=\"Amount used\", value=str(tag.count))\n embed.timestamp = tag.created\n await ctx.send(embed=embed)\n else:\n embed = discord.Embed(description=f\"{ctx.message.guild.name} ``{tag.title}`` alias information\")\n user = ctx.guild.get_member(tag.author)\n embed.add_field(name=\"Author\", value=user or \"Unknown\")\n embed.add_field(name=\"Amount used\", value=str(tag.count))\n embed.timestamp = tag.created\n await ctx.send(embed=embed)", "def tag(self) -> str:\n return pulumi.get(self, \"tag\")", "def getInfo():", "def tag(self):\n return self._tag", "def getValue(tree, tag):\n try:\n return tree.find(tag).text\n except AttributeError:\n return None", "def tag(self,name):\n return self._tags.get(name,None)", "def read_metadata(self):\n return self.parent.controller.get_tag_metadata()", "def get(self, tag, index):\n raise NotImplementedError", "def 
get_info(self, info):\r\n pass", "def __getitem__(self, tag):\n return self.__tags.get(tag.lower(), 0)", "def get(self, uuid):\n\n\t\treturn self._get(\"/tag/%s\" % base.getid(uuid), \"tag\")", "def getTagData(tagname,data):\n tags = rhevGet(\"/api/tags\")\n doc = libxml2.parseDoc(tags)\n ctxt = doc.xpathNewContext()\n res = ctxt.xpathEval(\"/tags/tag[name[position()=1]= '\" + tagname + \"']\")\n return res[0].prop(data)", "def get(self):\n res = SmartAPI.get_tags(self.args.field)\n self.finish(res)", "def get(self):\n res = SmartAPI.get_tags(self.args.field)\n self.finish(res)", "def tag(self):\n return self._tag", "def tag(self):\n return self.tag_", "def find_by_id(self, tag, params={}, **options):\n path = \"/tags/%s\" % (tag)\n return self.client.get(path, params, **options)", "def show_tag_details(tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n # find all posts associated with tag...\n # posts = PostTag.query.get('posts.tag_id')\n\n return render_template('tag_details.html', tag=tag)", "def getTag(self, authenticationToken, guid):\r\n pass", "def getInfo(self):\n return self.info", "async def slashtag_info(self, ctx: commands.Context, *, tag: TagConverter):\n await tag.send_info(ctx)", "def get_info(self):\n pass", "def get_info(self):\n pass", "def get_tag(tag):\r\n from tagging.models import Tag\r\n if isinstance(tag, Tag):\r\n return tag\r\n\r\n try:\r\n if isinstance(tag, types.StringTypes):\r\n return Tag.objects.get(name=tag)\r\n elif isinstance(tag, (types.IntType, types.LongType)):\r\n return Tag.objects.get(id=tag)\r\n except Tag.DoesNotExist:\r\n pass\r\n\r\n return None", "def _get_tag(self, tag):\n return self.prefix + tag", "def show_tag_details(tag_id):\r\n\r\n tag = Tag.query.get_or_404(tag_id)\r\n\r\n return render_template('tag-details.html', tag=tag)", "def get_info(self) -> str:\n return self.info", "def show_tag_details(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n\n return render_template('tag-details.html', tag=tag)", "def get_info(self) -> Optional[Dict[str, Any]]:", "def getHTMLTag(self, html, tag):\n soup = BeautifulSoup(html, 'html.parser')\n content = soup.find(tag)\n return content", "def GetDataFromTag(dom, tag):\n tags = dom.getElementsByTagName(tag)\n if not tags:\n return None\n elif tags[0].hasChildNodes():\n return tags[0].firstChild.data\n else:\n return ''", "def info(self, key = None):\n return self.client.get(self.name).getBodyData(key)", "def by_tag(articles_by_tag, tag):\n for a in articles_by_tag:\n if a[0].slug == tag:\n return a[1]", "def get_data_from_tag(self, tag: Tag) -> dict:\n self.verify_tag_structure(tag)\n title = tag.string\n url = tag.contents[0]['href'] # tag.contents[0].name is 'a'\n date_string = tag.next_sibling.next_sibling.contents[0]\n published_date = (self.get_date_from_string(date_string))\n announcement_data = {\n 'id': None,\n 'title': title,\n 'url': url,\n 'check_string': None,\n 'published_datetime': published_date,\n 'updated_datetime': None,\n 'retrieved_datetime': datetime.now(),\n 'stored_timestamp': None\n }\n self.check_announcement_content_validity(announcement_data)\n return announcement_data", "def show_tag_details(tag_id):\n\n tag = Tag.query.get_or_404(tag_id)\n\n return render_template(\"tags/tag_details.html\", tag=tag)", "def get(self) -> Info:\n return InfoService.get()", "def get_tag(tag_id, as_dict=False):\n query = db_session.query(Tags).filter_by(id=tag_id)\n logging.debug('Query executed: %s' % query)\n data = query.first()\n if as_dict:\n columns = Tags.__table__.columns.keys()\n data = 
to_dict(data, columns)\n return data", "def getInfo(self):\n return self._info", "def getMetaByTags(tags):\n\n if type(tags) is str:\n tag = tags\n else:\n tag = tags[0]\n\n record = getRecordsByTags(tag)\n\n if record is not None:\n geodata_meta = parseMeta(record)\n return geodata_meta\n else:\n error = \"could not get record for tags from gnos\"\n print(error)\n return None", "def getMetaByTags(tags):\n\n if type(tags) is str:\n tag = tags\n else:\n tag = tags[0]\n\n record = getRecordsByTags(tag)\n\n if record is not None:\n geodata_meta = parseMeta(record)\n return geodata_meta\n else:\n error = \"could not get record for tags from gnos\"\n print(error)\n return None", "def get_tag(self) -> int:\n return self.tag", "def get_tag(self, tag_type: str) -> str:\n if tag_type in self.tags:\n return self.tags[tag_type]\n return None", "def getInfo(notification):", "def find_tag(tag : str):\n\tprint(f\"finding tag {tag} . . .\")\n\n\tkeys = db.keys() # lists the database keys\n\n\tif \"tags\" not in keys: # to make sure there's a database\n\t\tdb[\"tags\"] = {} # creates the tag database\n\t\tprint(f\"Initiated databse . . .\")\n\t\n\ttags = db[\"tags\"] # sets the database to a variable for easy use\n\t# tags is a dictionary with keys and values\n\t# to access a tag, use tags[tag]\n\n\treturn_value = None\n\n\tif tag in tags:\n\t\treturn_value = {\n\t\t\t\"key\": tag, # gets the tag name\n\t\t\t\"value\": tags[tag], # gets the tag value frome db\n\t\t\t\"status\": 200\n\t\t}\n\t\tprint(f\"Tag {tag} found with value {tags[tag]}.\")\n\t\n\telif tag not in tags:\n\t\treturn_value = {\n\t\t\t\"key\": tag, # gets the supposed tag name\n\t\t\t\"value\": f\"Tag `{tag}` doesn't exist.\", # returns none\n\t\t\t\"status\": 404\n\t\t}\n\t\tprint(f\"Tag {tag} not found.\")\n\t\tif tag == None:\n\t\t\treturn_value[\"value\"] = None\n\t\n\telse:\n\t\treturn_value = {\n\t\t\t\"key\": None,\n\t\t\t\"value\": None,\n\t\t\t\"status\": 500\n\t\t}\n\t\tprint(f\"An error occured finding {tag}.\")\n\t\n\treturn return_value", "def get_tag_meta(self, tag_name: str) -> TagMeta:\n return self._tag_metas.get(tag_name)", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def details(self, identifier):\n return self.client.request_with_method(Methods.GET % (self.name, identifier,))", "def get_tagname(tags, tagid):\n for tag in tags:\n if tag['id'] == tagid:\n return tag['name']", "def show_tag(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n\n return render_template(\"tags/tag_details.html\", tag=tag)", "def get(self, hash_tag):\n request_args = get_current_request_args()\n\n scope = request_args.get('scope') or DEFAULT_HASH_TAG_FETCH_SCOPE\n if scope not in HASH_TAG_RETRIEVAL_SCOPES:\n raise BadRequest(\n '`scope` must be one of {}'.format(HASH_TAG_RETRIEVAL_SCOPES))\n\n hash_tag = HashTag.get_not_deleted(hash_tag=hash_tag)\n if hash_tag is None:\n raise ResourceNotFound('Hash tag not found')\n\n hash_tag_details = {\n 'meta': lambda x: {\n 'data': None,\n 'meta': None\n },\n 'posts': lambda y: {\n 'data': None,\n 'meta': None\n },\n 'followers': lambda z: {\n 'data': None,\n 'meta': None\n }\n }\n\n scoped_details = hash_tag_details[scope]()\n\n return api_success_response(**scoped_details)", "def show_tag_details(tag_id):\n tag = Tag.query.get_or_404(tag_id)\n posts = tag.posts\n\n return render_template('tags/tag_details.html', tag=tag, posts=posts)", "def read_tag(\n *,\n db: Session = Depends(get_db),\n id: int,\n current_user: DBUser = Depends(get_current_active_user),\n):\n tag = 
crud.tag.get(db_session=db, id=id)\n if not tag:\n raise HTTPException(status_code=404, detail=\"Tag not found\")\n if not crud.user.is_superuser(current_user) and (tag.owner_id != current_user.id):\n raise HTTPException(status_code=400, detail=\"Not enough permissions\")\n return tag", "def _get_information(self):\n pass", "def get_uuids_from_tag(dictionary, tag):\n return dictionary[tag]", "def info(tag, message=None):\n Log._post(\"info\", tag, message)", "def getTag(self, tag_to_find):\n tree = xml.parse(\"lesson\" + str(self.__id_lesson) + \".xml\")\n root = tree.getroot()\n for tag in root:\n if tag.tag == tag_to_find:\n return tag.text", "def get_info(self, key: str) -> TaskInfo:\n return self.task_graph.nodes[key][\"info\"]", "def gettag(self):\n cmd = [\"git\", \"tag\"]\n p = Popen(cmd, cwd=self.filename, stdout=PIPE)\n data, res = p.communicate()\n return data.decode(\"utf-8\").split(\"\\n\")", "def get_tag(self, sha):\n return self.get_url_data(self.api_url + 'tags/' + sha)", "def get_info(self):\n return None", "def get_tag_for_instance(self, instance_id, tag_key):\n tags = self.get_tags_for_instance(instance_id)\n for tag in tags:\n if tag['Key'] == tag_key:\n return tag['Value']\n return None", "def fetch(self, tag):\n return fetch_image(self.collection.client, tag)", "def get_info(self):\n raise NotImplementedError(\"Robot.get_info\")", "def get_info(self) -> str:\n raise NotImplementedError()", "def info(self):\r\n return self._get('info', {})", "def GetMetadata(self):\n return self.dict['meta']", "def GetInfo(self, reason=None):\n query = []\n _AppendReason(query, reason)\n return self._SendRequest(HTTP_GET, \"/%s/info\" % GANETI_RAPI_VERSION,\n query, None)", "def get(self, currency, entity):\n check_inputs(currency=currency, entity=entity)\n entity_stats = entitiesDAO.get_entity(currency, entity)\n if entity_stats:\n entity_stats['tags'] = entitiesDAO.\\\n list_entity_tags(currency, entity_stats['entity'])\n entity_stats['tag_coherence'] = compute_tag_coherence(\n entity_stats['tags'])\n return entity_stats\n abort(404,\n \"Entity {} not found in currency {}\".format(entity, currency))", "def read(self, request, tag=None):\n tags = Tag.objects\n if tag:\n t = tags.get(slug=tag)\n return t.entry_set.all()\n else:\n return tags.all()", "def info(self) -> Info:\n raw = self._call('GET', 'info')\n return Info.parse_raw(raw)", "def tag(self):\n\n return self._tag", "def get_info_about(attr):\n return getattr(get_info_about, attr)", "def info(self):\n return self._info", "def details(self, identifier):\n return self.client.request_with_method(Methods.GET % (self.name, identifier,))['item']", "def device_get_information_about(pnd, buf):\n return _nfc.device_get_information_about(pnd, buf)", "def get_tag_dict(self):\n return self.tag_dict", "def get_all_tagged(self,tag_name):\n return self.tag2elements[tag_name]", "def find_data_in_soup(soup, tag: str, class_id:str) -> str:\n return soup.find(tag, class_=class_id).get_text()", "def get_profile(tag, platform=\"pc\", region=\"eu\"):\n #\n try:\n context = ssl._create_unverified_context()\n profile = json.load(\n const.codec(urlopen(const.URL + platform + \"/\" + region + \"/\" + tag + \"/profile\", context=context)))\n #\n if \"error\" in profile:\n raise BattleTagNotFound(profile['error'])\n exit(1)\n #\n result = pr.Profile(profile['data']['username'],\n profile['data']['level'],\n profile['data']['games']['quick']['wins'],\n profile['data']['games']['competitive']['wins'],\n profile['data']['games']['competitive']['lost'],\n 
profile['data']['playtime']['quick'],\n profile['data']['playtime']['competitive'],\n profile['data']['avatar'],\n profile['data']['competitive']['rank'])\n return result\n except urllib.error.URLError as e:\n print(\"An error occurred when fetching stats\\n\" + e)\n exit(1)\n except Exception as e:\n print(\"An error occurred:\\n \" + str(e))\n exit(1)", "def get_content_by_tag(self, tag: str) -> Any:\n result = self.client.get_instances_id_content_tags_path(id_=self.id_, tags_path=tag)\n\n try:\n return result.decode('utf-8').strip().replace('\\x00', '')\n except AttributeError:\n return result", "def get(self, tagname):\n return self.tags.setdefault(tagname, ModelTag(tagname))", "def get_tag_info(xint,conn):\n\n get_tags = ('SELECT DISTINCT fip2.value '\n 'FROM interaction i, feature_interaction fi, feature_interactionprop fip, '\n 'feature f, cvterm cvt, feature_interactionprop fip2, cvterm cvt2 '\n 'WHERE f.feature_id = fi.feature_id AND fi.interaction_id = i.interaction_id '\n 'AND fi.feature_interaction_id = fip.feature_interaction_id '\n 'AND fip.type_id = cvt.cvterm_id AND cvt.name = \\'participating feature\\' '\n 'AND fi.feature_interaction_id = fip2.feature_interaction_id AND fip2.type_id = cvt2.cvterm_id '\n 'AND cvt2.name = \\'comment\\' AND f.uniquename = %s AND i.uniquename = %s')\n tags = connect(get_tags,xint,conn)\n return(tags)", "def get_info(self):\n return \"TODO !\"", "def tag(self) -> str:\n return self._tag", "def info(self):\n path = self._get_path('info')\n \n response = self._GET(path)\n self._set_attrs_to_values(response)\n return response" ]
[ "0.747428", "0.7410492", "0.6692782", "0.6677863", "0.6667355", "0.65872085", "0.65737224", "0.65508467", "0.6460756", "0.6332787", "0.63079554", "0.62929213", "0.6286769", "0.6240733", "0.6209361", "0.6202848", "0.6188568", "0.61841136", "0.6178364", "0.61635315", "0.6150373", "0.61389214", "0.613403", "0.6106511", "0.6089978", "0.60848683", "0.60790443", "0.60614794", "0.60612637", "0.60612637", "0.60443217", "0.6040403", "0.6036092", "0.60102874", "0.5988968", "0.59604704", "0.5956704", "0.5954523", "0.5954523", "0.59257036", "0.59203625", "0.59169394", "0.5897969", "0.5884193", "0.5877219", "0.5876849", "0.5864417", "0.58560133", "0.5851415", "0.5850476", "0.58379424", "0.5831052", "0.5802393", "0.578902", "0.5787594", "0.5787594", "0.5773666", "0.57272065", "0.5708906", "0.56970096", "0.569418", "0.5692885", "0.56768245", "0.56703764", "0.56703514", "0.5663537", "0.56551725", "0.5647706", "0.56457895", "0.5644948", "0.5644946", "0.5644393", "0.56434673", "0.5641962", "0.5623377", "0.5613227", "0.5607178", "0.55957586", "0.5594922", "0.5590595", "0.55769557", "0.5571717", "0.5560911", "0.5557361", "0.5556444", "0.5555827", "0.555298", "0.55415493", "0.554053", "0.553725", "0.5531663", "0.5523936", "0.5518777", "0.5498292", "0.548539", "0.54627466", "0.54577106", "0.54498196", "0.54338926", "0.5428644", "0.5427041" ]
0.0
-1
Retrieves the specified tag default.
def get_tag_default(self, tag_default_id, **kwargs):
        resource_path = "/tagDefaults/{tagDefaultId}"
        method = "GET"

        expected_kwargs = ["retry_strategy"]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "get_tag_default got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "tagDefaultId": tag_default_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json"
        }

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="TagDefault")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="TagDefault")
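The method above accepts a per-call `retry_strategy` kwarg; a sketch under the assumption that `oci.retry.DEFAULT_RETRY_STRATEGY` from the standard SDK is an acceptable value (the OCID is a placeholder):

import oci
from oci.retry import DEFAULT_RETRY_STRATEGY

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Override the client-level retry strategy for this single call.
tag_default = identity.get_tag_default(
    "ocid1.tagdefault.oc1..exampleuniqueID",  # placeholder OCID
    retry_strategy=DEFAULT_RETRY_STRATEGY,
).data
print(tag_default.value)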
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_default_tag(self, tags):\n tags_counter = Counter()\n for tag in tags:\n tags_counter[tag] += 1\n\n if len(tags_counter) == 2 and list(tags_counter.values())[0] == list(tags_counter.values())[1]:\n return ut.find_positive_tag(tags_counter.keys())\n\n return tags_counter.most_common(1)[0][0]", "def getDefault():", "def get_default(self):\n\n\t\treturn self.__default", "def value(self, tag, default=None):\n element = self._root.find(tag)\n if element is not None:\n return convert_to_primitive(element.text)\n return default", "def get_default_value(self, tag, primitive_type, hint=None):\n # initialize\n default_value = self.get_default_value_of_type(primitive_type)\n\n # use example value as default (if exist)\n if self.use_examples_for_default and self.get_examples_values:\n examples_values = self.get_examples_values(tag)\n if examples_values:\n default_value = list(examples_values)[0]\n\n # use response value as default (if exist)\n if self.use_response_for_default and self.get_response_values:\n response_values = self.get_response_values(tag, hint)\n if response_values:\n default_value = response_values[0]\n\n return default_value", "def default(self):\n return self.get(name='Unknown')", "def get(name, default=None):", "def getorelse(self, name, default=None):\n try:\n return self._defaults[name]\n except KeyError:\n return default", "def get(self, name, default=None):\n try:\n return self.__getattribute__(name, default)\n except AttributeError:\n return default", "def Default():\n return _DEFAULT", "def get_default(cls):\n raise NotImplementedError", "def getdefault(self, option, type=str, default=None):\r\n return self.get(Config.DEFAULT_SECTION, option, type, default=default)", "def get(self, name):\n try:\n return self._defaults[name]\n except KeyError:\n raise UndefinedDefault(\"default %s is undefined\" % name)", "def get_attr_default(self, attr_name):\n for defaults in (self._ATTRIBUTE_DEFAULTS.get(self.field_type, {}),\n self._ATTRIBUTE_DEFAULTS['*']):\n try:\n return defaults[attr_name]\n except KeyError:\n continue\n\n return None", "def get_attr_default(self, attr_name):\n for defaults in (self._ATTRIBUTE_DEFAULTS.get(self.field_type, {}),\n self._ATTRIBUTE_DEFAULTS['*']):\n try:\n return defaults[attr_name]\n except KeyError:\n continue\n\n return None", "def default(self):\n # easy enough\n return self._default", "def default(self):\n return self.__default", "def get(self, name, default=None):\n\t\treturn self[name] if self[name] is not None else default", "def get_default(self, name):\n rargs = [_ for _ in reversed(self.args)]\n rdefaults = [_ for _ in reversed(self.defaults)]\n return rdefaults[rargs.index(name)]", "def default(self):\n return self.__default", "def get_default(self):\r\n if self.has_default:\r\n if callable(self.default):\r\n return self.default()\r\n else:\r\n return self.default", "def get_default_value(self):\n pass", "def default(self):\n return self._default", "def default(self):\n return self._default", "def f_get_default(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def default():\n return DefaultSwh.default()", "def get_default_variant(variants):\n for variant in variants:\n if variant.default:\n return variant", "def default(self):\n return self._configs[0] if len(self._configs) else None", "def _get_default(self):\n if callable(self.default):\n return self.default()\n else:\n return self.default", "def get_default_config(self, attr):\n config_val = None\n\n try:\n config_val = getattr(self.config_default, 
attr)\n except AttributeError:\n pass\n\n return config_val", "def get_default_config(self, attr):\n config_val = None\n\n try:\n config_val = getattr(self.config_default, attr)\n except AttributeError:\n pass\n\n return config_val", "def default(self) -> object:\n return self._default", "def get(value, default=\"\"):\n if value is None:\n return default\n\n return value", "def get_default_value(self, name):\n return self.get_attribute_schema(name).default", "def get(self, key, default=None):\n try:\n return self[key]\n except KeyError:\n return default", "def getAttribute(self, key, default=''):\n return self.attr(key, default=default)", "def getAttribute(self, key, default=''):\n return self.attr(key, default=default)", "def get(\n self, key: str, default: Optional[Any] = None, version: Optional[int] = None\n ) -> Optional[Any]:\n return default", "def get_default(self, stmt, slist):\n for s in slist:\n if s.keyword == \"default\": return s.arg\n dst = stmt.search_one(\"default\")\n if dst: return dst.arg\n return None", "def default_language_tag(self):\n return self.properties.get(\"defaultLanguageTag\", None)", "def get(self, name, default):\n try:\n return self[name]\n except KeyError:\n self.set(name, default)\n return default", "def get_default(self) -> T | None:\n return (\n self.default # TODO: deepcopy mutable defaults?\n if self.default_factory is None\n else self.default_factory()\n )", "def get(self, key, default=None):\n try:\n return self._get(key)\n except Exception:\n return default", "def default(self):\r\n return self.default_value()", "def getOptionalTag(node, tag, option=\"\"):\n try:\n return getTag(node, tag)\n except TagError:\n return option", "def get(self, key, default=None):\r\n try:\r\n return self[key]\r\n except KeyError:\r\n return default", "def get_default(cls, opt):\n try:\n return cls._OPTS[opt].default\n except KeyError:\n raise ValueError('unknown option name %r' % (opt,))", "def default():\n return DefaultPvWattsv8.default()", "def get(self, key: str, default=None) -> Any:\n try:\n return self[key][0]\n except KeyError:\n return default", "def default_value(self) -> Optional[Any]:\n return self.get(\"/DV\")", "def xpath_get(self, path, dtype=str, default=None):\n items = self._tag.xpath(path)\n try:\n return dtype(items[0].text)\n except (TypeError, IndexError):\n return default", "def default(self):\n # get my default value\n default = self._default\n # if it is still at its trivial value\n if default is schemata.component.default:\n # ask my protocol\n return self.protocol.pyre_default\n # otherwise, return it\n return default", "def get_default_version(self):\n # latest is a special case where we don't have to check if it exists\n if self.default_version == 'latest':\n return self.default_version\n # check if the default_version exists\n version_qs = self.versions.filter(\n slug=self.default_version,\n active=True\n )\n if version_qs.exists():\n return self.default_version\n return 'latest'", "def get_default(section, option=\"\"):\n\tif not option:\n\t\tif defaults.has_key(section):\n\t\t\treturn defaults[section]\n\telse:\n\t\tif defaults.has_key(section):\n\t\t\tif defaults[section].has_key(option):\n\t\t\t\treturn defaults[section][option]\n\treturn None", "def get(self, key, default=None):\n return self[key] if key in self else default", "def default():", "def find_with_default(node, path, default):\r\n v = node.find(path)\r\n if v is not None:\r\n return v.text\r\n else:\r\n return default", "def get(self, key: str, default: Any = None) -> 
Any:\n return self.attributes.get(key, default)", "def default():\n return DefaultPvWattsv5.default()", "def get(self, name, default=UNDEFINED):\n try:\n return self.__getattr__(name)\n except AttributeError:\n return default", "def get_default(cls):\n return cls.NONE", "def get(self, key, default=None):\n return self._d.get(key, default)", "def _get_simple_default_value(simple):\n return _SIMPLE_DEFAULT_VALUES[simple]", "def get_default_resource(self, name):\n if not self._default_resource:\n self._default_resource = self.get(name=name)\n\n return self._default_resource", "def get(self, name, default=None):\n return self._storage.get(name, default)", "def _getDefaultValue(self):\n value = self._getDefaultValue()\n return value.getData() if value else None", "def is_default(self):\n return self._tag == 'default'", "def get_default_value(self, request):\n\n return self.default_value", "def get_value(soup, tag, cond, default=None):\r\n ele = soup.find(tag, cond)\r\n if ele:\r\n return ele.text.strip()\r\n return default", "def get(self, key, default=None):\n try:\n return self.context.get(self.prefix+'.'+key, default)\n except AttributeError:\n return default", "def get(self, key, default=None):\n pass", "def get(self, key: str, default: Any = None) -> Any:\n try:\n return getattr(self, key)\n except AttributeError:\n return default", "def get_tag(self, tag_type: str) -> str:\n if tag_type in self.tags:\n return self.tags[tag_type]\n return None", "def getValue(name, default=None):", "def get(self, key, default=None):\n if key in self:\n return self[key]\n return default", "def getAttribute(self, attribname, default = None):\n return self.attributes.get(attribname, default)", "def get(self, keyname: str, default: Optional[Any] = None) -> Any:\n try:\n return self[keyname]\n except KeyError:\n return default", "def get_tag(self, tag, filename):\n return self.get_tag_batch(tag, [filename])[0]", "def get(self, item, default=NO_DEFAULT):\n try:\n return util.annotate(self.dict_[item])\n except KeyError:\n if default is NO_DEFAULT:\n raise\n return default", "def get_option(options=None, local_tag=None, doc=None, doc_tag=None, default=None, error_on_none=True):\n variable = None\n\n # element level\n if options is not None and local_tag is not None:\n if local_tag in options and options[local_tag] is not None:\n variable = options[local_tag]\n if variable is not None:\n return variable\n\n # doc level\n if doc is not None and doc_tag is not None:\n variable = doc.get_metadata(doc_tag, None)\n if variable is not None:\n return variable\n\n # default level\n variable = default\n if variable is None and error_on_none:\n raise ValueError(\"could not retrieve a value for tag; local={0}, doc={1}\".format(local_tag, doc_tag))\n\n return variable", "def get(self, key, default=0):\n try:\n return self[key]\n except KeyError:\n return default", "def getvalue(self, name, *default):\n try:\n return self.getattr(name).value\n except KeyError:\n if default:\n return default[0]\n raise", "def get(self, key, default=None):\r\n return self._getAttrMap().get(key, default)", "def get(self, key, default=None):\n try:\n return self.__getitem__(key)\n except ValueError:\n if default is not None:\n return default\n else:\n raise", "def get(self, key: Any, default: Optional[Any] = None) -> Any:\n try:\n return self[key]\n except (KeyError, ValueError, IndexError):\n return default", "def get(self):\n return self.tag.get()", "def get(self, attr, default=None):\n logging.info('get {} {}'.format(attr, self._state.get(attr, 
None)))\n\n return self._state.get(attr, default)", "def get(self, name, default=''):\n return getattr(settings, name, DEFAULT_SETTINGS.get(name, default))", "def get_setting_default(cls, key, **kwargs):\n setting = cls.get_setting_definition(key, **kwargs)\n\n return setting.get('default', '')", "def default():\n return DefaultPvSamv1.default()", "def _get_default(ddict, key, default):\n if ddict is None or key not in ddict or ddict[key] is None:\n return default\n return ddict[key]", "def get(self, key, default=None):\n if self.root is not None:\n res = self._get(key, self.root)\n if res:\n return res\n else:\n return default\n return default", "def get(self, key, default):\n return self.plugin.get(key, default)", "def get_default():\n backend, opts = parse_default()\n assert backend is not None\n return load_backend(backend, opts)", "def getfirst(self, key, default=None):\n \n values = self.getlist(key)\n return values[0] if values else default", "def get(self, key, default=None):", "def get(key, default=None):\n config = _get_config_dict()\n return config.get(key, default)", "def default(self) -> Optional[ExpressionRecord]:\n return self._default", "def get_text_by_tag(start, tagname, default=None):\n node_back = start.getElementsByTagName(tagname)[0]\n for node in node_back.childNodes:\n if node.nodeType == node.TEXT_NODE:\n return node.data\n\n return default", "def getparam(name, default=None, stash=None, params=None):\n v = getparamlist(name, stash=stash, params=params)\n if len(v) > 0: return v[0]\n return default" ]
[ "0.7081396", "0.6996165", "0.6928306", "0.68010557", "0.67721087", "0.6700133", "0.6666502", "0.65979785", "0.65852", "0.65847015", "0.6540291", "0.64615685", "0.64236104", "0.63959", "0.63959", "0.6388913", "0.63801557", "0.6371682", "0.63607526", "0.63218176", "0.63205206", "0.6282456", "0.62779695", "0.62779695", "0.6275138", "0.62660205", "0.6201286", "0.6174433", "0.617404", "0.61736596", "0.61736596", "0.6172632", "0.6166288", "0.61658746", "0.61572826", "0.6156753", "0.6156753", "0.61516875", "0.6143117", "0.614186", "0.6131286", "0.612727", "0.6106015", "0.6094617", "0.6092503", "0.60825074", "0.6057718", "0.60476303", "0.60436773", "0.60426295", "0.6040276", "0.6027845", "0.6026629", "0.60142994", "0.60142404", "0.6002901", "0.6000103", "0.5994348", "0.5991117", "0.5989643", "0.5989494", "0.5986988", "0.5981391", "0.5978621", "0.5965411", "0.5964895", "0.5952528", "0.5933815", "0.5930645", "0.5927769", "0.59193426", "0.5911402", "0.590526", "0.5904571", "0.58946437", "0.5892097", "0.58841884", "0.58818835", "0.5877215", "0.5867334", "0.58646476", "0.58467954", "0.58433414", "0.58421", "0.58356094", "0.5819498", "0.5819296", "0.5809915", "0.5808632", "0.5802777", "0.57922494", "0.5791502", "0.5775107", "0.5771565", "0.5751269", "0.57460594", "0.57438445", "0.5739364", "0.57255334", "0.5723432" ]
0.6928629
2
Gets the specified tag namespace's information.
def get_tag_namespace(self, tag_namespace_id, **kwargs):
        resource_path = "/tagNamespaces/{tagNamespaceId}"
        method = "GET"

        expected_kwargs = ["retry_strategy"]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "get_tag_namespace got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "tagNamespaceId": tag_namespace_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json"
        }

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="TagNamespace")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                response_type="TagNamespace")
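A last sketch, this time around service-side failures: assuming the standard SDK raises `oci.exceptions.ServiceError` for non-2xx responses (the OCID is again a placeholder):

import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

try:
    ns = identity.get_tag_namespace("ocid1.tagnamespace.oc1..exampleuniqueID").data
    print(ns.name, ns.is_retired)
except oci.exceptions.ServiceError as e:
    # e.status carries the HTTP code, e.g. 404 if the namespace does not exist.
    print(e.status, e.message)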
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def namespace(self, namespace):\n return self.client.call('GET',\n self.name, params={'namespace': namespace})", "def _parse_ns_tag(ns_tag):\n m = _NsTagRE.match(ns_tag)\n return m.groups() if m else (None, None)", "def _get_xml_namespace(root_tag):\n m = re.match(r\"\\{.*\\}\", root_tag)\n return m.group(0) if m else \"\"", "def getnamespaceinfo (self, namespaceguid, jobguid = \"\", executionparams = {}):\n params =dict()\n params['namespaceguid'] = namespaceguid\n executionparams['rootobjecttype'] = 'dssnamespace'\n\n \n return q.workflowengine.actionmanager.startRootobjectAction('dssnamespace', 'getnamespaceinfo', params, jobguid=jobguid, executionparams=executionparams)", "def info(self, tag):\n\n url = \"https://api.instagram.com/v1/tags/{0}?access_token={1}\".format(tag, self.access_token)\n request = requests.get(url)\n print(request.headers)\n return request.json()", "def namespace(self):\n return self._namespace", "def namespace(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"namespace\")", "def GetNamespace(self, namespace_name):\n return self.type_namespaces_map.get(namespace_name, None)", "def namespace(self):\n return VarLookupDict(self._namespaces)", "def namespace(self) -> Optional[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[str]:\n return pulumi.get(self, \"namespace\")", "def get_namespace(self, prefix):\n try:\n return self.parser.namespaces[prefix]\n except KeyError as err:\n raise self.error('FONS0004', 'No namespace found for prefix %s' % str(err))", "def getElementNamespace(self):\n return _libsbml.ASTBasePlugin_getElementNamespace(self)", "def _find_tag(self, token, tagName, prefix):\n fulltag=token.text\n try:\n return self.globalTagDict[fulltag]\n except KeyError:\n pass\n if prefix==self.prefix:\n try:\n return self.localTagDict[tagName]\n except KeyError:\n pass\n \n try:\n return self.root._extraTagDict[fulltag]\n except KeyError:\n pass\n\n p=self.parent\n while p:\n if p.prefix==prefix:\n try:\n return p.localTagDict[tagName]\n except KeyError:\n pass\n p=p.parent\n \n self.handle_error(\"tag not found: %s\" % fulltag, token)", "def getElementNamespace(self):\n return _libsbml.SBasePlugin_getElementNamespace(self)", "def getContext(namespace):", "def get_namespace_info(self, device):\n for info in self.namespaces.values():\n if info.blockdev == device or \\\n (device.startswith(\"/dev/\") and info.blockdev == device[5:]):\n return info", "def get_namespace(self, namespace, lowercase=True, trim_namespace=True):\n\t\treturn self.get_namespace_view(namespace, lowercase, trim_namespace).copy()", "def namespace(self):\n return self.__key.namespace()", "def get_tag(self, tag):\n resp = self.get(_u.build_uri(\"tag\", domain=self.domain),\n data={'tag': tag})\n return utils.handle_response(resp)", "def namespace(self) -> str:\n return pulumi.get(self, \"namespace\")", "def namespace(self):\n assert self._namespace\n return self._namespace", "def namespace_desc(self) -> str:\n return pulumi.get(self, \"namespace_desc\")", "def get_namespace(self) -> 
str:\n return self._namespace", "def getNamespace(self):\n pass;", "def get_tag_stats(self, tag):\n resp = self.get(_u.build_uri(\"tag.stats\", domain=self.domain),\n data={'tag': tag})\n return utils.handle_response(resp)", "def get_tag(self, tag, filename):\n return self.get_tag_batch(tag, [filename])[0]", "def namespace (self) :\n\n return self.__namespace__", "def _get_cloud_function_namespaces_metadata(offset=0):\n\n res = requests.get(\n f\"{self.cf_namespaces_url}?limit=200&offset={offset}\",\n headers=self.get_headers(),\n )\n return json.loads(res.text)", "def getNamespaces(self):\n return _libsbml.XMLToken_getNamespaces(self)", "def _get_tag(self, tag):\n return self.prefix + tag", "def from_ns(match):\n return ns.get(match.group(1), match.group())", "def get(cls, ns, name):\n key_name = '%s:%s' % (ns, name)\n return cls.get_by_key_name(key_name)", "def get_tag(self, scope, key):\r\n print 'GETTING', scope, key, self._tags\r\n return self._tags[scope].get(key)", "def prefixForNamespace (self, namespace):\n pfxs = self.__inScopePrefixes.get(namespace)\n if pfxs:\n return next(iter(pfxs))\n return None", "def getNamespaces(self):\n return _libsbml.SBase_getNamespaces(self)", "def _getnamespaces(cls):\n return \" \".join(Kmlable._namespaces)", "def get_namespaces():\r\n\r\n print 'Getting namespaces'\r\n tree = etree.parse('http://lesswrong.wikia.com/wiki/Special:AllPages', parser)\r\n options = tree.xpath('//select[@id=\"namespace\"]/option')\r\n namespaces = [option.get('value') for option in options]\r\n pprint(namespaces)\r\n return namespaces", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> str:\n return self._namespace", "def getTagStats(graph, tag):\n r = graph.getroot()\n s = graphToStatCount(r, tag)\n if ':' in tag:\n getExactBranch(s, tag)\n else:\n pruneStatCountBranches(s, tag)\n sendUpStatCountTagCounts(s, tag)\n return s", "def _get_tag(self):\n return self.__tag", "def getNamespaces(self):\n return _libsbml.SBMLDocument_getNamespaces(self)", "def getNamespaces(self, *args):\n return 
_libsbml.SBMLNamespaces_getNamespaces(self, *args)", "def get_namespace_from_config(k8s_cli):\n # find namespace from config file\n cmd = \"{} config view -o json\".format(k8s_cli)\n return_code, out = run_shell_command(cmd)\n if return_code:\n return None\n config = json.loads(out)\n current_context = config.get('current-context')\n if not current_context:\n return None\n\n for context in config.get('contexts', []):\n if context['name'] == current_context:\n if context['context'].get(\"namespace\"):\n return context['context'][\"namespace\"]\n break\n return None", "def getNamespacePrefix(self, namespace):\n return self.namespaceTable.get(namespace, None)", "def get(self, name, dict_output=False):\n namespace = self.client_core.read_namespace(name=name)\n logger.info(f\"Got namespace {name}\")\n\n # convert the obj to dict if required\n if dict_output:\n namespace = convert_obj_to_dict(namespace)\n else:\n namespace.metadata.resource_version = ''\n\n return namespace", "def namespaceInfo(*args, absoluteName: bool=True, baseName: bool=True, currentNamespace:\n bool=True, dagPath: bool=True, fullName: bool=True, internal: bool=True,\n isRootNamespace: bool=True, listNamespace: bool=True,\n listOnlyDependencyNodes: bool=True, listOnlyNamespaces: bool=True, parent:\n bool=True, recurse: bool=True, shortName: bool=True, **kwargs)->AnyStr:\n pass", "def get_info(self, pkgname):\n for pkg in self.rpc.info(pkgname):\n return pkg", "def _get_workload_namespace(self):\n namespace_yaml_file = os.path.join(\n os.path.join(self.workload_subscription_dir, self.workload_name),\n \"namespace.yaml\",\n )\n namespace_yaml_data = templating.load_yaml(namespace_yaml_file)\n return namespace_yaml_data[\"metadata\"][\"name\"]", "def test_get_namespace(self):\n pass", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespace(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"namespace\")", "def namespaces(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"namespaces\")", "def __getitem__(self, tag):\n return self.get(tag)", "def read_metadata(self):\n return self.parent.controller.get_tag_metadata()", "def get_all_tagged(self,tag_name):\n return self.tag2elements[tag_name]", "def _get_namespace(self, data):\n ns_name = data['filename'].namespace\n try:\n return models.Namespace.objects.get(name=ns_name)\n except models.Namespace.DoesNotExist:\n raise exceptions.ValidationError(\n f'Namespace \"{ns_name}\" does not exist.'\n )", "def get(self):\n return self.tag.get()", "def XmlNamespace(self) -> str:", "def get_services_in_namespace(self, namespace):\n ret = self.v1_service_list.get(namespace=namespace)\n return [each.metadata.name for each in ret.items]", "def _namespace(self) -> str:\n with 
open(\"/var/run/secrets/kubernetes.io/serviceaccount/namespace\", \"r\") as f:\n return f.read().strip()", "def prefix_to_ns(self, prefix):\n defin = self.module.i_ctx.get_module(\n self.module.i_prefixes[prefix][0])\n return defin.search_one(\"namespace\").arg", "def get_namespaces():\n return list(StaticAsset._load_namespaces().keys())", "def ns(self):\n ret = libxml2mod.xmlNodeGetNs(self._o)\n if ret is None:return None\n __tmp = xmlNs(_obj=ret)\n return __tmp", "def ns_tag(self, tagname):\n return \"{%s}%s\" % (S3_XMLNS, tagname)", "def get_template_namespace(self):\n raise NotImplementedError()", "def test_get_namespaces_names(self):\n pass", "def tag(self) -> str:\n return pulumi.get(self, \"tag\")", "def ns(tags):\n return '/'.join(['*[local-name()=\"%s\"]' % t if t not in ['*', '..', '.'] else t\n for t in tags.split('/') if t])", "def namespaces(self):\n return self.namespaced_fields().namespaces()", "def getnamespace(f):\n namespace = dict(six.get_function_globals(f))\n closure = six.get_function_closure(f)\n freevars = six.get_function_code(f).co_freevars\n if freevars and closure:\n for name, cell in zip(freevars, closure):\n namespace[name] = cell.cell_contents\n return namespace" ]
[ "0.6676316", "0.65238637", "0.6215992", "0.5927034", "0.58749634", "0.5791631", "0.56914693", "0.56914693", "0.56914693", "0.56914693", "0.56914693", "0.5600001", "0.5581799", "0.5574338", "0.5574338", "0.5574338", "0.55718166", "0.5555538", "0.5546786", "0.5539851", "0.5528203", "0.55067545", "0.55008525", "0.54999745", "0.5497428", "0.54787475", "0.5423012", "0.54196924", "0.537974", "0.5362941", "0.5346535", "0.5346085", "0.534111", "0.53322065", "0.5328915", "0.5312933", "0.5305154", "0.52747434", "0.52669644", "0.5249877", "0.52445376", "0.524257", "0.5231884", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.52043366", "0.5202261", "0.51802254", "0.51762205", "0.51739657", "0.5162748", "0.5146842", "0.5138642", "0.513248", "0.51257676", "0.5124234", "0.5119978", "0.5118198", "0.5096856", "0.5096856", "0.5096856", "0.5096856", "0.5096856", "0.5096856", "0.5096856", "0.5096856", "0.5096856", "0.5096856", "0.5090183", "0.50901526", "0.50828505", "0.50744164", "0.5065615", "0.5031438", "0.5018875", "0.5009576", "0.50062805", "0.49965125", "0.49930823", "0.4984772", "0.4974753", "0.49734682", "0.49658147", "0.49575892", "0.49524802", "0.49489236", "0.493988" ]
0.5151831
64
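For reference, a minimal usage sketch of the get_tag_namespace document above, assuming the standard OCI Python SDK entry points (oci.config.from_file and oci.identity.IdentityClient); the tag namespace OCID below is a placeholder, not a real identifier:

import oci

# Load credentials from the default config file (~/.oci/config).
config = oci.config.from_file()
identity_client = oci.identity.IdentityClient(config)

# Placeholder OCID -- substitute a real tag namespace OCID from your tenancy.
response = identity_client.get_tag_namespace(
    tag_namespace_id="ocid1.tagnamespace.oc1..exampleuniqueid")
tag_namespace = response.data  # a TagNamespace model instance
print(tag_namespace.name, tag_namespace.is_retired)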
Gets details on a specified work request. The workRequestID is returned in the opc-work-request-id header for any asynchronous operation in the Identity and Access Management service.
def get_tagging_work_request(self, work_request_id, **kwargs): resource_path = "/taggingWorkRequests/{workRequestId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_tagging_work_request got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "workRequestId": work_request_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="TaggingWorkRequest") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="TaggingWorkRequest")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_work_request(self, work_request_id, **kwargs):\n resource_path = \"/workRequests/{workRequestId}\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"get_work_request got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"workRequestId\": work_request_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"WorkRequest\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"WorkRequest\")", "def getwork(self, data: Optional[str] = None) -> Dict[str, Any]:\n assert data is None or type(data) == str\n return self.rpc_call(\"getwork\", data)", "def get_request(request_id=None, workload_id=None, session=None):\n\n try:\n if not request_id and workload_id:\n request_ids = get_request_ids_by_workload_id(workload_id)\n if request_ids and len(request_ids) > 1:\n raise exceptions.IDDSException(\"More than one request with the same workload_id\")\n request_id = request_ids[0]\n\n req_select = \"\"\"select request_id, scope, name, requester, request_type, transform_tag, priority,\n status, locking, workload_id, created_at, updated_at, accessed_at, expired_at, errors,\n request_metadata, processing_metadata\n from atlas_idds.requests where request_id=:request_id\n \"\"\"\n req_stmt = text(req_select)\n result = session.execute(req_stmt, {'request_id': request_id})\n request = result.fetchone()\n\n if request is None:\n raise exceptions.NoObject('request request_id: %s, workload_id: %s cannot be found' % (request_id, workload_id))\n\n request = convert_request_to_dict(request)\n\n return request\n except sqlalchemy.orm.exc.NoResultFound as error:\n raise exceptions.NoObject('request request_id: %s, workload_id: %s cannot be found: %s' % (request_id, workload_id, error))", "def doi_info(self,doi):\n \n doi = _clean_doi(doi)\n \n url = self.BASE_URL + 'works/' + doi\n \n try:\n return self._make_get_request(url,models.work_single)\n except errors.RequestError:\n #TODO: Check for 404\n #last_response.status_code\n #TODO: Do this only if debugging is enabled\n if self.debug:\n #TODO: Also report code\n print(\"Error msg from server: \" + self.last_response.text)\n raise errors.InvalidDOI('Invalid DOI requested: ' + doi)\n \n #return self._make_get_request(url,models.Work,kwargs)", "async def request_job_info(self, job_id: str, *args, **kwargs) -> dict:\n # TODO: implement\n raise NotImplementedError('{} function \"request_job_info\" not implemented yet'.format(self.__class__.__name__))", "def export_getCurrentExecutionOrder(self,requestName):\n\n if type(requestName) in StringTypes:\n result = 
requestDB._getRequestAttribute('RequestID',requestName=requestName)\n if not result['OK']:\n return result\n requestID = result['Value']\n else:\n requestID = requestName\n\n result = requestDB.getCurrentExecutionOrder(requestID)\n return result", "def get_ride_request(reqID):\n req = RideRequest.query.get(reqID)\n return req", "def getworkunit(worker_id):\r\n\r\n worker_data = identify(worker_id)\r\n global time_start\r\n global started_working\r\n global work_status\r\n if work_status == Db.WorkStatusNames.has_work.value:\r\n\r\n saved_work_unit = Db.get_free_work_unit()\r\n if saved_work_unit is None:\r\n work_status = Db.WorkStatusNames.no_work.value\r\n else:\r\n if not started_working:\r\n print(\"Starting to work!\")\r\n time_start = time.time()\r\n started_working = True\r\n #It counts it's\r\n print(str(saved_work_unit[\"work_unit_id\"]) + \" \" + str(saved_work_unit))\r\n Db.assign_work_unit(saved_work_unit[\"work_unit_id\"], worker_id)\r\n return saved_work_unit\r\n\r\n\r\n\r\n return package_data({\"fail_message\": work_status})", "def getwork(self, data=None):\n if data is None:\n # Only if no data provided, it returns a WorkItem\n return WorkItem(**self.proxy.getwork())\n else:\n return self.proxy.getwork(data)", "def send_announcement_get_work_request(self):\n self.analysis_id = uuid.uuid4().hex\n while True:\n self.announce_socket.send_json(((self.analysis_id, self.work_addr),))\n try:\n return self.awthread.recv(self.work_socket, 250)\n except six.moves.queue.Empty:\n continue", "def export_getRequestFileStatus(self,requestName,lfns):\n if type(requestName) in StringTypes:\n result = requestDB._getRequestAttribute('RequestID',requestName=requestName)\n if not result['OK']:\n return result\n requestID = result['Value']\n else:\n requestID = requestName\n return requestDB.getRequestFileStatus(requestID,lfns)", "def qos_workload_get(self, workload_name, desired_attributes=None):\n return self.request( \"qos-workload-get\", {\n 'workload_name': [ workload_name, 'workload-name', [ basestring, 'None' ], False ],\n 'desired_attributes': [ desired_attributes, 'desired-attributes', [ QosWorkloadInfo, 'None' ], False ],\n }, {\n 'attributes': [ QosWorkloadInfo, False ],\n } )", "def list_work_requests(self, compartment_id, **kwargs):\n resource_path = \"/workRequests\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"resource_identifier\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_work_requests got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"resourceIdentifier\": kwargs.get(\"resource_identifier\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[WorkRequestSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n 
method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[WorkRequestSummary]\")", "def work_order_receipt_retrieve(self, work_order_id, id=None):\n pass", "def request_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"request_id\")", "def view_request_info(line):\n args = shlex.split(line)\n if not args:\n raise PappyException(\"Request id is required\")\n reqids = args[0]\n\n reqs = yield load_reqlist(reqids)\n\n for req in reqs:\n print ''\n print_request_extended(req)\n print ''", "def get_request(self):\n\t\t#self.__sem.lock()\n\t\ttry:\n\t\t\t\n\t\t\tr = self.get(thread.get_ident(),None)\n\t\t\tif r:\n\t\t\t\treturn r\n\t\t\traise VDOM_exception(_(\"No request associated with current thread\"))\n\t\texcept:\n\t\t\traise VDOM_exception(_(\"No request associated with current thread\"))\n\t\t#finally:\n\t\t#\tself.__sem.unlock()", "def vcac_worklfow_request(self):\n logging.info(\"Inside ucsvm_worklfow_request method base class\")\n return None", "def log_request(self, code='-', size='-'):\n print self._heading(\"HTTP Request\")\n #First, print the resource identifier and desired operation.\n print self.raw_requestline,\n #Second, print the request metadata\n for header, value in self.headers.items(): \n print header + \":\", value", "def export_getRequestStatus(self,requestName):\n\n if type(requestName) in StringTypes:\n result = requestDB._getRequestAttribute('RequestID',requestName=requestName)\n if not result['OK']:\n return result\n requestID = result['Value']\n else:\n requestID = requestName\n\n result = requestDB.getRequestStatus(requestID)\n return result", "def read_request(req_id: int, db: Session = Depends(get_db)):\n db_req = crud.get_request(db, req_id=req_id)\n if db_req is None:\n raise HTTPException(status_code=404, detail=\"Request not found\")\n return db_req", "def queryRequest(self, requestName):\n urlQuery = \"request/%s\" % requestName\n logging.info(\"Querying request '%s'\" % requestName)\n logging.info(\"Query: '%s':\" % urlQuery)\n r = self.restSender.get(urlQuery)\n print str(r)", "def get_review_request(self, rid):\r\n rsp = self.api_call('api/review-requests/%s/' % rid)\r\n return rsp['review_request']", "async def get_job_execution_details(\n self,\n request: metrics.GetJobExecutionDetailsRequest = None,\n *,\n retry: retries.Retry = gapic_v1.method.DEFAULT,\n timeout: float = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> pagers.GetJobExecutionDetailsAsyncPager:\n # Create or coerce a protobuf request object.\n request = metrics.GetJobExecutionDetailsRequest(request)\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method_async.wrap_method(\n self._client._transport.get_job_execution_details,\n default_timeout=None,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Send the request.\n response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)\n\n # This method is paged; wrap the response in a pager, which provides\n # an `__aiter__` convenience method.\n response = pagers.GetJobExecutionDetailsAsyncPager(\n method=rpc, request=request, response=response, metadata=metadata,\n )\n\n # Done; return the response.\n return response", "def req_id(self) -> str:\n pass", "def RetrieveWorkerInCapability(**argd):\n checkSign = argd[\"nsid\"] + \",\" + argd[\"renid\"]\n token = EncryptUtil.DecodeURLSafeBase64(argd[\"token\"])\n try:\n tokenRet = EncryptUtil.VerifySign(checkSign, token, 
GlobalConfigContext.AUTH_NameService_PublicKey)\n except:\n tokenRet = False\n if tokenRet is False:\n return CGateway._UnauthorizedServiceResponse(token)\n flag1, ret1 = CGateway.core.RetrieveHumanWithCapability(GlobalConfigContext.AUTH_INTERNAL_SESSION, argd[\"capabilityName\"])\n flag2, ret2 = CGateway.core.RetrieveAgentWithCapability(GlobalConfigContext.AUTH_INTERNAL_SESSION, argd[\"capabilityName\"])\n return CGateway._DumpResponse(ret1 + ret2)", "def request_id(self) -> Optional[str]:\n return self._request_id", "def work_order_receipt_retrieve(self, work_order_id, id=None):\n if work_order_id is None or not is_hex(work_order_id):\n logging.error(\"Work order id is empty or Invalid\")\n return create_jrpc_response(id, JsonRpcErrorCode.INVALID_PARAMETER,\n \"Worker id is empty or Invalid\")\n\n json_rpc_request = {\n \"jsonrpc\": \"2.0\",\n \"method\": \"WorkOrderReceiptRetrieve\",\n \"id\": id,\n \"params\": {\n \"workOrderId\": work_order_id\n }\n }\n response = self.__uri_client._postmsg(json.dumps(json_rpc_request))\n return response", "def get_intake_detail(request, intake_csid):\n return handle_request(request, 'cspace-services/intakes/%s' % intake_csid)", "def _fetch_request_info(request):\n try:\n subject_id = request.environ['api.cache.subject_id']\n method = request.environ['api.cache.method']\n version = request.environ['api.cache.version']\n except KeyError:\n return None\n else:\n return (subject_id, method, version)", "def read_work(self):\n # PROTECTED REGION ID(AsyncTabata.work_read) ENABLED START #\n return self._work\n # PROTECTED REGION END # // AsyncTabata.work_read", "def get_current_request():\n return crum.get_current_request()", "def export_getRequest(self,requestType):\n gLogger.info(\"RequestHandler.getRequest: Attempting to get request type\", requestType)\n try:\n res = requestDB.getRequest(requestType)\n return res\n except Exception,x:\n errStr = \"RequestManagerHandler.getRequest: Exception while getting request.\"\n gLogger.exception(errStr,requestType,lException=x)\n return S_ERROR(errStr)", "def test_get_provisioning_request_by_id(self):\n response = self.client.open('/api/provisioning/port/{requestId}'.format(requestId='requestId_example'),\n method='GET')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def reqid(self) -> str:", "def reqid(self) -> str:", "def request(self, *args, **kwargs):\n self.work_request_queue.put((args, kwargs))\n return self.result_queue.get()", "def request(self, *args, **kwargs):\n self.work_request_queue.put((args, kwargs))\n return self.result_queue.get()", "def get_request(self, request_id):\n doc_id = bson.objectid.ObjectId(request_id)\n coll = self._db.get_collection(COLLECTION_REQUEST)\n doc = coll.find_one(filter={\n '_id': doc_id\n })\n return doc", "def request_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"request_id\")", "def test_get_workout(self):\n response = self.client.open(\n '/workout/{id}'.format(id='id_example'),\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def work_order_receipt_lookup(self, worker_service_id,\n worker_id,\n requester_id,\n receipt_status, id=None):\n pass", "def make_work_request(self):\n request = StoreRequest()\n self.bb_client.read_wait(request, self.handle_request)", "def __get_request(self, request_id):\r\n if request_id not in self.__requests:\r\n self.__requests[request_id] = Request(request_id)\r\n return self.__requests[request_id]", "def __get_request(self, 
request_id):\r\n if request_id not in self.__requests:\r\n self.__requests[request_id] = Request(request_id)\r\n return self.__requests[request_id]", "def enqueue_agent_details(self, request_id: str, additional_data: Dict[str, Any]):\n base_data = {\"request_id\": request_id}\n for key, val in additional_data.items():\n base_data[key] = val\n self.message_queue.put(\n Packet(\n packet_type=PACKET_TYPE_AGENT_DETAILS,\n subject_id=self.request_id_to_channel_id[request_id],\n data=base_data,\n )\n )\n self.process_outgoing_queue(self.message_queue)\n self.log_metrics_for_packet(self.request_id_to_packet[request_id])\n # TODO Sometimes this request ID is lost, and we don't quite know why\n del self.request_id_to_channel_id[request_id]\n del self.request_id_to_packet[request_id]", "def get_shift_report_info(self, shift_id=0, shift_reg_id=0):\n try:\n personal_data = f'{\"-\" * 20}\\n'\n general_shift_info = []\n\n if shift_id == 0: # waiter\n shift_id = self.db_handler.get_shift_registration_by_shift_reg_id(shift_reg_id)[1]\n personal_data += self.get_shift_report_info_waiter(shift_reg_id)\n elif shift_reg_id == 0: # manager\n personal_data += self.get_shift_report_info_manager(shift_id)\n\n general_shift_info = self.get_shift_report_general_info(shift_id)\n\n msg = general_shift_info + personal_data\n\n return msg\n except Exception as err:\n method_name = sys._getframe().f_code.co_name\n\n self.logger.write_to_log('exception', 'model')\n self.logger.write_to_err_log(f'exception in method {method_name} - {err}', 'model')", "def info_request():\n return SentmanRequest(SentmanRequest.GET_INFO)", "def workflow_fetch_item_task_spec(dtool_smb_config):\n return {\n 'item_id': {'key': 'search_dict_task->result'},\n 'source': 'smb://test-share/1a1f9fad-8589-413e-9602-5bbd66bfe675',\n 'filename': 'fetched_item.txt',\n 'dtool_config': dtool_smb_config,\n 'stored_data': True,\n }", "def get_details():\r\n return run_operations.get_run_details(experiment_name, job_name).as_dict(key_transformer=camel_case_transformer)", "def GetJob(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def detail(self, req):\n return self.index(req)", "def get_provisioning_request_by_id(requestId):\n doc = PortProvisionRequest.get(id=requestId)\n if doc:\n return doc\n else:\n return 'Not Found', 404", "def RequestInformation(self, request, inInfo, outInfo):\n if self.need_to_read():\n self._read_up_front()\n self._update_time_steps()\n return 1 # NOTE: ALWAYS return 1 on pipeline methods", "def getRequest(self):\n try:\n return self._getRequest()\n except:\n log_func.fatal(u'Error get OLAP server request')\n return dict()", "def showDetails(self, request, access_type,\n page_name=None, params=None, **kwargs):\n\n logic = params['logic']\n ranking = logic.getFromKeyFieldsOr404(kwargs)\n student = ranking.student\n\n all_d = gci_task_model.TaskDifficultyTag.all().fetch(100)\n\n list_params = params.copy()\n list_params['list_description'] = self.DETAILS_MSG_FMT % student.user.name\n list_params['public_field_extra'] = lambda entity: {\n 'task': entity.title,\n 'org': entity.scope.name,\n 'points_difficulty': entity.taskDifficulty(all_d).value\n }\n list_params['public_field_keys'] = [\n 'task', 'org', 'points_difficulty', 'closed_on']\n list_params['public_field_names'] = [\n 'Task', 'Organization', 'Points (Difficulty)', 'Completed on']\n list_params['public_row_extra'] = lambda entity: {\n 'link': 
redirects.getPublicRedirect(entity, {'url_name': 'gci/task'}),\n }\n\n if lists.isDataRequest(request):\n return self.getListRankingDetailsData(request, list_params, student)\n\n contents = []\n order = ['closed_on']\n list = lists.getListGenerator(request, list_params, order=order, idx=0)\n contents.append(list)\n\n return self._list(request, list_params, contents, page_name)", "def taskbyddmreqid(self, **kwargs):\n rows = self.api.query(None, None, self.Task.TaskByDdmReqid_sql, ddmreqid=kwargs[\"ddmreqid\"])\n return rows", "def work(self, request):\n raise NotImplementedError", "def work_type_info(self,type_id):\n url = self.BASE_URL + 'types/' + type_id\n return self._make_get_request(url,models.pass_through)", "def _get_django_request():\n return execution_context.get_opencensus_attr(REQUEST_THREAD_LOCAL_KEY)", "def get_req(self, path):\n \n res = self.client.get(path)\n return res", "def workflow_details(self) -> Optional[pulumi.Input['ServerWorkflowDetailsArgs']]:\n return pulumi.get(self, \"workflow_details\")", "def workflow_details(self) -> Optional[pulumi.Input['ServerWorkflowDetailsArgs']]:\n return pulumi.get(self, \"workflow_details\")", "def get_request():\n return getattr(transaction.ctx, 'request', None)", "def _get_workload(self):\n return self._workload", "def trace_requests(request):\n tracer = initTracer()\n\n base_name = os.environ.get(\"FUNCTION_NAME\") + \"-trace-\"\n\n with tracer.span(name=base_name + \"metadata-call\"):\n\n import requests\n\n r = requests.get(\"http://metadata.google.internal/computeMetadata/v1/project/numeric-project-id\",\n headers={'Metadata-Flavor': 'Google'})\n\n\n return r.content", "def read(self) -> requests.request:\n # Check if id is set,\n if self.args.id is not None:\n self.REQUEST_URL += str(self.args.id)\n\n # Send GET request\n return requests.get(self.REQUEST_URL)", "def test_get_transaction_details_request(self):\n self.trans_details.get_transaction_details(\n trans_id = 123456,\n )", "def get_work_order_detail(self, date_range):\n work_order_obj = self.env[\"task.line\"]\n start = datetime.strptime(date_range.get(\"date_from\"), \"%Y-%m-%d\")\n end = datetime.strptime(date_range.get(\"date_to\"), \"%Y-%m-%d\")\n step = timedelta(days=1)\n workorder_detail = []\n while start <= end:\n sdate = str(\n datetime.strptime(\n str(start.date()) + \" 00:00:00\", DEFAULT_SERVER_DATETIME_FORMAT\n )\n )\n edate = str(\n datetime.strptime(\n str(start.date()) + \" 23:59:59\", DEFAULT_SERVER_DATETIME_FORMAT\n )\n )\n work_order_ids = work_order_obj.search(\n [(\"date_issued\", \">=\", sdate), (\"date_issued\", \"<=\", edate)]\n )\n if work_order_ids:\n parts_data = {}\n parts_value = []\n for parts_line in work_order_ids:\n if (\n parts_line.fleet_service_id\n and parts_line.fleet_service_id.state == \"done\"\n ):\n parts_dict = {\n \"wo_name\": parts_line.fleet_service_id\n and parts_line.fleet_service_id.name\n or \"\",\n \"vehicle_id\": parts_line.fleet_service_id\n and parts_line.fleet_service_id.vehicle_id\n and parts_line.fleet_service_id.vehicle_id.name\n or \"\",\n \"part_no\": parts_line.product_id\n and parts_line.product_id.default_code\n or \"\",\n \"part_name\": parts_line.product_id\n and parts_line.product_id.name\n or \"\",\n \"vehicle_make\": parts_line.vehicle_make_id\n and parts_line.vehicle_make_id.name\n or \"\",\n \"qty\": parts_line.qty or 0.0,\n \"uom\": parts_line.product_uom\n and parts_line.product_uom.name\n or \"\",\n \"old_part_return\": parts_line.old_part_return\n and \"Yes\"\n or \"No\",\n \"issued_by\": 
parts_line.issued_by\n and parts_line.issued_by.name\n or \"\",\n \"remarks\": parts_line.fleet_service_id\n and parts_line.fleet_service_id.note\n or \"\",\n }\n parts_value.append(parts_dict)\n if parts_value:\n parts_value = sorted(parts_value, key=lambda k: k[\"wo_name\"])\n parts_data = {\"date\": start.date(), \"value\": parts_value}\n workorder_detail.append(parts_data)\n start += step\n return workorder_detail", "def get_details(codetoget):\n\tTrainingComponentDetailsRequest= client.factory.create('TrainingComponentDetailsRequest')\n\tTrainingComponentDetailsRequest.Code=codetoget\n\tTrainingComponentInformationRequested=client.factory.create('TrainingComponentInformationRequested')\n\tTrainingComponentInformationRequested.ShowReleases=True\n\tTrainingComponentInformationRequested.ShowUnitGrid=True\n\tTrainingComponentInformationRequested.ShowComponents=True\n\tTrainingComponentDetailsRequest.InformationRequest=TrainingComponentInformationRequested\n\treturn client.service.GetDetails(TrainingComponentDetailsRequest)", "def job_detail(request: HttpRequest, job_id: str) -> HttpResponse:\n table = dynamodb.Table(table_name)\n sis_account_id = request.LTI[\"custom_canvas_account_sis_id\"]\n school_id = sis_account_id.split(\":\")[1]\n school_key = f'SCHOOL#{school_id.upper()}'\n job_query_params = {\n 'KeyConditionExpression': Key('pk').eq(school_key) & Key('sk').eq(job_id),\n 'ScanIndexForward': False,\n }\n logger.debug(f'Retrieving job details for job {job_id}.')\n job = table.query(**job_query_params)['Items'][0]\n\n # Update string timestamp to datetime.\n job.update(created_at=parse_datetime(job['created_at']))\n job.update(updated_at=parse_datetime(job['updated_at']))\n\n tasks_query_params = {\n 'KeyConditionExpression': Key('pk').eq(job_id),\n 'ScanIndexForward': False,\n }\n task_query_result = table.query(**tasks_query_params)\n tasks = task_query_result['Items']\n\n # If there are additional items to be retrieved for this job, the LastEvaluatedKey will be present\n # Use this key as the starting point for subsequent queries to build a full list\n while task_query_result.get('LastEvaluatedKey', False):\n tasks_query_params['ExclusiveStartKey'] = task_query_result.get('LastEvaluatedKey')\n task_query_result = table.query(**tasks_query_params)\n tasks.extend(task_query_result['Items'])\n\n context = {\n 'job': job,\n 'tasks': tasks,\n 'canvas_url': settings.CANVAS_URL\n }\n logger.debug(f'Retrieved job details for job {job_id}.', extra=context)\n return render(request, \"bulk_site_creator/job_detail.html\", context=context)", "def execute(self, requestName, conn = None, trans = False):\n self.sql = \"SELECT request_id from reqmgr_request WHERE \"\n self.sql += \"request_name=:request_name\"\n binds = {\"request_name\": requestName}\n reqID = self.dbi.processData(self.sql, binds, conn = conn, transaction = trans)\n result = self.formatOne(reqID)\n if result == []:\n return None\n return result[0]", "def get_current_request(self):\n\n return self.__current_request_mock", "def get(self, request_id):\n request = RequestModel.select_by_id(request_id)\n if request:\n return request.json(), 200\n return {'message': 'Request not found'}, 404", "def _retrieve_task_id(job_name, res_id, job_dict):\n if job_dict:\n workers = list(job_dict.keys())\n for worker in workers:\n for job in job_dict[worker]:\n if 'name' in job:\n if job['name'] == job_name:\n if res_id in job['args']:\n return job['id']\n elif 'request' in job:\n scheduled_job = job['request']\n if 'name' in scheduled_job:\n if 
scheduled_job['name'] == job_name:\n if res_id in scheduled_job['args']:\n return scheduled_job['id']\n\n return None", "def getObject(self, customerguid, jobguid=\"\",executionparams=None):", "def main():\n url = urllib.request.Request(sys.argv[1])\n with urllib.request.urlopen(url) as response:\n info = response.headers.get(\"X-Request-Id\")\n print(info)", "def x_request_id(self):\n return self._x_request_id", "def printable_request(self):\n req = self.response.request\n msg = \"-- Request : {} | {} -- \\r\\n\".format(req.method, req.url)\n msg += \"Headers: {} \\r\\n\".format(str(req.headers))\n msg += \"Body: {} \\r\\n\\r\\n\".format(str(req.body))\n return msg", "def find_obs_request_comment(self, request_id=None, project_name=None):\n if self.do_comments:\n comments = self.commentapi.get_comments(request_id=request_id, project_name=project_name)\n for c in comments.values():\n m = comment_marker_re.match(c['comment'])\n if m:\n return {\n 'id': c['id'],\n 'state': m.group('state'),\n 'result': m.group('result'),\n 'comment': c['comment'],\n 'revision': m.group('revision')}\n return {}", "def get_request(self):\n return self._request", "def test_get_work_from_edition_data(self):\n data = {\"works\": [{\"key\": \"/work/OL1234W\"}]}\n responses.add(\n responses.GET,\n \"https://openlibrary.org/work/OL1234W\",\n json={\"hi\": \"there\"},\n status=200,\n )\n result = self.connector.get_work_from_edition_data(data)\n self.assertEqual(result, {\"hi\": \"there\"})", "def get_job(self, identifier: str):\n self._log_operation('Getting job {i}'.format(i=identifier))\n return self._job_queue.get_job_details(identifier)", "def getexperimentinfo(expid):\n rdata = {}\n rdata['expId'] = expid\n res = requests.get(scbd_server_address + '/experiments/get_details', json=rdata)\n if res.status_code == 200:\n outstr = ''\n for cres in res.json()['details']:\n outstr += cres[0] + ':' + cres[1] + '<br>'\n # details=res.json()['details']\n return outstr\n return []", "def get_request_id(request_json):\n request_id = request_json['requestInfo'].get('requestId')\n if not request_id:\n request_id = request_json['requestInfo'].get('requestID')\n return request_id", "def _retrieve_job_id(job_name, res_id):\n active_jobs = celery_inspector.active()\n job_id = _retrieve_task_id(job_name, res_id, active_jobs)\n if not job_id:\n reserved_jobs = celery_inspector.reserved()\n job_id = _retrieve_task_id(job_name, res_id, reserved_jobs)\n if not job_id:\n scheduled_jobs = celery_inspector.scheduled()\n job_id = _retrieve_task_id(job_name, res_id, scheduled_jobs)\n return job_id", "def getRequest():\n return getLocal('request')", "def shn_logs_req_rheader(r):\r\n\r\n if r.representation == \"html\":\r\n if r.name == \"req\":\r\n req_record = r.record\r\n if req_record:\r\n rheader_tabs = shn_rheader_tabs( r,\r\n [(T(\"Edit Details\"), None),\r\n (T(\"Items\"), \"req_item\"),\r\n ]\r\n )\r\n rheader = DIV( TABLE(\r\n TR( TH( T(\"Date Requested\") + \": \"),\r\n req_record.date,\r\n TH( T(\"Date Required\") + \": \"),\r\n req_record.date_required,\r\n ),\r\n TR( TH( T(\"Requested By Warehouse\") + \": \"),\r\n inventory_store_represent(req_record.inventory_store_id),\r\n ),\r\n TR( TH( T(\"Commit. Status\") + \": \"),\r\n log_req_status_dict.get(req_record.commit_status),\r\n TH( T(\"Transit. Status\") + \": \"),\r\n log_req_status_dict.get(req_record.transit_status),\r\n TH( T(\"Fulfil. 
Status\") + \": \"),\r\n log_req_status_dict.get(req_record.fulfil_status)\r\n ), \r\n TR( TH( T(\"Comments\") + \": \"),\r\n TD(req_record.comments, _colspan=3)\r\n ),\r\n ),\r\n rheader_tabs\r\n )\r\n return rheader\r\n return None", "def rinex_info(rinex_fname,\n nav_fname,\n work_path=None):\n if not os.path.isfile(rinex_fname):\n raise ValueError('RINEX observation file {} does not exist'.format(rinex_fname))\n if not os.path.isfile(nav_fname):\n raise ValueError('RINEX navigation file {} does not exist'.format(nav_fname))\n # information mapping\n info = {}\n def process_output(line):\n if line.startswith('Receiver type'):\n info['receiver'] = line.split(':')[1].split('(')[0].strip()\n elif line.lstrip().startswith('antenna WGS 84 (xyz)'):\n # make sure units are [m]\n assert line.rstrip().endswith('(m)')\n info['xyz'] = map(float, line.split(':')[1].split('(')[0].split())\n elif line.lstrip().startswith('antenna WGS 84 (geo)'):\n if line.split(':')[1].lstrip()[0] in ['N', 'S']:\n # skip arcmin, arcsec line\n pass\n else:\n lat, _, lon, _ = line.split(':')[1].split(None, 3)\n info['lat'] = float(lat)\n lon = float(lon)\n while lon > 180:\n lon -= 360\n info['lon'] = lon\n elif line.lstrip().startswith('WGS 84 height'):\n assert line.rstrip().endswith('m')\n info['height'] = float(line.split(':')[1].rstrip()[:-1])\n elif line.startswith('|qc - header| position'):\n # make sure units are [m]\n assert line.rstrip()[-1] == 'm'\n info['xyz error'] = float(line.split(':')[1].rstrip()[:-1])\n elif line.startswith('Observation interval'):\n info['interval'] = float(line.split(':')[1].split()[0])\n elif line.startswith('Moving average MP12'):\n info['MP12'] = float(line.split(':')[1].rstrip()[:-1])\n elif line.startswith('Moving average MP21'):\n info['MP21'] = float(line.split(':')[1].rstrip()[:-1])\n # query the RINEX file via teqc quality check --- process in given\n # work area to avoid intermediate file pollution\n with SmartTempDir(work_path) as work_path:\n intermediate_rinex_fname = replace_path(work_path, rinex_fname)\n os.symlink(os.path.abspath(rinex_fname),\n intermediate_rinex_fname)\n intermediate_nav_fname = replace_path(work_path, nav_fname)\n os.symlink(os.path.abspath(nav_fname),\n intermediate_nav_fname)\n sh.teqc('+qc',\n '+quiet',\n '-R',\n '-S',\n '-E',\n '-C',\n '-J',\n '-nav', intermediate_nav_fname,\n intermediate_rinex_fname,\n _cwd=work_path,\n _out=process_output,\n _err=sys.stderr)\n os.remove(intermediate_rinex_fname)\n os.remove(intermediate_nav_fname)\n return info", "def hit_details(hit_id, sandbox, recruiter):\n prolific_check(recruiter, sandbox)\n rec = by_name(recruiter, skip_config_validation=True)\n details = rec.hit_details(hit_id, sandbox)\n print(json.dumps(details, indent=4, default=str))", "def get_wo_mthly_smry(self, workorder_browse):\n wo_summary_data = []\n wo_check_dict = {}\n no = 0\n if workorder_browse:\n for work_rec in workorder_browse:\n if work_rec.state and work_rec.state == \"done\":\n no += 1\n identification = \"\"\n repair_line_data = \"\"\n if work_rec.vehicle_id:\n identification += work_rec.vehicle_id.name\n if work_rec.vehicle_id.f_brand_id:\n identification += \" \" + work_rec.vehicle_id.f_brand_id.name\n if work_rec.vehicle_id.model_id:\n identification += \" \" + work_rec.vehicle_id.model_id.name\n for repaire_line in work_rec.repair_line_ids:\n if repaire_line.complete is True:\n if (\n repaire_line.repair_type_id\n and repaire_line.repair_type_id.name\n ):\n repair_line_data += (\n repaire_line.repair_type_id.name + \", \"\n 
)\n if work_rec.parts_ids:\n for parts_line in work_rec.parts_ids:\n if work_rec.id in wo_check_dict.keys():\n parts_data = {\n \"no\": -1,\n \"location\": \"\",\n \"type\": \"\",\n \"wo\": \"\",\n \"identification\": \"\",\n \"vin\": \"\",\n \"plate_no\": \"\",\n \"work_performed\": \"\",\n \"part\": parts_line.product_id\n and parts_line.product_id.default_code\n or \"\",\n \"qty\": parts_line.qty or 0.0,\n \"uom\": parts_line.product_uom\n and parts_line.product_uom.name\n or \"\",\n }\n wo_summary_data.append(parts_data)\n else:\n wo_check_dict[work_rec.id] = work_rec.id\n parts_data = {\n \"no\": no,\n \"location\": work_rec.team_id\n and work_rec.team_id.name\n or \"\",\n \"type\": work_rec.main_type or \"\",\n \"wo\": work_rec.name or \"\",\n \"identification\": identification or \"\",\n \"vin\": work_rec.vehicle_id\n and work_rec.vehicle_id.vin_sn\n or \"\",\n \"plate_no\": work_rec.vehicle_id\n and work_rec.vehicle_id.license_plate\n or \"\",\n \"work_performed\": repair_line_data\n and repair_line_data[:-2]\n or \"\",\n \"part\": parts_line.product_id\n and parts_line.product_id.default_code\n or \"\",\n \"qty\": parts_line.qty or 0.0,\n \"uom\": parts_line.product_uom\n and parts_line.product_uom.name\n or \"\",\n }\n wo_summary_data.append(parts_data)\n else:\n parts_data = {\n \"no\": no,\n \"location\": work_rec.team_id\n and work_rec.team_id.name\n or \"\",\n \"type\": work_rec.main_type or \"\",\n \"wo\": work_rec.name or \"\",\n \"identification\": identification or \"\",\n \"vin\": work_rec.vehicle_id\n and work_rec.vehicle_id.vin_sn\n or \"\",\n \"plate_no\": work_rec.vehicle_id\n and work_rec.vehicle_id.license_plate\n or \"\",\n \"work_performed\": repair_line_data\n and repair_line_data[:-2]\n or \"\",\n \"vehicle_make\": \"\",\n \"qty\": \"\",\n \"uom\": \"\",\n }\n wo_summary_data.append(parts_data)\n if not wo_summary_data:\n msg = _(\n \"Warning! 
\\n\\\n No data Available for selected work order.\"\n )\n raise UserError(msg)\n return wo_summary_data", "def get(self, request, id):\n workflow = get_object_or_404(Workflow, id=id)\n serializer = WorkflowDetailedSerializer(workflow, context={\"request\": request})\n return Response(serializer.data)", "def get_request_journal(self):\n response = requests.get(self.requests_url)\n if response.status_code != http_client.OK:\n raise ValueError(response.text, response.status_code)\n response_body = json.loads(response.text)\n return response_body[\"requests\"]", "def getJob(workload):\n job = Job()\n job[\"task\"] = workload.getTask(\"reco\").getPathName()\n job[\"workflow\"] = workload.name()\n job[\"location\"] = \"T1_US_FNAL\"\n job[\"owner\"] = \"evansde77\"\n job[\"group\"] = \"DMWM\"\n return job", "def job_details(user_data, cache, job_id):\n user = cache.ensure_user(user_data)\n job = cache.get_job(user, job_id)\n\n if not job or not job.project_id:\n return result_response(JobDetailsResponseRPC(), None)\n\n try:\n job.project = cache.get_project(user, job.project_id)\n except IntermittentProjectIdError:\n pass\n\n return result_response(JobDetailsResponseRPC(), job)", "def describe_workteam(WorkteamName=None):\n pass", "def GetOperation(\n self,\n request: google.longrunning.operations_pb2.GetOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.longrunning.operations_pb2.Operation:", "def retrieveDelegationRequest( self, requestId, userDN ):\n cmd = \"SELECT Pem FROM `ProxyDB_Requests` WHERE Id = %s AND UserDN = '%s'\" % ( requestId,\n userDN )\n retVal = self._query( cmd )\n if not retVal[ 'OK' ]:\n return retVal\n data = retVal[ 'Value' ]\n if len( data ) == 0:\n return S_ERROR( \"No requests with id %s\" % requestId )\n request = X509Request()\n retVal = request.loadAllFromString( data[0][0] )\n if not retVal[ 'OK' ]:\n return retVal\n return S_OK( request )", "def t_get_process(self, *args, **kwargs):\n d_request = {}\n d_ret = {}\n b_status = False\n hits = 0\n for k, v in kwargs.items():\n if k == 'request': d_request = v\n d_meta = d_request['meta']\n str_path = '/api/v1' + d_meta['path']\n d_ret = self.DB_get(path = str_path)\n return {'d_ret': d_ret,\n 'status': True}", "def get_request(self, target_uri, resource_type, params=None):\n message, sc = self.request(target_uri, GET, params=params)\n operation = 'GET {resource_type}'.format(resource_type=resource_type)\n self.check_status_code_success(operation, sc, message)\n return message" ]
[ "0.6255874", "0.5651409", "0.5530125", "0.5476779", "0.5332237", "0.5308513", "0.52760774", "0.51337117", "0.5100316", "0.5034349", "0.4997517", "0.4974544", "0.4972364", "0.49616352", "0.49303", "0.49270195", "0.4919135", "0.49007675", "0.4880712", "0.485593", "0.48542556", "0.4843946", "0.4834108", "0.48334804", "0.48221567", "0.48186445", "0.48131502", "0.48106113", "0.47725657", "0.47497234", "0.4727525", "0.47217935", "0.47201034", "0.4719503", "0.47112638", "0.47112638", "0.46766865", "0.46766865", "0.4671248", "0.4664316", "0.46627924", "0.46217266", "0.4617517", "0.46152377", "0.46152377", "0.46132028", "0.46131882", "0.4590871", "0.45741275", "0.45710313", "0.45672217", "0.45661807", "0.45649183", "0.45579025", "0.45519358", "0.45309764", "0.45119387", "0.45090684", "0.4506318", "0.44892925", "0.44786918", "0.4472319", "0.4472319", "0.44605926", "0.44598687", "0.44563097", "0.44541362", "0.44472858", "0.44450045", "0.44431177", "0.4439871", "0.44335455", "0.44182783", "0.44160473", "0.44086203", "0.4407237", "0.43998277", "0.43935314", "0.43921426", "0.4390126", "0.43805987", "0.43760172", "0.4373987", "0.43688035", "0.43668264", "0.43660918", "0.43550715", "0.43514863", "0.43482444", "0.4342709", "0.43414944", "0.43354154", "0.43329906", "0.43276402", "0.43210518", "0.43137315", "0.4311996", "0.4311979", "0.43081188", "0.43045178" ]
0.4938437
14
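For reference, a minimal polling sketch for the get_tagging_work_request document above, under the same assumed OCI Python SDK setup; the work request ID is a placeholder that would normally come from the opc-work-request-id header of a prior asynchronous call, and the terminal status names are assumed from OCI work request conventions:

import time
import oci

config = oci.config.from_file()
identity_client = oci.identity.IdentityClient(config)

# Normally taken from the opc-work-request-id response header of an
# earlier asynchronous tagging operation; placeholder value here.
work_request_id = "ocid1.taggingworkrequest.oc1..exampleuniqueid"

# Poll until the work request reaches a terminal state.
while True:
    work_request = identity_client.get_tagging_work_request(
        work_request_id=work_request_id).data
    if work_request.status in ("SUCCEEDED", "FAILED", "CANCELED"):
        break
    time.sleep(10)  # back off between polls

print("Work request finished with status:", work_request.status)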
Get the specified tenancy's information.
def get_tenancy(self, tenancy_id, **kwargs): resource_path = "/tenancies/{tenancyId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_tenancy got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tenancyId": tenancy_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="Tenancy") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="Tenancy")
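For reference, a minimal usage sketch of the get_tenancy code listing above, assuming the standard OCI Python SDK setup; the tenancy OCID is read from the loaded config, where it normally sits under the "tenancy" key:

import oci

config = oci.config.from_file()
identity_client = oci.identity.IdentityClient(config)

# The tenancy OCID is normally already present in the loaded config.
tenancy = identity_client.get_tenancy(tenancy_id=config["tenancy"]).data
print(tenancy.name, tenancy.home_region_key)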
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tenants(self):\n # print \"tenant list is %s\" % self.auth.tenants.list()\n if not self._tenancy:\n self._tenancy = {}\n for tenant in self.auth.tenants.list():\n t = Tenant(tenant, self)\n self._tenancy[t[\"name\"]] = t\n return self._tenancy", "def get_tenants(self):", "def get_tenants(self, **kwargs):\n url = self.get_url('tenants', kwargs, ['begin', 'end'])\n return self.api_client.get(url).json()", "def get_tenant_usage(self, tenant_id):\n return self._get(_quota.TenantUsage, tenant_id)", "def get_tenants():\n # these are the tenant_id strings configured for the service -\n tenants_strings = conf.tenants\n result = []\n # the tenants service is a special case, as it must be a) configured to serve all tenants and b) actually maintains\n # the list of tenants in its own DB. in this case, we return the empty list since the tenants service will use direct\n # db access to get necessary data.\n if conf.service_name == 'tenants' and tenants_strings[0] == '*':\n return result\n\n # in dev mode, services can be configured to not use the security kernel, in which case we must get\n # configuration for a \"dev\" tenant directly from the service configs:\n if not conf.use_sk:\n for tenant in tenants_strings:\n t = {'tenant_id': tenant,\n 'iss': conf.dev_iss,\n 'public_key': conf.dev_jwt_public_key,\n 'default_access_token_ttl': conf.dev_default_access_token_ttl,\n 'default_refresh_token_ttl': conf.dev_default_refresh_token_ttl,\n }\n result.append(t)\n\n else:\n # TODO -- look up tenants in the tenants API, get the associated parameters (including sk location)\n pass\n return result", "def get_quotas_tenant(self, **_params):\r\n return self.get(self.quota_path % 'tenant', params=_params)", "def list_tenants(self):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/tenants\"\n _headers = {'x-auth-token': self.cloud_admin_info['token_project']}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\" no response from Server\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\n \" tenant list Failed with status %s \" %\n response.status)\n return response.status\n output = json.loads(response.data)\n LOG_OBJ.info(\"Tenant List : %s \" % output)\n return output[\"tenants\"]", "def tenant(self):\n return self._tenant", "def _get(self, path, params=None):\n return self._api.get_json(path, headers={\"Hawkular-Tenant\": self.tenant_id}, params=params)", "def get_tenant(key, tenant_name):\n for tenant in key.tenants.list():\n if tenant.name == tenant_name:\n return tenant\n\n return None", "def get(self, tenant_id):\n response = self.client.get('/quotas/%s' % tenant_id)\n\n return response.json()", "def get(identifier: str):\n if identifier.startswith('T'):\n return {'message': babel('No information on temp registrations.')}, 200\n\n business = Business.find_by_identifier(identifier)\n\n if not business:\n return jsonify({'message': f'{identifier} not found'}), HTTPStatus.NOT_FOUND\n\n # check authorization\n if not authorized(identifier, jwt, action=['view']):\n return jsonify({'message':\n f'You are not authorized to view business {identifier}.'}), \\\n HTTPStatus.UNAUTHORIZED\n\n return jsonify(business=business.json())", "def tenant_access(self) -> pulumi.Output['outputs.ServiceTenantAccess']:\n return pulumi.get(self, \"tenant_access\")", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def get_all_tenants():\n tenants = identity.Tenant.query.all()\n return tenants", 
"def get(self) -> Info:\n return InfoService.get()", "def info(self):\n resp = requests.get(\"%s/api/info\"%self.urlbase, verify=False)\n return resp.json", "def get_account_info(self):\n resp = requests.get(\n self.URL + 'info/',\n headers={'Authorization': 'Token ' + self.api_key}\n )\n\n return self.__handle_response(resp)", "def show(self, req, tenant_id, id):\n LOG.info(\"Indexing quota info for tenant '%(id)s'\\n\"\n \"req : '%(req)s'\\n\\n\", {\"id\": id, \"req\": req})\n\n context = req.environ[wsgi.CONTEXT_KEY]\n if id != tenant_id and not context.is_admin:\n raise exception.TroveOperationAuthError(\n tenant_id=tenant_id\n )\n\n usages = quota_engine.get_all_quota_usages_by_tenant(id)\n limits = quota_engine.get_all_quotas_by_tenant(id)\n for key in usages.keys():\n setattr(usages[key], \"limit\", limits[key].hard_limit)\n return wsgi.Result(views.QuotaUsageView(usages).data(), 200)", "def tenant(self, request):\n warnings.warn(\n '\"tenant\" Quota API method is deprecated, use \"project\" instead')\n return self._project(request, 'tenant')", "def getTenantByName(self,tenantName,description):\n\n url = CIC_TENANT_ENDPOINT + \"?\" + urllib.urlencode(\n {\n \"instanceName\":tenantName,\n \"description\":description\n })\n\n logger.debug(\"Calling url {}\".format(url))\n\n try:\n response = self.httpHandler.sendHttpRequest(url)\n except urllib2.HTTPError as e:\n\n logger.debug(traceback.format_exc())\n\n if e.code == 404:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise KeyError(\n \"Tenant '{}' could not be found in TMS\".format(tenantName),\n \"CIC_TENANT_LOOKUP_ERROR\")\n\n elif e.code == 403:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise RuntimeError(\n \"User {} has no permission to look up 'tenants' in {} {}\".format(self.cicUser, self.cicUrl, body),\n \"CIC_NO_ACCESS\"\n )\n\n else:\n raise\n else:\n responseString = response.read()\n return json.loads(responseString)", "def get_current_tenant():\n return getattr(_thread_locals, \"tenant\", None)", "def info():\n if g.party_id is None:\n # No party is configured for the current site.\n abort(404)\n\n party = party_service.get_party(g.party_id)\n\n return {\n 'party': party,\n }", "def get_tenant_config(tenant_id):\n for tenant in tenants:\n if tenant['tenant_id'] == tenant_id:\n return tenant\n raise errors.BaseTapisError(\"invalid tenant id.\")", "def show_quota(self, tenant_id, **_params):\r\n return self.get(self.quota_path % (tenant_id), params=_params)", "def info(self):\n path = self._get_path('info')\n \n response = self._GET(path)\n self._set_attrs_to_values(response)\n return response", "def get_account_info(self):\n resource = self.domain + \"/account\"\n self.logger.debug(\"Pulling data from {0}\".format(resource))\n response = self.session.get(resource)\n\n if response.status_code != requests.codes.ok:\n return response.raise_for_status()\n data = response.text\n root = Et.fromstring(data)\n bf = BadgerFish(dict_type=dict)\n account_info = bf.data(root)\n return account_info", "def info(self):\n return self._fetch_json('/api/info')", "def tenant(self) -> \"str\":\n return self._attrs.get(\"tenant\")", "def get_tenant_resources(self):\n resources = self.context[\"tenant\"].get(\"resources\", [])\n if not resources:\n msg = (\"No resources found for tenant: %s\"\n % self.context[\"tenant\"].get(\"name\"))\n raise exceptions.NotFoundException(message=msg)\n for res_id in resources:\n 
self._get_resource(res_id)", "def plan_get(request):\n company = auth_api_key(request)\n plan = get_and_check_plan(request, company)\n return plan", "def details(self, identifier):\n return self.client.request_with_method(Methods.GET % (self.name, identifier,))", "def tenancies(self) -> Iterable[dto.Tenancy]:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )", "def getTenant(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def fusion_api_get_fabric_manager_tenants(self, uri, name=None, param='', api=None, headers=None):\n param = '/tenants/'\n if name:\n param += '?&filter=\"\\'name\\' == \\'%s\\'\"' % (name)\n return self.fabricmanager.get(uri=uri, api=api, headers=headers, param=param)", "def tenant_access(self) -> Optional[pulumi.Input['ServiceTenantAccessArgs']]:\n return pulumi.get(self, \"tenant_access\")", "def tenant_access(self) -> Optional[pulumi.Input['ServiceTenantAccessArgs']]:\n return pulumi.get(self, \"tenant_access\")", "def account_info(request):\r\n user = request.user\r\n\r\n return _api_response(request, user.safe_data())", "def backend_info_get(context, host):\n result = _backend_info_query(context, host)\n return result", "def get(name, tenant_name, client, logger):\n utils.explicit_tenant_name_message(tenant_name, logger)\n graceful_msg = 'Requested site with name `{0}` was not found in this ' \\\n 'tenant'.format(name)\n with handle_client_error(404, graceful_msg, logger):\n logger.info('Getting info for site `{0}`...'.format(name))\n site_details = client.sites.get(name)\n print_single(SITES_COLUMNS, site_details, 'Requested site info:')", "def account_info(self):\n url, params, headers = self.request(\"/account/info\", method='GET')\n\n return self.rest_client.GET(url, headers)", "def getInfo():", "def info(self):\n return requests.get(self.info_url + self.pid).json()", "def get():\n return jsonify({'doctor': 'Doctor API'}), 200", "def get_tenant_by_id(tenant_id):\n tenant = identity.Tenant.query.filter_by(id=tenant_id).first()\n if tenant:\n return tenant\n abort(404, f\"Unable to find tenant with id: {tenant_id}\")", "def get_tenant_id(self, tenant_name):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/tenants\"\n _headers = {'x-auth-token': self.cloud_admin_info['token_project']}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\"No response from Server while getting tenants\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Tenant list Failed with status %s \" %\n response.status)\n return response.status\n output = json.loads(response.data)\n for tenant in output['tenants']:\n if tenant['name'] == tenant_name:\n LOG_OBJ.debug(\"Tenant Details : %s \" % tenant)\n return tenant['id']\n\n LOG_OBJ.error(\"There is NO tenant with name: %s\" % tenant_name)\n return None", "def access_info_get(context, storage_id):\n return _access_info_get(context, storage_id)", "def info(self):\n _, data = yield from self.transport.perform_request('GET', '/')\n return data", "def get_info(self):\n pass", "def get_info(self):\n pass", "def get_account_details(self):\n pass", "def get_info(self, name):\n return self.info[name]", "def get_active_tenants(database_file_path):\n # remove the line below in case you have implemented the query.\n raise NotImplementedError\n\n 
query = \"\"\"\n \"\"\"\n\n return _fetch_result_from_database(query, database_file_path)", "def _tenant_network(self):\n port = self._connection.network.ports.find_by_device_owner('network:router_interface')\n if port:\n return self._connection.network.networks.get(port.network_id)\n else:\n raise errors.ImproperlyConfiguredError('Could not find tenancy network')", "def get_info(self) -> Optional[Dict[str, Any]]:", "def get_info(self):\n url = self._url_for_op('info')\n data= None # This will be a GET request since data is None\n response = self._get_raw_response(self._get_json_headers,\n self._get_json_response, url, data)\n response = json.loads(response)\n self.api_info = response['results']\n return self.api_info", "def info(self, resource, id):\n return self.request('/' + resource + '/' + str(id))", "def getTenantByUid(self,uid):\n\n logger.debug(\"Call to getTenantByUid - uid: {}\".format(uid))\n\n try:\n response = self.httpHandler.sendHttpRequest(CIC_TENANT_ENDPOINT+\"?uuid=\"+uid)\n except urllib2.HTTPError as e:\n\n logger.debug(traceback.format_exc())\n\n if e.code == 404:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise KeyError(\"Tenant with uuid {} could not be found in TMS.\".format(uid),\"CIC_TENANT_UID_NOT_FOUND_ERR\")\n\n elif e.code == 403:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise RuntimeError(\n \"User {} has no permission to look up 'tenants' in {} {}\".format(self.cicUser, self.cicUrl, body),\n \"CIC_NO_ACCESS\"\n )\n\n else:\n raise\n else:\n responseString = response.read()\n return json.loads(responseString)", "def get(self):\n return {'klant': self.customer}", "def getInfo(self):\n return self.info", "def requester(get_args: dict) -> dict:\n get_args.update(dict(apikey = apikey))\n response = requests.get(URL, params = get_args)\n return response.json()", "def get_current_tenant_value():\n current_tenant = get_current_tenant()\n if not current_tenant:\n return None\n\n try:\n current_tenant = list(current_tenant)\n except TypeError:\n return current_tenant.tenant_value\n\n values = []\n for t in current_tenant:\n values.append(t.tenant_value)\n return values", "def get_account():\n\n wallet = \"TTfoWGU2M939cgZm8CksPtz1ytJRM9GiN7\"\n\n url = \"https://api.trongrid.io/v1/accounts/{}\".format(wallet)\n\n print(url)\n\n response = requests.request(\"GET\", url)\n\n print(response.text)", "def get_tenant_id(self, **kwargs):\n if self.authenticate() == 200:\n return self.tenant_id\n else:\n return None", "def get_by_id(tenant_id):\n tenant = Tenant.find_by_id(tenant_id)\n if tenant:\n tenant_schema = TenantSchema()\n return tenant_schema.dump(tenant)\n\n raise BusinessException(\"Invalid tenant\", HTTPStatus.BAD_REQUEST)", "def getInfo(self):\n return self._info", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> str:\n return pulumi.get(self, \"tenant_id\")", "def get(self):\n\n return self.get_request_handler(request.headers).get_all_ethnicities()", "def get_quota(self, tenant_id):\n return self._get(_quota.Quota, tenant_id)", "def getAllTenants(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def 
quick_info_retrieve_view(request):\n kind_of_ballot_item = request.GET.get('kind_of_ballot_item', \"\")\n ballot_item_we_vote_id = request.GET.get('ballot_item_we_vote_id', \"\")\n return quick_info_retrieve_for_api(kind_of_ballot_item=kind_of_ballot_item,\n ballot_item_we_vote_id=ballot_item_we_vote_id)", "def get_info(request):\n return {'Hello': 'World'}", "def get_info(request):\n return {'Hello': 'World'}", "def get(self):\r\n url = '{0}/subdomains/{1}'.format(self.parent.get_url(),\r\n port.to_u(self.object_id))\r\n return http.Request('GET', url), parsers.parse_json", "def _get(self) -> json_api.generic.Metadata:\n api_endpoint = ApiEndpoints.assets.fields\n return api_endpoint.perform_request(http=self.auth.http, asset_type=self.parent.ASSET_TYPE)", "def find_tenant_information_by_domain_name(self, domain_name):\n return_type = ClientResult(self.context, TenantInformation())\n params = {\"domainName\": domain_name}\n qry = FunctionQuery(self, \"findTenantInformationByDomainName\", params, return_type)\n self.context.add_query(qry)\n return return_type", "def tenant_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"tenant_id\")", "def tenant_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"tenant_id\")", "def getObjectInfo(fluiddb, objectId):\n return fluiddb.objects[objectId].get(showAbout=True)", "def get_object_tenant(instance):\n field = get_tenant_field(instance)\n\n if field.primary_key:\n return instance\n\n return getattr(instance, field.name, None)", "def detail(request, deployment_id):\n if request.method == \"GET\": \n try:\n dep = Deployment.objects.get(pk=deployment_id)\n return JsonResponse(serializers.serialize(\"json\", [dep]), safe=False)\n except ObjectDoesNotExist as e:\n return JsonResponse({'message': 'Error retriving deployment.'})\n else:\n return JsonResponse({'message':'not found'}, status=404)", "def export_tenants(self):\n print('\\n=== Exporting all tenant data...')\n\n tenant = dict(self.client.tenant)\n\n print('- Exporting tenant:', tenant['name'])\n\n json = {\n 'id': self.get_id(tenant),\n 'href': tenant['href'],\n 'name': tenant['name'],\n 'key': tenant['key'],\n 'createdAt': tenant['created_at'].isoformat(),\n 'modifiedAt': tenant['modified_at'].isoformat(),\n 'customData': self.get_custom_data(tenant),\n }\n\n #for application in tenant.applications:\n\n self.write('%s/%s/meta' % (self.location, json['id']), json)\n\n print('=== Done!\\n')", "def get_tenant_keyring(self) -> Optional[ImaKeyring]:\n return self.keyrings.get(\"tenant_keyring\")", "def get_information():\n session = Session()\n session.headers.update(headers)\n try:\n response = session.get(url, params=parameters)\n data = json.loads(response.text)\n with open('data.json', 'w') as file:\n json.dump(data, file, ensure_ascii=False, indent=4)\n return data\n except (ConnectionError, Timeout, TooManyRedirects) as e:\n print(e)", "def get_trending_entry_detail_get(self, identifier, trendingEntryType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Trending/Details/{trendingEntryType}/{identifier}/\"))", "def tenant(self, name):\n # Returns a Tenant object for the given name.\n # Uses Keystone API to perform a direct name lookup,\n # as this is expected to work via name.\n\n data = self.auth.tenant_by_name(name)\n t = Tenant(data[\"tenant\"], self)\n return t", "async def get(self, **context):\n return {}", "def stats(self):\r\n\t\tdata = 
self._get('global/', query=None)\r\n\t\treturn data", "def getUsageInfo(self):\n return self.jsonRequest(\"/api/v1/usage\", { \"apiKey\": self._apiKey })", "def get():\n\n # \\todo List of available data, fetched and processed\n\n return jsonify({'valid_resources': ['balance', 'balance_usd', 'trade_history', 'balance_norm_price_history', 'open_orders']})", "def get_building_info(self, prop_id):\n resource = '{0}/building/{1}'.format(self.domain, prop_id)\n self.logger.debug(\"Pulling data from {0}\".format(resource))\n response = self.session.get(resource)\n\n if response.status_code != requests.codes.ok:\n return response.raise_for_status()\n root = Et.fromstring(response.text)\n bf = BadgerFish(dict_type=dict)\n building_info = bf.data(root)\n return building_info", "def get(self):\n\t\treturn self.connection.get(self.base_uri)", "def test_get_tenant_by_id(sample_identity):\n access_token, tenant, tenant_user, tc = sample_identity\n new_access_token = tc.post(\n \"api/v1/authentication/login\",\n json={\"username\": tenant_user.username, \"password\": \"1234\"},\n ).json[\"data\"][\"access_token\"]\n headers = {\"Authorization\": \"Bearer \" + new_access_token}\n response = tc.get(f\"api/v1/identity/tenant/{tenant.id}\", headers=headers)\n assert response.status_code == 200, \"Failed to fetch Tenant By ID\"\n assert response.json[\"data\"][\"name\"] == tenant.name, \"Tenant name doesn't match\"", "def info(client):\n\n return client.get_info()", "def bucket_info(request):\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n return JsonResponse(request.session[\"analytics\"].bucket_info())", "def get(self):\n query = Boat.query()\n results = query.fetch(limit = MAX_BOATS)\n boat_dicts = []\n for match in results:\n boat_dicts.append({'id': match.id, 'name': match.name, 'type': match.type,\n 'length': match.length, 'at_sea': match.at_sea })\n self.response.headers['Content-Type'] = 'application/json'\n self.response.write(json.dumps(boat_dicts))", "def get_torsion_info(self):\n return" ]
[ "0.633115", "0.6270229", "0.6024418", "0.5914824", "0.59081745", "0.5818619", "0.5763514", "0.5751513", "0.5732008", "0.5686052", "0.56732315", "0.5667248", "0.5657612", "0.5629676", "0.561599", "0.56010634", "0.559895", "0.5592223", "0.5585628", "0.5580276", "0.55515385", "0.55401057", "0.5529882", "0.55057806", "0.5500245", "0.54857826", "0.54840803", "0.54407525", "0.5438957", "0.54318666", "0.54173636", "0.5416983", "0.54107666", "0.53991085", "0.53911376", "0.53787285", "0.53787285", "0.53787094", "0.5356689", "0.53403723", "0.5332437", "0.5325345", "0.5304916", "0.52949786", "0.52823865", "0.52760506", "0.527198", "0.52300656", "0.51936716", "0.51936716", "0.5191601", "0.5183605", "0.5167503", "0.51635987", "0.51552314", "0.51535535", "0.5150613", "0.5143649", "0.5113071", "0.5092607", "0.50805455", "0.5067799", "0.50376844", "0.5032607", "0.5026252", "0.5013709", "0.50061774", "0.50061774", "0.50061774", "0.50061774", "0.5005331", "0.50048774", "0.4998905", "0.49988964", "0.49917874", "0.49917874", "0.49871325", "0.4981996", "0.49807307", "0.4976563", "0.4976563", "0.49712813", "0.496962", "0.4969579", "0.496343", "0.4962859", "0.49623492", "0.49536923", "0.49509642", "0.49472147", "0.4935274", "0.49322307", "0.49321666", "0.49320894", "0.49275324", "0.49258527", "0.49253", "0.49183056", "0.49148348", "0.49110457" ]
0.6393299
0
Gets the specified user's information.
def get_user(self, user_id, **kwargs): resource_path = "/users/{userId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_user got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="User") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="User")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def user_info(self):\n response = self.query('user_info')\n return response", "def GetInfoForUser(user):\n return models.UserInfo.gql('WHERE user = :1', user).get()", "def get_users_info(): \n \n data = user_obj.get_users_info()\n return data", "def user_info(user_id):\n return User.query.filter_by(id=user_id).first()", "def user_info(self):\n return self.auth.get_user_by_session()", "def user_info(self):\n resp = self._get(get_url('user'))\n raise_on_error(resp)\n ret = resp.json()\n return UserInfo(ret)", "def get():\n return prepare_response(get_user_info())", "def get_user_info(self) -> str:\n return self._searcher.get_user_info()", "def getUserInfo(userId):\n if(not searchForUser(userId)):\n raise RuntimeError('The user id not exist, the user id=> ' + userId)\n else:\n return client.service.getUser(userid=userId)['return']['user']", "def get_user_info_by_id(self, user_id: int) -> dict:", "def get_user_info(self):\n user_info = self.data_source.get_user_info(self.user_id)\n\n return user_info", "def userinfo(self, **kwargs):\n metadata = self.load_server_metadata()\n resp = self.get(metadata['userinfo_endpoint'], **kwargs)\n resp.raise_for_status()\n data = resp.json()\n return UserInfo(data)", "def get_user_details():\n rv = query_db('select * from user')\n return rv[0] if rv else None", "def getUserInfo(self):\r\n userJson = self.httpGet(ReaderUrl.USER_INFO_URL)\r\n result = json.loads(userJson, strict=False)\r\n self.userId = result['userId']\r\n return result", "def getUserInfo(user_id):\r\n user = session.query(User_info).filter_by(id=user_id).one()\r\n return user", "def user_info(self):\r\n param = {}\r\n param['appid'] = self.apiKey\r\n param['nonce'] = int(time.time()*1000)\r\n param['timestamp'] = int(time.time())\r\n return self.__signed_GET('/api/v1/users/me', param, self.timeout)", "def user_info(self):\n \n return self.auth.get_user_by_session()", "def getUserInfo(user_id):\n user = session.query(User).filter_by(id=user_id).one()\n return user", "def get_user_profile(self):\n return self.request('get', 'id/users')", "def get_user_info(user_id):\n user = session.query(User).filter_by(id=user_id).one()\n return user", "def get_user_info(user_id):\n user = session.query(User).filter_by(id=user_id).one()\n return user", "def getUserInfo(user_id):\r\n try:\r\n session = DBSession()\r\n return session.query(User).filter_by(id=user_id).one()\r\n except:\r\n return None", "def _get_user_info(self):\n\n if not self._refresh_token:\n raise ValueError(\"Refresh Token not set\")\n\n # Add access token to the headers\n add_headers = dict(self._default_headers)\n add_headers['Authorization'] = self._access_token\n\n resp = requests.get(BASE_URL + \"user/{}\".format(self._user_id), headers=add_headers, verify=False)\n if resp.status_code >= 300:\n raise Exception(\"Failed to retrieve user info: {}\".format(resp))\n\n vals = etree_to_dict(ET.XML(resp.content.decode('utf-8')))\n\n # Print generic user info\n print(\"\")\n print(\"== USER INFO ==\")\n print(\"Username: {}\".format(vals.get('user').get('username')))\n print(\"Nickname: {}\".format(vals.get('user').get('nickname')))\n print(\"Usage: {} MB / {} MB\".format(int(int(vals.get('user').get('quota').get('usage')) / (1024*1024)),\n int(int(vals.get('user').get('quota').get('limit')) / (1024*1024))))\n print(\"\")\n\n # Grab folder ids we care about\n self._user_sync_folders_url = vals.get('user').get('syncfolders')", "def lookup_user_info(self, user_id: str) -> Optional[Dict]:\n user_info = None\n try:\n user_info = 
self.web_client.users_info(user=user_id)\n except Exception:\n LOGGER.exception('Cannot get user info for {}'.format(user_id))\n return user_info", "def getUserInfo(user_id):\n\n user = session.query(User).filter_by(id=user_id).one()\n return user", "def getUserInfo(user_id):\n\n user = session.query(User).filter_by(id=user_id).one()\n return user", "def get(self, user_id):\n\n current_app.logger.info(\"GET: {}\".format(request.full_path))\n\n res = UserModel.get_user_info_by_id(user_id)\n if res is None:\n current_app.logger.warn(\"Resource not found\")\n return {'message': 'user not found'}, 404\n else:\n resp_body = res[0].to_json()\n resp_body.update(res[1].to_json())\n return {'user info': resp_body}, 200", "def getUserInfo(data):\n\tusername = data[\"session_username\"]\n\tuser = Users.objects.filter(username=username).first()\n\n\tresponse = {}\n\n\tif not user:\n\t\treturn {\"Success\": False, \"Error\": \"Unable to retrieve the user information from database\"}\n\n\tresponse[\"Success\"] = True\n\tresponse[\"Username\"] = user.username\n\tresponse[\"Email\"] = user.email\n\tresponse[\"Verified\"] = user.verified\n\tresponse[\"Level\"] = user.level\n\tresponse[\"Experience\"] = user.experience\n\tresponse[\"Coins\"] = user.coins\n\tresponse[\"Preferences\"] = {\"Grid Opacity\": user.pref_grid}\n\n\treturn response", "def get_user_info():\n if session and session.get(\"email\") and session.get(\"display_name\"):\n email = session.get(\"email\")\n display_name = session.get(\"display_name\")\n data = dict(email=email, displayName=display_name)\n app.logger.debug(\"Success in getting log information on user: {} at email: {}\".format(display_name, email))\n return jsonify(data)\n else:\n return jsonify(dict(email=\"error\", display_name=\"Could not get info for this user\"))", "def user(self):\n u = self.user_info\n return self.user_model.get_by_id(u['user_id']) if u else None", "def fetch_user_info(self) -> UserInfo:\n url = buildCommandUrl(\n self.server, \"/as/user/keep\", self.__userInfo.strToken)\n result = json_request(\"GET\", url, token=self.__userInfo.strToken)", "def get_user_details():\n current_user = get_jwt_identity()\n\n if not current_user:\n print('uri=/login error=\"Missing username parameter\"')\n return jsonify({\"msg\": \"Missing username parameter\"}), 400\n\n try:\n username = User.get_username_by_id(current_user)\n result = UserDetail.get_printable_user_detail(username)\n\n if result['userType'] == 'adopter':\n animal_preference = Adopter.get_animal_preference(username)\n result['animalPreference'] = animal_preference\n\n dispositions = UserDetail.get_user_dispositions(User.get_username_by_id(current_user))\n result['dispositions'] = dispositions['dispositions']\n elif result['userType'] == 'shelter worker':\n result['shelter'] = ShelterWorker.get_shelter_by_username(username)\n\n except Exception as e:\n return jsonify(message='{}'.format(e)), 510\n\n if result:\n return jsonify(message=result), 200\n else:\n return jsonify(message='User {} not found'.format(username)), 511", "def getUserInfo(UserId):\n url = f\"https://users.roblox.com/v1/users/{UserId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n displayName = j['displayName']\n name = j['name']\n uid = j['id']\n isBanned = j['isBanned']\n joinDate = j['created']\n description = j['description']\n return displayName,name,uid,isBanned,joinDate,description", "def get(self):\r\n return get_user(request)", "def user_info(self, username: str) -> requests.Response:\n api_urls = self.urls\n url = 
api_urls['user_url'].format(user=username)\n response = requests.get(url)\n return response", "def getuser(gh, user):\n return gh.users(user).get()", "def get(self, no):\n user = get_a_user(no)\n if not user:\n api.abort(404)\n else:\n return user", "async def userinfo(user: User = Security(require_user)):\n user = await user.query.gino.first()\n return user.to_dict()", "def get_user_info(self, user_id=None, client_id=None):\n\n if client_id is None:\n client_id = self._use_default_client_id()[0]\n\n params = {}\n\n url = self.api_base_url + \"client/{}/user\".format(str(client_id))\n\n if user_id is not None:\n params.update({\"userId\": user_id})\n\n try:\n raw_response = self.request_handler.make_request(ApiRequestHandler.GET, url, params=params)\n except RequestFailed:\n raise\n\n jsonified_response = json.loads(raw_response.text)\n\n return jsonified_response", "async def user_get_data(\n user: User = Depends(get_current_active_user),\n db: Session = Depends(db_session)) -> UserInfo:\n return model2user(user, db)", "def get(\n user_id=None, discord_id=None, google_id=None, email=None,\n ):\n temp_cursor = user_db.cursor()\n\n pos_selectors = {\n \"user_id\": user_id,\n \"discord_id\": discord_id,\n \"google_id\": google_id,\n \"email\": email,\n }\n\n user = None\n for selector in pos_selectors.keys():\n sel_value = pos_selectors[selector]\n if sel_value is None:\n continue\n user = temp_cursor.execute(\n \"SELECT * FROM users WHERE \" + selector + \" = ?\", (sel_value,)\n ).fetchone()\n\n if user is not None:\n return User_Info.init_from_db(user)\n\n return None", "def GetUserInformation(self):\n if _USER_INFO_KEY in self._session:\n return self._session[_USER_INFO_KEY]\n return self._ReCreateUserInfo()", "def user_data(self, access_token, *args, **kwargs):\n data = {'method': 'users.getInfo', 'session_key': access_token}\n return mailru_api(data)[0]", "def get_user_info_by_name(self, username: str) -> dict:", "def get_user_details(self, response):\n token = response.get('access_token')\n headers = {\"Authorization\": \"Bearer %s\" % token}\n endpoint = self.USER_INFO_URL\n response = requests.get(endpoint, headers=headers)\n return {'email': response.json()['email'] or '',\n # We'll need sub, the unique ID, for get_user_id.\n 'sub': response.json()['sub']}", "def get_user(self):\n\n r = requests.get(\n self._url('/usermanagement/userinfo'),\n headers={'Authorization': self.token},\n proxies=self.proxy)\n r.raise_for_status()\n user = r.json()\n log.info('user {:s} currently logged in'.format(user['login']))\n\n return user", "async def get_user_info(\n current_user: AuthUserPublic = Depends(get_current_active_user)) \\\n -> AuthUserPublic:\n return current_user", "def user_info(username):\n print(json.dumps(client.user_info(username)))", "def getUserInfo(self, user):\n return pwd.getpwnam(user)[2:4]", "def GetUserInfo(self):\n user = users.get_current_user()\n user_info = GetInfoForUser(user)\n if user:\n # Check to see if the user has auxiliary info for Swyzl, and if not\n # then create it.\n if not user_info:\n user_info = models.UserInfo()\n user_info.user = user\n user_info.put()\n\n url = users.create_logout_url(self.request.uri)\n url_link_text = 'Logout'\n else:\n url = users.create_login_url(self.request.uri)\n url_link_text = 'Login'\n return (user, url, url_link_text)", "def user_info(self) -> UserInfo:\n return self.__userInfo", "def getUserDetails(self, user_id):\n try:\n con = self.getMetadataDatabaseConnection()\n results = con.cursor()\n 
con.cursor().callproc('qiime_assets.get_user_details', [user_id, results])\n row = results.fetchone()\n if row:\n user_data = {'email':row[0], 'is_admin':row[1], 'is_locked':row[2], 'last_login':row[3]}\n return user_data\n else:\n return False\n except Exception, e:\n print 'Exception caught: %s.\\nThe error is: %s' % (type(e), e)\n return False", "def user_info_gql(self, user_id: int) -> User:\n user_id = int(user_id)\n # GraphQL haven't method to receive user by id\n return self.user_info_by_username_gql(\n self.username_from_user_id_gql(user_id)\n )", "def get_user(self, user_id):\n uri = 'users/' + user_id\n return self.make_request(uri)", "def userinfo(self):\n return self._userinfo", "def get_user(self, user_id: int) -> dict:\n user = self.call_method('getUser', user_id=user_id)\n return user", "def get_user(username):\n return jsonify(admin.get_user_info(current_app.scoped_session(), username))", "def get_user_me(self):\n url = 'users/me'\n result = self.get(url)\n return result.get('user', result)", "def get(self, id):\n\t\ttry:\n\t\t\tflask_app.logger.debug('We are getting the user: %d', id)\n\t\t\treturn user_service.get(id)\n\t\texcept AssertionError as e:\n\t\t\tuser_space.abort(400, e.args[0], status = \"Could not get user\", statusCode = \"400\")\n\t\texcept Exception as e:\n\t\t\tuser_space.abort(500, e.args[0], status = \"Could not get user\", statusCode = \"500\")", "def get_user_info(credentials):\r\n user_info_service = build(\r\n serviceName='oauth2', version='v2',\r\n http=credentials.authorize(httplib2.Http()))\r\n user_info = None\r\n try:\r\n user_info = user_info_service.userinfo().get().execute()\r\n except errors.HttpError, e:\r\n logging.error('An error occurred: %s', e)\r\n if user_info and user_info.get('id'):\r\n return user_info\r\n else:\r\n raise NoUserIdException()", "async def user_data(self, ctx, user=None):\n if user is None:\n user = ctx.author\n\n for member in ctx.guild.members:\n if member.mention == user:\n user = member\n\n conc, c = await utilities.load_db()\n c.execute(\"SELECT uid, karma FROM members WHERE uid = (:uid)\", {'uid': user.id})\n uid, karma = c.fetchall()[0]\n\n await utilities.single_embed(\n channel=ctx,\n title='User Info',\n thumb_url=user.avatar_url,\n name=user.name,\n value=f'**Nickname**: {user.nick}\\n'\n f'**Karma**: {karma}\\n'\n f'**User ID**: {user.id}\\n'\n f'**Joined Discord**: {user.created_at}\\n'\n f'**Joined {user.guild.name}**: {user.joined_at}\\n'\n f'**Roles**: {\", \".join([role.name for role in user.roles if role.name != \"@everyone\"])}'\n )", "def get_user(self):\n if \"user\" not in self._data:\n self._data[\"user\"] = User.objects.get(pk=self.kwargs[\"user_id\"])\n return self._data[\"user\"]", "def get_user_info(self):\n\n if self._access_token is None:\n raise RequiresAccessTokenError()\n response = self.__make_oauth_request(USER_INFO_URL, token=self._access_token, signed=True)\n return simplejson.loads(response.read())", "def get_user(user_id):\r\n data = slack_client.api_call(\"users.info\", user=user_id)\r\n if not data[\"ok\"]:\r\n return False\r\n response = {}\r\n response[\"username\"] = data[\"user\"][\"name\"]\r\n response[\"name\"] = data[\"user\"][\"profile\"][\"real_name_normalized\"]\r\n response[\"user_id\"] = data[\"user\"][\"id\"]\r\n return response", "def user(self):\n return self.getattr('user')", "def get_user(self, user_name=None):\r\n params = {}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('GetUser', params)", "def user(self, 
username='None'):\n\n layout = u\"\"\"\n user: {id}\n created: {created}\n karma: {karma}\n about: {about}\n \"\"\"\n userdata = requests.get(endpoints.USER.format(username)).json()\n return layout.format(**userdata) if userdata else 'user not found'", "def fusion_api_get_user(self, uri=None, param='', api=None, headers=None):\n return self.user.get(uri=uri, api=api, headers=headers, param=param)", "def get_user_info(self) -> Dict:\n try:\n return self.client.get_user()\n except ClientError as cerr:\n if cerr.response['Error']['Code'] == 'AccessDenied':\n # If the user doesn't have access rights to IAMClient\n # we can find the user name in the error response\n user_name = StrUtils.find_expression(str(cerr), self._USER_NAME_REGEX)\n return {'UserName' : user_name,\n 'User' : {'UserName' : user_name,\n 'UserId' : ''}}\n raise cerr\n except Exception as ex:\n raise GetUserInfoError(error_msg=ex)", "def current_user_info():\n\n return current_user", "def get_user_info(username: str) -> dict:\n api = f\"https://api.github.com/users/{username}\"\n\n return requests.get(api).json()", "def get_user():\n userdict = jsonify2(current_user.db_user, 'User')\n return current_app.bitjws.create_response(userdict)", "def user(self):\n u = self.user_info\n return self.user_model.get_by_id(u['user_id']) if u else None", "def get_user(id=None, name=None):\n found_id = get_user_id(id, name)\n if not found_id:\n return\n response = utils.checked_api_call(users_api, 'get_specific', id=found_id)\n if response:\n return response.content", "def get_user():\n\treturn '1', 200", "def get_user_details(client):\n\n try:\n return client.user(user_id='me').get(fields=['login'])\n # print(f\"The email of the user is: {me['login']}\")\n\n except Exception as e:\n print(f\"Error has occurred: {e}\")\n return None", "def get_user(self):\n raise NotImplementedError", "def get_user(id):\n pass", "def _get_user_details():\n with open(USER_DETAILS_FILE) as f:\n fitbit_user = json.load(f)\n access_token = fitbit_user['access_token']\n refresh_token = fitbit_user['refresh_token']\n expires_at = fitbit_user['expires_at']\n\n return access_token, refresh_token, expires_at", "def get_user_info(credentials):\n user_info_service = build(\n serviceName='oauth2', version='v2',\n http=credentials.authorize(httplib2.Http()))\n user_info = None\n try:\n user_info = user_info_service.userinfo().get().execute()\n except errors.HttpError, e:\n logging.error('An error occurred: %s', e)\n if user_info and user_info.get('id'):\n return user_info\n else:\n raise NoUserIdException()", "def user_data(self, access_token, *args, **kwargs):\n headers = {'Authorization': 'Bearer %s' % access_token}\n try:\n resp = requests.get(ASANA_USER_DETAILS_URL,\n headers=headers)\n resp.raise_for_status()\n return resp.json()['data']\n except ValueError:\n return None", "def extract_user_info(user):\r\n return {\r\n 'username': user.username,\r\n 'email': user.email,\r\n 'first_name': user.first_name,\r\n 'last_name': user.last_name,\r\n }", "def extract_user_info(user):\r\n return {\r\n 'username': user.username,\r\n 'email': user.email,\r\n 'first_name': user.first_name,\r\n 'last_name': user.last_name,\r\n }", "def get(self):\n\n user = None\n if self.request.headers.get('X-Pp-User'):\n user = self.request.headers['X-Pp-User']\n\n result_json = {\n \"user\": user\n }\n\n self.success(result_json)", "def _request_user_info(credentials):\n http = httplib2.Http()\n\n # User information stored here\n credentials.authorize(http)\n resp, content = 
http.request('https://www.googleapis.com/plus/v1/people/me')\n\n if resp.status != 200:\n current_app.logger.error(\"Error while obtaining user profile: %s\" % resp)\n return None\n\n # Check whether user is authenticating with the allowed domain.\n if (current_app.config['RESTRICT_DOMAIN'] is True and \n credentials.id_token.get('hd') != current_app.config['REQUIRED_DOMAIN']):\n\n # Replace with logging for a real app\n print(\"\\n------------------------------------------------------\")\n print(\"User attempted to authenticate with disallowed domain.\")\n print(\"------------------------------------------------------\\n\")\n\n # User information deleted here\n oauth2.storage.delete()\n return None\n\n # Stores the users information in the session profile dictionary\n session['profile'] = json.loads(content.decode('utf-8'))\n\n # Remove this in production. It's here so you can see what information is stored.\n print(\"\\n------------------------------------------------------\")\n print(\"SESSION PROFILE INFORMATION\")\n print(\"------------------------------------------------------\")\n for k,v in session['profile'].items():\n print(k,\"--->\",v)\n print(\"------------------------------------------------------\\n\")", "def user_info(request):\n\n if request.method != 'POST':\n return HttpResponseNotAllowed(['POST'])\n\n data = json.loads(request.body.decode('utf-8'))\n auth_token = str(data.get('auth_token', ''))\n request_username = str(data.get('request_username', ''))\n\n try:\n if not verify_admin(auth_token):\n raise PlantalyticsAuthException(ADMIN_INVALID)\n\n message = (\n 'Retrieving user info for username: {}.'\n ).format(request_username)\n logger.info(message)\n response = cassy.get_user_info(request_username)\n message = (\n 'Successfully retrieved user info for username: {}.'\n ).format(request_username)\n logger.info(message)\n return HttpResponse(\n json.dumps(response),\n content_type='application/json'\n )\n except PlantalyticsException as e:\n message = (\n 'Error attempting to retrieve user info. 
Error code: {}'\n ).format(str(e))\n logger.warn(message)\n error = custom_error(str(e))\n return HttpResponseForbidden(error, content_type='application/json')\n except Exception as e:\n message = (\n 'Unknown error occurred while attempting to retrieve user info:'\n )\n logger.exception(message)\n error = custom_error(UNKNOWN, str(e))\n return HttpResponseServerError(error, content_type='application/json')", "def get_user(self):\n return self.get('users/self')", "def _on_get_user_info(self, callback, session, user):\n logging.debug('user data from github ' + str(user))\n if user is None:\n callback(None)\n return\n callback({\n \"id\": user[\"id\"],\n \"login\": user[\"login\"],\n \"name\": user.get(\"name\"),\n \"email\": user.get(\"email\"),\n \"access_token\": session[\"access_token\"],\n })", "def get_one_user():", "def view_user(user):\n return {\n \"id\": user.id,\n \"first_name\": user.first_name,\n \"last_name\": user.last_name,\n \"email\": user.email,\n \"profile_pic\": user.profile_pic,\n }", "def get_user_details(self, response):\n return {\n \"username\": response.get(\"username\"),\n \"email\": response.get(\"email\"),\n \"fullname\": response.get(\"username\"),\n }", "def getUser(self, resource):\n if isinstance(resource, int):\n resource = 'users/{0}'.format(resource)\n\n res = self.getRequest(resource)\n\n if res:\n user = vsdModels.User(**res)\n return user\n else:\n return None", "async def userinfo(self, ctx, user: discord.Member = None):\r\n\r\n if not user:\r\n user = ctx.message.author\r\n try:\r\n playinggame = user.game\r\n except:\r\n playinggame = None\r\n server = ctx.message.server\r\n embed = discord.Embed(color=0xda4800)\r\n embed.set_author(name=user.name,\r\n icon_url=user.avatar_url)\r\n embed.add_field(name=\"ID\", value=user.id)\r\n embed.add_field(name=\"Discriminator\", value=user.discriminator)\r\n embed.add_field(name=\"Bot\", value=str(user.bot))\r\n embed.add_field(name=\"Created\", value=user.created_at.strftime(\"%d %b %Y %H:%M\"))\r\n embed.add_field(name=\"Joined\", value=user.joined_at.strftime(\"%d %b %Y %H:%M\"))\r\n embed.add_field(name=\"Playing\", value=playinggame)\r\n embed.add_field(name=\"Status\", value=user.status)\r\n embed.add_field(name=\"Color\", value=str(user.color))\r\n\r\n try:\r\n roles = [x.name for x in user.roles if x.name != \"@everyone\"]\r\n\r\n if roles:\r\n roles = sorted(roles, key=[x.name for x in server.role_hierarchy\r\n if x.name != \"@everyone\"].index)\r\n roles = \", \".join(roles)\r\n else:\r\n roles = \"None\"\r\n embed.add_field(name=\"Roles\", value=roles)\r\n except:\r\n pass\r\n\r\n await self.client.say(embed=embed)", "def show_user_info(self):\n name = self.get_user_name()\n print(f'Name: {name.title()}')\n print(f'Age: {self.age}')\n print(f'Gender: {self.gender.title()}')\n print(f'Mobile: {self.m_number}')", "def post_user_info():\n return jsonify(users.get_user_info(request, client))", "def get_info(email):\n # Get the first user where _id=email\n user = models.User.objects.raw({\"_id\": email}).first()\n return user", "def show_user_info(user_id):\n\n user = User.query.get_or_404(user_id)\n return render_template(\"user_details.html\", user=user)", "def get_user(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload={}, request_type=self.REQUEST_GET, version=\"v2\")", "def get_user_info(self, token, openid, client_id):\n\n url = get_config(\"login.qq.user_info_url\") % (token, 
client_id, openid)\n user_info_resp = get_remote(url)\n user_info = convert(json.loads(user_info_resp))\n\n if user_info.get(\"ret\") != 0:\n raise Exception(user_info)\n\n return user_info", "def get_user_info(uid):\r\n session = tables.get_session()\r\n account_name = ''\r\n description = ''\r\n if session is None:\r\n return account_name, description\r\n try:\r\n user_account = UserAccount()\r\n account_name = user_account.get_field_by_key(UserAccount.account_name, UserAccount.user_id, uid,\r\n session)\r\n description = user_account.get_field_by_key(UserAccount.description, UserAccount.user_id, uid,\r\n session)\r\n except SQLAlchemyError as err:\r\n LOGGER.error('User login failed: %s', err)\r\n return account_name, description\r\n finally:\r\n session.close()\r\n return account_name, description", "def get_user(user_id=None):\n users = storage.all('User')\n user = users.get('User' + \".\" + user_id)\n if user is None:\n abort(404)\n else:\n return jsonify(user.to_dict()), 200" ]
[ "0.8078219", "0.8060639", "0.8038709", "0.80188537", "0.79876477", "0.7913451", "0.7849845", "0.7831485", "0.78257066", "0.7802099", "0.7783503", "0.7776827", "0.77714074", "0.77186304", "0.7709317", "0.76658565", "0.7657189", "0.7639148", "0.7587892", "0.758278", "0.758278", "0.7551785", "0.7534716", "0.7523771", "0.7513454", "0.7513454", "0.7492404", "0.746305", "0.74596584", "0.745851", "0.7455897", "0.74169964", "0.73753", "0.7370226", "0.73676723", "0.7332438", "0.7331589", "0.73246384", "0.7274709", "0.72548777", "0.7248239", "0.7242473", "0.7228817", "0.72053766", "0.719751", "0.71851295", "0.71837246", "0.7182214", "0.71678424", "0.71671146", "0.716156", "0.71476144", "0.7146807", "0.7140694", "0.7137043", "0.7105543", "0.7102467", "0.7097344", "0.7096162", "0.7092426", "0.70848846", "0.70778275", "0.7071392", "0.70699155", "0.70668703", "0.7063901", "0.7051887", "0.70446354", "0.7044446", "0.70405555", "0.70309126", "0.70194143", "0.70156693", "0.7013339", "0.7010558", "0.7006656", "0.7006582", "0.69995236", "0.6995455", "0.698696", "0.6983366", "0.6974062", "0.6974062", "0.69710904", "0.6962572", "0.69545496", "0.6950093", "0.69414467", "0.6941233", "0.69366664", "0.69328624", "0.6929601", "0.6925427", "0.6924113", "0.69217885", "0.6917285", "0.6909921", "0.6909455", "0.6907866", "0.68984294", "0.68973887" ]
0.0
-1
Gets the specified UserGroupMembership's information.
def get_user_group_membership(self, user_group_membership_id, **kwargs): resource_path = "/userGroupMemberships/{userGroupMembershipId}" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "get_user_group_membership got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userGroupMembershipId": user_group_membership_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="UserGroupMembership") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="UserGroupMembership")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_their_members(our_group):\n\tgroup_id = our_group[\"groupId\"]\n\turl = f'{BASE_URL}/groups/{group_id}/members'\n\tparams = {'$select': 'userPrincipalName,id'}\n\treturn call_api(url, params)", "def get_membership_data_for_current_user(self):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/User/GetMembershipsForCurrentUser/\"))", "def getGroupMembers(group_id):\r\n return Group.getGroupMembers(group_id)", "def get_membership_data_by_id_get(self, membershipId, membershipType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/User/GetMembershipsById/{membershipId}/{membershipType}/\"))", "def get(self, id):\r\n return UserGroupService.getUserGroup(self, id)", "def get_groups_for_member_get(self, filter, groupType, membershipId, membershipType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/User/{membershipType}/{membershipId}/{filter}/{groupType}/\"))", "def GetGroupMembers(self, group):\n return []", "def getMembership(config, user):\r\n\r\n seen = set()\r\n for member_of in _getMembership(config, user, seen):\r\n yield member_of\r\n\r\n # everyone is always a member of group \"all\"\r\n yield 'all'", "def get_group_group_members(self, group_id):\n try:\n group_id = self.quote(group_id)\n return self.g.get('groups/%s/groups/' % group_id)\n except HTTPError as e:\n return self._manage_errors(e)", "def getMember(self, *args):\n return _libsbml.Group_getMember(self, *args)", "def getGroup(group: int, name=\"\") -> list:\n groups = mongo.db.groups.find({'id':group},{'_id':0})\n userID_list = []\n user_list = []\n for entry in groups:\n if entry[\"id\"] == group:\n userID_list = userID_list + entry[\"members\"]\n if len(userID_list) != 0:\n for entry in userID_list:\n x = fetchUser(userId=entry)\n user_list = user_list + x\n return user_list", "def get_membersof(self, kwargs):\n group = kwargs[\"group\"]\n verbose = kwargs.get(\"verbose\", False)\n\n results = list(self.engine.query(self.engine.GROUP_DN_FILTER(group), [\"distinguishedName\", \"objectSid\"]))\n if results:\n group_dn = results[0][\"distinguishedName\"]\n else:\n error(\"Group {group} does not exists\".format(group=group))\n\n primary_group_id = results[0][\"objectSid\"].split('-')[-1]\n results = self.engine.query(self.engine.ACCOUNTS_IN_GROUP_FILTER(primary_group_id, group_dn))\n self.display(results, verbose)", "def get_memberships(self):\n return UnitMembership.objects.filter(unit=self).select_related(\"user\")", "def get_members_of_group_get(self, currentpage, groupId, memberType, nameSearch):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/Members/\"))", "def get_groups_details(self, groups):\n assert isinstance(groups, list)\n # It may be require we request the API by splitting the names list\n # If the list is too long to be handled by the Gerrit server (URI)\n query_args = \"?%s\" % \"&\".join([\"q=%s\" % g for g in groups])\n query_args += \"&o=MEMBERS\" if groups else \"o=MEMBERS\"\n\n try:\n ret = self.g.get('groups/%s' % query_args)\n except HTTPError as e:\n return self._manage_errors(e)\n\n 
return ret", "def get_group_member(self, group):\n fake_group_obj = SimpleNamespace(id=group[\"id\"])\n current_identity = self.context[\"identity\"]\n avatar = current_groups_service.links_item_tpl.expand(\n current_identity, fake_group_obj\n )[\"avatar\"]\n return {\n \"type\": \"group\",\n \"id\": group[\"id\"],\n \"name\": group.get(\"name\") or group[\"id\"],\n \"description\": group.get(\"description\", \"\"),\n \"avatar\": avatar,\n }", "def groups_get(self, mar, request):\n if not mar.viewed_user_auth:\n raise exceptions.NoSuchUserException(request.groupName)\n group_id = mar.viewed_user_auth.user_id\n group_settings = self._services.usergroup.GetGroupSettings(\n mar.cnxn, group_id)\n member_ids, owner_ids = self._services.usergroup.LookupAllMembers(\n mar.cnxn, [group_id])\n (owned_project_ids, membered_project_ids,\n contrib_project_ids) = self._services.project.GetUserRolesInAllProjects(\n mar.cnxn, mar.auth.effective_ids)\n project_ids = owned_project_ids.union(\n membered_project_ids).union(contrib_project_ids)\n if not permissions.CanViewGroupMembers(\n mar.perms, mar.auth.effective_ids, group_settings, member_ids[group_id],\n owner_ids[group_id], project_ids):\n raise permissions.PermissionException(\n 'The user is not allowed to view this group.')\n\n member_ids, owner_ids = self._services.usergroup.LookupMembers(\n mar.cnxn, [group_id])\n\n member_emails = list(self._services.user.LookupUserEmails(\n mar.cnxn, member_ids[group_id]).values())\n owner_emails = list(self._services.user.LookupUserEmails(\n mar.cnxn, owner_ids[group_id]).values())\n\n return api_pb2_v1.GroupsGetResponse(\n groupID=group_id,\n groupSettings=api_pb2_v1_helpers.convert_group_settings(\n request.groupName, group_settings),\n groupOwners=owner_emails,\n groupMembers=member_emails)", "def get_group_info(groupname):\n return jsonify(admin.get_group_info(current_app.scoped_session(), groupname))", "def get(person_group_id):\n url = 'persongroups/{}'.format(person_group_id)\n\n return util.request('GET', url)", "def get_group_members(self, group_key):\n try:\n paged_results = self.repository.members.list(group_key)\n result = api_helpers.flatten_list_results(paged_results, 'members')\n LOGGER.debug('Getting all the members for group_key = %s,'\n ' result = %s', group_key, result)\n return result\n except (errors.HttpError, HttpLib2Error) as e:\n raise api_errors.ApiExecutionError(group_key, e)", "def getGroupInfo(groupId):\n url = f\"https://groups.roblox.com/v1/groups/{groupId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j", "def users_in_group(self, group_id):\n users = []\n users = self._get(('user', 'group', str(group_id)))\n for user in users:\n if 'dreamdiary.diary.user' in user['saml_permissions']:\n users.append(user)\n return users", "def list_group_members(self, token, userGroup):\n requestUser = self.get_username_from_token(token)\n dataBase = self.read_database()\n if userGroup not in dataBase['userGroups']:\n raise GroupDoesNotExistException(\"User group does not exist\")\n\n if requestUser not in dataBase['userGroups'][userGroup]['owners']:\n raise UserPermissionException(\"User is not an owner of this group\")\n owners = dataBase['userGroups'][userGroup]['owners']\n members = dataBase['userGroups'][userGroup]['members']\n return {'owners':owners, 'members':members}", "def view_group(request, group_id):\n users = models.UserProfile.all().order('email')\n if group_id:\n group = models.UserGroup.get_by_id(int(group_id))\n if group.users:\n users = 
models.UserProfile.get(group.users)\n else:\n users = []\n return utility.respond(request, 'admin/view_group', {'users': users})", "def get_group_members(self, group):\n members = []\n result = self.search('ou=groups,dc=mozilla',\n filterstr='cn=%s' % (group))\n if result == False:\n raise self.SearchError\n elif result == []:\n return []\n for group in result[1]:\n members = list(set(members) | set(group[1]['memberUid']))\n return members", "def get(self):\n usergroup_node = graph.find_one(\"Usergroup\",\n property_key='id',\n property_value=self.id)\n return usergroup_node", "def get_group_info(self, data):\n return self.__form_call('channels.info', data)", "def get_membership(user):\n raise Exception(\"Someone needs to fix this method to no longer be dependent on model relationship if they're going to use it!\")", "def get_people(self):\n url = self.base_url + 'memberships'\n\n req = requests.get(headers=self.headers, url=url)\n\n return req.json()", "def get_group_members(self, group_id, max_results=None, paging_token=None):\n route_values = {}\n if group_id is not None:\n route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')\n query_parameters = {}\n if max_results is not None:\n query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')\n if paging_token is not None:\n query_parameters['pagingToken'] = self._serialize.query('paging_token', paging_token, 'str')\n response = self._send(http_method='GET',\n location_id='45a36e53-5286-4518-aa72-2d29f7acc5d8',\n version='6.0-preview.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('PagedGraphMemberList', response)", "def get_group_details(self, group_id):\n url = self.groups_url + \"/\" + group_id\n return requests.get(url, headers=self.headers)", "def get_members(self):\r\n database = main.connect_to_cloudsql()\r\n cursor = database.cursor()\r\n query = (\"SELECT username from \" + ENV_DB + \".Groups WHERE gid='{}'\").format(self.g_id)\r\n cursor.execute(query)\r\n data = cursor.fetchall()\r\n database.close()\r\n return list(i[0] for i in data)", "def group_get_members(self,groupname):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_group_get_members_query+\" ORDER BY $username_field$\",{'groupname':groupname,'username_field':self.sql_username_field,'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: group_get_members: %s\" % (query,))\n\n cursor.execute(query)\n desc=[i[0] for i in cursor.description]\n for row in cursor:\n dictrow=dict(zip(desc,row))\n yield dictrow[self.sql_username_field]", "def is_membership(self, gid, membership):\n if membership not in [ 'member', 'manager', 'owner']:\n raise Exception(\"Membership request is unexpect as: {m}. 
Only member, owner or manager inquery allowed.\".format(m=membership))\n url = \"{b}/group/is-{m}/{gid}\".format(b=self.base_url, m=membership, gid=gid)\n r = self.get(url)\n print r", "def get_group_users(groupname):\n return jsonify(admin.get_group_users(current_app.scoped_session(), groupname))", "def list_members(self, group_name):\n METHOD = 'GET'\n API_PATH = '/groups/list-members'\n\n data = {'group_name': group_name}\n\n # Make REST call\n resp = self._rest_call[METHOD](API_PATH, data=data)\n\n if resp.status_code == 200:\n return resp.json().get('members')\n\n elif resp.status_code == 403:\n raise AuthorizationError(\"User is not authorized or token is incorrect.\")\n\n else:\n if resp.json().get(\"error_code\") in ERROR_CODES:\n raise ERROR_CODES[resp.json().get('error_code')](resp.json().get('message'))\n else:\n raise APIError(\"Response code {0}: {1} {2}\".format(resp.status_code,\n resp.json().get('error_code'),\n resp.json().get('message')))", "def get_group(self, group_name):\n\n return self._group[group_name]", "def get_banned_members_of_group_get(self, currentpage, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/Banned/\"))", "def get_pending_memberships_get(self, currentpage, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/Members/Pending/\"))", "def get_users_info(): \n \n data = user_obj.get_users_info()\n return data", "def get_membership(self, username, team):\n try:\n return CourseTeamMembership.objects.get(user__username=username, team=team)\n except CourseTeamMembership.DoesNotExist:\n raise Http404 # lint-amnesty, pylint: disable=raise-missing-from", "def get_user_group(self, user_id):\n user_sn = self.id_to_sn(user_id)\n self.send_command(cmd=DEFS.CMD_USERGRP_RRQ,\n data=struct.pack('<I', user_sn))\n self.recv_reply()\n return self.last_payload_data[0]", "def check_group_exists(self, group_name):\n for grp in self.get_list_groups():\n if grp[\"name\"] == group_name:\n return grp[\"id\"], grp[\"members\"]\n\n return None", "def list_user_group_memberships(self, compartment_id, **kwargs):\n resource_path = \"/userGroupMemberships\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"user_id\",\n \"group_id\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_user_group_memberships got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"userId\": kwargs.get(\"user_id\", missing),\n \"groupId\": kwargs.get(\"group_id\", missing),\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n 
response_type=\"list[UserGroupMembership]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[UserGroupMembership]\")", "def get_group_members(self, group_id: int, page_size=default_members_page_size) -> List[dict]:\n try:\n chat = self.call_method('getChat', chat_id=group_id) # offline request\n except errors.ObjectNotFound:\n self.get_all_chats()\n chat = self.call_method('getChat', chat_id=group_id) # offline request\n\n if chat['type']['@type'] == 'chatTypeBasicGroup':\n members = self.call_method('getBasicGroupFullInfo',\n basic_group_id=chat['type']['basic_group_id'])['members']\n\n elif chat['type']['@type'] == 'chatTypeSupergroup':\n members = self._get_super_group_members(chat, page_size)\n\n else:\n raise errors.TDLibError('Unknown group type: %s' % chat['type']['@type'])\n\n return members", "def get_member_from_group(member, group_name):\n query= \"SELECT * FROM groupmembers WHERE member='{}' AND group_id='{}'\".format(member, group_name)\n cur.execute(query)\n result = cur.fetchall()\n if len(result) > 1:\n return True\n return False", "def get_potential_groups_for_member_get(self, filter, groupType, membershipId, membershipType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/User/Potential/{membershipType}/{membershipId}/{filter}/{groupType}/\"))", "def get_group_info(\n self, tenant: str, group_config: dict, authnz_status: dict\n ) -> tuple:\n if not group_config[\"enabled\"]:\n return (\n group_config[\"default_url_group\"],\n group_config[\"default_memberships\"],\n )\n try:\n group_name = url_unescape(self.get_query_argument(\"group\"))\n except HTTPError as e:\n # first check if it is in the url\n found = re.sub(\n r\"/v1/.+/(p[0-9]+-[a-zA-Z0-9-]+-group).*\", r\"\\1\", self.request.uri\n )\n if found == self.request.uri:\n # then it is not there, so we need to use the default\n default_url_group = group_config[\"default_url_group\"]\n if options.tenant_string_pattern in default_url_group:\n group_name = default_url_group.replace(\n options.tenant_string_pattern, tenant\n )\n else:\n group_name = found\n try:\n group_memberships = authnz_status[\"claims\"][\"groups\"]\n except Exception as e:\n logging.info(\n \"Could not get group memberships - choosing default memberships\"\n )\n default_membership = group_config[\"default_memberships\"]\n group_memberships = []\n for group in default_membership:\n if options.tenant_string_pattern in group:\n new = group.replace(options.tenant_string_pattern, tenant)\n else:\n new = group\n group_memberships.append(new)\n return group_name, group_memberships", "def list_group(group):\n\n members = group_members(group)\n ret = {}\n if members:\n for member in members:\n info = get(member)\n if info:\n ret[uid2dn(member)] = info\n return ret", "def getMemberships(self):\n\n extra_rights = {\n 'user': ['user'],\n 'public': ['anyone'],\n 'list': [],\n }\n\n return dicts.merge(extra_rights, self.rights)", "def get_group(group):\n\n return ldapi.lookup(ld, 'cn', group, cfg['ldap_groups_base'])", "def group_members(self) -> pulumi.Output[Optional[Sequence['outputs.GroupMembersItemResponse']]]:\n return pulumi.get(self, \"group_members\")", "def get_user_groups(user):\n auth_groups = user.groups.all()\n # groups = [group.profile for group in auth_group] # not working\n # todo implement 
better\n groups = [GroupProfile.objects.filter(group=group)[0] for group in auth_groups if GroupProfile.objects.filter(group=group).count()]\n return groups", "def get_memberships(self, kwargs):\n account = kwargs[\"account\"]\n recursive = kwargs.get(\"recursive\", False)\n\n already_printed = set()\n\n def lookup_groups(dn, leading_sp, already_treated):\n results = self.engine.query(self.engine.DISTINGUISHED_NAME(dn), [\"memberOf\", \"primaryGroupID\"])\n for result in results:\n if \"memberOf\" in result:\n for group_dn in result[\"memberOf\"]:\n if group_dn not in already_treated:\n print(\"{g:>{width}}\".format(g=group_dn, width=leading_sp + len(group_dn)))\n already_treated.add(group_dn)\n lookup_groups(group_dn, leading_sp + 4, already_treated)\n\n if \"primaryGroupID\" in result and result[\"primaryGroupID\"]:\n pid = result[\"primaryGroupID\"]\n results = list(self.engine.query(self.engine.PRIMARY_GROUP_ID(pid)))\n if results:\n already_treated.add(results[0][\"dn\"])\n\n return already_treated\n\n results = self.engine.query(self.engine.ACCOUNT_IN_GROUPS_FILTER(account), [\"memberOf\", \"primaryGroupID\"])\n for result in results:\n if \"memberOf\" in result:\n for group_dn in result[\"memberOf\"]:\n print(group_dn)\n if recursive:\n already_printed.add(group_dn)\n s = lookup_groups(group_dn, 4, already_printed)\n already_printed.union(s)\n\n # for some reason, when we request an attribute which is not set on an object,\n # ldap3 returns an empty list as the value of this attribute\n if \"primaryGroupID\" in result and result[\"primaryGroupID\"] != []:\n pid = result[\"primaryGroupID\"]\n results = list(self.engine.query(self.engine.PRIMARY_GROUP_ID(pid)))\n if results:\n print(results[0][\"dn\"])", "def customer_group_get(group_id=None):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n query = \"\"\"\n SELECT \n `group_id`,\n `group_name`,\n `description`,\n `timestamp`,\n `created_by`,\n `creation_time`,\n `is_deleted`,\n `updated_by`,\n `role_id`,\n `is_default`,\n `is_customer`,\n `company_name`,\n `company_address`,\n `company_telephone`,\n `company_fax`,\n `company_website`,\n `company_sales_contact`,\n `company_purchase_contact`,\n `company_business`,\n `company_business_type`,\n `company_sales_email`,\n `company_purchase_email`,\n `company_reg_number`,\n `company_vat_number` \n FROM `groups` \n WHERE `is_customer` = 1\n \"\"\"\n\n if group_id:\n query += \"\"\"\n AND `group_id` = \\\"%s\\\"\n \"\"\" % (group_id)\n\n group_details = None\n cursor = db.cursor()\n\n if cursor.execute(query) != 0:\n group_details = cursor.fetchall()\n\n cursor.close()\n db.close()\n\n return group_details", "def get_users(db, group):\n my_users = {\n user_id\n for user_id, in db(\"\"\"\n select distinct\n users.id\n from users, members\n where\n users.id = members.user_id\n and group_id = %s\n \"\"\",\n group.group_id)\n }\n return my_users", "def locate_group_users(self, group):\n return self.ldap_connection.search_s(\"ou=Groups,dc=redhat,dc=com\",\n ldap.SCOPE_SUBTREE, 'cn={0}'.format(group))", "def groupfinder(user_id, request):\n ret = DBSession.query(User).filter_by(user_id=user_id).all()\n if len(ret) == 0:\n return None\n user = ret[0]\n groups = [x.group_name for x in user.groups]\n return groups", "def test_get_membership(self):\n get_response = lambda: self.client.get(self.url)\n\n self.assert_authentication_required(get_response)\n self.assert_membership_required(get_response)\n\n # alice is a regular group member, works for her:\n self.login_as(\"alice\")\n with 
self.assertNumQueries(5): # (3) check membership (4) get mem (5) get com\n response = get_response()\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(list(response.data.keys()), self.expected_keys)\n self.assertEqual(response.data[\"user\"][\"id\"], self.USER_ID)\n\n # bob is group admin and it's his data, works for him:\n self.login_as(\"bob\")\n with self.assertNumQueries(5):\n response = get_response()\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(list(response.data.keys()), self.expected_keys)\n self.assertEqual(response.data[\"user\"][\"id\"], self.USER_ID)", "def get_profile_get(self, components, destinyMembershipId, membershipType):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Destiny2/{membershipType}/Profile/{destinyMembershipId}/\"))", "def getMembership(self, status):\n\n if status == 'user':\n return ['user']\n\n if status == 'public':\n return ['anyone']\n\n return self.rights[status]", "def getGroupData(service, groupName, attList):\n # import IPython ; IPython.embed() ; exit(); \n groupsDataList = service.contactGroups().list().execute()[\"contactGroups\"]\n for group in groupsDataList:\n if group[\"name\"] == groupName:\n groupData = []\n for att in attList:\n groupData.append(group[att])\n return groupData", "def get_group(self, group_name, marker=None, max_items=None):\r\n params = {'GroupName' : group_name}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('GetGroup', params, list_marker='Users')", "def info(self, membership, callback=None):", "def group_members(group):\n\n group = ldapi.lookup(ld, 'cn', group, cfg['ldap_groups_base'])\n\n if group and 'uniqueMember' in group:\n r = re.compile('^uid=([^,]*)')\n return map(lambda x: r.match(x).group(1), group['uniqueMember'])\n return []", "def get_users_in_group(self, group_id):\n members = self.vk.groups.getMembers(group_id=group_id, count=1)\n peoples = members['count']\n ids = []\n while len(ids) < peoples:\n members = self.vk.groups.getMembers(group_id=group_id, offset=len(ids))\n ids += members['items']\n\n return ids", "def get(self):\r\n return UserGroupService.getAllUserGroups(self)", "def getAGroupInfo(group_id):\r\n return Group.getAGroupInfo(group_id)", "def get_groups(self):\n result = self.conn.usergroup.get(status=0, output='extend', selectUsers=\"extend\")\n groups = {group[\"name\"]: Group(\n name=group[\"name\"],\n id=group[\"usrgrpid\"],\n members=group[\"users\"],\n ) for group in result}\n return groups", "def getUserGroups(self, user):\n return [gu[0] for gu in grp.getgrall() if user in gu[3]]", "def get_channel_users(self, channel):\n data = {\n 'token' : app.config['SLACK_BEARER'],\n 'channel' : channel\n }\n\n group = self.__form_call('groups.info', data)\n group_dict = group.json()\n if not group_dict['ok']:\n channel = self.get_group_info(data)\n channel_dict = channel.json()\n return channel_dict['channel']['members']\n return group_dict['group']['members']", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Membership':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = MembershipArgs.__new__(MembershipArgs)\n\n __props__.__dict__[\"arn\"] = None\n __props__.__dict__[\"collaboration_arn\"] = None\n 
__props__.__dict__[\"collaboration_creator_account_id\"] = None\n __props__.__dict__[\"collaboration_identifier\"] = None\n __props__.__dict__[\"membership_identifier\"] = None\n __props__.__dict__[\"query_log_status\"] = None\n __props__.__dict__[\"tags\"] = None\n return Membership(resource_name, opts=opts, __props__=__props__)", "def _get_group_from_file(self, wanted_group):\n wanted_gid = \"\"\n if (isinstance(wanted_group, int) or\n re.match(\"^\\\\d+$\", wanted_group)):\n wanted_gid = str(wanted_group)\n wanted_group = \"\"\n try:\n ingroup = open(self.group_file)\n except (IOError, OSError):\n return (\"\", \"\", \"\")\n else:\n for line in ingroup:\n (group, dummy, gid, users) = line.strip().split(':')\n if wanted_group and group == wanted_group:\n return (group, gid, users)\n if wanted_gid and gid == wanted_gid:\n return (group, gid, users)\n ingroup.close()\n return (\"\", \"\", \"\")", "def group_membership_claims(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"group_membership_claims\")", "def test_get_group_users(self):\n response = self.client.get_group_users(\"ABC123\")\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v2/groups/ABC123/users\")\n self.assertEqual(\n util.params_to_dict(args),\n {\n \"account_id\": [self.client.account_id],\n \"limit\": [\"100\"],\n \"offset\": [\"0\"],\n },\n )", "def get_for_user(self, user, access=None):\n from sentry.models import TeamMember\n\n if not user.is_authenticated():\n return SortedDict()\n\n qs = TeamMember.objects.filter(\n user=user,\n is_active=True,\n ).select_related('team')\n if access is not None:\n qs = qs.filter(type__lte=access)\n\n results = SortedDict()\n for tm in sorted(qs, key=lambda x: x.team.name):\n team = tm.team\n team.membership = tm\n results[team.slug] = team\n\n return results", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def test_get_groups_users(self):\n api.user.create(\n username='chuck',\n email='chuck@norris.org',\n password='secret',\n )\n api.group.create(groupname='staff')\n api.group.add_user(username='chuck', groupname='staff')\n\n users = api.user.get_users(groupname='staff')\n usernames = [user.getUserName() for user in users]\n\n self.assertEqual(usernames, ['chuck'])", "def getusersinfo(request):\n baseurl = request.data.get(\"baseurl\", None)\n facility_id = request.data.get(\"facility_id\", None)\n username = request.data.get(\"username\", None)\n password = request.data.get(\"password\", None)\n\n user_info_url = urljoin(baseurl, reverse(\"kolibri:core:publicuser-list\"))\n params = {\n \"facility_id\": facility_id,\n }\n try:\n response = requests.get(\n user_info_url,\n data=params,\n auth=(\n \"username={}&{}={}\".format(\n username, FACILITY_CREDENTIAL_KEY, facility_id\n ),\n password,\n ),\n )\n response.raise_for_status()\n except (CommandError, HTTPError, ConnectionError) as e:\n if not username and not password:\n raise PermissionDenied()\n else:\n raise AuthenticationFailed(e)\n auth_info = response.json()\n if len(auth_info) > 1:\n user_info = [u for u in response.json() if u[\"username\"] == username][0]\n else:\n user_info = auth_info[0]\n facility_info = {\"user\": user_info, \"users\": auth_info}\n return facility_info", "def customer_group_get_related(group_id):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n query = \"\"\"\n SELECT \n 
`group_id`,\n `group_name`,\n `description`,\n `timestamp`,\n `created_by`,\n `creation_time`,\n `is_deleted`,\n `updated_by`,\n `role_id`,\n `is_default`,\n `is_customer`,\n `company_name`,\n `company_address`,\n `company_telephone`,\n `company_fax`,\n `company_website`,\n `company_sales_contact`,\n `company_purchase_contact`,\n `company_business`,\n `company_business_type`,\n `company_sales_email`,\n `company_purchase_email`,\n `company_reg_number`,\n `company_vat_number` \n FROM `groups` \n WHERE `groups`.`company_name` = (\n SELECT `asshole`.`company_name` \n FROM \n (\n SELECT * \n FROM `groups` \n WHERE `group_id` = \"%s\"\n ) AS `asshole`\n )\n \"\"\" %(group_id)\n \n group_details = None\n cursor = db.cursor()\n\n if cursor.execute(query) != 0:\n group_details = cursor.fetchall()\n\n cursor.close()\n db.close()\n\n return group_details", "async def get_group_info(self, group_id: int) -> models.Group:\n results = await self._api.call('group', 'get_group_info', gid=group_id)\n return models.Group(results.payload)", "def get(user):\n if user:\n return Member.get_by_key_name(user.user_id())", "def group(self, group_cn):\n group = self.search(base=GROUPS, cn=group_cn)\n\n if len(group) == 0:\n return []\n else:\n group_members = group[0]['attributes']['member']\n\n members = []\n for member in group_members:\n members.append(self.search(dn=member))\n\n if self.objects:\n return self.member_objects(members)\n\n return members", "def get_group_id_and_members(self, group_name, group_type=\"exclusivity\"):\n group_details = self.check_group_exists(group_name)\n\n try:\n if group_details is None:\n GeneralLogger.log_info(\"Creating group\")\n create_response = self.create_group(group_name, group_type)\n return create_response.json()[\"id\"], \\\n create_response.json()[\"members\"]\n else:\n GeneralLogger.log_info(\"Group exists\")\n\n return group_details\n except Exception:\n GeneralLogger.log_error(traceback.format_exc())", "def get_uw_group_members(\n gws_base_url: str,\n gws_ca_cert: str,\n gws_client_cert: str,\n gws_client_key: str,\n uw_group: str,\n) -> list:\n\n r = requests.get(\n gws_base_url + \"/group/\" + uw_group + \"/member\",\n verify=gws_ca_cert,\n cert=(gws_client_cert, gws_client_key),\n )\n\n group_members = []\n\n for member in r.json()[\"data\"]:\n if member[\"type\"] == \"uwnetid\":\n # Verify personal NetID\n # https://wiki.cac.washington.edu/pages/viewpage.action?spaceKey=infra&title=UW+NetID+Namespace\n if re.match(\"^[a-z][a-z0-9]{0,7}$\", member[\"id\"]):\n group_members.append(member[\"id\"])\n\n return group_members", "def get_by_id(cls, id):\n usergroup_node = graph.find_one('Usergroup',\n property_key='id',\n property_value=id)\n return usergroup_node", "def list_group_users(self, group_id, **params):\n url = 'groups/%s/users' % group_id\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def get(self, security_group_id: str = '') -> Union[Dict, List[Dict]]:\n\t\troute = f'{AWSSecurityGroupConsts.CLOUD_SECURITY_GROUP.value}/{security_group_id}'\n\t\treturn self._get(route=route)", "def get_user_group(username):\n sql = \"SELECT user_group \" \\\n \"FROM users \" \\\n \"WHERE username=:username\"\n result = db.session.execute(sql, {\"username\": username})\n user = result.fetchone()\n if user is None:\n return \"\"\n user_group = user[0]\n return user_group", "def get_members(id): # pylint: 
disable=I0011,W0622\n\n l = Legacy.query.get_or_404(id)\n\n if current_app.config.get('IGNORE_AUTH') is not True: # pragma: no cover\n if not l.can_view(g.user.id):\n raise Http403('Access denied')\n\n return {'members': [m.to_dict(public_only=True) for m in l.members]}", "def get(self):\n self._group = self._client.get(\n url=self._client.get_full_url(\n self.get_path(\n 'single', realm=self._realm_name, group_id=self._group_id\n )\n )\n )\n self._group_id = self._group[\"id\"]\n return self._group", "def get(self, session_id=None):\n if not session_id:\n session_id = self.session_id\n return self._cache.get(self.group, session_id)", "def remove_user_from_group(self, user_group_membership_id, **kwargs):\n resource_path = \"/userGroupMemberships/{userGroupMembershipId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"remove_user_from_group got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userGroupMembershipId\": user_group_membership_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def members(self):\n data = UserProfile.objects.filter(\n organization_id=self.id\n ).order_by(\n 'display_name', 'first_name', 'last_name'\n )\n\n return data", "def users_groups():\n if request.method == \"GET\":\n query = {\"token\": ciconnect_api_token, \"globus_id\": session[\"primary_identity\"]}\n # Get user info to derive unix name\n user = get_user_info(session)\n unix_name = user[\"metadata\"][\"unix_name\"]\n # Get user's group membership info based on session unix name\n users_group_memberships = get_user_group_memberships(session, unix_name)\n\n multiplexJson = {}\n group_membership_status = {}\n for group in users_group_memberships:\n if group[\"state\"] not in [\"nonmember\"]:\n group_name = group[\"name\"]\n group_query = (\n \"/v1alpha1/groups/\" + group_name + \"?token=\" + query[\"token\"]\n )\n multiplexJson[group_query] = {\"method\": \"GET\"}\n group_membership_status[group_query] = group[\"state\"]\n # POST request for multiplex return\n multiplex = get_multiplex(multiplexJson)\n\n users_groups = []\n for group in multiplex:\n if (\n session[\"url_host\"][\"unix_name\"]\n in (json.loads(multiplex[group][\"body\"])[\"metadata\"][\"name\"])\n ) and (\n len(\n (json.loads(multiplex[group][\"body\"])[\"metadata\"][\"name\"]).split(\n \".\"\n )\n )\n > 1\n ):\n users_groups.append(\n (\n 
json.loads(multiplex[group][\"body\"]),\n group_membership_status[group],\n )\n )\n # users_groups = [group for group in users_groups if len(group['name'].split('.')) == 3]\n\n # Query user's pending project requests\n pending_project_requests = get_user_pending_project_requests(unix_name)\n # Check user's member status of root connect group\n connect_group = session[\"url_host\"][\"unix_name\"]\n user_status = get_user_connect_status(unix_name, connect_group)\n\n domain_name = domain_name_edgecase()\n\n with open(\n brand_dir\n + \"/\"\n + domain_name\n + \"/form_descriptions/group_unix_name_description.md\",\n \"r\",\n ) as file:\n group_unix_name_description = file.read()\n\n return render_template(\n \"users_groups.html\",\n groups=users_groups,\n project_requests=pending_project_requests,\n user_status=user_status,\n group_unix_name_description=group_unix_name_description,\n )", "def get_group(self, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/\"))", "def get_participants(self, group):\n from sentry.models import User, UserOption, UserOptionValue\n\n # Identify all members of a project -- we'll use this to start figuring\n # out who could possibly be associated with this group due to implied\n # subscriptions.\n users = User.objects.filter(\n sentry_orgmember_set__teams=group.project.team,\n is_active=True,\n )\n\n # Obviously, users who have explicitly unsubscribed from this issue\n # aren't considered participants.\n users = users.exclude(\n id__in=GroupSubscription.objects.filter(\n group=group,\n is_active=False,\n user__in=users,\n ).values('user')\n )\n\n # Fetch all of the users that have been explicitly associated with this\n # issue.\n participants = {\n subscription.user: subscription.reason\n for subscription in GroupSubscription.objects.filter(\n group=group,\n is_active=True,\n user__in=users,\n ).select_related('user')\n }\n\n # Find users which by default do not subscribe.\n participating_only = set(\n uo.user_id\n for uo in UserOption.objects.filter(\n Q(project__isnull=True) | Q(project=group.project),\n user__in=users,\n key='workflow:notifications',\n ).exclude(\n user__in=[\n uo.user_id for uo in UserOption.objects.filter(\n project=group.project,\n user__in=users,\n key='workflow:notifications',\n ) if uo.value == UserOptionValue.all_conversations\n ]\n ) if uo.value == UserOptionValue.participating_only\n )\n\n if participating_only:\n excluded = participating_only.difference(participants.keys())\n if excluded:\n users = users.exclude(id__in=excluded)\n\n results = {}\n\n for user in users:\n results[user] = GroupSubscriptionReason.implicit\n\n for user, reason in participants.items():\n results[user] = reason\n\n return results", "def test_03_get_members_of_group(self):\n g1 = Group.query.first().mongo_id\n rv = self.app.get('groups/' + str(g1) + '/members')\n data = json.loads(rv.data)\n self.assertEqual(data[\"status\"], \"success\")", "def get_user_profile(self):\n return self.request('get', 'id/users')", "def get(cls, group_id, db_session=None):\n db_session = get_db_session(db_session)\n return db_session.query(cls.model).get(group_id)" ]
[ "0.6235419", "0.61992306", "0.60605794", "0.60599184", "0.6059044", "0.60332453", "0.6025113", "0.5984252", "0.5957381", "0.5928913", "0.5920698", "0.59183306", "0.5917003", "0.59147394", "0.59113026", "0.58406615", "0.5840103", "0.5822994", "0.5788936", "0.57630724", "0.5758944", "0.57501817", "0.5692017", "0.5656566", "0.5654035", "0.5626449", "0.5625217", "0.56005925", "0.55951107", "0.55732036", "0.5568596", "0.5543029", "0.55425763", "0.553661", "0.55189234", "0.55159587", "0.55127394", "0.5512122", "0.5508565", "0.5494736", "0.54817927", "0.5465885", "0.546539", "0.5462091", "0.5456675", "0.542356", "0.5404768", "0.54006594", "0.5398623", "0.537272", "0.5369259", "0.536144", "0.53570783", "0.53285724", "0.53203624", "0.5294067", "0.52914864", "0.52742004", "0.52666533", "0.52657056", "0.5256913", "0.5253176", "0.52395105", "0.5232332", "0.52155703", "0.52006555", "0.51941776", "0.51640016", "0.51618916", "0.51556903", "0.5152096", "0.5144941", "0.51443905", "0.5137134", "0.5115675", "0.51153344", "0.5115016", "0.5112622", "0.51111263", "0.50975096", "0.5071908", "0.5047661", "0.5042503", "0.5037139", "0.5025855", "0.50205106", "0.501622", "0.5014865", "0.50141114", "0.5013288", "0.4988391", "0.49881852", "0.49843413", "0.49803033", "0.49801484", "0.4979211", "0.49765992", "0.49667007", "0.49628323", "0.49588233" ]
0.67357963
0
Gets the specified user's console password information. The returned object contains the user's OCID, but not the password itself. The actual password is returned only when created or reset.
def get_user_ui_password_information(self, user_id, **kwargs):
    resource_path = "/users/{userId}/uiPassword"
    method = "GET"

    expected_kwargs = ["retry_strategy"]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "get_user_ui_password_information got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "userId": user_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="UIPasswordInformation")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="UIPasswordInformation")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_password(self):\n return self.controller.dbfilter.db.get('passwd/user-password')", "def get_password_from_user():\n pwd = ''\n keyboard = xbmc.Keyboard('', ADDON_NAME + ': ' + localise(32022), True)\n keyboard.doModal()\n if keyboard.isConfirmed():\n pwd = keyboard.getText()\n return pwd", "def password(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"password\")", "def GetPassword(self):\n return self._password", "def get_password_from_db(user_info):\n collection = get_collection(\"user\")\n user_from_db = collection.find_one({'name': user_info['name']})\n return user_from_db.get('password')", "def _get_password(self):\n return self._password", "def _get_user_password(self):\n return self.__user_password", "def _get_password(self):\r\n return self._password", "def get_passwd(self):\n if self.__password:\n aes_cipher = AESCipher()\n return aes_cipher.decrypt(self.__password, self.__aes_key)", "def getPassword(self):\n\t\treturn self.Password", "def get_password_data(self, instance_id):\r\n\r\n params = {'InstanceId' : instance_id}\r\n rs = self.get_object('GetPasswordData', params, ResultSet, verb='POST')\r\n return rs.passwordData", "def get_password(self):\n return self.__password", "def password(self) -> str:\n return pulumi.get(self, \"password\")", "def password(self) -> str:\n return pulumi.get(self, \"password\")", "def password(self) -> str:\n return pulumi.get(self, \"password\")", "def get_passwords() -> object:\n passwords = DBConnection.fetch_records(\"SELECT * FROM passwords\")\n return passwords", "def get_user_password(password):\n user = User.query.get(password)\n result = userSchema.dump(user)\n return jsonify(result)", "def GetPassword(self):\n pass", "def _get_password(self):\n if self._password != None:\n return self._password\n raise DbiException(\"Can't get password\")", "def get_account_password(self, accountid):\n payload = {'appkey': self._lr_object._get_api_key(), 'appsecret': self._lr_object._get_api_secret(),\n 'accountid':accountid}\n url = SECURE_API_URL + \"raas/v1/account/password\"\n return self._lr_object._get_json(url, payload)", "def get_verified_password(self):\n return self.controller.dbfilter.db.get('passwd/user-password-again')", "def passwords(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"passwords\")", "def get_password(self):\n raise NotImplementedError('get_password')", "def get_password(self) -> str:\n return self._password", "def password(self):\n return self._password", "def password(self):\n return self._password", "def password(self):\n return self._password", "def password(self):\n return self._password", "def Password(self):\n return self._Password", "def password(self) :\n\t\ttry :\n\t\t\treturn self._password\n\t\texcept Exception as e:\n\t\t\traise e", "def password( self ):\n return self._password", "def password(self):\n return self._password()", "def get_lc_passwd(self):\n if self.__lc_password:\n aes_cipher = AESCipher()\n return aes_cipher.decrypt(self.__lc_password, self.__aes_key)", "def get_password(username, interactive=sys.stdout.isatty()):\n try:\n return get_password_from_keyring(username)\n except PyiCloudNoStoredPasswordAvailableException:\n if not interactive:\n raise\n\n return getpass.getpass(\n \"Enter iCloud password for {username}: \".format(username=username,)\n )", "def passwords(self) -> Sequence[str]:\n return pulumi.get(self, \"passwords\")", "def device_password(self) -> str:\n return pulumi.get(self, \"device_password\")", "def device_password(self) -> str:\n 
return pulumi.get(self, \"device_password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"password\")", "def password(self):\n return self.factory.server_password", "def password(self) -> str:\n return self.get_env_var(self.password_var)", "def password(self) -> str:\n return self.get_env_var(self.password_var)", "def git_password_user_attribute(self):\n return self._git_password_user_attribute", "def get_user_password(username):\n return Migration.objects.select_related().get(\n mailbox__user__username=username\n ).password", "def get_user_password(text):\n return getpass.getpass(text)", "def password(self) -> str:\n return self._password", "def password(self) -> str:\n return self._password", "def password(cls):\n return User.CryptComparator(cls.password_hashed)", "def password(self, repository):\r\n return self._password(repository)", "def display_passwords(cls):\n return cls.passwords", "def _password(self):\n if 'password' in self._config:\n return self._config['password']\n else:\n while True:\n password = self._UI.get_password(\"Please enter your trac password: \")\n password2 = self._UI.get_password(\"Please confirm your trac password: \")\n if password != password2:\n self._UI.show(\"Passwords do not agree.\")\n else: break\n if self._UI.confirm(\"Do you want your password to be stored on your local system? 
(your password will be stored in plaintext in a file only readable by you)\", default_yes=False):\n self._config['password'] = password\n self._config._write_config()\n return password", "def get_password(self):\n mpw = master_pass.MPW(self.user, self.master_password)\n return mpw.password(self.ucs_server)", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"password\")", "def _get_password(self, service_name, username, reenter=False):\n\n password_from_keyring = None\n if reenter is False:\n try:\n password_from_keyring = keyring.get_password(\n service_name, username)\n except keyring.errors.KeyringError as exc:\n log.warning(\"Failed to get a valid keyring for password \"\n \"storage: {}\".format(exc))\n\n if password_from_keyring is None:\n log.warning(\"No password was found in the keychain for the \"\n \"provided username.\")\n if system_tools.in_ipynb():\n log.warning(\"You may be using an ipython notebook:\"\n \" the password form will appear in your terminal.\")\n password = getpass.getpass(\"{0}, enter your password:\\n\"\n .format(username))\n else:\n password = password_from_keyring\n\n return password, password_from_keyring", "def passwords(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"passwords\")", "def passwords(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"passwords\")", "def get_password(self) -> str:\n try:\n return self[\"password\"]\n except KeyError:\n raise MarathonNotConfigured(\n \"Could not find marathon password in system marathon config\"\n )", "def get_verified_password(self):\n raise NotImplementedError('get_verified_password')", "def get_domain_passwd(self):\n return self.domain_passwd.get_text()", "def GET_password(self):\r\n return BoringPage(_(\"Password\"), content=Password()).render()", "def password_profile(self):\n if \"passwordProfile\" in self._prop_dict:\n if isinstance(self._prop_dict[\"passwordProfile\"], OneDriveObjectBase):\n return self._prop_dict[\"passwordProfile\"]\n else :\n self._prop_dict[\"passwordProfile\"] = PasswordProfile(self._prop_dict[\"passwordProfile\"])\n return self._prop_dict[\"passwordProfile\"]\n\n return None", "def password_profile(self):\n if \"passwordProfile\" in self._prop_dict:\n if isinstance(self._prop_dict[\"passwordProfile\"], OneDriveObjectBase):\n return 
self._prop_dict[\"passwordProfile\"]\n else :\n self._prop_dict[\"passwordProfile\"] = PasswordProfile(self._prop_dict[\"passwordProfile\"])\n return self._prop_dict[\"passwordProfile\"]\n\n return None", "def getpass(self, type='IRC'):\n try:\n return self.data.passwords[type]\n except KeyError: return", "def get_password_from_keyring(username):\n result = keyring.get_password(KEYRING_SYSTEM, username)\n if result is None:\n raise PyiCloudNoStoredPasswordAvailableException(\n \"No pyicloud password for {username} could be found \"\n \"in the system keychain. Use the `--store-in-keyring` \"\n \"command-line option for storing a password for this \"\n \"username.\".format(username=username,)\n )\n\n return result", "def device_password(self) -> Optional[str]:\n return pulumi.get(self, \"device_password\")", "def device_password(self) -> Optional[str]:\n return pulumi.get(self, \"device_password\")", "def db_password(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"db_password\")", "def get_pass(self, item):\n text = str(self.get_contents(item), encoding=\"utf-8\")\n lines = text.split(\"\\n\")\n password = lines[0]\n return password", "def log_in_password(self):\n password_elem = waiter.find_element(self.driver, 'password', by=NAME)\n return password_elem.get_attribute('value')", "def get_auth_password():\n password = AUTH_PASSWORD_SCRIPT.get()\n if password:\n return password\n return DEFAULT_AUTH_PASSWORD.get()", "def auth_password(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"auth_password\")", "def password_builder():\n password = Credentials.password_buidler()\n return password", "def clean_password(self):\n return self.initial[\"password\"]", "def password(self) -> str:", "def get_password(self, service, username):\n init_part = self._keyring.get_password(service, username)\n if init_part:\n parts = [init_part]\n i = 1\n while True:\n next_part = self._keyring.get_password(\n service, '%s{{part_%d}}' % (username, i)\n )\n if next_part:\n parts.append(next_part)\n i += 1\n else:\n break\n return ''.join(parts)\n return None", "def getUserInfo(self, user):\n return pwd.getpwnam(user)[2:4]", "def current_password(self) -> str:\n return pulumi.get(self, \"current_password\")", "def password(self):\n raise AttributeError(\"password is not a readable attribute.\")" ]
[ "0.7236961", "0.68295044", "0.681905", "0.6805937", "0.66924196", "0.66504323", "0.66416496", "0.664007", "0.66202456", "0.66089183", "0.66012734", "0.6560518", "0.6538691", "0.6538691", "0.6538691", "0.6501631", "0.64902675", "0.6487195", "0.64705867", "0.6461097", "0.6410778", "0.63929635", "0.6380189", "0.63151896", "0.62929624", "0.62929624", "0.62929624", "0.62929624", "0.6250915", "0.62508494", "0.6250199", "0.6216237", "0.6178455", "0.6168867", "0.61335266", "0.6106436", "0.6106436", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.6096751", "0.60814595", "0.6079815", "0.6079815", "0.60754585", "0.6029526", "0.6025944", "0.60152435", "0.60152435", "0.60016924", "0.599981", "0.5990735", "0.59781396", "0.5977775", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5950368", "0.5864826", "0.5856505", "0.5856505", "0.585606", "0.5841315", "0.58392763", "0.58319235", "0.5830044", "0.5830044", "0.5826868", "0.5811679", "0.5808721", "0.5808721", "0.57981575", "0.5775255", "0.57735187", "0.5770997", "0.5759939", "0.57585216", "0.57360536", "0.57306135", "0.57156634", "0.57084167", "0.5696565", "0.56656927" ]
0.0
-1
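A minimal usage sketch for the get_user_ui_password_information document in the record above. Only the method itself comes from the record; the client setup, config profile, user OCID, and the UIPasswordInformation field names (inferred from the response_type named in the code) are assumptions.

import oci

# Assumes a standard ~/.oci/config file with a DEFAULT profile.
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Hypothetical user OCID; substitute a real one.
user_ocid = "ocid1.user.oc1..exampleuniqueid"

# The call returns UIPasswordInformation: the user's OCID and the password's
# lifecycle state -- never the password value itself, which is shown only
# when the console password is created or reset.
response = identity.get_user_ui_password_information(user_ocid)
info = response.data
print(info.user_id, info.lifecycle_state)  # field names are assumptions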
Gets details on a specified work request. The workRequestID is returned in the opc-work-request-id header for any asynchronous operation in the Identity and Access Management service.
def get_work_request(self, work_request_id, **kwargs):
    resource_path = "/workRequests/{workRequestId}"
    method = "GET"

    expected_kwargs = ["retry_strategy"]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "get_work_request got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "workRequestId": work_request_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="WorkRequest")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            response_type="WorkRequest")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getwork(self, data: Optional[str] = None) -> Dict[str, Any]:\n assert data is None or type(data) == str\n return self.rpc_call(\"getwork\", data)", "def get_request(request_id=None, workload_id=None, session=None):\n\n try:\n if not request_id and workload_id:\n request_ids = get_request_ids_by_workload_id(workload_id)\n if request_ids and len(request_ids) > 1:\n raise exceptions.IDDSException(\"More than one request with the same workload_id\")\n request_id = request_ids[0]\n\n req_select = \"\"\"select request_id, scope, name, requester, request_type, transform_tag, priority,\n status, locking, workload_id, created_at, updated_at, accessed_at, expired_at, errors,\n request_metadata, processing_metadata\n from atlas_idds.requests where request_id=:request_id\n \"\"\"\n req_stmt = text(req_select)\n result = session.execute(req_stmt, {'request_id': request_id})\n request = result.fetchone()\n\n if request is None:\n raise exceptions.NoObject('request request_id: %s, workload_id: %s cannot be found' % (request_id, workload_id))\n\n request = convert_request_to_dict(request)\n\n return request\n except sqlalchemy.orm.exc.NoResultFound as error:\n raise exceptions.NoObject('request request_id: %s, workload_id: %s cannot be found: %s' % (request_id, workload_id, error))", "def doi_info(self,doi):\n \n doi = _clean_doi(doi)\n \n url = self.BASE_URL + 'works/' + doi\n \n try:\n return self._make_get_request(url,models.work_single)\n except errors.RequestError:\n #TODO: Check for 404\n #last_response.status_code\n #TODO: Do this only if debugging is enabled\n if self.debug:\n #TODO: Also report code\n print(\"Error msg from server: \" + self.last_response.text)\n raise errors.InvalidDOI('Invalid DOI requested: ' + doi)\n \n #return self._make_get_request(url,models.Work,kwargs)", "async def request_job_info(self, job_id: str, *args, **kwargs) -> dict:\n # TODO: implement\n raise NotImplementedError('{} function \"request_job_info\" not implemented yet'.format(self.__class__.__name__))", "def export_getCurrentExecutionOrder(self,requestName):\n\n if type(requestName) in StringTypes:\n result = requestDB._getRequestAttribute('RequestID',requestName=requestName)\n if not result['OK']:\n return result\n requestID = result['Value']\n else:\n requestID = requestName\n\n result = requestDB.getCurrentExecutionOrder(requestID)\n return result", "def get_ride_request(reqID):\n req = RideRequest.query.get(reqID)\n return req", "def getworkunit(worker_id):\r\n\r\n worker_data = identify(worker_id)\r\n global time_start\r\n global started_working\r\n global work_status\r\n if work_status == Db.WorkStatusNames.has_work.value:\r\n\r\n saved_work_unit = Db.get_free_work_unit()\r\n if saved_work_unit is None:\r\n work_status = Db.WorkStatusNames.no_work.value\r\n else:\r\n if not started_working:\r\n print(\"Starting to work!\")\r\n time_start = time.time()\r\n started_working = True\r\n #It counts it's\r\n print(str(saved_work_unit[\"work_unit_id\"]) + \" \" + str(saved_work_unit))\r\n Db.assign_work_unit(saved_work_unit[\"work_unit_id\"], worker_id)\r\n return saved_work_unit\r\n\r\n\r\n\r\n return package_data({\"fail_message\": work_status})", "def getwork(self, data=None):\n if data is None:\n # Only if no data provided, it returns a WorkItem\n return WorkItem(**self.proxy.getwork())\n else:\n return self.proxy.getwork(data)", "def send_announcement_get_work_request(self):\n self.analysis_id = uuid.uuid4().hex\n while True:\n self.announce_socket.send_json(((self.analysis_id, self.work_addr),))\n 
try:\n return self.awthread.recv(self.work_socket, 250)\n except six.moves.queue.Empty:\n continue", "def export_getRequestFileStatus(self,requestName,lfns):\n if type(requestName) in StringTypes:\n result = requestDB._getRequestAttribute('RequestID',requestName=requestName)\n if not result['OK']:\n return result\n requestID = result['Value']\n else:\n requestID = requestName\n return requestDB.getRequestFileStatus(requestID,lfns)", "def qos_workload_get(self, workload_name, desired_attributes=None):\n return self.request( \"qos-workload-get\", {\n 'workload_name': [ workload_name, 'workload-name', [ basestring, 'None' ], False ],\n 'desired_attributes': [ desired_attributes, 'desired-attributes', [ QosWorkloadInfo, 'None' ], False ],\n }, {\n 'attributes': [ QosWorkloadInfo, False ],\n } )", "def list_work_requests(self, compartment_id, **kwargs):\n resource_path = \"/workRequests\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"resource_identifier\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_work_requests got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"resourceIdentifier\": kwargs.get(\"resource_identifier\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[WorkRequestSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[WorkRequestSummary]\")", "def work_order_receipt_retrieve(self, work_order_id, id=None):\n pass", "def get_tagging_work_request(self, work_request_id, **kwargs):\n resource_path = \"/taggingWorkRequests/{workRequestId}\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"get_tagging_work_request got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"workRequestId\": work_request_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"TaggingWorkRequest\")\n else:\n 
return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"TaggingWorkRequest\")", "def request_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"request_id\")", "def view_request_info(line):\n args = shlex.split(line)\n if not args:\n raise PappyException(\"Request id is required\")\n reqids = args[0]\n\n reqs = yield load_reqlist(reqids)\n\n for req in reqs:\n print ''\n print_request_extended(req)\n print ''", "def get_request(self):\n\t\t#self.__sem.lock()\n\t\ttry:\n\t\t\t\n\t\t\tr = self.get(thread.get_ident(),None)\n\t\t\tif r:\n\t\t\t\treturn r\n\t\t\traise VDOM_exception(_(\"No request associated with current thread\"))\n\t\texcept:\n\t\t\traise VDOM_exception(_(\"No request associated with current thread\"))\n\t\t#finally:\n\t\t#\tself.__sem.unlock()", "def vcac_worklfow_request(self):\n logging.info(\"Inside ucsvm_worklfow_request method base class\")\n return None", "def log_request(self, code='-', size='-'):\n print self._heading(\"HTTP Request\")\n #First, print the resource identifier and desired operation.\n print self.raw_requestline,\n #Second, print the request metadata\n for header, value in self.headers.items(): \n print header + \":\", value", "def export_getRequestStatus(self,requestName):\n\n if type(requestName) in StringTypes:\n result = requestDB._getRequestAttribute('RequestID',requestName=requestName)\n if not result['OK']:\n return result\n requestID = result['Value']\n else:\n requestID = requestName\n\n result = requestDB.getRequestStatus(requestID)\n return result", "def read_request(req_id: int, db: Session = Depends(get_db)):\n db_req = crud.get_request(db, req_id=req_id)\n if db_req is None:\n raise HTTPException(status_code=404, detail=\"Request not found\")\n return db_req", "def queryRequest(self, requestName):\n urlQuery = \"request/%s\" % requestName\n logging.info(\"Querying request '%s'\" % requestName)\n logging.info(\"Query: '%s':\" % urlQuery)\n r = self.restSender.get(urlQuery)\n print str(r)", "async def get_job_execution_details(\n self,\n request: metrics.GetJobExecutionDetailsRequest = None,\n *,\n retry: retries.Retry = gapic_v1.method.DEFAULT,\n timeout: float = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> pagers.GetJobExecutionDetailsAsyncPager:\n # Create or coerce a protobuf request object.\n request = metrics.GetJobExecutionDetailsRequest(request)\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method_async.wrap_method(\n self._client._transport.get_job_execution_details,\n default_timeout=None,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Send the request.\n response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)\n\n # This method is paged; wrap the response in a pager, which provides\n # an `__aiter__` convenience method.\n response = pagers.GetJobExecutionDetailsAsyncPager(\n method=rpc, request=request, response=response, metadata=metadata,\n )\n\n # Done; return the response.\n return response", "def get_review_request(self, rid):\r\n rsp = self.api_call('api/review-requests/%s/' % rid)\r\n return rsp['review_request']", "def req_id(self) -> str:\n pass", "def RetrieveWorkerInCapability(**argd):\n checkSign = argd[\"nsid\"] + \",\" + argd[\"renid\"]\n token = EncryptUtil.DecodeURLSafeBase64(argd[\"token\"])\n try:\n tokenRet = EncryptUtil.VerifySign(checkSign, token, 
GlobalConfigContext.AUTH_NameService_PublicKey)\n except:\n tokenRet = False\n if tokenRet is False:\n return CGateway._UnauthorizedServiceResponse(token)\n flag1, ret1 = CGateway.core.RetrieveHumanWithCapability(GlobalConfigContext.AUTH_INTERNAL_SESSION, argd[\"capabilityName\"])\n flag2, ret2 = CGateway.core.RetrieveAgentWithCapability(GlobalConfigContext.AUTH_INTERNAL_SESSION, argd[\"capabilityName\"])\n return CGateway._DumpResponse(ret1 + ret2)", "def request_id(self) -> Optional[str]:\n return self._request_id", "def work_order_receipt_retrieve(self, work_order_id, id=None):\n if work_order_id is None or not is_hex(work_order_id):\n logging.error(\"Work order id is empty or Invalid\")\n return create_jrpc_response(id, JsonRpcErrorCode.INVALID_PARAMETER,\n \"Worker id is empty or Invalid\")\n\n json_rpc_request = {\n \"jsonrpc\": \"2.0\",\n \"method\": \"WorkOrderReceiptRetrieve\",\n \"id\": id,\n \"params\": {\n \"workOrderId\": work_order_id\n }\n }\n response = self.__uri_client._postmsg(json.dumps(json_rpc_request))\n return response", "def get_intake_detail(request, intake_csid):\n return handle_request(request, 'cspace-services/intakes/%s' % intake_csid)", "def _fetch_request_info(request):\n try:\n subject_id = request.environ['api.cache.subject_id']\n method = request.environ['api.cache.method']\n version = request.environ['api.cache.version']\n except KeyError:\n return None\n else:\n return (subject_id, method, version)", "def read_work(self):\n # PROTECTED REGION ID(AsyncTabata.work_read) ENABLED START #\n return self._work\n # PROTECTED REGION END # // AsyncTabata.work_read", "def get_current_request():\n return crum.get_current_request()", "def export_getRequest(self,requestType):\n gLogger.info(\"RequestHandler.getRequest: Attempting to get request type\", requestType)\n try:\n res = requestDB.getRequest(requestType)\n return res\n except Exception,x:\n errStr = \"RequestManagerHandler.getRequest: Exception while getting request.\"\n gLogger.exception(errStr,requestType,lException=x)\n return S_ERROR(errStr)", "def test_get_provisioning_request_by_id(self):\n response = self.client.open('/api/provisioning/port/{requestId}'.format(requestId='requestId_example'),\n method='GET')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def reqid(self) -> str:", "def reqid(self) -> str:", "def request(self, *args, **kwargs):\n self.work_request_queue.put((args, kwargs))\n return self.result_queue.get()", "def request(self, *args, **kwargs):\n self.work_request_queue.put((args, kwargs))\n return self.result_queue.get()", "def get_request(self, request_id):\n doc_id = bson.objectid.ObjectId(request_id)\n coll = self._db.get_collection(COLLECTION_REQUEST)\n doc = coll.find_one(filter={\n '_id': doc_id\n })\n return doc", "def test_get_workout(self):\n response = self.client.open(\n '/workout/{id}'.format(id='id_example'),\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def request_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"request_id\")", "def work_order_receipt_lookup(self, worker_service_id,\n worker_id,\n requester_id,\n receipt_status, id=None):\n pass", "def make_work_request(self):\n request = StoreRequest()\n self.bb_client.read_wait(request, self.handle_request)", "def __get_request(self, request_id):\r\n if request_id not in self.__requests:\r\n self.__requests[request_id] = Request(request_id)\r\n return self.__requests[request_id]", "def __get_request(self, 
request_id):\r\n if request_id not in self.__requests:\r\n self.__requests[request_id] = Request(request_id)\r\n return self.__requests[request_id]", "def get_shift_report_info(self, shift_id=0, shift_reg_id=0):\n try:\n personal_data = f'{\"-\" * 20}\\n'\n general_shift_info = []\n\n if shift_id == 0: # waiter\n shift_id = self.db_handler.get_shift_registration_by_shift_reg_id(shift_reg_id)[1]\n personal_data += self.get_shift_report_info_waiter(shift_reg_id)\n elif shift_reg_id == 0: # manager\n personal_data += self.get_shift_report_info_manager(shift_id)\n\n general_shift_info = self.get_shift_report_general_info(shift_id)\n\n msg = general_shift_info + personal_data\n\n return msg\n except Exception as err:\n method_name = sys._getframe().f_code.co_name\n\n self.logger.write_to_log('exception', 'model')\n self.logger.write_to_err_log(f'exception in method {method_name} - {err}', 'model')", "def enqueue_agent_details(self, request_id: str, additional_data: Dict[str, Any]):\n base_data = {\"request_id\": request_id}\n for key, val in additional_data.items():\n base_data[key] = val\n self.message_queue.put(\n Packet(\n packet_type=PACKET_TYPE_AGENT_DETAILS,\n subject_id=self.request_id_to_channel_id[request_id],\n data=base_data,\n )\n )\n self.process_outgoing_queue(self.message_queue)\n self.log_metrics_for_packet(self.request_id_to_packet[request_id])\n # TODO Sometimes this request ID is lost, and we don't quite know why\n del self.request_id_to_channel_id[request_id]\n del self.request_id_to_packet[request_id]", "def info_request():\n return SentmanRequest(SentmanRequest.GET_INFO)", "def workflow_fetch_item_task_spec(dtool_smb_config):\n return {\n 'item_id': {'key': 'search_dict_task->result'},\n 'source': 'smb://test-share/1a1f9fad-8589-413e-9602-5bbd66bfe675',\n 'filename': 'fetched_item.txt',\n 'dtool_config': dtool_smb_config,\n 'stored_data': True,\n }", "def get_details():\r\n return run_operations.get_run_details(experiment_name, job_name).as_dict(key_transformer=camel_case_transformer)", "def GetJob(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def detail(self, req):\n return self.index(req)", "def get_provisioning_request_by_id(requestId):\n doc = PortProvisionRequest.get(id=requestId)\n if doc:\n return doc\n else:\n return 'Not Found', 404", "def RequestInformation(self, request, inInfo, outInfo):\n if self.need_to_read():\n self._read_up_front()\n self._update_time_steps()\n return 1 # NOTE: ALWAYS return 1 on pipeline methods", "def getRequest(self):\n try:\n return self._getRequest()\n except:\n log_func.fatal(u'Error get OLAP server request')\n return dict()", "def showDetails(self, request, access_type,\n page_name=None, params=None, **kwargs):\n\n logic = params['logic']\n ranking = logic.getFromKeyFieldsOr404(kwargs)\n student = ranking.student\n\n all_d = gci_task_model.TaskDifficultyTag.all().fetch(100)\n\n list_params = params.copy()\n list_params['list_description'] = self.DETAILS_MSG_FMT % student.user.name\n list_params['public_field_extra'] = lambda entity: {\n 'task': entity.title,\n 'org': entity.scope.name,\n 'points_difficulty': entity.taskDifficulty(all_d).value\n }\n list_params['public_field_keys'] = [\n 'task', 'org', 'points_difficulty', 'closed_on']\n list_params['public_field_names'] = [\n 'Task', 'Organization', 'Points (Difficulty)', 'Completed on']\n list_params['public_row_extra'] = lambda entity: {\n 'link': 
redirects.getPublicRedirect(entity, {'url_name': 'gci/task'}),\n }\n\n if lists.isDataRequest(request):\n return self.getListRankingDetailsData(request, list_params, student)\n\n contents = []\n order = ['closed_on']\n list = lists.getListGenerator(request, list_params, order=order, idx=0)\n contents.append(list)\n\n return self._list(request, list_params, contents, page_name)", "def taskbyddmreqid(self, **kwargs):\n rows = self.api.query(None, None, self.Task.TaskByDdmReqid_sql, ddmreqid=kwargs[\"ddmreqid\"])\n return rows", "def work(self, request):\n raise NotImplementedError", "def work_type_info(self,type_id):\n url = self.BASE_URL + 'types/' + type_id\n return self._make_get_request(url,models.pass_through)", "def _get_django_request():\n return execution_context.get_opencensus_attr(REQUEST_THREAD_LOCAL_KEY)", "def get_req(self, path):\n \n res = self.client.get(path)\n return res", "def workflow_details(self) -> Optional[pulumi.Input['ServerWorkflowDetailsArgs']]:\n return pulumi.get(self, \"workflow_details\")", "def workflow_details(self) -> Optional[pulumi.Input['ServerWorkflowDetailsArgs']]:\n return pulumi.get(self, \"workflow_details\")", "def _get_workload(self):\n return self._workload", "def get_request():\n return getattr(transaction.ctx, 'request', None)", "def trace_requests(request):\n tracer = initTracer()\n\n base_name = os.environ.get(\"FUNCTION_NAME\") + \"-trace-\"\n\n with tracer.span(name=base_name + \"metadata-call\"):\n\n import requests\n\n r = requests.get(\"http://metadata.google.internal/computeMetadata/v1/project/numeric-project-id\",\n headers={'Metadata-Flavor': 'Google'})\n\n\n return r.content", "def read(self) -> requests.request:\n # Check if id is set,\n if self.args.id is not None:\n self.REQUEST_URL += str(self.args.id)\n\n # Send GET request\n return requests.get(self.REQUEST_URL)", "def test_get_transaction_details_request(self):\n self.trans_details.get_transaction_details(\n trans_id = 123456,\n )", "def get_work_order_detail(self, date_range):\n work_order_obj = self.env[\"task.line\"]\n start = datetime.strptime(date_range.get(\"date_from\"), \"%Y-%m-%d\")\n end = datetime.strptime(date_range.get(\"date_to\"), \"%Y-%m-%d\")\n step = timedelta(days=1)\n workorder_detail = []\n while start <= end:\n sdate = str(\n datetime.strptime(\n str(start.date()) + \" 00:00:00\", DEFAULT_SERVER_DATETIME_FORMAT\n )\n )\n edate = str(\n datetime.strptime(\n str(start.date()) + \" 23:59:59\", DEFAULT_SERVER_DATETIME_FORMAT\n )\n )\n work_order_ids = work_order_obj.search(\n [(\"date_issued\", \">=\", sdate), (\"date_issued\", \"<=\", edate)]\n )\n if work_order_ids:\n parts_data = {}\n parts_value = []\n for parts_line in work_order_ids:\n if (\n parts_line.fleet_service_id\n and parts_line.fleet_service_id.state == \"done\"\n ):\n parts_dict = {\n \"wo_name\": parts_line.fleet_service_id\n and parts_line.fleet_service_id.name\n or \"\",\n \"vehicle_id\": parts_line.fleet_service_id\n and parts_line.fleet_service_id.vehicle_id\n and parts_line.fleet_service_id.vehicle_id.name\n or \"\",\n \"part_no\": parts_line.product_id\n and parts_line.product_id.default_code\n or \"\",\n \"part_name\": parts_line.product_id\n and parts_line.product_id.name\n or \"\",\n \"vehicle_make\": parts_line.vehicle_make_id\n and parts_line.vehicle_make_id.name\n or \"\",\n \"qty\": parts_line.qty or 0.0,\n \"uom\": parts_line.product_uom\n and parts_line.product_uom.name\n or \"\",\n \"old_part_return\": parts_line.old_part_return\n and \"Yes\"\n or \"No\",\n \"issued_by\": 
parts_line.issued_by\n and parts_line.issued_by.name\n or \"\",\n \"remarks\": parts_line.fleet_service_id\n and parts_line.fleet_service_id.note\n or \"\",\n }\n parts_value.append(parts_dict)\n if parts_value:\n parts_value = sorted(parts_value, key=lambda k: k[\"wo_name\"])\n parts_data = {\"date\": start.date(), \"value\": parts_value}\n workorder_detail.append(parts_data)\n start += step\n return workorder_detail", "def get_details(codetoget):\n\tTrainingComponentDetailsRequest= client.factory.create('TrainingComponentDetailsRequest')\n\tTrainingComponentDetailsRequest.Code=codetoget\n\tTrainingComponentInformationRequested=client.factory.create('TrainingComponentInformationRequested')\n\tTrainingComponentInformationRequested.ShowReleases=True\n\tTrainingComponentInformationRequested.ShowUnitGrid=True\n\tTrainingComponentInformationRequested.ShowComponents=True\n\tTrainingComponentDetailsRequest.InformationRequest=TrainingComponentInformationRequested\n\treturn client.service.GetDetails(TrainingComponentDetailsRequest)", "def job_detail(request: HttpRequest, job_id: str) -> HttpResponse:\n table = dynamodb.Table(table_name)\n sis_account_id = request.LTI[\"custom_canvas_account_sis_id\"]\n school_id = sis_account_id.split(\":\")[1]\n school_key = f'SCHOOL#{school_id.upper()}'\n job_query_params = {\n 'KeyConditionExpression': Key('pk').eq(school_key) & Key('sk').eq(job_id),\n 'ScanIndexForward': False,\n }\n logger.debug(f'Retrieving job details for job {job_id}.')\n job = table.query(**job_query_params)['Items'][0]\n\n # Update string timestamp to datetime.\n job.update(created_at=parse_datetime(job['created_at']))\n job.update(updated_at=parse_datetime(job['updated_at']))\n\n tasks_query_params = {\n 'KeyConditionExpression': Key('pk').eq(job_id),\n 'ScanIndexForward': False,\n }\n task_query_result = table.query(**tasks_query_params)\n tasks = task_query_result['Items']\n\n # If there are additional items to be retrieved for this job, the LastEvaluatedKey will be present\n # Use this key as the starting point for subsequent queries to build a full list\n while task_query_result.get('LastEvaluatedKey', False):\n tasks_query_params['ExclusiveStartKey'] = task_query_result.get('LastEvaluatedKey')\n task_query_result = table.query(**tasks_query_params)\n tasks.extend(task_query_result['Items'])\n\n context = {\n 'job': job,\n 'tasks': tasks,\n 'canvas_url': settings.CANVAS_URL\n }\n logger.debug(f'Retrieved job details for job {job_id}.', extra=context)\n return render(request, \"bulk_site_creator/job_detail.html\", context=context)", "def execute(self, requestName, conn = None, trans = False):\n self.sql = \"SELECT request_id from reqmgr_request WHERE \"\n self.sql += \"request_name=:request_name\"\n binds = {\"request_name\": requestName}\n reqID = self.dbi.processData(self.sql, binds, conn = conn, transaction = trans)\n result = self.formatOne(reqID)\n if result == []:\n return None\n return result[0]", "def get_current_request(self):\n\n return self.__current_request_mock", "def get(self, request_id):\n request = RequestModel.select_by_id(request_id)\n if request:\n return request.json(), 200\n return {'message': 'Request not found'}, 404", "def _retrieve_task_id(job_name, res_id, job_dict):\n if job_dict:\n workers = list(job_dict.keys())\n for worker in workers:\n for job in job_dict[worker]:\n if 'name' in job:\n if job['name'] == job_name:\n if res_id in job['args']:\n return job['id']\n elif 'request' in job:\n scheduled_job = job['request']\n if 'name' in scheduled_job:\n if 
scheduled_job['name'] == job_name:\n if res_id in scheduled_job['args']:\n return scheduled_job['id']\n\n return None", "def getObject(self, customerguid, jobguid=\"\",executionparams=None):", "def main():\n url = urllib.request.Request(sys.argv[1])\n with urllib.request.urlopen(url) as response:\n info = response.headers.get(\"X-Request-Id\")\n print(info)", "def x_request_id(self):\n return self._x_request_id", "def printable_request(self):\n req = self.response.request\n msg = \"-- Request : {} | {} -- \\r\\n\".format(req.method, req.url)\n msg += \"Headers: {} \\r\\n\".format(str(req.headers))\n msg += \"Body: {} \\r\\n\\r\\n\".format(str(req.body))\n return msg", "def find_obs_request_comment(self, request_id=None, project_name=None):\n if self.do_comments:\n comments = self.commentapi.get_comments(request_id=request_id, project_name=project_name)\n for c in comments.values():\n m = comment_marker_re.match(c['comment'])\n if m:\n return {\n 'id': c['id'],\n 'state': m.group('state'),\n 'result': m.group('result'),\n 'comment': c['comment'],\n 'revision': m.group('revision')}\n return {}", "def get_request(self):\n return self._request", "def test_get_work_from_edition_data(self):\n data = {\"works\": [{\"key\": \"/work/OL1234W\"}]}\n responses.add(\n responses.GET,\n \"https://openlibrary.org/work/OL1234W\",\n json={\"hi\": \"there\"},\n status=200,\n )\n result = self.connector.get_work_from_edition_data(data)\n self.assertEqual(result, {\"hi\": \"there\"})", "def get_job(self, identifier: str):\n self._log_operation('Getting job {i}'.format(i=identifier))\n return self._job_queue.get_job_details(identifier)", "def getexperimentinfo(expid):\n rdata = {}\n rdata['expId'] = expid\n res = requests.get(scbd_server_address + '/experiments/get_details', json=rdata)\n if res.status_code == 200:\n outstr = ''\n for cres in res.json()['details']:\n outstr += cres[0] + ':' + cres[1] + '<br>'\n # details=res.json()['details']\n return outstr\n return []", "def _retrieve_job_id(job_name, res_id):\n active_jobs = celery_inspector.active()\n job_id = _retrieve_task_id(job_name, res_id, active_jobs)\n if not job_id:\n reserved_jobs = celery_inspector.reserved()\n job_id = _retrieve_task_id(job_name, res_id, reserved_jobs)\n if not job_id:\n scheduled_jobs = celery_inspector.scheduled()\n job_id = _retrieve_task_id(job_name, res_id, scheduled_jobs)\n return job_id", "def get_request_id(request_json):\n request_id = request_json['requestInfo'].get('requestId')\n if not request_id:\n request_id = request_json['requestInfo'].get('requestID')\n return request_id", "def getRequest():\n return getLocal('request')", "def rinex_info(rinex_fname,\n nav_fname,\n work_path=None):\n if not os.path.isfile(rinex_fname):\n raise ValueError('RINEX observation file {} does not exist'.format(rinex_fname))\n if not os.path.isfile(nav_fname):\n raise ValueError('RINEX navigation file {} does not exist'.format(nav_fname))\n # information mapping\n info = {}\n def process_output(line):\n if line.startswith('Receiver type'):\n info['receiver'] = line.split(':')[1].split('(')[0].strip()\n elif line.lstrip().startswith('antenna WGS 84 (xyz)'):\n # make sure units are [m]\n assert line.rstrip().endswith('(m)')\n info['xyz'] = map(float, line.split(':')[1].split('(')[0].split())\n elif line.lstrip().startswith('antenna WGS 84 (geo)'):\n if line.split(':')[1].lstrip()[0] in ['N', 'S']:\n # skip arcmin, arcsec line\n pass\n else:\n lat, _, lon, _ = line.split(':')[1].split(None, 3)\n info['lat'] = float(lat)\n lon = 
float(lon)\n while lon > 180:\n lon -= 360\n info['lon'] = lon\n elif line.lstrip().startswith('WGS 84 height'):\n assert line.rstrip().endswith('m')\n info['height'] = float(line.split(':')[1].rstrip()[:-1])\n elif line.startswith('|qc - header| position'):\n # make sure units are [m]\n assert line.rstrip()[-1] == 'm'\n info['xyz error'] = float(line.split(':')[1].rstrip()[:-1])\n elif line.startswith('Observation interval'):\n info['interval'] = float(line.split(':')[1].split()[0])\n elif line.startswith('Moving average MP12'):\n info['MP12'] = float(line.split(':')[1].rstrip()[:-1])\n elif line.startswith('Moving average MP21'):\n info['MP21'] = float(line.split(':')[1].rstrip()[:-1])\n # query the RINEX file via teqc quality check --- process in given\n # work area to avoid intermediate file pollution\n with SmartTempDir(work_path) as work_path:\n intermediate_rinex_fname = replace_path(work_path, rinex_fname)\n os.symlink(os.path.abspath(rinex_fname),\n intermediate_rinex_fname)\n intermediate_nav_fname = replace_path(work_path, nav_fname)\n os.symlink(os.path.abspath(nav_fname),\n intermediate_nav_fname)\n sh.teqc('+qc',\n '+quiet',\n '-R',\n '-S',\n '-E',\n '-C',\n '-J',\n '-nav', intermediate_nav_fname,\n intermediate_rinex_fname,\n _cwd=work_path,\n _out=process_output,\n _err=sys.stderr)\n os.remove(intermediate_rinex_fname)\n os.remove(intermediate_nav_fname)\n return info", "def shn_logs_req_rheader(r):\r\n\r\n if r.representation == \"html\":\r\n if r.name == \"req\":\r\n req_record = r.record\r\n if req_record:\r\n rheader_tabs = shn_rheader_tabs( r,\r\n [(T(\"Edit Details\"), None),\r\n (T(\"Items\"), \"req_item\"),\r\n ]\r\n )\r\n rheader = DIV( TABLE(\r\n TR( TH( T(\"Date Requested\") + \": \"),\r\n req_record.date,\r\n TH( T(\"Date Required\") + \": \"),\r\n req_record.date_required,\r\n ),\r\n TR( TH( T(\"Requested By Warehouse\") + \": \"),\r\n inventory_store_represent(req_record.inventory_store_id),\r\n ),\r\n TR( TH( T(\"Commit. Status\") + \": \"),\r\n log_req_status_dict.get(req_record.commit_status),\r\n TH( T(\"Transit. Status\") + \": \"),\r\n log_req_status_dict.get(req_record.transit_status),\r\n TH( T(\"Fulfil. 
Status\") + \": \"),\r\n log_req_status_dict.get(req_record.fulfil_status)\r\n ), \r\n TR( TH( T(\"Comments\") + \": \"),\r\n TD(req_record.comments, _colspan=3)\r\n ),\r\n ),\r\n rheader_tabs\r\n )\r\n return rheader\r\n return None", "def get_wo_mthly_smry(self, workorder_browse):\n wo_summary_data = []\n wo_check_dict = {}\n no = 0\n if workorder_browse:\n for work_rec in workorder_browse:\n if work_rec.state and work_rec.state == \"done\":\n no += 1\n identification = \"\"\n repair_line_data = \"\"\n if work_rec.vehicle_id:\n identification += work_rec.vehicle_id.name\n if work_rec.vehicle_id.f_brand_id:\n identification += \" \" + work_rec.vehicle_id.f_brand_id.name\n if work_rec.vehicle_id.model_id:\n identification += \" \" + work_rec.vehicle_id.model_id.name\n for repaire_line in work_rec.repair_line_ids:\n if repaire_line.complete is True:\n if (\n repaire_line.repair_type_id\n and repaire_line.repair_type_id.name\n ):\n repair_line_data += (\n repaire_line.repair_type_id.name + \", \"\n )\n if work_rec.parts_ids:\n for parts_line in work_rec.parts_ids:\n if work_rec.id in wo_check_dict.keys():\n parts_data = {\n \"no\": -1,\n \"location\": \"\",\n \"type\": \"\",\n \"wo\": \"\",\n \"identification\": \"\",\n \"vin\": \"\",\n \"plate_no\": \"\",\n \"work_performed\": \"\",\n \"part\": parts_line.product_id\n and parts_line.product_id.default_code\n or \"\",\n \"qty\": parts_line.qty or 0.0,\n \"uom\": parts_line.product_uom\n and parts_line.product_uom.name\n or \"\",\n }\n wo_summary_data.append(parts_data)\n else:\n wo_check_dict[work_rec.id] = work_rec.id\n parts_data = {\n \"no\": no,\n \"location\": work_rec.team_id\n and work_rec.team_id.name\n or \"\",\n \"type\": work_rec.main_type or \"\",\n \"wo\": work_rec.name or \"\",\n \"identification\": identification or \"\",\n \"vin\": work_rec.vehicle_id\n and work_rec.vehicle_id.vin_sn\n or \"\",\n \"plate_no\": work_rec.vehicle_id\n and work_rec.vehicle_id.license_plate\n or \"\",\n \"work_performed\": repair_line_data\n and repair_line_data[:-2]\n or \"\",\n \"part\": parts_line.product_id\n and parts_line.product_id.default_code\n or \"\",\n \"qty\": parts_line.qty or 0.0,\n \"uom\": parts_line.product_uom\n and parts_line.product_uom.name\n or \"\",\n }\n wo_summary_data.append(parts_data)\n else:\n parts_data = {\n \"no\": no,\n \"location\": work_rec.team_id\n and work_rec.team_id.name\n or \"\",\n \"type\": work_rec.main_type or \"\",\n \"wo\": work_rec.name or \"\",\n \"identification\": identification or \"\",\n \"vin\": work_rec.vehicle_id\n and work_rec.vehicle_id.vin_sn\n or \"\",\n \"plate_no\": work_rec.vehicle_id\n and work_rec.vehicle_id.license_plate\n or \"\",\n \"work_performed\": repair_line_data\n and repair_line_data[:-2]\n or \"\",\n \"vehicle_make\": \"\",\n \"qty\": \"\",\n \"uom\": \"\",\n }\n wo_summary_data.append(parts_data)\n if not wo_summary_data:\n msg = _(\n \"Warning! 
\\n\\\n No data Available for selected work order.\"\n )\n raise UserError(msg)\n return wo_summary_data", "def hit_details(hit_id, sandbox, recruiter):\n prolific_check(recruiter, sandbox)\n rec = by_name(recruiter, skip_config_validation=True)\n details = rec.hit_details(hit_id, sandbox)\n print(json.dumps(details, indent=4, default=str))", "def get(self, request, id):\n workflow = get_object_or_404(Workflow, id=id)\n serializer = WorkflowDetailedSerializer(workflow, context={\"request\": request})\n return Response(serializer.data)", "def get_request_journal(self):\n response = requests.get(self.requests_url)\n if response.status_code != http_client.OK:\n raise ValueError(response.text, response.status_code)\n response_body = json.loads(response.text)\n return response_body[\"requests\"]", "def getJob(workload):\n job = Job()\n job[\"task\"] = workload.getTask(\"reco\").getPathName()\n job[\"workflow\"] = workload.name()\n job[\"location\"] = \"T1_US_FNAL\"\n job[\"owner\"] = \"evansde77\"\n job[\"group\"] = \"DMWM\"\n return job", "def job_details(user_data, cache, job_id):\n user = cache.ensure_user(user_data)\n job = cache.get_job(user, job_id)\n\n if not job or not job.project_id:\n return result_response(JobDetailsResponseRPC(), None)\n\n try:\n job.project = cache.get_project(user, job.project_id)\n except IntermittentProjectIdError:\n pass\n\n return result_response(JobDetailsResponseRPC(), job)", "def describe_workteam(WorkteamName=None):\n pass", "def GetOperation(\n self,\n request: google.longrunning.operations_pb2.GetOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.longrunning.operations_pb2.Operation:", "def retrieveDelegationRequest( self, requestId, userDN ):\n cmd = \"SELECT Pem FROM `ProxyDB_Requests` WHERE Id = %s AND UserDN = '%s'\" % ( requestId,\n userDN )\n retVal = self._query( cmd )\n if not retVal[ 'OK' ]:\n return retVal\n data = retVal[ 'Value' ]\n if len( data ) == 0:\n return S_ERROR( \"No requests with id %s\" % requestId )\n request = X509Request()\n retVal = request.loadAllFromString( data[0][0] )\n if not retVal[ 'OK' ]:\n return retVal\n return S_OK( request )", "def t_get_process(self, *args, **kwargs):\n d_request = {}\n d_ret = {}\n b_status = False\n hits = 0\n for k, v in kwargs.items():\n if k == 'request': d_request = v\n d_meta = d_request['meta']\n str_path = '/api/v1' + d_meta['path']\n d_ret = self.DB_get(path = str_path)\n return {'d_ret': d_ret,\n 'status': True}", "def get_request(self, target_uri, resource_type, params=None):\n message, sc = self.request(target_uri, GET, params=params)\n operation = 'GET {resource_type}'.format(resource_type=resource_type)\n self.check_status_code_success(operation, sc, message)\n return message" ]
[ "0.56530815", "0.5529833", "0.54778296", "0.5333147", "0.53069115", "0.5274904", "0.51362526", "0.51026773", "0.5036096", "0.49953598", "0.49753773", "0.49725127", "0.4961871", "0.4939883", "0.49278948", "0.4925855", "0.49189013", "0.49009278", "0.4879232", "0.48540133", "0.48538992", "0.4844337", "0.48351547", "0.4832383", "0.4820794", "0.48182362", "0.48112178", "0.48111847", "0.47723818", "0.47482735", "0.47309303", "0.4721636", "0.47186375", "0.47172943", "0.47096983", "0.47096983", "0.46770075", "0.46770075", "0.46698737", "0.46633202", "0.46621037", "0.46211863", "0.46182507", "0.4614701", "0.4614701", "0.46139473", "0.46122292", "0.45908633", "0.45731682", "0.45720387", "0.4567816", "0.45659664", "0.45632237", "0.45585865", "0.45511302", "0.45322567", "0.45114988", "0.4510647", "0.4507255", "0.44896504", "0.44780427", "0.4473678", "0.4473678", "0.4461377", "0.44601235", "0.44561014", "0.44529033", "0.44472227", "0.4445561", "0.44439447", "0.44413438", "0.44319654", "0.44172597", "0.44155693", "0.44081482", "0.44060424", "0.43976575", "0.4391656", "0.4391124", "0.43897644", "0.43805274", "0.43768036", "0.43756044", "0.43698347", "0.43666732", "0.4365514", "0.43542355", "0.43496925", "0.43495426", "0.43432128", "0.434176", "0.43362164", "0.43336037", "0.43285945", "0.43220285", "0.4314511", "0.4311724", "0.4310772", "0.4307707", "0.43028852" ]
0.6257329
0
Lists the API signing keys for the specified user. A user can have a maximum of three keys. Every user has permission to use this API call for their own user ID. An administrator in your organization does not need to write a policy to give users this ability.
def list_api_keys(self, user_id, **kwargs): resource_path = "/users/{userId}/apiKeys" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_api_keys got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[ApiKey]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[ApiKey]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_keys(user_id):\n\n db_conn = sqlite3.connect(db_path)\n db = db_conn.cursor()\n keys = []\n try:\n for row in db.execute(\"SELECT public_key FROM public_keys WHERE username=? AND status=?\", [user_id, PK_STATUS_OK]):\n keys.append({\"public\": row[0]})\n db_conn.close()\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n if(keys == []):\n abort(404)\n return jsonify({'user':{'username':user_id, 'keys':keys}})", "def get_ssh_keys(self, user_id):\n _gu = self.get_user(user_id)\n if _gu is None:\n return []\n\n # build URL and make request\n return self._get('/users/{0}/keys'.format(_gu['id']))", "def list_user_keys(self):\n return AlgoliaUtils_request(self.headers, self.read_hosts, \"GET\", \"/1/keys\", self.timeout)", "def get_all_access_keys(self, user_name, marker=None, max_items=None):\r\n params = {'UserName' : user_name}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('ListAccessKeys', params,\r\n list_marker='AccessKeyMetadata')", "def keys(self, bucket, user=None):\n raise NotImplementedError('TODO')", "def list_user_keys(self):\n return AlgoliaUtils_request(self.client.headers, self.read_hosts, \"GET\", \"/1/indexes/%s/keys\" % self.url_index_name, self.client.timeout)", "def list(ctx: CLIContext, user_id, is_active, filter_, order, offset, limit) -> None:\n fields = [\n keypair_fields['user_id'],\n keypair_fields['full_name'],\n keypair_fields['access_key'],\n keypair_fields['secret_key'],\n keypair_fields['is_active'],\n keypair_fields['is_admin'],\n keypair_fields['created_at'],\n keypair_fields['last_used'],\n keypair_fields['resource_policy'],\n keypair_fields['rate_limit'],\n keypair_fields['concurrency_used'],\n ]\n try:\n with Session() as session:\n fetch_func = lambda pg_offset, pg_size: session.KeyPair.paginated_list(\n is_active,\n user_id=user_id,\n fields=fields,\n page_offset=pg_offset,\n page_size=pg_size,\n filter=filter_,\n order=order,\n )\n ctx.output.print_paginated_list(\n fetch_func,\n initial_page_offset=offset,\n page_size=limit,\n )\n except Exception as e:\n ctx.output.print_error(e)\n sys.exit(1)", "def describe_user_encryption_key_list(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_user_encryption_key_list_with_options(request, runtime)", "def get_list_keys(rpc_user, rpc_pwd):\n data = '{\"jsonrpc\":\"2.0\",\"id\":\"1\",\"method\":\"listkeys\"}'\n return call_rpc(rpc_user, rpc_pwd, data)", "def list_customer_secret_keys(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/customerSecretKeys\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_customer_secret_keys got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = 
kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[CustomerSecretKeySummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[CustomerSecretKeySummary]\")", "def apikeys(request):\n display = get_boolean_value(request.GET.get('display', False))\n\n return render(request, 'gui/profile/profile_api_keys_list.html', {\n 'user': request.user,\n 'display_keys': display\n })", "def get_s3_keys(bucket, user_keys = None):\n keys = []\n if user_keys is None:\n \t\t\t\ts3 = boto3.client('s3')\n else:\n s3 = boto3.client('s3', \n aws_access_key_id = user_keys[\"AWS_ACCESS_KEY_ID\"], \n aws_secret_access_key = user_keys[\"AWS_SECRET_ACCESS_KEY\"], \n region_name = user_keys[\"REGION_NAME\"]\n ) \t \n \n resp = s3.list_objects_v2(Bucket= bucket)\n for obj in resp['Contents']:\n keys.append(obj['Key'])\n return keys", "def get_api_keys(owner):\n api.get_all(owner)", "async def describe_user_encryption_key_list_async(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n runtime = util_models.RuntimeOptions()\n return await self.describe_user_encryption_key_list_with_options_async(request, runtime)", "def test_get_user_api_keys(self):\n pass", "def describe_user_encryption_key_list_with_options(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.target_region_id):\n query['TargetRegionId'] = request.target_region_id\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeUserEncryptionKeyList',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeUserEncryptionKeyListResponse(),\n self.call_api(params, req, runtime)\n )", "def get_api_keys(self, **kwargs):\n\n all_params = ['page', 'per_page', '_from', 'to', 'sort_dir', 'sort_field', 'filters']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_api_keys\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/apikeys'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n 
if 'page' in params:\n query_params['_page'] = params['page']\n if 'per_page' in params:\n query_params['_perPage'] = params['per_page']\n if '_from' in params:\n query_params['_from'] = params['_from']\n if 'to' in params:\n query_params['_to'] = params['to']\n if 'sort_dir' in params:\n query_params['_sortDir'] = params['sort_dir']\n if 'sort_field' in params:\n query_params['_sortField'] = params['sort_field']\n if 'filters' in params:\n query_params['_filters'] = params['filters']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['privileges', 'apikey']\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='list[ApiKey]',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def get_key(self, user):\r\n from delicious_cake.models import ApiKey\r\n\r\n try:\r\n key = ApiKey.objects.get(user=user)\r\n except ApiKey.DoesNotExist:\r\n return False\r\n\r\n return key.key", "def List(self, user=None):\n with self.acc_lock:\n self._load()\n\n result = []\n if user:\n for k, v in self.tasks.iteritems():\n if v['user'] != user:\n continue\n d = dict(v)\n d['key'] = k\n result.append(d)\n else:\n for k, v in self.tasks.iteritems():\n d = dict(v)\n d['key'] = k\n result.append(d)\n return result", "def get_api_key_params(user):\n if user and user.is_authenticated():\n api_key, _ = APIKey.objects.get_or_create(user=user)\n return urlencode({'user': user.pk, 'key': api_key.key})\n return ''", "def get_key(self, user, api_key):\n return True", "async def list_keys(request: web.Request) -> web.Response:\n keys = [\n {'uri': '/wifi/keys/{}'.format(key.directory),\n 'id': key.directory,\n 'name': os.path.basename(key.file)} for key in wifi.list_keys()\n ]\n return web.json_response({'keys': keys}, status=200)", "def api_key( self, trans, user_id, **kwd ):\n user = self.get_user( trans, user_id )\n key = self.create_api_key( trans, user )\n return key", "def get_key_list(self, email=\"\"):\n\t\tif email:\n\t\t\twhere_clause = \" where email = '%s'\" % email\n\t\telse:\n\t\t\twhere_clause = \"\"\n\n\t\treturn self.app.db.query(\n\t\t\t\"\"\"\n\t\t\tselect\n\t\t\t\tapi_key,\n\t\t\t\towner,\n\t\t\t\tapp_name,\n\t\t\t\temail,\n\t\t\t\turl,\n\t\t\t\tcreated\n\t\t\tfrom\n\t\t\t\tapi_keys\n\t\t\t%s\n\t\t\t\"\"\" % where_clause)", "def ListAppKeys(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_keys(user_id):\n\n if not request.json:\n abort(400)\n\n new_pub_keys = request.json[\"public_keys\"]\n\n db_conn = sqlite3.connect(db_path)\n db = db_conn.cursor()\n db_pub_keys = []\n try:\n for row in db.execute(\"SELECT public_key FROM public_keys WHERE username=? 
AND status=?;\", [user_id, PK_STATUS_OK]):\n db_pub_keys.append(row[0])\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n\n to_add = []\n to_revoke = []\n\n # Put the keys not present in the database in the list of keys to add\n for new_key in new_pub_keys:\n if(new_key not in db_pub_keys):\n to_add.append((user_id, new_key, PK_STATUS_OK))\n # Put the keys not in the new list in the list of keys to revoke\n for db_key in db_pub_keys:\n if(db_key not in new_pub_keys):\n to_revoke.append((PK_STATUS_REVOKED, user_id, db_key))\n\n try:\n db.executemany('INSERT INTO public_keys (username, public_key, status) VALUES (?,?,?);', to_add)\n db.executemany('UPDATE public_keys SET status=? WHERE username=? AND public_key=?;', to_revoke)\n db_conn.commit()\n db_conn.close()\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n\n return jsonify({'status':True})", "def get_api_key_from_user_id(self, user_id: str) -> str:\n response = self.get(self.url + \"/my-account\", params={\"id\": user_id})\n return self.get_api_key_from_response(response)", "def get_user_auth_keys(self, username):\n if username in self.users_keys:\n return self.users_keys[username]\n\n self.users_keys[username] = []\n\n userdir = os.path.expanduser(\"~\" + username)\n if not userdir:\n return self.users_keys[username]\n\n keyfile = os.path.join(userdir, \".ssh/authorized_keys\")\n if not keyfile or not os.path.exists(keyfile):\n return self.users_keys[username]\n\n with open(keyfile) as f:\n for line in f.readlines():\n line = line.strip()\n if not line or line.startswith(\"#\"):\n continue\n values = [x.strip() for x in line.split()]\n\n exp = None\n try:\n int(values[0]) # bits value?\n except ValueError:\n # Type 1 or type 2, type 1 is bits in second value\n options_ktype = values[0]\n try:\n int(values[1]) # bits value?\n except ValueError:\n # type 2 with options\n ktype = options_ktype\n data = values[1]\n else:\n # Type 1 no options.\n exp = int(values[1])\n data = values[2]\n else:\n # Type 1 no options.\n exp = int(values[1])\n data = values[2]\n\n # XXX For now skip type 1 keys\n if exp is not None:\n continue\n\n if data:\n import base64\n if ktype == \"ssh-rsa\":\n key = ssh.RSAKey(data=base64.decodebytes(data.encode('ascii')))\n elif ktype == \"ssh-dss\":\n key = ssh.DSSKey(data=base64.decodebytes(data.encode('ascii')))\n else:\n key = None\n if key:\n self.users_keys[username].append(key)\n return self.users_keys[username]", "async def describe_user_encryption_key_list_with_options_async(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.target_region_id):\n query['TargetRegionId'] = request.target_region_id\n req = open_api_models.OpenApiRequest(\n 
query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeUserEncryptionKeyList',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeUserEncryptionKeyListResponse(),\n await self.call_api_async(params, req, runtime)\n )", "def get_all_signing_certs(self, marker=None, max_items=None,\r\n user_name=None):\r\n params = {}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('ListSigningCertificates',\r\n params, list_marker='Certificates')", "def ListKeys(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def refresh_jwt_public_keys(user_api=None, logger=None):\n logger = logger or get_logger(__name__, log_level=\"info\")\n # First, make sure the app has a ``jwt_public_keys`` attribute set up.\n missing_public_keys = (\n not hasattr(flask.current_app, \"jwt_public_keys\")\n or not flask.current_app.jwt_public_keys\n )\n if missing_public_keys:\n flask.current_app.jwt_public_keys = {}\n user_api = user_api or flask.current_app.config.get(\"USER_API\")\n if not user_api:\n raise ValueError(\"no URL(s) provided for user API\")\n path = get_keys_url(user_api)\n jwt_public_keys = httpx.get(path).json()[\"keys\"]\n logger.info(\n \"refreshing public keys; updated to:\\n\"\n + json.dumps(str(jwt_public_keys), indent=4)\n )\n flask.current_app.jwt_public_keys.update({user_api: OrderedDict(jwt_public_keys)})", "def list_credentials(user):\n return Credentials.list_credentials(user)", "def get_keys(weat_db):\n import updater\n keys = updater.list_keys(weat_db, verbose=False)\n return keys", "def JWT_API_KEYS(default=None):\n return ParamStore.get('JWT_API_KEYS', default=default).to_list(delimiter=',')", "def get_public_webhook_keys(self, query_params: Dict[str, object] = None) -> List[PublicWebhookKey]:\n if query_params is None:\n query_params = {}\n\n path_params = {\n }\n\n path = Template(\"/system/action/v1beta2/webhook/keys\").substitute(path_params)\n url = self.base_client.build_url(path)\n response = self.base_client.get(url, params=query_params)\n return handle_response(response, PublicWebhookKey)", "def get_user_access_tokens(request, user):\n manager = internal_keystoneclient(request).oauth2.access_tokens\n\n return manager.list_for_user(user=user)", "def get_wishlist_key(self, user):\n user_id = self.auth.get_user_id(user)\n p_key = ndb.Key(Profile, user_id)\n\n wishlists = Wishlist.query(ancestor=p_key).fetch()\n if wishlists:\n return wishlists[0].key\n\n wl_id = Wishlist.allocate_ids(size=1, parent=p_key)[0]\n wl_k = ndb.Key(Wishlist, wl_id, parent=p_key)\n Wishlist(**{'key': wl_k}).put()\n\n return wl_k", "def getuserrepos_keys(gh, user):\n repos = getuserrepos(gh, user)\n return repos[0].keys()", "def view_list_users(self, user):\r\n return user.realm._users.keys()", "def api_key(request):\r\n user_acct = request.user\r\n return _api_response(request, {\r\n 'api_key': user_acct.api_key,\r\n 'username': user_acct.username\r\n })", "def hs_signers(self):\n return [{'name': u.get_full_name(), 'email': u.email} for u in [self.workspace.lawyer, self.user]]", "def get_signatories(account_id):\n query = iroha.query(\"GetSignatories\", 
account_id=account_id)\n    ic.sign_query(query, user_private_key)\n    response = net.send_query(query)\n    data = MessageToDict(response)\n    pprint(data, indent=2)", "def get_key(self, user, api_key):\r\n        from delicious_cake.models import ApiKey\r\n\r\n        try:\r\n            ApiKey.objects.get(user=user, key=api_key)\r\n        except ApiKey.DoesNotExist:\r\n            return self._unauthorized()\r\n\r\n        return True", "def get(self, id=None):\n        response = []\n        publickeys = []\n        if id:\n            # For testing\n            #if action == \"delete_key\":\n            #    self.delete()\n            # For testing\n            #elif action == \"edit_key\":\n            #    self.put()\n            #else\n            id = str(urllib.unquote(id))\n            publickeys = [PublicKey.get_by_id(long(id))]\n        else:\n            publickeys = PublicKey.all().run(batch_size=1000)\n        \n        for seq, publickey in enumerate(publickeys):\n            response.append({ 'key_name' : publickey.name, 'key_description' : publickey.description, \n                              'key_owner' : str(publickey.owner.email()), 'created' : str(publickey.created), \n                              'is_default_key' : publickey.is_default_key, 'key_id' : publickey.key().id()})\n        self.response.out.write(json.dumps(response))", "def delete_all_keypairs(self, user):\n        msg = \"delete_all_keypairs not implemented\"\n        raise NotImplementedError(msg)", "def list_all_keys(riak_host,riak_port,bucket):\n    url='http://%s:%s/buckets/%s/keys?keys=true' % (riak_host,riak_port,bucket)\n    #print url\n    r=requests.get(url)\n    print json.dumps(r.json(), sort_keys=True, indent=4)", "def list_keys(self, label=None):\r\n        _filter = NestedDict({})\r\n        if label:\r\n            _filter['sshKeys']['label'] = query_filter(label)\r\n\r\n        return self.client['Account'].getSshKeys(filter=_filter.to_dict())", "def list_tokens(user):\n    return AppSpecificAuthToken.select().where(AppSpecificAuthToken.user == user)", "def ListKeys(project, show_deleted=None, page_size=None, limit=None):\n  client = GetClientInstance(calliope_base.ReleaseTrack.GA)\n  messages = client.MESSAGES_MODULE\n\n  request = messages.ApikeysProjectsLocationsKeysListRequest(\n      parent=GetParentResourceName(project), showDeleted=show_deleted)\n  return list_pager.YieldFromList(\n      client.projects_locations_keys,\n      request,\n      limit=limit,\n      batch_size_attribute='pageSize',\n      batch_size=page_size,\n      field='keys')", "def keys(self):\n        return self.get_list(self.cloudman.list_keypairs(),\n                             kind=\"key\")", "def list(self, all_tenants=True):\n        query = {}\n        path = '/os-keypairs'\n        if all_tenants is True:\n            query['all_tenants'] = 1\n        \n        path = '%s?%s' % (path, urlencode(query)) \n        \n        res = self.client.call(path, 'GET', data='', \n                               token=self.manager.identity.token)\n        self.logger.debug('Get openstack key pairs: %s' % truncate(res))\n        return res[0]['keypairs']", "def get_user_tasks(self, user_pk: int) -> APIResponse:\n        user_args = {\"pk\": user_pk}\n        return self._get(\"user_list\", user_args)", "def api_keys(self) -> dict:\n        return self.AUTH.get_api_keys()", "def list_project_keys(self):\n        return self.client._perform_json(\"GET\", \"/project-folders/%s\" % self.project_folder_id).get(\"projectKeys\", [])", "def key_request(self, user):\n\t\tclient_log.debug(f'Requesting the public key for {user}')\n\t\treq = {\n\t\t\tACTION: PUBLIC_KEY_REQUEST,\n\t\t\tTIME: time.time(),\n\t\t\tACCOUNT_NAME: user\n\t\t}\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tif RESPONSE in ans and ans[RESPONSE] == 511:\n\t\t\treturn ans[DATA]\n\t\telse:\n\t\t\tclient_log.error(f'Failed to obtain the public key of the peer {user}.')", "def get_playlists_for_user(self, request): \n        user = 
Account.find_by_id(request.userid)\n playlists = Playlist.find_by_owner(user.key).fetch(20)\n return self.build_playlist_response(playlists)", "def get_all(user_id):\n return Bucketlist.query.filter_by(created_by=user_id)", "def GetAllUserFiles(user, mkdir=False, dircheck=True, _homedir_fn=None):\n helper = compat.partial(GetUserFiles, user, mkdir=mkdir, dircheck=dircheck,\n _homedir_fn=_homedir_fn)\n result = [(kind, helper(kind=kind)) for kind in constants.SSHK_ALL]\n\n authorized_keys = [i for (_, (_, _, i)) in result]\n\n assert len(frozenset(authorized_keys)) == 1, \\\n \"Different paths for authorized_keys were returned\"\n\n return (authorized_keys[0],\n dict((kind, (privkey, pubkey))\n for (kind, (privkey, pubkey, _)) in result))", "def download_keys(file, bucket, user_keys = None ,verbose = False):\n if user_keys is None:\n \t\t\t\ts3 = boto3.client('s3')\n else:\n s3 = boto3.client('s3', \n aws_access_key_id = user_keys[\"AWS_ACCESS_KEY_ID\"], \n aws_secret_access_key = user_keys[\"AWS_SECRET_ACCESS_KEY\"], \n region_name = user_keys[\"REGION_NAME\"]\n ) \n \n keys = get_s3_keys(bucket, user_keys = None)\n create_dirs(keys, file)\n \n for i,key in enumerate(keys):\n if verbose:\n print(key)\n try:\n # download as local file\n s3.download_file(bucket, key, os.path.join(file,key))\n except:\n \t\t\t\traise\n return print(\"{} files were downloaded!\".format(i))", "def get_keys(self):\n return list(self.public_keys.keys())", "def buckets(self, user=None):\n raise NotImplementedError('TODO')", "def account_keys(chain):\n return chain.backend.account_keys", "def get_all_books_for_user(user, KEY):\n\n gr_id = user.gr_id\n shelves = check_for_shelves(gr_id, KEY)\n\n for shelf in shelves: # iterate over list of shelves and create books!\n time.sleep(1.00)\n get_books_from_shelf(gr_id, shelf.name, KEY)\n print \"Got all books from \" + shelf.name + \" shelf.\"\n\n return", "def list(self, resource, url_prefix, auth, session, send_opts):\n\n req = self.get_metadata_request(\n resource, 'GET', 'application/json', url_prefix, auth)\n\n prep = session.prepare_request(req)\n resp = session.send(prep, **send_opts)\n if resp.status_code == 200:\n keys_dict = resp.json()\n return keys_dict['keys']\n\n err = ('List failed on {}, got HTTP response: ({}) - {}'.format(\n resource.name, resp.status_code, resp.text))\n raise HTTPError(err, request = req, response = resp)", "def api_keys(self):\n logger.info(\"Reading API Keys from the file.\")\n expected_data = config_utils.read_file(self.keys_file_path)\n return [data.strip() for data in expected_data]", "def get_keys(self):\r\n\t\tlogger.debug(\"Getting the keys\")\r\n\t\t\r\n\t\treturn db.get_items('keys')", "def keys(self, *args, **kwargs):\n return self._list(*args, **kwargs)", "def view_list_containers_by_user(self, user, userID):\r\n return user._realm.getUser(userID).containers.keys()", "def keys(self):\n return sorted(super(UserSSHKeys, self).keys())", "def get_keys(self, yk_publicname):\n query = \"\"\"SELECT yk_publicname\n FROM yubikeys\n WHERE active = 1\"\"\"\n params = None\n if yk_publicname != 'all':\n query += ' AND yk_publicname = %s'\n params = (yk_publicname,)\n self._execute(query, params)\n return self._dictfetchall()", "def get(self, user):\n search = True if self.request.args.get('q') else False\n limit = int(self.request.args.get('limit')) if self.request.args.get('limit') else 20\n page = int(self.request.args.get('page')) if self.request.args.get('page') else 1\n bucketlists = user.bucketlists.paginate(page, limit, 
True).items\n bucketlists = user.bucketlists.filter(Bucketlist.name.contains(self.request.args.get('q'))) if self.request.args.get('q') else bucketlists\n\n bucketlists = [\n {'id': bucketlist.id,\n 'name': bucketlist.name,\n 'items': [\n {'id': item.id,\n 'name': item.description,\n 'date_created': str(item.date_created),\n 'date_modified': str(item.date_modified),\n 'done': str(item.is_done)\n } for item in bucketlist.items\n ],\n 'date_created': str(bucketlist.date_created),\n 'date_modified': str(bucketlist.date_modified),\n 'created_by': bucketlist.created_by\n } for bucketlist in bucketlists\n ]\n\n # if empty retutn no bucketlists added\n if not bucketlists:\n return \"You have no avialable bucketlists\", 200\n\n return bucketlists, 200", "def users_view():\n data = get_data()\n return [{'user_id': i, 'name': 'User {0}'.format(str(i))}\n for i in data.keys()]", "def get_developer_apps_by_user(user_id: int) -> List[Dict]:\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n developer_apps = (\n session.query(DeveloperApp)\n .filter(\n DeveloperApp.user_id == user_id,\n DeveloperApp.is_current == True,\n DeveloperApp.is_delete == False,\n )\n .all()\n )\n return query_result_to_list(developer_apps)", "def getKeys(name = None):\n if name == None:\n name = session.get('name')\n\n keys = hl.getUser(\"Name\",name)[\"Keys\"]\n hl.keyDistributeFlag(name)\n #If on a production server, use actual path\n if os.path.isdir(keys_dir):\n filename = keys_dir + keys + '.ovpn' \n\n #if not os.path.exists(filename):\n # hl.zipUserKeys(keys) \n\n return send_file(filename, as_attachment=True)\n #Else use relative dev path\n else:\n return send_file('static\\\\Test_client1.zip', as_attachment=True)", "def _gpg_keys(self) -> ListKeys:\n return self.gpg.list_keys()", "def cli(ctx):\n return ctx.gi.cannedkeys.get_keys()", "def list_hnd(self, request, **kwargs):\n prefix = request.POST.get(\"prefix\", \"\")\n marker = request.POST.get(\"marker\", \"\")\n delimiter = request.POST.get(\"delimiter\", \"\")\n\n max_keys = int(request.POST.get(\"max_keys\", 1000))\n max_keys = max((1, max_keys)) # >= 1\n max_keys = min((1000, max_keys)) # <= 1000\n\n bucket = store.get_bucket()\n\n # prefix \"prefix\" with user dir\n eff_prefix = store.prefix_with_user_dir(request.user, prefix)\n\n # get list iterator from s3\n file_iter = bucket.list(prefix=eff_prefix, delimiter=delimiter,\n marker=marker, headers=None,\n encoding_type=None)\n\n # convert to list, try to get +1 item to be able\n # to determine if the results are truncated\n files = [key.key.split(\"/\", 1)[1]\n for key in itertools.islice(file_iter, 0, max_keys+1)]\n\n # if max_keys is less then there are more results\n # -> truncated = True\n truncated = len(files) > max_keys\n if truncated:\n # return 1 item less\n files = files[:-1]\n\n return self.create_response(request, {\n \"files\": files,\n \"truncated\": truncated\n })", "def list_user():\n\tbegin = 0\n\tlength = 25\n\ttry:\n\t\tif request.json != None:\n\t\t\tbegin = int(request.json.get('begin', 0))\n\t\t\tlength = int(request.json.get('length', 25))\n\texcept:\n\t\tabort(403)\n\tif length > 100 :\n\t\tlength = 100\n\tuserList = User.list(begin, length)\n\tif userList == None:\n\t\tabort(400)\n\treturn jsonify({'users': map(lambda(e): e.output(), userList), 'begin': begin, 'length': len(userList)})", "def keypairs(self):\n return list(self._list(_keypair.Keypair, paginated=False))", "def user_list():\n users = User.objects.all()\n return {\"users\": users}", "def 
list(self, user_ids: Optional[List[UserId]]) -> List[U]:\n ...", "def _list_tokens(self, user_id, tenant_id=None, trust_id=None,\n consumer_id=None):\n raise exception.NotImplemented() # pragma: no cover", "def _get_list(self, user=None):\n request = self.factory.get(self.list_url, format='json')\n force_authenticate(request, user)\n resp = self.list_view(request)\n resp.render()\n return resp", "def user_list(request):\r\n params = request.params\r\n order = params.get('order', None)\r\n limit = params.get('limit', None)\r\n user_list = UserMgr.get_list(order=order, limit=limit)\r\n ret = {\r\n 'count': len(user_list),\r\n 'users': [dict(h) for h in user_list],\r\n }\r\n return _api_response(request, ret)", "def list_user_access(self, instance, user):\n return instance.list_user_access(user)", "def get_tokens_for_user(user):\n\n refresh = RefreshToken.for_user(user)\n\n return {\n 'refresh': str(refresh),\n 'access': str(refresh.access_token),\n }", "def get_keys(request):\n\n keys=[]\n reports = Report.objects.all().exclude(institute = 'PUBMED')\n for report in reports:\n json_rep = report.report_json\n for el in json_rep.keys():\n if el not in keys:\n keys.append(el)\n json_resp = {'keys':keys}\n return JsonResponse(json_resp)", "def show_users():\n users_list = []\n all_users = storage.all('User')\n for obj in all_users.values():\n users_list.append(obj.to_dict())\n return jsonify(users_list)", "def userkey(hash):\n user = hl.getUser(\"Name\",session['name'])\n flagCheck = hl.checkDistributeFlag(user[\"Name\"])\n if flagCheck == False:\n return getKeys()\n elif flagCheck == True:\n flash(\"You have been logged out. Please contact your system administrator\")\n return redirect(url_for('logout'))", "def get_user_pages(self, user_id, fields=None, batch=False):\n path = '%s/accounts' % user_id\n args = {}\n if fields:\n args['fields'] = json.dumps(fields)\n return self.make_request(path, 'GET', args, batch=batch)", "def get_all_keys(self, headers=None, **params):\r\n key = Key(self.name, self.contained_key)\r\n return SimpleResultSet([key])", "def list_keys_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n limit = arg_to_number(args.get('limit')) or DEFAULT_LIMIT\n offset = arg_to_number(args.get('offset')) or DEFAULT_OFFSET\n response = client.list_keys_request(vault_name, limit, offset)\n outputs = copy.deepcopy(response)\n readable_response = []\n\n for key in outputs:\n readable_response.append({\n 'key_id': key.get('kid'),\n 'managed': key.get('managed'),\n **convert_attributes_to_readable(key.get('attributes', {}).copy()),\n })\n key[VAULT_NAME_CONTEXT_FIELD] = vault_name\n key['attributes'] = convert_time_attributes_to_iso(key['attributes'])\n\n readable_output = tableToMarkdown(\n f'{vault_name} Keys List',\n readable_response,\n ['key_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Key',\n outputs_key_field='kid',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results", "def get_users(user_id):\n my_user = storage.get(\"User\", user_id)\n if my_user:\n return jsonify(my_user.to_dict()), 200\n else:\n abort(404)", "def generate_keys(self):\n self.keys = []\n key = string_to_bit_array(self.passwd)\n key = self.permutation(key, CP_1) # Perform initial permutation on the key\n g, d = split_into_n(key, 
28) # Split into g (LEFT) & d (RIGHT)\n for i in range(16): # Apply the 16 rounds\n g, d = self.shift(g, d, ROUND_KEY_SHIFT[i]) # Shift the key according to the round\n tmp = g + d # Merge them\n self.keys.append(self.permutation(tmp, CP_2)) # Perform the permutation to get the Ki", "def keys(self):\n return DeviceKeyCollection(client=self)", "def create_access_key(self, user_name=None):\r\n params = {'UserName' : user_name}\r\n return self.get_response('CreateAccessKey', params)", "def get_developer_apps_with_grant_for_user(user_id: int) -> List[Dict]:\n\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n rows = (\n session.query(\n DeveloperApp.address,\n DeveloperApp.name,\n DeveloperApp.description,\n Grant.user_id.label(\"grantor_user_id\"),\n Grant.created_at.label(\"grant_created_at\"),\n Grant.updated_at.label(\"grant_updated_at\"),\n ) # Note: will want to grab Grant permissions too once we have those\n .outerjoin(Grant, Grant.grantee_address == DeveloperApp.address)\n .filter(\n Grant.user_id == user_id,\n Grant.is_revoked == False,\n Grant.is_current == True,\n DeveloperApp.is_current == True,\n DeveloperApp.is_delete == False,\n )\n .order_by(asc(Grant.updated_at))\n .all()\n )\n return [\n {\n \"address\": row[0],\n \"name\": row[1],\n \"description\": row[2],\n \"grantor_user_id\": row[3],\n \"grant_created_at\": row[4],\n \"grant_updated_at\": row[5],\n }\n for row in rows\n ]", "def display_users():\n users = storage.all(\"User\").values()\n users_list = []\n for obj in users:\n users_list.append(obj.to_dict())\n return jsonify(users_list)", "def user_list(request_dict):\n users = User.query.all()\n users_list = list()\n for user in users:\n users_list.append(user)\n\n return JSONTools.user_list_reply(users_list)" ]
[ "0.6866222", "0.66910446", "0.6670618", "0.64299655", "0.6349597", "0.6267026", "0.622857", "0.62016267", "0.6186303", "0.6160741", "0.60831136", "0.5966327", "0.5913713", "0.58797574", "0.58408535", "0.57975286", "0.5738591", "0.571278", "0.5685982", "0.5669537", "0.56538075", "0.5618135", "0.5544118", "0.5529223", "0.5461351", "0.5461012", "0.54606354", "0.541753", "0.54036325", "0.5372311", "0.53654104", "0.53646904", "0.535147", "0.5325841", "0.53019214", "0.5264808", "0.52602726", "0.525493", "0.5252894", "0.5249968", "0.5228863", "0.52177566", "0.5188714", "0.516738", "0.5164146", "0.51601917", "0.51596755", "0.51571876", "0.51433784", "0.5136173", "0.51223654", "0.510567", "0.51000935", "0.5097959", "0.5087826", "0.508511", "0.50802624", "0.5076434", "0.5074756", "0.50721216", "0.5061071", "0.50599027", "0.5056753", "0.5052024", "0.50248027", "0.50240105", "0.5023173", "0.501952", "0.4994856", "0.4991315", "0.49830258", "0.4981087", "0.49805427", "0.4972077", "0.49703816", "0.49691182", "0.4947331", "0.49369693", "0.49356747", "0.49174455", "0.49086347", "0.4892528", "0.48909047", "0.488622", "0.48861742", "0.4880042", "0.48755604", "0.48668954", "0.4857574", "0.4838402", "0.48294142", "0.4827212", "0.48256224", "0.48199013", "0.4818663", "0.4813485", "0.48106948", "0.48074985", "0.48011327", "0.47976163" ]
0.69830835
0
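Annotation (not part of the dataset record above): the document field of this record is the OCI Python SDK's IdentityClient.list_api_keys method. A minimal usage sketch follows, under the assumption that the oci package is installed and that ~/.oci/config contains a valid DEFAULT profile; it is illustrative only, not part of the dataset.

import oci

# Illustrative sketch: reads ~/.oci/config and builds the identity client.
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# A user may list their own API signing keys (at most three per user);
# response.data is a list of oci.identity.models.ApiKey objects.
response = identity.list_api_keys(user_id=config["user"])
for api_key in response.data:
    print(api_key.key_id, api_key.fingerprint, api_key.lifecycle_state)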
Lists the auth tokens for the specified user. The returned object contains the token's OCID, but not the token itself. The actual token is returned only upon creation.
def list_auth_tokens(self, user_id, **kwargs): resource_path = "/users/{userId}/authTokens" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_auth_tokens got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[AuthToken]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[AuthToken]")
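Annotation (illustrative sketch, same assumptions as the previous example): per the query text of this record, list_auth_tokens returns AuthToken summaries that carry the token's OCID but never the token value itself; the secret is only present in the response of create_auth_token.

# Sketch: reuses the `identity` client and `config` from the sketch above.
tokens = identity.list_auth_tokens(user_id=config["user"]).data
for token in tokens:
    # token.token is not populated by the list call; only the OCID,
    # description, and lifecycle metadata are returned.
    print(token.id, token.description, token.time_created)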
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_tokens(user):\n return AppSpecificAuthToken.select().where(AppSpecificAuthToken.user == user)", "def get_user_access_tokens(request, user):\n manager = internal_keystoneclient(request).oauth2.access_tokens\n\n return manager.list_for_user(user=user)", "def _list_tokens(self, user_id, tenant_id=None, trust_id=None,\n consumer_id=None):\n raise exception.NotImplemented() # pragma: no cover", "def get_tokens_for_user(user):\n\n refresh = RefreshToken.for_user(user)\n\n return {\n 'refresh': str(refresh),\n 'access': str(refresh.access_token),\n }", "def tokens(self):\n return self.rpc.call(MsfRpcMethod.AuthTokenList)['tokens']", "def test_get_all_tokens_authenticated_user(self):\r\n\r\n user = UserFactory.create_batch(2)[1]\r\n user.info = create_tokens_for(user)\r\n\r\n res = self.app.get('api/token?api_key=' + user.api_key)\r\n data = json.loads(res.data)\r\n\r\n for provider in TokenAPI.oauth_providers:\r\n token_name = '%s_token' % provider\r\n assert data.get(token_name) is not None, data", "def getTokens(self):\n self.__require_privilaged_access()\n with DBSession(self.__config_db) as session:\n user = self.getLoggedInUser()\n sessionTokens = session.query(Session) \\\n .filter(Session.user_name == user) \\\n .filter(Session.can_expire.is_(False)) \\\n .all()\n\n result = []\n for t in sessionTokens:\n result.append(SessionTokenData(\n t.token,\n t.description,\n str(t.last_access)))\n\n return result", "def get(self, filters=None, pagination=None, sort=None):\n filters = filters or {}\n if not is_user_action_allowed('manage_others_tokens'):\n filters['_user_fk'] = current_user.id\n\n sm = get_storage_manager()\n\n result = sm.list(models.Token, filters=filters,\n pagination=pagination, sort=sort)\n\n return result", "def getTokens(username):\n tokens = users.find({\"Username\": username})[0][\"Tokens\"]\n return tokens", "def gettoken(tool_id, user_id):\n oauth_tokens = {\n 'access_token': '',\n 'user': {\n 'id': user_id\n }\n }\n params = {\n 'user_id': user_id\n }\n tokenq = \"\"\"select\naccess_token, refresh_token, expires_at, token_type, expires_in, user_name\nfrom tokens\nwhere user_id = :user_id\norder by expires_at desc\n\"\"\"\n tconn = dbconnect(CONFIG[CONFIG['app']['dbserver']])\n tcurr = tconn.cursor()\n try:\n results = tcurr.execute(tokenq, params).fetchone()\n except cx_Oracle.DatabaseError as err:\n LOG.error(\"Database error in retrieving tokens: %s\", err)\n\n if tcurr.rowcount > 0:\n oauth_tokens = {\n 'access_token': results[0],\n 'refresh_token': results[1],\n 'expires_at': results[2],\n 'token_type': results[3],\n 'expires_in': results[4],\n 'user': {\n 'name': results[5],\n 'id': user_id\n }\n }\n else:\n LOG.error(\"no token found for \" + str(tool_id) + ', ' + user_id)\n tcurr.close()\n tconn.close()\n return oauth_tokens", "def get_data_source_tokens_by_user(self, user_id: int):\n all_data_source_tokens_array = []\n user = None\n try:\n user: User = UserService.get_user_by_id(self, user_id)\n except Exception:\n raise\n\n try:\n for data_source_token in DataSourceToken.select(\n DataSourceToken,\n user).where(DataSourceToken.user_id == user_id):\n all_data_source_tokens_array.append(\n model_to_dict(data_source_token, recurse=False))\n return all_data_source_tokens_array\n except Exception:\n raise", "def tokens(self):\n user_token = RefreshToken.for_user(self)\n return {\n 'refresh': str(user_token),\n 'access': str(user_token.access_token),\n }", "def test_get_all_existing_tokens_authenticated_user(self):\r\n\r\n user = 
UserFactory.create_batch(2)[1]\r\n user.info = create_tokens_for(user)\r\n del user.info['google_token']\r\n\r\n res = self.app.get('api/token?api_key=' + user.api_key)\r\n data = json.loads(res.data)\r\n\r\n assert data.get('twitter_token') is not None, data\r\n assert data.get('facebook_token') is not None, data\r\n assert data.get('google_token') is None, data", "def list_credentials(user):\n return Credentials.list_credentials(user)", "def fetch_token(self, user_id, password):\n url = buildCommandUrl(self.server, \"/as/user/token\")\n result = json_request(\"POST\", url, {\n \"userId\": user_id,\n \"password\": password\n })\n return result[\"token\"]", "def _get_list(self, user=None):\n request = self.factory.get(self.list_url, format='json')\n force_authenticate(request, user)\n resp = self.list_view(request)\n resp.render()\n return resp", "def _create_auth_token(self, user=None):\n token, created = Token.objects.get_or_create(user=user)\n return token", "def get_token(self, user):\n\n jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER\n jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER\n payload = jwt_payload_handler(user)\n token = jwt_encode_handler(payload)\n return token", "def token_auth_get_user_roles(user):\n print(user)\n return user.get_roles()", "def access_token(self):\n social_auth = self.social_auth.get()\n return social_auth.tokens", "def UserToken(self) -> object:", "def list_revoked_tokens(self):\n raise exception.NotImplemented() # pragma: no cover", "def for_user(cls, user):\n\n token = super().for_user(user)\n\n TokenMeta.objects.get_or_create(\n jti=token['jti'],\n token=str(token),\n )\n\n return token", "def generate_token_from_user(user, expires_at=None):\n issued_at = datetime.utcnow()\n token = AccessToken()\n token.payload.update(\n {\n \"email\": user.email,\n \"exp\": expires_at or issued_at + timedelta(days=2),\n \"iat\": issued_at,\n \"language\": user.language,\n \"username\": user.username,\n \"full_name\": user.get_full_name(),\n }\n )\n return token", "def get_token(cls, user, full_result=False):\n if user is None:\n return EMPTY_KNOX_TOKEN\n result = AuthToken.objects.create(user=user)\n return result if full_result else result[1]", "def get_token(self, user_id, token_id):\n query = \"\"\"SELECT yubikeys.attribute_association_id AS yubikeys_attribute_association_id,\n yubikeys.id AS yubikeys_id,\n yubikeys.prefix AS yubikeys_prefix,\n yubikeys.enabled AS yubikeys_enabled\n FROM yubikeys, user_yubikeys\n WHERE user_yubikeys.user_id = %s\n AND yubikeys.prefix = %s\n AND yubikeys.id = user_yubikeys.yubikey_id\n ORDER BY yubikeys.prefix\"\"\"\n self._execute(query, (user_id, token_id))\n return self._dictfetchone()", "def get(self):\n if current_user and not current_user.is_anonymous:\n user = current_user\n tok = Token(user, 3600)\n return tok\n return jsonify({404: 'User not found'})", "def view_list_users(self, user):\r\n return user.realm._users.keys()", "def get_token(self):\r\n token = {'id': self.catalog['access']['token']['id'],\r\n 'expires': self.catalog['access']['token']['expires'], }\r\n try:\r\n token['user_id'] = self.catalog['access']['user']['id']\r\n token['tenant_id'] = (\r\n self.catalog['access']['token']['tenant']['id'])\r\n except Exception:\r\n # just leave the tenant and user out if it doesn't exist\r\n pass\r\n return token", "def list_user_access(self, instance, user):\n return instance.list_user_access(user)", "def get(self, user, since=None, token=None, sort=None):\n q = {}\n if user:\n q['userId'] = user['_id']\n 
else:\n q['tokenId'] = token['_id']\n\n if since is not None:\n q['updated'] = {'$gt': since}\n\n return self.find(q, sort=sort)", "def get_tokens(self):\r\n return self.token_set", "def list_user_access(self, user):\n return self._user_manager.list_user_access(user)", "def get_token(self, user_id, token_id):\n query = \"\"\"SELECT yubikeys.attribute_association_id AS yubikeys_attribute_association_id,\n yubikeys.id AS yubikeys_id,\n yubikeys.prefix AS yubikeys_prefix,\n yubikeys.enabled AS yubikeys_enabled\n FROM yubikeys\n INNER JOIN user_yubikeys\n ON user_yubikeys.yubikey_id = yubikeys.id\n WHERE user_yubikeys.user_id = %s\n AND yubikeys.prefix = %s\"\"\"\n self._execute(query, (user_id, token_id))\n return self._dictfetchone()", "def list(uid: int):\n\n return Token.list(uid)", "def bearer_tokens(self):\n return self._bearer_tokens", "def get_tokens(self):\r\n return TokenGroup.get_tokens(self._tu, self.extent)", "def get_auth_token(cls):\n return jsonify({\n 'user': current_user.serialize(),\n 'token': current_user.get_auth_token(),\n })", "def access_token(global_config, existing_user, id_api):\n yield id_api.get_access_token_for_user(existing_user.email, existing_user.password)", "def test_get_existing_token_authenticated_user(self):\r\n\r\n user = UserFactory.create_batch(2)[1]\r\n user.info = create_tokens_for(user)\r\n\r\n # If the token exists, it should be retrieved\r\n res = self.app.get('/api/token/twitter?api_key=' + user.api_key)\r\n data = json.loads(res.data)\r\n\r\n assert data.get('twitter_token') is not None, data\r\n assert data.get('twitter_token')['oauth_token'] == 'token-for-%s' % user.name\r\n assert data.get('twitter_token')['oauth_token_secret'] == 'secret-for-%s' % user.name\r\n # And no other tokens should\r\n assert data.get('facebook_token') is None, data", "def tokens():\n return ['access token', 'refresh token']", "def user_objects(cls, user):\n return cls.objects.filter(UserAccess.Q(user))", "def get_user_access_token(self, user_id, give_json=False):\n url = Constants.BASE_URL + 'domains/users/accesstokens'\n response = requests.get(url=url,\n params={'key': self.api_key, 'domain_api_secret': self.api_secret, 'user_id': user_id})\n json_obj = response.json()\n self.user_access_token = json_obj[\"result\"][\"user_access_token\"]\n if give_json:\n return json_obj\n else:\n return response.text", "def tokens(self):\n # type: () -> List[Token]\n return self._tokens", "def user_list():\n for values in USERS:\n user = User.objects.create_user(\n values[\"username\"], values[\"email\"], values[\"password\"]\n )\n user.first_name = values[\"first_name\"]\n user.last_name = values[\"last_name\"]\n user.is_staff = values[\"staff\"]\n user.is_superuser = values[\"super\"]\n user.save()\n Token.objects.create(key=values[\"token\"], user_id=user.id)\n\n # print('users created')", "def __repr__(self) -> str:\n return \"<Twilio.Oauth.V1.TokenList>\"", "def get_master_tokens(user, repo, config):\n url = \"{}/repos/{}/{}/master_tokens\".format(config['url_base'], user, repo)\n\n try:\n resp = (api_call(url, 'get', config['debug']))\n tokens = resp.json()\n except ValueError as ex:\n abort(\"Unexpected response from packagecloud API: \"\n \"{}\".format(ex.message))\n\n return tokens", "def list_users(access_token):\n request_url = OKTA_URL + \"api/v1/users\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def _get_token(self):\n return user.get_token()", "def 
create_token(user):\n access_token = create_access_token(user)\n payload = jwt.decode(\n access_token,\n app.config['JWT_SECRET_KEY'],\n algorithms=app.config['JWT_DECODE_ALGORITHMS'])\n data = {\n 'token':access_token,\n 'username': user.username,\n }\n data.update(payload)\n data['exp'] = datetime.fromtimestamp(data['exp'])\n app.logger.debug(str(data))\n if app.config.get('KEEP_TOKEN'):\n # deletes old tokens\n tokens = app.data.driver.db[config.DOMAIN['token']['datasource']['source']]\n tokens.delete_many({'username': user.username})\n # insets new token\n result = app.data.insert('token', data)\n return access_token, str(result[0])\n\n return access_token, None", "def tenants_for_token(self, context):\n token_ref = self.token_api.get_token(context=context,\n token_id=context['token_id'])\n assert token_ref is not None\n\n user_ref = token_ref['user']\n tenant_refs = []\n for tenant_id in user_ref['tenants']:\n tenant_refs.append(self.identity_api.get_tenant(\n context=context,\n tenant_id=tenant_id))\n return self._format_tenants_for_token(tenant_refs)", "def List(self, user=None):\n with self.acc_lock:\n self._load()\n\n result = []\n if user:\n for k, v in self.tasks.iteritems():\n if v['user'] != user:\n continue\n d = dict(v)\n d['key'] = k\n result.append(d)\n else:\n for k, v in self.tasks.iteritems():\n d = dict(v)\n d['key'] = k\n result.append(d)\n return result", "def get(self):\n\n user = context_property.request_user\n Log.info(\"Refresh access token for %i\" % user.id)\n\n return {\n \"accessToken\" : create_access_token(user.id)\n }, 200", "def find_token_by_user_id(session, user_id):\n return session.query(Token).filter(Token.user_id == user_id).one_or_none()", "def _authenticate(self, user):\n res = self.client.post(\n '/api/v1/auth/login',\n data=json.dumps({\n 'email': user['email'],\n 'password': 'secret'\n }),\n headers={'Content-Type': 'application/json'})\n result = json.loads(res.get_data(as_text=True))\n return {\n 'Content-Type': 'application/json',\n 'Authorization': 'Bearer {}'.format(result['access_token'])\n }", "def get_user_access(self, user):\n return self._access_lists.get_user_access(user)", "def token_by_owner(username):\n return Token.query.filter_by(owner=username).first()", "def get_user_active_list(self, user_id):\n return self.api.get_active_version_manager_by_user_id(user_id)", "def view_list_robots_by_user(self, user, userID):\r\n return user._realm.getUser(userID).robots.keys()", "def get_current_user():\n token = request.headers['token']\n decoded_token = decode_token(token)\n userId = decoded_token[\"userId\"]\n for user_obj in users_table:\n if user_obj.userId == userId:\n return {\"userId\": userId, \"isAdmin\": user_obj.isAdmin}", "def test_list_o_auth_access_token(self):\n pass", "def test_authtoken_user(self):\n crusoe = self.fixtures.crusoe\n auth_client = self.fixtures.auth_client\n\n user_session = models.UserSession(buid=buid(), user=crusoe)\n auth_token_with_user_session = models.AuthToken(\n user=crusoe, user_session=user_session\n )\n self.assertIsInstance(\n auth_token_with_user_session.user_session.user, models.User\n )\n self.assertEqual(auth_token_with_user_session.user_session.user, crusoe)\n\n auth_token_without_user_session = models.AuthToken(\n auth_client=auth_client, user=crusoe\n )\n self.assertIsInstance(auth_token_without_user_session._user, models.User)\n self.assertEqual(auth_token_without_user_session._user, crusoe)", "def tokens_json(self):\n token_id, secret = self.decoded_token\n token_row = 
self.unauthenticated_token_row\n tokens_encoded = Fernet(secret).decrypt(\n token_row.tokens_fernet.encode('ascii'))\n return json.loads(tokens_encoded.decode('ascii'))", "def get_tokens(self) -> List[str]:\n return self.tokens", "def get_tokens(self, use_refresh=False):\r\n post_data = {\r\n \"grant_type\": \"refresh_token\" if use_refresh else \"authorization_code\",\r\n \"code\": get_value(SPOTIFY_AUTHORIZATION_CODE),\r\n \"redirect_uri\": REDIRECT_URL\r\n }\r\n if use_refresh:\r\n post_data[\"refresh_token\"] = get_value(SPOTIFY_REFRESH_TOKEN)\r\n\r\n auth_key = base64.urlsafe_b64encode(f\"{SPOTIFY_CLIENT_ID}:{SPOTIFY_CLIENT_SECRET}\".encode()).decode()\r\n\r\n r = requests.post(\r\n TOKEN_URL,\r\n headers={\r\n \"Accept\": \"application/json\",\r\n \"Content-Type\": \"application/x-www-form-urlencoded\",\r\n \"Authorization\": f\"Basic {auth_key}\"\r\n },\r\n data=\"&\".join([f\"{quote(key)}={quote(value)}\" for key, value in post_data.items()])\r\n )\r\n\r\n if r.status_code != requests.codes.ok:\r\n return\r\n\r\n self.is_authorized = True\r\n data = r.json()\r\n set_value(SPOTIFY_ACCESS_TOKEN, data[\"access_token\"])\r\n self.access_token = data[\"access_token\"]\r\n if \"refresh_token\" in data:\r\n set_value(SPOTIFY_REFRESH_TOKEN, data[\"refresh_token\"])\r\n return", "def user_token(app_env, user_refresh):\n cred = tk.Credentials(*app_env)\n\n try:\n yield cred.refresh_user_token(user_refresh)\n except tk.HTTPError as error:\n skip_or_fail(tk.HTTPError, \"Error in retrieving user token!\", error)\n cred.close()", "def get_all_users():\n token = request.headers.get('token')\n\n # Token Validation\n token_valid, response = is_token_valid(token)\n if not token_valid:\n return response\n token_username = response\n\n # Privilege handling\n if token_username != 'admin':\n return jsonify({'message': \"You aren't allowed to access this\"}), 404\n\n return jsonify(list(Users.values())), 200", "def list_for_user(cls, user, cursor_url=None, limit=10):\n cursor = Cursor(urlsafe=cursor_url)\n messages, next_cursor, more = cls.list_query(user).fetch_page(limit, start_cursor=cursor)\n return (messages, next_cursor, more)", "def users():\n access_token = session['access_token']\n return \"%s\" % list_users(access_token)", "def get(self):\n # Login of authorized user stores in Flask g object\n user = User.query.filter_by(username=g.user.username).first()\n # Generate token\n token = user.generate_auth_token()\n # Send token in ASCII format\n return {'token': token.decode('ascii')}", "def list_users():\n if not check_content_type():\n return jsonify(status=CONTENT_TYPE_ERROR)\n reqdata = request.json\n if not check_token(reqdata[\"token\"]):\n return jsonify(status=TOKEN_ERROR)\n users = db.session.query(User).all()\n resdata = []\n for user in users:\n resdata.append({\"id\" : user.id, \"login\" : user.login, \"password\" : user.hash_password})\n return jsonify(data=resdata, status=OK_STATUS)", "def user_list(request):\r\n params = request.params\r\n order = params.get('order', None)\r\n limit = params.get('limit', None)\r\n user_list = UserMgr.get_list(order=order, limit=limit)\r\n ret = {\r\n 'count': len(user_list),\r\n 'users': [dict(h) for h in user_list],\r\n }\r\n return _api_response(request, ret)", "def getTokens(self):\n return self.__token", "def get_token():\n if not request.is_json:\n return jsonify({\"msg\": \"Missing JSON in request\"}), 400\n username = request.json.get('username', None)\n password = request.json.get('password', None)\n\n if not username:\n abort(400, \"Invalid 
username or password\")\n if not password:\n abort(400, \"Invalid username or password\")\n users = app.data.driver.db[config.DOMAIN['user']['datasource']['source']]\n user = users.find_one({'email':username})\n # validate the user in the user's service\n if not user:\n abort(401, \"Invalid username or password\")\n if not check_password_hash(user.get('password'), password):\n abort(401, \"Invalid username or password\")\n role = user.get('role', 'user')\n user_id = str(user.get('_id'))\n user = User(user_id, username, role)\n access_token, refresh_token = create_token(user)\n return jsonify(\n token=access_token,\n type='bearer',\n roles=role,\n user=username,\n refreshToken=refresh_token), 200", "def get_lists(user):\n list_options = {}\n list_objects = twitter.lists_all(screen_name=user)\n for list_ in list_objects:\n list_options[list_.id] = list_.name\n return list_options.items()", "def token_auth(self):\n self.client = APIClient()\n self.user = User.objects.create_user(username='testuser', email='test@test.com', password='testpassword')\n self.token = Token.objects.create(user=self.user)\n self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)", "def delete_tokens_for_user(self, user_id, project_id=None):\n if not CONF.token.revoke_by_id:\n return\n self.delete_tokens(user_id, tenant_id=project_id)\n for trust in self.trust_api.list_trusts_for_trustee(user_id):\n # Ensure we revoke tokens associated to the trust / project\n # user_id combination.\n self.delete_tokens(user_id, trust_id=trust['id'],\n tenant_id=project_id)\n for trust in self.trust_api.list_trusts_for_trustor(user_id):\n # Ensure we revoke tokens associated to the trust / project /\n # user_id combination where the user_id is the trustor.\n\n # NOTE(morganfainberg): This revocation is a bit coarse, but it\n # covers a number of cases such as disabling of the trustor user,\n # deletion of the trustor user (for any number of reasons). It\n # might make sense to refine this and be more surgical on the\n # deletions (e.g. don't revoke tokens for the trusts when the\n # trustor changes password). 
For now, to maintain previous\n # functionality, this will continue to be a bit overzealous on\n # revocations.\n self.delete_tokens(trust['trustee_user_id'], trust_id=trust['id'],\n tenant_id=project_id)", "def get_auth_tokens(self, oauth_verifier):\n\n url = self.access_token_url + '?oauth_verifier=' + oauth_verifier\n\n try:\n response = self.client.get(url, headers=self.headers, auth=self.auth)\n except requests.exceptions.RequestException:\n raise NetflixAuthError('An unknown error occurred.')\n\n if response.status_code != 200:\n raise NetflixAuthError('Getting access tokens failed: %s Response Status' % response.status_code)\n\n try:\n auth_tokens = dict(parse_qsl(response.content))\n except AttributeError:\n raise NetflixAuthError('Unable to obtain auth tokens.')\n\n return auth_tokens", "def get_auth_tokens(self, oauth_verifier):\n\n url = self.access_token_url + '?oauth_verifier=' + oauth_verifier\n\n try:\n response = self.client.get(url, headers=self.headers, auth=self.auth)\n except requests.exceptions.RequestException:\n raise NetflixAuthError('An unknown error occurred.')\n\n if response.status_code != 200:\n raise NetflixAuthError('Getting access tokens failed: %s Response Status' % response.status_code)\n\n try:\n auth_tokens = dict(parse_qsl(response.content))\n except AttributeError:\n raise NetflixAuthError('Unable to obtain auth tokens.')\n\n return auth_tokens", "def user_list():\n users = User.objects.all()\n return {\"users\": users}", "def login(self, *, app, user):\n method = 'POST'\n path = self.path('login')\n app = extract_id(app)\n user = extract_name(user)\n data = {'app_id': app,\n 'user_id': user}\n\n token = yield from authenticate(self.req_handler,\n method,\n path,\n json=data)\n return token", "def get_api_token(self, app, user, pwd):\n authorization = ('Basic ' + base64.b64encode(user + \":\" + pwd))\n api_token_resp = app.post('/v1/api_token', headers={'Authorization': authorization})\n if api_token_resp.status != '200 OK':\n raise ValueError(api_token_resp.status)\n api_token = json.loads(api_token_resp.data)['api_token']\n return api_token", "def delete_tokens(self, user_id, tenant_id=None, trust_id=None,\n consumer_id=None):\n if not CONF.token.revoke_by_id:\n return\n token_list = self._list_tokens(user_id,\n tenant_id=tenant_id,\n trust_id=trust_id,\n consumer_id=consumer_id)\n\n for token in token_list:\n try:\n self.delete_token(token)\n except exception.NotFound:\n pass", "def list(self, user, limit = 0, offset = 0, sort = None):\n userId = user['_id'] if user else None\n cursor = self.find({'ownerId': userId}, sort = sort)\n\n for r in self.filterResultsByPermission(cursor = cursor, user = user,\n level = AccessType.READ, limit = limit, offset = offset):\n yield r", "def getByUser(user):\n\n # set page_limits. 
The default is 1 \n pages_limit = request.args.get('pages_limit') or 1\n pages_limit = int(pages_limit)\n\n raw_response = get_response(tw_api, 'statuses/user_timeline', {'screen_name' : user, 'count': 100 }, pages_limit)\n list_response = convert_resp2list(raw_response)\n return jsonify(list_response)", "def get_tokens(self, document):\n raise NotImplementedError()", "def access_token_profile(global_config, existing_user):\n client_app = global_config.client_apps.profile_app\n api = IdApi(global_config.id_home, client_app.id, client_app.password, global_config.urls.id.api)\n yield api.get_access_token_for_user(existing_user.email, existing_user.password)", "def get_token(self):\n\t\tself.client.post('/api/v1/auth/signup', data=json.dumps(self.signup_user), content_type='application/json')\n\t\tresponse = self.client.post('/api/v1/auth/login', data=json.dumps(self.login_user), content_type='application/json')\n\t\tresp = json.loads(response.data.decode())\n\t\treturn 'Bearer ' + resp['access_token']", "def do_login(user):\n\n access_token = create_access_token(identity=user)\n return (jsonify(token=access_token), 200)", "def test_get_user_u2ftokens(self):\n response = self.client.get_user_u2ftokens(\"DU012345678901234567\")\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v1/users/DU012345678901234567/u2ftokens\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def deserialize_tokens():\n\ttry:\n\t\twith open(config.TOKENPATH, \"r+\") as f:\n\t\t\tcontext = f.read()\n\t\t\tres = eval(context)\n\t\t\t# load into memory\n\t\t\treturn res[\"access_token\"], res[\"refresh_token\"]\n\texcept:\n\t\t# unexcept token format\n\t\tfrom common import ApplicationException\n\t\traise ApplicationException(\"authorization file is broken, please run init\")", "def tokens(self) -> list:\n if self._tokens is None:\n tokens_ = sorted(list(self.elements()))\n self._tokens = tokens_\n return self._tokens", "def get_token_information(self):\n GetTokenInformation = ctypes.windll.advapi32.GetTokenInformation\n GetTokenInformation.argtypes = [\n wintypes.HANDLE, # TokenHandle\n ctypes.c_uint, # TOKEN_INFORMATION_CLASS value\n wintypes.LPVOID, # TokenInformation\n wintypes.DWORD, # TokenInformationLength\n ctypes.POINTER(wintypes.DWORD), # ReturnLength\n ]\n GetTokenInformation.restype = wintypes.BOOL\n\n CopySid = ctypes.windll.advapi32.CopySid\n CopySid.argtypes = [\n wintypes.DWORD, # nDestinationSidLength\n ctypes.c_void_p, # pDestinationSid,\n ctypes.c_void_p # pSourceSid\n ]\n CopySid.restype = wintypes.BOOL\n\n GetLengthSid = ctypes.windll.advapi32.GetLengthSid\n GetLengthSid.argtypes = [\n ctypes.POINTER(SID) # PSID\n ]\n GetLengthSid.restype = wintypes.DWORD\n\n return_length = wintypes.DWORD(0)\n buffer = ctypes.create_string_buffer(SECURITY_MAX_SID_SIZE)\n\n res = GetTokenInformation(self.get_process_token(),\n TOKEN_INFORMATION_CLASS.TokenUser,\n buffer,\n SECURITY_MAX_SID_SIZE,\n ctypes.byref(return_length)\n )\n assert res > 0, \"Error in second GetTokenInformation (%d)\" % res\n\n token_user = ctypes.cast(buffer, ctypes.POINTER(TOEKN_USER)).contents\n CopySid(SECURITY_MAX_SID_SIZE,\n self.identity.Value.AccountSid.Data,\n token_user.User.Sid\n )\n self.identity.Type = WINBIO_ID_TYPE_SID\n self.identity.Value.AccountSid.Size = GetLengthSid(token_user.User.Sid)", "def test_csc_authorization_request_list_authlist_user(self):\n # Arrange:\n self.client.credentials(\n 
HTTP_AUTHORIZATION=\"Token \" + self.token_user_authlist.key\n )\n\n # Act:\n url = reverse(\"authlistrequest-list\")\n response = self.client.get(url, format=\"json\")\n\n # Assert\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(response.data), 3)", "def tokens(self):\n return self._tokens", "def tokens(self):\n return self._tokens", "def tokens(self):\n return self._tokens", "def tokens(cls, instance):\n token = super(TumblrOAuth, cls).tokens(instance)\n if token and 'access_token' in token:\n token = dict(tok.split('=')\n for tok in token['access_token'].split('&'))\n return token", "def get_user_spotify_token(user: PlaylstrUser) -> dict:\n if not user.spotify_linked():\n return {\"error\": \"spotify not linked\"}\n if user.spotify_token_expiry is None or user.spotify_token_expiry <= timezone.now():\n updated_token = user.update_spotify_tokens()\n if updated_token != \"success\":\n return {\"error\": updated_token}\n return {\n \"access_token\": user.spotify_access_token,\n \"expires_in\": floor(\n (user.spotify_token_expiry - timezone.now()).total_seconds()\n ),\n }", "def get_token_list():\n token_list = []\n tokens_dir_path = os.path.join(BASE_DIR, TOKENS_DIR)\n for dir, dirs, files in os.walk(tokens_dir_path):\n for file_name in files:\n file = open(os.path.join(tokens_dir_path, file_name), 'r')\n token_list.append(file.read().strip())\n file.close()\n return token_list" ]
[ "0.7914114", "0.728109", "0.70411885", "0.68726313", "0.6513083", "0.6368803", "0.6219715", "0.60768193", "0.60738933", "0.6033857", "0.59760535", "0.59416234", "0.5925432", "0.5917948", "0.59171313", "0.58211654", "0.58108604", "0.578572", "0.5776958", "0.5731432", "0.56818086", "0.567487", "0.566744", "0.56449723", "0.5636147", "0.56041145", "0.5592336", "0.55755794", "0.5566267", "0.55584544", "0.55511665", "0.55064225", "0.54904914", "0.5490261", "0.5488347", "0.5469701", "0.5466617", "0.54665893", "0.5435758", "0.54096806", "0.539161", "0.53773236", "0.53566235", "0.53529686", "0.53430766", "0.53423977", "0.53322285", "0.53285646", "0.5321197", "0.5304591", "0.52889585", "0.52719444", "0.5270767", "0.5245764", "0.52372384", "0.52242404", "0.52119064", "0.52102524", "0.52085656", "0.5206258", "0.5204755", "0.5185847", "0.5182385", "0.5181368", "0.5168128", "0.51642925", "0.5161796", "0.5159322", "0.5156477", "0.5148292", "0.5146322", "0.51427567", "0.5142163", "0.5140283", "0.51390654", "0.5135383", "0.5132401", "0.51171345", "0.51171345", "0.5110162", "0.51089483", "0.50975084", "0.5094651", "0.5089858", "0.5087215", "0.5074302", "0.50710404", "0.5070558", "0.5070491", "0.50633395", "0.50627786", "0.50619876", "0.5056863", "0.50533897", "0.504708", "0.504708", "0.504708", "0.5042623", "0.5037592", "0.5037251" ]
0.72673726
2
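The document field above is the list_auth_tokens method of the OCI Python SDK's IdentityClient. A minimal usage sketch, assuming the standard oci package and a placeholder user OCID:

import oci

config = oci.config.from_file()  # reads ~/.oci/config by default
identity = oci.identity.IdentityClient(config)

# Returns list[AuthToken]; the token value itself is only present on creation.
response = identity.list_auth_tokens("ocid1.user.oc1..exampleuniqueID")
for token in response.data:
    print(token.id, token.description, token.lifecycle_state)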
Lists the availability domains in your tenancy. Specify the OCID of either the tenancy or another of your compartments as the value for the compartment ID (remember that the tenancy is simply the root compartment). See `Where to Get the Tenancy's OCID and User's OCID`__. Note that the order of the results returned can change if availability domains are added or removed; therefore, do not create a dependency on the list order.
def list_availability_domains(self, compartment_id, **kwargs): resource_path = "/availabilityDomains" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_availability_domains got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[AvailabilityDomain]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[AvailabilityDomain]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_fault_domains(self, compartment_id, availability_domain, **kwargs):\n resource_path = \"/faultDomains\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_fault_domains got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"availabilityDomain\": availability_domain\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[FaultDomain]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[FaultDomain]\")", "def show_domains(self):\n show_domains(self.system.cavity_gri)", "def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))", "def cb_listdomains(self, cmd):\n for cur in sorted(self.d.listDomains(),\n key=lambda x: _domreverse(x['domain'])):\n print \"%(domain)60s %(expiration_date)15s\" % cur", "def case_search_enabled_domains():\n return CaseSearchConfig.objects.filter(enabled=True).values_list('domain', flat=True)", "def list_compartments(self, compartment_id, **kwargs):\n resource_path = \"/compartments\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"access_level\",\n \"compartment_id_in_subtree\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_compartments got unknown kwargs: {!r}\".format(extra_kwargs))\n\n if 'access_level' in kwargs:\n access_level_allowed_values = [\"ANY\", \"ACCESSIBLE\"]\n if kwargs['access_level'] not in access_level_allowed_values:\n raise ValueError(\n \"Invalid value for `access_level`, must be one of {0}\".format(access_level_allowed_values)\n )\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"accessLevel\": kwargs.get(\"access_level\", missing),\n \"compartmentIdInSubtree\": kwargs.get(\"compartment_id_in_subtree\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")", "def list_zones(self, **kwargs):\r\n return 
self.client['Account'].getDomains(**kwargs)", "def listDomains(self):\n reply = self.rpc.getDomains(self.username,\n self.password)\n if reply[0] == 'UNKNOWN_ERROR':\n raise Exception(\"RPC returned error: \" + reply[0])\n return reply", "def availability_domain(self):\n return self._availability_domain", "def list_keystone_v3_domains(self):\n LOG_OBJ.debug(\"List the domains.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/domains\"\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while creating domain\")\n print (\"No response from Server while creating domain\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\" Listing domains Failed with status %s \"\n \"and error : %s\" % response.status, response.data)\n print (\" Listing domains Failed with status %s and error : %s\" %\n response.status, response.data)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Domains list : %s \" % output)\n print (\"Domains list : %s \" % output)\n return output['domains']", "async def get_organizations(request: Request):\n redis = request.app.state.redis\n organizations_obj = orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n return [org for org in organizations_obj]", "def list_all():\n\n return (_conn.listDefinedDomains() +\n [_conn.lookupByID(id).name() for id in _conn.listDomainsID()])", "def get_domains(self):\n\n response = self.call(method='getDomains')\n domains = []\n for d in response:\n domain = self.domain(domain=d['domain'])\n domains.append(domain)\n return domains", "def describe_availability_options(DomainName=None, Deployed=None):\n pass", "def get_domains() -> List[str]:\n ret = _call_endpoint(\"v1/domains\")\n # Example response:\n # [{'createdAt': '2016-06-25T03:08:44.000Z',\n # 'domain': 'mydomain.com',\n # 'domainId': 12345678,\n # 'expirationProtected': False,\n # 'expires': '2020-06-25T03:08:44.000Z',\n # 'holdRegistrar': False,\n # 'locked': True,\n # 'nameServers': None,\n # 'privacy': False,\n # 'renewAuto': True,\n # 'renewDeadline': '2020-08-09T03:08:44.000Z',\n # 'renewable': True,\n # 'status': 'ACTIVE',\n # 'transferProtected': False},]\n domains = [d[\"domain\"] for d in ret]\n return domains", "def list_domain_names():\n pass", "def list_all_organizations(ctx):\n pprint(ctx.obj.orgs.get().data)", "def company_lists(self):\n return self.client.get('company/named-lists')", "def get_org_list():\r\n\r\n resp = requests.get(''.join([Kegg.BASE_URL, 'list/organism']))\r\n return resp.text", "def list_orgs(self):\n orgs = list(self.orgs.keys())\n orgs.sort()\n return orgs", "def getDomains(self, company):\n return self.db.getDomains(company)", "def list_tenants(self):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/tenants\"\n _headers = {'x-auth-token': self.cloud_admin_info['token_project']}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\" no response from Server\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\n \" tenant list Failed with status %s \" %\n response.status)\n return response.status\n output = json.loads(response.data)\n LOG_OBJ.info(\"Tenant List : %s \" % output)\n return output[\"tenants\"]", "def get_storage_domains(cohesity_client):\n 
storage_domain_list = cohesity_client.view_boxes.get_view_boxes()\n for domain in storage_domain_list:\n exported_res_dict[\"Storage Domains\"].append(domain.name)\n return storage_domain_list", "async def getDepartments(self, ):\n payload = {}\n \n\n # Parameter validation\n schema = CatalogValidator.getDepartments()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(self._conf.domain, f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments\", \"\"\"{\"required\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}]}\"\"\", )\n query_string = await create_query_string()\n headers = {\n \"Authorization\": \"Bearer \" + await self._conf.getAccessToken()\n }\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(self._conf.domain, \"get\", await create_url_without_domain(f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments\", ), query_string, headers, \"\", exclude_headers=exclude_headers), data=\"\")", "def list_all_agencies():\n return JsonResponse.create(StatusCode.OK, get_all_agencies())", "def get_ad_entries(cohesity_client):\n resp = cohesity_client.active_directory.get_active_directory_entry()\n if resp:\n ad_list = list()\n for each_ad in resp:\n ad_list.append(each_ad.domain_name)\n config_dict[each_ad.domain_name] = [\n \"username\", \"password\", \"machine_accounts\"]\n exported_res_dict[\"Active directories\"] = ad_list\n return resp", "def listRR(self):\n reply = self.rpc.getSubdomains(self.username,\n self.password,\n self.domain)\n\n if len(reply) and reply[0] in ('UNKNOWN_ERROR',\n 'RATE_LIMITED'):\n raise Exception(\"RPC returned error: \" + reply[0])\n return reply", "def _list_orgs(self, context):\r\n try:\r\n rtn = {'context': context,\r\n 'orgs': sorted(list(self._bbreader.cache[context].keys()))}\r\n except KeyError:\r\n raise RequestError('Context {} not found'.format(context))\r\n return rtn", "def companies():\n res = requests.get('http://0.0.0.0:5002/companies')\n return jsonify(res.json())", "def list_domain_names(self) -> Dict:\n pass", "def get_departments(self) -> list:\n return self.client.departments.get_all()", "def domains(self):\n return DomainCollection(self.request)", "def listOrganizations(self, name='', type=''):\n return self.get_json('/organization', {'name': name, 'type': type})", "def get(self, request):\n conn = get_sdk_connection(request)\n availability_zone_list = _sdk_object_to_list(\n conn.load_balancer.availability_zones()\n )\n\n 
return {'items': availability_zone_list}", "def list_allocation_candidates(req):\n context = req.environ['placement.context']\n context.can(policies.LIST)\n want_version = req.environ[microversion.MICROVERSION_ENVIRON]\n get_schema = _get_schema(want_version)\n util.validate_query_params(req, get_schema)\n\n rqparams = lib.RequestWideParams.from_request(req)\n groups = lib.RequestGroup.dict_from_request(req, rqparams)\n\n if not rqparams.group_policy:\n # group_policy is required if more than one numbered request group was\n # specified.\n if len([rg for rg in groups.values() if rg.use_same_provider]) > 1:\n raise webob.exc.HTTPBadRequest(\n 'The \"group_policy\" parameter is required when specifying '\n 'more than one \"resources{N}\" parameter.')\n\n # We can't be aware of nested architecture with old microversions\n nested_aware = want_version.matches((1, 29))\n\n try:\n cands = ac_obj.AllocationCandidates.get_by_requests(\n context, groups, rqparams, nested_aware=nested_aware)\n except exception.ResourceClassNotFound as exc:\n raise webob.exc.HTTPBadRequest(\n 'Invalid resource class in resources parameter: %(error)s' %\n {'error': exc})\n except exception.TraitNotFound as exc:\n raise webob.exc.HTTPBadRequest(str(exc))\n\n response = req.response\n trx_cands = _transform_allocation_candidates(cands, groups, want_version)\n json_data = jsonutils.dumps(trx_cands)\n response.body = encodeutils.to_utf8(json_data)\n response.content_type = 'application/json'\n if want_version.matches((1, 15)):\n response.cache_control = 'no-cache'\n response.last_modified = timeutils.utcnow(with_timezone=True)\n return response", "def availability_zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"availability_zones\")", "def domains(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"domains\")", "def organization_list(request):\n return [o.slug for o in Organization.objects.all()]", "def get_companies(self):\n url = 'companies'\n result = self.get(url)\n return result['companies']", "def tracking_domain_list(self):\r\n params = base.get_params(None, locals())\r\n return self._get('tracking_domain_list', params)", "def _list(self, account, page):\n response = self.client.get(self.get_url(account), data={\"page\": page})\n return [\n DomainResource(**item) for item in response['data']\n ], response['pagination']", "def orca_list():\n val = []\n val.append('orca')\n val.append('orca-b3lyp')\n return val", "def get_all_courses(self) -> List[str]:\n\n print(\"Downloading all Courses from all Domains...\")\n all_courses_url = []\n for domain in DOMAINS:\n print(\"Selected Domain: \", domain)\n selected_domain_url = ROOT_URL + \"/\" + domain\n courses, courses_url = self.get_courses(selected_domain_url)\n all_courses_url += courses_url\n\n return all_courses_url", "def domains(cls):\n return [cls.domain]", "def AllowedDomains(self)->list:\n return self._allowedDomains", "def domain_list_all(self):\n page = 1\n on_page = 100\n ret = []\n while True:\n r = self.domain_list(page=page, on_page=on_page)\n ret += r['domains']\n if len(ret) >= r['total']:\n break\n page += 1\n return ret", "def list_vms(connection: str = None) -> list:\n with libvirt.open(connection) as conn:\n return conn.listAllDomains()", "def allowed_domains(self):\n if self._allowed_domains is None:\n uri = \"/loadbalancers/alloweddomains\"\n resp, body = self.method_get(uri)\n dom_list = body[\"allowedDomains\"]\n self._allowed_domains = [itm[\"allowedDomain\"][\"name\"]\n for itm in dom_list]\n return 
self._allowed_domains", "def get_companies(self):\n response = self.do_request('/undertaking/list')\n if response:\n return response.json()", "def get_subdomains(self):\n\n response = self.call(method='getSubdomains', args=[self.domainname])\n subdomains = []\n for s in response:\n subdomain = self.subdomain(domain=self.domainname, subdomain=s)\n subdomains.append(subdomain)\n return subdomains", "def get_departments() -> list:\n return Department.query.all()", "def domains(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"domains\")", "def get_all_domains(cursor):\r\n try:\r\n cursor.execute(\"select domain_url, max_pages from domains where working = 'no'\")\r\n return cursor.fetchall()\r\n except:\r\n raise RuntimeError(\"An Exception happened with the Database, make sure you are connected\")", "def list_accounts(self):\r\n\r\n account = self.client['Account']\r\n mask = 'cdnAccounts[%s]' % ', '.join(['id',\r\n 'createDate',\r\n 'cdnAccountName',\r\n 'cdnSolutionName',\r\n 'cdnAccountNote',\r\n 'status'])\r\n return account.getObject(mask=mask).get('cdnAccounts', [])", "def list_domain(self, feed_id=None):\n resources = self.list_resource(feed_id=feed_id, resource_type_id='Host Controller')\n domains = []\n if resources:\n for resource in resources:\n resource_data = self.get_config_data(\n feed_id=resource.path.feed_id, resource_id=resource.id)\n domain_data = resource_data.value\n domains.append(Domain(resource.id, resource.name, resource.path, domain_data))\n return domains", "def getEnvironments(request):\n environments = Environment.objects.all()\n serializer = environmentSerializer(environments, many=True)\n result = {'data':serializer.data, 'code':HTTP_200_OK, 'message':OK}\n return result", "def get_availability_zones(self, context, filters=None, fields=None,\n sorts=None, limit=None, marker=None,\n page_reverse=False):", "def get_input_domains():\n df = pandas.read_excel(\"AutoScrapy/files/EBE21 - Top 100 Onlineshops to scrapp.ods\", engine=\"odf\")\n list_of_addresses = df['Domain'].to_list()\n list_of_addresses = [(\"http://\" + address) for address in list_of_addresses]\n print(list_of_addresses)\n return list_of_addresses", "def list_zones(self):\n data = self._paginated_request(\"/v2/domains\", \"domains\")\n return list(map(self._to_zone, data))", "def extract_domains(self, resp):\n return", "def domains_v2():\n # Is this public?\n configs = get_configs()\n if configs['api_requests'] == 'auth':\n # Auth token in headers\n try:\n auth_token = Token.query.filter_by(auth_token=request.headers.get('Authorization')).first()\n except:\n return {\"alternatives\" : \"Database Error with token!\"}\n if not auth_token:\n return {\"alternatives\": \"Unauthorized!\"}\n\n req_data = request.get_json()\n url = req_data['url']\n if not url:\n return {\"alternatives\" : 'None'}\n \n domain_data = check(url)\n alternatives = {\"alternatives\": domain_data['available_alternatives']}\n return alternatives", "def getlist(self):\n self.__domainlist.sort()\n\n outstr = \"{ \"\n for index, domain in enumerate(self.__domainlist):\n outstr += domain + \" \"\n if (index % 50 == 0) and index > 0:\n outstr += \"}\\n{ \"\n\n outstr += \"}\"\n\n return outstr", "def list_hosts():\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n res = hosts.get_all(db)\n res = {'list': res}\n return jsonify(res)", "def organizations(self):\n self.elements('organizations')", "def list(self) -> List[Organisation]:\n ...", "def relevant_domains(self):\n pass", "def 
get_available_companies(team):", "def get_search_domains(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrvCfg_GetSearchDomains', self.handle))", "def get_delta_domains():\n url = os.getenv('DELTAS_URL')\n if url is None:\n raise Exception('Delta report URL configuration not set!')\n\n json = requests.get(url, timeout=10).json()\n return [domain\n for (domain,)\n in json['values']\n if dnstwist.is_valid_domain(domain)]", "def get_host_list():\n gparr = GpArray.initFromCatalog(dbconn.DbURL(port = MASTER_PORT), utility = True)\n segs = gparr.getDbList()\n\n master = None\n standby_host = None\n segment_host_list = []\n\n for seg in segs:\n if seg.isSegmentStandby(current_role=True):\n standby_host = seg.getSegmentHostName()\n elif not seg.isSegmentMaster(current_role=True):\n segment_host_list.append(seg.getSegmentHostName())\n elif seg.isSegmentMaster(current_role=True):\n master = seg.getSegmentHostName()\n\n #Deduplicate the hosts so that we\n #dont install multiple times on the same host\n segment_host_list = list(set(segment_host_list))\n if master in segment_host_list:\n segment_host_list.remove(master)\n\n return (standby_host, segment_host_list)", "def get_botnet_domains():\n\n fw = \"<HTTPS://YOUR_FORTIGATE_IP:YOUR_FORTIGATE_PORT>\"\n\n path = \"/api/v2/monitor/system/botnet-domains/hits/?access_token=\"\n\n token = \"<YOUR_API_KEY>\"\n\n content_filter = \"\"\n\n if content_filter != \"\":\n url = fw + path + token + content_filter\n else:\n url = fw + path + token\n\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n \n try:\n r = requests.get(url, verify=False).json()\n except Exception:\n print(\"Something went wrong. Is the url correct? Exiting...\")\n sys.exit()\n\n for key in r['results']:\n print()\n for k,v in key.items():\n print(\"{0:6} : {1}\".format(k.upper(), str(v)))", "def get_companies():\n all_companies = storage.all(Company).values()\n list_companies = []\n for company in all_companies:\n list_companies.append(company.to_dict())\n return jsonify(list_companies)", "def get_available_agendas(self):\n pass", "def get_all_biz_orgs(self):\n return [org for org in Org.objects.filter(status='biz')]", "def list(self, **kwargs):\n\n return self.getResourceManager() \\\n .getSdk() \\\n .hosts \\\n .list(**kwargs)", "async def All_orgs():\n\n links_13 = []\n links_14 = []\n valid_url = \"/?archive/?gsoc/\\d+[0-9]/orgs/[a-zA-Z]+\"\n for year in range(2009, 2016):\n year_url = melange + \"/archive/gsoc/{}\".format(year)\n soup = await get_page(year_url)\n\n for url in soup.find_all('a'):\n if re.match(valid_url, url.get(\"href\")):\n if year <= 2013:\n links_13.append(join(melange, url.get(\"href\")[1:]))\n else:\n links_14.append(join(melange, url.get(\"href\")[1:]))\n return links_13, links_14", "def list_type_A_domain(self, domain):\n r53 = self.connections.get_route53()\n # Get Zone ID\n zone = r53.get_zone(domain)\n zone_id = zone.id\n # Get all type A records\n records = r53.get_all_rrsets(hosted_zone_id=zone_id, name='A')\n for record in records:\n print(record)", "def apt_list(cal, c_id, start, end):\n\n # Get the appointments returning it as list of dictionaries\n appointments_result = cal.events().list(\n calendarId=c_id,\n timeMin=start,\n timeMax=end,\n singleEvents=True,\n orderBy='startTime'\n ).execute()\n appointments = appointments_result.get('items', [])\n return appointments", "def fetch_list(self):\n\t\treturn self.fetch(self.list_url % ART_SERVER_HOST)", "def get_queryset(self):\n return 
self.request.user.setting_set.get().companies", "def get_queryset(self):\n return self.request.user.setting_set.get().companies", "def get_queryset(self):\n return self.request.user.setting_set.get().companies", "def get_queryset(self):\n return self.request.user.setting_set.get().companies", "def ls():\n return dynamodb.ls(OrganizationModel)", "def list_deployments() -> JSONResponse:\n\n deploy_manager = DeployManager()\n deployments = deploy_manager.list()\n return JSONResponse(deployments)", "def allowed_domains(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"allowed_domains\")", "def list_zones(self):\n action = \"/api_dns_list.asp\"\n if self.reseller_id is not None:\n action = \"/api_dns_list_reseller.asp\"\n zones = self.connection.request(action)\n if len(zones.body) == 0:\n return []\n else:\n return self._to_zones(zones.body)", "def get_companies(self):\n response = self.do_request('/management/companies/export/json')\n if response:\n return response.json()", "def test_get_contact_lists(self):\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n \n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n content = json.loads(response.content)\n self.assertEqual(len(content), self.contact_lists_count)", "def _get_domain(self):\n self.ensure_one()\n domain = ['|', ('active', '=', True), ('active', '=', False)]\n # Check active\n if self.active == 'true':\n domain += [('active', '=', True)]\n elif self.active == 'false':\n domain += [('active', '=', False)]\n # Check partner type\n if self.partner_type == 'customer_or_supplier':\n domain += ['|', ('customer', '=', True), ('supplier', '=', True)]\n elif self.partner_type == 'customer_and_supplier':\n domain += [('customer', '=', True), ('supplier', '=', True)]\n elif self.partner_type == 'customer':\n domain += [('customer', '=', True)]\n elif self.partner_type == 'supplier':\n domain += [('supplier', '=', True)]\n # Check category\n if self.category_ids:\n domain += [('category_id', 'in', self.category_ids.ids)]\n return domain", "def show_all_departments():\n\n logger.debug('Function show_all_departments(). 
Routed to /departments')\n titles = ['Name', 'Average Salary', 'Employees']\n departments = ds.get_all()\n logger.info('Get list of departments, length is %i', len(departments))\n return render_template('departments.html',\n title='Departments',\n table_title='List of Departments',\n headers=titles,\n departments=departments)", "def acceptedOrgs(self, request, access_type,\n page_name=None, params=None, filter=None, **kwargs):\n\n from soc.modules.ghop.views.models.organization import view as org_view\n\n logic = params['logic']\n\n program_entity = logic.getFromKeyFieldsOr404(kwargs)\n\n fmt = {'name': program_entity.name}\n\n params = params.copy()\n params['list_msg'] = program_entity.accepted_orgs_msg\n params['list_description'] = self.DEF_PARTICIPATING_ORGS_MSG_FMT % fmt\n# TODO(LIST)\n return self.list(request, 'any_access', page_name=page_name, params=params)", "def option_domains_always_in_scope(self):\n return six.next(six.itervalues(self.zap._request(self.zap.base + 'spider/view/optionDomainsAlwaysInScope/')))", "def get_companies(request):\n try:\n companies = []\n for company in Company.objects.all():\n companies.append(company.dump_to_dict())\n\n return format_ajax_response(True, \"Companies list retrieved successfully.\", {'companies': companies})\n except Exception as ex:\n logging.error(\"failed to get_companies: %s\" % ex)\n return format_ajax_response(False, \"There was a problem retrieving the companies listing.\")", "def get_search_domains(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetSearchDomains', self.handle))", "def get_organization_links_by_page(self):\n return self.get_resource_by_page(\"/orgs\")", "def ListDomains(self, perPage=0, page=1):\n\n class Result(Model):\n domains = ListField(ModelField(Domain))\n\n if perPage != 0:\n headers = {\"perPage\": perPage, \"page\": page}\n response = self.client.http_get(\"/v4/domains\", headers)\n else:\n response = self.client.http_get(\"/v4/domains\")\n\n return parse_response(response, Result)", "def domains(cls):\n return (cls.domain, )", "def all_domains():\n vd = virtual_domains()\n ad = actual_domains()\n return vd.union(ad)", "def check_domains(self, service_id, service_version):\n domain_list = self.fastly_cache[service_id]['domain_list']\n\n return domain_list" ]
[ "0.5952278", "0.5752911", "0.56866777", "0.5658533", "0.5655036", "0.56334436", "0.5622109", "0.55967", "0.5533269", "0.55126745", "0.5500697", "0.5494048", "0.5493609", "0.5492394", "0.54891664", "0.54583323", "0.5394912", "0.5390071", "0.5349818", "0.5348451", "0.53267604", "0.53245103", "0.53077286", "0.53016484", "0.5293885", "0.5280641", "0.5271698", "0.5207974", "0.5188195", "0.5128355", "0.5118705", "0.51174355", "0.51070684", "0.50993997", "0.50929034", "0.5092333", "0.50747937", "0.5073553", "0.50689656", "0.50418985", "0.50363374", "0.5030613", "0.50270194", "0.5022588", "0.4985475", "0.4978667", "0.49782193", "0.4973791", "0.49720135", "0.49601123", "0.49328062", "0.4931108", "0.4920124", "0.49126798", "0.4901963", "0.4900324", "0.48895448", "0.4887815", "0.4883379", "0.48786938", "0.4878674", "0.4866199", "0.48508996", "0.484026", "0.4837214", "0.48319218", "0.4828377", "0.4822591", "0.4816623", "0.48067528", "0.48054466", "0.48009178", "0.4800904", "0.48008713", "0.47989228", "0.4794219", "0.4793023", "0.47857022", "0.47771347", "0.47715583", "0.47715583", "0.47715583", "0.47715583", "0.47682098", "0.47681028", "0.47479713", "0.4734521", "0.47329113", "0.4729656", "0.4725687", "0.47222358", "0.47219634", "0.47152218", "0.4713682", "0.47104892", "0.47073728", "0.46947706", "0.46936202", "0.4683127", "0.46776658" ]
0.72129303
0
Lists the compartments in a specified compartment. The members of the list returned depend on the values set for several parameters. With the exception of the tenancy (root compartment), the ListCompartments operation returns only the first-level child compartments in the parent compartment specified in `compartmentId`. The list does not include any subcompartments of the child compartments (grandchildren). The parameter `accessLevel` specifies whether to return only those compartments for which the requestor has INSPECT permissions on at least one resource directly or indirectly (the resource can be in a subcompartment). The parameter `compartmentIdInSubtree` applies only when you perform ListCompartments on the tenancy (root compartment). When set to true, the entire hierarchy of compartments can be returned. To get a full list of all compartments and subcompartments in the tenancy (root compartment), set the parameter `compartmentIdInSubtree` to true and `accessLevel` to ANY. See `Where to Get the Tenancy's OCID and User's OCID`__.
def list_compartments(self, compartment_id, **kwargs):
    resource_path = "/compartments"
    method = "GET"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "page",
        "limit",
        "access_level",
        "compartment_id_in_subtree"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "list_compartments got unknown kwargs: {!r}".format(extra_kwargs))

    if 'access_level' in kwargs:
        access_level_allowed_values = ["ANY", "ACCESSIBLE"]
        if kwargs['access_level'] not in access_level_allowed_values:
            raise ValueError(
                "Invalid value for `access_level`, must be one of {0}".format(access_level_allowed_values)
            )

    query_params = {
        "compartmentId": compartment_id,
        "page": kwargs.get("page", missing),
        "limit": kwargs.get("limit", missing),
        "accessLevel": kwargs.get("access_level", missing),
        "compartmentIdInSubtree": kwargs.get("compartment_id_in_subtree", missing)
    }
    query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="list[Compartment]")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="list[Compartment]")
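The positive document above is the SDK's `list_compartments` operation. A minimal usage sketch follows (an editorial illustration, not part of the dataset row): `identity_client` and `tenancy_ocid` are assumed names, and the response object is assumed to expose `.data` for the deserialized `list[Compartment]` and a `next_page` token for pagination.

# Hypothetical usage sketch -- every name here is an assumption, not dataset content.
page = None
while True:
    response = identity_client.list_compartments(
        tenancy_ocid,                    # parent compartment (here: the tenancy OCID)
        compartment_id_in_subtree=True,  # walk the entire compartment hierarchy
        access_level="ANY",              # must be "ANY" or "ACCESSIBLE" per the validation above
        limit=100,                       # page size; combined with `page` for pagination
        page=page,                       # None is filtered out of query_params on the first call
    )
    for compartment in response.data:    # `.data` assumed to hold the list[Compartment]
        print(compartment.id)
    page = getattr(response, "next_page", None)  # opc-next-page token, assumed attribute
    if not page:
        break

Pairing `compartment_id_in_subtree=True` with `access_level="ANY"` mirrors the query's recipe for enumerating the full compartment tree of the tenancy.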
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getListOfCompartments(self):\n return self.model.getListOfCompartments()", "def getListOfCompartments(self, *args):\n return _libsbml.Model_getListOfCompartments(self, *args)", "def get_compartment(self, compartment_id, **kwargs):\n resource_path = \"/compartments/{compartmentId}\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"get_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")", "def getListOfCompartmentReferences(self, *args):\n return _libsbml.MultiCompartmentPlugin_getListOfCompartmentReferences(self, *args)", "def get(self, *args):\n return _libsbml.ListOfCompartments_get(self, *args)", "def getListOfCompartmentTypes(self, *args):\n return _libsbml.Model_getListOfCompartmentTypes(self, *args)", "def getCompartment(self, *args):\n return _libsbml.Model_getCompartment(self, *args)", "def list_groups(self, compartment_id, **kwargs):\n resource_path = \"/groups\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_groups got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Group]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Group]\")", "def list_policies(self, compartment_id, **kwargs):\n resource_path = \"/policies\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in 
expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_policies got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Policy]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Policy]\")", "def getCompartment(self):\n return _libsbml.CompartmentReference_getCompartment(self)", "def get(self, *args):\n return _libsbml.ListOfCompartmentReferences_get(self, *args)", "def get_compounds(self, ctx, params):\n # ctx is the context object\n # return variables are: out_compounds\n #BEGIN get_compounds\n self._check_param(params, ['compounds'])\n out_compounds = []\n for x in params['compounds']:\n id = x.split('/')[-1]\n comp = self.compounds.get(id, None)\n if comp:\n comp['aliases'] = self.comp_aliases.get(id, '')\n out_compounds.append(comp)\n #END get_compounds\n\n # At some point might do deeper type checking...\n if not isinstance(out_compounds, list):\n raise ValueError('Method get_compounds return value ' +\n 'out_compounds is not type list as required.')\n # return the results\n return [out_compounds]", "def findcomps():\n try:\n appuser, _ = util.authenticate()\n tlid = dbacc.reqarg(\"tlid\", \"dbid\", required=True)\n where = (\"WHERE tlid = \" + tlid + \" AND userid != \" + appuser[\"dsId\"] +\n \" ORDER BY modified DESC LIMIT 50\")\n tlcs = dbacc.query_entity(\"TLComp\", where)\n except ValueError as e:\n return util.serve_value_error(e)\n return util.respJSON(tlcs)", "def isSetCompartment(self):\n return _libsbml.CompartmentReference_isSetCompartment(self)", "def setCompartment(self, *args):\n return _libsbml.CompartmentReference_setCompartment(self, *args)", "def capacitygroup_list(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_capacitygroup_list(cmd_ctx, cpc, options))", "def generate_compartments(parameterdict):\n\n refcmpts, model = [parameterdict[i] for i in ['refcmpts', 'model']]\n\n peripherals = [] # List for peripheral compartments\n # Iterates through compartments. 
Adds peripherals to peripheral list,\n # creates main and optionally sub compartment (if in SC model).\n # Doesn't allow multiple main/sub compartments.\n for cmpt in refcmpts:\n if cmpt[2] == 'Peripheral':\n peripherals.append(Compartment(cmpt[0], cmpt[1]))\n\n elif cmpt[2] == 'Main':\n if 'maincmpt' in locals():\n raise ValueError(\"Can't have two main compartments.\")\n else:\n maincmpt = Compartment(cmpt[0], cmpt[1])\n\n elif cmpt[2] == 'Sub' and model == 'sc':\n if 'subcmpt' in locals():\n raise ValueError(\"Can't have two subcompartments.\")\n else:\n subcmpt = Compartment(cmpt[0], cmpt[1])\n if subcmpt not in locals():\n subcmpt = None\n\n return maincmpt, peripherals, subcmpt", "def list_availability_domains(self, compartment_id, **kwargs):\n resource_path = \"/availabilityDomains\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_availability_domains got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[AvailabilityDomain]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[AvailabilityDomain]\")", "def list_components(self, request, context):\n response = ListComponentsResponse()\n for component in self._delegator.list_components():\n response.components.append(component)\n return response", "def getCompartmentType(self):\n return _libsbml.Compartment_getCompartmentType(self)", "def get_all_comps(self,only_leaves=False):\n def get_comp_comps(comp):\n if not comp.is_kit_invoice_comp:\n return [comp] # stop condition\n if only_leaves: # meaning we don't want under-kits\n res = []\n else:\n res = [comp]\n for c in comp.child_ids:\n res += get_comp_comps(c) # recursive call\n return res\n self.ensure_one()\n if not self.is_kit_invoice_line:\n return []\n result = []\n for comp in self.direct_child_ids:\n result += get_comp_comps(comp)\n return result", "def isSetCompartment(self):\n return _libsbml.QualitativeSpecies_isSetCompartment(self)", "def addCompartment(self, vol=1, comp_id=\"\"):\n\n c1 = self.model.createCompartment()\n self.check(c1, \"create compartment\")\n if len(comp_id) == 0:\n comp_id = \"c\" + str(self.model.getNumCompartments())\n self.check(c1.setId(comp_id), \"set compartment id\")\n self.check(c1.setConstant(True), 'set compartment \"constant\"')\n self.check(c1.setSpatialDimensions(3), \"set compartment dimensions\")\n\n self.check(c1.setSize(vol), 'set compartment \"size\"')\n self.check(c1.setUnits(\"litre\"), \"set compartment size units\")\n return c1", "def compartment_id(self):\n return self._compartment_id", "def compartment_id(self):\n return self._compartment_id", "def List(self, parent_id=None, batch_mode=False, only_generate_request=False):\n\n if batch_mode:\n requests = 
[self._MakeListRequestTuple(parent_id)]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n return [self._service.List(self._MakeListRequestTuple(parent_id)[2])]", "def components_list(self, mar, _request):\n config = self._services.config.GetProjectConfig(mar.cnxn, mar.project_id)\n components = [api_pb2_v1_helpers.convert_component_def(\n cd, mar, self._services) for cd in config.component_defs]\n return api_pb2_v1.ComponentsListResponse(\n components=components)", "def getCompartment(self):\n return _libsbml.Reaction_getCompartment(self)", "def isSetCompartment(self):\n return _libsbml.Reaction_isSetCompartment(self)", "def get_campus_list(self, conn, offset=0, limit=100):\n path = urls.FLOOR_PLAN[\"GET_CAMPUS_LIST\"]\n params = {\n \"offset\": offset,\n \"limit\": limit\n }\n resp = conn.command(apiMethod=\"GET\", apiPath=path, apiParams=params)\n return resp", "def getCompartment(self):\n return _libsbml.QualitativeSpecies_getCompartment(self)", "def perform_list(params):\n\n cpc_name = params.get('cpc_name', None)\n\n session, logoff = open_session(params)\n try:\n client = zhmcclient.Client(session)\n\n # The \"List Permitted Partitions\" operation was added in HMC\n # version 2.14.0. The operation depends only on the HMC version and not\n # on the SE/CPC version, so it is supported e.g. for a 2.14 HMC managing\n # a z13 CPC.\n hmc_version = client.query_api_version()['hmc-version']\n hmc_version_info = [int(x) for x in hmc_version.split('.')]\n if hmc_version_info < [2, 14, 0]:\n # List the partitions in the traditional way\n if cpc_name:\n LOGGER.debug(\"Listing partitions of CPC %s\", cpc_name)\n cpc = client.cpcs.find(name=cpc_name)\n partitions = cpc.partitions.list()\n else:\n LOGGER.debug(\"Listing partitions of all managed CPCs\")\n cpcs = client.cpcs.list()\n partitions = []\n for cpc in cpcs:\n partitions.extend(cpc.partitions.list())\n else:\n # List the partitions using the new operation\n if cpc_name:\n LOGGER.debug(\"Listing permitted partitions of CPC %s\", cpc_name)\n filter_args = {'cpc-name': cpc_name}\n else:\n LOGGER.debug(\"Listing permitted partitions of all managed CPCs\")\n filter_args = None\n partitions = client.consoles.console.list_permitted_partitions(\n filter_args=filter_args)\n # The default exception handling is sufficient for the above.\n\n se_versions = {}\n partition_list = []\n for partition in partitions:\n\n # se-version has been added to the result of List Permitted\n # Partitions in HMC/SE 2.14.1. 
Before that, it triggers the\n # retrieval of CPC properties.\n parent_cpc = partition.manager.cpc\n try:\n se_version = se_versions[parent_cpc.name]\n except KeyError:\n try:\n se_version = partition.properties['se-version']\n except KeyError:\n se_version = parent_cpc.get_property('se-version')\n se_versions[parent_cpc.name] = se_version\n\n partition_properties = {\n \"name\": partition.name,\n \"cpc_name\": parent_cpc.name,\n \"se_version\": se_version,\n \"status\": partition.get_property('status'),\n \"has_unacceptable_status\": partition.get_property(\n 'has-unacceptable-status'),\n }\n partition_list.append(partition_properties)\n\n return partition_list\n\n finally:\n close_session(session, logoff)", "def getCompartment(self):\n return _libsbml.Species_getCompartment(self)", "def get(self, *args):\n return _libsbml.ListOfCompartmentTypes_get(self, *args)", "def recover_compartment(self, compartment_id, **kwargs):\n resource_path = \"/compartments/{compartmentId}/actions/recoverCompartment\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\",\n \"opc_request_id\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"recover_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing),\n \"opc-request-id\": kwargs.get(\"opc_request_id\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")", "def list_catalogs(self):\n return self._json_object_field_to_list(\n self._get_catalogs_json(), self.__MISSION_STRING)", "def get_compliment():\n name = request.args.get('name')\n show_compliments = request.args.get('show_compliments')\n compliments_to_show = sample(compliments, 3)\n\n return render_template(\n 'compliments.html',\n name=name,\n show_compliments=show_compliments,\n compliments=compliments_to_show)", "def list_tag_namespaces(self, compartment_id, **kwargs):\n resource_path = \"/tagNamespaces\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"include_subcompartments\",\n \"lifecycle_state\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_tag_namespaces got unknown kwargs: {!r}\".format(extra_kwargs))\n\n if 'lifecycle_state' in kwargs:\n lifecycle_state_allowed_values = 
[\"ACTIVE\", \"INACTIVE\", \"DELETING\", \"DELETED\"]\n if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:\n raise ValueError(\n \"Invalid value for `lifecycle_state`, must be one of {0}\".format(lifecycle_state_allowed_values)\n )\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"includeSubcompartments\": kwargs.get(\"include_subcompartments\", missing),\n \"lifecycleState\": kwargs.get(\"lifecycle_state\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[TagNamespaceSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[TagNamespaceSummary]\")", "def isSetCompartment(self):\n return _libsbml.Species_isSetCompartment(self)", "def getCoursesList(self, pageSize=100):\n results = self.service.courses().list(pageSize=pageSize).execute()\n self.courses = results.get('courses', [])\n if not self.courses:\n return []\n return self.courses # Might not have to return self.courses, but it's useful for now", "def compartment_id(self, compartment_id):\n self._compartment_id = compartment_id", "def compartment_id(self, compartment_id):\n self._compartment_id = compartment_id", "def getCompartmentType(self):\n return _libsbml.MultiCompartmentPlugin_getCompartmentType(self)", "def list_allocation_candidates(req):\n context = req.environ['placement.context']\n context.can(policies.LIST)\n want_version = req.environ[microversion.MICROVERSION_ENVIRON]\n get_schema = _get_schema(want_version)\n util.validate_query_params(req, get_schema)\n\n rqparams = lib.RequestWideParams.from_request(req)\n groups = lib.RequestGroup.dict_from_request(req, rqparams)\n\n if not rqparams.group_policy:\n # group_policy is required if more than one numbered request group was\n # specified.\n if len([rg for rg in groups.values() if rg.use_same_provider]) > 1:\n raise webob.exc.HTTPBadRequest(\n 'The \"group_policy\" parameter is required when specifying '\n 'more than one \"resources{N}\" parameter.')\n\n # We can't be aware of nested architecture with old microversions\n nested_aware = want_version.matches((1, 29))\n\n try:\n cands = ac_obj.AllocationCandidates.get_by_requests(\n context, groups, rqparams, nested_aware=nested_aware)\n except exception.ResourceClassNotFound as exc:\n raise webob.exc.HTTPBadRequest(\n 'Invalid resource class in resources parameter: %(error)s' %\n {'error': exc})\n except exception.TraitNotFound as exc:\n raise webob.exc.HTTPBadRequest(str(exc))\n\n response = req.response\n trx_cands = _transform_allocation_candidates(cands, groups, want_version)\n json_data = jsonutils.dumps(trx_cands)\n response.body = encodeutils.to_utf8(json_data)\n response.content_type = 'application/json'\n if want_version.matches((1, 15)):\n response.cache_control = 'no-cache'\n response.last_modified = timeutils.utcnow(with_timezone=True)\n return response", "def 
createCompartment(self):\n return _libsbml.Model_createCompartment(self)", "def get_complexes(self):\n q_cmplx = prefixes + \"\"\"\n SELECT ?complexTerm ?childName ?child ?stmt\n WHERE {\n {\n {?stmt belvoc:hasSubject ?complexTerm}\n UNION\n {?stmt belvoc:hasObject ?complexTerm .}\n UNION\n {?stmt belvoc:hasSubject ?term .\n ?term belvoc:hasChild ?complexTerm .}\n UNION\n {?stmt belvoc:hasObject ?term .\n ?term belvoc:hasChild ?complexTerm .}\n }\n ?complexTerm a belvoc:Term .\n ?complexTerm a belvoc:ComplexAbundance .\n ?complexTerm belvoc:hasChild ?child .\n ?child belvoc:hasConcept ?childName .\n }\n \"\"\"\n # Run the query\n res_cmplx = self.g.query(q_cmplx)\n\n # Store the members of each complex in a dict of lists, keyed by the\n # term for the complex\n cmplx_dict = collections.defaultdict(list)\n cmplx_ev = {}\n for stmt in res_cmplx:\n stmt_uri = stmt[3]\n ev = self._get_evidence(stmt_uri)\n for e in ev:\n e.epistemics['direct'] = True\n cmplx_name = term_from_uri(stmt[0])\n cmplx_id = stmt_uri + '#' + cmplx_name\n child = self._get_agent(stmt[1], stmt[2])\n cmplx_dict[cmplx_id].append(child)\n # This might be written multiple times but with the same\n # evidence\n cmplx_ev[cmplx_id] = ev\n # Now iterate over the stored complex information and create binding\n # statements\n for cmplx_id, cmplx_list in cmplx_dict.items():\n if len(cmplx_list) < 2:\n msg = 'Complex %s has less than 2 members! Skipping.' % \\\n cmplx_name\n logger.warning(msg)\n else:\n self.statements.append(Complex(cmplx_list,\n evidence=cmplx_ev[cmplx_id]))", "async def get_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.get_certificate_contacts(\n vault_base_url=self._vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def delete_compartment(self, compartment_id, **kwargs):\n resource_path = \"/compartments/{compartmentId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def isSetCompartmentId(self):\n return _libsbml.CompartmentGlyph_isSetCompartmentId(self)", "def isSetCompartmentType(self):\n return 
_libsbml.Compartment_isSetCompartmentType(self)", "def getCompartment(self):\n return _libsbml.MultiSpeciesType_getCompartment(self)", "def isSetCompartment(self):\n return _libsbml.MultiSpeciesType_isSetCompartment(self)", "def list_dynamic_groups(self, compartment_id, **kwargs):\n resource_path = \"/dynamicGroups\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_dynamic_groups got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[DynamicGroup]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[DynamicGroup]\")", "def isSetCompartmentType(self):\n return _libsbml.MultiCompartmentPlugin_isSetCompartmentType(self)", "def setCompartmentType(self, *args):\n return _libsbml.Compartment_setCompartmentType(self, *args)", "def isCompartmentVolume(self):\n return _libsbml.Rule_isCompartmentVolume(self)", "def get_compo_list(self):\n if not os.path.isdir(self.datapath):\n os.makedirs(self.datapath)\n return self.components", "def get_listable_courses(self):\n listable_courses = []\n for course in self.get_public_courses():\n if (course.should_list or self.is_enrolled(course)):\n listable_courses.append(self.get_course_info(course))\n return listable_courses", "def createCompartmentReference(self):\n return _libsbml.MultiCompartmentPlugin_createCompartmentReference(self)", "async def getDepartments(self, ):\n payload = {}\n \n\n # Parameter validation\n schema = CatalogValidator.getDepartments()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(self._conf.domain, f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments\", \"\"\"{\"required\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}]}\"\"\", )\n query_string = await create_query_string()\n 
headers = {\n \"Authorization\": \"Bearer \" + await self._conf.getAccessToken()\n }\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(self._conf.domain, \"get\", await create_url_without_domain(f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments\", ), query_string, headers, \"\", exclude_headers=exclude_headers), data=\"\")", "def get_courses(browser, college, dept_node):\n # enter the department course list\n dept_node.click()\n wait_for_load(browser)\n\n # parse course entries\n entries = browser.find_element_by_id('courseListHolder')\n\n courses = []\n for i in entries.find_elements_by_xpath('div/div'):\n course = parse_course(browser, college, i)\n if course:\n courses.append(course)\n\n # go back to the department index\n back_button = browser.find_element_by_id('btn-deptlist')\n back_button.click()\n wait_for_load(browser)\n\n return courses", "def oc(self, stimulusID):\r\n global stimulusAPI\r\n try:\r\n pageList = stimulusAPI.getStimulusScope(stimulusID)\r\n agentSet = set([])\r\n for page in pageList:\r\n localAgentList = stimulusAPI.getAllAgentsWithViewOfSpecifiedPage(page)\r\n localAgentSet = set(localAgentList)\r\n agentSet.update(localAgentSet)\r\n agentList = list(agentSet)\r\n return agentList\r\n except Exceptions.InvalidStimulusProcessingType as e:\r\n raise e\r\n except Exceptions.ScriptError as e:\r\n raise e\r\n #self.execute(stimulusID)\r\n except Exception as e:\r\n raise Exceptions.ScriptError(e)", "def comports(include_links=False):\n return list(iterate_comports())", "def getCompartmentReference(self, *args):\n return _libsbml.MultiCompartmentPlugin_getCompartmentReference(self, *args)", "def list_components(self) -> Dict[str, Any]:\n return self._manager.list_components()", "def ListCollaborations(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def cones_and_cylinders(\n target,\n pore_diameter='pore.diameter',\n throat_diameter='throat.diameter'\n):\n from openpnm.models.geometry import conduit_lengths\n out = conduit_lengths.cones_and_cylinders(\n target, pore_diameter=pore_diameter, throat_diameter=throat_diameter\n )\n return out[:, 1]", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def get_recursive_components (self, comp_name):\r\n comp_list = []\r\n # Current component\r\n comp_data = self.__get_component_structure( comp_name ) \r\n\r\n while comp_data is not None:\r\n comp_list.append(comp_data)\r\n # Parent components \r\n p_comp = comp_data[\"superclass\"]\r\n comp_data = self.__get_component_structure(p_comp)\r\n\r\n return comp_list", "def browse_mentors(request):\n all_mentors = Mentor.objects.all()\n mentors = [mentor for mentor in all_mentors if mentor.mentor.has_capacity()]\n return render(request, 'match/browse_mentors.html', {'mentors': mentors})", "def clone(self):\n return _libsbml.ListOfCompartments_clone(self)", "def convert_compartments_to_list(self, compartments):\n\n return [compartments[l] for l in self.labels]", "def 
create_compartment(self, create_compartment_details, **kwargs):\n resource_path = \"/compartments\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_compartment_details,\n response_type=\"Compartment\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_compartment_details,\n response_type=\"Compartment\")", "def list_cost_tracking_tags(self, compartment_id, **kwargs):\n resource_path = \"/tagNamespaces/actions/listCostTrackingTags\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_cost_tracking_tags got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Tag]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Tag]\")", "def setCompartment(self, *args):\n return _libsbml.Reaction_setCompartment(self, *args)", "def get_components_list(self):\n\n components_list = self.driver.find_elements(*BasePageLocators.LIST_COMPONENS)\n return components_list", "def getListOfCompartmentGlyphs(self):\n return _libsbml.Layout_getListOfCompartmentGlyphs(self)", "def _add_compartments(self, comps):\n return self._cm.add_compartments(comps)", "def list(self, detailed=True, search_opts=None):\n\n query_string = utils.build_query_param(search_opts)\n\n detail = \"\"\n if detailed:\n detail = \"/detail\"\n\n return self._list(\"/consistencygroups%s%s\" % (detail, query_string),\n \"consistencygroups\")", "def list_missions(self):\n\n # getting all the histogram information\n service = \"Mast.Caom.All\"\n params = {}\n response = 
self.service_request_async(service, params, format='extjs')\n jsonResponse = response[0].json()\n\n # getting the list of missions\n histData = jsonResponse['data']['Tables'][0]['Columns']\n for facet in histData:\n if facet['text'] == \"obs_collection\":\n missionInfo = facet['ExtendedProperties']['histObj']\n missions = list(missionInfo.keys())\n missions.remove('hist')\n return missions", "def list(\n self,\n name: Optional[str] = None,\n *,\n list_view_type: ListViewType = ListViewType.ACTIVE_ONLY,\n ) -> Iterable[Environment]:\n if name:\n return (\n self._version_operations.list(\n name=name,\n registry_name=self._registry_name,\n cls=lambda objs: [Environment._from_rest_object(obj) for obj in objs],\n **self._scope_kwargs,\n **self._kwargs,\n )\n if self._registry_name\n else self._version_operations.list(\n name=name,\n workspace_name=self._workspace_name,\n cls=lambda objs: [Environment._from_rest_object(obj) for obj in objs],\n list_view_type=list_view_type,\n **self._scope_kwargs,\n **self._kwargs,\n )\n )\n return (\n self._containers_operations.list(\n registry_name=self._registry_name,\n cls=lambda objs: [Environment._from_container_rest_object(obj) for obj in objs],\n **self._scope_kwargs,\n **self._kwargs,\n )\n if self._registry_name\n else self._containers_operations.list(\n workspace_name=self._workspace_name,\n cls=lambda objs: [Environment._from_container_rest_object(obj) for obj in objs],\n list_view_type=list_view_type,\n **self._scope_kwargs,\n **self._kwargs,\n )\n )", "def getCompartmentId(self):\n return _libsbml.CompartmentGlyph_getCompartmentId(self)", "def list(self,\n component_type=None,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n summary=None,\n sync=None,\n ):\n return self._invoke('list',\n {\n 'component_type': component_type,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n 'summary': summary,\n 'sync': sync,\n })", "def components(self, predicate=None):\n \n if predicate is None:\n return self._get(\"components\").json()\n else:\n return self._get(\"components/search\", params={\"predicate\":predicate}).json()", "def setCompartmentId(self, *args):\n return _libsbml.CompartmentGlyph_setCompartmentId(self, *args)", "def getCompartmentType(self, *args):\n return _libsbml.Model_getCompartmentType(self, *args)", "def get_courses(self, depth=0):\r\n return self.courses.values()", "def show_all_cabs(self):\n try:\n cabs = self.admin_repository.show_all_cabs()\n\n if cabs:\n for cab in cabs:\n print(\"Cab Number: {}\".format(cab[0]))\n print(\"Seating Capacity : {}\".format(cab[1]))\n print(\"----------------------------\")\n return True\n else:\n print(\"No records found.\")\n return False\n except Exception as e:\n print(\"Some Error occurred.Please try again\")\n return False", "def all_complaints(request):\n context = get_context()\n context['title'] = 'Simple App'\n context['complaints'] = Complaint.objects.all()\n return render(request, 'all_complaints.html', context)", "def build_contracts_list():\n ns_getcontracts_filter = '''\n <nc:filter type=\"xpath\"\n xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\"\n xmlns:na=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\"\n xmlns:rb=\"http://cisco.com/ns/yang/Cisco-IOS-XE-bgp\"\n select=\"/na:native/ip/rb:extcommunity-list/standard\"\n />\n '''\n m = manager.connect( host='10.112.83.100',\n port=830,\n username='cisco',\n password='cisco',\n 
hostkey_verify=False)\n answer = m.get_config(source='running', filter=ns_getcontracts_filter).data_xml\n c = xmltodict.parse (answer)\n # build the list\n liste_contracts = [ { 'name': r['name'], 'id': r['permit']['rt']['name'][6:] } for r in c['data']['native']['ip']['extcommunity-list']['standard'] ]\n return liste_contracts", "def get_complaints(visit):\r\n return visit.complaints.all()", "def addCompartment(self, *args):\n return _libsbml.Model_addCompartment(self, *args)", "def get_courses_by_department(self, code: str, *filters: str, \n year=None) -> List[Course]:\n\n filters = list(filters)\n filters.append(f\"filter-departmentcode-{code}\")\n\n return self.get_courses_by_query(code, *filters, year=year)", "def list_identity_providers(self, protocol, compartment_id, **kwargs):\n resource_path = \"/identityProviders\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_identity_providers got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"protocol\": protocol,\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[IdentityProvider]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[IdentityProvider]\")", "def setCompartmentType(self, *args):\n return _libsbml.MultiCompartmentPlugin_setCompartmentType(self, *args)", "def createCompartmentType(self):\n return _libsbml.Model_createCompartmentType(self)", "def setCompartment(self, *args):\n return _libsbml.QualitativeSpecies_setCompartment(self, *args)", "def run(\n self, bases_indices: Optional[List[int]] = None, destructive_mode: bool = False\n ) -> List[str]:\n charms: List[str] = []\n\n managed_mode = charmcraft.env.is_charmcraft_running_in_managed_mode()\n if not managed_mode and not destructive_mode:\n charmcraft.providers.ensure_provider_is_available(self.provider)\n\n build_plan = charmcraft.providers.create_build_plan(\n bases=self.config.bases,\n bases_indices=bases_indices,\n destructive_mode=destructive_mode,\n managed_mode=managed_mode,\n provider=self.provider,\n )\n if not build_plan:\n raise CraftError(\n \"No suitable 'build-on' environment found in any 'bases' configuration.\"\n )\n\n charms = []\n for plan in build_plan:\n emit.debug(f\"Building for 'bases[{plan.bases_index:d}][{plan.build_on_index:d}]'.\")\n if managed_mode or destructive_mode:\n if self.shell:\n # Execute shell in lieu of build.\n launch_shell()\n continue\n\n try:\n with charmcraft.instrum.Timer(\"Building the charm\"):\n charm_name = self.build_charm(plan.bases_config)\n except (CraftError, RuntimeError) as error:\n if self.debug:\n emit.debug(f\"Launching shell as charm 
building ended in error: {error}\")\n launch_shell()\n raise\n\n if self.shell_after:\n launch_shell()\n else:\n charm_name = self.pack_charm_in_instance(\n bases_index=plan.bases_index,\n build_on=plan.build_on,\n build_on_index=plan.build_on_index,\n )\n charms.append(charm_name)\n\n return charms", "def list_spacecraft_available_contacts(contact_profile: Optional[pulumi.InputType['ContactParametersContactProfile']] = None,\n end_time: Optional[str] = None,\n ground_station_name: Optional[str] = None,\n resource_group_name: Optional[str] = None,\n spacecraft_name: Optional[str] = None,\n start_time: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListSpacecraftAvailableContactsResult:\n __args__ = dict()\n __args__['contactProfile'] = contact_profile\n __args__['endTime'] = end_time\n __args__['groundStationName'] = ground_station_name\n __args__['resourceGroupName'] = resource_group_name\n __args__['spacecraftName'] = spacecraft_name\n __args__['startTime'] = start_time\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('azure-native:orbital:listSpacecraftAvailableContacts', __args__, opts=opts, typ=ListSpacecraftAvailableContactsResult).value\n\n return AwaitableListSpacecraftAvailableContactsResult(\n next_link=pulumi.get(__ret__, 'next_link'),\n value=pulumi.get(__ret__, 'value'))", "def getListNestedCVTerms(self, *args):\n return _libsbml.CVTerm_getListNestedCVTerms(self, *args)" ]
[ "0.62096506", "0.60235274", "0.5435602", "0.5372225", "0.52894926", "0.5113388", "0.50968504", "0.5042258", "0.49255446", "0.4908474", "0.48918715", "0.4878826", "0.4840922", "0.46979246", "0.46899638", "0.46821752", "0.468023", "0.45857018", "0.45253602", "0.4391601", "0.4379423", "0.43772116", "0.43746123", "0.4372277", "0.4372277", "0.4354483", "0.43497947", "0.434443", "0.4339763", "0.4327338", "0.43234998", "0.43183652", "0.4309049", "0.4298407", "0.42962438", "0.42877585", "0.4286317", "0.42815015", "0.42761657", "0.4267223", "0.4266482", "0.4266482", "0.42634994", "0.42419428", "0.42286247", "0.42270935", "0.4218103", "0.41963217", "0.41786185", "0.41692206", "0.4152537", "0.41520163", "0.41440433", "0.41400257", "0.41282356", "0.41222712", "0.41187248", "0.41112489", "0.41090727", "0.41063002", "0.41060182", "0.40921506", "0.40892744", "0.408279", "0.40754914", "0.4074993", "0.40745622", "0.40738994", "0.4071413", "0.4069174", "0.40628013", "0.40615728", "0.40611354", "0.4060923", "0.40593752", "0.40469489", "0.40410525", "0.40366796", "0.40262803", "0.40192482", "0.40151653", "0.4006225", "0.40052003", "0.4003776", "0.40031624", "0.40011427", "0.3979569", "0.39769864", "0.3971986", "0.39617866", "0.39486042", "0.39446375", "0.3936844", "0.3936525", "0.39364606", "0.3927688", "0.39217633", "0.39109224", "0.3895756", "0.388454" ]
0.84061605
0
Lists all the tags enabled for cost-tracking in the specified tenancy. For information about cost-tracking tags, see `Using Cost-tracking Tags`__.
def list_cost_tracking_tags(self, compartment_id, **kwargs):
    resource_path = "/tagNamespaces/actions/listCostTrackingTags"
    method = "GET"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "page",
        "limit"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "list_cost_tracking_tags got unknown kwargs: {!r}".format(extra_kwargs))

    query_params = {
        "compartmentId": compartment_id,
        "page": kwargs.get("page", missing),
        "limit": kwargs.get("limit", missing)
    }
    query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="list[Tag]")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="list[Tag]")
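As with the previous row, a brief hedged usage sketch for `list_cost_tracking_tags` (editorial, not dataset content): `identity_client` and `tenancy_ocid` are assumed names, and the returned `Tag` objects are assumed to expose a `name` attribute.

# Hypothetical usage sketch -- assumed names throughout.
response = identity_client.list_cost_tracking_tags(tenancy_ocid, limit=50)
for tag in response.data:  # `.data` assumed to hold the list[Tag]
    print(tag.name)        # `name` assumed as the Tag display attribute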
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listTags(self, authenticationToken):\r\n pass", "def list_tags(self, session):\n result = self._tag(session.get, session=session)\n return result['tags']", "def list_tags():\r\n tags = Tag.query.order_by(Tag.name).all()\r\n return render_template('tags.html', tags=tags)", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def list_tags():\n\n tags = Tag.query.all()\n return render_template('tags/list_tags.html', tags=tags)", "def list_all_tags(self,obs):", "def tag_list(context, addon, dev_tags=None, user_tags=None):\n if not dev_tags and not user_tags:\n return ''\n if not dev_tags:\n dev_tags = []\n if not user_tags:\n user_tags = []\n\n c = {\n 'request': context['request'],\n 'addon': addon,\n 'dev_tags': dev_tags,\n 'user_tags': user_tags,\n }\n t = env.get_template('tags/tag_list.html').render(**c)\n return jinja2.Markup(t)", "def getTagList(self):\n if not self.proxy:\n self.proxy = self.session.service(\"ALBehaviorManager\")\n return self.proxy.getTagList()", "def getTags(number=None):", "def list(self):\n\n\t\treturn self._list(\"/tag\", \"tag\")", "def tags():", "def handle_tags(self, request):\n \"\"\"\n @api {get} /tags List tags\n @apiName GetTags\n @apiGroup Misc\n @apiVersion 1.0.0\n\n @apiDescription List currenty used tags\n\n @apiSuccessExample {json} Example response:\n [\n \"tag1\",\n \"tag2\"\n ]\n \"\"\"\n\n headers = {\n 'Content-Type': 'application/javascript',\n 'Access-Control-Allow-Origin': '*'\n }\n\n tags = []\n\n for task in self.cluster.config.get('tasks').values():\n if 'tags' in task:\n tags += task['tags']\n\n tags = list(set(tags))\n\n return HTTPReply(code = 200, body = json.dumps(tags), headers = headers)", "def tag_list(request):\r\n rdict = request.matchdict\r\n username = rdict.get(\"username\", None)\r\n if username:\r\n username = username.lower()\r\n\r\n tags_found = TagMgr.find(username=username)\r\n\r\n return {\r\n 'tag_list': tags_found,\r\n 'tag_count': len(tags_found),\r\n 'username': username,\r\n }", "def tags(self, request, tag_list, group):\n return tag_list", "async def guild_tags(self, ctx):\n guild_tags = self._tag_dict.get(ctx.guild.id)\n if not guild_tags:\n raise commands.BadArgument(f'This guild does not have any tags!')\n tags = sorted(guild_tags.items(), key=lambda x: x[1]['uses'], reverse=True)\n data = [f'{tag[0]} - {tag[1][\"uses\"]} uses' for tag in tags]\n embed = discord.Embed(colour=self.bot.colour)\n embed.set_author(name=f\"All Tags in {ctx.guild}\", icon_url=ctx.guild.icon_url)\n source = IndexedListSource(data=data, embed=embed, title=\"Tags\")\n await CatchAllMenu(source=source).start(ctx)", "def ListTags(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def __gitTagList(self):\n self.vcs.gitListTagBranch(self.project.getProjectPath(), True)", "def show_tags():\n\n tags = Tag.query.all()\n\n return render_template(\"tags/tag_list.html\", tags=tags)", "def get_all_tags():\n try:\n tags = g.projects.distinct('tags')\n return jsonify(sorted(tags, key=str.lower))\n except Exception as err:\n raise ApiException(str(err), 500)", "def list_tags(ResourceArn=None):\n pass", "def tags(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'tags')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def list(self):\n return self._post(\n request='list',\n uri=ApiUri.TAGS.value,\n ).get('tags')", "def 
list_tags_for_resource(Resource=None):\n pass", "def get_tags(self) -> List:\n LOGGER.info('Get all the tags')\n\n with self.client.create_session() as session:\n tag_count = (func.count(RDSTableTag.table_rk)\n + func.count(RDSDashboardTag.dashboard_rk)).label('tag_count')\n\n records = session.query(\n RDSTag.rk.label('tag_name'),\n tag_count\n )\\\n .outerjoin(RDSTableTag)\\\n .outerjoin(RDSDashboardTag)\\\n .filter(RDSTag.tag_type == 'default')\\\n .group_by(RDSTag.rk)\\\n .having(tag_count > 0)\\\n .all()\n\n results = []\n for record in records:\n results.append(TagDetail(tag_name=record.tag_name,\n tag_count=record.tag_count))\n\n return results", "def get(self, currency, address):\n check_inputs(address=address, currency=currency) # abort if fails\n address_tags = commonDAO.list_address_tags(currency, address)\n return address_tags # can be empty list", "def view_tags():\n tags = dict([ [k[8:],v] for k,v in os.environ.items() if k.startswith(\"HTTPBIN_\") ])\n\n if not tags:\n return Response(response=\"{}\", status=404, mimetype=\"application/json\")\n\n return jsonify(tags)", "def get_all_tags():\n try:\n data = ReadTag().run()\n except Exception as ex:\n return jsonify({'code': '500','message':'Internal server error'})\n else:\n return jsonify({'code': '200','data': data})", "def tagging_criteria(self) -> 'outputs.AdhocBasedTaggingCriteriaResponse':\n return pulumi.get(self, \"tagging_criteria\")", "def display_tags(self):\n from evennia.typeclasses.tags import Tag\n\n qs = (\n Tag.objects.filter(db_tagtype=None, db_category=None, db_data=None)\n .exclude(db_key__icontains=\"barracks\")\n .exclude(db_key__icontains=\"owned_room\")\n .exclude(db_key__icontains=\"_favorite\")\n )\n string = list_to_string([ob.db_key for ob in qs])\n self.msg(\n \"Types of tags (excluding custom ones for individuals, or those with categories): %s\"\n % string\n )", "def show_tags():\n tags = Tag.query.all()\n\n return render_template('tags/show_tags.html', tags=tags)", "def get(self, currency, entity):\n check_inputs(currency=currency, entity=entity)\n tags = entitiesDAO.list_entity_tags(currency, entity)\n return tags", "def getTagsUsingId(self,resourceId):\n response = requests.get('https://api.imagga.com/v1/tagging?content=%s' % resourceId,\n auth=(self.apikey, self.secret))\n #print ('printing response')\n #print (response.json())", "def dataset_tags(connection):\n assert connection\n query = \"\"\"select * from tags()\"\"\"\n result = sqlio.read_sql_query(query, connection)\n return [item.strip() for item in result['name']], [tag_id.strip() for tag_id in result['tag_id']]", "def show_tags(session, foodgroup=None):\n nutrition_tags = session.query(LocalNutritionaliase.ingkey,\n\t\t LocalNutrition.desc, func.group_concat(Tag.name)) \\\n .filter(TagItem.ndbno==LocalNutrition.ndbno) \\\n\t.filter(Tag.id==TagItem.tag_id) \\\n .filter(LocalNutritionaliase.ndbno==LocalNutrition.ndbno) \n if foodgroup is not None:\n nutrition_tags = nutrition_tags \\\n .filter(LocalNutrition.foodgroup==foodgroup) \n nutrition_tags = nutrition_tags \\\n\t.group_by(LocalNutrition.ndbno) \\\n .order_by(LocalNutritionaliase.ingkey)\n for ingkey, desc, tags in nutrition_tags:\n print(ingkey, \" || \", desc)\n print(\" \", tags)", "def find_all(self, params={}, **options):\n return self.client.get_collection(\"/tags\", params, **options)", "def get_all_tags_list(cls):\n all_tags_list = []\n # obj_list = cls.objects.filter(status=0).order_by('-update_time')\n obj_list = Article.objects.all()\n for obj in obj_list:\n 
all_tags_list = all_tags_list + obj.tags_list()\n # for tag in obj.tags.split(','):\n # all_tags_list.append(tag)\n return all_tags_list", "def get_tags(self):\n resp = self.get(_u.build_uri(\"tags\", domain=self.domain))\n return utils.handle_response(resp)", "def tags(self) -> list[str]:\n _args: list[Arg] = []\n _ctx = self._select(\"tags\", _args)\n return _ctx.execute_sync(list[str])", "def do_list_tags(cs, args):\n resp, tags = cs.repositories.list_tags(args.repository)\n tags = [{\"Tag\": t} for t in tags]\n utils.print_list(tags, [\"Tag\"], sortby=\"Tag\")", "def get_tags(self, *args, **kwargs):\n \n tags_data = api.get_tags(\n *args,\n api_key=self.__creds.api_key_v2,\n **kwargs)\n return [en.Tag(tag_data) for tag_data in tags_data]", "def list_tags(self) -> PagingList[Tag]:\n return PagingList(lambda offset, limit: self._generate_tags(None, offset, limit), 128)", "def get_tags(self):\n return self.tags", "def all_tags(self):\n tags = set()\n query = self.sql_session.query(Feature).all()\n for tag in query:\n tags.add((tag.key, json.loads(tag.value)))\n return tags", "def tags(self):\r\n url = self.base_url + 'tags/'\r\n return json.loads(self.bb.load_url(url))", "def tags(self):\r\n url = '{0}/tags/'.format(self.get_url())\r\n request = http.Request('GET', url)\r\n\r\n return request, parsers.parse_json", "def tags(self):\n return self.get(\"tags\")", "def describe_tags(resourceArns=None):\n pass", "def list_tags(self, entry_name):\n return self.__datacatalog.list_tags(parent=entry_name)", "def tags(request):\n return Tag.objects.filter(user=request.user)", "def prepare_tags(self, obj):\n return [tag.name for tag in obj.tags.all()]", "def get_all_tagged(self,tag_name):\n return self.tag2elements[tag_name]", "def getTags(self,):\n\t\treturn self.tags;", "def tags(self) -> List[str]:\n return self._db_data.tags", "def all_tags(self) -> Sequence[str]:\n return pulumi.get(self, \"all_tags\")", "def get_tags(filter, api_site_parameter, page = 1, pagesize = 10, sort = 'popular'):\n path = \"tags\"\n \n query_filter = ')(Yb(vlSfU'\n \n results = __fetch_results(path, api_site_parameter, inname= filter, page = page, pagesize = pagesize, filter = query_filter, sort = sort)\n return results", "async def test_list_address_tags_by_entity(self):\n await test_service.list_address_tags_by_entity(self)", "async def tags(self, ctx, member: discord.Member = None):\n member = member or ctx.author\n guild_tags = self._tag_dict.get(ctx.guild.id)\n if not guild_tags:\n raise commands.BadArgument(f'This guild does not have any tags!')\n tags = guild_tags.items()\n tags = sorted(tags, key=lambda x: x[1]['uses'], reverse=True)\n data = [f'{tag[0]} - {tag[1][\"uses\"]} uses' for tag in tags if tag[1]['author'] == member.id] # only add to list comp if belongs to author instead of removing from dict items in above lines\n embed = discord.Embed(colour=self.bot.colour)\n embed.set_author(name=f\"All of {ctx.author}'s Tags in {ctx.guild}\", icon_url=ctx.author.avatar_url)\n source = IndexedListSource(data=data, embed=embed, title=\"Tags\")\n await CatchAllMenu(source=source).start(ctx)", "def get_tags(request):\n as_list = request.params.get('as_list')\n if as_list:\n return [\n tag.name\n for tag in Tag.query.all()\n ]\n else:\n return [\n {\n 'name': tag.name,\n 'id': tag.id\n }\n for tag in Tag.query.all()\n ]", "def get_antags(self):\n antags = []\n for obj in self.antagobjs.group_by(AntagObjective.mindkey):\n antag = {'key': obj.mindkey, 'name': obj.mindname, 'role': obj.special_role}\n 
antags.append(antag)\n return antags", "def describe_tags(filters=None, maxResults=None, nextToken=None):\n pass", "def tag_strings(self):\n return [tag.tag_text for tag in self.tags.all()]", "def get_tags(self) -> Dict:\n return self.orthanc.get_instance_tags(self.identifier)", "def freeform_tags(self):\n return self._freeform_tags", "def tags(self):\n tag_docs = self.tag_data\n tags = set([x[\"tag\"] for x in tag_docs])\n # remove the \"thawed\" tag\n tags.discard(\"thawed\")\n return tags", "def get_tags(self, obj):\n tags = obj.tags.all()\n serializer = TagSerializer(tags, many=True)\n return serializer.data", "def get_active_tags(self) -> typing.List[str]:\n return [k for k, tog in self._tkdct.items() if tog.is_A_state()]", "def list_tags_for_resource(ResourceId=None, NextToken=None, Limit=None):\n pass", "async def _list(self, ctx: \"IceTeaContext\", target: discord.Member = None):\n target = target or ctx.author\n tags = await ctx.guild_data.get_member_tags(target.id)\n paginator = TagPaginator(ctx, entries=tags)\n await paginator.paginate()", "def get_tags(self, tag_name: str):\n return self.soup.find_all(tag_name)", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def listTags(self, authenticationToken):\r\n self.send_listTags(authenticationToken)\r\n return self.recv_listTags()", "def api_get_tags(request):\n\n # TODO Get favorite tags for the given user ID\n\n tags = Tag.objects.get_not_empty_tags()\n tag_names = []\n for tag in tags:\n tag_names.append(tag.name)\n\n return HttpResponse(content=json.dumps(tag_names))", "def get_tags(self):\n\n return self.tags", "def get_tags(self):\n return self.get_url_data(self.api_url + 'refs/tags')", "def get_all_tags(self, dataset: \"Dataset\") -> List[\"DatasetTag\"]:\n raise NotImplementedError", "def list_tags() -> Optional[Dict[str, Target]]:\n if hasattr(_ffi_api, \"TargetTagListTags\"):\n return _ffi_api.TargetTagListTags()\n return None", "def get_tags(self, status='all'):\n\n if status == 'all':\n tags = dal.actions.all().distinct('tags')\n tags.extend(dal.plans.all().distinct('tags'))\n elif status == 'active':\n tags = dal.actions.active().distinct('tags')\n tags.extend(dal.plans.active().distinct('tags'))\n elif status == 'inactive':\n old = self.get_tags('inactive')\n new = self.get_tags('active')\n a = set(old)\n b = set(new)\n c = a - b\n tags = qlist()\n tags.extend(c)\n else:\n raise AttributeError(\"{} is not a supported status argument. 
Must be 'all', 'active', or 'inactive'\")\n\n return tags.sort().distinct()", "def listTagsByNotebook(self, authenticationToken, notebookGuid):\r\n pass", "def list_tags_for_resource(ResourceArn=None):\n pass", "def __list_all_tags(self):\n\n tags_dict = get_data.get_tagnames_dict()\n if len(tags_dict) > 0:\n first_str = 'tag'\n second_str = 'top posts scraped'\n third_str = 'recent posts scraped'\n descriptor = '{:<40} {:<20} {}'\n print('')\n print(descriptor.format(first_str, second_str, third_str))\n print(descriptor.format(len(first_str) * '-', len(second_str) * '-',\n len(third_str) * '-'))\n for number, tag in tags_dict.items():\n space_str = ' ' if len(str(number)) > 1 else ' '\n first = '[' + space_str + str(number) + '] ' + tag\n second = str(get_data.get_top_tag_post_count(tag))\n third = str(get_data.get_recent_tag_post_count(tag))\n print(descriptor.format(first, second, third))\n else:\n print('no tags found in the database')", "def tag_list(self, obj):\n logger.debug('Called Tag_list in admin: %s', self)\n return u\", \".join(o.name for o in obj.tags.all())", "def list_tags(ResourceArn=None, MaxResults=None, NextToken=None):\n pass", "def get_tags(self) -> Set[Text]:\r\n return {tag for tag in self.tags}", "def tags(self):\n return self._item.get(\"tags\")", "def get_tags(self):\n tags = self.AWS_TAGS\n\n label_selector = self.label_selector.split('=')\n label_tag = {'Key': label_selector[0], 'Value': label_selector[1]}\n tags.append(label_tag)\n\n annotation_tag = {'Key': self.expire_annotation, 'Value': str(int(self.now + self.DAY_AND_NIGHT))}\n tags.append(annotation_tag)\n\n return tags", "def get(self):\n res = SmartAPI.get_tags(self.args.field)\n self.finish(res)", "def get(self):\n res = SmartAPI.get_tags(self.args.field)\n self.finish(res)", "def _get_cache_tags(self):\n try:\n project = self._get_project()\n version = self._get_version()\n except Exception:\n log.warning(\n \"Error while retrieving project or version for this view.\",\n exc_info=True,\n )\n return []\n\n tags = []\n if project:\n tags.append(project.slug)\n if project and version:\n tags.append(get_cache_tag(project.slug, version.slug))\n if project and self.project_cache_tag:\n tags.append(get_cache_tag(project.slug, self.project_cache_tag))\n return tags", "def list_tags(ResourceArn=None, NextToken=None, MaxResults=None):\n pass", "def tag_list(request, queryset=None, **kwargs):\n if queryset is None:\n queryset = Tag.objects.all()\n \n if 'queryset' in kwargs:\n del kwargs['queryset']\n \n if 'template_name' not in kwargs:\n kwargs['template_name'] = 'flickrsets/tag/list.html'\n \n if 'template_object_name' not in kwargs:\n kwargs['template_object_name'] = 'tag'\n \n return list_detail.object_list(request, queryset, **kwargs)", "def collect_tags(self):\n tags = []\n for document in self.documents:\n for tag_token in document.tags:\n tags.append(tag_token)\n return tags", "def address_tags(self):\n return self._address_tags" ]
[ "0.63822997", "0.61948174", "0.6163116", "0.6132447", "0.6132447", "0.6095878", "0.60922146", "0.6062289", "0.60137403", "0.6006326", "0.59625673", "0.5926329", "0.5855336", "0.5804048", "0.57833034", "0.57692766", "0.57686114", "0.5734323", "0.57283866", "0.57195485", "0.5714692", "0.56930023", "0.5682361", "0.5677101", "0.5653604", "0.5633774", "0.56310505", "0.56263924", "0.561428", "0.561036", "0.5608402", "0.56072134", "0.55946726", "0.5586023", "0.55780876", "0.5573439", "0.55568916", "0.55542344", "0.5540406", "0.55271095", "0.5507188", "0.5501935", "0.54869974", "0.548611", "0.54776", "0.5473684", "0.5471587", "0.546508", "0.5425304", "0.542168", "0.54188854", "0.54152256", "0.54093885", "0.540795", "0.5403142", "0.5400717", "0.53939146", "0.53914905", "0.53812313", "0.53808445", "0.5372219", "0.5369483", "0.5364305", "0.53576964", "0.5357599", "0.5357595", "0.53543067", "0.53540313", "0.5353783", "0.53473145", "0.5347198", "0.5347198", "0.5347198", "0.5347198", "0.5347198", "0.5347198", "0.5347198", "0.5347198", "0.53460264", "0.534179", "0.53349185", "0.5310529", "0.5301902", "0.5296248", "0.5291002", "0.5285299", "0.527648", "0.52711856", "0.5254485", "0.52362555", "0.52317023", "0.52170527", "0.52104795", "0.5209235", "0.5209235", "0.5202454", "0.52017164", "0.5198579", "0.51982087", "0.51945144" ]
0.6232547
1
Lists the secret keys for the specified user. The returned object contains the secret key's OCID, but not the secret key itself. The actual secret key is returned only upon creation.
def list_customer_secret_keys(self, user_id, **kwargs): resource_path = "/users/{userId}/customerSecretKeys" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_customer_secret_keys got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[CustomerSecretKeySummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[CustomerSecretKeySummary]")
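The `document` above mirrors the OCI Python SDK's `IdentityClient.list_customer_secret_keys` operation. A minimal usage sketch, assuming the published `oci` package and a standard `~/.oci/config` profile rather than the partial client class excerpted in the record:

    import oci

    # Assumes a default profile in ~/.oci/config with user, tenancy, and key settings.
    config = oci.config.from_file()
    identity = oci.identity.IdentityClient(config)

    # Only CustomerSecretKeySummary objects come back; each carries the key's
    # OCID, but the secret itself is returned only when the key is created.
    response = identity.list_customer_secret_keys(config["user"])
    for key_summary in response.data:
        print(key_summary.id, key_summary.display_name, key_summary.lifecycle_state)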
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_user_encryption_key_list(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_user_encryption_key_list_with_options(request, runtime)", "def get_ssh_keys(self, user_id):\n _gu = self.get_user(user_id)\n if _gu is None:\n return []\n\n # build URL and make request\n return self._get('/users/{0}/keys'.format(_gu['id']))", "def get_keys(user_id):\n\n db_conn = sqlite3.connect(db_path)\n db = db_conn.cursor()\n keys = []\n try:\n for row in db.execute(\"SELECT public_key FROM public_keys WHERE username=? AND status=?\", [user_id, PK_STATUS_OK]):\n keys.append({\"public\": row[0]})\n db_conn.close()\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n if(keys == []):\n abort(404)\n return jsonify({'user':{'username':user_id, 'keys':keys}})", "def secret_keys(self):\n return self._secret_keys", "def list_credentials(user):\n return Credentials.list_credentials(user)", "async def describe_user_encryption_key_list_async(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n runtime = util_models.RuntimeOptions()\n return await self.describe_user_encryption_key_list_with_options_async(request, runtime)", "def list_user_keys(self):\n return AlgoliaUtils_request(self.headers, self.read_hosts, \"GET\", \"/1/keys\", self.timeout)", "def GetSecretKey(cls, user_id):\n uid = hashlib.sha256(str(user_id)).hexdigest()\n entity = ndb.Key(cls, uid).get()\n if not entity:\n entity = cls(id=uid, secret_key=GenerateRandomHexKey())\n entity.put()\n return entity.secret_key", "def _all_secrets(cls, *, secretsmanager_client):\n return secretsmanager_client.list_secrets()['SecretList']", "def list_api_keys(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/apiKeys\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_api_keys got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[ApiKey]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[ApiKey]\")", "def list_secret(self, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.list_secret_with_http_info(**kwargs)\n else:\n (data) = self.list_secret_with_http_info(**kwargs)\n return data", "def list_user_keys(self):\n return AlgoliaUtils_request(self.client.headers, self.read_hosts, \"GET\", 
\"/1/indexes/%s/keys\" % self.url_index_name, self.client.timeout)", "async def list_secrets(self):\n pass", "def list(ctx: CLIContext, user_id, is_active, filter_, order, offset, limit) -> None:\n fields = [\n keypair_fields['user_id'],\n keypair_fields['full_name'],\n keypair_fields['access_key'],\n keypair_fields['secret_key'],\n keypair_fields['is_active'],\n keypair_fields['is_admin'],\n keypair_fields['created_at'],\n keypair_fields['last_used'],\n keypair_fields['resource_policy'],\n keypair_fields['rate_limit'],\n keypair_fields['concurrency_used'],\n ]\n try:\n with Session() as session:\n fetch_func = lambda pg_offset, pg_size: session.KeyPair.paginated_list(\n is_active,\n user_id=user_id,\n fields=fields,\n page_offset=pg_offset,\n page_size=pg_size,\n filter=filter_,\n order=order,\n )\n ctx.output.print_paginated_list(\n fetch_func,\n initial_page_offset=offset,\n page_size=limit,\n )\n except Exception as e:\n ctx.output.print_error(e)\n sys.exit(1)", "def describe_user_encryption_key_list_with_options(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.target_region_id):\n query['TargetRegionId'] = request.target_region_id\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeUserEncryptionKeyList',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeUserEncryptionKeyListResponse(),\n self.call_api(params, req, runtime)\n )", "def keys(self, bucket, user=None):\n raise NotImplementedError('TODO')", "def get_db_secrets():\n secret_response = secrets_client.get_secret_value(SecretId=db_secret_name)\n secrets = json.loads(secret_response['SecretString'])\n return secrets", "def get_list_keys(rpc_user, rpc_pwd):\n data = '{\"jsonrpc\":\"2.0\",\"id\":\"1\",\"method\":\"listkeys\"}'\n return call_rpc(rpc_user, rpc_pwd, data)", "def List(self, user=None):\n with self.acc_lock:\n self._load()\n\n result = []\n if user:\n for k, v in self.tasks.iteritems():\n if v['user'] != user:\n continue\n d = dict(v)\n d['key'] = k\n result.append(d)\n else:\n for k, v in self.tasks.iteritems():\n d = dict(v)\n d['key'] = k\n result.append(d)\n return result", "def view_list_robots_by_user(self, user, userID):\r\n return user._realm.getUser(userID).robots.keys()", "def get_wishlist_key(self, user):\n user_id = self.auth.get_user_id(user)\n p_key = ndb.Key(Profile, user_id)\n\n wishlists = Wishlist.query(ancestor=p_key).fetch()\n if wishlists:\n return wishlists[0].key\n\n wl_id = Wishlist.allocate_ids(size=1, 
parent=p_key)[0]\n wl_k = ndb.Key(Wishlist, wl_id, parent=p_key)\n Wishlist(**{'key': wl_k}).put()\n\n return wl_k", "def get_all(user_id):\n return Bucketlist.query.filter_by(created_by=user_id)", "def get_bookmarked_items(user_id):\n return list(Bookmark.objects.filter(user=user_id).values_list(\n 'item_id', flat=True))", "def get_secrets(request):\n secret_keys = (\n 'neptune_sql_credentials',\n 'triton_sql_credentials',\n 'saturn_sql_credentials',\n 'qualtrics_credentials',\n 'rserve_service_account_credentials',\n )\n secrets = {s: json.loads(SecretValue.get(s, 'null'))\n for s in secret_keys}\n\n # Add the mandrill api key, which isn't a JSON string.\n if request.get('send_email', None) != 'false':\n secrets['mandrill_api_key'] = SecretValue.get(\n 'mandrill_api_key', '')\n\n return secrets", "def view_list_containers_by_user(self, user, userID):\r\n return user._realm.getUser(userID).containers.keys()", "def list_tokens(user):\n return AppSpecificAuthToken.select().where(AppSpecificAuthToken.user == user)", "def get_s3_keys(bucket, user_keys = None):\n keys = []\n if user_keys is None:\n \t\t\t\ts3 = boto3.client('s3')\n else:\n s3 = boto3.client('s3', \n aws_access_key_id = user_keys[\"AWS_ACCESS_KEY_ID\"], \n aws_secret_access_key = user_keys[\"AWS_SECRET_ACCESS_KEY\"], \n region_name = user_keys[\"REGION_NAME\"]\n ) \t \n \n resp = s3.list_objects_v2(Bucket= bucket)\n for obj in resp['Contents']:\n keys.append(obj['Key'])\n return keys", "async def describe_user_encryption_key_list_with_options_async(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.target_region_id):\n query['TargetRegionId'] = request.target_region_id\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeUserEncryptionKeyList',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeUserEncryptionKeyListResponse(),\n await self.call_api_async(params, req, runtime)\n )", "def list(**kwargs):\n cluster_call(\"secret_list\", **kwargs)", "def list_namespaced_secret(self, namespace, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.list_namespaced_secret_with_http_info(namespace, **kwargs)\n else:\n (data) = self.list_namespaced_secret_with_http_info(namespace, **kwargs)\n return data", "def get_buckets_for_user(self):\n s3 = self.credentials.session.resource('s3')\n bucket_list = [bucket.name for bucket in s3.buckets.all()]\n\n return bucket_list;", "def get_user_access_tokens(request, user):\n 
manager = internal_keystoneclient(request).oauth2.access_tokens\n\n return manager.list_for_user(user=user)", "def list_keys(self, label=None):\r\n _filter = NestedDict({})\r\n if label:\r\n _filter['sshKeys']['label'] = query_filter(label)\r\n\r\n return self.client['Account'].getSshKeys(filter=_filter.to_dict())", "def secrets(self): # pylint: disable=no-self-use\n return []", "def list_secret_with_http_info(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_secret\" % key\n )\n params[key] = val\n del params['kwargs']\n\n resource_path = '/api/v1/secrets'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n return self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1SecretList',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'))", "def view_list_users(self, user):\r\n return user.realm._users.keys()", "def get_keys(weat_db):\n import updater\n keys = updater.list_keys(weat_db, verbose=False)\n return keys", "def disk_secrets(self) -> Sequence['outputs.DiskSecretResponse']:\n return pulumi.get(self, \"disk_secrets\")", "def secret_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"secret_key\")", "def get_secret(self):\n return Buffer.from_mpz(self._secret_key)", "def get(self, user, id):\n # Search for bucketlist\n bucketlist = Bucketlist.query.filter_by(\n id=id, created_by=user.email).first()\n\n # return 400 if bucketlist non exixtant or not belongs to this user\n if bucketlist is None:\n return 'Bucketlist not found', 202\n\n # serialize items if ann\n bucketlists_items = [\n {'id': item.id,\n 'name': item.description,\n 'date_created': str(item.date_created),\n 'date_modified': str(item.date_modified),\n 'done': str(item.is_done)\n } for item in bucketlist.items\n ]\n\n # serialize bucketlist\n response_bucketlist = [\n {'id': bucketlist.id,\n 'name': bucketlist.name,\n 'items': bucketlists_items,\n 'date_created': str(bucketlist.date_created),\n 'date_modified': str(bucketlist.date_modified),\n 'created_by': bucketlist.created_by\n }\n ]\n\n return 
response_bucketlist, 200", "def secrets(self):\n return self._secrets", "def get_user_auth_keys(self, username):\n if username in self.users_keys:\n return self.users_keys[username]\n\n self.users_keys[username] = []\n\n userdir = os.path.expanduser(\"~\" + username)\n if not userdir:\n return self.users_keys[username]\n\n keyfile = os.path.join(userdir, \".ssh/authorized_keys\")\n if not keyfile or not os.path.exists(keyfile):\n return self.users_keys[username]\n\n with open(keyfile) as f:\n for line in f.readlines():\n line = line.strip()\n if not line or line.startswith(\"#\"):\n continue\n values = [x.strip() for x in line.split()]\n\n exp = None\n try:\n int(values[0]) # bits value?\n except ValueError:\n # Type 1 or type 2, type 1 is bits in second value\n options_ktype = values[0]\n try:\n int(values[1]) # bits value?\n except ValueError:\n # type 2 with options\n ktype = options_ktype\n data = values[1]\n else:\n # Type 1 no options.\n exp = int(values[1])\n data = values[2]\n else:\n # Type 1 no options.\n exp = int(values[1])\n data = values[2]\n\n # XXX For now skip type 1 keys\n if exp is not None:\n continue\n\n if data:\n import base64\n if ktype == \"ssh-rsa\":\n key = ssh.RSAKey(data=base64.decodebytes(data.encode('ascii')))\n elif ktype == \"ssh-dss\":\n key = ssh.DSSKey(data=base64.decodebytes(data.encode('ascii')))\n else:\n key = None\n if key:\n self.users_keys[username].append(key)\n return self.users_keys[username]", "def get_secrets(session, secret_id):\n secretsmanager = session.client('secretsmanager')\n secrets = json.loads(secretsmanager.get_secret_value(SecretId=secret_id)['SecretString'])\n formatted_secrets = []\n for (key, value) in secrets.items():\n skipped_secrets = [ # cloudformation doesn't need these, so skip them\n 'TABLE_NAME',\n 'DATATRUST_KEY',\n 'JWT_SECRET_KEY',\n 'DB_URL',\n 'RPC_PATH',\n 'LOG_LEVEL',\n 'CELERY_BROKER_URL',\n 'CELERY_RESULT_BACKEND',\n 'S3_DESTINATION'\n ]\n if key not in skipped_secrets:\n formatted_secrets.append(\n {\"ParameterKey\": key, \"ParameterValue\": value}\n )\n formatted_secrets.append(\n {\"ParameterKey\": \"StackName\", \"ParameterValue\": STACK_NAME}\n )\n return formatted_secrets", "def get_developer_apps_by_user(user_id: int) -> List[Dict]:\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n developer_apps = (\n session.query(DeveloperApp)\n .filter(\n DeveloperApp.user_id == user_id,\n DeveloperApp.is_current == True,\n DeveloperApp.is_delete == False,\n )\n .all()\n )\n return query_result_to_list(developer_apps)", "def get_keys(self, yk_publicname):\n query = \"\"\"SELECT yk_publicname\n FROM yubikeys\n WHERE active = 1\"\"\"\n params = None\n if yk_publicname != 'all':\n query += ' AND yk_publicname = %s'\n params = (yk_publicname,)\n self._execute(query, params)\n return self._dictfetchall()", "def get_secret():\n\n secret_name = \"Jido-Active-Directory-Service-Account\"\n\n # Create a Secrets Manager client\n session = boto3.session.Session()\n client = session.client(\n service_name='secretsmanager',\n region_name= os.environ.get(\"AWS_DEFAULT_REGION\")\n )\n try:\n get_secret_value_response = client.get_secret_value(\n SecretId= secret_name\n )\n except ClientError as e:\n print(\"Error getting secret key!: \" + str(e))\n return None\n else:\n # Decrypts secret using the associated KMS CMK.\n if 'SecretString' in get_secret_value_response:\n return get_secret_value_response['SecretString']\n\n return None", "def list(cls):\n\n db = get_db_handle()\n\n secret_basic_configs = 
[]\n for secret in db.secret_table.select():\n secret_basic_configs.append(secret.get_detail_dict())\n\n return secret_basic_configs", "def get_all_access_keys(self, user_name, marker=None, max_items=None):\r\n params = {'UserName' : user_name}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('ListAccessKeys', params,\r\n list_marker='AccessKeyMetadata')", "def secrets(self):\n return self._secrets_store", "def ListAppKeys(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def buckets(self, user=None):\n raise NotImplementedError('TODO')", "def list_secrets(self, MaxResults: int = None, NextToken: str = None) -> Dict:\n pass", "def secret_key(self):\n return self._secret_key", "def getSecretKeys():\n secretKeys = dict()\n for (key, value) in os.environ.items():\n if( key.startswith(\"SM_\")):\n secretKeys[key] = value\n if(bool(secretKeys)):\n return secretKeys\n else:\n raise ValueError(\"Secrets Manager environment variable key not found, make sure there is atleast an env var with 'SM_' prefix for the init-container\")", "def get_banned_user_obj_list(user_obj):\n banned_user_obj_list = [user_banned_list_obj.banned_user for user_banned_list_obj in user_obj.banned_user_set.all()]\n return banned_user_obj_list", "def get_secrets(self, id: str) -> dict[str, Any]:\n\n return self.client.get(self._url(\"%s/secrets\" % id))", "def apikeys(request):\n display = get_boolean_value(request.GET.get('display', False))\n\n return render(request, 'gui/profile/profile_api_keys_list.html', {\n 'user': request.user,\n 'display_keys': display\n })", "def get(self, user):\n search = True if self.request.args.get('q') else False\n limit = int(self.request.args.get('limit')) if self.request.args.get('limit') else 20\n page = int(self.request.args.get('page')) if self.request.args.get('page') else 1\n bucketlists = user.bucketlists.paginate(page, limit, True).items\n bucketlists = user.bucketlists.filter(Bucketlist.name.contains(self.request.args.get('q'))) if self.request.args.get('q') else bucketlists\n\n bucketlists = [\n {'id': bucketlist.id,\n 'name': bucketlist.name,\n 'items': [\n {'id': item.id,\n 'name': item.description,\n 'date_created': str(item.date_created),\n 'date_modified': str(item.date_modified),\n 'done': str(item.is_done)\n } for item in bucketlist.items\n ],\n 'date_created': str(bucketlist.date_created),\n 'date_modified': str(bucketlist.date_modified),\n 'created_by': bucketlist.created_by\n } for bucketlist in bucketlists\n ]\n\n # if empty retutn no bucketlists added\n if not bucketlists:\n return \"You have no avialable bucketlists\", 200\n\n return bucketlists, 200", "def get_key(self, user):\r\n from delicious_cake.models import ApiKey\r\n\r\n try:\r\n key = ApiKey.objects.get(user=user)\r\n except ApiKey.DoesNotExist:\r\n return False\r\n\r\n return key.key", "def view_list_robots(self, user):\r\n return user.robots.keys()", "def list_credentials(self, **_params):\r\n return self.get(self.credentials_path, params=_params)", "def get_secret(value):\n\n if not isinstance(value, collections.abc.Mapping):\n return value\n\n keychain = get_keychain(value.pop(\"keychain\"))\n return keychain.get(**value)", "def _list_known_secret_tokens():\n global _secret_token_map\n\n keys = list(_secret_token_map.keys())\n keys.sort()\n\n ret = ''\n for key in keys:\n if ret != '':\n ret += ', 
'\n ret += \"'\" + key + \"'\"\n return ret", "def test_read_namespaced_secret_list_secrets(self):\n pass", "def keys(self):\n return sorted(super(UserSSHKeys, self).keys())", "def cli(ctx):\n return ctx.gi.cannedkeys.get_keys()", "def _list_tokens(self, user_id, tenant_id=None, trust_id=None,\n consumer_id=None):\n raise exception.NotImplemented() # pragma: no cover", "def get_keys(opts):\n hosts = KnownHostsStore()\n serverkey = hosts.serverkey(opts.vip_address)\n key_store = KeyStore()\n publickey = key_store.public\n secretkey = key_store.secret\n return {\"publickey\": publickey, \"secretkey\": secretkey,\n \"serverkey\": serverkey}", "def watch_secret_list(self, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.watch_secret_list_with_http_info(**kwargs)\n else:\n (data) = self.watch_secret_list_with_http_info(**kwargs)\n return data", "def keypairs(self):\n return list(self._list(_keypair.Keypair, paginated=False))", "def GetSecretKey():\n _LOG.info('Getting webapp2_secret_key.')\n return (Webapp2SecretKey.get_by_id('current_secret_key')\n .secret_key.encode('ascii', 'ignore'))", "def get_developer_apps_with_grant_for_user(user_id: int) -> List[Dict]:\n\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n rows = (\n session.query(\n DeveloperApp.address,\n DeveloperApp.name,\n DeveloperApp.description,\n Grant.user_id.label(\"grantor_user_id\"),\n Grant.created_at.label(\"grant_created_at\"),\n Grant.updated_at.label(\"grant_updated_at\"),\n ) # Note: will want to grab Grant permissions too once we have those\n .outerjoin(Grant, Grant.grantee_address == DeveloperApp.address)\n .filter(\n Grant.user_id == user_id,\n Grant.is_revoked == False,\n Grant.is_current == True,\n DeveloperApp.is_current == True,\n DeveloperApp.is_delete == False,\n )\n .order_by(asc(Grant.updated_at))\n .all()\n )\n return [\n {\n \"address\": row[0],\n \"name\": row[1],\n \"description\": row[2],\n \"grantor_user_id\": row[3],\n \"grant_created_at\": row[4],\n \"grant_updated_at\": row[5],\n }\n for row in rows\n ]", "def secrets(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SecretArgs']]]]:\n return pulumi.get(self, \"secrets\")", "def get_user_books(user_id):\n return session.query(Book).filter(Book.user_id == user_id).all()", "def getSecret(self, clientIP):\n\n now = time.time() \n\n #i wanted to limit to 3 (limit=3) but, boto kept barfing\n results = self._secret_table.query(ip_address__eq=clientIP,not_before__lt=now,consistent=False)\n client_secret = None\n client_secret_not_before = 0\n\n for result in results:\n if result['not_after'] >= now:\n if client_secret_not_before < result['not_before']:\n client_secret_not_before = result['not_before']\n client_secret = self.decryptSecret(result['secret'])\n\n\n logging.debug('retrieved secret for %s' % (clientIP))\n return client_secret", "def secret_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret_key\")", "def secret_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret_key\")", "def wishlist_sessions(self, user):\n wishlist_key = self.get_wishlist_key(user)\n session_keys = [ndb.Key(urlsafe=wsck) for wsck in\n wishlist_key.get().sessionKeys]\n sessions = ndb.get_multi(session_keys)\n return sessions", "def get_dropbox_app_keys(self):\n return settings.DROPBOX_APP_KEY, settings.DROPBOX_APP_SECRET,settings.DROPBOX_ACCESS_TYPE", "def getSecret(self, clientIP):\n\n now = time.time() \n\n #i wanted to limit to 3 (limit=3) but, boto kept barfing\n 
results = self._secretdb.execute('select secret,not_before,not_after from %s where ip_address=:ip_address and not_before < :not_before and not_after >= :not_after order by not_before,not_after' % self._table_name,\n {'ip_address': clientIP,\n 'not_before': now,\n 'not_after': now}).fetchall()\n client_secret = None\n client_secret_not_before = 0\n \n for result in results:\n client_secret = str(result[0])\n\n logging.debug('retrieved secret for %s' % (clientIP))\n return client_secret\n #only should be reached if it is None\n return client_secret", "def get_api_key_from_user_id(self, user_id: str) -> str:\n response = self.get(self.url + \"/my-account\", params={\"id\": user_id})\n return self.get_api_key_from_response(response)", "def GetSSHKeys():\n keydict = {}\n for rec in database.db.itervalues():\n if 'keys' in rec:\n keydict[rec['name']] = rec['keys']\n return keydict", "def list(self, all_tenants=True):\n query = {}\n path = '/os-keypairs'\n if all_tenants is True:\n query['all_tenants'] = 1\n \n path = '%s?%s' % (path, urlencode(query)) \n \n res = self.client.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack key pairs: %s' % truncate(res))\n return res[0]['keypairs']", "def get_all_books_for_user(user, KEY):\n\n gr_id = user.gr_id\n shelves = check_for_shelves(gr_id, KEY)\n\n for shelf in shelves: # iterate over list of shelves and create books!\n time.sleep(1.00)\n get_books_from_shelf(gr_id, shelf.name, KEY)\n print \"Got all books from \" + shelf.name + \" shelf.\"\n\n return", "def secrets_bucket(self):\n return self.s3.get_bucket(self.secrets_bucket_name)", "def get_tokens_for_user(user):\n\n refresh = RefreshToken.for_user(user)\n\n return {\n 'refresh': str(refresh),\n 'access': str(refresh.access_token),\n }", "def generate_keys(self):\n self.keys = []\n key = string_to_bit_array(self.passwd)\n key = self.permutation(key, CP_1) # Perform initial permutation on the key\n g, d = split_into_n(key, 28) # Split into g (LEFT) & d (RIGHT)\n for i in range(16): # Apply the 16 rounds\n g, d = self.shift(g, d, ROUND_KEY_SHIFT[i]) # Shift the key according to the round\n tmp = g + d # Merge them\n self.keys.append(self.permutation(tmp, CP_2)) # Perform the permutation to get the Ki", "def secret_key(self):\n return None", "def get(self):\n ok = check_token(app.config.get('SECRET_KEY'))\n if False in ok:\n return BAD(err5, ok[1], ok[2]) # ok[2] is response code\n\n client = ManagePsb(credentials, databaseName)\n cursor = client.Filter(collection)\n info = list(cursor)\n newInfo = ManageKeys(info)\n for data in info:\n data['_id'] = str(data['_id'])\n\n return newInfo.LikeJson()", "def watch_namespaced_secret_list(self, namespace, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.watch_namespaced_secret_list_with_http_info(namespace, **kwargs)\n else:\n (data) = self.watch_namespaced_secret_list_with_http_info(namespace, **kwargs)\n return data", "def update_keys(user_id):\n\n if not request.json:\n abort(400)\n\n new_pub_keys = request.json[\"public_keys\"]\n\n db_conn = sqlite3.connect(db_path)\n db = db_conn.cursor()\n db_pub_keys = []\n try:\n for row in db.execute(\"SELECT public_key FROM public_keys WHERE username=? 
AND status=?;\", [user_id, PK_STATUS_OK]):\n db_pub_keys.append(row[0])\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n\n to_add = []\n to_revoke = []\n\n # Put the keys not present in the database in the list of keys to add\n for new_key in new_pub_keys:\n if(new_key not in db_pub_keys):\n to_add.append((user_id, new_key, PK_STATUS_OK))\n # Put the keys not in the new list in the list of keys to revoke\n for db_key in db_pub_keys:\n if(db_key not in new_pub_keys):\n to_revoke.append((PK_STATUS_REVOKED, user_id, db_key))\n\n try:\n db.executemany('INSERT INTO public_keys (username, public_key, status) VALUES (?,?,?);', to_add)\n db.executemany('UPDATE public_keys SET status=? WHERE username=? AND public_key=?;', to_revoke)\n db_conn.commit()\n db_conn.close()\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n\n return jsonify({'status':True})", "def keys(self):\n return DeviceKeyCollection(client=self)", "def get_keys(self):\r\n\t\tlogger.debug(\"Getting the keys\")\r\n\t\t\r\n\t\treturn db.get_items('keys')", "def get_secrets():\n client = datastore.Client()\n query = client.query(kind='env_vars')\n entity = query.fetch()\n secrets = list(entity)[0]\n return secrets", "def _get_private_key(self, user_obj):\n return user_obj.private_key.encode('utf-8')", "def getsecret(path=None):\n\n if path:\n print(vault.read(path))\n\n else:\n for section in vault.list():\n for key in vault.list(section):\n print(f'{section}{key}')", "def get_key_list(self, email=\"\"):\n\t\tif email:\n\t\t\twhere_clause = \" where email = '%s'\" % email\n\t\telse:\n\t\t\twhere_clause = \"\"\n\n\t\treturn self.app.db.query(\n\t\t\t\"\"\"\n\t\t\tselect\n\t\t\t\tapi_key,\n\t\t\t\towner,\n\t\t\t\tapp_name,\n\t\t\t\temail,\n\t\t\t\turl,\n\t\t\t\tcreated\n\t\t\tfrom\n\t\t\t\tapi_keys\n\t\t\t%s\n\t\t\t\"\"\" % where_clause)", "def return_keys(self):\r\n\r\n keys = list(self.piDD.keys())\r\n return keys", "def keys(self):\r\n if self.db == None:\r\n raise AssertionError(\"DB not open\")\r\n\r\n self.lock.acquire()\r\n try:\r\n usernames = self.db.keys()\r\n finally:\r\n self.lock.release()\r\n usernames = [u for u in usernames if not u.startswith(\"--Reserved--\")]\r\n return usernames" ]
[ "0.66501856", "0.6520959", "0.6316638", "0.6299123", "0.62734264", "0.627234", "0.62511283", "0.606225", "0.60029274", "0.59841985", "0.5940731", "0.5805145", "0.5801294", "0.5739835", "0.5712983", "0.5704147", "0.566274", "0.5550773", "0.5525107", "0.5505345", "0.5473902", "0.5453241", "0.53608596", "0.5350772", "0.53441215", "0.53431183", "0.53249586", "0.5310021", "0.52864754", "0.52864033", "0.5261782", "0.5257434", "0.5235528", "0.5222612", "0.5218325", "0.52089036", "0.52068937", "0.51897293", "0.5175616", "0.5148854", "0.5146108", "0.51379657", "0.5132145", "0.5131782", "0.51014906", "0.50607634", "0.5053285", "0.5037211", "0.5020053", "0.4995215", "0.4994563", "0.4986521", "0.49855116", "0.49771923", "0.49761835", "0.49729347", "0.49610394", "0.4957598", "0.49567503", "0.49560708", "0.49473011", "0.49468565", "0.49465796", "0.4930957", "0.49128225", "0.48852932", "0.48851278", "0.48841217", "0.48707926", "0.485552", "0.4849758", "0.4844367", "0.48376313", "0.48365417", "0.48351377", "0.48346046", "0.48344955", "0.48344955", "0.48276153", "0.48244607", "0.48157796", "0.481201", "0.47955307", "0.47880605", "0.477587", "0.4770361", "0.47674045", "0.4761575", "0.47598758", "0.47561282", "0.47550735", "0.47499245", "0.4749858", "0.4749493", "0.47469503", "0.47457185", "0.47358397", "0.47305936", "0.47249043", "0.4724449" ]
0.7246356
0
Lists the dynamic groups in your tenancy. You must specify your tenancy's OCID as the value for the compartment ID (remember that the tenancy is simply the root compartment). See `Where to Get the Tenancy's OCID and User's OCID`__.
def list_dynamic_groups(self, compartment_id, **kwargs): resource_path = "/dynamicGroups" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_dynamic_groups got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[DynamicGroup]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[DynamicGroup]")
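The `document` above mirrors the OCI Python SDK's `IdentityClient.list_dynamic_groups` operation, where the tenancy OCID serves as the compartment ID because dynamic groups live in the root compartment. A minimal usage sketch under the same assumptions as the previous example (published `oci` package, default config profile); `list_call_get_all_results` is the SDK's pagination helper:

    import oci

    config = oci.config.from_file()
    identity = oci.identity.IdentityClient(config)

    # Dynamic groups are tenancy-level resources, so the tenancy OCID is
    # passed as the compartment ID; the helper follows opc-next-page tokens.
    response = oci.pagination.list_call_get_all_results(
        identity.list_dynamic_groups, config["tenancy"]
    )
    for group in response.data:
        print(group.id, group.name, group.matching_rule)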
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list(request):\n return render_to_response('rteacher/manage_groups_list.html', request, **klist(\n request=request\n ))", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def groups():\n access_token = session['access_token']\n return \"%s\" % list_groups(access_token)", "def get_all_groups():\n return jsonify(admin.get_all_groups(current_app.scoped_session()))", "def list_groups(access_token):\n request_url = OKTA_URL + \"api/v1/groups\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def list_groups():\n return _list_tindyb_unique_values(\"group\", dbpath=__dbpath__)", "def ObjectGroups(object_id):\n rhino_object = rhutil.coercerhinoobject(object_id, True, True)\n if rhino_object.GroupCount<1: return []\n group_indices = rhino_object.GetGroupList()\n rc = [scriptcontext.doc.Groups.GroupName(index) for index in group_indices]\n return rc", "def product_group_list(obj):\n client = get_client(obj)\n\n res = client.product_group_list()\n\n print(json.dumps(res, indent=4))", "def groups(self):\n return self.get_data(\"groups\")", "def get_groups():\r\n if 'username' not in flask.session:\r\n return flask.jsonify(**{'message': 'Forbidden', 'status_code': 403})\r\n\r\n context = {}\r\n context['url'] = flask.request.path\r\n context['groups'] = []\r\n\r\n # Retreive query variables\r\n query_num_groups = flask.request.args.get('size') \r\n query_page = flask.request.args.get('page') \r\n num_groups = int(query_num_groups) if query_num_groups != None else 10\r\n page_number = int(query_page) if query_page != None else 0\r\n\r\n groups = get_group_listing(flask.session['username'], \r\n num_groups, page_number)\r\n for g in groups:\r\n context['groups'].append({\r\n 'id': g[0],\r\n 'name': g[1]\r\n })\r\n\r\n if (num_groups == 10):\r\n context['next'] = '{}?page={}'.format(context['url'], page_number + 1)\r\n else:\r\n context['next'] = '{}?page={}&size={}'.format(context['url'], \r\n page_number + 1, num_groups)\r\n\r\n return flask.jsonify(**context)", "def list_groups(request):\n groups = models.UserGroup.all().order('name')\n return utility.respond(request, 'admin/list_groups', {'groups': groups})", "def list_groups(self):\n return self.get_admin(\"groups\")", "def get_groups(self, obj):\n groupsForCompany = get_groups_with_perms(obj)\n return [x.id for x in groupsForCompany]", "def get_group_list(org_id):\n tList = get_template('app/usermanagementorg/group_list.html')\n groups = get_groups(org_id)\n return tList.render(Context({ 'groups': groups, }))", "def groups(self):\n return []", "def list_template_groups(context):\n template_groups = get_oneoffixx_template_groups()\n terms = []\n for group in template_groups:\n terms.append(SimpleVocabulary.createTerm(group.get(\"id\"),\n group.get(\"id\"),\n group.get(\"localizedName\")))\n return MutableObjectVocabulary(terms)", "def customer_group_list(h):\n global html\n html = h\n\n common_elements = customer_common_elements(group=True)\n\n css_list = common_elements[\"css_list\"]\n\n javascript_list = common_elements[\"javascript_list\"]\n\n all_btn = common_elements[\"all_btn\"]\n\n html.new_header(\"Customer Organization\", \"customer_group_management.py\", all_btn, css_list, javascript_list)\n customer_string = \"\"\"\n <div>\n <table id=\"customers\" cellpadding=\"0\" cellspacing=\"0\" border=\"0\" class=\"display\" 
style=\"text-align:center\">\n <thead>\n <tr>\n <th>\n Company System Role\n </th>\n <th>\n Company Name\n </th>\n <th>\n Company Address\n </th>\n <th>\n Company Telephone\n </th>\n <th>\n Company Website\n </th>\n <th>\n Company Business\n </th>\n <th>\n Company Registration Number\n </th>\n <th>\n Company VAT Number\n </th>\n <th>\n Company Sales Contact\n </th>\n <th>\n Company Purchase Contact\n </th>\n <th>\n Actions\n </th>\n </tr>\n </thead>\n </table>\n </div>\n \"\"\"\n customer_string += \"\"\"\n <script>\n get_customer_groups();\n </script>\n \"\"\"\n html.write(customer_string)\n html.new_footer()\n pass", "def get_all(isamAppliance, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving groups\", \"/sysaccount/groups/v1\")", "def getCSPGroups(**kwargs):\n sessiontoken = kwargs['sessiontoken']\n ORG_ID = kwargs['ORG_ID']\n strCSPProdURL = kwargs['strCSPProdURL']\n\n if kwargs['search_term'] is None:\n json_response = get_csp_groups_json(strCSPProdURL, ORG_ID, sessiontoken)\n print(\"Got the groups\")\n if json_response is not None:\n groups = json_response['results']\n num_groups = len(groups)\n if num_groups == 0:\n print(\"No results returned.\")\n else:\n print(str(num_groups) + \" result\" + (\"s\" if num_groups > 1 else \"\") + \" returned:\")\n table = PrettyTable(['ID', 'Name', 'Group Type', 'User Count'])\n for grp in groups:\n table.add_row([grp['id'], grp['displayName'], grp['groupType'], grp['usersCount']])\n print(table)\n else:\n search_term = kwargs['search_term']\n json_response = get_csp_groups_searchterm_json(strCSPProdURL, ORG_ID, sessiontoken, search_term)\n if json_response is not None:\n groups = json_response['results']\n num_groups = len(groups)\n if num_groups == 0:\n print(\"No results returned.\")\n else:\n print(str(num_groups) + \" result\" + (\"s\" if num_groups > 1 else \"\") + \" returned:\")\n table = PrettyTable(['ID', 'Name', 'Group Type', 'User Count'])\n for grp in groups:\n table.add_row([grp['id'], grp['displayName'], grp['groupType'], grp['usersCount']])\n print(table)\n else:\n print(\"API Error\")\n sys.exit(1)", "def generate_groups(ctx):\n asyncio.run(generate_groups_impl(ctx.obj[\"config\"]))", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def get_list_groups(self):\n list_response = requests.get(self.groups_url, headers=self.headers)\n return list_response.json()[\"groups\"]", "def groups(self):\r\n return resources.Groups(self)", "def list_group():\n data, code, message = FIELD_SERVICE.list_group()\n return __result(data, code, message)", "def list_groups(self):\n return self._get(\"cloudConnectorGroups\").list", "def groups_json(request):\n resp = []\n group_list = ResearchGroup.objects.order_by('name')\n for group in group_list:\n resp.append({'name': group.name, 'id': group.id})\n return HttpResponse(json.dumps(resp, ensure_ascii=False), content_type=\"application/json; charset=utf-8\")", "def set_up_groups(self):\n groups = []\n groups.append({'groupname': 'th',\n 'grouptitle': 'TH',\n 'path': '/'})\n groups.append({'groupname': 'neutronics',\n 'grouptitle': 'Neutronics',\n 'path': '/'})\n groups.append({'groupname': 'metadata',\n 'grouptitle': 'Simulation Metadata',\n 'path': '/'})\n return groups", "def get_groups(self):\n return Client._get(self)", "def get_groups(self):\n response = self._get(\"groups\")\n\n return response.json()", "def list_groups(args):\n\n for group in 
get_groups(args):\n print(group)", "def getGroups():\r\n return Group.getGroups()", "def groups(request, group_id = 1):\n group = get_object_or_404(ResearchGroup, pk=group_id)\n groups = ResearchGroup.objects.order_by('name')\n group_list = []\n for g in groups:\n if g.id is not group.id:\n group_list.append({'name': g.name, 'id': g.id})\n # default showing group\n # chosen group info\n group_info = {}\n group_info['name'] = group.name\n personnel = list()\n for p in group.personnel.all():\n personnel.append(p.username)\n group_info['personnel'] = \" \".join(str(x) for x in personnel)\n group_info['projects'] = group.projects\n group_info['directions'] = group.directions\n group_info['papers'] = group.papers.split()\n context = {'group_list': group_list, 'group_info': group_info}\n return render(request, 'sacms/groups.html', context)", "def get_groups():\n\n # FUTURE: Properly reutrn error, Mongo is giving it's own\n if current_user.groups:\n return Response(response=json.dumps([g.to_dict() for g in current_user.groups]), status=200, mimetype=\"application/json\")\n else:\n return return_json_error('No groups assigned to', 500)", "def list_groups_factory(context, request):\n return ListGroupsService(session=request.db,\n request_authority=request.authority,\n route_url=request.route_url)", "def get_all_access_groups():\n\treturn {\"access_groups\": [ag.serialize for ag in AccessGroup.query.all()]}, 200", "def capacitygroup_list(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_capacitygroup_list(cmd_ctx, cpc, options))", "def list_secgroups(self, name=None):", "def all_groups(request):\r\n group = Group()\r\n return HttpResponse(json.dumps(group.parseFile()))", "def get_RegisteredGroupsList(test_case, only_discoverable=False, override_headers=null, override_cookies=null):\n # type: (AnyMagpieTestCaseType, bool, Optional[HeadersType], Optional[CookiesType]) -> List[Str]\n app_or_url = get_app_or_url(test_case)\n path = \"/register/groups\" if only_discoverable else \"/groups\"\n resp = test_request(app_or_url, \"GET\", path,\n headers=override_headers if override_headers is not null else test_case.json_headers,\n cookies=override_cookies if override_cookies is not null else test_case.cookies)\n json_body = check_response_basic_info(resp, 200, expected_method=\"GET\")\n return json_body[\"group_names\"]", "def get_group_list(ip_address, headers):\n group_list = None\n group_url = 'https://%s/api/GroupService/Groups' % ip_address\n response = requests.get(group_url, headers=headers, verify=False)\n if response.status_code == 200:\n group_response = response.json()\n if group_response['@odata.count'] > 0:\n group_list = [x['Id'] for x in group_response['value']]\n else:\n print(\"No groups found at \", ip_address)\n else:\n print(\"No groups found at \", ip_address)\n return group_list", "def return_groups(app_filter):\n groups = \"\"\n if app_filter == 'APP_GROUP' or app_filter == 'OKTA_GROUP':\n groups = get_okta_groups(url, filter='type eq \"' + app_filter + '\"')\n #print(groups)\n else:\n print(\"Not a valid group filter, must be APP_GROUP or OKTA_GROUP.\")\n return groups", "def get_queryset(self):\n user = self.request.user\n return user.group_set.all()", "def test_get_resource_group_list(self):\n pass", "def test_get_groups(self):\n response = self.client.get_groups()\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v1/groups\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": 
[self.client.account_id]})", "def hostgroup_list(self):\n return self.ezx.get_hostgroup_list()", "def get_groups():\n\n groups = [\"shelter\", \"sharing\", \"unsheltered\", \"motel\"]\n\n for item in groups:\n group = Group(group_name=item)\n\n db.session.add(group)\n\n db.session.commit()", "def list_groups(self, **params):\n url = 'groups'\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def get_all():\n\n return AGE_GROUPS", "def groups_by_id(request, gid):\r\n group = Group()\r\n filtered_groups = group.query({\"gid\":str(gid)})\r\n if len(filtered_groups) == 0:\r\n badRequest(\"No available group under GID \"+str(gid))\r\n return HttpResponse(json.dumps(filtered_groups))", "def get(isamAppliance, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieve a list of all ISAM groups\",\n \"{0}\".format(uri),\n requires_modules=requires_modules, requires_version=requires_version)", "def get_case_list_by_group(config):\n # Identity = namedtuple('Identity', ['service', 'id'])\n groups = config.get('groups')\n full_case_lists = {}\n for group_name, group in groups.items():\n cases = group['cases']\n if group.get('dependencies'):\n for dep in group.get('dependencies'):\n dependencies_tests = groups.get(dep).get('cases')\n cases += dependencies_tests\n full_case_lists[group_name] = cases\n return full_case_lists", "def groups_with_entity(hass: HomeAssistant, entity_id: str) -> list[str]:\n if DOMAIN not in hass.data:\n return []\n\n groups = []\n\n for group in hass.data[DOMAIN].entities:\n if entity_id in group.tracking:\n groups.append(group.entity_id)\n\n return groups", "def test_get_groups(self):\n pass", "def test_get_groups(self):\n pass", "def list_groups(self, compartment_id, **kwargs):\n resource_path = \"/groups\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_groups got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Group]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Group]\")", "def groups(request):\n groups = GroupProfile.objects.all()\n user = request.user\n user_permissions = 'allowed' if user.is_authenticated() else ''\n show_errors_in_form = False\n group_form = GroupProfileForm()\n return render_to_response('home.html', locals())", "def customer_group_customer_widget():\n widget = \"<select id='groups' name='groups'>\"\n\n all_groups = customer_group_get_all()\n for group in 
all_groups:\n group = list(group)\n widget += \"<option value='%s'> %s </option>\" %(group[0], group[1])\n widget += \"</select>\"\n return widget", "def generate_options_for_resource_group(control_value=None, **kwargs):\n if control_value is None:\n return []\n\n env = Environment.objects.get(id=control_value)\n\n if CB_VERSION_93_PLUS:\n # Get the Resource Groups as defined on the Environment. The Resource Group is a\n # CustomField that is only updated on the Env when the user syncs this field on the\n # Environment specific parameters.\n resource_groups = env.custom_field_options.filter(\n field__name=\"resource_group_arm\"\n )\n return [rg.str_value for rg in resource_groups]\n else:\n rh = env.resource_handler.cast()\n groups = rh.armresourcegroup_set.all()\n return [g.name for g in groups]", "def get_cli_groups():\n\n return get_component(CLIPackage.COMPONENT_NAME).get_cli_groups()", "def pull_all_rhds_group(self):\n return self.ldap_connection.search_s(\"ou=managedGroups,dc=redhat,dc=com\",\n ldap.SCOPE_SUBTREE)", "def get_groups(args):\n\n args.suppress_verify_output = True\n if verify(args) != 0:\n # restore stdout\n sys.stdout = sys.__stdout__\n print(\"Config file not valid, please use the verify function to debug\")\n return []\n\n with open(args.file, \"r\") as f:\n config_json = json.load(f)\n\n groups = []\n for group in config_json[\"groups\"]:\n groups.append(group[\"name\"])\n return groups", "def get_all_as_groups(as_connection):\n as_groups_list = []\n get_as_groups = as_connection.get_all_groups()\n as_groups_list.extend(get_as_groups)\n\n token = get_as_groups.next_token\n while token is not None:\n get_as_groups = as_connection.get_all_groups(\n next_token=token)\n as_groups_list.extend(get_as_groups)\n token = get_as_groups.next_token\n print \"Processed {0} AutoScaling Group\"\\\n .format(len(as_groups_list))\n return as_groups_list", "def get_group(tkn: Token = Depends(from_authotization_header_nondyn),):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return [\n GetGroupShortOut(group_id=str(grp.pk), group_name=grp.group_name)\n for grp in Group.objects().order_by(\"group_name\")\n ]", "def auto_cohort_groups(self):\r\n if self.cohort_config is None:\r\n return []\r\n else:\r\n return self.cohort_config.get(\"auto_cohort_groups\", [])", "def generate_groups(client_list, criteria_index):\n groups_dict = {}\n for index, client in enumerate(client_list):\n value = client.data[criteria_index]\n if not value in groups_dict:\n groups_dict[value] = []\n groups_dict[value].append((index, client))\n\n groups_list = [[client for (index, client) in list] for value, list in groups_dict.items()]\n groups_index_list = [[index for (index, client) in list] for value, list in groups_dict.items()]\n return groups_list, groups_index_list", "def test_function(self):\n self.ms_client.http_request(method='GET', url_suffix='groups', params={'$orderby': 'displayName'})\n demisto.results('ok')", "def request_access_to_groups(self, ceph):\n for ceph_group in (\"volumes\", \"images\", \"vms\"):\n ceph.request_access_to_group(\n name=ceph_group,\n object_prefix_permissions={\"class-read\": [\"rbd_children\"]},\n permission=\"rwx\",\n )", "def nfvi_get_instance_groups(callback):\n cmd_id = _compute_plugin.invoke_plugin('get_instance_groups',\n callback=callback)\n return cmd_id", "def list_namespaced_group(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = 
locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_group\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/groups'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1GroupList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def get_all_groups(self):\n self.cursor.execute(\"select * from groups\")\n self.connection.commit()\n return self.cursor.fetchall()", "def list_projects(ctx):\n pprint(ctx.obj.groups.get().data)", "def get_group_definitions_list(mmtf_dict):\n\n\n group_definitions = []\n for group in mmtf_dict[\"groupList\"]:\n atoms = [{\n \"name\": name, \"element\": element.upper(), \"charge\": charge\n } for name, element, charge in zip(\n group[\"atomNameList\"], group[\"elementList\"], group[\"formalChargeList\"],\n )]\n group_definitions.append({\n \"name\": group[\"groupName\"], \"atoms\": atoms\n })\n return group_definitions", "def customer_group_get_all():\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n query = \"\"\"\n SELECT \n `group_id`, \n `group_name`, \n `description`, \n `timestamp`, \n `created_by`, \n `creation_time`, \n `is_deleted`, \n `updated_by`, \n `role_id`, \n `is_default`, \n `is_customer` \n FROM `groups` \n WHERE `is_customer` = 1\n \"\"\"\n user_group_details = None\n cursor = db.cursor()\n if cursor.execute(query) != 0:\n user_group_details = cursor.fetchall()\n cursor.close()\n db.close()\n return user_group_details", "def get(self):\n status = ErrorCode.SUCCESS\n try:\n res = []\n cid = self.get_argument('cid', None)\n if not (cid is None):\n res = QueryHelper.get_groups_by_cid(cid, self.db)\n self.write_ret(status,\n dict_=DotDict(res=res))\n except Exception as e:\n logging.exception(\"[UWEB] Get groups failed. 
Exception: %s\",\n e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def index():\n groups = list(map(lambda x: x.json(), GroupModel.query.all())) \n return render_template('dashboard/schedules.html', groups=groups)", "def compute_server_groups(self):\n path = '/os-server-groups'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack server groups: %s' % truncate(res))\n return res[0]['server_groups']", "def get_groups(self, content=None):\n if content:\n ctype = ContentType.objects.get_for_model(content)\n prrs = PrincipalRoleRelation.objects.filter(role=self,\n content_id__in=(None, content.id),\n content_type__in=(None, ctype)).exclude(group=None)\n else:\n prrs = PrincipalRoleRelation.objects.filter(role=self,\n content_id=None, content_type=None).exclude(group=None)\n\n return [prr.group for prr in prrs]", "def get_groups(self):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_get_groups_query+\" ORDER BY $groupname_field$\",{'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: get_groups: %s\" % (query,))\n\n cursor.execute(query)\n desc=[i[0] for i in cursor.description]\n for row in cursor:\n dictrow=dict(zip(desc,row))\n yield dictrow[self.sql_groupname_field]", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def list_group(self, groupname):\n return self.get_admin(\"groups/{}\".format(groupname))", "def get_psample_list_groups(dut):\n return st.show(dut, \"sudo psample --list-groups\", skip_tmpl=True)", "def test_api_v1_groups_get(self):\n pass", "def export_groups(self):\n print('=== Exporting all group data...')\n\n for group in self.client.tenant.groups:\n print('- Exporting group:', group.name)\n\n json = {\n 'id': self.get_id(group),\n 'href': group.href,\n 'name': group.name,\n 'description': group.description,\n 'status': group.status,\n 'createdAt': group.created_at.isoformat(),\n 'modifiedAt': group.modified_at.isoformat(),\n 'customData': self.get_custom_data(group),\n 'directory': {\n 'id': self.get_id(group.directory),\n 'href': group.directory.href,\n 'name': group.directory.name,\n 'description': group.directory.description,\n 'status': group.directory.status,\n 'createdAt': group.directory.created_at.isoformat(),\n 'modifiedAt': group.directory.modified_at.isoformat(),\n },\n 'accounts': [],\n }\n\n for account in group.accounts:\n json['accounts'].append({\n 'id': self.get_id(account),\n 'href': account.href,\n 'username': account.username,\n 'email': account.email,\n 'fullName': account.full_name,\n 'givenName': account.given_name,\n 'middleName': account.middle_name,\n 'surname': account.surname,\n 'status': account.status,\n 'createdAt': account.created_at.isoformat(),\n 'modifiedAt': account.modified_at.isoformat(),\n })\n\n tenant = self.get_id(self.client.tenant)\n self.write('%s/%s/groups/%s' % (self.location, tenant, json['id']), json)\n\n print('=== Done!\\n')", "def getGroups(self):\n return [g[0] for g in grp.getgrall()]", "def test_page_groups(backend: Backend) -> None:\n page_groups: Dict[str, List[str]] = cast(Any, 
backend.metadata[\"pageGroups\"])\n assert page_groups == {\"Group 1\": [\"index\", \"index\"]}", "def seat_group_index(party_id):\n party = _get_party_or_404(party_id)\n\n groups = seat_group_service.get_all_seat_groups_for_party(party.id)\n\n return {\n 'party': party,\n 'groups': groups,\n }", "def getGroupData(service, groupName, attList):\n # import IPython ; IPython.embed() ; exit(); \n groupsDataList = service.contactGroups().list().execute()[\"contactGroups\"]\n for group in groupsDataList:\n if group[\"name\"] == groupName:\n groupData = []\n for att in attList:\n groupData.append(group[att])\n return groupData", "def get_groups(self, env, token, memcache_client=None):\n groups = None\n key = '%s/token/%s' % (self.reseller_prefix, token)\n cached_auth_data = memcache_client and memcache_client.get(key)\n if cached_auth_data:\n start, expiration, groups = cached_auth_data\n if time() - start > expiration:\n groups = None\n\n headers = {}\n if env.get('HTTP_AUTHORIZATION'):\n groups = None\n headers[\"Authorization\"] = env.get('HTTP_AUTHORIZATION')\n\n if not groups:\n with Timeout(self.timeout):\n conn = http_connect(self.auth_host, self.auth_port, 'GET',\n '%stoken/%s' % (self.auth_prefix, token),\n headers, ssl=self.ssl)\n\n resp = conn.getresponse()\n resp.read()\n conn.close()\n if resp.status // 100 != 2:\n return None\n expiration = float(resp.getheader('x-auth-ttl'))\n groups = resp.getheader('x-auth-groups')\n if memcache_client:\n memcache_client.set(key, (time(), expiration, groups),\n timeout=expiration)\n\n if env.get('HTTP_AUTHORIZATION'):\n account, user, sign = \\\n env['HTTP_AUTHORIZATION'].split(' ')[-1].split(':')\n cfaccount = resp.getheader('x-auth-account-suffix')\n path = env['PATH_INFO']\n env['PATH_INFO'] = \\\n path.replace(\"%s:%s\" % (account, user), cfaccount, 1)\n\n return groups", "def get_definition(self):\n return self.client._perform_json(\n \"GET\", \"/admin/groups/%s\" % self.name)", "def get_inv_groups(**kwargs):\n proxy = kwargs['proxy']\n sessiontoken = kwargs['sessiontoken']\n gw = kwargs['gateway']\n if kwargs['objectname'] is None:\n if gw == \"both\":\n gw = [\"mgw\", \"cgw\"]\n for item in gw:\n json_response = get_sddc_inventory_groups_json(proxy, sessiontoken, item)\n gw_group = json_response['results']\n gw_table = PrettyTable(['ID', 'Name'])\n for i in gw_group:\n gw_table.add_row([i['id'], i['display_name']])\n print(f'Here are the {str.upper(item)} Groups:')\n print(gw_table)\n else:\n json_response = get_sddc_inventory_groups_json(proxy, sessiontoken, gw)\n gw_group = json_response['results']\n gw_table = PrettyTable(['ID', 'Name'])\n for i in gw_group:\n gw_table.add_row([i['id'], i['display_name']])\n print(f'Here are the {str.upper(gw)} Groups:')\n print(gw_table)\n elif kwargs['objectname'] is not None and gw == \"both\":\n print(\"When specifying a specific group, please be sure to specify either CGW or MGW for the group domain.\")\n sys.exit(1)\n else:\n group_id = kwargs['objectname']\n \"\"\" Gets a single SDDC Group. Use 'mgw' or 'cgw' as the parameter. 
Displays effective membership and criteria for group\"\"\"\n json_response = get_sddc_inventory_group_id_json(proxy, sessiontoken, gw, group_id)\n\n # Define tables\n table = PrettyTable(['Member Type', 'Key', 'Operator', 'Value', 'Conjunction Operator'])\n vm_table = PrettyTable(['VM Name'])\n segment_table = PrettyTable(['Segment Name', 'Path'])\n segment_port_table = PrettyTable(['Segment Port Name'])\n ip_address_table = PrettyTable(['IP Address'])\n vif_table = PrettyTable(['VIFs'])\n group_table = PrettyTable(['Group Path'])\n mac_table = PrettyTable(['MAC Addresses'])\n ad_group_table = PrettyTable(['AD Groups'])\n\n # Checking for groups with defined criteria with the following command.\n if json_response['expression'] == []:\n print(\"This group has no criteria defined.\")\n elif 'expression' in json_response:\n group_criteria = json_response['expression']\n for g in group_criteria:\n if g[\"resource_type\"] == \"Condition\":\n group = json_response['expression']\n print(\"The group \" + group_id + \" has these criteria defined:\")\n for i in group:\n if 'member_type' in i.keys():\n table.add_row([i['member_type'], i['key'], i['operator'], i['value'], \"-\"])\n elif 'conjunction_operator' in i.keys():\n table.add_row([\"\", \"\", \"\", \"\", i['conjunction_operator']])\n else:\n print(\"There has been an error\")\n print(table)\n print(\"Based on the above criteria, the effective group membership is:\")\n for i in group:\n if i['resource_type'] == 'ConjunctionOperator':\n continue\n elif i['member_type'] == 'VirtualMachine':\n group_vm_membership_json = get_inventory_group_vm_membership_json(proxy, sessiontoken, gw, group_id)\n group_vm_membership = group_vm_membership_json['results']\n for x in group_vm_membership:\n vm_table.add_row([x['display_name']])\n print(\"Here is the list of VMs included in this group\")\n print(vm_table)\n elif i['member_type'] == 'Segment':\n group_segment_membership_json = get_inventory_group_segment_json(proxy, sessiontoken, gw, group_id)\n group_segment_membership = group_segment_membership_json['results']\n for y in group_segment_membership:\n segment_table.add_row([y['display_name']])\n print(\"Here is the list of Segments included in this group\")\n print(segment_table)\n elif i['member_type'] == 'SegmentPort':\n group_segment_port_membership_json = get_inventory_group_segment_port_json(proxy, sessiontoken, gw, group_id)\n group_segment_port_membership = group_segment_port_membership_json['results']\n for z in group_segment_port_membership:\n segment_port_table.add_row([z['display_name']])\n print(\"Here is the list of Segment Ports included in this group\")\n print(segment_port_table)\n elif i['member_type'] == 'IPSet':\n group_ip_address_membership_json = get_inventory_group_ip_address_json(proxy, sessiontoken, gw, group_id)\n group_id_address_membership = group_ip_address_membership_json['results']\n for a in group_id_address_membership:\n ip_address_table.add_row([a['display_name']])\n print(\"Here is the list of IP Addresses included in this group\")\n print(ip_address_table)\n else:\n print(\"No effective group member\")\n\n elif g[\"resource_type\"] == \"IPAddressExpression\":\n ip_addr = get_inventory_group_ip_address_json(proxy, sessiontoken, gw, group_id)\n ips = ip_addr['results']\n for i in ips:\n ip_address_table.add_row([i])\n print(\"The group \" + group_id + \" is based on the IP addresses criteria:\")\n print(ip_address_table)\n elif g[\"resource_type\"] == \"ExternalIDExpression\" and g['member_type'] == 'VirtualMachine':\n 
group_vm = get_inventory_group_vm_membership_json(proxy, sessiontoken, gw, group_id)\n vms = group_vm['results']\n for v in vms:\n vm_table.add_row([v['display_name']])\n print(f\"The VMs in group {group_id} are:\")\n print(vm_table)\n elif g[\"resource_type\"] == \"ExternalIDExpression\" and g['member_type'] == 'VirtualNetworkInterface':\n group_vif = get_inventory_group_vif_json(proxy, sessiontoken, gw, group_id)\n vifs = group_vif['results']\n for v in vifs:\n vif_table.add_row([v['display_name']])\n print(f'The VIFs included in the group {group_id} are:')\n print(vif_table)\n elif g['resource_type'] == \"PathExpression\":\n paths = g['paths']\n for p in paths:\n if '/infra/domains/cgw/groups/' in p:\n group_table.add_row([p])\n elif '/infra/tier-1s/' in p:\n group_segments = get_inventory_group_segment_json(proxy, sessiontoken, gw, group_id)\n segments = group_segments['results']\n for s in segments:\n segment_table.add_row([s['display_name'], s['path']])\n print(f\"The group {group_id} contain these groups/segments\")\n print(group_table)\n print(segment_table)\n elif g['resource_type'] == 'MACAddressExpression':\n mac_addrs = g['mac_addresses']\n for m in mac_addrs:\n mac_table.add_row([m])\n print(f'The group {group_id} contains these MAC Addresses')\n print(mac_table)\n elif g['resource_type'] == 'ConjunctionOperator':\n continue\n else:\n print(\"We currently do not support displaying groups of this configuration\")\n else:\n print(\"whoops\")\n return", "def fusion_api_get_directory_groups(self, body, api=None, headers=None):\n return self.logindomain.groups(body, api, headers)", "def test_get_device_groups(self):\n pass", "def list_instances_xhr():\n if request.method == \"GET\":\n instances = list_instances_request()\n user_groups_list = list_user_groups(session)\n user_groups = []\n for groups in user_groups_list:\n user_groups.append(groups[\"metadata\"][\"name\"])\n return jsonify(instances, user_groups)", "def security_groups(self, oid):\n try:\n path = u'/servers/%s/os-security-groups' % oid\n res = self.client.call(path, u'GET', data=u'', \n token=self.manager.identity.token)\n self.logger.debug(u'Get openstack server security groups: %s' % truncate(res))\n return res[0][u'security_groups']\n except Exception as error:\n self.logger.error(error, exc_info=True)\n data = []\n return res", "def get_candidates(data):\n return data.groups[\"Candidates\"].objects", "def getGroups(self, resource='groups', rpp=None, page=None):\n\n groups = list()\n res = self.getRequest(resource, rpp, page)\n ppObj = vsdModels.Pagination(**res)\n\n for g in ppObj.items:\n group = vsdModels.Group(**g)\n groups.append(group)\n\n return groups, ppObj", "def group(requestContext, *seriesLists):\n seriesGroup = []\n for s in seriesLists:\n seriesGroup.extend(s)\n\n return seriesGroup" ]
[ "0.65068406", "0.6469846", "0.63963234", "0.6134552", "0.6109888", "0.6099704", "0.60848904", "0.60196185", "0.6018239", "0.6011199", "0.5968778", "0.5935231", "0.59249777", "0.587962", "0.5860688", "0.58439285", "0.58141536", "0.5788746", "0.5758732", "0.57442486", "0.56795675", "0.56738865", "0.5656402", "0.56422925", "0.5641386", "0.56351846", "0.56318486", "0.5625607", "0.56103766", "0.5600625", "0.5585995", "0.5580695", "0.55736893", "0.55638635", "0.5559975", "0.5550254", "0.55058384", "0.54965854", "0.5476531", "0.54697496", "0.54696304", "0.54341745", "0.54238355", "0.5419401", "0.5417226", "0.5417018", "0.5411917", "0.54068387", "0.53794825", "0.5375985", "0.5374037", "0.53434694", "0.5342525", "0.5342525", "0.5336958", "0.5327569", "0.53109646", "0.530206", "0.530052", "0.53004026", "0.5289514", "0.52821296", "0.52738", "0.5271494", "0.52555364", "0.525545", "0.5254267", "0.5249044", "0.52417994", "0.5241736", "0.5236956", "0.520818", "0.52026427", "0.5186415", "0.51850975", "0.51806474", "0.5180464", "0.5168851", "0.51657796", "0.51657796", "0.51657796", "0.51657796", "0.5154578", "0.5150677", "0.51466453", "0.51420206", "0.5134319", "0.513083", "0.5125017", "0.51241714", "0.512083", "0.5110894", "0.51089054", "0.5102553", "0.51021624", "0.5096227", "0.5081205", "0.507651", "0.5075212", "0.50709707" ]
0.671684
0
Lists the Fault Domains in your tenancy. Specify the OCID of either the tenancy or another of your compartments as the value for the compartment ID (remember that the tenancy is simply the root compartment). See `Where to Get the Tenancy's OCID and User's OCID`__.
def list_fault_domains(self, compartment_id, availability_domain, **kwargs): resource_path = "/faultDomains" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_fault_domains got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "availabilityDomain": availability_domain } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[FaultDomain]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[FaultDomain]")
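A minimal usage sketch for the method above, assuming it is exposed on an OCI IdentityClient configured from ~/.oci/config, as in the public Python SDK (the client and variable names here are illustrative, not part of the API). Fault domains are scoped to an availability domain, so the availability domains are enumerated first:

import oci

config = oci.config.from_file()  # default profile in ~/.oci/config
identity = oci.identity.IdentityClient(config)
tenancy_id = config["tenancy"]  # the tenancy is the root compartment

# Each availability domain exposes its own set of fault domains.
for ad in identity.list_availability_domains(tenancy_id).data:
    for fd in identity.list_fault_domains(tenancy_id, ad.name).data:
        print(ad.name, fd.name)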
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listDomains(self):\n reply = self.rpc.getDomains(self.username,\n self.password)\n if reply[0] == 'UNKNOWN_ERROR':\n raise Exception(\"RPC returned error: \" + reply[0])\n return reply", "def get_storage_domains(cohesity_client):\n storage_domain_list = cohesity_client.view_boxes.get_view_boxes()\n for domain in storage_domain_list:\n exported_res_dict[\"Storage Domains\"].append(domain.name)\n return storage_domain_list", "def list_keystone_v3_domains(self):\n LOG_OBJ.debug(\"List the domains.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/domains\"\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while creating domain\")\n print (\"No response from Server while creating domain\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\" Listing domains Failed with status %s \"\n \"and error : %s\" % response.status, response.data)\n print (\" Listing domains Failed with status %s and error : %s\" %\n response.status, response.data)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Domains list : %s \" % output)\n print (\"Domains list : %s \" % output)\n return output['domains']", "def list_all():\n\n return (_conn.listDefinedDomains() +\n [_conn.lookupByID(id).name() for id in _conn.listDomainsID()])", "def cb_listdomains(self, cmd):\n for cur in sorted(self.d.listDomains(),\n key=lambda x: _domreverse(x['domain'])):\n print \"%(domain)60s %(expiration_date)15s\" % cur", "def get_input_domains():\n df = pandas.read_excel(\"AutoScrapy/files/EBE21 - Top 100 Onlineshops to scrapp.ods\", engine=\"odf\")\n list_of_addresses = df['Domain'].to_list()\n list_of_addresses = [(\"http://\" + address) for address in list_of_addresses]\n print(list_of_addresses)\n return list_of_addresses", "def show_domains(self):\n show_domains(self.system.cavity_gri)", "def list_domain_names():\n pass", "def get_ad_entries(cohesity_client):\n resp = cohesity_client.active_directory.get_active_directory_entry()\n if resp:\n ad_list = list()\n for each_ad in resp:\n ad_list.append(each_ad.domain_name)\n config_dict[each_ad.domain_name] = [\n \"username\", \"password\", \"machine_accounts\"]\n exported_res_dict[\"Active directories\"] = ad_list\n return resp", "async def getDepartments(self, ):\n payload = {}\n \n\n # Parameter validation\n schema = CatalogValidator.getDepartments()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(self._conf.domain, f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments\", \"\"\"{\"required\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a 
particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}]}\"\"\", )\n query_string = await create_query_string()\n headers = {\n \"Authorization\": \"Bearer \" + await self._conf.getAccessToken()\n }\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(self._conf.domain, \"get\", await create_url_without_domain(f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments\", ), query_string, headers, \"\", exclude_headers=exclude_headers), data=\"\")", "def get_delta_domains():\n url = os.getenv('DELTAS_URL')\n if url is None:\n raise Exception('Delta report URL configuration not set!')\n\n json = requests.get(url, timeout=10).json()\n return [domain\n for (domain,)\n in json['values']\n if dnstwist.is_valid_domain(domain)]", "def list_zones(self, **kwargs):\r\n return self.client['Account'].getDomains(**kwargs)", "def doctors(self) -> DoctorsList:\n data = self.get(\"minhealth_doctors\")\n \n ls = [Doctors(**doc) for doc in data]\n return DoctorsList(items=ls)", "def list_domain(self, feed_id=None):\n resources = self.list_resource(feed_id=feed_id, resource_type_id='Host Controller')\n domains = []\n if resources:\n for resource in resources:\n resource_data = self.get_config_data(\n feed_id=resource.path.feed_id, resource_id=resource.id)\n domain_data = resource_data.value\n domains.append(Domain(resource.id, resource.name, resource.path, domain_data))\n return domains", "def get_domains(self):\n\n response = self.call(method='getDomains')\n domains = []\n for d in response:\n domain = self.domain(domain=d['domain'])\n domains.append(domain)\n return domains", "def getDomains(self, company):\n return self.db.getDomains(company)", "def list_availability_domains(self, compartment_id, **kwargs):\n resource_path = \"/availabilityDomains\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_availability_domains got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[AvailabilityDomain]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[AvailabilityDomain]\")", "def districts(self):\n catalog = getToolByName(self.context, 'portal_catalog')\n d = [dict(url=district.getURL(), title=district.Title,\n address=district.Description) for district in\n catalog({'object_provides': IDistrict.__identifier__,\n 'path': dict(query='/'.join(self.context.getPhysicalPath()),\n depth=1), 'sort_on': 'sortable_title'})]\n print d\n return d", 
"def get_departments(self) -> list:\n return self.client.departments.get_all()", "def dict_of_domains(fc):\r\n # need to find root database (GDB or SDE)\r\n db_root = os.path.dirname(fc)\r\n while db_root[-4:].lower() != '.gdb' and db_root[-4:].lower() != '.sde':\r\n old_db_root = db_root # protect against infinite loop\r\n db_root = os.path.dirname(db_root)\r\n if old_db_root == db_root: # protect against infinite loop\r\n break\r\n arcpy.AddMessage(\"Retrieving Domains from \" + str(db_root))\r\n return {domain.name: domain.codedValues for domain in arcpy.da.ListDomains(db_root)}", "def tracking_domain_list(self):\r\n params = base.get_params(None, locals())\r\n return self._get('tracking_domain_list', params)", "def handle_domains(\n actapi: act.api.Act, content: Text, domains: List[Text]\n) -> List[act.api.fact.Fact]:\n\n feeds_facts: List[act.api.fact.Fact] = []\n\n for domain in domains:\n\n chain = []\n\n chain.append(\n actapi.fact(\"connectsTo\").source(\"content\", content).destination(\"uri\", \"*\")\n )\n chain.append(\n actapi.fact(\"componentOf\").source(\"fqdn\", domain).destination(\"uri\", \"*\")\n )\n\n feeds_facts += act.api.fact.fact_chain(*chain)\n\n return feeds_facts", "def GetListDoctors(self):\n\t\treturn self.ClientsMap.values()", "def listRR(self):\n reply = self.rpc.getSubdomains(self.username,\n self.password,\n self.domain)\n\n if len(reply) and reply[0] in ('UNKNOWN_ERROR',\n 'RATE_LIMITED'):\n raise Exception(\"RPC returned error: \" + reply[0])\n return reply", "def list(self, domain):\n return request(\n API_LIST.DNS_LIST.value,\n {\n 'email': self.email,\n 'token': self.token,\n 'domain': domain\n }\n )", "def domains(self):\n return DomainCollection(self.request)", "def get_departments() -> list:\n return Department.query.all()", "def list_domain_names(self) -> Dict:\n pass", "def get_doctors():\n all_doctors = Doctor.query.all()\n result = doctors_schema.dump(all_doctors)\n return jsonify(result.data)", "def extract_domains(self, resp):\n return", "def getSDDChosts(**kwargs):\n strProdURL = kwargs[\"strProdURL\"]\n orgID = kwargs[\"ORG_ID\"]\n sessiontoken = kwargs[\"sessiontoken\"]\n sddcID = kwargs[\"SDDC_ID\"]\n\n jsonResponse = get_sddc_info_json(strProdURL, orgID, sessiontoken, sddcID)\n if jsonResponse == None:\n print(\"API Error\")\n sys.exit(1)\n\n cdcID = jsonResponse['resource_config']['vc_ip']\n cdcID = cdcID.split(\"vcenter\")\n cdcID = cdcID[1]\n cdcID = cdcID.split(\"/\")\n cdcID = cdcID[0]\n clusters = jsonResponse['resource_config']['clusters']\n table = PrettyTable(['Cluster', 'Name', 'Status', 'ID'])\n for c in clusters:\n for i in c['esx_host_list']:\n hostName = i['name'] + cdcID\n table.add_row([c['cluster_name'], hostName, i['esx_state'], i['esx_id']])\n print(\"SDDC Hosts:\")\n print(table)", "def test_dos_create_service_domain_list(self):\n # create a huge list of domain\n self.reset_defaults()\n for k in range(1, 30000):\n self.domain_list.append({\"domain\": \"w.t%s.com\" % k})\n\n # send MAX_ATTEMPTS requests\n for k in range(1, self.MAX_ATTEMPTS):\n self.service_name = str(uuid.uuid1())\n self.check_one_request()", "def get_domains() -> List[str]:\n ret = _call_endpoint(\"v1/domains\")\n # Example response:\n # [{'createdAt': '2016-06-25T03:08:44.000Z',\n # 'domain': 'mydomain.com',\n # 'domainId': 12345678,\n # 'expirationProtected': False,\n # 'expires': '2020-06-25T03:08:44.000Z',\n # 'holdRegistrar': False,\n # 'locked': True,\n # 'nameServers': None,\n # 'privacy': False,\n # 'renewAuto': True,\n # 'renewDeadline': 
'2020-08-09T03:08:44.000Z',\n # 'renewable': True,\n # 'status': 'ACTIVE',\n # 'transferProtected': False},]\n domains = [d[\"domain\"] for d in ret]\n return domains", "def dc_list(request, format=None):\n if request.method == 'GET':\n dcs = DeviceCustomization.objects.all()\n serializer = DeviceCustomizationSerializer(dcs, many=True)\n return Response(serializer.data)\n\n elif request.method == 'POST':\n serializer = DeviceCustomizationSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def get_botnet_domains():\n\n fw = \"<HTTPS://YOUR_FORTIGATE_IP:YOUR_FORTIGATE_PORT>\"\n\n path = \"/api/v2/monitor/system/botnet-domains/hits/?access_token=\"\n\n token = \"<YOUR_API_KEY>\"\n\n content_filter = \"\"\n\n if content_filter != \"\":\n url = fw + path + token + content_filter\n else:\n url = fw + path + token\n\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n \n try:\n r = requests.get(url, verify=False).json()\n except Exception:\n print(\"Something went wrong. Is the url correct? Exiting...\")\n sys.exit()\n\n for key in r['results']:\n print()\n for k,v in key.items():\n print(\"{0:6} : {1}\".format(k.upper(), str(v)))", "def show_all_doctors():\n if request.method == \"GET\":\n location = \"San Francisco, CA\"\n category=\"\"\n else:\n args = request.form\n location = args[\"location\"]\n category = args[\"speciality\"]\n\n doctors = get_doctors(\"doctors\", location, category)\n specialities = unique_elements([doc[\"categories\"][0][\"alias\"] for doc in doctors])\n locations = [\"San Franciso, CA\", \"Los Angeles, CA\", \"New York City, NY\"]\n \n return render_template(\"index.html\",\n list_doctors_info=doctors,\n doc_speciality=specialities,\n selected_location=location, \n location=locations)", "def case_search_enabled_domains():\n return CaseSearchConfig.objects.filter(enabled=True).values_list('domain', flat=True)", "def __get_faults_list(self, faults):\n r_faults = []\n for x in faults:\n if faults[x]['value']:\n r_faults.append(faults[x]['name'])\n return r_faults", "def fetch_domain_certs(domain):\n url = BASE_URL.format(domain)\n result = requests.get(url)\n if result.status_code != 200:\n result.raise_for_status()\n return result.json()", "def show_all_departments():\n\n logger.debug('Function show_all_departments(). 
Routed to /departments')\n titles = ['Name', 'Average Salary', 'Employees']\n departments = ds.get_all()\n logger.info('Get list of departments, length is %i', len(departments))\n return render_template('departments.html',\n title='Departments',\n table_title='List of Departments',\n headers=titles,\n departments=departments)", "def get_dashboard_components(doctype, field):\n\n\tdashs = frappe.get_all(\"Dashmanager\", filters={\"ref_doctype\": doctype,\"ref_docfield\":doctype+\"-\"+field})\n\tdash = frappe.get_doc(\"Dashmanager\", dashs[0])\n\treturn dash.build_dashboard_components()", "def dnc_lists(self):\n return self._dnc_lists", "def get_domain(id):\n return query(WEB_EXAMPLE_BASE + f\"/classical/domain/{id}\")", "def domain(self):\n return self.keys()", "def basespace_list(\n host,\n email,\n password,\n api_key,\n):\n BaseSpaceList(\n Credentials(email=email, password=password, api_key=api_key),\n Optionals(host=host),\n ).run()", "def test_get_contact_lists(self):\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n \n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n content = json.loads(response.content)\n self.assertEqual(len(content), self.contact_lists_count)", "def OspfBierSubDomainList(self):\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.ospfbiersubdomainlist import OspfBierSubDomainList\n return OspfBierSubDomainList(self)._select()", "def list_domain_policy(self, _):\n FILETIME_TIMESTAMP_FIELDS = {\n \"lockOutObservationWindow\": (60, \"mins\"),\n \"lockoutDuration\": (60, \"mins\"),\n \"maxPwdAge\": (86400, \"days\"),\n \"minPwdAge\": (86400, \"days\"),\n \"forceLogoff\": (60, \"mins\")\n }\n\n FOREST_LEVELS = {\n 7: \"Windows Server 2016\",\n 6: \"Windows Server 2012 R2\",\n 5: \"Windows Server 2012\",\n 4: \"Windows Server 2008 R2\",\n 3: \"Windows Server 2008\",\n 2: \"Windows Server 2003\",\n 1: \"Windows Server 2003 operating system through Windows Server 2016\",\n 0: \"Windows 2000 Server operating system through Windows Server 2008 operating system\"\n }\n\n FIELDS_TO_PRINT = [\n \"dc\",\n \"distinguishedName\",\n \"lockOutObservationWindow\",\n \"lockoutDuration\",\n \"lockoutThreshold\",\n \"maxPwdAge\",\n \"minPwdAge\",\n \"minPwdLength\",\n \"pwdHistoryLength\",\n \"pwdProperties\",\n \"ms-DS-MachineAccountQuota\",\n \"msDS-Behavior-Version\"]\n\n policy = list(self.engine.query(self.engine.DOMAIN_INFO_FILTER()))\n if policy:\n policy = policy[0]\n for field in FIELDS_TO_PRINT:\n val = policy.get(field, None)\n if val is None:\n continue\n\n if field == \"lockOutObservationWindow\" and isinstance(val, timedelta):\n val = int(val.total_seconds()) / 60\n elif field in FILETIME_TIMESTAMP_FIELDS.keys() and type(val) == int:\n val = int((fabs(float(val)) / 10**7) / FILETIME_TIMESTAMP_FIELDS[field][0])\n if field in FILETIME_TIMESTAMP_FIELDS.keys():\n val = \"%d %s\" % (val, FILETIME_TIMESTAMP_FIELDS[field][1])\n if field == \"msDS-Behavior-Version\" and isinstance(val, int):\n val = \"%s\" % (FOREST_LEVELS[policy[field]])\n\n print(\"%s: %s\" % (field, val))", "def contacts(self, gdi_oids, session):\n contacts = []\n\n ResourceContact = self.config_models.model('resource_contact')\n Contact = self.config_models.model('contact')\n query = session.query(ResourceContact) \\\n 
.filter(ResourceContact.gdi_oid_resource.in_(gdi_oids)) \\\n .order_by(ResourceContact.id_contact_role)\n # eager load relations\n query = query.options(\n joinedload(ResourceContact.contact)\n .joinedload(Contact.organisation)\n )\n for res_contact in query.all():\n person = res_contact.contact\n person_data = {\n 'id': person.id,\n 'name': person.name,\n 'function': person.function,\n 'email': person.email,\n 'phone': person.phone,\n 'street': person.street,\n 'house_no': person.house_no,\n 'zip': person.zip,\n 'city': person.city,\n 'country_code': person.country_code\n }\n\n organisation_data = None\n organisation = person.organisation\n if organisation is not None:\n organisation_data = {\n 'id': organisation.id,\n 'name': organisation.name,\n 'unit': organisation.unit,\n 'abbreviation': organisation.abbreviation,\n 'street': organisation.street,\n 'house_no': organisation.house_no,\n 'zip': organisation.zip,\n 'city': organisation.city,\n 'country_code': organisation.country_code\n }\n\n contacts.append({\n 'person': person_data,\n 'organisation': organisation_data\n })\n\n return contacts", "def _get_domain(self):\n self.ensure_one()\n domain = []\n return domain", "def get_search_domains(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrvCfg_GetSearchDomains', self.handle))", "def _list(self, account, page):\n response = self.client.get(self.get_url(account), data={\"page\": page})\n return [\n DomainResource(**item) for item in response['data']\n ], response['pagination']", "def _get_domain(self, doid):\n SQL = render_template(\"/\".join([self.template_path,\n 'get_domain.sql']),\n doid=doid)\n status, res = self.conn.execute_2darray(SQL)\n\n if not status:\n return False, internal_server_error(errormsg=res)\n if len(res['rows']) == 0:\n raise ObjectGone(self.not_found_error_msg('Domain'))\n\n return res['rows'][0]['schema'], res['rows'][0]['domain']", "def relevant_domains(self):\n pass", "def _get_domain(self):\n self.ensure_one()\n dom = [('loan_agreement_id.supplier_invoice_id.date_paid', '!=',\n False), ('loan_agreement_id.sale_id.state', 'not in',\n ('draft', 'cancel')),\n ('invoice_plan_id.ref_invoice_id.date_due', '<=',\n self.date_report),\n ('invoice_plan_id.ref_invoice_id.date_paid', '=', False),\n ('invoice_plan_id.ref_invoice_id.cancel_move_id', '=', False)]\n if self.partner_ids:\n dom += [('loan_agreement_id.borrower_partner_id', 'in',\n self.partner_ids.ids)]\n if self.bank_id:\n dom += [('loan_agreement_id.bank_id.bank', '=',\n self.bank_id.id)]\n if self.bank_branch_id:\n dom += [('loan_agreement_id.bank_id.bank_branch', '=',\n self.bank_branch_id.id)]\n return dom", "def domains(cls):\n return [cls.domain]", "def get_hosting_device_resources(self, context, id, complementary_id,\n tenant_id, mgmt_nw_id):\n pass", "def list_incidents_command():\n cursor = COLLECTION.find({}, {'_id': False})\n incidents = []\n results: list = []\n for incident in cursor:\n for name in incident:\n incidents.append(name)\n for i in incidents:\n if i not in results:\n results.append(i)\n human_readable = tableToMarkdown(f'List of incidents in collecion {COLLECTION_NAME}', results,\n headers=['Incidents'])\n return human_readable, {}, {}", "def getlist(self):\n self.__domainlist.sort()\n\n outstr = \"{ \"\n for index, domain in enumerate(self.__domainlist):\n outstr += domain + \" \"\n if (index % 50 == 0) and index > 0:\n outstr += \"}\\n{ \"\n\n outstr += \"}\"\n\n return outstr", "def get():\n\n logger.debug('Catch GET request by URL /api/departments.')\n departments = 
ds.get_all()\n return marshal_departments(departments)", "def get_all_companies_and_people():", "async def get_organizations(request: Request):\n redis = request.app.state.redis\n organizations_obj = orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n return [org for org in organizations_obj]", "def entities(address_book):\n return zope.component.getUtility(IEntities)", "def companies():\n res = requests.get('http://0.0.0.0:5002/companies')\n return jsonify(res.json())", "def hospital_list(self, request, **dict):\n\t\tdata = self.get_serializer(self.get_queryset(), many=True).data\n\t\treturn Response(data, status.HTTP_200_OK)", "def get_companies(self):\n response = self.do_request('/undertaking/list')\n if response:\n return response.json()", "def list(ctx, show_hidden, oath_type, period):\n ensure_validated(ctx)\n controller = ctx.obj['controller']\n creds = [cred\n for cred in controller.list()\n if show_hidden or not cred.is_hidden\n ]\n creds.sort()\n for cred in creds:\n click.echo(cred.printable_key, nl=False)\n if oath_type:\n click.echo(u', {}'.format(cred.oath_type.name), nl=False)\n if period:\n click.echo(', {}'.format(cred.period), nl=False)\n click.echo()", "def departments():\n # gather data from db about all departments\n return render_template(\"departments.html\")", "def test_api_can_get_all_departments(self):\n res = self.client().get(service_url)\n self.assertEqual(res.status_code, 200)\n self.assertIn('dep 1', str(res.data))\n self.assertIn('dep 2', str(res.data))\n self.assertIn('dep 3', str(res.data))", "def display_departmentlist():\n\tdeptid = 0\n\tprint\n\tprint '[*] Fetching departments list'\n\n\t# call the api function\n\tsupportdepartments = whmcs.getsupportdepartments()\n\tif supportdepartments == None:\n\t\tprint '[x] WHMCS getsupportdepartments API function call failed.'\n\t\tprint '[!] exiting.'\n\t\t_exit(0)\n\n\t# reconnect if ssl or url error orccured\n\twhile supportdepartments == 'sslerror' or supportdepartments == 'urlerror':\n\t\tprint '[!] Re-establishing connection after 5 seconds'\n\t\ttry: time.sleep(5)\n\t\texcept KeyboardInterrupt: print '\\n[!] exiting.'; _exit()\n\t\tsupportdepartments = whmcs.getsupportdepartments()\n\n\tresult = supportdepartments.get('result')\n\ttotalresults = supportdepartments.get('totalresults')\n\tif result != 'success' or totalresults == 0:\n\t\tprint '[x] Unable to find any support departments on (%s).' % (parser.get('whmcs', 'server'))\n\t\tprint '[x] %s.' % supportdepartments.get('message')\n\t\t_exit()\n\n\t#############################\n\t## Display Department List ##\n\t#############################\n\t# Eg: {'departments': { 'department': [{'id': ,'name': ,'awaitingreply': ,'opentickets': ,}, {...}]}}\n\n\tdepartments = supportdepartments.get('departments').get('department')\n\trowformat = '| %-5s | %-20s | %-15s | %-15s |'\n\theader = ('ID', 'Department', 'Awaiting Reply', 'Open Tickets')\n\ttitle = rowformat % header\n\tprint '-' * len(title)\n\tprint title\n\tprint '-' * len(title)\n\tdeptlist = []\n\tfor department in departments:\n\t\tdeptid = department['id']\n\t\tdeptlist.append(deptid)\n\t\tdeptname=department['name']\n\t\tif len(deptname) > 20:\n\t\t\tdeptname = deptname[:20-4]+'...'\n\t\tprint rowformat % (deptid, deptname, department.get('awaitingreply'), department.get('opentickets'))\n\t\tprint '-' * len(title)\n\n\t# Display department ID selection prompt\n\twhile 1:\n\t\ttry:\n\t\t\tdeptid = raw_input('[+] Select Department ID: ')\n\t\texcept KeyboardInterrupt:\n\t\t\tprint '\\n[!] 
exiting.cleanly.'\n\t\t\texit()\n\n\t\tif type(deptid) != int and deptid not in deptlist:\n\t\t\tprint '[!] Invalid Department ID (%s).' % deptid\n\t\telse:\n\t\t\tbreak\n\treturn deptid", "def info(self):\n\n return self.call(method='getDomain', args=[self.domainname])", "def get_departments():\n\n term = '201931' # Get current term from somewhered\n maxCount = 300\n\n # Call getsubjects\n params = {\n 'dataType': 'json',\n 'term': term,\n 'offset': 1,\n 'max': maxCount\n }\n\n r = requests.get(BASE_URL, params=params)\n\n json = ''\n # Attempt to convert it to JSON\n try:\n json = r.json()\n except:\n print('Error converting depts to JSON')\n\n return json", "def domain_list_all(self):\n page = 1\n on_page = 100\n ret = []\n while True:\n r = self.domain_list(page=page, on_page=on_page)\n ret += r['domains']\n if len(ret) >= r['total']:\n break\n page += 1\n return ret", "def _get_guests():\n _guests = list()\n try:\n conn = libvirt.open(None)\n if conn:\n _domains = conn.listAllDomains(0)\n else:\n raise ValueError('Failed to contact hypervisor.')\n except libvirt.libvirtError as e:\n _logger.error('Failed to contact hypervisor')\n raise ValueError('Failed to contact hypervisor.')\n finally:\n conn.close()\n return _domains", "def fetch_contacts(owner_account_id):\n resp = oauth.tapkey.get(f\"Owners/{owner_account_id}/Contacts?$select=id,identifier\")\n contacts = resp.json()\n return contacts", "def provDistributionList(ldp_conf, ldap_query, attrs):\n for dn, entry in ldapQuery(ldp_conf, ldap_query, attrs):\n distribution_list_name = entry['mail'][0]\n print 'cdl', distribution_list_name\n print 'adlm', distribution_list_name, \n if not 'zimbraMailForwardingAddress' in entry:\n continue\n for member in entry['zimbraMailForwardingAddress']:\n print member,\n # Break line, distribution list member finished\n print\n # Finish\n print", "def registered_dde_schemas(verbose=False):\n url = DDE_SCHEMA_BASE_URL + \"?field=_id&size=20\"\n if verbose:\n print(f'Loading registered DDE schema list from \"{url}\"')\n data = load_json_or_yaml(url)\n return [s[\"namespace\"] for s in data[\"hits\"]]", "def company_lists(self):\n return self.client.get('company/named-lists')", "def add_domain_routes(app):\n\n @app.route(\"/v1/list_agencies/\", methods=[\"GET\"])\n @get_dabs_sub_tier_agencies\n def list_agencies(cgac_sub_tiers, frec_sub_tiers):\n \"\"\" Get all agencies the current user has DABS access to.\n Args:\n cgac_sub_tiers - List of all CGAC SubTierAgencies generated by the get_dabs_sub_tier_agencies decorator,\n required to list only sub_tier_agencies that user has DABS permissions for\n frec_sub_tiers - List of all FREC SubTierAgencies generated by the get_dabs_sub_tier_agencies decorator,\n required to list only sub_tier_agencies that user has DABS permissions for\n \"\"\"\n return JsonResponse.create(StatusCode.OK, get_accessible_agencies(cgac_sub_tiers, frec_sub_tiers))\n\n @app.route(\"/v1/list_all_agencies/\", methods=[\"GET\"])\n def list_all_agencies():\n \"\"\" List all CGAC and FREC Agencies \"\"\"\n return JsonResponse.create(StatusCode.OK, get_all_agencies())\n\n @app.route(\"/v1/list_sub_tier_agencies/\", methods=[\"GET\"])\n @get_fabs_sub_tier_agencies\n def list_sub_tier_agencies(sub_tier_agencies):\n \"\"\" List all Sub-Tier Agencies user has FABS permissions for\n Args:\n sub_tier_agencies - List of all SubTierAgencies generated by the get_fabs_sub_tier_agencies decorator,\n required to list only sub_tier_agencies that user has FABS permissions for\n \"\"\"\n return 
JsonResponse.create(StatusCode.OK, organize_sub_tier_agencies(sub_tier_agencies))", "def get_search_domains(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmDevNet_GetSearchDomains', self.handle))", "def get_search_domains(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetSearchDomains', self.handle))", "def detect_domains (nffg):\n return {infra.domain for infra in nffg.infras}", "def get_search_domains(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrvCfgNet_GetSearchDomains', self.handle))", "def getFileCatalogHosts(thisExperiment):\n # Since FAX can download files from many sources, all hosts need to be queried for the replicas\n # In the case of ATLAS, TiersOfATLAS is used as a source of the hosts\n\n hosts_list = [thisExperiment.getFileCatalog()]\n\n tolog(\"Will extend file catalog host list\")\n hosts = thisExperiment.getFileCatalogHosts()\n if hosts != []:\n for host in hosts:\n if not host in hosts_list:\n hosts_list.append(host)\n else:\n tolog(\"(No additional hosts)\")\n\n tolog(\"File catalog host list: %s\" % str(hosts_list))\n\n return hosts_list", "def list_domain(self, feed_id=None):\n domains = self.list_resource(feed_id=feed_id,\n resource_type_id='Domain Host',\n cls=Domain,\n list_children=True,\n include_data=True)\n return domains", "def order_domain_values(csp, variable):\n domain = variable.domain\n returned = []\n \"\"\"\n print variable\n for a in csp.constraints[variable]:\n print a\n \"\"\"\n for x in domain:\n returned.append(conflict_count(csp, variable,x))\n\n ret = sorted(returned, key=itemgetter(1))\n rett = []\n for x in ret:\n rett.append(x[0])\n \n return rett\n # TODO implement this\n pass", "def get_entity_contracts():\n url = 'http://www.base.gov.pt/base2/rest/contratos?' \\\n 'adjudicatariaid=%d' % entity.base_id\n\n response = requests.get(url, headers={'Range': 'items=0-1000000'})\n return json.loads(response.text)", "def domain_command():\n # 1. Get input host from Demisto\n domain = demisto.args().get('domain')\n # 2. Get the host reputation from SlashNext API\n response = domain_lookup(domain=domain)\n if response.get('errorNo') != 0:\n return\n # 3. 
Parse and format the response\n dbot_score_cont, domain_cont = get_dbot_std_context(\n domain, 'Domain', response.get('threatData').get('verdict'), response.get('threatData').get('threatType'))\n\n snx_ioc_cont = get_snx_host_ioc_context(domain, 'Domain', response.get('threatData'))\n\n ec = {\n 'SlashNext.Domain(val.Value === obj.Value)': snx_ioc_cont,\n 'DBotScore': dbot_score_cont,\n 'Domain': domain_cont\n }\n\n domain = domain.encode('idna')\n\n title = 'SlashNext Phishing Incident Response - Domain Lookup\\n' \\\n '##### domain = {}'.format(domain.decode())\n\n md = tableToMarkdown(\n title,\n snx_ioc_cont,\n ['Value',\n 'Type',\n 'Verdict',\n 'ThreatStatus',\n 'ThreatName',\n 'ThreatType',\n 'FirstSeen',\n 'LastSeen']\n )\n\n return_outputs(md, ec, snx_ioc_cont)", "def registered_dde_schemas(self):\n url = DDE_SCHEMA_BASE_URL + \"?field=_id&size=20\"\n if self.verbose:\n print(f'Loading registered DDE schema list from \"{url}\"')\n data = load_json_or_yaml(url)\n return [s[\"namespace\"] for s in data[\"hits\"]]", "def test_get_specific_contact_list(self):\n contact_list = ContactList.objects.first()\n url, parsed = self.prepare_urls('v1:contact_list-detail', subdomain=self.company.subdomain, kwargs={'pk':contact_list.id})\n\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def domains(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"domains\")", "def ListOIDCClients(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def dfs_from_soap(collection, ctype, resolution=75, catom=False):\n if ctype not in [\"RDF\", \"ADF\"]:\n raise ValueError(\"'ctype' must be either 'RDF' or 'ADF'.\")\n \n from gblearn.base import nprocs\n if ctype == \"RDF\":\n x = np.linspace(0., collection.decomposer.rcut, resolution)\n else:\n x = np.linspace(0., np.pi, resolution)\n \n if nprocs is not None:\n from multiprocessing import Pool\n mpool = Pool(nprocs)\n compute = [mpool.apply_async(_multiproc_execute, (v, ctype, (x, catom)))\n for v in collection]\n dfs = []\n for j, df in enumerate(compute):\n dfs.append(df.get())\n else:\n dfs = [getattr(v, ctype)(x) for v in collection]\n return dfs", "def _identify_domains(self):\n\n domains = [FEMDomain(TR3, MeshPart(self.mesh, labels=(0,)), self.media, self.labels)]\n return domains", "def list_compartments(self, compartment_id, **kwargs):\n resource_path = \"/compartments\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"access_level\",\n \"compartment_id_in_subtree\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_compartments got unknown kwargs: {!r}\".format(extra_kwargs))\n\n if 'access_level' in kwargs:\n access_level_allowed_values = [\"ANY\", \"ACCESSIBLE\"]\n if kwargs['access_level'] not in access_level_allowed_values:\n raise ValueError(\n \"Invalid value for `access_level`, must be one of {0}\".format(access_level_allowed_values)\n )\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"accessLevel\": 
kwargs.get(\"access_level\", missing),\n \"compartmentIdInSubtree\": kwargs.get(\"compartment_id_in_subtree\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")", "def list_vms(connection: str = None) -> list:\n with libvirt.open(connection) as conn:\n return conn.listAllDomains()", "def get_dns_list(self):\n return self.get_ipv4_dns_list()", "def test_client_get_domains(mocker, client_all_domains_input):\n mocker.patch(\"tracker_client.client.get_auth_token\")\n mocker.patch(\"tracker_client.client.create_client\")\n test_client = Client()\n test_client.execute_query = mocker.MagicMock(return_value=client_all_domains_input)\n\n domain_list = test_client.get_domains()\n\n test_client.execute_query.assert_called_once_with(\n queries.GET_ALL_DOMAINS, {\"after\": \"abc\", \"search\": \"\"}\n )\n assert domain_list[0].domain_name == \"foo.bar\"\n assert domain_list[1].dmarc_phase == \"not implemented\"\n assert domain_list[2].last_ran == \"2021-01-27 23:24:26.911236\"\n assert domain_list[0].dkim_selectors == []", "def list_authorities():\n try:\n certs = client().certificates.get_authorities()\n if not certs:\n logger.info(\n 'ctl:cert:authorities', 'No certificate authorities found'\n )\n return\n llen = len(sorted(certs, key=lambda x: len(x[\"id\"]))[-1][\"id\"])\n for x in sorted(certs, key=lambda x: x[\"id\"]):\n click.echo(\n click.style(\n '{name: <{fill}}'.format(name=x[\"id\"], fill=llen + 3),\n fg=\"white\", bold=True) + \"Expires \" +\n click.style(x[\"expiry\"].strftime(\"%c\"), fg=\"yellow\")\n )\n except Exception as e:\n raise CLIException(str(e))", "def organization_list(request):\n return [o.slug for o in Organization.objects.all()]" ]
[ "0.57624483", "0.5618712", "0.5579227", "0.55783147", "0.5538795", "0.54575324", "0.5412407", "0.53967834", "0.5350825", "0.5334975", "0.52671653", "0.52478313", "0.52312326", "0.52176356", "0.5216194", "0.520539", "0.5197498", "0.5193738", "0.51901394", "0.5165269", "0.51523757", "0.5151562", "0.51459605", "0.5128849", "0.5116295", "0.5092359", "0.50779206", "0.5067776", "0.5048499", "0.50379896", "0.5013391", "0.5008615", "0.50047386", "0.4979964", "0.49627575", "0.49442676", "0.48797157", "0.48691636", "0.4864404", "0.48573145", "0.4849366", "0.48452792", "0.48347571", "0.48282242", "0.48281494", "0.48189062", "0.48125827", "0.48029968", "0.47972998", "0.4791622", "0.4769876", "0.47693294", "0.47691184", "0.4768166", "0.47651747", "0.47590917", "0.4758091", "0.47511593", "0.47489184", "0.47466904", "0.47399846", "0.47389007", "0.47231537", "0.4719293", "0.47144657", "0.4712355", "0.4710565", "0.47069472", "0.46998012", "0.4683695", "0.4682836", "0.4676562", "0.4666666", "0.4664519", "0.46644264", "0.4658309", "0.46536282", "0.4650979", "0.463571", "0.4632352", "0.46312043", "0.4630879", "0.46281275", "0.4624831", "0.46245992", "0.46226788", "0.46215716", "0.46141538", "0.46070957", "0.46057042", "0.46018884", "0.45997772", "0.45940268", "0.45864832", "0.45825705", "0.45810768", "0.45807135", "0.45799837", "0.45737538", "0.4572785" ]
0.65855885
0
Lists the groups in your tenancy. You must specify your tenancy's OCID as the value for the compartment ID (remember that the tenancy is simply the root compartment). See `Where to Get the Tenancy's OCID and User's OCID`__.
def list_groups(self, compartment_id, **kwargs): resource_path = "/groups" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_groups got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[Group]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[Group]")
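A minimal usage sketch for the method above, again assuming an OCI IdentityClient as in the public Python SDK (names are illustrative). Because results are paginated through the page/limit query parameters, the SDK's pagination helper can follow the opc-next-page tokens and collect every group in the tenancy:

import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Follows opc-next-page tokens driven by the `page`/`limit` query parameters.
groups = oci.pagination.list_call_get_all_results(
    identity.list_groups, config["tenancy"]
).data
for group in groups:
    print(group.id, group.name)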
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def groups():\n access_token = session['access_token']\n return \"%s\" % list_groups(access_token)", "def list_groups(access_token):\n request_url = OKTA_URL + \"api/v1/groups\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def list_groups(self):\n return self.get_admin(\"groups\")", "def list_groups():\n return _list_tindyb_unique_values(\"group\", dbpath=__dbpath__)", "def list(request):\n return render_to_response('rteacher/manage_groups_list.html', request, **klist(\n request=request\n ))", "def list_groups(request):\n groups = models.UserGroup.all().order('name')\n return utility.respond(request, 'admin/list_groups', {'groups': groups})", "def get_groups(self, obj):\n groupsForCompany = get_groups_with_perms(obj)\n return [x.id for x in groupsForCompany]", "def get_groups(self):\n return Client._get(self)", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def get_list_groups(self):\n list_response = requests.get(self.groups_url, headers=self.headers)\n return list_response.json()[\"groups\"]", "def get_group_list(org_id):\n tList = get_template('app/usermanagementorg/group_list.html')\n groups = get_groups(org_id)\n return tList.render(Context({ 'groups': groups, }))", "def get_groups(self):\n response = self._get(\"groups\")\n\n return response.json()", "def getGroups():\r\n return Group.getGroups()", "def groups(self):\n return self.get_data(\"groups\")", "def ObjectGroups(object_id):\n rhino_object = rhutil.coercerhinoobject(object_id, True, True)\n if rhino_object.GroupCount<1: return []\n group_indices = rhino_object.GetGroupList()\n rc = [scriptcontext.doc.Groups.GroupName(index) for index in group_indices]\n return rc", "def list_groups(self):\n return self._get(\"cloudConnectorGroups\").list", "def get_all_groups():\n return jsonify(admin.get_all_groups(current_app.scoped_session()))", "def list_groups(self, **params):\n url = 'groups'\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def groups(self):\r\n return resources.Groups(self)", "def get_all(isamAppliance, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving groups\", \"/sysaccount/groups/v1\")", "def groups(self):\n return []", "def list_groups(args):\n\n for group in get_groups(args):\n print(group)", "def get_groups():\n\n # FUTURE: Properly reutrn error, Mongo is giving it's own\n if current_user.groups:\n return Response(response=json.dumps([g.to_dict() for g in current_user.groups]), status=200, mimetype=\"application/json\")\n else:\n return return_json_error('No groups assigned to', 500)", "def getGroups(self):\n return [g[0] for g in grp.getgrall()]", "def get_queryset(self):\n user = self.request.user\n return user.group_set.all()", "def list_group():\n data, code, message = FIELD_SERVICE.list_group()\n return __result(data, code, message)", "def test_get_groups(self):\n response = self.client.get_groups()\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v1/groups\")\n 
self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def get_group_list(ip_address, headers):\n group_list = None\n group_url = 'https://%s/api/GroupService/Groups' % ip_address\n response = requests.get(group_url, headers=headers, verify=False)\n if response.status_code == 200:\n group_response = response.json()\n if group_response['@odata.count'] > 0:\n group_list = [x['Id'] for x in group_response['value']]\n else:\n print(\"No groups found at \", ip_address)\n else:\n print(\"No groups found at \", ip_address)\n return group_list", "def get_groups():\r\n if 'username' not in flask.session:\r\n return flask.jsonify(**{'message': 'Forbidden', 'status_code': 403})\r\n\r\n context = {}\r\n context['url'] = flask.request.path\r\n context['groups'] = []\r\n\r\n # Retreive query variables\r\n query_num_groups = flask.request.args.get('size') \r\n query_page = flask.request.args.get('page') \r\n num_groups = int(query_num_groups) if query_num_groups != None else 10\r\n page_number = int(query_page) if query_page != None else 0\r\n\r\n groups = get_group_listing(flask.session['username'], \r\n num_groups, page_number)\r\n for g in groups:\r\n context['groups'].append({\r\n 'id': g[0],\r\n 'name': g[1]\r\n })\r\n\r\n if (num_groups == 10):\r\n context['next'] = '{}?page={}'.format(context['url'], page_number + 1)\r\n else:\r\n context['next'] = '{}?page={}&size={}'.format(context['url'], \r\n page_number + 1, num_groups)\r\n\r\n return flask.jsonify(**context)", "def get_group_list(self):\n return [(item[0], item[1][0]) for item in self.contacts_by_group_list]", "def get_all():\n\n return AGE_GROUPS", "def product_group_list(obj):\n client = get_client(obj)\n\n res = client.product_group_list()\n\n print(json.dumps(res, indent=4))", "def capacitygroup_list(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_capacitygroup_list(cmd_ctx, cpc, options))", "def groups(self):\r\n return users.Groups(self)", "def get_all_groups(self):\n self.cursor.execute(\"select * from groups\")\n self.connection.commit()\n return self.cursor.fetchall()", "def getListOfGroups(self, *args):\n return _libsbml.GroupsModelPlugin_getListOfGroups(self, *args)", "def list_groups_factory(context, request):\n return ListGroupsService(session=request.db,\n request_authority=request.authority,\n route_url=request.route_url)", "def get_all_access_groups():\n\treturn {\"access_groups\": [ag.serialize for ag in AccessGroup.query.all()]}, 200", "def getCSPGroups(**kwargs):\n sessiontoken = kwargs['sessiontoken']\n ORG_ID = kwargs['ORG_ID']\n strCSPProdURL = kwargs['strCSPProdURL']\n\n if kwargs['search_term'] is None:\n json_response = get_csp_groups_json(strCSPProdURL, ORG_ID, sessiontoken)\n print(\"Got the groups\")\n if json_response is not None:\n groups = json_response['results']\n num_groups = len(groups)\n if num_groups == 0:\n print(\"No results returned.\")\n else:\n print(str(num_groups) + \" result\" + (\"s\" if num_groups > 1 else \"\") + \" returned:\")\n table = PrettyTable(['ID', 'Name', 'Group Type', 'User Count'])\n for grp in groups:\n table.add_row([grp['id'], grp['displayName'], grp['groupType'], grp['usersCount']])\n print(table)\n else:\n search_term = kwargs['search_term']\n json_response = get_csp_groups_searchterm_json(strCSPProdURL, ORG_ID, sessiontoken, search_term)\n if json_response is not None:\n groups = json_response['results']\n num_groups = len(groups)\n if num_groups == 0:\n print(\"No results returned.\")\n else:\n print(str(num_groups) + \" 
result\" + (\"s\" if num_groups > 1 else \"\") + \" returned:\")\n table = PrettyTable(['ID', 'Name', 'Group Type', 'User Count'])\n for grp in groups:\n table.add_row([grp['id'], grp['displayName'], grp['groupType'], grp['usersCount']])\n print(table)\n else:\n print(\"API Error\")\n sys.exit(1)", "def list_projects(ctx):\n pprint(ctx.obj.groups.get().data)", "def get(self):\n status = ErrorCode.SUCCESS\n try:\n res = []\n cid = self.get_argument('cid', None)\n if not (cid is None):\n res = QueryHelper.get_groups_by_cid(cid, self.db)\n self.write_ret(status,\n dict_=DotDict(res=res))\n except Exception as e:\n logging.exception(\"[UWEB] Get groups failed. Exception: %s\",\n e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def return_groups(app_filter):\n groups = \"\"\n if app_filter == 'APP_GROUP' or app_filter == 'OKTA_GROUP':\n groups = get_okta_groups(url, filter='type eq \"' + app_filter + '\"')\n #print(groups)\n else:\n print(\"Not a valid group filter, must be APP_GROUP or OKTA_GROUP.\")\n return groups", "def groups(self):\n return self._groups", "def groups(self):\n return self._groups", "def groups(self):\n return self._groups", "def list(self):\n METHOD = 'GET'\n API_PATH = '/groups/list'\n\n # Make REST call\n resp = self._rest_call[METHOD](API_PATH)\n\n if resp.status_code == 200:\n return resp.json().get('group_names')\n\n elif resp.status_code == 403:\n raise AuthorizationError(\"User is not authorized or token is incorrect.\")\n\n else:\n if resp.json().get(\"error_code\") in ERROR_CODES:\n raise ERROR_CODES[resp.json().get('error_code')](resp.json().get('message'))\n else:\n raise APIError(\"Response code {0}: {1} {2}\".format(resp.status_code,\n resp.json().get('error_code'),\n resp.json().get('message')))", "def grouplist(self, namespace=None):\n source = self._source(namespace)\n return self._list(source, 'list')", "def list_secgroups(self, name=None):", "def get_pingroups(self):\n return self.groups[:]", "def get_groups(self, content=None):\n if content:\n ctype = ContentType.objects.get_for_model(content)\n prrs = PrincipalRoleRelation.objects.filter(role=self,\n content_id__in=(None, content.id),\n content_type__in=(None, ctype)).exclude(group=None)\n else:\n prrs = PrincipalRoleRelation.objects.filter(role=self,\n content_id=None, content_type=None).exclude(group=None)\n\n return [prr.group for prr in prrs]", "def get_groups(self, env, token, memcache_client=None):\n groups = None\n key = '%s/token/%s' % (self.reseller_prefix, token)\n cached_auth_data = memcache_client and memcache_client.get(key)\n if cached_auth_data:\n start, expiration, groups = cached_auth_data\n if time() - start > expiration:\n groups = None\n\n headers = {}\n if env.get('HTTP_AUTHORIZATION'):\n groups = None\n headers[\"Authorization\"] = env.get('HTTP_AUTHORIZATION')\n\n if not groups:\n with Timeout(self.timeout):\n conn = http_connect(self.auth_host, self.auth_port, 'GET',\n '%stoken/%s' % (self.auth_prefix, token),\n headers, ssl=self.ssl)\n\n resp = conn.getresponse()\n resp.read()\n conn.close()\n if resp.status // 100 != 2:\n return None\n expiration = float(resp.getheader('x-auth-ttl'))\n groups = resp.getheader('x-auth-groups')\n if memcache_client:\n memcache_client.set(key, (time(), expiration, groups),\n timeout=expiration)\n\n if env.get('HTTP_AUTHORIZATION'):\n account, user, sign = \\\n env['HTTP_AUTHORIZATION'].split(' ')[-1].split(':')\n cfaccount = resp.getheader('x-auth-account-suffix')\n path = env['PATH_INFO']\n env['PATH_INFO'] = \\\n 
path.replace(\"%s:%s\" % (account, user), cfaccount, 1)\n\n return groups", "def list_template_groups(context):\n template_groups = get_oneoffixx_template_groups()\n terms = []\n for group in template_groups:\n terms.append(SimpleVocabulary.createTerm(group.get(\"id\"),\n group.get(\"id\"),\n group.get(\"localizedName\")))\n return MutableObjectVocabulary(terms)", "def get_all_as_groups(as_connection):\n as_groups_list = []\n get_as_groups = as_connection.get_all_groups()\n as_groups_list.extend(get_as_groups)\n\n token = get_as_groups.next_token\n while token is not None:\n get_as_groups = as_connection.get_all_groups(\n next_token=token)\n as_groups_list.extend(get_as_groups)\n token = get_as_groups.next_token\n print \"Processed {0} AutoScaling Group\"\\\n .format(len(as_groups_list))\n return as_groups_list", "def test_get_resource_group_list(self):\n pass", "def getGroups(self, resource='groups', rpp=None, page=None):\n\n groups = list()\n res = self.getRequest(resource, rpp, page)\n ppObj = vsdModels.Pagination(**res)\n\n for g in ppObj.items:\n group = vsdModels.Group(**g)\n groups.append(group)\n\n return groups, ppObj", "def test_api_v1_groups_get(self):\n pass", "def list_group(self, groupname):\n return self.get_admin(\"groups/{}\".format(groupname))", "def RetrieveAllGroups(**argd):\n flag, ret = CGateway.core.RetrieveAllGroup(argd[\"session\"])\n xFlag = CGateway._HandleExceptionAndUnauthorized(flag, ret, argd[\"session\"])\n if xFlag is not None:\n return xFlag\n hmBuilder = []\n for hm in ret:\n hmBuilder.append(hm.ToJsonDict())\n return CGateway._SuccessResponse({'return': hmBuilder})", "def get_groups(self):\n return [self.primary_group] + list(self.secondary_groups)", "def list(self, detailed=True, search_opts=None):\n\n query_string = utils.build_query_param(search_opts)\n\n detail = \"\"\n if detailed:\n detail = \"/detail\"\n\n return self._list(\"/consistencygroups%s%s\" % (detail, query_string),\n \"consistencygroups\")", "def get_group(tkn: Token = Depends(from_authotization_header_nondyn),):\n assert_has_clearance(tkn.owner, \"sni.read_group\")\n return [\n GetGroupShortOut(group_id=str(grp.pk), group_name=grp.group_name)\n for grp in Group.objects().order_by(\"group_name\")\n ]", "def security_groups(self, oid):\n try:\n path = u'/servers/%s/os-security-groups' % oid\n res = self.client.call(path, u'GET', data=u'', \n token=self.manager.identity.token)\n self.logger.debug(u'Get openstack server security groups: %s' % truncate(res))\n return res[0][u'security_groups']\n except Exception as error:\n self.logger.error(error, exc_info=True)\n data = []\n return res", "def get_groups():\n\n groups = [\"shelter\", \"sharing\", \"unsheltered\", \"motel\"]\n\n for item in groups:\n group = Group(group_name=item)\n\n db.session.add(group)\n\n db.session.commit()", "def lists(start=None, top=None):\n url = 'persongroups'\n params = {\n 'start': start,\n 'top': top,\n }\n\n return util.request('GET', url, params=params)", "def test_get_groups(self):\n pass", "def test_get_groups(self):\n pass", "def get_RegisteredGroupsList(test_case, only_discoverable=False, override_headers=null, override_cookies=null):\n # type: (AnyMagpieTestCaseType, bool, Optional[HeadersType], Optional[CookiesType]) -> List[Str]\n app_or_url = get_app_or_url(test_case)\n path = \"/register/groups\" if only_discoverable else \"/groups\"\n resp = test_request(app_or_url, \"GET\", path,\n headers=override_headers if override_headers is not null else test_case.json_headers,\n cookies=override_cookies if 
override_cookies is not null else test_case.cookies)\n json_body = check_response_basic_info(resp, 200, expected_method=\"GET\")\n return json_body[\"group_names\"]", "def customer_group_list(h):\n global html\n html = h\n\n common_elements = customer_common_elements(group=True)\n\n css_list = common_elements[\"css_list\"]\n\n javascript_list = common_elements[\"javascript_list\"]\n\n all_btn = common_elements[\"all_btn\"]\n\n html.new_header(\"Customer Organization\", \"customer_group_management.py\", all_btn, css_list, javascript_list)\n customer_string = \"\"\"\n <div>\n <table id=\"customers\" cellpadding=\"0\" cellspacing=\"0\" border=\"0\" class=\"display\" style=\"text-align:center\">\n <thead>\n <tr>\n <th>\n Company System Role\n </th>\n <th>\n Company Name\n </th>\n <th>\n Company Address\n </th>\n <th>\n Company Telephone\n </th>\n <th>\n Company Website\n </th>\n <th>\n Company Business\n </th>\n <th>\n Company Registration Number\n </th>\n <th>\n Company VAT Number\n </th>\n <th>\n Company Sales Contact\n </th>\n <th>\n Company Purchase Contact\n </th>\n <th>\n Actions\n </th>\n </tr>\n </thead>\n </table>\n </div>\n \"\"\"\n customer_string += \"\"\"\n <script>\n get_customer_groups();\n </script>\n \"\"\"\n html.write(customer_string)\n html.new_footer()\n pass", "def get_groups(self):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_get_groups_query+\" ORDER BY $groupname_field$\",{'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: get_groups: %s\" % (query,))\n\n cursor.execute(query)\n desc=[i[0] for i in cursor.description]\n for row in cursor:\n dictrow=dict(zip(desc,row))\n yield dictrow[self.sql_groupname_field]", "def getOrgsAndStationGroups(self, **kwargs):\n\n searchQuery = self._client.factory.create('getOrgsAndStationGroupsSearchQuery')\n for k, v in kwargs.items():\n setattr(searchQuery, k, v)\n response = self._soap_service.getOrgsAndStationGroups(searchQuery)\n return CPAPIResponse(response)", "def groups(self):\n # type: (...) 
-> Set[str]\n return self._groups", "def get(self, *args):\n return _libsbml.ListOfGroups_get(self, *args)", "def groups(request, group_id = 1):\n group = get_object_or_404(ResearchGroup, pk=group_id)\n groups = ResearchGroup.objects.order_by('name')\n group_list = []\n for g in groups:\n if g.id is not group.id:\n group_list.append({'name': g.name, 'id': g.id})\n # default showing group\n # chosen group info\n group_info = {}\n group_info['name'] = group.name\n personnel = list()\n for p in group.personnel.all():\n personnel.append(p.username)\n group_info['personnel'] = \" \".join(str(x) for x in personnel)\n group_info['projects'] = group.projects\n group_info['directions'] = group.directions\n group_info['papers'] = group.papers.split()\n context = {'group_list': group_list, 'group_info': group_info}\n return render(request, 'sacms/groups.html', context)", "def compute_server_groups(self):\n path = '/os-server-groups'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack server groups: %s' % truncate(res))\n return res[0]['server_groups']", "async def get_groups(self) -> list:\n results = await self._api.call('group', 'get_groups')\n return [models.Group(grp) for grp in results.payload]", "def get_groups(self, customer_id='my_customer'):\n try:\n paged_results = self.repository.groups.list(customer=customer_id)\n flattened_results = api_helpers.flatten_list_results(\n paged_results, 'groups')\n LOGGER.debug('Getting all the groups for customer_id = %s,'\n ' flattened_results = %s',\n customer_id, flattened_results)\n return flattened_results\n except RefreshError as e:\n # Authentication failed, log before raise.\n LOGGER.exception(GSUITE_AUTH_FAILURE_MESSAGE)\n raise e\n except (errors.HttpError, HttpLib2Error) as e:\n raise api_errors.ApiExecutionError('groups', e)", "def groups(self) -> list[Group]:\n return self._connection.groups", "def generate_groups(ctx):\n asyncio.run(generate_groups_impl(ctx.obj[\"config\"]))", "def get_identity_groups(self):\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.identitygroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/identitygroup'.format(self.url_base))\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = True\n\t\t\t###\n\t\t\tx = ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']\n\t\t\tprint (\"x\", len(x))\n\t\t\tprint (x[0])\n\t\t\tfor element in x[0]:\n\t\t\t\tprint (element,x[0][element])\n\t\t\t###\n\t\t\tresult['response'] = [(i['@name'], i['@id'], i['@description'],i['link']['@href'])\n\t\t\t\t\t\t\t\t for i in ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def set_up_groups(self):\n groups = []\n groups.append({'groupname': 'th',\n 'grouptitle': 'TH',\n 'path': '/'})\n groups.append({'groupname': 'neutronics',\n 'grouptitle': 'Neutronics',\n 'path': '/'})\n groups.append({'groupname': 'metadata',\n 'grouptitle': 'Simulation Metadata',\n 'path': '/'})\n return groups", "def fusion_api_get_directory_groups(self, body, api=None, headers=None):\n return self.logindomain.groups(body, api, headers)", "def get_all_groups(self):\n return self.groups + ['all']", "def 
get_cli_groups():\n\n return get_component(CLIPackage.COMPONENT_NAME).get_cli_groups()", "def get_groups(args):\n\n args.suppress_verify_output = True\n if verify(args) != 0:\n # restore stdout\n sys.stdout = sys.__stdout__\n print(\"Config file not valid, please use the verify function to debug\")\n return []\n\n with open(args.file, \"r\") as f:\n config_json = json.load(f)\n\n groups = []\n for group in config_json[\"groups\"]:\n groups.append(group[\"name\"])\n return groups", "def request_access_to_groups(self, ceph):\n for ceph_group in (\"volumes\", \"images\", \"vms\"):\n ceph.request_access_to_group(\n name=ceph_group,\n object_prefix_permissions={\"class-read\": [\"rbd_children\"]},\n permission=\"rwx\",\n )", "def get(isamAppliance, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieve a list of all ISAM groups\",\n \"{0}\".format(uri),\n requires_modules=requires_modules, requires_version=requires_version)", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def hostgroup_list(self):\n return self.ezx.get_hostgroup_list()", "def get_groups(id_project):\n data = sql.list_groups(id_project)\n names = [(d['id'], d['name']) for d in data]\n return names", "def get_groups(phone_num):\n\n phone_num = json.loads(phone_num)\n resp = con.get_groups(phone_num)\n\n emit(\"groups_update\", json.dumps(resp))", "def users_groups():\n if request.method == \"GET\":\n query = {\"token\": ciconnect_api_token, \"globus_id\": session[\"primary_identity\"]}\n # Get user info to derive unix name\n user = get_user_info(session)\n unix_name = user[\"metadata\"][\"unix_name\"]\n # Get user's group membership info based on session unix name\n users_group_memberships = get_user_group_memberships(session, unix_name)\n\n multiplexJson = {}\n group_membership_status = {}\n for group in users_group_memberships:\n if group[\"state\"] not in [\"nonmember\"]:\n group_name = group[\"name\"]\n group_query = (\n \"/v1alpha1/groups/\" + group_name + \"?token=\" + query[\"token\"]\n )\n multiplexJson[group_query] = {\"method\": \"GET\"}\n group_membership_status[group_query] = group[\"state\"]\n # POST request for multiplex return\n multiplex = get_multiplex(multiplexJson)\n\n users_groups = []\n for group in multiplex:\n if (\n session[\"url_host\"][\"unix_name\"]\n in (json.loads(multiplex[group][\"body\"])[\"metadata\"][\"name\"])\n ) and (\n len(\n (json.loads(multiplex[group][\"body\"])[\"metadata\"][\"name\"]).split(\n \".\"\n )\n )\n > 1\n ):\n users_groups.append(\n (\n json.loads(multiplex[group][\"body\"]),\n group_membership_status[group],\n )\n )\n # users_groups = [group for group in users_groups if len(group['name'].split('.')) == 3]\n\n # Query user's pending project requests\n pending_project_requests = get_user_pending_project_requests(unix_name)\n # Check user's member status of root connect group\n connect_group = session[\"url_host\"][\"unix_name\"]\n user_status = get_user_connect_status(unix_name, connect_group)\n\n domain_name = domain_name_edgecase()\n\n with open(\n brand_dir\n + \"/\"\n + domain_name\n + \"/form_descriptions/group_unix_name_description.md\",\n \"r\",\n ) as file:\n group_unix_name_description = file.read()\n\n return 
render_template(\n \"users_groups.html\",\n groups=users_groups,\n project_requests=pending_project_requests,\n user_status=user_status,\n group_unix_name_description=group_unix_name_description,\n )", "def get_groups(self):\n result = self.conn.usergroup.get(status=0, output='extend', selectUsers=\"extend\")\n groups = {group[\"name\"]: Group(\n name=group[\"name\"],\n id=group[\"usrgrpid\"],\n members=group[\"users\"],\n ) for group in result}\n return groups", "def get_groups(self):\n user_node = self.get()\n grouplist = []\n if user_node:\n for rel in graph.match(start_node=user_node, rel_type='in'):\n grouplist.append(Usergroup(id=rel.end_node()['id']))\n return grouplist", "def groups_by_id(request, gid):\r\n group = Group()\r\n filtered_groups = group.query({\"gid\":str(gid)})\r\n if len(filtered_groups) == 0:\r\n badRequest(\"No available group under GID \"+str(gid))\r\n return HttpResponse(json.dumps(filtered_groups))", "def listGroups(self):\n return tuple(Group.create(groupName, self._modelDataManager) for groupName in self.pm_getUserManager().listGroups())", "def test_getGroups(self):\n\t\turl = \"/groups/\"\n\t\tresponse = self.client.get(url, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(response.data[\"count\"], 1)" ]
[ "0.71493185", "0.7144411", "0.71203303", "0.6876979", "0.68021864", "0.6760384", "0.6710377", "0.67021704", "0.6697574", "0.6696664", "0.6681032", "0.6635194", "0.66194427", "0.6614419", "0.65979445", "0.65908253", "0.65883386", "0.65745854", "0.6557424", "0.6525643", "0.64546686", "0.6342797", "0.63134116", "0.6297702", "0.6291358", "0.6252941", "0.6229558", "0.62225723", "0.61725026", "0.61374915", "0.61302817", "0.6116392", "0.6115861", "0.6080164", "0.6059959", "0.60516083", "0.6047407", "0.6042616", "0.6037696", "0.60289323", "0.60276335", "0.60148066", "0.5979653", "0.59789026", "0.59789026", "0.59789026", "0.59609616", "0.5955679", "0.59556305", "0.5950014", "0.5927906", "0.5919374", "0.5898584", "0.5881367", "0.5874029", "0.5868172", "0.5865939", "0.5852529", "0.58488584", "0.58418363", "0.5836977", "0.5832522", "0.5829311", "0.58071613", "0.580511", "0.5804812", "0.5804812", "0.5793067", "0.5782178", "0.5777204", "0.57687074", "0.57657456", "0.5757799", "0.57567024", "0.5755174", "0.5745242", "0.5744569", "0.57378614", "0.57360107", "0.5731993", "0.57096875", "0.57055295", "0.5703141", "0.57023346", "0.5693158", "0.56913894", "0.5683292", "0.56817245", "0.56817245", "0.56817245", "0.56817245", "0.5681427", "0.5678393", "0.56721795", "0.56693083", "0.5667474", "0.566201", "0.56504387", "0.56394076", "0.5634459" ]
0.6430042
21
Lists the groups defined within the specified identity provider. You must supply the identity provider's OCID as the value for `identity_provider_id`.
def list_identity_provider_groups(self, identity_provider_id, **kwargs): resource_path = "/identityProviders/{identityProviderId}/groups" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_identity_provider_groups got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "identityProviderId": identity_provider_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[IdentityProviderGroupSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[IdentityProviderGroupSummary]")
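A hedged sketch of calling this operation via the oci Python SDK; the identity provider OCID below is a placeholder, not a real value, and `display_name` / `external_identifier` are attributes of the IdentityProviderGroupSummary model named in the response type above:

import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

idp_ocid = "ocid1.saml2idp.oc1..example"  # placeholder OCID for illustration only
response = identity.list_identity_provider_groups(
    identity_provider_id=idp_ocid,
    limit=25)
for idp_group in response.data:
    print(idp_group.display_name, idp_group.external_identifier)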
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_groups(self):\n return self.get_admin(\"groups\")", "def get_list_groups(self):\n list_response = requests.get(self.groups_url, headers=self.headers)\n return list_response.json()[\"groups\"]", "def list_groups(self):\n return self._get(\"cloudConnectorGroups\").list", "def list_groups(self, **params):\n url = 'groups'\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def list(self):\n METHOD = 'GET'\n API_PATH = '/groups/list'\n\n # Make REST call\n resp = self._rest_call[METHOD](API_PATH)\n\n if resp.status_code == 200:\n return resp.json().get('group_names')\n\n elif resp.status_code == 403:\n raise AuthorizationError(\"User is not authorized or token is incorrect.\")\n\n else:\n if resp.json().get(\"error_code\") in ERROR_CODES:\n raise ERROR_CODES[resp.json().get('error_code')](resp.json().get('message'))\n else:\n raise APIError(\"Response code {0}: {1} {2}\".format(resp.status_code,\n resp.json().get('error_code'),\n resp.json().get('message')))", "def get_groups(self):\n response = self._get(\"groups\")\n\n return response.json()", "def groups():\n access_token = session['access_token']\n return \"%s\" % list_groups(access_token)", "def get_identity_groups(self):\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.identitygroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/identitygroup'.format(self.url_base))\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = True\n\t\t\t###\n\t\t\tx = ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']\n\t\t\tprint (\"x\", len(x))\n\t\t\tprint (x[0])\n\t\t\tfor element in x[0]:\n\t\t\t\tprint (element,x[0][element])\n\t\t\t###\n\t\t\tresult['response'] = [(i['@name'], i['@id'], i['@description'],i['link']['@href'])\n\t\t\t\t\t\t\t\t for i in ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def list_groups(request):\n groups = models.UserGroup.all().order('name')\n return utility.respond(request, 'admin/list_groups', {'groups': groups})", "def get_groups(self):\n return Client._get(self)", "def get_group_list(org_id):\n tList = get_template('app/usermanagementorg/group_list.html')\n groups = get_groups(org_id)\n return tList.render(Context({ 'groups': groups, }))", "def getListOfGroups(self, *args):\n return _libsbml.GroupsModelPlugin_getListOfGroups(self, *args)", "def list_groups(self, **filters):\n\t\tresult = self.client.get(self._endpoint + \"/group\", params=filters)\n\t\treturn PaginatedList(result, Group, (self.user_id, self.site_id), \"group_id\")", "def listGroups(self):\n return tuple(Group.create(groupName, self._modelDataManager) for groupName in self.pm_getUserManager().listGroups())", "def get_all_groups(self):\n self.cursor.execute(\"select * from groups\")\n self.connection.commit()\n return self.cursor.fetchall()", "def get_groups(self):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_get_groups_query+\" ORDER BY $groupname_field$\",{'groupname_field':self.sql_groupname_field})\n 
self.log.debug(\"sqlflexibleauthstore: get_groups: %s\" % (query,))\n\n cursor.execute(query)\n desc=[i[0] for i in cursor.description]\n for row in cursor:\n dictrow=dict(zip(desc,row))\n yield dictrow[self.sql_groupname_field]", "def getGroups():\r\n return Group.getGroups()", "def get_all_groups():\n return jsonify(admin.get_all_groups(current_app.scoped_session()))", "async def get_groups(self) -> list:\n results = await self._api.call('group', 'get_groups')\n return [models.Group(grp) for grp in results.payload]", "def list(request):\n return render_to_response('rteacher/manage_groups_list.html', request, **klist(\n request=request\n ))", "def list_groups():\n return _list_tindyb_unique_values(\"group\", dbpath=__dbpath__)", "def groups(self):\n return self.get_data(\"groups\")", "def groups(self):\r\n return resources.Groups(self)", "def list_groups(access_token):\n request_url = OKTA_URL + \"api/v1/groups\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def groups(self):\n if not self._groups:\n self._groups = self._init_repository(\n _AdminDirectoryGroupsRepository)\n return self._groups", "def get_groups(self):\n result = self.conn.usergroup.get(status=0, output='extend', selectUsers=\"extend\")\n groups = {group[\"name\"]: Group(\n name=group[\"name\"],\n id=group[\"usrgrpid\"],\n members=group[\"users\"],\n ) for group in result}\n return groups", "def groups(self):\r\n return users.Groups(self)", "def list_groups_factory(context, request):\n return ListGroupsService(session=request.db,\n request_authority=request.authority,\n route_url=request.route_url)", "def security_groups(self, oid):\n try:\n path = u'/servers/%s/os-security-groups' % oid\n res = self.client.call(path, u'GET', data=u'', \n token=self.manager.identity.token)\n self.logger.debug(u'Get openstack server security groups: %s' % truncate(res))\n return res[0][u'security_groups']\n except Exception as error:\n self.logger.error(error, exc_info=True)\n data = []\n return res", "def get_all(isamAppliance, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving groups\", \"/sysaccount/groups/v1\")", "def list_security_groups(self, retrieve_all=True, **_params):\r\n return self.list('security_groups', self.security_groups_path,\r\n retrieve_all, **_params)", "def get_groups(self, obj):\n groupsForCompany = get_groups_with_perms(obj)\n return [x.id for x in groupsForCompany]", "def groups(self) -> list[Group]:\n return self._connection.groups", "def get_RegisteredGroupsList(test_case, only_discoverable=False, override_headers=null, override_cookies=null):\n # type: (AnyMagpieTestCaseType, bool, Optional[HeadersType], Optional[CookiesType]) -> List[Str]\n app_or_url = get_app_or_url(test_case)\n path = \"/register/groups\" if only_discoverable else \"/groups\"\n resp = test_request(app_or_url, \"GET\", path,\n headers=override_headers if override_headers is not null else test_case.json_headers,\n cookies=override_cookies if override_cookies is not null else test_case.cookies)\n json_body = check_response_basic_info(resp, 200, expected_method=\"GET\")\n return json_body[\"group_names\"]", 
"def groups(self):\r\n if not self.user_id:\r\n raise base.MethodNotSupported()\r\n\r\n return groups.Groups(self)", "def list_group(self, groupname):\n return self.get_admin(\"groups/{}\".format(groupname))", "def get_group_list(ip_address, headers):\n group_list = None\n group_url = 'https://%s/api/GroupService/Groups' % ip_address\n response = requests.get(group_url, headers=headers, verify=False)\n if response.status_code == 200:\n group_response = response.json()\n if group_response['@odata.count'] > 0:\n group_list = [x['Id'] for x in group_response['value']]\n else:\n print(\"No groups found at \", ip_address)\n else:\n print(\"No groups found at \", ip_address)\n return group_list", "def get(self):\r\n return UserGroupService.getAllUserGroups(self)", "def getGroups(self):\n return [g[0] for g in grp.getgrall()]", "def get_groups(id_project):\n data = sql.list_groups(id_project)\n names = [(d['id'], d['name']) for d in data]\n return names", "def groups(self):\n return self._groups", "def groups(self):\n return self._groups", "def groups(self):\n return self._groups", "def compute_server_groups(self):\n path = '/os-server-groups'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack server groups: %s' % truncate(res))\n return res[0]['server_groups']", "def list_group():\n data, code, message = FIELD_SERVICE.list_group()\n return __result(data, code, message)", "def list_groups(args):\n\n for group in get_groups(args):\n print(group)", "def GetGroups(self, bulk=False, reason=None):\n query = []\n _AppendIf(query, bulk, (\"bulk\", 1))\n _AppendReason(query, reason)\n\n groups = self._SendRequest(HTTP_GET, \"/%s/groups\" % GANETI_RAPI_VERSION,\n query, None)\n if bulk:\n return groups\n else:\n return [g[\"name\"] for g in groups]", "def list_groups(self, order_by: str = None, next_link: str = None, top: int = None, filter_: str = None):\n if next_link: # pagination\n return self.ms_client.http_request(method='GET', full_url=next_link)\n # default value = 100\n params = {'$top': top}\n if order_by:\n params['$orderby'] = order_by # type: ignore\n if filter_:\n params['$filter'] = filter_ # type: ignore\n return self.ms_client.http_request(\n method='GET',\n url_suffix='groups',\n params=params)", "def nfvi_get_instance_groups(callback):\n cmd_id = _compute_plugin.invoke_plugin('get_instance_groups',\n callback=callback)\n return cmd_id", "def grouplist(self, namespace=None):\n source = self._source(namespace)\n return self._list(source, 'list')", "def test_get_groups(self):\n response = self.client.get_groups()\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v1/groups\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def get_groups():\r\n if 'username' not in flask.session:\r\n return flask.jsonify(**{'message': 'Forbidden', 'status_code': 403})\r\n\r\n context = {}\r\n context['url'] = flask.request.path\r\n context['groups'] = []\r\n\r\n # Retreive query variables\r\n query_num_groups = flask.request.args.get('size') \r\n query_page = flask.request.args.get('page') \r\n num_groups = int(query_num_groups) if query_num_groups != None else 10\r\n page_number = int(query_page) if query_page != None else 0\r\n\r\n groups = get_group_listing(flask.session['username'], \r\n num_groups, page_number)\r\n for g in groups:\r\n context['groups'].append({\r\n 'id': g[0],\r\n 'name': g[1]\r\n })\r\n\r\n 
if (num_groups == 10):\r\n context['next'] = '{}?page={}'.format(context['url'], page_number + 1)\r\n else:\r\n context['next'] = '{}?page={}&size={}'.format(context['url'], \r\n page_number + 1, num_groups)\r\n\r\n return flask.jsonify(**context)", "def get_groups():\n\n # FUTURE: Properly reutrn error, Mongo is giving it's own\n if current_user.groups:\n return Response(response=json.dumps([g.to_dict() for g in current_user.groups]), status=200, mimetype=\"application/json\")\n else:\n return return_json_error('No groups assigned to', 500)", "def get_group_names(self):\r\n return self.groups.keys()", "def security_groups(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"security_groups\")", "def get(self):\n status = ErrorCode.SUCCESS\n try:\n res = []\n cid = self.get_argument('cid', None)\n if not (cid is None):\n res = QueryHelper.get_groups_by_cid(cid, self.db)\n self.write_ret(status,\n dict_=DotDict(res=res))\n except Exception as e:\n logging.exception(\"[UWEB] Get groups failed. Exception: %s\",\n e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def get_all_groups(self, path_prefix='/', marker=None, max_items=None):\r\n params = {}\r\n if path_prefix:\r\n params['PathPrefix'] = path_prefix\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('ListGroups', params,\r\n list_marker='Groups')", "def management_groups(self) -> pulumi.Output[Optional[Sequence['outputs.ResourceIdResponse']]]:\n return pulumi.get(self, \"management_groups\")", "def get_groups(self, customer_id='my_customer'):\n try:\n paged_results = self.repository.groups.list(customer=customer_id)\n flattened_results = api_helpers.flatten_list_results(\n paged_results, 'groups')\n LOGGER.debug('Getting all the groups for customer_id = %s,'\n ' flattened_results = %s',\n customer_id, flattened_results)\n return flattened_results\n except RefreshError as e:\n # Authentication failed, log before raise.\n LOGGER.exception(GSUITE_AUTH_FAILURE_MESSAGE)\n raise e\n except (errors.HttpError, HttpLib2Error) as e:\n raise api_errors.ApiExecutionError('groups', e)", "def getPeopleGroups(self):\n return [FoursquarePeopleGroup(le) for le in self.base.get(\"groups\", [])]", "def get(self, *args):\n return _libsbml.ListOfGroups_get(self, *args)", "def fusion_api_get_directory_groups(self, body, api=None, headers=None):\n return self.logindomain.groups(body, api, headers)", "def get_queryset(self):\n user = self.request.user\n return user.group_set.all()", "def list_groups(self):\n\n for counter, label in enumerate(self.exp_labels_list):\n print('Key {}: {} \\n'.format(str(counter), label))", "def groups(self):\n return []", "def get_groups(self, env, token, memcache_client=None):\n groups = None\n key = '%s/token/%s' % (self.reseller_prefix, token)\n cached_auth_data = memcache_client and memcache_client.get(key)\n if cached_auth_data:\n start, expiration, groups = cached_auth_data\n if time() - start > expiration:\n groups = None\n\n headers = {}\n if env.get('HTTP_AUTHORIZATION'):\n groups = None\n headers[\"Authorization\"] = env.get('HTTP_AUTHORIZATION')\n\n if not groups:\n with Timeout(self.timeout):\n conn = http_connect(self.auth_host, self.auth_port, 'GET',\n '%stoken/%s' % (self.auth_prefix, token),\n headers, ssl=self.ssl)\n\n resp = conn.getresponse()\n resp.read()\n conn.close()\n if resp.status // 100 != 2:\n return None\n expiration = float(resp.getheader('x-auth-ttl'))\n groups = 
resp.getheader('x-auth-groups')\n if memcache_client:\n memcache_client.set(key, (time(), expiration, groups),\n timeout=expiration)\n\n if env.get('HTTP_AUTHORIZATION'):\n account, user, sign = \\\n env['HTTP_AUTHORIZATION'].split(' ')[-1].split(':')\n cfaccount = resp.getheader('x-auth-account-suffix')\n path = env['PATH_INFO']\n env['PATH_INFO'] = \\\n path.replace(\"%s:%s\" % (account, user), cfaccount, 1)\n\n return groups", "def all_groups(self):\n return self._all_groups", "def available_groups(cls):\n raise NotImplementedError", "def get_groups_details(self, groups):\n assert isinstance(groups, list)\n # It may be require we request the API by splitting the names list\n # If the list is too long to be handled by the Gerrit server (URI)\n query_args = \"?%s\" % \"&\".join([\"q=%s\" % g for g in groups])\n query_args += \"&o=MEMBERS\" if groups else \"o=MEMBERS\"\n\n try:\n ret = self.g.get('groups/%s' % query_args)\n except HTTPError as e:\n return self._manage_errors(e)\n\n return ret", "def api_groups(self):\n return self._api_groups", "def security_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"security_groups\")", "def security_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"security_groups\")", "def hostgroup_list(self):\n return self.ezx.get_hostgroup_list()", "def get_pingroups(self):\n return self.groups[:]", "def get_device_groups(self):\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.network.networkdevicegroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/networkdevicegroup'.format(self.url_base))\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = [(i['@name'], i['@id'])\n\t\t\t\t\t\t\t\t for i in ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def list_secgroups(self, name=None):", "def get_groups(self):\n return [self.primary_group] + list(self.secondary_groups)", "def get_all():\n\n return AGE_GROUPS", "def groups(self):\n # type: (...) 
-> Set[str]\n return self._groups", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def list_projects(ctx):\n pprint(ctx.obj.groups.get().data)", "def group_list(request, format=None):\n if request.method == 'GET':\n snippets = RoleList.objects.all()\n serializer = GroupSerializer(snippets, many=True)\n return Response(serializer.data)\n elif request.method == 'POST':\n if not request.user.has_perm('ops.change_group'):\n return Response(status=status.HTTP_403_FORBIDDEN)\n serializer = GroupSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n recordAssets.delay(user=str(request.user),\n content=\"添加用户组:{group_name}\".format(group_name=request.data.get(\"name\")), type=\"group\",\n id=serializer.data.get('id'))\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def get_groups():\n\n groups = [\"shelter\", \"sharing\", \"unsheltered\", \"motel\"]\n\n for item in groups:\n group = Group(group_name=item)\n\n db.session.add(group)\n\n db.session.commit()", "def list_all_scaling_groups(self, request, paginate):\n\n def format_list(results):\n group_states, actives = results\n groups = [{\n 'id': state.group_id,\n 'links': get_autoscale_links(state.tenant_id, state.group_id),\n 'state': format_state_dict(state, active)\n } for state, active in zip(group_states, actives)]\n return {\n \"groups\": groups,\n \"groups_links\": get_groups_links(\n groups, self.tenant_id, None, **paginate)\n }\n\n def fetch_active_caches(group_states):\n if not tenant_is_enabled(self.tenant_id, config_value):\n return group_states, [None] * len(group_states)\n d = gatherResults(\n [get_active_cache(\n self.store.reactor, self.store.connection, self.tenant_id,\n state.group_id)\n for state in group_states])\n return d.addCallback(lambda cache: (group_states, cache))\n\n deferred = self.store.list_scaling_group_states(\n self.log, self.tenant_id, **paginate)\n deferred.addCallback(fetch_active_caches)\n deferred.addCallback(format_list)\n deferred.addCallback(json.dumps)\n return deferred", "def RetrieveAllGroups(**argd):\n flag, ret = CGateway.core.RetrieveAllGroup(argd[\"session\"])\n xFlag = CGateway._HandleExceptionAndUnauthorized(flag, ret, argd[\"session\"])\n if xFlag is not None:\n return xFlag\n hmBuilder = []\n for hm in ret:\n hmBuilder.append(hm.ToJsonDict())\n return CGateway._SuccessResponse({'return': hmBuilder})", "def get_groups_for_current_user():\n return UserAPI.get_groups_for_user_id(current_user)", "def get_groups(self, uuid=None):\n return self._get_query('groups', self._build_params(uuid=uuid), Group)", "def get_all_groups(self):\n return self.groups + ['all']", "def get_group_list(self):\n return [(item[0], item[1][0]) for item in self.contacts_by_group_list]", "def getSourceGroups(self):\n ret = self.jsonRequest(\"/api/v1/sourceGroup/getSourceGroups\", {})\n return ret", "def get_endpoint_groups(self):\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.endpointgroup.1.0+xml'})\n\n\t\tresp = 
self.ise.get('{0}/config/endpointgroup'.format(self.url_base))\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = True\n\t\t\t\n\t\t\tresult['response'] = [(i['@name'], i['@id'], i['@description'])\n\t\t\t\t\t\t\t\t for i in ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def list_secgroups(self, name=None):\n groups = self.cloudman.network.security_groups()\n\n # print (\"TTTTT\")\n # for g in groups:\n # pprint(g)\n\n if name is not None:\n for entry in groups:\n\n if entry['name'] == name:\n groups = [entry]\n break\n\n return self.get_list(\n groups,\n kind=\"secgroup\")", "def get(isamAppliance, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieve a list of all ISAM groups\",\n \"{0}\".format(uri),\n requires_modules=requires_modules, requires_version=requires_version)", "def list_user_groups(self, token):\n requestUser = self.get_username_from_token(token)\n dataBase = self.read_database()\n groups = dataBase['userGroups']\n groupList = list()\n for group in groups:\n members = groups[group]['members']\n owners = groups[group]['owners']\n if requestUser in members or requestUser in owners:\n groupList.append(group)\n return groupList", "def get_groups(self, uuids=None, name=None, pager=None):\n params = self._build_params(uuid=uuids, name=name)\n return Group.deserialize_list(self._get_multiple('groups', params, pager))" ]
[ "0.7612845", "0.74096733", "0.7197947", "0.71231127", "0.7057429", "0.7055238", "0.70541674", "0.6953628", "0.69469327", "0.69342464", "0.69159657", "0.69158304", "0.6810128", "0.6757535", "0.6740337", "0.67368454", "0.67164993", "0.66926956", "0.66826135", "0.6666242", "0.66548234", "0.66136134", "0.65175796", "0.6501843", "0.6498401", "0.64971447", "0.6480499", "0.6468472", "0.64617443", "0.6440693", "0.64259267", "0.6413463", "0.6378549", "0.63718903", "0.63703585", "0.63665766", "0.63657033", "0.63561773", "0.6341116", "0.6289363", "0.62837607", "0.62617683", "0.6252886", "0.6252886", "0.6252886", "0.6217131", "0.6197586", "0.61659116", "0.6150175", "0.6137929", "0.6086922", "0.6085683", "0.6084597", "0.6068257", "0.6066122", "0.6038715", "0.60268146", "0.6026496", "0.6018982", "0.60024154", "0.597798", "0.5975318", "0.5961194", "0.59534675", "0.5951156", "0.5934764", "0.59231037", "0.592257", "0.5922462", "0.59210426", "0.59200114", "0.5915407", "0.5911997", "0.5911997", "0.59014136", "0.5889077", "0.5888872", "0.5879293", "0.5867514", "0.58596665", "0.5854771", "0.58529246", "0.58529246", "0.58529246", "0.58529246", "0.58486444", "0.58408415", "0.5840511", "0.5838192", "0.58328474", "0.58254933", "0.582263", "0.5815693", "0.5813062", "0.57983416", "0.57980776", "0.57972986", "0.5793208", "0.57801175", "0.5774804" ]
0.69324946
10
Lists all the identity providers in your tenancy. You must specify the identity provider type (e.g., `SAML2` for identity providers using the SAML2.0 protocol). You must specify your tenancy's OCID as the value for the compartment ID (remember that the tenancy is simply the root compartment). See `Where to Get the Tenancy's OCID and User's OCID`.
def list_identity_providers(self, protocol, compartment_id, **kwargs): resource_path = "/identityProviders" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_identity_providers got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "protocol": protocol, "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[IdentityProvider]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[IdentityProvider]")
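Rather than hand-rolling the pagination loop, the SDK's pagination helper can drain every page of this list call. A short sketch, again assuming a default config file and `SAML2` as the protocol value:

import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# list_call_get_all_results follows the opc-next-page token internally.
all_providers = oci.pagination.list_call_get_all_results(
    identity.list_identity_providers,
    "SAML2",             # protocol (positional, as in the signature above)
    config["tenancy"])   # compartment_id: the root compartment
for provider in all_providers.data:
    print(provider.name, provider.product_type)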
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list(conn):\n try:\n return conn.get(url='/auth-providers')['providers']\n except SystemError as e:\n raise e", "def providers(self) -> List[str]:\n return [\n getattr(auth_account, \"provider\")\n for auth_account in self.auth_accounts # pylint: disable=not-an-iterable\n ]", "def get_providers(self):\n \n r = requests.get(\n self._url('/dataproviders'),\n headers={'Authorization': self.token},\n proxies=self.proxy)\n r.raise_for_status()\n providers = r.json()\n self.providers = [p['name'] for p in providers if (p['user'] is not None and p['user']!='SCRIPTING ENGINE')]\n log.info('{:d} providers found'.format(len(self.providers)))\n\n return", "def providers(self):\n return [p for p in self._db.providers.values() if self._dbattr(p.IDATTR)]", "def providers(self):\n return [p for p in self._db.providers.values() if self._dbattr(p.IDATTR)]", "def get_providers(providers: list, provider_type: str = 'Author') -> list:\n return [Node('Provider', name=provider, type=provider_type) for provider in providers]", "def get_auth_providers(cls):\n return [cls.os_primary.auth_provider]", "def all_providers(self) -> List[ProviderInfo]:\n sp_key = self.__providers_key()\n value = self.get(name=sp_key)\n if value is None:\n return []\n js = utf8_decode(data=value)\n array = json_decode(string=js)\n return ProviderInfo.convert(array=array)", "def get_providers(self):\n return self.keys", "def get_providers(self):\n return self.keys", "def get_providers(self):\n return self.keys", "def get_providers(self):\n return self.keys", "def get_providers(self):\n return self.keys", "def get_providers(self):\n return self.keys", "def get_tenants():\n # these are the tenant_id strings configured for the service -\n tenants_strings = conf.tenants\n result = []\n # the tenants service is a special case, as it must be a) configured to serve all tenants and b) actually maintains\n # the list of tenants in its own DB. 
in this case, we return the empty list since the tenants service will use direct\n # db access to get necessary data.\n if conf.service_name == 'tenants' and tenants_strings[0] == '*':\n return result\n\n # in dev mode, services can be configured to not use the security kernel, in which case we must get\n # configuration for a \"dev\" tenant directly from the service configs:\n if not conf.use_sk:\n for tenant in tenants_strings:\n t = {'tenant_id': tenant,\n 'iss': conf.dev_iss,\n 'public_key': conf.dev_jwt_public_key,\n 'default_access_token_ttl': conf.dev_default_access_token_ttl,\n 'default_refresh_token_ttl': conf.dev_default_refresh_token_ttl,\n }\n result.append(t)\n\n else:\n # TODO -- look up tenants in the tenants API, get the associated parameters (including sk location)\n pass\n return result", "def get_queryset(self):\n if self.requested_enterprise_uuid is None:\n raise ParseError('Required enterprise_customer_uuid is missing')\n enterprise_customer_idp = get_object_or_404(\n EnterpriseCustomerIdentityProvider,\n enterprise_customer__uuid=self.requested_enterprise_uuid\n )\n try:\n saml_provider = SAMLProviderConfig.objects.current_set().get(\n slug=convert_saml_slug_provider_id(enterprise_customer_idp.provider_id))\n except SAMLProviderConfig.DoesNotExist:\n raise Http404('No matching SAML provider found.') # lint-amnesty, pylint: disable=raise-missing-from\n return SAMLProviderData.objects.filter(entity_id=saml_provider.entity_id)", "def get(self):\n return get_all_provider()", "def provider_list(cls, args, config):\n # print \"MOLNSProvider.provider_list(args={0}, config={1})\".format(args, config)\n providers = config.list_objects(kind='Provider')\n if len(providers) == 0:\n print \"No providers configured\"\n else:\n table_data = []\n for p in providers:\n table_data.append([p.name, p.type])\n # table_print(['name', 'type'], table_data)\n r = {'type': 'table', 'column_names': ['name', 'type'], 'data': table_data}\n return r", "def get_registered_providers():\n return _instance.providers_cls.keys()", "def get_accounts(self):\n uri = '/credentials'\n response = gate_request(uri=uri)\n assert response.ok, 'Failed to get accounts: {0}'.format(response.text)\n\n all_accounts = response.json()\n self.log.debug('Accounts in Spinnaker:\\n%s', all_accounts)\n\n filtered_accounts = []\n for account in all_accounts:\n if account['type'] == self.provider:\n filtered_accounts.append(account)\n\n if not filtered_accounts:\n raise ForemastError('No Accounts matching {0}.'.format(self.provider))\n\n return filtered_accounts", "def getProvidersReferences(self):\n field = self.getWrappedField('provider')\n providers = list(field._Vocabulary(self).items())\n providers.sort(lambda a, b: cmp(a[1].lower(), b[1].lower()))\n return atapi.DisplayList(providers)", "def list_accounts(self):\n information = []\n for provider in self._accounts.values():\n information.append({\n 'token': provider.credentials.token,\n 'url': provider.credentials.url,\n })\n\n return information", "def get_all_tenants():\n tenants = identity.Tenant.query.all()\n return tenants", "def get_tenants(self):", "def list_tenants(self):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/tenants\"\n _headers = {'x-auth-token': self.cloud_admin_info['token_project']}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\" no response from Server\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\n \" tenant list Failed with 
status %s \" %\n response.status)\n return response.status\n output = json.loads(response.data)\n LOG_OBJ.info(\"Tenant List : %s \" % output)\n return output[\"tenants\"]", "def registered_providers():\n return list(_DEFAULT_PROVIDER.providers)", "def tenancies(self) -> Iterable[dto.Tenancy]:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )", "def network_service_providers(self):\n path = '/v2.0/service-providers'\n res = self.network.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack network service providers: %s' % \n truncate(res))\n return res[0]['service_providers']", "def get_all_providers() -> list[str]:\n return list(ALL_PROVIDERS)", "def ids(self):\n return ['%s:%s' % (p.NAME, self._dbattr(p.IDATTR)) for p in self.providers]", "def ids(self):\n return ['%s:%s' % (p.NAME, self._dbattr(p.IDATTR)) for p in self.providers]", "def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))", "def create_providers(cls) -> Iterable['BaseProvider']:\n return []", "def fetch_owner_accounts():\n resp = oauth.tapkey.get('Owners')\n owner_accounts = resp.json()\n return owner_accounts", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_providers(self):\n datasets = [\n \"Heineken\",\n \"Eisenbahn\",\n \"Corona\",\n \"Brahma\",\n \"Skol\",\n \"Bohemia\"\n ]\n return datasets", "def _load_providers(self, **kwargs):\n return super()._load_providers(providers=\"TIProviders\", **kwargs)", "def get_instance_list():\n return parse_list_output(Popen('nova list --all-tenants'.split(),\n stdout=STDOUT, stderr=STDERR).communicate()[0])", "def list_all_organizations(ctx):\n pprint(ctx.obj.orgs.get().data)", "def listOrganizations(self, name='', type=''):\n return self.get_json('/organization', {'name': name, 'type': type})", "def get_providers() -> List[Type[ProviderApi]]:\n providers = get_supported_dataset_providers()\n return sorted(providers, key=lambda p: p.priority.value) # type: ignore", "def get_providers_orcid_first():\n p_list = providers.registry.get_list()\n for idx, p in enumerate(p_list):\n if p.id == 'orcid':\n o = p_list.pop(idx)\n return [o] + p_list", "def get_providers_orcid_first():\n p_list = providers.registry.get_list()\n for idx, p in enumerate(p_list):\n if p.id == 'orcid':\n o = p_list.pop(idx)\n return [o] + p_list", "def tenants(self):\n # print \"tenant list is %s\" % self.auth.tenants.list()\n if not self._tenancy:\n self._tenancy = {}\n for tenant in self.auth.tenants.list():\n t = Tenant(tenant, self)\n self._tenancy[t[\"name\"]] = t\n return self._tenancy", "def _generate_accounts(self):\n accounts = []\n auth_url = 'http://{}:5000/v3/'.format(self.host)\n\n for tenant, network in self.tenants:\n account = RwcalYang.CloudAccount.from_dict({\n 'name': 'rift.auto.openstack',\n 'account_type': 'openstack',\n 'openstack': {\n 'key': self.user or self._DEFAULT_USERNAME,\n 'secret': self._DEFAULT_PASSWORD,\n 'auth_url': auth_url,\n 'tenant': tenant,\n 'mgmt_network': network}})\n\n accounts.append(account)\n\n return accounts", "def get_providers(obj):\n\n return 
scan_methods(obj, lambda attr: attr.check(Tags.PROVIDER))", "def get_providers(self):\n return [\"Rainfall\", \"Average Rainfall Sea\", \"Average Rainfall Land\"]", "def display_accounts(cls):\n return cls.account_list", "def list_available_authenticators(avail_auths):\n output_lines = [\"Available authenticators:\"]\n for auth_name, auth in avail_auths.iteritems():\n output_lines.append(\" - %s : %s\" % (auth_name, auth.description))\n return '\\n'.join(output_lines)", "def get_providers(self):\n return [\"Temperature\", \"Average Temperature Sea\", \"Average Temperature Land\"]", "def list_accounts(self):\n pass", "def myorgs(request):\n context = RequestContext(request)\n \n user = request.user\n orgs = user.orgusers.get_query_set()\n \n context['orgs'] = orgs\n return render_to_response('myorgs.html', context)", "def list(ctx, show_hidden, oath_type, period, password, remember):\n _init_session(ctx, password, remember)\n session = ctx.obj[\"session\"]\n creds = [\n cred\n for cred in session.list_credentials()\n if show_hidden or not is_hidden(cred)\n ]\n creds.sort()\n for cred in creds:\n click.echo(_string_id(cred), nl=False)\n if oath_type:\n click.echo(f\", {cred.oath_type.name}\", nl=False)\n if period:\n click.echo(f\", {cred.period}\", nl=False)\n click.echo()", "def parameter_providers(self):\n return self._parameter_providers", "def get_tenants(self, **kwargs):\n url = self.get_url('tenants', kwargs, ['begin', 'end'])\n return self.api_client.get(url).json()", "def list_provider_traits(self, rp_uuid):\n url = '/resource_providers/%s/traits' % rp_uuid\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def get_providers_list(prefix: str, identifier: str) -> Sequence[Tuple[str, str]]:\n rv = []\n for provider, get_url in PROVIDER_FUNCTIONS.items():\n link = get_url(prefix, identifier)\n if link is not None:\n rv.append((provider, link))\n if not rv:\n return rv\n\n bioregistry_link = _get_bioregistry_link(prefix, identifier)\n if not bioregistry_link:\n return rv\n\n # if a default URL is available, it goes first. 
otherwise the bioregistry URL goes first.\n rv.insert(1 if rv[0][0] == \"default\" else 0, (\"bioregistry\", bioregistry_link))\n return rv", "def identity_provider_type(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def list(ctx, show_hidden, oath_type, period):\n ensure_validated(ctx)\n controller = ctx.obj['controller']\n creds = [cred\n for cred in controller.list()\n if show_hidden or not cred.is_hidden\n ]\n creds.sort()\n for cred in creds:\n click.echo(cred.printable_key, nl=False)\n if oath_type:\n click.echo(u', {}'.format(cred.oath_type.name), nl=False)\n if period:\n click.echo(', {}'.format(cred.period), nl=False)\n click.echo()", "def active_authenticators(self, email, username, password):\n try:\n for authenticator in self.authenticators:\n filter_template = authenticator.filter_template\n if filter_template:\n filter_str = filter_template.format(email=email, username=username, password=password)\n passed_filter = eval(filter_str, {\"__builtins__\": None}, {'str': str})\n if not passed_filter:\n continue # skip to next\n yield authenticator.plugin, authenticator.options\n except Exception:\n log.exception( \"Active Authenticators Failure\" )\n raise", "def listOrganizationTypes(self):\n return self.get_json('/organizationType')", "def organizations(self):\n self.elements('organizations')", "async def get_organizations(request: Request):\n redis = request.app.state.redis\n organizations_obj = orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n return [org for org in organizations_obj]", "def list_orgs(self):\n orgs = list(self.orgs.keys())\n orgs.sort()\n return orgs", "def list_identities(self, realm=None, type=\"users\", query=None):\n if query is None:\n query = '*'\n\n type = self._type_validator(type=type)\n uri = self._uri_realm_creator(realm=realm, uri=type + '/?_queryID=' + query)\n data = self._get(uri=uri, headers=self.headers)\n return data.json()", "def fetch_accounts(self):\n return self.fetch('/accounts')", "def get_queryset(self):\n return SAMLConfiguration.objects.current_set().filter(is_public=True)", "def get_identities(environ, start_response):\n store = environ['tiddlyweb.store']\n username = environ['wsgiorg.routing_args'][1]['username']\n usersign = environ['tiddlyweb.usersign']['name']\n roles = environ['tiddlyweb.usersign']['roles']\n\n if username != usersign and 'ADMIN' not in roles:\n raise HTTP403('Bad user for action')\n\n identities = []\n try:\n mapped_bag = store.get(Bag('MAPUSER'))\n tiddlers = store.list_bag_tiddlers(mapped_bag)\n matched_tiddlers = control.filter_tiddlers(tiddlers,\n 'select=mapped_user:%s' % username, environ)\n identities = [tiddler.title for tiddler in matched_tiddlers]\n except NoBagError:\n pass\n\n start_response('200 OK', [\n ('Content-Type', 'application/json; charset=UTF-8')])\n return [simplejson.dumps(identities)]", "def list_auth_policies(self, kwargs):\n verbose = kwargs.get(\"verbose\", False)\n attributes = ALL if verbose else [\"cn\", \"objectClass\"]\n\n self.display(\n self.engine.query(\n self.engine.AUTH_POLICIES_FILTER(),\n attributes, base=','.join([\"CN=AuthN Policy Configuration,CN=Services,CN=Configuration\", self.engine.base_dn])\n ),\n verbose\n )", "def my_courses(self, signer):\n return list(chain(*[p.user_courses(signer=signer) for p in self.providers]))", "def list_device_pools(arn=None, type=None, nextToken=None):\n pass", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # 
osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def list_users(self, compartment_id, **kwargs):\n resource_path = \"/users\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"identity_provider_id\",\n \"external_identifier\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_users got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"identityProviderId\": kwargs.get(\"identity_provider_id\", missing),\n \"externalIdentifier\": kwargs.get(\"external_identifier\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[User]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[User]\")", "def get_all_auths(self):\n return self.all_auths", "def individuals(self, filters={}):\n return self.__get_list_client(Individual)(filters=filters)", "def get_providers(prefix: str, identifier: str) -> Mapping[str, str]:\n return dict(get_providers_list(prefix, identifier))", "def get_catalogs_by_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def _list_orgs(self, context):\r\n try:\r\n rtn = {'context': context,\r\n 'orgs': sorted(list(self._bbreader.cache[context].keys()))}\r\n except KeyError:\r\n raise RequestError('Context {} not found'.format(context))\r\n return rtn", "def get_authenticators_for_view(self, view_name):\n pass", "def tenants_for_token(self, context):\n token_ref = self.token_api.get_token(context=context,\n token_id=context['token_id'])\n assert token_ref is not None\n\n user_ref = token_ref['user']\n tenant_refs = []\n for tenant_id in user_ref['tenants']:\n tenant_refs.append(self.identity_api.get_tenant(\n context=context,\n tenant_id=tenant_id))\n return self._format_tenants_for_token(tenant_refs)", "def list(self, all_tenants=True):\n query = {}\n path = '/os-keypairs'\n if all_tenants is True:\n query['all_tenants'] = 1\n \n path = '%s?%s' % (path, urlencode(query)) \n \n res = self.client.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack key pairs: %s' % truncate(res))\n return res[0]['keypairs']", "def 
display_credentials(cls):\n return cls.credential_list", "def display_credentials(cls):\n return cls.credential_list", "def display_credentials(cls):\n return cls.credential_list", "def get_org_list():\r\n\r\n resp = requests.get(''.join([Kegg.BASE_URL, 'list/organism']))\r\n return resp.text", "def _fetch_providers(self, looking_for, providers=None):\n if providers is None:\n providers = self._reverse_mapping.get(looking_for, [])\n default_providers = []\n atom_providers = []\n for p in providers:\n if p.name in (_TRANSIENT_PROVIDER, self.injector_name):\n default_providers.append(p)\n else:\n atom_providers.append(p)\n return default_providers, atom_providers", "def get_idp_sso_supported_bindings(idp_entity_id=None, config=None):\n if config is None:\n # avoid circular import\n from penndjangosaml2.conf import get_config\n config = get_config()\n # load metadata store from config\n meta = getattr(config, 'metadata', {})\n # if idp is None, assume only one exists so just use that\n if idp_entity_id is None:\n # .keys() returns dict_keys in python3.5+\n idp_entity_id = list(available_idps(config).keys()).pop()\n try:\n return meta.service(idp_entity_id, 'idpsso_descriptor', 'single_sign_on_service').keys()\n except UnknownSystemEntity:\n return []", "def get_authenticators(self):\n authenticators = self.authentication_classes or ()\n\n if hasattr(self, 'action'):\n # action gets populated on the second time we are called\n per_view = self.get_authenticators_for_view(self.action)\n if per_view is not None:\n authenticators = per_view\n\n return [auth() for auth in authenticators]", "def list_authorities():\n try:\n certs = client().certificates.get_authorities()\n if not certs:\n logger.info(\n 'ctl:cert:authorities', 'No certificate authorities found'\n )\n return\n llen = len(sorted(certs, key=lambda x: len(x[\"id\"]))[-1][\"id\"])\n for x in sorted(certs, key=lambda x: x[\"id\"]):\n click.echo(\n click.style(\n '{name: <{fill}}'.format(name=x[\"id\"], fill=llen + 3),\n fg=\"white\", bold=True) + \"Expires \" +\n click.style(x[\"expiry\"].strftime(\"%c\"), fg=\"yellow\")\n )\n except Exception as e:\n raise CLIException(str(e))", "def organizations(self):\r\n return organizations.Organizations(self)", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def list(self,\n provider_id,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n ):\n return self._invoke('list',\n {\n 'provider_id': provider_id,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n })", "def list(self,\n provider_id,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n ):\n return self._invoke('list',\n {\n 'provider_id': provider_id,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n })", "def list(self,\n provider_id,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n ):\n return self._invoke('list',\n {\n 'provider_id': provider_id,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n })", "def list(self,\n provider_id,\n 
cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n ):\n return self._invoke('list',\n {\n 'provider_id': provider_id,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n })", "def list(self,\n provider_id,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n ):\n return self._invoke('list',\n {\n 'provider_id': provider_id,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n })", "def list(self,\n provider_id,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n ):\n return self._invoke('list',\n {\n 'provider_id': provider_id,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n })", "def list(self,\n provider_id,\n cursor=None,\n included_fields=None,\n page_size=None,\n sort_ascending=None,\n sort_by=None,\n ):\n return self._invoke('list',\n {\n 'provider_id': provider_id,\n 'cursor': cursor,\n 'included_fields': included_fields,\n 'page_size': page_size,\n 'sort_ascending': sort_ascending,\n 'sort_by': sort_by,\n })", "def get_providers(cls, values=None):\n rv = super(PaymentGatewayStripe, cls).get_providers()\n stripe_record = ('stripe', 'Stripe')\n if stripe_record not in rv:\n rv.append(stripe_record)\n return rv" ]
[ "0.6727649", "0.64191025", "0.6271538", "0.62455416", "0.62455416", "0.6074249", "0.6066744", "0.5960773", "0.59022737", "0.59022737", "0.59022737", "0.59022737", "0.59022737", "0.59022737", "0.5825487", "0.5819967", "0.581062", "0.5770538", "0.576516", "0.5720312", "0.571423", "0.56943977", "0.5692585", "0.5669317", "0.5665904", "0.56125253", "0.5610321", "0.557578", "0.55615956", "0.54955745", "0.54955745", "0.5458019", "0.53670543", "0.53283614", "0.5324609", "0.53140885", "0.5288722", "0.5265209", "0.5257863", "0.5230787", "0.5173527", "0.5164792", "0.5164792", "0.5156684", "0.5110133", "0.51072955", "0.51059693", "0.5078055", "0.5065252", "0.5051903", "0.50295705", "0.50210404", "0.5006569", "0.50028867", "0.5000264", "0.49939924", "0.49791074", "0.49754238", "0.4965292", "0.49593973", "0.49456942", "0.4901387", "0.48988256", "0.4886169", "0.48833013", "0.48794225", "0.48766172", "0.48512852", "0.4813982", "0.48127997", "0.4811034", "0.47929594", "0.4784082", "0.478037", "0.47800386", "0.47737238", "0.47725403", "0.4760645", "0.47601083", "0.4750342", "0.47381818", "0.47346324", "0.47346324", "0.47346324", "0.47237766", "0.47226232", "0.47140852", "0.4710656", "0.471007", "0.47090796", "0.46953183", "0.46953183", "0.46938115", "0.46938115", "0.46938115", "0.46938115", "0.46938115", "0.46938115", "0.46938115", "0.46869725" ]
0.64960575
1
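For readers skimming this dump, here is a sketch of the row schema the records appear to follow. The field names come from the dump's own column headers; the reading of document_rank as the count of negatives that score above the positive is inferred from the visible values (the record above: document_score 0.64960575, rank 1, with exactly one higher negative_score; the record below: 0.6934423, rank 0, with none higher) and is an assumption, not documented fact.

```python
# Inferred record layout for this retrieval-training dump; every field
# semantic noted here is an assumption read off the visible rows.
row = {
    "query": "Lists the group mappings for the specified identity provider.",
    "document": "def list_idp_group_mappings(self, identity_provider_id, **kwargs): ...",
    "metadata": {"objective": {"self": [], "paired": [],
                               "triplet": [["query", "document", "negatives"]]}},
    "negatives": ["def groups(): ...", "def get_identity_groups(self): ..."],
    "negative_scores": [0.60451424, 0.6032505],  # parallel to negatives
    "document_score": 0.6934423,  # similarity score of the positive document
    "document_rank": 0,           # negatives scoring above the positive (inferred)
}
assert len(row["negatives"]) == len(row["negative_scores"])
```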
Lists the group mappings for the specified identity provider.
def list_idp_group_mappings(self, identity_provider_id, **kwargs): resource_path = "/identityProviders/{identityProviderId}/groupMappings" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_idp_group_mappings got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "identityProviderId": identity_provider_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[IdpGroupMapping]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[IdpGroupMapping]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def groups():\n access_token = session['access_token']\n return \"%s\" % list_groups(access_token)", "def get_identity_groups(self):\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.identitygroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/identitygroup'.format(self.url_base))\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = True\n\t\t\t###\n\t\t\tx = ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']\n\t\t\tprint (\"x\", len(x))\n\t\t\tprint (x[0])\n\t\t\tfor element in x[0]:\n\t\t\t\tprint (element,x[0][element])\n\t\t\t###\n\t\t\tresult['response'] = [(i['@name'], i['@id'], i['@description'],i['link']['@href'])\n\t\t\t\t\t\t\t\t for i in ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def list_identity_provider_groups(self, identity_provider_id, **kwargs):\n resource_path = \"/identityProviders/{identityProviderId}/groups\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_identity_provider_groups got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"identityProviderId\": identity_provider_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n query_params = {\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[IdentityProviderGroupSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[IdentityProviderGroupSummary]\")", "def get_group_list(org_id):\n tList = get_template('app/usermanagementorg/group_list.html')\n groups = get_groups(org_id)\n return tList.render(Context({ 'groups': groups, }))", "def get_groups(self):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_get_groups_query+\" ORDER BY $groupname_field$\",{'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: get_groups: %s\" % (query,))\n\n cursor.execute(query)\n desc=[i[0] for i in cursor.description]\n for row in cursor:\n dictrow=dict(zip(desc,row))\n yield 
dictrow[self.sql_groupname_field]", "def list(request):\n return render_to_response('rteacher/manage_groups_list.html', request, **klist(\n request=request\n ))", "def list_groups(self):\n return self.get_admin(\"groups\")", "def list_groups(request):\n groups = models.UserGroup.all().order('name')\n return utility.respond(request, 'admin/list_groups', {'groups': groups})", "def list_groups(self, **params):\n url = 'groups'\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def get_list_groups(self):\n list_response = requests.get(self.groups_url, headers=self.headers)\n return list_response.json()[\"groups\"]", "def get_map_groups(user_map):\n pg = user_map.permission_group_user_map.all()\n gids = list(pg.values_list('group', flat=True))\n if len(gids) > 0:\n return Group.objects.filter(id__in=gids)\n\n return Group.objects.filter(name=DEFAULT_GROUP)", "def get_all_groups():\n return jsonify(admin.get_all_groups(current_app.scoped_session()))", "def nfvi_get_instance_groups(callback):\n cmd_id = _compute_plugin.invoke_plugin('get_instance_groups',\n callback=callback)\n return cmd_id", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def list_groups(self):\n return self._get(\"cloudConnectorGroups\").list", "def get_all_groups(self):\n self.cursor.execute(\"select * from groups\")\n self.connection.commit()\n return self.cursor.fetchall()", "def get_group_list(ip_address, headers):\n group_list = None\n group_url = 'https://%s/api/GroupService/Groups' % ip_address\n response = requests.get(group_url, headers=headers, verify=False)\n if response.status_code == 200:\n group_response = response.json()\n if group_response['@odata.count'] > 0:\n group_list = [x['Id'] for x in group_response['value']]\n else:\n print(\"No groups found at \", ip_address)\n else:\n print(\"No groups found at \", ip_address)\n return group_list", "def get_groups():\r\n if 'username' not in flask.session:\r\n return flask.jsonify(**{'message': 'Forbidden', 'status_code': 403})\r\n\r\n context = {}\r\n context['url'] = flask.request.path\r\n context['groups'] = []\r\n\r\n # Retreive query variables\r\n query_num_groups = flask.request.args.get('size') \r\n query_page = flask.request.args.get('page') \r\n num_groups = int(query_num_groups) if query_num_groups != None else 10\r\n page_number = int(query_page) if query_page != None else 0\r\n\r\n groups = get_group_listing(flask.session['username'], \r\n num_groups, page_number)\r\n for g in groups:\r\n context['groups'].append({\r\n 'id': g[0],\r\n 'name': g[1]\r\n })\r\n\r\n if (num_groups == 10):\r\n context['next'] = '{}?page={}'.format(context['url'], page_number + 1)\r\n else:\r\n context['next'] = '{}?page={}&size={}'.format(context['url'], \r\n page_number + 1, num_groups)\r\n\r\n return flask.jsonify(**context)", "def list_groups(self):\n\n for counter, label in enumerate(self.exp_labels_list):\n print('Key {}: {} \\n'.format(str(counter), label))", "def list_groups(access_token):\n request_url = OKTA_URL + \"api/v1/groups\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def list_groups():\n return _list_tindyb_unique_values(\"group\", dbpath=__dbpath__)", "def list_groups_factory(context, request):\n return 
ListGroupsService(session=request.db,\n request_authority=request.authority,\n route_url=request.route_url)", "def get_groups(self):\n response = self._get(\"groups\")\n\n return response.json()", "def list_groups(args):\n\n for group in get_groups(args):\n print(group)", "def get_groups(self):\n return Client._get(self)", "def management_groups(self) -> pulumi.Output[Optional[Sequence['outputs.ResourceIdResponse']]]:\n return pulumi.get(self, \"management_groups\")", "def groups(self):\n return self.get_data(\"groups\")", "def get_groups(id_project):\n data = sql.list_groups(id_project)\n names = [(d['id'], d['name']) for d in data]\n return names", "def get_groups():\n\n # FUTURE: Properly reutrn error, Mongo is giving it's own\n if current_user.groups:\n return Response(response=json.dumps([g.to_dict() for g in current_user.groups]), status=200, mimetype=\"application/json\")\n else:\n return return_json_error('No groups assigned to', 500)", "def list(self):\n METHOD = 'GET'\n API_PATH = '/groups/list'\n\n # Make REST call\n resp = self._rest_call[METHOD](API_PATH)\n\n if resp.status_code == 200:\n return resp.json().get('group_names')\n\n elif resp.status_code == 403:\n raise AuthorizationError(\"User is not authorized or token is incorrect.\")\n\n else:\n if resp.json().get(\"error_code\") in ERROR_CODES:\n raise ERROR_CODES[resp.json().get('error_code')](resp.json().get('message'))\n else:\n raise APIError(\"Response code {0}: {1} {2}\".format(resp.status_code,\n resp.json().get('error_code'),\n resp.json().get('message')))", "def generate_groups(ctx):\n asyncio.run(generate_groups_impl(ctx.obj[\"config\"]))", "def _groupNamesToList(settings):\n return [getattr(GroupName, val) for val in settings.dhGroups]", "def groups(request, group_id = 1):\n group = get_object_or_404(ResearchGroup, pk=group_id)\n groups = ResearchGroup.objects.order_by('name')\n group_list = []\n for g in groups:\n if g.id is not group.id:\n group_list.append({'name': g.name, 'id': g.id})\n # default showing group\n # chosen group info\n group_info = {}\n group_info['name'] = group.name\n personnel = list()\n for p in group.personnel.all():\n personnel.append(p.username)\n group_info['personnel'] = \" \".join(str(x) for x in personnel)\n group_info['projects'] = group.projects\n group_info['directions'] = group.directions\n group_info['papers'] = group.papers.split()\n context = {'group_list': group_list, 'group_info': group_info}\n return render(request, 'sacms/groups.html', context)", "def getListOfGroups(self, *args):\n return _libsbml.GroupsModelPlugin_getListOfGroups(self, *args)", "def get_all():\n\n return AGE_GROUPS", "def get_endpoint_groups(self):\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.endpointgroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/endpointgroup'.format(self.url_base))\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = True\n\t\t\t\n\t\t\tresult['response'] = [(i['@name'], i['@id'], i['@description'])\n\t\t\t\t\t\t\t\t for i in ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def get_all_access_groups():\n\treturn {\"access_groups\": [ag.serialize for ag in AccessGroup.query.all()]}, 200", "def get_groups(self):\n 
result = self.conn.usergroup.get(status=0, output='extend', selectUsers=\"extend\")\n groups = {group[\"name\"]: Group(\n name=group[\"name\"],\n id=group[\"usrgrpid\"],\n members=group[\"users\"],\n ) for group in result}\n return groups", "def fusion_api_get_directory_groups(self, body, api=None, headers=None):\n return self.logindomain.groups(body, api, headers)", "def lists(start=None, top=None):\n url = 'persongroups'\n params = {\n 'start': start,\n 'top': top,\n }\n\n return util.request('GET', url, params=params)", "def group_list(message=''):\n return Response(render_template('admin/group/list.html',\n groups=Group.query.all(),\n message=message),\n mimetype='text/html')", "def groups(self):\n return []", "def group_nodes(self, group, namespace=None):\n source = self._source(namespace)\n return self._list(source, 'map', group)", "def list_group(self, groupname):\n return self.get_admin(\"groups/{}\".format(groupname))", "def test_get_groups(self):\n response = self.client.get_groups()\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v1/groups\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def get_instance_group_managers(self, zone):\n response = self.call_api('/zones/%s/instanceGroupManagers' % zone)\n return {manager['name']: manager for manager in response.get('items', [])}", "def _get_gid_map(self, level=0):\n fof_rdd = self.fof_rdd\n sc = self.sc\n\n nPartitions = sc.defaultParallelism*5\n\n groups_map = (fof_rdd.flatMap(lambda p: p[np.where(p['is_ghost'])[0]])\n .map(pid_gid)\n .groupByKey(nPartitions)\n .values()\n .filter(lambda x: len(x)>1)\n .map(lambda x: sorted(x))\n .flatMap(lambda gs: [(g, gs[0]) for g in gs[1:]]))\n\n return groups_map", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def get_RegisteredGroupsList(test_case, only_discoverable=False, override_headers=null, override_cookies=null):\n # type: (AnyMagpieTestCaseType, bool, Optional[HeadersType], Optional[CookiesType]) -> List[Str]\n app_or_url = get_app_or_url(test_case)\n path = \"/register/groups\" if only_discoverable else \"/groups\"\n resp = test_request(app_or_url, \"GET\", path,\n headers=override_headers if override_headers is not null else test_case.json_headers,\n cookies=override_cookies if override_cookies is not null else test_case.cookies)\n json_body = check_response_basic_info(resp, 200, expected_method=\"GET\")\n return json_body[\"group_names\"]", "def groups(self):\r\n return resources.Groups(self)", "def getGroups():\r\n return Group.getGroups()", "def grouplist(self, namespace=None):\n source = self._source(namespace)\n return self._list(source, 'list')", "def list_eip_groups(self, id=None, name=None, status=None,\n marker=None, max_keys=None, config=None):\n path = self._get_path()\n params = {}\n if id is not None:\n params[b'id'] = id\n if name is not None:\n params[b'name'] = name\n if status is not None:\n params[b'status'] = status\n if marker is not None:\n params[b'marker'] = marker\n if max_keys is not None:\n params[b'maxKeys'] = max_keys\n return self._send_request(http_methods.GET, path,\n params=params, config=config)", "def get_all_groups(self, path_prefix='/', marker=None, max_items=None):\r\n params = {}\r\n if path_prefix:\r\n params['PathPrefix'] = path_prefix\r\n if marker:\r\n params['Marker'] = marker\r\n if 
max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('ListGroups', params,\r\n list_marker='Groups')", "def get_groups(self):\n return [self.primary_group] + list(self.secondary_groups)", "def list_groups(self, **filters):\n\t\tresult = self.client.get(self._endpoint + \"/group\", params=filters)\n\t\treturn PaginatedList(result, Group, (self.user_id, self.site_id), \"group_id\")", "def get_all(isamAppliance, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving groups\", \"/sysaccount/groups/v1\")", "def get_user_ida_groups():\n if not is_authenticated() or 'samlUserdata' not in session:\n return None\n\n groups = session.get('samlUserdata', {}).get(SAML_ATTRIBUTES.get('idm_groups', None), False)\n\n return [group for group in groups if group.startswith('IDA')] if groups else not_found('groups')\n return None", "def list_group():\n data, code, message = FIELD_SERVICE.list_group()\n return __result(data, code, message)", "def get_groups(self, details_level: DetailsLevel, limit: int = 500, offset: int = 0) -> dict:\n endpoint = f\"{self.server_and_port}/web_api/show-groups\"\n\n payload = {\n \"limit\": limit,\n \"offset\": offset,\n \"details-level\": details_level.value,\n \"show-as-ranges\": True,\n }\n\n headers = self.get_headers()\n result = requests.post(endpoint, headers=headers, json=payload, verify=self.ssl_verify)\n\n try:\n result.raise_for_status()\n except Exception as e:\n raise PluginException(cause=\"Unable to get groups from Check Point NGFW.\", assistance=result.text, data=e)\n\n return result.json()", "def get_groups():\n\n groups = [\"shelter\", \"sharing\", \"unsheltered\", \"motel\"]\n\n for item in groups:\n group = Group(group_name=item)\n\n db.session.add(group)\n\n db.session.commit()", "def get_groups(args):\n\n args.suppress_verify_output = True\n if verify(args) != 0:\n # restore stdout\n sys.stdout = sys.__stdout__\n print(\"Config file not valid, please use the verify function to debug\")\n return []\n\n with open(args.file, \"r\") as f:\n config_json = json.load(f)\n\n groups = []\n for group in config_json[\"groups\"]:\n groups.append(group[\"name\"])\n return groups", "def compute_server_groups(self):\n path = '/os-server-groups'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack server groups: %s' % truncate(res))\n return res[0]['server_groups']", "def get_group_names(self):\r\n return self.groups.keys()", "def get_groups(self):\n url = \"https://m.facebook.com/groups/?seemore\"\n groups = dict()\n self.get(url)\n br = self.find_elements_by_class_name(\"br\")\n for b in br:\n try:\n notis = int(b.text[-2:])\n group_name = b.text[:-2]\n except ValueError:\n group_name = b.text\n notis = 0\n try:\n link = b.find_element_by_tag_name(\"a\").get_attribute('href')\n groups[group_name] = (mfacebookToBasic(link), notis)\n except Exception as e:\n log.error(\"Can't get group link\")\n return groups", "def info_materials_groups_get():\n session = info_map.Session()\n\n mat = aliased(info_map.Material)\n grp = aliased(info_map.Group)\n\n q = session.query(mat.group_id,grp.name).join(grp).distinct()\n groups = [Group(group=row.group_id,name=row.name) for row in q.all()]\n return groups, 200", "def index( self, trans, deleted=False, **kwd ):\n group_dicts = []\n deleted = util.asbool( deleted )\n if deleted and not trans.user_is_admin():\n raise AdminRequiredException( 'Only administrators can query deleted groups.' 
)\n for group in self.group_manager.list( trans, deleted ):\n group_dicts.append( self._populate( trans, group ) )\n return group_dicts", "def get_groups_list(mmtf_dict, group_definitions):\n\n sec_struct = [\n \"helices\", None, \"helices\", \"strands\", \"helices\", \"strands\", None, None\n ]\n return [{\n \"number\": id, \"insert\": insert, \"secondary_structure\": sec_struct[ss],\n **group_definitions[type_]\n } for id, insert, ss, type_, in zip(\n mmtf_dict[\"groupIdList\"], mmtf_dict[\"insCodeList\"],\n mmtf_dict.get(\"secStructList\", [-1] * len(mmtf_dict[\"groupIdList\"])),\n mmtf_dict[\"groupTypeList\"]\n )]", "def pull_groups(self, org):\n pass", "def list_regions():\n regions_areas = (\n db.session.query(\n models.Region.code.label(\"region_code\"),\n models.Region.name.label(\"region_name\"),\n db.case([(models.District.code.is_(None),\n db.literal_column(\"'admin_area'\"))],\n else_=db.literal_column(\"'district'\")).label(\"area_type\"),\n db.case([(models.District.code.is_(None), models.AdminArea.code)],\n else_=models.District.code).label(\"area_code\"),\n db.case([(models.District.code.is_(None), models.AdminArea.name)],\n else_=models.District.name).label(\"area_name\")\n ).select_from(models.Region)\n .join(models.Region.areas)\n .outerjoin(models.AdminArea.districts)\n .filter(models.Region.code != \"GB\")\n .order_by(\"region_name\", \"area_name\")\n .all()\n )\n regions = {}\n areas = {}\n for row in regions_areas:\n regions[row.region_code] = row.region_name\n areas.setdefault(row.region_code, []).append(row)\n\n return render_template(\"regions.html\", regions=regions, areas=areas)", "def getGroupInfo(groupId):\n url = f\"https://groups.roblox.com/v1/groups/{groupId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j", "def test_list_entries_groups(self):\r\n group_id = None # Change me!!\r\n topic_id = None # Change me!!\r\n\r\n r = self.client.list_entries_groups(group_id, topic_id, ids=None)", "def list_groups(self, order_by: str = None, next_link: str = None, top: int = None, filter_: str = None):\n if next_link: # pagination\n return self.ms_client.http_request(method='GET', full_url=next_link)\n # default value = 100\n params = {'$top': top}\n if order_by:\n params['$orderby'] = order_by # type: ignore\n if filter_:\n params['$filter'] = filter_ # type: ignore\n return self.ms_client.http_request(\n method='GET',\n url_suffix='groups',\n params=params)", "def get_groups(phone_num):\n\n phone_num = json.loads(phone_num)\n resp = con.get_groups(phone_num)\n\n emit(\"groups_update\", json.dumps(resp))", "def groups_by_id(request, gid):\r\n group = Group()\r\n filtered_groups = group.query({\"gid\":str(gid)})\r\n if len(filtered_groups) == 0:\r\n badRequest(\"No available group under GID \"+str(gid))\r\n return HttpResponse(json.dumps(filtered_groups))", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def api_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_groups\")", "def listGroups(self):\n return tuple(Group.create(groupName, self._modelDataManager) for groupName in self.pm_getUserManager().listGroups())", "def list_secgroups(self, name=None):", "def _list_groups_callback(self, broker_id, response):\n for consumer_group, group_type in 
self.kafka_client._list_consumer_groups_process_response(response):\n # consumer groups from Kafka < 0.9 that store their offset in Kafka don't use Kafka for group-coordination\n # so their group_type is empty\n if group_type in ('consumer', ''):\n single_group_offsets_future = self._list_consumer_group_offsets_send_request(\n group_id=consumer_group, group_coordinator_id=broker_id\n )\n single_group_offsets_future.add_callback(self._single_group_offsets_callback, consumer_group)\n self._consumer_futures.append(single_group_offsets_future)", "def set_up_groups(self):\n groups = []\n groups.append({'groupname': 'th',\n 'grouptitle': 'TH',\n 'path': '/'})\n groups.append({'groupname': 'neutronics',\n 'grouptitle': 'Neutronics',\n 'path': '/'})\n groups.append({'groupname': 'metadata',\n 'grouptitle': 'Simulation Metadata',\n 'path': '/'})\n return groups", "def get_group_list(self):\n return [(item[0], item[1][0]) for item in self.contacts_by_group_list]", "def get_device_groups(self):\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.network.networkdevicegroup.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/networkdevicegroup'.format(self.url_base))\n\n\t\tif resp.status_code == 200:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = [(i['@name'], i['@id'])\n\t\t\t\t\t\t\t\t for i in ERS._to_json(resp.text)['ns3:searchResult']['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result", "def provider_list(cls, args, config):\n # print \"MOLNSProvider.provider_list(args={0}, config={1})\".format(args, config)\n providers = config.list_objects(kind='Provider')\n if len(providers) == 0:\n print \"No providers configured\"\n else:\n table_data = []\n for p in providers:\n table_data.append([p.name, p.type])\n # table_print(['name', 'type'], table_data)\n r = {'type': 'table', 'column_names': ['name', 'type'], 'data': table_data}\n return r", "def authenticator_groups_config(self) -> 'outputs.AuthenticatorGroupsConfigResponse':\n return pulumi.get(self, \"authenticator_groups_config\")", "def seat_group_index(party_id):\n party = _get_party_or_404(party_id)\n\n groups = seat_group_service.get_all_seat_groups_for_party(party.id)\n\n return {\n 'party': party,\n 'groups': groups,\n }", "def groupManager(request):\n \n return render_to_response('centers.html',context_instance=RequestContext(request))", "def security_groups(self, oid):\n try:\n path = u'/servers/%s/os-security-groups' % oid\n res = self.client.call(path, u'GET', data=u'', \n token=self.manager.identity.token)\n self.logger.debug(u'Get openstack server security groups: %s' % truncate(res))\n return res[0][u'security_groups']\n except Exception as error:\n self.logger.error(error, exc_info=True)\n data = []\n return res", "def get(self, *args):\n return _libsbml.ListOfGroups_get(self, *args)", "def groupfinder(userid, request):\n if userid in ADMINS:\n return ['g:admins']\n else:\n return []", "def groups_settings_list(self, mar, request):\n all_groups = self._services.usergroup.GetAllUserGroupsInfo(mar.cnxn)\n group_settings = []\n for g in all_groups:\n setting = g[2]\n wrapper = api_pb2_v1_helpers.convert_group_settings(g[0], setting)\n if not request.importedGroupsOnly or wrapper.ext_group_type:\n group_settings.append(wrapper)\n return 
api_pb2_v1.GroupsSettingsListResponse(\n groupSettings=group_settings)", "def groupfinder(name, request):\n #FIXME: Implement\n return ()\n return request.context.get_groups(name)", "def get_groups(self, obj):\n groupsForCompany = get_groups_with_perms(obj)\n return [x.id for x in groupsForCompany]", "def list_worker_groups(cls, args, config):\n groups = config.list_objects(kind='WorkerGroup')\n if len(groups) == 0:\n raise MOLNSException(\"No worker groups configured\")\n else:\n table_data = []\n for g in groups:\n # provider_name = config.get_object_by_id(g.provider_id, 'Provider').name\n try:\n p = config.get_object_by_id(g.provider_id, 'Provider')\n provider_name = p.name\n except DatastoreException as e:\n provider_name = 'ERROR: {0}'.format(e)\n try:\n c = config.get_object_by_id(g.controller_id, 'Controller')\n controller_name = c.name\n except DatastoreException as e:\n controller_name = 'ERROR: {0}'.format(e)\n table_data.append([g.name, provider_name, controller_name])\n return {'type': 'table', 'column_names': ['name', 'provider', 'controller'], 'data': table_data}", "def groups(self):\n if not self._groups:\n self._groups = self._init_repository(\n _AdminDirectoryGroupsRepository)\n return self._groups", "def getGroups(self, proteinId):\n return [self.groups[gId] for gId in self._proteinToGroupIds[proteinId]]", "def get_groups(self, env, token, memcache_client=None):\n groups = None\n key = '%s/token/%s' % (self.reseller_prefix, token)\n cached_auth_data = memcache_client and memcache_client.get(key)\n if cached_auth_data:\n start, expiration, groups = cached_auth_data\n if time() - start > expiration:\n groups = None\n\n headers = {}\n if env.get('HTTP_AUTHORIZATION'):\n groups = None\n headers[\"Authorization\"] = env.get('HTTP_AUTHORIZATION')\n\n if not groups:\n with Timeout(self.timeout):\n conn = http_connect(self.auth_host, self.auth_port, 'GET',\n '%stoken/%s' % (self.auth_prefix, token),\n headers, ssl=self.ssl)\n\n resp = conn.getresponse()\n resp.read()\n conn.close()\n if resp.status // 100 != 2:\n return None\n expiration = float(resp.getheader('x-auth-ttl'))\n groups = resp.getheader('x-auth-groups')\n if memcache_client:\n memcache_client.set(key, (time(), expiration, groups),\n timeout=expiration)\n\n if env.get('HTTP_AUTHORIZATION'):\n account, user, sign = \\\n env['HTTP_AUTHORIZATION'].split(' ')[-1].split(':')\n cfaccount = resp.getheader('x-auth-account-suffix')\n path = env['PATH_INFO']\n env['PATH_INFO'] = \\\n path.replace(\"%s:%s\" % (account, user), cfaccount, 1)\n\n return groups", "def list_all_scaling_groups(self, request, paginate):\n\n def format_list(results):\n group_states, actives = results\n groups = [{\n 'id': state.group_id,\n 'links': get_autoscale_links(state.tenant_id, state.group_id),\n 'state': format_state_dict(state, active)\n } for state, active in zip(group_states, actives)]\n return {\n \"groups\": groups,\n \"groups_links\": get_groups_links(\n groups, self.tenant_id, None, **paginate)\n }\n\n def fetch_active_caches(group_states):\n if not tenant_is_enabled(self.tenant_id, config_value):\n return group_states, [None] * len(group_states)\n d = gatherResults(\n [get_active_cache(\n self.store.reactor, self.store.connection, self.tenant_id,\n state.group_id)\n for state in group_states])\n return d.addCallback(lambda cache: (group_states, cache))\n\n deferred = self.store.list_scaling_group_states(\n self.log, self.tenant_id, **paginate)\n deferred.addCallback(fetch_active_caches)\n deferred.addCallback(format_list)\n 
deferred.addCallback(json.dumps)\n return deferred" ]
[ "0.60451424", "0.6032505", "0.5956654", "0.5918813", "0.58596", "0.5801229", "0.5754967", "0.5714305", "0.5660738", "0.56143993", "0.55850315", "0.5576164", "0.55513936", "0.5545537", "0.5543902", "0.5542496", "0.5516063", "0.5498167", "0.5491729", "0.546018", "0.5443648", "0.5429444", "0.54240775", "0.5374941", "0.53657424", "0.533331", "0.5328101", "0.5327889", "0.5309783", "0.52744496", "0.5273242", "0.5245074", "0.5201043", "0.5193559", "0.51929355", "0.5187821", "0.51667464", "0.51627326", "0.51501244", "0.5146529", "0.51402545", "0.5138483", "0.51382375", "0.51364434", "0.51290494", "0.5128033", "0.51257986", "0.51253045", "0.511862", "0.5116682", "0.5112369", "0.5088646", "0.50830597", "0.50717723", "0.50715697", "0.5071141", "0.5061178", "0.50522316", "0.50408137", "0.50381666", "0.5037049", "0.5034403", "0.50268584", "0.5018272", "0.5014814", "0.5010389", "0.50077844", "0.5005032", "0.5004729", "0.4998817", "0.4994991", "0.4994222", "0.49865764", "0.4984634", "0.49838364", "0.4983197", "0.4983197", "0.4983197", "0.4983197", "0.4981503", "0.49794298", "0.4971573", "0.49671096", "0.49657127", "0.496232", "0.49615365", "0.49462253", "0.49411985", "0.49247158", "0.49205318", "0.49131423", "0.4907454", "0.49044853", "0.49023792", "0.49016634", "0.4897415", "0.48926282", "0.48905826", "0.48832124", "0.48811734" ]
0.6934423
0
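By its structure (path_params validation, retry_strategy, response_type), the document in the record above matches the list_idp_group_mappings method of the OCI Python SDK's IdentityClient. Under that assumption, a minimal usage sketch follows; the identity provider OCID is a hypothetical placeholder.

```python
# Minimal sketch of calling the method documented above, assuming it is
# oci.identity.IdentityClient.list_idp_group_mappings from the OCI Python
# SDK ("pip install oci"); requires a valid ~/.oci/config.
import oci

identity = oci.identity.IdentityClient(oci.config.from_file())
idp_id = "ocid1.saml2idp.oc1..exampleuniqueID"  # hypothetical placeholder OCID

# One page, capped via the documented "limit" kwarg.
response = identity.list_idp_group_mappings(idp_id, limit=10)
for mapping in response.data:  # list[IdpGroupMapping]
    print(mapping.idp_group_name, "->", mapping.group_id)

# The "page" kwarg drives pagination; the SDK ships a helper that walks it.
all_mappings = oci.pagination.list_call_get_all_results(
    identity.list_idp_group_mappings, idp_id).data
print(len(all_mappings), "mappings in total")
```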
Lists the MFA TOTP devices for the specified user. The returned object contains the device's OCID, but not the seed. The seed is returned only upon creation or when the IAM service regenerates the MFA seed for the device.
def list_mfa_totp_devices(self, user_id, **kwargs): resource_path = "/users/{userId}/mfaTotpDevices" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "sort_by", "sort_order" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_mfa_totp_devices got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) if 'sort_by' in kwargs: sort_by_allowed_values = ["TIMECREATED", "NAME"] if kwargs['sort_by'] not in sort_by_allowed_values: raise ValueError( "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values) ) if 'sort_order' in kwargs: sort_order_allowed_values = ["ASC", "DESC"] if kwargs['sort_order'] not in sort_order_allowed_values: raise ValueError( "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values) ) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "sortBy": kwargs.get("sort_by", missing), "sortOrder": kwargs.get("sort_order", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[MfaTotpDeviceSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[MfaTotpDeviceSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def retrieve_user_devices(self, user_id):\n if self.database is None:\n raise Exception(\"No database.\")\n if user_id is None or len(user_id) == 0:\n raise Exception(\"Bad parameter.\")\n devices = self.database.retrieve_user_devices(user_id)\n if devices is not None:\n devices = list(set(devices)) # De-duplicate\n return devices", "def create_mfa_totp_device(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_mfa_totp_device got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")", "def get_all_mfa_devices(self, user_name, marker=None, max_items=None):\r\n params = {'UserName' : user_name}\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('ListMFADevices',\r\n params, list_marker='MFADevices')", "def devices_for_user(self, user, confirmed=None):\n devices = self.model.objects.filter(user=user)\n if confirmed is not None:\n devices = devices.filter(confirmed=bool(confirmed))\n\n return devices", "def retrieve_user_devices(self, user_id):\n if user_id is None:\n self.log_error(MongoDatabase.retrieve_user_devices.__name__ + \"Unexpected empty object: user_id\")\n return None\n\n try:\n user_id_obj = ObjectId(user_id)\n user = self.users_collection.find_one({\"_id\": user_id_obj})\n if user is not None:\n if 'devices' in user:\n return user['devices']\n except:\n traceback.print_exc(file=sys.stdout)\n self.log_error(sys.exc_info()[0])\n return None", "def list_tokens(user):\n return AppSpecificAuthToken.select().where(AppSpecificAuthToken.user == user)", "def generate_totp_seed(self, user_id, mfa_totp_device_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}/actions/generateSeed\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n 
\"generate_totp_seed got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"mfaTotpDeviceId\": mfa_totp_device_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDevice\")", "def get_mfa_totp_device(self, user_id, mfa_totp_device_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"get_mfa_totp_device got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"mfaTotpDeviceId\": mfa_totp_device_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDeviceSummary\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"MfaTotpDeviceSummary\")", "def list_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n for device in result:\n print(device)", "def get_list_of_devices(self, give_json=False):\n\n url = Constants.BASE_URL + 'users/devices'\n response = requests.get(url=url, params={'key': self.user_access_token})\n\n if give_json:\n return response.json()\n else:\n return response.text", "def get_user_access_tokens(request, user):\n manager = internal_keystoneclient(request).oauth2.access_tokens\n\n return manager.list_for_user(user=user)", "def list_devices():\n return _lib.SeaTeaseAPI().list_devices()", "def get_user_devices_adapter(json_response):\n\n if 'devices' in json_response:\n ret = {\"result\": []}\n for device in json_response['devices']:\n ret[\"result\"].append(\n {\"name\": 
device[\"name\"],\n \"type\": device[\"type\"],\n \"id\": device[\"id\"],\n \"is_active\": device[\"is_active\"]})\n return ret\n return json_response", "def get_data_source_tokens_by_user(self, user_id: int):\n all_data_source_tokens_array = []\n user = None\n try:\n user: User = UserService.get_user_by_id(self, user_id)\n except Exception:\n raise\n\n try:\n for data_source_token in DataSourceToken.select(\n DataSourceToken,\n user).where(DataSourceToken.user_id == user_id):\n all_data_source_tokens_array.append(\n model_to_dict(data_source_token, recurse=False))\n return all_data_source_tokens_array\n except Exception:\n raise", "def test_user_get_topteams():\n app = create_ctfd()\n with app.app_context():\n register_user(app)\n client = login_as_user(app)\n r = client.get('/top/10')\n assert r.status_code == 200\n destroy_ctfd(app)", "def test_multiple_devices(self) -> None:\n\n self.http_client.request = AsyncMock(\n return_value=FakeResponse.json(\n code=200,\n payload={\n \"active\": True,\n \"sub\": SUBJECT,\n \"scope\": \" \".join(\n [\n MATRIX_USER_SCOPE,\n f\"{MATRIX_DEVICE_SCOPE_PREFIX}AABBCC\",\n f\"{MATRIX_DEVICE_SCOPE_PREFIX}DDEEFF\",\n ]\n ),\n \"username\": USERNAME,\n },\n )\n )\n request = Mock(args={})\n request.args[b\"access_token\"] = [b\"mockAccessToken\"]\n request.requestHeaders.getRawHeaders = mock_getRawHeaders()\n self.get_failure(self.auth.get_user_by_req(request), AuthError)", "def get_devices(self):\n return self.api_request('GET', self.url + '/device', {})", "def delete_mfa_totp_device(self, user_id, mfa_totp_device_id, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_mfa_totp_device got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"mfaTotpDeviceId\": mfa_totp_device_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def get_user_tasks(self, user_pk: int) -> APIResponse:\n user_args = {\"pk\": user_pk}\n return self._get(\"user_list\", user_args)", "def get_tokens_for_user(user):\n\n refresh = RefreshToken.for_user(user)\n\n return {\n 'refresh': str(refresh),\n 'access': str(refresh.access_token),\n }", "def devices(self, query=None):\n if query is not None:\n query = clean(query, self.devices_parameters)\n query = \"?\" + urllib.parse.urlencode(query)\n 
else:\n query = \"\"\n return self.get(\"/devices\" + query)", "def load_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n return [(device['id'], device['name'], device['state']) for device in result]", "def _list_tokens(self, user_id, tenant_id=None, trust_id=None,\n consumer_id=None):\n raise exception.NotImplemented() # pragma: no cover", "def device_list(self, plant_id):\n return self.plant_info(plant_id)['deviceList']", "def test_for_user(self):\n devices = Device.objects.for_user(self.user)\n self.assertEqual(len(devices), 1)\n self.assertEqual(devices[0], self.device)", "def list_devices(arn=None, nextToken=None):\n pass", "def get_devices(self):\n return get_devices(self.api_key)", "def get_devices():\n devices = []\n for device_id in range(pm.lib.Pm_CountDevices()):\n devices.append(DeviceInfo(device_id))\n\n return devices", "def plant_list(self, user_id):\n response = self.session.get(self.get_url('PlantListAPI.do'),\n params={'userId': user_id},\n allow_redirects=False)\n if response.status_code != 200:\n raise RuntimeError(\"Request failed: %s\", response)\n data = json.loads(response.content.decode('utf-8'))\n return data['back']", "def view_list_users(self, user):\r\n return user.realm._users.keys()", "def get_registered_tvs(auth_header, user, async_kvstore_client, device_ids=None):\n\n LOGGER.debug('Getting registered tvs with device_ids=%s user=%s', device_ids, user)\n devices_table = constants.REGISTERED_DEVICES_COLLECTION_NAME\n try:\n registered_devices = []\n\n # optionally filter by device ids\n params = None\n query = {constants.REGISTERED_DEVICES_DEVICE_TYPE: constants.APPLE_TV}\n if device_ids:\n query = build_containedin_clause(constants.DEVICE_ID, device_ids)\n\n\n LOGGER.debug('get drone mode tvs query=%s', query)\n params = {constants.QUERY: json.dumps(query)}\n response = yield async_kvstore_client.async_kvstore_get_request(devices_table,\n owner=user,\n auth_header=auth_header,\n params=params)\n registered_devices = yield response.json() if response.code == http.OK else []\n\n LOGGER.debug('get_registered_tvs returned=%s', registered_devices)\n defer.returnValue(registered_devices)\n\n except Exception:\n LOGGER.exception('Exception getting registered_devices')\n defer.returnValue([])", "def list_devices():\r\n DeviceManagerCLI.BuildDeviceList()\r\n return DeviceManagerCLI.GetDeviceList()", "def get_user_answers(user_id):\n dynamodb = boto3.resource(\"dynamodb\", region_name=\"eu-central-1\")\n answer_table = dynamodb.Table(\"Answers\")\n\n filterexpression = Attr(\"UserId\").eq(user_id)\n response = answer_table.scan(FilterExpression=filterexpression)\n answers = response.get(\"Items\")\n\n return answers", "def get_devices():\n try:\n with open(DEVICES, 'r') as f:\n data = json.load(f)['devices']\n except (IOError, ValueError) as err:\n raise SwiftlmCheckFailure('Failure opening %s: %s' % (DEVICES, err))\n\n devices = []\n for d in data:\n l = d.get('label', LABEL_CHECK_DISABLED)\n devices.append(Device(\n device=d['name'],\n mount=MOUNT_PATH+d['swift_drive_name'],\n label=l\n ))\n\n return devices", "def getDeviceList(self):\r\n\r\n self._logger.debug(\"In getDeviceList()...\")\r\n\r\n # update the security token if needed \r\n if self._checkToken():\r\n\r\n response = self._callAPI(_API_GET_DEVICE_LIST, useSession=True)\r\n\r\n if response is not None:\r\n\r\n deviceInfo = response.json()\r\n \r\n if response.status_code == 200 and \"items\" in deviceInfo:\r\n\r\n deviceList = 
[]\r\n\r\n for dev in deviceInfo[\"items\"]:\r\n\r\n # pull out common attributes\r\n deviceID = dev[\"serial_number\"]\r\n deviceType = dev[\"device_family\"]\r\n description = dev.get(\"name\", deviceType + \" \" + deviceID[-4:])\r\n\r\n # uncomment the next line to inspect the devices returned from the MyQ service\r\n self._logger.debug(\"Device Found - Device ID: %s, Device Type: %s, Description: %s\", deviceID, deviceType, description)\r\n\r\n # add device to the list with properties based on type\r\n if deviceType == API_DEVICE_TYPE_GATEWAY:\r\n\r\n # get gateway attributes\r\n online = dev[\"state\"][\"online\"]\r\n lastUpdated = dev[\"state\"][\"last_status\"]\r\n\r\n # add gateway device to list\r\n deviceList.append({\r\n \"type\": deviceType,\r\n \"id\": deviceID,\r\n \"description\": description,\r\n \"online\": online,\r\n \"last_updated\": lastUpdated\r\n })\r\n\r\n elif deviceType == API_DEVICE_TYPE_OPENER:\r\n \r\n # get the door attributes\r\n parentID = dev[\"parent_device_id\"] \r\n state = dev[\"state\"][\"door_state\"]\r\n lastChanged = dev[\"state\"][\"last_update\"]\r\n lastUpdated = dev[\"state\"][\"last_status\"]\r\n\r\n # add garage door opener device to list\r\n deviceList.append({\r\n \"type\": deviceType,\r\n \"id\": deviceID,\r\n \"parent_id\": parentID,\r\n \"description\": description,\r\n \"state\": state,\r\n \"last_changed\": lastChanged,\r\n \"last_updated\": lastUpdated\r\n })\r\n \r\n elif deviceType == API_DEVICE_TYPE_LAMP:\r\n\r\n # get the lamp attributes\r\n parentID = dev[\"parent_device_id\"] \r\n state = dev[\"state\"][\"lamp_state\"] \r\n lastChanged = dev[\"state\"][\"last_update\"]\r\n lastUpdated = dev[\"state\"][\"last_status\"]\r\n\r\n # add lamp device to list\r\n deviceList.append({\r\n \"type\": deviceType,\r\n \"id\": deviceID,\r\n \"parent_id\": parentID,\r\n \"description\": description,\r\n \"state\": state,\r\n \"last_changed\": lastChanged,\r\n \"last_updated\": lastUpdated\r\n })\r\n \r\n return deviceList\r\n \r\n elif response.status_code == 401:\r\n \r\n self._logger.error(\"There was an authentication error with the MyQ account: %s\", _parseResponseMsg(response))\r\n return None\r\n\r\n else:\r\n \r\n self._logger.error(\"Error retrieving device list: %s\", _parseResponseMsg(response))\r\n return None\r\n\r\n else:\r\n # Error logged in _callAPI function\r\n return None\r\n\r\n else:\r\n # Check token failed - wait and see if next call successful\r\n return None", "def list_users(self, user=None):\n from expfactory.database.models import Participant\n\n participants = Participant.query.all()\n users = []\n for user in participants:\n users.append(self.print_user(user))\n return users", "def get_users(self):\n return self.mycam.devicemgmt.GetUsers()", "def list_auth_tokens(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/authTokens\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_auth_tokens got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": 
\"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[AuthToken]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[AuthToken]\")", "def get_issued_devices_for_user(employee_id: int) -> list:\n\n conn = database_connect()\n if(conn is None):\n return None\n cur = conn.cursor()\n\n try:\n # SQL statement and execute\n sql = \"\"\"SELECT Device.deviceID, Device.purchaseDate, Device.modelNumber, Device.manufacturer\n FROM Employee JOIN Device ON(Employee.empid = Device.issuedTo)\n WHERE Employee.empid = %s\"\"\"\n cur.execute(sql, (employee_id,));\n\n # Attempt to fetch all\n result = cur.fetchall()\n\n if result == None:\n cur.close()\n conn.close()\n return []\n\n devices = []\n for row in result:\n devices.append(\n [row[0], row[1], row[2], row[3]]\n )\n\n cur.close()\n conn.close()\n return devices\n except Exception as e:\n print(\"eee\")\n print(e)\n # If login failed, return None\n cur.close()\n conn.close()\n return []", "def listdevices(request,clouditem,tokenID,form):\n\n\tdajax = Dajax()\n\tprint \"ciao\"\n\ttry:\t\n\t\tcloudQuery = checkCloudItem(clouditem,request.user.id)\n\t\ttkn = checkAccessToken(tokenID,cloudQuery)\n\t\n\t\tdc = DeviceListController(tkn,None,None)\n\t\tdc.listDevices()\n\n\t\tprint \"test\"\n\texcept Exception as e:\n\t\tprint formatException(e)\n\n\treturn dajax.json()", "def get_network_devices(user, passwd, base_api_url):\n network_devices = ''\n response = connect_to_idrac(user, passwd, base_api_url)\n if response and response.json():\n network_devices_info = response.json()\n try:\n network_devices = network_devices_info[u'Members']\n except KeyError:\n network_devices = ''\n get_user_response(message='could not get network devices info')\n else:\n get_user_response(message='idrac connection status code is 401')\n\n return network_devices", "def device_gen(device_path):\r\n with rigol.usbtmc.Usbtmc(device_path) as device:\r\n yield device", "def jwt_otp_payload(user, device=None):\n # username_field = get_username_field()\n username = get_username(user)\n\n payload = {\n 'user_id': user.pk,\n 'username': username,\n 'exp': datetime.utcnow() + api_settings.JWT_EXPIRATION_DELTA\n }\n\n # Include original issued at time for a brand new token,\n # to allow token refresh\n if api_settings.JWT_ALLOW_REFRESH:\n payload['orig_iat'] = timegm(\n datetime.utcnow().utctimetuple()\n )\n\n if api_settings.JWT_AUDIENCE is not None:\n payload['aud'] = api_settings.JWT_AUDIENCE\n\n if api_settings.JWT_ISSUER is not None:\n payload['iss'] = api_settings.JWT_ISSUER\n\n # custom additions\n is_user_and_device = user is not None and device is not None\n is_users_device = is_user_and_device and device.user_id == user.id\n is_device_confirmed = is_users_device and device.confirmed is True\n if is_device_confirmed:\n payload['otp_device_id'] = device.persistent_id\n else:\n payload['otp_device_id'] = None\n\n return payload", "def user_suggestions(self, user_id):\r\n return suggestions.ForumUserSuggestions(self, user_id)", "def trusted_devices(self):\n request = self.session.get(\n f\"{self.SETUP_ENDPOINT}/listDevices\", params=self.params\n 
)\n return request.json().get(\"devices\")", "def list_devices(self):\n return [x for x in self.devices.keys()]", "def user(self, user_token, user_device=None):\n self.set('user', user_token)\n self.set('device', user_device)", "def test_get_all_tokens_authenticated_user(self):\r\n\r\n user = UserFactory.create_batch(2)[1]\r\n user.info = create_tokens_for(user)\r\n\r\n res = self.app.get('api/token?api_key=' + user.api_key)\r\n data = json.loads(res.data)\r\n\r\n for provider in TokenAPI.oauth_providers:\r\n token_name = '%s_token' % provider\r\n assert data.get(token_name) is not None, data", "def get_user_transactions(session, user_id, expand=False):\n if expand:\n result = session.query(Transaction).filter(Transaction.user_phone == user_id).all()\n transaction_schema = TransactionSchema()\n transactions = [transaction_schema.dump(r).data for r in result]\n else:\n result = session.query(Transaction.id).filter(Transaction.user_phone == user_id).all()\n transactions = [TRANSACTION_GET_URI.format(transaction_id=r.id) for r in result]\n return transactions", "def test_get_all_existing_tokens_authenticated_user(self):\r\n\r\n user = UserFactory.create_batch(2)[1]\r\n user.info = create_tokens_for(user)\r\n del user.info['google_token']\r\n\r\n res = self.app.get('api/token?api_key=' + user.api_key)\r\n data = json.loads(res.data)\r\n\r\n assert data.get('twitter_token') is not None, data\r\n assert data.get('facebook_token') is not None, data\r\n assert data.get('google_token') is None, data", "def list_user_access(self, user):\n return self._user_manager.list_user_access(user)", "def get_all_devices(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetAllDevices', self.handle))", "def get_devices(self):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_account_units\",\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_pin\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n if not response_json.get(\"success\"):\n raise ValueError(response_json.get(\"error_message\"))\n\n units_json = response_json.get(\"units\")\n devices = []\n for unit in units_json:\n device = Charger(unit, self)\n device.update_state()\n devices.append(device)\n\n return devices", "def devices(self) -> api.Devices:\n return self._get_model(model=api.Devices)", "def YieldAllDevices(self):\n\n for deviceManagedObject in self.managedObject.config.hardware.device:\n yield Device(vm=self, managedObject=deviceManagedObject)", "def getDevices(self, node, flatDeviceHierarchy=False):\n nodeInfo = self.getNode(node, flatDeviceHierarchy=flatDeviceHierarchy)\n if nodeInfo is None:\n self.log.error(\"could not get device list because node '%s' does not exist\", node)\n return {}\n return nodeInfo.devices", "def get_lists(user):\n list_options = {}\n list_objects = twitter.lists_all(screen_name=user)\n for list_ in list_objects:\n list_options[list_.id] = list_.name\n return list_options.items()", "def get_mavens(user_id):\n following = get_following(user_id)\n return list(User.objects.exclude(pk__in=(following + [user_id])) \\\n .order_by('-userstatistics__karma') \\\n .values_list('id', flat=True))", "def list_credentials(user):\n return Credentials.list_credentials(user)", "def peer_device(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"peer_device\"), kwargs)", "def list_user_access(self, instance, user):\n return instance.list_user_access(user)", 
"def get_devices(self):\n devices = []\n for i in self.devices:\n devices.append(self.devices[i])\n\n return devices", "def infer_user(self, user_list):\n return self._reader_user(user_list)", "def users(self):\n from sagas.ofbiz.entities import OfEntity as e, oc\n rs=e().allUserLogin()\n for r in rs:\n print(r['userLoginId'])", "def get_devices(jwt: str) -> List:\n LOGGER.debug(\"Retrieving devices...\")\n\n args = {\n \"url\": \"{0}/device\".format(CONFIG['dojot']['url']),\n \"headers\": {\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer {0}\".format(jwt),\n },\n }\n\n res = DojotAPI.call_api(requests.get, args)\n\n devices_ids = [device['id'] for device in res['devices']]\n\n LOGGER.debug(\"... retrieved the devices\")\n\n return devices_ids", "def List(self, user=None):\n with self.acc_lock:\n self._load()\n\n result = []\n if user:\n for k, v in self.tasks.iteritems():\n if v['user'] != user:\n continue\n d = dict(v)\n d['key'] = k\n result.append(d)\n else:\n for k, v in self.tasks.iteritems():\n d = dict(v)\n d['key'] = k\n result.append(d)\n return result", "async def get_device_list(self):\n self.logger.debug(\"Retrieving device list information.\")\n #url = 'https://{}/api/user/device'.format(self.apiHost) #suddenly stopped worrking, so use\n '''\n #full version\n url = 'https://{}/api/user/device?lang=en&apiKey={}&getTags=1&version={}&ts={}&nonce={}&appid={}&imei={}&os={}&model={}&romVersion={}&appVersion={}'.format(self.apiHost,\n self.apikey,\n self.timestamp,\n self._version,\n self._nonce,\n self._appid,\n self._imei,\n self._os,\n self._model,\n self._romVersion,\n self._appVersion)\n '''\n url = 'https://{}/api/user/device?version={}&appid={}'.format(self.apiHost, self._version, self._appid)\n headers = {\n 'Authorization': 'Bearer %s' % self.authenticationToken,\n }\n self.logger.debug('url: %s, headers: %s' % (url, headers))\n async with ClientSession() as session:\n async with session.get(url, headers=headers) as response:\n json_response = await response.json()\n \n self.logger.debug('received response status: %s' % response.status) \n self.logger.debug('received response: %s' % self.pprint(json_response))\n if response.status != 200:\n self.logger.error('error: %s received' % response.status)\n return\n \n if json_response.get(\"devicelist\"):\n self.logger.info('New response format found')\n json_response = json_response[\"devicelist\"]\n \n self.logger.debug('number of device(s) is: %d' % len(json_response))\n \n self._devices = json_response #list of devices and current configurations\n \n self._create_client_devices()\n \n '''\n Example Response:\n [\n {\n \"__v\": 0,\n \"_id\": \"5becffa6d2b4a3c34cb79b38\",\n \"apikey\": \"530303a6-cf2c-4246-894c-xxxxxxxxxxx\",\n \"brandName\": \"AUTOSLIDE\",\n \"createdAt\": \"2018-11-15T05:09:58.341Z\",\n \"deviceStatus\": \"\",\n \"deviceUrl\": \"\",\n \"deviceid\": \"100050xxxxx\",\n \"devicekey\": \"4123ec79-d2c3-4d32-930a-xxxxxxxxxxxxx\",\n \"extra\": {\n \"_id\": \"xxxxxxxxxxxxxxxx\",\n \"extra\": {\n \"apmac\": \"xx:xx:xx:xx:xx:xx\",\n \"brandId\": \"5a6fcf00f620073c67efc280\",\n \"description\": \"20180813001\",\n \"mac\": \"xx:xx:xx0:xx:xx:xx\",\n \"manufacturer\": \"\\u9752\\u5c9b\\u6fb3\\u601d\\u5fb7\\u667a\\u80fd\\u95e8\\u63a7\\u7cfb\\u7edf\\u6709\\u9650\\u516c\\u53f8\",\n \"model\": \"PSA-BTA-GL\",\n \"modelInfo\": \"5af3f5332c8642b001540dac\",\n \"ui\": \"\\u63a8\\u62c9\\u5ba0\\u7269\\u95e8\",\n \"uiid\": 54\n }\n },\n \"group\": \"\",\n \"groups\": [],\n \"ip\": \"xxx.xx.xx.xxx\",\n 
\"location\": \"\",\n \"name\": \"Patio Door\",\n \"offlineTime\": \"2018-12-31T07:23:31.018Z\",\n \"online\": true,\n \"onlineTime\": \"2018-12-31T12:19:33.216Z\",\n \"params\": {\n \"a\": \"3\",\n \"b\": \"3\",\n \"c\": \"1\",\n \"d\": \"1\",\n \"e\": \"1\",\n \"f\": \"1\",\n \"fwVersion\": \"2.0.2\",\n \"g\": \"0\",\n \"h\": \"1\",\n \"i\": \"0\",\n \"j\": \"00\",\n \"k\": \"0\",\n \"l\": \"1\",\n \"m\": \"2\",\n \"n\": \"0\",\n \"rssi\": -53,\n \"staMac\": \"xx:xx:xx:xx:xx:xx\"\n },\n \"productModel\": \"WFA-1\",\n \"settings\": {\n \"alarmNotify\": 1,\n \"opsHistory\": 1,\n \"opsNotify\": 0\n },\n \"sharedTo\": [\n {\n \"note\": \"\",\n \"permit\": 15,\n \"phoneNumber\": \"e-mail@gmail.com\",\n \"shareTime\": 1542259546087\n }\n ],\n \"showBrand\": true,\n \"type\": \"10\",\n \"uiid\": 54\n }\n ]\n \n or New format:\n {\n \"devicelist\": [\n {\n \"__v\": 0,\n \"_id\": \"5c3665d012d28ae6ba4943c8\",\n \"apikey\": \"530303a6-cf2c-4246-894c-50855b00e6d8\",\n \"brandLogoUrl\": \"https://us-ota.coolkit.cc/logo/KRZ54OifuGmjoEMxT1YYM3Ybu2fj5K2C.png\",\n \"brandName\": \"Sonoff\",\n \"createdAt\": \"2019-01-09T21:21:20.402Z\",\n \"devConfig\": {},\n \"devGroups\": [],\n \"deviceStatus\": \"\",\n ... as before\n '''", "def get_all_user_meter_ids(session):\n\n return [meter_id[0] for meter_id in session.query(User.meter_id).all()]", "def user_list():\n for values in USERS:\n user = User.objects.create_user(\n values[\"username\"], values[\"email\"], values[\"password\"]\n )\n user.first_name = values[\"first_name\"]\n user.last_name = values[\"last_name\"]\n user.is_staff = values[\"staff\"]\n user.is_superuser = values[\"super\"]\n user.save()\n Token.objects.create(key=values[\"token\"], user_id=user.id)\n\n # print('users created')", "def list_user():\n\tbegin = 0\n\tlength = 25\n\ttry:\n\t\tif request.json != None:\n\t\t\tbegin = int(request.json.get('begin', 0))\n\t\t\tlength = int(request.json.get('length', 25))\n\texcept:\n\t\tabort(403)\n\tif length > 100 :\n\t\tlength = 100\n\tuserList = User.list(begin, length)\n\tif userList == None:\n\t\tabort(400)\n\treturn jsonify({'users': map(lambda(e): e.output(), userList), 'begin': begin, 'length': len(userList)})", "def getDevices(i):\n devices = Account['KTFLR'].devices('monpressprod')\n device = devices[i]\n return device", "def get_todos(user_id):\n full_url = base_url + 'get-to-dos?userId=' + user_id + '&key=' + key\n response = requests.get(full_url)\n if response.status_code != 200:\n raise RequestException('Get To Dos failed with status code: {}'.format(response.status_code))\n return json.loads(response.text)", "async def find_devices() -> List[DeviceInfo]:\n return await Discovery.search_devices()", "def get_user_lists(user):\n if not user: return []\n memberships = db.Query(TaskListMember).filter('user =', user)\n return [m.task_list for m in memberships]", "def user_objects(cls, user):\n return cls.objects.filter(UserAccess.Q(user))", "def list(self, tenant_id=None):\n\n if not tenant_id:\n return self._list(\"/users\", \"users\")\n else:\n return self._list(\"/tenants/%s/users\" % tenant_id, \"users\")", "def user(self, user_token, user_device=None):\n\n self.user_token = user_token\n self.user_device = user_device", "def fetch_token(self, user_id, password):\n url = buildCommandUrl(self.server, \"/as/user/token\")\n result = json_request(\"POST\", url, {\n \"userId\": user_id,\n \"password\": password\n })\n return result[\"token\"]", "def get_for_user_in_bulk(self, user):\n if not user.is_authenticated:\n return None\n #TODO: 
This one will need more refinement.\n return self.filter(voter=user)", "def get_friends(user):\r\n try:\r\n friends = user.friends()\r\n return friends[:]\r\n except tweepy.error.RateLimitError:\r\n print(\"Rate limit reached! Waiting...\")\r\n wait_15_minutes()\r\n return get_friends(user)\r\n except tweepy.error.TweepError:\r\n print(\"Skipping user whose information is protected.\")\r\n return list()", "def test_get_user_u2ftokens(self):\n response = self.client.get_user_u2ftokens(\"DU012345678901234567\")\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v1/users/DU012345678901234567/u2ftokens\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def device_list():\n click.echo(\"\\nRetrieving the devices.\")\n\n url = base_url + \"/device\"\n\n response = requests.get(url=url, headers=header,verify=False)\n if response.status_code == 200:\n items = response.json()['data']\n else:\n print(\"Failed to get list of devices \" + str(response.text))\n exit()\n\n headers = [\"Host-Name\", \"Device Type\", \"Device ID\", \"System IP\", \"Site ID\", \"Version\", \"Device Model\"]\n table = list()\n\n for item in items:\n tr = [item.get('host-name'), item.get('device-type'), item.get('uuid'), item.get('system-ip'), item.get('site-id'), item.get('version'), item.get('device-model')]\n table.append(tr)\n try:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"fancy_grid\"))\n except UnicodeEncodeError:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"grid\"))", "async def get_devices_list(api_key, session: ClientSession, device_label=\"\"):\n\n result = {}\n\n async with session.get(\n API_DEVICES,\n headers=_headers(api_key),\n raise_for_status=True,\n ) as resp:\n device_list = await resp.json()\n\n if device_list:\n _LOGGER.debug(\"SmartThings available devices: %s\", str(device_list))\n\n for dev in device_list.get(\"items\", []):\n if (device_id := dev.get(\"deviceId\")) is None:\n continue\n if dev.get(\"type\", \"\") != DEVICE_TYPE_OCF:\n continue\n\n label = dev.get(\"label\", \"\")\n if device_label:\n if label != device_label:\n continue\n elif dev.get(\"deviceTypeName\", \"\") != DEVICE_TYPE_NAME_TV:\n continue\n\n result[device_id] = {\n \"name\": dev.get(\"name\", f\"TV ID {device_id}\"),\n \"label\": label,\n }\n\n _LOGGER.info(\"SmartThings discovered TV devices: %s\", str(result))\n\n return result", "def list_user_commands(self, uid):\n uid = self._check_uid(uid)\n\n uc_data = self._router_request(\n self._make_request_data(\n 'getUserCommands',\n data=dict(\n uid=uid,\n )\n )\n )\n\n return uc_data['data']", "def retr_device( device_id ) :\n\n\t\t\t_logger.info( '...retr_device...' 
)\n\t\t\toutput = []\n\n\t\t\tdb = mongo.db.auth_devices\n\t\t\tdev = db.find( { 'device_id' : device_id } )\n\t\t\tif dev.count() == 0 :\n\t\t\t\t_logger.error( '...retr_device %s' % e.message )\n\t\t\t\traise mongo_no_resource_exception( 'no tokenized device found')\n\t\t\tfor device in dev :\n\t\t\t\toutput = {'moniker' : device['device_moniker'] ,\n\t\t\t\t\t\t 'description' : device['description'] ,\n\t\t\t\t\t\t 'active' : device['active'] ,\n\t\t\t\t\t\t 'device_id' : device['device_id'] ,\n\t\t\t\t\t\t 'spawned' : device['spawned'] ,\n\t\t\t\t\t\t 'last_known_remote_ip' : device['last_known_remote_ip'] ,\n\t\t\t\t\t\t 'canonical_user' : device['canonical_user'] ,\n\t\t\t\t\t\t 'segment' : device['segment'] ,\n\t\t\t\t\t\t 'auth_apps' : device['auth_apps'] ,\n\t\t\t\t\t\t 'cloak_origin' : device['cloak_origin'] ,\n\t\t\t\t\t\t 'cloak_monitor_stream' : device['cloak_monitor_stream'] ,\n\t\t\t\t\t\t 'auth_http_id' : device['auth_http_id']\n\t\t\t\t\t\t }\n\n\t\t\treturn jsonify({'result' : output})", "def list_devices(context, connstrings):\n return _nfc.list_devices(context, connstrings)", "def user_medias(self, user_id: int, amount: int = 50) -> List[Media]:\n amount = int(amount)\n user_id = int(user_id)\n try:\n try:\n medias = self.user_medias_gql(user_id, amount)\n except ClientLoginRequired as e:\n if not self.inject_sessionid_to_public():\n raise e\n medias = self.user_medias_gql(user_id, amount) # retry\n except Exception as e:\n if not isinstance(e, ClientError):\n self.logger.exception(e)\n # User may been private, attempt via Private API\n # (You can check is_private, but there may be other reasons,\n # it is better to try through a Private API)\n medias = self.user_medias_v1(user_id, amount)\n return medias", "def devices(self, **kwargs):\n return self._get(API.DEVICES.value, check_202=True, **kwargs)", "def list_devices(cls, filters={}):\n return cls.dbdriver.list_devices(filters)", "def get_devices(self):\n e = ctypes.POINTER(rs_error)()\n n_devices = lrs.rs_get_device_count(self.ctx, ctypes.byref(e))\n _check_error(e)\n\n lrs.rs_get_device.restype = ctypes.POINTER(rs_device)\n for idx in range(n_devices):\n dev = lrs.rs_get_device(self.ctx, idx, ctypes.byref(e))\n _check_error(e)\n\n name = pp(lrs.rs_get_device_name, dev, ctypes.byref(e))\n _check_error(e)\n\n serial = pp(lrs.rs_get_device_serial, dev, ctypes.byref(e))\n _check_error(e)\n\n version = pp(lrs.rs_get_device_firmware_version, dev, ctypes.byref(e))\n _check_error(e)\n\n is_streaming = lrs.rs_is_device_streaming(dev, ctypes.byref(e))\n _check_error(e)\n\n yield {'id': idx, 'name': name, 'serial': serial,\n 'firmware': version, 'is_streaming': is_streaming}", "def delete_tokens_for_user(self, user_id, project_id=None):\n if not CONF.token.revoke_by_id:\n return\n self.delete_tokens(user_id, tenant_id=project_id)\n for trust in self.trust_api.list_trusts_for_trustee(user_id):\n # Ensure we revoke tokens associated to the trust / project\n # user_id combination.\n self.delete_tokens(user_id, trust_id=trust['id'],\n tenant_id=project_id)\n for trust in self.trust_api.list_trusts_for_trustor(user_id):\n # Ensure we revoke tokens associated to the trust / project /\n # user_id combination where the user_id is the trustor.\n\n # NOTE(morganfainberg): This revocation is a bit coarse, but it\n # covers a number of cases such as disabling of the trustor user,\n # deletion of the trustor user (for any number of reasons). It\n # might make sense to refine this and be more surgical on the\n # deletions (e.g. 
don't revoke tokens for the trusts when the\n # trustor changes password). For now, to maintain previous\n # functionality, this will continue to be a bit overzealous on\n # revocations.\n self.delete_tokens(trust['trustee_user_id'], trust_id=trust['id'],\n tenant_id=project_id)", "def get_available_devices(self):\n try:\n out = self.get_output(\"devices\")\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n available_devices = []\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n\n return available_devices", "def getRecipesByUser(cls, user=None):\n\n recipes = (db.session.query(Recipe).join(RecipeUser).\\\n filter(Recipe.recipe_id == RecipeUser.recipe_fk).\\\n filter(RecipeUser.user_fk == user).all())\n\n return recipes", "def get_tacs_from_devices(cls, devices):\n tacs = []\n for device in devices:\n tac = device.get('tac')\n tacs.append(tac)\n return tacs", "def for_user(cls, user):\n\n token = super().for_user(user)\n\n TokenMeta.objects.get_or_create(\n jti=token['jti'],\n token=str(token),\n )\n\n return token", "def user_sends_get_call_to_the_devices():\n web_app.list_devices()", "def get_user_device_addressbooks(self, device_id, give_json=False):\n\n url = Constants.BASE_URL + 'users/addressbooks/device'\n response = requests.post(url=url, params={'key': self.user_access_token, 'device_id': device_id})\n\n if give_json:\n return response.json()\n else:\n return response.text", "def devices_list_view(request):\n return read_json(request.registry.settings['devices_path'], [])", "def get_devices():\n devices, errors = [], []\n\n for path in hookenv.action_get('devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n errors.append('{}: Not absolute path.'.format(path))\n elif not os.path.exists(path):\n errors.append('{}: Device does not exist.'.format(path))\n else:\n devices.append(path)\n\n if errors:\n raise ZapDiskError(\", \".join(errors))\n\n return devices", "def get_user_active_list(self, user_id):\n return self.api.get_active_version_manager_by_user_id(user_id)" ]
[ "0.63648194", "0.62265784", "0.6117706", "0.61175627", "0.5826038", "0.574232", "0.5716294", "0.5668577", "0.53647625", "0.52985567", "0.5172679", "0.5028343", "0.501832", "0.49658716", "0.4858226", "0.48548654", "0.4841386", "0.48357037", "0.48057312", "0.47699174", "0.4757789", "0.47471172", "0.47447142", "0.47440553", "0.4720507", "0.47186446", "0.47182205", "0.47145575", "0.47067425", "0.46776956", "0.4666172", "0.4615231", "0.46109357", "0.4602119", "0.46013606", "0.45929357", "0.45615754", "0.45502487", "0.45459452", "0.45449704", "0.45141694", "0.44845575", "0.44779548", "0.44622627", "0.4461685", "0.44532526", "0.44403774", "0.4437335", "0.44340307", "0.44302833", "0.43929368", "0.43926197", "0.43903652", "0.4389995", "0.43816334", "0.43791145", "0.4374547", "0.43743253", "0.4372633", "0.4372018", "0.4369534", "0.4368929", "0.43549925", "0.4345195", "0.433079", "0.4325458", "0.43093112", "0.43071628", "0.43001175", "0.42990187", "0.42941818", "0.42734358", "0.42669657", "0.42664468", "0.42588964", "0.42537686", "0.42428482", "0.42408746", "0.4233112", "0.4232203", "0.4219962", "0.42121252", "0.4211615", "0.42055455", "0.42042893", "0.4204117", "0.4200879", "0.42006123", "0.41976157", "0.41830564", "0.41803503", "0.4177574", "0.41775185", "0.41747633", "0.41741377", "0.41738617", "0.41698158", "0.41682863", "0.4166096", "0.41558033" ]
0.6962967
0
Lists the network sources in your tenancy. You must specify your tenancy's OCID as the value for the compartment ID (remember that the tenancy is simply the root compartment). See `Where to Get the Tenancy's OCID and User's OCID`__.
def list_network_sources(self, compartment_id, **kwargs): resource_path = "/networkSources" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_network_sources got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[NetworkSourcesSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[NetworkSourcesSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_sources():\n url = base_url + \"sources\"\n params = {\"language\": \"en\"}\n resp = requests.get(url, params=params)\n data = resp.json()\n sources = [src['id'].strip() for src in data['sources']]\n print(\"all the sources:\")\n print(sources)\n return sources", "def paths_list(ctx):\n for path in ctx.obj['CLIENT'].paths.list():\n if not path.source.name:\n cidr_blocks = [subnetwork.cidr_block for subnetwork in path.source.subnetworks]\n source_name = \",\".join(cidr_blocks)\n network_name = \"external\"\n else:\n source_name = path.source.name\n network_name = path.source.network.name\n click.echo(\"%s:%s -(%s)-> %s:%s\" % (network_name, source_name, path.port,\n path.network.name, path.destination.name))", "def ListSources(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def get_sources():\n url = base_url + \"sources\"\n params = {\"language\": \"en\"}\n resp = requests.get(url, params=params)\n data = resp.json()\n sources = [src['id'].strip() for src in data['sources']]\n print(\"all the sources\")\n print(sources)\n return sources", "def show_sources_all():\n response = requests.get(SOURCE_URL)\n json = response.json()\n for source in json['sources']:\n print(u\"{0}: <{1}> {2}\".format(\"News Code\", source['id'], source['name']))", "def get_datasource_list():\n global datasource_list\n\n if not datasource_list:\n datasource_list = stixhelpers.get_datasources(get_srcs())\n\n return datasource_list", "def listsources():\n\tmain_url = \" https://newsapi.org/v2/sources?apiKey=5f81b593f35d42a8980313250c03d7e7\"\n\n\t# fetching data in json format \n\topen_source = requests.get(main_url).json() \n\n\t# getting all articles in a string sources\n\tsource = open_source[\"sources\"] \n\n\t# empty list which will \n\t# contain all trending newssources \n\tresults = [] \n\t\n\tfor k in source: \n results.append(k[\"id\"])\n \n \t\n\tfor w in results[0:4]:\n print(w)", "def network_list(request):\n flatpage = get_flatpage_or_none(request)\n network_list = Network.objects.filter(user_id=0)\n\n return {\n 'flatpage': flatpage,\n 'network_list': network_list,\n }", "def network_list_for_tenant(request, tenant_id, include_external=False,\n include_pre_auto_allocate=False, page_data=None,\n **params):\n\n # Pagination is implemented consistently with nova and cinder views,\n # which means it is a bit hacky:\n # - it requests X units but displays X-1 units\n # - it ignores the marker metadata from the API response and uses its own\n # Here we have extra hacks on top of that, because we have to merge the\n # results of 3 different queries, and decide which one of them we are\n # actually paginating.\n # The 3 queries consist of:\n # 1. Shared=True networks\n # 2. Project non-shared networks\n # 3. External non-shared non-project networks\n # The main reason behind that order is to maintain the current behavior\n # for how external networks are retrieved and displayed.\n # The include_external assumption of whether external networks should be\n # displayed is \"overridden\" whenever the external network is shared or is\n # the tenant's. 
Therefore it refers to only non-shared non-tenant external\n # networks.\n # To accomplish pagination, we check the type of network the provided\n # marker is, to determine which query we have last run and whether we\n # need to paginate it.\n\n LOG.debug(\"network_list_for_tenant(): tenant_id=%(tenant_id)s, \"\n \"params=%(params)s, page_data=%(page_data)s\", {\n 'tenant_id': tenant_id,\n 'params': params,\n 'page_data': page_data,\n })\n\n page_data, marker_net = _configure_pagination(\n request, params, page_data, tenant_id=tenant_id)\n\n query_kwargs = {\n 'request': request,\n 'include_external': include_external,\n 'tenant_id': tenant_id,\n 'page_data': page_data,\n **params,\n }\n\n return _perform_query(\n _query_nets_for_tenant, query_kwargs, marker_net,\n include_pre_auto_allocate)", "def get_results_from_aggregation_sources(self, context):\n sources = context.getContentSources()\n results = []\n for source in sources:\n sresults = source.queryCatalog()\n if not sresults:\n continue\n results.append({\n 'id': source.id,\n 'title': source.Title(),\n 'description': source.Description(),\n 'uid': source.UID(),\n 'portal_type': sresults[0].portal_type,\n 'brains': sresults,\n 'brains_count': len(sresults),\n })\n return results", "def list_protection_sources(cohesity_client, env=\"kView\"):\n sources = cohesity_client.protection_sources.list_protection_sources(\n environments=env\n )\n sources = sources if sources else []\n return sources", "def networks(view):\n return \"network?\" \\\n \"_return_fields=\" \\\n \"extattrs,\" \\\n \"comment,\" \\\n \"network,\" \\\n \"network_view,\" \\\n \"utilization&\" \\\n \"network_view=\" + view + \\\n \"&_max_results=-25000\"", "def show_networks():\n return get_networks()", "def list_sources(config, base_dir, verbose=False):\n for source in config.sources_under(abspath(base_dir)):\n if verbose:\n print(\"# %s (%s)\" % (source.nicedir, ' '.join(source.info)))\n else:\n print(source.nicedir)", "def sources(self) -> Sequence[Any]:\n return pulumi.get(self, \"sources\")", "def do_network_list(cs, args):\n opts = {}\n opts['container'] = args.container\n opts = zun_utils.remove_null_parms(**opts)\n networks = cs.containers.network_list(**opts)\n zun_utils.list_container_networks(networks)", "def getNetworksList():\n logger.debug('Start.')\n code, res = rest_requests.get(networks_url)\n if code != requests.codes.ok:\n logger.error((code, res))\n return None\n return res[\"networks\"]", "def load_network_templates(self) -> List:\n try:\n network_templates = self.api.get(host=self.host, endpoint=f\"/api/v1/orgs/{self.oid}/networktemplates\")\n except Exception as e:\n logger.error(f\"{TextColors.FAIL}Error getting network templates:{TextColors.ENDC} {e}\")\n raise e\n self.network_templates = network_templates", "def list_sources(username, token=None):\n mapbox_api = _get_api()\n mapbox_token = _get_token(token)\n url = \"{0}/tilesets/v1/sources/{1}?access_token={2}\".format(\n mapbox_api, username, mapbox_token\n )\n r = requests.get(url)\n if r.status_code == 200:\n for source in r.json():\n click.echo(source[\"id\"])\n else:\n raise errors.TilesetsError(r.text)", "def netlist(self):\n return self._netlist", "def sources(self) -> Optional[Sequence['outputs.AddressPrefixItemResponse']]:\n return pulumi.get(self, \"sources\")", "def source_list(self):\n return list(self._client.group.streams_by_name().keys())", "def list_net(self):\n _url = \"http://\" + self.host_ip + \":9696/v2.0/networks\"\n _headers = {'Content-type': 'application/json',\n 
'x-auth-token': self.project_info[\"token_project\"]}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\"No response from Server while listing the networks\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get network list Failed with status %s \" %\n response.status)\n return response.status\n output = json.loads(response.data)\n\n LOG_OBJ.info(\"Network List : %s \" % output)\n return output['networks']", "def get_all_feed_sources(request):\n feed_sources = FeedSource.objects.all().order_by('-id')\n return get_feed_sources_list(feed_sources)", "def get_all_host(self, conf, tenant_id, network_id):\n\t\tpass", "def get_network_list(network = None, include_details = True):\n \n if network == None: \n json_obj = requests.get(api_base_url + 'networks')\n return json.loads(json_obj.content)['networks']\n rq_url = api_base_url + '{}/sites'.format(network)\n json_obj = requests.get(rq_url)\n sites_list = json.loads(json_obj.content)\n d = OrderedDict(zip([x.pop('network_siteid') for x in sites_list['sites']], \n sites_list['sites']))\n if include_details: return d\n return d.keys()", "def Sources():\n return _sources", "def list_subnets(self, kwargs):\n verbose = kwargs.get(\"verbose\", False)\n\n if not verbose:\n attributes = [\"distinguishedName\", \"name\", \"description\"]\n else:\n attributes = ALL\n\n if verbose:\n self.display(\n self.engine.query(\n self.engine.SITES_FILTER(),\n attributes, base=','.join([\"CN=Configuration\", self.engine.base_dn])\n ),\n verbose\n )\n else:\n entries = self.engine.query(self.engine.SITES_FILTER(), attributes, base=','.join([\"CN=Configuration\", self.engine.base_dn]))\n\n site_dn = \"\"\n site_name = \"\"\n site_description = \"\"\n # subnet_dn = \"\"\n subnet_name = \"\"\n subnet_description = \"\"\n for entry in entries:\n site_dn = entry[\"distinguishedName\"] if entry[\"distinguishedName\"] else \"\"\n site_name = entry[\"name\"] if entry[\"name\"] else \"\"\n site_description = entry[\"description\"][0] if entry[\"description\"] else \"\"\n subnet_entries = self.engine.query(self.engine.SUBNET_FILTER(site_dn), attributes, base=','.join([\"CN=Sites,CN=Configuration\", self.engine.base_dn]))\n for subnet in subnet_entries:\n # subnet_dn = subnet[\"distinguishedName\"] if subnet[\"distinguishedName\"] else \"\"\n subnet_name = subnet[\"name\"] if subnet[\"name\"] else \"\"\n subnet_description = subnet[\"description\"][0] if subnet[\"description\"] else \"\"\n servers = self.engine.query(\"(objectClass=server)\", ['cn'], base=site_dn)\n servers_list = [d['cn'] for d in servers]\n\n output = \"Site: {}\".format(site_name)\n output += \" | Subnet: {}\".format(subnet_name) if subnet_name else \"\"\n output += \" | Site description: {}\".format(site_description) if site_description else \"\"\n output += \" | Subnet description: {}\".format(subnet_description) if subnet_description else \"\"\n output += \" | Servers: {}\".format(', '.join(servers_list)) if servers_list else \"\"\n print(output)", "def fetch_list(self):\n\t\treturn self.fetch(self.list_url % ART_SERVER_HOST)", "def sources(self):\n return self._sources", "def node_sources(self, node):\r\n node = self.coalesce_node(node)\r\n nodes =[conn[0] for conn in self.connections if conn[1] == node]\r\n return nodes", "def getSDDCnetworks(**kwargs):\n sessiontoken = kwargs['sessiontoken']\n proxy = kwargs['proxy']\n json_response = get_cgw_segments_json(proxy, sessiontoken)\n if json_response 
!= False:\n sddc_networks = json_response['results']\n table = PrettyTable(['Name', 'id', 'Type', 'Network', 'Default Gateway'])\n table_extended = PrettyTable(['Name', 'id','Tunnel ID'])\n for i in sddc_networks:\n if ( i['type'] == \"EXTENDED\"):\n table_extended.add_row([i['display_name'], i['id'], i['l2_extension']['tunnel_id']])\n elif ( i['type'] == \"DISCONNECTED\"):\n table.add_row([i['display_name'], i['id'], i['type'],\"-\", \"-\"])\n else:\n table.add_row([i['display_name'], i['id'], i['type'], i['subnets'][0]['network'], i['subnets'][0]['gateway_address']])\n print(\"Routed Networks:\")\n print(table)\n print(\"Extended Networks:\")\n print(table_extended)\n else:\n print(\"Something went wrong, please try again.\")\n sys.exit(1)", "def Sources(self):\n return self._sources", "def getNets(self):\n\t\treturn NetLoader.listNetworks()", "def build_sites_list():\n ns_getsites_filter = '''\n <nc:filter type=\"xpath\"\n xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\"\n xmlns:rm=\"http://cisco.com/ns/yang/Cisco-IOS-XE-route-map\"\n select=\"/native/route-map[substring(name, 1, 3)='To_']/name\"\n />\n '''\n m = manager.connect( host='10.112.83.100',\n port=830,\n username='cisco',\n password='cisco',\n hostkey_verify=False)\n answer = m.get_config(source='running', filter=ns_getsites_filter).data_xml\n c = xmltodict.parse (answer)\n # build the list\n liste_sites = [ r['name'][3:] for r in c['data']['native']['route-map'] ]\n return liste_sites", "def GetEventSources(self):\n return self._GetAttributeContainers('event_source')", "def source_list(self):\n return [\n source.Name for source in self.coordinator.data.sources if not source.Hidden\n ]", "def get_org_list():\r\n\r\n resp = requests.get(''.join([Kegg.BASE_URL, 'list/organism']))\r\n return resp.text", "def list_networks(session):\n # type: (Session) -> List[Dict[str, Any]]\n url_tail = f\"/{CoordConstsV2.RSC_NETWORKS}\"\n return _get_list(session, url_tail)", "def show_sources_category(category):\n if category not in NEWS_CATEGORIES:\n print(\"Invalid category\")\n sys.exit(1)\n\n url = \"?category={category_type}\"\n response = requests.get((SOURCE_URL+url).format(category_type=category))\n json = response.json()\n for source in json['sources']:\n print(u\"{0}: <{1}> {2}\".format(\"News Code\", source['id'], source['name']))", "def get_sources(**kwargs):\n\n instance = Ceic._get_instance()\n\n get_dictionaries_method = instance._dictionary_facade.get_sources\n result = instance._make_request(get_dictionaries_method, **kwargs)\n\n return result", "def get_networks(self):\n url = '%s/v2.0/networks' % self.catalog['network']\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['networks']\n else:\n LOG.error('Get networks failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)", "def sources(self):\n return self._sources.keys()", "def get(self):\n\n return self.get_request_handler(request.headers).get_all_sources()", "def list_networks():\n return __sets.keys()", "def source_list(self):\n return self._source_list", "def source_list(self):\n return self._source_list", "def source_list(self):\n return list(self._group.streams_by_name().keys())", "def __call__(self) -> list:\n return self.network", "def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))", "def source(self) -> list:\n sources = self.source_control.list_sources()\n sources_list = [source['label'] for source in sources]\n return sources_list", "def get_sources(self, 
target: Tuple[str, str], relation: str = None) -> List[Node]:\n return self.get_common_sources([target], relation)", "def ns_list(self):\n return sorted(self.get_ns_name(ns) for ns in self.profile.authoritative_servers)", "def get_data_sources(self) -> [DataSource]:\n return []", "def get_protection_sources(cohesity_client):\n sources = cohesity_client.protection_sources.list_protection_sources_root_nodes()\n sources = sources if sources else []\n for source in sources:\n keys = None\n environment = source.protection_source.environment\n if source.protection_source.environment == env_enum.K_VMWARE:\n name = source.protection_source.name\n elif environment in [env_enum.KISILON, env_enum.KNETAPP]:\n name = source.protection_source.isilon_protection_source.name \\\n if source.protection_source.isilon_protection_source else \\\n source.protection_source.netapp_protection_source.name\n keys = [\"password\", \"smb_password\"]\n elif environment == \"kCassandra\":\n name = source.protection_source.name\n keys = [\"username\", \"password\", \"db_username\", \"db_password\"]\n else:\n continue\n config_dict[name] = keys\n return sources", "def nodes(self):\n return self.source_net.nodes()", "def list_server_datasource(self, feed_id=None):\n resources = self.list_resource(feed_id=feed_id, resource_type_id='Datasource')\n resources.extend(self.list_resource(\n feed_id=feed_id,\n resource_type_id='XA Datasource'))\n datasources = []\n if resources:\n for resource in resources:\n datasources.append(Datasource(resource.id, resource.name, resource.path))\n return datasources", "def getNodes(self):\n nodes = [{\"address\": \"http://0.0.0.0:100\"}\n ,{\"address\": \"http://0.0.0.0:200\"}\n ,{\"address\": \"http://0.0.0.0:300\"}\n ,{\"address\": \"http://0.0.0.0:400\"}\n ,{\"address\": \"http://0.0.0.0:500\"}]\n return nodes", "def getSourcesFromVehicle(vehicleName):\n pass", "def get_networks() -> dict:\n nets_rq = request(\n method=\"GET\", url=app.config[\"NETWORKS_REF\"], headers=build_header()\n )\n\n if not nets_rq:\n raise HTTPError(nets_rq.status_code)\n\n return nets_rq.json()", "def list_server_datasource(self, feed_id=None):\n datasources = self.list_resource(feed_id=feed_id,\n resource_type_id='Datasource',\n cls=Datasource,\n list_children=True)\n datasources.extend(self.list_resource(\n feed_id=feed_id,\n resource_type_id='XA Datasource',\n cls=Datasource,\n list_children=True))\n return datasources", "def get_network_on_vc(options):\n datacenter = get_datacenter(options)\n networks = datacenter.network\n\n name = get_network_name(options)\n for network in networks:\n if re.search(name, network.name):\n return network", "def getNodeNetworks(self,node):\n data = self.connect('get','nodes/%s/network' % (node),None)\n return data", "def list_connections(self, show_passthrough=True):\n return self._exprmapper.list_connections(show_passthrough)", "def list_connections(self):\n return self.network.list_connections()", "def get_indexes(self):\n resources = []\n\n data = self.get_resource_data(type='sources')\n data['url'] = constants.URLS['sources'] % data\n resources.append(data)\n\n for arch in self.data['arch']:\n data = self.get_resource_data(type='packages', arch=arch)\n data['url'] = constants.URLS['packages'] % data\n resources.append(data)\n return resources", "def networkcontainers(view):\n return \"networkcontainer?\" \\\n \"_return_fields=\" \\\n \"extattrs,\" \\\n \"comment,\" \\\n \"network,\" \\\n \"network_view,\" \\\n \"utilization&\" \\\n \"network_view=\" + view + \\\n 
\"&_max_results=-25000\"", "def getOrtURLs(self, results):\n pcat = self.portal_catalog\n newresults = []\n for i in results:\n raw_webcode = i.get('webcode')\n if isinstance(raw_webcode, float):\n webcode = str(int(raw_webcode))\n elif isinstance(raw_webcode, int):\n webcode = str(raw_webcode)\n else:\n webcode = raw_webcode\n brains = pcat(Webcode = webcode)\n if len(brains) == 1:\n i['orturl'] = brains[0].getURL()\n else:\n i['orturl'] = ''\n newresults.append(i)\n return newresults", "def fetch_router_list(args):\n nd = NetDevices(production_only=opts.nonprod)\n ret = []\n blocked_groups = []\n if args:\n for arg in args:\n # Try to find the device, but fail gracefully if it can't be found\n device = device_match(arg)\n if not pass_filters(device) or device is None:\n continue\n ret.append(device)\n\n else:\n for entry in nd.itervalues():\n if entry.owningTeam in blocked_groups:\n continue\n if not pass_filters(entry):\n continue\n ret.append(entry)\n\n return sorted(ret, reverse=True)", "def project_list_networks(project):\n q = client.project.networks_in(project)\n sys.stdout.write(\n \"Networks allocated to %s\\t: %s\\n\" % (project, \" \".join(q))\n )", "def dashboard_article_sources():\n sources = dict()\n past_30 = offset_time_past(30, str=True)\n articles = mongo.db[app.config['ARTICLES_COLLECTION']]\n results = articles.find({'collected': {'$gt': past_30}}, {'_id': 0})\n for result in results:\n sources[result['source']] = sources.get(result['source'], 0) + 1\n sources = sorted(sources.items(), key=operator.itemgetter(1), reverse=True)\n data = sources[:10]\n return jsonify(data)", "def network_list(self, kwargs=None):\n try:\n scode, networks = Rest.get('Network')\n except docker.errors.APIError as e:\n Console.error(e.explanation)\n return\n\n if len(networks) == 0:\n Console.info(\"No network exist\")\n return\n\n n = 1\n e = {}\n data = []\n for network in networks:\n d = {}\n d['Ip'] = network['Ip']\n d['Id'] = network['Id']\n d['Name'] = network['Name']\n d['Containers'] = network['Containers']\n e[n] = d\n n = n + 1\n Console.ok(str(Printer.dict_table(e, order=['Ip', 'Id', 'Name', 'Containers'])))", "def collectNet(self):\n network = self.options.net\n # net option from the config file is a string\n if isinstance(network, basestring):\n network = [network]\n # in case someone uses 10.0.0.0,192.168.0.1 instead of\n # --net 10.0.0.0 --net 192.168.0.1\n if isinstance(network, (list, tuple)) and \",\" in network[0]:\n network = [n.strip() for n in network[0].split(\",\")]\n count = 0\n devices = []\n if not network:\n network = yield self.config().callRemote(\"getDefaultNetworks\")\n\n if not network:\n self.log.warning(\"No networks configured\")\n defer.returnValue(None)\n\n for net in network:\n try:\n nets = yield self.config().callRemote(\n \"getNetworks\", net, self.options.subnets\n )\n if not nets:\n self.log.warning(\"No networks found for %s\", net)\n continue\n ips = yield self.discoverIps(nets)\n devices += ips\n count += len(ips)\n except Exception as ex:\n self.log.exception(\n \"Error performing net discovery on %s: %s\", net, ex\n )\n self.log.info(\"Working on devices: %s\", devices)\n\n foundDevices = []\n for device in devices:\n result = yield self.discoverDevice(\n device, self.options.deviceclass, self.options.productionState\n )\n if result is not None:\n foundDevices.append(result)\n defer.returnValue(foundDevices)", "def asset_net_list(request, format=None):\n if request.method == 'GET':\n snippets = Network_Assets.objects.all()\n serializer = 
NetworkSerializer(snippets, many=True)\n return Response(serializer.data)\n elif request.method == 'POST':\n if (request.data.get('data')):\n data = request.data.get('data')\n else:\n data = request.data\n serializer = NetworkSerializer(data=data)\n if serializer.is_valid():\n serializer.save()\n recordAssets.delay(user=str(request.user), content=\"添加网络设备资产:{ip}\".format(ip=data.get(\"ip\")), type=\"net\",\n id=serializer.data.get('id'))\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def list_clients(self):\n\n return self.clients_info", "def network_acls(self) -> Optional['outputs.DataCollectionEndpointResponseNetworkAcls']:\n return pulumi.get(self, \"network_acls\")", "def dnc_lists(self):\n return self._dnc_lists", "def source_name_list(self):\n return list(self._sources.keys())", "def get_all_netids(self):\n self.setQuery(\"\"\"\n Select ?netid where {\n ?who <http://vivo.dartmouth.edu/ontology/netId> ?netid .\n }\"\"\")\n\n try:\n rval = self.query()\n g = rval.convert()\n return [x['netid']['value'] for x in g['results']['bindings']]\n except:\n print \"Select failed\"\n traceback.print_exc(file=sys.stdout)", "def get_queryset(self):\n query_set = super(NetworkProxyManager, self).get_queryset()\n return query_set", "def list(self):\n path = 'orgProvisioning/ipGreTunnelInfo'\n return self._session.get(path)", "def test_networking_project_network_list(self):\n pass", "def list_tenants(self):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/tenants\"\n _headers = {'x-auth-token': self.cloud_admin_info['token_project']}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\" no response from Server\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\n \" tenant list Failed with status %s \" %\n response.status)\n return response.status\n output = json.loads(response.data)\n LOG_OBJ.info(\"Tenant List : %s \" % output)\n return output[\"tenants\"]", "def list_networks(self, filters=None):\n # If the cloud is running nova-network, just return an empty list.\n if not self.has_service('network'):\n return []\n\n # Translate None from search interface to empty {} for kwargs below\n if not filters:\n filters = {}\n return list(self.network.networks(**filters))", "def source_connections_info(self) -> pulumi.Output[Sequence['outputs.MapperSourceConnectionsInfoResponse']]:\n return pulumi.get(self, \"source_connections_info\")", "def list_requests(self, src_rse, dst_rse, request_states):\n path = '/'.join([self.REQUEST_BASEURL, 'list']) + '?' 
+ '&'.join(['src_rse={}'.format(src_rse), 'dst_rse={}'.format(\n dst_rse), 'request_states={}'.format(request_states)])\n url = build_url(choice(self.list_hosts), path=path)\n r = self._send_request(url, type_='GET')\n\n if r.status_code == codes.ok:\n return self._load_json_data(r)\n else:\n exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)\n raise exc_cls(exc_msg)", "def test_list_net_namespace(self):\n pass", "def labelset_list(request):\n\n publicSources = Source.objects.filter(visibility=Source.VisibilityTypes.PUBLIC)\n publicSourcesWithLabelsets = publicSources.exclude(labelset=LabelSet.getEmptyLabelset())\n\n return render_to_response('annotations/labelset_list.html', {\n 'publicSourcesWithLabelsets': publicSourcesWithLabelsets,\n },\n context_instance=RequestContext(request)\n )", "def clients(self):\n items = []\n for elem in self.query('/clients'):\n baseurl = 'http://%s:%s' % (elem.attrib['address'],\n elem.attrib['port'])\n items.append(PlexClient(baseurl, server=self, data=elem))\n return items", "def list_network_profiles(self, **params):\r\n return self.get(self.network_profiles_path, params=params)", "def getSourceGroups(self):\n ret = self.jsonRequest(\"/api/v1/sourceGroup/getSourceGroups\", {})\n return ret", "def getListOfSites(self):\n with self.config.TaskWorker.envForCMSWEB:\n sites = self.resourceCatalog.getAllPSNs()\n filteredSites = [site for site in sites if not site.startswith(\"T1_\")]\n\n return filteredSites", "def getNSites(self):\n return self.nsites", "def get_list_of_nets(self):\n return self.mfp.get_list_of_nets()", "def build_contracts_list():\n ns_getcontracts_filter = '''\n <nc:filter type=\"xpath\"\n xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\"\n xmlns:na=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\"\n xmlns:rb=\"http://cisco.com/ns/yang/Cisco-IOS-XE-bgp\"\n select=\"/na:native/ip/rb:extcommunity-list/standard\"\n />\n '''\n m = manager.connect( host='10.112.83.100',\n port=830,\n username='cisco',\n password='cisco',\n hostkey_verify=False)\n answer = m.get_config(source='running', filter=ns_getcontracts_filter).data_xml\n c = xmltodict.parse (answer)\n # build the list\n liste_contracts = [ { 'name': r['name'], 'id': r['permit']['rt']['name'][6:] } for r in c['data']['native']['ip']['extcommunity-list']['standard'] ]\n return liste_contracts", "def get_source_info_list(self):\n self._get_source_info_list = pa_source_info_cb_t(self._source_info_cb)\n pa_context_get_source_info_list(self._context,\n self._get_source_info_list,\n None)", "def getSites(dataSource):\n pointsLayer = dataSource.GetLayer()\n pointsLayer.SetAttributeFilter(\"id >= 0\")\n return pointsLayer", "def source_list(self):\n return self._playlists", "def get_clients(self):\n clis = []\n for c in self._clients:\n clis.append(c.get_address())\n return clis", "def get_clients(self):\n clis = []\n for c in self._clients:\n clis.append(c.get_address())\n return clis" ]
[ "0.6069622", "0.60684687", "0.6046004", "0.60339135", "0.59831995", "0.59754866", "0.59675294", "0.5898026", "0.586566", "0.5807168", "0.5727688", "0.5714986", "0.5702389", "0.5680545", "0.564806", "0.5645466", "0.56405693", "0.56157684", "0.55836064", "0.55833864", "0.55707765", "0.5557252", "0.55565894", "0.55148464", "0.54938745", "0.54872566", "0.54819494", "0.5478539", "0.5476802", "0.54744625", "0.54617375", "0.5432542", "0.5421746", "0.54212886", "0.5411535", "0.5406531", "0.53789073", "0.5372672", "0.5359445", "0.5358109", "0.53478104", "0.53351724", "0.5330752", "0.5329881", "0.5319075", "0.5292804", "0.5292804", "0.5286219", "0.52849305", "0.52829033", "0.5282203", "0.5275912", "0.5234738", "0.5219923", "0.5212893", "0.52128464", "0.5201881", "0.5196901", "0.5193886", "0.51855725", "0.51853347", "0.51816523", "0.51789427", "0.5170817", "0.51672286", "0.51629215", "0.5151382", "0.5143039", "0.5135042", "0.51340413", "0.51177883", "0.509904", "0.5094772", "0.50874865", "0.50856847", "0.50746703", "0.5073132", "0.5070604", "0.50690114", "0.5066756", "0.50598115", "0.5054643", "0.5047188", "0.5039454", "0.5030215", "0.50231", "0.5011868", "0.4993924", "0.49930173", "0.49831447", "0.49811578", "0.496839", "0.49679762", "0.49657318", "0.4964838", "0.4963637", "0.49581707", "0.49509773", "0.494817", "0.494817" ]
0.66614133
0
List of OAuth tokens for the user
def list_o_auth_client_credentials(self, user_id, **kwargs): resource_path = "/users/{userId}/oauth2ClientCredentials" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "lifecycle_state" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_o_auth_client_credentials got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) if 'lifecycle_state' in kwargs: lifecycle_state_allowed_values = ["CREATING", "ACTIVE", "INACTIVE", "DELETING", "DELETED"] if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values: raise ValueError( "Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values) ) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "lifecycleState": kwargs.get("lifecycle_state", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[OAuth2ClientCredentialSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[OAuth2ClientCredentialSummary]")
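A minimal usage sketch for the paginated method in the document field above. It assumes an authenticated `oci.identity.IdentityClient` built from a standard config file; the client class, the config location, and the field names on the returned summary objects are assumptions for illustration, not part of the record itself.

```python
import oci

# Assumes a valid OCI config at the default ~/.oci/config location.
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# list_call_get_all_results keeps following the opc-next-page token that
# each paginated response carries, so the caller never has to handle the
# page/limit kwargs of list_o_auth_client_credentials directly.
credentials = oci.pagination.list_call_get_all_results(
    identity.list_o_auth_client_credentials,
    config["user"],              # the user OCID whose credentials are listed
    lifecycle_state="ACTIVE",    # must be one of the allowed lifecycle states
).data

for cred in credentials:
    print(cred.id, cred.lifecycle_state)
```

Note that, per the method body, an unexpected keyword argument raises `ValueError` before any request is made, and an invalid `lifecycle_state` is rejected client-side against the allowed-values list.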
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_tokens(user):\n return AppSpecificAuthToken.select().where(AppSpecificAuthToken.user == user)", "def tokens(self):\n return self.rpc.call(MsfRpcMethod.AuthTokenList)['tokens']", "def tokens():\n return ['access token', 'refresh token']", "def get_user_access_tokens(request, user):\n manager = internal_keystoneclient(request).oauth2.access_tokens\n\n return manager.list_for_user(user=user)", "def access_token(self):\n social_auth = self.social_auth.get()\n return social_auth.tokens", "def test_get_all_tokens_authenticated_user(self):\r\n\r\n user = UserFactory.create_batch(2)[1]\r\n user.info = create_tokens_for(user)\r\n\r\n res = self.app.get('api/token?api_key=' + user.api_key)\r\n data = json.loads(res.data)\r\n\r\n for provider in TokenAPI.oauth_providers:\r\n token_name = '%s_token' % provider\r\n assert data.get(token_name) is not None, data", "def getTokens(self):\n self.__require_privilaged_access()\n with DBSession(self.__config_db) as session:\n user = self.getLoggedInUser()\n sessionTokens = session.query(Session) \\\n .filter(Session.user_name == user) \\\n .filter(Session.can_expire.is_(False)) \\\n .all()\n\n result = []\n for t in sessionTokens:\n result.append(SessionTokenData(\n t.token,\n t.description,\n str(t.last_access)))\n\n return result", "def _list_tokens(self, user_id, tenant_id=None, trust_id=None,\n consumer_id=None):\n raise exception.NotImplemented() # pragma: no cover", "def bearer_tokens(self):\n return self._bearer_tokens", "def list_revoked_tokens(self):\n raise exception.NotImplemented() # pragma: no cover", "def tokens(self):\n user_token = RefreshToken.for_user(self)\n return {\n 'refresh': str(user_token),\n 'access': str(user_token.access_token),\n }", "def test_list_o_auth_access_token(self):\n pass", "def users():\n access_token = session['access_token']\n return \"%s\" % list_users(access_token)", "def list_users(access_token):\n request_url = OKTA_URL + \"api/v1/users\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def test_get_all_existing_tokens_authenticated_user(self):\r\n\r\n user = UserFactory.create_batch(2)[1]\r\n user.info = create_tokens_for(user)\r\n del user.info['google_token']\r\n\r\n res = self.app.get('api/token?api_key=' + user.api_key)\r\n data = json.loads(res.data)\r\n\r\n assert data.get('twitter_token') is not None, data\r\n assert data.get('facebook_token') is not None, data\r\n assert data.get('google_token') is None, data", "def getTokens(username):\n tokens = users.find({\"Username\": username})[0][\"Tokens\"]\n return tokens", "def get_tokens_for_user(user):\n\n refresh = RefreshToken.for_user(user)\n\n return {\n 'refresh': str(refresh),\n 'access': str(refresh.access_token),\n }", "def get_all_users():\n token = request.headers.get('token')\n\n # Token Validation\n token_valid, response = is_token_valid(token)\n if not token_valid:\n return response\n token_username = response\n\n # Privilege handling\n if token_username != 'admin':\n return jsonify({'message': \"You aren't allowed to access this\"}), 404\n\n return jsonify(list(Users.values())), 200", "def get_auth_tokens(self, oauth_verifier):\n\n url = self.access_token_url + '?oauth_verifier=' + oauth_verifier\n\n try:\n response = self.client.get(url, headers=self.headers, auth=self.auth)\n except requests.exceptions.RequestException:\n raise NetflixAuthError('An unknown error occurred.')\n\n if response.status_code != 200:\n raise 
NetflixAuthError('Getting access tokens failed: %s Response Status' % response.status_code)\n\n try:\n auth_tokens = dict(parse_qsl(response.content))\n except AttributeError:\n raise NetflixAuthError('Unable to obtain auth tokens.')\n\n return auth_tokens", "def get_auth_tokens(self, oauth_verifier):\n\n url = self.access_token_url + '?oauth_verifier=' + oauth_verifier\n\n try:\n response = self.client.get(url, headers=self.headers, auth=self.auth)\n except requests.exceptions.RequestException:\n raise NetflixAuthError('An unknown error occurred.')\n\n if response.status_code != 200:\n raise NetflixAuthError('Getting access tokens failed: %s Response Status' % response.status_code)\n\n try:\n auth_tokens = dict(parse_qsl(response.content))\n except AttributeError:\n raise NetflixAuthError('Unable to obtain auth tokens.')\n\n return auth_tokens", "def tokens(cls, instance):\n token = super(TumblrOAuth, cls).tokens(instance)\n if token and 'access_token' in token:\n token = dict(tok.split('=')\n for tok in token['access_token'].split('&'))\n return token", "def get_authentication_tokens(self):\n\n url = self.request_token_url + '?oauth_callback=' + self.callback_url\n response = self.client.get(url, headers=self.headers, auth=self.auth)\n\n if response.status_code != 200:\n raise NetflixAuthError('There was a problem retrieving an authentication url.')\n\n try:\n request_tokens = dict(parse_qsl(response.content))\n except requests.exceptions.RequestException:\n raise NetflixAuthError('Unable to obtain auth tokens.')\n\n auth_url_params = {\n 'oauth_token': request_tokens['oauth_token'],\n 'oauth_callback': self.callback_url,\n 'oauth_consumer_key': self.api_key,\n }\n\n request_tokens['auth_url'] = '%s?%s' % (self.authorize_url, urllib.urlencode(auth_url_params))\n return request_tokens", "def get_authentication_tokens(self):\n\n url = self.request_token_url + '?oauth_callback=' + self.callback_url\n response = self.client.get(url, headers=self.headers, auth=self.auth)\n\n if response.status_code != 200:\n raise NetflixAuthError('There was a problem retrieving an authentication url.')\n\n try:\n request_tokens = dict(parse_qsl(response.content))\n except requests.exceptions.RequestException:\n raise NetflixAuthError('Unable to obtain auth tokens.')\n\n auth_url_params = {\n 'oauth_token': request_tokens['oauth_token'],\n 'oauth_callback': self.callback_url,\n 'oauth_consumer_key': self.api_key,\n }\n\n request_tokens['auth_url'] = '%s?%s' % (self.authorize_url, urllib.urlencode(auth_url_params))\n return request_tokens", "def get_tokens(self) -> List[str]:\n return self.tokens", "def test_list_o_auth_authorize_token(self):\n pass", "def gettoken(tool_id, user_id):\n oauth_tokens = {\n 'access_token': '',\n 'user': {\n 'id': user_id\n }\n }\n params = {\n 'user_id': user_id\n }\n tokenq = \"\"\"select\naccess_token, refresh_token, expires_at, token_type, expires_in, user_name\nfrom tokens\nwhere user_id = :user_id\norder by expires_at desc\n\"\"\"\n tconn = dbconnect(CONFIG[CONFIG['app']['dbserver']])\n tcurr = tconn.cursor()\n try:\n results = tcurr.execute(tokenq, params).fetchone()\n except cx_Oracle.DatabaseError as err:\n LOG.error(\"Database error in retrieving tokens: %s\", err)\n\n if tcurr.rowcount > 0:\n oauth_tokens = {\n 'access_token': results[0],\n 'refresh_token': results[1],\n 'expires_at': results[2],\n 'token_type': results[3],\n 'expires_in': results[4],\n 'user': {\n 'name': results[5],\n 'id': user_id\n }\n }\n else:\n LOG.error(\"no token found for \" + str(tool_id) + 
', ' + user_id)\n tcurr.close()\n tconn.close()\n return oauth_tokens", "def list_auth_tokens(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/authTokens\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_auth_tokens got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[AuthToken]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[AuthToken]\")", "def get(self, filters=None, pagination=None, sort=None):\n filters = filters or {}\n if not is_user_action_allowed('manage_others_tokens'):\n filters['_user_fk'] = current_user.id\n\n sm = get_storage_manager()\n\n result = sm.list(models.Token, filters=filters,\n pagination=pagination, sort=sort)\n\n return result", "def tokens(self):\n # type: () -> List[Token]\n return self._tokens", "def list_credentials(user):\n return Credentials.list_credentials(user)", "def list_namespaced_o_auth_access_token(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_o_auth_access_token\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/oauthaccesstokens'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n 
query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1OAuthAccessTokenList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def get_tokens(self):\r\n return self.token_set", "def __repr__(self) -> str:\n return \"<Twilio.Oauth.V1.TokenList>\"", "def get_token_list():\n token_list = []\n tokens_dir_path = os.path.join(BASE_DIR, TOKENS_DIR)\n for dir, dirs, files in os.walk(tokens_dir_path):\n for file_name in files:\n file = open(os.path.join(tokens_dir_path, file_name), 'r')\n token_list.append(file.read().strip())\n file.close()\n return token_list", "def UserToken(self) -> object:", "def user_list():\n for values in USERS:\n user = User.objects.create_user(\n values[\"username\"], values[\"email\"], values[\"password\"]\n )\n user.first_name = values[\"first_name\"]\n user.last_name = values[\"last_name\"]\n user.is_staff = values[\"staff\"]\n user.is_superuser = values[\"super\"]\n user.save()\n Token.objects.create(key=values[\"token\"], user_id=user.id)\n\n # print('users created')", "def list():\n rino.login.list()", "def list_users():\n if not check_content_type():\n return jsonify(status=CONTENT_TYPE_ERROR)\n reqdata = request.json\n if not check_token(reqdata[\"token\"]):\n return jsonify(status=TOKEN_ERROR)\n users = db.session.query(User).all()\n resdata = []\n for user in users:\n resdata.append({\"id\" : user.id, \"login\" : user.login, \"password\" : user.hash_password})\n return jsonify(data=resdata, status=OK_STATUS)", "def get_oauth_tokens(fn):\n\tif os.path.isfile(oauth_fn): # Does the token file exist?\n\t\ttokens = twitter.oauth.read_token_file(oauth_fn)\n\t\treturn tokens\n\treturn None", "def get_auths(self):\n return self.__auths", "def get_token(args, f_users):\n status = requests.post(\"https://api.intra.42.fr/oauth/token?%s\" % (\"&\".join(args)))\n if check_connection_status(status): \n print \"+++++++++++++++++++++++++++++++++++\"\n print \"Connected to the 42 API.\"\n print \"+++++++++++++++++++++++++++++++++++\"\n response = status.json()\n response_args = [\n 'access_token=%s' % response[u'access_token'],\n 'token_type=%s' % response[u'token_type'],\n 'filter[active]=true'\n ]\n return response_args", "def list_accounts(self):\n information = []\n for provider in self._accounts.values():\n information.append({\n 'token': provider.credentials.token,\n 'url': provider.credentials.url,\n })\n\n return information", "def get_all_access():\n\t# Get the email from the user making the request\n\temail = get_jwt_identity()\n\treturn get_all_access_helper(email)", "def auths(self):\n return self._auths", "def auths(self):\n return self._auths", "def all_tokens(self) -> List[Hashable]:\n return self._all_tokens", "def list_namespaced_o_auth_authorize_token(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_o_auth_authorize_token\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/oauthauthorizetokens'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 
'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1OAuthAuthorizeTokenList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def list(conn):\n try:\n return conn.get(url='/auth-providers')['providers']\n except SystemError as e:\n raise e", "def get_push_access_tokens(orcids):\n return (\n db.session.query(UserIdentity.id, RemoteToken.access_token)\n .filter(\n RemoteToken.id_remote_account == RemoteAccount.id,\n RemoteAccount.user_id == UserIdentity.id_user,\n UserIdentity.id.in_(orcids),\n cast(RemoteAccount.extra_data, JSONB).contains({\"allow_push\": True}),\n )\n .all()\n )", "def get_tokens(self, code):\n url = url_base + \"token\"\n params = {\"grant_type\": \"ecobeePin\", \"code\": code, \"client_id\": app_key}\n temp = requests.post(url, params=params).json()\n return (temp[\"access_token\"], temp[\"refresh_token\"])", "def tokens(self):\n return self._tokens", "def tokens(self):\n return self._tokens", "def tokens(self):\n return self._tokens", "def access_token(global_config, existing_user, id_api):\n yield id_api.get_access_token_for_user(existing_user.email, existing_user.password)", "def get_tokens(self, use_refresh=False):\r\n post_data = {\r\n \"grant_type\": \"refresh_token\" if use_refresh else \"authorization_code\",\r\n \"code\": get_value(SPOTIFY_AUTHORIZATION_CODE),\r\n \"redirect_uri\": REDIRECT_URL\r\n }\r\n if use_refresh:\r\n post_data[\"refresh_token\"] = get_value(SPOTIFY_REFRESH_TOKEN)\r\n\r\n auth_key = base64.urlsafe_b64encode(f\"{SPOTIFY_CLIENT_ID}:{SPOTIFY_CLIENT_SECRET}\".encode()).decode()\r\n\r\n r = requests.post(\r\n TOKEN_URL,\r\n headers={\r\n \"Accept\": \"application/json\",\r\n \"Content-Type\": \"application/x-www-form-urlencoded\",\r\n \"Authorization\": f\"Basic {auth_key}\"\r\n },\r\n data=\"&\".join([f\"{quote(key)}={quote(value)}\" for key, value in post_data.items()])\r\n )\r\n\r\n if r.status_code != requests.codes.ok:\r\n return\r\n\r\n self.is_authorized = True\r\n data = r.json()\r\n set_value(SPOTIFY_ACCESS_TOKEN, data[\"access_token\"])\r\n self.access_token = data[\"access_token\"]\r\n if \"refresh_token\" in data:\r\n set_value(SPOTIFY_REFRESH_TOKEN, data[\"refresh_token\"])\r\n return", "def get_oauth_data():", "def get_tokens(self):\r\n return TokenGroup.get_tokens(self._tu, self.extent)", "def tokens_json(self):\n token_id, secret = self.decoded_token\n token_row = self.unauthenticated_token_row\n tokens_encoded = Fernet(secret).decrypt(\n token_row.tokens_fernet.encode('ascii'))\n return json.loads(tokens_encoded.decode('ascii'))", "def 
get_authenticators_for_view(self, view_name):\n pass", "def list_tokens(self) -> str:\n\n return self._format_symbol_table_content(\"Tokens\", self._symbols.keys())", "def user_list(request):\r\n params = request.params\r\n order = params.get('order', None)\r\n limit = params.get('limit', None)\r\n user_list = UserMgr.get_list(order=order, limit=limit)\r\n ret = {\r\n 'count': len(user_list),\r\n 'users': [dict(h) for h in user_list],\r\n }\r\n return _api_response(request, ret)", "def user_list(ctx):\n data = ctx.obj.get_all_users()\n output_json_data(data)", "def tokens(self):\n return self.__tokens", "def list(ctx, show_hidden, oath_type, period, password, remember):\n _init_session(ctx, password, remember)\n session = ctx.obj[\"session\"]\n creds = [\n cred\n for cred in session.list_credentials()\n if show_hidden or not is_hidden(cred)\n ]\n creds.sort()\n for cred in creds:\n click.echo(_string_id(cred), nl=False)\n if oath_type:\n click.echo(f\", {cred.oath_type.name}\", nl=False)\n if period:\n click.echo(f\", {cred.period}\", nl=False)\n click.echo()", "def get_all_auths(self):\n return self.all_auths", "def get_oauth_token():\n return session.get('remote_oauth')", "def get_token(self):\n oauth_provider = UserSocialAuth.objects.get(provider='drchrono')\n access_token = oauth_provider.extra_data['access_token']\n return access_token", "def get_user_ids(session, access_token):\n endpoint = \"https://graph.microsoft.com/v1.0/users?$select=id\"\n r = session.get(endpoint, headers={\"Authorization\": \"Bearer \" + access_token})\n response = json.loads(r.text)\n return response[\"value\"]", "def _get_users_list(self):\n return self.users['user_id'].tolist()", "def list(uid: int):\n\n return Token.list(uid)", "def tokens(self) -> list:\n if self._tokens is None:\n tokens_ = sorted(list(self.elements()))\n self._tokens = tokens_\n return self._tokens", "def user_list(self, mapp, url_of_liveserver):\n return mapp.getjson(url_of_liveserver)['result'].keys()", "def test_get_all_tokens_anonymous_user(self):\r\n\r\n # Anonymoues users should be unauthorized, no matter which kind of token are requesting\r\n res = self.app.get('/api/token')\r\n err = json.loads(res.data)\r\n\r\n assert res.status_code == 401, err\r\n assert err['status'] == 'failed', err\r\n assert err['status_code'] == 401, err\r\n assert err['exception_cls'] == 'Unauthorized', err\r\n assert err['target'] == 'token', err", "def describe_users(AuthenticationType=None, MaxResults=None, NextToken=None):\n pass", "def listTags(self, authenticationToken):\r\n pass", "def GetTokens(auth_code):\n params = {}\n params['client_id'] = Constants.USER['CLIENT_ID']\n params['client_secret'] = Constants.USER['CLIENT_SECRET']\n params['code'] = auth_code\n params['redirect_uri'] = Constants.AUTH['REDIRECT']\n params['grant_type'] = 'authorization_code'\n\n data = urllib.urlencode(params)\n\n headers = {\n 'User-Agent': 'LogoCert Client',\n 'Content-Type': 'application/x-www-form-urlencoded',\n 'Accept': 'text/html, */*',\n }\n\n request_url = Constants.OAUTH_TOKEN\n\n request = urllib2.Request(request_url, data, headers)\n res = urllib2.urlopen(request)\n response = res.read()\n return json.loads(response)", "def tenants_for_token(self, context):\n token_ref = self.token_api.get_token(context=context,\n token_id=context['token_id'])\n assert token_ref is not None\n\n user_ref = token_ref['user']\n tenant_refs = []\n for tenant_id in user_ref['tenants']:\n tenant_refs.append(self.identity_api.get_tenant(\n context=context,\n 
tenant_id=tenant_id))\n return self._format_tenants_for_token(tenant_refs)", "def users(self):\n from sagas.ofbiz.entities import OfEntity as e, oc\n rs=e().allUserLogin()\n for r in rs:\n print(r['userLoginId'])", "def get_all_users():", "def get_expiring_tokens(user, soon):\n soon_datetime = datetime.now() + soon\n return AppSpecificAuthToken.select().where(\n AppSpecificAuthToken.user == user,\n AppSpecificAuthToken.expiration <= soon_datetime,\n AppSpecificAuthToken.expiration > datetime.now(),\n )", "def _list_known_secret_tokens():\n global _secret_token_map\n\n keys = list(_secret_token_map.keys())\n keys.sort()\n\n ret = ''\n for key in keys:\n if ret != '':\n ret += ', '\n ret += \"'\" + key + \"'\"\n return ret", "def fetch_all_users():\n url = \"{}/workspace/{}/users\".format(V1_API_URL, WORKSPACE_ID)\n responses = requests.get(url, headers=HEADERS)\n return [\n {\n \"acronym\": user[\"name\"].lower(),\n \"clockify_id\": user[\"id\"],\n \"email\": user[\"email\"].lower(),\n }\n for user in responses.json()\n ]", "def list_available_authenticators(avail_auths):\n output_lines = [\"Available authenticators:\"]\n for auth_name, auth in avail_auths.iteritems():\n output_lines.append(\" - %s : %s\" % (auth_name, auth.description))\n return '\\n'.join(output_lines)", "def get_Tokens(self):\n return self._output.get('Tokens', None)", "def deserialize_tokens():\n\ttry:\n\t\twith open(config.TOKENPATH, \"r+\") as f:\n\t\t\tcontext = f.read()\n\t\t\tres = eval(context)\n\t\t\t# load into memory\n\t\t\treturn res[\"access_token\"], res[\"refresh_token\"]\n\texcept:\n\t\t# unexcept token format\n\t\tfrom common import ApplicationException\n\t\traise ApplicationException(\"authorization file is broken, please run init\")", "def user_ids(self):\n return list(self.get_users())", "def token_auth_get_user_roles(user):\n print(user)\n return user.get_roles()", "def get(self):\n\n user = context_property.request_user\n Log.info(\"Refresh access token for %i\" % user.id)\n\n return {\n \"accessToken\" : create_access_token(user.id)\n }, 200", "def get_user_principals(access_token):\r\n #Make request to user info and preferences to get principals for login\r\n user_url = 'https://api.tdameritrade.com/v1/userprincipals'\r\n headers = {'Authorization': 'Bearer {}'.format(access_token)}\r\n params = {'fields':'streamerSubscriptionKeys,streamerConnectionInfo'}\r\n user_principals_json = requests.get(url=user_url,headers=headers,params=params)\r\n user_principals = user_principals_json.json()\r\n\r\n #convert token timestamp to milliseconds (required for login to websocket)\r\n tokenTimeStamp = user_principals['streamerInfo']['tokenTimestamp']\r\n token_date = dateutil.parser.parse(tokenTimeStamp,ignoretz=True)\r\n epoch = datetime.datetime.utcfromtimestamp(0)\r\n tokenTimeStampAsMs = int((token_date-epoch).total_seconds()*1000.0)\r\n \r\n return (user_principals,tokenTimeStampAsMs)", "def user_list(request_dict):\n users = User.query.all()\n users_list = list()\n for user in users:\n users_list.append(user)\n\n return JSONTools.user_list_reply(users_list)", "def users(self):\n return self.get_data(\"users\")", "def get(self):\n # TODO this endpoint returns null is instead of respoinse message when token is not in the header, read about error handling to solve this issue\n return get_users()", "def access_token_profile(global_config, existing_user):\n client_app = global_config.client_apps.profile_app\n api = IdApi(global_config.id_home, client_app.id, client_app.password, global_config.urls.id.api)\n 
yield api.get_access_token_for_user(existing_user.email, existing_user.password)", "def known(self) -> List[str]:\n return [k for k in self._config.get('auths', {}).keys()]", "def view_list_users(self, user):\r\n return user.realm._users.keys()", "def getTokens(self):\n return self.__token", "def get_token(self):\n\t\tself.client.post('/api/v1/auth/signup', data=json.dumps(self.signup_user), content_type='application/json')\n\t\tresponse = self.client.post('/api/v1/auth/login', data=json.dumps(self.login_user), content_type='application/json')\n\t\tresp = json.loads(response.data.decode())\n\t\treturn 'Bearer ' + resp['access_token']", "def auth_token(self):", "def list_credentials(self, **_params):\r\n return self.get(self.credentials_path, params=_params)", "def list_users(BrokerId=None, MaxResults=None, NextToken=None):\n pass", "def get_token_names(self) -> List[str]:\n return list(self._tokens.keys())" ]
[ "0.76735115", "0.7521337", "0.7381115", "0.72044426", "0.71589434", "0.7055708", "0.70043594", "0.69068575", "0.67511743", "0.67317975", "0.66999596", "0.6661664", "0.6636696", "0.6495753", "0.64924324", "0.63827467", "0.63599586", "0.63553977", "0.6325954", "0.6325954", "0.6312468", "0.62530315", "0.62530315", "0.6244723", "0.61573917", "0.6086399", "0.60772276", "0.6076854", "0.6074169", "0.6063199", "0.6047817", "0.60375255", "0.6005086", "0.60044825", "0.6000851", "0.596671", "0.5942635", "0.592044", "0.59144676", "0.59135824", "0.5891553", "0.58804226", "0.5869536", "0.58620965", "0.58620965", "0.58613527", "0.58451104", "0.5816243", "0.58145154", "0.5806554", "0.5805616", "0.5805616", "0.5805616", "0.5800553", "0.5785154", "0.5756523", "0.57546407", "0.57523793", "0.5748733", "0.57422566", "0.5731991", "0.57279134", "0.5725009", "0.5722525", "0.57161313", "0.571477", "0.5712464", "0.57026356", "0.568276", "0.56467736", "0.56446177", "0.5638549", "0.5636534", "0.5632193", "0.56270164", "0.5623736", "0.5623264", "0.5622856", "0.56206584", "0.56193376", "0.5619333", "0.56126386", "0.5609591", "0.5599876", "0.5595968", "0.559301", "0.5592353", "0.55890125", "0.55851394", "0.55830604", "0.5575705", "0.5573955", "0.55738264", "0.5573287", "0.5563937", "0.55592126", "0.55574775", "0.5557001", "0.5549372", "0.5546819", "0.5545402" ]
0.0
-1
Lists the policies in the specified compartment (either the tenancy or another of your compartments). See `Where to Get the Tenancy's OCID and User's OCID`__. To determine which policies apply to a particular group or compartment, you must view the individual statements inside all your policies. There isn't a way to automatically obtain that information via the API.
def list_policies(self, compartment_id, **kwargs): resource_path = "/policies" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_policies got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[Policy]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[Policy]")
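A similar hypothetical sketch for the `list_policies` method above, under the same assumptions about the client and config; using the tenancy OCID as the root compartment, and assuming the returned `Policy` models expose `name` and `statements` attributes (plausible from the docstring, but not shown in the record):

```python
import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Fetch every policy in the root compartment (the tenancy), letting the
# pagination helper walk the page/limit parameters automatically.
policies = oci.pagination.list_call_get_all_results(
    identity.list_policies,
    config["tenancy"],
).data

# As the docstring notes, there is no reverse-lookup API: to see which
# groups or compartments a policy affects, read its statements.
for policy in policies:
    for statement in policy.statements:
        print(f"{policy.name}: {statement}")
```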
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_policies(self):\n client = self.connect(VAULT_TOKEN)\n return client.list_policies()", "def list_policies(policystore_url, verbose):\n\n if verbose:\n logging.info('Listing policies')\n\n list_url = policystore_url + POLICYSTORE_PREFIX + 'ListEntitlementPolicies'\n\n r = requests.post(list_url, headers=headers(), json={})\n if r.status_code != 200:\n logging.error(f'ERROR: Unexpected response: {r.status_code}')\n pprint.pprint(r.json())\n sys.exit('Failed to list policies')\n\n logging.info('SUCCESS: Listed policies')\n\n resp = r.json()\n\n if verbose:\n logging.info('Policies retrieved')\n pprint.pprint(resp)\n\n return resp", "def list_policies(profile=None, api_key=None):\n return salt.utils.pagerduty.list_items(\n \"escalation_policies\",\n \"id\",\n __salt__[\"config.option\"](profile),\n api_key,\n opts=__opts__,\n )", "def list_policies(self):\n return self.con.list_policies(\n Scope='Local'\n )", "def policies(self):\n return self._data.get('policies')", "def list_auth_policies(self, kwargs):\n verbose = kwargs.get(\"verbose\", False)\n attributes = ALL if verbose else [\"cn\", \"objectClass\"]\n\n self.display(\n self.engine.query(\n self.engine.AUTH_POLICIES_FILTER(),\n attributes, base=','.join([\"CN=AuthN Policy Configuration,CN=Services,CN=Configuration\", self.engine.base_dn])\n ),\n verbose\n )", "def ListPolicies(self, request, global_params=None):\n config = self.GetMethodConfig('ListPolicies')\n return self._RunMethod(\n config, request, global_params=global_params)", "def _get_policies(self):\n flag, response = self._commcell_object._cvpysdk_object.make_request('GET', self._POLICY)\n\n if flag:\n if response.json() and 'taskDetail' in response.json():\n policies = response.json()['taskDetail']\n policies_dict = {}\n\n for policy in policies:\n temp_name = policy['task']['taskName'].lower()\n temp_id = str(policy['task']['taskId']).lower()\n policies_dict[temp_name] = temp_id\n\n return policies_dict\n else:\n raise SDKException('Response', '102')\n else:\n response_string = self._commcell_object._update_response_(response.text)\n raise SDKException('Response', '101', response_string)", "def policies(self, request):\n policies = OtterPolicies(self.store, self.tenant_id, self.group_id,\n self.dispatcher)\n return policies.app.resource()", "def get_policies():\r\n policy = policies.values()\r\n return policy", "def policies(self):\n return self._policies", "def rbac_policy_list(request, **kwargs):\n policies = neutronclient(request).list_rbac_policies(\n **kwargs).get('rbac_policies')\n return [RBACPolicy(p) for p in policies]", "def policy_list(request, **kwargs):\n policies = neutronclient(request).list_qos_policies(\n **kwargs).get('policies')\n return [QoSPolicy(p) for p in policies]", "def list_acl_policies(client, container_name, **kwargs):\n return _get_acl(client, container_name, **kwargs)", "def list_policies(policies, verbosity):\n print()\n if verbosity < 1:\n rows = []\n for p in sorted_by_name(policies):\n rows.append((p.name, p.generator, p.length, p.frequency))\n print_table(('NAME', 'GEN', 'LEN', 'FREQ'), rows)\n else:\n for policy in sorted_by_name(policies):\n chars = NONE\n if policy.disallowed_characters:\n chars = ''.join(sorted(policy.disallowed_characters))\n print_detail(\n policy.name, (\n ('description', nullable(policy.description)),\n ('specs', get_policy_specs(policy)),\n ('∅ chars', chars),\n ),\n )\n print()", "def list_workload_policies(self, params=None):\n uri = 'proj/list_workload_policies'\n if params:\n uri += '?%s' % 
urllib.urlencode(params)\n \n resp, body = self.get(uri)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBodyList(resp, body['workload_policies'])", "def list_org_policies(self, resource, fields=None, max_results=None,\n verb='listOrgPolicies', **kwargs):\n arguments = {'resource': resource, 'fields': fields, 'body': {}}\n if max_results:\n arguments['body']['pageSize'] = max_results\n\n if kwargs:\n arguments.update(kwargs)\n\n for resp in self.execute_search_query(\n verb=verb,\n verb_arguments=arguments):\n yield resp", "def list_policy(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_policy\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/policies'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1PolicyList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def list_compartments(self, compartment_id, **kwargs):\n resource_path = \"/compartments\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"access_level\",\n \"compartment_id_in_subtree\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_compartments got unknown kwargs: {!r}\".format(extra_kwargs))\n\n if 'access_level' in kwargs:\n access_level_allowed_values = [\"ANY\", \"ACCESSIBLE\"]\n if kwargs['access_level'] not in access_level_allowed_values:\n raise ValueError(\n \"Invalid value for `access_level`, must be one of {0}\".format(access_level_allowed_values)\n )\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"accessLevel\": kwargs.get(\"access_level\", missing),\n \"compartmentIdInSubtree\": kwargs.get(\"compartment_id_in_subtree\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n 
\"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Compartment]\")", "def available_policies(self):\n return tuple(self._policies.keys())", "def GetPolicies(self):\n policy = {}\n if json is None:\n logging.error('No JSON module, cannot parse policy information')\n else :\n try:\n policy = json.loads(open(self.policy_path).read(), strict=False)\n except IOError:\n logging.error('Failed to load policies from %s' % self.policy_path)\n return policy", "def test_get_hyperflex_vcenter_config_policy_list(self):\n pass", "def role_policy_statements(role_name):\n policies = []\n role = iam.Role(role_name)\n \n role_policies = [p.policy_document['Statement'] for p in role.policies.all()]\n for p in role_policies:\n policies.extend(p)\n \n attached_policies = [p.default_version.document['Statement'] for p in role.attached_policies.all()]\n for p in attached_policies:\n policies.extend(p)\n\n return policies", "def child_policies(self) -> Sequence['outputs.SubResourceResponse']:\n return pulumi.get(self, \"child_policies\")", "def test_get_hyperflex_ucsm_config_policy_list(self):\n pass", "def test_list_policy_for_all_namespaces(self):\n pass", "def describe_service_access_policies(DomainName=None, Deployed=None):\n pass", "def list_ikepolicies(self, retrieve_all=True, **_params):\r\n return self.list('ikepolicies', self.ikepolicies_path, retrieve_all,\r\n **_params)", "def test_list_ikepolicy_sort(self):\r\n resources = \"ikepolicies\"\r\n cmd = ikepolicy.ListIKEPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_list_resources(resources, cmd,\r\n sort_key=[\"name\", \"id\"],\r\n sort_dir=[\"asc\", \"desc\"])", "def service_endpoint_policies(self) -> Optional[Sequence['outputs.ServiceEndpointPolicyResponse']]:\n return pulumi.get(self, \"service_endpoint_policies\")", "def service_placement_policies(self) -> pulumi.Output[Optional[Sequence['outputs.ServicePlacementPolicyDescriptionResponse']]]:\n return pulumi.get(self, \"service_placement_policies\")", "def delete_policies():\n if PoliciesOutput.POLICIES_EVENT not in ctx.instance.runtime_properties:\n return\n\n service_component_name = ctx.instance.runtime_properties.get(\n PoliciesOutput.SERVICE_COMPONENT_NAME\n )\n if not service_component_name:\n ctx.logger.warn(\"failed to find service_component_name to delete_policies in consul-kv\")\n return\n\n delete_policies = [\n PoliciesOutput._gen_txn_operation(\n PoliciesOutput.OPERATION_DELETE_FOLDER, service_component_name\n )\n ]\n PoliciesOutput._run_transaction(\"delete_policies\", delete_policies)", "def storage_policies(self, **kwargs):\n self.logger.debug(f\"Get storage policies data\")\n url_path = 'storage/policies'\n body = self._make_body(kwargs)\n return self._common_get(request_path=url_path, parameters=body)", "def token_policies(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"token_policies\")", "def all_schedule_policies(self):\n return self._policies", "def list_policy_profiles(self, 
**params):\r\n return self.get(self.policy_profiles_path, params=params)", "def test_get_hyperflex_sys_config_policy_list(self):\n pass", "def _get_policies(self, cr, uid, context=None):\n return [('optional', _('Optional')),\n ('always', _('Always')),\n ('never', _('Never'))]", "def get_selected_policies(actor):\n dike_model, _ = get_model_for_problem_formulation(actor)\n levers = [lever.name for lever in dike_model.levers]\n policies_df = pd.read_csv('simulation/selected/selected_policies_' + actor + '.csv')\n policies_df = policies_df.loc[:, levers]\n policies = []\n\n for i, row in policies_df.iterrows():\n policy = Policy(f'Policy {i}', **row.to_dict())\n policies.append(policy)\n\n return policies", "def list_ipsecpolicies(self, retrieve_all=True, **_params):\r\n return self.list('ipsecpolicies',\r\n self.ipsecpolicies_path,\r\n retrieve_all,\r\n **_params)", "def cmd_list(nexus_client):\n policies = nexus_client.cleanup_policies.list()\n if len(policies) == 0:\n return exception.CliReturnCode.POLICY_NOT_FOUND.value\n\n table = Texttable(max_width=constants.TTY_MAX_WIDTH)\n table.add_row(\n ['Name', 'Format', 'Downloaded', 'Updated', 'Regex'])\n table.set_deco(Texttable.HEADER)\n for policy in policies:\n p = policy.configuration\n table.add_row([\n p['name'], p['format'],\n p['criteria'].get('lastDownloaded', 'null'),\n p['criteria'].get('lastBlobUpdated', 'null'),\n p['criteria'].get('regex', 'null')],\n )\n\n print(table.draw())\n return exception.CliReturnCode.SUCCESS.value", "def get_protection_policies(cohesity_client):\n policy_list = cohesity_client.protection_policies.get_protection_policies()\n policy_list = policy_list if policy_list else []\n for policy in policy_list:\n exported_res_dict[\"Protection Policies\"].append(policy.name)\n return policy_list", "def policy_info(self) -> 'outputs.PolicyInfoResponse':\n return pulumi.get(self, \"policy_info\")", "def test_list_ipsecpolicy_sort(self):\r\n resources = \"ipsecpolicies\"\r\n cmd = ipsecpolicy.ListIPsecPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_list_resources(resources, cmd,\r\n sort_key=[\"name\", \"id\"],\r\n sort_dir=[\"asc\", \"desc\"])", "def subscribed_osp_policies(self):\n return self._subscribed_osp_policies", "def list(ctx, show_hidden, oath_type, period):\n ensure_validated(ctx)\n controller = ctx.obj['controller']\n creds = [cred\n for cred in controller.list()\n if show_hidden or not cred.is_hidden\n ]\n creds.sort()\n for cred in creds:\n click.echo(cred.printable_key, nl=False)\n if oath_type:\n click.echo(u', {}'.format(cred.oath_type.name), nl=False)\n if period:\n click.echo(', {}'.format(cred.period), nl=False)\n click.echo()", "def test_get_dispatch_policy_list(self):\n pass", "def get(self):\n try:\n coll_policy_id = views_helper.get_request_value(self.request, \"coll_policy_id\", \"GET\")\n obj = CollPolicy.objects.get(coll_policy_id=coll_policy_id)\n is_used = False\n if not Tool.get_policy_status(coll_policy_id):\n is_used = True\n is_not_in_group = True\n if len(PolicysGroups.objects.filter(policy=coll_policy_id)) > 0:\n is_not_in_group = False\n column_status={\n 'name': True,\n 'desc': True,\n 'ostype': is_not_in_group,\n 'cli_command': is_used\n }\n serializer = CollPolicyEditSerializer(obj)\n data = {\n 'data': {\n 'policy_detail': serializer.data,\n 'verify_result': column_status\n },\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.TRUE,\n constants.MESSAGE: constants.SUCCESS\n }\n }\n return api_return(data=data)\n except Exception as e:\n 
if constants.DEBUG_FLAG:\n print traceback.format_exc(e)\n return exception_handler(e)", "def policy_rules(self) -> Sequence[Any]:\n return pulumi.get(self, \"policy_rules\")", "def list_missions(self):\n\n # getting all the histogram information\n service = \"Mast.Caom.All\"\n params = {}\n response = self.service_request_async(service, params, format='extjs')\n jsonResponse = response[0].json()\n\n # getting the list of missions\n histData = jsonResponse['data']['Tables'][0]['Columns']\n for facet in histData:\n if facet['text'] == \"obs_collection\":\n missionInfo = facet['ExtendedProperties']['histObj']\n missions = list(missionInfo.keys())\n missions.remove('hist')\n return missions", "def test_get_hyperflex_proxy_setting_policy_list(self):\n pass", "def test_list_namespaced_policy(self):\n pass", "def inline_policies_json(self):\n policies = {}\n for policy in self.inline_policies:\n policies[policy.policy_id] = policy.json\n return policies", "def test_get_hyperflex_node_config_policy_list(self):\n pass", "def getMergePolicies(self, limit: int = 100) -> dict:\n if self.loggingEnabled:\n self.logger.debug(f\"Starting getMergePolicies\")\n path = \"/config/mergePolicies\"\n params = {\"limit\": limit}\n res = self.connector.getData(\n self.endpoint + path, params=params, headers=self.header\n )\n data = res[\"children\"]\n nextPage = res[\"_links\"][\"next\"].get(\"href\", \"\")\n while nextPage != \"\":\n path = \"/config/mergePolicies?\" + nextPage.split(\"?\")[1]\n res = self.connector.getData(\n self.endpoint + path, params=params, headers=self.header\n )\n data += res[\"children\"]\n nextPage = res[\"_links\"][\"next\"].get(\"href\", \"\")\n return data", "def permission_policies(self) -> pulumi.Output[Optional[Sequence['outputs.AccessConfigurationPermissionPolicy']]]:\n return pulumi.get(self, \"permission_policies\")", "def read(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.debug(\"Reading the policy: %s\", address)\n response = self.vault.requests_request(\n \"GET\", address, headers=self.vault.token_header\n )\n policy_details = response.json()[\"data\"][\"policy\"]\n return policy_details", "def detail(self, marker=None, limit=None, sort_key='id',\n sort_dir='asc'):\n context = pecan.request.context\n\n # NOTE(lucasagomes): /detail should only work against collections\n parent = pecan.request.path.split('/')[:-1][-1]\n if parent != \"nodepool_policies\":\n raise exception.HTTPNotFound\n\n expand = True\n resource_url = '/'.join(['nodepool_policies', 'detail'])\n return self._get_nodepool_policies_collection(marker, limit,\n sort_key, sort_dir, expand,\n resource_url)", "def test_list_policy_binding_for_all_namespaces(self):\n pass", "def list_pso(self, _):\n FILETIME_TIMESTAMP_FIELDS = {\n \"msDS-LockoutObservationWindow\": (60, \"mins\"),\n \"msDS-MinimumPasswordAge\": (86400, \"days\"),\n \"msDS-MaximumPasswordAge\": (86400, \"days\"),\n \"msDS-LockoutDuration\": (60, \"mins\")\n }\n\n FIELDS_TO_PRINT = [\n \"cn\",\n \"msDS-PasswordReversibleEncryptionEnabled\",\n \"msDS-PasswordSettingsPrecedence\",\n \"msDS-MinimumPasswordLength\",\n \"msDS-PasswordHistoryLength\",\n \"msDS-PasswordComplexityEnabled\",\n \"msDS-LockoutObservationWindow\",\n \"msDS-LockoutDuration\",\n \"msDS-LockoutThreshold\",\n \"msDS-MinimumPasswordAge\",\n \"msDS-MaximumPasswordAge\",\n \"msDS-PSOAppliesTo\",\n ]\n\n psos = self.engine.query(self.engine.PSO_INFO_FILTER())\n for policy in psos:\n 
for field in FIELDS_TO_PRINT:\n val = policy.get(field, None)\n if val is None:\n continue\n if isinstance(val, list):\n targets = []\n for target in val:\n targets.append(target)\n val = \" | \".join(targets)\n else:\n val = policy[field]\n\n if field in FILETIME_TIMESTAMP_FIELDS.keys():\n val = int((fabs(float(val)) / 10**7) / FILETIME_TIMESTAMP_FIELDS[field][0])\n val = \"{val} {typ}\".format(val=val, typ=FILETIME_TIMESTAMP_FIELDS[field][1])\n print(\"{field}: {val}\".format(field=field, val=val))", "def test_get_bios_policy_list(self):\n pass", "def list_policy_profile_bindings(self, **params):\r\n return self.get(self.policy_profile_bindings_path, params=params)", "def policy_tags(self):\n return self.client.policy.getPolicyTags()", "def distribution_policy_zones(self) -> pulumi.Output[List[str]]:\n return pulumi.get(self, \"distribution_policy_zones\")", "def policy(self) -> pulumi.Output['outputs.ServicePolicy']:\n return pulumi.get(self, \"policy\")", "def build_policies_report(**kwargs):\n # All report functions support kwargs to support a unified interface,\n # even if they don't use them.\n _ = kwargs\n jss_connection = JSSConnection.get()\n all_policies = jss_connection.Policy().retrieve_all(\n subset=[\"general\", \"scope\"])\n if not all_policies:\n return Report(\"Policy\", [], \"Policy Usage Report\", {})\n\n all_policies_result = Result([(policy.id, policy.name) for policy in\n all_policies], False, \"All Policies\")\n unscoped_policies = [(policy.id, policy.name) for policy in all_policies if\n policy.findtext(\"scope/all_computers\") == \"false\" and\n not policy.findall(\"scope/computers/computer\") and\n not policy.findall(\n \"scope/computer_groups/computer_group\") and\n not policy.findall(\"scope/buildings/building\") and\n not policy.findall(\"scope/departments/department\")]\n desc = (\"Policies which are not scoped to any computers, computer groups, \"\n \"buildings, departments, or to the all_computers meta-scope.\")\n unscoped = Result(unscoped_policies, True, \"Policies not Scoped\", desc)\n unscoped_cruftiness = calculate_cruft(unscoped_policies, all_policies)\n\n disabled_policies = [(policy.id, policy.name) for policy in all_policies if\n policy.findtext(\"general/enabled\") == \"false\"]\n disabled = Result(disabled_policies, True, \"Disabled Policies\",\n \"Policies which are currently disabled \"\n \"(Policy/General/Enabled toggle).\")\n disabled_cruftiness = calculate_cruft(disabled_policies, all_policies)\n\n report = Report(\"Policy\", [unscoped, disabled, all_policies_result],\n \"Policy Report\", {\"Cruftiness\": {}})\n\n report.metadata[\"Cruftiness\"][\"Unscoped Policy Cruftiness\"] = (\n get_cruft_strings(unscoped_cruftiness))\n report.metadata[\"Cruftiness\"][\"Disabled Policy Cruftiness\"] = (\n get_cruft_strings(disabled_cruftiness))\n\n return report", "def get_policy_info(self):\n policy_info = []\n for pol in self:\n # delete from /info if deprecated\n if pol.is_deprecated:\n continue\n policy_entry = {}\n policy_entry['name'] = pol.name\n if pol.is_default:\n policy_entry['default'] = pol.is_default\n policy_info.append(policy_entry)\n return policy_info", "def token_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"token_policies\")", "def token_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"token_policies\")", "def attached_managed_policies_json(self):\n policies = {}\n for policy in self.attached_managed_policies:\n 
policies[policy.policy_id] = policy.json\n return policies", "def keys(self):\n if self.policies is None:\n return set([])\n return self.policies.keys()", "def list_app_policy_groups(self):\n resp, body = self.get(self.get_uri(self.resource))\n body = json.loads(body)\n self.expected_success(http_client.OK, resp.status)\n return rest_client.ResponseBody(resp, body)", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def service_placement_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServicePlacementPolicyDescriptionArgs']]]]:\n return pulumi.get(self, \"service_placement_policies\")", "def list_policy_executions(self, workload_policy_id):\n url = \"%s/list_policy_executions/%s\" % ('None', workload_policy_id)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBody(resp, body)", "def getCapsules(self):\n return self.data.capsules", "def list_qos_policies(self, filters=None):\n if not self._has_neutron_extension('qos'):\n raise exc.OpenStackCloudUnavailableExtension(\n 'QoS extension is not available on target cloud'\n )\n # Translate None from search interface to empty {} for kwargs below\n if not filters:\n filters = {}\n return list(self.network.qos_policies(**filters))", "def policy_identifiers(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"policy_identifiers\")", "def test_list_namespaced_policy_binding(self):\n pass", "def getListOfCompartments(self):\n return self.model.getListOfCompartments()", "def list_domain_policy(self, _):\n FILETIME_TIMESTAMP_FIELDS = {\n \"lockOutObservationWindow\": (60, \"mins\"),\n \"lockoutDuration\": (60, \"mins\"),\n \"maxPwdAge\": (86400, \"days\"),\n \"minPwdAge\": (86400, \"days\"),\n \"forceLogoff\": (60, \"mins\")\n }\n\n FOREST_LEVELS = {\n 7: \"Windows Server 2016\",\n 6: \"Windows Server 2012 R2\",\n 5: \"Windows Server 2012\",\n 4: \"Windows Server 2008 R2\",\n 3: \"Windows Server 2008\",\n 2: \"Windows Server 2003\",\n 1: \"Windows Server 2003 operating system through Windows Server 2016\",\n 0: \"Windows 2000 Server operating system through Windows Server 2008 operating system\"\n }\n\n FIELDS_TO_PRINT = [\n \"dc\",\n \"distinguishedName\",\n \"lockOutObservationWindow\",\n \"lockoutDuration\",\n \"lockoutThreshold\",\n \"maxPwdAge\",\n \"minPwdAge\",\n \"minPwdLength\",\n \"pwdHistoryLength\",\n \"pwdProperties\",\n \"ms-DS-MachineAccountQuota\",\n \"msDS-Behavior-Version\"]\n\n policy = list(self.engine.query(self.engine.DOMAIN_INFO_FILTER()))\n if policy:\n policy = policy[0]\n for field in FIELDS_TO_PRINT:\n val = policy.get(field, None)\n if val is None:\n continue\n\n if field == \"lockOutObservationWindow\" and isinstance(val, timedelta):\n val = int(val.total_seconds()) / 60\n elif field in FILETIME_TIMESTAMP_FIELDS.keys() and type(val) == int:\n val = int((fabs(float(val)) / 10**7) / FILETIME_TIMESTAMP_FIELDS[field][0])\n if field in FILETIME_TIMESTAMP_FIELDS.keys():\n val = \"%d %s\" % (val, FILETIME_TIMESTAMP_FIELDS[field][1])\n if field == \"msDS-Behavior-Version\" and isinstance(val, int):\n val = \"%s\" % (FOREST_LEVELS[policy[field]])\n\n print(\"%s: %s\" % (field, val))", "def refresh(self):\n self._policies = self._get_policies()", "def policy_parameters(self) -> Optional['outputs.PolicyParametersResponse']:\n 
return pulumi.get(self, \"policy_parameters\")", "def store_policies(action, policy_bodies):\n service_component_name = ctx.instance.runtime_properties.get(\n PoliciesOutput.SERVICE_COMPONENT_NAME\n )\n if not service_component_name:\n ctx.logger.warn(\"failed to find service_component_name to store_policies in consul-kv\")\n return False\n\n event = {\n \"action\": action,\n \"timestamp\": (datetime.utcnow().isoformat()[:-3] + 'Z'),\n \"update_id\": str(uuid.uuid4()),\n \"policies_count\": len(policy_bodies)\n }\n ctx.instance.runtime_properties[PoliciesOutput.POLICIES_EVENT] = event\n\n store_policies = [\n PoliciesOutput._gen_txn_operation(PoliciesOutput.OPERATION_SET, service_component_name,\n \"items/\" + policy_id, json.dumps(policy_body))\n for policy_id, policy_body in policy_bodies.items()\n ]\n txn = [\n PoliciesOutput._gen_txn_operation(\n PoliciesOutput.OPERATION_DELETE_FOLDER, service_component_name),\n PoliciesOutput._gen_txn_operation(\n PoliciesOutput.OPERATION_SET, service_component_name, \"event\", json.dumps(event))\n ]\n idx_step = PoliciesOutput.MAX_OPS_PER_TXN - len(txn)\n for idx in range(0, len(store_policies), idx_step):\n txn += store_policies[idx : idx + idx_step]\n if not PoliciesOutput._run_transaction(\"store_policies\", txn):\n return False\n txn = []\n\n PoliciesOutput._run_transaction(\"store_policies\", txn)\n return True", "def permission_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AccessConfigurationPermissionPolicyArgs']]]]:\n return pulumi.get(self, \"permission_policies\")", "def permission_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AccessConfigurationPermissionPolicyArgs']]]]:\n return pulumi.get(self, \"permission_policies\")", "def test_list_cluster_policy_binding(self):\n pass", "def test_get_hyperflex_auto_support_policy_list(self):\n pass", "def dns_policies(self, **kwargs):\n url_path = 'dns/policies'\n self.logger.debug(f\"Get RealTime DNS Policies\")\n body = self._make_body(kwargs)\n return self._common_get(request_path=url_path, parameters=body)", "def list_firewall_policies(self, retrieve_all=True, **_params):\r\n # Pass filters in \"params\" argument to do_request\r\n\r\n return self.list('firewall_policies', self.firewall_policies_path,\r\n retrieve_all, **_params)", "def authorizations(self) -> Sequence['outputs.AuthorizationResponse']:\n return pulumi.get(self, \"authorizations\")", "def list_authorities():\n try:\n certs = client().certificates.get_authorities()\n if not certs:\n logger.info(\n 'ctl:cert:authorities', 'No certificate authorities found'\n )\n return\n llen = len(sorted(certs, key=lambda x: len(x[\"id\"]))[-1][\"id\"])\n for x in sorted(certs, key=lambda x: x[\"id\"]):\n click.echo(\n click.style(\n '{name: <{fill}}'.format(name=x[\"id\"], fill=llen + 3),\n fg=\"white\", bold=True) + \"Expires \" +\n click.style(x[\"expiry\"].strftime(\"%c\"), fg=\"yellow\")\n )\n except Exception as e:\n raise CLIException(str(e))", "def policies(self, policies):\n\n self._policies = policies", "def test_list_all_response_descriptor_policies_machine_policy_machine_policy_resource_spaces(self):\n pass", "def get_queryset(self):\n return Objective.objects.filter(perspective__description='Learning and Capacity').order_by('code')", "def parse_storage_policies(conf):\n policies = []\n for section in conf.sections():\n if not section.startswith('storage-policy:'):\n continue\n policy_index = section.split(':', 1)[1]\n # map config option name to StoragePolicy paramater name\n config_to_policy_option_map 
= {\n 'name': 'name',\n 'default': 'is_default',\n 'deprecated': 'is_deprecated',\n }\n policy_options = {}\n for config_option, value in conf.items(section):\n try:\n policy_option = config_to_policy_option_map[config_option]\n except KeyError:\n raise PolicyError('Invalid option %r in '\n 'storage-policy section %r' % (\n config_option, section))\n policy_options[policy_option] = value\n policy = StoragePolicy(policy_index, **policy_options)\n policies.append(policy)\n\n return StoragePolicyCollection(policies)", "def list(conn):\n try:\n return conn.get(url='/auth-providers')['providers']\n except SystemError as e:\n raise e", "def policies(self, value):\n policies = {}\n for domain, obj in six.iteritems(value):\n if isinstance(obj, Policy):\n policies[domain] = obj\n else:\n policies[domain] = Policy(obj, self.policy_aliases)\n self._set_attr('policies', policies)" ]
[ "0.7189241", "0.6911353", "0.69109756", "0.6786608", "0.6505484", "0.6489422", "0.6399176", "0.6282769", "0.6273543", "0.62666065", "0.6170152", "0.61578345", "0.59558356", "0.5855535", "0.5853909", "0.58492464", "0.57598", "0.57497317", "0.5630949", "0.56128603", "0.56102735", "0.5597646", "0.55645853", "0.5524058", "0.54679096", "0.54492956", "0.54452443", "0.5431996", "0.5408298", "0.5382999", "0.53722167", "0.5358343", "0.5347292", "0.53160083", "0.5298914", "0.5298291", "0.5249572", "0.5224277", "0.5170257", "0.51580286", "0.51557964", "0.5144445", "0.5134439", "0.509889", "0.5092791", "0.50882906", "0.5067698", "0.5067128", "0.5041108", "0.50059533", "0.50050163", "0.49866265", "0.49705184", "0.49677917", "0.49292052", "0.49248093", "0.49112168", "0.49032772", "0.48922563", "0.48867574", "0.4880318", "0.48396647", "0.48370108", "0.48167598", "0.48074415", "0.47986066", "0.47861004", "0.47848216", "0.47848216", "0.47766626", "0.4765079", "0.4763775", "0.4746362", "0.4746362", "0.4746362", "0.47447073", "0.47410995", "0.47400725", "0.47256434", "0.4720252", "0.47140226", "0.47068796", "0.47066563", "0.47059825", "0.47026733", "0.4700287", "0.46957463", "0.46957463", "0.46917567", "0.46915883", "0.4689129", "0.46795082", "0.4671663", "0.46614087", "0.46494457", "0.4646959", "0.4635669", "0.4630822", "0.46160305", "0.4609744" ]
0.7190767
0
Lists the region subscriptions for the specified tenancy.
def list_region_subscriptions(self, tenancy_id, **kwargs): resource_path = "/tenancies/{tenancyId}/regionSubscriptions" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_region_subscriptions got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tenancyId": tenancy_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[RegionSubscription]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[RegionSubscription]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAllSubscriptions(self):\n return self.request(\n \"getAllSubscriptions\",\n )", "def listSubscriptions() -> object:\n\n db = Db()\n return db.Subscriptions.objects().to_json()", "def GetSubscriptions(self):\n\n return self.__GetJson(\"/subscriptions\", True)", "def subscriptions(self):\r\n return subs.AccountSubscriptions(self)", "def list_subscriptions(profile=None):\n if profile is None:\n profile = subscription_profile()\n cred, _, _ = profile.get_login_credentials()\n sub_client = SubscriptionClient(cred)\n return [\n {\"Index\": i, \"Name\": sub.display_name, \"id\": sub.subscription_id}\n for i, sub in enumerate(sub_client.subscriptions.list())\n ]", "def get(self, orgname):\n permission = AdministerOrganizationPermission(orgname)\n if permission.can():\n organization = model.organization.get_organization(orgname)\n query = model.organization_skus.get_org_subscriptions(organization.id)\n\n if query:\n subscriptions = list(query.dicts())\n for subscription in subscriptions:\n subscription[\"sku\"] = marketplace_subscriptions.get_subscription_sku(\n subscription[\"subscription_id\"]\n )\n return subscriptions\n else:\n return []\n abort(401)", "def list_subscriptions(\n connection, project_id, fields=None, offset=0, limit=-1, error_msg=None\n):\n return connection.get(\n url=f'{connection.base_url}/api/subscriptions',\n params={'offset': offset, 'limit': limit, 'fields': fields},\n headers={'X-MSTR-ProjectID': project_id},\n )", "def ListSubscriptions(): # pylint: disable=unused-variable\n\n try:\n list_request = json_format.Parse(request.get_data(),\n sheriff_config_pb2.ListRequest())\n except json_format.ParseError as error:\n return jsonify(\n {'messages': [{\n 'severity': 'ERROR',\n 'text': '%s' % (error)\n }]}), 400\n list_response = sheriff_config_pb2.ListResponse()\n configs = list(luci_config.ListAllConfigs(datastore_client))\n configs = match_policy.FilterSubscriptionsByIdentity(\n auth_client, list_request, configs)\n for config_set, revision, subscription in configs:\n subscription_metadata = list_response.subscriptions.add()\n subscription_metadata.config_set = config_set\n subscription_metadata.revision = revision\n luci_config.CopyNormalizedSubscription(subscription,\n subscription_metadata.subscription)\n return (json_format.MessageToJson(\n list_response, preserving_proto_field_name=True), 200, {\n 'Content-Type': 'application/json'\n })", "def get_subscriptions(self):\n url = '{}/v2/subscriptions'.format(self.url)\n r = requests.get(url, headers=self.headers_v2)\n return r.json()", "def list(cls, **kwargs):\n response = Yola().list_subscriptions(**kwargs)\n return [cls(**sub) for sub in response['results']]", "def get_all_subscriptions(self, next_token=None):\r\n params = {'ContentType' : 'JSON'}\r\n if next_token:\r\n params['NextToken'] = next_token\r\n response = self.make_request('ListSubscriptions', params, '/', 'GET')\r\n body = response.read()\r\n if response.status == 200:\r\n return json.loads(body)\r\n else:\r\n boto.log.error('%s %s' % (response.status, response.reason))\r\n boto.log.error('%s' % body)\r\n raise self.ResponseError(response.status, response.reason, body)", "def test_list_all_response_descriptor_subscriptions_subscription_subscription_resource_spaces(self):\n pass", "def get_subscriptions(self):\n return self.subscriptions.all()", "def subscriptions(self) -> pulumi.Output[Optional[Sequence['outputs.ResourceIdResponse']]]:\n return pulumi.get(self, \"subscriptions\")", "def subscriptions(self):\r\n return v3.Subscriptions(self)", "def 
get_subscriptions(self) -> Iterator[\"Subscription\"]:\n yield from self._subscriptions[self.id]", "def getSubscriptions(self):\n\n address = self.getAddress()\n if address is None:\n return []\n else:\n return [\n \"shellies/announce\",\n \"{}/online\".format(address),\n \"{}/emeter/{}/energy\".format(address, self.getChannel()),\n \"{}/emeter/{}/returned_energy\".format(address, self.getChannel()),\n \"{}/emeter/{}/power\".format(address, self.getChannel()),\n \"{}/emeter/{}/reactive_power\".format(address, self.getChannel()),\n \"{}/emeter/{}/voltage\".format(address, self.getChannel()),\n \"{}/emeter/{}/total\".format(address, self.getChannel()),\n \"{}/emeter/{}/total_returned\".format(address, self.getChannel())\n ]", "def list(pat: str, resource_registration_endpoint: str,\n secure: bool = False) -> List[str]:\n headers={\"Authorization\": \"Bearer \"+pat}\n\n disable_warnings_if_debug(secure)\n response = request(\"GET\", resource_registration_endpoint, headers=headers, verify=secure)\n\n if not is_ok(response):\n raise Exception(\"An error occurred while listing resources: \"+str(response.status_code)+\":\"+str(response.reason)+\":\"+str(response.text))\n\n return response.json()", "def get_all_subscriptions(cls, **kwargs):\n return Subscription.query.filter(**kwargs).all()", "def subscriptions(self):\n if not hasattr(self, '_subscriptions'):\n subscriptions_resource = self.resource.subscriptions\n self._subscriptions = Subscriptions(\n subscriptions_resource, self.client)\n return self._subscriptions", "def list_subscriptions_command(client: KeyVaultClient) -> CommandResults:\n response = client.list_subscriptions_request()\n\n readable_output = tableToMarkdown('Subscriptions List',\n response,\n ['subscriptionId', 'tenantId',\n 'state', 'displayName'\n ],\n removeNull=True, headerTransform=string_to_table_header)\n return CommandResults(\n outputs_prefix='AzureKeyVault.Subscription',\n outputs_key_field='id',\n outputs=response,\n raw_response=response,\n readable_output=readable_output,\n )", "def list_subscriptions_async(\n future_session: \"FuturesSession\",\n connection,\n project_id,\n fields=None,\n offset=0,\n limit=-1,\n):\n params = {'offset': offset, 'limit': limit, 'fields': fields}\n url = f'{connection.base_url}/api/subscriptions'\n headers = {'X-MSTR-ProjectID': project_id}\n\n return future_session.get(url=url, headers=headers, params=params)", "def _get_cloudwatch_subscriptions(self):\n return self._get_subscriptions(self.cloudwatch_arn)", "def test_list_all_response_descriptor_subscriptions_subscription_subscription_resource(self):\n pass", "def list_regions(self, **kwargs):\n resource_path = \"/regions\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_regions got unknown kwargs: {!r}\".format(extra_kwargs))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n response_type=\"list[Region]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n response_type=\"list[Region]\")", "def 
region_clients(self, **kwargs):\n return stats.region_clients(self._host, self._session, **kwargs)", "def list_subscriptions(self):\n return {'abonnementen': self.customer.abonnementen}", "def _get_subscriptions(self, topic_arn):\n return self.conn.get_all_subscriptions_by_topic(topic_arn)['ListSubscriptionsByTopicResponse']['ListSubscriptionsByTopicResult']['Subscriptions']", "def list_tenants(self):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/tenants\"\n _headers = {'x-auth-token': self.cloud_admin_info['token_project']}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\" no response from Server\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\n \" tenant list Failed with status %s \" %\n response.status)\n return response.status\n output = json.loads(response.data)\n LOG_OBJ.info(\"Tenant List : %s \" % output)\n return output[\"tenants\"]", "def get_all_reservations(config):\n reservations = []\n region_list = regions(aws_access_key_id=config.keys.api,\n aws_secret_access_key=config.keys.secret)\n for region in region_list:\n _logger.info(\"Searching %s\", region)\n cnx = region.connect(aws_access_key_id=config.keys.api,\n aws_secret_access_key=config.keys.secret)\n for reservation in cnx.get_all_instances():\n _logger.info(\"Found %s %s\", reservation,\n [str(i.id) for i in reservation.instances])\n reservations.append(reservation)\n return reservations", "def test_index_response_descriptor_subscriptions_subscription_subscription_resource_spaces(self):\n pass", "def get_subscriptions(self, use_threading=False):\r\n \r\n if self._subscriptions is None:\r\n if use_threading:\r\n self.load_subscriptions_threaded()\r\n else:\r\n self._subscriptions = []\r\n for page in range(self._subscription_pages):\r\n self._load_subscriptions(page=page+1)\r\n return self._subscriptions", "def subscriptions(self):\r\n return subs.Subscriptions(self)", "def __call__(\n self,\n request: pubsub.ListSubscriptionsRequest,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Optional[float] = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> pubsub.ListSubscriptionsResponse:\n\n http_options: List[Dict[str, str]] = [\n {\n \"method\": \"get\",\n \"uri\": \"/v1/{project=projects/*}/subscriptions\",\n },\n ]\n request, metadata = self._interceptor.pre_list_subscriptions(\n request, metadata\n )\n pb_request = pubsub.ListSubscriptionsRequest.pb(request)\n transcoded_request = path_template.transcode(http_options, pb_request)\n\n uri = transcoded_request[\"uri\"]\n method = transcoded_request[\"method\"]\n\n # Jsonify the query params\n query_params = json.loads(\n json_format.MessageToJson(\n transcoded_request[\"query_params\"],\n including_default_value_fields=False,\n use_integers_for_enums=True,\n )\n )\n query_params.update(self._get_unset_required_fields(query_params))\n\n query_params[\"$alt\"] = \"json;enum-encoding=int\"\n\n # Send the request\n headers = dict(metadata)\n headers[\"Content-Type\"] = \"application/json\"\n response = getattr(self._session, method)(\n \"{host}{uri}\".format(host=self._host, uri=uri),\n timeout=timeout,\n headers=headers,\n params=rest_helpers.flatten_query_params(query_params, strict=True),\n )\n\n # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception\n # subclass.\n if response.status_code >= 400:\n raise core_exceptions.from_http_response(response)\n\n # Return the response\n resp = 
pubsub.ListSubscriptionsResponse()\n pb_resp = pubsub.ListSubscriptionsResponse.pb(resp)\n\n json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)\n resp = self._interceptor.post_list_subscriptions(resp)\n return resp", "def get(self):\n user = get_authenticated_user()\n account_number = marketplace_users.get_account_number(user)\n if not account_number:\n raise NotFound()\n\n user_subscriptions = marketplace_subscriptions.get_list_of_subscriptions(account_number)\n\n for subscription in user_subscriptions:\n bound_to_org, organization = organization_skus.subscription_bound_to_org(\n subscription[\"id\"]\n )\n # fill in information for whether a subscription is bound to an org\n if bound_to_org:\n subscription[\"assigned_to_org\"] = organization.username\n else:\n subscription[\"assigned_to_org\"] = None\n\n return user_subscriptions", "def api_get_regions():\n db_session = DBSession()\n\n rows = []\n criteria = '%'\n if request.args and request.args.get('q'):\n criteria += request.args.get('q') + '%'\n else:\n criteria += '%'\n\n regions = db_session.query(Region).filter(Region.name.like(criteria)).order_by(Region.name.asc()).all()\n if len(regions) > 0:\n if request.args.get('show_all'):\n rows.append({'id': 0, 'text': 'ALL'})\n for region in regions:\n rows.append({'id': region.id, 'text': region.name})\n\n return jsonify(**{'data': rows})", "def list(self, **params):\n # This is to ensure tenant_id key is not populated\n # if tenant_id=None is specified.\n tenant_id = params.pop('tenant_id', self.request.user.tenant_id)\n if tenant_id:\n params['tenant_id'] = tenant_id\n return self._list(**params)", "def subscriptions(self):\n if self.__subscriptions_manager is None:\n self.__subscriptions_manager = SubscriptionsManager(\n \"/subscriptions\", self._client\n )\n return self.__subscriptions_manager", "def post_list_subscriptions(\n self, response: pubsub.ListSubscriptionsResponse\n ) -> pubsub.ListSubscriptionsResponse:\n return response", "def tenancies(self) -> Iterable[dto.Tenancy]:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )", "def subscriptions(id='None'):\n\trows = mongo_data({}, [\"publisher_id\",\"dt_hour\", \"new_subs\"],\"subscribers\")\n\t#returns [{_id:...,field1:...,field2:...}]\n\n\n\tCOLS = [\"publisher_id\", \"dt_hour\", \"new subs\"]\n\tROWS = [[y[\"publisher_id\"],y[\"dt_hour\"],y[\"new_subs\"]] for y in rows]\n\n\tTITLE = 'SUBSCRIPTIONS'\n\n\treturn render_template(\"simple_tester_report.html\", cols=COLS, rows=ROWS, report_title=TITLE);", "def get_subscriptions(self, client_id):\n return self.hub.get_subscriptions(self.get_private_key(), client_id)", "def subscriptions(self) -> list[Subscription]:\n return [\n *chain.from_iterable(self._simple_subscriptions.values()),\n *self._wildcard_subscriptions,\n ]", "def subdivision_list(cls):\n Subdivision = Pool().get('country.subdivision')\n\n country = int(request.args.get('country', 0))\n if country not in [c.id for c in current_website.countries]:\n abort(404)\n subdivisions = Subdivision.search([('country', '=', country)])\n return jsonify(\n result=[s.serialize() for s in subdivisions]\n )", "async def get_subscriptions(\n self,\n\t\tfields: Optional[List[BaseUserGroupFields]] = None,\n\t\toffset: Optional[int] = None,\n\t\tcount: Optional[int] = None,\n\t\t**kwargs\n ) -> donut.GetSubscriptionsResponseModel:\n\n params = self.get_set_params(locals())\n response = await self.api.request(\"donut.getSubscriptions\", params)\n 
model = donut.GetSubscriptionsResponse\n return model(**response).response", "def list_subnets(auth=None, **kwargs):\n cloud = get_operator_cloud(auth)\n kwargs = _clean_kwargs(**kwargs)\n return cloud.list_subnets(**kwargs)", "def subscriptions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ResourceIdArgs']]]]:\n return pulumi.get(self, \"subscriptions\")", "def get_subsciptions_by_user(user_id):\n user = get_user_by_id(user_id)\n subscriptions = user.subscriptions\n return subscriptions", "def get_subscriptions(self, private_key, client_id):\n return self._samp_hub.getSubscriptions(private_key, client_id)", "def test_get_subscriptions(self):\n pass", "def test_list_template_subscriptions(self):\n pass", "def getsubscriptions(self):\n subs = {}\n for sub in self._subscriptions.values():\n subs[sub.ID] = sub.asTuple()\n return subs", "def get_tenants(self, **kwargs):\n url = self.get_url('tenants', kwargs, ['begin', 'end'])\n return self.api_client.get(url).json()", "def main():\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('reseller', 'v1', http=http)\n\n results = service.subscriptions().list(maxResults=10).execute()\n subscriptions = results.get('subscriptions', [])\n if not subscriptions:\n print('No subscriptions found.')\n else:\n print('Subscriptions:')\n for subscription in subscriptions:\n print('{0} ({1}, {2})'.format(subscription['customerId'],\n subscription['skuId'], subscription['plan']['planName']))", "def do_GET(self):\n # Check that basic auth is used.\n authorization = self.headers.get(\"Authorization\")\n if authorization == \"\" or authorization is None:\n self.send_response(400)\n self.end_headers()\n return\n\n if \"/subscriptions\" in self.path:\n self.send_response(200)\n self.end_headers()\n self.wfile.write(json.dumps(sample_client_subscription).encode())\n else:\n self.send_response(200)\n self.end_headers()\n self.wfile.write(json.dumps(sample_client_list_response).encode())", "def get_subscriptions(self, topic_name):\r\n resp = self._make_request('get',\r\n 'topics/%s/subscriptions' % topic_name)\r\n return resp.json()", "def get_subscribers(self) -> Iterator[Any]:\n for subscription in self._subscriptions[self.id]:\n yield subscription.subscriber", "def get_subscription(self):\n return self.request({\n 'path': '/' + UUID + '/subscription'})", "def GetSubscriptionsFrom(self):\n\n return self.__GetJson(\"/subscriptions/from\", True)", "def list_regions():\n regions_areas = (\n db.session.query(\n models.Region.code.label(\"region_code\"),\n models.Region.name.label(\"region_name\"),\n db.case([(models.District.code.is_(None),\n db.literal_column(\"'admin_area'\"))],\n else_=db.literal_column(\"'district'\")).label(\"area_type\"),\n db.case([(models.District.code.is_(None), models.AdminArea.code)],\n else_=models.District.code).label(\"area_code\"),\n db.case([(models.District.code.is_(None), models.AdminArea.name)],\n else_=models.District.name).label(\"area_name\")\n ).select_from(models.Region)\n .join(models.Region.areas)\n .outerjoin(models.AdminArea.districts)\n .filter(models.Region.code != \"GB\")\n .order_by(\"region_name\", \"area_name\")\n .all()\n )\n regions = {}\n areas = {}\n for row in regions_areas:\n regions[row.region_code] = row.region_name\n areas.setdefault(row.region_code, []).append(row)\n\n return render_template(\"regions.html\", regions=regions, areas=areas)", "def subscribers_by_id(self, repository_id, access_token=None):\n return self._complete_request_by_id(\n 
repository_id, \"subscribers\", access_token)", "def create_region_subscription(self, create_region_subscription_details, tenancy_id, **kwargs):\n resource_path = \"/tenancies/{tenancyId}/regionSubscriptions\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_region_subscription got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tenancyId\": tenancy_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_region_subscription_details,\n response_type=\"RegionSubscription\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_region_subscription_details,\n response_type=\"RegionSubscription\")", "def list_resource_pool(client, private_cloud, location):\n return client.list(location, private_cloud)", "def test_get_subscription_templates(self):\n pass", "def get_all_tenants():\n tenants = identity.Tenant.query.all()\n return tenants", "def list_by_subscription(\n self, filter=None, top=None, custom_headers=None, raw=False, **operation_config):\n def prepare_request(next_link=None):\n if not next_link:\n # Construct URL\n url = self.list_by_subscription.metadata['url']\n path_format_arguments = {\n 'subscriptionId': self._serialize.url(\"self.config.subscription_id\", self.config.subscription_id, 'str')\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {}\n query_parameters['api-version'] = self._serialize.query(\"self.api_version\", self.api_version, 'str')\n if filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"filter\", filter, 'str')\n if top is not None:\n query_parameters['$top'] = self._serialize.query(\"top\", top, 'int')\n\n else:\n url = next_link\n query_parameters = {}\n\n # Construct headers\n header_parameters = {}\n header_parameters['Accept'] = 'application/json'\n if self.config.generate_client_request_id:\n header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())\n if custom_headers:\n header_parameters.update(custom_headers)\n if self.config.accept_language is not None:\n header_parameters['accept-language'] = self._serialize.header(\"self.config.accept_language\", self.config.accept_language, 'str')\n\n # Construct and send request\n request = self._client.get(url, 
query_parameters, header_parameters)\n return request\n\n def internal_paging(next_link=None):\n request = prepare_request(next_link)\n\n response = self._client.send(request, stream=False, **operation_config)\n\n if response.status_code not in [200]:\n exp = CloudError(response)\n exp.request_id = response.headers.get('x-ms-request-id')\n raise exp\n\n return response\n\n # Deserialize response\n header_dict = None\n if raw:\n header_dict = {}\n deserialized = models.TopicPaged(internal_paging, self._deserialize.dependencies, header_dict)\n\n return deserialized", "def get_all_in_region(self, cloud_account_id: str, region_id: str) -> List[Dict]:\n\t\tquery_parameters = {'cloudAccountId': cloud_account_id, 'regionId': region_id}\n\t\treturn self._get(route=AWSSecurityGroupConsts.CLOUD_SECURITY_GROUP.value, params=query_parameters)", "def list_sub_tier_agencies(sub_tier_agencies):\n return JsonResponse.create(StatusCode.OK, organize_sub_tier_agencies(sub_tier_agencies))", "def test_index_response_descriptor_subscriptions_subscription_subscription_resource(self):\n pass", "def ls(region_name=DEFAULT_REGION):\n s3conn = s3.connect_to_region(region_name)\n buckets = s3conn.get_all_buckets()\n for bucket in buckets:\n print(bucket.name)", "def GetSubscriptionsForUser(self, userName):\n\n return self.__GetJson(\"/users/\"+userName+\"/subscriptions\", True)", "def get_cloudwatch_email_subscriptions(self):\n return map(lambda subscription: subscription['Endpoint'], self._get_cloudwatch_subscriptions())", "def get_tenant_resources(self):\n resources = self.context[\"tenant\"].get(\"resources\", [])\n if not resources:\n msg = (\"No resources found for tenant: %s\"\n % self.context[\"tenant\"].get(\"name\"))\n raise exceptions.NotFoundException(message=msg)\n for res_id in resources:\n self._get_resource(res_id)", "def test_successful_subscriptions_list_subscribers(self) -> None:\n result = self.api_get(\n self.test_user,\n \"/api/v1/users/me/subscriptions\",\n {\"include_subscribers\": \"true\"},\n )\n json = self.assert_json_success(result)\n self.assertIn(\"subscriptions\", json)\n for stream in json[\"subscriptions\"]:\n self.assertIsInstance(stream[\"name\"], str)\n self.assertIsInstance(stream[\"color\"], str)\n self.assertIsInstance(stream[\"invite_only\"], bool)\n # check that the stream name corresponds to an actual\n # stream; will throw Stream.DoesNotExist if it doesn't\n get_stream(stream[\"name\"], self.test_realm)\n list_streams = [stream[\"name\"] for stream in json[\"subscriptions\"]]\n # also check that this matches the list of your subscriptions\n self.assertEqual(sorted(list_streams), sorted(self.streams))", "def list_key_vaults_request(self, subscription_id: str = None,\n limit: int = DEFAULT_LIMIT, offset: int = DEFAULT_OFFSET) -> list[dict]:\n ful_url = urljoin(self.azure_cloud.endpoints.resource_manager,\n f'subscriptions/{subscription_id}/providers/Microsoft.KeyVault/'\n f'vaults?$top={limit}')\n response = self.http_request(\n 'GET', full_url=ful_url, ok_codes=[200])\n return self.get_entities_independent_of_pages(response, limit, offset)", "def ListRegions(self):\n project = properties.VALUES.core.project.GetOrFail()\n request = self.messages.CloudfunctionsProjectsLocationsListRequest(\n name='projects/' + project\n )\n return list_pager.YieldFromList(\n service=self.client.projects_locations,\n request=request,\n field='locations',\n batch_size_attribute='pageSize',\n )", "def list(self, request):\n urls = {\n 'msg': 'Must use bulk_by_sample to get SCCmec Subtype hits',\n 
}\n\n return Response(urls)", "def get_subscriptions(self):\n return {}", "def list_resource_groups_request(self, subscription_id: str, tag: str, limit: int) -> list[dict]:\n full_url = urljoin(self.azure_cloud.endpoints.resource_manager, f'subscriptions/{subscription_id}/resourcegroups?')\n filter_by_tag = azure_tag_formatter(tag) if tag else None\n\n response = self.http_request('GET', full_url=full_url, resource=self.get_management_resource(),\n params={'$filter': filter_by_tag, '$top': limit,\n 'api-version': '2021-04-01'}, ok_codes=[200])\n return self.get_entities_independent_of_pages(first_page=response, limit=limit, offset=DEFAULT_OFFSET,\n resource=self.get_management_resource())", "def list_storage_accounts(resource_group_name=None):\n scf = storage_client_factory()\n if resource_group_name:\n accounts = scf.storage_accounts.list_by_resource_group(resource_group_name)\n else:\n accounts = scf.storage_accounts.list()\n return list(accounts)", "def GetSubscriptionsTo(self):\n\n return self.__GetJson(\"/subscriptions/to\", True)", "def get_subscriptions_from_self(self):\n return self._roster.get_my_subscriptions()", "def get_subscriptions(task):\n annotations = getattr(task, '__annotations__', {})\n def _get_subscriptions():\n for key, value in annotations.items():\n if isinstance(value, str):\n yield key, value\n else:\n inner_annotation = getattr(value, '__annotations__', {})\n wrapper_return = inner_annotation.get('return', None)\n if wrapper_return == _Subscription:\n yield key, value\n return list(_get_subscriptions())", "def test_cmd_cs_subscription_list(self, mocker):\n\n mock_response = {\n 'foo': 'bar'\n }\n mocker.patch.object(\n SubscriptionClient,\n \"list\",\n return_value=mock_response\n )\n\n result = self.runner.invoke(cli, ['subscription', 'list'])\n assert result.output == json.dumps(mock_response, indent=4, sort_keys=True) + '\\n'", "def get_tenants(self):", "def get_subscription(self):\n if not hasattr(self, '_subscription'):\n self._subscription = self.admin.subscriptions.select_related('plan').get_overlapping(\n self.admin_id, DateRange(self.period, self.period_end, bounds='[]'))\n return self._subscription", "def list(request, queryset, *args, **kwargs):\r\n return object_list(\r\n request,\r\n queryset.filter(account = request.account), \r\n *args, \r\n **kwargs\r\n )", "def list_envelopes():\n\n #\n # Step 1. Prepare the options object\n #\n from_date = datetime.min.isoformat()\n #\n # Step 2. 
Get and display the results\n #\n api_client = ApiClient()\n api_client.host = base_path\n api_client.set_default_header(\"Authorization\", \"Bearer \" + access_token)\n\n envelope_api = EnvelopesApi(api_client)\n results = envelope_api.list_status_changes(account_id, from_date=from_date)\n return results", "def get_subregions(xint,conn):\n\n subregions = ('SELECT DISTINCT cvt.name, fip.value, f.name '\n 'FROM interaction i, feature_interaction fi, feature_interactionprop fip, ' \n 'feature f, cvterm cvt, cvterm cvt2, feature_relationship fr, feature f2 '\n 'WHERE f.feature_id = fi.feature_id AND fi.interaction_id = i.interaction_id '\n 'AND fi.feature_interaction_id = fip.feature_interaction_id '\n 'AND fi.role_id = cvt.cvterm_id '\n 'AND fip.type_id = cvt2.cvterm_id AND '\n 'cvt2.name = \\'subpart_info\\' AND f.feature_id = fr.subject_id '\n 'AND f2.feature_id = fr.object_id AND f.is_obsolete = \\'f\\' AND '\n 'f2.uniquename = %s AND i.uniquename = %s')\n subs = connect(subregions,xint,conn)\n return(subs)", "def list_all_agencies():\n return JsonResponse.create(StatusCode.OK, get_all_agencies())", "def transcriptions(self):\r\n return recordings.Transcriptions(self)", "def list_sqs(region, filter_by_kwargs):\n conn = boto.sqs.connect_to_region(region)\n queues = conn.get_all_queues()\n return lookup(queues, filter_by=filter_by_kwargs)", "def list_agencies(cgac_sub_tiers, frec_sub_tiers):\n return JsonResponse.create(StatusCode.OK, get_accessible_agencies(cgac_sub_tiers, frec_sub_tiers))", "def test_list_pending_template_subscriptions(self):\n pass", "def list(self, tenant=None, network=None, gateway_ip=None, cidr=None):\n path = '%s/subnets' % self.ver \n \n query = {}\n if tenant is not None:\n query['tenant_id'] = tenant\n if network is not None:\n query['network_id'] = network\n if gateway_ip is not None:\n query['gateway_ip '] = gateway_ip\n if cidr is not None:\n query['cidr '] = cidr \n path = '%s?%s' % (path, urlencode(query))\n \n res = self.client.call(path, 'GET', data='', token=self.manager.identity.token)\n self.logger.debug('Get openstack subnets: %s' % truncate(res))\n return res[0]['subnets']", "def get_tenants():\n # these are the tenant_id strings configured for the service -\n tenants_strings = conf.tenants\n result = []\n # the tenants service is a special case, as it must be a) configured to serve all tenants and b) actually maintains\n # the list of tenants in its own DB. 
in this case, we return the empty list since the tenants service will use direct\n # db access to get necessary data.\n if conf.service_name == 'tenants' and tenants_strings[0] == '*':\n return result\n\n # in dev mode, services can be configured to not use the security kernel, in which case we must get\n # configuration for a \"dev\" tenant directly from the service configs:\n if not conf.use_sk:\n for tenant in tenants_strings:\n t = {'tenant_id': tenant,\n 'iss': conf.dev_iss,\n 'public_key': conf.dev_jwt_public_key,\n 'default_access_token_ttl': conf.dev_default_access_token_ttl,\n 'default_refresh_token_ttl': conf.dev_default_refresh_token_ttl,\n }\n result.append(t)\n\n else:\n # TODO -- look up tenants in the tenants API, get the associated parameters (including sk location)\n pass\n return result", "def perform_fanout_all_regions(context, cli=False, installed_region='us-east-1'):\n\n sns_topic = utils.get_topic_arn('CreateSnapshotTopic')\n LOG.debug('perform_fanout_all_regions using SNS topic %s', sns_topic)\n\n # configure replication based on extant configs for snapshots\n if type(context) is not MockContext: # don't do in unit tests\n ensure_cloudwatch_rule_for_replication(context, installed_region)\n\n # get regions with instances running or stopped\n regions = utils.get_regions(must_contain_instances=True)\n for region in regions:\n sleep(5) # API rate limiting help\n\n send_fanout_message(\n context=context,\n region=region,\n sns_topic=sns_topic,\n cli=cli)", "def _ns_subscriptions(self, queue):\n return self._ns(queue, \"consumers\")", "def list_subscription_events(self,\n subscription_id,\n cursor=None,\n limit=None):\n\n return super().new_api_call_builder.request(\n RequestBuilder().server('default')\n .path('/v2/subscriptions/{subscription_id}/events')\n .http_method(HttpMethodEnum.GET)\n .template_param(Parameter()\n .key('subscription_id')\n .value(subscription_id)\n .should_encode(True))\n .query_param(Parameter()\n .key('cursor')\n .value(cursor))\n .query_param(Parameter()\n .key('limit')\n .value(limit))\n .header_param(Parameter()\n .key('accept')\n .value('application/json'))\n .auth(Single('global'))\n ).response(\n ResponseHandler()\n .deserializer(APIHelper.json_deserialize)\n .is_api_response(True)\n .convertor(ApiResponse.create)\n ).execute()", "def Run(self, args):\n orgs_client = organizations.Client()\n return orgs_client.List(limit=args.limit, page_size=args.page_size)" ]
[ "0.6887986", "0.64734304", "0.6237735", "0.6100922", "0.6089093", "0.6054437", "0.6008919", "0.5998848", "0.5985489", "0.5876979", "0.58604753", "0.58339596", "0.58058745", "0.57581854", "0.57325035", "0.5700422", "0.56921446", "0.56718814", "0.5643824", "0.56429935", "0.5636409", "0.56253004", "0.56243", "0.5569091", "0.5507758", "0.5484625", "0.54808044", "0.5449644", "0.5409465", "0.54038334", "0.53935486", "0.5350759", "0.5345852", "0.5341285", "0.5338066", "0.5310425", "0.52988446", "0.52790165", "0.5273368", "0.5249783", "0.5245502", "0.52274376", "0.52208644", "0.51974976", "0.5195937", "0.51953346", "0.51795155", "0.5176378", "0.516575", "0.5162234", "0.514293", "0.5134413", "0.5129228", "0.5125157", "0.51187533", "0.510007", "0.5076701", "0.50694317", "0.5062347", "0.5049392", "0.5035843", "0.502661", "0.50034523", "0.49856547", "0.49773443", "0.4958293", "0.49570352", "0.49483392", "0.49444008", "0.4941098", "0.49354127", "0.49235797", "0.49226332", "0.49201813", "0.491617", "0.49157968", "0.489612", "0.48926547", "0.48858708", "0.48785225", "0.4878356", "0.48710412", "0.48673472", "0.48624712", "0.48609403", "0.48397446", "0.48364827", "0.4831071", "0.48200876", "0.48056746", "0.48031378", "0.48016483", "0.47996318", "0.4792255", "0.47889203", "0.4784572", "0.4773705", "0.47712603", "0.47707292", "0.47643346" ]
0.7903534
0
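A minimal caller-side sketch for the list_region_subscriptions record above, assuming the standard OCI Python SDK and a configured ~/.oci/config profile (the config path, profile, and the use of the profile's tenancy OCID are assumptions made for illustration; the client operation and its list[RegionSubscription] return type come from the record itself):

import oci

# Assumes the default profile at ~/.oci/config; any valid config works.
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# The tenancy OCID from the profile stands in for any tenancy OCID here.
subscriptions = identity.list_region_subscriptions(config["tenancy"]).data
for sub in subscriptions:
    # Each RegionSubscription carries region_key, region_name, status, is_home_region.
    print(sub.region_name, sub.status, sub.is_home_region)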
Lists all the regions offered by Oracle Cloud Infrastructure.
def list_regions(self, **kwargs): resource_path = "/regions" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_regions got unknown kwargs: {!r}".format(extra_kwargs)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, header_params=header_params, response_type="list[Region]") else: return self.base_client.call_api( resource_path=resource_path, method=method, header_params=header_params, response_type="list[Region]")
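Likewise, a minimal sketch for the list_regions document above, under the same assumptions about the SDK and config file; per the document the operation takes no required arguments and returns list[Region]:

import oci

config = oci.config.from_file()  # assumed default profile
identity = oci.identity.IdentityClient(config)

# Region objects expose a short key and a canonical region name.
for region in identity.list_regions().data:
    print(region.key, region.name)  # e.g. "PHX", "us-phoenix-1"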
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_regions():\n regions_areas = (\n db.session.query(\n models.Region.code.label(\"region_code\"),\n models.Region.name.label(\"region_name\"),\n db.case([(models.District.code.is_(None),\n db.literal_column(\"'admin_area'\"))],\n else_=db.literal_column(\"'district'\")).label(\"area_type\"),\n db.case([(models.District.code.is_(None), models.AdminArea.code)],\n else_=models.District.code).label(\"area_code\"),\n db.case([(models.District.code.is_(None), models.AdminArea.name)],\n else_=models.District.name).label(\"area_name\")\n ).select_from(models.Region)\n .join(models.Region.areas)\n .outerjoin(models.AdminArea.districts)\n .filter(models.Region.code != \"GB\")\n .order_by(\"region_name\", \"area_name\")\n .all()\n )\n regions = {}\n areas = {}\n for row in regions_areas:\n regions[row.region_code] = row.region_name\n areas.setdefault(row.region_code, []).append(row)\n\n return render_template(\"regions.html\", regions=regions, areas=areas)", "def api_get_regions():\n db_session = DBSession()\n\n rows = []\n criteria = '%'\n if request.args and request.args.get('q'):\n criteria += request.args.get('q') + '%'\n else:\n criteria += '%'\n\n regions = db_session.query(Region).filter(Region.name.like(criteria)).order_by(Region.name.asc()).all()\n if len(regions) > 0:\n if request.args.get('show_all'):\n rows.append({'id': 0, 'text': 'ALL'})\n for region in regions:\n rows.append({'id': region.id, 'text': region.name})\n\n return jsonify(**{'data': rows})", "def get_regions(ec2_client=None):\n if not ec2_client:\n ec2_client = boto3.client('ec2')\n resp = ec2_client.describe_regions()\n return [region['RegionName'] for region in resp.get('Regions', [])]", "def ListRegions(self):\n project = properties.VALUES.core.project.GetOrFail()\n request = self.messages.CloudfunctionsProjectsLocationsListRequest(\n name='projects/' + project\n )\n return list_pager.YieldFromList(\n service=self.client.projects_locations,\n request=request,\n field='locations',\n batch_size_attribute='pageSize',\n )", "def get_regions(**kwargs):\n\n instance = Ceic._get_instance()\n\n get_dictionaries_method = instance._dictionary_facade.get_regions\n result = instance._make_request(get_dictionaries_method, **kwargs)\n\n return result", "def regions(self) -> Sequence[str]:\n return pulumi.get(self, \"regions\")", "def RegionList(self):\n command = \"\"\"\n IPython.notebook.kernel.execute(\"RegionList=\" + JSON.stringify(JS9.GetShapes(\"regions\", {{display: '{wid}JS9'}})));\n \"\"\".format(wid=self.wid)\n get_ipython().run_cell_magic('javascript', '', command)", "def _get_available_regions():\n session = boto3.session.Session()\n\n return session.get_available_regions(service_name='s3')", "def get_available_regions(service_name):\n session = boto3.session.Session()\n return session.get_available_regions(service_name)", "def get_available_regions(service_name):\n session = boto3.session.Session()\n return session.get_available_regions(service_name)", "def get_regions():\n\n # Also known as the 'climbing directory'\n route_guide = urlopen('https://www.mountainproject.com/route-guide',\n context=ctx)\n # Opens HTML\n region_html = route_guide.read()\n # Parses HTML with BS package\n region_soup = BeautifulSoup(region_html, 'html.parser')\n # Finds regions area of the page\n regions = region_soup.find('div', id='route-guide')\\\n .find_all('div', class_='mb-half')\n\n for region in regions:\n # Link to region area guide\n url = region.find('a')['href']\n # English name of region\n region_name = 
region.find('a').get_text()\n # Writes region name and url to Areas DB. This gives the region a\n # unique id automatically\n cursor.execute('''\n INSERT INTO Areas(url, name)\n VALUES ('%s', '%s')\n ON CONFLICT DO NOTHING\n ''' % (url, region_name))\n # Commits to DB\n conn.commit()", "def region_clients(self, **kwargs):\n return stats.region_clients(self._host, self._session, **kwargs)", "def get_regions(self,online=False):\n clients = HWIOS.pb_server.get_clients()\n regions = []\n for client in clients:\n for service in client.region_services:\n if online: \n if service['status'] == 'ON':\n for region in service['regions']:\n regions.append(region)\n else:\n for region in service['regions']:\n region['status'] = service['status']\n regions.append(region)\n return regions", "def describe_regions(\n self,\n request: dds_20151201_models.DescribeRegionsRequest,\n ) -> dds_20151201_models.DescribeRegionsResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_regions_with_options(request, runtime)", "def regions(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"regions\")", "def get_all_in_region(self, cloud_account_id: str, region_id: str) -> List[Dict]:\n\t\tquery_parameters = {'cloudAccountId': cloud_account_id, 'regionId': region_id}\n\t\treturn self._get(route=AWSSecurityGroupConsts.CLOUD_SECURITY_GROUP.value, params=query_parameters)", "def get_regions(self):\n return self._regions", "def get_valid_regions(self):\n conn = self._boto.ec2.connect_to_region(self.cli_region)\n\n regions = []\n for region in conn.get_all_regions():\n if getattr(RegionCode.Region, region.name, None) is not None:\n regions.append(RegionCode.Region[region.name])\n else:\n regions.append(region.name)\n\n return regions", "def regions(self):\n\n class RegionIter(object):\n def __init__(self, region_based):\n self._region_based = region_based\n\n def __len__(self):\n return self._region_based._region_len()\n\n def __iter__(self):\n return self()\n\n def _fix_chromosome(self, regions):\n for r in regions:\n r.fix_chromosome(copy=True)\n\n def __call__(self, key=None, *args, **kwargs):\n fix_chromosome = kwargs.pop('fix_chromosome', False)\n\n if key is None:\n iterator = self._region_based._region_iter(*args, **kwargs)\n else:\n if isinstance(key, string_types) or isinstance(key, GenomicRegion):\n iterator = self._region_based.region_subset(key, *args, **kwargs)\n else:\n iterator = self._region_based._get_regions(key, *args, **kwargs)\n\n if fix_chromosome:\n return self._fix_chromosome(iterator)\n else:\n return iterator\n\n def __getitem__(self, item):\n if isinstance(item, string_types) or isinstance(item, GenomicRegion):\n return self._region_based.region_subset(item)\n return self._region_based._get_regions(item)\n\n return RegionIter(self)", "def regions(self):\n return self._regions", "def get_valid_regions(self):\n client = self._boto.client('ec2')\n\n regions = []\n for region in client.describe_regions().get('Regions', []):\n if getattr(RegionCode.Region, region.get('RegionName'), None) is not None:\n regions.append(RegionCode.Region[region.get('RegionName')])\n else:\n regions.append(region.get('RegionName'))\n\n return regions", "def listPredefinedRegions(self):\n\n res = self._Client__proxy.listPredefinedRegions(\n self._Client__session)\n\n self.checkResult(res)\n return res[\"predefinedRegions\"]", "def DescribeAccessRegions(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DescribeAccessRegions\", params, 
headers=headers)\n response = json.loads(body)\n model = models.DescribeAccessRegionsResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def get_db_regions(self, context, regions):\n regions_objs = self.dns_manager.get_db_regions(context, regions)\n return regions_objs", "def test_aws_service_api_regions_get(self):\n pass", "def get_all_db_region(self, context):\n zone_objs = self.dns_manager.get_all_db_region(context)\n return zone_objs", "def filter_regions(self):\n return self.filter_nodes('/DistrictBuilder/Regions/Region')", "def get_all_regions(self, region_names=None, filters=None):\r\n params = {}\r\n if region_names:\r\n self.build_list_params(params, region_names, 'RegionName')\r\n if filters:\r\n self.build_filter_params(params, filters)\r\n regions = self.get_list('DescribeRegions', params,\r\n [('item', RegionInfo)], verb='POST')\r\n for region in regions:\r\n region.connection_cls = EC2Connection\r\n return regions", "async def describe_regions_async(\n self,\n request: dds_20151201_models.DescribeRegionsRequest,\n ) -> dds_20151201_models.DescribeRegionsResponse:\n runtime = util_models.RuntimeOptions()\n return await self.describe_regions_with_options_async(request, runtime)", "def regions(self) -> Optional[Sequence['outputs.GetTrafficPolicyDocumentRuleRegionResult']]:\n return pulumi.get(self, \"regions\")", "def get_all_reservations(config):\n reservations = []\n region_list = regions(aws_access_key_id=config.keys.api,\n aws_secret_access_key=config.keys.secret)\n for region in region_list:\n _logger.info(\"Searching %s\", region)\n cnx = region.connect(aws_access_key_id=config.keys.api,\n aws_secret_access_key=config.keys.secret)\n for reservation in cnx.get_all_instances():\n _logger.info(\"Found %s %s\", reservation,\n [str(i.id) for i in reservation.instances])\n reservations.append(reservation)\n return reservations", "def getStudyRegions():\n comp_name = os.environ['COMPUTERNAME']\n conn = py.connect('Driver=ODBC Driver 11 for SQL Server;SERVER=' +\n comp_name + '\\HAZUSPLUSSRVR; UID=SA;PWD=Gohazusplus_02')\n exclusionRows = ['master', 'tempdb', 'model', 'msdb', 'syHazus', 'CDMS', 'flTmpDB']\n cursor = conn.cursor()\n cursor.execute('SELECT [StateID] FROM [syHazus].[dbo].[syState]') \n for state in cursor:\n exclusionRows.append(state[0])\n cursor = conn.cursor()\n cursor.execute('SELECT * FROM sys.databases')\n studyRegions = []\n for row in cursor:\n if row[0] not in exclusionRows:\n studyRegions.append(row[0])\n studyRegions.sort(key=lambda x: x.lower())\n return studyRegions", "def describe_regions_with_options(\n self,\n request: dds_20151201_models.DescribeRegionsRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeRegionsResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.accept_language):\n query['AcceptLanguage'] = request.accept_language\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.region_id):\n query['RegionId'] = request.region_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = 
request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeRegions',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeRegionsResponse(),\n self.call_api(params, req, runtime)\n )", "def get_availability_zones(self, context, filters=None, fields=None,\n sorts=None, limit=None, marker=None,\n page_reverse=False):", "def regions(self, member_state):\n rates = self._get_rates(member_state)\n return list(rates.regions.keys())", "def add_all_regions_vp():\n gene_id = request.json['gene_id']\n vpanel_id = request.json['vpanel_id']\n panel_id = request.json['panel_id']\n add_all_regions_to_vp(s, panel_id, gene_id, vpanel_id)\n return jsonify({\"genes\": [gene_id, ]})", "def scope(self) -> List[Region]:\n return [self]", "def _get_available_region_options():\n available_regions = sorted(_get_available_regions())\n options = [ConfigurationOption(region, region) for region in available_regions]\n\n return options", "def lookups(self, request, model_admin):\r\n list_of_regions = []\r\n queryset = Region.objects.filter(parent__isnull=True).order_by(\"name\")\r\n for region in queryset:\r\n list_of_regions.append((str(region.id), region.name))\r\n return list_of_regions", "def regions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"regions\")", "def regions(self):\n regions = set()\n for report in self._reports:\n region = report.model.region\n if region is None or region in regions:\n continue\n yield region", "def get_instances(self, region):\n try:\n conn = ec2.connect_to_region(region, **self.credentials)\n region_instances = []\n reservations = conn.get_all_reservations()\n for reservation in reservations:\n for instance in reservation.instances:\n region_instances.append(instance)\n except boto.exception.EC2ResponseError:\n return []\n return region_instances", "def get_regions(self):\n if self.initiated is False:\n raise RuntimeError(\"Initiate first\")\n\n return self.R", "def getStudyRegions(self):\n exclusionRows = ['master', 'tempdb', 'model', 'msdb', 'syHazus', 'CDMS', 'flTmpDB']\n self.cursor.execute('SELECT [StateID] FROM [syHazus].[dbo].[syState]') \n for state in self.cursor:\n exclusionRows.append(state[0])\n query = 'SELECT * FROM sys.databases'\n df = pd.read_sql(query, self.conn)\n studyRegions = df[~df['name'].isin(exclusionRows)]['name']\n studyRegions = studyRegions.reset_index()\n studyRegions = studyRegions.drop('index', axis=1)\n self.studyRegions = studyRegions\n return studyRegions", "def load_all_countries(self):\n core = self.core\n regionNodes = core.load_children(self.META[\"Countries\"])\n countryNodes = []\n if regionNodes:\n for regionNode in regionNodes:\n if core.get_base_type(regionNode) == self.META[\"Region\"]:\n countryNodes += core.load_children(regionNode)\n return countryNodes\n else:\n print(\"There are no regions in the database\")", "def getRegions(self, clearCache=False):\n if clearCache:\n self._regionCache = None\n if self._regionCache is not None:\n return self._regionCache\n\n self.lock.acquire()\n\n regions = []\n self._regionsByName = {}\n\n # Iterate over all descriptors (even numbered regions)\n for index in range(0, MAX_REGIONS, 2):\n 
def storeDescriptor(descriptor, index=index):\n size = struct.unpack(\"<I\", descriptor[:4])[0]\n name = descriptor[4:].split('\\x00')[0]\n if name:\n region = Region(index + 1, size, name)\n regions.append(region)\n self._regionsByName[name] = region\n\n # Send the command the low-level way, since we already have the lock.\n self.recv.queue.put((MAX_DESCRIPTOR_LEN, storeDescriptor))\n self.send.queue.put(opSetRegion(index) + opReadLongs(MAX_DESCRIPTOR_LEN))\n\n self.recv.queue.join()\n self._regionCache = regions\n\n self.lock.release()\n return regions", "def _get_global_table_all_regions(table_name: str) -> List[dict]:\n description = _describe_table(table_name=table_name)\n replicas = description['Table'].get('Replicas', [])\n return replicas", "def regions_by_tag(self, *tags: str) -> Iterable[str]:\n node = self.shards_xml(\"regionsbytag\", tags=\",\".join(tags))[\"regions\"]\n text = node.text if node.text else \"\"\n return text.split(\",\")", "def region(self):\n return [node.region for node in self]", "def region(self):\n return regions.lookup(self.state)", "def operating_regions(self) -> pulumi.Output[Optional[Sequence['outputs.IpamOperatingRegion']]]:\n return pulumi.get(self, \"operating_regions\")", "def add_all_regions():\n gene_id = request.json['gene_id']\n panel_id = request.json['panel_id']\n tx_id = request.json['tx_id']\n gene_name = request.json['gene_name']\n project_id = get_project_id_by_panel_id(s, panel_id)\n\n add_preftxs_to_panel(s, project_id, [{\"gene\": gene_name, \"tx_id\": tx_id}, ])\n add_genes_to_panel_with_ext(s, panel_id, gene_id)\n return jsonify({\"genes\": [gene_id, ]})", "def listInstancesRegionZone(region,zone):\n\tprint \"-\"*80\n\tprint \"# Region :\",region,\" Zone\", zone\t\n\tprint \"-\"*80\n\tinstances = getInstancesRegionZone(region,zone)\n\tif instances:\n\t\tfor instance in instances:\n\t\t\tprint \"[\",instance.ami_launch_index,\"]\",instance.ip_address,\" (\",instance.private_ip_address,\") \",instance.instance_type,\" key=\",instance.key_name", "def get(self, request):\n conn = get_sdk_connection(request)\n availability_zone_list = _sdk_object_to_list(\n conn.load_balancer.availability_zones()\n )\n\n return {'items': availability_zone_list}", "def regions_dict(self):\n regions_dict = dict()\n for i, r in enumerate(self.regions):\n regions_dict[getattr(r, 'ix', i)] = r\n return regions_dict", "def test_api_regions(self):\n # load api base\n r = requests.get('{server}/api/0.1/'.format(\n server=self.get_server_url())).json()\n # load regions from url specified in api base\n r = requests.get(r['regions']).json()\n self.assertIn('count', r)\n self.assertIn('next', r)\n self.assertIn('prev', r)\n self.assertIn('regions', r)", "def get_valid_regions(self):\n pass", "def get_regions(locale):\n\n def json_file(name):\n return os.path.join(json_dir, 'regions', '%s.json' % name)\n\n filepath = json_file(locale)\n\n if not os.path.exists(filepath):\n filepath = json_file('en-US')\n if not os.path.exists(filepath):\n raise Exception('Unable to load region data')\n\n with codecs.open(filepath, encoding='utf8') as fd:\n return json.load(fd)", "def compute_zones(self):\n path = '/os-availability-zone/detail'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack availability zone: %s' % truncate(res))\n return res[0]['availabilityZoneInfo']", "def scope(self) -> List[Region]:\n return self._scope", "def get_region_services(self,format=None):\n clients = HWIOS.pb_server.get_clients()\n 
region_services = []\n for client in clients:\n region_services.extend(client.region_services)\n #for django forms\n if format == 'tuple':\n tuple_list = []\n for region_service in region_services:\n tuple_list.append((region_service['uuid'],region_service['name']))\n return tuple_list\n return region_services", "def _regions(self, voronoi_diagram, unique_id, ids, crs):\n vertices = pd.Series(voronoi_diagram.regions).take(voronoi_diagram.point_region)\n polygons = []\n for region in vertices:\n if -1 not in region:\n polygons.append(pygeos.polygons(voronoi_diagram.vertices[region]))\n else:\n polygons.append(None)\n\n regions_gdf = gpd.GeoDataFrame(\n {unique_id: ids}, geometry=polygons, crs=crs\n ).dropna()\n regions_gdf = regions_gdf.loc[\n regions_gdf[unique_id] != -1\n ] # delete hull-based cells\n\n return regions_gdf", "def region(self, args):\n m = MessageClass()\n print('123124')\n data = {'list': []}\n data['list'].append({\"Region_Name\": \"us-east-1\"})\n data['list'].append({\"Region_Name\": \"us-east-2\"})\n data['list'].append({\"Region_Name\": \"us-west-1\"})\n data['list'].append({\"Region_Name\": \"us-west-2\"})\n data['list'].append({\"Region_Name\": \"ap-northeast-1\"})\n data['list'].append({\"Region_Name\": \"ap-northeast-2\"})\n data['list'].append({\"Region_Name\": \"ap-south-1\"})\n data['list'].append({\"Region_Name\": \"ap-southeast-1\"})\n data['list'].append({\"Region_Name\": \"ap-southeast-1\"})\n data['list'].append({\"Region_Name\": \"ca-central-1\"})\n data['list'].append({\"Region_Name\": \"eu-central-1\"})\n data['list'].append({\"Region_Name\": \"eu-west-1\"})\n data['list'].append({\"Region_Name\": \"eu-west-2\"})\n data['list'].append({\"Region_Name\": \"eu-west-3\"})\n data['list'].append({\"Region_Name\": \"sa-east-1\"})\n m.data = data\n return m.to_json()", "def DescribeDestRegions(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DescribeDestRegions\", params, headers=headers)\n response = json.loads(body)\n model = models.DescribeDestRegionsResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "async def describe_regions_with_options_async(\n self,\n request: dds_20151201_models.DescribeRegionsRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeRegionsResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.accept_language):\n query['AcceptLanguage'] = request.accept_language\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.region_id):\n query['RegionId'] = request.region_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeRegions',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n 
body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeRegionsResponse(),\n await self.call_api_async(params, req, runtime)\n )", "def list_ec2(region, filter_by_kwargs):\n conn = boto.ec2.connect_to_region(region)\n instances = conn.get_only_instances()\n return lookup(instances, filter_by=filter_by_kwargs)", "def list(self, args):\n try:\n cloud = self._context.getCloudService()\n vdcs = cloud.listVirtualDatacenters()\n pprint_vdcs(vdcs)\n except (AbiquoException, AuthorizationException), ex:\n print \"Error: %s\" % ex.getMessage()", "def _getBrailleRegionsForMenu(self, obj):\n\n self._debugGenerator(\"_getBrailleRegionsForMenu\", obj)\n\n text = \"\"\n text = self._script.appendString(\n text, self._script.getDisplayedLabel(obj))\n text = self._script.appendString(\n text, self._script.getDisplayedText(obj))\n text = self._script.appendString(\n text, rolenames.getBrailleForRoleName(obj))\n\n if obj == orca_state.locusOfFocus:\n text = self._script.appendString(\n text, self._getTextForAvailability(obj))\n text = self._script.appendString(text,\n self._getTextForAccelerator(obj),\n \"\")\n\n regions = []\n componentRegion = braille.Component(obj, text)\n regions.append(componentRegion)\n\n return [regions, componentRegion]", "def subdivision_list(cls):\n Subdivision = Pool().get('country.subdivision')\n\n country = int(request.args.get('country', 0))\n if country not in [c.id for c in current_website.countries]:\n abort(404)\n subdivisions = Subdivision.search([('country', '=', country)])\n return jsonify(\n result=[s.serialize() for s in subdivisions]\n )", "def GetWorldRegions():\n return GetDataFromCsvFile('world_regions.csv')", "def _getBrailleRegionsForComboBox(self, obj):\n\n self._debugGenerator(\"_getBrailleRegionsForComboBox\", obj)\n\n regions = []\n\n focusedRegionIndex = 0\n label = self._script.getDisplayedLabel(obj)\n if label and (len(label) > 0):\n regions.append(braille.Region(label + \" \"))\n focusedRegionIndex = 1\n\n # Check to see if the text is editable. 
If so, then we want\n # to show the text attributes (such as selection -- see bug\n # 496846 for more details).\n #\n textObj = None\n for child in obj:\n if child and child.getRole() == pyatspi.ROLE_TEXT:\n textObj = child\n if textObj and textObj.getState().contains(pyatspi.STATE_EDITABLE):\n textRegion = braille.Text(textObj)\n regions.append(textRegion)\n else:\n displayedText = self._script.getDisplayedText(obj)\n if displayedText:\n regions.append(braille.Region(displayedText))\n\n regions.append(braille.Region(\n \" \" + rolenames.getBrailleForRoleName(obj)))\n\n # Things may not have gone as expected above, so we'll do some\n # defensive programming to make sure we don't get an index out\n # of bounds.\n #\n if focusedRegionIndex >= len(regions):\n focusedRegionIndex = 0\n if len(regions) == 0:\n focusedRegion = None\n else:\n focusedRegion = regions[focusedRegionIndex]\n\n # [[[TODO: WDW - perhaps if a text area was created, we should\n # give focus to it.]]]\n #\n return [regions, focusedRegion]", "def list_ecr_repositories():\n repositories = ECS_MANAGER.list_ecr_repositories()\n\n if repositories:\n print(str_sep)\n print(\"Listing repositories available in {}\".format(SESSION.region_name.upper()))\n print(\"{:30}{:60}\".format('NAME', 'URI'))\n print(str_sep)\n\n for rep in repositories['repositories']:\n print(\"{:30}{:60}\".format(rep['repositoryName'], rep['repositoryUri']))", "def describe_availability_zones(\n self,\n request: dds_20151201_models.DescribeAvailabilityZonesRequest,\n ) -> dds_20151201_models.DescribeAvailabilityZonesResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_availability_zones_with_options(request, runtime)", "def vpc_region(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"vpc_region\")", "def _getBrailleRegionsForImage(self, obj):\n\n self._debugGenerator(\"_getBrailleRegionsForImage\", obj)\n\n return self._getDefaultBrailleRegions(obj, pyatspi.ROLE_IMAGE)", "def list_resource_pool(client, private_cloud, location):\n return client.list(location, private_cloud)", "def get_region(rid):\n region = Region.query.get_or_404(rid)\n return jsonify(region.to_long_json())", "def _getBrailleRegionsForHtmlContainer(self, obj):\n\n self._debugGenerator(\"_getBrailleRegionsForHtmlContainer\", obj)\n\n return self._getDefaultBrailleRegions(obj)", "def operating_regions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IpamOperatingRegionArgs']]]]:\n return pulumi.get(self, \"operating_regions\")", "def geneExonicRegions(self, df):\n scaffold = df.iloc[0].scaffold\n strand = df.iloc[0].strand\n gene_type = df.iloc[0].gene_type\n gene_id = df.iloc[0].gene_id\n gene_name = df.iloc[0].gene_name\n start = df.start.min()\n end = df.end.max()\n bp = [False] * (end - start + 1)\n for i in range(df.shape[0]):\n s = df.iloc[i]['start'] - start\n e = df.iloc[i]['end'] - start + 1\n bp[s:e] = [True] * (e - s)\n regions = list(range(start, end + 1))\n groups = []\n\n for i, j in groupby(bp):\n groups.append((i, len(list(j))))\n e_start = 0\n\n for i in groups:\n e_end = e_start + i[1]\n if i[0]:\n record = Record(scaffold=scaffold, start=regions[e_start],\n end=regions[e_end - 1], gene_type=gene_type, gene_id=gene_id,\n gene_name=gene_name, strand=strand)\n yield record\n e_start += i[1]", "def rendered_regions(self, obj):\n return obj.render_json(self.context.get('request'))", "def ex_list_availability_zones(self, only_available=True):\n params = {'Action': 'DescribeAvailabilityZones'}\n\n if only_available:\n 
params.update({'Filter.0.Name': 'state'})\n params.update({'Filter.0.Value.0': 'available'})\n\n params.update({'Filter.1.Name': 'region-name'})\n params.update({'Filter.1.Value.0': self.region_name})\n\n result = self.connection.request(self.path,\n params=params.copy()).object\n\n availability_zones = []\n for element in self._findall(result, 'availabilityZoneInfo/item'):\n name = self._findtext(element, 'zoneName')\n zone_state = self._findtext(element, 'zoneState')\n region_name = self._findtext(element, 'regionName')\n\n availability_zone = ExEC2AvailabilityZone(\n name=name,\n zone_state=zone_state,\n region_name=region_name\n )\n availability_zones.append(availability_zone)\n\n return availability_zones", "def regions_json(self, filename):\n with open(filename) as f:\n return json.load(f)", "def DescribeAccessRegionsByDestRegion(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DescribeAccessRegionsByDestRegion\", params, headers=headers)\n response = json.loads(body)\n model = models.DescribeAccessRegionsByDestRegionResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def getregion(self, *args, **kwargs):\n return _image.image_getregion(self, *args, **kwargs)", "def list_instances(self):\n\n response = self.client.service.instances().aggregatedList(\n project=self.client.project_id).execute()\n\n zones = response.get('items', {})\n instances = []\n for zone in zones.values():\n for instance in zone.get('instances', []):\n instances.append(instance)\n\n return instances", "def list_rds(region, filter_by_kwargs):\n conn = boto.rds.connect_to_region(region)\n instances = conn.get_all_dbinstances()\n return lookup(instances, filter_by=filter_by_kwargs)", "def _regions(self, voronoi_diagram, unique_id, ids, crs):\n # generate DataFrame of results\n regions = pd.DataFrame()\n regions[unique_id] = ids # add unique id\n regions[\"region\"] = voronoi_diagram.point_region # add region id for each point\n\n # add vertices of each polygon\n vertices = []\n for region in regions.region:\n vertices.append(voronoi_diagram.regions[region])\n regions[\"vertices\"] = vertices\n\n # convert vertices to Polygons\n polygons = []\n for region in tqdm(regions.vertices, desc=\"Vertices to Polygons\"):\n if -1 not in region:\n polygons.append(Polygon(voronoi_diagram.vertices[region]))\n else:\n polygons.append(None)\n # save polygons as geometry column\n regions[\"geometry\"] = polygons\n\n # generate GeoDataFrame\n regions_gdf = gpd.GeoDataFrame(regions.dropna(), geometry=\"geometry\")\n regions_gdf = regions_gdf.loc[\n regions_gdf[\"geometry\"].length < 1000000\n ] # delete errors\n regions_gdf = regions_gdf.loc[\n regions_gdf[unique_id] != -1\n ] # delete hull-based cells\n regions_gdf.crs = crs\n return regions_gdf", "def get_all_zones():\n cf = CloudFlare.CloudFlare(raw=True)\n page_number = 0\n total_pages = 1\n all_zones = []\n while page_number < total_pages:\n page_number += 1\n raw_results = cf.zones.get(params={'per_page':100, 'page':page_number})\n zones = raw_results['result']\n all_zones += zones\n total_pages = raw_results['result_info']['total_pages']\n return all_zones", "def endpoint_group_region(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"endpoint_group_region\")", "def ReadRegions(self, fname=\"temp\"):\n self.fname = fname\n command = 
\"\"\"IPython.notebook.kernel.execute('file = open(\"temp\", \"w\"); [file.write(x[\"wcsstr\"]) for x in '+ JSON.stringify(JS9.GetShapes(\"regions\", {{display: '{wid}JS9'}})) +']; file.close()');\"\"\".format(wid=self.wid)\n get_ipython().run_cell_magic('javascript', '', command)", "def ls(region_name=DEFAULT_REGION):\n s3conn = s3.connect_to_region(region_name)\n buckets = s3conn.get_all_buckets()\n for bucket in buckets:\n print(bucket.name)", "def _choose_regions(self, display_regions=False):\n dstl = Load_DSTL()\n if self.class_type == 1:\n # Select regions where there are buildings (with red roofs)\n test_image, test_mask = dstl.extract_region_pos(2300, 3000, cutout_size=[400, 400], object_class=self.class_type)\n train_image, train_mask = dstl.extract_region_pos(1900, 3100, cutout_size=[400, 400], object_class=self.class_type)\n cv_image, cv_mask = dstl.extract_region_pos(950, 1450, cutout_size=[200, 200], object_class=self.class_type)\n elif self.class_type == 5:\n train_image, train_mask = dstl.extract_region_pos(1150, 2150, cutout_size=[400, 400], object_class=self.class_type)\n test_image, test_mask = dstl.extract_region_pos(2300, 3000, cutout_size=[400, 400], object_class=self.class_type)\n cv_image, cv_mask = dstl.extract_region_pos(1900, 1950, cutout_size=[400, 400], object_class=self.class_type)\n else:\n pass\n self.images = {'train': train_image, 'cv': cv_image, 'test': test_image}\n self.masks = {'train': train_mask, 'cv': cv_mask, 'test': test_mask}\n if display_regions:\n for key in self.images.keys():\n display_three_band(self.images[key], self.masks[key], colors='green', title='{:} region'.format(key))", "def get_regions_in_partition(self, prefix=None, delimiter='/'):\n if prefix is None:\n prefix = self.s3_path\n else:\n prefix = self._strip_slashes(prefix)\n\n query_params = {\n 'Bucket': self.s3_bucket,\n 'Prefix': prefix + '/',\n 'Delimiter': delimiter\n }\n\n # We currently should be able to get all regions in a single request\n # TODO: Fail if we get a next token - there's more to this prefix than meets the eye\n region_list = []\n response = self.s3_client.list_objects_v2(**query_params)\n for c_prefix in response.get('CommonPrefixes', []):\n region = self._extract_region_from_prefix(c_prefix)\n if region:\n region_list.append(region)\n\n return region_list", "def main(self, _):\n all_addresses = find_addresses.probe_regions()\n\n print(\"\")\n if not all_addresses:\n print(\"No namespace elastic IP addresses found.\")\n\n for region in consts.REGIONS:\n region_addresses = [address for address in all_addresses\n if address['region'] == region]\n if not region_addresses:\n continue\n\n print(f\"{region}: {len(region_addresses)} address(es) found:\")\n for address in region_addresses:\n if 'instance_name' in address:\n print(f\" {address['ip']} ({address['instance_name']})\")\n elif 'association_id' in address:\n print(f\" {address['ip']} (unknown association)\")\n else:\n print(f\" {address['ip']} (not associated)\")", "def get_regionlist(chosenmodel):\n regionlist = list(chosenmodel.regions.keys())\n [ regionlist.remove(key) for key in regionlist\n if type(chosenmodel.regions[key]) is dict ]\n return regionlist", "def get_regions_from_genes(gene_list, gtf_pr):\n\n regions = []\n\n bad_genes = []\n for gene in gene_list:\n\n gene_pr = gtf_pr[gtf_pr.gene_name == gene]\n\n if gene_pr.empty:\n bad_genes.append(gene)\n continue\n\n chrom = gene_pr.df.Chromosome.to_list()[0]\n start = gene_pr.df.Start.min() - 100\n end = gene_pr.df.End.max() + 100\n\n 
regions.append(\"{}:{}-{}\".format(chrom, start, end))\n\n if bad_genes:\n print(\n \"\\n!!ERROR!! At least one gene from the list was not found in the gtf file. Please make sure the gene symbol provided is correct and in the gtf file. If the symbol is a correct symbol, check for alternative gene symbols in the gtf file.\"\n )\n print(\"Bad Gene(s):\\n\\t- {}\\n\".format(\"\\n\\t- \".join(bad_genes)))\n sys.exit(1)\n\n return regions", "def to_representation(self, instance):\n\n return instance.region", "def list(region, profile):\n ini_data = {}\n environment = {}\n\n if region:\n environment['region'] = region\n else:\n environment['region'] = find_myself()\n\n if profile:\n environment['profile'] = profile\n\n ini_data['environment'] = environment\n if start_list(ini_data):\n sys.exit(0)\n else:\n sys.exit(1)", "def children(self) -> List[Region]:\n return []" ]
[ "0.7262118", "0.7250822", "0.7223589", "0.70556355", "0.68627703", "0.65822685", "0.6565507", "0.6476824", "0.6466699", "0.6466699", "0.644316", "0.6336067", "0.6334379", "0.6329589", "0.63073355", "0.6281749", "0.62563527", "0.6244125", "0.62346613", "0.6221511", "0.6212274", "0.6182304", "0.6176221", "0.6171696", "0.61682296", "0.61677974", "0.6072196", "0.5903326", "0.590019", "0.58367443", "0.583467", "0.5831028", "0.5818329", "0.57916427", "0.57903224", "0.57340103", "0.5726184", "0.57256097", "0.57202905", "0.57149196", "0.5700994", "0.56969154", "0.5633915", "0.56143415", "0.5605217", "0.55921537", "0.5567348", "0.556731", "0.5563761", "0.55278975", "0.54959756", "0.548831", "0.54709584", "0.54625", "0.5458015", "0.5441608", "0.5439951", "0.5410245", "0.54034436", "0.5399922", "0.5369902", "0.5359562", "0.53441906", "0.53391737", "0.53267306", "0.5281628", "0.5279236", "0.5268129", "0.5264979", "0.5243102", "0.5239248", "0.52338386", "0.52274615", "0.52097386", "0.5195014", "0.51903695", "0.5188749", "0.51854396", "0.5168568", "0.51680255", "0.51665473", "0.5161573", "0.5156582", "0.5149726", "0.5128032", "0.5127744", "0.5127578", "0.5125774", "0.5119723", "0.5119039", "0.5110655", "0.5110089", "0.5095359", "0.50944954", "0.5089708", "0.50833434", "0.5078201", "0.50618637", "0.5061173", "0.5059552" ]
0.71180004
3
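Most of the negative snippets in the record above follow one pattern: call a provider's DescribeRegions-style endpoint, then reduce the response to a plain list of region names. A minimal sketch of that pattern, assuming boto3 and the public EC2 DescribeRegions response shape; the helper name is ours:

import boto3

def list_region_names():
    # describe_regions() returns {'Regions': [{'RegionName': ...}, ...]};
    # keep only the bare names, as most of the snippets above do.
    ec2 = boto3.client('ec2')
    return [r['RegionName'] for r in ec2.describe_regions()['Regions']]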
Lists the SMTP credentials for the specified user. The returned object contains the credential's OCID and the SMTP user name, but not the SMTP password. The SMTP password is returned only upon creation.
def list_smtp_credentials(self, user_id, **kwargs): resource_path = "/users/{userId}/smtpCredentials" method = "GET" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_smtp_credentials got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[SmtpCredentialSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="list[SmtpCredentialSummary]")
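A hedged usage sketch for the method above, assuming it is exposed on oci.identity.IdentityClient as in the OCI Python SDK and that each SmtpCredentialSummary carries id and username fields; the user OCID is a placeholder:

import oci

config = oci.config.from_file()  # reads the default ~/.oci/config profile
identity = oci.identity.IdentityClient(config)
user_ocid = "ocid1.user.oc1..exampleuniqueID"  # placeholder, not a real OCID
for cred in identity.list_smtp_credentials(user_ocid).data:
    # Each summary exposes the OCID and the SMTP user name; the SMTP
    # password is only returned once, by the create call.
    print(cred.id, cred.username)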
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_credentials(user):\n return Credentials.list_credentials(user)", "def get_user_credentials(connection):\n\n response = connection.get_json('user')\n user_data = response.get('user', None)\n if user_data is None:\n raise SAPCliError('gCTS response does not contain \\'user\\'')\n\n config_data = user_data.get('config', None)\n if config_data is None:\n return []\n\n user_credentials = [cred for cred in config_data if cred['key'] == 'USER_AUTH_CRED_ENDPOINTS']\n return json.loads(user_credentials[0]['value'])", "def list_credentials(self, **_params):\r\n return self.get(self.credentials_path, params=_params)", "def get_user_cred(self):\n if Config.eap_outer == 'PEAP' or Config.eap_outer == 'TTLS':\n self.__get_credentials_from_config()", "def create_smtp_credential(self, create_smtp_credential_details, user_id, **kwargs):\n resource_path = \"/users/{userId}/smtpCredentials\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_smtp_credential got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_smtp_credential_details,\n response_type=\"SmtpCredential\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_smtp_credential_details,\n response_type=\"SmtpCredential\")", "def GetCredentials(self):\n return self._session.get(_CREDENTIAL_KEY, credentials.MapdCredentials())", "def display_credentials(cls):\n return cls.credential_list", "def display_credentials(cls):\n return cls.credential_list", "def display_credentials(cls):\n return cls.credential_list", "def email_user(user, template_path, from_address, context_dict):\n return email_list([user.email], template_path, from_address, context_dict)", "def get_all_credentials():\n session = db.get_session()\n return (session.query(network_models_v2.Credential).all())", "def list_o_auth_client_credentials(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/oauth2ClientCredentials\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"lifecycle_state\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n 
\"list_o_auth_client_credentials got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n if 'lifecycle_state' in kwargs:\n lifecycle_state_allowed_values = [\"CREATING\", \"ACTIVE\", \"INACTIVE\", \"DELETING\", \"DELETED\"]\n if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:\n raise ValueError(\n \"Invalid value for `lifecycle_state`, must be one of {0}\".format(lifecycle_state_allowed_values)\n )\n\n query_params = {\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"lifecycleState\": kwargs.get(\"lifecycle_state\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[OAuth2ClientCredentialSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[OAuth2ClientCredentialSummary]\")", "def display_credentials(cls):\n return cls.credentials_list", "def display_credentials(cls):\n return cls.credentials_list", "def user_credentials(self):\r\n credentials = {}\r\n if EMAIL_AUTHENTICATION:\r\n credentials[\"email\"] = self.cleaned_data[\"email\"]\r\n else:\r\n credentials[\"username\"] = self.cleaned_data[\"username\"]\r\n credentials[\"password\"] = self.cleaned_data[\"password1\"]\r\n return credentials", "def user_credentials(self):\r\n credentials = {}\r\n if EMAIL_AUTHENTICATION:\r\n credentials[\"email\"] = self.cleaned_data[\"email\"]\r\n else:\r\n credentials[\"username\"] = self.cleaned_data[\"username\"]\r\n credentials[\"password\"] = self.cleaned_data[\"password\"]\r\n return credentials", "def credentials(self) -> Sequence['outputs.DeviceCredentialResponse']:\n return pulumi.get(self, \"credentials\")", "def get_credentials(self):\n return PlainCredentials(self.user_name, self.password)", "def getCredentials(self):\n if self.result(): # Accepted?\n username = self.username_le.text()\n password = \"\"\n if self.askpassword:\n password = self.password_le.text()\n\n return username, password\n\n raise CredentialDialogReject()", "def credentials(self) -> pulumi.Output[Optional['outputs.CredentialsResponse']]:\n return pulumi.get(self, \"credentials\")", "def GetUserCredentials():\n email = options.email\n if email is None:\n email = GetEmail(\"Email (login for uploading to %s)\" % options.server)\n password = getpass.getpass(\"Password for %s: \" % email)\n return (email, password)", "def credentials(self):\n\n return self._credentials", "def credentials(self):\n return self._credentials", "def get_cred(site_id, user_id):\n log = current_app.log\n db = request.db\n Cred = db.tables.Cred\n cred = 
Cred.query.filter_by(cred_owner=user_id,\n site_id=site_id).first_or_404()\n log.info(\"Fetched cred for user %u at site %u.\", user_id, site_id)\n return jsonify(cred.cred_value)", "def display_credential(cls):\n return cls.credential_list", "def GetUserCredentials(self):\r\n # Create a local alias to the email variable to avoid Python's crazy\r\n # scoping rules.\r\n global keyring\r\n email = self.email\r\n if email is None:\r\n email = GetEmail(\"Email (login for uploading to %s)\" % self.server)\r\n password = None\r\n if keyring and not email in self.accounts_seen:\r\n try:\r\n password = keyring.get_password(self.host, email)\r\n except:\r\n # Sadly, we have to trap all errors here as\r\n # gnomekeyring.IOError inherits from object. :/\r\n print \"Failed to get password from keyring\"\r\n keyring = None\r\n if password is not None:\r\n print \"Using password from system keyring.\"\r\n self.accounts_seen.add(email)\r\n else:\r\n password = getpass.getpass(\"Password for %s: \" % email)\r\n if keyring:\r\n answer = raw_input(\"Store password in system keyring?(y/N) \").strip()\r\n if answer == \"y\":\r\n keyring.set_password(self.host, email, password)\r\n self.accounts_seen.add(email)\r\n return (email, password)", "def get_credentials(self):\n return self.credentials", "def get_credentials(service_name=\"dataforSeo\", uname=\"matteo.jriva@gmail.com\"):\n pw = keyring.get_password(service_name, uname)\n return [uname, pw]", "def list_credentials():\n creds = load_auth()\n max_username_len = max([len(c.username) for c in creds]) if len(creds) > 0 else 1\n long_format = f\"{{:{max_username_len}}} for {{}}\"\n for cred in creds:\n if len(cred.hostname) > 0:\n print(str.format(long_format, cred.username, cred.hostname))\n else:\n print(cred.username)\n if len(creds) == 0 and os.isatty(1):\n print(\"No credentials configured\")", "def get_stored_credentials(user_id):\n #\n # To instantiate an OAuth2Credentials instance from a Json\n # representation, use the oauth2client.client.Credentials.new_from_json\n # class method.\n user = engine.query(User).filter(userId=user_id).first()\n if user:\n user_dict = user.__dict__\n if user_dict['credentials']:\n # credentials = Credentials.new_from_json(user['credentials'])\n credentials = json.loads(user_dict['credentials'])\n token_expiry = credentials['token_expiry']\n dexp = parser.parse(str(token_expiry))\n dexp = dexp.replace(tzinfo=None)\n dnow = datetime.now()\n\n if dexp > dnow:\n return Credentials.new_from_json(user_dict['credentials'])\n else:\n status_code, data = renew_access_token(client_id=credentials['client_id'],\n client_secret=credentials['client_secret'],\n refresh_token=credentials['refresh_token'],\n )\n if status_code == INT_OK:\n credentials['access_token'] = data['access_token']\n credentials['token_expiry'] = datetime_util(datetime.now() + timedelta(seconds=float(str(data['expires_in']))))\n credentials = Credentials.new_from_json(json_encode(credentials))\n user.update_credentials(credentials.to_json())\n user.sync()\n return credentials\n else:\n return None\n else:\n return None\n return None", "def get_credentials(self, context, filters=None, fields=None):\n return self._get_collection(context,\n network_models_v2.Credential,\n self._make_credential_dict,\n filters=filters,\n fields=fields)", "def get_contact(self, username, password):\n\t\tdn, username = self.auth(username, password)\n\t\tif self.is_blacklisted(username):\n\t\t\traise ServiceForbidden()\n\n\t\tuser = self.get_udm_user(username=username)\n\t\tif not 
self.send_plugins:\n\t\t\traise ServiceForbidden()\n\n\t\treturn [{\n\t\t\t\"id\": p.send_method(),\n\t\t\t\"label\": p.send_method_label(),\n\t\t\t\"value\": user[p.udm_property]\n\t\t} for p in self.send_plugins.values() if p.udm_property in user]", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'gmail-python-spam-filter.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def _obtain_service_account_creds(self) -> service_account.Credentials:\n credentials_json = self._raw_credentials.get(\"credentials_json\")\n admin_email = self._raw_credentials.get(\"email\")\n account_info = self._load_account_info(credentials_json)\n creds = service_account.Credentials.from_service_account_info(account_info, scopes=SCOPES)\n self._creds = creds.with_subject(admin_email)", "def get_credentials():\n credentials = tools.get_credentials_file()\n session_credentials = session.get_session_credentials()\n for credentials_key in credentials:\n\n # checking for not false, but truthy value here is the desired behavior\n session_value = session_credentials.get(credentials_key)\n if session_value is False or session_value:\n credentials[credentials_key] = session_value\n return credentials", "def delete_smtp_credential(self, user_id, smtp_credential_id, **kwargs):\n resource_path = \"/users/{userId}/smtpCredentials/{smtpCredentialId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_smtp_credential got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"smtpCredentialId\": smtp_credential_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def credentials(self):\n if self.user and self.is_authenticated():\n return AuthCredentials(['authenticated'] + 
self.user.permissions)\n else:\n return AuthCredentials()", "def view_list_users(self, user):\r\n return user.realm._users.keys()", "def get_credentials(self):\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir, self.CRED_FILENAME)\r\n \r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials(self):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'homework_logger-gmail-api.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\n flow.user_agent = self.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def account_credential_details(self) -> Sequence['outputs.AccountCredentialDetailsResponse']:\n return pulumi.get(self, \"account_credential_details\")", "def account_credential_details(self) -> Sequence['outputs.AccountCredentialDetailsResponse']:\n return pulumi.get(self, \"account_credential_details\")", "def _GetCredentialsIter(self, credentials_file=None):\n if not credentials_file:\n credentials_file = os.path.join(os.path.dirname(__file__),\n 'credentials.txt')\n if os.path.exists(credentials_file):\n with open(credentials_file) as f:\n for credentials in f:\n username, password = credentials.strip().split(':')\n yield username, password", "def get_credentials(self, oid=None):\n path = '/credentials'\n key = 'credentials'\n if oid is not None:\n path = '%s/%s' % (path, oid)\n key = 'credential'\n res = self.client.call(path, 'GET', data='', token=self.token)\n self.logger.debug('Get openstack credentials: %s' % truncate(res))\n try:\n return res[0][key]\n except:\n raise OpenstackError('No credentials found')", "def listUsers(self, user_uuid):\n user_list = []\n if not user_uuid:\n log(logging.DEBUG, \"Looking for all connected users\")\n for user in self.users.keys():\n print(\"\\n\\n USER IS \" + str(self.users[user]))\n user_info = {\n 'user_uuid' : user,\n 'certificate' : self.users[user]['description']['data']['certificate'],\n 'personal_public_key' : self.users[user]['description']['data']['personal_public_key']\n }\n user_list.append(user_info)\n else:\n log(logging.DEBUG, \"Looking for \\\"%s\\\"\" % user_uuid)\n user_list.append(self.getUser(user_uuid))\n return user_list", "def GetAccountNameAndPassword(credential,\n credentials_path=DEFAULT_CREDENTIAL_PATH):\n if (credentials_path == DEFAULT_CREDENTIAL_PATH and not\n os.path.exists(DEFAULT_CREDENTIAL_PATH)):\n cloud_storage.GetIfChanged(\n DEFAULT_CREDENTIAL_PATH, DEFAULT_CREDENTIAL_BUCKET)\n\n with 
open(credentials_path, 'r') as f:\n credentials = json.load(f)\n c = credentials.get(credential)\n return c['username'], c['password']", "def get_credentials():\n return ServiceAccountCredentials.from_json_keyfile_dict(SERVICE_ACCOUNT, scopes = SCOPES)", "def get_credentials(servise: str) -> google.oauth2.credentials.Credentials:\n\n # SQL query to get the credentials for the current user from servise credentials table\n query = f\"\"\"\n SELECT token, token_uri, client_id, refresh_token, client_secret, scopes\n FROM {servise}_credentials\n WHERE user_id=?;\n \"\"\"\n\n # Get the credentials\n with connect(DATABASE) as db:\n credentials = db.execute(query, (session[\"user_id\"],)).fetchone()\n\n # Return None if it doesn't exist it the database\n if not credentials: return None\n\n # Transfer the credentials to a dictionary\n credentials_dict = {\n \"token\": credentials[0],\n \"token_uri\": credentials[1],\n \"client_id\": credentials[2],\n \"refresh_token\": credentials[3],\n \"client_secret\": credentials[4],\n \"scopes\": None if credentials[5] is None else credentials[5].split(\" \")\n }\n\n # Return a google Credentials object\n return google.oauth2.credentials.Credentials(**credentials_dict)", "def extract_credentials(path):\n if not os.path.isfile(path):\n raise IOError(None, \"Credential file was not found at %s\" % path)\n\n if os.name == 'posix':\n mode = os.stat(path)[stat.ST_MODE]\n\n if stat.S_IRWXG & mode or stat.S_IRWXO & mode:\n raise IOError(None, \"Credential file cannot be accessible by group or other. Please chmod 600 the credential file.\")\n\n access_key, secret_key = '', ''\n with file(path, 'r') as f:\n for line in (line.strip() for line in f):\n if line.startswith(\"AWSAccessKeyId=\"):\n access_key = line.partition('=')[2]\n elif line.startswith(\"AWSSecretKey=\"):\n secret_key = line.partition('=')[2]\n\n if not access_key or not secret_key:\n raise IOError(None, \"Credential file must contain the keys 'AWSAccessKeyId' and 'AWSSecretKey'\")\n\n return Credentials(access_key, secret_key)", "def get_creds(self):\n return self.creds", "def credentials(self):\n if self._credentials is None:\n all_credentials = self.registry_client.registries.list_credentials(\n self.resources.group.name,\n self.registry.name,\n )\n first_password = next(iter(all_credentials.passwords)).value\n self._credentials = LoginCredentials(\n all_credentials.username,\n first_password,\n )\n return self._credentials", "def get_credentials(self):\n home_dir = os.path.expanduser(\"~\")\n credential_dir = os.path.join(home_dir, \".credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, \"autoto.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, self.auth_flags)\n print(\"Storing credentials to \" + credential_path)\n return credentials", "def get_credentials(path='~/.pgpass', db=DB):\n\n # Load credentials from path\n with open(os.path.expanduser(path), 'r') as file:\n host, port, _, user, password = file.read().strip().split(':')\n \n return host, port, user, password, db", "def credentials(self):\n return CurrentProject().config.credentials[self.key]", "def list_for_user(cls, user, cursor_url=None, limit=10):\n cursor = Cursor(urlsafe=cursor_url)\n messages, next_cursor, more = 
cls.list_query(user).fetch_page(limit, start_cursor=cursor)\n return (messages, next_cursor, more)", "def get_credentials(account):\n credential_dir = os.path.join(HOME_DIR, META_DIR, account, \"credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'pyDrive.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'credentialv_modify.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n store = Storage(CREDENTIAL_PATH)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, None)\n return credentials", "def credentials(self) -> Optional[pulumi.Input['CredentialsArgs']]:\n return pulumi.get(self, \"credentials\")", "def credential_list():\n rows = safeisland.list_certificates()\n certs = []\n for row in rows:\n# certs.append(row[\"cert\"])\n certs.append({\"uuid\": row[\"uuid\"], \"cert\": row[\"cert\"]})\n\n return {\"payload\": certs}", "def get_creds(\n user=\"default\", config=None, scope=default_scope, creds_dir=None, save=True\n):\n config = config or get_config()\n try:\n if \"private_key_id\" in config:\n return SACredentials.from_service_account_info(config, scopes=scope)\n\n if not isinstance(user, str):\n raise ConfigException(\n \"Need to provide a user key as a string if not using a service account\"\n )\n\n if creds_dir is None:\n creds_dir = get_config_dir() / \"creds\"\n\n creds_file = Path(creds_dir) / user\n\n if creds_file.exists():\n # need to convert Path to string for python 2.7\n return OAuthCredentials.from_authorized_user_file(str(creds_file))\n\n flow = InstalledAppFlow.from_client_config(config, scope)\n creds = flow.run_local_server(\n host=\"localhost\",\n port=8182,\n authorization_prompt_message=\"Please visit this URL: {url}\",\n success_message=\"The auth flow is complete; you may close this window.\",\n open_browser=False,\n )\n\n if save:\n creds_data = {\n \"refresh_token\": creds.refresh_token,\n \"token_uri\": creds.token_uri,\n \"client_id\": creds.client_id,\n \"client_secret\": creds.client_secret,\n \"scopes\": creds.scopes,\n }\n\n ensure_path(creds_dir)\n creds_file.write_text(decode(json.dumps(creds_data)))\n\n return creds\n except Exception:\n exc_info = sys.exc_info()\n raise ConfigException(*exc_info[1:])", "def find_credentials(cls, 
account):\n for credential in cls.credentials_list:\n if credential.account == account:\n return credential", "def credentials(self, rel_id=None):\n rel = self.framework.model.get_relation(self.relation_name, rel_id)\n\n relation_data = rel.data[rel.app]\n creds_json = relation_data.get(\"credentials\")\n return json.loads(creds_json) if creds_json is not None else ()", "def load_credentials(path: str = 'credentials.txt', user_index: int = 0):\n assert os.path.exists(path), f\"Specificy login credentials in {path}\"\n with open(path) as cfile:\n lines = cfile.readlines()\n assert len(lines) >= 2, f\"Must have line for username and password in {path}\"\n # return username (first line) and password (second line)\n return lines[2 * user_index].strip(), lines[(2 * user_index) + 1].strip()", "def _load_user_credentials(self, storage):\n # Set up a Flow object to be used if we need to authenticate.\n flow = client.flow_from_clientsecrets(\n self.client_secrets,\n scope=self.api_scopes,\n message=tools.message_if_missing(self.client_secrets))\n\n # Retrieve credentials from storage.\n # If the credentials don't exist or are invalid run through the installed\n # client flow. The storage object will ensure that if successful the good\n # credentials will get written back to file.\n\n credentials = storage.get()\n if credentials is None or credentials.invalid:\n credentials = tools.run_flow(flow, storage)\n\n return credentials", "def get_account_credentials(call):\n account = call.data.get(CONF_SPOTIFY_ACCOUNT)\n user = username\n pwd = password\n if account is not None:\n _LOGGER.debug('setting up with different account than default %s', account)\n user = accounts.get(account).get(CONF_USERNAME)\n pwd = accounts.get(account).get(CONF_PASSWORD)\n return user, pwd", "def list_user_access(self, user):\n return self._user_manager.list_user_access(user)", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'clockwise.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_creds():\n\tcredentials = None\n\tif os.path.exists('token.pickle'):\n\t\twith open('token.pickle', 'rb') as token:\n\t\t\tcredentials = pickle.load(token)\n\t# If there are no (valid) credentials available, let the user log in.\n\tif not credentials or not credentials.valid:\n\t\tif credentials and credentials.expired and credentials.refresh_token:\n\t\t\tcredentials.refresh(Request())\n\t\telse:\n\t\t\tflow = InstalledAppFlow.from_client_secrets_file('config/sa.json', SCOPES)\n\t\t\tcredentials = flow.run_local_server(port=0)\n\t\t# Save the credentials for the next run\n\t\twith open('token.pickle', 'wb') as token:\n\t\t\tpickle.dump(credentials, token)\n\treturn credentials", "def creds(self):\n return CredsTable(self.rpc, self.name)", "def list_customer_secret_keys(self, user_id, **kwargs):\n resource_path = \"/users/{userId}/customerSecretKeys\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = 
[_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_customer_secret_keys got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[CustomerSecretKeySummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"list[CustomerSecretKeySummary]\")", "def get_users_recipients(session):\n\n all_users = None\n\n email_recipients = []\n\n all_users = session.query(UsersAuthModel).all()\n\n for user_rs in all_users:\n username = user_rs.user_name\n\n if bool(is_active) and bool(is_superuser):\n email_recipients.append(username)\n\n return email_recipients", "def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'admin-directory_v1-NestedGroupSync.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = 
tools.run_flow(flow, store, flags)\n    else: # Needed only for compatibility with Python 2.6\n        credentials = tools.run(flow, store)\n    print 'Storing credentials to ' + credential_path\n    return credentials", "def login(self, user, password):\n\n    def encode_base64(s, eol=None):\n        return \"\".join(base64.encodestring(s).split(\"\\n\"))\n\n    def encode_cram_md5(challenge, user, password):\n        challenge = base64.decodestring(challenge)\n        response = user + \" \" + hmac.HMAC(password, challenge).hexdigest()\n        return encode_base64(response, eol=\"\")\n\n    def encode_plain(user, password):\n        return encode_base64(\"%s\\0%s\\0%s\" % (user, user, password), eol=\"\")\n\n\n    AUTH_PLAIN = \"PLAIN\"\n    AUTH_CRAM_MD5 = \"CRAM-MD5\"\n    AUTH_LOGIN = \"LOGIN\"\n\n    if self.helo_resp is None and self.ehlo_resp is None:\n        if not (200 <= self.ehlo()[0] <= 299):\n            (code, resp) = self.helo()\n            if not (200 <= code <= 299):\n                raise SMTPHeloError(code, resp)\n\n    if not self.has_extn(\"auth\"):\n        raise SMTPException(\"SMTP AUTH extension not supported by server.\")\n\n    # Authentication methods the server supports:\n    authlist = self.esmtp_features[\"auth\"]\n    if authlist.startswith('='):\n        authlist = authlist[1:]\n    authlist = authlist.split()\n    # List of authentication methods we support: from preferred to\n    # less preferred methods. Except for the purpose of testing the weaker\n    # ones, we prefer stronger methods like CRAM-MD5:\n\n    preferred_auths = [AUTH_CRAM_MD5, AUTH_PLAIN, AUTH_LOGIN]\n    if hmac is None:\n        preferred_auths.remove(AUTH_CRAM_MD5)\n\n    # Determine the authentication method we'll use\n    authmethod = None\n    for method in preferred_auths:\n        if method in authlist:\n            authmethod = method\n            break\n\n    if authmethod == AUTH_CRAM_MD5:\n        (code, resp) = self.docmd(\"AUTH\", AUTH_CRAM_MD5)\n        if code == 503:\n            # 503 == 'Error: already authenticated'\n            return (code, resp)\n        (code, resp) = self.docmd(encode_cram_md5(resp, user, password))\n    elif authmethod == AUTH_PLAIN:\n        (code, resp) = self.docmd(\"AUTH\",\n            AUTH_PLAIN + \" \" + encode_plain(user, password))\n    elif authmethod == AUTH_LOGIN:\n        (code, resp) = self.docmd(\"AUTH\",\n            \"%s %s\" % (AUTH_LOGIN, encode_base64(user, eol=\"\")))\n        if code != 334:\n            raise SMTPException(\"Authorization failed.\")\n        (code, resp) = self.docmd(encode_base64(password, eol=\"\"))\n    elif authmethod is None:\n        raise SMTPException(\"No suitable authentication method found.\")\n    if code not in [235, 503]:\n        # 235 == 'Authentication successful'\n        # 503 == 'Error: already authenticated'\n        raise SMTPException(\"Authorization failed.\")\n    return (code, resp)", "def get_credentials():\n    home_dir = os.path.expanduser('~')\n    credential_dir = 
os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n credential_dir = os.path.realpath('.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path) # stores the users credentials --> TODO: put in database\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n\n credentials = tools.run_flow(flow, store, flags)\n\n print('Storing credentials to ' + credential_path)\n return credentials", "def auth_credentials(self) -> Optional[Sequence['outputs.AuthCredentialResponse']]:\n return pulumi.get(self, \"auth_credentials\")", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = 
os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n #home_dir = os.path.expanduser('~')\n home_dir = os.path.expanduser('/home/pi/')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_all_user_info():\n collection = get_collection(\"user\")\n user_infos = list(collection.find())\n for user_info in user_infos:\n user_info.pop('_id')\n user_info.pop('password')\n return user_infos", "def get_stored_credentials(user):\n with open(\"access.json\", \"r\") as f:\n credentials = json.load(f)\n user_creds = credentials[user]\n return user_creds[\"access_token\"], user_creds[\"access_secret\"]", "def get_creds():\n with open(CREDS_PATH, 'r') as creds_file:\n creds = json.load(creds_file)\n return creds['uname'], creds['pword']", "def get_credentials_from_client(self, key, url, connection_id) -> Credentials:\n enc = _Encrypter(key)\n base64_private_key, nonce, verifier = enc.get_verifier()\n encrypted_url = enc.encrypt(url, base64.b64decode(nonce))\n encrypted_credentials, nonce = _HttpClient.get_logins(connection_id, nonce, verifier, encrypted_url)\n iv = base64.b64decode(nonce)\n\n return {\n credential['user']: credential\n for credential in [\n {\n 'user': enc.decrypt(encrypted_credential['Login'], iv),\n 'password': enc.decrypt(encrypted_credential['Password'], iv)\n }\n for encrypted_credential in encrypted_credentials\n ]\n }", "def get_messages(user, password, server=\"pop.gmail.com\"):\n\n # define our connection\n pop_conn = poplib.POP3_SSL(server)\n pop_conn.user(user)\n pop_conn.pass_(password)\n\n # Get message tuples from server:\n tuples = [pop_conn.retr(i) for i in range(1, len(pop_conn.list()[1]) + 1)]\n pop_conn.quit()\n\n # returns the message objects in a list, discarding the other fields\n return [msg[1] for msg in tuples]", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(args.clientSecretFile, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = 
tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(self, **kwargs):\n creds_file = os.path.join(kwargs['user_dir'], 'credentials.json')\n\n # Getting credentials from Storage\n store = file.Storage(creds_file)\n creds = store.get()\n\n # Validating or refreshing credentials, if necessary\n if creds is None or creds.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n creds = tools.run_flow(flow, store)\n elif creds.access_token_expired:\n creds.refresh(httplib2.Http())\n else:\n pass\n\n return creds", "def display_credentials(cls, credentials_name):\n for credentials in Credentials.credentials_list:\n if credentials.credentials_name == credentials_name:\n Credentials.credentials_list.append(credentials)\n return Credentials.credentials_list", "def get_credentials():\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-showtime.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n print('Storing credentials to ' + credential_path)\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def locked(user):\n\n cmd = \"lsuser -a account_locked {}\".format(user)\n cmd += ' | grep \"account_locked=true\"'\n out = __salt__[\"cmd.run_all\"](cmd, output_loglevel=\"trace\", python_shell=True)\n\n ret = []\n\n lines = out[\"stdout\"].splitlines()\n for line in lines:\n ret.append(line.split()[0])\n\n return ret", "def find_credential(account):\n return Credentials.find_by_username(account)", "def send_email( user, password ):\n \n mail = Mailer( host = EMAIL['host'], \n port = EMAIL['port'],\n use_tls = EMAIL['use_tls'], \n usr = EMAIL['user'], \n pwd = EMAIL['password']\n )\n \n message = Message( From = 'help@rxmedaccess.com',\n To = [user.email],\n Subject = \"Password Reset\"\n )\n \n body = \"\"\"Your new password for {} is {}\n You can reset it to what you like on your settings page once you log in with\n this password\n \"\"\".format(__name__, password )\n\n message.Body = body\n try:\n mail.send(message)\n except Exception as e:\n log.error( 'Send mail error: {}'.format( str(e) ) )", "def getCredentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def credential(self):\n return 
self._tower.get_credential_by_id(self._data.get('credential'))", "def show_user_contacts(user_id):\n\n user_contacts = Contact.query.filter_by(user_id=user_id).all()\n\n contacts = []\n for contact in user_contacts:\n contacts.append( { 'contact_id': contact.contact_id,\n 'first_name': contact.first_name,\n 'last_name': contact.last_name,\n 'email': contact.email } )\n\n return jsonify(contacts)" ]
[ "0.7158633", "0.63490164", "0.5840114", "0.5356084", "0.5350056", "0.5333744", "0.53025347", "0.53025347", "0.53025347", "0.5278439", "0.5255396", "0.52334976", "0.5210725", "0.5210725", "0.5193972", "0.51926774", "0.5166273", "0.51630765", "0.51324713", "0.51061445", "0.51052934", "0.51003575", "0.5089874", "0.5070253", "0.5051216", "0.5050097", "0.502", "0.49585772", "0.49421754", "0.49201402", "0.49187106", "0.49056125", "0.49012733", "0.48536846", "0.48484036", "0.48472202", "0.48278916", "0.48028702", "0.4786423", "0.47734535", "0.47451517", "0.47451517", "0.47390354", "0.4738704", "0.4730385", "0.4726388", "0.47254288", "0.4712961", "0.47123942", "0.4687656", "0.4648423", "0.46379817", "0.4627256", "0.46270055", "0.4624233", "0.4601323", "0.45967546", "0.45733467", "0.45694274", "0.45643365", "0.45640782", "0.45599592", "0.45436174", "0.45431763", "0.4536209", "0.45186862", "0.45106077", "0.45055982", "0.45032007", "0.44951144", "0.44871175", "0.4480894", "0.4472195", "0.4472195", "0.44690004", "0.44664273", "0.44639724", "0.44639724", "0.44639724", "0.44639724", "0.4460439", "0.445727", "0.4447228", "0.444546", "0.4445354", "0.4441287", "0.44343162", "0.44256818", "0.4425305", "0.44222653", "0.44215778", "0.44103667", "0.44083333", "0.44038877", "0.43997288", "0.43833843", "0.43800807", "0.4378559", "0.43783247", "0.43693087" ]
0.7803695
0
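
The record above closes with the tail of its positive document: an OCI Identity SDK method that lists a user's customer secret keys (note the userId path parameter and the list[CustomerSecretKeySummary] response type). A minimal usage sketch against that signature, assuming the oci Python SDK and a standard ~/.oci/config profile; the profile name and the reuse of the config's own user OCID are illustrative assumptions, not values from the record:

import oci  # assumes the OCI Python SDK is installed

config = oci.config.from_file("~/.oci/config", "DEFAULT")  # placeholder profile
identity = oci.identity.IdentityClient(config)

# List the secret keys of the config's user; config["user"] is that file's
# user OCID and stands in for any user OCID you want to inspect.
response = identity.list_customer_secret_keys(user_id=config["user"])
for key in response.data:  # CustomerSecretKeySummary models
    print(key.id, key.display_name, key.lifecycle_state)
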
Lists the tag defaults for tag definitions in the specified compartment.
def list_tag_defaults(self, **kwargs): resource_path = "/tagDefaults" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "id", "compartment_id", "tag_definition_id", "lifecycle_state" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_tag_defaults got unknown kwargs: {!r}".format(extra_kwargs)) if 'lifecycle_state' in kwargs: lifecycle_state_allowed_values = ["ACTIVE"] if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values: raise ValueError( "Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values) ) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "id": kwargs.get("id", missing), "compartmentId": kwargs.get("compartment_id", missing), "tagDefinitionId": kwargs.get("tag_definition_id", missing), "lifecycleState": kwargs.get("lifecycle_state", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TagDefaultSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TagDefaultSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initDefaults(self):\n return _libsbml.CompartmentGlyph_initDefaults(self)", "def initDefaults(self):\n return _libsbml.Compartment_initDefaults(self)", "def defaults(file):\n\n\tUNCAT_TAGID = 47\n\tNOSERIES_TAGID = 375\n\n\treturn [NOSERIES_TAGID, UNCAT_TAGID]", "def get_default_vpas(self, composition_space):\n\n default_vpas = {}\n for element in composition_space.get_all_elements():\n default_vpas[element.symbol] = self.all_default_vpas[\n element.symbol]\n return default_vpas", "def init_defaults(self, defaults):\r\n for (sect, opt, default) in defaults:\r\n self._default(sect, opt, default)", "def defaults(self):\n return self.conf.get(\"defaults\", [])", "def _add_default_tags(self):\n self.tags.add_tag('ban', required=True)", "def defaults():\n global __preset_staging\n \n t = TreeDict('Default_Parameter_Tree', __defaultpresettree__ = True)\n __preset_staging[id(t)] = t\n return t", "def default_kernel_config(defn):\n # XXX(stephentu): should the default config also include cluster_hp?\n return list(it.chain(\n default_assign_kernel_config(defn),\n default_feature_hp_kernel_config(defn)))", "def defaults():\n return {}", "def defaults() -> dict:\n pass", "def show_defaults(context: CreateCommandsContext):\n logger.info(\"Default parameters when creating jobs:\")\n for parameter in context.settings.job_default_parameters:\n logger.info(parameter.describe())", "def get_defaults(self):\n\t\treturn self.__defaults", "def default_configs(cls):\n config = super().default_configs()\n config.update(\n {\n \"entry_type\": None,\n \"attribute\": None,\n \"index_annotation\": None,\n }\n )\n return config", "def get_persisted_default_config_fields(self):\n return []", "def defaults(self):\n return self._config_parser.defaults()", "def _get_default_config_list(parm_base=None):\n default_config_list = []\n if parm_base is None:\n parm_base = PARM_BASE\n\n conf_dir = os.path.join(parm_base,\n METPLUS_CONFIG_DIR)\n\n # if both are found, set old base confs first so the new takes precedence\n for base_conf in OLD_BASE_CONFS + BASE_CONFS:\n conf_path = os.path.join(conf_dir,\n base_conf)\n if os.path.exists(conf_path):\n default_config_list.append(conf_path)\n\n if not default_config_list:\n print(f\"FATAL: No default config files found in {conf_dir}\")\n sys.exit(1)\n\n return default_config_list", "def default_configs():\n configs=configparser.ConfigParser()\n configs.read(default_configfile())\n \n return configs", "def help_default_values():\n click.echo_via_pager(docgen.generate_default_value_help())", "def initDefaultChoices(self):\n return [text for text in self.formatList]", "def _default() -> list:\n if metadata is None or metadata.default is None:\n return []\n\n return self._always_array(metadata.default)", "def print_defaults():\n print 'area_bounds :', default_area_bounds\n print 'area_bounds_format :', default_area_bounds_format\n print 'area_bounds_range :', default_area_bounds_range\n print 'years_bounds :', default_years_are_bounds\n print 'dates_are_bounds :', default_dates_are_bounds\n print 'init_date_str_format :', default_init_date_str_format\n print 'member_name :', default_member_name\n print 'period_name :', default_period_name\n print 'initialistion_time_name :', default_initialistion_time_name", "def default_configs(cls):\n return {\n 'redirect_path': None,\n 'nif_page_structure': None,\n 'nif_text_links': None,\n }", "def default_tags(self) -> str:\n tags = [\"persistent\"]\n if self.id_tag:\n tags.append(self.id_tag)\n tags.append(\"dismissable\" if 
self.is_dismissable else \"undismissable\")\n tags.append(\"safe\" if self.mark_content_safe else \"unsafe\")\n return \" \".join(tags)", "def get_config_defaults(self): # pylint: disable=R0201\n return {}", "def defaults():\n\n dummy = FieldTemplate.dummy\n\n return {\"disease_demographic_id\": dummy(\"demographic_id\"),\n }", "def replace_defaults(d):\n\n # remove the defaults section\n defaults = d.pop('.defaults')\n\n # look for default tags and replace them\n for k, v in defaults.items():\n recursive_search_replace(d, '!' + k + '!', v)", "def defaults():\n\n #dummy = FieldTemplate.dummy\n\n return None", "def get_defaults():\n\n return {\n 'base_types': _get_base_types(),\n 'template_types': _get_template_types(),\n 'refined_types': _get_refined_types(),\n 'humannames': _get_humannames(),\n 'argument_kinds': _get_argument_kinds(),\n 'variable_namespace': {},\n 'type_aliases': _get_type_aliases(),\n 'cpp_types': _get_cpp_types(),\n 'numpy_types': _get_numpy_types(),\n 'from_pytypes': _get_from_pytypes(),\n 'cython_ctypes': _get_cython_ctypes(),\n 'cython_cytypes': _get_cython_cytypes(),\n 'cython_pytypes': _get_cython_pytypes(),\n 'cython_cimports': _get_cython_cimports(),\n 'cython_cyimports': _get_cython_cyimports(),\n 'cython_pyimports': _get_cython_pyimports(),\n 'cython_functionnames': _get_cython_functionnames(),\n 'cython_classnames': _get_cython_classnames(),\n 'cython_c2py_conv': _get_cython_c2py_conv(),\n 'cython_py2c_conv_vector_ref': CYTHON_PY2C_CONV_VECTOR_REF,\n 'cython_py2c_conv': _get_cython_py2c_conv(),\n }", "def add_default_options(self):\n\n options = getattr(self.parent, \"pyautodoc_set_default_option\", [])\n for option in options:\n self.set_default_option(option)", "def find_defaults(self):\n\n defaults = self.tree.findall('default')\n default_remote = None\n default_revision = None\n\n if len(defaults) > 1 and self.fail_on_invalid:\n raise InvalidManifest(\n 'More than one default entry, must be unique'\n )\n\n try:\n default_remote = defaults[-1].get('remote')\n default_revision = defaults[-1].get('revision', 'master')\n except IndexError:\n pass # Leave defaults to None\n\n self.defaults = {\n 'remote': default_remote, 'revision': default_revision\n }", "def get_default_config() -> Dict[Text, Any]:\n return {\n # Flag to check whether to split intents\n \"intent_tokenization_flag\": False,\n # Symbol on which intent should be split\n \"intent_split_symbol\": \"_\",\n # Regular expression to detect tokens\n \"token_pattern\": None,\n # Symbol on which prefix should be split\n \"prefix_separator_symbol\": None,\n }", "def defaults(self):\n\n return None", "def defaults(self):\n\n return None", "def default_controls(self):\n\t\tcontrol_list = []\n\t\tconfig = ConfigParser.ConfigParser()\n\t\tconfig.read(\"./config.ini\")\n\t\tcontrols = config.options(\"default_controls\")\n\t\tfor c in controls:\n\t\t\ttry: control_list.append( config.get(\"default_controls\", c) )\n\t\t\texcept:\n\t\t\t\tprint \"ERROR: missing control settings. 
Check config.ini.\"\n\t\t\t\traise(SystemExit)\n\t\treturn control_list", "def initDefaults(self):\n return _libsbml.Parameter_initDefaults(self)", "def __init__(self, defaults=None, default_sec=\"Uncategorized\"):\n super(XFasterConfig, self).__init__(dict_type=OrderedDict)\n self.default_sec = default_sec\n self.add_section(default_sec)\n if defaults is not None:\n self.update(defaults)", "def default_configs(cls):\n config = super().default_configs()\n config.update(\n {\n \"entry_type\": \"ft.onto.base_ontology.Document\",\n \"model_name\": \"ktrapeznikov/biobert_v1.1_pubmed_squad_v2\",\n \"question\": \"Where do I live\",\n \"max_answer_len\": 15,\n \"cuda_devices\": -1,\n \"handle_impossible_answer\": False,\n }\n )\n return config", "def _getGroupDefaults(self):\n defaults = self.getDefaultGroupContainer(\n _name = \"defaults\",\n diff_command = self.general.diff_command,\n cia_rpc_server = self.general.cia_rpc_server,\n )\n try:\n self._passConfig(defaults, \"defaults\")\n except ConfigSectionNotFoundError:\n # [defaults] is optional\n pass\n else:\n self._config.remove_section('defaults')\n\n return defaults", "def toolbarDefaultItemIdentifiers_(self, toolbar):\n default = NSMutableArray.alloc().init()\n for item in self.interface.toolbar:\n default.addObject_(toolbar_identifier(item))\n return default", "def defaults():\n\n dummy = FieldTemplate.dummy\n\n return {\"disease_disease_id\": dummy(\"disease_id\"),\n \"disease_symptom_id\": dummy(\"symptom_id\"),\n \"disease_testing_device_id\": dummy(\"device_id\"),\n }", "def autodefaults (self):\r\n\r\n self.defaults_from_notes(identifying_key=EMPTYCHAR,\r\n mark=EQUAL,\r\n obj=self.default_dict['commands'],\r\n entrytext=COMMANDMACROSCRIPT)", "def propagate_defaults(config_doc):\n for group_name, group_doc in config_doc.items():\n if isinstance(group_doc, dict):\n defaults = group_doc.get('defaults', {})\n\n for item_name, item_doc in group_doc.items():\n if item_name == 'defaults':\n continue\n if isinstance(item_doc, dict):\n\n group_doc[item_name] = \\\n dict_merge_pair(copy.deepcopy(defaults), item_doc)\n\n return config_doc", "def default_configuration_list(platform, ide):\n\n # All platforms support this format.\n results = [\"Debug\", \"Internal\", \"Release\"]\n\n # Xbox and Windows support link time code generation\n # as a platform\n if ide.is_visual_studio() and platform.is_windows(\n ) or platform in (PlatformTypes.xbox360,):\n results.append(\"Release_LTCG\")\n\n # Configurations specific to the Xbox 360\n if platform is PlatformTypes.xbox360:\n results.extend([\"Profile\", \"Profile_FastCap\", \"CodeAnalysis\"])\n return results", "def initDefaults(self):\n return _libsbml.Layout_initDefaults(self)", "def defaults():\n\n return {\"disease_case_id\": FieldTemplate.dummy(\"case_id\"),\n }", "def list_tags() -> Optional[Dict[str, Target]]:\n if hasattr(_ffi_api, \"TargetTagListTags\"):\n return _ffi_api.TargetTagListTags()\n return None", "def get_default_gramplets(self):\n return ((\"Citation Filter\",),\n (\"Citation Gallery\",\n \"Citation Notes\",\n \"Citation Backlinks\"))", "def assign_defaults(self):\n\n def module_default_sort_key(module):\n sort_key = (\n 1 if module.marked_as_default else -1,\n module.version,\n module.variant,\n -self.index(module.modulepath),\n )\n return sort_key\n\n self.defaults = {}\n grouped = groupby(\n [module for path in self.path for module in path.modules], lambda x: x.name\n )\n for (_, modules) in grouped:\n for module in modules:\n module.is_default = False\n if len(modules) > 
1:\n modules = sorted(modules, key=module_default_sort_key, reverse=True)\n modules[0].is_default = True\n self.defaults[modules[0].name] = modules[0]", "def initDefaultChoices(self):\n return []", "def list_default_paths():\n filename = os.path.join(os.path.expanduser('~'), '.gfail_defaults')\n if os.path.exists(filename):\n D = ConfigObj(filename)\n print('Default paths currently set:')\n for key in D:\n print('\\t%s = %s' % (key, D[key]))\n else:\n print('No default paths currently set\\n')", "def desc_with_default(self) -> List[str]:\n return self.desc[:]", "def initDefaults(self):\n return _libsbml.ReferenceGlyph_initDefaults(self)", "def describe_tags(resourceArns=None):\n pass", "def list_default_paths():\n filename = os.path.join(os.path.expanduser('~'), '.gfail_defaults')\n if os.path.exists(filename):\n D = ConfigObj(filename)\n print('Default paths currently set to:\\n')\n for key in D:\n print('\\t%s = %s' % (key, D[key]))\n else:\n print('No default paths currently set\\n')", "def _get_function_defaults(func: FunctionType) -> dict[str, Any]:\n # extracted bit from inspect.signature... ~20x faster\n pos_count = func.__code__.co_argcount\n arg_names = func.__code__.co_varnames\n\n defaults = func.__defaults__ or ()\n\n non_default_count = pos_count - len(defaults)\n positional_args = arg_names[:pos_count]\n\n output = {\n name: defaults[offset]\n for offset, name in enumerate(positional_args[non_default_count:])\n }\n if func.__kwdefaults__:\n output.update(func.__kwdefaults__)\n return output", "def set_config_all_to_defaults():\n logging.debug(\"Creating default config\")\n for section in all_defaults:\n set_config_section_to_defaults(section)\n global config_changed\n config_changed = True", "def _get_defaults(context, config_defaults):\n quotas = {}\n default_quotas = {}\n if CONF.use_default_quota_class:\n default_quotas = db_api.quota_class_get_default(context)\n\n for resource, default in six.iteritems(config_defaults):\n # get rid of the 'quota_' prefix\n resource_name = resource[6:]\n if default_quotas:\n if resource_name not in default_quotas:\n versionutils.report_deprecated_feature(LOG, _(\n \"Default quota for resource: %(res)s is set \"\n \"by the default quota flag: quota_%(res)s, \"\n \"it is now deprecated. 
Please use the \"\n \"default quota class for default \"\n \"quota.\") % {'res': resource_name})\n quotas[resource_name] = default_quotas.get(resource_name, default)\n\n return quotas", "def default_configs(cls):\n config: dict = super().default_configs()\n\n config.update({\n \"file_ext\": '.txt',\n \"num_sent_per_doc\": -1,\n \"doc_break_str\": None,\n \"column_format\": cls._DEFAULT_FORMAT,\n \"entity_mention_class\": None\n })\n return config", "def __list_all_tags(self):\n\n tags_dict = get_data.get_tagnames_dict()\n if len(tags_dict) > 0:\n first_str = 'tag'\n second_str = 'top posts scraped'\n third_str = 'recent posts scraped'\n descriptor = '{:<40} {:<20} {}'\n print('')\n print(descriptor.format(first_str, second_str, third_str))\n print(descriptor.format(len(first_str) * '-', len(second_str) * '-',\n len(third_str) * '-'))\n for number, tag in tags_dict.items():\n space_str = ' ' if len(str(number)) > 1 else ' '\n first = '[' + space_str + str(number) + '] ' + tag\n second = str(get_data.get_top_tag_post_count(tag))\n third = str(get_data.get_recent_tag_post_count(tag))\n print(descriptor.format(first, second, third))\n else:\n print('no tags found in the database')", "def get_defaults(cls, mode):\n mode_defaults = getattr(cls, f\"{mode.upper()}_DEFAULTS\")\n defaults = PlotConfig({**cls.COMMON_DEFAULTS, **mode_defaults})\n return defaults", "def get_defaults(cls, mode):\n mode_defaults = getattr(cls, f\"{mode.upper()}_DEFAULTS\")\n defaults = PlotConfig({**cls.COMMON_DEFAULTS, **mode_defaults})\n return defaults", "def tag_index(self):\n if not self._tag_index:\n for nb in self.notebooks:\n for tag, links in nb.tags.items():\n self._tag_index[tag].extend(links)\n for tag in self._tag_index.keys():\n self._tag_index[tag] = list(sorted(set(self._tag_index[tag]), key=natsort))\n return self._tag_index", "def list(self):\n\n\t\treturn self._list(\"/tag\", \"tag\")", "def default_parameters(self) -> List[Parameter]:\n return self.settings.job_default_parameters", "def get_defaults_jsonschema():\n preproc_schema = schema_utils.unload_jsonschema_from_marshmallow_class(DefaultsConfig)\n props = preproc_schema['properties']\n return {'type': 'object', 'properties': props, 'additionalProperties': False, 'title': 'global_defaults_options', 'description': 'Set global defaults for input and output features'}", "def _get_job_defaults():\n\n lines = []\n lines += '[Job]\\n'\n j = Job()\n for cj in j._config_names:\n v = getattr(j, cj)\n lines += '%s = %s\\n' % (cj, v)\n lines += '\\n'\n return lines", "def tag_list(context, addon, dev_tags=None, user_tags=None):\n if not dev_tags and not user_tags:\n return ''\n if not dev_tags:\n dev_tags = []\n if not user_tags:\n user_tags = []\n\n c = {\n 'request': context['request'],\n 'addon': addon,\n 'dev_tags': dev_tags,\n 'user_tags': user_tags,\n }\n t = env.get_template('tags/tag_list.html').render(**c)\n return jinja2.Markup(t)", "def config_list_options(section):\n return __CONFIG.items(section)", "def set_defaults(self, compmgr=None):\n for section, default_options in self.defaults(compmgr).items():\n for name, value in default_options.items():\n if not ProductSetting.exists(self.env, self.product,\n section, name):\n if any(parent[section].contains(name, defaults=False)\n for parent in self.parents):\n value = None\n self.set(section, name, value)", "def initDefaults(self):\n return _libsbml.GeneralGlyph_initDefaults(self)", "def x_list():\n\t_loadconfig()", "def list_all_tags(self,obs):", "def get_defaultvalues(host):\n return 
get_obj_defaultvalues(OBJT_HOST, host)", "def parameters_default(cls):\n return cls._Parameters.__new__.__defaults__", "def tags():", "def _resolve_defaults(self, **kwargs):\n res = list()\n for name, value in kwargs.items():\n if value is None:\n value = self.default(name)\n if value is None:\n raise RuntimeError(f\"Missing default {name}\")\n res.append(value)\n return res", "def setup_default_terms(self):\n # Setting config with pattern -\n # default_dict[\"json config key\"] = (\"Default Value\", \"Ask User\", \"Value Type\")\n\n self.default_terms[Keys.first_run] = (Values.first_run, False, None)\n self.default_terms[Keys.nas_mount] = (Values.nas_mount_path, True, str)\n self.default_terms[Keys.secs_between_checks] = (Values.check_time, True, int)\n self.default_terms[Keys.redmine_api_key] = ('none', False, str)", "def finalize(self):\n print(\"%d default backgdrop names found\" % self.total_default)\n for name in self.list_default:\n print name", "def initDefaults(self):\n return _libsbml.Event_initDefaults(self)", "def defined_tags(self):\n return self._defined_tags", "def get_default_object_values() -> Dict[str, AllowedDefaultValueTypes]:\n # TODO(wxy): Cache this as it is accessed many times.\n\n default_object_values: Dict[str, AllowedDefaultValueTypes] = json.loads(\n constants.get_package_file_contents(\n 'extensions', feconf.OBJECT_DEFAULT_VALUES_EXTENSIONS_MODULE_PATH\n )\n )\n return default_object_values", "def defaults():\n\n return {\"cr_shelter_flag_id\": S3ReusableField.dummy(\"flag_id\"),\n }", "def tags_dict(self):\n return ({'name': 'tag', 'attrs': {'k': k, 'v': v}} for k, v in self.tags.items())", "def tags(self):\n tags = collections.defaultdict(list)\n for cell in self.content.cells:\n if 'tags' in cell.metadata.keys():\n for tag in cell.metadata['tags']:\n tags[tag].append(cell.metadata[\"nbpages\"][\"link\"])\n return tags", "def tag_dict(self):\n tag_dict = dict()\n for document in self.documents:\n for tag in document.tags:\n tag_type = tag['tag']\n tag_dict[tag_type] = tag_dict.get(tag_type, []) + [tag]\n return tag_dict", "def initDefaults(self):\n return _libsbml.SpeciesReferenceGlyph_initDefaults(self)", "def set_attrs_default(input_object, attr_name_list = None):\n if attr_name_list is None:\n attr_name_list = []\n if len(attr_name_list) > 0:\n attr_list = [input_object.attr(attr_name) for attr_name in attr_name_list]\n else:\n attr_list = general.get_channelbox_attributes(input_object)\n\n for attr in attr_list:\n current_val = attr.get()\n if hasattr(attr, 'addAttr'):\n attr.addAttr(e = True, defaultValue = current_val)", "def _default_config(cls):\n return dict()", "def extensible_attributes_list_values():\n return \"extensibleattributedef?\" \\\n \"_return_fields=\" \\\n \"list_values,\" \\\n \"comment,\" \\\n \"name,\" \\\n \"type\"", "def get_ext_param_default_values(self):\n num_param = core.xc_func_info_get_n_ext_params(self.xc_func_info)\n\n ret = []\n for p in range(num_param):\n tmp = core.xc_func_info_get_ext_params_default_value(self.xc_func_info, p)\n ret.append(tmp)\n\n return ret", "def initDefaults(self):\n return _libsbml.SpeciesReference_initDefaults(self)", "def defaults():\n d = Gui.listWorkbenches()\n d = list(d)\n d.sort()\n d = \",\".join(d)\n return d", "def tags_all(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"tags_all\")", "def tags_all(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"tags_all\")", "def tags_all(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, 
\"tags_all\")", "def tags_all(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"tags_all\")", "def tags_all(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"tags_all\")", "def tags_all(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"tags_all\")", "def tags_all(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"tags_all\")" ]
[ "0.5788659", "0.5745859", "0.5657144", "0.5510926", "0.5421845", "0.52667904", "0.5232069", "0.52265173", "0.51869756", "0.5130844", "0.51299524", "0.5102675", "0.5096235", "0.50511813", "0.5039385", "0.502269", "0.50160813", "0.49855888", "0.49597377", "0.4956205", "0.4953227", "0.49445555", "0.4942586", "0.49185464", "0.4917683", "0.4894464", "0.48850566", "0.487327", "0.48522192", "0.48455822", "0.48440924", "0.48241416", "0.4815683", "0.4815683", "0.47957018", "0.47806787", "0.47795004", "0.47771123", "0.477447", "0.47400418", "0.46931168", "0.46916565", "0.46892136", "0.46637195", "0.46493596", "0.46434742", "0.46410885", "0.46311912", "0.46264097", "0.46246022", "0.4624056", "0.46126828", "0.46074352", "0.45951778", "0.45906314", "0.45877737", "0.45789397", "0.4571728", "0.45700428", "0.4569644", "0.45383447", "0.45383447", "0.4530966", "0.45305803", "0.45197657", "0.45150036", "0.45116532", "0.4511237", "0.45056003", "0.45034897", "0.44986352", "0.44952667", "0.44918364", "0.44905478", "0.44814467", "0.4477832", "0.44760484", "0.44672427", "0.4466524", "0.4461102", "0.4458339", "0.44551286", "0.44547328", "0.44417065", "0.44314617", "0.44306836", "0.44244537", "0.44200104", "0.44182938", "0.4409053", "0.44056758", "0.4402133", "0.43916926", "0.4389793", "0.4389793", "0.4389793", "0.4389793", "0.4389793", "0.4389793", "0.4389793" ]
0.6216055
0
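
The positive document in this record is the SDK's list_tag_defaults operation; everything it accepts (compartment_id, id, tag_definition_id, page, limit, and a lifecycle_state restricted to "ACTIVE") is visible in its expected_kwargs list, and the method body drops any kwarg set to None, so an unset page token can be passed straight through. A short pagination sketch against that signature, assuming the oci package and the SDK's usual opc-next-page response header; the compartment OCID is a placeholder:

import oci  # assumes the OCI Python SDK is installed

config = oci.config.from_file()  # default profile; illustrative setup
identity = oci.identity.IdentityClient(config)

compartment_id = "ocid1.compartment.oc1..example"  # placeholder OCID

page = None  # None is filtered out by the method, so this works on the first call
while True:
    response = identity.list_tag_defaults(
        compartment_id=compartment_id,
        lifecycle_state="ACTIVE",  # the only value the operation accepts
        limit=50,
        page=page,
    )
    for tag_default in response.data:  # TagDefaultSummary models
        print(tag_default.tag_definition_name, tag_default.value)
    page = response.headers.get("opc-next-page")  # assumed pagination header
    if not page:
        break

The SDK also ships oci.pagination.list_call_get_all_results, which wraps exactly this loop; the manual page handling above is only needed when you want to stream results page by page.
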
Lists the tag namespaces in the specified compartment.
def list_tag_namespaces(self, compartment_id, **kwargs): resource_path = "/tagNamespaces" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "include_subcompartments", "lifecycle_state" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_tag_namespaces got unknown kwargs: {!r}".format(extra_kwargs)) if 'lifecycle_state' in kwargs: lifecycle_state_allowed_values = ["ACTIVE", "INACTIVE", "DELETING", "DELETED"] if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values: raise ValueError( "Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values) ) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "includeSubcompartments": kwargs.get("include_subcompartments", missing), "lifecycleState": kwargs.get("lifecycle_state", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TagNamespaceSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TagNamespaceSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def list_namespaces(self) -> list:\n return await self.AD.state.list_namespaces()", "def namespaces(self):\n return [self._namespace_prefix]", "def namespaces(self):\n namespaces = set()\n for namespace_package in self.namespace_packages:\n dotted_name = []\n for component in namespace_package.split('.'):\n dotted_name.append(component)\n namespaces.add(tuple(dotted_name))\n return sorted(namespaces, key=lambda n: len(n))", "def namespaces(self):\n return list(self._namespace_schemas.keys())", "def get_namespaces():\n return list(StaticAsset._load_namespaces().keys())", "def list(self, dict_output=False, field_selector=\"\"):\n namespaces_list = self.client_core.list_namespace().items\n logger.info(\"Got namespaces\")\n\n if field_selector:\n namespaces_list = field_filter(obj_list=namespaces_list,\n field_selector=field_selector)\n # convert the list to list of dicts if required\n if dict_output:\n namespaces_list = [convert_obj_to_dict(namespace) for namespace in\n namespaces_list]\n else:\n for namespace in namespaces_list:\n namespace.metadata.resource_version = ''\n return namespaces_list", "def getNamespaces(self):\n return _libsbml.SBMLDocument_getNamespaces(self)", "def _fetch_all_namespaces():\n response = _fetch_herd_session() \\\n .get('{}://{}/{}/{}'.format(HERD_REST_PROTOCOL, HERD_BASE_URL,\n HERD_REST_BASE_PATH, 'namespaces')) \\\n .json()\n\n namespaces = []\n for namespaceKey in response['namespaceKeys']:\n namespaces.append(namespaceKey['namespaceCode'])\n\n _print_info('Retrieved {} namespaces.'.format(len(namespaces)))\n return namespaces", "def test_list_template_for_all_namespaces(self):\n pass", "def get_namespaces(self, label_selector=None):\n return self.core_client.list_namespace(label_selector=label_selector)", "def getNamespaces(self):\n return _libsbml.XMLToken_getNamespaces(self)", "def getNamespaces(self):\n return _libsbml.SBase_getNamespaces(self)", "def GetNamespaces(self):\n return list(self.type_namespaces_map.values())", "def list_namespaced_net_namespace(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_net_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/netnamespaces'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = 
[]\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1NetNamespaceList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def getNamespaces(self, *args):\n return _libsbml.SBMLNamespaces_getNamespaces(self, *args)", "def test_list_net_namespace(self):\n pass", "def ns_list(self):\n return sorted(self.get_ns_name(ns) for ns in self.profile.authoritative_servers)", "def test_list_deployment_config_for_all_namespaces(self):\n pass", "def test_list_build_for_all_namespaces(self):\n pass", "def namespaces(\n self, index: Union[int, str] = \"len\"\n ) -> Union[List[str], int]:\n if index == \"len\":\n return len(self._namespaces)\n try:\n return self._namespaces[index] # type: ignore\n except IndexError:\n return []", "def get_namespaces():\r\n\r\n print 'Getting namespaces'\r\n tree = etree.parse('http://lesswrong.wikia.com/wiki/Special:AllPages', parser)\r\n options = tree.xpath('//select[@id=\"namespace\"]/option')\r\n namespaces = [option.get('value') for option in options]\r\n pprint(namespaces)\r\n return namespaces", "def get_all_namespaces():\n cmds.namespace(setNamespace=':')\n return cmds.namespaceInfo(listOnlyNamespaces=True, recurse=True)", "def _getnamespaces(cls):\n return \" \".join(Kmlable._namespaces)", "def namespaces(self):\n if not self._namespaces:\n self.update_namespaces_info()\n\n return self._namespaces", "def get_namespaces(self):\n if self.namespaces is None:\n namespaces = unpack(self.api.get_namespaces())\n self.namespaces = {\n namespace['name']: DevopsSecurityNamespace(namespace)\n for namespace in namespaces\n }\n return self.namespaces", "def list_services(self, **kwargs: Optional[Any]) -> list:\n\n self.logger.debug(\"list_services: %s\", kwargs)\n\n namespace = kwargs.get(\"namespace\", \"global\")\n\n return self.AD.services.list_services(namespace) # retrieve services", "def add_namespaces(specification):\n\n for ns in specification[\"namespaces\"]:\n specification[\"namespaces\"][ns][\"list\"] = []\n specification[\"namespaces\"][ns][\"list_long\"] = []\n specification[\"namespaces\"][ns][\"list_short\"] = []\n\n specification[\"namespaces\"][ns][\"to_short\"] = {}\n specification[\"namespaces\"][ns][\"to_long\"] = {}\n\n for obj in specification[\"namespaces\"][ns][\"info\"]:\n specification[\"namespaces\"][ns][\"list\"].extend([obj[\"name\"], obj[\"abbreviation\"]])\n specification[\"namespaces\"][ns][\"list_short\"].append(obj[\"abbreviation\"])\n specification[\"namespaces\"][ns][\"list_long\"].append(obj[\"name\"])\n\n specification[\"namespaces\"][ns][\"to_short\"][obj[\"abbreviation\"]] = obj[\"abbreviation\"]\n specification[\"namespaces\"][ns][\"to_short\"][obj[\"name\"]] = obj[\"abbreviation\"]\n\n specification[\"namespaces\"][ns][\"to_long\"][obj[\"abbreviation\"]] = obj[\"name\"]\n specification[\"namespaces\"][ns][\"to_long\"][obj[\"name\"]] = obj[\"name\"]\n\n # For AminoAcid namespace\n if \"abbrev1\" in obj:\n specification[\"namespaces\"][ns][\"to_short\"][obj[\"abbrev1\"]] = obj[\"abbreviation\"]\n specification[\"namespaces\"][ns][\"to_long\"][obj[\"abbrev1\"]] = obj[\"name\"]", "def get_services_in_namespace(self, namespace):\n ret = self.v1_service_list.get(namespace=namespace)\n return [each.metadata.name for each in ret.items]", "def test_get_namespaces_names(self):\n pass", "def watch_namespaced_net_namespace_list(self, **kwargs):\n\n all_params = 
['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method watch_namespaced_net_namespace_list\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/watch/netnamespaces'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='JsonWatchEvent',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def namespaces(self):\n return ()", "def test_list_namespaced_deployment_config(self):\n pass", "def get_ns_list(logger,body,v1=None):\n if v1 is None:\n v1 = client.CoreV1Api()\n logger.debug('new client - fn get_ns_list')\n \n try:\n matchNamespace = body.get('matchNamespace')\n except KeyError:\n matchNamespace = '*'\n logger.debug(\"matching all namespaces.\")\n logger.debug(f'Matching namespaces: {matchNamespace}')\n \n try:\n avoidNamespaces = body.get('avoidNamespaces')\n except KeyError:\n avoidNamespaces = ''\n logger.debug(\"not avoiding namespaces\")\n\n nss = v1.list_namespace().items\n matchedns = []\n avoidedns = []\n\n for matchns in matchNamespace:\n for ns in nss:\n if re.match(matchns, ns.metadata.name):\n matchedns.append(ns.metadata.name)\n logger.debug(f'Matched namespaces: {ns.metadata.name} matchpathern: {matchns}')\n if avoidNamespaces:\n for avoidns in avoidNamespaces:\n for ns in nss:\n if re.match(avoidns, ns.metadata.name):\n avoidedns.append(ns.metadata.name)\n logger.debug(f'Skipping namespaces: {ns.metadata.name} avoidpatrn: {avoidns}') \n # purge\n for ns in matchedns.copy():\n if ns in avoidedns:\n matchedns.remove(ns)\n\n return matchedns", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.CompExtension_getSBMLExtensionNamespaces(self, *args)", "def namespaces(self):\n return self.namespaced_fields().namespaces()", "def namespaces(self) -> NamespacesType:\n return self.schema.namespaces", "def getSBMLNamespaces(self):\n return _libsbml.SBase_getSBMLNamespaces(self)", "def test_list_build_config_for_all_namespaces(self):\n pass", "def get(self, *args):\n return _libsbml.ListWrapperSBMLNamespaces_get(self, *args)", "def update_namespaces_info(self):\n namespaces = 
BlockDev.nvdimm_list_namespaces(idle=True)\n\n self._namespaces = dict((namespace.dev, namespace) for namespace in namespaces)", "def test_list_policy_for_all_namespaces(self):\n pass", "def get_all_namespaces(\n soa_dir: str = DEFAULT_SOA_DIR,\n) -> Sequence[Tuple[str, ServiceNamespaceConfig]]:\n rootdir = os.path.abspath(soa_dir)\n namespace_list: List[Tuple[str, ServiceNamespaceConfig]] = []\n for srv_dir in os.listdir(rootdir):\n namespace_list.extend(get_all_namespaces_for_service(srv_dir, soa_dir))\n return namespace_list", "def _load_namespaces(self, result):\n self._namespaces = {}\n\n for namespace in result[\"query\"][\"namespaces\"].values():\n ns_id = namespace[\"id\"]\n name = namespace[\"*\"]\n try:\n canonical = namespace[\"canonical\"]\n except KeyError:\n self._namespaces[ns_id] = [name]\n else:\n if name != canonical:\n self._namespaces[ns_id] = [name, canonical]\n else:\n self._namespaces[ns_id] = [name]\n\n for namespace in result[\"query\"][\"namespacealiases\"]:\n ns_id = namespace[\"id\"]\n alias = namespace[\"*\"]\n self._namespaces[ns_id].append(alias)", "def getSBMLNamespaces(self):\n return _libsbml.SBasePlugin_getSBMLNamespaces(self)", "def get_edge_namespaces():\n hint = request.form['namespaces']\n\n result = {'status': FAIL, 'message': '', 'data': {}}\n try:\n result['status'] = SUCCESS\n result['data']['autocomplete_field'] = []\n result['data']['select_field'] = []\n if hint != '':\n edge_session = edge(edge_create_internal_ns_configuration.edge_url,\n edge_create_internal_ns_configuration.client_id,\n edge_create_internal_ns_configuration.clientSecret)\n\n namespaces = edge_session.get_namespaces()\n count = 0\n for namespace in namespaces:\n if namespace['name'].startswith(hint):\n\n result['data']['autocomplete_field'].append({\n 'input': namespace['id'],\n 'value': '%s (%s)' % (namespace['name'], namespace['id'])\n })\n result['data']['select_field'].append({\n 'id': namespace['id'],\n 'txt': namespace['name']\n })\n if count == 10:\n break\n count += 1\n except Exception as e:\n result['status'] = FAIL\n result['message'] = 'Error while searching for Namespaces: %s and hint: %s!' 
% (util.safe_str(e), hint)\n return result", "def getSBMLNamespaces(self):\n return _libsbml.ASTBasePlugin_getSBMLNamespaces(self)", "def test_get_namespaces_from_account(self):\n pass", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.LayoutExtension_getSBMLExtensionNamespaces(self, *args)", "def get_all_namespaces_for_service(\n service: str, soa_dir: str = DEFAULT_SOA_DIR, full_name: bool = True\n) -> Sequence[Tuple[str, ServiceNamespaceConfig]]:\n service_config = service_configuration_lib.read_service_configuration(\n service, soa_dir\n )\n smartstack = service_config.get(\"smartstack\", {})\n namespace_list = []\n for namespace in smartstack:\n if full_name:\n name = compose_job_id(service, namespace)\n else:\n name = namespace\n namespace_list.append((name, smartstack[namespace]))\n return namespace_list", "def test_get_namespaces_from_accounts(self):\n pass", "def grouplist(self, namespace=None):\n source = self._source(namespace)\n return self._list(source, 'list')", "def sns(self,**kwargs):\n\n\t\tcursor = kwargs.get('cursor',self.cursor)\n\t\treturn self.toc[cursor].keys()", "def test_list_policy_binding_for_all_namespaces(self):\n pass", "def generate_namespaces(self, graph, graph_mode):\n namespaces = []\n if graph_mode == GATEWAY_GATEWAY_GRAPH:\n nodes = graph.gateway_nodes\n namespaces = list(set([roslib.names.namespace(n) for n in nodes]))\n\n elif graph_mode == GATEWAY_PULLED_GRAPH or \\\n graph_mode == GATEWAY_FLIPPED_GRAPH:\n gateway_nodes = graph.gateway_nodes\n connection_nodes = graph.flipped_nodes\n if gateway_nodes or connection_nodes:\n namespaces = [roslib.names.namespace(n) for n in gateway_nodes]\n # an annoyance with the rosgraph library is that it\n # prepends a space to topic names as they have to have\n # different graph node namees from nodes. 
we have to strip here\n namespaces.extend([roslib.names.namespace(n[1:]) for n in connection_nodes])\n\n return list(set(namespaces))", "def getSBMLNamespaces(self):\n return _libsbml.XMLInputStream_getSBMLNamespaces(self)", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.SBMLExtension_getSBMLExtensionNamespaces(self, *args)", "def getSBMLNamespaces(self):\n return _libsbml.XMLOutputStream_getSBMLNamespaces(self)", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.GroupsExtension_getSBMLExtensionNamespaces(self, *args)", "def test_list_namespaced_template(self):\n pass", "def collection_names(self, callback):\n callback = partial(self._collection_names_result, callback)\n self[\"system.namespaces\"].find(_must_use_master=True, callback=callback)", "def iterNamespaceURIs(self):\n return iter(self.namespace_to_alias)", "def test_list_namespaced_build_config(self):\n pass", "def all_in_namespace(cls, ns):\n return filter_by_prefix(cls.all(), ns + ':')", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.FbcExtension_getSBMLExtensionNamespaces(self, *args)", "def namespaces(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"namespaces\")", "def get_pyxb_namespaces():\n return pyxb.namespace.utility.AvailableNamespaces()", "def identify_cluster_namespaces():\n # Execute 'kubectl get namespaces' command, returned as CompletedProcess\n namespaces_completed_process = run([\"kubectl\", \"get\", \"namespaces\"], stdout=PIPE, stderr=PIPE)\n\n status = namespaces_completed_process.returncode\n # If return code is not 0\n if status:\n return {'status': status, 'value': namespaces_completed_process.stderr.decode('utf-8').strip()}\n else:\n return {'status': status, 'value': namespaces_completed_process.stdout.decode('utf-8').strip()}", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.MultiExtension_getSBMLExtensionNamespaces(self, *args)", "def test_list_route_for_all_namespaces(self):\n pass", "def test_list_namespaced_policy_binding(self):\n pass", "def SBMLNamespaces_getSupportedNamespaces():\n return _libsbml.SBMLNamespaces_getSupportedNamespaces()", "def namespaces(self, psuedo=True):\n if self._namespaces == None:\n result = self.call({'action': 'query',\n 'meta': 'siteinfo',\n 'siprop': 'namespaces'})\n self._namespaces = {}\n self._psuedo_namespaces = {}\n for nsid in result['query']['namespaces']:\n if int(nsid) >= 0:\n self._namespaces[int(nsid)] = \\\n result['query']['namespaces'][nsid]['*']\n else:\n self._psuedo_namespaces[int(nsid)] = \\\n result['query']['namespaces'][nsid]['*']\n if psuedo:\n retval = {}\n retval.update(self._namespaces)\n retval.update(self._psuedo_namespaces)\n return retval\n else:\n return self._namespaces", "def GetProvidedNamespaces(self):\n return set(self._provided_namespaces)", "def _extract_namespaces(self) -> Tuple[List[List[str]], List[str]]:\n nparts: Set[int] = set()\n namespaces: List[Set[str]] = []\n terminals: Set[str] = set()\n for name in self.names:\n parts: List[str] = name.split(\".\")\n nparts.add(len(parts))\n if len(nparts) > 1:\n raise PyParamNameError(\n \"Parameter names must have the same number of namespaces.\"\n )\n namespaces = namespaces or [{part} for part in parts[:-1]]\n for i, part in enumerate(parts[:-1]):\n namespaces[i].add(part)\n terminals.add(parts[-1])\n return [list(ns) for ns in namespaces], list(terminals)", "def _load_namespaces(self):\n nsdocs = self._docset.get_namespaces()\n for nsdoc in nsdocs:\n nsobj = Namespace(nsdoc)\n 
self._docmap[nsdoc] = nsobj\n self._namespaces.add(nsobj)", "def show_tablespaces(self):\n sql = \"SELECT TABLESPACE_NAME FROM DBA_TABLESPACES WHERE CONTENTS <> 'TEMPORARY' ORDER BY 1\"\n self.cur.execute(sql)\n res = self.cur.fetchall()\n key = ['{#TABLESPACE}']\n lst = []\n for i in res:\n d = dict(zip(key, i))\n lst.append(d)\n print(json.dumps({'data': lst}))", "def getTargetNamespaces(self):\n return _libsbml.SBMLConverter_getTargetNamespaces(self)", "def copyNamespaceList(self):\n ret = libxml2mod.xmlCopyNamespaceList(self._o)\n if ret is None:raise treeError('xmlCopyNamespaceList() failed')\n __tmp = xmlNs(_obj=ret)\n return __tmp", "def namespace(self, namespace):\n return self.client.call('GET',\n self.name, params={'namespace': namespace})", "def watch_namespaced_namespace_list(self, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.watch_namespaced_namespace_list_with_http_info(**kwargs)\n else:\n (data) = self.watch_namespaced_namespace_list_with_http_info(**kwargs)\n return data", "def get_all_typespaces(schema_obj):\n\n typespaces = []\n for vendor in schema_obj.vendor_list:\n for typespace in vendor.typespace_list:\n typespaces.append(typespace)\n return typespaces", "def getNumNamespaces(self):\n return _libsbml.XMLNamespaces_getNumNamespaces(self)", "def getSupportedNamespaces():\n return _libsbml.SBMLNamespaces_getSupportedNamespaces()", "def list_pods(req, resp):\n namespace = req.params['namespace']\n return _get_all_pods(namespace)", "def list_namespaced_namespace(self, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.list_namespaced_namespace_with_http_info(**kwargs)\n else:\n (data) = self.list_namespaced_namespace_with_http_info(**kwargs)\n return data", "def get_all_pod_names_in_a_namespace(self, namespace):\n list_of_pod_names = list()\n api_response = self.list_pods_in_a_namespace(namespace=namespace)\n if api_response:\n for item in api_response.items:\n list_of_pod_names.append(item[\"metadata\"][\"name\"])\n return list_of_pod_names", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.QualExtension_getSBMLExtensionNamespaces(self, *args)", "def _get_cloud_function_namespaces():\n logger.info(\n f\"Obtaining Cloud Function namespaces in {self.namespace_region}\"\n )\n\n namespaces = []\n\n collecting_namespaces = True\n max_limit = 200\n offset = 0\n\n # request for namespaces is limited to 200 at a time, thus the request is fulfilled in increments of 200s.\n while collecting_namespaces:\n namespace_metadata = _get_cloud_function_namespaces_metadata(offset)\n if namespace_metadata[\"total_count\"] == max_limit:\n offset += max_limit\n else:\n collecting_namespaces = False\n\n for name_space in namespace_metadata[\"namespaces\"]:\n if \"name\" in name_space: # API based namespace\n namespaces.append(\n {\n \"name\": name_space[\"name\"],\n \"type\": \"API_based\",\n \"id\": name_space[\"id\"],\n \"region\": name_space[\"location\"],\n }\n )\n\n else: # cloud foundry based namespace\n namespaces.append(\n {\n \"name\": name_space[\"id\"],\n \"type\": \"CF_based\",\n \"region\": name_space[\"location\"],\n }\n )\n\n return namespaces", "def test_list_namespaced_policy(self):\n pass", "def list(self):\n\n\t\treturn self._list(\"/tag\", \"tag\")", "def exemptable_namespaces(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"exemptable_namespaces\")", "def get_containers_namespace_ids(duthost, skip_containers):\n containers_in_namespaces = 
defaultdict(list)\n\n logger.info(\"Getting the namespace ids for each container...\")\n containers_states, succeed = duthost.get_feature_status()\n pytest_assert(succeed, \"Failed to get feature status of containers!\")\n\n for container_name, state in containers_states.items():\n if container_name not in skip_containers and state == \"enabled\":\n namespace_ids, succeed = duthost.get_namespace_ids(container_name)\n pytest_assert(succeed, \"Failed to get namespace ids of container '{}'\".format(container_name))\n containers_in_namespaces[container_name] = namespace_ids\n\n logger.info(\"Getting the namespace ids for each container was done!\")\n\n return containers_in_namespaces", "def clearNamespaces(self):\n return _libsbml.XMLToken_clearNamespaces(self)", "def test_list_role_for_all_namespaces(self):\n pass", "def _rosnode_cmd_list(argv):\n args = argv[2:]\n parser = OptionParser(usage=\"usage: %prog list\", prog=NAME)\n parser.add_option(\"-u\",\n dest=\"list_uri\", default=False,\n action=\"store_true\",\n help=\"list XML-RPC URIs (NOT IMPLEMENTED)\")\n parser.add_option(\"-a\",\"--all\",\n dest=\"list_all\", default=False,\n action=\"store_true\",\n help=\"list all information (NOT IMPLEMENTED)\")\n (options, args) = parser.parse_args(args)\n namespace = None\n if len(args) > 1:\n parser.error(\"invalid args: you may only specify one namespace\")\n elif len(args) == 1:\n #namespace = rosgraph.names.script_resolve_name('rostopic', args[0])\n pass\n\n # In ROS 1, the rosnode list invocation was performed using:\n # rosnode_listnodes(namespace=namespace, list_uri=options.list_uri, list_all=options.list_all)\n\n result = rclpy.get_node_names()\n for node in result:\n print(node)", "def get_all_pages_for_namespace(ns):\r\n\r\n print 'Getting pages in namespace %s' % ns\r\n url = 'http://lesswrong.wikia.com/index.php?title=Special:AllPages&from=&to=&namespace=%s' % ns\r\n tree = etree.parse(url, parser)\r\n pages = tree.xpath('//table[2]//a[@title]')\r\n page_names = [page.get('title') for page in pages]\r\n pprint(page_names)\r\n return page_names", "def registered_dde_schemas(verbose=False):\n url = DDE_SCHEMA_BASE_URL + \"?field=_id&size=20\"\n if verbose:\n print(f'Loading registered DDE schema list from \"{url}\"')\n data = load_json_or_yaml(url)\n return [s[\"namespace\"] for s in data[\"hits\"]]", "def test_list_namespaced_build(self):\n pass", "def items(self):\n return self.namespace_to_alias.items()", "def test_list_image_stream_tag_for_all_namespaces(self):\n pass" ]
[ "0.71969026", "0.61050427", "0.5985775", "0.59844434", "0.59576994", "0.5934063", "0.59100467", "0.5898842", "0.58896667", "0.5883841", "0.58817756", "0.58812404", "0.58636045", "0.584227", "0.581729", "0.5809152", "0.58016217", "0.5761703", "0.5708301", "0.56499183", "0.5643741", "0.5632821", "0.55997247", "0.5594167", "0.5523036", "0.55090964", "0.55082816", "0.55039364", "0.54784137", "0.54585445", "0.5436807", "0.54365927", "0.5424068", "0.54141605", "0.53673285", "0.5347642", "0.53459334", "0.5329801", "0.53041965", "0.5292985", "0.52881575", "0.52832735", "0.5274658", "0.5272728", "0.5254538", "0.52373755", "0.52308935", "0.5228741", "0.52160895", "0.52078754", "0.520283", "0.52004194", "0.5181229", "0.5174851", "0.5168184", "0.5161195", "0.5156286", "0.51481307", "0.51252365", "0.5119533", "0.5100442", "0.50757474", "0.50671077", "0.506225", "0.50610703", "0.5057159", "0.5052777", "0.5047324", "0.5042685", "0.50262386", "0.50210226", "0.50123984", "0.49916914", "0.4973546", "0.49715233", "0.49708414", "0.4948968", "0.4942004", "0.49419197", "0.4911154", "0.49104217", "0.48948598", "0.4890705", "0.4886878", "0.48856667", "0.4883919", "0.48834768", "0.48768342", "0.48691773", "0.48630905", "0.4846089", "0.4836831", "0.4833658", "0.48323065", "0.48261422", "0.4825942", "0.4821824", "0.48155507", "0.48029393", "0.4796536" ]
0.7114337
1
Gets the errors for a work request.
def list_tagging_work_request_errors(self, work_request_id, **kwargs): resource_path = "/taggingWorkRequests/{workRequestId}/errors" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_tagging_work_request_errors got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "workRequestId": work_request_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[TaggingWorkRequestErrorSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[TaggingWorkRequestErrorSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_errors(self, request):\n\n value = request._get_parameter_value(self)\n return value.errors", "def getErrors(self):\n return self.errors", "def errors (self):\n return self._errors", "def errors (self):\n return self._errors", "def errors(self):\n return self._errors", "def errors(self):\n return self.__errors", "def errors(self) -> pulumi.Output[Sequence['outputs.BatchAIErrorResponse']]:\n return pulumi.get(self, \"errors\")", "def getErrorsList(self):\n return self.__errors", "def errors(self) -> List[Error]:", "def errors(self) -> Tuple[MqexsErrorInfo, ...]:\n return self.__errors", "def errors(self):\n return self.args[1]", "def errors(self):\r\n if not hasattr(self, '_errors_cache'):\r\n self._errors_cache = self.form.get_field_errors(self)\r\n return self._errors_cache", "def _get_errors(exc):\n if hasattr(exc, 'message'):\n errors = exc.messages\n else:\n errors = [str(exc)]\n return errors", "def errors():\n return THE_LOGGER.errors", "def errors(self) -> List[Error]:\n return self._errors_files + list(self._errors.values())", "def validation_errors(self):\n return self._validation_errors", "def get_errors(self):\n return [result for result in self.values() if result.outcome == Result.ERROR]", "def Errors(self):\r\n\t\treturn self._get_attribute('errors')", "def error(self) -> list:\n return self.__err", "def Errors(self):\n return self._get_attribute('errors')", "def errors(self):\n return self._properties.get(\"errors\")", "def get_errors(self, response: response_domain_model.Response, question_code: str) -> Sequence['ValidationError']:\n ...", "def errors(self) -> str:\n return self.job_errors() + self.analysis_errors()", "def job_errors(self) -> str:\n errors = []\n\n # Get any job errors\n for job in self._jobs.values():\n if job and job.status() == JobStatus.ERROR:\n if hasattr(job, \"error_message\"):\n error_msg = job.error_message()\n else:\n error_msg = \"\"\n errors.append(f\"\\n[Job ID: {job.job_id()}]: {error_msg}\")\n\n # Get any job futures errors:\n for jid, fut in self._job_futures.items():\n if fut and fut.done() and fut.exception():\n ex = fut.exception()\n errors.append(\n f\"[Job ID: {jid}]\"\n \"\\n\".join(traceback.format_exception(type(ex), ex, ex.__traceback__))\n )\n return \"\".join(errors)", "def get_error(self) -> List[str]:\n return []", "def get_error(self) -> List[str]:\n return []", "def error_data(self):\n\n if not self.__settings:\n return []\n\n return self.__transaction_errors", "def retrieve_error_messages(self):\n return self.errors_seen[:]", "def get_validation_errors(self):\n return [err.to_dict() for err in self._schema.validator.validation_errors]", "def get_errors(response):\n errors = response.get(\"error\")\n if errors:\n return [e.get(\"message\") for e in errors]\n return None", "def getErrors(self) -> java.util.Collection:\n ...", "def get_errors(self):\n df = self.get_events()\n return df[df.error.notnull()]", "def errors(self):\n raise NotImplementedError", "def get_errors(self, path: str,\n is_ancillary: bool = False,\n is_system: bool = False,\n is_removed: bool = False) -> List[str]:\n u_file = self.__api.files.get(path, is_ancillary=is_ancillary,\n is_system=is_system,\n is_removed=is_removed)\n return [e.message for e in u_file.errors]", "def get_errors(self, path: str,\n is_ancillary: bool = False,\n is_system: bool = False,\n is_removed: bool = False) -> List[str]:", "def get_errors(self):\n return {'loss': self.loss.data[0]}", "def _get_resp_body_errors(self):\n\n if self._resp_body_errors and 
len(self._resp_body_errors) > 0:\n return self._resp_body_errors\n\n errors = []\n warnings = []\n resp_codes = []\n\n if self.verb is None:\n return errors\n\n dom = self.response.dom()\n if dom is None:\n return errors\n\n for e in dom.findall('Errors'):\n eSeverity = None\n eClass = None\n eShortMsg = None\n eLongMsg = None\n eCode = None\n\n try:\n eSeverity = e.findall('SeverityCode')[0].text\n except IndexError:\n pass\n\n try:\n eClass = e.findall('ErrorClassification')[0].text\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n except IndexError:\n pass\n\n try:\n eShortMsg = smart_encode(e.findall('ShortMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eLongMsg = smart_encode(e.findall('LongMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n if int(eCode) not in resp_codes:\n resp_codes.append(int(eCode))\n except IndexError:\n pass\n\n msg = str(\"Class: {eClass}, Severity: {severity}, Code: {code}, {shortMsg} {longMsg}\") \\\n .format(eClass=eClass, severity=eSeverity, code=eCode, shortMsg=eShortMsg,\n longMsg=eLongMsg)\n\n # from IPython import embed; embed()\n\n if eSeverity == 'Warning':\n warnings.append(msg)\n else:\n errors.append(msg)\n\n self._resp_body_warnings = warnings\n self._resp_body_errors = errors\n self._resp_codes = resp_codes\n\n if self.config.get('warnings') and len(warnings) > 0:\n log.warn(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(warnings)))\n\n if self.response.reply.Ack == 'Failure':\n if self.config.get('errors'):\n log.error(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(errors)))\n\n return errors\n\n return []", "def errors(self):\n _errors = {}\n # pylint: disable=no-member\n for name, field in self._fields.items():\n if field.errors:\n _errors[name] = field.errors.pop()\n\n return _errors", "def getErrors(self):\n errorList = []\n\n # E0\n try:\n if not self.e0.isValid():\n errorList.append(\"Invalid first error axis in ErrorEllipse Class\")\n except (NameError, AttributeError):\n errorList.append(\"No first error axis in ErrorEllipse Class.\")\n\n # E1\n try:\n if not self.e1.isValid():\n errorList.append(\"Invalid second error axis in ErrorEllipse Class\")\n except (NameError, AttributeError):\n errorList.append(\"No second error axis in ErrorEllipse Class.\")\n\n # E2\n try:\n if not self.e2.isValid():\n errorList.append(\"Invalid third error axis in ErrorEllipse Class\")\n except (NameError, AttributeError):\n errorList.append(\"No third error axis in ErrorEllipse Class.\")\n\n # maximumHorizontalProjection\n try:\n self.maximumHorizontalProjection\n except (NameError, AttributeError):\n errorList.append(\"No MaximumHorizontalProjection in ErrorEllipse Class.\")\n\n # maximumVerticalProjection\n try:\n self.maximumVerticalProjection\n except (NameError, AttributeError):\n errorList.append(\"No MaximumVerticalProjection in ErrorEllipse Class\")\n\n # equivalentHorizontalRadius\n try:\n self.equivalentHorizontalRadius\n except (NameError, AttributeError):\n errorList.append(\"No EquivalentHorizontalRadius in ErrorEllipse class\")\n\n return errorList", "def GetAll(self):\n return self._errors.copy()", "def _training_errors(self):\n feed_dict = dict()\n feed_dict[self.model.get_layer('input')] = self.x_train\n for id_ in self.task_ids.keys():\n feed_dict[self.model.get_layer(id_ + '-ground-truth')] = self.y_train[id_]\n errors = {}\n for task_id, loss_type in self.task_ids.iteritems():\n if loss_type is 
LossTypes.mse:\n errors[task_id] = np.sqrt(self.model.get_layer(task_id + '-loss')\n .eval(session=self.sess, feed_dict=feed_dict))\n elif loss_type is LossTypes.cross_entropy:\n predictions = tf.argmax(self.model.get_layer(task_id + '-prediction'), 1)\n targets = tf.argmax(self.model.get_layer(task_id + '-ground-truth'), 1)\n correct_predictions = tf.equal(targets, predictions)\n accuracy_tensor = tf.reduce_mean(tf.cast(correct_predictions, tf.float32))\n accuracy = accuracy_tensor.eval(session=self.sess, feed_dict=feed_dict)\n errors[task_id] = 1. - accuracy\n return errors", "def errors(self) -> List[Error]:\n # May have inherited errors with a different path.\n for error in self._errors.values():\n error.path = self.path\n if self.is_removed: # Mark all of our errors as non-persistant.\n error.is_persistant = False\n return list(self._errors.values())", "def getError(self):\n \n return self.resp[\"error\"]", "def query_error(self):\n return self.details[KEY_QUERY_ERROR]", "def security_errors(self):\n errors = ErrorDict()\n for f in [\"honeypot\", \"timestamp\", \"security_hash\"]:\n if f in self.errors:\n errors[f] = self.errors[f]\n return errors", "def _validation_errors(self):\n feed_dict = dict()\n feed_dict[self.model.get_layer('input')] = self.x_validate\n for id_ in self.task_ids.keys():\n feed_dict[self.model.get_layer(id_ + '-ground-truth')] = self.y_validate[id_]\n errors = {}\n for task_id, loss_type in self.task_ids.iteritems():\n if loss_type is LossTypes.mse:\n errors[task_id] = np.sqrt(self.model.get_layer(task_id + '-loss')\n .eval(session=self.sess, feed_dict=feed_dict))\n elif loss_type is LossTypes.cross_entropy:\n predictions = tf.argmax(self.model.get_layer(task_id + '-prediction'), 1)\n targets = tf.argmax(self.model.get_layer(task_id + '-ground-truth'), 1)\n correct_predictions = tf.equal(predictions, targets)\n accuracy_tensor = tf.reduce_mean(tf.cast(correct_predictions, tf.float32))\n accuracy = accuracy_tensor.eval(session=self.sess, feed_dict=feed_dict)\n errors[task_id] = 1. 
- accuracy\n return errors", "async def get_ert_errordata(request):\n # Get database\n try:\n db_instance = request.app.config['database']\n except Exception as ex:\n return response.json({'data': 'Database ERROR ' + str(ex)}, status=500)\n\n # Get query parameters\n try:\n query_params = request.args\n robot_model = query_params.get('RobotModel')\n error_text = query_params.get('ErrorText')\n\n if not (robot_model and error_text):\n raise Exception\n\n except Exception:\n return response.json({'data': 'Required API parameters missing'}, status=400)\n\n # Set query\n query = \"\"\"\n SELECT error_code, cast(error_level as integer) as 'error_level', compounding_flag, error_module, error_source, error_text, error_description, error_resolution\n FROM error_report\n WHERE (:robot_model LIKE robot_model COLLATE NOCASE) AND (:error_text LIKE error_text COLLATE NOCASE);\n \"\"\"\n\n # Retrieve query and return results\n try:\n # Retrieve query\n rows = await db_instance.fetch_all(query=query,\n values={\"robot_model\": robot_model,\n \"error_text\": error_text})\n # Return results\n return response.json({'data': rows_to_list(rows)}, status=200)\n except Exception as ex:\n return response.json({'data': 'Database Query ERROR, ' + str(ex)}, status=500)", "def getErrorLog(self):\n return _libsbml.SBMLValidator_getErrorLog(self)", "def get_error(self, idx=0):\n return self.portal.error_log.getLogEntries()[idx]", "def get_errors(self):\n errors = []\n\n if not self.title:\n msg = 'Title not found: {0}'.format(self.number)\n print(msg)\n errors.append(msg)\n\n if not self.ref:\n msg = 'Ref not found: {0}'.format(self.number)\n print(msg)\n errors.append(msg)\n\n chapter_index = int(self.number) - 1\n\n # get the expected number of frames for this chapter\n expected_frame_count = chapters_and_frames.frame_counts[chapter_index]\n\n for x in range(1, expected_frame_count + 1):\n\n # frame id is formatted like '01-01'\n frame_id = '{0}-{1}'.format(self.number.zfill(2), str(x).zfill(2))\n\n # get the next frame\n frame = next((f for f in self.frames if f['id'] == frame_id), None) # type: dict\n if not frame:\n msg = 'Frame not found: {0}'.format(frame_id)\n print(msg)\n errors.append(msg)\n else:\n # check the frame img and values\n if 'img' not in frame or not frame['img']:\n msg = 'Attribute \"img\" is missing for frame {0}'.format(frame_id)\n print(msg)\n errors.append(msg)\n\n if 'text' not in frame or not frame['text']:\n msg = 'Attribute \"text\" is missing for frame {0}'.format(frame_id)\n print(msg)\n errors.append(msg)\n\n return errors", "def Errcheck(self) -> list:\n\n myError = []\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n if int(Error) == 0:\n\n print (\"+0, No Error!\")\n\n else:\n\n while int(Error)!=0:\n\n print (\"Error #: \" + ErrorList[0])\n\n print (\"Error Description: \" + ErrorList[1])\n\n myError.append(ErrorList[0])\n\n myError.append(ErrorList[1])\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n myError = list(myError)\n\n return myError", "def tasks_with_errors(self):\n errs = []\n while True:\n try:\n errs.append(self._errq.get_nowait())\n except Queue.Empty:\n break\n return errs", "def check_errors(self):\n\n errors = []\n while True:\n err = self.values(\"SYST:ERR?\")\n if int(err[0]) != 0:\n errmsg = \"Agilent 5313xA: {0}: {1}\".format(err[0], err[1])\n log.error(errmsg + '\\n')\n errors.append(errmsg)\n else:\n break\n\n return errors", "def analysis_errors(self) -> str:\n errors = 
[]\n\n # Get any callback errors\n for cid, callback in self._analysis_callbacks.items():\n if callback.status == AnalysisStatus.ERROR:\n errors.append(f\"\\n[Analysis Callback ID: {cid}]: {callback.error_msg}\")\n\n return \"\".join(errors)", "def xerr(self, i):\n return self.errors[0][i]", "def get_validation_errors(\n self,\n schema_version: Optional[str] = None,\n devel_debug: bool = False,\n ) -> list[ValidationResult]:\n ...", "def errors(self):\n\n dict = {\"Stellar Mass Error\":[self.st_masserr1,self.st_masserr2],\n \"Stellar Radius Error\":[self.st_raderr1,self.st_raderr2]}\n\n return dict", "def get_error_messages(self):\n\n if len(self._sensor_results_list) == 0:\n return\n\n error_msgs = []\n\n for reading in self._sensor_results_list:\n if reading.is_error():\n error_msgs.append(reading.get_error_msg())\n\n if len(error_msgs) > 0:\n return error_msgs\n else:\n return \"No Error Readings\"", "def getErrorReport(self):\n return self.sError;", "def getErrorReport(self):\n return self.sError;", "def GetExpectedError(self, request):\n policy = self.server.GetPolicies()\n if 'request_errors' in policy:\n errors = policy['request_errors']\n if (request in errors) and (errors[request] > 0):\n return errors[request], 'Preconfigured error'\n return None", "def all_errors(self) -> List[XMLSchemaParseError]:\n errors = []\n for comp in self.iter_components():\n if comp.errors:\n errors.extend(comp.errors)\n return errors", "def get_errors(self, obj):\n try:\n errors = obj.exceptions\n serializer = ExportTaskExceptionSerializer(errors, many=True, context=self.context)\n return serializer.data\n except ExportTaskException.DoesNotExist as e:\n return None", "def getErrorTable(self, *args):\n return _libsbml.CompExtension_getErrorTable(self, *args)", "def _get_resends(self):\n if not self.has_error():\n return []\n\n errors = []\n i = 0\n for item in self.my_json['results']:\n if item.has_key('error') and item['error'] == 'Unavailable':\n errors.append((i, item['error']))\n i += 1\n return errors", "def get_error(self):\n return self.exc_info", "def getBuildErrors(self):\n return [x for x in self.xeps if x.buildErrors]", "def getNumErrors(self):\n return _libsbml.XMLErrorLog_getNumErrors(self)", "def errors(func):\n def error_wrapper(*args, **kwargs):\n try:\n if request.method == \"POST\":\n if not request.json:\n raise ValueError(MESSAGE['content-not-allowed']['message'],\n MESSAGE['content-not-allowed']['status'],\n MESSAGE['content-not-allowed']['type'])\n if not settings.machinekit_running:\n raise RuntimeError(MESSAGE['machinekit-down']['message'],\n MESSAGE['machinekit-down']['status'],\n MESSAGE['machinekit-down']['type'])\n return func(*args, **kwargs)\n\n except ValidationError as err:\n return {\n \"errors\": {\n \"keys\": err.messages,\n \"status\": 400,\n \"type\": \"ValidationError\"\n }\n }, 400\n except ValueError as err:\n message, status, err_type = err\n return {\n \"errors\": {\n \"message\": message,\n \"status\": status,\n \"type\": err_type\n }\n }, status\n except RuntimeError as err:\n message, status, err_type = err\n return {\n \"errors\": {\n \"message\": message,\n \"status\": status,\n \"type\": err_type\n }\n }, status\n except NameError as err:\n message, status, err_type = err\n return {\n \"errors\": {\n \"message\": message,\n \"status\": status,\n \"type\": err_type\n }\n }, status\n except (werkzeug.exceptions.BadRequest) as err:\n message, status, err_type = MESSAGE['invalid-content']\n return {\n \"errors\": {\n \"message\": 
MESSAGE['invalid-content']['message'],\n \"status\": MESSAGE['invalid-content']['status'],\n \"type\": MESSAGE['invalid-content']['type']\n }\n }, MESSAGE['invalid-content']['status']\n except Exception as err:\n return {\"errors\": {\"message\": err.message}}, 500\n\n error_wrapper.__name__ = func.__name__\n return error_wrapper", "def getFailedJobs(self):\n return self.__failedJobs", "def error_details(self):\n return self._error_details", "def getParseErrors(self):\n return [x for x in self.xeps if x.parseErrors]", "def formatErrors(self):\n errorlist = []\n xepsWithErrors = sorted(\n set(self.getParseErrors() + self.getBuildErrors()),\n key=lambda x: str(x))\n if self.getErrors() or xepsWithErrors:\n if self.getErrors():\n errorlist.append(\"********** Read errors **********\")\n for error in self.getErrors():\n errorlist.append(error)\n for xep in xepsWithErrors:\n errorlist.append(\n \"********** Error report for {} **********\".format(str(xep)))\n if xep.parseErrors:\n errorlist.append(\"********** Parsing Errors **********\")\n errors = list(set(xep.parseErrors))\n for error in errors:\n errorlist.append(error)\n if xep.buildErrors:\n errorlist.append(\"********** Build Errors **********\")\n for error in xep.buildErrors:\n if len(error.splitlines()) > 4:\n error = ''.join(error.splitlines()[:4])\n errorlist.append(error)\n return '\\n'.join(errorlist)\n else:\n return None", "def get_internal_errors(self) -> Dict[str, int]:\n self.serial.write(b\"D!\")\n values = self.__read_response(4)\n first_address_byte_errors = self.__extract_int(values[0], b\"!E1\")\n command_byte_errors = self.__extract_int(values[1], b\"!E2\")\n second_address_byte_errors = self.__extract_int(values[2], b\"!E3\")\n PEC_byte_errors = self.__extract_int(values[3], b\"!E4\")\n\n return {\n \"first_address_byte_errors\": first_address_byte_errors,\n \"command_byte_errors\": command_byte_errors,\n \"second_address_byte_errors\": second_address_byte_errors,\n \"PEC_byte_errors\": PEC_byte_errors,\n }", "def get_validation_errors(response, field, index=0):\n assert response.status_code == 400\n i = 0\n for error in response.data[\"invalid_params\"]:\n if error[\"name\"] != field:\n continue\n\n if i == index:\n return error\n\n i += 1", "def _get_errors(sheet, row, col):\n field = _FIELDS['primary data']\n val = sheet.cell(row + field['row'], col + field['column']).value\n if not val:\n return []\n final_row = row + field['row']\n error = sheet.cell(final_row, col + field['column']).value\n while error:\n final_row += 1\n error = sheet.cell(final_row, col + field['column']).value\n return [sheet.cell(i, col + field['column']).value\n for i in range(row + field['row'], final_row)]", "def error_map(self):\n return self._error_map", "def get_encoding_errors(self):\n return self._encoding_errors", "def get_render_errors(self, revision_id):\n url = DeckhandClient.get_path(\n DeckhandPaths.RENDERED_REVISION_DOCS\n ).format(revision_id)\n\n errors = []\n\n LOG.debug(\"Retrieving rendered docs checking for validation messages\")\n response = self._get_request(url)\n if response.status_code >= 400:\n err_resp = yaml.safe_load(response.text)\n errors = err_resp.get('details', {}).get('messageList', [])\n if not errors:\n # default message if none were specified.\n errors.append({\n \"error\": True,\n \"message\": (\"Deckhand has reported an error but did not \"\n \"specify messages. 
Response: {}\".format(\n response.text))})\n return errors", "def getErrorTable(self, *args):\n return _libsbml.QualExtension_getErrorTable(self, *args)", "def _pydantic_errors_to_validation_results(\n errors: list[dict | Exception] | ValidationError,\n file_path: Path,\n scope: Scope,\n) -> list[ValidationResult]:\n out = []\n for e in (\n errors.errors() if isinstance(errors, ValidationError) else cast(list, errors)\n ):\n if isinstance(e, Exception):\n message = getattr(e, \"message\", str(e))\n id = \"exception\"\n scope = Scope.FILE\n else:\n id = \".\".join(\n filter(\n bool,\n (\n \"dandischema\",\n e.get(\"type\", \"UNKNOWN\"),\n \"+\".join(e.get(\"loc\", [])),\n ),\n )\n )\n message = e.get(\"message\", e.get(\"msg\", None))\n out.append(\n ValidationResult(\n origin=ValidationOrigin(\n name=\"dandischema\",\n version=dandischema.__version__,\n ),\n severity=Severity.ERROR,\n id=id,\n scope=scope,\n path=file_path,\n message=message,\n # TODO? dataset_path=dataset_path,\n # TODO? dandiset_path=dandiset_path,\n )\n )\n return out", "def getErrorTable(self, *args):\n return _libsbml.MultiExtension_getErrorTable(self, *args)", "def describe_error_log_records(\n self,\n request: dds_20151201_models.DescribeErrorLogRecordsRequest,\n ) -> dds_20151201_models.DescribeErrorLogRecordsResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_error_log_records_with_options(request, runtime)", "def report_transaction_error_messages(self):\n response = self.__get_transaction_response()\n\n # get response data from response object\n response_data = response.json()\n\n # get error messages\n response_error = response_data['Error']\n response_error_messages = response_error['messages']\n\n # add all error messages to the report\n error_messages_to_report = []\n for response_error_message in response_error_messages:\n error_description = response_error_message['description']\n error_messages_to_report.append(error_description)\n\n return error_messages_to_report", "def get_field_errors(self, field):\r\n identifier = format_html('{0}.{1}', self.form_name, field.name)\r\n return self.error_class([SafeTuple((identifier, '$pristine', '$pristine', 'invalid', e))\r\n for e in self.errors.get(field.name, [])])", "def error(self):\n return self['error']", "def get_form_errors(form):\n all_errors = []\n for field in form.errors:\n all_errors += form.errors[field]\n return all_errors", "def get_errors(self, output_str):\n date_time = get_date_time()\n trimmed_output = []\n all_errors = []\n line_counter = 0\n temp_lines = ''\n # trimming unnecessary lines from raw input\n for line in output_str.split('\\n'):\n if 'protocol' in line or 'input errors' in line:\n line_counter = line_counter + 1\n temp_lines = temp_lines + line.strip() + ' '\n if line_counter == 2:\n trimmed_output.append(temp_lines)\n line_counter = 0\n temp_lines = ''\n\n # extracting necessary information from each trimmed line\n for line in trimmed_output:\n # extracting port name\n port = re.search('(FastEthernet|Serial)\\d*\\W\\d*', line).group(0)\n error_str = re.search('\\d* input errors', line).group(0)\n input_error = re.search('\\d*', error_str).group(0)\n # extracting crc error\n error_str = re.search('\\d* CRC', line).group(0)\n crc_error = re.search('\\d*', error_str).group(0)\n # extracting frame error\n error_str = re.search('\\d* frame', line).group(0)\n frame_error = re.search('\\d*', error_str).group(0)\n # extracting overrun error\n error_str = re.search('\\d* overrun', line).group(0)\n overrun_error = 
re.search('\\d*', error_str).group(0)\n # extracting ignored error\n error_str = re.search('\\d* ignored', line).group(0)\n ignored_error = re.search('\\d*', error_str).group(0)\n # appending to a temporary list which will be later converted to a DataFrame\n all_errors.append([self.device, port, input_error, crc_error, frame_error, overrun_error,\n ignored_error, date_time])\n\n error_df = pd.DataFrame(all_errors, columns=['device_name', 'port', 'input', 'crc', 'frame', 'overrun',\n 'ignored', 'date_time'])\n return error_df", "def _get_retriable_errors(out: List[str]) -> List[str]:\n return [\n line for line in out\n if any(error in line for error in RETRIABLE_ERRORS)\n ]", "def getErrorLog(self):\n return _libsbml.XMLInputStream_getErrorLog(self)", "def check_get_errors(self):\n raise NotImplementedError(\"Implement it in a subclass.\")", "def _find_errors(self, new_response: response_domain_model.Response):\n self._reset_child_state()\n for response in self._responses_to_replay:\n for question_code, conditional in self.requirements.items():\n uncaught_errors = conditional.get_errors(response, question_code=question_code)\n if uncaught_errors:\n # Should not be able to get errors on previously passed responses\n # (invalid answers should be ignored when checking conditionals)\n raise Exception('Invalid answers found in previously checked responses')\n\n new_errors = []\n for question_code, conditional in self.requirements.items():\n new_errors.extend(conditional.get_errors(new_response, question_code=question_code))\n\n if new_errors:\n # Recurse again until there are no new errors found\n new_errors.extend(self._find_errors(new_response))\n\n return new_errors", "def refined_errors(self):\r\n errs = []\r\n for err in self.errors:\r\n if err['typo'].lower() not in self.terms:\r\n errs.append(err)\r\n return errs", "def error_messages(self) -> List[str]:\n spatial_msgs = []\n temporal_msgs = []\n if self.spatial:\n spatial_msgs = [m for v, m in self.spatial_validations if not v(self.spatial)]\n if self.temporal:\n temporal_msgs = [m for v, m in self.temporal_validations if not v(self.temporal)]\n\n return spatial_msgs + temporal_msgs", "def get_error(self):\n return self.e", "def problem_errors(assignment) -> dict:\n\n problems = {}\n problem_number = 1\n\n for problem in assignment.problems.all():\n problems[problem_number] = len(problem.errors.all())\n problem_number += 1\n return problems", "def error_wrapper(x):\n errors = list()\n for error_key, error_list in list(x.items()):\n for error in error_list:\n if error_key == 'non_field_errors':\n errors.append(error)\n else:\n errors.append(\"%s: %s\" % (error_key, error))\n return errors", "def get_beam_errors(self):\n\n lsize = 2 * self.lmax + 1\n nspec = 6 if self.pol else 1\n beam_shape = (self.num_maps * nspec, lsize)\n\n save_name = \"beam_errors\"\n cp = \"beam_errors\"\n\n if hasattr(self, \"beam_errors\") and not self.force_rerun[cp]:\n return self.beam_errors\n\n ret = self.load_data(\n save_name, cp, fields=[\"beam_errors\"], to_attrs=True, shape=beam_shape\n )\n if ret is not None:\n return ret[\"beam_errors\"]\n\n beam_errors = OrderedDict()\n beam_errors[\"tt\"] = OrderedDict()\n if self.pol:\n for s in [\"ee\", \"bb\", \"te\", \"eb\", \"tb\"]:\n beam_errors[s] = OrderedDict()\n\n for tag, otag in zip(self.map_tags, self.map_tags_orig):\n if otag in self.beam_error_product:\n be = self.beam_error_product[otag]\n elif otag in self.fwhm_err:\n # convert error on the FWHM to an envelope error on the beam window\n fwhm = 
self.fwhm[otag]\n bl = self.beam_windows[\"tt\"][tag]\n blp = hp.gauss_beam(\n fwhm * (1 - self.fwhm_err[otag]), lsize - 1, self.pol\n )\n blm = hp.gauss_beam(\n fwhm * (1 + self.fwhm_err[otag]), lsize - 1, self.pol\n )\n if self.pol:\n bl = np.asarray(\n [bl, self.beam_windows[\"ee\"][tag], self.beam_windows[\"te\"][tag]]\n )\n blp = blp.T[[0, 1, 3]]\n blm = blm.T[[0, 1, 3]]\n be = (blp - blm) / 2.0 / bl\n else:\n raise ValueError(\"No beam in config for {}\".format(otag))\n\n be = np.atleast_2d(be)[:, :lsize]\n beam_errors[\"tt\"][tag] = np.copy(be[0])\n if self.pol:\n for s in [\"ee\", \"bb\", \"eb\"]:\n beam_errors[s][tag] = np.copy(be[1])\n for s in [\"te\", \"tb\"]:\n beam_errors[s][tag] = np.copy(be[2])\n\n # save and return\n self.beam_errors = beam_errors\n self.save_data(save_name, from_attrs=[\"beam_errors\"])\n return self.beam_errors", "def GetFailures(self):\n return self._compute_client.all_failures", "def get_error_days():\n db = psycopg2.connect(database=DBNAME)\n c = db.cursor()\n # dividing views of bad requests and total request to get percentage\n c.execute(\"select bad_request.time, \"\n \"(bad_request.num * 1.0 / total_request.num) as errors \"\n \"from bad_request, total_request \"\n \"where bad_request.time = total_request.time \"\n \"and (bad_request.num * 1.0 / total_request.num) > 0.01\")\n results = c.fetchall()\n text_file = open(\"text.txt\", \"a+\") # append to text file\n text_file.write(\"Day(s) where more than 1 percent of requests were errors:\"\n \"\\n\\n\")\n for time, errors in results:\n text_file.write(time.strftime('%B %d, %Y') + \" - \" +\n str(errors * 100)[:3] + \"% errors\\n\")\n text_file.write(\"\\n\")\n text_file.close()\n db.close()" ]
[ "0.76682556", "0.6973088", "0.69202435", "0.69202435", "0.691879", "0.6830657", "0.6767033", "0.6696042", "0.6650402", "0.6554404", "0.6477976", "0.64408416", "0.64325213", "0.6419485", "0.6418935", "0.63859105", "0.6373044", "0.63697267", "0.6365497", "0.6364051", "0.6349409", "0.63430977", "0.63074017", "0.62982464", "0.6286475", "0.6286475", "0.6250138", "0.62451285", "0.61667675", "0.6146152", "0.61281055", "0.61138034", "0.61106277", "0.60974246", "0.60576105", "0.60346144", "0.592645", "0.5890338", "0.58882", "0.58459514", "0.5822071", "0.58128744", "0.58088297", "0.5703983", "0.5682471", "0.56742066", "0.5673744", "0.5658543", "0.56359327", "0.56311893", "0.56247205", "0.56203824", "0.5618261", "0.5611232", "0.55908585", "0.55831337", "0.5580948", "0.5567783", "0.5557061", "0.5557061", "0.5555228", "0.55429333", "0.55325365", "0.55233824", "0.55215925", "0.5513357", "0.551299", "0.550702", "0.5504017", "0.5500248", "0.5493382", "0.549017", "0.54893553", "0.547846", "0.5473715", "0.547106", "0.54661083", "0.54516584", "0.5442551", "0.5429973", "0.5420276", "0.5418997", "0.54177487", "0.54043895", "0.54018074", "0.5399368", "0.53951144", "0.5390969", "0.5386923", "0.5386564", "0.53857875", "0.53853124", "0.53843576", "0.5381585", "0.5375114", "0.5372781", "0.53717273", "0.5369736", "0.5363267", "0.5357763" ]
0.6593195
9
Gets the logs for a work request.
def list_tagging_work_request_logs(self, work_request_id, **kwargs): resource_path = "/taggingWorkRequests/{workRequestId}/logs" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_tagging_work_request_logs got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "workRequestId": work_request_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[TaggingWorkRequestLogSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[TaggingWorkRequestLogSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getLogs():", "def getLogs():", "def logs(self, **kwargs):\n return self.client.api.logs(self.id, **kwargs)", "def get_logs(self):\n return self.network.get_logs()", "def get_job_logs(self, params, ujs_proxy=None):\n if ujs_proxy is None:\n ujs_proxy = self.__proxy_client()\n return ujs_proxy.get_job_logs(params)", "def get_request_journal(self):\n response = requests.get(self.requests_url)\n if response.status_code != http_client.OK:\n raise ValueError(response.text, response.status_code)\n response_body = json.loads(response.text)\n return response_body[\"requests\"]", "def logs(self):\n return self._logs", "def logs(self):\n return self._logs", "def logs(self):\n return self.logger.logs()", "def getLogs():\n # in flux, it may be possible to provide more structured information\n # like python Failure instances", "def logs(self):\n if not self._logs:\n self.read_logs()\n return self._logs", "def _get_logs(self):\n contents = dict()\n contents[\"Scheduler\"] = self._parse_log_content(\n self.scheduler.client.get_scheduler_logs()\n )\n log_workers = self.scheduler.client.get_worker_logs()\n for i, (_, worker_content) in enumerate(log_workers.items()):\n contents[f\"Worker-{i}\"] = self._parse_log_content(worker_content)\n return contents", "def log(self):\n resp = requests.get(\"%s/api/log\"%self.urlbase, verify=False)\n return resp.json[\"log\"]", "def get_logs(self, start=None, max_lines=100):\n\n logs = self._api.get_log(self.id, start, max_lines)\n\n if logs.success:\n return logs.result\n\n else:\n self._log.error(\"Failed to retrieve job logs. Error: \"\n \"{0}\".format(logs.result.msg))\n return None", "def getLog(self):\n \n return self.resp[\"log\"]", "def getLog(self):\n return self.session.request('diag/log/')", "def GetLogs(self):\n utcnow = datetime.datetime.utcnow()\n lower_filter = self.log_position.GetFilterLowerBound()\n upper_filter = self.log_position.GetFilterUpperBound(utcnow)\n new_filter = self.base_filters + [lower_filter, upper_filter]\n entries = logging_common.FetchLogs(\n log_filter=' AND '.join(new_filter),\n order_by='ASC',\n limit=self.LOG_BATCH_SIZE)\n return [entry for entry in entries if\n self.log_position.Update(entry.timestamp, entry.insertId)]", "def fetchLogRecords(self):\n return self.handler.buffer", "def get_logs(self):\n if self.retrieved:\n raise errors.IllegalState('List has already been retrieved.')\n self.retrieved = True\n return objects.LogList(self._results, runtime=self._runtime)", "def fetchLogs(self):\n return [record.msg for record in self.handler.buffer]", "def get_logs(self, name):\n logs = self.get_status()\n\n for pod in self.list_pods(namespace=self.project):\n if name in pod.name: # get just logs from pods related to app\n pod_logs = pod.get_logs()\n if pod_logs:\n logs += pod_logs\n\n return logs", "def GetLogs(self):\n raise NotImplementedError()", "def get_log(*args, **kwargs):\n return get_log_async(*args, **kwargs).get_result()", "def get_logs(self, job_id):\n\n # Get the logstream name\n response = self.batch_client.describe_jobs(jobs=[job_id])\n logstream = response[\"jobs\"][0][\"container\"][\"logStreamName\"]\n\n # Keep a list with the log messages\n logs = []\n\n # Get the logs\n response = self.logs_client.get_log_events(\n logGroupName=\"/aws/batch/job\", logStreamName=logstream\n )\n\n # Add to the list\n logs.extend([l[\"message\"] for l in response[\"events\"]])\n\n # Keep getting more pages\n while response[\"nextForwardToken\"] is not None:\n\n # Keep track of the last token used\n last_token = 
response[\"nextForwardToken\"]\n\n # Get the next page\n response = self.logs_client.get_log_events(\n logGroupName=\"/aws/batch/job\",\n logStreamName=logstream,\n nextToken=last_token,\n )\n\n # If the token is the same, we're done\n if response[\"nextForwardToken\"] == last_token:\n response[\"nextForwardToken\"] = None\n else:\n # Otherwise keep adding to the logs\n logs.extend([l[\"message\"] for l in response[\"events\"]])\n\n return logs", "def get_logs(self):\n logs_directory = self.protocol_config['logs']\n protocol_name = self.protocol_config['protocol']\n os.system(f'fab -f Execution/fabfile.py get_logs:{logs_directory} --parallel | '\n f' tee WebApp/ExecutionLogs/{protocol_name}.log')", "def _get_logs(self):\n logstart = self.LOGSTART%(self.session.uuid, self.session.run_counter)\n logend = self.LOGEND%(self.session.uuid, self.session.run_counter)\n log = self.container.logs().decode('UTF-8')\n while log.find(logstart) == -1 or log.find(logend) == -1:\n log = self.container.logs().decode('UTF-8')\n cleaned_log = self._get_cleaned_logs(log, logstart, logend)\n self.session.run_counter = self.session.run_counter + 1\n self.session.save()\n return cleaned_log", "def getLog(self):\n return self.log", "def getLog(self):\n return self.log", "def get_log_entries(self):\n if self.retrieved:\n raise errors.IllegalState('List has already been retrieved.')\n self.retrieved = True\n return objects.LogEntryList(self._results, runtime=self._runtime)", "def get_logs(self, start, end):\n raise NotImplementedError(\"error - not implemented\")", "def get_logs(self, start, end):\n raise NotImplementedError(\"error - not implemented\")", "def retrieveLogs(self, execution, localLogDestination):\n # TODO: Implement this in order to get your logs out. The parent implementation will take care of cpu.log in case\n # profiling was requested. 
Example:\n #\n # execution.host.getFile( '{0}/log.log'.format( self.getExecutionLogDir( execution ) ),\n # os.path.join( localLogDestination, 'log.log' ), reuseConnection = execution.getRunnerConnection() )\n # client.retrieveLogs(self, execution, localLogDestination)\n #\n # The use of the execution.getRunnerConnection() connection prevents errors with multi-threading.\n #\n # This assumes you have no logs of your own:\n client.retrieveLogs(self, execution, localLogDestination)", "def getLog(request):\n # TODO: GET\n data = {}\n return data", "def logs(self) -> Sequence['outputs.GetElasticsearchLogResult']:\n return pulumi.get(self, \"logs\")", "def logs(self, shell=False):\n if self.app_id:\n return self.yarn_api.logs(self.app_id, shell=shell)\n else:\n raise KnitException('Cannot get logs, app not started')", "def log_entries(self) -> Generator[dict, None, None]:\n if self.log_stream:\n yield from logs.fetch_stream(self.log_stream)\n else:\n yield from []", "def get_dust_log(self):\n\t\tif self._session:\n\t\t\tresults = self._session.get_dust_log()\n\t\t\tif results.get('success'):\n\t\t\t\tresults = results.get('results')\n\t\t\t\tif results and \"rows\" in results:\n\t\t\t\t\treturn results.get('rows', [])\n\n\t\treturn []", "def push_worklogs(\n entries: Sequence[TogglEntry], toggl_token: str\n) -> Optional[str]:\n for index, worklog in enumerate(entries):\n logger.info('pushing worklog {}/{}'.format(index + 1, len(entries)))\n\n payload = json.dumps(asdict(worklog), cls=DateTimeEncoder)\n\n response = requests.post(\n 'https://www.toggl.com/api/v8/time_entries',\n data=payload,\n headers={'Content-Type': 'application/json'},\n auth=(toggl_token, 'api_token'),\n )\n\n try:\n response.raise_for_status()\n except HTTPError as err:\n assert isinstance(err.response.text, str)\n return err.response.text\n except RequestException:\n return traceback.format_exc()\n\n return None", "def get_lines(self):\n return self._container.logs(stream=True)", "def get_run_log(self, run_id):\n postresult = requests.get(\n f\"{self.proto}://{self.host}/ga4gh/wes/v1/runs/{run_id}\",\n headers=self.auth,\n )\n return wes_reponse(postresult)", "def _get_job_log(self, job_id, skip_lines):\n log = self.get_mongo_util().get_job_log(job_id)\n lines = []\n for log_line in log.lines: # type: LogLines\n if skip_lines and int(skip_lines) >= log_line.linepos:\n continue\n lines.append(\n {\n \"line\": log_line.line,\n \"linepos\": log_line.linepos,\n \"error\": log_line.error,\n \"ts\": log_line.ts,\n }\n )\n\n log_obj = {\"lines\": lines, \"last_line_number\": log.stored_line_count}\n return log_obj", "def all_logs(self):\n return os.listdir(LOGS_BASE_PATH)", "def GetLogs(self):\n stdout, _, _ = RunKubectlCommand(['logs', self.name])\n return stdout", "def logs(self, task: RemoteTask) -> Iterable[str]:\n raise NotImplementedError()", "def fetch_run_logs(id_, **kwargs):\n run = get_run_object(id_)\n check_run_permission(run, kwargs[\"token_info\"])\n query = \"ilyde-run-{}\".format(run.id)\n return query_elasticsearch(query)", "def get_logs(job_key):\n job = Job.fetch(job_key, connection=conn)\n if job.is_finished:\n logs = job.result\n elif job.is_failed:\n logs = job.exc_info\n else:\n logs = \"Task is still running\"\n return str(logs), 200", "def get_work_log(session, snapshot, work_id):\n # type: (Session, Optional[str], str) -> Text\n if not session.network:\n raise ValueError(\"Network must be set to get node roles\")\n\n url_tail = \"/{}/{}/{}/{}/{}/{}\".format(\n CoordConstsV2.RSC_NETWORKS,\n 
session.network,\n CoordConstsV2.RSC_SNAPSHOTS,\n session.get_snapshot(snapshot),\n CoordConstsV2.RSC_WORK_LOG,\n work_id,\n )\n\n return _get(session, url_tail, dict()).text", "def log_parts():\n return get_log_parts('derivaweb_start_time',\n 'derivaweb_request_guid',\n 'derivaweb_request_content_range',\n 'derivaweb_content_type')", "def ExtractRequestsFromLog(log_path):\n requests = []\n wpr_http_netloc = None\n with open(log_path) as log_file:\n for line in log_file.readlines():\n # Extract WPR's HTTP proxy's listening network location.\n match = _PARSE_WPR_DOMAIN_REGEX.match(line)\n if match:\n wpr_http_netloc = match.group('netloc')\n assert wpr_http_netloc.startswith('127.0.0.1:')\n continue\n # Extract the WPR requested URLs.\n match = _PARSE_WPR_URL_REGEX.match(line)\n if match:\n parsed_url = urlparse(match.group('url'))\n # Ignore strange URL requests such as http://ousvtzkizg/\n # TODO(gabadie): Find and terminate the location where they are queried.\n if '.' not in parsed_url.netloc and ':' not in parsed_url.netloc:\n continue\n assert wpr_http_netloc\n request = WprRequest(is_served=(match.group('level') == 'DEBUG'),\n method=match.group('method'), url=match.group('url'),\n is_wpr_host=parsed_url.netloc == wpr_http_netloc)\n requests.append(request)\n return requests", "def getJobLog(self, jobId):\n params = {'id': jobId}\n try:\n resp = self.gc.get(JobUtils.JOB_ID_PATH, parameters=params)\n except HttpError as e:\n if e.status == 400:\n print('Error. invalid job id:', jobId)\n return {}\n raise\n\n if not resp:\n return ''\n\n log = resp.get('log', '')\n return log", "def get_requests(self):\n\t\tself.last_processed = self.last_modified\n\t\treturn self.requests", "def get_logs():\n callback = bottle.request.query.get('callback')\n folder = os.path.dirname(os.path.abspath(__file__))\n test_run_title = bottle.request.query.test_run_id\n results = {'logs': {'monitor': '', 'testrun': ''}, 'host': bottle.request.headers.get('host')}\n try:\n with open(os.path.join(folder, 'monitor.log'), 'r+') as _f:\n results['logs'].update({'monitor': tools.get_last_logs(_f.readlines())})\n with open(os.path.join(folder, '%s-testrun.log' % test_run_title), 'r+') as _f:\n results['logs'].update({'testrun': tools.get_last_logs(_f.readlines())})\n except IOError as err:\n key = 'monitor' if 'monitor' in str(err) else 'testrun'\n results['logs'].update({key: 'Could not find logs: %s' % err})\n return '{0}({1})'.format(callback, [results])", "def open_logs():\n\treturn log, action_log, error_log", "def log (self):\n return self._log", "def debug_logs_get():\n try:\n return flask.Response(debug_logs.collect(), mimetype='text/plain')\n except debug_logs.Error as e:\n return flask.Response('Failed to retrieve debug logs: %s' % str(e),\n status=500)", "def get_server_logs(self):\n self.response.content\n binary_body = re.split('--==.*==', self.response.content)[2].split('\\r\\n')[5]\n\n f = StringIO.StringIO()\n f.write(bytearray(binary_body))\n\n memory_zip = ZipFile(f)\n zip_content = {name: memory_zip.read(name) for name in memory_zip.namelist()}\n oracc_log = zip_content['oracc.log']\n request_log = zip_content['request.log']\n\n # Check if server returns a lemmatised file\n autolem = None \n for key, value in zip_content.iteritems():\n if key.endswith(\"autolem.atf\"):\n autolem = value\n\n print zip_content.keys()\n print \"@\"*30\n print oracc_log\n print \"@\"*30\n print request_log\n print \"@\"*30\n if autolem:\n print autolem\n print \"@\"*30\n\n return oracc_log, request_log, autolem", "def 
get_logs_and_parts(self):\n result =[]\n query_params = {'key':'1715230983110018712', 'parameter_name':'imprId','et_log_date':'2017-06-01'}\n query = \"\"\"select a.key,a.uuid,a.page_url,a.domain_name,a.app_visitor_cookie,a.referral_domain\n from wt_logs a, wt_log_parts b\n where a.key = b.key\n and a.et_log_date = :et_log_date\n and a.key = :key\n and b.parameter_name = :parameter_name\"\"\".replace('\\n',' ')\n with vertica_python.connect(**conn_info) as connection:\n #print(\"Connected to {} on host{} \".format(conn_info['database'],conn_info['host']))\n cur = connection.cursor()\n cur.execute(query,query_params)\n for row in cur.iterate():\n result.append(row)\n return(result)", "def query_worklog(self, emp_id=None):\n\n query = \"select * from worklog\"\n\n try:\n self.dbCursor.execute(query)\n return self.dbCursor.fetchall()\n except mysql.connector.Error as err:\n ErrorMessageWindow(err)", "def get_requests(self):\r\n\t\tself.last_processed = self.last_modified\r\n\t\treturn self.requests", "def get_traillogs(self, conn, limit=100, offset=0, username=None, start_time=None,\n end_time=None, description=None, target=None, classification=None,\n customer_name=None, ip_address=None, app_id=None):\n path = urls.TRAIL_LOG[\"GET_ALL\"]\n params = {\n \"limit\": limit,\n \"offset\": offset\n }\n if username:\n params[\"username\"] = username\n if start_time:\n params[\"start_time\"] = start_time\n if end_time:\n params[\"end_time\"] = end_time\n if description:\n params[\"description\"] = description\n if target:\n params[\"target\"] = target\n if classification:\n params[\"classification\"] = classification\n if customer_name:\n params[\"customer_name\"] = customer_name\n if ip_address:\n params[\"ip_address\"] = ip_address\n if app_id:\n params[\"app_id\"] = app_id\n resp = conn.command(apiMethod=\"GET\", apiPath=path, apiParams=params)\n return resp", "def get_run_log():\r\n params=request.values\r\n result = ExecRunLog.query.filter(ExecRunLog.exec_id==params['exec_id']).all()\r\n return json_response(result=result)", "def get_log(request, **kwargs):\n\n #Creating the command for the logs \n try:\n\tprint(kwargs)\n\tprint(request.GET['project_id'])\n\toutputStr = sidecar.events.test_logs(project_id=request.GET['project_id'])\n\tlog_data = outputStr.log_data\n\toutputStr = \" <br>\".join(log_data.split(\"\\n\"))\n except Exception, e:\n outputStr = \"Updating the logs...\"\t\n #Making the output\n context = {\n \"page_title\": _(\"Test Details\"),\n \"test_lists\": 'report_list', #tests_list\n \"log_data\": outputStr\n }\n return render(request, 'rally_dashboard/events/test_logs.html', context)", "def get_logs_list():\n # reads the session\n session = request.args.get('session', type=str)\n\n available_keys = []\n\n if check_session_validity(session):\n user = get_user_from_session(session)\n\n all_keys = lh.get_handlers().keys()\n\n for key in all_keys:\n if lh.check_user_log_visibility(user, key):\n available_keys.append(key)\n\n return jsonify({\"logs\": available_keys})", "def log(self):\n return self._log", "def log(self):\n return self._log", "def getLog(self):\n pass", "def logs(ctx, job, past, follow, hide_time):\n\n def get_experiment_logs():\n if past:\n try:\n response = PolyaxonClient().experiment.logs(\n user, project_name, _experiment, stream=False)\n get_logs_handler(handle_job_info=True,\n show_timestamp=not hide_time,\n stream=False)(response.content.decode().split('\\n'))\n print()\n\n if not follow:\n return\n except (PolyaxonHTTPError, PolyaxonShouldExitError, 
PolyaxonClientException) as e:\n if not follow:\n Printer.print_error(\n 'Could not get logs for experiment `{}`.'.format(_experiment))\n Printer.print_error(\n 'Error message `{}`.'.format(e))\n sys.exit(1)\n\n try:\n PolyaxonClient().experiment.logs(\n user,\n project_name,\n _experiment,\n message_handler=get_logs_handler(handle_job_info=True,\n show_timestamp=not hide_time))\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not get logs for experiment `{}`.'.format(_experiment))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n def get_experiment_job_logs():\n if past:\n try:\n response = PolyaxonClient().experiment_job.logs(\n user,\n project_name,\n _experiment,\n _job,\n stream=False)\n get_logs_handler(handle_job_info=True,\n show_timestamp=not hide_time,\n stream=False)(response.content.decode().split('\\n'))\n print()\n\n if not follow:\n return\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n if not follow:\n Printer.print_error(\n 'Could not get logs for experiment `{}`.'.format(_experiment))\n Printer.print_error(\n 'Error message `{}`.'.format(e))\n sys.exit(1)\n\n try:\n PolyaxonClient().experiment_job.logs(\n user,\n project_name,\n _experiment,\n _job,\n message_handler=get_logs_handler(handle_job_info=True,\n show_timestamp=not hide_time))\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not get logs for job `{}`.'.format(_job))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),\n ctx.obj.get('experiment'))\n\n if job:\n _job = get_experiment_job_or_local(job)\n get_experiment_job_logs()\n else:\n get_experiment_logs()", "def upload_logs():\n return {\n 'page': 'upload_logs',\n 'raw_logs': '',\n }", "def logs(self) -> Optional[pulumi.Input['BrokerLogsArgs']]:\n return pulumi.get(self, \"logs\")", "def logs(self) -> Optional[pulumi.Input['BrokerLogsArgs']]:\n return pulumi.get(self, \"logs\")", "def get_rolling_log_history():\n current_tag = get_current_tag()\n return get_log_history(current_tag)", "def get_logs(self, offset = 0, length = 10, type = \"all\"):\n if isinstance(type, bool):\n if type:\n type = \"error\"\n else:\n type = \"all\"\n return AlgoliaUtils_request(self.headers, self.write_hosts, \"GET\", \"/1/logs?offset=%d&length=%d&type=%s\" % (offset, length, type), self.timeout)", "def extract_log_info(request, response=None):\n\n log_data = {\n 'remote_address': request.META.get('REMOTE_ADDR'),\n 'request_method': request.method,\n 'request_path': request.get_full_path(),\n 'run_time': time.time() - request.start_time,\n 'request_body': str(request.body, 'utf-8'),\n 'response_body': response.content\n }\n return log_data", "def get_log(self, logs, ref=None):\n log_list = list()\n output_list = list()\n if isinstance(logs, str):\n log_list.append(logs)\n elif isinstance(logs, list):\n log_list = logs\n for name in log_list:\n new_log = Log()\n new_log.name = name.lower()[:3] + '_' + \\\n self.well_name.lower().replace('-', '_')\n new_log.units = self.unit_dict[name]\n new_log.descr = name\n new_log.depth = np.array(self.data_frame['Depth(m)'].values)\n new_log.data = np.array(self.data_frame[\n '{}({})'.format(name, self.unit_dict[name])].values)\n if ref == 'sea':\n shift = int(self.kelly_bushing // 0.1)\n shift_data = np.full_like(new_log.data, np.nan, 
dtype=np.double)\n shift_data[:-shift] = new_log.data[shift:]\n new_log.data = shift_data\n output_list.append(new_log)\n if isinstance(logs, str):\n return output_list[0]\n else:\n return output_list", "def retrieve_git_log(self):\n result = [str(entry).split(\"\\t\")[1]\n for entry in self.repo.head.log()]\n\n return result", "def logs():\n with open(configs.LOG_PATH) as f:\n return f.read()", "def log(self):\n if self._log is None:\n self._log = Log(client=self)\n return self._log", "def get_formatted_task_log(self):\n try:\n log = requests.get(self.gs_base_url + \"/out.log\").content\n except:\n return [f\"####-##-## ##:##:## Task ID: {self.name}\\n\"]\n return (f\"####-##-## ##:##:## Task ID: {self.name}\\n\" + log.decode('utf-8')).splitlines()", "def log(self):\r\n return self._log", "def get_browser_log_entries(self):\n print(\"B\" * 80)\n loglevels = {\"NOTSET\": 0, \"DEBUG\": 10, \"INFO\": 20, \"WARNING\": 30, \"ERROR\": 40, \"SEVERE\": 40, \"CRITICAL\": 50}\n slurped_logs = self.webdriver.get_log(\"browser\")\n browserlog = logging.getLogger(\"browser\")\n for entry in slurped_logs:\n print(entry[\"message\"])\n # convert broswer log to python log format\n rec = browserlog.makeRecord(\n \"%s.%s\" % (browserlog.name, entry[\"source\"]),\n loglevels.get(\"WARNING\"), # always log it as warn...\n # loglevels.get(entry['level']),\n \".\",\n 0,\n entry[\"message\"],\n None,\n None,\n )\n rec.created = entry[\"timestamp\"] / 1000 # log using original timestamp.. us -> ms\n # pylint: disable=broad-except\n try:\n # add browser log to python log\n browserlog.handle(rec)\n self.progress(entry[\"message\"])\n except Exception as ex:\n print(\"caught exception during transfering browser logs: \" + str(ex))\n print(entry)", "def activity_logs(self) -> api.ActivityLogs:\n return self._get_model(model=api.ActivityLogs)", "def get_log_events(self):\n client = boto3.client('logs')\n\n # Set the timestamp we will start from next poll.\n # and limit current poll to.\n end_timestamp = self.get_timestamp()\n\n # Request LogEvents.\n\n # Check if LogStream was added while agent was running. 
If so, get LogEvents from LogStream creation time.\n # So we don't miss any.\n if self.added:\n self.last_event_check_timestamp = self.creation_time\n self.added = False\n\n log_events_response = client.get_log_events(\n startTime=self.last_event_check_timestamp,\n endTime=end_timestamp,\n logGroupName=self.log_group.name,\n logStreamName=self.name,\n limit=self.event_limit,\n startFromHead=True\n )\n\n # Create LogEvents list from response.\n events = [LogEvent(log_event_dict) for log_event_dict in log_events_response['events']]\n\n # Token used if another request is required to get all LogEvents.\n next_forward_token = log_events_response['nextForwardToken']\n\n event_count = len(events)\n\n # While we get LogEvents equal to event_limit, continue requesting.\n while event_count >= self.event_limit:\n log_events_response = client.get_log_events(\n startTime=self.last_event_check_timestamp,\n endTime=end_timestamp,\n logGroupName=self.log_group.name,\n logStreamName=self.name,\n limit=self.event_limit,\n nextToken=next_forward_token,\n startFromHead=True\n )\n\n # Set length and next forward token for while loop.\n event_count = len(log_events_response['events'])\n next_forward_token = log_events_response['nextForwardToken']\n\n # Add LogEvents to our event list.\n events += [LogEvent(log_event_dict) for log_event_dict in log_events_response['events']]\n\n # Set starting point for next poll\n self.last_event_check_timestamp = end_timestamp\n\n print('Found ' + str(len(events)) + ' LogEvents for LogStream ' + self.log_group.name + ' ' + self.name)\n return events", "def get_main_log(self) -> Any:\n return self.logger", "def get_logs(self, upload_id: str, token: str) -> Tuple[dict, dict]:\n data, _, headers = self.json('post', f'/{upload_id}/logs', token)\n return data, headers", "def fetch_log_entries(owner_account_id):\n batch_size = 500\n log_entries = []\n\n i = 0\n while True:\n i += 1\n skip = batch_size * (i - 1)\n top = batch_size\n\n resp = oauth.tapkey.get(f\"Owners/{owner_account_id}/LogEntries?\"\n f\"$skip={skip}&$top={top}&\"\n f\"$filter=logType eq 'Command' and command eq 'TriggerLock'&\"\n f\"$select=id,entryNo,lockTimestamp,receivedAt,boundLockId,boundCardId,contactId&\")\n\n if resp.status_code != 200:\n abort(500)\n\n next_log_entries = resp.json()\n\n if not isinstance(next_log_entries, list):\n abort(500)\n\n log_entries.extend(next_log_entries)\n\n if len(next_log_entries) < batch_size:\n break\n\n return log_entries", "def worklog(accountable):\n worklog = accountable.issue_worklog()\n headers = ['author_name', 'comment', 'time_spent']\n if worklog:\n rows = [[v for k, v in sorted(w.items()) if k in headers]\n for w in worklog]\n rows.insert(0, headers)\n print_table(SingleTable(rows))\n else:\n click.secho(\n 'No worklogs found for {}'.format(accountable.issue_key),\n fg='red'\n )", "def log(self) -> DagsterLogManager:\n return self._step_execution_context.log", "def get_logs(self, start_date, end_date):\n return (self, start_date, end_date)", "def log(self):\n\n if self.__log is not None:\n return self.__log\n\n return JobsHistoryView.classLog()", "def test_getAuditLogsWithNoParams(self):\r\n logs = self.client.getAuditLogs()\r\n return logs", "def get_log_messages(self, instance):\n return LogMessageSerializer(instance.log_messages, many=True).data", "def retrieve_requests(self, request=None):\n data = {}\n if request:\n data = request.dict()\n req = requests.put('{}/retrieve'.format(self._get_url()),\n params={'type': 'requests'}, data=json.dumps(data))\n if 
req.status_code == 200:\n try:\n return req.json()\n except ValueError:\n return []\n return []", "def read_logs(self) -> Dict[str, Any]:\n return self.maps[0]", "def getLogSession(self):\n return self.session.request('diag/logSession/')", "def get_logger(context):\n Log.job_log = logging.getLogger(context)\n return Log.job_log", "def log():\n data = {}\n for head in repo.heads:\n log_entries = []\n for log_entry in head.log():\n log_entries.append({\n 'message': log_entry.message,\n 'commit': log_entry.newhexsha,\n 'parent': log_entry.oldhexsha,\n 'author': str(log_entry.actor),\n 'committer': str(log_entry.actor), #fix this \n 'time': log_entry.time\n })\n data[head.name] = log_entries\n return jsonify(data)", "def list_logs():\n resource_route = \"/static/log/\"\n file_request_path = request.base_url[:request.base_url.rfind('/')] + resource_route\n path_to_current_file = os.path.dirname(os.path.abspath(__file__))\n logs_path = os.path.join(path_to_current_file, 'static', 'log')\n directory_list = os.listdir(logs_path)\n log_files = [f for f in directory_list if os.path.isfile(os.path.join(logs_path, f))]\n log_files.sort()\n if '.gitignore' in log_files:\n log_files.remove('.gitignore')\n full_log_paths = [file_request_path + f for f in log_files]\n response_code = 200\n return make_response(jsonify({'files': full_log_paths}), response_code)", "def _get_deployment_logs(namespace, name, tail_lines=TAIL_LINES_DEFAULT):\n pods = []\n try:\n api_response = k8s_client.list_namespaced_pod(namespace, label_selector='release={}'.format(name))\n for api_items in api_response.items:\n pods.append(api_items.metadata.name)\n except ApiException as e:\n print(\"Exception when calling CoreV1Api->list_namespaced_pod: %s\\n\" % e)\n\n # Iterate over list of pods and concatenate logs\n logs = \"\"\n try:\n for pod in pods:\n logs += pod + \"\\n\"\n logs += k8s_client.read_namespaced_pod_log(pod, namespace, tail_lines=tail_lines)\n except ApiException as e:\n logging.error(\"Exception when calling CoreV1Api->read_namespaced_pod_log: %s\\n\" % e)\n return logs", "def get_log(self, name):\n try:\n if not self.check_integrity():\n raise Exception(\"Database check failed.\")\n if name not in self.existing_logs:\n raise Exception(\"no log named {}!\".format(name))\n depth = None\n data = None\n info = None\n with sqlite3.connect(self.db_file) as conn:\n cur = conn.cursor()\n cur.execute(\"SELECT {} FROM data\".format(name))\n data = cur.fetchall()\n cur.execute(\"SELECT dept FROM data\")\n depth = cur.fetchall()\n cur.execute(\"SELECT * FROM curves \\\n WHERE name = \\\"{}\\\"\".format(name.upper()))\n info = cur.fetchall()\n data = [d[0] for d in data]\n depth = [d[0] for d in depth]\n for idx, d in enumerate(data):\n if d is None:\n data[idx] = np.nan\n log = Log()\n log.name = info[0][1].lower()\n log.units = info[0][2].lower()\n log.descr = info[0][3]\n log.depth = depth\n log.data = data\n return log\n except Exception as inst:\n print(inst.args[0])\n return []", "def get_added_logs(self):\n with open(self.path, \"r\") as log_file:\n log_file.seek(self.position)\n contents = log_file.read()\n self.position = log_file.tell()\n return contents" ]
[ "0.7205541", "0.7205541", "0.69183385", "0.68548286", "0.671339", "0.66895777", "0.6651718", "0.6651718", "0.6607248", "0.6598488", "0.6557063", "0.655379", "0.64999664", "0.6349522", "0.63252586", "0.6317283", "0.6278176", "0.6275006", "0.6270008", "0.6261736", "0.62345433", "0.6211976", "0.6153629", "0.61185503", "0.6092576", "0.6092398", "0.60079014", "0.60079014", "0.5950199", "0.59160805", "0.59160805", "0.5894351", "0.58932567", "0.5888294", "0.58524334", "0.5851343", "0.5847058", "0.58345884", "0.5823352", "0.57827616", "0.5781832", "0.5766486", "0.5731941", "0.5727548", "0.57150424", "0.57095754", "0.57034206", "0.56699765", "0.5657467", "0.5652353", "0.5647583", "0.56473905", "0.56423515", "0.56354934", "0.5628836", "0.56204796", "0.561794", "0.56173915", "0.5614094", "0.5610622", "0.5604848", "0.5600957", "0.5599623", "0.55952334", "0.55952334", "0.55884343", "0.5566496", "0.5565118", "0.5559365", "0.5559365", "0.55115175", "0.55049235", "0.5499204", "0.54927063", "0.54898304", "0.548509", "0.5483686", "0.5472755", "0.54603004", "0.54578215", "0.54562104", "0.545554", "0.5426549", "0.54124814", "0.5407074", "0.5395082", "0.53877926", "0.538298", "0.53704", "0.5352267", "0.53467244", "0.5345028", "0.534304", "0.5341404", "0.5315722", "0.53137743", "0.5305353", "0.53029394", "0.5278417", "0.52548945" ]
0.6452193
13
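The record above closes with a positive document built around Polyaxon's experiment log streaming. A minimal usage sketch of that call pattern follows, assuming the `PolyaxonClient().experiment.logs` signature shown in the snippet; the import paths are assumptions against an older polyaxon-cli layout, and a plain print callback stands in for the CLI's `get_logs_handler`.

import sys

from polyaxon_client.client import PolyaxonClient                 # assumed path
from polyaxon_client.exceptions import (PolyaxonClientException,  # assumed path
                                        PolyaxonHTTPError,
                                        PolyaxonShouldExitError)


def stream_experiment_logs(user, project_name, experiment):
    # Mirrors the follow branch of get_experiment_logs() above: hand every
    # streamed log line to a message handler until the server disconnects.
    try:
        PolyaxonClient().experiment.logs(
            user,
            project_name,
            experiment,
            message_handler=print)  # stand-in for get_logs_handler(...)
    except (PolyaxonHTTPError, PolyaxonShouldExitError,
            PolyaxonClientException) as e:
        print('Could not get logs for experiment `{}`: {}'.format(experiment, e))
        sys.exit(1)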
Lists the tagging work requests in the compartment.
def list_tagging_work_requests(self, compartment_id, **kwargs): resource_path = "/taggingWorkRequests" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "resource_identifier" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_tagging_work_requests got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "resourceIdentifier": kwargs.get("resource_identifier", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TaggingWorkRequestSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TaggingWorkRequestSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list(self, jobguid=\"\", executionparams=None):", "def tags(self, request, tag_list, group):\n return tag_list", "def get_jobs_list(self, response):\n pass", "def listTagsByNotebook(self, authenticationToken, notebookGuid):\r\n pass", "def handle_tags(self, request):\n \"\"\"\n @api {get} /tags List tags\n @apiName GetTags\n @apiGroup Misc\n @apiVersion 1.0.0\n\n @apiDescription List currenty used tags\n\n @apiSuccessExample {json} Example response:\n [\n \"tag1\",\n \"tag2\"\n ]\n \"\"\"\n\n headers = {\n 'Content-Type': 'application/javascript',\n 'Access-Control-Allow-Origin': '*'\n }\n\n tags = []\n\n for task in self.cluster.config.get('tasks').values():\n if 'tags' in task:\n tags += task['tags']\n\n tags = list(set(tags))\n\n return HTTPReply(code = 200, body = json.dumps(tags), headers = headers)", "def list(self, jobguid=\"\", executionparams=dict()):", "def list(self):\n return self.rpc.call(MsfRpcMethod.JobList)", "def listTags(self, authenticationToken):\r\n pass", "def ListTags(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def listTagsByNotebook(self, authenticationToken, notebookGuid):\r\n self.send_listTagsByNotebook(authenticationToken, notebookGuid)\r\n return self.recv_listTagsByNotebook()", "def list(self):\n\n\t\treturn self._list(\"/tag\", \"tag\")", "def listRequests(self):\n reqmgr = RequestManagerImpl()\n retval = []\n for request in reqmgr.listRequests(self.endpoint):\n tmpRequest = Request()\n tmpRequest.setReqmgrUrl( self.endpoint )\n tmpRequest.setWorkflowName( request['request_name'] )\n retval.append( tmpRequest )\n return retval", "def get_job_list(self):\n return self.job_list", "def get_job_list(self):\n return self.job_list", "def __gitTagList(self):\n self.vcs.gitListTagBranch(self.project.getProjectPath(), True)", "def list_all_tags(self,obs):", "def jobs(self, tags=None, tags_intersect=None):\n return list(self.all_jobs(tags=tags, tags_intersect=tags_intersect))", "def getTagList(self):\n if not self.proxy:\n self.proxy = self.session.service(\"ALBehaviorManager\")\n return self.proxy.getTagList()", "def worklist():\n from wheelcms_axle.content import Content\n pending = Content.objects.filter(state=\"pending\", node__isnull=False)\n return pending", "def ListWorkers(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def list_requesters():\n from mephisto.core.local_database import LocalMephistoDB\n from tabulate import tabulate\n\n db = LocalMephistoDB()\n requesters = db.find_requesters()\n dict_requesters = [r.to_dict() for r in requesters]\n click.echo(tabulate(dict_requesters, headers=\"keys\"))", "def tags(self):\r\n url = '{0}/tags/'.format(self.get_url())\r\n request = http.Request('GET', url)\r\n\r\n return request, parsers.parse_json", "def list_tags():\n\n tags = Tag.query.all()\n return render_template('tags/list_tags.html', tags=tags)", "def tag_list(request):\r\n rdict = request.matchdict\r\n username = rdict.get(\"username\", None)\r\n if username:\r\n username = username.lower()\r\n\r\n tags_found = TagMgr.find(username=username)\r\n\r\n return {\r\n 'tag_list': tags_found,\r\n 'tag_count': len(tags_found),\r\n 'username': username,\r\n }", "def tags(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'tags')\r\n\r\n return http.Request('GET', url), parsers.parse_json", 
"def list_work_requests(self, compartment_id, **kwargs):\n resource_path = \"/workRequests\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"resource_identifier\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_work_requests got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"resourceIdentifier\": kwargs.get(\"resource_identifier\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[WorkRequestSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[WorkRequestSummary]\")", "def get_components(self, req):\n request_name = req.request\n\n names = []\n if(request_name == \"\"):\n comps = self.rt_proxy.get_available_components() # get all\n else:\n comps = self.rt_proxy.get_available_components(request_name)\n\n for c in comps:\n names.append(str(c))\n\n resp = ListComponentsResponse(names)\n\n return resp", "async def request_jobs_list(self, jobs_list_active_only: bool, *args, **kwargs) -> List[str]:\n # TODO: implement\n raise NotImplementedError('{} function \"request_jobs_list\" not implemented yet'.format(self.__class__.__name__))", "def list_cost_tracking_tags(self, compartment_id, **kwargs):\n resource_path = \"/tagNamespaces/actions/listCostTrackingTags\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_cost_tracking_tags got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Tag]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[Tag]\")", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def GetAssetTagsFromUploadRequest(self, 
entity, request):\n return []", "def getBuildRequests():", "def list_jobs(self):\n\n return dict(self._from_json(self.manage.run(override=\"list-jobs\")))", "def list(self):\n return list(\n filter(\n lambda x: x.get('type') == 'tagit', # pragma: no cover\n self._post(\n request=ApiActions.LIST.value,\n uri=ApiUri.ACTIONS.value,\n ).get('actions')\n )\n )", "def list_tags():\r\n tags = Tag.query.order_by(Tag.name).all()\r\n return render_template('tags.html', tags=tags)", "def _list_tags(self, expression):\n try:\n for tag in self.dockerioapi.get_tags(expression):\n Msg().out(tag)\n return self.STATUS_OK\n except (KeyError, TypeError, ValueError):\n return self.STATUS_ERROR", "def get_job_names(self):\n return []", "def tag_list(context, addon, dev_tags=None, user_tags=None):\n if not dev_tags and not user_tags:\n return ''\n if not dev_tags:\n dev_tags = []\n if not user_tags:\n user_tags = []\n\n c = {\n 'request': context['request'],\n 'addon': addon,\n 'dev_tags': dev_tags,\n 'user_tags': user_tags,\n }\n t = env.get_template('tags/tag_list.html').render(**c)\n return jinja2.Markup(t)", "def listTags(self, authenticationToken):\r\n self.send_listTags(authenticationToken)\r\n return self.recv_listTags()", "def get_all_tags():\n try:\n data = ReadTag().run()\n except Exception as ex:\n return jsonify({'code': '500','message':'Internal server error'})\n else:\n return jsonify({'code': '200','data': data})", "def test_list(self):\n response = self.app.get(self.url('tags.list'))\n # Test response...", "def get_tags(request):\n as_list = request.params.get('as_list')\n if as_list:\n return [\n tag.name\n for tag in Tag.query.all()\n ]\n else:\n return [\n {\n 'name': tag.name,\n 'id': tag.id\n }\n for tag in Tag.query.all()\n ]", "def get_tags(request):\n try:\n tags = []\n for tag in Tag.objects.all():\n tags.append({\"title\": tag.title, \"id\": tag.pk})\n\n return format_ajax_response(True, \"Knowledgebase tags retrieved successfully.\", {\"tags\": tags})\n except Exception as ex:\n logger.error(\"Failed to get_tags: %s\" % ex)\n return format_ajax_response(False, \"There was an error retrieving the knowledgebase tags.\")", "def ListJobs(self, token=None):\n return aff4.FACTORY.Open(self.CRON_JOBS_PATH, token=token).ListChildren()", "def get_queryset(self, request):\n return super(JobAdmin, self).get_queryset(request).prefetch_related('tags')", "def list(self, request):\n jobs = Job.objects.all()\n\n city = self.request.query_params.get('city', None)\n state = self.request.query_params.get('state', None)\n\n # Support filtering jobs by user id\n job = self.request.query_params.get('user', None)\n if job is not None:\n jobs = jobs.filter(user=request.user)\n\n if city is not None:\n jobs = jobs.filter(city=city)\n\n if state is not None:\n jobs = jobs.filter(state=state)\n\n serializer = JobSerializer(\n jobs, many=True, context={'request': request})\n return Response(serializer.data)", "def get(self, request):\n serializer = self.serializer_class(self.queryset.all(), many=True)\n return Response({'tags':serializer.data}, status=status.HTTP_200_OK)", "def listJobs():\n logger.debug('[FLASKWEB /jobs] Request for job listing')\n jobs = db.getJobs(numdays=2)\n for job in jobs:\n job['time'] = datetime.datetime.strptime(job['time'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n if job['complete']:\n job['complete'] = datetime.datetime.strptime(job['complete'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n\n # Garbage Collect Orpahened jobs\n compiles = 
db.getCompiles()\n for compile in compiles:\n if compile['submit']:\n compile['submit'] = datetime.datetime.strptime(compile['submit'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n if compile['complete']:\n compile['complete'] = datetime.datetime.strptime(compile['complete'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n # for c in compiles:\n # if c['uid'] not in compile_tasks.keys():\n # db.updateCompile(c['uid'], status='KILLED', done=True)\n # compiles = db.getCompiles()\n\n if request.headers['Accept'] == 'application/json':\n return jsonify(dict(LaunchJobs=jobs, CompilingJobs=compiles)), 200\n else:\n return render_template(\"jobs.html\", joblist=jobs, compilelist=compiles)", "def list_tags_for_resource(Resource=None):\n pass", "def list_jobs(arn=None, nextToken=None):\n pass", "def tag_list(request, queryset=None, **kwargs):\n if queryset is None:\n queryset = Tag.objects.all()\n \n if 'queryset' in kwargs:\n del kwargs['queryset']\n \n if 'template_name' not in kwargs:\n kwargs['template_name'] = 'flickrsets/tag/list.html'\n \n if 'template_object_name' not in kwargs:\n kwargs['template_object_name'] = 'tag'\n \n return list_detail.object_list(request, queryset, **kwargs)", "def get_request_extensions(self):\n return []", "def get_tag_interactions(request, pk):\n try:\n tag = InteractionTag.objects.get(pk=pk)\n except InteractionTag.DoesNotExist:\n return Response(status=status.HTTP_404_NOT_FOUND)\n\n ans = []\n interactions = tag.interaction_set.all()\n for interaction in interactions:\n if request.user == interaction.owner:\n ans.append(InteractionSerializer(interaction).data)\n return Response(ans)", "def list_service(request):\n builder = http.ResponseBuilder()\n master_addr = request.GET.get('master',None)\n if not master_addr:\n return builder.error('master is required').build_json()\n\n client = wrapper.Galaxy(master_addr,settings.GALAXY_CLIENT_BIN)\n status,jobs = client.list_jobs()\n LOG.info(status)\n if not status:\n return builder.error('fail to list jobs').build_json()\n ret = []\n for job in jobs:\n ret.append(job.__dict__)\n return builder.ok(data=ret).build_json()", "def list_tags(self, session):\n result = self._tag(session.get, session=session)\n return result['tags']", "def list(self):\n return self._post(\n request='list',\n uri=ApiUri.TAGS.value,\n ).get('tags')", "def list_merge_requests(request):\n if request.user.profile.superuser == False:\n raise Http404\n \n lMergeRequests = PersonMergeRequest.objects.filter()\n for mergeRequest in lMergeRequests:\n mergeRequest.from_adjuducations_count = ContestAdjudicator.objects.filter(person=mergeRequest.source_person).count()\n mergeRequest.to_adjuducations_count = ContestAdjudicator.objects.filter(person=mergeRequest.destination_person).count()\n \n mergeRequest.from_compositions_count = TestPiece.objects.filter(composer=mergeRequest.source_person).count()\n mergeRequest.to_compositions_count = TestPiece.objects.filter(composer=mergeRequest.destination_person).count()\n \n mergeRequest.from_arranger_count = TestPiece.objects.filter(arranger=mergeRequest.source_person).count()\n mergeRequest.to_arranger_count = TestPiece.objects.filter(arranger=mergeRequest.destination_person).count()\n \n return render_auth(request, 'move/list_person_merge_requests.html', {'MergeRequests' : lMergeRequests})", "def ListWaiters(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", 
"def jobs(self):\n return self.properties.get('jobs',\n EntityCollection(self.context, SynchronizationJob,\n ResourcePath(\"jobs\", self.resource_path)))", "def do_list_tags(cs, args):\n resp, tags = cs.repositories.list_tags(args.repository)\n tags = [{\"Tag\": t} for t in tags]\n utils.print_list(tags, [\"Tag\"], sortby=\"Tag\")", "def getTags(number=None):", "def list_components(self, request, context):\n response = ListComponentsResponse()\n for component in self._delegator.list_components():\n response.components.append(component)\n return response", "def get_requests(self):\r\n\t\tself.last_processed = self.last_modified\r\n\t\treturn self.requests", "def list(self, request):\n exp = Experiment.objects.all()\n serializer = ExperimentSerializer(exp, many=True)\n return send_response(request.method, serializer)", "def get_requests(self):\n\t\tself.last_processed = self.last_modified\n\t\treturn self.requests", "def getJobList_impl(self):\n my_infos = TestJob.objects.filter(\n (Q(job_status='Running')|Q(job_status='Submitted')|Q(job_status='Incomplete'))\n &Q(check_or_not=True)\n )\n\n if not connection.in_atomic_block:\n self._commit_transaction(src='getInfosList_impl')\n print(\"###\", my_infos)\n logger.info(my_infos)\n return my_infos", "def get(self):\n res = SmartAPI.get_tags(self.args.field)\n self.finish(res)", "def get(self):\n res = SmartAPI.get_tags(self.args.field)\n self.finish(res)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, 
global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)" ]
[ "0.58259183", "0.5787486", "0.578615", "0.5756901", "0.57381487", "0.5711629", "0.5706951", "0.5672795", "0.5647787", "0.56005555", "0.5547329", "0.5543428", "0.55379766", "0.55379766", "0.55000997", "0.5487424", "0.5475018", "0.5472611", "0.5454506", "0.54417205", "0.54269785", "0.5417054", "0.5416555", "0.5367583", "0.5349937", "0.53378636", "0.5318648", "0.531532", "0.5305949", "0.5297729", "0.5297729", "0.5277664", "0.52743137", "0.5269255", "0.5257659", "0.52514344", "0.5227342", "0.5210131", "0.519842", "0.5189989", "0.5174211", "0.51712584", "0.51668286", "0.5148506", "0.51432174", "0.5142149", "0.51392514", "0.51374733", "0.5134467", "0.51288205", "0.51123893", "0.51067114", "0.5101469", "0.5091619", "0.50730485", "0.50651956", "0.5063613", "0.5057077", "0.5048207", "0.5004549", "0.5001289", "0.49913314", "0.49870497", "0.49671555", "0.49579942", "0.49576223", "0.4957267", "0.49524835", "0.49524835", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828", "0.49517828" ]
0.69553626
0
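Since the positive document in this record is the OCI Python SDK's identity client method, a short usage sketch may help; it assumes `oci.identity.IdentityClient` as the host client, a default `~/.oci/config`, and that the summary objects expose `id` and `status` (attribute names taken as assumptions from the `TaggingWorkRequestSummary` model named in the response_type).

import oci

# Build the identity client from the default OCI config file.
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Tag operations live at the tenancy level, so the compartment ID passed in
# is typically the tenancy (root compartment) OCID from the config.
response = identity.list_tagging_work_requests(config["tenancy"])
for work_request in response.data:  # list[TaggingWorkRequestSummary]
    print(work_request.id, work_request.status)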
Lists the tag definitions in the specified tag namespace.
def list_tags(self, tag_namespace_id, **kwargs): resource_path = "/tagNamespaces/{tagNamespaceId}/tags" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "lifecycle_state" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_tags got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagNamespaceId": tag_namespace_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) if 'lifecycle_state' in kwargs: lifecycle_state_allowed_values = ["ACTIVE", "INACTIVE", "DELETING", "DELETED"] if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values: raise ValueError( "Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values) ) query_params = { "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "lifecycleState": kwargs.get("lifecycle_state", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[TagSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="list[TagSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list(self):\n\n\t\treturn self._list(\"/tag\", \"tag\")", "def get_all(context, namespace_name, session, filters=None, marker=None,\n limit=None, sort_key='created_at', sort_dir='desc'):\n\n namespace = namespace_api.get(context, namespace_name, session)\n query = (session.query(models.MetadefTag).filter_by(\n namespace_id=namespace['id']))\n\n marker_tag = None\n if marker is not None:\n marker_tag = _get(context, marker, session)\n\n sort_keys = ['created_at', 'id']\n sort_keys.insert(0, sort_key) if sort_key not in sort_keys else sort_keys\n\n query = paginate_query(query=query,\n model=models.MetadefTag,\n limit=limit,\n sort_keys=sort_keys,\n marker=marker_tag, sort_dir=sort_dir)\n metadef_tag = query.all()\n metadef_tag_list = []\n for tag in metadef_tag:\n metadef_tag_list.append(tag.to_dict())\n\n return metadef_tag_list", "def list_tags():\r\n tags = Tag.query.order_by(Tag.name).all()\r\n return render_template('tags.html', tags=tags)", "def list_tags():\n\n tags = Tag.query.all()\n return render_template('tags/list_tags.html', tags=tags)", "def test_list_template_for_all_namespaces(self):\n pass", "def list_all_tags(self,obs):", "def __list_all_tags(self):\n\n tags_dict = get_data.get_tagnames_dict()\n if len(tags_dict) > 0:\n first_str = 'tag'\n second_str = 'top posts scraped'\n third_str = 'recent posts scraped'\n descriptor = '{:<40} {:<20} {}'\n print('')\n print(descriptor.format(first_str, second_str, third_str))\n print(descriptor.format(len(first_str) * '-', len(second_str) * '-',\n len(third_str) * '-'))\n for number, tag in tags_dict.items():\n space_str = ' ' if len(str(number)) > 1 else ' '\n first = '[' + space_str + str(number) + '] ' + tag\n second = str(get_data.get_top_tag_post_count(tag))\n third = str(get_data.get_recent_tag_post_count(tag))\n print(descriptor.format(first, second, third))\n else:\n print('no tags found in the database')", "def tags(self, request, tag_list, group):\n return tag_list", "def list(self):\n return self._post(\n request='list',\n uri=ApiUri.TAGS.value,\n ).get('tags')", "def listTags(self, authenticationToken):\r\n pass", "def registered_dde_schemas(self):\n url = DDE_SCHEMA_BASE_URL + \"?field=_id&size=20\"\n if self.verbose:\n print(f'Loading registered DDE schema list from \"{url}\"')\n data = load_json_or_yaml(url)\n return [s[\"namespace\"] for s in data[\"hits\"]]", "def tag_list(request):\r\n rdict = request.matchdict\r\n username = rdict.get(\"username\", None)\r\n if username:\r\n username = username.lower()\r\n\r\n tags_found = TagMgr.find(username=username)\r\n\r\n return {\r\n 'tag_list': tags_found,\r\n 'tag_count': len(tags_found),\r\n 'username': username,\r\n }", "def registered_dde_schemas(verbose=False):\n url = DDE_SCHEMA_BASE_URL + \"?field=_id&size=20\"\n if verbose:\n print(f'Loading registered DDE schema list from \"{url}\"')\n data = load_json_or_yaml(url)\n return [s[\"namespace\"] for s in data[\"hits\"]]", "def tags():", "def do_list_tags(cs, args):\n resp, tags = cs.repositories.list_tags(args.repository)\n tags = [{\"Tag\": t} for t in tags]\n utils.print_list(tags, [\"Tag\"], sortby=\"Tag\")", "def tag_names(self):\r\n return [tag.name for tag in self.get_tags()]", "def definition_list(self, on, **kw):\n tag = 'dl'\n if on:\n tagstr = self._open(tag, newline=1, **kw)\n else:\n tagstr = self._close(tag, newline=1)\n return tagstr", "def list_tags(self, session):\n result = self._tag(session.get, session=session)\n return result['tags']", "def list_services(self, **kwargs: Optional[Any]) -> 
list:\n\n self.logger.debug(\"list_services: %s\", kwargs)\n\n namespace = kwargs.get(\"namespace\", \"global\")\n\n return self.AD.services.list_services(namespace) # retrieve services", "def tag_list(context, addon, dev_tags=None, user_tags=None):\n if not dev_tags and not user_tags:\n return ''\n if not dev_tags:\n dev_tags = []\n if not user_tags:\n user_tags = []\n\n c = {\n 'request': context['request'],\n 'addon': addon,\n 'dev_tags': dev_tags,\n 'user_tags': user_tags,\n }\n t = env.get_template('tags/tag_list.html').render(**c)\n return jinja2.Markup(t)", "def _tags(self):\n retval = []\n for of in self.tagnames:\n retval.append([of, self.get_datatype(of), self.get(of)])\n return retval", "def get_tags(self):\r\n self.tags = []\r\n for tag in self.soup.find_all('dl'):\r\n name = tag.dt.contents[0]\r\n\r\n # getting info about tag\r\n info = ''\r\n for p in tag.dd.find_all('p'):\r\n info += p.getText() + ' '\r\n\r\n # getting reference link and code snippet\r\n a_tags = tag.dd.find_all('a')\r\n example_id = a_tags[1]['href'].replace('#', '') # code snippet\r\n example = self.soup.find('div', {'id': example_id}).getText()\r\n\r\n # url reference (from HTML5Doctor if exists)\r\n reference = ''\r\n try:\r\n reference = tag.dt.span.a['href'] # url for HTML5Doctor\r\n except:\r\n reference = a_tags[0]['href'] # url for W3C\r\n\r\n reference = 'http://html5doctor.com/element-index/#' + name\r\n new_tag = Tag(name, info, reference, example)\r\n self.tags.append(new_tag)\r\n logger.info('Tag parsed: %s' % new_tag.name)", "def get_all_tagged(self,tag_name):\n return self.tag2elements[tag_name]", "def tags(catalog,lista,tag):\n final=lt.newList(datastructure='ARRAY_LIST')\n i=it.newIterator(lista)\n while it.hasNext(i):\n vid=it.next(i)\n if tag in vid['tags']:\n lt.addLast(final,vid)\n return final", "def ListTags(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def do_taglist(parser, token):\n bits = token.split_contents()[1:]\n if len(bits) >= 2 and bits[-2] == 'in':\n return TaglistNode(bits[0:-2], bits[-1])\n return TaglistNode(bits)", "async def list_namespaces(self) -> list:\n return await self.AD.state.list_namespaces()", "def _get_tags(fname):\n tarsqi_inst, tarsqidoc = tarsqi.load_ttk_document(fname)\n return tarsqidoc.tags", "def list_tags() -> Optional[Dict[str, Target]]:\n if hasattr(_ffi_api, \"TargetTagListTags\"):\n return _ffi_api.TargetTagListTags()\n return None", "def test_list_build_for_all_namespaces(self):\n pass", "def tagged(tag = ''):\n\tresults = queries.tagged(tag)\n\ttags = queries.tags()\n\treturn render_template('index.html', packages=results, tags=tags, currentFilter=tag)", "def tag_list(request, queryset=None, **kwargs):\n if queryset is None:\n queryset = Tag.objects.all()\n \n if 'queryset' in kwargs:\n del kwargs['queryset']\n \n if 'template_name' not in kwargs:\n kwargs['template_name'] = 'flickrsets/tag/list.html'\n \n if 'template_object_name' not in kwargs:\n kwargs['template_object_name'] = 'tag'\n \n return list_detail.object_list(request, queryset, **kwargs)", "def GetNamespaces(self):\n return list(self.type_namespaces_map.values())", "def getTagList(self):\n if not self.proxy:\n self.proxy = self.session.service(\"ALBehaviorManager\")\n return self.proxy.getTagList()", "def list_namespaced_image_stream_tag(self, namespace, **kwargs):\n\n all_params = ['namespace', 'pretty', 'label_selector', 'field_selector', 
'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_image_stream_tag\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `list_namespaced_image_stream_tag`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/imagestreamtags'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1ImageStreamTagList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def test_list_image_stream_tag_for_all_namespaces(self):\n pass", "def show_tags():\n\n tags = Tag.query.all()\n\n return render_template(\"tags/tag_list.html\", tags=tags)", "def describe_tags(resourceArns=None):\n pass", "def tags():\r\n section = document.add_section()\r\n new_width, new_height = section.page_height, section.page_width\r\n section.orientation = WD_ORIENT.LANDSCAPE\r\n section.page_width = 7772400\r\n section.page_height = 10058400\r\n document.add_heading('Tags', level=1)\r\n tags = get_qlik_sense.get_tag()\r\n num_of_tags = len(tags)\r\n table = document.add_table(rows=num_of_tags+1, cols=1)\r\n table.style = 'Grid Table 1 Light Accent 1'\r\n row = table.rows[0]\r\n row.cells[0].text = 'name'\r\n for tag in range(num_of_tags):\r\n row = table.rows[tag+1]\r\n row.cells[0].text = str(tags[tag])", "def list_packages(self):\n for tag, pkg in PACKAGES.iteritems():\n print \"{tag} - {label}\".format(tag=tag, label=pkg['label'])", "def defined_tags(self):\n return self._defined_tags", "def get_services_in_namespace(self, namespace):\n ret = self.v1_service_list.get(namespace=namespace)\n return [each.metadata.name for each in ret.items]", "def tags(self) -> list[str]:\n _args: list[Arg] = []\n _ctx = self._select(\"tags\", _args)\n return _ctx.execute_sync(list[str])", "def count(context, namespace_name, session):\n namespace = namespace_api.get(context, namespace_name, session)\n query = 
(session.query(func.count(models.MetadefTag.id)).filter_by(\n namespace_id=namespace['id']))\n return query.scalar()", "def registered_tags(self):\r\n return self._mapping.keys()", "def grouplist(self, namespace=None):\n source = self._source(namespace)\n return self._list(source, 'list')", "def get_namespaces():\r\n\r\n print 'Getting namespaces'\r\n tree = etree.parse('http://lesswrong.wikia.com/wiki/Special:AllPages', parser)\r\n options = tree.xpath('//select[@id=\"namespace\"]/option')\r\n namespaces = [option.get('value') for option in options]\r\n pprint(namespaces)\r\n return namespaces", "def get_tags(self) -> List:\n LOGGER.info('Get all the tags')\n\n with self.client.create_session() as session:\n tag_count = (func.count(RDSTableTag.table_rk)\n + func.count(RDSDashboardTag.dashboard_rk)).label('tag_count')\n\n records = session.query(\n RDSTag.rk.label('tag_name'),\n tag_count\n )\\\n .outerjoin(RDSTableTag)\\\n .outerjoin(RDSDashboardTag)\\\n .filter(RDSTag.tag_type == 'default')\\\n .group_by(RDSTag.rk)\\\n .having(tag_count > 0)\\\n .all()\n\n results = []\n for record in records:\n results.append(TagDetail(tag_name=record.tag_name,\n tag_count=record.tag_count))\n\n return results", "def test_list_namespaced_template(self):\n pass", "def find_all(self, params={}, **options):\n return self.client.get_collection(\"/tags\", params, **options)", "def get_tags(request):\n as_list = request.params.get('as_list')\n if as_list:\n return [\n tag.name\n for tag in Tag.query.all()\n ]\n else:\n return [\n {\n 'name': tag.name,\n 'id': tag.id\n }\n for tag in Tag.query.all()\n ]", "def read_tagged_word_list(filename):\n # TODO: write and test this method\n print 'reading tagged file'", "def list(self, dict_output=False, field_selector=\"\"):\n namespaces_list = self.client_core.list_namespace().items\n logger.info(\"Got namespaces\")\n\n if field_selector:\n namespaces_list = field_filter(obj_list=namespaces_list,\n field_selector=field_selector)\n # convert the list to list of dicts if required\n if dict_output:\n namespaces_list = [convert_obj_to_dict(namespace) for namespace in\n namespaces_list]\n else:\n for namespace in namespaces_list:\n namespace.metadata.resource_version = ''\n return namespaces_list", "async def guild_tags(self, ctx):\n guild_tags = self._tag_dict.get(ctx.guild.id)\n if not guild_tags:\n raise commands.BadArgument(f'This guild does not have any tags!')\n tags = sorted(guild_tags.items(), key=lambda x: x[1]['uses'], reverse=True)\n data = [f'{tag[0]} - {tag[1][\"uses\"]} uses' for tag in tags]\n embed = discord.Embed(colour=self.bot.colour)\n embed.set_author(name=f\"All Tags in {ctx.guild}\", icon_url=ctx.guild.icon_url)\n source = IndexedListSource(data=data, embed=embed, title=\"Tags\")\n await CatchAllMenu(source=source).start(ctx)", "def collect_set_of_named_references(tag: str) -> Set[str]:\n result = set() # type: Set[str]\n for element in root.iter(tag):\n name = element.attrib[\"name\"]\n result.add(name)\n return result", "def list_tags(filename):\n storeapps = APP.config[\"storage\"]\n filename = filename.encode(\"utf-8\")\n\n try:\n application = list(nativeapps.io.ls(storeapps, r\".*\" + filename + \"$\"))[0]\n meta_path = os.path.join(os.path.dirname(application), \"metadata.json\")\n metadata = json.loads(nativeapps.io.readfile(meta_path))\n tags = metadata.get(\"tags\", [])\n return flask.jsonify(tags)\n except IndexError:\n return \"Unknown application: %s\" % (application), 404", "def get_namespaces():\n return 
list(StaticAsset._load_namespaces().keys())", "def list_tags(self, entry_name):\n return self.__datacatalog.list_tags(parent=entry_name)", "def taglist() -> bytes:\n\n tags = cherrypy.engine.publish(\n \"bookmarks:tags:all\"\n ).pop()\n\n return cherrypy.engine.publish(\n \"jinja:render\",\n \"apps/bookmarks/bookmarks-taglist.jinja.html\",\n tags=tags,\n subview_title=\"Tags\"\n ).pop()", "def list_tags_for_resource(Resource=None):\n pass", "def tag_index(self):\n if not self._tag_index:\n for nb in self.notebooks:\n for tag, links in nb.tags.items():\n self._tag_index[tag].extend(links)\n for tag in self._tag_index.keys():\n self._tag_index[tag] = list(sorted(set(self._tag_index[tag]), key=natsort))\n return self._tag_index", "def GetTagListForTag(dom, tag):\n tags = dom.getElementsByTagName(tag)\n if not tags:\n return None\n return tags", "def listTagsByNotebook(self, authenticationToken, notebookGuid):\r\n pass", "def getSBMLExtensionNamespaces(self, *args):\n return _libsbml.LayoutExtension_getSBMLExtensionNamespaces(self, *args)", "def get_all_tags():\n try:\n data = ReadTag().run()\n except Exception as ex:\n return jsonify({'code': '500','message':'Internal server error'})\n else:\n return jsonify({'code': '200','data': data})", "def collect_tags(self):\n tags = []\n for document in self.documents:\n for tag_token in document.tags:\n tags.append(tag_token)\n return tags", "def _list_tags(self, expression):\n try:\n for tag in self.dockerioapi.get_tags(expression):\n Msg().out(tag)\n return self.STATUS_OK\n except (KeyError, TypeError, ValueError):\n return self.STATUS_ERROR", "def list_tags(ResourceArn=None):\n pass", "def list(self) -> directory.Level.Listing:\n return self.Listing(self.tag.states)", "def list_namespaced_template(self, namespace, **kwargs):\n\n all_params = ['namespace', 'pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_template\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `list_namespaced_template`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/templates'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n 
select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1TemplateList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def get_all_pages_for_namespace(ns):\r\n\r\n print 'Getting pages in namespace %s' % ns\r\n url = 'http://lesswrong.wikia.com/index.php?title=Special:AllPages&from=&to=&namespace=%s' % ns\r\n tree = etree.parse(url, parser)\r\n pages = tree.xpath('//table[2]//a[@title]')\r\n page_names = [page.get('title') for page in pages]\r\n pprint(page_names)\r\n return page_names", "def namespaces(self):\n return list(self._namespace_schemas.keys())", "def list(self):\n return list(\n filter(\n lambda x: x.get('type') == 'tagit', # pragma: no cover\n self._post(\n request=ApiActions.LIST.value,\n uri=ApiUri.ACTIONS.value,\n ).get('actions')\n )\n )", "def get_tags(self, tags):\n tag_list = []\n for tag in tags:\n tag_list.append(tag[\"name\"])\n return tag_list", "def defined_tags(self, defined_tags):\n self._defined_tags = defined_tags", "def get_tag_index(self) -> List[str]:\n path = os.path.join(self.directory_path, \"__tags.json\")\n if not os.path.exists(path):\n return list()\n try:\n with open(path) as f:\n return json.load(f)\n except json.decoder.JSONDecodeError:\n print(f\"Could not get tag index. Check file: {path}\")", "def getNoteTagNames(self, authenticationToken, guid):\r\n pass", "def namespaces(self):\n return ()", "def makeDocStructures(context):\n namespaces = {}\n subdirs = {}\n registry = context._docRegistry\n for (namespace, name), schema, usedIn, handler, info, parent in registry:\n if not parent:\n ns_entry = namespaces.setdefault(namespace, {})\n ns_entry[name] = (schema, handler, info)\n else:\n sd_entry = subdirs.setdefault((parent.namespace, parent.name), [])\n sd_entry.append((namespace, name, schema, handler, info))\n return namespaces, subdirs", "def list_tag_namespaces(self, compartment_id, **kwargs):\n resource_path = \"/tagNamespaces\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"include_subcompartments\",\n \"lifecycle_state\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_tag_namespaces got unknown kwargs: {!r}\".format(extra_kwargs))\n\n if 'lifecycle_state' in kwargs:\n lifecycle_state_allowed_values = [\"ACTIVE\", \"INACTIVE\", \"DELETING\", \"DELETED\"]\n if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:\n raise ValueError(\n \"Invalid value for `lifecycle_state`, must be one of {0}\".format(lifecycle_state_allowed_values)\n )\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"includeSubcompartments\": kwargs.get(\"include_subcompartments\", missing),\n \"lifecycleState\": kwargs.get(\"lifecycle_state\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return 
retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[TagNamespaceSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[TagNamespaceSummary]\")", "def namespaces(self):\n namespaces = set()\n for namespace_package in self.namespace_packages:\n dotted_name = []\n for component in namespace_package.split('.'):\n dotted_name.append(component)\n namespaces.add(tuple(dotted_name))\n return sorted(namespaces, key=lambda n: len(n))", "def taglist(self):\n tags = []\n for tag in self.tags:\n tags.append(tag.title)\n return ', '.join(map(str, tags))", "def tag_dict(self):\n tag_dict = dict()\n for document in self.documents:\n for tag in document.tags:\n tag_type = tag['tag']\n tag_dict[tag_type] = tag_dict.get(tag_type, []) + [tag]\n return tag_dict", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def watch_namespaced_template_list(self, namespace, **kwargs):\n\n all_params = ['namespace', 'pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method watch_namespaced_template_list\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `watch_namespaced_template_list`\")\n\n resource_path = '/oapi/v1/watch/namespaces/{namespace}/templates'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='JsonWatchEvent',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def listTags(self, authenticationToken):\r\n self.send_listTags(authenticationToken)\r\n return self.recv_listTags()", "def describe_tags(filters=None, maxResults=None, nextToken=None):\n 
pass", "def test_list(self):\n response = self.app.get(self.url('tags.list'))\n # Test response...", "def get_all_by_namespace(context, namespace_name, session):\n\n # namespace get raises an exception if not visible\n namespace = namespace_api.get(\n context, namespace_name, session)\n\n db_recs = (\n session.query(models.MetadefResourceType)\n .join(models.MetadefResourceType.associations)\n .filter_by(namespace_id=namespace['id'])\n .values(models.MetadefResourceType.name,\n models.MetadefNamespaceResourceType.properties_target,\n models.MetadefNamespaceResourceType.prefix,\n models.MetadefNamespaceResourceType.created_at,\n models.MetadefNamespaceResourceType.updated_at))\n\n model_dict_list = []\n for name, properties_target, prefix, created_at, updated_at in db_recs:\n model_dict_list.append(\n _set_model_dict\n (name, properties_target, prefix, created_at, updated_at)\n )\n\n return model_dict_list", "def list_bags(self, bags):\n prefix, suffix = self._get_jsonp()\n return prefix + JSON.list_bags(self, bags) + suffix", "def tags(self):\r\n return resources.Tags(self)", "def __gitTagList(self):\n self.vcs.gitListTagBranch(self.project.getProjectPath(), True)", "def all_in_namespace(cls, ns):\n return filter_by_prefix(cls.all(), ns + ':')", "def tags(self):\r\n url = '{0}/tags/'.format(self.get_url())\r\n request = http.Request('GET', url)\r\n\r\n return request, parsers.parse_json", "def get_search_tag_names(self):\n return self._ruleset.keys()", "def tags(self):\n tag_docs = self.tag_data\n tags = set([x[\"tag\"] for x in tag_docs])\n # remove the \"thawed\" tag\n tags.discard(\"thawed\")\n return tags", "def tags(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'tags')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def list_valid_tags(self):\n tags = Tag.objects.filter(db_category=\"rp hooks\").order_by(\"db_key\")\n self.msg(\"Categories: %s\" % \"; \".join(tag.db_key for tag in tags))\n return", "def test_list_build_config_for_all_namespaces(self):\n pass" ]
[ "0.62576944", "0.6049763", "0.57420623", "0.5677931", "0.56470114", "0.5591507", "0.5418022", "0.54105335", "0.54017437", "0.5380748", "0.5363339", "0.53598344", "0.53386366", "0.5322805", "0.5246583", "0.5219264", "0.52185553", "0.5193574", "0.51878446", "0.5177014", "0.5166008", "0.51583755", "0.5158218", "0.5122658", "0.5121329", "0.5098682", "0.50767756", "0.501721", "0.5001005", "0.5000745", "0.49844703", "0.49810195", "0.49778786", "0.49738938", "0.49686646", "0.49210778", "0.4920114", "0.49180013", "0.4913832", "0.4908577", "0.48981568", "0.4855393", "0.4842103", "0.4821254", "0.4814114", "0.47989112", "0.47885174", "0.4781442", "0.47643355", "0.47549456", "0.47549355", "0.47505373", "0.47394383", "0.47282016", "0.47271076", "0.4722479", "0.4714915", "0.47051424", "0.4700352", "0.4693723", "0.46858984", "0.4684258", "0.46795502", "0.46759665", "0.46705258", "0.4663415", "0.46627012", "0.46608543", "0.46383548", "0.46343422", "0.46326318", "0.46306753", "0.462531", "0.46204218", "0.46139622", "0.4613545", "0.46103314", "0.46045178", "0.45962736", "0.45932648", "0.45872158", "0.45840782", "0.4580572", "0.4579704", "0.4579704", "0.45792872", "0.45760116", "0.45746383", "0.4565471", "0.45617628", "0.45526814", "0.4550295", "0.45485938", "0.45456564", "0.45423704", "0.4540252", "0.45343265", "0.45306277", "0.45290416", "0.45231882" ]
0.50524735
27
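The `list_tag_namespaces` snippet in the block above uses the same request pattern as the other OCI Identity operations in this dataset. A minimal usage sketch, assuming the standard OCI Python SDK setup (`oci.config.from_file` and `oci.identity.IdentityClient` come from the SDK and are not part of the row itself):

import oci

# Load the default SDK config (~/.oci/config) and build an Identity client.
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# List only ACTIVE tag namespaces in the tenancy (root compartment);
# "ACTIVE" is one of the lifecycle_state values validated in the snippet above.
response = identity.list_tag_namespaces(
    config["tenancy"],
    lifecycle_state="ACTIVE",
    limit=50,
)
for namespace in response.data:
    print(namespace.name)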
Lists the `UserGroupMembership` objects in your tenancy. You must specify your tenancy's OCID as the value for the compartment ID (see Where to Get the Tenancy's OCID and User's OCID).
def list_user_group_memberships(self, compartment_id, **kwargs): resource_path = "/userGroupMemberships" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "user_id", "group_id", "page", "limit" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_user_group_memberships got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "userId": kwargs.get("user_id", missing), "groupId": kwargs.get("group_id", missing), "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[UserGroupMembership]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[UserGroupMembership]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def view_group(request, group_id):\n users = models.UserProfile.all().order('email')\n if group_id:\n group = models.UserGroup.get_by_id(int(group_id))\n if group.users:\n users = models.UserProfile.get(group.users)\n else:\n users = []\n return utility.respond(request, 'admin/view_group', {'users': users})", "def get_queryset(self):\n user = self.request.user\n return user.group_set.all()", "def list_groups(request):\n groups = models.UserGroup.all().order('name')\n return utility.respond(request, 'admin/list_groups', {'groups': groups})", "def get(self):\r\n return UserGroupService.getAllUserGroups(self)", "def queryUsersAndGroups(self):\n with self._open_connection() as session:\n #List Users\n pprint(list( [ (x[User.id], x[User.name]) for x in session.query(User) ] ))\n # List groups\n groups = session.query(User).filter( User.type == 'rodsgroup' )\n [x[User.name] for x in groups]\n #More detailed listings\n grp_usr_mapping = [ (iRODSUserGroup ( session.user_groups, result), iRODSUser (session.users, result)) \\\n for result in session.query(UserGroup,User) ]\n pprint( [ (x,y) for x,y in grp_usr_mapping if x.id != y.id ] )", "def list_users(access_token):\n request_url = OKTA_URL + \"api/v1/users\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def get_group_list(org_id):\n tList = get_template('app/usermanagementorg/group_list.html')\n groups = get_groups(org_id)\n return tList.render(Context({ 'groups': groups, }))", "def get_memberships(self):\n return UnitMembership.objects.filter(unit=self).select_related(\"user\")", "def get_user_groups(user):\n auth_groups = user.groups.all()\n # groups = [group.profile for group in auth_group] # not working\n # todo implement better\n groups = [GroupProfile.objects.filter(group=group)[0] for group in auth_groups if GroupProfile.objects.filter(group=group).count()]\n return groups", "def users_groups():\n if request.method == \"GET\":\n query = {\"token\": ciconnect_api_token, \"globus_id\": session[\"primary_identity\"]}\n # Get user info to derive unix name\n user = get_user_info(session)\n unix_name = user[\"metadata\"][\"unix_name\"]\n # Get user's group membership info based on session unix name\n users_group_memberships = get_user_group_memberships(session, unix_name)\n\n multiplexJson = {}\n group_membership_status = {}\n for group in users_group_memberships:\n if group[\"state\"] not in [\"nonmember\"]:\n group_name = group[\"name\"]\n group_query = (\n \"/v1alpha1/groups/\" + group_name + \"?token=\" + query[\"token\"]\n )\n multiplexJson[group_query] = {\"method\": \"GET\"}\n group_membership_status[group_query] = group[\"state\"]\n # POST request for multiplex return\n multiplex = get_multiplex(multiplexJson)\n\n users_groups = []\n for group in multiplex:\n if (\n session[\"url_host\"][\"unix_name\"]\n in (json.loads(multiplex[group][\"body\"])[\"metadata\"][\"name\"])\n ) and (\n len(\n (json.loads(multiplex[group][\"body\"])[\"metadata\"][\"name\"]).split(\n \".\"\n )\n )\n > 1\n ):\n users_groups.append(\n (\n json.loads(multiplex[group][\"body\"]),\n group_membership_status[group],\n )\n )\n # users_groups = [group for group in users_groups if len(group['name'].split('.')) == 3]\n\n # Query user's pending project requests\n pending_project_requests = get_user_pending_project_requests(unix_name)\n # Check user's member status of root connect group\n connect_group = session[\"url_host\"][\"unix_name\"]\n user_status = 
get_user_connect_status(unix_name, connect_group)\n\n domain_name = domain_name_edgecase()\n\n with open(\n brand_dir\n + \"/\"\n + domain_name\n + \"/form_descriptions/group_unix_name_description.md\",\n \"r\",\n ) as file:\n group_unix_name_description = file.read()\n\n return render_template(\n \"users_groups.html\",\n groups=users_groups,\n project_requests=pending_project_requests,\n user_status=user_status,\n group_unix_name_description=group_unix_name_description,\n )", "def personal_group_user_listing(request):\n\town_id, page_num = request.user.id, request.GET.get('page', '1')\n\tstart_index, end_index = get_indices(page_num, OBJS_PER_PAGE_IN_USER_GROUP_LIST)\n\tpayload, total_grps = retrieve_user_group_list_contents(own_id,start_index,end_index)\n\tpage_list = get_overall_page_list(total_grps, OBJS_PER_PAGE_IN_USER_GROUP_LIST)\n\treturn render(request,\"personal_group/group_listing/user_group_list.html\",{'payload':payload,'pages':page_list,'num_pages':len(page_list),\\\n\t\t'current_page':page_num,'current_time':time.time(),'own_id':str(request.user.id),'items_in_curr_page':len(payload)})", "def groups():\n access_token = session['access_token']\n return \"%s\" % list_groups(access_token)", "def all_memberships(request):\n memberships = Membership.objects.all()\n context = {\n 'memberships': memberships,\n }\n return render(request, \"memberships.html\", context)", "def get_membersof(self, kwargs):\n group = kwargs[\"group\"]\n verbose = kwargs.get(\"verbose\", False)\n\n results = list(self.engine.query(self.engine.GROUP_DN_FILTER(group), [\"distinguishedName\", \"objectSid\"]))\n if results:\n group_dn = results[0][\"distinguishedName\"]\n else:\n error(\"Group {group} does not exists\".format(group=group))\n\n primary_group_id = results[0][\"objectSid\"].split('-')[-1]\n results = self.engine.query(self.engine.ACCOUNTS_IN_GROUP_FILTER(primary_group_id, group_dn))\n self.display(results, verbose)", "def list(self, request, *args, **kwargs):\n if not request.user.is_superuser:\n self.queryset = Group.objects.filter(owner__pk=request.user.id)\n\n return super().list(request, args, kwargs)", "def filter_users(request):\n groups = models.UserGroup.all().order('name')\n return utility.respond(request, 'admin/filter_users', {'groups': groups})", "def members(self):\n data = UserProfile.objects.filter(\n organization_id=self.id\n ).order_by(\n 'display_name', 'first_name', 'last_name'\n )\n\n return data", "def get_queryset(self):\n group_info = Group.objects.filter(id__in=Member.objects.filter(\n user=self.request.user).values('group').distinct())\n for data in group_info:\n user_id = Member.objects.get(role_type='owner', group_id=data.id)\n data.owner = user_id.user.phone\n \n return group_info", "def list_group_members(self, entity):\n\n members = []\n\n for nodePath, node in self.cache.get_tree(self.userProjects).items():\n if nodePath.startswith(entity.path):\n # Check if node is a direct child\n distance = len(pathlib.Path(nodePath).relative_to(pathlib.Path(entity.path)).parts)\n\n if distance == 1:\n if type(node) is gitlab.v4.objects.Group or type(node) is gitlab.v4.objects.Project:\n members.append(node.path)\n elif type(node) is gitlab.v4.objects.User:\n members.append(node.username)\n\n return members", "def get_memberships(self, kwargs):\n account = kwargs[\"account\"]\n recursive = kwargs.get(\"recursive\", False)\n\n already_printed = set()\n\n def lookup_groups(dn, leading_sp, already_treated):\n results = self.engine.query(self.engine.DISTINGUISHED_NAME(dn), 
[\"memberOf\", \"primaryGroupID\"])\n for result in results:\n if \"memberOf\" in result:\n for group_dn in result[\"memberOf\"]:\n if group_dn not in already_treated:\n print(\"{g:>{width}}\".format(g=group_dn, width=leading_sp + len(group_dn)))\n already_treated.add(group_dn)\n lookup_groups(group_dn, leading_sp + 4, already_treated)\n\n if \"primaryGroupID\" in result and result[\"primaryGroupID\"]:\n pid = result[\"primaryGroupID\"]\n results = list(self.engine.query(self.engine.PRIMARY_GROUP_ID(pid)))\n if results:\n already_treated.add(results[0][\"dn\"])\n\n return already_treated\n\n results = self.engine.query(self.engine.ACCOUNT_IN_GROUPS_FILTER(account), [\"memberOf\", \"primaryGroupID\"])\n for result in results:\n if \"memberOf\" in result:\n for group_dn in result[\"memberOf\"]:\n print(group_dn)\n if recursive:\n already_printed.add(group_dn)\n s = lookup_groups(group_dn, 4, already_printed)\n already_printed.union(s)\n\n # for some reason, when we request an attribute which is not set on an object,\n # ldap3 returns an empty list as the value of this attribute\n if \"primaryGroupID\" in result and result[\"primaryGroupID\"] != []:\n pid = result[\"primaryGroupID\"]\n results = list(self.engine.query(self.engine.PRIMARY_GROUP_ID(pid)))\n if results:\n print(results[0][\"dn\"])", "def _get_org_members(self):\n url = f\"{BASE_URL}/orgs/{ORG}/members\"\n return self.fetch_all_pages(url, flatten=True, query_params={\"per_page\": 100})", "def showORGusers(**kwargs):\n sessiontoken = kwargs['sessiontoken']\n ORG_ID = kwargs['ORG_ID']\n strCSPProdURL = kwargs['strCSPProdURL']\n jsonResponse = get_csp_users_json(strCSPProdURL, ORG_ID, sessiontoken)\n if jsonResponse == None:\n print(\"API Error\")\n sys.exit(1)\n\n users = jsonResponse['results']\n table = PrettyTable(['First Name', 'Last Name', 'User Name'])\n for i in users:\n table.add_row([i['user']['firstName'],i['user']['lastName'],i['user']['username']])\n print (table.get_string(sortby=\"Last Name\"))", "def list_all_organizations(ctx):\n pprint(ctx.obj.orgs.get().data)", "def get_members(self):\r\n database = main.connect_to_cloudsql()\r\n cursor = database.cursor()\r\n query = (\"SELECT username from \" + ENV_DB + \".Groups WHERE gid='{}'\").format(self.g_id)\r\n cursor.execute(query)\r\n data = cursor.fetchall()\r\n database.close()\r\n return list(i[0] for i in data)", "def list_user_groups(self, token):\n requestUser = self.get_username_from_token(token)\n dataBase = self.read_database()\n groups = dataBase['userGroups']\n groupList = list()\n for group in groups:\n members = groups[group]['members']\n owners = groups[group]['owners']\n if requestUser in members or requestUser in owners:\n groupList.append(group)\n return groupList", "def myorgs(request):\n context = RequestContext(request)\n \n user = request.user\n orgs = user.orgusers.get_query_set()\n \n context['orgs'] = orgs\n return render_to_response('myorgs.html', context)", "def fetch_their_members(our_group):\n\tgroup_id = our_group[\"groupId\"]\n\turl = f'{BASE_URL}/groups/{group_id}/members'\n\tparams = {'$select': 'userPrincipalName,id'}\n\treturn call_api(url, params)", "def getGroup(group: int, name=\"\") -> list:\n groups = mongo.db.groups.find({'id':group},{'_id':0})\n userID_list = []\n user_list = []\n for entry in groups:\n if entry[\"id\"] == group:\n userID_list = userID_list + entry[\"members\"]\n if len(userID_list) != 0:\n for entry in userID_list:\n x = fetchUser(userId=entry)\n user_list = user_list + x\n return user_list", "def 
groups(self):\r\n return users.Groups(self)", "def test_list_my_memberships_owner(self):\n url = '/api/v1/communities/0/list_my_memberships/'\n\n response = self.client.get(url, HTTP_AUTHORIZATION=self.auth('user1'))\n self.assertEqual(status.HTTP_200_OK, response.status_code)\n\n data = response.data\n self.assertEqual(10, data['count'])", "def get_group_users(groupname):\n return jsonify(admin.get_group_users(current_app.scoped_session(), groupname))", "def get_uw_group_members(\n gws_base_url: str,\n gws_ca_cert: str,\n gws_client_cert: str,\n gws_client_key: str,\n uw_group: str,\n) -> list:\n\n r = requests.get(\n gws_base_url + \"/group/\" + uw_group + \"/member\",\n verify=gws_ca_cert,\n cert=(gws_client_cert, gws_client_key),\n )\n\n group_members = []\n\n for member in r.json()[\"data\"]:\n if member[\"type\"] == \"uwnetid\":\n # Verify personal NetID\n # https://wiki.cac.washington.edu/pages/viewpage.action?spaceKey=infra&title=UW+NetID+Namespace\n if re.match(\"^[a-z][a-z0-9]{0,7}$\", member[\"id\"]):\n group_members.append(member[\"id\"])\n\n return group_members", "def list_users(ctx, project_name):\n project = ctx.obj.groups.byName[project_name].get().data\n pprint(ctx.obj.groups[project.id].databaseUsers.get().data)", "def get_groups(self):\n result = self.conn.usergroup.get(status=0, output='extend', selectUsers=\"extend\")\n groups = {group[\"name\"]: Group(\n name=group[\"name\"],\n id=group[\"usrgrpid\"],\n members=group[\"users\"],\n ) for group in result}\n return groups", "def list_group_users(self, group_id, **params):\n url = 'groups/%s/users' % group_id\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def test_get_group_users(self):\n response = self.client.get_group_users(\"ABC123\")\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v2/groups/ABC123/users\")\n self.assertEqual(\n util.params_to_dict(args),\n {\n \"account_id\": [self.client.account_id],\n \"limit\": [\"100\"],\n \"offset\": [\"0\"],\n },\n )", "def list_groups(self):\n return self.get_admin(\"groups\")", "def getMembership(config, user):\r\n\r\n seen = set()\r\n for member_of in _getMembership(config, user, seen):\r\n yield member_of\r\n\r\n # everyone is always a member of group \"all\"\r\n yield 'all'", "def get_members(self):\n res = self.__con__.search_s(\n self.__ldap_base_dn__,\n ldap.SCOPE_SUBTREE,\n \"(memberof=%s)\" % self.__dn__,\n ['uid'])\n\n ret = []\n for val in res:\n val = val[1]['uid'][0]\n try:\n ret.append(val.decode('utf-8'))\n except UnicodeDecodeError:\n ret.append(val)\n except KeyError:\n continue\n\n return [CSHMember(self.__lib__,\n result,\n uid=True)\n for result in ret]", "def GetGroupMembers(self, group):\n return []", "def users(self,org_id=None):\n if org_id is None:\n org_id = self.org_id\n return self.get('{}/orgs/{}/users'.format(ApiVersion.A1.value,org_id))", "def test_get_resource_group_member_list(self):\n pass", "def list_groups():\n return _list_tindyb_unique_values(\"group\", dbpath=__dbpath__)", "def get_queryset(self):\n mo = MemberOrganisation.objects.get(users=self.request.user)\n if self.user_type_query:\n return User.objects.filter(profile__member_organisation__slug=mo.slug)\n return User.profile.objects.all()", "def list_users(self):\n raise NotImplementedError", "def list(request):\n return 
render_to_response('rteacher/manage_groups_list.html', request, **klist(\n request=request\n ))", "def list_groups(self):\n return self._get(\"cloudConnectorGroups\").list", "def group(self, group_cn):\n group = self.search(base=GROUPS, cn=group_cn)\n\n if len(group) == 0:\n return []\n else:\n group_members = group[0]['attributes']['member']\n\n members = []\n for member in group_members:\n members.append(self.search(dn=member))\n\n if self.objects:\n return self.member_objects(members)\n\n return members", "def groups(request):\n groups = GroupProfile.objects.all()\n user = request.user\n user_permissions = 'allowed' if user.is_authenticated() else ''\n show_errors_in_form = False\n group_form = GroupProfileForm()\n return render_to_response('home.html', locals())", "def get_groups(self, obj):\n groupsForCompany = get_groups_with_perms(obj)\n return [x.id for x in groupsForCompany]", "def get_groups(self):\n user_node = self.get()\n grouplist = []\n if user_node:\n for rel in graph.match(start_node=user_node, rel_type='in'):\n grouplist.append(Usergroup(id=rel.end_node()['id']))\n return grouplist", "def getCicMemberList(self, groupname,listofroles,fieldselector=None):\n\n urlEndpoint = \"/odata/Users?$filter=\"\n queryString = \"\"\n memberList = \"\"\n\n # Build a query string based on the number of given roles via OR concatenation\n if len(listofroles) > 1:\n for role in listofroles:\n\n if not queryString:\n queryString+=\"groupname eq '\"+groupname+\"' and role eq '\"+role+\"'\"\n else:\n queryString+=\" or groupname eq '\"+groupname+\"' and role eq '\"+role+\"'\"\n queryString = urllib.pathname2url(queryString)\n\n # or create only one query\n else:\n queryString = urllib.pathname2url(\"groupname eq '\"+groupname+\"' and role eq '\"+listofroles[0]+\"'\")\n\n # Append a field selector if given so only this one key field is returned for each entry\n if fieldselector is not None:\n selectedField = \"&$select=\"+fieldselector\n urlComplete = urlEndpoint+queryString+selectedField\n # else return whole key set for each result\n else:\n urlComplete = urlEndpoint+queryString\n\n try:\n response = self.httpHandler.sendHttpRequest(urlComplete)\n\n except urllib2.HTTPError as e:\n\n logger.debug(traceback.format_exc())\n\n else:\n\n responseCode = response.getcode()\n if responseCode == 200:\n response = json.loads(response.read())\n if not response[\"value\"]:\n logger.debug(\"CIC OData query returned an empty result-set. 
Check used parameters\")\n return memberList\n else:\n # read key value for each result and create a new clean result array\n if fieldselector is not None:\n for item in response[\"value\"]:\n # Technical users have no associated d-number so dont add them to the member list\n if item[fieldselector] is not None:\n memberList+=\" \"+item[fieldselector].encode(\"ascii\",\"ignore\")\n return memberList\n # return the whole payload as is\n else:\n return response\n else:\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(responseCode, body))\n raise AssertionError(\"Got unexpected http return code: {} when calling {}\".format(responseCode, urlComplete))", "def organizations(self):\n self.elements('organizations')", "def listGroups(self):\n return tuple(Group.create(groupName, self._modelDataManager) for groupName in self.pm_getUserManager().listGroups())", "def get_people(self):\n url = self.base_url + 'memberships'\n\n req = requests.get(headers=self.headers, url=url)\n\n return req.json()", "def test_get_groups_users(self):\n api.user.create(\n username='chuck',\n email='chuck@norris.org',\n password='secret',\n )\n api.group.create(groupname='staff')\n api.group.add_user(username='chuck', groupname='staff')\n\n users = api.user.get_users(groupname='staff')\n usernames = [user.getUserName() for user in users]\n\n self.assertEqual(usernames, ['chuck'])", "def getGroupMembers(group_id):\r\n return Group.getGroupMembers(group_id)", "def list_groups(access_token):\n request_url = OKTA_URL + \"api/v1/groups\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def users_in_group(self, group_id):\n users = []\n users = self._get(('user', 'group', str(group_id)))\n for user in users:\n if 'dreamdiary.diary.user' in user['saml_permissions']:\n users.append(user)\n return users", "def list_members_of_organisation(\n self, organisation_id: OrganisationId\n ) -> List[Publisher]:\n ...", "def list_group_members(self, token, userGroup):\n requestUser = self.get_username_from_token(token)\n dataBase = self.read_database()\n if userGroup not in dataBase['userGroups']:\n raise GroupDoesNotExistException(\"User group does not exist\")\n\n if requestUser not in dataBase['userGroups'][userGroup]['owners']:\n raise UserPermissionException(\"User is not an owner of this group\")\n owners = dataBase['userGroups'][userGroup]['owners']\n members = dataBase['userGroups'][userGroup]['members']\n return {'owners':owners, 'members':members}", "def getChosenMembers(self):\n members_and_groups = self.request.form.get('form.widgets.users')\n members = []\n\n if UserAndGroupSelectionWidget_installed:\n pg = getToolByName(self.context, 'portal_groups')\n groups = pg.getGroupIds()\n chosen_groups = list(set(members_and_groups).intersection(set(groups)))\n chosen_members = list(set(members_and_groups).difference(set(groups)))\n\n for g in chosen_groups:\n chosen_members += pg.getGroupById(g).getGroupMemberIds()\n\n members = list(set(chosen_members))\n else:\n # Case when Products.UserAndGroupSelectionWidget is not installed/used\n members = [member for member in members_and_groups.split('\\r\\n')\n if member]\n\n return members", "def get_gadm_list():\n cur = g.db.execute('select id_user from user_group where gadm == 1', [uid])\n gadm = [row[0] for row in cur.fetchall()]\n return gadm", "def capacitygroup_list(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_capacitygroup_list(cmd_ctx, 
cpc, options))", "def filter_queryset(self, request, queryset, view):\n if view.action == \"retrieve\" and request.method == \"GET\":\n return queryset.model.objects.all()\n\n filtered_queryset = super().filter_queryset(request, queryset, view)\n org_users = set(\n [group.team.organization for group in request.user.groups.all()] +\n [o.user for o in filtered_queryset]\n )\n\n return queryset.model.objects.filter(user__in=org_users, user__is_active=True)", "def test_get_resource_membership_list(self):\n pass", "def group_members(group):\n\n group = ldapi.lookup(ld, 'cn', group, cfg['ldap_groups_base'])\n\n if group and 'uniqueMember' in group:\n r = re.compile('^uid=([^,]*)')\n return map(lambda x: r.match(x).group(1), group['uniqueMember'])\n return []", "def getPeopleGroups(self):\n return [FoursquarePeopleGroup(le) for le in self.base.get(\"groups\", [])]", "def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))", "def getGroup():\n\tprint\n\tprint \"Requesting the list of groups for this account\"\n\n\tgroups_result = getResult('/papi/v0/groups')\n\n\treturn (groups_result)", "def list_projects(ctx):\n pprint(ctx.obj.groups.get().data)", "def get_users(request):\n response = {\n 'users': []\n }\n\n username = request.GET.get('username', '').lower()\n groups = request.GET.getlist('groups')\n is_active = request.GET.get('is_active', '').lower()\n\n users = User.objects\n\n if is_active and is_active == 'true':\n users = users.filter(is_active=True)\n\n if username:\n users = users.filter(username=username)\n\n if groups:\n group_ids = []\n for groupname in groups:\n groupname = groupname.lower()\n try:\n group = Group.objects.get(name=groupname)\n group_ids.append(group.id)\n except Group.DoesNotExist as e:\n LOG.exception(\"Failed to filter by group, group with name %s not found.\" % groupname)\n users = users.filter(groups__in=group_ids)\n\n users = users.order_by('username')\n\n for user in users:\n user = {\n 'id': user.id,\n 'username': user.username,\n 'groups': [group.name for group in user.groups.all()],\n 'is_active': user.is_active,\n 'superuser': user.is_superuser\n }\n response['users'].append(user)\n\n response['status'] = 0\n\n return JsonResponse(response)", "def users_groups_by_permission(self, user, permission_name):\n groups = []\n orgs = []\n for role in user.roles:\n for p in role['permissions']:\n if permission_name in p['name']:\n orgs.append(role['organisation']['id'])\n for org in orgs:\n ogs = self._get(('group', 'organisation', str(org)))\n for og in ogs:\n if og['official'] == False:\n groups.append(og)\n return groups", "def printUsersInGroup(group) -> None:\n click.echo(tabulate(listUsersInDict(group), headers=\"keys\", tablefmt=\"grid\"))", "def user_groups(user_email, request):\n user = DBSession.query(User).filter(User.email == user_email).first()\n return user.group_names() if user else []", "def product_group_list(obj):\n client = get_client(obj)\n\n res = client.product_group_list()\n\n print(json.dumps(res, indent=4))", "def organization(request, id):\n org = get_object_or_404(Organization, pk=id)\n context = {\n 'org': org,\n 'cover': modify_image_url(str(org.cover), 'cover'),\n 'logo': modify_image_url(str(org.logo), 'logo'),\n 'mission': \"\",\n 'values': \"\",\n 'members': \"\",\n }\n\n context['mission'] = org.mission\n\n if org.value_set.exists():\n context['values'] = org.value_set.all()\n\n if org.membership_set.exists():\n membership = org.membership_set.all()\n context['members'] = []\n for member in membership:\n m 
= User.objects.get(pk=member.user_id)\n context['members'].append(m)\n\n return render(request, 'wantedly_app/org.html', context)", "def members(self):\n return self.find_users_by_rel('member')", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def do_user_list(cs, args):\n _, users = cs.users.list()\n fields = ['user_id', 'username', 'email', 'realname', 'comment']\n utils.print_list(users, fields, sortby=args.sortby)", "def test_get_members(session): # pylint:disable=unused-argument\n user = factory_user_model()\n org = OrgService.create_org(TestOrgInfo.org1, user.id)\n\n response = org.get_members()\n assert response\n assert len(response['members']) == 1\n assert response['members'][0]['membershipTypeCode'] == 'OWNER'", "def get_groups():\n\n # FUTURE: Properly reutrn error, Mongo is giving it's own\n if current_user.groups:\n return Response(response=json.dumps([g.to_dict() for g in current_user.groups]), status=200, mimetype=\"application/json\")\n else:\n return return_json_error('No groups assigned to', 500)", "def get_group_members(self, group):\n members = []\n result = self.search('ou=groups,dc=mozilla',\n filterstr='cn=%s' % (group))\n if result == False:\n raise self.SearchError\n elif result == []:\n return []\n for group in result[1]:\n members = list(set(members) | set(group[1]['memberUid']))\n return members", "def KLP_Users_list(request):\n\n # get logged in user\n\n user = request.user\n if user.id:\n\n # check logged in user permissions, to get user list\n\n KLP_user_Perm(request.user, 'Users', None)\n\n # get all active(1) users list other than staff and super user order by username\n\n user_list = User.objects.filter(is_staff=0,\n is_superuser=0).order_by('username')\n\n # render show users form with users list\n\n return render_to_response('viewtemplates/show_users_form.html',\n {\n 'user_list': user_list,\n 'user': user,\n 'title': 'KLP Users',\n 'legend': 'Karnataka Learning Partnership',\n 'entry': 'Add',\n }, context_instance=RequestContext(request))\n else:\n\n # if user is not logged in redirect to login page\n\n return HttpResponseRedirect('/login/')", "def list_all():\n\n members = ldapi.search(ld, cfg['ldap_users_base'], '(objectClass=member)')\n return dict([(member[0], member[1]) for member in members])", "def _get_users(self, context, project, object_list=None):\n by = self.request.GET.get(\"by\", \"\").lower()\n options = [\n (\"Project members\", \"members\"),\n (\"Others (Anonymous users)\", \"anonymous\"),\n ]\n exists = [i for i in options if i[1] == by]\n matches = exists[0][1] if len(exists) == 1 else \"\"\n\n if object_list is not None:\n return self._get_object_list(\n object_list,\n matches != \"\",\n creator__isnull=True if matches == \"anonymous\" else False,\n )\n\n return {\n **context,\n \"by\": by,\n \"by_options\": options,\n }", "def user_list(ctx):\n data = ctx.obj.get_all_users()\n output_json_data(data)", "def list(self) -> List[Organisation]:\n ...", "def getGroups():\r\n return Group.getGroups()", "def display_users(cls):\n return cls.user_list", "def list_groups(self, **params):\n url = 'groups'\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def get_list_groups(self):\n list_response = requests.get(self.groups_url, headers=self.headers)\n return list_response.json()[\"groups\"]", "def 
get_queryset(self):\n user = self.request.user\n collabLists = ListObject.objects.filter(collaborators__id=user.id)\n return collabLists", "def test_list_my_memberships_member(self):\n url = '/api/v1/communities/0/list_my_memberships/'\n\n response = self.client.get(url, HTTP_AUTHORIZATION=self.auth('user3'))\n self.assertEqual(status.HTTP_200_OK, response.status_code)\n\n data = response.data\n self.assertEqual(3, data['count'])\n self.assertEqual(3, data['results'][0]['community']['id'])\n self.assertEqual(4, data['results'][1]['community']['id'])\n self.assertEqual(5, data['results'][2]['community']['id'])\n self.assertEqual('0', data['results'][0]['status'])\n self.assertEqual('1', data['results'][1]['status'])\n self.assertEqual('2', data['results'][2]['status'])\n self.assertEqual('2', data['results'][0]['role'])\n self.assertEqual('2', data['results'][1]['role'])\n self.assertEqual('2', data['results'][2]['role'])", "def get_all_groups():\n return jsonify(admin.get_all_groups(current_app.scoped_session()))", "def user_groups(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"user_groups\")", "def lists(start=None, top=None):\n url = 'persongroups'\n params = {\n 'start': start,\n 'top': top,\n }\n\n return util.request('GET', url, params=params)", "def get_groups(phone_num):\n phone_num = strip_phone_num(phone_num) # Get last 9 digits\n user = User.query.filter_by(phone_num=phone_num).first()\n\n if not user:\n return {\n \"success\": False,\n \"msg\": \"User does not exist.\"\n }\n\n payload = {}\n payload[\"groups\"] = []\n groups = user.groups\n for group in groups:\n group_dict = {\n \"id\": group.id,\n \"name\": group.name\n }\n\n members = [{\n \"nickname\": user.nickname,\n \"phone_num\": \"0\" + user.phone_num,\n \"balance\": None\n }]\n for assoc in user.group_associations:\n if assoc.group_id != group.id:\n continue\n\n associate = User.query.filter_by(id=assoc.associate_id).first()\n\n members.append({\n \"nickname\": assoc.associate_nickname,\n \"phone_num\": '0' + associate.phone_num,\n \"balance\": assoc.balance\n })\n\n group_dict[\"members\"] = members\n payload[\"groups\"].append(group_dict)\n\n return {\n \"success\": True,\n \"groups\" : payload[\"groups\"]\n }", "def get_user_groups(username):\n return jsonify(admin.get_user_groups(current_app.scoped_session(), username))", "def list_forum_members(request, course_id):\r\n course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\r\n course = get_course_by_id(course_id)\r\n has_instructor_access = has_access(request.user, 'instructor', course)\r\n has_forum_admin = has_forum_access(\r\n request.user, course_id, FORUM_ROLE_ADMINISTRATOR\r\n )\r\n\r\n rolename = request.GET.get('rolename')\r\n\r\n # default roles require either (staff & forum admin) or (instructor)\r\n if not (has_forum_admin or has_instructor_access):\r\n return HttpResponseBadRequest(\r\n \"Operation requires staff & forum admin or instructor access\"\r\n )\r\n\r\n # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)\r\n if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:\r\n return HttpResponseBadRequest(\"Operation requires instructor access.\")\r\n\r\n # filter out unsupported for roles\r\n if not rolename in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:\r\n return HttpResponseBadRequest(strip_tags(\r\n \"Unrecognized rolename '{}'.\".format(rolename)\r\n ))\r\n\r\n try:\r\n role = Role.objects.get(name=rolename, course_id=course_id)\r\n users = 
role.users.all().order_by('username')\r\n except Role.DoesNotExist:\r\n users = []\r\n\r\n def extract_user_info(user):\r\n \"\"\" Convert user to dict for json rendering. \"\"\"\r\n return {\r\n 'username': user.username,\r\n 'email': user.email,\r\n 'first_name': user.first_name,\r\n 'last_name': user.last_name,\r\n }\r\n\r\n response_payload = {\r\n 'course_id': course_id.to_deprecated_string(),\r\n rolename: map(extract_user_info, users),\r\n }\r\n return JsonResponse(response_payload)" ]
[ "0.59523857", "0.59251404", "0.5874129", "0.5672281", "0.56644803", "0.56609535", "0.56514776", "0.56267077", "0.5618761", "0.56177497", "0.5544431", "0.54518914", "0.544957", "0.5421477", "0.54123914", "0.54096514", "0.5407805", "0.5396198", "0.53743017", "0.53723145", "0.5355645", "0.53510547", "0.53446084", "0.534089", "0.53142476", "0.5313268", "0.5311054", "0.52878046", "0.5286481", "0.5281303", "0.5271511", "0.5262589", "0.5235903", "0.5232978", "0.52221507", "0.5219929", "0.52168053", "0.51601505", "0.5154807", "0.51483124", "0.51427054", "0.51205325", "0.51170343", "0.5114647", "0.51141465", "0.5112073", "0.51048386", "0.5093024", "0.50860643", "0.508366", "0.50823355", "0.5072945", "0.50662065", "0.50653213", "0.50628513", "0.50624", "0.5054486", "0.5047733", "0.5046984", "0.5045205", "0.50336945", "0.50291634", "0.5028965", "0.5026138", "0.50190884", "0.5009563", "0.5006316", "0.4999897", "0.4997499", "0.49957305", "0.498632", "0.4986059", "0.49798098", "0.49652696", "0.49539593", "0.49488544", "0.49374086", "0.49306524", "0.49291736", "0.49174708", "0.4913304", "0.49085706", "0.49084118", "0.49061373", "0.4903684", "0.49015695", "0.48959008", "0.48918122", "0.4883652", "0.48825744", "0.48808637", "0.4878468", "0.4877027", "0.48695135", "0.4867579", "0.4866395", "0.48616102", "0.4859384", "0.4854058", "0.4850273" ]
0.68893605
0
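A minimal sketch of calling the `list_user_group_memberships` operation documented in this row; the client construction comes from the standard OCI Python SDK, and the user OCID is a hypothetical placeholder:

import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Memberships are listed against the tenancy (root compartment); the optional
# user_id kwarg from the signature above narrows the result to one user.
memberships = identity.list_user_group_memberships(
    compartment_id=config["tenancy"],
    user_id="ocid1.user.oc1..example",  # hypothetical OCID; substitute a real one
)
for membership in memberships.data:
    print(membership.group_id, membership.lifecycle_state)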
Lists the users in your tenancy. You must specify your tenancy's OCID as the value for the compartment ID (remember that the tenancy is simply the root compartment). See Where to Get the Tenancy's OCID and User's OCID.
def list_users(self, compartment_id, **kwargs): resource_path = "/users" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "identity_provider_id", "external_identifier" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_users got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "identityProviderId": kwargs.get("identity_provider_id", missing), "externalIdentifier": kwargs.get("external_identifier", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[User]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[User]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_user_list(cs, args):\n _, users = cs.users.list()\n fields = ['user_id', 'username', 'email', 'realname', 'comment']\n utils.print_list(users, fields, sortby=args.sortby)", "def user_list(ctx):\n data = ctx.obj.get_all_users()\n output_json_data(data)", "def list(self, tenant_id=None):\n\n if not tenant_id:\n return self._list(\"/users\", \"users\")\n else:\n return self._list(\"/tenants/%s/users\" % tenant_id, \"users\")", "def list_users(self):\n raise NotImplementedError", "def users(self,org_id=None):\n if org_id is None:\n org_id = self.org_id\n return self.get('{}/orgs/{}/users'.format(ApiVersion.A1.value,org_id))", "def list_users():\n\n db_users = User.query.all()\n\n return render_template(\"list_users.html\", headline=\"Blogly Users\", users=db_users)", "def showORGusers(**kwargs):\n sessiontoken = kwargs['sessiontoken']\n ORG_ID = kwargs['ORG_ID']\n strCSPProdURL = kwargs['strCSPProdURL']\n jsonResponse = get_csp_users_json(strCSPProdURL, ORG_ID, sessiontoken)\n if jsonResponse == None:\n print(\"API Error\")\n sys.exit(1)\n\n users = jsonResponse['results']\n table = PrettyTable(['First Name', 'Last Name', 'User Name'])\n for i in users:\n table.add_row([i['user']['firstName'],i['user']['lastName'],i['user']['username']])\n print (table.get_string(sortby=\"Last Name\"))", "def display_users(cls):\n return cls.user_list", "def list_users():\n check_admin()\n results = User.query.order_by(-User.id)\n return render_template('user_list.html', users=results)", "def user_list():\n\n users = User.query.all()\n return render_template(\"/user_list.html\", users=users)", "def user_list():\n\n users = User.query.all()\n\n return render_template(\"user_list.html\", users=users)", "def user_list():\n\n users = User.query.all()\n \n return render_template(\"user_list.html\", users=users)", "async def list_users(self, ctx):\n \n path = \"Users\"\n headers = {\n 'accept': 'application/json'\n }\n response = send_request(method=\"get\", path=path, headers=headers)\n users = []\n for page in response:\n users.append(f\"**{page['Name']}**: ``{page['Id']}``\")\n log.debug(users)\n\n embed = embeds.make_embed(ctx=ctx, title=\"List Users\", image_url=\"https://emby.media/resources/logowhite_1881.png\")\n\n await LinePaginator.paginate([line for line in users], ctx, embed, restrict_to_user=ctx.author)", "def user_list():\n\n users = User.query.all()\n return render_template(\"user_list.html\", users=users)", "def user_list():\n\n users = User.query.all()\n return render_template(\"user_list.html\", users=users)", "def user_list():\n\n users = User.query.all()\n return render_template(\"user_list.html\", users=users)", "def user_list():\n\n users = User.query.all()\n return render_template(\"user_list.html\", users=users)", "def user_list():\n\n users = User.query.all()\n return render_template(\"user_list.html\", users=users)", "def user_list():\n\n users = User.query.all()\n return render_template(\"user_list.html\", users=users)", "def listUsers(self):\n return self._client.listUsers()", "def list_users(self):\n return self.get_admin(\"users\")", "def get_all_users():", "def users(self, predicate=None):\n \n if predicate is None:\n return self._get(\"users\").json()\n else:\n return self._get(\"users/search\", params={\"predicate\":predicate}).json()", "def list_users(self, kwargs):\n verbose = kwargs.get(\"verbose\", False)\n filter_ = kwargs.get(\"filter\", \"all\")\n\n if verbose:\n attributes = self.engine.all_attributes()\n else:\n attributes = [\"sAMAccountName\", 
\"objectClass\"]\n\n if filter_ == \"all\":\n results = self.engine.query(self.engine.USER_ALL_FILTER(), attributes)\n elif filter_ == \"spn\":\n results = self.engine.query(self.engine.USER_SPN_FILTER(), attributes)\n elif filter_ == \"enabled\":\n results = self.engine.query(self.engine.USER_ACCOUNT_CONTROL_FILTER_NEG(USER_ACCOUNT_CONTROL[\"ACCOUNTDISABLE\"]), attributes)\n elif filter_ == \"disabled\":\n results = self.engine.query(self.engine.USER_ACCOUNT_CONTROL_FILTER(USER_ACCOUNT_CONTROL[\"ACCOUNTDISABLE\"]), attributes)\n elif filter_ == \"locked\":\n results = self.engine.query(self.engine.USER_LOCKED_FILTER(), attributes)\n elif filter_ == \"nopasswordexpire\":\n results = self.engine.query(self.engine.USER_ACCOUNT_CONTROL_FILTER(USER_ACCOUNT_CONTROL[\"DONT_EXPIRE_PASSWORD\"]), attributes)\n elif filter_ == \"passwordexpired\":\n results = self.engine.query(self.engine.USER_ACCOUNT_CONTROL_FILTER(USER_ACCOUNT_CONTROL[\"PASSWORD_EXPIRED\"]), attributes)\n elif filter_ == \"nokrbpreauth\":\n results = self.engine.query(self.engine.USER_ACCOUNT_CONTROL_FILTER(USER_ACCOUNT_CONTROL[\"DONT_REQ_PREAUTH\"]), attributes)\n elif filter_ == \"reversible\":\n results = self.engine.query(self.engine.USER_ACCOUNT_CONTROL_FILTER(USER_ACCOUNT_CONTROL[\"ENCRYPTED_TEXT_PWD_ALLOWED\"]), attributes)\n else:\n return None\n\n self.display(results, verbose)", "def show_users():\n\n user = User(connection=connection, cursor=cursor)\n\n all_users = user.get_all_users()\n\n context = {\n 'all_users': all_users\n }\n\n return render_template('pages/tables/users.html', **context)", "def all_users():\n\n users = crud.get_users()\n\n return render_template('all_users.html', users=users)", "def users(self):\n from sagas.ofbiz.entities import OfEntity as e, oc\n rs=e().allUserLogin()\n for r in rs:\n print(r['userLoginId'])", "def user_list():\n if session['user_admin'] == False:\n abort(403)\n\n # Retrieve all Users\n sqa_sess = sqa_session()\n users = sqa_sess.query(User).all()\n\n return render_template('admin/user_list.html', users=users)", "def get_users(self):\n fields = ['name', ]\n return self.get_data(\"myUsers\", fields)", "def fetch_all_users():\n url = \"{}/workspace/{}/users\".format(V1_API_URL, WORKSPACE_ID)\n responses = requests.get(url, headers=HEADERS)\n return [\n {\n \"acronym\": user[\"name\"].lower(),\n \"clockify_id\": user[\"id\"],\n \"email\": user[\"email\"].lower(),\n }\n for user in responses.json()\n ]", "def list_users():\n users = User.query.order_by(User.last_name, User.first_name).all()\n return render_template('index.html', users=users)", "def users_list():\n users = User.query.all()\n serialized_objects = users_schema.dumps(users, sort_keys=True, indent=4)\n\n return Response(\n response=serialized_objects,\n status=http_status.OK,\n mimetype=\"application/json\"\n )", "def list_users(self):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/users\"\n _body = None\n _headers = {'Content-type': 'application/json',\n 'x-auth-token': self.cloud_admin_info['token_project']}\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\" no response from Server\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\n \"get user list Failed with status %s \" %\n response.status)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"users List : %s\")\n return output[\"users\"]", "def list_all_users():\n session_id = request.args.get('session-id', None)\n user_id = 
request.args.get('user-id', None)\n users_list = get_users_list()\n if check_authentication(session_id, user_id) and is_admin_user(user_id):\n return render_template('admin_area.html', user=user_id, session_id=session_id, users_list=users_list)\n else:\n return render_template('home.html', cars_list=get_cars_preview(), news_list=get_news_list(), authjs=False,\n preview_length=get_cars_preview().__len__(), del_session_cookie=True)", "def list(self, *args, **kwargs):\n users = User.objects.filter(is_superuser=False)\n return self.list_by(users, self.serializer_class)", "def show_users():\n users_list = []\n all_users = storage.all('User')\n for obj in all_users.values():\n users_list.append(obj.to_dict())\n return jsonify(users_list)", "def getInterestedUsers():", "def show_users(self):\n\n u = User(self.settings)\n users_list = u.find_users()\n\n # transform the results in a \"jsonifiable\"-form\n json_results = []\n for user in users_list:\n json_results.append(user.to_json())\n\n # return\n return json_results", "def list_users():\n return json_response(\n status=200,\n response_data={\n \"success\": True,\n \"data\": {\n \"users\": [user.serialize() for user in User.all()]\n }\n }\n )", "def get_users(self):\n return self.execute(TABELLE['users']['select']['all'])", "def user_list(request):\r\n params = request.params\r\n order = params.get('order', None)\r\n limit = params.get('limit', None)\r\n user_list = UserMgr.get_list(order=order, limit=limit)\r\n ret = {\r\n 'count': len(user_list),\r\n 'users': [dict(h) for h in user_list],\r\n }\r\n return _api_response(request, ret)", "def list_users(item):\n users = User.load_all_users(item)\n for user in users:\n print(user.username)", "def get_all_users():\n return jsonify(admin.get_all_users(current_app.scoped_session()))", "def user_list(request):\n if request.method == 'GET':\n user_info = UserData.objects.all()\n serializer = UserProfileSerializer(user_info, many=True)\n return JSONResponse(serializer.data)\n else:\n return JSONResponse('Using wrong api.', status=404)", "def get_users(self):\n users = []\n page = 1\n while not len(users) % 100:\n users += self._get('/users?{0}'.format(urllib.urlencode({'per_page': 100, 'page': page})))\n if not users:\n break\n page += 1\n return users", "def list_users(BrokerId=None, MaxResults=None, NextToken=None):\n pass", "def users():\n access_token = session['access_token']\n return \"%s\" % list_users(access_token)", "def users_list(request):\n users = User.objects.all()\n serializer = UserSerializer(users, many=True)\n return Response(serializer.data)", "def list_users(access_token):\n request_url = OKTA_URL + \"api/v1/users\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n group_request = requests.get(request_url, headers=headers).json()\n return group_request", "def view_list_users(self, user):\r\n return user.realm._users.keys()", "def users(self):\r\n return resource.Users(self)", "def get_users(self):\r\n\t\tlogger.debug(\"Fetch users\")\r\n\t\t\r\n\t\treturn login.get_users()", "def get_users():\n return Response(f\"{User.get_all_users()}\", 200, mimetype='text/plain')", "def KLP_Users_list(request):\n\n # get logged in user\n\n user = request.user\n if user.id:\n\n # check logged in user permissions, to get user list\n\n KLP_user_Perm(request.user, 'Users', None)\n\n # get all active(1) users list other than staff and super user order by username\n\n user_list = User.objects.filter(is_staff=0,\n is_superuser=0).order_by('username')\n\n # render show users form with users 
list\n\n return render_to_response('viewtemplates/show_users_form.html',\n {\n 'user_list': user_list,\n 'user': user,\n 'title': 'KLP Users',\n 'legend': 'Karnataka Learning Partnership',\n 'entry': 'Add',\n }, context_instance=RequestContext(request))\n else:\n\n # if user is not logged in redirect to login page\n\n return HttpResponseRedirect('/login/')", "def users(self, instance, **query):\n instance = self._get_resource(_instance.Instance, instance)\n return self._list(_user.User, instance_id=instance.id, **query)", "def user_list():\n users = User.objects.all()\n return {\"users\": users}", "def user_list(request_dict):\n users = User.query.all()\n users_list = list()\n for user in users:\n users_list.append(user)\n\n return JSONTools.user_list_reply(users_list)", "def list_users():\n\n users = User.query.order_by(\"last_name\").all()\n return render_template(\"users/user_list.html\", users=users)", "def users(self):\r\n return resources.Users(self)", "def get_list_of_users(request):\n admin_user_list = AdminUser.objects.order_by('pk')\n paginator = Paginator(admin_user_list, 1) # Show 3 admin per page\n\n page = request.GET.get('page')\n try:\n admin_list = paginator.page(page)\n except PageNotAnInteger:\n # If page is not an integer, deliver first page.\n admin_list = paginator.page(1)\n except EmptyPage:\n # If page is out of range (e.g. 9999), deliver last page of results.\n admin_list = paginator.page(paginator.num_pages)\n context = {'admin_list': admin_list, 'page': page}\n return render(request, 'users/list_of_users.html', context)", "def list_user():\n\tbegin = 0\n\tlength = 25\n\ttry:\n\t\tif request.json != None:\n\t\t\tbegin = int(request.json.get('begin', 0))\n\t\t\tlength = int(request.json.get('length', 25))\n\texcept:\n\t\tabort(403)\n\tif length > 100 :\n\t\tlength = 100\n\tuserList = User.list(begin, length)\n\tif userList == None:\n\t\tabort(400)\n\treturn jsonify({'users': map(lambda(e): e.output(), userList), 'begin': begin, 'length': len(userList)})", "def users_view():\n data = get_data()\n return [{'user_id': i, 'name': 'User {0}'.format(str(i))}\n for i in data.keys()]", "def user_list(server_object, client, address, command_args):\n\n\tmsg = \"\"\n\n\t#: Create a formatted string of all the users.\n\tfor usr in server_object.usrs.values():\n\t\tmsg += usr + '\\n'\n\n\tclient.send(msg.encode())", "def list_users(bookings):\n return[view_user(booking.user) for booking in bookings]", "def active_users(self, *args, **kwargs):\r\n return self._get('ActiveUsers', *args, **kwargs)", "def GetUsers(*args):\n return _XCAFDoc.XCAFDoc_ShapeTool_GetUsers(*args)", "def list_users(self, user=None):\n from expfactory.database.models import Participant\n\n participants = Participant.query.all()\n users = []\n for user in participants:\n users.append(self.print_user(user))\n return users", "def get_users(self, params=None):\n url = 'users'\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBodyList(resp, body['users'])", "def show_users():\r\n users = User.query.order_by(User.last_name,User.first_name).all()\r\n return render_template('list.html', users=users)", "def describe_users(AuthenticationType=None, MaxResults=None, NextToken=None):\n pass", "def get_users(filter, api_site_parameter, page = 1, pagesize = 30, sort = 'reputation'):\n path = \"users\"\n results = __fetch_results(path, api_site_parameter, inname= filter, page = page, 
pagesize = pagesize, sort = sort)\n return results", "def users(self):\n return self.get_data(\"users\")", "def get_user_list(self, connection):\n http = get_web_service(connection)\n try:\n req = http.request('GET', connection[\"url\"] + '/users/?_format=json')\n data = json.loads(req.data.decode('utf-8'))\n # print(json.dumps(data, indent=4, sort_keys=True))\n return data\n except urllib3.exceptions.HTTPError as e:\n print(\"Connection error\")\n print(e)", "def display_users():\n users = storage.all(\"User\").values()\n users_list = []\n for obj in users:\n users_list.append(obj.to_dict())\n return jsonify(users_list)", "def users(self, request, pk):\n store = Store.objects.get(id=pk)\n user_store_ids = UserStore.objects.filter(store=store).values('user__id')\n users = User.objects.filter(id__in=user_store_ids)\n return Response(UserSerializer(users, many=True).data)", "def listusers():\n\n try:\n users = User.query.order_by(User.email).all()\n click.echo(\n tabulate(\n [\n [u.username, u.email, \"admin\" if u.is_admin else None]\n for u in users\n ]\n )\n )\n except OperationalError:\n click.echo(\"Tabela de usuários inexistente...\")", "def list(cls, context, filters=None, limit=3000, marker=1,\n sort_key='id', sort_dir='asc'):\n #import pdb; pdb.set_trace()\n db_users = cls.dbapi.get_user_list(\n context, limit=limit, marker=marker, sort_key=sort_key,\n sort_dir=sort_dir, filters=filters)\n total = db_users.total\n return [User._from_db_object(cls(context), obj) for obj in db_users], total", "def list_users():\n\n search = request.args.get('q')\n\n if not search:\n users = User.query.all()\n else:\n users = User.query.filter(User.username.like(f\"%{search}%\")).all()\n\n return render_template('users/index.html', users=users)", "def get(self):\n return get_all_users()", "def get(self):\n return get_all_users()", "def get(self):\n return get_all_users()", "def get(self):\n return get_all_users()", "def users_get(self) -> Dict[str, list]:\n self.__logger.debug('Eva.users_get called')\n return self.__http_client.users_get()", "def users_list(request):\n users_filter = UserFilter(request.GET, queryset=CustomUser.objects.filter(is_admin=False), request=request)\n return render(request, 'users/list.html', {'filter': users_filter})", "def get(self):\n queries = {\"wildcard_properties\": []}\n\n fullname_query = request.args.get(\"fullName\", None)\n email_query = request.args.get(\"email\", None)\n\n if fullname_query:\n queries[\"fullName\"] = f\"TextP.startingWith('{fullname_query}')\"\n queries[\"wildcard_properties\"].append(\"fullName\")\n if email_query:\n queries[\"fullName\"] = f\"TextP.startingWith('{email_query}')\"\n queries[\"wildcard_properties\"].append(\"email\")\n\n users = User.filter(limit=10, **queries)\n response = UserListSchema(many=True).dumps(users).data\n\n return jsonify_response(json.loads(response), 200)", "def index(request):\n users = User.objects.filter(is_staff=False, is_active=True).order_by('username')\n return render(request, 'users/view_all_users.html',\n { 'users': users })", "def list(self , request,*args,**kwargs):\n return super(UsersViewset,self).list(request,args,kwargs)", "def users(self, site = None):\r\n uids = self.user_ids()\r\n if uids:\r\n users = Account._byID(uids, True, return_dict = False)\r\n return [self.ajax_user(u) for u in users]\r\n else:\r\n return ()", "def list_users():\n if not check_content_type():\n return jsonify(status=CONTENT_TYPE_ERROR)\n reqdata = request.json\n if not check_token(reqdata[\"token\"]):\n return 
jsonify(status=TOKEN_ERROR)\n users = db.session.query(User).all()\n resdata = []\n for user in users:\n resdata.append({\"id\" : user.id, \"login\" : user.login, \"password\" : user.hash_password})\n return jsonify(data=resdata, status=OK_STATUS)", "def get_users():\r\n page = request.args.get('page', 1, type=int)\r\n per_page = min(request.args.get('per_page', 10, type=int), 100)\r\n data = User.to_collection_dict(User.query, page, per_page, 'api.get_users')\r\n return jsonify(data)", "def get_client_users(departament_id):\n try:\n response = business.get_unpaginated_client_users(ObjectId(departament_id))\n except Exception as e:\n response = {\"status\": False, \"message\": \"Error {}\".format(str(e))}\n return Response(json.dumps(response, default=json_util.default), mimetype=\"application/json\")", "def get_users():\n\n return User.query.all() # [<User user_id=1 fname=Alice lname=Apple>]", "def list_users():\n api_request = apireq.APIRequest(request, 'client_schema')\n if api_request.is_invalid():\n return api_request.error_text, 400\n return json.dumps(user_management.list_new_users(), indent=4,\n default=json_util.default)", "def get_users(self):\n url = \"%s/api/v1/users\" % self.subdomain\n req = request.get(url, headers=self.api_headers)\n if request.ok(req):\n response_json = req.json()\n return response_json[\"users\"]\n else:\n return None", "def list_keystone_v3_users(self):\n LOG_OBJ.debug(\"List the users.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/users\"\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while creating user\")\n print (\"No response from Server while creating user\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\" Creating user Failed with status %s \"\n \"and error : %s\" % (response.status, response.data))\n print (\" Creating user Failed with status %s \" %\n response.status)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Users list : %s \" % output)\n print (\"Users list : %s \" % output)\n return output['users']", "def _get_users_list(self):\n return self.users['user_id'].tolist()", "def get_users():\n return db.fetch_users()", "def _list_users(self):\n users = fileIO.load_json(\"users.json\")\n print(\"The list of users is as follows:\")\n for i in users:\n print(users[i][\"name\"])\n self._list_user_settings(users)", "def get_users():\n users = User.query.order_by(User.id).all()\n users = {user.id: user.username for user in users}\n\n response = jsonify({\"success\": True, \"users\": users})\n\n return response", "def get_adusers(self, account_id, batch=False):\n path = 'act_%s/users' % account_id\n return self.make_request(path, 'GET', batch=batch)" ]
[ "0.7112592", "0.7053617", "0.7042246", "0.7014473", "0.6937669", "0.6858919", "0.68402475", "0.68375933", "0.6806012", "0.6805044", "0.67853475", "0.6784348", "0.6762646", "0.67508584", "0.67508584", "0.67508584", "0.67508584", "0.67508584", "0.67508584", "0.6744993", "0.67432755", "0.6739134", "0.66937053", "0.66748405", "0.6641492", "0.6636363", "0.6633672", "0.66131943", "0.65916145", "0.658011", "0.65737265", "0.6554758", "0.6554725", "0.65526575", "0.6547933", "0.65447533", "0.65410924", "0.6531776", "0.6515006", "0.6509187", "0.65079105", "0.6505472", "0.6503787", "0.6499015", "0.64763665", "0.6461489", "0.6456837", "0.64465296", "0.64454794", "0.64321566", "0.6429271", "0.64246815", "0.6420973", "0.6413232", "0.6405957", "0.64048725", "0.6397496", "0.6383211", "0.63763946", "0.63754797", "0.63735825", "0.637236", "0.63709366", "0.63596475", "0.63572025", "0.63567954", "0.63567257", "0.6354103", "0.63417315", "0.6335434", "0.6333472", "0.63300985", "0.632417", "0.6312086", "0.6306264", "0.6299951", "0.62945634", "0.6293213", "0.62803024", "0.62803024", "0.62803024", "0.62803024", "0.62788934", "0.62764806", "0.62521833", "0.6246643", "0.62448055", "0.62416637", "0.62411773", "0.6238739", "0.6230095", "0.62236273", "0.6220193", "0.6219111", "0.62170625", "0.6215737", "0.6214714", "0.6211191", "0.6210099", "0.6209128" ]
0.6938413
4
Lists the work requests in a compartment.
def list_work_requests(self, compartment_id, **kwargs): resource_path = "/workRequests" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "page", "limit", "resource_identifier" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_work_requests got unknown kwargs: {!r}".format(extra_kwargs)) query_params = { "compartmentId": compartment_id, "page": kwargs.get("page", missing), "limit": kwargs.get("limit", missing), "resourceIdentifier": kwargs.get("resource_identifier", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[WorkRequestSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[WorkRequestSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listRequests(self):\n reqmgr = RequestManagerImpl()\n retval = []\n for request in reqmgr.listRequests(self.endpoint):\n tmpRequest = Request()\n tmpRequest.setReqmgrUrl( self.endpoint )\n tmpRequest.setWorkflowName( request['request_name'] )\n retval.append( tmpRequest )\n return retval", "def worklist():\n from wheelcms_axle.content import Content\n pending = Content.objects.filter(state=\"pending\", node__isnull=False)\n return pending", "def list(self):\n return self.rpc.call(MsfRpcMethod.JobList)", "def ListWorkers(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def list(self, jobguid=\"\", executionparams=None):", "def list_requesters():\n from mephisto.core.local_database import LocalMephistoDB\n from tabulate import tabulate\n\n db = LocalMephistoDB()\n requesters = db.find_requesters()\n dict_requesters = [r.to_dict() for r in requesters]\n click.echo(tabulate(dict_requesters, headers=\"keys\"))", "def list(self, jobguid=\"\", executionparams=dict()):", "async def request_jobs_list(self, jobs_list_active_only: bool, *args, **kwargs) -> List[str]:\n # TODO: implement\n raise NotImplementedError('{} function \"request_jobs_list\" not implemented yet'.format(self.__class__.__name__))", "def get_jobs_list(self, response):\n pass", "def queryAllRequests(self):\n logging.info(\"Querying all requests at ReqMgr instance ...\")\n r = self.reqMgrService.getRequestNames()\n print \"Found %s requests:\" % len(r)\n for req in r:\n print req", "def list_tagging_work_requests(self, compartment_id, **kwargs):\n resource_path = \"/taggingWorkRequests\"\n method = \"GET\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"page\",\n \"limit\",\n \"resource_identifier\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"list_tagging_work_requests got unknown kwargs: {!r}\".format(extra_kwargs))\n\n query_params = {\n \"compartmentId\": compartment_id,\n \"page\": kwargs.get(\"page\", missing),\n \"limit\": kwargs.get(\"limit\", missing),\n \"resourceIdentifier\": kwargs.get(\"resource_identifier\", missing)\n }\n query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[TaggingWorkRequestSummary]\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n query_params=query_params,\n header_params=header_params,\n response_type=\"list[TaggingWorkRequestSummary]\")", "def list_resources(self):\n self.workersResponded = 0\n print(\"Main thread\", threading.get_ident())\n\n for addr in self.equipment_model.get_addr_list():\n self.equipment_model.reset_index(addr)\n self.equipment_model.set_connected(addr, 2)\n\n if not self.worker_pool.is_init():\n w = self.worker_pool.create_worker(addr)\n #Signals from worker\n w.signal_connected.connect(self.slot_connected)\n w.signal_not_connected.connect(self.slot_not_connected)\n 
w.signal_write_success.connect(self.parent.slot_write_success)\n w.signal_query_success.connect(self.parent.slot_query_success)\n w.signal_error.connect(self.parent.slot_error)\n\n self.next_connection(addr)\n \n self.worker_pool.set_init(True)", "def getRequestList(self):\n\n result = RequestsDAO().getRequests()\n mapped_result = []\n\n if not result:\n return jsonify(Error=\"NOT FOUND\"), 404\n\n else:\n for r in result:\n mapped_result.append(self.mapToUserRequestDict(r))\n\n return jsonify(TURN=mapped_result), 200", "def get_components(self, req):\n request_name = req.request\n\n names = []\n if(request_name == \"\"):\n comps = self.rt_proxy.get_available_components() # get all\n else:\n comps = self.rt_proxy.get_available_components(request_name)\n\n for c in comps:\n names.append(str(c))\n\n resp = ListComponentsResponse(names)\n\n return resp", "def get_job_list(self):\n return self.job_list", "def get_job_list(self):\n return self.job_list", "def list(self):\n self.background_scheduler.print_jobs()", "def workloads(self):\n return self._workloads", "def list_jobs(self):\n\n return dict(self._from_json(self.manage.run(override=\"list-jobs\")))", "def listJobs():\n logger.debug('[FLASKWEB /jobs] Request for job listing')\n jobs = db.getJobs(numdays=2)\n for job in jobs:\n job['time'] = datetime.datetime.strptime(job['time'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n if job['complete']:\n job['complete'] = datetime.datetime.strptime(job['complete'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n\n # Garbage Collect Orpahened jobs\n compiles = db.getCompiles()\n for compile in compiles:\n if compile['submit']:\n compile['submit'] = datetime.datetime.strptime(compile['submit'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n if compile['complete']:\n compile['complete'] = datetime.datetime.strptime(compile['complete'], db.TS_FMT).replace(tzinfo=db.timezone('UTC')).isoformat()\n # for c in compiles:\n # if c['uid'] not in compile_tasks.keys():\n # db.updateCompile(c['uid'], status='KILLED', done=True)\n # compiles = db.getCompiles()\n\n if request.headers['Accept'] == 'application/json':\n return jsonify(dict(LaunchJobs=jobs, CompilingJobs=compiles)), 200\n else:\n return render_template(\"jobs.html\", joblist=jobs, compilelist=compiles)", "def get_requests(self):\n\t\tself.last_processed = self.last_modified\n\t\treturn self.requests", "def work_devices(self):\n return self._work_devices", "def get_requests(self):\r\n\t\tself.last_processed = self.last_modified\r\n\t\treturn self.requests", "def list(self):\n return self.request(\"GET\")", "def give_workers_list(self):\n return self._workers", "def need_list():\n operation = request.args['operation']\n timestamp = int(time())\n id_session = request.remote_addr\n keys = ['operation', 'timestamp', 'id_session']\n values = [operation, timestamp, id_session]\n data = dict(zip(keys, values))\n msg = json.dumps(data)\n qm.send(cmdq, msg)\n return \"ok\"", "def list(self, request):\n jobs = Job.objects.all()\n\n city = self.request.query_params.get('city', None)\n state = self.request.query_params.get('state', None)\n\n # Support filtering jobs by user id\n job = self.request.query_params.get('user', None)\n if job is not None:\n jobs = jobs.filter(user=request.user)\n\n if city is not None:\n jobs = jobs.filter(city=city)\n\n if state is not None:\n jobs = jobs.filter(state=state)\n\n serializer = JobSerializer(\n jobs, many=True, context={'request': request})\n return Response(serializer.data)", 
"def _list(self, req):\n list_type = None\n status_prefix = 'STATUS LIST '\n if req:\n list_type = req.pop(0)\n if list_type and list_type == SPECTATE:\n games = self.server.get_unfinished_games()\n status_prefix += SPECTATE + ' '\n else:\n games = self.server.get_open_games()\n self.send_line(status_prefix + ' '.join(\n [str(g.id) for g in games if not self.game or self.game is not g]))", "def list(self, _request):\n serializer = TaskSerializer(instance=TASKS.values(), many=True)\n return response.Response(serializer.data)", "def get_jobs(self, *, params: Optional[dict] = None) -> \"resource_types.Jobs\":\n\n return communicator.Jobs(self.__requester).fetch(parameters=params)", "def ListJobs(self, token=None):\n return aff4.FACTORY.Open(self.CRON_JOBS_PATH, token=token).ListChildren()", "def view_requests(self):\n requests = self.caller.db.scene_requests or {}\n table = EvTable(\"{wName{n\", \"{wSummary{n\", width=78, border=\"cells\")\n for tup in requests.values():\n table.add_row(tup[0], tup[1])\n self.msg(str(table))", "async def get_jobs(): \n return mngr.getAllJobs()", "def list(self, request):\n\n to_approve_requests = Task.objects.filter(\n Q(approver_email=request.user.email),\n Q(state=Task.SUCCESS, review_output=True)\n | Q(state=Task.ERROR, review_output=True),\n ).order_by(\"-registered_on\")\n\n own_requests = Task.objects.filter(author_email=request.user.email).order_by(\n \"-registered_on\"\n )\n for request in own_requests:\n if request.state != Task.OUTPUT_RELEASED:\n request.output = None\n\n return Response(\n {\n \"to_approve_requests\": TaskSerializer(\n to_approve_requests, many=True\n ).data,\n \"own_requests\": TaskSerializer(own_requests, many=True).data,\n }\n )", "async def list_workers(self, *, option: ListApiOptions) -> ListApiResponse:\n try:\n reply = await self._client.get_all_worker_info(timeout=option.timeout)\n except DataSourceUnavailable:\n raise DataSourceUnavailable(GCS_QUERY_FAILURE_WARNING)\n\n result = []\n for message in reply.worker_table_data:\n data = protobuf_message_to_dict(\n message=message, fields_to_decode=[\"worker_id\", \"raylet_id\"]\n )\n data[\"worker_id\"] = data[\"worker_address\"][\"worker_id\"]\n data[\"node_id\"] = data[\"worker_address\"][\"raylet_id\"]\n data[\"ip\"] = data[\"worker_address\"][\"ip_address\"]\n data[\"start_time_ms\"] = int(data[\"start_time_ms\"])\n data[\"end_time_ms\"] = int(data[\"end_time_ms\"])\n data[\"worker_launch_time_ms\"] = int(data[\"worker_launch_time_ms\"])\n data[\"worker_launched_time_ms\"] = int(data[\"worker_launched_time_ms\"])\n result.append(data)\n\n num_after_truncation = len(result)\n result = self._filter(result, option.filters, WorkerState, option.detail)\n num_filtered = len(result)\n # Sort to make the output deterministic.\n result.sort(key=lambda entry: entry[\"worker_id\"])\n result = list(islice(result, option.limit))\n return ListApiResponse(\n result=result,\n total=reply.total,\n num_after_truncation=num_after_truncation,\n num_filtered=num_filtered,\n )", "def get_requests():\n return PortProvisionRequest.get()", "def jobs(self):\n return self.get_jobs()", "def get_requests(self):\n return self.dbsession.query(RequestModel).all()", "def get_request_journal(self):\n response = requests.get(self.requests_url)\n if response.status_code != http_client.OK:\n raise ValueError(response.text, response.status_code)\n response_body = json.loads(response.text)\n return response_body[\"requests\"]", "def workers(self):\n return self.worker_list", "def retrieve_requests(self, 
request=None):\n data = {}\n if request:\n data = request.dict()\n req = requests.put('{}/retrieve'.format(self._get_url()),\n params={'type': 'requests'}, data=json.dumps(data))\n if req.status_code == 200:\n try:\n return req.json()\n except ValueError:\n return []\n return []", "def _list(self, **kwargs):\n\n return self._make_request(**kwargs)", "def list_jobs():\n\n name_to_job_details = redis_controller.get_name_to_job_details()\n return list(name_to_job_details.values())", "def api_upwork_get_tasks(request):\n qs = models.JobUpwork.objects.filter(\n is_processed=False).values_list('url', flat=True)\n\n return JsonResponse(list(qs), safe=False)", "def request_workspace_list(self, request):\n \n user_id = request['user_id'] \n \n response = {'workspaces': []}\n response['workspaces'] = self.list_workspaces(user_id=user_id)\n \n return response", "def ListWaiters(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = 
self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n 
config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)", "def List(self, request, global_params=None):\n config = self.GetMethodConfig('List')\n return self._RunMethod(\n config, request, global_params=global_params)" ]
[ "0.67798686", "0.64472514", "0.6357527", "0.62949765", "0.61017513", "0.606693", "0.6015134", "0.59746355", "0.5971976", "0.59663254", "0.596574", "0.59381527", "0.5924295", "0.590886", "0.5905645", "0.5905645", "0.5890049", "0.5807364", "0.57771367", "0.57649094", "0.5733279", "0.57244754", "0.5722624", "0.5684172", "0.5675422", "0.56422913", "0.5636008", "0.5620735", "0.56052774", "0.5591213", "0.55725396", "0.5553027", "0.55387765", "0.5523543", "0.5516618", "0.55018735", "0.54849076", "0.5478154", "0.5472161", "0.54579765", "0.5445692", "0.5433262", "0.5426168", "0.5407309", "0.5404097", "0.5403351", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343", "0.5402343" ]
0.69024056
0
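The record above pairs a docstring-style query with the matching OCI Python SDK method body. For context, a minimal usage sketch follows (not part of the dataset record); it assumes the standard `oci` SDK with a config file at the default location, and the compartment OCID shown is a placeholder:

import oci

# Assumed setup: ~/.oci/config with a DEFAULT profile (an assumption, not from the record).
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Any compartment OCID works here; the tenancy OCID doubles as the root compartment.
compartment_id = config["tenancy"]

# list_work_requests returns a Response whose .data is a list[WorkRequestSummary];
# oci.pagination follows the opc-next-page token across pages automatically.
work_requests = oci.pagination.list_call_get_all_results(
    identity.list_work_requests, compartment_id
).data
for wr in work_requests:
    print(wr.id, wr.operation_type, wr.status)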
Move the compartment to a different parent compartment in the same tenancy. When you move a compartment, all its contents (subcompartments and resources) are moved with it. Note that the `CompartmentId` that you specify in the path is the compartment that you want to move.
def move_compartment(self, compartment_id, move_compartment_details, **kwargs): resource_path = "/compartments/{compartmentId}/actions/moveCompartment" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match", "opc_request_id", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "move_compartment got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "compartmentId": compartment_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing), "opc-request-id": kwargs.get("opc_request_id", missing), "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=move_compartment_details) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=move_compartment_details)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_patch_project_move_child(self):\n new_category = self.make_project(\n 'NewCategory', PROJECT_TYPE_CATEGORY, self.category\n )\n self.make_assignment(new_category, self.user, self.role_owner)\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.category.sodar_uuid},\n )\n patch_data = {'parent': str(new_category.sodar_uuid)}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n self.assertEqual(response.status_code, 400, msg=response.content)", "def move_by(self, path, env=None):\n env = self._find_env(env)\n old_pos = self.position(env)\n new_pos = [p + c for p, c in zip(old_pos, path)]\n env.move_agent(self, new_pos)", "def move_to_node(self,node):\n path=self.get_path(self.current_node,node)\n self.move_to(path)", "def test_patch_project_move_root(self):\n new_category = self.make_project(\n 'NewCategory', PROJECT_TYPE_CATEGORY, None\n )\n new_owner = self.make_user('new_owner')\n self.make_assignment(new_category, new_owner, self.role_owner)\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n patch_data = {'parent': ''}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n self.assertEqual(response.status_code, 200, msg=response.content)", "def move_node(self, node_id, new_parent_id, connection=None):\n\n connection = connection or self.engine.connect()\n\n self.detach_node(node_id=node_id, connection=connection)\n self.attach_node(node_id=node_id, new_parent_id=new_parent_id, connection=connection)", "def reparent(self, obj, parent):\n return self.update(obj, parent=parent)", "def move(self, new_parent):\n\n new_parent.children.append(self)\n new_parent.rebuild_children_dict()\n self.delete()", "def setCompartment(self, *args):\n return _libsbml.CompartmentReference_setCompartment(self, *args)", "def move(self, from_id, to_id):\n return self._client.post(\n url=self._client.get_full_url(\n self.get_path(\n 'children', realm=self._realm_name, group_id=to_id\n )\n ),\n data=json.dumps({\n 'id': from_id\n })\n )", "def _swap_with_parent(self) -> bool:\n if self.parent is None:\n return False\n if self.parent.get_chainwork() >= self.get_chainwork():\n return False\n self.print_error(\"swap\", self.forkpoint, self.parent.forkpoint) #Calvin: We should see in the logs when a swap happens\n parent_branch_size = self.parent.height() - self.forkpoint + 1\n forkpoint = self.forkpoint # type: Optional[int]\n parent = self.parent # type: Optional[Blockchain]\n child_old_id = self.get_id()\n parent_old_id = parent.get_id()\n # swap files\n # child takes parent's name\n # parent's new name will be something new (not child's old name) Calvin: This makes sense, otherwise the hash would be invalid\n self.assert_headers_file_available(self.path())\n child_old_name = self.path()\n with open(self.path(), 'rb') as f:\n my_data = f.read()\n self.assert_headers_file_available(parent.path())\n assert forkpoint > parent.forkpoint, (f\"forkpoint of parent chain ({parent.forkpoint}) \"\n f\"should be at lower height than children's ({forkpoint})\")\n with open(parent.path(), 'rb') as f:\n # Calvin: forkpoint - parent.forkpoint is technically the height of this blockchain, why not use height method?\n # Calvin: Answer: There is a main_chain, this uses the blockchain with the greatest chainwork as the main_chain\n f.seek((forkpoint - parent.forkpoint)*HEADER_SIZE) # Calvin: This excludes the forkpoint_hash, why? Technically the forkpoints have the same first header! 
Saves a few bytes of writing.\n parent_data = f.read(parent_branch_size*HEADER_SIZE)\n self.write(parent_data, 0) # Calvin: writes the parents block data into this (current child)\n parent.write(my_data, (forkpoint - parent.forkpoint)*HEADER_SIZE) # Calvin: writes the child's block data into parents file\n # swap parameters # Calvin: Swaps the childs parents to be the parent's parent and the parent's parent is now the previous child\n self.parent, parent.parent = parent.parent, self # type: Optional[Blockchain], Optional[Blockchain]\n self.forkpoint, parent.forkpoint = parent.forkpoint, self.forkpoint\n self._forkpoint_hash, parent._forkpoint_hash = parent._forkpoint_hash, hash_raw_header(bh2u(parent_data[:HEADER_SIZE])) # Swaps the forkpoint_hash values\n self._prev_hash, parent._prev_hash = parent._prev_hash, self._prev_hash\n # parent's new name\n os.replace(child_old_name, parent.path())\n self.update_size()\n parent.update_size()\n # update pointers\n blockchains.pop(child_old_id, None)\n blockchains.pop(parent_old_id, None)\n blockchains[self.get_id()] = self\n blockchains[parent.get_id()] = parent\n return True", "def test_clashing_without_children(self):\n old_url = '/old-url/'\n parent = RouteFactory.create(url=old_url)\n child = ChildRouteFactory.create(slug='leaf', parent=parent)\n occupied_url = '/occupied/'\n RouteFactory.create(url=occupied_url)\n\n with transaction.atomic():\n with self.assertRaises(IntegrityError):\n parent.move_to(occupied_url, move_children=False)\n\n self.assertEqual(parent.url, old_url)\n child.refresh_from_db()\n self.assertEqual(child.url, '/old-url/leaf/')", "def move_to(i3: i3ipc.Connection, workspace: int):\n i3.command(f\"move container to workspace number {workspace}\")", "def move(self, path):\n self.current_location = (path[1][1], path[1][0])", "def _move(self, id: str, parent_id: str) -> MoveFolderResponseModel:\n endpoint: ApiEndpoint = self.api_endpoint_group.move\n request_obj: MoveFolderRequestModel = endpoint.load_request(parent_id=parent_id)\n response: MoveFolderResponseModel = endpoint.perform_request(\n http=self.auth.http,\n request_obj=request_obj,\n id=id,\n )\n return response", "def move_node(self, job):\n transfer = Transfer(job.jobInfo)\n target = transfer.target\n direction = transfer.direction\n result = None\n # Check uris\n check_uri(target, self.sm, shouldExist = True)\n checks = check_uri(direction, self.sm, shouldExist = False)\n null = direction.endswith(NULL)\n # Retrieve existing record\n node = self.sm.get_node(target)[0]['node']\n node = self.nf.get_node(node)\n # Check whether endpoint is reserved URI\n if null: self.nm.delete_node(target)\n if direction.endswith(AUTO): \n direction = generate_uri(direction)\n result = {'destination': direction}\n if not(null):\n # Check if endpoint is a container\n if checks['exists'] and checks['container']: direction += target[target.rfind('/'):]\n # Change identifier\n node.set_uri(direction)\n # Update db\n self.sm.update_node(target, direction, node.tostring())\n # Check if target is a container\n if isinstance(node, ContainerNode):\n # Move children\n for child in self.sm.get_children(target):\n node = self.nf.get_node(self.sm.get_node(child)[0]['node'])\n if null:\n self.nm.delete_node(node.uri)\n else:\n new_uri = node.uri.replace(target, direction)\n node.set_uri(new_uri)\n self.sm.update_node(child, new_uri, node.tostring())\n return result", "def move(name, other, newname=None):", "def removeCompartment(self, *args):\n return _libsbml.Model_removeCompartment(self, 
*args)", "def compartment_id(self, compartment_id):\n self._compartment_id = compartment_id", "def compartment_id(self, compartment_id):\n self._compartment_id = compartment_id", "def move_to_collection(self, collection):\n if self.collection == collection:\n return\n\n if collection is None:\n raise ValidationError(\n f\"Entity {self}({self.pk}) can only be moved to another container.\"\n )\n self.collection = collection\n self.tags = collection.tags\n self.permission_group = collection.permission_group\n self.save(update_fields=[\"collection\", \"tags\", \"permission_group\"])\n self.data.update(\n permission_group=collection.permission_group,\n collection_id=collection.pk,\n tags=collection.tags,\n )", "async def move(self, context_path: str):\n NewContext = CONTEXTS[context_path]\n new_context = NewContext(self.session)\n await self.leave()\n await type(self).condition.mark_as_done(self)\n self.session.context = new_context\n await new_context.enter()\n await self.send_messages()", "def reparentTo(self, objnp):\n\n # if isinstance(objnp, cm.CollisionModel):\n # self.__objcm.objnp.reparentTo(objnp.objnp)\n # elif isinstance(objnp, NodePath):\n # self.__objcm.objnp.reparentTo(objnp)\n # else:\n # print(\"NodePath.reparent_to() argument 1 must be environment.CollisionModel or panda3d.core.NodePath\")\n if objnp is not base.render:\n print(\"This bullet dynamics model doesnt support to plot to non base.render nodes!\")\n raise ValueError(\"Value Error!\")\n else:\n self.__objcm.objnp.reparentTo(objnp)\n # self.setMat(self.__objcm.getMat())\n # print(self.objbdb.gethomomat())\n self.__objcm.objnp.setMat(base.pg.np4ToMat4(self.objbdb.get_homomat()))", "def test_patch_project_move(self):\n self.assertEqual(\n self.project.full_title,\n self.category.title + ' / ' + self.project.title,\n )\n\n new_category = self.make_project(\n 'NewCategory', PROJECT_TYPE_CATEGORY, None\n )\n self.make_assignment(new_category, self.user_owner_cat, self.role_owner)\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n patch_data = {'parent': str(new_category.sodar_uuid)}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n\n self.assertEqual(response.status_code, 200, msg=response.content)\n self.project.refresh_from_db()\n model_dict = model_to_dict(self.project)\n self.assertEqual(model_dict['parent'], new_category.pk)\n owners = [a.user for a in self.project.get_owners()]\n self.assertIn(self.user_owner_cat, owners)\n self.assertIn(self.user_owner, owners)\n\n # Assert child project full title update\n self.assertEqual(\n self.project.full_title,\n new_category.title + ' / ' + self.project.title,\n )\n self.assertEqual(\n json.loads(response.content)['parent'], str(new_category.sodar_uuid)\n )", "def setCompartment(self, *args):\n return _libsbml.Reaction_setCompartment(self, *args)", "def test_deletionDisownsParent(self):\n port = self.port(store=self.store, portNumber=self.lowPortNumber, factory=self.factory)\n port.setServiceParent(self.store)\n port.deleteFromStore()\n service = IServiceCollection(self.store)\n self.failIfIn(port, list(service))", "def move(self, target):\n if target.relto(self):\n raise error.EINVAL(target, \"cannot move path into a subdirectory of itself\")\n try:\n self.rename(target)\n except error.EXDEV: # invalid cross-device link\n self.copy(target)\n self.remove()", "def move_to(self, mobject_or_point):\n layer_center = self.surrounding_rectangle.get_center()\n if isinstance(mobject_or_point, Mobject):\n 
target_center = mobject_or_point.get_center() \n else:\n target_center = mobject_or_point\n\n self.shift(target_center - layer_center)", "def relocate(self, source, destination):\n destination_dir = os.path.dirname(destination)\n if not os.path.exists(destination_dir):\n self.subdir(destination_dir)\n os.rename(source, destination)", "def relocate(source, destination, move=False):\n venv = api.VirtualEnvironment(source)\n if not move:\n\n venv.relocate(destination)\n return None\n\n venv.move(destination)\n return None", "def move_to_zone(self, zone):\n if isinstance(zone, basestring):\n zone = self.project.get_flow().get_zone(zone)\n zone.add_item(self)", "def move_vertice(self, vertice, comp):\n self.compartimentos[self.states[vertice]].remove(vertice)\n self.states[vertice] = comp\n self.compartimentos[comp].add(vertice)", "def put_object(self, parent_object, connection_name, **data):\n assert self.access_token, \"Write operations require an access token\"\n return self.request(\n \"{0}/{1}/{2}\".format(self.version, parent_object, connection_name),\n post_args=data,\n method=\"POST\",\n )", "def move_to_element(self, elem):\n ActionChains(self.driver).move_to_element(elem).perform()", "def _move_to_inserted_directory(file_path: str):\n parts = list(Path(file_path).parts)\n parts.insert(-1, 'inserted')\n move(file_path, str(Path(*parts)))", "def svn_client_move5(svn_commit_info_t_commit_info_p, apr_array_header_t_src_paths, char_dst_path, svn_boolean_t_force, svn_boolean_t_move_as_child, svn_boolean_t_make_parents, apr_hash_t_revprop_table, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass", "def move_buildings(self):", "def change_tag_namespace_compartment(self, tag_namespace_id, change_tag_namespace_compartment_detail, **kwargs):\n resource_path = \"/tagNamespaces/{tagNamespaceId}/actions/changeCompartment\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"change_tag_namespace_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagNamespaceId\": tag_namespace_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=change_tag_namespace_compartment_detail)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n 
body=change_tag_namespace_compartment_detail)", "def mv(cur_path, new_path):\n cur_abs = navigate.get_abs_path(cur_path)\n new_abs = navigate.get_abs_path(new_path)\n cur_parent, cur_name = navigate.split_path(cur_abs)\n new_parent, new_name = navigate.split_path(new_abs)\n up_parent, up_name = navigate.split_path(new_parent)\n if not db.file_exists(cur_parent, cur_name):\n print \"Error: '\" + cur_name + \"' does not exist.\"\n elif up_parent is not None and not db.directory_exists(up_parent, up_name):\n print \"Error: '\" + new_parent + \"' is not a valid directory.\"\n elif db.file_exists(new_parent, new_name):\n print \"Error: '\" + new_name + \"' already exists at that location.\"\n else:\n cur_dbox_path = '/' + cur_name\n new_dbox_path = '/' + new_name\n access_token = db.get_access_to_file(cur_parent, cur_name)\n client = dropbox.client.DropboxClient(access_token)\n client.file_move(cur_dbox_path, new_dbox_path)\n db.move_file(cur_parent, cur_name, new_parent, new_name)", "def setCompartment(self, *args):\n return _libsbml.Species_setCompartment(self, *args)", "def put_object(self, parent_object, connection_name, **data):\n assert self.access_token, \"Write operations require an access token\"\n return self.request(parent_object + \"/\" + connection_name, post_args=data)", "def test_clashing_with_children(self):\n old_url = '/old-url/'\n parent = RouteFactory.create(url=old_url)\n child = ChildRouteFactory.create(slug='leaf', parent=parent)\n RouteFactory.create(url='/occupied/leaf/')\n\n with transaction.atomic():\n with self.assertRaises(IntegrityError):\n parent.move_to('/occupied/', move_children=True)\n\n self.assertEqual(parent.url, old_url)\n child.refresh_from_db()\n self.assertEqual(child.url, '/old-url/leaf/')", "def del_object_from_parent(self):\n if self.parent:\n self.parent.objects.pop(self.ref)", "def svn_client_move(svn_client_commit_info_t_commit_info_p, char_src_path, svn_opt_revision_t_src_revision, char_dst_path, svn_boolean_t_force, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass", "def move(self, name, source, dest):\n self.m.path.assert_absolute(source)\n self.m.path.assert_absolute(dest)\n self._run(name, ['move', source, dest])\n self.m.path.mock_copy_paths(source, dest)\n self.m.path.mock_remove_paths(source)", "def update_compartment(self, compartment_id, update_compartment_details, **kwargs):\n resource_path = \"/compartments/{compartmentId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n 
return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_compartment_details,\n response_type=\"Compartment\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_compartment_details,\n response_type=\"Compartment\")", "def chdir(self, path):\n if not path:\n path = \"/\"\n elif not path.endswith(\"/\"):\n path = \"{}/\".format(path)\n res = self.get_cdmi(path)\n if res.ok():\n cdmi_info = res.json()\n # Check that object is a container\n if not cdmi_info[\"objectType\"] == CDMI_CONTAINER:\n return Response(406, u\"{0} isn't a container\".format(path))\n if cdmi_info[\"parentURI\"] == \"/\" and cdmi_info[\"objectName\"] == \"Home\":\n # root\n self._pwd = \"/\"\n else:\n self._pwd = \"{}{}\".format(\n cdmi_info[\"parentURI\"], cdmi_info[\"objectName\"]\n )\n return Response(0, \"ok\")\n else:\n return res", "def move(self, name, new_name, user):\n full_path = self.path(name)\n new_path = self.path(new_name)\n if self.is_locked(full_path):\n self.lock(full_path, user)\n try:\n self.__volume.copy2(full_path, new_path)\n finally:\n self.unlock(full_path, user)", "def test_change_parent_location(self):\n pass", "def move(cls, entry, collection1, collection2):\n entry.save_to_mongo(collection=collection2)\n Database.delete_one(collection=collection1, query={'id': entry._id})", "def _move_order_from_to(self, bo, source='trades', destination='history'):\n ticket = bo.ticket\n log.info(\"bo_blotter: bo#%s: move from '%s' to '%s'\" % (ticket, source, destination))\n self._d_orders[destination][ticket]= self._d_orders[source][ticket]\n del self._d_orders[source][ticket]", "def removeCompartmentReference(self, *args):\n return _libsbml.MultiCompartmentPlugin_removeCompartmentReference(self, *args)", "def move(self, dest_tile, swapping=False, arrived=False):\n # Standard Move action. 
GameInstance will be notified\n if (not dest_tile.blocked or swapping) and not arrived:\n self.game.remove_tile_content(self)\n self.x = dest_tile.x\n self.y = dest_tile.y\n self.game.add_tile_content(self)\n\n # Wrapped pop in a try as player won't have a path\n try:\n self.path.pop(0)\n except IndexError:\n pass\n\n # Reached end of path or was player directed.\n # Will now use target object/resolve request\n else:\n if self.target_job:\n self.target_job.init_request(self)\n else:\n # Assumes one usable object per tile\n appliances = [c for c in dest_tile.contents if getattr(c, \"use\", None)]\n if appliances:\n self.use_item(appliances[0])", "def move(self, hex):\n # If current has nest, set nest location to unoccupied\n if self.hex is not None:\n self.hex.unoccupied()\n # Set nest site to new hexagon\n self.hex = hex\n # Update occupancy of new hexagon\n self.hex.occupied()\n self.history = []", "def move_to_root(self, group_id, name):\n return self._client.post(\n url=self._client.get_full_url(\n self.get_path('collection', realm=self._realm_name)\n ),\n data=json.dumps({\n 'id': group_id,\n 'name': name\n })\n )", "def move_to_parent(self, path):\n if path == self.dir_to_check:\n print (' Parent directory out of scope!')\n return path\n else:\n dir_name = os.path.dirname(path)\n return dir_name", "def move_cell(self, cell: Cell):\r\n assert isinstance(cell, Cell)\r\n comp_block = cell.block.fm.blockA if cell.block.name == \"B\" else cell.block.fm.blockB\r\n # lock cell\r\n cell.lock()\r\n # Adjust gains and yank cells before the move\r\n self.__adjust_gains_before_move(cell)\r\n # Remove cell from this block\r\n self.remove_cell(cell)\r\n # Add cell to complementary block\r\n comp_block.add_cell(cell)\r\n # Adjust the distribution of this cell's nets to reflect the move\r\n cell.adjust_net_distribution()\r\n # Adjust gains and yank cells after the move\r\n self.__adjust_gains_after_move(cell)", "def recover_compartment(self, compartment_id, **kwargs):\n resource_path = \"/compartments/{compartmentId}/actions/recoverCompartment\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\",\n \"opc_request_id\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"recover_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing),\n \"opc-request-id\": kwargs.get(\"opc_request_id\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n 
path_params=path_params,\n header_params=header_params,\n response_type=\"Compartment\")", "def _move(self, pos):\n self.put_par(\"drive\", pos)", "def assign_parent(self, ip, port, father_address):\n node = self.find_node(ip, port)\n parent_node = self.find_node(father_address[0], father_address[1])\n\n if node is None:\n raise ValueError(\"Node is not registered\")\n\n if parent_node is None:\n raise ValueError(\"Parent Node is not registered\")\n\n if node.parent is not None:\n # TODO: we still don't know what to do in this case\n node.remove_from_parent()\n pass\n\n # add to child\n node.set_parent(parent_node)\n # add to parent\n parent_node.add_child(node)\n pass", "def set_parent(self, child, parent):\n parents = cmds.listConnections(\"%s.parent\" % child, plugs=True, source=True)\n if parents:\n # there is only one parent at a time\n cmds.disconnectAttr(\"%s.parent\" % child, \"%s\" % parents[0])\n if parent:\n cmds.connectAttr(\"%s.parent\" % child, \"%s.children\" % parent, force=True, nextAvailable=True)", "def moveEntity(self):\n x = self.going_east - self.going_west\n y = self.going_north - self.going_south\n if x and y:\n x /= 2 ** 0.5\n y /= 2 ** 0.5\n direction = geometry.Vector(x, y)\n self.post(models.events.MoveEntityRequest(self._entity_id, direction))", "def reparent(self, dim, child, new_parent):\n cls = dim.closure_table\n\n # Detach child\n self.execute(\n 'DELETE FROM %s '\n 'WHERE child IN (SELECT child FROM %s where parent = ?) '\n 'AND parent NOT IN (SELECT child FROM %s WHERE parent = ?)' % (\n cls, cls, cls\n ),\n (child, child)\n )\n\n # Set new parent\n self.execute(\n 'SELECT supertree.parent, subtree.child, '\n 'supertree.depth + subtree.depth + 1 '\n 'FROM %s AS supertree JOIN %s AS subtree '\n 'WHERE subtree.parent = ? '\n 'AND supertree.child = ?' 
% (cls, cls),\n (child, new_parent)\n )\n values = list(self.cursor)\n self.cursor.executemany(\n 'INSERT INTO %s (parent, child, depth) values (?, ?, ?)' % cls,\n values\n )", "def mv(self, src_path, dst_path):\n try:\n postdata = codecs.encode(json.dumps({ 'src': src_path, 'dst': dst_path }), 'utf-8')\n self._urlopen('/api/fileops/move', postdata).read()\n except HTTPError as err:\n raise RuntimeError(\"Unable to move '{}' to '{}'\".format(src_path, dst_path))", "def svn_client_move4(svn_commit_info_t_commit_info_p, char_src_path, char_dst_path, svn_boolean_t_force, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass", "def move_to(self, x, y):\n self._impl.move_to(x, y)", "def moveTo(self, newFolder):\n moveURI = self.metaData.getLink(\"move\")\n parent = self.metaData.getLinkIndex('parent')\n\n assert parent != -1\n assert moveURI is not None\n if not hasattr(newFolder, \"metaData\"): raise TypeError(\"Your newFolder does not have a metaData property\")\n if not hasattr(newFolder, \"selfLink\"): raise TypeError(\"Your newFolder does not have a self link\")\n\n self.metaData.jsonObj['links'][parent] = {'href' : newFolder.selfLink, 'rel' : 'parent'}\n header = self._baseHeader.copy()\n header['Content-Type'] = \"application/vnd.huddle.data+json\"\n response = self._adapter.putRequest(moveURI,header, json.dumps(self.metaData.jsonObj))\n\n newLink = self._client.getUrlFromHeaderLink(response['Headers']['link'])\n return Folder(self._client, newLink)", "def svn_client_move2(svn_client_commit_info_t_commit_info_p, char_src_path, char_dst_path, svn_boolean_t_force, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass", "def setCompartmentReference(self, *args):\n return _libsbml.MultiSimpleSpeciesReferencePlugin_setCompartmentReference(self, *args)", "def setCompartment(self, *args):\n return _libsbml.QualitativeSpecies_setCompartment(self, *args)", "def move_to_by_path(self, new_relative_path, retain_editor_and_modified=False):\n target_folder = Folder(self.context)\n target_folder.set_property(\"ServerRelativePath\", SPResPath(new_relative_path))\n\n def _move_folder():\n MoveCopyUtil.move_folder_by_path(self.context, self._build_full_url(self.server_relative_path.DecodedUrl),\n self._build_full_url(new_relative_path),\n MoveCopyOptions(\n retain_editor_and_modified_on_move=retain_editor_and_modified))\n\n self.ensure_property(\"ServerRelativePath\", _move_folder)\n return target_folder", "def _move_mount(original_root, mount_entry):\n target = mount_entry.target[len(original_root):]\n _LOGGER.info('Mount move %r => %s', mount_entry, target)\n\n try:\n fs_linux.mount_move(target, mount_entry.target)\n except FileNotFoundError as err:\n _LOGGER.warning('missing mountpoint %r: %s',\n mount_entry.target, err)", "def move_to(self,path):\n #print path\n for index,edge in enumerate(path):\n #print edge\n edge_pattern='edge_(?P<begin_node>\\w+)_(?P<end_node>\\w+)_(?P<iterator>\\w+)'\n match=re.match(edge_pattern,edge)\n begin_node=match.groupdict()['begin_node']\n end_node=match.groupdict()['end_node']\n print(\"moving {0} -> {1}\".format(begin_node,end_node))\n #print self.data\n self.data=self.__dict__[edge](self.data)\n #print self.data\n self.current_node=match.groupdict()['end_node']\n self.state=[0 for i in range(len(self.node_names))]\n position=self.node_names.index(self.current_node)\n self.state[position]=1\n self.state_matrix=np.matrix(self.state).T\n #print self.state\n #print self.current_node", "def 
test_setServiceParent(self):\n port = self.port(store=self.store)\n port.setServiceParent(self.store)\n self.failUnlessIn(port, list(IService(self.store)))", "def test_object_move(self):\n self.assertTrue(self.obj1 in self.room1.contents)\n # use move_to hook\n self.obj1.move_to(self.room2)\n self.assertFalse(self.obj1 in self.room1.contents)\n self.assertTrue(self.obj1 in self.room2.contents)\n\n # move back via direct setting of .location\n self.obj1.location = self.room1\n self.assertTrue(self.obj1 in self.room1.contents)\n self.assertFalse(self.obj1 in self.room2.contents)", "def RestoreCase(dirc, dest):\n subprocess.call(['cp', '-r', dirc, dest])", "def test_disownServiceParent(self):\n port = self.port(store=self.store)\n port.setServiceParent(self.store)\n port.disownServiceParent()\n self.failIfIn(port, list(IService(self.store)))", "def move_rel(self):\n pass", "def move_container(i3, name, monitor, container_id=None):\n i3.command(f'move container to workspace {name}')\n i3.command(f'workspace {name}, move workspace to output {monitor}')\n if container_id:\n i3.command(f'[con_id=\"{container_id}\"] focus')", "def move_item(dataobj_id, new_path):\n file = get_by_id(dataobj_id)\n data_dir = get_data_dir()\n out_dir = (data_dir / new_path).resolve()\n if not file:\n raise FileNotFoundError\n if (out_dir / file.parts[-1]).exists():\n raise FileExistsError\n elif is_relative_to(out_dir, data_dir) and out_dir.exists(): # check file isn't\n return shutil.move(str(file), f\"{get_data_dir()}/{new_path}/\")\n return False", "def move_entity(self, ent, dest):\n\n # remove entity from its current tile\n self.tiles[ent.position[x]][ent.position[y]].remove_entity(ent)\n # append to destination tile\n self.tiles[dest[x]][dest[y]].add_entity(ent)", "def _collab_swap(self, sender, amount=0):\n\n self.result = self.collab.swap(self.swap_params).interpret(\n storage=self.collab_storage, sender=sender, amount=amount)\n\n assert len(self.result.operations) == 1\n assert self.result.operations[0]['parameters']['entrypoint'] == 'swap'\n self.assertEqual(\n self.result.operations[0]['destination'],\n self.collab_storage['marketplaceAddress']\n )", "def move(model, origin, dest):\n model.move(origin, dest)", "def move(model, origin, dest):\n model.move(origin, dest)", "def move_to(self, position, env=None):\n\n env = self._find_env(env)\n env.move_agent(self, position)", "def setCompartmentId(self, *args):\n return _libsbml.CompartmentGlyph_setCompartmentId(self, *args)", "def addCompartment(self, vol=1, comp_id=\"\"):\n\n c1 = self.model.createCompartment()\n self.check(c1, \"create compartment\")\n if len(comp_id) == 0:\n comp_id = \"c\" + str(self.model.getNumCompartments())\n self.check(c1.setId(comp_id), \"set compartment id\")\n self.check(c1.setConstant(True), 'set compartment \"constant\"')\n self.check(c1.setSpatialDimensions(3), \"set compartment dimensions\")\n\n self.check(c1.setSize(vol), 'set compartment \"size\"')\n self.check(c1.setUnits(\"litre\"), \"set compartment size units\")\n return c1", "def _assign(self, source):\n if self._parent:\n oldZincRegion = self._zincRegion\n zincSiblingAfter = oldZincRegion.getNextSibling()\n else:\n oldZincRegion = None\n zincSiblingAfter = None\n self.freeContents()\n self._name = source._name\n # self._parent = source._parent should not be changed\n self._children = source._children\n for child in self._children:\n child._parent = self\n self._modelSources = source._modelSources\n self._zincRegion = source._zincRegion\n # 
self._ancestorModelSourceCreated is unchanged\n if self._parent:\n self._parent._zincRegion.removeChild(oldZincRegion)\n self._parent._zincRegion.insertChildBefore(self._zincRegion, zincSiblingAfter)", "def move_to(self, x, y):\r\n self.__current_room = x, y", "def testParentageChange(self):\n cc2 = cdl_convert.ColorCorrectionRef('001')\n cc2.parent = 'bob'\n mr2 = cdl_convert.MediaRef('sdhjd.dpx')\n mr2.parent = 'jim'\n\n self.cd.cc = cc2\n self.cd.media_ref = mr2\n\n self.assertEqual(\n self.cd,\n cc2.parent\n )\n\n self.assertEqual(\n self.cd,\n mr2.parent\n )", "def move_character(character, dest):\n character_path = dirname(character.path)\n shutil.move(character_path, dest)", "def cambiar_parent(self):\r\n self.client.parent = self", "def cambiar_parent(self):\r\n self.client.parent = self", "def swap(self, subtree_a, subtree_b):\n\n temp1 = subtree_a.parent\n temp2 = subtree_b.parent\n\n temp1.children[temp1.children.index(subtree_a)] = subtree_b\n temp2.children[temp2.children.index(subtree_b)] = subtree_a\n \n subtree_a.parent = temp2\n subtree_b.parent = temp1\n\n self.propogate_subtree(subtree_a)\n self.propogate_subtree(subtree_b)", "async def move_to(self, exit):\n self.location = exit.destination_for(self.location)\n await self.msg(self.location.look(self))", "def move(self, new_home):\n #checked#\n ###your code here###\n if self.home!=None:\n self.home.occupant=None\n new_home.occupant=self\n self.home=new_home", "def _move_to_head(self, node):\n self._remove_node(node)\n self._add_node(node)", "def attach(self, name: str) -> ContainerReference:\n self.parent = ContainerReference(name)\n return self.parent", "def mv(self, src: int, dest: int) -> bool:\n url = 'https://webapi.115.com/files/move'\n result = self.s.post(url, data={'pid': dest, 'fid[0]': src}, headers={'Origin': origin['webapi'], 'Referer': referer['115'].format(self.default_dir)}).json()\n if result['errno'] == '':\n _ = functools.reduce(dict.__getitem__, self._dirs_lookup[src], self.dirs) # TODO: need to test\n self._dirs_lookup[src] = self._dirs_lookup[dest].append(dest)\n parent = functools.reduce(dict.__getitem__, self._dirs_lookup[src], self.dirs)\n if src not in parent:\n parent.update({src: _})\n else:\n parent.get(src).update(_)\n return True", "def plates_to_parent(self, index, plates):\n raise NotImplementedError()", "def test_without_children(self):\n branch = RouteFactory.create(url='/old-branch/')\n leaf = ChildRouteFactory.create(slug='leaf', parent=branch)\n new_url = '/new-branch/'\n\n with self.assertNumQueries(1):\n # UPDATE \"routes_route\"\n # SET \"polymorphic_ctype_id\" = 1,\n # \"url\" = '/new-branch/'\n # WHERE \"routes_route\".\"id\" = 42\n branch.move_to(new_url, move_children=False)\n\n self.assertEqual(branch.url, new_url)\n leaf.refresh_from_db()\n self.assertEqual(leaf.url, '/old-branch/leaf/')" ]
[ "0.5687158", "0.542566", "0.533787", "0.5328388", "0.516376", "0.5099298", "0.50907147", "0.5065896", "0.50041324", "0.49923396", "0.49568546", "0.4937967", "0.49123746", "0.4888783", "0.4883443", "0.48625642", "0.48523584", "0.48384222", "0.48384222", "0.48376495", "0.48334068", "0.4828661", "0.47939885", "0.4793703", "0.47564533", "0.4732511", "0.47204143", "0.47102433", "0.4696371", "0.46860638", "0.4661146", "0.4658334", "0.4629476", "0.46273988", "0.4617412", "0.46158206", "0.46139973", "0.46039608", "0.4597151", "0.45950663", "0.45913714", "0.45866996", "0.45836225", "0.45796853", "0.45792028", "0.45616627", "0.45589063", "0.45527047", "0.45472705", "0.45370457", "0.45317397", "0.45199588", "0.45059466", "0.44968042", "0.44914854", "0.4486044", "0.44722477", "0.44688913", "0.44644597", "0.4451287", "0.4431767", "0.44211665", "0.44067308", "0.4399331", "0.43898097", "0.43861738", "0.4382234", "0.4381909", "0.43775907", "0.43695992", "0.4369502", "0.43524697", "0.43397534", "0.43332532", "0.43241057", "0.4321875", "0.4315011", "0.431022", "0.42943805", "0.42933828", "0.42875397", "0.42854846", "0.42854846", "0.428136", "0.4272965", "0.4272904", "0.4269934", "0.42632434", "0.42625394", "0.42613107", "0.42493582", "0.42493582", "0.42475024", "0.42457247", "0.42405203", "0.4235658", "0.423372", "0.42326155", "0.42319456", "0.42298666" ]
0.6347577
0
Recover the compartment from DELETED state to ACTIVE state.
def recover_compartment(self, compartment_id, **kwargs): resource_path = "/compartments/{compartmentId}/actions/recoverCompartment" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match", "opc_request_id" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "recover_compartment got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "compartmentId": compartment_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing), "opc-request-id": kwargs.get("opc_request_id", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="Compartment") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="Compartment")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reactivate(self):\n self.write({'active': True, 'state': 'running'})\n STAGE = self.env['anytracker.stage']\n for ticket in self:\n starts = STAGE.search([('method_id', '=', ticket.method_id.id),\n ('progress', '=', 0)])\n if len(starts) != 1:\n raise except_orm(\n _('Configuration error !'),\n _('One and only one stage should have a 0% progress'))\n # write stage in a separate line to recompute progress & risk\n ticket.write({'stage_id': starts[0].id})\n self.recompute_parents()", "def _revert(self):\n self.kwargs[\"collect\"].change_status(self.kwargs[\"collect\"].ENDED)", "def decline(self):\n self.is_active = False\n self.save()", "def decline(self):\n self.is_active = False\n self.save()", "def deactivate(self):\r\n self.activated = False", "def post_revert(self):", "def pre_revert(self):", "def restore_state(self, ckpt):\n raise NotImplemented()", "def _hard_update(self, active, target):\n\n target.load_state_dict(active.state_dict())", "def action_reactivate(self):\n options=self.env['plm.config.settings'].GetOptions()\n status = 'released'\n action = 'reactivate'\n default={\n 'engineering_writable': False,\n 'state': status,\n }\n doc_default = {\n 'state': status,\n 'writable': False,\n }\n operationParams = {\n 'status': status,\n 'statusName': _('Released'),\n 'action': action,\n 'docaction': 'reactivate',\n 'excludeStatuses': ['draft', 'confirmed', 'transmitted', 'released'],\n 'includeStatuses': ['undermodify', 'obsoleted'],\n 'default': default,\n 'doc_default': doc_default,\n }\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_perform(self._ids, operationParams, default)", "def reactivate_for_mntner(\n database_handler: DatabaseHandler, reactivated_mntner: RPSLMntner\n) -> Tuple[List[RPSLObject], List[str]]:\n log_prelude = f\"reactivation {reactivated_mntner.pk()}\"\n source = reactivated_mntner.source()\n scopefilter_validator = ScopeFilterValidator()\n roa_validator = SingleRouteROAValidator(database_handler)\n\n if not get_setting(f\"sources.{source}.authoritative\"):\n raise ValueError(f\"Not authoritative for source {source}\")\n\n logger.info(f\"{log_prelude}: Starting reactivation for for {reactivated_mntner}\")\n\n def pk_exists(pk: str, rpsl_object_class: str) -> bool:\n existing_object_query = RPSLDatabaseQuery(column_names=[\"pk\"]).sources([source])\n existing_object_query = existing_object_query.rpsl_pk(pk).object_classes([rpsl_object_class])\n return bool(list(database_handler.execute_query(existing_object_query)))\n\n if pk_exists(reactivated_mntner.pk(), \"mntner\"):\n msg = (\n f\"source {source} has a currently active mntner {reactivated_mntner.pk()} - can not restore the\"\n \" suspended one\"\n )\n logger.info(f\"{log_prelude}: error: {msg}\")\n raise ValueError(msg)\n\n # This is both a check to make sure the mntner is actually suspended,\n # but also to catch cases where a suspended mntner did not have itself as mnt-by.\n query = (\n RPSLDatabaseSuspendedQuery()\n .sources([source])\n .rpsl_pk(reactivated_mntner.pk())\n .object_classes([\"mntner\"])\n )\n results = list(database_handler.execute_query(query))\n\n if not results:\n msg = f\"mntner {reactivated_mntner.pk()} not found in suspended store for source {source}\"\n logger.info(f\"{log_prelude}: error: {msg}\")\n raise ValueError(msg)\n\n query = RPSLDatabaseSuspendedQuery().sources([source]).mntner(reactivated_mntner.pk())\n results += list(database_handler.execute_query(query))\n\n restored_row_pk_uuids 
= set()\n restored_objects = []\n info_messages: List[str] = []\n\n for result in results:\n if result[\"pk\"] in restored_row_pk_uuids:\n continue\n\n reactivating_obj = rpsl_object_from_text(result[\"object_text\"], strict_validation=False)\n\n if pk_exists(reactivating_obj.pk(), reactivating_obj.rpsl_object_class):\n msg = (\n f\"Skipping restore of object {reactivating_obj} - an object already exists with the same key\"\n )\n logger.info(f\"{log_prelude}: {msg}\")\n info_messages.append(msg)\n continue\n\n reactivating_obj.scopefilter_status, _ = scopefilter_validator.validate_rpsl_object(reactivating_obj)\n if get_setting(\"rpki.roa_source\") and reactivating_obj.is_route and reactivating_obj.asn_first:\n reactivating_obj.rpki_status = roa_validator.validate_route(\n reactivating_obj.prefix, reactivating_obj.asn_first, source\n )\n\n database_handler.upsert_rpsl_object(\n reactivating_obj, JournalEntryOrigin.suspension, forced_created_value=result[\"original_created\"]\n )\n restored_row_pk_uuids.add(result[\"pk\"])\n restored_objects.append(reactivating_obj)\n logger.info(f\"{log_prelude}: Restoring object {reactivating_obj}\")\n\n database_handler.delete_suspended_rpsl_objects(restored_row_pk_uuids)\n return restored_objects, info_messages", "def update_active(self):\n self.set_active(0)\n self.state = INACTIVE", "def update_active(self):\n self.set_active(0)\n self.state = INACTIVE", "def update_active(self):\n self.set_active(0)\n self.state = INACTIVE", "def update_active(self):\n self.set_active(0)\n self.state = INACTIVE", "def update_active(self):\n self.set_active(0)\n self.state = INACTIVE", "def deactivate(self):\n self.active = False", "def deactivate(self):\n self.active = False", "def _revert_to_draft(self):\n self.status = self.DRAFT\n try:\n with transaction.atomic():\n for signup in self.signups.all():\n signup.delete()\n self.save()\n except DatabaseError:\n self.status = self.REGISTRATION", "def test_deactivate_account02(self, client):\n a = AccountFactory.get_account(type=Account.DEPOSITORY)\n AccountFactory.get_account(item=a.item, type=Account.DEPOSITORY)\n client = self.get_auth_client(a.item.user)\n\n url = '/v1/accounts/set_active/{}'.format(a.id)\n dic = {'is_active': False}\n data = json.dumps(dic)\n response = client.put(url, data, content_type='application/json')\n assert response.status_code == 400", "def flip_state(self):\n logging.debug(\"element selected\")\n if len(self._contents) == 0:\n self.deactivate()\n else:\n if self._contents[self.pointer][2] == 'exit':\n self.deactivate()\n return\n self.states[self.pointer] = not self.states[self.pointer] #Just inverting.\n self.refresh()", "def enable_archived_deleted(self):\r\n if self.archived_deleted:\r\n self._set_subclient_properties(\"_subclient_properties['cloudAppsSubClientProp']\\\r\n ['salesforceSubclient']['backupArchivedandDeletedRecs']\", False)", "def activate(self):\r\n self.update_enrollment(is_active=True)", "def restore_full_state(self, state):\n state_ref = self.ale.decodeState(state)\n self.ale.restoreSystemState(state_ref)\n self.ale.deleteState(state_ref)", "def cancel(self):\n self.is_active = False\n self.save()", "def cancel(self):\n self.is_active = False\n self.save()", "def test_recover(self):\n # Should work\n A = ConwayAgent(23, True)\n A.infect()\n A.recover()\n self.assertTrue(A.status() == 3)\n\n # Should do nothing\n A.reset()\n A.recover()\n self.assertTrue(A.status() == 1)\n\n # Should also do nothing\n A.infect()\n A.kill()\n A.recover()\n self.assertTrue(A.status() == 
0)", "def consider_deactivation(self):\n pass", "def reactivate(self, creator):\r\n # if we reactivate then reinit this\r\n self.activation = Activation(creator)\r\n self.activated = False", "def revert_state(self):\n if self.previous_states > 0: # checks for empty\n self.update_status(self.previous_states.pop())", "def _pre_deactivation_steps(self, context, id_, data, resource_inventory):\n LOG.info(\"[%s] Pre deactivation checks started\" % id_)\n resource_type = resource_inventory[eon_const.EON_RESOURCE_TYPE]\n expected_states = eon_const.EXPECTED_STATES_DEACTIVATION[resource_type]\n self.validator.validate_state(expected_states,\n resource_inventory.get(\n eon_const.EON_RESOURCE_STATE))\n resource_driver = driver.load_resource_driver(\n resource_inventory[eon_const.EON_RESOURCE_TYPE])\n resource_driver.pre_deactivation_steps(context,\n resource_inventory=resource_inventory)\n next_state = eon_const.DEACTIVATION_STATE_MAPPING.get(\n resource_inventory.get(eon_const.EON_RESOURCE_STATE))\n\n self.virt_utils.update_prop(context, id_, eon_const.EON_RESOURCE_STATE,\n next_state)\n resource_inventory[eon_const.EON_RESOURCE_STATE] = next_state\n LOG.info(\"[%s] Pre deactivation checks finished successfully\" % id_)", "def activate(self):\n self.active = True", "def activate(self):\n self.active = True", "def set_as_reverted(self):\n self.is_revert = False\n self.update_request.status = 'Reverted'", "def _activate(self):\n self.active = True", "def change_restored(self, event):\n pass", "def reactivate(self) -> None:\n self.display_until = None\n self.save()", "def confirm_email(self):\n self.active = True\n self.save()", "def activate(self):\n self._is_active = True", "def test_deactivate_and_save():\n course_run_enrollment = CourseRunEnrollmentFactory.create(\n active=True, change_status=None\n )\n program_enrollment = ProgramEnrollmentFactory.create(\n active=True, change_status=None\n )\n enrollments = [course_run_enrollment, program_enrollment]\n for enrollment in enrollments:\n enrollment.deactivate_and_save(ENROLL_CHANGE_STATUS_REFUNDED)\n enrollment.refresh_from_db()\n enrollment.active = False\n enrollment.change_status = ENROLL_CHANGE_STATUS_REFUNDED", "def resume(self):\n pass\n # self.condor_object.release()", "def deactivate(self):\n self._is_active = False", "def ComputerFinalStateOfCharge(self):\r\n pass", "def recover( self, job, job_wrapper ):\n job_state = self.__job_state( job, job_wrapper )\n job_wrapper.command_line = job.get_command_line()\n state = job.get_state()\n if state in [model.Job.states.RUNNING, model.Job.states.QUEUED]:\n log.debug( \"(LWR/%s) is still in running state, adding to the LWR queue\" % ( job.get_id()) )\n job_state.old_state = True\n job_state.running = state == model.Job.states.RUNNING\n self.monitor_queue.put( job_state )", "def test_deactivate_account01(self, client):\n a1 = AccountFactory.get_account()\n a2 = AccountFactory.get_account(item=a1.item)\n client = self.get_auth_client(a1.item.user)\n\n url = '/v1/accounts/set_active/{}'.format(a2.id)\n dic = {'is_active': False}\n data = json.dumps(dic)\n response = client.put(url, data, content_type='application/json')\n assert response.status_code == 204\n\n url = '/v1/accounts/set_active/{}'.format(a1.id)\n data = json.dumps(dic)\n response = client.put(url, data, content_type='application/json')\n assert response.status_code == 400", "def deactivate(self):\n pass", "def deactivate(self):\n pass", "def set_inactive(self):\n self.active = False", "def unsetCompartment(self):\n return 
_libsbml.Reaction_unsetCompartment(self)", "def saveActivate():\n save()\n activate(block=\"true\")", "def _deactivate(self, util, persist=False):\n activation_keys = _keys_for_activation(self._language, self._version)\n\n # We substitute zero here because that is treated as the never\n # activated value.\n activated_env = os.environ.get(activation_keys.activated, \"0\")\n shell = self._parent_shell if persist else None\n\n if int(activated_env) > 1:\n util.overwrite_environment_variable(shell,\n activation_keys.activated,\n str(int(activated_env) - 1))\n return False\n elif int(activated_env) == 1:\n active_environment = self._active_environment(ActiveEnvironment)\n\n for key in active_environment.overwrite.keys():\n backup = activation_keys.deactivate.format(key=key)\n util.overwrite_environment_variable(shell,\n key,\n os.environ.get(backup, \"\"))\n util.overwrite_environment_variable(shell,\n backup,\n None)\n\n for key in active_environment.prepend.keys():\n inserted = activation_keys.inserted.format(key=key)\n for value in os.environ[inserted].split(os.pathsep):\n util.remove_from_environment_variable(shell,\n key,\n value)\n\n util.overwrite_environment_variable(shell,\n inserted,\n None)\n\n util.overwrite_environment_variable(shell,\n activation_keys.activated,\n None)\n\n return True\n else:\n return False", "def previous_status(self):\n if self.status == self.REGISTRATION:\n self._revert_to_draft()\n elif self.status == self.PENDING:\n self._revert_to_registration()", "def set_no_longer_active(self):\n with self.redis_client.lock(\"active-lock\"):\n self.set_to_redis(\"active\", \"done\")", "def refresh(self):\n self.lease = self.blazar.lease.get(self.id)", "def test_reactivate_and_save():\n course_run_enrollment = CourseRunEnrollmentFactory.create(\n active=False, change_status=ENROLL_CHANGE_STATUS_REFUNDED\n )\n program_enrollment = ProgramEnrollmentFactory.create(\n active=False, change_status=ENROLL_CHANGE_STATUS_REFUNDED\n )\n enrollments = [course_run_enrollment, program_enrollment]\n for enrollment in enrollments:\n enrollment.reactivate_and_save()\n enrollment.refresh_from_db()\n enrollment.active = True\n enrollment.change_status = None", "def deactivate(self):\r\n self.update_enrollment(is_active=False)", "def restore(instr, enabled, stop_after, state):\n if enabled:\n instr.write(\"ACQUIRE:STOPAFTER {}\".format(stop_after))\n instr.write(\"ACQUIRE:STATE {}\".format(state))", "def action_draft(self):\n self.state = 'draft'", "def action_draft(self):\n self.state = 'draft'", "def reactivate(self):\r\n self.require_item()\r\n\r\n url = '{0}/reactivate'.format(self.get_url())\r\n request = http.Request('PUT', url)\r\n\r\n return request, parsers.parse_empty", "def retract_intake(self):\n intake_state = String()\n intake_state.data = \"retract\"\n\n self.ros_node.publish(\"/auto/intake/state\", String, intake_state, latching = True)\n rospy.loginfo(\"Retracted Intake\")", "async def ensure_active(self):\n if not self.active:\n await self.refresh()", "def back(self):\n\n self.root.bom_compare_old = self.old_entry.get()\n self.root.bom_compare_save = self.save_entry.get()\n self.root.bom_compare_new = self.new_entry.get()\n\n self.root.back(BomCompare)", "def _revert_to_registration(self):\n self.status = self.REGISTRATION\n for character in self.characters.all():\n character.status = Character.DELETED\n character.save()\n self.save()", "def complete(aut):\n trash_is_here = False\n aut_bis = aut.clone()\n\n alphabet = aut_bis.get_alphabet()\n states = 
aut_bis.get_states()\n\n for state in states:\n for letter in alphabet:\n transition_tmp = aut_bis.delta(letter, [state])\n if transition_tmp == set():\n if not trash_is_here:\n aut_bis.add_state(\"Trash\")\n trash_is_here = True\n aut_bis.add_transition((state, letter, \"Trash\"))\n if trash_is_here:\n aut_bis.add_transition((\"Trash\", letter, \"Trash\"))\n\n return aut_bis", "def _deactivate(self, context, id_, data, resource_inventory):\n resource_driver = driver.load_resource_driver(\n resource_inventory[eon_const.EON_RESOURCE_TYPE])\n LOG.info(\"[%s] Deactivation started.\" % id_)\n try:\n # Notifies on deactivation\n self.notify(context, id_, resource_inventory)\n\n run_playbook = data.get(eon_const.RUN_PLAYBOOK, True)\n force_deactivate = data.get(eon_const.FORCED_KEY, False)\n resource_driver.deactivate(\n context, id_,\n resource_inventory=resource_inventory,\n run_playbook=run_playbook,\n force_deactivate=force_deactivate)\n resource_driver.post_deactivation_steps(context,\n resource_inventory=resource_inventory)\n LOG.info(\"[%s] Deactivation finished successfully\" % id_)\n except Exception as e:\n LOG.exception(e)\n LOG.error(\"Deactivation observed failures. %s \" %\n e.message)\n finally:\n try:\n self.db_api.delete_property(context, id_,\n eon_const.HYPERVISOR_ID)\n except exception.NotFound:\n pass # ignore\n if data.get(eon_const.FORCED_KEY):\n self._forced_deactivate(context, id_, resource_inventory)\n rollback_state = (eon_const.ROLLBACK_STATE_ACTIVATION[\n resource_inventory[\n eon_const.EON_RESOURCE_TYPE]])\n self.virt_utils.update_prop(context, id_,\n eon_const.EON_RESOURCE_STATE,\n rollback_state)", "def test_model_activation(self):\n\n # GIVEN deactivated model item\n obj = self.obj2\n obj.deactivate()\n\n # WHEN retriving item details\n response = self.api.dataid(self.app_label, self.model_name2, obj.id)\n\n # THEN it should fail\n self.assertTrue(response.error)\n\n # -----\n\n # GIVEN model is activated\n obj.activate()\n\n # WHEN retriving item details\n response = self.api.dataid(self.app_label, self.model_name2, obj.id)\n\n # THEN it should succeed\n self.assertTrue(response.success)", "def active(self, activate):\n self.is_active = activate", "def disable_archived_deleted(self):\r\n if not self.archived_deleted:\r\n self._set_subclient_properties(\"_subclient_properties['cloudAppsSubClientProp']\\\r\n ['salesforceSubclient']['backupArchivedandDeletedRecs']\", True)", "def upkeep(self) -> None:\n if self.atype in ['REINFORCE', 'A2C']:\n self._db.reset()", "def recover(self):\n self.deleted = False\n self.save()\n self.history.create(user_id=self.pk, action=user_history.RECOVERY)", "def deactivate(self) -> bool:\n pass", "def restore(self, memento):\n self.state = memento.state", "def doRestore(self):\n self.logger.log(\"Begin to restore instance status...\")\n \n try:\n self.readConfigInfo()\n self.getUserInfo()\n \n # dump status to file\n cmd = ClusterCommand.getQueryStatusCmd(self.user, self.dbNodeInfo.id, self.__curStatusFile)\n (status, output) = commands.getstatusoutput(cmd)\n if (status != 0):\n self.logger.logExit(\"Query local instance status failed!Error: %s\" % output)\n \n bakDbStatus = DbClusterStatus()\n bakDbStatus.initFromFile(self.__bakStatusFile)\n bakNodeStatus = bakDbStatus.getDbNodeStatusById(self.dbNodeInfo.id)\n if (bakNodeStatus is None):\n self.logger.logExit(\"Get backup status of local node failed!\")\n \n curDbStatus = DbClusterStatus()\n curDbStatus.initFromFile(self.__curStatusFile)\n curNodeStatus = 
curDbStatus.getDbNodeStatusById(self.dbNodeInfo.id)\n if (curNodeStatus is None):\n self.logger.logExit(\"Get current status of local node failed!\")\n if (not curNodeStatus.isNodeHealthy()):\n self.logger.logExit(\"Current status of node is not healthy!\")\n \n # Compare the status and restore it\n bakInstances = bakNodeStatus.datanodes + bakNodeStatus.gtms\n for bakInst in bakInstances:\n curInst = curNodeStatus.getInstanceByDir(bakInst.datadir)\n if (curInst is None):\n self.logger.logExit(\"Get current status of instance failed!DataDir:%s\" % bakInst.datadir)\n \n if (bakInst.status == curInst.status):\n continue\n \n if (bakInst.status == DbClusterStatus.INSTANCE_STATUS_PRIMARY):\n self.__switchToPrimary(bakInst.datadir)\n elif (bakInst.status == DbClusterStatus.INSTANCE_STATUS_STANDBY):\n self.__switchToStandby(bakInst.datadir)\n \n except Exception, e:\n self.logger.logExit(str(e))\n \n self.logger.log(\"Restore instance status successfully.\")\n self.logger.closeLog()", "def perform_destroy(self, instance):\n instance.is_active = not instance.is_active\n instance.save()", "def perform_destroy(self, instance):\n instance.is_active = not instance.is_active\n instance.save()", "def change_abandoned(self, event):\n pass", "def undo(self):\n LOG.debug(\"In the undo method, will attempt to restore\")\n\n # validate detected nothing to do for this, nothing was done\n # for execute, so simply return\n if self.no_op:\n return\n\n if not self.source_dev or not self.target_dev:\n return\n LOG.debug(\"The source dictionary is: %s\", self.source_dict_restore)\n LOG.debug(\"The target dictionary is: %s\", self.target_dict_restore)\n\n # In scenario where no source IP Address...\n if self.source_dict_restore:\n self.commandex.send_ifcfg(self.source_dev,\n self.source_dict_restore)\n\n # May have failed because the ifcfg didn't even exist, nothing\n # to roll back then\n if self.target_dict_restore:\n self.commandex.send_ifcfg(self.target_dev,\n self.target_dict_restore)", "def restore_state(self, state: ale_py.ALEState):\n self.ale.restoreState(state)", "async def refresh_entity_state(self):", "def keystone_departed():\n with charm.provide_charm_instance() as charm_instance:\n charm_instance.remove_config()", "def on_deactivate(self) -> None:", "def set_inactive(self):\n if self.active is False:\n return\n self.active = False\n self.save()\n self.question_set.update(active=False)", "def _set_active(o, d):\n try:\n if d:\n o.activate()\n else:\n o.deactivate()\n except AttributeError:\n # if the object doesn't have activate and deactivate that's fine just\n # keep going\n pass", "def set_active(self):\n if self.active is True:\n return\n self.active = True\n self.save()\n self.question_set.update(active=True)", "def is_up(self):\n data = self.vxprint()\n return self.name in data and data[self.name].STATE == \"ACTIVE\"", "def actualizar_estado_actividad(self):\r\n now = timezone.now()\r\n estado_previo = self.estado\r\n if self.fecha_de_inicio > now:\r\n self.estado = 'Por abrir'\r\n elif self.fecha_de_cierre < now:\r\n self.estado = 'Cerrada'\r\n else:\r\n self.estado = 'Abierta'\r\n if estado_previo != self.estado:\r\n self.save()", "def automatic_backup(self):\n\n if self.observationId:\n logging.info(\"automatic backup\")\n self.save_project_activated()", "def restore_state(self, state):\n state_ref = self.ale.decodeState(state)\n self.ale.restoreState(state_ref)\n self.ale.deleteState(state_ref)", "def deactivate(self):\n pass", "def affection_status_switch_on(self):\n 
self._affection_status_switch = False", "def test_restore_from_compacted_backup(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster_validate()\n self.backup_compact()\n self.backup_restore_validate()", "def set_active(self):\n self.active = True", "def revert(self):\n self.instance.save()\n return self.instance", "def unsetCompartment(self):\n return _libsbml.CompartmentReference_unsetCompartment(self)", "def action_draft(self):\n context = self._context or {}\n inv_obj = self.env['account.invoice']\n\n brw = self.browse( self.ids[0])\n inv_ids = [i.invoice_id.id for i in brw.line_ids]\n if inv_ids:\n inv_obj.write( {'wh_src_id': False})\n\n return self.write( {'state': 'draft'})", "def test_active_txn_missing(self):\r\n b = cli.blank()\r\n\r\n result = cli.active_txn({\r\n 'activeTransaction': {}\r\n })\r\n self.assertIsInstance(result, cli.FormattedItem)\r\n self.assertEquals(result.original, b.original)", "def Confirm(self):\r\n \r\n global references\r\n self.from_ed = self.ed_result.get(\"1.0\",'end-1c')\r\n references.append(self.from_ed)\r\n self.confirm_b.configure(state = 'disabled')\r\n self.discard_b.configure(state = 'disabled')\r\n self.finalresult.configure(state = 'normal')\r\n self.finalresult.delete('1.0', END)\r\n \r\n self.final()", "def mark_refunded(self):\n order = self.clone()\n order.status = Order.STATUS_REFUNDED\n order.save()\n return order", "def on_deactivate(self):" ]
[ "0.5480294", "0.5277981", "0.52267", "0.52267", "0.5225232", "0.5208037", "0.5192213", "0.51521283", "0.51319057", "0.50953686", "0.50489", "0.503891", "0.503891", "0.503891", "0.503891", "0.503891", "0.5005554", "0.5005554", "0.49955514", "0.49873796", "0.49641302", "0.49197772", "0.49138245", "0.49061626", "0.49010143", "0.49010143", "0.48821554", "0.4879413", "0.48771948", "0.4866343", "0.4860267", "0.48579732", "0.48579732", "0.48343128", "0.4817704", "0.48116514", "0.48063833", "0.48031428", "0.48018792", "0.47980666", "0.47923714", "0.4771863", "0.47665158", "0.47663423", "0.4766234", "0.47646517", "0.47646517", "0.47626483", "0.4755885", "0.47439897", "0.47390443", "0.47339642", "0.47258812", "0.47220355", "0.47206733", "0.47143805", "0.4710567", "0.46987784", "0.46987784", "0.46968818", "0.46765372", "0.46689537", "0.46687636", "0.46479276", "0.46464056", "0.46358818", "0.46268722", "0.4612476", "0.460563", "0.46029848", "0.45906785", "0.45865968", "0.45817623", "0.45806235", "0.45709613", "0.45709613", "0.45604897", "0.45591184", "0.4556617", "0.45466152", "0.4535473", "0.45300788", "0.45289284", "0.45249367", "0.45189387", "0.45174393", "0.45096612", "0.44917879", "0.44910333", "0.44853094", "0.447393", "0.44671997", "0.4465989", "0.44546574", "0.44484746", "0.4445157", "0.44443595", "0.44386354", "0.44362748", "0.44306618" ]
0.47606575
48
Removes a user from a group by deleting the corresponding `UserGroupMembership`.
def remove_user_from_group(self, user_group_membership_id, **kwargs): resource_path = "/userGroupMemberships/{userGroupMembershipId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "remove_user_from_group got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userGroupMembershipId": user_group_membership_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_from_group(user: User, group: Group) -> Result:\n if user.pw_name not in group.gr_mem:\n return Result(State.unchanged)\n command([\"/usr/sbin/deluser\", user.pw_name, group.gr_name])\n group.gr_mem.remove(user.pw_name)\n return Result(State.success)", "def delete_group_user(self, group_id, user_id):\n resp, body = self.delete('groups/%s/users/%s' % (group_id, user_id))\n self.expected_success(204, resp.status)\n return rest_client.ResponseBody(resp, body)", "def delete_group_user(self, group_id, user_id):\n resp, body = self.delete('groups/%s/users/%s' % (group_id, user_id))\n self.expected_success(204, resp.status)\n return service_client.ResponseBody(resp, body)", "def removeUserFromGroup(self, user, group):\n return self.pm_getUserManager().removeUserFromGroup(self._unbox(user), self._unbox(group))", "def remove_user_from_group(self, group_name, user_name):\r\n params = {'GroupName' : group_name,\r\n 'UserName' : user_name}\r\n return self.get_response('RemoveUserFromGroup', params)", "def action_remove_from_group(self, kwargs):\n user = kwargs[\"user\"]\n group = kwargs[\"group\"]\n\n if self.engine.remove_user_from_group(user, group):\n info(f\"User {user} sucessfully removed from {group}\")\n else:\n error(f\"Unable to remove {user} from {group}, check privileges or dn\")", "def delete(self, id):\r\n return UserGroupService.removeUserGroup(self, id)", "def del_user_from_group(self,username,groupname):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_del_user_from_group_query,{'username':username,'groupname':groupname,'username_field':self.sql_username_field,'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: del_user_from_group: %s\" % (query,))\n\n cursor.execute(query)\n db.commit()\n return True", "def remove_group(self, resolvable):\n group = self._resolve_group(resolvable)\n\n for membership in self.group_memberships:\n if membership.group.href == group.href:\n membership.delete()\n return\n\n raise StormpathError({\n 'developerMessage': 'This user is not part of Group %s.' 
% group.name,\n })", "def remove_user_from_group(self, group_name, user_login):\n params = {\n 'login': user_login,\n 'name': group_name\n }\n self.sonarqube.make_call('post', API_USER_GROUPS_REMOVE_USER_ENDPOINT, **params)", "def delete_user_group(self, token, userGroup):\n requestUser = self.get_username_from_token(token)\n if self.check_user_has_owner_clearance(requestUser, userGroup):\n dataBase = self.read_database()\n if userGroup in dataBase['userGroups']:\n del dataBase['userGroups'][userGroup]\n self.write_database(dataBase)\n return\n else:\n raise GroupDoesNotExistException(\"Group does not exist\")\n else:\n raise UserPermissionException(\"User does not have write access\")", "def remove_member(self, group_id: str, user_id: str):\n # If successful, this method returns 204 No Content response code.\n # It does not return anything in the response body.\n # Using resp_type=\"text\" to avoid parsing error in the calling method.\n self.ms_client.http_request(\n method='DELETE',\n url_suffix=f'groups/{group_id}/members/{user_id}/$ref', resp_type=\"text\")", "def remove_user_from_group(self, token, userGroup, userName):\n requestUser = self.get_username_from_token(token)\n if self.check_user_has_owner_clearance(requestUser, userGroup):\n dataBase = self.read_database()\n if userGroup not in dataBase['userGroups']:\n raise GroupDoesNotExistException(\"Group does not exist\")\n owners = dataBase['userGroups'][userGroup]['owners']\n members = dataBase['userGroups'][userGroup]['members']\n if userName in owners:\n dataBase['userGroups'][userGroup]['owners'].remove(userName)\n if userName in members:\n dataBase['userGroups'][userGroup]['members'].remove(userName)\n self.write_database(dataBase)\n\n else:\n raise UserPermissionException(\"Requesting user is not owner of specified user group\")", "def delete_user(group_id, member_id):\n query=\"DELETE FROM groupmembers WHERE group_id= '{}' AND member_id = {}\".format(group_id, member_id)\n cur.execute(query)\n return cur.fetchone()", "def remove_from_group(_request, group_id, email):\n group = models.UserGroup.get_by_id(int(group_id))\n user_key = models.UserProfile.load(email).key()\n if group.users is None:\n group.users = []\n logging.warning('Group \"%s\" had a None users list' % group.name)\n group.users.remove(user_key)\n group.put()\n\n url = urlresolvers.reverse('views.admin.edit_user', args=[email])\n return http.HttpResponseRedirect(url)", "def remove_user_from_groups(conn, user_dn, group_dn):\n try:\n conn.extend.microsoft.remove_members_from_groups([str(user_dn)], [str(group_dn)])\n except Exception as e:\n raise Exception(\"Can't remove user from groups :: {}\".format(e))", "def delete_user_from_group(self, name, login):\n params = {\n 'login': login,\n 'name': name\n }\n self.sonarqube._make_call('post', API_USER_GROUPS_REMOVE_USER, **params)", "def delete_group_group_member(self, targetgroup, groupname):\n try:\n targetgroup = self.quote(targetgroup)\n groupname = self.quote(groupname)\n self.g.delete('groups/%s/groups/%s' % (targetgroup,\n groupname),\n headers={})\n except HTTPError as e:\n return self._manage_errors(e)", "def delete_group(self, group_name):\n params = {\n 'name': group_name\n }\n\n self.sonarqube._make_call('post', API_USER_GROUPS_DELETE, **params)", "def delete_group(self, group_name):\n params = {\n 'name': group_name\n }\n\n self.sonarqube.make_call('post', API_USER_GROUPS_DELETE_ENDPOINT, **params)", "def test_050_delete_user_from_group(self):\n\n testflow.step(\n \"Removing user %s from group %s\", 
TEST_USER1, TEST_GROUP1\n )\n assert MANAGE_CLI.run(\n 'userdel',\n TEST_GROUP1,\n user=TEST_USER1\n )[0], \"Failed to remove user from group '%s'\" % TEST_GROUP1\n\n testflow.step(RMV_GRP_MSG, TEST_GROUP1)\n assert not MANAGE_CLI.run(\n 'userdel',\n TEST_GROUP1,\n user='nonsense'\n )[0], \"Possible to remove nonexisting user from group\"\n\n testflow.step(\"Removing user %s from nonexistent group\", TEST_GROUP1)\n assert not MANAGE_CLI.run(\n 'userdel',\n 'nonsense',\n user=TEST_USER1\n )[0], \"Possible to remove user from nonexisting group\"", "def remove_user_from_group(self, group_name, user_name, delegate_account=None):\n self.log.debug(\"Removing user \" + user_name + \" to group \" + group_name)\n params = {'GroupName': group_name,\n 'UserName': user_name}\n if delegate_account:\n params['DelegateAccount'] = delegate_account\n self.connection.get_response('RemoveUserFromGroup', params)", "def delete_user(request, user):\n\n if models.Group.created_by(user).count() > 0:\n raise UserDeletionError('Cannot delete user who is a group creator.')\n\n user.groups = []\n\n query = _all_user_annotations_query(request, user)\n annotations = es_helpers.scan(client=request.es.conn, query={'query': query})\n for annotation in annotations:\n storage.delete_annotation(request, annotation['_id'])\n\n request.db.delete(user)", "def delete_group(self, group):\n raise NotImplementedError('delete_group')", "def delete_proj_user(self, user_id):\n conn = pyone.OneServer(\n self.auth_url,\n session=\"{0}:{1}\".format(self.username, self.password)\n )\n try:\n user = conn.user.info(user_id)\n group = user.get_GROUPS().ID[0]\n # delete group\n conn.group.delete(group)\n # delete user\n return conn.user.delete(user.get_ID())\n except pyone.OneNoExistsException as e:\n logger.exception(\"Failed. User trying to delete, doesn't exist: \", user_id)\n except Exception as e:\n logger.exception(\"Failed. 
User trying to delete, group doesn't exist: \", user_id)", "def del_from_groups(self, username, groups):\n pass", "def delete_group(args, p4, group_name, metrics):\n LOG.debug(\"delete_group() {}\".format(group_name))\n r = p4.fetch_group(group_name)\n if r and r.get('Owners') and p4gf_const.P4GF_USER in r.get('Owners'):\n print_verbose(args, _(\"Deleting group '{group_name}'...\").format(group_name=group_name))\n p4.run('group', '-a', '-d', group_name)\n metrics.groups += 1\n else:\n print_verbose(args, _(\"Not deleting group '{group}':\"\n \" Does not exist or '{user}' is not an owner.\")\n .format(group=group_name, user=p4gf_const.P4GF_USER))", "def del_group(self, group_id, group_type):\n self._mod_group(\n command=self.ofproto.OFPGC_DELETE,\n group_id=group_id,\n group_type=group_type,\n )", "def remove_user(cursor, username):\n cursor.execute(\"DELETE FROM users WHERE username = ?\", (username,))\n cursor.execute(\"\"\"\n DELETE\n FROM groups\n WHERE NOT EXISTS(\n SELECT\n NULL\n FROM\n usergroups ug\n WHERE\n ug.groupid = groupid\n )\n \"\"\")", "def delete_group(self, group_id):\n url = self.groups_url + \"/%s\" % group_id\n return requests.delete(url, headers=self.headers)", "def delete_group(_request, group_id):\n group = models.UserGroup.get_by_id(int(group_id))\n group.delete()\n\n url = urlresolvers.reverse('views.admin.list_groups')\n return http.HttpResponseRedirect(url)", "def delete_user_group_values(self, id_user:int, id_group:int) -> None:\n try:\n self.cursor.execute(f\"DELETE FROM {table_user_group_connect} WHERE id_user={id_user} AND id_group={id_group};\")\n self.connection.commit()\n except Exception as e:\n msg = f\"We faced problems ith deletion from {table_user_group_connect} table, Mistake: {e}\"\n self.proceed_error(msg)", "def deleteGroup(groupName):\r\n Group.deleteGroup(groupName)", "def remove_group():\n _id = request.form['_id']\n data, code, message = FIELD_SERVICE.remove_group(_id)\n return __result(data, code, message)", "def delete_group(self, group_o):\n class_query = ClassQuery('fvTenant')\n class_query.propFilter = 'eq(fvTenant.name, \"' + group_o.name + '\")'\n tenant_list = self.moDir.query(class_query)\n if len(tenant_list) > 0:\n tenant_list[0].delete()\n self.commit(tenant_list[0])", "def test_deluser(self):\n self.run_function(\"group.add\", [self._group], gid=self._gid)\n self.run_function(\"user.add\", [self._user])\n self.run_function(\"group.adduser\", [self._group, self._user])\n self.assertTrue(self.run_function(\"group.deluser\", [self._group, self._user]))\n group_info = self.run_function(\"group.info\", [self._group])\n self.assertNotIn(self._user, str(group_info[\"members\"]))", "def rm_user_group(self, groupname, ls_user):\n data = {\"groupname\": groupname, \"rm_users\": ls_user}\n headers = {\"user-agent\": self.u_agent}\n req_url = self.normalize_admin_url(u\"groups/{}\".format(groupname))\n res = requests.put(\n req_url,\n headers=headers,\n auth=self.auth,\n data=json.dumps(data),\n verify=False,\n )\n if res.status_code in [200, 206]:\n return Response(0, res)\n else:\n return Response(res.status_code, res)", "def delete_group_member(self, group_id, member_id):\n url = self.groups_url + \"/%s/members/%s\" % (group_id, member_id)\n return requests.delete(url, headers=self.headers)", "def removeGroup(self, group, defaultGroup=''):\n return self.pm_getUserManager().removeGroup(self._unbox(group), self._unbox(defaultGroup))", "def delete_group(user):\n return 'do some magic!'", "def delete_all_group_member(self, group_id):\n url = 
self.groups_url + \"/%s/members\" % group_id\n return requests.delete(url, headers=self.headers)", "def delete_user(self, user):\n self.delete(user)", "def delete_group(\n group_id: BSONObjectId,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n assert_has_clearance(tkn.owner, \"sni.delete_group\")\n grp: Group = Group.objects.get(pk=group_id)\n logging.debug(\"Deleting group %s (%s)\", grp.group_name, group_id)\n grp.delete()", "def delete_group(self, group_name):\r\n params = {'GroupName' : group_name}\r\n return self.get_response('DeleteGroup', params)", "def delete_group(gid):\n if request.method == 'POST':\n hl.deleteGroup(gid)\n return redirect('/users')", "def test_delete_team_user_group(client):\n resp = client.delete_team_user_group(TEAM_ID, NEW_GROUP_ID)\n assert resp['team_id'] == TEAM_ID\n assert resp['group_deleted']", "def delete_security_group(self, security_group):\r\n return self.delete(self.security_group_path % (security_group))", "def delete_group(self, group_id: str):\n # If successful, this method returns 204 No Content response code.\n # It does not return anything in the response body.\n # Using resp_type=\"text\" to avoid parsing error in the calling method.\n self.ms_client.http_request(method='DELETE', url_suffix=f'groups/{group_id}', resp_type=\"text\")", "def delete_user(self):\n User.user_list.remove(self)", "def delete_user(self):\n User.user_list.remove(self)", "def delete_user(self):\n User.user_list.remove(self)", "def cleanup_user_groups(event):\n name = event.object.name\n\n if name.startswith(\"group:\"):\n principals = get_principals()\n users_groups = [p for p in principals if name in principals[p].groups]\n for user_or_group in users_groups:\n principals[user_or_group].groups.remove(name)\n\n DBSession.query(LocalGroup).filter(\n LocalGroup.principal_name == name).delete()", "def delete_user(self, user):\n name = utils.get_name(user)\n self._user_manager.delete(name)", "def delete_group(id, createdby):\n query = \"DELETE FROM groups WHERE group_id = {} AND createdby ='{}'\".format(id, createdby)\n cur.execute(query)", "def test_resource_user_resource_remove_user_from_user_groups_delete(self):\n pass", "def delete_user(self):\n\n User.user_list.remove(self)", "def remove_member_command(client: MsGraphClient, args: dict) -> tuple[str, dict, dict]:\n group_id = str(args.get('group_id'))\n user_id = str(args.get('user_id'))\n client.remove_member(group_id, user_id)\n\n human_readable = f'User {user_id} was removed from the Group \"{group_id}\" successfully.'\n return human_readable, NO_OUTPUTS, NO_OUTPUTS", "def remove_member_from_group(self, group_id, member_id):\n route_values = {}\n if group_id is not None:\n route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')\n if member_id is not None:\n route_values['memberId'] = self._serialize.url('member_id', member_id, 'str')\n self._send(http_method='DELETE',\n location_id='45a36e53-5286-4518-aa72-2d29f7acc5d8',\n version='6.0-preview.1',\n route_values=route_values)", "def delete_user(self, user):\n # noinspection PyUnresolvedReferences\n self.delete(user)", "def test_removeGroup(self):\n\t\tuser = User.objects.get(id=1)\n\t\tself.client.force_authenticate(user=user)\n\t\tgroup = Group.objects.create(admin=user, name='testGroup3', isPublic=True, \n\t\t\tdescription='This is another test group that just created.')\n\n\t\turl = \"/groups/3/\"\n\t\tresponse = self.client.delete(url, format='json')\n\t\tself.assertEqual(response.status_code, 
status.HTTP_204_NO_CONTENT)\n\n\t\turl = \"/groups/2/\"\n\t\tresponse = self.client.delete(url, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_070_delete_group_from_group(self):\n\n testflow.step(\n \"Removing group %s from group %s\",\n TEST_GROUP1, TEST_GROUP2\n )\n assert MANAGE_CLI.run(\n 'groupdel',\n TEST_GROUP1,\n group=TEST_GROUP2,\n )[0], \"Failed to delete group from group '%s'\" % TEST_GROUP1", "def delete_user(self, user):\n self.execute(TABELLE['id_users'][\"delete\"], user[\"id\"])", "def remove(self, user):\n if user != self.head:\n user.group = None\n user.save()\n self.players.remove(user)", "def remove_user_from_groups(username):\n groups = request.get_json().get(\"groups\", [])\n return jsonify(\n admin.remove_user_from_groups(\n current_app.scoped_session(), username, groups=groups\n )\n )", "def remove_from_group(self, org, contact, group):\n pass", "def test_groups_group_users_user_delete(self):\n pass", "def test_groups_group_users_user_delete(self):\n pass", "def remove_access(self, access_group):\n\n if self.has_auth_access(access_group):\n self.access_groups.remove(access_group)", "def delete():\n name = request.json['name']\n group = models.user.Group.get(name)\n if not group:\n raise Absent('Group does not exists.', deletion=False)\n else:\n models.db.session.delete(group)\n models.db.session.commit()\n return response(200, deletion=True)", "async def del_user(conn: LDAPConnection, user: dict, mailman: Client) -> None:\n await conn.delete(user[\"dn\"])\n uid = user[\"attributes\"][\"uid\"][0]\n rmtree(user[\"attributes\"][\"homeDirectory\"][0])\n rmtree(f\"/webtree/{uid[:1]}/{uid}\")\n mailing_list = mailman.get_list(\"announce-redbrick\")\n mailing_list.unsubscribe(f\"{uid}@redbrick.dcu.ie\")", "def removeGroup(self, *args):\n return _libsbml.GroupsModelPlugin_removeGroup(self, *args)", "def _delete_security_group(self, group_id):\n\n group_to_delete = self.get_resource()\n\n if not group_to_delete:\n raise NonRecoverableError(\n 'Unable to delete security group {0}, because the group '\n 'does not exist in the account'.format(group_id))\n\n try:\n self.execute(self.client.delete_security_group,\n dict(group_id=group_id), raise_on_falsy=True)\n except (exception.EC2ResponseError,\n exception.BotoServerError) as e:\n raise NonRecoverableError('{0}'.format(str(e)))", "def remove_user_from_cohort(course_key, username, cohort_id=None):\n if username is None:\n raise ValueError('Need a valid username')\n user = User.objects.get(username=username)\n if cohort_id is not None:\n membership = CohortMembership.objects.get(\n user=user, course_id=course_key, course_user_group__id=cohort_id\n )\n membership.delete()\n else:\n try:\n membership = CohortMembership.objects.get(user=user, course_id=course_key)\n except CohortMembership.DoesNotExist:\n pass\n else:\n membership.delete()", "def delete_group(groupname):\n response = jsonify(admin.delete_group(current_app.scoped_session(), groupname))\n return response", "async def delete_contact_group(dbcon: DBConnection, contact_group_id: int) -> None:\n if not await contact_group_exists(dbcon, contact_group_id):\n raise errors.InvalidArguments('contact group does not exist')\n q = \"\"\"delete from contact_groups where id=%s\"\"\"\n await dbcon.operation(q, (contact_group_id,))", "def customer_group_delete(group_id):\n result = {\"success\" : 1, \"message\" : \"Customer can not be Deleted\"}\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n \n #clean up the user id\n group_id 
= db.escape_string(group_id)\n \n query = \"\"\"\n DELETE FROM `groups`\n WHERE `groups`.`group_id` = \"%s\"\n \"\"\" %(group_id)\n cursor = db.cursor()\n try:\n if (cursor.execute(query)) != 0:\n db.commit()\n result = {\"success\" : 0, \"message\" : \"Customer Group Deleted Successfully\"}\n except Exception as customer_exp:\n result = {\"success\" : 1, \"message\" : \"Customer Group can not be Deleted \" + str(e)}\n finally:\n cursor.close()\n db.close()\n return result", "def test_delete_group_by_id(self):\n # Create a user with 2 groups\n resp = self.app.post('/users', data=json.dumps(self.test_user1_data))\n assert resp.status_code == 200\n\n # Delete one of those groups\n resp = self.app.delete('/groups/{}'.format(self.test_group1_groupid))\n assert resp.status_code == 200\n\n # Verify that the group is gone\n resp = self.app.get('/groups/{}'.format(self.test_group1_groupid))\n assert resp.status_code == 404\n\n # Verify that the user's groups don't have that group listed\n resp = self.app.get('/users/{}'.format(self.test_user1_userid))\n assert resp.status_code == 200\n\n data = json.loads(resp.data)\n assert self.test_group1_groupid not in data['groups']", "def test_remove_member_from_group(client):\n group = client.remove_members_from_group(TEAM_ID, GROUP_ID, 35555)\n assert group.team_id == TEAM_ID\n assert group.group_id == GROUP_ID\n assert 35555 not in group.members", "def del_user(self, name):\n del self.users[irc.strings.IRCFoldedCase(modules.trim_nick(name))]", "def delusers(self, args):\n\n if len(args) < 2:\n print(self.addusers.__doc__)\n return\n\n gname = args[0]\n users = args[1:]\n\n g = sr.group(gname)\n\n if not g.in_db:\n print(\"Group '%s' not found.\" % ( gname ))\n return\n\n not_members = g.user_rm( users )\n g.save()\n\n for uname in not_members:\n print(\"Unable to remove non-member '%s' from '%s'\" % ( gname, uname ))", "def delete_user(self, user_id):\n\n # ask the model to delete the user\n um = User(self.settings)\n status = um.delete(user_id)\n\n # return\n return status", "def delete(person_group_id):\n url = 'persongroups/{}'.format(person_group_id)\n\n return util.request('DELETE', url)", "def delete_user(self, _id):\n return self.make_request(\"DELETE\", \"users/\"+_id, {})", "def delete_user(self, user_id):\n return self._delete('/users/{0}'.format(user_id))", "def delete_user(user_id):\n\n user = User.query.get(user_id)\n db.session.delete(user)\n db.session.commit()\n return", "def remove_from_group(self, group):\n\n if self.in_group(group):\n self.secondary_groups.remove(group)\n return self", "def delete_user(id):\n user_repo = UserRepository(db)\n base_repo = BaseRepository(db, User)\n u = base_repo.get_by_id(id)\n if not u:\n click.echo(\"User with specified id does not exists.\")\n return ERROR_USER_DOES_NOT_EXIST\n user_repo.delete_user(u)\n click.echo(\"User with id \" + str(id) + \" has been deleted.\")", "def test_remove_user_from_course_group(self):\r\n add_users(self.global_admin, CourseInstructorRole(self.course_key), self.creator)\r\n add_users(self.global_admin, CourseStaffRole(self.course_key), self.creator)\r\n\r\n add_users(self.creator, CourseStaffRole(self.course_key), self.staff)\r\n self.assertTrue(has_access(self.staff, CourseStaffRole(self.course_key)))\r\n\r\n remove_users(self.creator, CourseStaffRole(self.course_key), self.staff)\r\n self.assertFalse(has_access(self.staff, CourseStaffRole(self.course_key)))\r\n\r\n remove_users(self.creator, CourseInstructorRole(self.course_key), self.creator)\r\n 
self.assertFalse(has_access(self.creator, CourseInstructorRole(self.course_key)))", "def remove_user_from_govern(self, request, pk=None, user_id=None):\n try:\n user = UserProfile.objects.get(id=user_id, organization__id=pk)\n except ObjectDoesNotExist:\n raise ResourceNotFound\n else:\n user.organization = None\n user.save()\n\n return Response(status=status.HTTP_204_NO_CONTENT)", "def remove(self, user):\n self.packet.send_room([\"rp\", user.get_int_id(self.rooms),\n user.data.id], user.room)\n self.rooms[user.room][\"users\"].remove(user)", "def unfollow_group(request, pk):\n group = get_object_or_404(Group, id=pk)\n\n # Check user is not member of the group\n if not group.members.filter(id=request.user.id).exists():\n actions.unfollow(request.user, group, send_action=False)\n request.user.userprofile.follow_groups.remove(group)\n messages.warning(\n request,\n 'Successed, you are not following this Group anymore.')\n # the group members can choose not follow the group anymore, but still\n # been the member\n else:\n actions.unfollow(request.user, group, send_action=False)\n messages.warning(\n request,\n 'Successed, you are not following this Group anymore. But you are still the one of the members of this group.')\n\n return redirect('groups:groups-detail', pk)", "def do_del_group(dbsync, group):\n pass", "def delete_user(self, instance, name):\n return instance.delete_user(name)", "def deleteUserPermission(self, name, _type):\n self._client.deleteUserPermission(name, _type)", "def delete_all_user_group(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserGroup.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def remove_group_from_customer(self,\n customer_id,\n group_id):\n\n # Prepare query URL\n _url_path = '/v2/customers/{customer_id}/groups/{group_id}'\n _url_path = APIHelper.append_url_with_template_parameters(_url_path, {\n 'customer_id': customer_id,\n 'group_id': group_id\n })\n _query_builder = self.config.get_base_uri()\n _query_builder += _url_path\n _query_url = APIHelper.clean_url(_query_builder)\n\n # Prepare headers\n _headers = {\n 'accept': 'application/json'\n }\n\n # Prepare and execute request\n _request = self.config.http_client.delete(_query_url, headers=_headers)\n OAuth2.apply(self.config, _request)\n _response = self.execute_request(_request)\n\n decoded = APIHelper.json_deserialize(_response.text)\n if type(decoded) is dict:\n _errors = decoded.get('errors')\n else:\n _errors = None\n _result = ApiResponse(_response, body=decoded, errors=_errors)\n return _result", "def remove_user_from_role(request, username_or_email, role, group_title, event_name):\r\n\r\n username_or_email = strip_if_string(username_or_email)\r\n try:\r\n user = _user_from_name_or_email(username_or_email)\r\n except User.DoesNotExist:\r\n return u'<font color=\"red\">Error: unknown username or email \"{0}\"</font>'.format(username_or_email)\r\n\r\n role.remove_users(user)\r\n\r\n # Deal with historical event names\r\n if event_name in ('staff', 'beta-tester'):\r\n track.views.server_track(\r\n request,\r\n \"add-or-remove-user-group\",\r\n {\r\n \"event_name\": event_name,\r\n \"user\": unicode(user),\r\n \"event\": \"remove\"\r\n },\r\n page=\"idashboard\"\r\n )\r\n else:\r\n 
track.views.server_track(request, \"remove-instructor\", {\"instructor\": unicode(user)}, page=\"idashboard\")\r\n\r\n return '<font color=\"green\">Removed {0} from {1}</font>'.format(user, group_title)", "def remove_user(user_id):\n user = Users.query.get(user_id)\n if user_id in [0, 1]:\n return 'Removal of default User #%s (%s) is forbidden.' % (user_id, user.login), 'warning'\n db_session.delete(user)\n db_session.commit()\n return 'User #%s (%s) has been deleted.' % (user_id, user.login), 'success'", "def delete_user():", "def delete(self, sg_id):\n self.client.delete_security_group(sg_id)" ]
[ "0.77502465", "0.77219933", "0.7696446", "0.7663514", "0.76629514", "0.74954414", "0.7458197", "0.7424002", "0.73372084", "0.73255515", "0.7295747", "0.72660804", "0.7258215", "0.7208784", "0.7098873", "0.7064429", "0.70261437", "0.6963763", "0.69584423", "0.692891", "0.6916596", "0.6905372", "0.6857054", "0.68052864", "0.6796855", "0.6767436", "0.6738642", "0.6733409", "0.67216444", "0.6643759", "0.6642495", "0.66344684", "0.65901273", "0.65665656", "0.65127456", "0.6511883", "0.6481757", "0.6480387", "0.6473043", "0.6464266", "0.6436852", "0.6432192", "0.6421365", "0.6420464", "0.6416147", "0.6412671", "0.635293", "0.63421905", "0.6335634", "0.6335634", "0.6335634", "0.6329092", "0.6318042", "0.62984353", "0.6280613", "0.6277322", "0.6275028", "0.62723136", "0.62720674", "0.6248573", "0.6248313", "0.6235686", "0.62258816", "0.6179062", "0.61447585", "0.6134414", "0.6134414", "0.6088828", "0.60829407", "0.60626715", "0.6046876", "0.60457665", "0.6038993", "0.6037904", "0.60371804", "0.60211116", "0.60098684", "0.60064334", "0.60036", "0.59987813", "0.59976465", "0.59866625", "0.5969267", "0.5964451", "0.59601456", "0.5947122", "0.5942095", "0.59114474", "0.5909703", "0.59053534", "0.58885753", "0.5887266", "0.5876113", "0.5874613", "0.58683944", "0.58662224", "0.5863168", "0.5861349", "0.58579266", "0.58442956" ]
0.68140376
23
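The three trailing fields of each record fit together: negative_scores[i] is the retrieval score assigned to negatives[i], document_score is the score the positive document received, and document_rank counts how many negatives outscored it (the rank of 23 here matches the 23 scores above 0.68140376 in the list; rank 0 in the next record matches a positive that beat every negative). A minimal Python sketch of that relationship, under the assumption that a document_score of 0.0 marks an unscored positive reported as rank -1:

# Recompute document_rank from a record's scores. The -1 convention for
# unscored positives is an assumption inferred from the records below.
def recompute_rank(document_score: float, negative_scores: list[float]) -> int:
    if document_score == 0.0:
        return -1  # assumed sentinel: the positive was never scored
    return sum(score > document_score for score in negative_scores)

# Toy check against the record above (first few of its negative scores):
print(recompute_rank(0.68140376, [0.77502465, 0.77219933, 0.68052864]))  # -> 2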
Resets the OAuth2 client credentials for the SCIM client associated with this identity provider.
def reset_idp_scim_client(self, identity_provider_id, **kwargs): resource_path = "/identityProviders/{identityProviderId}/actions/resetScimClient" method = "POST" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "reset_idp_scim_client got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "identityProviderId": identity_provider_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="ScimClientCredentials") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, response_type="ScimClientCredentials")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def logout(self):\n self._client.clear_credentials()", "def reset_secret(self, save=False):\n client = cas.get_client()\n client.revoke_application_tokens(self.client_id, self.client_secret)\n self.client_secret = generate_client_secret()\n\n if save:\n self.save()\n return True", "def set_credentials(self, client_id=None, client_secret=None):\n self._client_id = client_id\n self._client_secret = client_secret\n\n # make sure to reset session due to credential change\n self._session = None", "def refresh(self):\n self._request_token(grant_type='client_credentials')", "def remove_client_credentials(self):\n if self._dry_run:\n return\n os.unlink(self._store_pathname)", "def reset_credentials(self):\n credentials = {}\n with open(self.credentials_file, 'w') as fh_credentials:\n fh_credentials.write(json.dumps(credentials))", "def reset(ctx):\n\n controller = ctx.obj['controller']\n click.echo('Resetting OATH data...')\n old_id = controller.id\n controller.reset()\n\n settings = ctx.obj['settings']\n keys = settings.setdefault('keys', {})\n if old_id in keys:\n del keys[old_id]\n settings.write()\n\n click.echo(\n 'Success! All OATH credentials have been cleared from your YubiKey.')", "def change_authentication(self, client_id=None, client_secret=None,\n access_token=None, refresh_token=None):\n # TODO: Add error checking so you cannot change client_id and retain\n # access_token. Because that doesn't make sense.\n self.client_id = client_id or self.client_id\n self.client_secret = client_secret or self.client_secret\n self.access_token = access_token or self.access_token\n self.refresh_token = refresh_token or self.refresh_token", "def refresh_credentials():\n global auth_token\n auth_token = get_oauth_token()", "def logout(client):\n\n return client.post('/v1/auth/revoke')", "def logOut(self):\n self.client.logout()", "def disconnect(self):\r\n self._apiSession.close()\r\n self._oAuthSession.close()\r\n \r\n # Check the access token and refresh if expired\r", "def auth_invalidate_session(self) -> None:\n self.__logger.debug('Eva.auth_invalidate_session called')\n return self.__http_client.auth_invalidate_session()", "def resetCredentials(self, request, response):\n response.expireCookie('.ASPXAUTH', path='/', domain=COOKIE_DOMAIN)\n response.expireCookie('username', path='/', domain=COOKIE_DOMAIN)", "def uninit_client(self):\n self.add_msg(\"Connection Lost\")\n self.client = None", "def refresh(self):\n self._request_token(grant_type='password', username=self._username,\n password=self._password)", "def on_reset_clientid(self, jdata):\n local_seed = get_rand_char(32).lower()\n config_file_name = MOLO_CONFIGS.get_config_object().get('domain', '')\n #keep Compatibility with old version\n if config_file_name and config_file_name!='molohub':\n config_file_name = CONFIG_FILE_NAME + '_' + config_file_name + '.yaml'\n else:\n config_file_name = CONFIG_FILE_NAME + '.yaml'\n save_local_seed(\n MOLO_CLIENT_APP.hass_context.config.path(config_file_name),\n local_seed)\n LOGGER.debug(\"reset clientid %s to %s\", self.client_id, local_seed)\n self.handle_close()", "def disconnect_identity(identity):\n session.pop(\"cern_resource\", None)\n key = current_app.config.get(\n \"OAUTHCLIENT_CERN_OPENID_SESSION_KEY\",\n OAUTHCLIENT_CERN_OPENID_SESSION_KEY,\n )\n provides = session.pop(key, set())\n identity.provides -= provides", "def reset_user(self):\n\n if self.resin.auth.is_logged_in():\n self.wipe_application()\n self.resin.models.key.base_request.request(\n 'user__has__public_key', 'DELETE',\n 
endpoint=self.resin.settings.get('pine_endpoint'), login=True\n )", "def remove_credentials(self, conjurrc: ConjurrcData):\n self.credentials_provider.remove_credentials(conjurrc)", "def re_authenticate(self):\n url = URLS['token']\n data = {\n \"grant_type\": \"refresh_token\",\n \"refresh_token\": self.refresh_token,\n \"client_id\": self.client_id,\n \"client_secret\": self.client_secret\n }\n r = requests.post(url, data=data)\n r.raise_for_status()\n j = r.json()\n self.access_token = j['access_token']\n self.refresh_token = j['refresh_token']\n self._set_token_expiration_time(expires_in=j['expires_in'])\n return r", "def test_credentials_set_reset(self):\n empty_setting = {\n 'AccessKeyId': None,\n 'SecretAccessKey': None,\n 'SessionToken': None\n }\n nonempty_setting = {\n 'AccessKeyId': '1',\n 'SecretAccessKey': '2',\n 'SessionToken': '3'\n }\n self.assertEqual(_credentials, empty_setting)\n credentials_set(nonempty_setting)\n self.assertEqual(_credentials, nonempty_setting)\n credentials_reset()\n self.assertEqual(_credentials, empty_setting)", "def renew_access_token(self):\n self._access_token = self._get_access_token()", "def _refresh_access_token(self) -> None:\n response = httpx.post(\n f\"{self._base_url}/oauth2/token\",\n proxies=self._proxies,\n data={\n \"grant_type\": \"client_credentials\",\n \"client_id\": self._api_key,\n \"client_secret\": self._api_secret,\n },\n )\n response.raise_for_status()\n token = response.json()[\"access_token\"]\n c = httpx.Client()\n c.close()\n self._authorization_headers = {\"Authorization\": f\"Bearer {token}\"}", "def _reset_connection(self):\n\n self.__userid = 0\n self.__token = 0\n self.__conn.close()\n\n self.__conn = httplib.HTTPConnection(\"www.slimtimer.com\")\n self._logon()", "def call_for_auth_reset(self):\n pos.select_dispenser(1)\n crindsim.lift_handle()\n pos.click(\"reset\")\n pos.click(\"yes\")\n crindsim.lower_handle()\n #Checks crind diag to see if reset message is displayed\n if not system.wait_for(lambda: \"reset\" in pos.read_dispenser_diag()[\"Status\"].lower(), verify = False):\n tc_fail(\"CRIND did not reset\")\n #Wait for crind to return to idle\n if not system.wait_for(lambda: \"idle\" in pos.read_dispenser_diag()[\"Status\"].lower(), timeout = 120, verify = False):\n tc_fail(\"CRIND did not return to idle\")\n pos.click(\"back\")", "def tearDown(self):\n self.client = app.test_client()\n self.salir = logout(self.client)", "def resetUser(self):\n\t\turl = \"https://habitica.com/api/v4/user/reset\"\n\t\treturn(postUrl(url, self.credentials))", "def reset(self):\n self.logger.debug(\"Resetting %s\", self.key)\n self.driver.reset(self.key)", "def revoke(self):\n # Set the application as unsucessful with the current datetime\n self.status = self.Status.REVOKED\n self.revoked_datetime = timezone.now()\n\n # Removes credentialing from the user\n self.user.is_credentialed = False\n self.user.credential_datetime = None\n\n with transaction.atomic():\n self.user.save()\n self.save()\n\n logger.info('Credentialing for user {0} has been removed.'.format(\n self.user.email))", "def recreate_client(token=None):\n if token:\n # If we've successfully retrieved the token from the session (or have\n # been provided with a token), get authorization.\n auth = get_spotify_auth(token)\n # TODO make sure auth token uses this too\n auth.refresh_token_if_needed(app_config[\"SPOTIFY_AUTH\"][\"token_duration\"])\n return Client(auth, session.get(\"client_session\"))\n else:\n return None", "def _refresh_token(self, client):\n\n url = 
self._url('token')\n client_data = self.clients[client]\n refresh_token = client_data['token']['refresh_token']\n data = {'grant_type': 'refresh_token',\n 'scope': 'PRODUCTION',\n 'refresh_token': refresh_token}\n consumer_key = client_data['response']['consumerKey']\n consumer_secret = client_data['response']['consumerSecret']\n auth = requests.auth.HTTPBasicAuth(consumer_key, consumer_secret)\n return self.POST(url, data=data, auth=auth)", "def reset(self):\n requests.put('{}/reset'.format(self._get_url()))", "def initialize_oauth2_session(self):\n\n def token_updater(token):\n \"\"\"Stores oauth2 token on disk\"\"\"\n try:\n with open(self.OAUTH_TOKEN_PATH, 'w') as f:\n json.dump(token, f)\n except Exception as err:\n log.Error('Could not save the OAuth2 token to %s. This means '\n 'you may need to do the OAuth2 authorization '\n 'process again soon. Original error: %s' % (\n self.OAUTH_TOKEN_PATH, err))\n\n token = None\n try:\n with open(self.OAUTH_TOKEN_PATH) as f:\n token = json.load(f)\n except IOError as err:\n log.Notice('Could not load OAuth2 token. '\n 'Trying to create a new one. (original error: %s)' % err)\n\n self.http_client = OAuth2Session(\n self.CLIENT_ID,\n scope=self.OAUTH_SCOPE,\n redirect_uri=self.OAUTH_REDIRECT_URL,\n token=token,\n auto_refresh_kwargs={\n 'client_id': self.CLIENT_ID,\n 'client_secret': self.CLIENT_SECRET,\n },\n auto_refresh_url=self.OAUTH_TOKEN_URL,\n token_updater=token_updater)\n\n if token is not None:\n self.http_client.refresh_token(self.OAUTH_TOKEN_URL)\n\n endpoints_response = self.http_client.get(self.metadata_url +\n 'account/endpoint')\n if endpoints_response.status_code != requests.codes.ok:\n token = None\n\n if token is None:\n if not sys.stdout.isatty() or not sys.stdin.isatty():\n log.FatalError('The OAuth2 token could not be loaded from %s '\n 'and you are not running duplicity '\n 'interactively, so duplicity cannot possibly '\n 'access Amazon Drive.' 
% self.OAUTH_TOKEN_PATH)\n authorization_url, _ = self.http_client.authorization_url(\n self.OAUTH_AUTHORIZE_URL)\n\n print('')\n print('In order to allow duplicity to access Amazon Drive, please '\n 'open the following URL in a browser and copy the URL of the '\n 'page you see after authorization here:')\n print(authorization_url)\n print('')\n\n redirected_to = (raw_input('URL of the resulting page: ')\n .replace('http://', 'https://', 1)).strip()\n\n token = self.http_client.fetch_token(\n self.OAUTH_TOKEN_URL,\n client_secret=self.CLIENT_SECRET,\n authorization_response=redirected_to)\n\n endpoints_response = self.http_client.get(self.metadata_url +\n 'account/endpoint')\n endpoints_response.raise_for_status()\n token_updater(token)\n\n urls = endpoints_response.json()\n if 'metadataUrl' not in urls or 'contentUrl' not in urls:\n log.FatalError('Could not retrieve endpoint URLs for this account')\n self.metadata_url = urls['metadataUrl']\n self.content_url = urls['contentUrl']", "def recreate_client_from_session():\n token = session.get(\"spotify_token\")\n return recreate_client(token)", "def revoke(self):\n if self.access_token is None:\n raise InvalidInvocation('no token available to revoke')\n\n self._authenticator.revoke_token(self.access_token, 'access_token')\n self._clear_access_token()", "def refresh(self):\n grant_type = 'https://oauth.reddit.com/grants/installed_client'\n self._request_token(grant_type=grant_type,\n device_id=self._device_id)", "def tearDown(self):\n Credentials.credentials_list = []", "def tearDown(self):\n Credentials.credentials_list = []", "def invalidate_existing_tokens(self, client_id, user):\n\n app = Application.objects.get(client_id=client_id)\n tokens = AccessToken.objects.filter(user=user, application=app)\n tokens.delete()", "def refresh_access_token(self):\n self._access_token = self.generate_access_token()", "def restore_auth_configuration(self, path):\n raise NotImplementedError()", "def reset_session(self):\n if not self.is_open():\n return\n if self._active_result is not None:\n self._active_result.fetch_all()\n try:\n self.keep_open = self.protocol.send_reset(self.keep_open)\n except (InterfaceError, OperationalError) as err:\n _LOGGER.warning(\n \"Warning: An error occurred while attempting to reset the \"\n \"session: %s\",\n err,\n )", "def unset_credentials(ctx, user, store):\n try:\n logger.debug(\"store={store}, user={user}\".format(store=store, user=user))\n _pycred.unset_credentials(store, user)\n except Exception as e:\n logger.debug(e, exc_info=True)\n print('Error: {msg}'.format(msg=str(e)), file=sys.stderr)\n sys.exit(1)", "def _reset(self):\n if not self._first_create:\n self._sc2_env.close()\n self._sc2_env = self._create_env()\n self._first_create = False\n return self._sc2_env.reset()", "def logout(self):\n self.auth = None", "def tearDown(self):\n Credentials.credential_list = []", "def manage_clearSecrets(self, REQUEST):\n manager = getUtility(IKeyManager)\n manager.clear()\n manager.rotate()\n response = REQUEST.response\n response.redirect(\n '%s/manage_secret?manage_tabs_message=%s' %\n (self.absolute_url(), 'All+secrets+cleared.')\n )", "def clear_session(self):\n self.session_mgr.clear_session()", "def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs):\n log.debug('Destroy grant token for client %r, %r', client_id, code)\n grant = self._grantgetter(client_id=client_id, code=code)\n if grant:\n grant.delete()", "def reset(ctx, force):\n\n force or click.confirm(\n \"WARNING! 
This will delete all stored OATH accounts and restore factory \"\n \"settings. Proceed?\",\n abort=True,\n err=True,\n )\n\n session = ctx.obj[\"session\"]\n click.echo(\"Resetting OATH data...\")\n old_id = session.device_id\n session.reset()\n\n keys = ctx.obj[\"oath_keys\"]\n if old_id in keys:\n del keys[old_id]\n keys.write()\n logger.info(\"Deleted remembered access key\")\n\n click.echo(\"Success! All OATH accounts have been deleted from the YubiKey.\")", "def relogin(self):\n spotify.Error.maybe_raise(lib.sp_session_relogin(self._sp_session))", "def logout():\n update_session(\"X-GEMINI-APIKEY\", \"\")\n set_secret_key(\"\".encode())\n set_login_state(False)", "def resetSecret(self):\n self.secret = str(uuid())\n self.put()", "def close(self):\n if self.client:\n self.client.connection.close()\n self.client = None", "def _reset_server_settings(self, server_id):\n\t\tself._remove_cfg_from_list(server_id)\n\t\tself._remove_settings_file(server_id)", "def init_client(self, client):\n self.client = client", "def set_credentials():", "def cleanup_credentials(self, conjurrc: ConjurrcData):\n self.credentials_provider.cleanup_if_exists(conjurrc.conjur_url)", "def reset(self, reset_from):\n self._grants.clear()\n self._groups.clear()\n self._reset_cached()\n self._id += 1\n for name, backend in self._backends.items():\n if name == reset_from:\n continue\n backend.reload()", "def test_replace_o_auth_client(self):\n pass", "def tearDown(self):\n Credentials.cred_list = []", "def testRefreshClientCredentials(self,\n mock_get_credentials, mock_initialize_client):\n # Set an expired token.\n self.turbinia_processor.credentials = mock.MagicMock(\n expiry = FAKE_CREDENTIALS['expiry'], expired = True)\n self.turbinia_processor.RefreshClientCredentials()\n mock_get_credentials.assert_called_once()\n mock_initialize_client.assert_called_once()", "def revoke(self, only_access=False):\n if only_access or self.refresh_token is None:\n super(Authorizer, self).revoke()\n else:\n self._authenticator.revoke_token(self.refresh_token,\n 'refresh_token')\n self._clear_access_token()\n self.refresh_token = None", "def cleanup(name, client=None):\n credential_specs_path = _get_path(client)\n path = os.path.join(credential_specs_path, name + '.json')\n fs.rm_safe(path)", "def set(self, client):\n if not client:\n raise SurvoxAPIMissingParameter('client')\n c = self.get()\n if not c:\n raise SurvoxAPIRuntime('No client available named: {name}'.format(name=self.name))\n return self.api_put(endpoint=self.url, data=client)", "def clear(self, client=None):\r\n if client is None:\r\n client = self.get_client(write=True)\r\n\r\n client.flushdb()", "def reset(serial):\n if click.confirm(\n \"Warning: Your credentials will be lost!!! Do you wish to continue?\"\n ):\n print(\"Press the button to confirm -- again, your credentials will be lost!!!\")\n solo.client.find(serial).reset()\n click.echo(\"....aaaand they're gone\")", "def reset(serial):\n if click.confirm(\n \"Warning: Your credentials will be lost!!! 
Do you wish to continue?\"\n ):\n print(\"Press the button to confirm -- again, your credentials will be lost!!!\")\n solo.client.find(serial).reset()\n click.echo(\"....aaaand they're gone\")", "def delete_credentials(self):\n Credentials.credentials_list.remove(self)", "def delete_credentials(self):\n Credentials.credentials_list.remove(self)", "def delete_credentials(self):\n Credentials.credentials_list.remove(self)", "def delete_client():\n preserve_cache = request.args.get('preserve_cache', False)\n delete_client(g.client_id, preserve_cache)\n return jsonify({'Success': True})", "def auth_renew_session(self) -> None:\n self.__logger.debug('Eva.auth_renew_session called')\n return self.__http_client.auth_renew_session()", "async def close(self) -> None:\n assert self._client is not None\n await self._client.close()\n self._client = None", "def logout(self):\r\n self._endpoint.destroy()", "def disconnect():\n\n # Only disconnect a connected user.\n credentials = session.get('credentials')\n if credentials is None:\n response = make_response(json.dumps('Current user not connected.'), 401)\n response.headers['Content-Type'] = 'application/json'\n return response\n\n # Execute HTTP GET request to revoke current token.\n access_token = credentials.access_token\n url = 'https://accounts.google.com/o/oauth2/revoke?token=%s' % access_token\n h = httplib2.Http()\n result = h.request(url, 'GET')[0]\n\n if result['status'] == '200':\n # Reset the user's session.\n del session['credentials']\n response = make_response(json.dumps('Successfully disconnected.'), 200)\n response.headers['Content-Type'] = 'application/json'\n return response\n else:\n # For whatever reason, the given token was invalid.\n response = make_response(\n json.dumps('Failed to revoke token for given user.', 400))\n response.headers['Content-Type'] = 'application/json'\n return response", "def refresh_auth_token(self):\n self._auth_token = self.generate_auth_token()", "def revoke_authorization(self):\n if not self.runner.browser:\n raise Exception('Webdriver must be connected first.')\n\n self.runner.get(self.revoke_url)\n\n steps = itertools.chain(self.config.get('sign_in_steps', []),\n self.config.get('revoke_steps', []))\n for step in steps:\n self.runner.execute_step(step)", "def setUp(self):\r\n super(CLITestAuthNoAuth, self).setUp()\r\n self.mox = mox.Mox()\r\n self.client = client.HTTPClient(username=USERNAME,\r\n tenant_name=TENANT_NAME,\r\n password=PASSWORD,\r\n endpoint_url=ENDPOINT_URL,\r\n auth_strategy=NOAUTH,\r\n region_name=REGION)\r\n self.addCleanup(self.mox.VerifyAll)\r\n self.addCleanup(self.mox.UnsetStubs)", "def auth_token_provider_client_id(self, auth_token_provider_client_id):\n\n self._auth_token_provider_client_id = auth_token_provider_client_id", "def logout(self):\n try:\n if self._session_id is not None:\n url = (yield self.get_sitemap())['authorization'] + '/user/logout'\n response = yield self.authorized_fetch(\n url=url, auth_token=self._session_id, method='POST', body='{}')\n self._logger.info(\"Logout result: %s\", response.body)\n finally:\n # Clear the local session_id, no matter what katportal says\n self._session_id = None\n self._current_user_id = None", "def terminate_session():\n token = oidc.user_loggedin and oidc.get_access_token()\n if token and oidc.validate_token(token):\n # Direct POST to Keycloak necessary to clear KC domain browser cookie\n logout_uri = oidc.client_secrets['userinfo_uri'].replace(\n 'userinfo', 'logout')\n data = {\n 'client_id': 
oidc.client_secrets['client_id'],\n 'client_secret': oidc.client_secrets['client_secret'],\n 'refresh_token': oidc.get_refresh_token()}\n requests.post(logout_uri, auth=BearerAuth(token), data=data)\n\n oidc.logout() # clears local cookie only", "def testUpdateCredentials(self):\r\n \r\n credentials = dict()\r\n credentials[\"username\"] = \"\"\r\n credentials[\"password\"] = \"\"\r\n self._factory.updateCredentials(credentials)", "def delete_credential(credentials):\n credentials.delete_credentials()", "def close(self, **kwargs):\r\n del(self._client_write)\r\n del(self._client_read)\r\n self._client_write = None\r\n self._client_read = None", "def client_id(self, client_id):\n\n self._client_id = client_id", "def client_id(self, client_id):\n\n self._client_id = client_id", "def client_id(self, client_id):\n\n self._client_id = client_id", "def client_id(self, client_id):\n\n self._client_id = client_id", "def update_access_token(self):\n self.token = util.prompt_for_user_token(self._username, scope,\n client_id=const.CLIENT_ID,\n client_secret=const.CLIENT_SECRET,\n redirect_uri=const.REDIRECT_URL)\n self._client = spotipy.Spotify(auth=self.token)", "def delete_credentials(self):\n Credentials.credential_list.remove(self)", "def reset(self):\r\n return self._api.reset()", "def close(self):\n self._close()\n self._closed = True\n self._auth_error = self._AUTH_ERROR_MESSAGES[0]\n self.login_info = None", "def reset(cls):\n cls._options = None\n cls._scoped_instances = {}", "def close(self):\n self.password = None\n self.session.close()", "def reset(self):\n return self._send_command('reset')", "def revoke_access_token(self):\n response = self._telegraph.method('revokeAccessToken')\n\n self._telegraph.access_token = response.get('access_token')\n\n return response", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r" ]
[ "0.63996184", "0.61857796", "0.6167417", "0.61205524", "0.6116238", "0.5673805", "0.5546106", "0.54928285", "0.54202366", "0.53968155", "0.53470254", "0.53334624", "0.5311507", "0.5289614", "0.5285055", "0.5245378", "0.52309966", "0.5217204", "0.5211187", "0.51890314", "0.51373774", "0.5134191", "0.51086605", "0.50872236", "0.50849146", "0.50816184", "0.5076356", "0.50723517", "0.5056509", "0.50351644", "0.50305945", "0.50273395", "0.5019926", "0.5003783", "0.49992272", "0.49546787", "0.4938524", "0.4938042", "0.4938042", "0.4926619", "0.4913559", "0.49084577", "0.49039096", "0.49009112", "0.48954976", "0.489287", "0.4881015", "0.48774353", "0.487596", "0.48714855", "0.48709038", "0.48705488", "0.48688814", "0.48657453", "0.4855642", "0.48531353", "0.48346153", "0.48328194", "0.48272714", "0.48265642", "0.48206946", "0.48067045", "0.48008415", "0.47943643", "0.47803903", "0.4779993", "0.47573358", "0.47569838", "0.47569838", "0.475283", "0.475283", "0.475283", "0.47470602", "0.47379908", "0.47331262", "0.4722332", "0.47157836", "0.47103503", "0.47093344", "0.47068283", "0.4706789", "0.47053587", "0.47038698", "0.4690565", "0.46853787", "0.46850157", "0.46794087", "0.46794087", "0.46794087", "0.46794087", "0.4678685", "0.4677918", "0.46745285", "0.46685895", "0.4668137", "0.46677893", "0.4659548", "0.4652025", "0.4651662", "0.4651662" ]
0.6584309
0
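The record above pairs the reset_idp_scim_client query with its OCI Python SDK implementation. A minimal usage sketch, assuming a standard ~/.oci/config profile; the identity-provider OCID is a placeholder, and the client_id/client_secret attribute names are taken from the SDK's ScimClientCredentials model named in response_type:

# Hypothetical call to the method documented above. Resetting
# invalidates the previous SCIM credentials, so capture the new pair.
import oci

config = oci.config.from_file()  # assumes the DEFAULT profile exists
identity = oci.identity.IdentityClient(config)

credentials = identity.reset_idp_scim_client(
    identity_provider_id="ocid1.saml2idp.oc1..exampleuniqueID",
).data
print(credentials.client_id, credentials.client_secret)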
Updates the specified auth token's description.
def update_auth_token(self, user_id, auth_token_id, update_auth_token_details, **kwargs): resource_path = "/users/{userId}/authTokens/{authTokenId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_auth_token got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "authTokenId": auth_token_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_auth_token_details, response_type="AuthToken") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_auth_token_details, response_type="AuthToken")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_api_video_patch_detail_token_user_description(self):\n video = factories.VideoFactory(description=\"my description\")\n jwt_token = AccessToken()\n jwt_token.payload[\"resource_id\"] = str(video.id)\n jwt_token.payload[\"roles\"] = [random.choice([\"instructor\", \"administrator\"])]\n jwt_token.payload[\"permissions\"] = {\"can_update\": True}\n\n data = {\"description\": \"my new description\"}\n\n response = self.client.patch(\n f\"/api/videos/{video.id}/\",\n data,\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n content_type=\"application/json\",\n )\n self.assertEqual(response.status_code, 200)\n video.refresh_from_db()\n self.assertEqual(video.description, \"my new description\")", "def test_api_video_update_detail_token_user_description(self):\n video = factories.VideoFactory(description=\"my description\")\n jwt_token = AccessToken()\n jwt_token.payload[\"resource_id\"] = str(video.id)\n jwt_token.payload[\"roles\"] = [random.choice([\"instructor\", \"administrator\"])]\n jwt_token.payload[\"permissions\"] = {\"can_update\": True}\n\n response = self.client.get(\n f\"/api/videos/{video.id}/\",\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n )\n data = json.loads(response.content)\n data[\"description\"] = \"my new description\"\n response = self.client.put(\n f\"/api/videos/{video.id}/\",\n data,\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n content_type=\"application/json\",\n )\n self.assertEqual(response.status_code, 200)\n video.refresh_from_db()\n self.assertEqual(video.description, \"my new description\")", "def update_description(self, host, baseUrl, description):\n self._host = host\n self._urlBase = baseUrl\n self._description = description\n return", "def updateTag(self, authenticationToken, tag):\r\n pass", "async def slashtag_edit_description(\n self, ctx: commands.Context, tag: GuildTagConverter, *, description: str\n ):\n await ctx.send(await tag.edit_description(description))", "def description(self, new_description):\r\n self.set({\"description\": new_description})", "def set_description(self, desc: str) -> None:\n self.metadata.data[\"description\"] = desc", "def setDescription(self, description):\n url = G.api + self.testId + '/snapshots/' + self.hash\n self.info = requests.put(url, auth=(G.username, G.authkey), data={'description':description})", "def request_description_update():\n global should_update_description\n should_update_description = True", "def updateNote(self, authenticationToken, note):\r\n pass", "def update_description(self, option, desc):\n _, command = self.__options[option]\n self.__options[option] = (desc, command)", "def test_api_video_update_detail_token_user_title(self):\n video = factories.VideoFactory(title=\"my title\")\n jwt_token = AccessToken()\n jwt_token.payload[\"resource_id\"] = str(video.id)\n jwt_token.payload[\"roles\"] = [random.choice([\"instructor\", \"administrator\"])]\n jwt_token.payload[\"permissions\"] = {\"can_update\": True}\n data = {\"title\": \"my new title\"}\n response = self.client.put(\n f\"/api/videos/{video.id}/\",\n data,\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n content_type=\"application/json\",\n )\n self.assertEqual(response.status_code, 200)\n video.refresh_from_db()\n self.assertEqual(video.title, \"my new title\")", "def update(self, descriptor_msg, auth=None):\n request_args = self._make_request_args(descriptor_msg, auth)\n try:\n response = requests.put(**request_args)\n response.raise_for_status()\n except requests.exceptions.ConnectionError as e:\n msg = \"Could not connect to 
restconf endpoint: %s\" % str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.HTTPError as e:\n msg = \"PUT request to %s error: %s\" % (request_args[\"url\"], response.text)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.Timeout as e:\n msg = \"Timed out connecting to restconf endpoint: %s\", str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e", "def set_description(desc):\n global last_description\n last_description = desc", "def _update_token(token):\n session.token = token", "def edit_description(self, task, new_description):\n raise ValueError(\"cannot edit description in 'In Progress' status\")", "def add_description(self, desc):\n self.description = desc", "def description(request):\n if request.method != 'POST':\n description = request.issue.description or \"\"\n return HttpTextResponse(description)\n if not request.issue.edit_allowed:\n if not IS_DEV:\n return HttpTextResponse('Login required', status=401)\n issue = request.issue\n issue.description = request.POST.get('description')\n issue.put()\n return HttpTextResponse('')", "def update_token(self, token_response):\n self.access_token = token_response['access_token']\n self.access_token_expires = datetime.fromtimestamp(\n time.time() + token_response['expires_in'],\n )\n if 'refresh_token' in token_response:\n self.refresh_token = token_response['refresh_token']", "def set_description(self, description):\r\n self.__description = description", "def updateResource(self, authenticationToken, resource):\r\n pass", "def set_description(self, description):\n self.description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def test_update_task_description(\n self,\n mock_config_load,\n mock_custom_objects_api,\n mock_core_v1_api,\n ):\n task_id = util.MOCK_UUID_5\n rv = TEST_CLIENT.patch(\n f\"/tasks/{task_id}\",\n json={\n \"description\": \"valid description\",\n },\n )\n\n self.assertEqual(rv.status_code, 200)", "def set_description(self, data):\n self._description = self._uni(data)", "def api_token2(self, api_token2):\n\n self._api_token2 = api_token2", "def set_description(self, room_description):\n self.description = room_description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, 
description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "async def set_profile_description(self, ctx, *, description: str):\n max_words = self.plugin.data.profile.max_description_length\n if len(description) > max_words:\n res = f\"{ctx.emotes.web_emotion.xx} Sorry but profile description cannot exceed {max_words} word limit.\"\n return await ctx.send_line(res)\n profile = await self.cache.get_profile(ctx.author.id)\n await profile.set_description(description)\n embed = self.bot.theme.embeds.primary(title=\"✅ Your Profile Description has been updated to:\")\n embed.set_author(name=ctx.author.name, icon_url=ctx.author.avatar_url)\n embed.description = profile.description\n await ctx.send(\"\", embed=embed)", "def SetDescription(self, description):\n self.description = str(description)", "def description(self, description) :\n\t\ttry :\n\t\t\tself._description = description\n\t\texcept Exception as e:\n\t\t\traise e", "def set_description(self, description):\n self.__description = description", "def set_description(self, sNewDescription):\n\t\tcall_sdk_function('PrlVmDev_SetDescription', self.handle, sNewDescription)", "def token_detail(self, token_detail):\n\n self._token_detail = token_detail", "def test_api_video_update_detail_token_user_other_video(self):\n video_token = factories.VideoFactory()\n video_update = factories.VideoFactory(title=\"my title\")\n jwt_token = AccessToken()\n jwt_token.payload[\"resource_id\"] = str(video_token.id)\n jwt_token.payload[\"roles\"] = [random.choice([\"instructor\", \"administrator\"])]\n jwt_token.payload[\"permissions\"] = {\"can_update\": True}\n\n data = {\"title\": \"my new title\"}\n response = 
self.client.put(\n f\"/api/videos/{video_update.id}/\",\n data,\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n content_type=\"application/json\",\n )\n\n self.assertEqual(response.status_code, 403)\n video_update.refresh_from_db()\n self.assertEqual(video_update.title, \"my title\")", "def set_description(self, description):\n self._description = description", "def description(self, description: str):\n\n self._description = description", "def description(self, description: str):\n\n self._description = description", "def description(self, description: str):\n\n self._description = description", "def description(self, description: str):\n\n self._description = description", "def set_description(self, descr):\n self._current_test_descr = descr", "def update(self):\n token = request_token(self.client_id, self.client_secret)\n self.request_time = datetime.now()\n self._initialized = True\n self.token = token", "def description(self, value):\n self._update_values('description', value)", "def set_desc(self, item_desc):\r\n self.description = item_desc", "def description(self, description):\n\n self._set_field(\"description\", description)", "def set_longdescription(self, longdesc):\n self.longdescription(longdesc)", "def edit(key, input_token):\n try:\n target = decrypt_string(input_token.strip(), key=key)\n except InvalidToken:\n click.echo('Error: Token is invalid')\n sys.exit(1)\n\n # Call external editor for editting value\n new_value = force_text(editor.edit(contents=target)).strip()\n encrypted_new_value = encrypt_string(new_value, key=key)\n\n # Display the plain new value\n click.echo('The new value is: ', nl=False)\n click.echo(click.style(new_value, fg='blue'))\n\n # Display the encrypted new value\n click.echo('The encrypted new value is: ', nl=False)\n click.echo(click.style(encrypted_new_value, fg='red'))", "def description(self, new_description):\n self.set_description(new_description, self._xml)\n self._description = self._read_description(self._xml)", "def update_experiment_description(self, experiment_id, description):\n return self.dbclient.update_by_id(Tables.EXPERIMENTS, experiment_id, {\n ExperimentAttr.DESC: description\n })", "def set_description(\n self, path: Union[bytes, str], description: Optional[Union[bytes, str]] = None\n ) -> None:\n path = _to_bytes_or_null(path)\n description = _to_bytes_or_null(description)\n ret = lib.Fapi_SetDescription(self._ctx, path, description)\n _chkrc(ret)", "def update_body(self, body: dict[Any, Any]) -> None:\n body[\"data\"][\"AUTHENTICATOR\"] = ID_TOKEN_AUTHENTICATOR\n body[\"data\"][\"TOKEN\"] = self._id_token", "def update_group(self, group_id, new_description):\n url = self.groups_url + \"/\" + group_id\n new_data = json.dumps({\"description\": new_description})\n\n return requests.put(url, new_data, headers=self.headers)", "async def edit(self, ctx: \"IceTeaContext\", otag: TagConverter, *, new_content: str):\n tag: models.Tag = otag\n if tag.alias:\n return await ctx.send(\"Unable to edit an alias\")\n if tag.author == ctx.author.id:\n content = await ctx.clean_content(new_content)\n await tag.edit(content=content)\n await ctx.send(\"Tag updated Successfully\")\n elif tag.author != ctx.author.id:\n await ctx.send(\"You do not own this tag\")", "def _upgrade_token(self, http_body):\n self.token_string = auth_sub_string_from_body(http_body)", "def set_description(self, description):\n self.description = description\n if not self.record:\n return\n self.mdb.results.update({'_id':self.result_id}, \n 
{'$set':{'test_case':description}})", "def setDescription(self, valueName, valueDescription):\n\t\tself.settings[valueName][1] = valueDescription", "def description(self, newDescription=None):\n pass", "def stat_description(self, stat_description):\n\n self._stat_description = stat_description", "def additional_description(self, additional_description):\n\n self._additional_description = additional_description" ]
[ "0.6497291", "0.6414944", "0.62694657", "0.610832", "0.5971511", "0.5964725", "0.581407", "0.5786695", "0.5741787", "0.57394075", "0.5723997", "0.5715468", "0.5691383", "0.55781394", "0.55590355", "0.5552019", "0.5540073", "0.5533868", "0.552116", "0.5510495", "0.54998803", "0.54981506", "0.54952526", "0.54952526", "0.54952526", "0.54952526", "0.5490366", "0.54829794", "0.5477068", "0.5476743", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54521465", "0.54397243", "0.5424509", "0.54027516", "0.5374625", "0.5365813", "0.535429", "0.5351231", "0.5344595", "0.5326898", "0.5326898", "0.5326898", "0.5326898", "0.53243995", "0.5303555", "0.5283419", "0.5266276", "0.52600056", "0.52561796", "0.52262765", "0.52174884", "0.5208023", "0.52009517", "0.51938313", "0.51874405", "0.5172566", "0.5169828", "0.5150995", "0.5150583", "0.5145147", "0.5144556", "0.51435417" ]
0.0
-1
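For the update_auth_token record above, a minimal usage sketch; both OCIDs are placeholders, and the UpdateAuthTokenDetails model from the SDK carries the one mutable field, the description:

# Hypothetical sketch pairing the method above with its details model.
import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

details = oci.identity.models.UpdateAuthTokenDetails(
    description="rotated token for the CI pipeline",
)
token = identity.update_auth_token(
    user_id="ocid1.user.oc1..exampleuniqueID",
    auth_token_id="ocid1.credential.oc1..exampleuniqueID",
    update_auth_token_details=details,
).data  # an AuthToken, per the response_type in the code above
print(token.description)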
Updates the authentication policy for the specified tenancy.
def update_authentication_policy(self, compartment_id, update_authentication_policy_details, **kwargs): resource_path = "/authenticationPolicies/{compartmentId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_authentication_policy got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "compartmentId": compartment_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_authentication_policy_details, response_type="AuthenticationPolicy") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_authentication_policy_details, response_type="AuthenticationPolicy")
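A minimal usage sketch for the method above, assuming the tenancy OCID from the loaded config and an illustrative password policy; the UpdateAuthenticationPolicyDetails and PasswordPolicy field names are assumptions based on the SDK's identity models:

# Hypothetical sketch; the policy values shown are placeholders.
import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

details = oci.identity.models.UpdateAuthenticationPolicyDetails(
    password_policy=oci.identity.models.PasswordPolicy(
        minimum_password_length=14,
        is_numeric_characters_required=True,
    ),
)
policy = identity.update_authentication_policy(
    compartment_id=config["tenancy"],  # the policy applies tenancy-wide
    update_authentication_policy_details=details,
).data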
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_policy(self, *args, **kwargs):\r\n pass", "def update_policy(self):\n pass", "def update_access_policy_request(self, subscription_id: str, resource_group_name: str,\n vault_name: str, operation_kind: str, object_id: str,\n keys: list[str], secrets: list[str], certificates: list[str],\n storage: list[str]) -> dict[str, Any]:\n permissions = self.config_vault_permission(\n keys, secrets, certificates, storage)\n data = {\"properties\": {\"accessPolicies\": [\n {\"objectId\": object_id, \"permissions\": permissions, \"tenantId\": self.ms_client.tenant_id}]}}\n full_url = urljoin(self.azure_cloud.endpoints.resource_manager, f'subscriptions/{subscription_id}/resourceGroups/'\n f'{resource_group_name}/providers/Microsoft.KeyVault/vaults/'\n f'{vault_name}/accessPolicies/{operation_kind}')\n\n return self.http_request('PUT', full_url=full_url, data=data, ok_codes=[200, 201])", "def update_service_access_policies(DomainName=None, AccessPolicies=None):\n pass", "def UpdatePolicy(self, request, global_params=None):\n config = self.GetMethodConfig('UpdatePolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def rbac_policy_update(request, policy_id, **kwargs):\n body = {'rbac_policy': kwargs}\n rbac_policy = neutronclient(request).update_rbac_policy(\n policy_id, body=body).get('rbac_policy')\n return RBACPolicy(rbac_policy)", "def put(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n update_l7_policy(request, **kwargs)", "def update_policy(ranger_url, policy_id, policy_data, admin_username_password):\n\n url = format(\"{ranger_url}/service/public/v2/api/policy/{policy_id}\")\n\n base_64_string = base64.encodestring(admin_username_password).replace('\\n', '')\n\n request = urllib2.Request(url, json.dumps(policy_data))\n request.get_method = lambda: 'PUT'\n request.add_header('Content-Type', 'application/json')\n request.add_header('Accept', 'application/json')\n request.add_header('Authorization', format('Basic {base_64_string}'))\n\n try:\n result = openurl(request, timeout=20)\n response_code = result.getcode()\n if response_code == 200:\n Logger.info(format(\"Successfully updated policy in Ranger Admin\"))\n return response_code\n else:\n Logger.error(format(\"Unable to update policy in Ranger Admin\"))\n return None\n except urllib2.HTTPError as e:\n raise Fail(\"HTTPError while updating policy Reason = \" + str(e.code))\n except urllib2.URLError as e:\n raise Fail(\"URLError while updating policy. Reason = \" + str(e.reason))\n except TimeoutError:\n raise Fail(\"Connection timeout error while updating policy\")\n except Exception as err:\n raise Fail(format(\"Error while updating policy. 
Reason = {err}\"))", "def post_access_control_list_update(self, resource_id, resource_dict):\n pass", "def UpdateAccessApprovalSettings(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details(\"Method not implemented!\")\n raise NotImplementedError(\"Method not implemented!\")", "def test_update_hyperflex_proxy_setting_policy(self):\n pass", "def Update(self,\n fp_id=None,\n only_generate_request=False,\n firewall_policy=None,\n batch_mode=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.Patch(\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)[2])\n return self.WaitOperation(\n op_res, message='Updating the organization firewall policy.')", "def _set_restricted_policy(environ, bag):\n username = environ['tiddlyweb.usersign']['name']\n if username == 'GUEST':\n return\n bag.policy.owner = username\n # accept does not matter here\n for constraint in ['read', 'write', 'create', 'delete', 'manage']:\n setattr(bag.policy, constraint, [username])\n return", "def set_policy(self, name, policy):\n client = self.connect(VAULT_TOKEN)\n client.set_policy(name, policy)", "def policy_update_fn(self, data: Dict[str, Any], result: Dict[str, Any]) -> None:", "def test_update_firewall_policy(self):\r\n resource = 'firewall_policy'\r\n cmd = firewallpolicy.UpdateFirewallPolicy(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_apic(self):\n return self.client.policy.update(policyList=self.policy_list.response)", "def add_grading_policy(self, grading_policy):\r\n\r\n self.course.grading_policy = grading_policy\r\n store = editable_modulestore()\r\n store.update_item(self.course, '**replace_user**')\r\n self.refresh_course()", "def test_patch_hyperflex_proxy_setting_policy(self):\n pass", "def apply_policy(self, policy):\n tenant_name = policy['tenant_name']\n fw_id = policy['fw_id']\n fw_name = policy['fw_name']\n LOG.debug(\"asa_apply_policy: tenant=%s fw_id=%s fw_name=%s\",\n tenant_name, fw_id, fw_name)\n cmds = [\"conf t\", \"changeto context \" + tenant_name]\n\n rule_dict = policy['rules']\n for rule_id in rule_dict:\n rule = rule_dict[rule_id]\n protocol = rule['protocol']\n name = rule['name']\n enabled = rule['enabled']\n dst_port = rule['destination_port']\n src_port = rule['source_port']\n\n if (rule['source_ip_address'] is not None):\n src_ip = IPNetwork(rule['source_ip_address'])\n else:\n src_ip = IPNetwork('0.0.0.0/0')\n\n if (rule['destination_ip_address'] is not None):\n dst_ip = IPNetwork(rule['destination_ip_address'])\n else:\n dst_ip = IPNetwork('0.0.0.0/0')\n\n if rule['action'] == 'allow':\n action = 'permit'\n else:\n action = 'deny'\n\n LOG.debug(\"rule[%s]: name=%s enabled=%s prot=%s dport=%s sport=%s \\\n dip=%s %s sip=%s %s action=%s\",\n rule_id, name, enabled, protocol, dst_port, src_port,\n dst_ip.network, dst_ip.netmask,\n src_ip.network, src_ip.netmask, action)\n\n acl = \"access-list \"\n acl = (acl + tenant_name + \" extended \" + action + \" \" +\n protocol + \" \")\n if (rule['source_ip_address'] is None):\n acl = acl + \"any \"\n else:\n acl = acl + str(src_ip.network) + \" \" + (\n str(src_ip.netmask) + \" \")\n if (src_port is not None):\n if (':' in src_port):\n range = 
src_port.replace(':', ' ')\n acl = acl + \"range \" + range + \" \"\n else:\n acl = acl + \"eq \" + src_port + \" \"\n if (rule['destination_ip_address'] is None):\n acl = acl + \"any \"\n else:\n acl = acl + str(dst_ip.network) + \" \" + \\\n str(dst_ip.netmask) + \" \"\n if (dst_port is not None):\n if (':' in dst_port):\n range = dst_port.replace(':', ' ')\n acl = acl + \"range \" + range + \" \"\n else:\n acl = acl + \"eq \" + dst_port + \" \"\n if (enabled is False):\n acl = acl + 'inactive'\n\n # remove the old ace for this rule\n if (rule_id in self.rule_tbl):\n cmds.append('no ' + self.rule_tbl[rule_id])\n\n self.rule_tbl[rule_id] = acl\n if tenant_name in self.tenant_rule:\n if rule_id not in self.tenant_rule[tenant_name]['rule_lst']:\n self.tenant_rule[tenant_name]['rule_lst'].append(rule_id)\n cmds.append(acl)\n cmds.append(\"access-group \" + tenant_name + \" global\")\n cmds.append(\"write memory\")\n\n LOG.debug(cmds)\n data = {\"commands\": cmds}\n return self.rest_send_cli(data)", "def test_update_ikepolicy(self):\r\n resource = 'ikepolicy'\r\n cmd = ikepolicy.UpdateIKEPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_Policy(self,inputpolicy):\n \n policyob = self.SD_Map.retrieve_ob(inputpolicy)\n policyob.values[-1] = self.PolicyDicts[inputpolicy][self.translate(self.policy_option_vars[inputpolicy].get(),\n input_language = self.language,\n output_language = 'english')]", "def updateResource(self, authenticationToken, resource):\r\n pass", "def pre_access_control_list_update(self, resource_id, resource_dict):\n pass", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def setPolicy(self, value):\n return self._set(policy=value)", "def update_policy(policy_id):\n old_policy = PolicyService.get_policy_by_id(policy_id)\n if old_policy is None:\n abort(404)\n new_policy = PolicyService.update_policy_by_id(policy_id, json_to_policy(request.json))\n if new_policy is None:\n abort(406)\n return new_policy.__dict__", "def _enforce(self, req, action):\n try:\n self.policy.enforce(req.context, action, {})\n except exception.Forbidden:\n raise HTTPForbidden()", "def _update_by_auth(self, username, password, token_path, grant_type, client_id):\n token = requests.post(\n self.api_url + self.oauthpath,\n params={\n \"username\": username,\n \"grant_type\": grant_type,\n \"client_id\": client_id\n },\n data={\n \"password\": password\n }).json()\n if \"access_token\" in token:\n self.username = username\n self.token = token[\"access_token\"]\n else:\n raise KeyError(\"Authentication failed\", token)", "def test_update_hyperflex_local_credential_policy(self):\n pass", "def device_password_policy(self, device_password_policy):\n\n self._device_password_policy = device_password_policy", "def setAcceptPolicy(self, policy):\n if not self.__loaded:\n self.__load()\n \n if policy > self.AcceptMax:\n return\n if policy == self.__acceptCookies:\n return\n \n self.__acceptCookies = policy\n self.__saveTimer.changeOccurred()", "def set_actor_policy(self, actor_policy):\n raise NotImplementedError", "def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy:\n return response", "def test_patch_hyperflex_local_credential_policy(self):\n pass", "def test_change_default_throttling_settings_http_with_overwrite_throttled_rate_above_account_quota():", "def 
test_tenant_update(sample_identity):\n access_token, tenant, tenant_user, tc = sample_identity\n tenant.name = \"ilovebeansllc\"\n headers = {\"Authorization\": \"Bearer \" + access_token}\n updated_tenant_request = id_schemas.TenantSchema().dump(tenant)\n updated_tenant = tc.put(\n f\"api/v1/identity/tenant/{tenant.id}\",\n json=updated_tenant_request,\n headers=headers,\n )\n assert updated_tenant.status_code == 200, \"Tenant could not be updated\"", "def test_patch_namespaced_policy(self):\n pass", "def test_patch_cluster_policy_binding(self):\n pass", "def add_to_resource_policy(self, permission: aws_cdk.aws_iam.PolicyStatement) -> None:\n ...", "def test_patch_namespaced_policy_binding(self):\n pass", "def switch_tenant(tenant_name, password, environment, tester_conf):\n if hasattr(environment.cfy, '_current_user'):\n environment.add_cleanup(\n environment.cfy.profiles.set,\n kwargs=environment.cfy._current_user,\n )\n else:\n creds_conf = tester_conf['cloudify']\n environment.add_cleanup(\n environment.cfy.profiles.set,\n kwargs={\n 'tenant': 'default_tenant',\n 'username': creds_conf['existing_manager_username'],\n 'password': creds_conf['existing_manager_password'],\n },\n )\n\n environment.cfy._current_user = {\n 'tenant': tenant_name,\n 'username': tenant_name,\n 'password': password,\n }\n\n environment.cfy.profiles.set(\n tenant=tenant_name,\n username=tenant_name,\n password=password,\n )", "def refreshAuthentication(self, authenticationToken):\r\n pass", "def patch(self,\n dns_forwarder_zone_id,\n policy_dns_forwarder_zone,\n ):\n return self._invoke('patch',\n {\n 'dns_forwarder_zone_id': dns_forwarder_zone_id,\n 'policy_dns_forwarder_zone': policy_dns_forwarder_zone,\n })", "def put(self):\n coll_policy_id = views_helper.get_request_value(self.request, \"coll_policy_id\", \"BODY\")\n name = views_helper.get_request_value(self.request, \"coll_policy_name\", \"BODY\")\n command = views_helper.get_request_value(self.request, \"command\", \"BODY\")\n desc = views_helper.get_request_value(self.request, \"desc\", \"BODY\")\n ostype = views_helper.get_request_value(self.request, \"ostype\", \"BODY\")\n coll_policy_update_data = {\n 'name': name,\n 'cli_command': command,\n 'desc': desc,\n 'ostype': ostype\n }\n if len(CollPolicy.objects.filter(~Q(coll_policy_id=coll_policy_id), name=name)):\n data = {\n 'data': '',\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.FALSE,\n constants.MSG_TYPE: 'NAME_DUPLICATE',\n constants.MESSAGE: constants.COLLECTION_POLICY_NAME_DUPLICATE\n }\n\n }\n return api_return(data=data)\n obj = CollPolicy.objects.get(coll_policy_id=coll_policy_id)\n serializer = CollPolicyEditSerializer(instance=obj, data=coll_policy_update_data)\n try:\n if serializer.is_valid():\n serializer.save()\n data = {\n 'data': serializer.data,\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.TRUE,\n constants.MESSAGE: constants.SUCCESS\n }\n\n }\n return api_return(data=data)\n except Exception as e:\n if constants.DEBUG_FLAG:\n print traceback.format_exc(e)\n return exception_handler(e)", "def put(self):\n token = self.access_token_from_authorization_header()\n\n data = self.convert_argument_to_json()\n\n permissions = data['permissions']\n\n if len(permissions) is not 4:\n raise tornado.web.HTTPError(400, 'Some permissions are missing. 
Permissions count must be 4.')\n\n for ix, permission in enumerate(permissions):\n\n try:\n permission = int(permission)\n\n if permission not in [0, 1]:\n raise Exception('Permission must be either of 0 or 1.')\n\n permissions[ix] = int(permission)\n\n except Exception as ex:\n raise tornado.web.HTTPError(400, 'Permission must be integer')\n\n with self.session_scope() as session:\n token = convert_uuid_or_400(token)\n\n token = session.query(AuthToken).filter(AuthToken.uid == token).one_or_none()\n\n user = token.auth\n updated_permission = bitarray()\n\n updated_permission.extend(permissions)\n\n user.permissions = updated_permission.to01()\n\n session.flush()\n\n response = user.to_json()\n self.write(response)", "def test_authenticated_user_update(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Forbidden,\r\n getattr(require, 'token').update,\r\n token)", "def SetIamPolicy(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_firewall_policy(self, firewall_policy, body=None):\r\n return self.put(self.firewall_policy_path % (firewall_policy),\r\n body=body)", "def modify_audit_policy(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n runtime = util_models.RuntimeOptions()\n return self.modify_audit_policy_with_options(request, runtime)", "def update(ctx: CLIContext, access_key, resource_policy, is_admin, is_active, rate_limit):\n with Session() as session:\n try:\n data = session.KeyPair.update(\n access_key,\n is_active=is_active,\n is_admin=is_admin,\n resource_policy=resource_policy,\n rate_limit=rate_limit)\n except Exception as e:\n ctx.output.print_mutation_error(\n e,\n item_name='keypair',\n action_name='update',\n )\n sys.exit(1)\n if not data['ok']:\n ctx.output.print_mutation_error(\n msg=data['msg'],\n item_name='keypair',\n action_name='update',\n )\n sys.exit(1)\n ctx.output.print_mutation_result(\n data,\n extra_info={\n 'access_key': access_key,\n },\n )", "def _AddAgentToPolicy(self, policy, tpu_user_agent):\n logging_binding = None\n storage_binding = None\n tpu_member_str = 'serviceAccount:{}'.format(tpu_user_agent)\n\n for binding in policy.bindings:\n if binding.role == self.logging_role:\n logging_binding = binding\n if binding.role == self.storage_role:\n storage_binding = binding\n\n # Skip checking bindings if this is the tpuServiceAgent role.\n if binding.role != self.tpu_service_agent:\n # Check if the tpuMemberStr is already in a binding.\n for member in binding.members:\n if member == tpu_member_str:\n # The TPU service account has already been enabled. 
Make no\n # modifications.\n return None\n\n if logging_binding is None:\n logging_binding = self.messages.Binding(role=self.logging_role)\n policy.bindings.append(logging_binding)\n\n if storage_binding is None:\n storage_binding = self.messages.Binding(role=self.storage_role)\n policy.bindings.append(storage_binding)\n\n logging_binding.members.append(tpu_member_str)\n storage_binding.members.append(tpu_member_str)\n\n return policy", "def voter_change_authority_process_view(request):\n authority_required = {'admin'} # admin, verified_volunteer\n if not voter_has_authority(request, authority_required):\n return redirect_to_sign_in_page(request, authority_required)\n\n voter_on_stage = Voter()\n authority_changed = False\n\n voter_id = request.GET.get('voter_id', 0)\n voter_id = convert_to_int(voter_id)\n authority_granted = request.GET.get('authority_granted', False)\n authority_removed = request.GET.get('authority_removed', False)\n\n # Check to see if this voter is already being used anywhere\n voter_on_stage_found = False\n try:\n voter_query = Voter.objects.filter(id=voter_id)\n if len(voter_query):\n voter_on_stage = voter_query[0]\n voter_on_stage_found = True\n except Exception as e:\n handle_record_not_found_exception(e, logger=logger)\n\n if voter_on_stage_found:\n try:\n if authority_granted == 'verified_volunteer':\n voter_on_stage.is_verified_volunteer = True\n authority_changed = True\n elif authority_granted == 'admin':\n voter_on_stage.is_admin = True\n authority_changed = True\n\n if authority_removed == 'verified_volunteer':\n voter_on_stage.is_verified_volunteer = False\n authority_changed = True\n elif authority_removed == 'admin':\n voter_on_stage.is_admin = False\n authority_changed = True\n\n if authority_changed:\n voter_on_stage.save()\n\n messages.add_message(request, messages.INFO, 'Voter authority updated.')\n except Exception as e:\n handle_record_not_saved_exception(e, logger=logger)\n messages.add_message(request, messages.ERROR, 'Could not save voter.')\n else:\n messages.add_message(request, messages.ERROR, 'Could not save change to authority.')\n\n return HttpResponseRedirect(reverse('voter:voter_edit', args=(voter_id,)))", "def PUT(request):\n\n # Make sure required parameters are there\n\n try:\n request.check_required_parameters(\n path={\n 'simulationId': 'int',\n 'userId': 'int'\n },\n body={\n 'authorization': {\n 'authorizationLevel': 'string'\n }\n }\n )\n\n except exceptions.ParameterError as e:\n return Response(400, e.message)\n\n # Instantiate and Authorization\n\n authorization = Authorization.from_JSON({\n 'userId': request.params_path['userId'],\n 'simulationId': request.params_path['simulationId'],\n 'authorizationLevel': request.params_body['authorization']['authorizationLevel']\n })\n\n # Make sure this Authorization exists\n\n if not authorization.exists():\n return Response(404, '{} not found.'.format(authorization))\n\n # Make sure this User is allowed to edit this Authorization\n\n if not authorization.google_id_has_at_least(request.google_id, 'OWN'):\n return Response(403, 'Forbidden from updating {}.'.format(authorization))\n\n # Try to update this Authorization\n\n try:\n authorization.update()\n\n except exceptions.ForeignKeyError as e:\n return Response(400, 'Invalid authorization level.')\n\n # Return this Authorization\n\n return Response(\n 200,\n 'Successfully updated {}.'.format(authorization),\n authorization.to_JSON()\n )", "def modify_audit_policy_with_options(\n self,\n request: 
dds_20151201_models.ModifyAuditPolicyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.audit_log_switch_source):\n query['AuditLogSwitchSource'] = request.audit_log_switch_source\n if not UtilClient.is_unset(request.audit_status):\n query['AuditStatus'] = request.audit_status\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.service_type):\n query['ServiceType'] = request.service_type\n if not UtilClient.is_unset(request.storage_period):\n query['StoragePeriod'] = request.storage_period\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='ModifyAuditPolicy',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.ModifyAuditPolicyResponse(),\n self.call_api(params, req, runtime)\n )", "def refresh(self):\n self._policies = self._get_policies()", "def put(self, op_id: str) -> Response:\n data = request.get_json()\n\n authorized: bool = Users.objects.get(id=get_jwt_identity()).roles.organization or \\\n Users.objects.get(id=get_jwt_identity()).roles.admin\n\n if authorized:\n try:\n res = Opportunity.objects.get(id=op_id).update(**data)\n except ValidationError as e:\n return bad_request(e.message)\n return jsonify(res)\n else:\n return forbidden()", "def update_policy(self, policy, inverse_policy=None):\n self.make_T_policy_matrix(policy)\n self.inverse_dynamics_by_time = dict()\n self.policy = policy\n self.inverse_policy = inverse_policy", "def qos_settings_control_modify(self, enforcement=None, ratebucket_rebalance=None):\n return self.request( \"qos-settings-control-modify\", {\n 'enforcement': [ enforcement, 'enforcement', [ bool, 'None' ], False ],\n 'ratebucket_rebalance': [ ratebucket_rebalance, 'ratebucket-rebalance', [ bool, 'None' ], False ],\n }, {\n } )", "def apply_policy(self, tenant_id, group_id, policy_id, check_template, alarm_template, nplan_id):\n d = cass.get_servers_by_group_id(self._db, tenant_id, group_id)\n\n def proc_servers(servers):\n deferreds = [\n self.add_policy_to_server(self._db, tenant_id, policy_id, server['serverId'], server['entityId'],\n check_template, alarm_template, nplan_id)\n for server in servers\n ]\n return defer.gatherResults(deferreds, consumeErrors=False)\n d.addCallback(proc_servers)\n d.addCallback(lambda _: None)\n return d", "def updateMergePolicy(self, mergePolicyId: str = None, policy: dict = None) -> dict:\n if mergePolicyId is None:\n raise ValueError(\"Require a mergePolicyId\")\n if policy is None or type(policy) != dict:\n raise ValueError(\"Require a dictionary to update the merge policy\")\n if self.loggingEnabled:\n self.logger.debug(f\"Starting 
updateMergePolicy\")\n path = f\"/config/mergePolicies/{mergePolicyId}\"\n res = self.connector.putData(\n self.endpoint + path, data=policy, headers=self.header\n )\n return res", "def test_update_hyperflex_sys_config_policy(self):\n pass", "def _update_challenge(request: PipelineRequest, challenger: \"PipelineResponse\") -> HttpChallenge:\n\n challenge = HttpChallenge(\n request.http_request.url,\n challenger.http_response.headers.get(\"WWW-Authenticate\"),\n response_headers=challenger.http_response.headers,\n )\n ChallengeCache.set_challenge_for_url(request.http_request.url, challenge)\n return challenge", "def update(self,\n dns_forwarder_zone_id,\n policy_dns_forwarder_zone,\n ):\n return self._invoke('update',\n {\n 'dns_forwarder_zone_id': dns_forwarder_zone_id,\n 'policy_dns_forwarder_zone': policy_dns_forwarder_zone,\n })", "def test_update_hyperflex_cluster_storage_policy(self):\n pass", "def device_update_policy(self, device_update_policy):\n\n self._device_update_policy = device_update_policy", "def patch(self, nodepool_policy_ident, patch):\n context = pecan.request.context\n nodepool_policy = api_utils.get_resource('NodePoolPolicy', nodepool_policy_ident)\n\n # policy.enforce(context, 'nodepool_policy:update', nodepool_policy,\n # action='nodepool_policy:update')\n try:\n nodepool_policy_dict = nodepool_policy.as_dict()\n print 'ssssss'\n print patch\n new_nodepool_policy = NodePoolPolicy(**api_utils.apply_jsonpatch(nodepool_policy_dict, patch))\n\n except api_utils.JSONPATCH_EXCEPTIONS as e:\n raise exception.PatchError(patch=patch, reason=e)\n\n # Update only the fields that have changed\n for field in objects.NodePoolPolicy.fields:\n try:\n patch_val = getattr(new_nodepool_policy, field)\n except AttributeError:\n # Ignore fields that aren't exposed in the API\n continue\n if patch_val == wtypes.Unset:\n patch_val = None\n if nodepool_policy[field] != patch_val:\n nodepool_policy[field] = patch_val\n\n # delta = nodepool_policy.obj_what_changed()\n nodepool_policy.save()\n # validate_function_properties(delta)\n\n # res_nodepool_policy = pecan.request.rpcapi.bay_update(nodepool_policy)\n return NodePoolPolicy.convert_with_links(nodepool_policy)", "def apply_settings(auth_info, django_settings):\r\n provider_names = auth_info.keys()\r\n provider.Registry.configure_once(provider_names)\r\n enabled_providers = provider.Registry.enabled()\r\n _set_global_settings(django_settings)\r\n _set_provider_settings(django_settings, enabled_providers, auth_info)", "def set_policy (self, policy = None, args = (), policy_cleanup = None):\n if policy == self.policy:\n # same policy; might want to change args/cleanup function, though\n self._policy_args = args\n if policy is not None and not isinstance(policy, basestring):\n self._policy_cleanup = policy_cleanup\n return\n # perform cleanup for current policy, if any\n if isinstance(self.policy, basestring):\n # built-in\n try:\n POLICY_CLEANUP[self.policy](self)\n except AttributeError:\n pass\n elif self.policy is not None and self._policy_cleanup is not None:\n # custom\n self._policy_cleanup(self)\n del self._policy_cleanup\n # set new policy\n self.policy = policy\n if policy is None:\n # if disabling scrolling, clean up some attributes we won't need\n try:\n del self._scroll_fn, self._policy_args\n except AttributeError:\n pass\n else:\n self._policy_args = args if args else ()\n if isinstance(policy, basestring):\n # built-in\n self._scroll_fn = POLICY_SCROLL[policy]\n else:\n # custom\n self._scroll_fn = policy\n 
self._policy_cleanup = policy_cleanup", "def SetIamPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('SetIamPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def SetIamPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('SetIamPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def SetIamPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('SetIamPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def SetIamPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('SetIamPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def SetIamPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('SetIamPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def SetIamPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('SetIamPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def updateTenant(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def policies(self, policies):\n\n self._policies = policies", "def set_hardware_specific_load_balancing_policy(cls, vendor_id, product_id, policy):\n cls.execute([\"-l\", \"-t\", cls._get_hardware_id(vendor_id, product_id), str(policy)])", "def update_auth_data(self, auth_data: AuthData) -> None:\n self.auth_data.update(auth_data)\n if \"refresh_id\" in self.auth_data:\n self.set_cookie(COOKIE_NAME, self.auth_data[\"refresh_id\"])\n if self.on_auth_data_changed:\n self.on_auth_data_changed(self.auth_data)", "def _enforce(self, req, action, target=None):\n if target is None:\n target = {}\n try:\n self.policy.enforce(req.context, action, target)\n except exception.Forbidden as e:\n LOG.debug(\"User not permitted to perform '%s' action\", action)\n raise webob.exc.HTTPForbidden(explanation=e.msg, request=req)", "def policy_filter(self, policy_filter):\n self._policy_filter = policy_filter", "def add(self, policy_name, data):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.info(\"Adding the policy: %s\", address)\n payload = json.dumps({\"policy\": data})\n response = self.vault.requests_request(\n \"POST\", address, headers=self.vault.token_header, data=payload\n )", "def grant(self, privilege):\r\n request = http.Request('PUT', self.get_url(), privilege)\r\n\r\n return request, parsers.parse_empty", "def test_control_acl_update(self):\n with factories.single_commit():\n control = factories.ControlFactory()\n person = factories.PersonFactory()\n control.add_person_with_role_name(person, \"Admin\")\n access_control_list = {\n \"Admin\": [\n {\n \"email\": \"user1@example.com\",\n \"name\": \"user1\",\n },\n {\n \"email\": \"user2@example.com\",\n \"name\": \"user2\",\n },\n ]\n }\n self.setup_people(access_control_list)\n\n response = self.api.put(control, control.id, {\n \"access_control_list\": access_control_list,\n })\n\n self.assert200(response)\n control = all_models.Control.query.get(control.id)\n self.assert_obj_acl(control, access_control_list)", "def set_policyname(self, policyname):\n self.options[\"policyname\"] = policyname", "def policyid(self, policyid):\n self._policyid = policyid", "def set_iam_policy(\n self,\n 
) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"set_iam_policy\" not in self._stubs:\n self._stubs[\"set_iam_policy\"] = self.grpc_channel.unary_unary(\n \"/google.iam.v1.IAMPolicy/SetIamPolicy\",\n request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,\n response_deserializer=policy_pb2.Policy.FromString,\n )\n return self._stubs[\"set_iam_policy\"]", "def set_iam_policy(\n self,\n ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"set_iam_policy\" not in self._stubs:\n self._stubs[\"set_iam_policy\"] = self.grpc_channel.unary_unary(\n \"/google.iam.v1.IAMPolicy/SetIamPolicy\",\n request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,\n response_deserializer=policy_pb2.Policy.FromString,\n )\n return self._stubs[\"set_iam_policy\"]", "def add_auth(self, http_request):\r\n pass", "def update_by_config(self, policy_enabling_map):\n self.enabled_policies = []\n\n for policy_name, policy_config in policy_enabling_map.items():\n if not self._is_policy_exists(policy_name):\n self._warn_unexistent_policy(policy_name)\n continue\n\n if policy_config['enabled']:\n enabled_policy = self._get_policy(policy_name)\n self.enabled_policies.append(enabled_policy)", "def test_update_hyperflex_node_config_policy(self):\n pass", "def UpdateBackupPolicy(\n self, backuppolicy_ref, backup_policy, update_mask, async_\n ):\n update_op = self._adapter.UpdateBackupPolicy(\n backuppolicy_ref, backup_policy, update_mask\n )\n if async_:\n return update_op\n operation_ref = resources.REGISTRY.ParseRelativeName(\n update_op.name, collection=netapp_util.OPERATIONS_COLLECTION\n )\n return self.WaitForOperation(operation_ref)", "def apply_client_authentication_options(\n self, headers, request_body=None, bearer_token=None\n ):\n # Inject authenticated header.\n self._inject_authenticated_headers(headers, bearer_token)\n # Inject authenticated request body.\n if bearer_token is None:\n self._inject_authenticated_request_body(request_body)", "async def modify_audit_policy_with_options_async(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.audit_log_switch_source):\n query['AuditLogSwitchSource'] = request.audit_log_switch_source\n if not UtilClient.is_unset(request.audit_status):\n query['AuditStatus'] = request.audit_status\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not 
UtilClient.is_unset(request.service_type):\n query['ServiceType'] = request.service_type\n if not UtilClient.is_unset(request.storage_period):\n query['StoragePeriod'] = request.storage_period\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='ModifyAuditPolicy',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.ModifyAuditPolicyResponse(),\n await self.call_api_async(params, req, runtime)\n )", "def patch(self,\n draft_id,\n policy_draft,\n ):\n return self._invoke('patch',\n {\n 'draft_id': draft_id,\n 'policy_draft': policy_draft,\n })", "def change_authentication(self, client_id=None, client_secret=None,\n access_token=None, refresh_token=None):\n # TODO: Add error checking so you cannot change client_id and retain\n # access_token. Because that doesn't make sense.\n self.client_id = client_id or self.client_id\n self.client_secret = client_secret or self.client_secret\n self.access_token = access_token or self.access_token\n self.refresh_token = refresh_token or self.refresh_token", "def set_policyname(self, policyname):\n self.options['policyname'] = policyname", "def device_update_policy(self, device_ids, policy_id):\n return self._device_action(device_ids, \"UPDATE_POLICY\", {\"policy_id\": policy_id})", "async def modify_audit_policy_async(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n runtime = util_models.RuntimeOptions()\n return await self.modify_audit_policy_with_options_async(request, runtime)", "def isolation_policies_every(self, isolation_policies_every):\n\n self._isolation_policies_every = isolation_policies_every" ]
[ "0.61890286", "0.60093766", "0.5636485", "0.55529946", "0.5489578", "0.54655504", "0.5454699", "0.54493403", "0.5210004", "0.5150643", "0.51444256", "0.5112735", "0.50792444", "0.50723416", "0.50633496", "0.5059199", "0.5028738", "0.50170374", "0.498372", "0.49623302", "0.49584088", "0.49507344", "0.49472907", "0.4929195", "0.49230617", "0.49059168", "0.49018243", "0.48485732", "0.48247835", "0.47757995", "0.47541147", "0.4745784", "0.47373775", "0.47241688", "0.47218844", "0.47172126", "0.46932203", "0.4687643", "0.46788472", "0.4676085", "0.46708503", "0.46620575", "0.46585816", "0.46366242", "0.46340677", "0.4622072", "0.46145338", "0.46048307", "0.46001893", "0.4598506", "0.4593162", "0.45802504", "0.4570574", "0.45674786", "0.4563371", "0.4559522", "0.4558925", "0.45558515", "0.4547572", "0.45403868", "0.45378104", "0.45271215", "0.4517849", "0.45115584", "0.45105127", "0.45011324", "0.44994265", "0.44989905", "0.44948152", "0.44886312", "0.44886312", "0.44886312", "0.44886312", "0.44886312", "0.44886312", "0.44830778", "0.4481179", "0.44742638", "0.44565982", "0.44540682", "0.44491795", "0.44485262", "0.44467473", "0.44466582", "0.444646", "0.44463506", "0.4444584", "0.4444584", "0.4431196", "0.44252858", "0.44229987", "0.44185698", "0.4417822", "0.4415418", "0.43995658", "0.4398902", "0.4396052", "0.4395141", "0.43919656", "0.43907496" ]
0.535475
8
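For orientation: each row of this dump closes with the hard-negative snippets, a parallel list of their similarity scores, and finally the positive document's own score and rank, which is what the two bare values just above are. A minimal sketch of consuming that tail structure, assuming the snippet and score lists are index-aligned (an assumption, not stated in the dump itself); the snippet strings below are hypothetical placeholders, while the leading score values are taken verbatim from the list above:

```python
# Minimal sketch: pair each hard-negative snippet with its score and
# sort by score. Index alignment of the two lists is an assumption of
# this sketch; snippet texts are placeholders, scores are the row's own.
negatives = ["<negative snippet 0>", "<negative snippet 1>", "<negative snippet 2>"]
scores = ["0.61890286", "0.60093766", "0.5636485"]

ranked = sorted(zip(map(float, scores), negatives), reverse=True)
for score, snippet in ranked:
    print(f"{score:.4f}  {snippet}")
```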
Updates the specified compartment's description or name. You can't update the root compartment.
def update_compartment(self, compartment_id, update_compartment_details, **kwargs): resource_path = "/compartments/{compartmentId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_compartment got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "compartmentId": compartment_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_compartment_details, response_type="Compartment") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_compartment_details, response_type="Compartment")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_object(self, name: str) -> None:", "def update(args, config):\n print('Updates an HPC fleet with name \"{}\"'.format(args.fleet_name))", "def test_update(self):\n obj = self.provision_single_asset()\n test_string = \"testing this thing\"\n p = {'id': obj.id, 'description': test_string}\n self.put('widget', 200, params=p)\n self.session.refresh(obj)\n assert obj.description == test_string", "def update_catalog(self, old_catalog_name, new_catalog_name, description):\n if self.resource is None:\n self.resource = self.client.get_resource(self.href)\n org = self.resource\n links = get_links(\n org, rel=RelationType.DOWN, media_type=EntityType.CATALOG.value)\n for link in links:\n if old_catalog_name == link.name:\n catalog = self.client.get_resource(link.href)\n href = catalog.get('href')\n admin_href = href.replace('/api/catalog/',\n '/api/admin/catalog/')\n admin_view_of_catalog = self.client.get_resource(admin_href)\n if new_catalog_name is not None:\n admin_view_of_catalog.set('name', new_catalog_name)\n if description is not None:\n admin_view_of_catalog['Description'] = E.Description(\n description)\n return self.client.put_resource(\n admin_href,\n admin_view_of_catalog,\n media_type=EntityType.ADMIN_CATALOG.value)\n raise Exception('Catalog not found.')", "def update(self, job_name, param_name, value, description=None):\n if job_name in self._jobs:\n getattr(self._jobs[job_name], param_name).update(value, description)\n else:\n self.log.error(\"Invalid job name: %s\", job_name)", "def firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,\n blade_bundle_version=None, descr=None, mode=None,\n org_parent=\"org-root\"):\n\n org_dn = org_parent + \"/org-\" + org_name\n fw_dn= org_dn + \"/fw-host-pack-\" + name\n mo = handle.query_dn(fw_dn)\n if mo is not None:\n if rack_bundle_version is not None:\n mo.rack_bundle_version = rack_bundle_version\n if blade_bundle_version is not None:\n mo.blade_bundle_version = blade_bundle_version\n if mode is not None:\n mo.mode=mode\n if descr is not None:\n mo.descr = descr\n\n handle.set_mo(mo)\n handle.commit()\n else:\n log.info(\"Firmware host pack <%s> not found.\" % name)", "def _update(self, course_name: str, newdata: ParseType) -> None:\n\n self.courses[course_name] = newdata", "def updateNameAndDescription(self, name, desc):\n self.magneticfield.name = name\n self.magneticfield.description = desc\n\n self.magneticfield.writeFile()", "def update(self, container, representation):\n pass", "def request_description_update():\n global should_update_description\n should_update_description = True", "def update_description(self, host, baseUrl, description):\n self._host = host\n self._urlBase = baseUrl\n self._description = description\n return", "def compartment_id(self, compartment_id):\n self._compartment_id = compartment_id", "def compartment_id(self, compartment_id):\n self._compartment_id = compartment_id", "def setName(self, *args):\n return _libsbml.Compartment_setName(self, *args)", "def setCompartment(self, *args):\n return _libsbml.Species_setCompartment(self, *args)", "def setCompartment(self, *args):\n return _libsbml.CompartmentReference_setCompartment(self, *args)", "def update(self, title=None, description = None):\n jsonData = self.metaData.jsonObj\n header = self._baseHeader.copy()\n\n header['Content-type'] = \"application/vnd.huddle.data+json\"\n url = self.metaData.getLink(\"edit\")\n assert url is not None\n\n if title is not None: jsonData['title'] = title\n if description is not None: 
jsonData['description'] = description\n\n response = self._adapter.putRequest(url, header, json.dumps(jsonData))\n\n return Document(self._client, self._client.getUrlFromHeaderLink(response['Headers']['link']))", "def description(self, new_description):\r\n self.set({\"description\": new_description})", "def put(self, department_id):\n department = get_department_by_id(department_id)\n department.name = request.json[\"name\"]\n db.session.commit()\n return {}, 200", "def update(self, name):\n attemptdir = self.attemptdir(name)\n\n try:\n os.makedirs(attemptdir)\n\n except OSError as err:\n self.logger.info('Could not mkdir {} (ignoring): {}'.format(attemptdir, err))\n pass\n\n with open(os.path.join(attemptdir, 'image'), 'w+') as f:\n try:\n f.write(self.bom().content[name].image)\n\n except KeyError:\n self.logger.error('ERROR: component {} is not in the bom'.format(name))\n raise ZDGComponentBlocked(name)\n\n with open(os.path.join(attemptdir, 'owner'), 'w+') as f:\n f.write(self.bom().content[name].owner)\n\n with open(os.path.join(attemptdir, 'version'), 'w+') as f:\n f.write(self.bom().content[name].version)\n\n with open(os.path.join(attemptdir, 'submission'), 'w+') as f:\n f.write(self.bom().content[name].submission)", "def update(ctx, name, description, tags):\n user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),\n ctx.obj.get('experiment'))\n update_dict = {}\n\n if name:\n update_dict['name'] = name\n\n if description:\n update_dict['description'] = description\n\n tags = validate_tags(tags)\n if tags:\n update_dict['tags'] = tags\n\n if not update_dict:\n Printer.print_warning('No argument was provided to update the experiment.')\n sys.exit(0)\n\n try:\n response = PolyaxonClient().experiment.update_experiment(\n user, project_name, _experiment, update_dict)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not update experiment `{}`.'.format(_experiment))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n Printer.print_success(\"Experiment updated.\")\n get_experiment_details(response)", "def rename(self, serial, name):\n api_page = \"/configuration/object/ap_rename\"\n url = \"{}{}?{}&UIDARUBA={}\".format(\n self.base_url,\n api_page,\n self.config_path,\n self.uidaruba)\n\n obj_dict = {'serial-num': serial, 'new-name': name}\n obj_json = json.loads(json.dumps(obj_dict))\n\n resp = self.post(url, obj_json)\n\n print(resp.status_code)\n print(resp.text)", "def setCompartment(self, *args):\n return _libsbml.Reaction_setCompartment(self, *args)", "def update():\n return 'update api in put'", "def do_update(cs, args):\n opts = {}\n opts['memory'] = args.memory\n opts['cpu'] = args.cpu\n opts['name'] = args.name\n if 'auto_heal' in args and args.auto_heal:\n opts['auto_heal'] = True\n if 'no_auto_heal' in args and args.no_auto_heal:\n opts['auto_heal'] = False\n opts = zun_utils.remove_null_parms(**opts)\n if not opts:\n raise exc.CommandError(\"You must update at least one property\")\n container = cs.containers.update(args.container, **opts)\n _show_container(container)", "def setCompartment(self, *args):\n return _libsbml.QualitativeSpecies_setCompartment(self, *args)", "def test_update_category_to_existing_name(self):\n sample_category()\n category = sample_category(name='House')\n url = category_details_url(category.id)\n res = self.client.put(url, {\"name\": \"place\"})\n\n category.refresh_from_db()\n self.assertEqual(res.status_code, 
status.HTTP_400_BAD_REQUEST)\n self.assertEqual(\n res.data['errors']['name'][0],\n 'This field must be unique.')", "def update(self, name=None, description=None, tags=None, provenance=None):\n # type: (str, str, List[str], str) -> bool\n headers = Headers({\"content-type\": \"application/json\", \"accept\": \"application/json\"})\n body = {} # type: Dict[str, Union[str, List, Dict]]\n\n if name is not None:\n self.raw_model[\"name\"] = name\n if description is not None:\n self.raw_model[\"description\"] = description\n if tags is not None:\n self.raw_model[\"tags\"] = tags\n if provenance is not None:\n self.raw_model[\"provenance\"] = provenance\n\n body = self.raw_model\n\n log.debug(\"Body %s\", body)\n\n resource_object = self.connection.api_call(\n \"PUT\", [\"v1\", \"resources\", self.id], headers=headers, json=body, model=Resource\n )\n\n self.raw_model = resource_object.raw_model\n\n log.debug(\"Updated dataset %s with content %s\", self.id, self.raw_model)\n return True", "def setServiceDescription(self, description):\n with self.zeroconf.lock:\n self.zeroconf.outbox.put(description)", "def update_simple(parent, name, value):\n element = parent.find('./' + name) \n\n if element is None:\n element = ET.SubElement(parent, name)\n element.text = value\n else:\n element.text = value", "def test_update(self):\n payload = {\n 'name': 'Pecho inclinado',\n 'description': \"New description\",\n 'muscle_group': \"pecho\"\n }\n response = self.client.put(\n '/exercises/{}/'.format(self.exer1.id), data=payload)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(\n Exercise.objects.get(id=self.exer1.id).name, payload['name'])", "def updateName(self,name):\n self.name = name", "def change_tag_namespace_compartment(self, tag_namespace_id, change_tag_namespace_compartment_detail, **kwargs):\n resource_path = \"/tagNamespaces/{tagNamespaceId}/actions/changeCompartment\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"change_tag_namespace_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagNamespaceId\": tag_namespace_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=change_tag_namespace_compartment_detail)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n 
body=change_tag_namespace_compartment_detail)", "def put(self, *args, **kwargs):\n\n addr = EtherAddress(args[0])\n\n if 'desc' in kwargs:\n self.service.update(addr, kwargs['desc'])\n else:\n self.service.update(addr)", "def addCompartment(self, vol=1, comp_id=\"\"):\n\n c1 = self.model.createCompartment()\n self.check(c1, \"create compartment\")\n if len(comp_id) == 0:\n comp_id = \"c\" + str(self.model.getNumCompartments())\n self.check(c1.setId(comp_id), \"set compartment id\")\n self.check(c1.setConstant(True), 'set compartment \"constant\"')\n self.check(c1.setSpatialDimensions(3), \"set compartment dimensions\")\n\n self.check(c1.setSize(vol), 'set compartment \"size\"')\n self.check(c1.setUnits(\"litre\"), \"set compartment size units\")\n return c1", "def update(self):\n #self._switch.odlclient._request_json(self._path, method=\"put\", json={\n # \"flow\": self._odl_inventory()\n #})\n self.remove() # actually, remove only uses self.switch and self.id, so this removes the other entry as well.\n self.deploy()", "def update(self, descriptor_msg, auth=None):\n request_args = self._make_request_args(descriptor_msg, auth)\n try:\n response = requests.put(**request_args)\n response.raise_for_status()\n except requests.exceptions.ConnectionError as e:\n msg = \"Could not connect to restconf endpoint: %s\" % str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.HTTPError as e:\n msg = \"PUT request to %s error: %s\" % (request_args[\"url\"], response.text)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.Timeout as e:\n msg = \"Timed out connecting to restconf endpoint: %s\", str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e", "def update_application(configurationId=None, name=None, description=None):\n pass", "def update_doc(doc):\n doc.long_name = \"X\"", "def update_volume(VolumeId=None, Name=None, MountPoint=None):\n pass", "def test_department_can_be_edited(self):\n res = self.client().put(service_url, json={\"id_dep\": 1, \"dep_name\": \"\", \"description\": \"this is a new description\"})\n self.assertEqual(res.status_code, 204)\n results = self.client().get(service_url+'/1')\n self.assertIn('is a new', str(results.data))\n self.assertIn('dep 1', str(results.data))", "def update_endpoint(EndpointName=None, EndpointConfigName=None):\n pass", "def description(self, new_description):\n self.set_description(new_description, self._xml)\n self._description = self._read_description(self._xml)", "def update(self, catalog: Metadata, action: str):\n self._insert_request(self.update_queue, catalog, action)", "def set_description(self, sNewDescription):\n\t\tcall_sdk_function('PrlVmDev_SetDescription', self.handle, sNewDescription)", "def update(self, name=None, password=None, host=None):\n return self.manager.update(self, name=name, password=password,\n host=host)", "def test_edit_volume(self, volume, volumes_steps):\n new_name = volume.name + ' (updated)'\n with volume.put(name=new_name):\n volumes_steps.edit_volume(volume_name=volume.name,\n new_volume_name=new_name)", "def _update(self, binding, data):\n self._validate_data(data)\n if not data.get('name',False):\n data['name'] = data.get('frontend_label',False) or 'No Label'\n if not data.get('create_variant',False):\n data['create_variant'] = data.get('is_configurable',False)\n binding.write(data)\n self._create_attribute_option(binding, data)\n _logger.debug('%d updated from magento %s', binding.id, self.magento_id)\n return", "def update(self, attrs):\n 
if attrs.get('name'):\n self.name = string.capwords(attrs.get('name'))\n if attrs.get('description'):\n self.description = attrs.get('description')\n if attrs.get('author'):\n self.author = attrs.get('author')\n\n try:\n db.session.add(self)\n db.session.commit()\n except IntegrityError as err:\n if isinstance(err.orig, UniqueViolation):\n raise Conflict(\"Name already used by another exercise.\")\n raise UnexpectedError(DATABASE_ERROR_MSG)\n except DBAPIError as err:\n raise UnexpectedError(DATABASE_ERROR_MSG)", "def update_product(self, product_id, name, archived=False):\n archived = 'y' if archived else 'n'\n return self._make_post_request(self._urls['product'] % product_id,\n data=dict(name=name, archived=archived))", "def editComponent(self, comp_idx, params):\n for i in params:\n try:\n params[i] = float(params[i])\n except:\n continue\n setattr(self.components[comp_idx], i, params[i])\n self.buffer._updateComponent(comp_idx) \n self.reBuild()", "def update_description(self, option, desc):\n _, command = self.__options[option]\n self.__options[option] = (desc, command)", "def edit(self, config, etag):\n\n data = self._json_encode(config)\n headers = self._default_headers()\n\n if etag is not None:\n headers[\"If-Match\"] = etag\n\n return self._request(self.name,\n ok_status=None,\n data=data,\n headers=headers,\n method=\"PUT\")", "def update(self, customerguid, name=\"\", login=\"\", password=\"\", email=\"\", address=\"\", vat=\"\", jobguid=\"\", executionparams=None):", "def update(self, request, pk=None):\n lot = Lot.objects.get(pk=request.data[\"lotId\"])\n\n project = Project.objects.get(pk=pk)\n project.name = request.data[\"name\"]\n project.estimatedCost = request.data[\"estimatedCost\"]\n project.estimatedCompletionDate = request.data[\"estimatedCompletionDate\"]\n #project.projectNote = Note.objects.get(pk=request.data['projectNote'])\n\n project.lotId = lot\n project.save()\n\n return Response({}, status=status.HTTP_204_NO_CONTENT)", "def update_editor(self, dto: CustomEditorDTO):\n self.name = dto.name\n self.description = dto.description\n self.url = dto.url\n self.save()", "def update(clients, context, name=None):\n port_id = context['port_id']\n logger.info(\"Taking action port.update {}.\".format(port_id))\n neutron = clients.get_neutron()\n body = {'port': {}}\n if name is not None:\n body['port']['name'] = name\n neutron.update_port(port_id, body=body)", "def edit_service(self, target_host, service_description, field_name, new_value):\n\n\t\toriginal_object = self.get_service(target_host, service_description)\n\t\tif original_object == None:\n\t\t\traise ParserError(\"Service not found\")\n\t\treturn config.edit_object( original_object, field_name, new_value)", "def update(self) -> None:\n self._api.update()\n if self.available:\n self._attr_native_value = self._api.data[self.entity_description.key]\n else:\n self._attr_native_value = None", "def update_object(self, name: str) -> None:\n try:\n object = Object.from_name(name)\n except Object.NotFound:\n record = self.catalog.get(name) # must be name pattern recognized by catalog\n log.info(f'Creating new object for {name}')\n Object.add({'type_id': self.__get_type_id(record), 'aliases': self.__get_names(record),\n 'ra': record.ra, 'dec': record.declination, 'redshift': record.redshift,\n 'data': {'tns': record.to_json()}})\n else:\n # find best alternate identifier for catalog search\n for provider in ('iau', 'ztf', 'atlas'): # preferred ordering\n if provider in object.aliases:\n if name != 
object.aliases[provider]:\n log.debug(f'Searching with name {object.aliases[provider]} <- {name}')\n name = object.aliases[provider]\n break\n else:\n raise TNSError(f'Object ({name}) not found in catalog')\n record = self.catalog.get(name)\n self.__ensure_iau_pattern(record.name)\n if info := self.__build_info(object, record):\n Object.update(object.id, **info)\n else:\n log.info(f'No changes found for {name}')", "def update_experiment_description(self, experiment_id, description):\n return self.dbclient.update_by_id(Tables.EXPERIMENTS, experiment_id, {\n ExperimentAttr.DESC: description\n })", "def set_desc(self, item_desc):\r\n self.description = item_desc", "def update(self, request, pk=None):\n serializer = OrganizationUpdateBody(data=request.data)\n if serializer.is_valid(raise_exception=True):\n name = serializer.validated_data.get(\"name\")\n # agents = serializer.validated_data.get(\"agents\")\n # network = serializer.validated_data.get(\"network\")\n # channel = serializer.validated_data.get(\"channel\")\n try:\n Organization.objects.get(name=name)\n except ObjectDoesNotExist:\n pass\n # organization = Organization.objects.filter(name=name).update(agents=agents, network=network.id, channel=channel.id)\n\n return Response(status=status.HTTP_204_NO_CONTENT)", "def test_update_hyperflex_app_catalog(self):\n pass", "def update(self, title=None, state=None, description=None, due_on=None):\n data = {\n \"title\": title,\n \"state\": state,\n \"description\": description,\n \"due_on\": due_on,\n }\n self._remove_none(data)\n json = None\n\n if data:\n json = self._json(self._patch(self._api, data=dumps(data)), 200)\n if json:\n self._update_attributes(json)\n return True\n return False", "def fusion_api_update_deployment_manager(self, body=None, uri=None, api=None, headers=None):\n return self.dep_mgr.update(body=body, uri=uri, api=api, headers=headers)", "def set_description(self, desc):\n super().set_description(desc, refresh=True)\n if self._pbar:\n self._pbar._set_description(self.desc)", "def update_host(self, conf, tenant_id, network_id, host_id, body):\n\t\tpass", "def update_object(self, name: str) -> None:\n try:\n object = Object.from_name(name)\n except Object.NotFound as error:\n log.warning(f'Cannot add new objects using TNSQueryManager')\n raise TNSError(str(error)) from error\n if 'iau' in object.aliases:\n if name != object.aliases['iau']:\n log.debug(f'Searching with name {object.aliases[\"iau\"]} <- {name}')\n name = object.aliases['iau']\n elif 'ztf' in object.aliases:\n log.debug(f'Searching TNS for IAU name {name}')\n name = self.tns.search_name(name).objname\n if name is None:\n raise TNSError(f'Could not find IAU name {name}')\n else:\n raise TNSError(f'No support identifier found {name}')\n response = self.tns.search_object(name)\n if response.is_empty:\n raise TNSError(f'No data on object {name}')\n else:\n if info := self.__build_info(name, object, response):\n Object.update(object.id, **info)\n else:\n log.info(f'No changes for {name}')", "def testUpdate(self):\n response = self.runPut(self.root, sequencer=self.hiseq2000.sodar_uuid, data=self.post_data)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"vendor_id\"], self.post_data[\"vendor_id\"])", "def put(self):\n coll_policy_id = views_helper.get_request_value(self.request, \"coll_policy_id\", \"BODY\")\n name = views_helper.get_request_value(self.request, \"coll_policy_name\", \"BODY\")\n command = views_helper.get_request_value(self.request, 
\"command\", \"BODY\")\n desc = views_helper.get_request_value(self.request, \"desc\", \"BODY\")\n ostype = views_helper.get_request_value(self.request, \"ostype\", \"BODY\")\n coll_policy_update_data = {\n 'name': name,\n 'cli_command': command,\n 'desc': desc,\n 'ostype': ostype\n }\n if len(CollPolicy.objects.filter(~Q(coll_policy_id=coll_policy_id), name=name)):\n data = {\n 'data': '',\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.FALSE,\n constants.MSG_TYPE: 'NAME_DUPLICATE',\n constants.MESSAGE: constants.COLLECTION_POLICY_NAME_DUPLICATE\n }\n\n }\n return api_return(data=data)\n obj = CollPolicy.objects.get(coll_policy_id=coll_policy_id)\n serializer = CollPolicyEditSerializer(instance=obj, data=coll_policy_update_data)\n try:\n if serializer.is_valid():\n serializer.save()\n data = {\n 'data': serializer.data,\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.TRUE,\n constants.MESSAGE: constants.SUCCESS\n }\n\n }\n return api_return(data=data)\n except Exception as e:\n if constants.DEBUG_FLAG:\n print traceback.format_exc(e)\n return exception_handler(e)", "def test_update_task_description(\n self,\n mock_config_load,\n mock_custom_objects_api,\n mock_core_v1_api,\n ):\n task_id = util.MOCK_UUID_5\n rv = TEST_CLIENT.patch(\n f\"/tasks/{task_id}\",\n json={\n \"description\": \"valid description\",\n },\n )\n\n self.assertEqual(rv.status_code, 200)", "def update(self):\n\t\tprint(\"Editing %s '%s'\" % (self.getSpecString(), self.getName()))\n\t\tchoice = None\n\t\twhile (choice != 5):\n\t\t\tchoice = None \t\n\t\t\twhile (choice != 1 and choice != 2 and choice != 3 and choice != 4 and choice != 5):\n\t\t\t\tprint(\"Please select an action\")\n\t\t\t\tprint(\" 1) Edit name\")\n\t\t\t\tprint(\" 2) Edit description\")\n\t\t\t\tprint(\" 3) Add item\")\n\t\t\t\tprint(\" 4) Remove item\")\n\t\t\t\tprint(\" 5) Save and exit\")\n\t\t\t\tchoice = self.askForInteger(\"Action\")\n\n\t\t\t\tif (choice != 1 and choice != 2 and choice != 3 and choice != 4 and choice != 5):\n\t\t\t\t\tprint(\"Invalid choice!\")\n\n\t\t\tif (choice == 1):\n\t\t\t\tself.setName(self.askForString(\"You erase the list's title and write\"))\n\t\t\telif (choice == 2):\n\t\t\t\tself.setDescription(self.askForString(\"You update the list's description to read\"))\n\t\t\telif (choice == 3):\n\t\t\t\tself.addItem(self.askForString(\"Add to list\"))\n\t\t\telif (choice == 4):\n\t\t\t\tprint(self.getAllItemsStr())\n\t\t\t\tremoveIndex = self.askForInteger(\"Remove entry\")\n\t\t\t\tprint(\"Removing %s...\" % (self.items[removeIndex - 1]))\n\t\t\t\tself.removeItem(removeIndex - 1)\n\t\t\telif (choice == 5):\n\t\t\t\tprint(\"Saving %s...\" % self.getSpecString())\n\t\t\t\tself.setUpdatedAt(datetime.datetime.now())\n\t\t\t\tself.refreshYAML()\n\t\t\t\tprint(\"Saved!\")", "def testOnDescriptionChanged(self, widget):\n spy_signal = QtSignalSpy(widget, widget.modelModified)\n\n # Change the name\n new_text = \"Rabbit season\"\n widget.txtDescription.setText(new_text)\n widget.txtDescription.editingFinished.emit()\n\n # Check the signal\n assert spy_signal.count() == 1\n\n # model dict updated\n assert widget.model['description'] == new_text", "def set_description(self, room_description):\n self.description = room_description", "def update(ctx, name, description, tags):\n user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),\n ctx.obj.get('group'))\n update_dict = {}\n\n if name:\n update_dict['name'] = name\n\n if description:\n 
update_dict['description'] = description\n\n tags = validate_tags(tags)\n if tags:\n update_dict['tags'] = tags\n\n if not update_dict:\n Printer.print_warning('No argument was provided to update the experiment group.')\n sys.exit(0)\n\n try:\n response = PolyaxonClient().experiment_group.update_experiment_group(\n user, project_name, _group, update_dict)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not update experiment group `{}`.'.format(_group))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n Printer.print_success(\"Experiment group updated.\")\n get_group_details(response)", "def edit_category(self, category_id, category_name, parent_id):\n # [todo] - all parameters except category_id optional, fill others with\n # current values\n\n # [todo] - validate category_id\n # [todo] - validate new values\n\n # open a cursor\n cur = self.get_cursor()\n\n stmt = \"UPDATE categories \" + \\\n \"SET parent_id='{0}', \".format(parent_id) + \\\n \"category_name='{0}' \".format(category_name) + \\\n \"WHERE category_id={0}\".format(category_id)\n\n cur.execute(stmt)\n\n # close the cursor\n self.close_cursor()", "def fusion_api_edit_datacenter(self, body, uri, api=None, headers=None):\n return self.dc.update(body, uri, api, headers)", "def test_update(self, init_db, category):\n category_name = fake.alphanumeric()\n category.update(name=category_name)\n assert category.name == category_name", "def do_PUT(self):\n note_details = NoteDetails\n if self.path == '/note/api/update':\n response_data=note_details.update_data(self)\n Response(self).jsonResponse(status=200, data=response_data)", "def update(src, des, tipe):\n from xbooks.Xinit import Xrc\n src = \"Xblog/docs/\" + src\n des = \"Xblog/docs/\" + des\n if \"Xblog/docs/notebooks/\" == des.replace(os.path.basename(des), \"\"):\n editNavBar(src, des, tipe, Xrc)\n else:\n editParentIndex(src, des, tipe, Xrc)\n ccc.success(\"updatation procedures for \" + des)", "def edit_name(entry):\n entry.name = get_name()\n entry.save()\n input(\"Edit successful. 
\")\n return entry", "def update_composed_node(cls, composed_node_uuid, values):\n return cls.dbdriver.update_composed_node(composed_node_uuid, values)", "def SetDescription(self, description):\n self.description = str(description)", "def setCompartment(self, *args):\n return _libsbml.MultiSpeciesType_setCompartment(self, *args)", "def update_product(self, *args):\n product_name = args[0]\n details = args[1]\n quantity = args[2]\n price = args[3]\n last_modified = args[4]\n product_id = args[5]\n update_row = \"UPDATE products SET product_name = '{}', details = '{}', quantity = '{}', price = '{}', \" \\\n \"last_modified = '{}' WHERE product_id = '{}';\"\\\n .format(product_name, details, quantity, price, last_modified, product_id)\n self.cursor.execute(update_row, (product_name, details, quantity, price, last_modified, product_id))\n self.connection.commit()", "def edit_payee(self, payee_id, new_payee_name):\n # [todo] - add check that new_payee_name is unique\n\n # open a cursor\n cur = self.get_cursor()\n\n edit_payee_statement = \"UPDATE payees \" + \\\n \"SET payee_name='{0}' \".format(new_payee_name) + \\\n \"WHERE payee_id={0}\".format(payee_id)\n\n cur.execute(edit_payee_statement)\n\n # close the cursor\n self.close_cursor()", "def set_description(self, desc: str) -> None:\n self.metadata.data[\"description\"] = desc", "def edit_person():\n # get person name from user\n responses = accept_inputs([\"Person's name\"])\n person_name = responses[\"Person's name\"]\n # check for existence\n results = query_with_results(\"select * from person where name = ?\", [person_name])\n if len(results) == 0:\n print(\"No person found with name '%s'.\" % person_name)\n return\n else:\n # get id of person\n id = query_with_results(\"select id from person where name = ?\", [person_name])[0][0]\n # the task exists, so ask the user for the new description\n responses = accept_inputs([\"New name\"])\n # update db\n query_no_results(\"update person set name = ? where id = ?\", [responses[\"New name\"], id])\n print(\"Person with old name '%s' changed to '%s'.\" % (person_name, responses[\"New name\"]))", "def update_name(old_name, new_name, phonebook):\n\n phonebook_data = read_phonebook(phonebook)\n\n if not phonebook_data.get(old_name):\n raise NoEntryError(\"This entry does not exist! 
\"\n \"(Names are case-sensitive.)\")\n\n else:\n print \"Previous entry:\", old_name, phonebook_data[old_name]\n number = phonebook_data[old_name]\n del phonebook_data[old_name]\n phonebook_data[new_name] = number\n print \"New entry:\", new_name, phonebook_data[new_name]\n save(phonebook_data, phonebook)", "def endpoint_update(self, endpoint_name=None, config=None):\n if config is None:\n raise Exception(\"Config required!\")\n if endpoint_name is None:\n self.request('/v1.1/endpoint', 'POST', body=config)\n else:\n self.request('/v1.1/endpoints/%s' % endpoint_name, 'POST', body=config)", "def updateDevice(self, serial: str, **kwargs):\n\n kwargs.update(locals())\n\n metadata = {\n 'tags': ['devices', 'configure'],\n 'operation': 'updateDevice'\n }\n resource = f'/devices/{serial}'\n\n body_params = ['name', 'tags', 'lat', 'lng', 'address', 'notes', 'moveMapMarker', 'switchProfileId', 'floorPlanId', ]\n payload = {k.strip(): v for k, v in kwargs.items() if k.strip() in body_params}\n action = {\n \"resource\": resource,\n \"operation\": \"update\",\n \"body\": payload\n }\n return action", "def update(self, organisation: Organisation) -> None:\n ...", "def update(self, system, environment_input):\n pass", "def change_name(self, address, name):\n with self.connect() as c:\n cur = c.cursor()\n cur.execute(\"UPDATE AddressBook SET name = '{}' WHERE address = '{}'\".format(name, address))\n return True", "def update(self, price, dt):\n log.info(\"Update bo feature '%s' for bo#%s with price=%s dt=%s\" % (self.name, self.bo.ticket, price, dt))", "def test_edit_resource(self):\n s1 = System()\n b1 = Books(\"1984\", \"George Orwell\", \"Harvill Secker\", \"1949\", \"0123456789123\")\n s1.edit_resource(b1, \"Animal Farm\")\n self.assertEqual(b1.get_title(), \"1984\")\n s1.add_resource(b1)\n s1.edit_resource(b1, \"Animal Farm\")\n self.assertEqual(b1.get_title(), \"Animal Farm\")", "def ap_reprovision(self, orig_name):\n api_page = \"/configuration/object/ap_reprovision\"\n url = \"{}{}?{}&UIDARUBA={}\".format(\n self.base_url,\n api_page,\n self.config_path,\n self.uidaruba)\n\n obj = {\"_action\": \"modify\",\n \"ap-name\": orig_name,\n \"reprovision_option\": \"ap-name\"\n }\n\n json_obj = json.loads(json.dumps(obj))\n resp = self.post(url, json_obj)\n print(\"ap_reprovision_resp: {}\".format(resp.status_code))\n # print(resp.text)", "def update_comment(self, comment):\n\n self._presets._update(self._preset_type, self._name, comment=comment)\n self._presets.sync()", "def set_description(self, description):\r\n self.__description = description" ]
[ "0.56508", "0.5638967", "0.5597009", "0.55432", "0.5494998", "0.5449337", "0.53832597", "0.5348151", "0.53268445", "0.5285396", "0.5275306", "0.5235795", "0.5235795", "0.51921105", "0.5159983", "0.5123664", "0.50844306", "0.50828993", "0.50785875", "0.5047809", "0.5020412", "0.5014647", "0.50084394", "0.49942082", "0.49363297", "0.49216577", "0.49170053", "0.4913295", "0.48995313", "0.48943081", "0.48918334", "0.4881213", "0.48794127", "0.48766097", "0.48717874", "0.4870156", "0.48555964", "0.4844822", "0.48336443", "0.4832798", "0.4813434", "0.47942078", "0.47876912", "0.47842845", "0.477904", "0.47675166", "0.47613144", "0.47564176", "0.4752023", "0.47421312", "0.4741375", "0.47370297", "0.4736595", "0.47255388", "0.47225314", "0.47194713", "0.47179836", "0.4711172", "0.47102934", "0.4688536", "0.4680128", "0.46792695", "0.4679062", "0.46605054", "0.4656122", "0.46538973", "0.46522614", "0.46519798", "0.46495852", "0.4630883", "0.46306062", "0.46300432", "0.4627103", "0.46190268", "0.46108803", "0.46097597", "0.4609362", "0.46082482", "0.46076143", "0.46069142", "0.45957315", "0.45914888", "0.45869836", "0.45860904", "0.4582369", "0.45816624", "0.4580535", "0.4571703", "0.45600328", "0.45582688", "0.45501405", "0.45489565", "0.45481217", "0.45473248", "0.45429754", "0.45393658", "0.4536858", "0.45344523", "0.4532671", "0.4531739" ]
0.60476094
0
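The three trailing fields of each record (negative_scores, document_score, document_rank) describe how a retriever scored the positive document against the negatives: in the record ending above, the document's score 0.60476094 exceeds every negative score, which is consistent with rank 0, while the next record pairs a score of 0.0 with rank -1. A hedged sketch of how such a rank could be recomputed from one record follows; the exact convention, in particular that -1 flags a positive that was never scored, is an assumption, not something the dump states:

def document_rank(document_score, negative_scores, scored=True):
    # Rank = number of negatives the retriever scored above the positive
    # document; rank 0 means the positive outscored every negative.
    # We assume rank -1 marks records whose positive was never scored
    # (those appear in this dump with document_score 0.0).
    if not scored:
        return -1
    # Scores are serialized as strings in this dump, hence float(s).
    return sum(float(s) > document_score for s in negative_scores)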
Updates the specified secret key's description.
def update_customer_secret_key(self, user_id, customer_secret_key_id, update_customer_secret_key_details, **kwargs): resource_path = "/users/{userId}/customerSecretKeys/{customerSecretKeyId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_customer_secret_key got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "customerSecretKeyId": customer_secret_key_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_customer_secret_key_details, response_type="CustomerSecretKeySummary") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_customer_secret_key_details, response_type="CustomerSecretKeySummary")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
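The metadata above declares a triplet objective over the (query, document, negatives) columns. A minimal sketch of how a loader might expand one record into training triplets for a contrastive loss, assuming plain dict records with exactly these field names (the helper itself is hypothetical, not part of the dataset):

def iter_triplets(record):
    # One anchor/positive pair per record; each negative yields one triplet.
    query = record["query"]        # anchor: the natural-language description
    positive = record["document"]  # the matching code snippet
    for negative in record["negatives"]:
        yield query, positive, negative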
[ "def description(self, new_description):\r\n self.set({\"description\": new_description})", "def update_experiment_description(self, experiment_id, description):\n return self.dbclient.update_by_id(Tables.EXPERIMENTS, experiment_id, {\n ExperimentAttr.DESC: description\n })", "async def slashtag_edit_description(\n self, ctx: commands.Context, tag: GuildTagConverter, *, description: str\n ):\n await ctx.send(await tag.edit_description(description))", "def update_description(self, option, desc):\n _, command = self.__options[option]\n self.__options[option] = (desc, command)", "def update_description(self, host, baseUrl, description):\n self._host = host\n self._urlBase = baseUrl\n self._description = description\n return", "def UpdateSecretKey():\n _LOG.info('Updating webapp2_secret_key.')\n webapp2_secret_key = Webapp2SecretKey(id='current_secret_key')\n webapp2_secret_key.secret_key = os.urandom(16).encode('hex')\n webapp2_secret_key.put()\n return True", "def request_description_update():\n global should_update_description\n should_update_description = True", "def secret_key(self, val):\n self.__secret_key = val", "def set_description(self, desc: str) -> None:\n self.metadata.data[\"description\"] = desc", "def set_desc(self, item_desc):\r\n self.description = item_desc", "def set_description(self, sNewDescription):\n\t\tcall_sdk_function('PrlVmDev_SetDescription', self.handle, sNewDescription)", "def setDescription(self, description):\n url = G.api + self.testId + '/snapshots/' + self.hash\n self.info = requests.put(url, auth=(G.username, G.authkey), data={'description':description})", "def ModifyDescriptionOfCost(idOfCost, description):\n\n logs.logger.debug(\"Start to modify description of Cost based on the ID.\")\n try:\n modifiedCost = session.query(Cost.Cost).filter(\n Cost.Cost.id == idOfCost).one()\n modifiedCost.description = description\n session.commit()\n logs.logger.info(\"Modify description of Cost based on the ID.\")\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def add_description(self, desc):\n self.description = desc", "def edit_description(self, task, new_description):\n raise ValueError(\"cannot edit description in 'In Progress' status\")", "def update(secret: str, value: str, env: Optional[str], config: str) -> None:\n layer = Layer.load_from_yaml(config, env)\n gen_all(layer)\n _raise_if_no_k8s_cluster_exists(layer)\n\n configure_kubectl(layer)\n amplitude_client.send_event(amplitude_client.UPDATE_SECRET_EVENT)\n secret_value = base64.b64encode(value.encode(\"utf-8\")).decode(\"utf-8\")\n patch = [{\"op\": \"replace\", \"path\": f\"/data/{secret}\", \"value\": secret_value}]\n load_kube_config()\n v1 = CoreV1Api()\n v1.patch_namespaced_secret(\"secret\", layer.name, patch)\n\n print(\"Success\")", "def set_description(self, sNewShareDescription):\n\t\tcall_sdk_function('PrlShare_SetDescription', self.handle, sNewShareDescription)", "def description(self, value):\n self._update_values('description', value)", "def update_group(self, group_id, new_description):\n url = self.groups_url + \"/\" + group_id\n new_data = json.dumps({\"description\": new_description})\n\n return requests.put(url, new_data, headers=self.headers)", "def update_description_debounced(self, value):\n self.update_description(value)", "def SetDescription(self, description):\n self.description = str(description)", "def set_description(self, description):\n self.description = description", "def description(self, description) :\n\t\ttry :\n\t\t\tself._description = 
description\n\t\texcept Exception as e:\n\t\t\traise e", "async def set_profile_description(self, ctx, *, description: str):\n max_words = self.plugin.data.profile.max_description_length\n if len(description) > max_words:\n res = f\"{ctx.emotes.web_emotion.xx} Sorry but profile description cannot exceed {max_words} word limit.\"\n return await ctx.send_line(res)\n profile = await self.cache.get_profile(ctx.author.id)\n await profile.set_description(description)\n embed = self.bot.theme.embeds.primary(title=\"✅ Your Profile Description has been updated to:\")\n embed.set_author(name=ctx.author.name, icon_url=ctx.author.avatar_url)\n embed.description = profile.description\n await ctx.send(\"\", embed=embed)", "def setServiceDescription(self, description):\n with self.zeroconf.lock:\n self.zeroconf.outbox.put(description)", "def set_package_description(self, package, description):\n with self._conn.begin():\n self._conn.execute(\n \"VALUES (set_package_description(%s, %s))\",\n (package, description))", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, new_description):\n self.set_description(new_description, self._xml)\n self._description = self._read_description(self._xml)", "def set_description(self, description):\r\n self.__description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = 
description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def set_description(self, description):\n self._description = description", "def description(self, description):\n\n self._set_field(\"description\", description)", "def edit_key(self, key_id, label=None, notes=None):\r\n data = {}\r\n\r\n if label:\r\n data['label'] = label\r\n\r\n if notes:\r\n data['notes'] = notes\r\n\r\n return self.sshkey.editObject(data, id=key_id)", "def set_description(self, sDescription):\n\t\tcall_sdk_function('PrlVirtNet_SetDescription', self.handle, sDescription)", "def setDescription(self, valueName, valueDescription):\n\t\tself.settings[valueName][1] = valueDescription", "def put_column_description(self, *, table_uri: str, column_name: str, description: str) -> None:\n column_uri = table_uri + '/' + column_name\n desc_key = column_uri + '/_description'\n description = RDSColumnDescription(rk=desc_key,\n description_source='description',\n description=description,\n column_rk=column_uri)\n try:\n with self.client.create_session() as session:\n session.merge(description)\n session.commit()\n except Exception as e:\n LOGGER.exception(f'Failed to update the table {table_uri} column {column_name} description')\n raise e", "def description(self, description: str):\n\n self._description = description", "def description(self, description: str):\n\n self._description = description", "def description(self, description: str):\n\n self._description = description", "def description(self, description: str):\n\n self._description = description", "def set_description(self, description):\n self.__description = description", "def set_description(desc):\n global last_description\n last_description = desc", "def set_description(self, data):\n self._description = self._uni(data)", "def test_update(self):\n obj = self.provision_single_asset()\n test_string = \"testing this thing\"\n p = {'id': obj.id, 'description': test_string}\n self.put('widget', 200, params=p)\n self.session.refresh(obj)\n assert obj.description == test_string", "def description(self, newDescription=None):\n pass", "def _set_description(\n meta: Dict, description: Optional[Union[str, bool]] = None, **kwargs\n) -> Dict:\n if description is False or description is None:\n show_description_value = MetaWidget.DESCRIPTION_OPTION_NOTHING\n description = \"\"\n elif isinstance(description, str):\n show_description_value = MetaWidget.DESCRIPTION_OPTION_CUSTOM\n else:\n raise IllegalArgumentError(\n \"When using the add_card_widget or add_service_card_widget, 'description' must be \"\n \"'text_type' or None or False. 
Type is: {}\".format(type(description))\n )\n meta.update(\n {\n MetaWidget.SHOW_DESCRIPTION_VALUE: show_description_value,\n MetaWidget.CUSTOM_DESCRIPTION: description,\n }\n )\n return meta", "def set_description(self, sNewVmDescription):\n\t\tcall_sdk_function('PrlVmCfg_SetDescription', self.handle, sNewVmDescription)", "def update(ctx: CLIContext, access_key, resource_policy, is_admin, is_active, rate_limit):\n with Session() as session:\n try:\n data = session.KeyPair.update(\n access_key,\n is_active=is_active,\n is_admin=is_admin,\n resource_policy=resource_policy,\n rate_limit=rate_limit)\n except Exception as e:\n ctx.output.print_mutation_error(\n e,\n item_name='keypair',\n action_name='update',\n )\n sys.exit(1)\n if not data['ok']:\n ctx.output.print_mutation_error(\n msg=data['msg'],\n item_name='keypair',\n action_name='update',\n )\n sys.exit(1)\n ctx.output.print_mutation_result(\n data,\n extra_info={\n 'access_key': access_key,\n },\n )", "def description(self, newDescription=None):\n if newDescription != None:\n self._setValue('description', newDescription)\n return self._getValue('description')", "def description(request):\n if request.method != 'POST':\n description = request.issue.description or \"\"\n return HttpTextResponse(description)\n if not request.issue.edit_allowed:\n if not IS_DEV:\n return HttpTextResponse('Login required', status=401)\n issue = request.issue\n issue.description = request.POST.get('description')\n issue.put()\n return HttpTextResponse('')", "def ingredient_description(self, ingredient_description):\n\n self._ingredient_description = ingredient_description", "def add_command_description(self, command, description):\n self._command_list[command] = description", "async def setIncident_locationDescription(\n self, eventID: str, incidentNumber: int, description: str, author: str\n ) -> None:", "def set_description(self, room_description):\n self.description = room_description", "def update(self, key, new_value):\n raise NotImplementedError", "def update_key(self, key):\n self._api_key = key", "def _update_input_config(input_config,secret_key):\n\n for key in input_config.keys():\n if input_config[key].get('arguments') is None:\n input_config[key]['arguments'] = {'secret':secret_key}\n elif input_config[key]['arguments'].get('secret') is None:\n input_config[key]['arguments']['secret'] = secret_key", "def set_description(\n self, path: Union[bytes, str], description: Optional[Union[bytes, str]] = None\n ) -> None:\n path = _to_bytes_or_null(path)\n description = _to_bytes_or_null(description)\n ret = lib.Fapi_SetDescription(self._ctx, path, description)\n _chkrc(ret)", "def set_description(self, description):\n self.description = description\n if not self.record:\n return\n self.mdb.results.update({'_id':self.result_id}, \n {'$set':{'test_case':description}})" ]
[ "0.6546764", "0.6383036", "0.63771176", "0.6299966", "0.6243986", "0.6152988", "0.6130237", "0.5990805", "0.5977715", "0.59324723", "0.59167236", "0.58869696", "0.5866867", "0.58435416", "0.5841273", "0.5839453", "0.5824073", "0.5772846", "0.5757899", "0.57126033", "0.5691525", "0.56674993", "0.56334877", "0.56284237", "0.5620298", "0.56113714", "0.56083614", "0.56083614", "0.56083614", "0.56083614", "0.5576048", "0.5573803", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.5553428", "0.55477893", "0.55420375", "0.55408233", "0.5538753", "0.5529104", "0.5511183", "0.55013853", "0.55013853", "0.55013853", "0.55013853", "0.5485184", "0.54779315", "0.54730403", "0.54703456", "0.54420084", "0.5399438", "0.53942764", "0.53840774", "0.53812516", "0.53769714", "0.5367749", "0.5359017", "0.5352408", "0.5336002", "0.5320722", "0.5319692", "0.53108555", "0.52827334", "0.52817464" ]
0.0
-1
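For reference, a minimal usage sketch of the update_customer_secret_key method documented in the record above, assuming a standard oci Python SDK setup with a configured ~/.oci/config profile; the OCIDs and display name below are hypothetical placeholders:

import oci

config = oci.config.from_file()  # assumes a configured ~/.oci/config profile
identity = oci.identity.IdentityClient(config)

details = oci.identity.models.UpdateCustomerSecretKeyDetails(
    display_name="rotated-s3-compat-key"  # the new description to set
)
# Returns a CustomerSecretKeySummary, matching the response_type in the record.
summary = identity.update_customer_secret_key(
    user_id="ocid1.user.oc1..exampleuniqueID",                       # hypothetical
    customer_secret_key_id="ocid1.credential.oc1..exampleuniqueID",  # hypothetical
    update_customer_secret_key_details=details,
).data
print(summary.display_name)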
Updates the specified dynamic group.
def update_dynamic_group(self, dynamic_group_id, update_dynamic_group_details, **kwargs): resource_path = "/dynamicGroups/{dynamicGroupId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_dynamic_group got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "dynamicGroupId": dynamic_group_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_dynamic_group_details, response_type="DynamicGroup") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_dynamic_group_details, response_type="DynamicGroup")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_group():\n _id = request.form['_id']\n name = request.form['name']\n data, code, message = FIELD_SERVICE.update_group(_id, name)\n return __result(data, code, message)", "def update_group(groupname):\n name = request.get_json().get(\"name\", None)\n description = request.get_json().get(\"description\", None)\n response = jsonify(\n admin.update_group(current_app.scoped_session(), groupname, description, name)\n )\n return response", "def do_group_update():\n target_group = Group.query.filter_by(id=request.form['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n target_group.name = request.form['name']\n target_group.group_meter_id = request.form['meter']\n target_group.group_production_meter_id_first = request.form['group_production_meter_id_first']\n target_group.group_production_meter_id_second = request.form[\n 'group_production_meter_id_second']\n\n db.session.commit()\n return group_list(\"Updated group \" + target_group.name)", "def update_group(self, group_id, **kwargs):\n post_body = json.dumps({'group': kwargs})\n resp, body = self.patch('groups/%s' % group_id, post_body)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def group_update(*, login_manager: LoginManager, group_id: str, **kwargs: Any):\n groups_client = login_manager.get_groups_client()\n\n # get the current state of the group\n group = groups_client.get_group(group_id)\n\n # assemble put data using existing values for any field not given\n # note that the API does not accept the full group document, so we must\n # specify name and description instead of just iterating kwargs\n data = {}\n for field in [\"name\", \"description\"]:\n if kwargs.get(field) is not None:\n data[field] = kwargs[field]\n else:\n data[field] = group[field]\n\n response = groups_client.update_group(group_id, data)\n\n formatted_print(response, simple_text=\"Group updated successfully\")", "def update_group(self, group_name, new_group_name=None, new_path=None):\r\n params = {'GroupName' : group_name}\r\n if new_group_name:\r\n params['NewGroupName'] = new_group_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateGroup', params)", "def request_group_update():\n target_group = Group.query.filter_by(id=request.args['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n return Response(\n render_template(\n 'admin/group/create-update.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/admin/group/update\",\n id=target_group.id,\n name=target_group.name,\n meter=target_group.group_meter_id,\n group_production_meter_id_first=target_group.group_production_meter_id_first,\n group_production_meter_id_second=target_group.group_production_meter_id_second),\n mimetype='text/html')", "def update(self,\n provider_id,\n group_id,\n group,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'group_id': group_id,\n 'group': group,\n })", "def update(self):\r\n return self.connection._update_group('UpdateAutoScalingGroup', self)", "def update_targetgroup(self, group_id, **kwargs):\r\n result = False\r\n if self._db(self._db.targetgroup.id==group_id).select():\r\n result = True\r\n self._db(self._db.targetgroup.id==group_id).update(**kwargs)\r\n self._db.commit()\r\n return result", "def test_modify_group(self):\n response = self.client.modify_group(\"ABC123\")\n self.assertEqual(response[\"method\"], \"POST\")\n 
self.assertEqual(response[\"uri\"], \"/admin/v1/groups/ABC123\")\n self.assertEqual(util.params_to_dict(response[\"body\"]), {\"account_id\": [self.client.account_id]})", "def update_group(self, group_id, new_description):\n url = self.groups_url + \"/\" + group_id\n new_data = json.dumps({\"description\": new_description})\n\n return requests.put(url, new_data, headers=self.headers)", "def test_update_group(self):\n pass", "async def update_contact_group(dbcon: DBConnection, contact_group_id: int, data: Dict[str, str]) -> None:\n\n async def _run(cur: Cursor) -> None:\n for key, value in data.items():\n if key not in ['name', 'active']:\n raise errors.IrisettError('invalid contact key %s' % key)\n q = \"\"\"update contact_groups set %s=%%s where id=%%s\"\"\" % key\n q_args = (value, contact_group_id)\n await cur.execute(q, q_args)\n\n if not await contact_group_exists(dbcon, contact_group_id):\n raise errors.InvalidArguments('contact group does not exist')\n await dbcon.transact(_run)", "def update_group(self, group_id, update_group_details, **kwargs):\n resource_path = \"/groups/{groupId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_group got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"groupId\": group_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_group_details,\n response_type=\"Group\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_group_details,\n response_type=\"Group\")", "def update_group_with_http_info(self, bucket_id, group_id, group, **kwargs):\n\n all_params = ['bucket_id', 'group_id', 'group', 'if_match', 'if_none_match', 'fields']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method update_group\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'bucket_id' is set\n if ('bucket_id' not in params) or (params['bucket_id'] is None):\n raise ValueError(\"Missing the required parameter `bucket_id` when calling `update_group`\")\n # verify the required parameter 'group_id' is set\n if ('group_id' not in params) or (params['group_id'] is None):\n raise ValueError(\"Missing the 
required parameter `group_id` when calling `update_group`\")\n # verify the required parameter 'group' is set\n if ('group' not in params) or (params['group'] is None):\n raise ValueError(\"Missing the required parameter `group` when calling `update_group`\")\n\n if 'if_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_match']):\n raise ValueError(\"Invalid value for parameter `if_match` when calling `update_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n if 'if_none_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_none_match']):\n raise ValueError(\"Invalid value for parameter `if_none_match` when calling `update_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n\n collection_formats = {}\n\n resource_path = '/buckets/{bucket_id}/groups/{group_id}'.replace('{format}', 'json')\n path_params = {}\n if 'bucket_id' in params:\n path_params['bucket_id'] = params['bucket_id']\n if 'group_id' in params:\n path_params['group_id'] = params['group_id']\n\n query_params = {}\n if 'fields' in params:\n query_params['_fields'] = params['fields']\n collection_formats['_fields'] = 'csv'\n\n header_params = {}\n if 'if_match' in params:\n header_params['If-Match'] = params['if_match']\n if 'if_none_match' in params:\n header_params['If-None-Match'] = params['if_none_match']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'group' in params:\n body_params = params['group']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['basicAuth']\n\n return self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Group',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def update_group(\n self,\n group,\n validate_only=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n # Wrap the transport method to add retry and timeout logic.\n if \"update_group\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"update_group\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.update_group,\n default_retry=self._method_configs[\"UpdateGroup\"].retry,\n default_timeout=self._method_configs[\"UpdateGroup\"].timeout,\n client_info=self._client_info,\n )\n\n request = group_service_pb2.UpdateGroupRequest(\n group=group, validate_only=validate_only,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"group.name\", group.name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"update_group\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def update(person_group_id, 
name=None, user_data=None):\n url = 'persongroups/{}'.format(person_group_id)\n json = {\n 'name': name,\n 'userData': user_data,\n }\n\n return util.request('PATCH', url, json=json)", "def ModifyGroup(self, group, reason=None, **kwargs):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_PUT,\n (\"/%s/groups/%s/modify\" %\n (GANETI_RAPI_VERSION, group)), query, kwargs)", "def update(self, consistencygroup, **kwargs):\n if not kwargs:\n return\n\n body = {\"consistencygroup\": kwargs}\n\n return self._update(\"/consistencygroups/%s\" %\n base.getid(consistencygroup), body)", "def security_group_update(secgroup=None, auth=None, **kwargs):\n cloud = get_operator_cloud(auth)\n kwargs = _clean_kwargs(keep_name=True, **kwargs)\n return cloud.update_security_group(secgroup, **kwargs)", "def _mod_group(self, command, group_id, group_type, buckets=None):\n self.datapath.send_msg(\n self.parser.OFPGroupMod(\n datapath=self.datapath,\n command=command,\n group_id=group_id,\n type_=group_type,\n buckets=buckets,\n )\n )", "def update_groups(self, groups):\n self.fetch_group_messages() # preload messages before updating groups\n self.groups = groups\n self.put()", "def update_adgroup(self, adgroup_id, name=None, adgroup_status=None,\n bid_type=None, bid_info=None, creative_id=None,\n tracking_specs=None, view_tags=None, objective=None,\n targeting=None, conversion_specs=None,\n batch=False):\n path = \"%s\" % adgroup_id\n args = {}\n if name:\n args['name'] = name\n if bid_type:\n args['bid_type'] = bid_type\n if bid_info:\n args['bid_info'] = json.dumps(bid_info)\n\n if creative_id:\n args['creative'] = json.dumps({'creative_id': creative_id})\n if tracking_specs:\n args['tracking_specs'] = json.dumps(tracking_specs)\n if view_tags:\n args['view_tags'] = json.dumps(view_tags)\n if objective:\n args['objective'] = objective\n if adgroup_status:\n args['adgroup_status'] = adgroup_status\n if targeting:\n args['targeting'] = json.dumps(targeting)\n if conversion_specs:\n args['conversion_specs'] = json.dumps(conversion_specs)\n return self.make_request(path, 'POST', args, batch=batch)", "def update_research_group(self, employee_id, new_research_group):\n cursor = self.dbconnect.get_cursor()\n try:\n cursor.execute('UPDATE employee '\n 'SET research_group = %s '\n 'WHERE id=%s;',\n (new_research_group, employee_id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise", "def replace_namespaced_group(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method replace_namespaced_group\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `replace_namespaced_group`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `replace_namespaced_group`\")\n\n resource_path = '/oapi/v1/groups/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n 
local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1Group',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def put(self):\n status = ErrorCode.SUCCESS\n try:\n data = DotDict(json_decode(self.request.body))\n cid = self.current_user.cid\n tid = self.current_user.tid\n gid = data.gid\n name = data.name\n logging.info(\"[UWEB] Modify group request: %s, cid: %s\",\n data, self.current_user.cid)\n except Exception as e:\n status = ErrorCode.ILLEGAL_DATA_FORMAT\n logging.exception(\"[UWEB] Invalid data format. body:%s, Exception: %s\",\n self.request.body, e.args)\n self.write_ret(status)\n return\n\n try: \n group = self.get_group_by_cid(cid, name)\n if group:\n status = ErrorCode.GROUP_EXIST\n self.write_ret(status)\n return\n\n self.db.execute(\"UPDATE T_GROUP\"\n \" SET name = %s\"\n \" WHERE id = %s\",\n name, gid)\n\n # NOTE: wspush to client \n if status == ErrorCode.SUCCESS:\n WSPushHelper.pushS3(tid, self.db, self.redis)\n\n self.write_ret(status)\n except Exception as e:\n logging.exception(\"[UWEB] Modify group failed. cid: %s, Exception: %s\",\n self.current_user.cid, e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def patch(self,\n provider_id,\n group_id,\n group,\n ):\n return self._invoke('patch',\n {\n 'provider_id': provider_id,\n 'group_id': group_id,\n 'group': group,\n })", "def test_user_group_controller_update(self):\n pass", "def patch_group_with_http_info(self, bucket_id, group_id, group, **kwargs):\n\n all_params = ['bucket_id', 'group_id', 'group', 'if_match', 'if_none_match', 'fields']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method patch_group\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'bucket_id' is set\n if ('bucket_id' not in params) or (params['bucket_id'] is None):\n raise ValueError(\"Missing the required parameter `bucket_id` when calling `patch_group`\")\n # verify the required parameter 'group_id' is set\n if ('group_id' not in params) or (params['group_id'] is None):\n raise ValueError(\"Missing the required parameter `group_id` when calling `patch_group`\")\n # verify the required parameter 'group' is set\n if ('group' not in params) or (params['group'] is None):\n raise ValueError(\"Missing the required parameter `group` when calling `patch_group`\")\n\n if 'if_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_match']):\n raise ValueError(\"Invalid value for parameter `if_match` when calling `patch_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n if 'if_none_match' in params and not 
re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_none_match']):\n raise ValueError(\"Invalid value for parameter `if_none_match` when calling `patch_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n\n collection_formats = {}\n\n resource_path = '/buckets/{bucket_id}/groups/{group_id}'.replace('{format}', 'json')\n path_params = {}\n if 'bucket_id' in params:\n path_params['bucket_id'] = params['bucket_id']\n if 'group_id' in params:\n path_params['group_id'] = params['group_id']\n\n query_params = {}\n if 'fields' in params:\n query_params['_fields'] = params['fields']\n collection_formats['_fields'] = 'csv'\n\n header_params = {}\n if 'if_match' in params:\n header_params['If-Match'] = params['if_match']\n if 'if_none_match' in params:\n header_params['If-None-Match'] = params['if_none_match']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'group' in params:\n body_params = params['group']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json', 'application/merge-patch+json', 'application/json-patch+json'])\n\n # Authentication setting\n auth_settings = ['basicAuth']\n\n return self.api_client.call_api(resource_path, 'PATCH',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Group',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def test_updateGroup(self):\n self.result = None\n\n def callRemote(methodName, *a):\n self.result = methodName == '_updateTabsFromServer'\n\n tab = widgets.Tab(u'id4', u'Title 4', self.contentFactory)\n group = widgets.TabGroup(u'group1', u'Group', tabs=[tab])\n self.patch(self.tabView, 'callRemote', callRemote)\n self.tabView.updateGroup(group)\n self.assertTrue(self.result)\n self.assertNotIdentical(self.tabView.getTab(u'id4'), None)\n self.assertNotIdentical(self.tabView.getGroup(u'group1'), None)\n self.assertIdentical(self.tabView.tabs[-1], tab)\n\n # Update a group, and add a new tab.\n newTab = widgets.Tab(u'id5', u'Title 5', self.contentFactory)\n replacementGroup = widgets.TabGroup(\n u'group1', u'New Group', tabs=[newTab])\n self.tabView.updateGroup(replacementGroup)\n self.assertIdentical(\n self.tabView.getGroup(u'group1'), replacementGroup)\n self.assertNotIdentical(self.tabView.getTab(u'id5'), None)\n self.assertRaises(\n errors.InvalidIdentifier, self.tabView.getTab, u'id4')\n self.assertNotIn(tab, self.tabView.tabs)\n\n # Remove a tab from a group.\n self.tabView.removeTabs([newTab])\n self.assertRaises(\n errors.InvalidIdentifier, self.tabView.getTab, u'id5')\n self.assertNotIn(newTab, self.tabView.getGroup(u'group1').tabs)", "def test_editGroup(self):\n\t\tuser = User.objects.get(id=1)\n\t\tself.client.force_authenticate(user=user)\n\t\tgroup = Group.objects.create(admin=user, name='testGroup3', isPublic=True, \n\t\t\tdescription='This is another test group that just created.')\n\n\t\turl = \"/groups/3/\"\n\t\tdata = {\n\t\t\t'name' : 'anotherTestGroup'\n\t\t}\n\n\t\tresponse = self.client.patch(url, data, 
format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(response.data[\"name\"], 'anotherTestGroup')\n\n\t\turl = \"/groups/2/\"\n\t\tresponse = self.client.patch(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def set_group(self, id_: str, player: str, group: list):\n self._groups[id_] = {\n 'player': player,\n 'group': group\n }", "def test_update_team_user_group(client):\n group = client.update_team_user_group(TEAM_ID, GROUP_ID, {\n \"name\": \"Updated Python group\",\n \"is_reviewer\": False,\n \"is_admin\": True,\n \"admin_rights\": [\"upload\"]\n })\n assert group.team_id == TEAM_ID\n assert group.group_id == GROUP_ID\n assert group.name == \"Updated Python group\"\n assert group.permissions['is_admin']\n assert not group.permissions['is_reviewer']", "def setGroup(self, group):\n\t\tself.config.GROUP = group", "def capacitygroup_update(cmd_ctx, cpc, capacitygroup, **options):\n cmd_ctx.execute_cmd(\n lambda: cmd_capacitygroup_update(cmd_ctx, cpc, capacitygroup, options))", "def slotGroupEdit(self):\n dialog = GroupDialog(self)\n if dialog.exec_loop() == QDialog.Accepted:\n if dialog.group_id != None:\n # set group\n self.sampleGroup.globalGroupId = dialog.group_id\n self.groupLabel.setText(dialog.group_id)\n else:\n # ungroup\n self.sampleGroup.globalGroupId = None\n self.groupLabel.setText('Not\\nGrouped')\n self.emit(PYSIGNAL('groupChanged'), (self,))", "def _metadata_update_group(group_id):\n\n # get all firmwares in this group\n firmwares = db.firmware.get_all()\n firmwares_filtered = []\n for f in firmwares:\n if f.target == 'private':\n continue\n if f.group_id != group_id:\n continue\n firmwares_filtered.append(f)\n\n # create metadata file for the embargoed firmware\n affidavit = _create_affidavit()\n filename = 'firmware-%s.xml.gz' % _qa_hash(group_id)\n _generate_metadata_kind(filename,\n firmwares_filtered,\n affidavit=affidavit)", "def regroup(self, serial, group):\n api_page = \"/configuration/object/ap_regroup\"\n url = \"{}{}?{}&UIDARUBA={}\".format(\n self.base_url,\n api_page,\n self.config_path,\n self.uidaruba)\n\n obj_dict = {'serial-num': serial, 'new-group': group}\n obj_json = json.loads(json.dumps(obj_dict))\n\n resp = self.post(url, obj_json)\n\n print(resp.status_code)\n print(resp.text)", "def upsert_group(self,\n group, # type: Group\n *options, # type: UpsertGroupOptions\n **kwargs # type: Any\n ):\n # This endpoint accepts application/x-www-form-urlencoded and requires the data be sent as form data.\n # The name/id should not be included in the form data.\n # Roles should be a comma separated list of strings.\n # If, only if, the role contains a bucket name then the rolename should be suffixed\n # with[<bucket_name>] e.g. 
bucket_full_access[default],security_admin.\n\n final_args = forward_args(kwargs, *options)\n final_args.update({k: v for k, v in group.as_dict.items() if k in {\n 'roles', 'description', 'ldap_group_reference'}})\n self._admin_bucket.group_upsert(group.name, **final_args)", "def patch_namespaced_group(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method patch_namespaced_group\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `patch_namespaced_group`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `patch_namespaced_group`\")\n\n resource_path = '/oapi/v1/groups/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'PATCH',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1Group',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def groups_update(self, mar, request):\n group_id = mar.viewed_user_auth.user_id\n member_ids_dict, owner_ids_dict = self._services.usergroup.LookupMembers(\n mar.cnxn, [group_id])\n owner_ids = owner_ids_dict.get(group_id, [])\n member_ids = member_ids_dict.get(group_id, [])\n if not permissions.CanEditGroup(\n mar.perms, mar.auth.effective_ids, owner_ids):\n raise permissions.PermissionException(\n 'The user is not allowed to edit this group.')\n\n group_settings = self._services.usergroup.GetGroupSettings(\n mar.cnxn, group_id)\n if (request.who_can_view_members or request.ext_group_type\n or request.last_sync_time or request.friend_projects):\n group_settings.who_can_view_members = (\n request.who_can_view_members or group_settings.who_can_view_members)\n group_settings.ext_group_type = (\n request.ext_group_type or group_settings.ext_group_type)\n group_settings.last_sync_time = (\n request.last_sync_time or group_settings.last_sync_time)\n if framework_constants.NO_VALUES in request.friend_projects:\n group_settings.friend_projects = []\n else:\n id_dict = self._services.project.LookupProjectIDs(\n mar.cnxn, request.friend_projects)\n group_settings.friend_projects = (\n list(id_dict.values()) or group_settings.friend_projects)\n self._services.usergroup.UpdateSettings(\n mar.cnxn, 
group_id, group_settings)\n\n if request.groupOwners or request.groupMembers:\n self._services.usergroup.RemoveMembers(\n mar.cnxn, group_id, owner_ids + member_ids)\n owners_dict = self._services.user.LookupUserIDs(\n mar.cnxn, request.groupOwners, autocreate=True)\n self._services.usergroup.UpdateMembers(\n mar.cnxn, group_id, list(owners_dict.values()), 'owner')\n members_dict = self._services.user.LookupUserIDs(\n mar.cnxn, request.groupMembers, autocreate=True)\n self._services.usergroup.UpdateMembers(\n mar.cnxn, group_id, list(members_dict.values()), 'member')\n\n return api_pb2_v1.GroupsUpdateResponse()", "def put_group(\n group_id: BSONObjectId,\n data: PutGroupIn,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n grp: Group = Group.objects.get(pk=group_id)\n if not (\n tkn.owner == grp.owner or has_clearance(tkn.owner, \"sni.update_group\")\n ):\n raise PermissionError\n logging.debug(\"Updating group %s (%s)\", grp.group_name, group_id)\n if data.add_members is not None:\n grp.members += [\n User.objects.get(character_name=member_name)\n for member_name in set(data.add_members)\n ]\n if data.authorized_to_login is not None:\n assert_has_clearance(tkn.owner, \"sni.set_authorized_to_login\")\n grp.authorized_to_login = data.authorized_to_login\n if data.description is not None:\n grp.description = data.description\n if data.members is not None:\n grp.members = [\n User.objects.get(character_name=member_name)\n for member_name in set(data.members)\n ]\n if data.owner is not None:\n grp.owner = User.objects.get(character_name=data.owner)\n if data.remove_members is not None:\n grp.members = [\n member\n for member in grp.members\n if member.character_name not in data.remove_members\n ]\n grp.members = list(set(grp.members + [grp.owner]))\n grp.save()\n return GetGroupOut.from_record(grp)", "def async_update_group_state(self) -> None:", "def update_adcampaign_group(self, campaign_group_id, name=None,\n campaign_group_status=None, objective=None,\n batch=False):\n path = '%s' % campaign_group_id\n args = {}\n if name is not None:\n args['name'] = name\n if campaign_group_status is not None:\n args['campaign_group_status'] = campaign_group_status\n if objective is not None:\n args['objective'] = objective\n return self.make_request(path, 'POST', args, batch=batch)", "def test_update_device_group_by_id(self):\n pass", "def set_group(self, group):\n # Implemented from template for osid.resource.ResourceForm.set_group_template\n if self.get_group_metadata().is_read_only():\n raise errors.NoAccess()\n if not self._is_valid_boolean(group):\n raise errors.InvalidArgument()\n self._my_map['group'] = group", "async def handle_set_group(self, match: Match[str], payload: str) -> None:\n groupid = match.group(1)\n\n try:\n group = self._bridge.groups[groupid]\n state = GroupSetState(**json.loads(payload))\n LOGGER.info(f\"Updating group {group.name}\")\n await group.set_action(**state.dict())\n except IndexError:\n LOGGER.warning(f\"Unknown group id: {groupid}\")\n except json.JSONDecodeError:\n LOGGER.warning(f\"Bad JSON on light request: {payload}\")\n except TypeError:\n LOGGER.warning(f\"Expected dictionary, got: {payload}\")\n except ValidationError as e:\n LOGGER.warning(f\"Invalid light state: {e}\")", "def edit_group_command(self):\n self.switch_frame(\"Edit Group\")\n id = self.parent.get_frame_id(\"Edit Group\")\n self.parent.frames[id].display_group(self.user.active_group)", "def set(self, name_group, key, value):\n self.psettings.beginGroup(name_group)\n 
self.psettings.setValue(key, value)\n self.closeGroup()", "def modify_resource_group(\n self,\n request: dds_20151201_models.ModifyResourceGroupRequest,\n ) -> dds_20151201_models.ModifyResourceGroupResponse:\n runtime = util_models.RuntimeOptions()\n return self.modify_resource_group_with_options(request, runtime)", "def test_update_resource_group(self):\n pass", "def with_group(self, group):\n\t\tself.variables['group'] = group\n\t\treturn self", "def edit_group(self, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/Edit/\"))", "def update_supergroups_by_id(self, group_ids, kind):\n\n updated = set(map(int, group_ids))\n\n logger = logging.getLogger(__name__)\n debug = logger.debug\n\n debug('updating %s: %r', kind, updated)\n\n groups = self['__store']\n\n # print(kind)\n existing = getattr(self, kind + 's')\n debug('existing %s: %r', kind, updated)\n # print(updated, existing)\n\n if updated != existing:\n\n group_lookup = {\n group.group_id: group.name\n for group in groups\n }\n\n db = groups.db\n\n to_remove = existing - updated\n if to_remove:\n debug('removing %s %r from %r', kind, to_remove, self.name)\n cmd = 'delete from subgroups where subgroup_id=%s and group_id in %s'\n db(cmd, self.group_id, to_remove)\n\n for group_id in to_remove:\n audit(\n 'remove %s' % kind,\n group_lookup.get(\n group_id,\n 'unknown (%s)' % group_id,\n ),\n self.name\n )\n\n to_add = updated - existing\n if to_add:\n debug('adding %s %r to %r', kind, to_add, self.name)\n cmd = 'insert into subgroups (group_id, subgroup_id) values (%s, %s)'\n sequence = zip(to_add, [self.group_id] * len(to_add))\n db.execute_many(cmd, sequence)\n\n for subgroup_id in to_add:\n audit(\n 'add %s' % kind,\n group_lookup.get(\n subgroup_id,\n 'unknown (%s)' % subgroup_id,\n ),\n self.name\n )\n\n else:\n debug('%s unchanged', kind)", "def fusion_api_update_group_role_assignment(self, body, api=None, headers=None):\n return self.LoginDomainsGroupToRoleMapping.update(body, api, headers)", "def post_security_group_update(self, resource_id, resource_dict):\n pass", "def set_group(self, group: str) -> None:\n self.group = group", "def _do_update(self, meta, k, v):\n self.runtime.logger.info('{}: [{}] -> {}'.format(meta.in_group_config_path, k, v))\n meta.config[k] = v\n meta.save()", "def customer_group_customer_put(user_id, group_id):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n user_group_user_details = {}\n user_group_user_details[\"user_id\"] = user_id\n user_group_user_details[\"group_id\"] = group_id\n\n query = \"\"\"\n Update `users_groups`\n SET `group_id` = \\\"%(group_id)s\\\"\n WHERE `user_id` = \\\"%(user_id)s\\\" \n \"\"\" %(user_group_user_details)\n cursor = db.cursor()\n result = {\"success\" : 0, \"message\" : \"Customer's Group is not updated\"}\n try:\n if cursor.execute(query):\n db.commit()\n result = {\"success\" : 0, \"message\" : \"Customer updated Successfully\"}\n except Exception as e:\n result = {\"success\" : 1, \"message\" : \"Customer can not be updated in the Group. 
Error \\\"\\'%s\\'\\\" \\\n Query \\\"\\'%s\\'\\\" \" % (e, query) }\n finally:\n cursor.close()\n db.close()\n return result", "def set_group(self, data, group, intg, dq=None):\n # TODO: Include a 2-D DQ array to be combined with the GROUPDQ array\n #\n # Copy the input data to a 2-D plane for this group/intg combination.\n # NOTE: This only works if data array is broadcastable so the shape\n # of the data array is checked.\n #\n data = np.asarray(data, dtype=self.data.dtype)\n detector_shape = (self.rows, self.columns)\n if data.shape == detector_shape:\n self.data[intg, group, :, :] = data \n # Invalidate the averaged data\n self._data_averaged = None\n # Update the group data quality array if necessary.\n if dq is not None:\n if self.include_groupdq:\n dq = np.asarray(dq, dtype=self.groupdq.dtype) # Convert to same data type.\n self.groupdq[intg, group, :, :] |= dq\n else:\n strg = \"Incompatible arguments. A groupdq array is \"\n strg += \"provided when include_groupdq=False. \"\n strg += \"The array is ignored.\"\n LOGGER.error(strg)\n else:\n strg = \"Group data array has the wrong shape \"\n strg += \"(%s instead of %s).\" % (str(data.shape),\n str(detector_shape))\n raise TypeError(strg)", "def group(self, group):\n self._group = group", "def update_tag_group_acl(session, tag_id=None, group_id=None,\n allow_install=False, allow_uninstall=False, allow_reboot=False,\n allow_schedule=False, allow_wol=False, allow_snapshot_creation=False,\n allow_snapshot_removal=False, allow_snapshot_revert=False,\n allow_tag_creation=False, allow_tag_removal=False, allow_read=False,\n date_modified=datetime.now(), username='system_user'\n ):\n session = validate_session(session)\n group = None\n\n if group_id and tag_id:\n group = session.query(TagGroupAccess).\\\n filter(TagGroupAccess.group_id == group_id).\\\n filter(TagGroupAccess.tag_id == tag_id).first()\n if group:\n try:\n group.allow_install = allow_install\n group.allow_uninstall = allow_uninstall\n group.allow_reboot = allow_reboot\n group.allow_schedule = allow_schedule\n group.allow_wol = allow_wol\n group.allow_snapshot_creation = allow_snapshot_creation\n group.allow_snapshot_removal = allow_snapshot_removal\n group.allow_snapshot_revert = allow_snapshot_revert\n group.allow_tag_creation = allow_tag_creation\n group.allow_tag_removal = allow_tag_removal\n group.allow_read = allow_read\n group.date_modified = date_modified\n session.commit()\n return({\n 'pass': True,\n 'message': 'ACL for Group %s was modified for Tag %s' % \\\n (group_id, tag_id)\n })\n except Exception as e:\n session.rollback()\n return({\n 'pass': False,\n 'message': 'Failed to modify ACL for Group %s on Tag %s' % \\\n (group_id, tag_id)\n })\n else:\n return({\n 'pass': False,\n 'message': 'Invalid group_id %s and or tag_id' % \\\n (group_id, tag_id)\n })", "def update_security_group(self, security_group, body=None):\r\n return self.put(self.security_group_path %\r\n security_group, body=body)", "def set_group(self, address, group):\n self.groups[address] = group", "def update(self, val):\n try:\n key = self._group_by(val)\n except lena.core.LenaKeyError:\n raise lena.core.LenaValueError(\n \"could not find a key for {}\".format(val)\n )\n\n if key in self.groups:\n self.groups[key].append(val)\n else:\n self.groups[key] = [val]", "def update_by_key(\n self,\n key: str,\n version: int,\n actions: typing.List[CustomerGroupUpdateAction],\n *,\n expand: OptionalListStr = None,\n force_update: bool = False,\n ) -> CustomerGroup:\n params = 
self._serialize_params({\"expand\": expand}, _CustomerGroupUpdateSchema)\n update_action = CustomerGroupUpdate(version=version, actions=actions)\n return self._client._post(\n endpoint=f\"customer-groups/key={key}\",\n params=params,\n data_object=update_action,\n response_class=CustomerGroup,\n force_update=force_update,\n )", "def test_update_entry_groups(self):\r\n # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.\r\n pass", "def edit_service_group(self, loadbal_id, group_id, allocation=None,\r\n port=None, routing_type=None, routing_method=None):\r\n kwargs = NestedDict({})\r\n kwargs['mask'] = ('mask[virtualServers[serviceGroups'\r\n '[services[groupReferences]]]]')\r\n\r\n load_balancer = self.lb_svc.getObject(id=loadbal_id, **kwargs)\r\n virtual_servers = load_balancer['virtualServers']\r\n for virtual_server in virtual_servers:\r\n if virtual_server['id'] == group_id:\r\n service_group = virtual_server['serviceGroups'][0]\r\n if allocation is not None:\r\n virtual_server['allocation'] = int(allocation)\r\n if port is not None:\r\n virtual_server['port'] = int(port)\r\n if routing_type is not None:\r\n service_group['routingTypeId'] = int(routing_type)\r\n if routing_method is not None:\r\n service_group['routingMethodId'] = int(routing_method)\r\n break\r\n return self.lb_svc.editObject(load_balancer, id=loadbal_id)", "def customer_group_put(group_info):\n related_groups = customer_group_get_related(group_info[\"group_id\"])\n\n now = datetime.datetime.now()\n f = '%Y-%m-%d %H:%M:%S'\n insert_time = now.strftime(f)\n\n result = {\"success\" : 1, \"message\" : \"Customer Company can not be Updated\"}\n\n for groups in related_groups:\n c_group_info = list(groups)\n #check for the roles\n c_g_id = c_group_info[0]\n c_g_role = c_group_info[1].split(\"(\")[1][:-1]\n c_g_name = c_group_info[1].split(\"(\")[0]\n new_c_g_name = group_info[\"group_name\"] + \"(\"+ c_g_role +\")\"\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n cursor = db.cursor()\n query = \"\"\"\n UPDATE `groups`\n SET\n `group_name` = \"%s\",\n `company_name` = \"%s\",\n `company_address` = \"%s\",\n `company_telephone` = \"%s\",\n `company_fax` = \"%s\",\n `company_website` = \"%s\",\n `company_sales_contact` = \"%s\",\n `company_purchase_contact` = \"%s\",\n `company_business` = \"%s\",\n `company_business_type` = \"%s\",\n `company_sales_email` = \"%s\",\n `company_purchase_email` = \"%s\",\n `company_reg_number` = \"%s\",\n `company_vat_number` = \"%s\",\n `description` = \"%s\"\n WHERE\n `group_id` = \"%s\"\n \"\"\" %(\n new_c_g_name, \n group_info[\"company_name\"],\n group_info[\"company_address\"],\n group_info[\"company_telephone\"],\n group_info[\"company_fax\"],\n group_info[\"company_website\"],\n group_info[\"company_sales_contact\"],\n group_info[\"company_purchase_contact\"],\n group_info[\"company_business\"],\n group_info[\"company_business_type\"],\n group_info[\"company_sales_email\"],\n group_info[\"company_purchase_email\"],\n group_info[\"company_reg_number\"],\n group_info[\"company_vat_number\"],\n group_info[\"description\"],\n c_g_id\n )\n try:\n if cursor.execute(query):\n db.commit()\n result = {\"success\" : 0, \"message\" : \"Customer Company Updated Successfully\"}\n except Exception as e:\n result = {\"success\" : 1, \"message\" : \"Customer Company can not be Updated. 
Error \\\"\\'%s\\'\\\" \\\n Query = %s\" % (e, query) }\n finally:\n cursor.close()\n db.close()\n return result", "def update_group(self, bucket_id, group_id, group, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.update_group_with_http_info(bucket_id, group_id, group, **kwargs)\n else:\n (data) = self.update_group_with_http_info(bucket_id, group_id, group, **kwargs)\n return data", "def edit_group(request, group_id):\n group = None\n if group_id:\n group = models.UserGroup.get_by_id(int(group_id))\n return utility.edit_instance(request, models.UserGroup, forms.GroupEditForm,\n 'admin/edit_group',\n urlresolvers.reverse('views.admin.list_groups'),\n group_id, group=group)", "def test_partially_update_device_group_by_id(self):\n pass", "def update(cls, db: Database, record_uuid: str, record: GroupPartial) -> Group:\n existing_group = cls.find_by_uuid(db, record_uuid)\n updated_record = cls.model(**record.dict(), uuid=record_uuid)\n if updated_record.metadata.name != existing_group.metadata.name:\n if GroupManager.find_by_name(db, updated_record.metadata.name):\n raise ValidationError(\n \"Group with name [%s] already exists\" % record.metadata.name)\n cls.validate_group(db, updated_record)\n return super(GroupManager, cls).update(db, record_uuid, record)", "def delete_dynamic_group(self, dynamic_group_id, **kwargs):\n resource_path = \"/dynamicGroups/{dynamicGroupId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_dynamic_group got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"dynamicGroupId\": dynamic_group_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def set_group(self, group):\n self._group = group", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def fusion_api_update_from_group(self, uri, api=None, headers=None):\n param = '/compliance'\n return self.li.update(body=None, uri=uri, api=api, headers=headers, param=param)", "def update_app_policy_group(self, id, **kwargs):\n resp, body = self.put(self.get_uri(self.resource, id), json.dumps({'application_policy_group':kwargs}))\n body = json.loads(body)\n self.expected_success(http_client.OK, resp.status)\n return rest_client.ResponseBody(resp, body)", "def group(self, val):\n 
self.set_property(\"Group\", val)", "def test_update_device_group_by_id1(self):\n pass", "def update(self, security_group_id: str, body: CloudSecurityGroup) -> None:\n\t\troute = f'{AWSSecurityGroupConsts.CLOUD_SECURITY_GROUP.value}/{security_group_id}'\n\t\treturn self._put(route=route, body=body)", "async def modify_resource_group_async(\n self,\n request: dds_20151201_models.ModifyResourceGroupRequest,\n ) -> dds_20151201_models.ModifyResourceGroupResponse:\n runtime = util_models.RuntimeOptions()\n return await self.modify_resource_group_with_options_async(request, runtime)", "def update_pin_group():\n create_instance(new=False)", "def update_placement(session, cluster, vm_ref, group_info):\n cluster_config = session._call_method(\n vutil, \"get_object_property\", cluster, \"configurationEx\")\n\n if cluster_config:\n group = _get_vm_group(cluster_config, group_info)\n client_factory = session.vim.client.factory\n config_spec = client_factory.create('ns0:ClusterConfigSpecEx')\n\n if not group:\n \"\"\"Creating group\"\"\"\n config_spec.groupSpec = _create_vm_group_spec(\n client_factory, group_info, [vm_ref], operation=\"add\",\n group=group)\n\n if group:\n # VM group exists on the cluster which is assumed to be\n # created by VC admin. Add instance to this vm group and let\n # the placement policy defined by the VC admin take over\n config_spec.groupSpec = _create_vm_group_spec(\n client_factory, group_info, [vm_ref], operation=\"edit\",\n group=group)\n\n # If server group policies are defined (by tenants), then\n # create/edit affinity/anti-affinity rules on cluster.\n # Note that this might be add-on to the existing vm group\n # (mentioned above) policy defined by VC admin i.e if VC admin has\n # restricted placement of VMs to a specific group of hosts, then\n # the server group policy from nova might further restrict to\n # individual hosts on a cluster\n if group_info.policies:\n # VM group does not exist on cluster\n policy = group_info.policies[0]\n if policy != 'soft-affinity':\n rule_name = \"%s-%s\" % (group_info.uuid, policy)\n rule = _get_rule(cluster_config, rule_name)\n operation = \"edit\" if rule else \"add\"\n config_spec.rulesSpec = _create_cluster_rules_spec(\n client_factory, rule_name, [vm_ref], policy=policy,\n operation=operation, rule=rule)\n\n reconfigure_cluster(session, cluster, config_spec)", "def add_to_group(self, org, contact, group):\n pass", "def UpdateGroupMembership(self, newMembers):\r\n globals.groupMembers[newMembers.targetGuid] = True #remove the target Sticky\r\n\r\n for guid in newMembers.guids[0]:\r\n globals.groupMembers[guid]=True\r\n\r\n group = Group()\r\n globals._groupNumber = globals._groupNumber+1\r\n group.groupID = globals._groupName + str(globals._groupNumber)\r\n group.targetSticky[\"guid\"] = newMembers.targetGuid\r\n group.targetSticky[\"desc\"] = newMembers.targetDesc\r\n group.targetSticky[\"head\"] = newMembers.targetHead #lplp1313 new value\r\n\r\n guidSims = tuple(zip(newMembers.guids[0], newMembers.descriptions[0], newMembers.headers[0], list(newMembers.cos_sims[0]))) #lplp1313 new value \r\n for g, d, h, c in guidSims:\r\n gs = GroupSticky()\r\n gs.guid=g\r\n gs.desc=d\r\n gs.head=h #lplp1313 new value\r\n gs.cosineVal=c\r\n group.groupStickies.append(gs)\r\n\r\n globals._jsonReply._groups.append(group)", "def set_group(group_name):\n group_config = env.groups[group_name]\n set_role_defs(\n web=group_config['servers'][WEB_ROLE],\n db=group_config['servers'][DB_ROLE],\n )\n env.branch = group_config['branch']\n 
env.subdomain = group_config.get('subdomain', 'www')", "def set_group(self, bot, update, args):\n username = str(update.message.from_user['username'])\n chat_id = str(update.message.from_user['id'])\n\n try:\n group_name = self.format_group(str(args[0]))\n\n if self.is_group(group_name):\n self.user_db.add_new_user(username, group_name, chat_id)\n bot.send_message(update.message.chat_id,\n 'Расписание для группы *{}* успешно установлено!\\n'\n '/today\\n'\n '/tomorrow\\n'\n '/week\\n'\n '/nextweek\\n'\n '/full\\n'\n '/timetable\\n'\n '/keyboard\\n'.format(group_name),\n parse_mode='Markdown')\n else:\n raise Exception(\"Group is not exists.\")\n except (Exception, IndexError):\n bot.send_message(update.message.chat_id,\n 'Группы с таким именем не существует, проверьте корректность введенного имени.',\n parse_mode='Markdown')", "def set_definition(self, definition):\n return self.client._perform_json(\n \"PUT\", \"/admin/groups/%s\" % self.name,\n body = definition)", "def update(ctx, name, description, tags):\n user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),\n ctx.obj.get('group'))\n update_dict = {}\n\n if name:\n update_dict['name'] = name\n\n if description:\n update_dict['description'] = description\n\n tags = validate_tags(tags)\n if tags:\n update_dict['tags'] = tags\n\n if not update_dict:\n Printer.print_warning('No argument was provided to update the experiment group.')\n sys.exit(0)\n\n try:\n response = PolyaxonClient().experiment_group.update_experiment_group(\n user, project_name, _group, update_dict)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not update experiment group `{}`.'.format(_group))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n Printer.print_success(\"Experiment group updated.\")\n get_group_details(response)", "def _set_group_name(self):\n self._scene_gen.group_name = self._group_name_le.text()\n self._refresh_view()", "def perform_with_group(self, eff, expected_lookup, group,\n fallback_dispatcher=None):\n def gsg(log, tenant_id, group_id):\n assert (log, tenant_id, group_id) == expected_lookup\n return group\n store = self.get_store()\n store.get_scaling_group.side_effect = gsg\n dispatcher = self.get_dispatcher(store)\n if fallback_dispatcher is not None:\n dispatcher = ComposedDispatcher([dispatcher,\n fallback_dispatcher])\n return sync_perform(dispatcher, eff)", "def update_node_group_acl(session, node_id=None, group_id=None,\n allow_install=False, allow_uninstall=False, allow_reboot=False,\n allow_schedule=False, allow_wol=False, allow_snapshot_creation=False,\n allow_snapshot_removal=False, allow_snapshot_revert=False,\n allow_tag_creation=False, allow_tag_removal=False, allow_read=False,\n date_modified=datetime.now(), username='system_user'\n ):\n session = validate_session(session)\n group = None\n if group_id and node_id:\n group = session.query(NodeGroupAccess).\\\n filter(NodeGroupAccess.group_id == group_id).\\\n filter(NodeGroupAccess.node_id == node_id).first()\n if group:\n try:\n group.allow_install = allow_install\n group.allow_uninstall = allow_uninstall\n group.allow_reboot = allow_reboot\n group.allow_schedule = allow_schedule\n group.allow_wol = allow_wol\n group.allow_snapshot_creation = allow_snapshot_creation\n group.allow_snapshot_removal = allow_snapshot_removal\n group.allow_snapshot_revert = allow_snapshot_revert\n group.allow_tag_creation = allow_tag_creation\n group.allow_tag_removal = allow_tag_removal\n 
group.allow_read = allow_read\n group.date_modified = date_modified\n session.commit()\n return({\n 'pass': True,\n 'message': 'ACL for Group %s was modified for Node %s' % \\\n (group_id, node_id)\n })\n except Exception as e:\n session.rollback()\n return({\n 'pass': False,\n 'message': 'Failed to modify ACL for Group %s on Node %s' % \\\n (group_id, node_id)\n })\n else:\n return({\n 'pass': False,\n 'message': 'Invalid group_id %s and or node_id %s' % \\\n (group_id, node_id)\n })", "def create_dynamic_group(self, create_dynamic_group_details, **kwargs):\n resource_path = \"/dynamicGroups\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_dynamic_group got unknown kwargs: {!r}\".format(extra_kwargs))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_dynamic_group_details,\n response_type=\"DynamicGroup\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_dynamic_group_details,\n response_type=\"DynamicGroup\")", "def edit_group(request, name=None):\n if not request.user.is_superuser:\n raise PopupException(_(\"You must be a superuser to add or edit a group.\"), error_code=401)\n\n if name is not None:\n instance = Group.objects.get(name=name)\n else:\n instance = None\n\n if request.method == 'POST':\n form = GroupEditForm(request.POST, instance=instance)\n if form.is_valid():\n form.save()\n request.info(_('Group information updated'))\n return list_groups(request)\n\n else:\n form = GroupEditForm(instance=instance)\n\n return render('edit_group.mako', request, dict(form=form, action=request.path, name=name))", "def put(self, id):\r\n return UserGroupService.updateUserGroup(self, id)", "def group_id(self, group_id):\n\n self._group_id = group_id" ]
[ "0.73748296", "0.71882457", "0.7182601", "0.7062276", "0.6967742", "0.6875376", "0.68748295", "0.6828043", "0.6739177", "0.6604309", "0.64636046", "0.6460566", "0.639448", "0.6345957", "0.6323035", "0.6288533", "0.6273564", "0.6173461", "0.61531174", "0.61079484", "0.6106953", "0.6087284", "0.60076416", "0.5996293", "0.5986506", "0.5970233", "0.5953732", "0.5951706", "0.5926803", "0.59185225", "0.5910342", "0.5865728", "0.58417255", "0.58412284", "0.5811853", "0.5807287", "0.57911533", "0.57869303", "0.5784653", "0.57788724", "0.5770013", "0.57698065", "0.5765696", "0.57622683", "0.57605493", "0.5752738", "0.5751265", "0.5746337", "0.5741195", "0.574007", "0.5725381", "0.5718982", "0.5715317", "0.5714861", "0.5713131", "0.57122517", "0.57094824", "0.5691661", "0.5682665", "0.56730944", "0.5665672", "0.563599", "0.5632068", "0.5630832", "0.5620796", "0.56107354", "0.5595797", "0.55907595", "0.5590407", "0.55889267", "0.55872947", "0.5580058", "0.55770963", "0.5570717", "0.55611295", "0.5559828", "0.555093", "0.555093", "0.555093", "0.55449986", "0.55385315", "0.5536034", "0.55230284", "0.54939914", "0.5476577", "0.5475092", "0.5469627", "0.5444372", "0.54312557", "0.5406466", "0.5400219", "0.53938603", "0.5380949", "0.5376578", "0.53714", "0.53701264", "0.53605753", "0.535395", "0.5349707", "0.5349673" ]
0.7564378
0
Updates the specified group.
def update_group(self, group_id, update_group_details, **kwargs): resource_path = "/groups/{groupId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_group got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "groupId": group_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_group_details, response_type="Group") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_group_details, response_type="Group")
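The document above has the shape of an Oracle Cloud Infrastructure (OCI) Python SDK identity-client method. A minimal usage sketch under that assumption — the client construction, the `UpdateGroupDetails` model, and the OCID/ETag values below are illustrative placeholders, not taken from the row:

```python
# Hedged sketch: assumes the OCI Python SDK; names and OCIDs are placeholders.
import oci

config = oci.config.from_file()                 # reads ~/.oci/config by default
identity = oci.identity.IdentityClient(config)

details = oci.identity.models.UpdateGroupDetails(
    description="Read-only auditors",
)

# if_match enables optimistic concurrency: the update is applied only if the
# group's current ETag matches (it is sent as the "if-match" header above).
response = identity.update_group(
    group_id="ocid1.group.oc1..exampleexampleexample",
    update_group_details=details,
    if_match='"42"',
)
print(response.data.name, response.data.description)  # response_type="Group"
```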
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_group(self, group_id, **kwargs):\n post_body = json.dumps({'group': kwargs})\n resp, body = self.patch('groups/%s' % group_id, post_body)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def update_group():\n _id = request.form['_id']\n name = request.form['name']\n data, code, message = FIELD_SERVICE.update_group(_id, name)\n return __result(data, code, message)", "def group_update(*, login_manager: LoginManager, group_id: str, **kwargs: Any):\n groups_client = login_manager.get_groups_client()\n\n # get the current state of the group\n group = groups_client.get_group(group_id)\n\n # assemble put data using existing values for any field not given\n # note that the API does not accept the full group document, so we must\n # specify name and description instead of just iterating kwargs\n data = {}\n for field in [\"name\", \"description\"]:\n if kwargs.get(field) is not None:\n data[field] = kwargs[field]\n else:\n data[field] = group[field]\n\n response = groups_client.update_group(group_id, data)\n\n formatted_print(response, simple_text=\"Group updated successfully\")", "def update(self,\n provider_id,\n group_id,\n group,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'group_id': group_id,\n 'group': group,\n })", "def update_group(groupname):\n name = request.get_json().get(\"name\", None)\n description = request.get_json().get(\"description\", None)\n response = jsonify(\n admin.update_group(current_app.scoped_session(), groupname, description, name)\n )\n return response", "def do_group_update():\n target_group = Group.query.filter_by(id=request.form['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n target_group.name = request.form['name']\n target_group.group_meter_id = request.form['meter']\n target_group.group_production_meter_id_first = request.form['group_production_meter_id_first']\n target_group.group_production_meter_id_second = request.form[\n 'group_production_meter_id_second']\n\n db.session.commit()\n return group_list(\"Updated group \" + target_group.name)", "def request_group_update():\n target_group = Group.query.filter_by(id=request.args['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n return Response(\n render_template(\n 'admin/group/create-update.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/admin/group/update\",\n id=target_group.id,\n name=target_group.name,\n meter=target_group.group_meter_id,\n group_production_meter_id_first=target_group.group_production_meter_id_first,\n group_production_meter_id_second=target_group.group_production_meter_id_second),\n mimetype='text/html')", "def update_group(self, group_name, new_group_name=None, new_path=None):\r\n params = {'GroupName' : group_name}\r\n if new_group_name:\r\n params['NewGroupName'] = new_group_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateGroup', params)", "def ModifyGroup(self, group, reason=None, **kwargs):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_PUT,\n (\"/%s/groups/%s/modify\" %\n (GANETI_RAPI_VERSION, group)), query, kwargs)", "def update_targetgroup(self, group_id, **kwargs):\r\n result = False\r\n if self._db(self._db.targetgroup.id==group_id).select():\r\n result = True\r\n self._db(self._db.targetgroup.id==group_id).update(**kwargs)\r\n self._db.commit()\r\n return result", "def update_group(\n self,\n group,\n 
validate_only=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n # Wrap the transport method to add retry and timeout logic.\n if \"update_group\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"update_group\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.update_group,\n default_retry=self._method_configs[\"UpdateGroup\"].retry,\n default_timeout=self._method_configs[\"UpdateGroup\"].timeout,\n client_info=self._client_info,\n )\n\n request = group_service_pb2.UpdateGroupRequest(\n group=group, validate_only=validate_only,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"group.name\", group.name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"update_group\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def update_group_with_http_info(self, bucket_id, group_id, group, **kwargs):\n\n all_params = ['bucket_id', 'group_id', 'group', 'if_match', 'if_none_match', 'fields']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method update_group\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'bucket_id' is set\n if ('bucket_id' not in params) or (params['bucket_id'] is None):\n raise ValueError(\"Missing the required parameter `bucket_id` when calling `update_group`\")\n # verify the required parameter 'group_id' is set\n if ('group_id' not in params) or (params['group_id'] is None):\n raise ValueError(\"Missing the required parameter `group_id` when calling `update_group`\")\n # verify the required parameter 'group' is set\n if ('group' not in params) or (params['group'] is None):\n raise ValueError(\"Missing the required parameter `group` when calling `update_group`\")\n\n if 'if_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_match']):\n raise ValueError(\"Invalid value for parameter `if_match` when calling `update_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n if 'if_none_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_none_match']):\n raise ValueError(\"Invalid value for parameter `if_none_match` when calling `update_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n\n collection_formats = {}\n\n resource_path = '/buckets/{bucket_id}/groups/{group_id}'.replace('{format}', 'json')\n path_params = {}\n if 'bucket_id' in params:\n path_params['bucket_id'] = params['bucket_id']\n if 'group_id' in params:\n path_params['group_id'] = params['group_id']\n\n query_params = {}\n if 'fields' in params:\n query_params['_fields'] = params['fields']\n collection_formats['_fields'] = 'csv'\n\n header_params = {}\n if 'if_match' in params:\n header_params['If-Match'] = params['if_match']\n if 'if_none_match' in params:\n header_params['If-None-Match'] = params['if_none_match']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'group' in params:\n body_params = params['group']\n\n 
# HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['basicAuth']\n\n return self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Group',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def update_group(self, group_id, new_description):\n url = self.groups_url + \"/\" + group_id\n new_data = json.dumps({\"description\": new_description})\n\n return requests.put(url, new_data, headers=self.headers)", "def test_modify_group(self):\n response = self.client.modify_group(\"ABC123\")\n self.assertEqual(response[\"method\"], \"POST\")\n self.assertEqual(response[\"uri\"], \"/admin/v1/groups/ABC123\")\n self.assertEqual(util.params_to_dict(response[\"body\"]), {\"account_id\": [self.client.account_id]})", "def update_group(self, bucket_id, group_id, group, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.update_group_with_http_info(bucket_id, group_id, group, **kwargs)\n else:\n (data) = self.update_group_with_http_info(bucket_id, group_id, group, **kwargs)\n return data", "def test_update_group(self):\n pass", "def update(self, consistencygroup, **kwargs):\n if not kwargs:\n return\n\n body = {\"consistencygroup\": kwargs}\n\n return self._update(\"/consistencygroups/%s\" %\n base.getid(consistencygroup), body)", "def set_group(self, group: str) -> None:\n self.group = group", "def set_group(self, group):\n self._group = group", "def security_group_update(secgroup=None, auth=None, **kwargs):\n cloud = get_operator_cloud(auth)\n kwargs = _clean_kwargs(keep_name=True, **kwargs)\n return cloud.update_security_group(secgroup, **kwargs)", "def patch(self,\n provider_id,\n group_id,\n group,\n ):\n return self._invoke('patch',\n {\n 'provider_id': provider_id,\n 'group_id': group_id,\n 'group': group,\n })", "def upsert_group(self,\n group, # type: Group\n *options, # type: UpsertGroupOptions\n **kwargs # type: Any\n ):\n # This endpoint accepts application/x-www-form-urlencoded and requires the data be sent as form data.\n # The name/id should not be included in the form data.\n # Roles should be a comma separated list of strings.\n # If, only if, the role contains a bucket name then the rolename should be suffixed\n # with[<bucket_name>] e.g. 
bucket_full_access[default],security_admin.\n\n final_args = forward_args(kwargs, *options)\n final_args.update({k: v for k, v in group.as_dict.items() if k in {\n 'roles', 'description', 'ldap_group_reference'}})\n self._admin_bucket.group_upsert(group.name, **final_args)", "def update_groups(self, groups):\n self.fetch_group_messages() # preload messages before updating groups\n self.groups = groups\n self.put()", "def group(self, group):\n self._group = group", "def update(self):\r\n return self.connection._update_group('UpdateAutoScalingGroup', self)", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def set_group(self, group):\n # Implemented from template for osid.resource.ResourceForm.set_group_template\n if self.get_group_metadata().is_read_only():\n raise errors.NoAccess()\n if not self._is_valid_boolean(group):\n raise errors.InvalidArgument()\n self._my_map['group'] = group", "def patch_group_with_http_info(self, bucket_id, group_id, group, **kwargs):\n\n all_params = ['bucket_id', 'group_id', 'group', 'if_match', 'if_none_match', 'fields']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method patch_group\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'bucket_id' is set\n if ('bucket_id' not in params) or (params['bucket_id'] is None):\n raise ValueError(\"Missing the required parameter `bucket_id` when calling `patch_group`\")\n # verify the required parameter 'group_id' is set\n if ('group_id' not in params) or (params['group_id'] is None):\n raise ValueError(\"Missing the required parameter `group_id` when calling `patch_group`\")\n # verify the required parameter 'group' is set\n if ('group' not in params) or (params['group'] is None):\n raise ValueError(\"Missing the required parameter `group` when calling `patch_group`\")\n\n if 'if_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_match']):\n raise ValueError(\"Invalid value for parameter `if_match` when calling `patch_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n if 'if_none_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_none_match']):\n raise ValueError(\"Invalid value for parameter `if_none_match` when calling `patch_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n\n collection_formats = {}\n\n resource_path = '/buckets/{bucket_id}/groups/{group_id}'.replace('{format}', 'json')\n path_params = {}\n if 'bucket_id' in params:\n path_params['bucket_id'] = params['bucket_id']\n if 'group_id' in params:\n path_params['group_id'] = params['group_id']\n\n query_params = {}\n if 'fields' in params:\n query_params['_fields'] = params['fields']\n collection_formats['_fields'] = 'csv'\n\n header_params = {}\n if 'if_match' in params:\n header_params['If-Match'] = params['if_match']\n if 'if_none_match' in params:\n header_params['If-None-Match'] = params['if_none_match']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'group' in params:\n body_params = params['group']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not 
header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json', 'application/merge-patch+json', 'application/json-patch+json'])\n\n # Authentication setting\n auth_settings = ['basicAuth']\n\n return self.api_client.call_api(resource_path, 'PATCH',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Group',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "async def update_contact_group(dbcon: DBConnection, contact_group_id: int, data: Dict[str, str]) -> None:\n\n async def _run(cur: Cursor) -> None:\n for key, value in data.items():\n if key not in ['name', 'active']:\n raise errors.IrisettError('invalid contact key %s' % key)\n q = \"\"\"update contact_groups set %s=%%s where id=%%s\"\"\" % key\n q_args = (value, contact_group_id)\n await cur.execute(q, q_args)\n\n if not await contact_group_exists(dbcon, contact_group_id):\n raise errors.InvalidArguments('contact group does not exist')\n await dbcon.transact(_run)", "def put_group(\n group_id: BSONObjectId,\n data: PutGroupIn,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n grp: Group = Group.objects.get(pk=group_id)\n if not (\n tkn.owner == grp.owner or has_clearance(tkn.owner, \"sni.update_group\")\n ):\n raise PermissionError\n logging.debug(\"Updating group %s (%s)\", grp.group_name, group_id)\n if data.add_members is not None:\n grp.members += [\n User.objects.get(character_name=member_name)\n for member_name in set(data.add_members)\n ]\n if data.authorized_to_login is not None:\n assert_has_clearance(tkn.owner, \"sni.set_authorized_to_login\")\n grp.authorized_to_login = data.authorized_to_login\n if data.description is not None:\n grp.description = data.description\n if data.members is not None:\n grp.members = [\n User.objects.get(character_name=member_name)\n for member_name in set(data.members)\n ]\n if data.owner is not None:\n grp.owner = User.objects.get(character_name=data.owner)\n if data.remove_members is not None:\n grp.members = [\n member\n for member in grp.members\n if member.character_name not in data.remove_members\n ]\n grp.members = list(set(grp.members + [grp.owner]))\n grp.save()\n return GetGroupOut.from_record(grp)", "def put(self):\n status = ErrorCode.SUCCESS\n try:\n data = DotDict(json_decode(self.request.body))\n cid = self.current_user.cid\n tid = self.current_user.tid\n gid = data.gid\n name = data.name\n logging.info(\"[UWEB] Modify group request: %s, cid: %s\",\n data, self.current_user.cid)\n except Exception as e:\n status = ErrorCode.ILLEGAL_DATA_FORMAT\n logging.exception(\"[UWEB] Invalid data format. body:%s, Exception: %s\",\n self.request.body, e.args)\n self.write_ret(status)\n return\n\n try: \n group = self.get_group_by_cid(cid, name)\n if group:\n status = ErrorCode.GROUP_EXIST\n self.write_ret(status)\n return\n\n self.db.execute(\"UPDATE T_GROUP\"\n \" SET name = %s\"\n \" WHERE id = %s\",\n name, gid)\n\n # NOTE: wspush to client \n if status == ErrorCode.SUCCESS:\n WSPushHelper.pushS3(tid, self.db, self.redis)\n\n self.write_ret(status)\n except Exception as e:\n logging.exception(\"[UWEB] Modify group failed. 
cid: %s, Exception: %s\",\n self.current_user.cid, e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def test_update_team_user_group(client):\n group = client.update_team_user_group(TEAM_ID, GROUP_ID, {\n \"name\": \"Updated Python group\",\n \"is_reviewer\": False,\n \"is_admin\": True,\n \"admin_rights\": [\"upload\"]\n })\n assert group.team_id == TEAM_ID\n assert group.group_id == GROUP_ID\n assert group.name == \"Updated Python group\"\n assert group.permissions['is_admin']\n assert not group.permissions['is_reviewer']", "def update_security_group(self, security_group, body=None):\r\n return self.put(self.security_group_path %\r\n security_group, body=body)", "def update(self, group_snapshot, **kwargs):\n if not kwargs:\n return\n\n body = {\"group_snapshot\": kwargs}\n\n return self._update(\"/group_snapshots/%s\" % base.getid(group_snapshot),\n body)", "def groups_update(self, mar, request):\n group_id = mar.viewed_user_auth.user_id\n member_ids_dict, owner_ids_dict = self._services.usergroup.LookupMembers(\n mar.cnxn, [group_id])\n owner_ids = owner_ids_dict.get(group_id, [])\n member_ids = member_ids_dict.get(group_id, [])\n if not permissions.CanEditGroup(\n mar.perms, mar.auth.effective_ids, owner_ids):\n raise permissions.PermissionException(\n 'The user is not allowed to edit this group.')\n\n group_settings = self._services.usergroup.GetGroupSettings(\n mar.cnxn, group_id)\n if (request.who_can_view_members or request.ext_group_type\n or request.last_sync_time or request.friend_projects):\n group_settings.who_can_view_members = (\n request.who_can_view_members or group_settings.who_can_view_members)\n group_settings.ext_group_type = (\n request.ext_group_type or group_settings.ext_group_type)\n group_settings.last_sync_time = (\n request.last_sync_time or group_settings.last_sync_time)\n if framework_constants.NO_VALUES in request.friend_projects:\n group_settings.friend_projects = []\n else:\n id_dict = self._services.project.LookupProjectIDs(\n mar.cnxn, request.friend_projects)\n group_settings.friend_projects = (\n list(id_dict.values()) or group_settings.friend_projects)\n self._services.usergroup.UpdateSettings(\n mar.cnxn, group_id, group_settings)\n\n if request.groupOwners or request.groupMembers:\n self._services.usergroup.RemoveMembers(\n mar.cnxn, group_id, owner_ids + member_ids)\n owners_dict = self._services.user.LookupUserIDs(\n mar.cnxn, request.groupOwners, autocreate=True)\n self._services.usergroup.UpdateMembers(\n mar.cnxn, group_id, list(owners_dict.values()), 'owner')\n members_dict = self._services.user.LookupUserIDs(\n mar.cnxn, request.groupMembers, autocreate=True)\n self._services.usergroup.UpdateMembers(\n mar.cnxn, group_id, list(members_dict.values()), 'member')\n\n return api_pb2_v1.GroupsUpdateResponse()", "def edit_group(request, group_id):\n group = None\n if group_id:\n group = models.UserGroup.get_by_id(int(group_id))\n return utility.edit_instance(request, models.UserGroup, forms.GroupEditForm,\n 'admin/edit_group',\n urlresolvers.reverse('views.admin.list_groups'),\n group_id, group=group)", "def patch_group(self, bucket_id, group_id, group, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.patch_group_with_http_info(bucket_id, group_id, group, **kwargs)\n else:\n (data) = self.patch_group_with_http_info(bucket_id, group_id, group, **kwargs)\n return data", "def set_group(self, address, group):\n self.groups[address] = group", "def edit_group(request, name=None):\n if not 
request.user.is_superuser:\n raise PopupException(_(\"You must be a superuser to add or edit a group.\"), error_code=401)\n\n if name is not None:\n instance = Group.objects.get(name=name)\n else:\n instance = None\n\n if request.method == 'POST':\n form = GroupEditForm(request.POST, instance=instance)\n if form.is_valid():\n form.save()\n request.info(_('Group information updated'))\n return list_groups(request)\n\n else:\n form = GroupEditForm(instance=instance)\n\n return render('edit_group.mako', request, dict(form=form, action=request.path, name=name))", "def test_editGroup(self):\n\t\tuser = User.objects.get(id=1)\n\t\tself.client.force_authenticate(user=user)\n\t\tgroup = Group.objects.create(admin=user, name='testGroup3', isPublic=True, \n\t\t\tdescription='This is another test group that just created.')\n\n\t\turl = \"/groups/3/\"\n\t\tdata = {\n\t\t\t'name' : 'anotherTestGroup'\n\t\t}\n\n\t\tresponse = self.client.patch(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(response.data[\"name\"], 'anotherTestGroup')\n\n\t\turl = \"/groups/2/\"\n\t\tresponse = self.client.patch(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def update(person_group_id, name=None, user_data=None):\n url = 'persongroups/{}'.format(person_group_id)\n json = {\n 'name': name,\n 'userData': user_data,\n }\n\n return util.request('PATCH', url, json=json)", "def setGroup(self, group):\n\t\tself.config.GROUP = group", "def update_adcampaign_group(self, campaign_group_id, name=None,\n campaign_group_status=None, objective=None,\n batch=False):\n path = '%s' % campaign_group_id\n args = {}\n if name is not None:\n args['name'] = name\n if campaign_group_status is not None:\n args['campaign_group_status'] = campaign_group_status\n if objective is not None:\n args['objective'] = objective\n return self.make_request(path, 'POST', args, batch=batch)", "def update_adgroup(self, adgroup_id, name=None, adgroup_status=None,\n bid_type=None, bid_info=None, creative_id=None,\n tracking_specs=None, view_tags=None, objective=None,\n targeting=None, conversion_specs=None,\n batch=False):\n path = \"%s\" % adgroup_id\n args = {}\n if name:\n args['name'] = name\n if bid_type:\n args['bid_type'] = bid_type\n if bid_info:\n args['bid_info'] = json.dumps(bid_info)\n\n if creative_id:\n args['creative'] = json.dumps({'creative_id': creative_id})\n if tracking_specs:\n args['tracking_specs'] = json.dumps(tracking_specs)\n if view_tags:\n args['view_tags'] = json.dumps(view_tags)\n if objective:\n args['objective'] = objective\n if adgroup_status:\n args['adgroup_status'] = adgroup_status\n if targeting:\n args['targeting'] = json.dumps(targeting)\n if conversion_specs:\n args['conversion_specs'] = json.dumps(conversion_specs)\n return self.make_request(path, 'POST', args, batch=batch)", "def update_research_group(self, employee_id, new_research_group):\n cursor = self.dbconnect.get_cursor()\n try:\n cursor.execute('UPDATE employee '\n 'SET research_group = %s '\n 'WHERE id=%s;',\n (new_research_group, employee_id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise", "def update(ctx, name, description, tags):\n user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),\n ctx.obj.get('group'))\n update_dict = {}\n\n if name:\n update_dict['name'] = name\n\n if description:\n update_dict['description'] = description\n\n tags = validate_tags(tags)\n if tags:\n 
update_dict['tags'] = tags\n\n if not update_dict:\n Printer.print_warning('No argument was provided to update the experiment group.')\n sys.exit(0)\n\n try:\n response = PolyaxonClient().experiment_group.update_experiment_group(\n user, project_name, _group, update_dict)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not update experiment group `{}`.'.format(_group))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n\n Printer.print_success(\"Experiment group updated.\")\n get_group_details(response)", "def set_group(self, id_: str, player: str, group: list):\n self._groups[id_] = {\n 'player': player,\n 'group': group\n }", "def test_user_group_controller_update(self):\n pass", "def put(self, id):\r\n return UserGroupService.updateUserGroup(self, id)", "def edit_group_command(self):\n self.switch_frame(\"Edit Group\")\n id = self.parent.get_frame_id(\"Edit Group\")\n self.parent.frames[id].display_group(self.user.active_group)", "def with_group(self, group):\n\t\tself.variables['group'] = group\n\t\treturn self", "def _mod_group(self, command, group_id, group_type, buckets=None):\n self.datapath.send_msg(\n self.parser.OFPGroupMod(\n datapath=self.datapath,\n command=command,\n group_id=group_id,\n type_=group_type,\n buckets=buckets,\n )\n )", "def test_updateGroup(self):\n self.result = None\n\n def callRemote(methodName, *a):\n self.result = methodName == '_updateTabsFromServer'\n\n tab = widgets.Tab(u'id4', u'Title 4', self.contentFactory)\n group = widgets.TabGroup(u'group1', u'Group', tabs=[tab])\n self.patch(self.tabView, 'callRemote', callRemote)\n self.tabView.updateGroup(group)\n self.assertTrue(self.result)\n self.assertNotIdentical(self.tabView.getTab(u'id4'), None)\n self.assertNotIdentical(self.tabView.getGroup(u'group1'), None)\n self.assertIdentical(self.tabView.tabs[-1], tab)\n\n # Update a group, and add a new tab.\n newTab = widgets.Tab(u'id5', u'Title 5', self.contentFactory)\n replacementGroup = widgets.TabGroup(\n u'group1', u'New Group', tabs=[newTab])\n self.tabView.updateGroup(replacementGroup)\n self.assertIdentical(\n self.tabView.getGroup(u'group1'), replacementGroup)\n self.assertNotIdentical(self.tabView.getTab(u'id5'), None)\n self.assertRaises(\n errors.InvalidIdentifier, self.tabView.getTab, u'id4')\n self.assertNotIn(tab, self.tabView.tabs)\n\n # Remove a tab from a group.\n self.tabView.removeTabs([newTab])\n self.assertRaises(\n errors.InvalidIdentifier, self.tabView.getTab, u'id5')\n self.assertNotIn(newTab, self.tabView.getGroup(u'group1').tabs)", "def customer_group_customer_put(user_id, group_id):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n user_group_user_details = {}\n user_group_user_details[\"user_id\"] = user_id\n user_group_user_details[\"group_id\"] = group_id\n\n query = \"\"\"\n Update `users_groups`\n SET `group_id` = \\\"%(group_id)s\\\"\n WHERE `user_id` = \\\"%(user_id)s\\\" \n \"\"\" %(user_group_user_details)\n cursor = db.cursor()\n result = {\"success\" : 0, \"message\" : \"Customer's Group is not updated\"}\n try:\n if cursor.execute(query):\n db.commit()\n result = {\"success\" : 0, \"message\" : \"Customer updated Successfully\"}\n except Exception as e:\n result = {\"success\" : 1, \"message\" : \"Customer can not be updated in the Group. 
Error \\\"\\'%s\\'\\\" \\\n Query \\\"\\'%s\\'\\\" \" % (e, query) }\n finally:\n cursor.close()\n db.close()\n return result", "def set_group(self, data, group, intg, dq=None):\n # TODO: Include a 2-D DQ array to be combined with the GROUPDQ array\n #\n # Copy the input data to a 2-D plane for this group/intg combination.\n # NOTE: This only works if data array is broadcastable so the shape\n # of the data array is checked.\n #\n data = np.asarray(data, dtype=self.data.dtype)\n detector_shape = (self.rows, self.columns)\n if data.shape == detector_shape:\n self.data[intg, group, :, :] = data \n # Invalidate the averaged data\n self._data_averaged = None\n # Update the group data quality array if necessary.\n if dq is not None:\n if self.include_groupdq:\n dq = np.asarray(dq, dtype=self.groupdq.dtype) # Convert to same data type.\n self.groupdq[intg, group, :, :] |= dq\n else:\n strg = \"Incompatible arguments. A groupdq array is \"\n strg += \"provided when include_groupdq=False. \"\n strg += \"The array is ignored.\"\n LOGGER.error(strg)\n else:\n strg = \"Group data array has the wrong shape \"\n strg += \"(%s instead of %s).\" % (str(data.shape),\n str(detector_shape))\n raise TypeError(strg)", "def set_group(self, bot, update, args):\n username = str(update.message.from_user['username'])\n chat_id = str(update.message.from_user['id'])\n\n try:\n group_name = self.format_group(str(args[0]))\n\n if self.is_group(group_name):\n self.user_db.add_new_user(username, group_name, chat_id)\n bot.send_message(update.message.chat_id,\n 'Расписание для группы *{}* успешно установлено!\\n'\n '/today\\n'\n '/tomorrow\\n'\n '/week\\n'\n '/nextweek\\n'\n '/full\\n'\n '/timetable\\n'\n '/keyboard\\n'.format(group_name),\n parse_mode='Markdown')\n else:\n raise Exception(\"Group is not exists.\")\n except (Exception, IndexError):\n bot.send_message(update.message.chat_id,\n 'Группы с таким именем не существует, проверьте корректность введенного имени.',\n parse_mode='Markdown')", "def update(self, security_group_id: str, body: CloudSecurityGroup) -> None:\n\t\troute = f'{AWSSecurityGroupConsts.CLOUD_SECURITY_GROUP.value}/{security_group_id}'\n\t\treturn self._put(route=route, body=body)", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def edit_group(self, groupId):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/GroupV2/{groupId}/Edit/\"))", "async def handle_set_group(self, match: Match[str], payload: str) -> None:\n groupid = match.group(1)\n\n try:\n group = self._bridge.groups[groupid]\n state = GroupSetState(**json.loads(payload))\n LOGGER.info(f\"Updating group {group.name}\")\n await group.set_action(**state.dict())\n except IndexError:\n LOGGER.warning(f\"Unknown group id: {groupid}\")\n except json.JSONDecodeError:\n LOGGER.warning(f\"Bad JSON on light request: {payload}\")\n except TypeError:\n LOGGER.warning(f\"Expected dictionary, got: {payload}\")\n except ValidationError as e:\n LOGGER.warning(f\"Invalid light state: {e}\")", "def fusion_api_update_group_role_assignment(self, body, api=None, headers=None):\n return 
self.LoginDomainsGroupToRoleMapping.update(body, api, headers)", "def add_to_group(self, org, contact, group):\n pass", "def modify_resource_group(\n self,\n request: dds_20151201_models.ModifyResourceGroupRequest,\n ) -> dds_20151201_models.ModifyResourceGroupResponse:\n runtime = util_models.RuntimeOptions()\n return self.modify_resource_group_with_options(request, runtime)", "def slotGroupEdit(self):\n dialog = GroupDialog(self)\n if dialog.exec_loop() == QDialog.Accepted:\n if dialog.group_id != None:\n # set group\n self.sampleGroup.globalGroupId = dialog.group_id\n self.groupLabel.setText(dialog.group_id)\n else:\n # ungroup\n self.sampleGroup.globalGroupId = None\n self.groupLabel.setText('Not\\nGrouped')\n self.emit(PYSIGNAL('groupChanged'), (self,))", "def test_update_resource_group(self):\n pass", "def capacitygroup_update(cmd_ctx, cpc, capacitygroup, **options):\n cmd_ctx.execute_cmd(\n lambda: cmd_capacitygroup_update(cmd_ctx, cpc, capacitygroup, options))", "def regroup(self, serial, group):\n api_page = \"/configuration/object/ap_regroup\"\n url = \"{}{}?{}&UIDARUBA={}\".format(\n self.base_url,\n api_page,\n self.config_path,\n self.uidaruba)\n\n obj_dict = {'serial-num': serial, 'new-group': group}\n obj_json = json.loads(json.dumps(obj_dict))\n\n resp = self.post(url, obj_json)\n\n print(resp.status_code)\n print(resp.text)", "def async_update_group_state(self) -> None:", "def update_by_key(\n self,\n key: str,\n version: int,\n actions: typing.List[CustomerGroupUpdateAction],\n *,\n expand: OptionalListStr = None,\n force_update: bool = False,\n ) -> CustomerGroup:\n params = self._serialize_params({\"expand\": expand}, _CustomerGroupUpdateSchema)\n update_action = CustomerGroupUpdate(version=version, actions=actions)\n return self._client._post(\n endpoint=f\"customer-groups/key={key}\",\n params=params,\n data_object=update_action,\n response_class=CustomerGroup,\n force_update=force_update,\n )", "def update(cls, db: Database, record_uuid: str, record: GroupPartial) -> Group:\n existing_group = cls.find_by_uuid(db, record_uuid)\n updated_record = cls.model(**record.dict(), uuid=record_uuid)\n if updated_record.metadata.name != existing_group.metadata.name:\n if GroupManager.find_by_name(db, updated_record.metadata.name):\n raise ValidationError(\n \"Group with name [%s] already exists\" % record.metadata.name)\n cls.validate_group(db, updated_record)\n return super(GroupManager, cls).update(db, record_uuid, record)", "def set(self, name_group, key, value):\n self.psettings.beginGroup(name_group)\n self.psettings.setValue(key, value)\n self.closeGroup()", "def group(self, val):\n self.set_property(\"Group\", val)", "def update_tag_group_acl(session, tag_id=None, group_id=None,\n allow_install=False, allow_uninstall=False, allow_reboot=False,\n allow_schedule=False, allow_wol=False, allow_snapshot_creation=False,\n allow_snapshot_removal=False, allow_snapshot_revert=False,\n allow_tag_creation=False, allow_tag_removal=False, allow_read=False,\n date_modified=datetime.now(), username='system_user'\n ):\n session = validate_session(session)\n group = None\n\n if group_id and tag_id:\n group = session.query(TagGroupAccess).\\\n filter(TagGroupAccess.group_id == group_id).\\\n filter(TagGroupAccess.tag_id == tag_id).first()\n if group:\n try:\n group.allow_install = allow_install\n group.allow_uninstall = allow_uninstall\n group.allow_reboot = allow_reboot\n group.allow_schedule = allow_schedule\n group.allow_wol = allow_wol\n group.allow_snapshot_creation = 
allow_snapshot_creation\n group.allow_snapshot_removal = allow_snapshot_removal\n group.allow_snapshot_revert = allow_snapshot_revert\n group.allow_tag_creation = allow_tag_creation\n group.allow_tag_removal = allow_tag_removal\n group.allow_read = allow_read\n group.date_modified = date_modified\n session.commit()\n return({\n 'pass': True,\n 'message': 'ACL for Group %s was modified for Tag %s' % \\\n (group_id, tag_id)\n })\n except Exception as e:\n session.rollback()\n return({\n 'pass': False,\n 'message': 'Failed to modify ACL for Group %s on Tag %s' % \\\n (group_id, tag_id)\n })\n else:\n return({\n 'pass': False,\n 'message': 'Invalid group_id %s and or tag_id' % \\\n (group_id, tag_id)\n })", "def replace_namespaced_group(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method replace_namespaced_group\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `replace_namespaced_group`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `replace_namespaced_group`\")\n\n resource_path = '/oapi/v1/groups/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1Group',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def test_creator_in_group_can_update(self):\n\n self.client.login(username='notlogged', password='notlogged')\n expected_url = reverse('group_view', args=(self.group.pk,))\n\n utils.test_can_access(self, self.url,\n post_redirect_url=expected_url,\n data=self.data)\n\n updated_group = Group.objects.get(pk=self.group.pk)\n self.assertEqual(updated_group.name, self.data['name'])\n self.assertEqual(updated_group.description, self.data['description'])\n self.assertIsNotNone(updated_group.last_edit_date)", "async def update_country_group_async(\n country_group_code: str,\n body: Optional[UpdateCountryGroupRequest] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateCountryGroup.create(\n country_group_code=country_group_code,\n body=body,\n namespace=namespace,\n )\n return await 
run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def test_update_device_group_by_id(self):\n pass", "def update(self, val):\n try:\n key = self._group_by(val)\n except lena.core.LenaKeyError:\n raise lena.core.LenaValueError(\n \"could not find a key for {}\".format(val)\n )\n\n if key in self.groups:\n self.groups[key].append(val)\n else:\n self.groups[key] = [val]", "def add_to_group(self, group):\n\n if not self.in_group(group):\n self.secondary_groups.append(group)\n return self", "def publish_group(self, group: GroupInfo) -> None:\n self._mqtt.publish(f\"group/{group.id}\", group, retain=True)", "def setGatingGroup(self, channel, group, unitCode=0):\n resp = self.XAPCommand('GRPSEL', channel, group, unitCode=unitCode)\n return resp", "def fusion_api_update_from_group(self, uri, api=None, headers=None):\n param = '/compliance'\n return self.li.update(body=None, uri=uri, api=api, headers=headers, param=param)", "def addGroup(self, group):\n self._model.insertH5pyObject(group)", "def test_api_v1_groups_id_put(self):\n pass", "def put(self, id):\n adm = Administration()\n lg = LearnGroup.from_dict(api.payload)\n if lg is not None:\n\n lg.set_id(id)\n adm.save_learngroup(lg)\n return lg, 200\n\n else:\n return '', 500", "def test_update_device_group_by_id1(self):\n pass", "def patch_namespaced_group(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method patch_namespaced_group\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `patch_namespaced_group`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `patch_namespaced_group`\")\n\n resource_path = '/oapi/v1/groups/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'PATCH',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1Group',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def with_group_key(self, group_key):\n self.group_key = group_key\n return self", "def test_update_entry_groups(self):\r\n # This method utilises the PUT request method and will make changes to the Canvas instance. 
This needs consideration.\r\n pass", "async def modify_resource_group_async(\n self,\n request: dds_20151201_models.ModifyResourceGroupRequest,\n ) -> dds_20151201_models.ModifyResourceGroupResponse:\n runtime = util_models.RuntimeOptions()\n return await self.modify_resource_group_with_options_async(request, runtime)", "def update_app_policy_group(self, id, **kwargs):\n resp, body = self.put(self.get_uri(self.resource, id), json.dumps({'application_policy_group':kwargs}))\n body = json.loads(body)\n self.expected_success(http_client.OK, resp.status)\n return rest_client.ResponseBody(resp, body)", "def test_modify_group(self):\n # Add users\n resp = self.app.post('/users', data=json.dumps(self.test_user1_data))\n assert resp.status_code == 200\n\n resp = self.app.post('/users', data=json.dumps(self.test_user2_data))\n assert resp.status_code == 200\n\n # Modify group 1 to add user 2\n resp = self.app.put('/groups/{}'.format(self.test_group1_groupid),\n data=json.dumps(self.test_group1_modify))\n assert resp.status_code == 200\n\n data = json.loads(resp.data)\n assert self.test_user1_userid in data\n assert self.test_user2_userid in data\n\n # Check user2 to see if it has group1 listed\n resp = self.app.get('/users/{}'.format(self.test_user2_userid))\n assert resp.status_code == 200\n\n data = json.loads(resp.data)\n assert 'groups' in data\n assert self.test_group1_groupid in data['groups']", "def show(self, group):\n self._current_group = group" ]
[ "0.83019364", "0.82925797", "0.81625587", "0.8079423", "0.8010341", "0.7975716", "0.78060603", "0.7803931", "0.7534597", "0.7418219", "0.7364829", "0.7364479", "0.73005295", "0.7184962", "0.7092921", "0.7048925", "0.7039343", "0.7023443", "0.7007921", "0.6988068", "0.6973582", "0.69407403", "0.6902594", "0.6899487", "0.68929434", "0.6838242", "0.6838242", "0.6838242", "0.682517", "0.6806426", "0.6798845", "0.6778644", "0.67686313", "0.674755", "0.6729728", "0.6686501", "0.66839814", "0.668122", "0.6675947", "0.6622896", "0.6614531", "0.66012746", "0.6588182", "0.6577286", "0.6570621", "0.6568183", "0.6558261", "0.6541172", "0.6504159", "0.64888483", "0.6473832", "0.6456448", "0.6425292", "0.6384235", "0.63550705", "0.63236785", "0.6319", "0.6315674", "0.630182", "0.62836826", "0.62836826", "0.62836826", "0.62836826", "0.62836826", "0.62836826", "0.6274238", "0.6273198", "0.62570584", "0.6246747", "0.62286574", "0.6176498", "0.6134094", "0.6133324", "0.6127302", "0.61106366", "0.61069906", "0.6100699", "0.60934323", "0.60685325", "0.6064035", "0.60432494", "0.6039053", "0.60390073", "0.6023249", "0.60142535", "0.5966406", "0.59501183", "0.5922552", "0.59220475", "0.5917112", "0.5900127", "0.58915263", "0.58804935", "0.58799547", "0.58787954", "0.5872855", "0.5871397", "0.5835637", "0.58354247", "0.58243525" ]
0.7277004
13
Updates the specified identity provider.
def update_identity_provider(self, identity_provider_id, update_identity_provider_details, **kwargs):
    resource_path = "/identityProviders/{identityProviderId}"
    method = "PUT"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "if_match"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "update_identity_provider got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "identityProviderId": identity_provider_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "if-match": kwargs.get("if_match", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=update_identity_provider_details,
            response_type="IdentityProvider")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=update_identity_provider_details,
            response_type="IdentityProvider")
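# --- Illustrative usage sketch ---
# A minimal example of how the method above might be called, assuming the
# standard OCI Python SDK surface (`oci.identity.IdentityClient` and the
# `UpdateSaml2IdentityProviderDetails` model). The OCID and ETag values are
# hypothetical placeholders, not real identifiers.
import oci

config = oci.config.from_file()  # reads ~/.oci/config by default
identity = oci.identity.IdentityClient(config)

# Only the fields being changed need to be set on the details model.
details = oci.identity.models.UpdateSaml2IdentityProviderDetails(
    description="SAML2 identity provider for the engineering tenancy")

response = identity.update_identity_provider(
    "ocid1.saml2idp.oc1..exampleuniqueID",  # placeholder identity provider OCID
    details,
    if_match='"etag-from-a-prior-GET"')  # optional optimistic-concurrency guard
print(response.data)  # the updated IdentityProvider model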
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_provider(self, provider_id, provider_name, endpoints, zone_id, provider_region):\n try:\n self.client.post('{api_url}/providers/{id}'.format(api_url=self.api_url, id=provider_id),\n action='edit',\n zone={'id': zone_id},\n connection_configurations=endpoints,\n provider_region=provider_region)\n self.changed = True\n except Exception as e:\n self.module.fail_json(msg=\"Failed to update provider. Error: {!r}\".format(e))", "def update_identity_provider(module, sdk, cloud, idp):\n\n description = module.params.get('description')\n enabled = module.params.get('enabled')\n domain_id = module.params.get('domain_id')\n remote_ids = module.params.get('remote_ids')\n\n attributes = {}\n\n if (description is not None) and (description != idp.description):\n attributes['description'] = description\n if (enabled is not None) and (enabled != idp.is_enabled):\n attributes['enabled'] = enabled\n if (domain_id is not None) and (domain_id != idp.domain_id):\n attributes['domain_id'] = domain_id\n if (remote_ids is not None) and (remote_ids != idp.remote_ids):\n attributes['remote_ids'] = remote_ids\n\n if not attributes:\n return False, idp\n\n if module.check_mode:\n return True, None\n\n try:\n new_idp = cloud.identity.update_identity_provider(idp, **attributes)\n except sdk.exceptions.OpenStackCloudException as ex:\n module.fail_json(msg='Failed to update identity provider: {0}'.format(str(ex)))\n return (True, new_idp)", "def update_provider(\n provider_id:UUID = Form(...),\n name:str = Form(...),\n qualification:str = Form(...),\n speciality:str = Form(...),\n phone:str = Form(...),\n department:Optional[str] = Form(\"N/A\"),\n organization:str = Form(...),\n location:Optional[str] = Form(\"N/A\"),\n address:str = Form(...),\n active:bool = Form(...)\n ):\n\n post_data = {\n \"name\": name,\n \"qualification\": qualification,\n \"speciality\": speciality,\n \"phone\": phone,\n \"department\": department,\n \"organization\": organization,\n \"location\": location,\n \"address\": address,\n \"active\": active\n }\n provider_data = open_for_reading()\n provider_data[str(provider_id)] = post_data\n open_for_writing(data=provider_data)\n return {\"msg\": \"updated\"}", "def update(self,\n provider_id,\n l3_vpn_context,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'l3_vpn_context': l3_vpn_context,\n })", "def update(self,\n provider_id,\n interface_id,\n provider_interface,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'interface_id': interface_id,\n 'provider_interface': provider_interface,\n })", "def update(self,\n provider_id,\n provider_deployment_map_id,\n provider_deployment_map,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'provider_deployment_map_id': provider_deployment_map_id,\n 'provider_deployment_map': provider_deployment_map,\n })", "def update(self,\n provider_id,\n group_id,\n group,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'group_id': group_id,\n 'group': group,\n })", "def update(self,\n provider_id,\n interface_id,\n service_interface,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'interface_id': interface_id,\n 'service_interface': service_interface,\n })", "def refresh_provider(self, provider_id):\n try:\n self.client.post('{api_url}/providers/{id}'.format(api_url=self.api_url, id=provider_id),\n action='refresh')\n self.changed = True\n except Exception as e:\n self.module.fail_json(msg=\"Failed to refresh provider. 
Error: {!r}\".format(e))", "def update(self,identity,params=None, headers=None):\n path = self._sub_url_params('/payouts/:identity', {\n \n 'identity': identity,\n })\n \n if params is not None:\n params = {self._envelope_key(): params}\n\n response = self._perform_request('PUT', path, params, headers,\n retry_failures=True)\n return self._resource_for(response)", "def provider(self, provider):\n\n self._provider = provider", "def update(self, request, *args, **kwargs):\n response = super(ProviderViewSet, self).update(request, *args, **kwargs)\n response.data['message'] = \"Proveedor ha sido editado\"\n return response", "def add_or_update_provider(self, provider_name, provider_type, endpoints, zone, provider_region,\n validate_provider_auth = True, initiate_refresh = True):\n zone_id = self.find_zone_by_name(zone or 'default')\n # check if provider with the same name already exists\n provider_id = self.find_provider_by_name(provider_name)\n if provider_id: # provider exists\n existing_config = self.get_provider_config(provider_id)\n\n # ManageIQ Euwe / CFME 5.7 API and older versions don't support certificate authority field in endpoint.\n # If it wasn't returned from existing provider configuration this means it is either unsupported or null,\n # in both cases we can remove null/empty certificate_authority from endpoints we want to update.\n self.filter_unsupported_fields_from_config(endpoints, existing_config['endpoints'], {'certificate_authority'})\n\n updates = self.required_updates(provider_id, endpoints, zone_id, provider_region, existing_config)\n\n if not updates:\n return dict(changed=self.changed,\n msg=\"Provider %s already exists\" % provider_name)\n\n old_validation_details = self.auths_validation_details(provider_id)\n operation = \"update\"\n self.update_provider(provider_id, provider_name, endpoints, zone_id, provider_region)\n roles_with_changes = set(updates[\"Added\"]) | set(updates[\"Updated\"])\n else: # provider doesn't exists, adding it to manageiq\n\n # ManageIQ Euwe / CFME 5.7 API and older versions don't support certificate authority field in endpoint.\n # filter empty fields if none on creation - No existing endpoints for new provider\n self.filter_unsupported_fields_from_config(endpoints, [{}], {'certificate_authority'})\n updates = None\n old_validation_details = {}\n operation = \"addition\"\n provider_id = self.add_new_provider(provider_name, provider_type,\n endpoints, zone_id, provider_region)\n roles_with_changes = [e['endpoint']['role'] for e in endpoints]\n\n if validate_provider_auth:\n authtypes_to_verify = []\n for e in endpoints:\n if e['endpoint']['role'] in roles_with_changes:\n authtypes_to_verify.append(e['authentication']['authtype'])\n result, details = self.verify_authenticaion_validation(provider_id, old_validation_details, authtypes_to_verify)\n else:\n result = \"Skipped Validation\"\n details = result\n\n if result == \"Invalid\":\n self.module.fail_json(msg=\"Failed to Validate provider authentication after {operation}. details: {details}\".format(operation=operation, details=details))\n elif result == \"Valid\" or result == \"Skipped Validation\":\n if initiate_refresh:\n self.refresh_provider(provider_id)\n message = \"Successful {operation} of {provider} provider. Authentication: {validation}. Refreshing provider inventory\".format(operation=operation, provider=provider_name, validation=details)\n else:\n message = \"Successful {operation} of {provider} provider. 
Authentication: {validation}.\".format(operation=operation, provider=provider_name, validation=details)\n elif result == \"Timed out\":\n message = \"Provider {provider} validation after {operation} timed out. Authentication: {validation}\".format(operation=operation, provider=provider_name, validation=details)\n return dict(\n provider_id=provider_id,\n changed=self.changed,\n msg=message,\n updates=updates\n )", "def testUpdate(self):\n try:\n provU = ProvenanceProvider(self.__cfgOb, self.__cachePath, useCache=False)\n pD = {self.__provKeyName: self.__provInfoL}\n ok = provU.store(pD)\n self.assertTrue(ok)\n #\n ok = provU.update(pD)\n self.assertTrue(ok)\n #\n fD = provU.fetch()\n self.assertTrue(self.__provKeyName in fD)\n self.assertDictEqual(pD, fD)\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n self.fail()", "def setCurrentUser(self, provider):\n pass", "def update_ldap_provider(self, body):\n try:\n self.logger.info('update_ldap_provider called.')\n\n # Validate required parameters\n self.logger.info(\n 'Validating required parameters for update_ldap_provider.')\n self.validate_parameters(body=body)\n\n # Prepare query URL\n self.logger.info('Preparing query URL for update_ldap_provider.')\n _url_path = '/public/ldapProvider'\n _query_builder = self.config.get_base_uri()\n _query_builder += _url_path\n _query_url = APIHelper.clean_url(_query_builder)\n\n # Prepare headers\n self.logger.info('Preparing headers for update_ldap_provider.')\n _headers = {\n 'accept': 'application/json',\n 'content-type': 'application/json; charset=utf-8'\n }\n\n # Prepare and execute request\n self.logger.info(\n 'Preparing and executing request for update_ldap_provider.')\n _request = self.http_client.put(\n _query_url,\n headers=_headers,\n parameters=APIHelper.json_serialize(body))\n AuthManager.apply(_request, self.config)\n _context = self.execute_request(_request,\n name='update_ldap_provider')\n\n # Endpoint and global error handling using HTTP status codes.\n self.logger.info('Validating response for update_ldap_provider.')\n if _context.response.status_code == 0:\n raise RequestErrorErrorException('Error', _context)\n self.validate_response(_context)\n\n # Return appropriate type\n return APIHelper.json_deserialize(\n _context.response.raw_body,\n LdapProviderResponse.from_dictionary)\n\n except Exception as e:\n self.logger.error(e, exc_info=True)\n raise", "def update_user(id):\n pass", "def provider(self, provider: Provider) -> None:\n self._provider = provider", "def update_user():", "def update(self,\n provider_id,\n bgp_routing_config,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'bgp_routing_config': bgp_routing_config,\n })", "def fusion_api_edit_server_profile(self, body, uri, api=None, headers=None, param=''):\n return self.profile.update(body, uri, api, headers, param=param)", "def updateUser(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def fusion_api_edit_user(self, body, uri, api=None, headers=None):\n return self.user.update(body, uri, api, headers)", "def update(self,\n provider_id,\n l3vpn_id,\n l3_vpn,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'l3vpn_id': l3vpn_id,\n 'l3_vpn': l3_vpn,\n })", "def on_identity_loaded(sender, identity):\n key = current_app.config.get(\n \"OAUTHCLIENT_CERN_OPENID_SESSION_KEY\",\n OAUTHCLIENT_CERN_OPENID_SESSION_KEY,\n )\n 
identity.provides.update(session.get(key, []))", "def put(self, entity, schema):\n profile = entity.profiles.get_or_404(schema=schema)\n try:\n update_data = json.loads(request.data)\n except json.JSONDecodeError as e:\n raise APIBadRequest(str(e))\n\n if 'identity' in update_data:\n profile.identity = update_data['identity']\n if 'servers' in update_data:\n profile.servers = update_data['servers']\n\n profile.save()\n\n return jsonify(profile.to_json()), 200", "def put(self, id):\n return userDao.update(id, api.payload)", "def update(self, uuid, android_key):\n try:\n pmanager = PushManager.query.filter_by(\n uuid=uuid\n ).one_or_none()\n if pmanager is None:\n raise GatlinException(\"App not exist\", 404)\n self._provider.app_name = pmanager.app_name\n _ = self._provider.set_android_platform(android_key)\n pmanager.android_key = android_key\n return pmanager.save()\n except GatlinException as exception:\n raise exception", "def update_user():\n #TODO user update \n pass", "def test_update_profile(self):\n self.cim.update_profile(\n customer_id=u\"222\",\n description=u\"Foo bar baz quz\",\n email=u\"dialtone@gmail.com\",\n customer_profile_id=u\"122\"\n )", "async def test_update(self):\n rsps = respx.put(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id') \\\n .mock(return_value=Response(200))\n await provisioning_client.update_provisioning_profile('id', {'name': 'new name'})\n assert rsps.calls[0].request.url == \\\n f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'\n assert rsps.calls[0].request.content == json.dumps({'name': 'new name'}).encode('utf-8')", "def update(self,\n provider_id,\n route_id,\n static_routes,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'route_id': route_id,\n 'static_routes': static_routes,\n })", "def update(self):\n self.__execute(self.pkgin_bin, \"update\")", "def change_provider(update, context):\n text = \"Выберите сайт с которого вы хотите получать прогнозы:\"\n keyboard = []\n for provider in context.bot_data['providers']:\n keyboard.append(InlineKeyboardButton(provider.capitalize(), callback_data=provider))\n\n reply_markup = InlineKeyboardMarkup(build_menu(keyboard, n_cols=2))\n context.bot.send_message(chat_id=update.effective_chat.id, text=text, reply_markup=reply_markup)", "def patch(self,\n provider_id,\n interface_id,\n provider_interface,\n ):\n return self._invoke('patch',\n {\n 'provider_id': provider_id,\n 'interface_id': interface_id,\n 'provider_interface': provider_interface,\n })", "def update(self, identifier, data):\n self.client.request_with_method(Methods.UPDATE % (self.name, identifier,),\n data=data)", "def update(self,\n ike_profile_id,\n ip_sec_vpn_ike_profile,\n ):\n return self._invoke('update',\n {\n 'ike_profile_id': ike_profile_id,\n 'ip_sec_vpn_ike_profile': ip_sec_vpn_ike_profile,\n })", "def updateUser(self, payload):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\treturn(putUrl(url, self.credentials, payload))", "def update_identity(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n\n current_identity_type = \"spn\"\n current_user_assigned_identity = \"\"\n if mc.identity is not None:\n current_identity_type = mc.identity.type.casefold()\n if mc.identity.user_assigned_identities is not None and len(mc.identity.user_assigned_identities) > 0:\n current_user_assigned_identity = list(mc.identity.user_assigned_identities.keys())[0]\n\n goal_identity_type = 
current_identity_type\n assign_identity = self.context.get_assign_identity()\n if self.context.get_enable_managed_identity():\n if not assign_identity:\n goal_identity_type = \"systemassigned\"\n else:\n goal_identity_type = \"userassigned\"\n\n is_update_identity = ((current_identity_type != goal_identity_type) or\n (current_identity_type == goal_identity_type and\n current_identity_type == \"userassigned\" and\n assign_identity is not None and\n current_user_assigned_identity != assign_identity))\n if is_update_identity:\n if current_identity_type == \"spn\":\n msg = (\n \"Your cluster is using service principal, and you are going to update \"\n \"the cluster to use {} managed identity.\\nAfter updating, your \"\n \"cluster's control plane and addon pods will switch to use managed \"\n \"identity, but kubelet will KEEP USING SERVICE PRINCIPAL \"\n \"until you upgrade your agentpool.\\n\"\n \"Are you sure you want to perform this operation?\"\n ).format(goal_identity_type)\n elif current_identity_type != goal_identity_type:\n msg = (\n \"Your cluster is already using {} managed identity, and you are going to \"\n \"update the cluster to use {} managed identity.\\n\"\n \"Are you sure you want to perform this operation?\"\n ).format(current_identity_type, goal_identity_type)\n else:\n msg = (\n \"Your cluster is already using userassigned managed identity, current control plane identity is {},\"\n \"and you are going to update the cluster identity to {}.\\n\"\n \"Are you sure you want to perform this operation?\"\n ).format(current_user_assigned_identity, assign_identity)\n # gracefully exit if user does not confirm\n if not self.context.get_yes() and not prompt_y_n(msg, default=\"n\"):\n raise DecoratorEarlyExitException\n # update identity\n if goal_identity_type == \"systemassigned\":\n identity = self.models.ManagedClusterIdentity(\n type=\"SystemAssigned\"\n )\n elif goal_identity_type == \"userassigned\":\n user_assigned_identity = {\n assign_identity: self.models.ManagedServiceIdentityUserAssignedIdentitiesValue()\n }\n identity = self.models.ManagedClusterIdentity(\n type=\"UserAssigned\",\n user_assigned_identities=user_assigned_identity\n )\n mc.identity = identity\n return mc", "def test_update_payment_profile(self):\n self.cim.update_payment_profile(\n customer_profile_id=u\"122\",\n customer_payment_profile_id=u\"444\",\n card_number=u\"422222222222\",\n expiration_date=u\"2009-10\"\n )", "def svc_provider(self, svc_provider):\n\n self._svc_provider = svc_provider", "def update(self, request, *args, **kwargs):\n return super(UserViewSet, self).update(request, *args, **kwargs)", "def put(self, user_id):\r\n return update_user(request, user_id)", "def put(self):\n request = transforms.loads(self.request.get('request'))\n\n if not self.assert_xsrf_token_or_fail(\n request, 'update-service_account', {}):\n return\n\n if not roles.Roles.is_super_admin():\n transforms.send_json_response(\n self, 401, 'Access denied.')\n return\n\n payload = request.get('payload')\n updated_dict = transforms.loads(payload)\n # updated_dict = transforms.json_to_dict(\n # transforms.loads(payload), self.get_schema_dict())\n\n errors = []\n self.apply_updates(updated_dict, errors)\n if not errors:\n transforms.send_json_response(self, 200, 'Saved.')\n else:\n transforms.send_json_response(self, 412, '\\n'.join(errors))", "def update_user(user_id, data):\n logging.debug(\"Uptating user: user_id={}\".format(user_id))\n return ask('appusers/{0}'.format(user_id), data, 'put')", "def update(self, **kwargs):\n 
self.manager.update(self, **kwargs)", "def update(self, **kwargs):\n self.manager.update(self, **kwargs)", "def _enable(cls, provider):\r\n if provider.NAME in cls._ENABLED:\r\n raise ValueError('Provider %s already enabled' % provider.NAME)\r\n cls._ENABLED[provider.NAME] = provider", "def update_account_data(self):\n self.ensure_one()\n getattr(self, '%s_update_account_data' % self.provider, lambda: None)()", "def update(self, identity, data=None, record=None, **kwargs):\n record.custom_fields = data.get(\"custom_fields\", {})", "def _update_provider_details_without_commit(provider_details):\n provider_details.version += 1\n provider_details.updated_at = datetime.utcnow()\n history = ProviderDetailsHistory.from_original(provider_details)\n db.session.add(provider_details)\n db.session.add(history)", "def access_info_update(context, storage_id, values):\n session = get_session()\n with session.begin():\n _access_info_get(context, storage_id, session).update(values)\n return _access_info_get(context, storage_id, session)", "def SetIdentityServerConfig(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_user(self):\n self.client.force_authenticate(user=self.user)\n self.response = self.client.patch(\n reverse(\n 'edit_account',kwargs={ 'pk': self.user.id}),\n self.updated_data, format='json'\n )\n self.user = CustomUser.objects.get(username=self.user.username)", "def update_extdata(params):\n user = get_user(uid=None)\n db = api.db.get_conn()\n db.users.update_one({\"uid\": user[\"uid\"]}, {\"$set\": {\"extdata\": params}})", "def update(self, uid):\n raise NotImplementedError", "def update(self, customerguid, name=\"\", login=\"\", password=\"\", email=\"\", address=\"\", vat=\"\", jobguid=\"\", executionparams=None):", "def update_entities(self):\n raise NotImplementedError()", "def patch(self,\n provider_id,\n provider_deployment_map_id,\n provider_deployment_map,\n ):\n return self._invoke('patch',\n {\n 'provider_id': provider_id,\n 'provider_deployment_map_id': provider_deployment_map_id,\n 'provider_deployment_map': provider_deployment_map,\n })", "def patch(self,\n provider_id,\n l3_vpn_context,\n ):\n return self._invoke('patch',\n {\n 'provider_id': provider_id,\n 'l3_vpn_context': l3_vpn_context,\n })", "def update(self, user: U) -> None:\n ...", "def extend_identity(identity, roles):\n provides = set([UserNeed(current_user.email)] + [RoleNeed(name) for name in roles])\n identity.provides |= provides\n key = current_app.config.get(\n \"OAUTHCLIENT_CERN_OPENID_SESSION_KEY\",\n OAUTHCLIENT_CERN_OPENID_SESSION_KEY,\n )\n session[key] = provides", "def update_profile(self, method=\"POST\", id=1, fullname=\"John Doe\",\r\n name=\"johndoe\", locale=\"es\",\r\n email_addr=\"johndoe@example.com\",\r\n new_name=None,\r\n btn='Profile'):\r\n url = \"/account/%s/update\" % name\r\n if new_name:\r\n name = new_name\r\n if (method == \"POST\"):\r\n return self.app.post(url,\r\n data={'id': id,\r\n 'fullname': fullname,\r\n 'name': name,\r\n 'locale': locale,\r\n 'email_addr': email_addr,\r\n 'btn': btn},\r\n follow_redirects=True)\r\n else:\r\n return self.app.get(url,\r\n follow_redirects=True)", "def set_provider(self, provider):\n \n check = self.check_provider(provider)\n if check is not None:\n self.default_provider = provider\n else:\n return None", "def update(self,request,pk = None):\n return Response({'http_method':'PUT'})", "def update(self):\n 
if self._data_provider_state is not None:\n self._state = self._data_provider_state()\n \n if self._data_provider_attributes is not None:\n self._attributes = self._data_provider_attributes()", "def update(cls, name, value):\n\n db = get_db_handle()\n secret = cls.get_instance(name)\n secret_data = secret.data[0] # using backref\n\n pass_phrase = secret_data.pass_phrase\n\n LOG.debug(\"Encrypting new data\")\n encrypted_msg = Crypto.encrypt_AES_GCM(value, pass_phrase)\n\n (kdf_salt, ciphertext, iv, auth_tag) = encrypted_msg\n\n query = db.data_table.update(\n kdf_salt=kdf_salt,\n ciphertext=ciphertext,\n iv=iv,\n auth_tag=auth_tag,\n pass_phrase=pass_phrase,\n ).where(db.data_table.secret_ref == secret)\n\n query.execute()\n\n query = db.secret_table.update(last_update_time=datetime.datetime.now()).where(\n db.secret_table.name == name\n )\n\n query.execute()", "def update_access_token(self):\n self.token = util.prompt_for_user_token(self._username, scope,\n client_id=const.CLIENT_ID,\n client_secret=const.CLIENT_SECRET,\n redirect_uri=const.REDIRECT_URL)\n self._client = spotipy.Spotify(auth=self.token)", "def auth_token_provider_endpoint(self, auth_token_provider_endpoint):\n\n self._auth_token_provider_endpoint = auth_token_provider_endpoint", "def put(self, request):\n profile = Profile.get_by_id(request.user.id)\n if not profile:\n return HttpResponse(status=403)\n user = CustomUser.objects.get(id=request.user.id)\n update_data = json.loads(request.body.decode('utf-8'))\n user.update(first_name=update_data.get('first_name'),\n last_name=update_data.get('last_name'))\n profile.update(\n birthday=update_data.get('birthday'),\n gender=update_data.get('gender'),\n hobbies=update_data.get('hobbies'),\n facebook=update_data.get('facebook'))\n data = profile.to_dict()\n return JsonResponse(data, status=200)", "def provider(self, provider: str):\n if provider is None:\n raise ValueError(\"Invalid value for `provider`, must not be `None`\") # noqa: E501\n\n self._provider = provider", "def record_update_for_user(record_id, values):\n session = get_session()\n with session.begin():\n record_ref = get_user_record(record_id, session=session)\n record_ref.update(values)\n record_ref.save(session=session)", "def update_identity_profile(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n\n assign_kubelet_identity = self.context.get_assign_kubelet_identity()\n if assign_kubelet_identity:\n identity_profile = {\n 'kubeletidentity': self.models.UserAssignedIdentity(\n resource_id=assign_kubelet_identity,\n )\n }\n user_assigned_identity = self.context.get_assign_identity()\n if not user_assigned_identity:\n user_assigned_identity = self.context.get_user_assignd_identity_from_mc()\n cluster_identity_object_id = self.context.get_user_assigned_identity_object_id(user_assigned_identity)\n # ensure the cluster identity has \"Managed Identity Operator\" role at the scope of kubelet identity\n self.context.external_functions.ensure_cluster_identity_permission_on_kubelet_identity(\n self.cmd,\n cluster_identity_object_id,\n assign_kubelet_identity)\n mc.identity_profile = identity_profile\n return mc", "def provider_id(self):\n raise NotImplementedError", "def update_identity(self, realm=None, type=\"users\", username=None, user_data=None):\n if not username:\n raise ValueError(\"Please provide a username.\")\n\n if not user_data:\n raise ValueError(\"Please provide correct user information.\")\n\n user_data = self._to_string(data=user_data)\n type = self._type_validator(type=type)\n uri = 
self._uri_realm_creator(realm=realm, uri=type + '/' + username)\n data = self._put(uri=uri, data=user_data, headers=self.headers)\n return data.json()", "def put(self, customer_id):\n data = request.json\n return edit_customer(customer_id=customer_id, data=data)", "async def update(self, ctx):\n if is_support_guild(ctx.guild.id):\n await ctx.send('Sorry, this discord does not allow update, saveid, '\n 'leaderboard, and series commands so as not to overload me. '\n 'Try `!careerstats` or `!yearlystats` with your customer ID to test '\n 'or go to #invite-link to bring the bot to your discord for all functionality')\n return\n await ctx.send(f'Updating user: {ctx.author.name}, this may take a minute')\n log.info(f'Updating user: {ctx.author.name}')\n await self.updater.update_member(ctx)", "def update(cls, id, xml):\n raise Exception('Not Implemented Yet')", "def _update_by_auth(self, username, password, token_path, grant_type, client_id):\n token = requests.post(\n self.api_url + self.oauthpath,\n params={\n \"username\": username,\n \"grant_type\": grant_type,\n \"client_id\": client_id\n },\n data={\n \"password\": password\n }).json()\n if \"access_token\" in token:\n self.username = username\n self.token = token[\"access_token\"]\n else:\n raise KeyError(\"Authentication failed\", token)", "def identity_provider(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider\")", "def patch(self, request, *args, **kwargs):\n user= get_object_or_404(YouYodaUser, email=request.data.get('email'))\n # user = YouYodaUser.objects.get(auth_token=request.headers['Authorization'].replace('Token ', ''))\n serializer = ProfileEditSerializer(user, data=request.data, partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def update(self, *args, **kwargs):\n raise NotImplementedError", "def updateCustomer(self, **params):\n self.__requireParams(params, ['id'])\n return self.__req('update_customer', params)", "def update(self, profiles, matches):\n raise NotImplementedError()", "def transform_from_provider(self, source_provider):\n self.source_provider = source_provider\n return self.__transform()", "def put(self, id):\n data = flask.request.json\n user_dao.update_user(id, data)\n return None, 204", "def update(self, request, pk=None):\n return Response({'http_method': 'PUT'})", "def update_account_with(self, id_, **kwargs):\n self.update_user_with(id_, **kwargs)\n self.update_profile_with(id_, **kwargs)\n # TODO:\n # self.update_prefecture_with(id_, kwargs)", "def update(self, *args, **kwargs):\n pass", "def update(self, *args, **kwargs):\n pass", "def update(self, *args, **kwargs):\n pass", "def put(self, request, flavor_profile_id):\n update_flavor_profile(request)", "async def update_issuer(self, issuer_name: str, **kwargs) -> CertificateIssuer:\n\n enabled = kwargs.pop(\"enabled\", None)\n account_id = kwargs.pop(\"account_id\", None)\n password = kwargs.pop(\"password\", None)\n organization_id = kwargs.pop(\"organization_id\", None)\n admin_contacts = kwargs.pop(\"admin_contacts\", None)\n\n if account_id or password:\n issuer_credentials = self._models.IssuerCredentials(account_id=account_id, password=password)\n else:\n issuer_credentials = None\n if admin_contacts:\n admin_details: Optional[List[Any]] = list(\n self._models.AdministratorDetails(\n first_name=contact.first_name,\n last_name=contact.last_name,\n 
email_address=contact.email,\n phone=contact.phone,\n )\n for contact in admin_contacts\n )\n else:\n admin_details = None\n if organization_id or admin_details:\n organization_details = self._models.OrganizationDetails(id=organization_id, admin_details=admin_details)\n else:\n organization_details = None\n if enabled is not None:\n issuer_attributes = self._models.IssuerAttributes(enabled=enabled)\n else:\n issuer_attributes = None\n\n parameters = self._models.CertificateIssuerUpdateParameters(\n provider=kwargs.pop(\"provider\", None),\n credentials=issuer_credentials,\n organization_details=organization_details,\n attributes=issuer_attributes,\n )\n\n issuer_bundle = await self._client.update_certificate_issuer(\n vault_base_url=self.vault_url, issuer_name=issuer_name, parameter=parameters, **kwargs\n )\n return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)", "def update_user_profile(email, **kwargs): # PUT\n user = coll(\"users\").find_one({\"_id\": email})\n if not user:\n return {\"message\": \"User not found\"}, 404\n\n coll(\"users\").update_one({\"_id\": email}, {\"$set\": kwargs})\n\n return {\"message\": \"User profile successfully updated\"}, 200", "def update_user(self, user_id, **kwargs):\n user = self.get(user_id, raise_error=True)\n if 'display_name' in kwargs:\n user.display_name = kwargs['display_name']\n if 'email' in kwargs:\n user.email = kwargs['email']\n if 'verified' in kwargs:\n user.verified = kwargs['verified']\n self.session.add(user)", "def update(self, request, pk=None):\n\n return Response({'http_method':'PUT'})", "def healthcare_provider_id(self, healthcare_provider_id):\n\n self._healthcare_provider_id = healthcare_provider_id", "def update(self, request, pk=None):\n\n return Response({'http_method': 'PUT'})", "def update_region(self, region_id, region_ref):\n raise exception.NotImplemented() # pragma: no cover", "def updateStudents(request):\n\n return updateRole('gsoc_student')" ]
[ "0.6841701", "0.6564375", "0.63220054", "0.62459624", "0.623236", "0.5886005", "0.58837974", "0.5625915", "0.5601719", "0.55890137", "0.5554441", "0.54530036", "0.54441226", "0.539471", "0.5389379", "0.53829527", "0.5339994", "0.5336483", "0.5261702", "0.521437", "0.52008426", "0.5177207", "0.51419634", "0.5112954", "0.51001453", "0.50945497", "0.5057962", "0.5035924", "0.5022", "0.5011586", "0.50039756", "0.49916977", "0.49717024", "0.49478197", "0.49396974", "0.4938617", "0.49347886", "0.4889442", "0.48755258", "0.4873932", "0.48712152", "0.48671222", "0.48633742", "0.4854552", "0.4837176", "0.48362398", "0.48362398", "0.48222178", "0.48184693", "0.48171937", "0.48089978", "0.48085687", "0.48067266", "0.4802264", "0.47888023", "0.47818208", "0.47770545", "0.47675142", "0.47615266", "0.4761231", "0.47421178", "0.4735402", "0.47067255", "0.46960902", "0.46937943", "0.46848172", "0.4681851", "0.46807396", "0.46763557", "0.4676227", "0.4669505", "0.46683946", "0.4664614", "0.46587732", "0.46561337", "0.46549386", "0.4644848", "0.4641537", "0.46384048", "0.4635937", "0.46195206", "0.46154073", "0.46139288", "0.46134093", "0.46073973", "0.46042192", "0.46017614", "0.4601279", "0.4601055", "0.4601055", "0.4601055", "0.45988902", "0.45941192", "0.45865142", "0.45852023", "0.4583557", "0.45801833", "0.4577482", "0.4573815", "0.45713767" ]
0.7050549
0
Updates the specified group mapping.
def update_idp_group_mapping(self, identity_provider_id, mapping_id, update_idp_group_mapping_details, **kwargs):
    resource_path = "/identityProviders/{identityProviderId}/groupMappings/{mappingId}"
    method = "PUT"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "if_match"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "update_idp_group_mapping got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "identityProviderId": identity_provider_id,
        "mappingId": mapping_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "if-match": kwargs.get("if_match", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=update_idp_group_mapping_details,
            response_type="IdpGroupMapping")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=update_idp_group_mapping_details,
            response_type="IdpGroupMapping")
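# --- Illustrative usage sketch ---
# A minimal example, assuming the standard OCI Python SDK surface
# (`oci.identity.IdentityClient` and the `UpdateIdpGroupMappingDetails`
# model). Every OCID below is a hypothetical placeholder.
import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Remap the IdP group "EngAdmins" to a different IAM group.
details = oci.identity.models.UpdateIdpGroupMappingDetails(
    idp_group_name="EngAdmins",
    group_id="ocid1.group.oc1..exampleuniqueID")

response = identity.update_idp_group_mapping(
    "ocid1.saml2idp.oc1..exampleuniqueID",          # identity provider OCID
    "ocid1.idpgroupmapping.oc1..exampleuniqueID",   # group mapping OCID
    details)
print(response.data)  # the updated IdpGroupMapping model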
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_group():\n _id = request.form['_id']\n name = request.form['name']\n data, code, message = FIELD_SERVICE.update_group(_id, name)\n return __result(data, code, message)", "def do_group_update():\n target_group = Group.query.filter_by(id=request.form['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n target_group.name = request.form['name']\n target_group.group_meter_id = request.form['meter']\n target_group.group_production_meter_id_first = request.form['group_production_meter_id_first']\n target_group.group_production_meter_id_second = request.form[\n 'group_production_meter_id_second']\n\n db.session.commit()\n return group_list(\"Updated group \" + target_group.name)", "def group_update(*, login_manager: LoginManager, group_id: str, **kwargs: Any):\n groups_client = login_manager.get_groups_client()\n\n # get the current state of the group\n group = groups_client.get_group(group_id)\n\n # assemble put data using existing values for any field not given\n # note that the API does not accept the full group document, so we must\n # specify name and description instead of just iterating kwargs\n data = {}\n for field in [\"name\", \"description\"]:\n if kwargs.get(field) is not None:\n data[field] = kwargs[field]\n else:\n data[field] = group[field]\n\n response = groups_client.update_group(group_id, data)\n\n formatted_print(response, simple_text=\"Group updated successfully\")", "def update_groups(self, groups):\n self.fetch_group_messages() # preload messages before updating groups\n self.groups = groups\n self.put()", "def update_group(self, group_id, **kwargs):\n post_body = json.dumps({'group': kwargs})\n resp, body = self.patch('groups/%s' % group_id, post_body)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return rest_client.ResponseBody(resp, body)", "def update_group(groupname):\n name = request.get_json().get(\"name\", None)\n description = request.get_json().get(\"description\", None)\n response = jsonify(\n admin.update_group(current_app.scoped_session(), groupname, description, name)\n )\n return response", "def update(self,\n provider_id,\n group_id,\n group,\n ):\n return self._invoke('update',\n {\n 'provider_id': provider_id,\n 'group_id': group_id,\n 'group': group,\n })", "def fusion_api_update_group_role_assignment(self, body, api=None, headers=None):\n return self.LoginDomainsGroupToRoleMapping.update(body, api, headers)", "def set_group(self, address, group):\n self.groups[address] = group", "def update_group(self, group_name, new_group_name=None, new_path=None):\r\n params = {'GroupName' : group_name}\r\n if new_group_name:\r\n params['NewGroupName'] = new_group_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateGroup', params)", "def request_group_update():\n target_group = Group.query.filter_by(id=request.args['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n return Response(\n render_template(\n 'admin/group/create-update.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/admin/group/update\",\n id=target_group.id,\n name=target_group.name,\n meter=target_group.group_meter_id,\n group_production_meter_id_first=target_group.group_production_meter_id_first,\n group_production_meter_id_second=target_group.group_production_meter_id_second),\n mimetype='text/html')", "def set_group(self, id_: str, player: str, group: list):\n self._groups[id_] = {\n 'player': player,\n 'group': group\n }", "async def 
update_contact_group(dbcon: DBConnection, contact_group_id: int, data: Dict[str, str]) -> None:\n\n async def _run(cur: Cursor) -> None:\n for key, value in data.items():\n if key not in ['name', 'active']:\n raise errors.IrisettError('invalid contact key %s' % key)\n q = \"\"\"update contact_groups set %s=%%s where id=%%s\"\"\" % key\n q_args = (value, contact_group_id)\n await cur.execute(q, q_args)\n\n if not await contact_group_exists(dbcon, contact_group_id):\n raise errors.InvalidArguments('contact group does not exist')\n await dbcon.transact(_run)", "def update_targetgroup(self, group_id, **kwargs):\r\n result = False\r\n if self._db(self._db.targetgroup.id==group_id).select():\r\n result = True\r\n self._db(self._db.targetgroup.id==group_id).update(**kwargs)\r\n self._db.commit()\r\n return result", "def update(self):\r\n return self.connection._update_group('UpdateAutoScalingGroup', self)", "def test_modify_group(self):\n response = self.client.modify_group(\"ABC123\")\n self.assertEqual(response[\"method\"], \"POST\")\n self.assertEqual(response[\"uri\"], \"/admin/v1/groups/ABC123\")\n self.assertEqual(util.params_to_dict(response[\"body\"]), {\"account_id\": [self.client.account_id]})", "def test_update_group(self):\n pass", "def set_group(self, group):\n # Implemented from template for osid.resource.ResourceForm.set_group_template\n if self.get_group_metadata().is_read_only():\n raise errors.NoAccess()\n if not self._is_valid_boolean(group):\n raise errors.InvalidArgument()\n self._my_map['group'] = group", "def update_group(self, group_id, new_description):\n url = self.groups_url + \"/\" + group_id\n new_data = json.dumps({\"description\": new_description})\n\n return requests.put(url, new_data, headers=self.headers)", "def update(self, val):\n try:\n key = self._group_by(val)\n except lena.core.LenaKeyError:\n raise lena.core.LenaValueError(\n \"could not find a key for {}\".format(val)\n )\n\n if key in self.groups:\n self.groups[key].append(val)\n else:\n self.groups[key] = [val]", "def groups_update(self, mar, request):\n group_id = mar.viewed_user_auth.user_id\n member_ids_dict, owner_ids_dict = self._services.usergroup.LookupMembers(\n mar.cnxn, [group_id])\n owner_ids = owner_ids_dict.get(group_id, [])\n member_ids = member_ids_dict.get(group_id, [])\n if not permissions.CanEditGroup(\n mar.perms, mar.auth.effective_ids, owner_ids):\n raise permissions.PermissionException(\n 'The user is not allowed to edit this group.')\n\n group_settings = self._services.usergroup.GetGroupSettings(\n mar.cnxn, group_id)\n if (request.who_can_view_members or request.ext_group_type\n or request.last_sync_time or request.friend_projects):\n group_settings.who_can_view_members = (\n request.who_can_view_members or group_settings.who_can_view_members)\n group_settings.ext_group_type = (\n request.ext_group_type or group_settings.ext_group_type)\n group_settings.last_sync_time = (\n request.last_sync_time or group_settings.last_sync_time)\n if framework_constants.NO_VALUES in request.friend_projects:\n group_settings.friend_projects = []\n else:\n id_dict = self._services.project.LookupProjectIDs(\n mar.cnxn, request.friend_projects)\n group_settings.friend_projects = (\n list(id_dict.values()) or group_settings.friend_projects)\n self._services.usergroup.UpdateSettings(\n mar.cnxn, group_id, group_settings)\n\n if request.groupOwners or request.groupMembers:\n self._services.usergroup.RemoveMembers(\n mar.cnxn, group_id, owner_ids + member_ids)\n owners_dict = 
self._services.user.LookupUserIDs(\n mar.cnxn, request.groupOwners, autocreate=True)\n self._services.usergroup.UpdateMembers(\n mar.cnxn, group_id, list(owners_dict.values()), 'owner')\n members_dict = self._services.user.LookupUserIDs(\n mar.cnxn, request.groupMembers, autocreate=True)\n self._services.usergroup.UpdateMembers(\n mar.cnxn, group_id, list(members_dict.values()), 'member')\n\n return api_pb2_v1.GroupsUpdateResponse()", "async def handle_set_group(self, match: Match[str], payload: str) -> None:\n groupid = match.group(1)\n\n try:\n group = self._bridge.groups[groupid]\n state = GroupSetState(**json.loads(payload))\n LOGGER.info(f\"Updating group {group.name}\")\n await group.set_action(**state.dict())\n except IndexError:\n LOGGER.warning(f\"Unknown group id: {groupid}\")\n except json.JSONDecodeError:\n LOGGER.warning(f\"Bad JSON on light request: {payload}\")\n except TypeError:\n LOGGER.warning(f\"Expected dictionary, got: {payload}\")\n except ValidationError as e:\n LOGGER.warning(f\"Invalid light state: {e}\")", "def ModifyGroup(self, group, reason=None, **kwargs):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_PUT,\n (\"/%s/groups/%s/modify\" %\n (GANETI_RAPI_VERSION, group)), query, kwargs)", "def upsert_group(self,\n group, # type: Group\n *options, # type: UpsertGroupOptions\n **kwargs # type: Any\n ):\n # This endpoint accepts application/x-www-form-urlencoded and requires the data be sent as form data.\n # The name/id should not be included in the form data.\n # Roles should be a comma separated list of strings.\n # If, only if, the role contains a bucket name then the rolename should be suffixed\n # with[<bucket_name>] e.g. bucket_full_access[default],security_admin.\n\n final_args = forward_args(kwargs, *options)\n final_args.update({k: v for k, v in group.as_dict.items() if k in {\n 'roles', 'description', 'ldap_group_reference'}})\n self._admin_bucket.group_upsert(group.name, **final_args)", "def set(self, name_group, key, value):\n self.psettings.beginGroup(name_group)\n self.psettings.setValue(key, value)\n self.closeGroup()", "def update_group(self, group_id, update_group_details, **kwargs):\n resource_path = \"/groups/{groupId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_group got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"groupId\": group_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_group_details,\n response_type=\"Group\")\n else:\n return 
self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_group_details,\n response_type=\"Group\")", "def update_group_with_http_info(self, bucket_id, group_id, group, **kwargs):\n\n all_params = ['bucket_id', 'group_id', 'group', 'if_match', 'if_none_match', 'fields']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method update_group\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'bucket_id' is set\n if ('bucket_id' not in params) or (params['bucket_id'] is None):\n raise ValueError(\"Missing the required parameter `bucket_id` when calling `update_group`\")\n # verify the required parameter 'group_id' is set\n if ('group_id' not in params) or (params['group_id'] is None):\n raise ValueError(\"Missing the required parameter `group_id` when calling `update_group`\")\n # verify the required parameter 'group' is set\n if ('group' not in params) or (params['group'] is None):\n raise ValueError(\"Missing the required parameter `group` when calling `update_group`\")\n\n if 'if_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_match']):\n raise ValueError(\"Invalid value for parameter `if_match` when calling `update_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n if 'if_none_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_none_match']):\n raise ValueError(\"Invalid value for parameter `if_none_match` when calling `update_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n\n collection_formats = {}\n\n resource_path = '/buckets/{bucket_id}/groups/{group_id}'.replace('{format}', 'json')\n path_params = {}\n if 'bucket_id' in params:\n path_params['bucket_id'] = params['bucket_id']\n if 'group_id' in params:\n path_params['group_id'] = params['group_id']\n\n query_params = {}\n if 'fields' in params:\n query_params['_fields'] = params['fields']\n collection_formats['_fields'] = 'csv'\n\n header_params = {}\n if 'if_match' in params:\n header_params['If-Match'] = params['if_match']\n if 'if_none_match' in params:\n header_params['If-None-Match'] = params['if_none_match']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'group' in params:\n body_params = params['group']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['basicAuth']\n\n return self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Group',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def test_update_entry_groups(self):\r\n # This method utilises the PUT 
request method and will make changes to the Canvas instance. This needs consideration.\r\n pass", "def set_group(self, group: str) -> None:\n self.group = group", "def update(self, consistencygroup, **kwargs):\n if not kwargs:\n return\n\n body = {\"consistencygroup\": kwargs}\n\n return self._update(\"/consistencygroups/%s\" %\n base.getid(consistencygroup), body)", "def test_user_group_controller_update(self):\n pass", "def set_group(self, group):\n self._group = group", "def group(self, group):\n self._group = group", "def update(person_group_id, name=None, user_data=None):\n url = 'persongroups/{}'.format(person_group_id)\n json = {\n 'name': name,\n 'userData': user_data,\n }\n\n return util.request('PATCH', url, json=json)", "def put(self):\n status = ErrorCode.SUCCESS\n try:\n data = DotDict(json_decode(self.request.body))\n cid = self.current_user.cid\n tid = self.current_user.tid\n gid = data.gid\n name = data.name\n logging.info(\"[UWEB] Modify group request: %s, cid: %s\",\n data, self.current_user.cid)\n except Exception as e:\n status = ErrorCode.ILLEGAL_DATA_FORMAT\n logging.exception(\"[UWEB] Invalid data format. body:%s, Exception: %s\",\n self.request.body, e.args)\n self.write_ret(status)\n return\n\n try: \n group = self.get_group_by_cid(cid, name)\n if group:\n status = ErrorCode.GROUP_EXIST\n self.write_ret(status)\n return\n\n self.db.execute(\"UPDATE T_GROUP\"\n \" SET name = %s\"\n \" WHERE id = %s\",\n name, gid)\n\n # NOTE: wspush to client \n if status == ErrorCode.SUCCESS:\n WSPushHelper.pushS3(tid, self.db, self.redis)\n\n self.write_ret(status)\n except Exception as e:\n logging.exception(\"[UWEB] Modify group failed. cid: %s, Exception: %s\",\n self.current_user.cid, e.args)\n status = ErrorCode.SERVER_BUSY\n self.write_ret(status)", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def add_to_group(self, org, contact, group):\n pass", "def update_research_group(self, employee_id, new_research_group):\n cursor = self.dbconnect.get_cursor()\n try:\n cursor.execute('UPDATE employee '\n 'SET research_group = %s '\n 'WHERE id=%s;',\n (new_research_group, employee_id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise", "def async_update_group_state(self) -> None:", "def update_group_association(old_email, new_email):\n\n groups.update({'users': old_email},\n {'$set': {'users.$': new_email}},\n upsert=False,\n multi=True)", "def patch(self,\n provider_id,\n group_id,\n group,\n ):\n return self._invoke('patch',\n {\n 'provider_id': provider_id,\n 'group_id': group_id,\n 'group': group,\n })", "def put(self):\n if not request.is_json:\n parser = reqparse.RequestParser()\n parser.add_argument(constants.PID_KEY, help='Process id')\n parser.add_argument(constants.GID_KEY, help='Group id')\n data = parser.parse_args()\n else:\n data = request.json\n group_id = data[constants.GID_KEY]\n process_id = data[constants.PID_KEY]\n utils.check_process_id_in_req(process_id)\n utils.check_group_id_in_req(group_id)\n _check_group_exists(group_id)\n GID_COORD_DICT[group_id] = (process_id, request.remote_addr)\n response = {constants.COORD_PID_KEY: GID_COORD_DICT[group_id][0],\n constants.COORD_IP_KEY: GID_COORD_DICT[group_id][1]}\n return response", "def UpdateGroupMembership(self, newMembers):\r\n globals.groupMembers[newMembers.targetGuid] = True #remove the target Sticky\r\n\r\n for guid in newMembers.guids[0]:\r\n globals.groupMembers[guid]=True\r\n\r\n 
group = Group()\r\n globals._groupNumber = globals._groupNumber+1\r\n group.groupID = globals._groupName + str(globals._groupNumber)\r\n group.targetSticky[\"guid\"] = newMembers.targetGuid\r\n group.targetSticky[\"desc\"] = newMembers.targetDesc\r\n group.targetSticky[\"head\"] = newMembers.targetHead #lplp1313 new value\r\n\r\n guidSims = tuple(zip(newMembers.guids[0], newMembers.descriptions[0], newMembers.headers[0], list(newMembers.cos_sims[0]))) #lplp1313 new value \r\n for g, d, h, c in guidSims:\r\n gs = GroupSticky()\r\n gs.guid=g\r\n gs.desc=d\r\n gs.head=h #lplp1313 new value\r\n gs.cosineVal=c\r\n group.groupStickies.append(gs)\r\n\r\n globals._jsonReply._groups.append(group)", "def _mod_group(self, command, group_id, group_type, buckets=None):\n self.datapath.send_msg(\n self.parser.OFPGroupMod(\n datapath=self.datapath,\n command=command,\n group_id=group_id,\n type_=group_type,\n buckets=buckets,\n )\n )", "def update(self, group_snapshot, **kwargs):\n if not kwargs:\n return\n\n body = {\"group_snapshot\": kwargs}\n\n return self._update(\"/group_snapshots/%s\" % base.getid(group_snapshot),\n body)", "def update_pin_group():\n create_instance(new=False)", "def setGroup(self, group):\n\t\tself.config.GROUP = group", "def _group_modify_id(group, id_modifier):\n\n group = group._replace(id=id_modifier(group.id))\n group = group._replace(children=list(map(lambda g: Skeleton._group_modify_id(g, id_modifier), group.children)))\n\n return group", "def test_groups_group_id_state_put(self):\n pass", "def _do_update(self, meta, k, v):\n self.runtime.logger.info('{}: [{}] -> {}'.format(meta.in_group_config_path, k, v))\n meta.config[k] = v\n meta.save()", "def group(self, val):\n self.set_property(\"Group\", val)", "def _addProteinIdsToGroupMapping(self, proteinIds, groupId):\n for proteinId in AUX.toList(proteinIds):\n self._proteinToGroupIds[proteinId].add(groupId)", "def mapGroups(groupList, letters):\r\n changeList = findIndices(groupList)\r\n i = 0\r\n for index in changeList:\r\n toReplace = groupList[index]\r\n groupList = qMS.listReplace(groupList, toReplace, letters[i])\r\n i = i+1\r\n return list(groupList)", "def update_group(\n self,\n group,\n validate_only=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n # Wrap the transport method to add retry and timeout logic.\n if \"update_group\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"update_group\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.update_group,\n default_retry=self._method_configs[\"UpdateGroup\"].retry,\n default_timeout=self._method_configs[\"UpdateGroup\"].timeout,\n client_info=self._client_info,\n )\n\n request = group_service_pb2.UpdateGroupRequest(\n group=group, validate_only=validate_only,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"group.name\", group.name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"update_group\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def update(self, mapping):\n if not ismapping(mapping):\n raise TypeError(\"mapping type required\")\n field_names = getpyattr(type(self), 'field_names')\n for key, value in mapping.items():\n if key in field_names:\n 
setattr(self, key, value)", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def update_groups(self, user_id, group_ids):\n user = self.get(user_id, raise_error=True)\n new_groups = (\n self.session\n .query(tables.Group)\n .filter(tables.Group.group_id.in_(group_ids))\n )\n user.groups = new_groups.all()\n self.session.flush()", "def set_gadm(uid, gid):\n g.db.execute('update into user_group (gadm) values (1) where id_user == ? and id_group == ?',\n [uid, gid])", "def post_security_group_update(self, resource_id, resource_dict):\n pass", "def update(self, key, value):\n if key in self.map:\n self.map[key] = value", "def patch_group_with_http_info(self, bucket_id, group_id, group, **kwargs):\n\n all_params = ['bucket_id', 'group_id', 'group', 'if_match', 'if_none_match', 'fields']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method patch_group\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'bucket_id' is set\n if ('bucket_id' not in params) or (params['bucket_id'] is None):\n raise ValueError(\"Missing the required parameter `bucket_id` when calling `patch_group`\")\n # verify the required parameter 'group_id' is set\n if ('group_id' not in params) or (params['group_id'] is None):\n raise ValueError(\"Missing the required parameter `group_id` when calling `patch_group`\")\n # verify the required parameter 'group' is set\n if ('group' not in params) or (params['group'] is None):\n raise ValueError(\"Missing the required parameter `group` when calling `patch_group`\")\n\n if 'if_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_match']):\n raise ValueError(\"Invalid value for parameter `if_match` when calling `patch_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n if 'if_none_match' in params and not re.search('\\\\\\\"[0-9]+\\\\\\\"', params['if_none_match']):\n raise ValueError(\"Invalid value for parameter `if_none_match` when calling `patch_group`, must conform to the pattern `/\\\\\\\"[0-9]+\\\\\\\"/`\")\n\n collection_formats = {}\n\n resource_path = '/buckets/{bucket_id}/groups/{group_id}'.replace('{format}', 'json')\n path_params = {}\n if 'bucket_id' in params:\n path_params['bucket_id'] = params['bucket_id']\n if 'group_id' in params:\n path_params['group_id'] = params['group_id']\n\n query_params = {}\n if 'fields' in params:\n query_params['_fields'] = params['fields']\n collection_formats['_fields'] = 'csv'\n\n header_params = {}\n if 'if_match' in params:\n header_params['If-Match'] = params['if_match']\n if 'if_none_match' in params:\n header_params['If-None-Match'] = params['if_none_match']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'group' in params:\n body_params = params['group']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del 
header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json', 'application/merge-patch+json', 'application/json-patch+json'])\n\n # Authentication setting\n auth_settings = ['basicAuth']\n\n return self.api_client.call_api(resource_path, 'PATCH',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Group',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def put(self, id):\r\n return UserGroupService.updateUserGroup(self, id)", "def set_group(self, data, group, intg, dq=None):\n # TODO: Include a 2-D DQ array to be combined with the GROUPDQ array\n #\n # Copy the input data to a 2-D plane for this group/intg combination.\n # NOTE: This only works if data array is broadcastable so the shape\n # of the data array is checked.\n #\n data = np.asarray(data, dtype=self.data.dtype)\n detector_shape = (self.rows, self.columns)\n if data.shape == detector_shape:\n self.data[intg, group, :, :] = data \n # Invalidate the averaged data\n self._data_averaged = None\n # Update the group data quality array if necessary.\n if dq is not None:\n if self.include_groupdq:\n dq = np.asarray(dq, dtype=self.groupdq.dtype) # Convert to same data type.\n self.groupdq[intg, group, :, :] |= dq\n else:\n strg = \"Incompatible arguments. A groupdq array is \"\n strg += \"provided when include_groupdq=False. \"\n strg += \"The array is ignored.\"\n LOGGER.error(strg)\n else:\n strg = \"Group data array has the wrong shape \"\n strg += \"(%s instead of %s).\" % (str(data.shape),\n str(detector_shape))\n raise TypeError(strg)", "def update_supergroups_by_id(self, group_ids, kind):\n\n updated = set(map(int, group_ids))\n\n logger = logging.getLogger(__name__)\n debug = logger.debug\n\n debug('updating %s: %r', kind, updated)\n\n groups = self['__store']\n\n # print(kind)\n existing = getattr(self, kind + 's')\n debug('existing %s: %r', kind, updated)\n # print(updated, existing)\n\n if updated != existing:\n\n group_lookup = {\n group.group_id: group.name\n for group in groups\n }\n\n db = groups.db\n\n to_remove = existing - updated\n if to_remove:\n debug('removing %s %r from %r', kind, to_remove, self.name)\n cmd = 'delete from subgroups where subgroup_id=%s and group_id in %s'\n db(cmd, self.group_id, to_remove)\n\n for group_id in to_remove:\n audit(\n 'remove %s' % kind,\n group_lookup.get(\n group_id,\n 'unknown (%s)' % group_id,\n ),\n self.name\n )\n\n to_add = updated - existing\n if to_add:\n debug('adding %s %r to %r', kind, to_add, self.name)\n cmd = 'insert into subgroups (group_id, subgroup_id) values (%s, %s)'\n sequence = zip(to_add, [self.group_id] * len(to_add))\n db.execute_many(cmd, sequence)\n\n for subgroup_id in to_add:\n audit(\n 'add %s' % kind,\n group_lookup.get(\n subgroup_id,\n 'unknown (%s)' % subgroup_id,\n ),\n self.name\n )\n\n else:\n debug('%s unchanged', kind)", "def test_api_v1_groups_id_put(self):\n pass", "def updateAll(data):\n if (data.updatePositions):\n data.groups.player.update(data)\n data.groups.projectiles.update(data)\n data.groups.monsters.update(data)\n data.groups.spawners.update(data)", "def test_update_device_group_by_id(self):\n pass", "def put_group(\n 
group_id: BSONObjectId,\n data: PutGroupIn,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n grp: Group = Group.objects.get(pk=group_id)\n if not (\n tkn.owner == grp.owner or has_clearance(tkn.owner, \"sni.update_group\")\n ):\n raise PermissionError\n logging.debug(\"Updating group %s (%s)\", grp.group_name, group_id)\n if data.add_members is not None:\n grp.members += [\n User.objects.get(character_name=member_name)\n for member_name in set(data.add_members)\n ]\n if data.authorized_to_login is not None:\n assert_has_clearance(tkn.owner, \"sni.set_authorized_to_login\")\n grp.authorized_to_login = data.authorized_to_login\n if data.description is not None:\n grp.description = data.description\n if data.members is not None:\n grp.members = [\n User.objects.get(character_name=member_name)\n for member_name in set(data.members)\n ]\n if data.owner is not None:\n grp.owner = User.objects.get(character_name=data.owner)\n if data.remove_members is not None:\n grp.members = [\n member\n for member in grp.members\n if member.character_name not in data.remove_members\n ]\n grp.members = list(set(grp.members + [grp.owner]))\n grp.save()\n return GetGroupOut.from_record(grp)", "def MutateAdGroupLabels(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_adgroup(self, adgroup_id, name=None, adgroup_status=None,\n bid_type=None, bid_info=None, creative_id=None,\n tracking_specs=None, view_tags=None, objective=None,\n targeting=None, conversion_specs=None,\n batch=False):\n path = \"%s\" % adgroup_id\n args = {}\n if name:\n args['name'] = name\n if bid_type:\n args['bid_type'] = bid_type\n if bid_info:\n args['bid_info'] = json.dumps(bid_info)\n\n if creative_id:\n args['creative'] = json.dumps({'creative_id': creative_id})\n if tracking_specs:\n args['tracking_specs'] = json.dumps(tracking_specs)\n if view_tags:\n args['view_tags'] = json.dumps(view_tags)\n if objective:\n args['objective'] = objective\n if adgroup_status:\n args['adgroup_status'] = adgroup_status\n if targeting:\n args['targeting'] = json.dumps(targeting)\n if conversion_specs:\n args['conversion_specs'] = json.dumps(conversion_specs)\n return self.make_request(path, 'POST', args, batch=batch)", "async def mergegroup(self, ctx, original_group_id: int, duplicate_group_id: int):\n original_group = await ex.get_group(original_group_id)\n duplicate_group = await ex.get_group(duplicate_group_id)\n if not duplicate_group:\n return await ctx.send(f\"> {duplicate_group_id} could not find a Group.\")\n if not original_group:\n return await ctx.send(f\"> {original_group} could not find a Group.\")\n # move aliases\n await ex.conn.execute(\"UPDATE groupmembers.aliases SET objectid = $1 WHERE isgroup = $2 AND objectid = $3\", original_group.id, 1, duplicate_group.id)\n for member_id in duplicate_group.members:\n if member_id not in original_group.members:\n # update the member location to the original group\n await ex.conn.execute(\"UPDATE groupmembers.idoltogroup SET groupid = $1 WHERE idolid = $2 AND groupid = $3\", original_group.id, member_id, duplicate_group.id)\n # delete group\n await ex.conn.execute(\"DELETE FROM groupmembers.groups WHERE groupid = $1\", duplicate_group.id)\n # recreate cache\n await ex.create_idol_cache()\n await ex.create_group_cache()\n await ctx.send(f\"> Merged {duplicate_group_id} to {original_group_id}.\")", "def test_update_resource_group(self):\n pass", 
"def set_pingroups(self, groups):\n self.groups = groups[:]", "def test_update_team_user_group(client):\n group = client.update_team_user_group(TEAM_ID, GROUP_ID, {\n \"name\": \"Updated Python group\",\n \"is_reviewer\": False,\n \"is_admin\": True,\n \"admin_rights\": [\"upload\"]\n })\n assert group.team_id == TEAM_ID\n assert group.group_id == GROUP_ID\n assert group.name == \"Updated Python group\"\n assert group.permissions['is_admin']\n assert not group.permissions['is_reviewer']", "def security_group_update(secgroup=None, auth=None, **kwargs):\n cloud = get_operator_cloud(auth)\n kwargs = _clean_kwargs(keep_name=True, **kwargs)\n return cloud.update_security_group(secgroup, **kwargs)", "def test_partially_update_device_group_by_id(self):\n pass", "def customer_group_customer_put(user_id, group_id):\n db_conn = DB_Conn()\n db = db_conn.db_connect()\n\n user_group_user_details = {}\n user_group_user_details[\"user_id\"] = user_id\n user_group_user_details[\"group_id\"] = group_id\n\n query = \"\"\"\n Update `users_groups`\n SET `group_id` = \\\"%(group_id)s\\\"\n WHERE `user_id` = \\\"%(user_id)s\\\" \n \"\"\" %(user_group_user_details)\n cursor = db.cursor()\n result = {\"success\" : 0, \"message\" : \"Customer's Group is not updated\"}\n try:\n if cursor.execute(query):\n db.commit()\n result = {\"success\" : 0, \"message\" : \"Customer updated Successfully\"}\n except Exception as e:\n result = {\"success\" : 1, \"message\" : \"Customer can not be updated in the Group. Error \\\"\\'%s\\'\\\" \\\n Query \\\"\\'%s\\'\\\" \" % (e, query) }\n finally:\n cursor.close()\n db.close()\n return result", "def update_group(self, bucket_id, group_id, group, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.update_group_with_http_info(bucket_id, group_id, group, **kwargs)\n else:\n (data) = self.update_group_with_http_info(bucket_id, group_id, group, **kwargs)\n return data", "def _set_group_states(\n self, group_states: Sequence[Tuple[RobotGroupConfig, RobotState]]):\n for config, state in group_states:\n if config.qpos_indices is None:\n continue\n if state.qpos is not None:\n self.sim_scene.data.qpos[config.qpos_indices] = state.qpos\n if state.qvel is not None:\n self.sim_scene.data.qvel[config.qvel_indices] = state.qvel\n\n self.sim_scene.sim.forward()", "def update_adcampaign_group(self, campaign_group_id, name=None,\n campaign_group_status=None, objective=None,\n batch=False):\n path = '%s' % campaign_group_id\n args = {}\n if name is not None:\n args['name'] = name\n if campaign_group_status is not None:\n args['campaign_group_status'] = campaign_group_status\n if objective is not None:\n args['objective'] = objective\n return self.make_request(path, 'POST', args, batch=batch)", "def updateMappingSet(self,mappingSetId:str=None,mappingSet:dict=None)->dict:\n if mappingSetId is None:\n raise ValueError(\"Require a mappingSet ID\")\n if mappingSet is None:\n raise ValueError(\"Require a dictionary as mappingSet\")\n path = f\"/mappingSets/{mappingSetId}\"\n res = self.connector.putData(self.endpoint+path,data=mappingSet)\n return res", "def group_identifier(self, group_identifier):\n\n self._group_identifier = group_identifier", "def update(self, preds: Tensor, target: Tensor, groups: Tensor) -> None:\n group_stats = _binary_groups_stat_scores(\n preds, target, groups, self.num_groups, self.threshold, self.ignore_index, self.validate_args\n )\n\n self._update_states(group_stats)", "def 
updateMappingSetMapping(self,mappingSetId:str=None,mappingId:str=None,mapping:dict=None)->dict:\n if mappingSetId is None:\n raise ValueError(\"Require a mappingSet ID\")\n if mappingId is None:\n raise ValueError(\"Require a mapping ID\")\n if mapping is None or type(mapping) != dict:\n raise Exception(\"Require a dictionary as mapping\")\n path = f\"/mappingSets/{mappingSetId}/mappings/{mappingId}\"\n res = self.connector.putData(self.endpoint + path,data=mapping)\n return res", "def groups(self, groups):\n self._groups = groups", "def update(cls, db: Database, record_uuid: str, record: GroupPartial) -> Group:\n existing_group = cls.find_by_uuid(db, record_uuid)\n updated_record = cls.model(**record.dict(), uuid=record_uuid)\n if updated_record.metadata.name != existing_group.metadata.name:\n if GroupManager.find_by_name(db, updated_record.metadata.name):\n raise ValidationError(\n \"Group with name [%s] already exists\" % record.metadata.name)\n cls.validate_group(db, updated_record)\n return super(GroupManager, cls).update(db, record_uuid, record)", "def edit_group_command(self):\n self.switch_frame(\"Edit Group\")\n id = self.parent.get_frame_id(\"Edit Group\")\n self.parent.frames[id].display_group(self.user.active_group)", "def test_update_device_group_by_id1(self):\n pass", "def set_definition(self, definition):\n return self.client._perform_json(\n \"PUT\", \"/admin/groups/%s\" % self.name,\n body = definition)", "def test_map_update_updates(self):\r\n partition = uuid4()\r\n cluster = 1\r\n TestQueryUpdateModel.objects.create(\r\n partition=partition, cluster=cluster,\r\n text_map={\"foo\": '1', \"bar\": '2'})\r\n TestQueryUpdateModel.objects(\r\n partition=partition, cluster=cluster).update(\r\n text_map__update={\"bar\": '3', \"baz\": '4'})\r\n obj = TestQueryUpdateModel.objects.get(partition=partition, cluster=cluster)\r\n self.assertEqual(obj.text_map, {\"foo\": '1', \"bar\": '3', \"baz\": '4'})", "def with_group(self, group):\n\t\tself.variables['group'] = group\n\t\treturn self", "def set_one(self, name_group, key, value):\n self.set(name_group, key, value)\n for item in self.get_all_childname(name_group):\n if item != key:\n self.set(name_group, item, False)", "def merge_groups(loop_ds, group_map, da_name, group_dim='sample_id', group_n_dim='group_n'):\n cell_count = loop_ds.coords[group_n_dim].to_pandas()\n loop_ds[da_name] = loop_ds[da_name] * loop_ds.coords[group_n_dim]\n\n loop_ds['_sample_group'] = group_map\n loop_ds = loop_ds.groupby('_sample_group').sum(dim=group_dim)\n\n sample_group_count = cell_count.groupby(group_map).sum()\n sample_group_count.index.name = '_sample_group'\n loop_ds.coords[group_n_dim] = sample_group_count\n\n loop_ds[da_name] = loop_ds[da_name] / loop_ds[group_n_dim]\n\n loop_ds = loop_ds.rename({\n '_sample_group': group_dim\n })\n return loop_ds" ]
[ "0.668097", "0.66690725", "0.65914", "0.65848315", "0.6473367", "0.6461976", "0.63963073", "0.6341957", "0.63222533", "0.6204804", "0.6149209", "0.6132897", "0.61260456", "0.61017835", "0.6094815", "0.60790116", "0.60371214", "0.60071486", "0.5943335", "0.5897234", "0.58925587", "0.5806522", "0.57663155", "0.5756128", "0.5741594", "0.57364136", "0.57097846", "0.56625116", "0.5629996", "0.5626552", "0.5626546", "0.5618827", "0.56066513", "0.5602854", "0.5598216", "0.5586118", "0.5586118", "0.5586118", "0.5581164", "0.5580017", "0.5579019", "0.55721265", "0.55583835", "0.555462", "0.554754", "0.5524257", "0.55205506", "0.55171484", "0.5514621", "0.55122644", "0.5503809", "0.54915357", "0.5478598", "0.54777986", "0.5474589", "0.5472413", "0.543501", "0.54285884", "0.54285884", "0.54285884", "0.54285884", "0.54285884", "0.54285884", "0.54128706", "0.5410165", "0.53972614", "0.53906834", "0.5360808", "0.5357826", "0.5344242", "0.53432983", "0.5341201", "0.53397715", "0.53397596", "0.53397524", "0.5324648", "0.5312863", "0.52980185", "0.52978486", "0.5295293", "0.52917737", "0.52839774", "0.5265928", "0.52624893", "0.52537143", "0.5240053", "0.5223849", "0.5208488", "0.520638", "0.5200871", "0.5196541", "0.5193052", "0.5189332", "0.5186083", "0.5184461", "0.51825875", "0.5169283", "0.5163364", "0.51590055", "0.5156828" ]
0.59906954
18
Updates the specified network source.
def update_network_source(self, network_source_id, update_network_source_details, **kwargs): resource_path = "/networkSources/{networkSourceId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_network_source got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "networkSourceId": network_source_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_network_source_details, response_type="NetworkSources") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_network_source_details, response_type="NetworkSources")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_sources(self, *args, **kwargs):\n tasks.update_sources()\n return Response({})", "def set_source(self, source):\n self.data['source'] = source", "def update_source(wn, old_source, target, new_source, change_list=None):\n rel_type = find_type(old_source, target)\n delete_rel(old_source, target, change_list)\n insert_rel(new_source, rel_type, target, change_list)\n if rel_type in wordnet.inverse_synset_rels:\n inv_rel_type = wordnet.inverse_synset_rels[rel_type]\n delete_rel(target, old_source, change_list)\n insert_rel(target, inv_rel_type, new_source, change_list)", "def update_target_network(self):\r\n self.send(self.server_conn, (sys._getframe().f_code.co_name, {}))", "def update_target_network(self):\n\n\t\tprint \"Updating Target DQN...\"\n\t\t\n\t\tself.update_operation.run()", "def update(self, src, labels): # real signature unknown; restored from __doc__\n pass", "def fusion_api_edit_network_set(self, body=None, uri=None, api=None, headers=None):\n return self.network_set.update(body, uri, api, headers)", "async def async_set_source(self, source):\n self._source = source\n #self.async_schedule_update_ha_state(True)", "def update_edge_by_source_target(self, _source, _target, source=None, target=None, name=None, data={}):\n return self.make_request(\"PUT\", \"edges?source=%s&target=%s\"%(_source,_target), { \"id\" : name, \"source\" : source, \"target\" : target, \"data\" : data })", "def update_feed_source(request):\n try:\n feed = FeedSource.objects.get(id=request.id)\n feed.status = not feed.status\n feed.save()\n except (ValidationError, FeedSource.DoesNotExist) as e:\n exc = e\n logger(__name__, \"Could not update Feed Source due to {}\".format(str(exc)))\n errors = _get_errors(exc)\n return feeds_pb2.OperationStatus(\n op_status=feeds_pb2.Status.Value('FAILURE'),\n details={'errors': feeds_pb2.RepeatedString(data=errors)},\n )\n return feeds_pb2.OperationStatus(\n op_status=feeds_pb2.Status.Value('SUCCESS'),\n )", "def update_network(self, context, net_id, network):\n LOG.debug(_(\"NeutronRestProxyV2.update_network() called\"))\n\n self._warn_on_state_status(network['network'])\n\n session = context.session\n with session.begin(subtransactions=True):\n new_net = super(NeutronRestProxyV2, self).update_network(\n context, net_id, network)\n self._process_l3_update(context, new_net, network['network'])\n\n # update network on network controller\n self._send_update_network(new_net, context)\n return new_net", "def _set_source(self, source):\n if source != self._source:\n self._source = source\n self._channel = \"\"\n self._channel_name = \"\"\n self._is_forced_val = True\n self._forced_count = 0", "def update(src):", "def update_source(self):\n if self.verbose:\n print(\"Updating source\")\n self.source.data = self.source_data\n if self.source.selected is not None:\n self.source.selected.indices = self.selection\n for c in self.callbacks[\"update_source\"]:\n c()\n self.pending_update = False\n if self.update_buffer is not None:\n self.context.doc.add_next_tick_callback(self.update_buffer)\n self.update_buffer = None", "def update_source(self, id, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.update_source_with_http_info(id, **kwargs)\n else:\n (data) = self.update_source_with_http_info(id, **kwargs)\n return data", "def update_node(self, node, updating_node):\n out_edges = list(self.source_net.edges(node, data=True))\n self.remove_node(node)\n self.source_net.add_node(node, 
attr_dict=self.source_net.nodes[updating_node]['attr_dict'])\n self.source_net.add_edges_from(out_edges)\n\n # Transfer incoming edges\n for u, v, data in self.source_net.in_edges(updating_node, data=True):\n self.source_net.add_edge(u, node, **data)\n\n self.remove_node(updating_node)", "def set_source(self, source_name):\n self.source = source_name", "def update_network(self, context, net_id, network):\n\n LOG.debug(_(\"QuantumRestProxyV2.update_network() called\"))\n\n # Validate Args\n if network[\"network\"].get(\"admin_state_up\"):\n if network[\"network\"][\"admin_state_up\"] is False:\n LOG.warning(_(\"Network with admin_state_up=False are not yet \"\n \"supported by this plugin. Ignoring setting for \"\n \"network %s\", net_name))\n\n # update DB\n orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id)\n tenant_id = orig_net[\"tenant_id\"]\n new_net = super(QuantumRestProxyV2, self).update_network(\n context, net_id, network)\n\n # update network on network controller\n if new_net[\"name\"] != orig_net[\"name\"]:\n try:\n resource = NETWORKS_PATH % (tenant_id, net_id)\n data = {\n \"network\": new_net,\n }\n ret = self.servers.put(resource, data)\n if not self.servers.action_success(ret):\n raise RemoteRestError(ret[2])\n except RemoteRestError as e:\n LOG.error(_(\"QuantumRestProxyV2: Unable to update remote \"\n \"network: %s\"), e.message)\n # reset network to original state\n super(QuantumRestProxyV2, self).update_network(\n context, id, orig_net)\n raise\n\n # return updated network\n return new_net", "def update_target_network(self):\n self.target_dqn.set_weights.remote(self.dqn.get_weights.remote())", "def update(self, ex):\r\n if not self.optimizer:\r\n raise RuntimeError('No optimizer set.')\r\n\r\n # Train mode\r\n self.network.train()\r\n\r\n source_ids = ex['source_ids']\r\n source_pos_ids = ex['source_pos_ids']\r\n source_type_ids = ex['source_type_ids']\r\n source_mask = ex['source_mask']\r\n label = ex['label']\r\n\r\n if self.use_cuda:\r\n label = label.cuda(non_blocking=True)\r\n source_ids = source_ids.cuda(non_blocking=True)\r\n source_pos_ids = source_pos_ids.cuda(non_blocking=True) \\\r\n if source_pos_ids is not None else None\r\n source_type_ids = source_type_ids.cuda(non_blocking=True) \\\r\n if source_type_ids is not None else None\r\n source_mask = source_mask.cuda(non_blocking=True) \\\r\n if source_mask is not None else None\r\n\r\n # Run forward\r\n score = self.network(source_ids=source_ids,\r\n source_pos_ids=source_pos_ids,\r\n source_type_ids=source_type_ids,\r\n source_mask=source_mask)\r\n\r\n # Compute loss and accuracies\r\n loss = self.criterion(score, label)\r\n\r\n if self.args.gradient_accumulation_steps > 1:\r\n loss = loss / self.args.gradient_accumulation_steps\r\n\r\n if self.args.fp16:\r\n with amp.scale_loss(loss, self.optimizer) as scaled_loss:\r\n scaled_loss.backward()\r\n else:\r\n loss.backward()\r\n\r\n if (self.updates + 1) % self.args.gradient_accumulation_steps == 0:\r\n if self.args.fp16:\r\n torch.nn.utils.clip_grad_norm_(amp.master_params(self.optimizer), self.args.grad_clipping)\r\n else:\r\n torch.nn.utils.clip_grad_norm_(self.network.parameters(), self.args.grad_clipping)\r\n\r\n self.optimizer.step()\r\n self.scheduler.step() # Update learning rate schedule\r\n self.optimizer.zero_grad()\r\n\r\n self.updates += 1\r\n\r\n return loss.item()", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = 
source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def update_target_q_network(self):\n assert self.target_network != None\n self.target_network.run_copy()", "def update_target_net(self, sess):\n sess.run(self.update_target_net_op)", "def update_target_network(self) -> NoReturn:\n self.target.load_state_dict(self.model.state_dict())", "def set_flow_source(self, source):\n self._source = source", "def update_network_profile(self, profile, body=None):\r\n return self.put(self.network_profile_path % (profile), body=body)", "def source(self, source: Source):\n self._source = source", "def update_edge(self, _id, source=None, target=None, name=None, data={}):\n return self.make_request(\"PUT\", \"nodes/\"+_id, { \"id\" : name, \"source\" : source, \"target\" : target, \"data\" : data })", "def setAddressSource(self, address_source):\n # type: (str)->None\n\n self._validator.validate_one(\n 'source', VALID_OPTS['source'], address_source)\n self._ifAttributes['source'] = address_source", "def fusion_api_edit_fc_network(self, body, uri, api=None, headers=None):\n return self.fc_network.update(body, uri, api, headers)", "def update(self):\n with enforce_unique_instance('repository', blocking=True):\n for dist, component in self.pending:\n self.update_sources(dist, component)", "def update_packages(self, packages: Packages, source=\"conda\") -> None:\n self[source] = self.get(source, {})\n self._update_packages(self[source], packages)", "def update_all_sources(source_data_filename, host_filename):\n\n # The transforms we support\n transform_methods = {\"jsonarray\": jsonarray}\n\n all_sources = sort_sources(recursive_glob(\"*\", source_data_filename))\n\n for source in all_sources:\n update_file = open(source, \"r\", encoding=\"UTF-8\")\n update_data = json.load(update_file)\n update_file.close()\n\n # we can pause updating any given hosts source.\n # if the update.json \"pause\" key is missing, don't pause.\n if update_data.get('pause', False):\n continue\n\n update_url = update_data[\"url\"]\n update_transforms = []\n if update_data.get(\"transforms\"):\n update_transforms = update_data[\"transforms\"]\n\n print(\"Updating source \" + os.path.dirname(source) + \" from \" + update_url)\n\n try:\n updated_file = get_file_by_url(update_url)\n\n # spin the transforms as required\n for transform in update_transforms:\n updated_file = transform_methods[transform](updated_file)\n\n # get rid of carriage-return symbols\n updated_file = updated_file.replace(\"\\r\", \"\")\n\n hosts_file = open(\n path_join_robust(BASEDIR_PATH, os.path.dirname(source), host_filename),\n \"wb\",\n )\n write_data(hosts_file, updated_file)\n hosts_file.close()\n except Exception:\n print(\"Error in updating source: \", update_url)", "def update():\n\n # Get last new x value as last x value + 1\n x_n0 = data_source.data['x'][-1]\n x_n1 = x_n0 + 0.1\n\n # Assign a new y value\n y_n1 = param_source.data['amp_sine'][0] * np.sin(x_n1) +\\\n param_source.data['amp_rand'][0] * np.random.rand(1)\n\n # Get old last average and use to calculate new average\n avg_n1 = _get_new_avg(data_source,\n y_n1,\n param_source.data['rollover'][0])\n\n # Make a dict of data to add on to the end of the source\n additional_data = dict(x=[x_n1], y=[y_n1], avg=[avg_n1])\n\n # Stream the new data with a rollover value of 10\n data_source.stream(additional_data,\n 
rollover=param_source.data['rollover'][0])\n\n # logger.debug(param_source.data['update_delay'][0])", "def update_source_with_http_info(self, id, **kwargs):\n\n all_params = ['id', 'body']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method update_source\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'id' is set\n if ('id' not in params) or (params['id'] is None):\n raise ValueError(\"Missing the required parameter `id` when calling `update_source`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'id' in params:\n path_params['id'] = params['id']\n\n query_params = []\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['Using HTTP Header', 'Using URL Query Parameter']\n\n return self.api_client.call_api('/sources/{id}', 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type=None,\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def command_update(args):\n\tparser = argparse.ArgumentParser(\n\t\tprog=\"inquisitor update\",\n\t\tdescription=command_update.__doc__,\n\t\tadd_help=False)\n\tparser.add_argument(\"source\",\n\t\tnargs=\"*\",\n\t\thelp=\"Sources to update.\")\n\targs = parser.parse_args(args)\n\n\tif len(args.source) == 0:\n\t\tparser.print_help()\n\t\treturn 0\n\tif not os.path.isdir(DUNGEON_PATH):\n\t\tlogger.error(\"Couldn't find dungeon. Set INQUISITOR_DUNGEON or cd to parent folder of ./dungeon\")\n\t\treturn -1\n\tif not os.path.isdir(SOURCES_PATH):\n\t\tlogger.error(\"Couldn't find sources. 
Set INQUISITOR_SOURCES or cd to parent folder of ./sources\")\n\n\t# Update sources\n\tfrom inquisitor.sources import update_sources\n\tupdate_sources(*args.source)\n\treturn 0", "def update_source_sense(wn, old_source, target, new_source, change_list=None):\n rel_type = find_sense_type(wn, old_source, target)\n delete_sense_rel(wn, old_source, target, change_list)\n insert_sense_rel(wn, new_source, rel_type, target, change_list)\n if rel_type in inverse_sense_rels:\n inv_rel_type = inverse_sense_rels[rel_type]\n delete_sense_rel(wn, target, old_source, change_list)\n insert_sense_rel(wn, target, inv_rel_type, new_source, change_list)", "def update_sources(self, session: Session) -> None:\n\n # extract results from response:\n response = self.get_response()\n results = response.get(\"results\")\n\n if results is None:\n return None\n\n for dictionary in results:\n # transform raw result into object:\n result = self.dict2result(dictionary)\n # get source, add if new:\n name = self.result2name(result)\n source = session._get_source(name)\n if source is None:\n source = _Source(name)\n session._add_source(source)\n # update source's score vector:\n source.levenshtein_vector.update_score(self.concept, result)", "def _set_source(source, context):\n if isinstance(source, (str, list, dict, Dataset)):\n return Source(source, context)\n elif isinstance(source, Source):\n return source\n else:\n raise ValueError('Wrong source')", "def updateNetwork(self, session: Session, network: Network) -> Network:\n try:\n return NetworkManager().updateNetwork(session, network)\n except TortugaException as ex:\n raise\n except Exception as ex:\n self._logger.exception(str(ex))\n raise TortugaException(exception=ex)", "def _add_source_net_filter(self, rule_list, source_net):\n for rule in rule_list:\n if (\"source\" in rule.keys()):\n if (\"nets\" in rule[\"source\"].keys()):\n rule[\"source\"][\"nets\"].append(source_net)\n else:\n rule[\"source\"].update({\"nets\": [source_net]})\n else:\n rule.update({\"source\": {\"nets\": [source_net]}})", "def source_id(self, source_id):\n\n self._source_id = source_id", "def source_id(self, source_id):\n\n self._source_id = source_id", "def update_spx_source(src=\"\"):\n\n # delete old spx_prj source\n p_spxsrc = p(spx_src_dir)\n if p_spxsrc.is_dir():\n shutil.rmtree(p_spxsrc)\n p_spxsrc.mkdir()\n\n # prepare new spx_prj source dir\n p_spxsrc_org = p(str(p_spxsrc) + \"_org\")\n list_src = p_spxsrc_org.glob(\"**/*\")\n _listprint(list_src)\n du.copy_tree(str(p_spxsrc_org), str(p_spxsrc))\n\n # copy doc source\n list_src = src.glob(\"**/*\")\n _listprint(list_src)\n\n du.copy_tree(str(src), str(p_spxsrc))", "def source(self, source: str):\n if source is None:\n raise ValueError(\"Invalid value for `source`, must not be `None`\") # noqa: E501\n\n self._source = source", "def update_source():\n\n require('environment', provided_by=env.environments)\n with cd(env.code_root):\n sudo('git pull', user=env.deploy_user)\n sudo('git checkout %(branch)s' % env, user=env.deploy_user)", "def update_network_profile(arn=None, name=None, description=None, type=None, uplinkBandwidthBits=None, downlinkBandwidthBits=None, uplinkDelayMs=None, downlinkDelayMs=None, uplinkJitterMs=None, downlinkJitterMs=None, uplinkLossPercent=None, downlinkLossPercent=None):\n pass", "def update_target_network(self):\n self.target.set_weights(self.policy.get_weights()) # Update weights of target network with weights of policy network", "def fusion_api_edit_ethernet_network(self, body, uri, api=None, 
headers=None):\n return self.ethernet_network.update(body, uri, api, headers)", "def update_model(source: str, supported_model_name: str):\n if not request.data:\n abort(400, 'Request is missing the model (json) data')\n\n connector = __get_connector(source)\n supported_model = __get_supported_model(supported_model_name)\n\n try:\n model_data = json.loads(request.data)\n return connector.update(supported_model, model_data)\n except Exception as e:\n abort(500, e)", "def update_source(self):\n cwd = None\n if os.path.exists(self.path):\n cwd = self.path\n cmd = 'git fetch && git reset --hard origin/master'\n else:\n cmd = 'git clone %s %s' % (self.repo_url, self.path)\n Command(cmd, cwd=cwd)", "def input(self, source) -> None:\n if source is self._source:\n return\n self._source = source\n if self._socket is not None:\n self._output.input = source", "def update_net(self) -> None:\n self.units.update_net()", "def update_target_network(self):\n variables = self.online_network.trainable_variables\n variables_copy = [tf.Variable(v) for v in variables]\n self.target_network.trainable_variables = variables_copy", "def update_terraform_source(new_source=None, new_source_path=None):\n if new_source:\n material = update_terraform_source_material(new_source)\n else:\n # If the plan operation passes NOone, then this would error.\n material = get_terraform_source_material()\n node_instance_dir = get_node_instance_dir(source_path=new_source_path)\n module_root = get_storage_path()\n ctx.logger.debug('The storage root tree:\\n{}'.format(tree(module_root)))\n if material:\n extract_binary_tf_data(module_root, material, new_source_path)\n else:\n if isinstance(new_source, str) and os.path.isdir(new_source):\n if new_source_path:\n source_tmp_path = os.path.join(new_source, new_source_path)\n else:\n source_tmp_path = new_source\n copy_directory(source_tmp_path, node_instance_dir)\n elif isinstance(new_source, dict) and os.path.isdir(\n new_source.get('location')):\n if new_source_path:\n source_tmp_path = os.path.join(\n new_source.get('location'), new_source_path)\n else:\n source_tmp_path = new_source.get('location')\n copy_directory(source_tmp_path, node_instance_dir)\n return node_instance_dir", "def put(self, id):\n context = request.environ.get('context')\n net_obj = dbapi.networks_update(context, id, request.json)\n return jsonutils.to_primitive(net_obj), 200, None", "def setSources(self, xsrc, zsrc):\n xsrc = ascontiguousarray(xsrc, float64)\n zsrc = ascontiguousarray(zsrc, float64)\n nsrc = len(xsrc)\n if (len(xsrc) != len(zsrc)):\n print(\"Inconsistent array lengths\")\n xsrcPointer = xsrc.ctypes.data_as(POINTER(c_double))\n zsrcPointer = zsrc.ctypes.data_as(POINTER(c_double))\n ierr = c_int(1)\n self.fteik2d.fteik_solver2d_setSources64f(nsrc,\n zsrcPointer, xsrcPointer,\n ierr)\n if (ierr.value != 0):\n print(\"Error setting sources\")\n return -1\n self.nsrc = nsrc\n return 0", "def hard_update(self,target, source):\n\t\tfor target_param, param in zip(target.parameters(), source.parameters()):\n\t\t\t\ttarget_param.data.copy_(param.data)", "def update(self, source):\n temp = self.newVersion()\n try:\n\n # Find the most recent stamp, for a starting point\n latestStamp = self.getLatestStamp()\n if latestStamp > 0:\n # We have an existing stamp to copy\n shutil.copyfile(self.getFile(latestStamp), temp)\n else:\n # Start a blank RRD\n self.rrdInit(temp)\n assert os.path.isfile(temp)\n\n # Look for new samples to insert after our current latest.\n for stamp, time, value in source(latestStamp):\n if 
stamp <= latestStamp:\n continue\n try:\n rrd(\"update\", temp, \"%d:%s\" % (int(time), value))\n except RRDException:\n # Ignore errors caused by two updates within a second\n if str(sys.exc_info()[1]).find(\"illegal attempt to update\") < 0:\n raise\n\n latestStamp = stamp\n\n except:\n self.rollbackVersion(temp)\n raise\n\n # Commit the updated RRD\n self.commitVersion(temp, latestStamp)\n\n # Clean old versions if that was successful\n self.cleanStamps(lambda s: s < latestStamp)\n return latestStamp", "def update_config(self, config, priority, source):\n for key, value in config.items():\n self._config[key].add(value, priority, source)", "def source_domain(self, source_domain):\n\n self._source_domain = source_domain", "def _update_target_net(self):\n self.target_net.load_state_dict(self.policy_net.state_dict())\n self.target_net.eval()", "def update_policy_network(self):\r\n self.send(self.server_conn, (sys._getframe().f_code.co_name, {}))", "def AddSource(self, source):\n self._sources.append(source)", "def mutate(self, info, input):\n # Convert input to dictionary\n data = api_utils.input_to_dictionary(input)\n data_source = Operation('ModelDataSource').update(**data)\n return UpdateDataSource(data_source=data_source)", "def update_copy(self, source, dest):\n relsource = os.path.relpath(source, os.path.realpath(self.dirname))\n for copy in self.runscript.copies:\n if copy[1] == dest:\n copy[0] = relsource\n break\n else:\n self.runscript.add_copy(relsource, dest)", "def hard_update(target, source):\n for target_param, param in zip(target.parameters(), source.parameters()):\n target_param.data.copy_(param.data)", "def ModifyNetwork(self, network, reason=None, **kwargs):\n query = []\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_PUT,\n (\"/%s/networks/%s/modify\" %\n (GANETI_RAPI_VERSION, network)), None, kwargs)", "def source_id(self, source_id: str):\n\n self._source_id = source_id", "def hard_update(source_net, target_net):\n for target_param, param in zip(target_net.parameters(), source_net.parameters()):\n target_param.data.copy_(param.data)", "def source_instance(self, source_instance):\n self._source_instance = source_instance", "def _add_source(self, source: _Source) -> None:\n\n self._sources.append(source)", "def install_or_update_source():\n new_install = install_source()\n if not new_install:\n update_source()", "def update_network(**kwargs):\n\n ip_addr = kwargs.get('ip_addr')\n is_private = kwargs.get('is_private')\n name = kwargs.get('name')\n dns_names = kwargs.get('dns_names')\n is_scanning = kwargs.get('is_scanning', False)\n network_id = make_shortuuid(name)\n\n network = {\n 'dns_names': dns_names,\n 'ip_addr': ip_addr,\n 'is_private' : is_private,\n 'name': name,\n 'id': network_id,\n 'is_scanning': is_scanning,\n 'updated_count': 0\n\n }\n\n network_exists = r.table(\"networks\").insert([network], conflict=\"update\")\n\n return network_exists.run(conn)", "def add_source_address(self, srcAddr):\n self.source.address = srcAddr", "def add_source_address(self, srcAddr):\n self.source.address = srcAddr", "def update_source_range(self):\r\n self.source_range_index = self.SourceRangeValue.currentIndex()\r\n self.cmd = None\r\n if self.source_range_type_index and self.connected:\r\n self.cmd = self.source_range_switch.get(\r\n self.source_range_index, None)\r\n self.I_source.write(self.cmd)", "def instruction_set(self, register, source):\n if Vm.is_register(source):\n source = self.get_register(source)\n\n self.set_register(register, source)", "def 
set_node(self, name, state):\n self.source_net.nodes[name] = state", "def _assign(self, source):\n if self._parent:\n oldZincRegion = self._zincRegion\n zincSiblingAfter = oldZincRegion.getNextSibling()\n else:\n oldZincRegion = None\n zincSiblingAfter = None\n self.freeContents()\n self._name = source._name\n # self._parent = source._parent should not be changed\n self._children = source._children\n for child in self._children:\n child._parent = self\n self._modelSources = source._modelSources\n self._zincRegion = source._zincRegion\n # self._ancestorModelSourceCreated is unchanged\n if self._parent:\n self._parent._zincRegion.removeChild(oldZincRegion)\n self._parent._zincRegion.insertChildBefore(self._zincRegion, zincSiblingAfter)", "def UpdateNetworkID(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def connect_merge(self, source_name: Optional[str]):\n self.source_name = source_name", "def set_source(self, source):\n self.qbpm = self.sources[source]\n self.title = self.qbpm.address\n self.setWindowTitle(self.title)", "def _update_dnsmasq(self, network_id):\n\n # Check whether we should really do the following processing.\n if self.suppress_dnsmasq_updates:\n LOG.debug(\"Don't update dnsmasq yet;\"\n \" must be processing a snapshot\")\n self.dirty_networks.add(network_id)\n return\n\n self.dnsmasq_updater.update_network(network_id)", "def set_data_source(self, source_id):\n self.data_source = source_id", "def set_url(self, source_url):\n if utils.validate_url(source_url, \"rtsp\"):\n self.url = source_url\n self.set_state_null()\n self.setup_pipeline()\n self.play()\n else:\n print(\"Invalid URL\")", "def update_node(self, old_node: 'GraphNode', new_node: 'GraphNode'):\n\n self.operator.update_node(old_node, new_node)", "def update_target_network(self):\n self.target_Qmodel = clone_model(self.Qmodel)\n self.target_Qmodel.set_weights(self.Qmodel.get_weights())\n\n # target network is never compiled\n self.target_Qmodel.compile(loss='mse', optimizer=Adam())", "def update_source():\n from .project import sudo_project, git_repository_path, git_repository\n\n with sudo_project():\n # Get current commit\n path = git_repository_path()\n previous_commit = git.get_commit(path, short=True)\n\n # Update source from git (reset)\n repository = git_repository()\n current_commit = git.reset(repository['branch'],\n repository_path=path,\n ignore=blueprint.get('git_force_ignore'))\n\n if current_commit is not None and current_commit != previous_commit:\n info(indent('(new version)'))\n else:\n info(indent('(same commit)'))\n\n return previous_commit, current_commit", "def remotes_update(flox: Flox):\n\n for source in flox.remotes.all().keys():\n fetch_remote(flox, source)\n success(f\"Updated: {source}\")\n\n success_box(f\"Remote sources updated\")", "def hard_update_target_network(self,step):\n \n if step % self.C == 0:\n pars = self.model.get_weights()\n self.target_model.set_weights(pars)", "def hard_update_target_network(self,step):\n \n if step % self.C == 0:\n pars = self.model.get_weights()\n self.target_model.set_weights(pars)", "def add_source(self, group_source):\n if group_source.name in self._sources:\n raise ValueError(\"GroupSource '%s': name collision\" % \\\n group_source.name)\n self._sources[group_source.name] = group_source", "def delete_network_source(self, network_source_id, **kwargs):\n resource_path = \"/networkSources/{networkSourceId}\"\n 
method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_network_source got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"networkSourceId\": network_source_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)" ]
[ "0.6329532", "0.612555", "0.60807055", "0.6027774", "0.58959955", "0.5872399", "0.5833497", "0.5821681", "0.58040607", "0.5792299", "0.57776225", "0.5715011", "0.5686939", "0.5681682", "0.5670427", "0.5601962", "0.5567387", "0.5534381", "0.5495406", "0.5481051", "0.54780453", "0.54780453", "0.54780453", "0.54780453", "0.54780453", "0.54780453", "0.54780453", "0.5462909", "0.54583734", "0.54476845", "0.5430992", "0.5415751", "0.5408836", "0.5383678", "0.53641355", "0.53626627", "0.53334373", "0.52998793", "0.5290756", "0.5289886", "0.52697927", "0.5263317", "0.5227461", "0.52187735", "0.52138424", "0.5203177", "0.51885414", "0.51864487", "0.51864487", "0.51789486", "0.5174983", "0.5174344", "0.51725215", "0.51706463", "0.5165506", "0.5159028", "0.5154085", "0.5145338", "0.51395947", "0.51387864", "0.5126235", "0.51244694", "0.5123586", "0.51136494", "0.5110097", "0.50882566", "0.5084765", "0.5084255", "0.50620997", "0.50544804", "0.5052541", "0.50449914", "0.50449264", "0.5040523", "0.5036385", "0.50344944", "0.50307083", "0.5023002", "0.501105", "0.5008832", "0.49987432", "0.49987432", "0.49943623", "0.49915162", "0.49775907", "0.49733493", "0.4957991", "0.49571398", "0.4953182", "0.4951511", "0.4941929", "0.49216056", "0.49204278", "0.48996294", "0.4899039", "0.4897914", "0.48953512", "0.48953512", "0.4886437", "0.48845372" ]
0.7314297
0
Updates OAuth token for the user
def update_o_auth_client_credential(self, user_id, oauth2_client_credential_id, update_o_auth2_client_credential_details, **kwargs): resource_path = "/users/{userId}/oauth2ClientCredentials/{oauth2ClientCredentialId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_o_auth_client_credential got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "oauth2ClientCredentialId": oauth2_client_credential_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_o_auth2_client_credential_details, response_type="OAuth2ClientCredential") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_o_auth2_client_credential_details, response_type="OAuth2ClientCredential")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_access_token(self):\n self.token = util.prompt_for_user_token(self._username, scope,\n client_id=const.CLIENT_ID,\n client_secret=const.CLIENT_SECRET,\n redirect_uri=const.REDIRECT_URL)\n self._client = spotipy.Spotify(auth=self.token)", "def update_user_token(session, new_token, user):\n token = find_token_by_user_id(session, user.id)\n\n if token is None:\n new_token.user_id = user.id\n session.add(new_token)\n return new_token\n\n token.access_token = new_token.access_token\n token.refresh_token = new_token.refresh_token\n token.expires_in = new_token.expires_in\n\n return token", "def _update_by_auth(self, username, password, token_path, grant_type, client_id):\n token = requests.post(\n self.api_url + self.oauthpath,\n params={\n \"username\": username,\n \"grant_type\": grant_type,\n \"client_id\": client_id\n },\n data={\n \"password\": password\n }).json()\n if \"access_token\" in token:\n self.username = username\n self.token = token[\"access_token\"]\n else:\n raise KeyError(\"Authentication failed\", token)", "def update_token(self, token_response):\n self.access_token = token_response['access_token']\n self.access_token_expires = datetime.fromtimestamp(\n time.time() + token_response['expires_in'],\n )\n if 'refresh_token' in token_response:\n self.refresh_token = token_response['refresh_token']", "def _update_token(token):\n session.token = token", "def refresh_credentials():\n global auth_token\n auth_token = get_oauth_token()", "def refresh_auth_token(self):\r\n \r\n # For some reason, the auth token in the root path only works if you're \r\n # unauthenticated. To get around that, we check if this is an authed\r\n # session and, if so, get the token from the profile page.\r\n \r\n if self.is_authed:\r\n req = self.session.get(f\"https://archiveofourown.org/users/{self.username}\")\r\n else:\r\n req = self.session.get(\"https://archiveofourown.org\")\r\n \r\n if req.status_code == 429:\r\n raise utils.HTTPError(\"We are being rate-limited. 
Try again in a while or reduce the number of requests\")\r\n \r\n soup = BeautifulSoup(req.content, \"lxml\")\r\n token = soup.find(\"input\", {\"name\": \"authenticity_token\"})\r\n if token is None:\r\n raise utils.UnexpectedResponseError(\"Couldn't refresh token\")\r\n self.authenticity_token = token.attrs[\"value\"]", "def _refresh_access_token(self) -> None:\n response = httpx.post(\n f\"{self._base_url}/oauth2/token\",\n proxies=self._proxies,\n data={\n \"grant_type\": \"client_credentials\",\n \"client_id\": self._api_key,\n \"client_secret\": self._api_secret,\n },\n )\n response.raise_for_status()\n token = response.json()[\"access_token\"]\n c = httpx.Client()\n c.close()\n self._authorization_headers = {\"Authorization\": f\"Bearer {token}\"}", "def put(self, authorized_username):\n data = request.get_json()\n is_updated = actions.update_user_password(authorized_username, data['password'])\n if is_updated:\n token = actions.create_token(authorized_username, data['password'])\n token = token.decode('utf-8')\n return{'token': token}, 200\n else:\n abort(404)\n pass", "def refresh_auth_token(self):\n self._auth_token = self.generate_auth_token()", "def update(self):\n token = request_token(self.client_id, self.client_secret)\n self.request_time = datetime.now()\n self._initialized = True\n self.token = token", "def refresh(self):\n self._request_token(grant_type='client_credentials')", "def refresh(self):\n self._request_token(grant_type='password', username=self._username,\n password=self._password)", "def refresh_access_token(self):\n self._access_token = self.generate_access_token()", "async def token(self, ctx):\n logger.info(\"token command issued by {0}\".format(ctx.message.author.name))\n await ctx.message.delete()\n spotify_token = spotipy.util.prompt_for_user_token_auto(self.spotify_username, self.spotify_scope, self.spotify_id, self.spotify_secret)\n self.spotify_client = spotipy.Spotify(auth=spotify_token)\n await ctx.send(\"Spotify refresh token updated\")", "def refresh_token(self):\n url = 'https://www.yikyak.com/api/auth/token/refresh'\n token = self._request('POST', url)\n self.session.headers.update({'x-access-token': token})", "def update_user():", "def _update_token(self, request):\n\n # Refresh our source credentials.\n self._source_credentials.refresh(request)\n\n body = {\n \"delegates\": self._delegates,\n \"scope\": self._target_scopes,\n \"lifetime\": str(self._lifetime) + \"s\"\n }\n\n headers = {\n 'Content-Type': 'application/json',\n }\n\n # Apply the source credentials authentication info.\n self._source_credentials.apply(headers)\n\n self.token, self.expiry = _make_iam_token_request(\n request=request,\n principal=self._target_principal,\n headers=headers,\n body=body)", "def test_update_user(self):\n token = self.authenticate_user(self.auth_user_data).data[\"token\"]\n response = self.client.put(self.user_url,\n self.user_data,\n HTTP_AUTHORIZATION=f'token {token}',\n format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def post(self):\n current_user_id = get_jwt_identity()\n new_token = create_access_token(identity=current_user_id)\n response, status = {\n 'message': 'Access token was successfully refreshed',\n 'access_token': new_token\n }, 200\n return Response(dumps(response), status=status, mimetype='application/json')", "def token_updater(token):\n try:\n with open(self.OAUTH_TOKEN_PATH, 'w') as f:\n json.dump(token, f)\n except Exception as err:\n log.Error('Could not save the OAuth2 token to %s. 
This means '\n 'you may need to do the OAuth2 authorization '\n 'process again soon. Original error: %s' % (\n self.OAUTH_TOKEN_PATH, err))", "def refresh():\n current_user = get_jwt_identity()\n ret = {\n 'access_token': create_access_token(identity=current_user)\n }\n return jsonify(ret), 200", "def refresh_token(self):\n token = json.loads(get_metadata(\n 'instance/service-accounts/%s/token' % self.service_account,\n ))\n seconds = token['expires_in'] - 60\n self._expiration_time = (\n datetime.datetime.now() + datetime.timedelta(seconds=seconds)\n )\n self._token = token['access_token']", "def _refresh_access_token(self):\n url = self._get_url(subpath=\"auth\", route=\"refresh\")\n refresh_token = get_refresh_token()\n payload = {\"refresh_token\": refresh_token}\n response = self.session.post(url, json=payload)\n response.raise_for_status()\n access_token = response.json()[\"access_token\"]\n set_process_execution_user_token(access_token)\n self.session.headers[\"authorization\"] = f\"Bearer {access_token}\"", "def refreshAccessToken(self, token):\r\n header = {'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}\r\n url = self._config['OAUTH2ENDPOINT']['huddleAccessTokenServer']\r\n\r\n body = {\"grant_type\": \"refresh_token\",\r\n \"client_id\": self._config['OAUTH2']['clientID'],\r\n \"refresh_token\": token.getRefreshToken()\r\n }\r\n\r\n return self._adapter.postRequest(url, header, parse.urlencode(body))", "def renew_token(cls, token_obj: \"AuthToken\") -> None:\n token_obj.renew_token(renewed_by=cls)", "def get(self):\n\n user = context_property.request_user\n Log.info(\"Refresh access token for %i\" % user.id)\n\n return {\n \"accessToken\" : create_access_token(user.id)\n }, 200", "def get_access_token(self):\n\n token_work = time.time() < self.expires\n\n if token_work:\n # No need update token\n return self.access_token\n\n data = {\n 'client_id': self.client_id,\n 'grant_type': 'implicit'\n }\n\n response = requests.post('https://api.moltin.com/oauth/access_token', data=data)\n raise_response_errors(response)\n\n response_json = response.json()\n\n self.access_token = response_json['access_token']\n self.expires = response_json['expires']\n\n logger.debug('elasticpathh access token was updated')\n\n return self.access_token", "def refresh():\n print(\"refresh request\")\n old_token = request.get_data()\n new_token = guard.refresh_jwt_token(old_token)\n ret = {'access_token': new_token}\n return ret, 200", "def test_patch_o_auth_access_token(self):\n pass", "def update_token(token, user, item=None):\n timestamp = time.time()\n # Make sure the mapping from login to user is available\n conn.hset('login:', token, user)\n # Record time when token was last seen\n conn.zadd('recent:', token, timestamp)\n\n if item:\n # Note that a user viewed an item and when\n conn.zadd('viewed:' + token, item, timestamp)\n # Only keep 25 views\n conn.zremrangebyrank('viewed:' + token, 0, -26)\n # Decrease view count for item by one giving us most viewed with lowest score\n conn.zincrby('viewed:', item, -1)", "def put(self, session: Session = None) -> Response:\n token = generate_token(username=current_user.name, session=session)\n return jsonify({'token': token})", "def put(self, authorized_username):\n data = request.get_json()\n is_updated = actions.update_user_username(authorized_username, data['username'])\n if is_updated:\n token = actions.create_token(data['username'], actions.get_user_by_username(data['username'])['password'])\n token = 
token.decode('utf-8')\n return{'token': token}, 200\n else:\n abort(404, message='Username already exists')\n pass", "def refresh_token(self, path='/oauth/token', data={}):\n if data.keys():\n data.update(self.data)\n else:\n data = self.data.copy()\n data.update({\n 'grant_type': 'refresh_token',\n 'refresh_token': self.token.get('refresh_token')\n })\n try:\n self.token.update(requests.post(url='%s%s' % (self.url, path), data=data).json())\n except:\n raise 'unknown issue'\n return self.token", "def renew_access_token(self):\n self._access_token = self._get_access_token()", "def login_token(self, token):\n self.token = token # this will also set the refresh_token to None", "def refresh_token():\n current_user = get_jwt_identity()\n if current_user is None:\n return abort(401)\n response = deepcopy(AUTH_OKAY)\n response['payload']['access_token'] = create_access_token(\n identity=current_user,\n expires_delta=EXPIRY_DURATION\n )\n response['payload']['expires_in'] = EXPIRY_DURATION.seconds\n response['payload']['not_before'] = int(time() + EXPIRY_DURATION.seconds)\n return jsonify(response['payload']), response['status_code']", "def extend(self):\n graph = GraphAPI()\n\n response = graph.get('oauth/access_token',\n client_id=FACEBOOK_APPLICATION_ID,\n client_secret=FACEBOOK_APPLICATION_SECRET_KEY,\n grant_type='fb_exchange_token',\n fb_exchange_token=self.token\n )\n\n components = parse_qs(response)\n\n self.token = components['access_token'][0]\n self.expires_at = datetime.now() + \\\n timedelta(seconds=int(components['expires'][0]))\n\n self.save()", "def update_user(self):\n self.client.force_authenticate(user=self.user)\n self.response = self.client.patch(\n reverse(\n 'edit_account',kwargs={ 'pk': self.user.id}),\n self.updated_data, format='json'\n )\n self.user = CustomUser.objects.get(username=self.user.username)", "def set_access_token(self, url: str = None):\n if isinstance(url, type(None)):\n url = self.base_url + self.token_plus_refresh_url\n\n data = self.generate_user_headers()\n response = requests.request(\n \"POST\", url, headers=self.generate_generic_headers(), json=data)\n\n self._token_time = dt.datetime.now()\n\n if response.status_code == 200:\n self.logger.info(msg='Access to FlexNow done. 
Token created')\n else:\n self.logger.warning(msg=f'Something went wrong when trying to create the token: \"{response.json()}\".')\n raise (Exception(f'Something went wrong when trying to refresh the token: \"{response.json()}\".'))\n\n self._access_token = json.loads(response.text).get(\"accessToken\")\n self._refresh_token = json.loads(response.text).get(\"refreshToken\")", "async def update_app_oauth(self, app_id: str, data: dict) -> dict:\r\n return await self.put(API_APP_OAUTH.format(app_id=app_id), data)", "def refresh_callback(token):\n print('CALLBACK: The token has been updated since last run')\n with open(USER_DETAILS_FILE, 'w') as f:\n json.dump(token, f)\n print('Successfully written update refresh token')", "def put(self, url, user, data):\n token = self.login(user)\n response = requests.put(\n url_root + url, headers={\"access-token\": token}, json=data\n )\n return response.json(), response.status_code", "def step_impl(context):\n fields = {\n 'grant_type': 'refresh_token',\n 'refresh_token': context.oauth.refresh_token,\n 'scope': context.vendor_config['versioned_auth']['scope'],\n }\n\n context.response = token_request(fields,\n context.vendor_config['versioned_auth'],\n context.conformance)", "def _refresh_token(self):\n token_url = self._base_url + '/api/oauth2/token'\n params = {\n 'grant_type': 'client_credentials',\n 'client_id': self._client_id,\n 'client_secret': self._client_secret\n }\n headers = {'accept': 'application/json'}\n response = requests.post(token_url,proxies = self._proxy,params= params,headers = headers)\n logging.debug(response.text)\n parsed = response.json()\n self._access_token = parsed['access_token']\n self._refresh_token = parsed['refresh_token']\n expires_in = parsed['expires_in']\n ## Keep a buffer of 120 seconds to refresh token before expiry\n self._expires_at = datetime.now() + timedelta(seconds=(expires_in - 120))\n\n logging.debug('access_token %s expires at %s', self._access_token, self._expires_at)\n\n return", "def refresh():\n print(\"refresh request\")\n old_token = flask.request.get_data()\n new_token = guard.refresh_jwt_token(old_token)\n ret = {'access_token': new_token}\n return ret, 200", "def set_user_api_token(connection, api_url, token):\n\n body = {\n 'endpoint': api_url,\n 'user': '',\n 'password': '',\n 'token': token,\n 'type': 'token'\n }\n\n connection.post_obj_as_json('user/credentials', body)", "def update_user():\n #TODO user update \n pass", "def step_impl(context):\n fields = {\n 'grant_type': 'refresh_token',\n 'refresh_token': context.oauth.refresh_token,\n 'scope': context.vendor_config['auth']['scope'],\n }\n\n context.response = token_request(fields,\n context.vendor_config['auth'],\n context.conformance)", "def test_replace_o_auth_access_token(self):\n pass", "def refreshAccessToken(self):\r\n\r\n assert hasattr(self.oauthToken, \"getRefreshToken\")\r\n\r\n #turn the response into json\r\n\r\n response = self._oauth.refreshAccessToken(self.oauthToken)\r\n responseBody = json.loads(response['Body'])\r\n\r\n try:\r\n oauthToken = token.Token(responseBody)\r\n except TypeError:\r\n print (\"Bad response when refreshing the token \" + str(responseBody))\r\n sys.exit()\r\n\r\n return oauthToken", "def refresh_access_token(self):\n parameters = {'client_id': self.CLIENT_ID,\n 'auth_code': self.auth_code,\n 'client_secret': self.CLIENT_SECRET,\n 'grant_type': 'authorization_code'}\n url = self.ACCESS_TOKEN_URL % parameters\n data = self._get_refresh_data()\n logging.info('url: %s, data: %s', url, data)\n\n try:\n 
# empty data to trigger a post\n req = urllib2.Request(url, data)\n req.add_header('Content-Type', 'application/x-www-form-urlencoded')\n result = urllib2.urlopen(req)\n result = json.load(result)\n logging.info('result: %s', result)\n except urllib2.HTTPError, err:\n result = json.load(err)\n logging.info(result)\n raise err\n\n self.access_token = result['access_token']\n self.expires = int(time.time() + result['expires_in'])\n self.refresh_token = result.get('refresh_token', None)", "def refresh_token(self):\n\n self.token = flask.request.cookies.get('kdm-manager_token')\n\n if self.token is None or self.token == 'None':\n err = 'Could not retrieve JWT from cookies!'\n self.logger.error(err)\n self.logger.error(flask.request.cookies)\n flask.abort(500, err)\n\n # set the API endpoint and post the Authorization header to it\n endpoint = app.config['API']['url'] + 'authorization/refresh'\n response = requests.post(\n endpoint,\n verify = app.config['API']['verify_ssl'],\n headers = {\n 'Authorization': self.token,\n 'API-Key': app.config['API_KEY']\n },\n )\n\n if response.status_code == 200:\n self.token = response.json()['access_token']\n return True\n else:\n self.logger.error('%s Could not refresh token!' % self)\n self.logger.error('%s - %s' % (response.status_code, response.text))\n raise utils.Logout('Could not refresh JWT!')", "def UserToken(self) -> object:", "async def update(self):\n self.data = await self.api.user.get()", "async def oauth2_token(\n request: Request, oauth2_request=Depends(_oauth2_request)\n):", "def refresh_token():\n try:\n deserialized_message = peek_app_token()\n app_id = deserialized_message.get('app_id')\n installation_id = deserialized_message.get('installation_id')\n store_token(get_token(app_id, installation_id))\n\n except Exception as exc:\n log.error(f'Could not refresh token.\\n{exc}')\n traceback.print_exc(file=sys.stderr)", "def re_authenticate(self):\n url = URLS['token']\n data = {\n \"grant_type\": \"refresh_token\",\n \"refresh_token\": self.refresh_token,\n \"client_id\": self.client_id,\n \"client_secret\": self.client_secret\n }\n r = requests.post(url, data=data)\n r.raise_for_status()\n j = r.json()\n self.access_token = j['access_token']\n self.refresh_token = j['refresh_token']\n self._set_token_expiration_time(expires_in=j['expires_in'])\n return r", "def auth_token(self):", "def fusion_api_edit_user(self, body, uri, api=None, headers=None):\n return self.user.update(body, uri, api, headers)", "def auth(self):\n if self.get_saved_token():\n return\n self.oauth2()\n self.save_token()", "def _set_access_token(self):\n integration_context = demisto.getIntegrationContext()\n access_token = integration_context.get(ACCESS_TOKEN_CONST)\n valid_until = integration_context.get(EXPIRES_IN)\n if access_token and valid_until:\n if int(time.time()) < valid_until:\n self.access_token = access_token\n self.api_url = integration_context.get(API_URL_CONST, DEFAULT_API_URL)\n self.instance_id = integration_context.get(INSTANCE_ID_CONST)\n return\n demisto.debug(f'access token time: {valid_until} expired/none. 
Will call oproxy')\n access_token, api_url, instance_id, refresh_token, expires_in = self._oproxy_authorize()\n updated_integration_context = {\n ACCESS_TOKEN_CONST: access_token,\n EXPIRES_IN: int(time.time()) + expires_in - SECONDS_30,\n API_URL_CONST: api_url,\n INSTANCE_ID_CONST: instance_id\n }\n if refresh_token:\n updated_integration_context.update({'refresh_token': refresh_token})\n demisto.setIntegrationContext(updated_integration_context)\n self.access_token = access_token\n self.api_url = api_url\n self.instance_id = instance_id", "def set_token(self, token: AccessToken):\n self.access_token = token.access_token or \"\"\n if isinstance(token, AccessToken):\n self.refresh_token = token.refresh_token or \"\"\n self.token_type = token.token_type or \"\"\n self.expires_in = token.expires_in or 0\n\n lag = datetime.timedelta(seconds=-self.lag_time)\n if token.access_token and token.expires_in:\n lag = datetime.timedelta(seconds=token.expires_in - self.lag_time)\n self.expires_at = datetime.datetime.now() + lag", "async def _refresh_token(self):\n async with self.web_session.post(url=self._login_url, json=self._refresh_payload) as resp:\n if self.check_status(resp.status, self._login_url):\n data = await resp.json()\n token = data.get(\"access_token\")\n if token:\n self._set_token(token)\n self.expired_token = False\n return\n await self._try_login()\n await self._wait_for_login()", "def refresh():\n current_user = get_jwt_identity()\n\n user = get_user_by_username(current_user)\n\n if not user:\n return make_response(CONST_LOGIN_MSG, 401, {\n 'WWW-Authenticate': f'Basic realm=\"{CONST_REALM_MSG}\"'})\n\n if user.is_admin:\n claims = {'is_admin': True}\n else:\n claims = {'is_admin': False}\n\n now = datetime.datetime.now(datetime.timezone.utc)\n access_expires = (now + jwt_config.access_expires).timestamp()\n refresh_expires = (now + jwt_config.refresh_expires).timestamp()\n\n response = {\n 'access_token': create_access_token(identity=current_user,\n user_claims=claims),\n 'access_expires': access_expires,\n 'refresh_expires': refresh_expires,\n 'refresh_token': create_refresh_token(identity=current_user),\n 'user': get_user_details(user)\n\n }\n return jsonify(response), 200", "def update_body(self, body: dict[Any, Any]) -> None:\n body[\"data\"][\"AUTHENTICATOR\"] = ID_TOKEN_AUTHENTICATOR\n body[\"data\"][\"TOKEN\"] = self._id_token", "def update_cloud_token(self):\n self._cloud_token = rest_util.get_server_access_token(self._url_login, self._credential, REST_HEADERS,\n self._token_prefix, self._token_suffix)", "def renew_token_and_request(self, p_request):\n self.get_fb_token()\n return self.freshen_token(p_request)", "def refreshAuthentication(self, authenticationToken):\r\n pass", "def expire_token(self):\n self.user_in_db = User.users_db.get(self.email)\n\n self.user_in_db.update({'token': ''})\n\n User.users_db.put(self.user_in_db)\n\n return {'success': True}", "def save_auth_token(self, access_token, refresh_token):\n raise NotImplementedError(\n \"\"\"\n save_auth_token must be implemented by a child class\n \"\"\"\n )", "def user(self, user_token, user_device=None):\n self.set('user', user_token)\n self.set('device', user_device)", "def refresh_token(self):\n now = timezone.now()\n limit = now - timedelta(days=20)\n # TODO: use expires_in from response data?\n print(self.token_refresh_date)\n print(limit)\n if self.token_refresh_date < limit:\n url = '{}refresh_access_token'.format(conf.INSTAGRAM_API)\n params = {\n 'grant_type': 'ig_refresh_token',\n 'access_token': 
self.token\n }\n response = requests.get(url, params=params)\n data = response.json()\n else:\n print('no need to get a fresch token yet')\n return\n if response.status_code == 200 and data:\n self.token = data.get('access_token')\n self.token_refresh_date = now\n self.token_ok = True\n self.save()\n elif settings.DEBUG:\n self.token_ok = False\n self.save()\n print('could not refresh token')\n return", "def setoAuthTokenFromCASSAMLProperties(event):\n user = api.user.get(event.properties['username'])\n user.setMemberProperties(mapping=dict(oauth_token=event.properties['oauthToken']))", "def save_token(self, token, request):\n client = request.client\n if request.user:\n user_id = request.user.pk\n else:\n user_id = client.user_id\n item = self.token_model(\n client_id=client.client_id,\n user_id=user_id,\n **token\n )\n item.save()\n return item", "def user_token(app_env, user_refresh):\n cred = tk.Credentials(*app_env)\n\n try:\n yield cred.refresh_user_token(user_refresh)\n except tk.HTTPError as error:\n skip_or_fail(tk.HTTPError, \"Error in retrieving user token!\", error)\n cred.close()", "def test_authtoken_refresh(self):\n hagrid = models.User(username='hagrid', fullname='Rubeus Hagrid')\n auth_token = models.AuthToken(user=hagrid, algorithm='hmac-sha-1')\n existing_token = auth_token.token\n existing_secret = auth_token.secret\n auth_token.refresh()\n self.assertNotEqual(existing_token, auth_token.token)\n self.assertNotEqual(existing_secret, auth_token.secret)", "def refreshToken(self, token):\n \n postData = { 'refresh_token': token.refreshToken,\n 'client_id': self.clientId,\n 'client_secret': self.clientSecret,\n 'grant_type': self.refreshGrantType }\n postFields = urlencode(postData)\n\n \n buffer = BytesIO()\n c = pycurl.Curl()\n try:\n c.setopt(c.URL, self.refreshServer)\n c.setopt(c.POSTFIELDS, postFields)\n c.setopt(c.WRITEDATA, buffer)\n c.perform()\n \n responsecode = c.getinfo(c.RESPONSE_CODE)\n reqResp = json.loads(buffer.getvalue().decode('iso-8859-1'))\n except pycurl.error as err:\n msgData = { 'error_code': GDataOAuthError.ERR_NETWORK, 'error_string': c.errstr() }\n self.applicationCallback(MessageTypes.MSG_OAUTH_FAILED, msgData)\n return\n finally:\n c.close()\n\n\n if(responsecode == 200):\n expiration = int(time.time()) + int(reqResp['expires_in'])\n token.accessToken = reqResp['access_token']\n token.expiration = expiration\n token.tokenType = reqResp['token_type']\n self.applicationCallback(MessageTypes.MSG_OAUTH_SUCCESS, token);\n elif(responsecode == 401):\n msgData = { 'error_code': GDataOAuthError.ERR_CREDENTIALS, 'error_string': reqResp['error'] }\n self.applicationCallback(MessageTypes.MSG_OAUTH_FAILED, msgData)\n elif(responsecode == 400):\n msgData = { 'error_code': GDataOAuthError.ERR_PROTOCOL, 'error_string': reqResp['error'] + \": \" + reqResp['error_description']}\n self.applicationCallback(MessageTypes.MSG_OAUTH_FAILED, msgData)\n else:\n msgData = { 'error_code': GDataOAuthError.ERR_UNKNOWN, 'error_string': reqResp['error'] + \": \" + reqResp['error_description'] }\n self.applicationCallback(MessageTypes.MSG_OAUTH_FAILED, msgData)", "def for_user(self, a_token, a_secret):\n\t\tself.a_token = a_token\n\t\tself.a_secret = a_secret", "def update(self, user: U) -> None:\n ...", "def user(self, user_token, user_device=None):\n\n self.user_token = user_token\n self.user_device = user_device", "def set_reddit_oauth_refresh_token(site_name, token, _current_parser=None):\n set_value(site_name, OAUTH_CRED_KEYS[CredKeys.refresh], token, _current_parser)", 
"def refresh_token():\n json_request = request.json\n refresh_token = json_request.get('refresh_token')\n if not refresh_token:\n return msg.errors.bad_request(\n 'You should provide refresh token for this call')\n refresh_token_obj = RefreshToken.valid_token(refresh_token)\n if not refresh_token_obj:\n return msg.errors.unauthorized('Provided refresh token is not valid')\n access_token = generate_token(refresh_token_obj.user_id)\n return msg.success(\n message='New access token generated',\n access_token=access_token)", "def __set_authentication_token(self, token):\n cache = {\"authentication_token\": token}\n save_json(self._tokenPath, cache)", "def bearer_authentication(self, token: str) -> None:\n self.api_session.headers.update({'Authorization': f'Bearer {token}'})", "def test_authenticated_user_update(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Forbidden,\r\n getattr(require, 'token').update,\r\n token)", "def api_extend_account():\n user_id = request.form.get('user_id')\n today_plus_180 = get_expiration_date(180)\n user = UserEntity.get_by_id(user_id)\n user = UserEntity.update(user, access_expires_at=today_plus_180)\n return jsonify_success(\n {\"message\": \"Updated expiration date to {}\".format(today_plus_180)})", "def update_token(token):\n try:\n payload = jwt.decode(token, os.environ.get('SECRET', 'test'))\n payload['exp'] = datetime.utcnow() + timedelta(days=100)\n jwt_bytes = jwt.encode(\n payload,\n os.environ.get('SECRET', 'test'),\n algorithm='HS256'\n )\n return jwt_bytes.decode('utf-8')\n except Exception as e:\n raise Exception(str(e))", "def post(self):\n current_user = get_jwt_identity()\n return {\n # Mark the token as un-fresh since we used the refresh token to regenerate this\n \"accessToken\": create_access_token(identity=current_user, fresh=False),\n \"userId\": current_user\n }", "async def async_refresh_access_token_if_needed(self):\n if self.authenticator.should_refresh():\n async with self._token_refresh_lock:\n refreshed_authentication = await self.authenticator.async_refresh_access_token(\n force=False\n )\n _LOGGER.info(\n \"Refreshed august access token. 
The old token expired at %s, and the new token expires at %s\",\n self.authentication.access_token_expires,\n refreshed_authentication.access_token_expires,\n )\n self._authentication = refreshed_authentication", "def auth(request):\n\n service = get_model_instance(request.user, MODULE_NAME)\n if service and request.method == 'POST':\n username = request.POST['username']\n\n # Delete existing token\n AccessToken.objects.filter(service=service).delete()\n # Before creating a new one\n AccessToken.objects.create(\n service=service,\n username=username,\n created=datetime.now(),\n api_token=service.app.oauth.consumer_key\n )\n\n service.setup = True\n service.public = True\n service.save()\n\n return redirect(settings_redirect(request))", "def set_token(self, token):\n # type: (Token) -> None\n self.token = token\n self._token_header = \"Bearer \" + token[\"access_token\"]", "def set_oauth(self, consumer_token, access_token):\n self.consumer_token = consumer_token\n self.access_token = access_token", "def refresh_token(self):\n # basic function to get an access token\n api_response = requests.get(\n self.api_config.get_api_url() + \"authentication/g?username=\" + self.api_config.get_api_username() + \"&password=\" + self.api_config.get_api_password())\n\n if api_response.status_code >= 200:\n self.API_TOKEN = api_response.content.decode()\n\n return self.API_TOKEN\n else:\n return None", "def token_auth(self):\n self.client = APIClient()\n self.user = User.objects.create_user(username='testuser', email='test@test.com', password='testpassword')\n self.token = Token.objects.create(user=self.user)\n self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)", "def save_bearer_token(self, token, request, *args, **kwargs):\n log.debug('Save bearer token %r', token)\n self._tokensetter(token, request, *args, **kwargs)\n return request.client.default_redirect_uri", "def getAccessToken(self):\r\n\r\n #lets see if we have an oauth code\r\n if self.oauthToken is None:\r\n self.oauthToken = self.createAccessToken\r\n\r\n if self.oauthToken.isExpired(): #check to see if its expired if so refresh it\r\n self.oauthToken = self.refreshAccessToken()\r\n\r\n return self.oauthToken #return out access token\r", "def refresh_spotify_instance(self):\n import spotipy\n\n token_refreshed = False\n need_token = (self._token_info is None or\n self._oauth.is_token_expired(self._token_info))\n if need_token:\n new_token = \\\n self._oauth.refresh_access_token(\n self._token_info['refresh_token'])\n # skip when refresh failed\n if new_token is None:\n return\n\n self._token_info = new_token\n token_refreshed = True\n if token_refreshed or self._spotify is None:\n self._spotify = spotipy.Spotify(auth=self._token_info.get('access_token'))\n self._user = self._spotify.me()", "def api_token2(self, api_token2):\n\n self._api_token2 = api_token2", "def _refresh_access_token(self):\n # force https so that we don't send around tokens unsecurely\n url = 'https://{}/api/token/refresh'.format(urlparse(self.base_url).netloc)\n \n # paranoid: check again that we only send the token to https\n if urlparse(url).scheme != \"https\":\n msg = 'This should not happen, please file a bug report.'\n raise Exception(msg)\n\n if not self.jwt_refresh_token:\n raise FDSNUnauthorizedException(\"Unauthorized, authentication \"\n \"required.\", )\n\n # convert to json\n data = json.dumps({\"refresh\": self.jwt_refresh_token})\n # encode\n data = bytes(data, \"utf-8\")\n headers = {\"Content-Type\": \"application/json\"}\n html = 
urllib_request.Request(url, data=data, headers=headers)\n # decode('utf-8')\n try:\n result = urllib_request.urlopen(html).read().decode(\"utf-8\")\n dic = json.loads(result)\n self.jwt_access_token = dic['access']\n\n if self.debug:\n print('Got temporary access/refresh: {}/{}'.format(self.jwt_access_token, self.jwt_refresh_token))\n \n return\n except:\n raise FDSNUnauthorizedException(\"Unauthorized, authentication \"\n \"expired. Please set your credentials again.\", )", "def set_access_token(self, token):\n\n self.__current_request_mock.headers['Authorization'] = token" ]
[ "0.7218071", "0.7128234", "0.70803964", "0.70797455", "0.6948329", "0.69223416", "0.6816124", "0.67791194", "0.677325", "0.6766163", "0.6606324", "0.65013164", "0.6490434", "0.64472616", "0.64429665", "0.64318883", "0.6418706", "0.64162564", "0.6413319", "0.63991517", "0.6385085", "0.63684547", "0.63431394", "0.6338588", "0.62584007", "0.624832", "0.62463784", "0.62334836", "0.622952", "0.6228788", "0.6194738", "0.6184121", "0.61391926", "0.61321795", "0.61312526", "0.61069155", "0.6104181", "0.60973734", "0.60696316", "0.60420066", "0.6030879", "0.60265726", "0.6009713", "0.6008249", "0.5987083", "0.5985237", "0.59648806", "0.5962282", "0.5954519", "0.5950286", "0.59472275", "0.5941657", "0.59370786", "0.5925095", "0.59237856", "0.59034896", "0.58915603", "0.5889202", "0.5886435", "0.5882959", "0.58767873", "0.58685046", "0.5862061", "0.58588934", "0.58399415", "0.5836056", "0.58312243", "0.5819639", "0.5817656", "0.58081126", "0.5787911", "0.5779098", "0.57781684", "0.5770929", "0.5769325", "0.57556915", "0.57465756", "0.57437676", "0.5743407", "0.57423913", "0.5723418", "0.5715926", "0.5703854", "0.5700664", "0.5699091", "0.5681449", "0.567803", "0.5676166", "0.5668536", "0.5663692", "0.56609803", "0.5648485", "0.56438524", "0.56390667", "0.56377137", "0.5634722", "0.56330985", "0.563047", "0.56293696", "0.56051904", "0.56042385" ]
0.0
-1
Updates the specified policy. You can update the description or the policy statements themselves. Policy changes take effect typically within 10 seconds.
def update_policy(self, policy_id, update_policy_details, **kwargs):
    resource_path = "/policies/{policyId}"
    method = "PUT"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "if_match"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "update_policy got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "policyId": policy_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "if-match": kwargs.get("if_match", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')

    if retry_strategy:
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=update_policy_details,
            response_type="Policy")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=update_policy_details,
            response_type="Policy")
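A minimal caller-side sketch, assuming a standard ~/.oci/config profile; the policy OCID, description, and statements below are hypothetical placeholders. Passing if_match with the policy's current etag makes the update conditional, guarding against concurrent modifications.

import oci

# Assumption: credentials come from the default ~/.oci/config profile.
config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Fetch the current policy so the update can be made conditional on its etag.
current = identity.get_policy(policy_id="ocid1.policy.oc1..example")  # hypothetical OCID

details = oci.identity.models.UpdatePolicyDetails(
    description="Administrators manage all resources in the tenancy",  # hypothetical value
    statements=["Allow group Administrators to manage all-resources in tenancy"],
)

updated = identity.update_policy(
    policy_id="ocid1.policy.oc1..example",  # hypothetical OCID
    update_policy_details=details,
    if_match=current.headers.get("etag"),  # optimistic concurrency via If-Match
).data  # response_type is "Policy", so .data is the updated Policy model
print(updated.statements)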
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_policy(self, *args, **kwargs):\r\n pass", "def update_policy(self):\n pass", "def UpdatePolicy(self, request, global_params=None):\n config = self.GetMethodConfig('UpdatePolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def update_policy(policy_id):\n old_policy = PolicyService.get_policy_by_id(policy_id)\n if old_policy is None:\n abort(404)\n new_policy = PolicyService.update_policy_by_id(policy_id, json_to_policy(request.json))\n if new_policy is None:\n abort(406)\n return new_policy.__dict__", "def update_Policy(self,inputpolicy):\n \n policyob = self.SD_Map.retrieve_ob(inputpolicy)\n policyob.values[-1] = self.PolicyDicts[inputpolicy][self.translate(self.policy_option_vars[inputpolicy].get(),\n input_language = self.language,\n output_language = 'english')]", "def device_update_policy(self, device_ids, policy_id):\n return self._device_action(device_ids, \"UPDATE_POLICY\", {\"policy_id\": policy_id})", "def update_policy(self, policy, inverse_policy=None):\n self.make_T_policy_matrix(policy)\n self.inverse_dynamics_by_time = dict()\n self.policy = policy\n self.inverse_policy = inverse_policy", "def update_policy(ranger_url, policy_id, policy_data, admin_username_password):\n\n url = format(\"{ranger_url}/service/public/v2/api/policy/{policy_id}\")\n\n base_64_string = base64.encodestring(admin_username_password).replace('\\n', '')\n\n request = urllib2.Request(url, json.dumps(policy_data))\n request.get_method = lambda: 'PUT'\n request.add_header('Content-Type', 'application/json')\n request.add_header('Accept', 'application/json')\n request.add_header('Authorization', format('Basic {base_64_string}'))\n\n try:\n result = openurl(request, timeout=20)\n response_code = result.getcode()\n if response_code == 200:\n Logger.info(format(\"Successfully updated policy in Ranger Admin\"))\n return response_code\n else:\n Logger.error(format(\"Unable to update policy in Ranger Admin\"))\n return None\n except urllib2.HTTPError as e:\n raise Fail(\"HTTPError while updating policy Reason = \" + str(e.code))\n except urllib2.URLError as e:\n raise Fail(\"URLError while updating policy. Reason = \" + str(e.reason))\n except TimeoutError:\n raise Fail(\"Connection timeout error while updating policy\")\n except Exception as err:\n raise Fail(format(\"Error while updating policy. 
Reason = {err}\"))", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def put(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n update_l7_policy(request, **kwargs)", "def Update(self,\n fp_id=None,\n only_generate_request=False,\n firewall_policy=None,\n batch_mode=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.Patch(\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)[2])\n return self.WaitOperation(\n op_res, message='Updating the organization firewall policy.')", "def update_firewall_policy(self, firewall_policy, body=None):\r\n return self.put(self.firewall_policy_path % (firewall_policy),\r\n body=body)", "def rbac_policy_update(request, policy_id, **kwargs):\n body = {'rbac_policy': kwargs}\n rbac_policy = neutronclient(request).update_rbac_policy(\n policy_id, body=body).get('rbac_policy')\n return RBACPolicy(rbac_policy)", "def setPolicy(self, value):\n return self._set(policy=value)", "def update(self,\n draft_id,\n policy_draft,\n ):\n return self._invoke('update',\n {\n 'draft_id': draft_id,\n 'policy_draft': policy_draft,\n })", "async def update_certificate_policy(\n self, certificate_name: str, policy: CertificatePolicy, **kwargs\n ) -> CertificatePolicy:\n bundle = await self._client.update_certificate_policy(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_policy=policy._to_certificate_policy_bundle(),\n **kwargs\n )\n return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)", "def set_policy(self, name, policy):\n client = self.connect(VAULT_TOKEN)\n client.set_policy(name, policy)", "def test_update_ikepolicy(self):\r\n resource = 'ikepolicy'\r\n cmd = ikepolicy.UpdateIKEPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def test_update_firewall_policy(self):\r\n resource = 'firewall_policy'\r\n cmd = firewallpolicy.UpdateFirewallPolicy(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_apic(self):\n return self.client.policy.update(policyList=self.policy_list.response)", "def updateMergePolicy(self, mergePolicyId: str = None, policy: dict = None) -> dict:\n if mergePolicyId is None:\n raise ValueError(\"Require a mergePolicyId\")\n if policy is None or type(policy) != dict:\n raise ValueError(\"Require a dictionary to update the merge policy\")\n if self.loggingEnabled:\n self.logger.debug(f\"Starting updateMergePolicy\")\n path = f\"/config/mergePolicies/{mergePolicyId}\"\n res = self.connector.putData(\n self.endpoint + path, data=policy, headers=self.header\n )\n return res", "def policy_update_fn(self, data: Dict[str, Any], result: Dict[str, Any]) -> None:", "def Update(self,\n priority=None,\n firewall_policy=None,\n firewall_policy_rule=None,\n batch_mode=False,\n only_generate_request=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)\n ]\n if not only_generate_request:\n return 
self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.PatchRule(\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)[2])\n return self.WaitOperation(\n op_res, message='Updating a rule in the organization firewall policy.')", "def modify_audit_policy(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n runtime = util_models.RuntimeOptions()\n return self.modify_audit_policy_with_options(request, runtime)", "def device_update_policy(self, device_update_policy):\n\n self._device_update_policy = device_update_policy", "def test_update_ipsecpolicy(self):\r\n resource = 'ipsecpolicy'\r\n cmd = ipsecpolicy.UpdateIPsecPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_policy(self):\n self.trainer_metrics.start_policy_update_timer(\n number_experiences=len(self.training_buffer.update_buffer[\"actions\"]),\n mean_return=float(np.mean(self.cumulative_returns_since_policy_update)),\n )\n self.cumulative_returns_since_policy_update = []\n n_sequences = max(\n int(self.trainer_parameters[\"batch_size\"] / self.policy.sequence_length), 1\n )\n value_total, policy_total = [], []\n advantages = self.training_buffer.update_buffer[\"advantages\"].get_batch()\n self.training_buffer.update_buffer[\"advantages\"].set(\n (advantages - advantages.mean()) / (advantages.std() + 1e-10)\n )\n num_epoch = self.trainer_parameters[\"num_epoch\"]\n for _ in range(num_epoch):\n self.training_buffer.update_buffer.shuffle()\n buffer = self.training_buffer.update_buffer\n for l in range(\n len(self.training_buffer.update_buffer[\"actions\"]) // n_sequences\n ):\n start = l * n_sequences\n end = (l + 1) * n_sequences\n run_out = self.policy.update(\n buffer.make_mini_batch(start, end), n_sequences\n )\n value_total.append(run_out[\"value_loss\"])\n policy_total.append(np.abs(run_out[\"policy_loss\"]))\n self.stats[\"Losses/Value Loss\"].append(np.mean(value_total))\n self.stats[\"Losses/Policy Loss\"].append(np.mean(policy_total))\n for _, reward_signal in self.policy.reward_signals.items():\n update_stats = reward_signal.update(\n self.training_buffer.update_buffer, n_sequences\n )\n for stat, val in update_stats.items():\n self.stats[stat].append(val)\n if self.policy.bc_module:\n update_stats = self.policy.bc_module.update()\n for stat, val in update_stats.items():\n self.stats[stat].append(val)\n self.training_buffer.reset_update_buffer()\n self.trainer_metrics.end_policy_update()", "def put_user_policy(self, user_name, policy_name, policy_json):\r\n params = {'UserName' : user_name,\r\n 'PolicyName' : policy_name,\r\n 'PolicyDocument' : policy_json}\r\n return self.get_response('PutUserPolicy', params, verb='POST')", "def _modify_schedule_policy_properties(self):\n request_json = {\n 'taskInfo':\n {\n 'taskOperation': 1,\n 'associations': self._associations,\n 'task': self._task_json,\n \"appGroup\":\n {\n \"appGroups\": self._app_groups if self._app_groups else [],\n },\n 'subTasks': self._subtasks\n }\n }\n\n flag, response = self._commcell_object._cvpysdk_object.make_request(\n 'PUT', self._MODIFY_SCHEDULE_POLICY, request_json\n )\n output = self._process_schedule_policy_update_response(flag, response)\n self.refresh()\n\n if output[0]:\n return\n\n o_str = 'Failed to update properties of Schedule 
Policy\\nError: \"{0}\"'\n raise SDKException('Schedules', '102', o_str.format(output[2]))", "def update_policy(self):\n raise UnityTrainerException(\"The update_model method was not implemented.\")", "def put(self):\n coll_policy_id = views_helper.get_request_value(self.request, \"coll_policy_id\", \"BODY\")\n name = views_helper.get_request_value(self.request, \"coll_policy_name\", \"BODY\")\n command = views_helper.get_request_value(self.request, \"command\", \"BODY\")\n desc = views_helper.get_request_value(self.request, \"desc\", \"BODY\")\n ostype = views_helper.get_request_value(self.request, \"ostype\", \"BODY\")\n coll_policy_update_data = {\n 'name': name,\n 'cli_command': command,\n 'desc': desc,\n 'ostype': ostype\n }\n if len(CollPolicy.objects.filter(~Q(coll_policy_id=coll_policy_id), name=name)):\n data = {\n 'data': '',\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.FALSE,\n constants.MSG_TYPE: 'NAME_DUPLICATE',\n constants.MESSAGE: constants.COLLECTION_POLICY_NAME_DUPLICATE\n }\n\n }\n return api_return(data=data)\n obj = CollPolicy.objects.get(coll_policy_id=coll_policy_id)\n serializer = CollPolicyEditSerializer(instance=obj, data=coll_policy_update_data)\n try:\n if serializer.is_valid():\n serializer.save()\n data = {\n 'data': serializer.data,\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.TRUE,\n constants.MESSAGE: constants.SUCCESS\n }\n\n }\n return api_return(data=data)\n except Exception as e:\n if constants.DEBUG_FLAG:\n print traceback.format_exc(e)\n return exception_handler(e)", "def update_policy(env, policy, V, discount_factor):\n\n for state in range(env.nS):\n # for a given state compute state-action value.\n action_values = one_step_lookahead(env, state, V, discount_factor)\n\n # choose the action which maximizes the state-action value.\n policy[state] = np.argmax(action_values)\n\n return policy", "def policy_id(self, policy_id):\n\n self._policy_id = policy_id", "def policy_id(self, policy_id):\n\n self._policy_id = policy_id", "def put_group_policy(self, group_name, policy_name, policy_json):\r\n params = {'GroupName' : group_name,\r\n 'PolicyName' : policy_name,\r\n 'PolicyDocument' : policy_json}\r\n return self.get_response('PutGroupPolicy', params, verb='POST')", "def policies(self, policies):\n\n self._policies = policies", "def put(self, request, l7_rule_id, l7_policy_id):\n kwargs = {'l7_rule_id': l7_rule_id, 'l7_policy_id': l7_policy_id}\n update_l7_rule(request, **kwargs)", "def policyid(self, policyid):\n self._policyid = policyid", "def _add_policy(self, policy):\n self.by_name[policy.name.upper()] = policy\n self.by_index[int(policy)] = policy", "def set_policy (self, policy = None, args = (), policy_cleanup = None):\n if policy == self.policy:\n # same policy; might want to change args/cleanup function, though\n self._policy_args = args\n if policy is not None and not isinstance(policy, basestring):\n self._policy_cleanup = policy_cleanup\n return\n # perform cleanup for current policy, if any\n if isinstance(self.policy, basestring):\n # built-in\n try:\n POLICY_CLEANUP[self.policy](self)\n except AttributeError:\n pass\n elif self.policy is not None and self._policy_cleanup is not None:\n # custom\n self._policy_cleanup(self)\n del self._policy_cleanup\n # set new policy\n self.policy = policy\n if policy is None:\n # if disabling scrolling, clean up some attributes we won't need\n try:\n del self._scroll_fn, self._policy_args\n except AttributeError:\n pass\n 
else:\n self._policy_args = args if args else ()\n if isinstance(policy, basestring):\n # built-in\n self._scroll_fn = POLICY_SCROLL[policy]\n else:\n # custom\n self._scroll_fn = policy\n self._policy_cleanup = policy_cleanup", "def policy_name(self, policy_name):\n\n self._policy_name = policy_name", "def policy_name(self, policy_name):\n\n self._policy_name = policy_name", "def test_update_webhook_policy_to_at_style_scheduler(self):\n upd_policy_response = self.autoscale_client.update_policy(\n group_id=self.group.id,\n policy_id=self.policy_up['policy_id'],\n name='upd_webhook_to_scheduler',\n cooldown=self.sp_cooldown,\n change=self.sp_change,\n args={'at': self.autoscale_behaviors.get_time_in_utc(60)},\n policy_type='schedule')\n self.assertEquals(upd_policy_response.status_code, 400,\n msg='Update webhook policy to schedule policy type'\n ' on the group {0} with response code {1}'.format(\n self.group.id, upd_policy_response.status_code))", "def patch(self,\n draft_id,\n policy_draft,\n ):\n return self._invoke('patch',\n {\n 'draft_id': draft_id,\n 'policy_draft': policy_draft,\n })", "def add_policy(self, policy_name, policy_text): \n self.policies.add(policy_name, policy_text)\n self.policies = set()", "def translate_policy(policy: dict):\n if 'PolicyName' in policy:\n # This is a normal policy that should not be expanded\n return policy\n template_name = next(iter(policy))\n template_parameters = policy[template_name]\n try:\n # 'convert' will return a list of policy statements\n policy_document = processor.convert(template_name, template_parameters)\n except InsufficientParameterValues as e:\n # Exception's message will give lot of specific details\n raise ValueError(str(e))\n except InvalidParameterValues:\n raise ValueError(\"Must specify valid parameter values for policy template '{}'\".format(template_name))\n return {\n \"PolicyName\": template_name + '-' + str(uuid.uuid4()),\n \"PolicyDocument\": policy_document\n }", "def put(self, consumer_key, rid):\n policy = Policy.query.filter(\n Policy.consumer_key == consumer_key,\n Policy.rid == rid\n ).first_or_404()\n\n payload = json.loads(request.data)\n if \"actions\" not in payload:\n abort(400, \"Missing required field: actions\")\n\n policy.actions = set(payload[\"actions\"])\n policy.save()\n return self.jsonify(self._serialize(policy), status_code=200)", "def update_access_policy_command(client: KeyVaultClient, args: dict[str, Any], params: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n operation_kind = args['operation_kind']\n object_id = args['object_id']\n keys = argToList(args.get('keys'))\n secrets = argToList(args.get('secrets'))\n certificates = argToList(args.get('certificates'))\n storage_accounts = argToList(args.get('storage', []))\n # subscription_id and resource_group_name arguments can be passed as command arguments or as configuration parameters,\n # if both are passed as arguments, the command arguments will be used.\n subscription_id = get_from_args_or_params(params=params, args=args, key='subscription_id')\n resource_group_name = get_from_args_or_params(params=params, args=args, key='resource_group_name')\n\n response = client.update_access_policy_request(subscription_id, resource_group_name,\n vault_name, operation_kind, object_id, keys,\n secrets, certificates, storage_accounts)\n\n readable_output = tableToMarkdown(f'{vault_name} Updated Access Policy',\n response,\n ['id', 'name', 'type', 'location'], removeNull=True,\n headerTransform=string_to_table_header)\n\n return 
CommandResults(\n outputs_prefix='AzureKeyVault.VaultAccessPolicy',\n outputs_key_field='id',\n outputs=response,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )", "def test_update(self, mock_put):\n self.policies.update(id=333114, policy_update=self.policy_show_response)\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alert_policies/333114.json',\n headers=self.policies.headers,\n data=json.dumps(self.policy_show_response)\n )", "def apply_policy(cls, metadata, policy):\r\n for attr, value in policy.iteritems():\r\n attr = cls._translate(attr)\r\n if attr not in cls.fields:\r\n # Store unknown attributes coming from policy.json\r\n # in such a way that they will export to xml unchanged\r\n metadata['xml_attributes'][attr] = value\r\n else:\r\n metadata[attr] = value", "def update_policy(self):\n self._sess.run(self._hard_copy_to_target_op);", "def set_target_policy(self, policy):\n self.target_policy = policy", "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def add(self, policy_name, data):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.info(\"Adding the policy: %s\", address)\n payload = json.dumps({\"policy\": data})\n response = self.vault.requests_request(\n \"POST\", address, headers=self.vault.token_header, data=payload\n )", "def draw_policy(self, policy):\n\n if self.update_animation:\n self.canvas.delete(\"policy\")\n for state, action in policy.items():\n row, col = state\n if self.GRID_MAP[row][col] != \"H\" and self.GRID_MAP[row][col] != \"G\":\n x1 = (col * self.GRID_ROW_HEIGHT) + (self.GRID_ROW_HEIGHT / 2)\n y1 = (row * self.GRID_ROW_HEIGHT) + (self.GRID_ROW_HEIGHT / 2)\n if action == \"Left\":\n x2 = x1 + 15\n x1 -= 15\n self.canvas.create_line(x1, y1, x2, y1, arrow=FIRST, tag=\"policy\")\n elif action == \"Up\":\n y2 = y1 + 15\n y1 -= 15\n self.canvas.create_line(x1, y1, x1, y2, arrow=FIRST, tag=\"policy\")\n elif action == \"Right\":\n x2 = x1 + 15\n x1 -= 15\n self.canvas.create_line(x1, y1, x2, y1, arrow=LAST, tag=\"policy\")\n elif action == \"Down\":\n y2 = y1 + 15\n y1 -= 15\n self.canvas.create_line(x1, y1, x1, y2, arrow=LAST, tag=\"policy\")", "async def modify_audit_policy_async(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n runtime = util_models.RuntimeOptions()\n return await self.modify_audit_policy_with_options_async(request, runtime)", "def update_policy_values(policy_lookup, board, state, player, action, next_state, reward):\n\t\n\t# compute total expected reward including future rewards\n\tif board.check_end():\n\t\texpected = reward\n\telse:\n\t\tif player == 1:\n\t\t\texpected = reward + discount * min_value(policy_lookup, next_state, 2)\n\t\telif player == 2:\n\t\t\texpected = reward + discount * max_value(policy_lookup, next_state, 1)\n\t# get current policy action 
value\n\tpolicy_value = get_policy_value(policy_lookup, state, player, action)\n\t# update policy action value\n\tpolicy_lookup[(state, player)][action] += learning_rate * (expected - policy_value)", "def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy:\n return response", "def update(ctx: CLIContext, access_key, resource_policy, is_admin, is_active, rate_limit):\n with Session() as session:\n try:\n data = session.KeyPair.update(\n access_key,\n is_active=is_active,\n is_admin=is_admin,\n resource_policy=resource_policy,\n rate_limit=rate_limit)\n except Exception as e:\n ctx.output.print_mutation_error(\n e,\n item_name='keypair',\n action_name='update',\n )\n sys.exit(1)\n if not data['ok']:\n ctx.output.print_mutation_error(\n msg=data['msg'],\n item_name='keypair',\n action_name='update',\n )\n sys.exit(1)\n ctx.output.print_mutation_result(\n data,\n extra_info={\n 'access_key': access_key,\n },\n )", "def set_submit_policy(\n self, policy: SubmitPolicyStr | widgets.QDataWidgetMapper.SubmitPolicy\n ):\n self.setSubmitPolicy(SUBMIT_POLICY.get_enum_value(policy))", "def test_update_success(self, mock_put):\n self.policies.update(\n id=self.policy_single_response['policy']['id'],\n name=self.policy_single_response['policy']['name'],\n incident_preference=self.policy_single_response['policy']['incident_preference']\n )\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alerts_policies/{0}.json'.format(\n self.policy_single_response['policy']['id']\n ),\n headers=self.policies.headers,\n data=json.dumps({\n \"policy\": {\n \"name\": self.policy_single_response['policy']['name'],\n \"incident_preference\": self.policy_single_response['policy']['incident_preference']\n }\n })\n )", "def update(self,\n dns_forwarder_zone_id,\n policy_dns_forwarder_zone,\n ):\n return self._invoke('update',\n {\n 'dns_forwarder_zone_id': dns_forwarder_zone_id,\n 'policy_dns_forwarder_zone': policy_dns_forwarder_zone,\n })", "def test_update_webhook_policy_to_cron_style_scheduler(self):\n upd_policy_response = self.autoscale_client.update_policy(\n group_id=self.group.id,\n policy_id=self.policy_down['policy_id'],\n name='upd_webhook_to_scheduler',\n cooldown=self.sp_cooldown,\n change=self.sp_change,\n args={'cron': '* 3 * * *'},\n policy_type='schedule')\n self.assertEquals(upd_policy_response.status_code, 400,\n msg='Update webhook policy to schedule policy type'\n ' on the group {0} with response code {1}'.format(\n self.group.id, upd_policy_response.status_code))", "def put_metric_policy(ContainerName=None, MetricPolicy=None):\n pass", "def add_grading_policy(self, grading_policy):\r\n\r\n self.course.grading_policy = grading_policy\r\n store = editable_modulestore()\r\n store.update_item(self.course, '**replace_user**')\r\n self.refresh_course()", "def put_bucket_policy(self, bucket_name, policy):\n self._client.put_bucket_policy(Bucket=bucket_name, Policy=policy)", "def policy(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"policy\")", "def update_authentication_policy(self, compartment_id, update_authentication_policy_details, **kwargs):\n resource_path = \"/authenticationPolicies/{compartmentId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n 
\"update_authentication_policy got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_authentication_policy_details,\n response_type=\"AuthenticationPolicy\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_authentication_policy_details,\n response_type=\"AuthenticationPolicy\")", "def update_policy_network(self):\r\n self.send(self.server_conn, (sys._getframe().f_code.co_name, {}))", "def update_service_access_policies(DomainName=None, AccessPolicies=None):\n pass", "def update_policy(self):\n n_sequences = max(int(self.trainer_parameters['batch_size'] / self.policy.sequence_length), 1)\n value_total, policy_total, forward_total, inverse_total = [], [], [], []\n advantages = self.training_buffer.update_buffer['advantages'].get_batch()\n self.training_buffer.update_buffer['advantages'].set(\n (advantages - advantages.mean()) / (advantages.std() + 1e-10))\n num_epoch = self.trainer_parameters['num_epoch']\n for k in range(num_epoch):\n self.training_buffer.update_buffer.shuffle()\n buffer = self.training_buffer.update_buffer\n for l in range(len(self.training_buffer.update_buffer['actions']) // n_sequences):\n start = l * n_sequences\n end = (l + 1) * n_sequences\n run_out = self.policy.update(buffer.make_mini_batch(start, end), n_sequences)\n value_total.append(run_out['value_loss'])\n policy_total.append(np.abs(run_out['policy_loss']))\n if self.use_curiosity:\n inverse_total.append(run_out['inverse_loss'])\n forward_total.append(run_out['forward_loss'])\n self.stats['value_loss'].append(np.mean(value_total))\n self.stats['policy_loss'].append(np.mean(policy_total))\n if self.use_curiosity:\n self.stats['forward_loss'].append(np.mean(forward_total))\n self.stats['inverse_loss'].append(np.mean(inverse_total))\n self.training_buffer.reset_update_buffer()", "def policy_delete(request, policy_id):\n neutronclient(request).delete_qos_policy(policy_id)", "def policy(self, s):\r\n raise NotImplemented()", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def modify_audit_policy_with_options(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.audit_log_switch_source):\n query['AuditLogSwitchSource'] 
= request.audit_log_switch_source\n if not UtilClient.is_unset(request.audit_status):\n query['AuditStatus'] = request.audit_status\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.service_type):\n query['ServiceType'] = request.service_type\n if not UtilClient.is_unset(request.storage_period):\n query['StoragePeriod'] = request.storage_period\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='ModifyAuditPolicy',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.ModifyAuditPolicyResponse(),\n self.call_api(params, req, runtime)\n )", "def expiration_policy(self, expiration_policy):\n\n self._expiration_policy = expiration_policy", "def set_hardware_specific_load_balancing_policy(cls, vendor_id, product_id, policy):\n cls.execute([\"-l\", \"-t\", cls._get_hardware_id(vendor_id, product_id), str(policy)])", "def policy_eval(env, policy, V, discount_factor):\n policy_value = np.zeros(env.nS)\n for state, action in enumerate(policy):\n for probablity, next_state, reward, info in env.P[state][action]:\n policy_value[state] += probablity * (reward + (discount_factor * V[next_state]))\n\n return policy_value", "def apply_policy(self, policy, method):\n action, optimal_value, move = policy(self, method)\n return action, optimal_value, move", "def set_policyname(self, policyname):\n self.options[\"policyname\"] = policyname", "def patch(self, nodepool_policy_ident, patch):\n context = pecan.request.context\n nodepool_policy = api_utils.get_resource('NodePoolPolicy', nodepool_policy_ident)\n\n # policy.enforce(context, 'nodepool_policy:update', nodepool_policy,\n # action='nodepool_policy:update')\n try:\n nodepool_policy_dict = nodepool_policy.as_dict()\n print 'ssssss'\n print patch\n new_nodepool_policy = NodePoolPolicy(**api_utils.apply_jsonpatch(nodepool_policy_dict, patch))\n\n except api_utils.JSONPATCH_EXCEPTIONS as e:\n raise exception.PatchError(patch=patch, reason=e)\n\n # Update only the fields that have changed\n for field in objects.NodePoolPolicy.fields:\n try:\n patch_val = getattr(new_nodepool_policy, field)\n except AttributeError:\n # Ignore fields that aren't exposed in the API\n continue\n if patch_val == wtypes.Unset:\n patch_val = None\n if nodepool_policy[field] != patch_val:\n nodepool_policy[field] = patch_val\n\n # delta = nodepool_policy.obj_what_changed()\n nodepool_policy.save()\n # validate_function_properties(delta)\n\n # res_nodepool_policy = pecan.request.rpcapi.bay_update(nodepool_policy)\n return NodePoolPolicy.convert_with_links(nodepool_policy)", "def validate_policy(policy):\n if \"name\" not in policy:\n raise AttributeError(\"Policy missing attribute name.\")\n if \"policyType\" not in policy:\n raise 
AttributeError(\"Policy {} do not have PolicyType.\".format(policy[\"name\"]))\n if policy[\"policyType\"] not in [0, 1, 2]:\n raise AttributeError(\"Policy {} must have PolicyType 0, 1, or 2.\".format(policy[\"name\"]))\n if \"resources\" not in policy:\n raise AttributeError(\"Policy {} do not have resources.\".format(policy[\"name\"]))\n if policy[\"policyType\"] == 0 and not (\"policyItems\" in policy or \"denyPolicyItems\" in policy):\n raise AttributeError(\"Policy {} do not have policyItems nor denyPolicyItems.\".format(policy[\"name\"]))", "def set_policyname(self, policyname):\n self.options['policyname'] = policyname", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(cls):\n return relationship.many_to_one(cls, 'policy')", "def post_network_policy_update(self, resource_id, resource_dict):\n pass", "def PolicyStatement(self) -> PolicyStatement:", "def update_state(self, progress, policy_state=None):\n raise NotImplementedError", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[str]:\n return pulumi.get(self, \"policy\")", "def _policy_eval(self, policy: np.ndarray) -> np.ndarray:\n V = np.zeros(self.state_dim)\n diff = 1.0\n dr = 0.9\n while (diff >= self.theta):\n diff = 0.0\n for s in self.mdp._state_dict:\n old = V[self.mdp._state_dict[s]]\n temp = 0.0\n for opt in range(self.action_dim):\n if policy[self.mdp._state_dict[s],opt] == 1.0: \n for next_s in self.mdp._state_dict:\n p = self.mdp.P[self.mdp._state_dict[s],opt,self.mdp._state_dict[next_s]]\n r = self.mdp.R[self.mdp._state_dict[s],opt,self.mdp._state_dict[next_s]]\n Vs = V[self.mdp._state_dict[next_s]]\n temp = temp + p * (r + dr * Vs)\n V[self.mdp._state_dict[s]] = temp\n diff = max(diff,abs(old - V[self.mdp._state_dict[s]]))\n return V", "def phone_policy(self, phone_policy):\n\n self._phone_policy = phone_policy", "def put_container_policy(ContainerName=None, Policy=None):\n pass", "def UpdateBackupPolicy(\n self, backuppolicy_ref, backup_policy, update_mask, async_\n ):\n update_op = self._adapter.UpdateBackupPolicy(\n backuppolicy_ref, backup_policy, update_mask\n )\n if async_:\n return update_op\n operation_ref = resources.REGISTRY.ParseRelativeName(\n update_op.name, collection=netapp_util.OPERATIONS_COLLECTION\n )\n return self.WaitForOperation(operation_ref)" ]
[ "0.7842353", "0.75241417", "0.748778", "0.7414664", "0.7212257", "0.71420217", "0.71321785", "0.68537253", "0.67365426", "0.67068434", "0.67029107", "0.664534", "0.6516526", "0.6492029", "0.6485327", "0.6461159", "0.6407627", "0.64037424", "0.6252379", "0.6247484", "0.6230572", "0.61694294", "0.6116362", "0.60837615", "0.60546523", "0.6007976", "0.59768033", "0.59275633", "0.5900771", "0.57866055", "0.5779829", "0.57736415", "0.5759138", "0.5759138", "0.57546186", "0.57455784", "0.5735141", "0.57270974", "0.5711703", "0.5711575", "0.5711218", "0.5711218", "0.56848425", "0.56566596", "0.5654468", "0.5651694", "0.5595002", "0.5586453", "0.5565867", "0.5565142", "0.55492973", "0.5526037", "0.5507521", "0.54765004", "0.5464473", "0.54569596", "0.5447522", "0.5442283", "0.5421972", "0.5419326", "0.53953695", "0.5372276", "0.5350773", "0.53321916", "0.53282285", "0.5322424", "0.5317699", "0.5317699", "0.5317266", "0.53122914", "0.53107834", "0.5291901", "0.52876973", "0.5276857", "0.52697724", "0.52697724", "0.52448195", "0.522745", "0.5207855", "0.520157", "0.51942897", "0.5173949", "0.51629525", "0.5159024", "0.5148821", "0.51481974", "0.51481974", "0.51481974", "0.5136018", "0.512447", "0.51131237", "0.51010054", "0.50974315", "0.50974315", "0.50974315", "0.5088892", "0.50870717", "0.50821245", "0.5080916", "0.5078311" ]
0.68388695
8
Updates the specified SMTP credential's description.
def update_smtp_credential(self, user_id, smtp_credential_id, update_smtp_credential_details, **kwargs): resource_path = "/users/{userId}/smtpCredentials/{smtpCredentialId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_smtp_credential got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "smtpCredentialId": smtp_credential_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_smtp_credential_details, response_type="SmtpCredentialSummary") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_smtp_credential_details, response_type="SmtpCredentialSummary")
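For orientation, this method is normally reached through the SDK's IdentityClient. The sketch below is illustrative only: it assumes the standard `oci` package with a config file at the default path, and every OCID in it is a placeholder, not a real identifier.

# A minimal usage sketch, assuming the standard OCI Python SDK setup;
# the OCIDs below are placeholders.
import oci

config = oci.config.from_file()  # reads ~/.oci/config, profile DEFAULT
identity = oci.identity.IdentityClient(config)

# Only the description is mutable on an SMTP credential.
details = oci.identity.models.UpdateSmtpCredentialDetails(
    description="SMTP credential for the notifications service"
)

response = identity.update_smtp_credential(
    user_id="ocid1.user.oc1..exampleuniqueID",
    smtp_credential_id="ocid1.credential.oc1..exampleuniqueID",
    update_smtp_credential_details=details,
)
print(response.data.description)  # response.data is a SmtpCredentialSummary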
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_credential(self, context, id, credential):\n c = credential['credential']\n cred = update_credential(id,\n c['user_name'],\n c['password'])\n return self._make_credential_dict(cred)", "def _update_credential(self, key, cred):\n self._data[key] = cred\n self._write()", "def put(self, credential):\n pass", "def testUpdateCredentials(self):\r\n \r\n credentials = dict()\r\n credentials[\"username\"] = \"\"\r\n credentials[\"password\"] = \"\"\r\n self._factory.updateCredentials(credentials)", "def update_description(self, host, baseUrl, description):\n self._host = host\n self._urlBase = baseUrl\n self._description = description\n return", "def putCredential(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_credential(credential_id,\n new_user_name=None, new_password=None):\n session = db.get_session()\n try:\n cred = (session.query(network_models_v2.Credential).\n filter_by(credential_id=credential_id).one())\n if new_user_name:\n cred[\"user_name\"] = new_user_name\n if new_password:\n cred[\"password\"] = new_password\n session.merge(cred)\n session.flush()\n return cred\n except exc.NoResultFound:\n raise c_exc.CredentialNotFound(credential_id=credential_id)", "def update(self, descriptor_msg, auth=None):\n request_args = self._make_request_args(descriptor_msg, auth)\n try:\n response = requests.put(**request_args)\n response.raise_for_status()\n except requests.exceptions.ConnectionError as e:\n msg = \"Could not connect to restconf endpoint: %s\" % str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.HTTPError as e:\n msg = \"PUT request to %s error: %s\" % (request_args[\"url\"], response.text)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.Timeout as e:\n msg = \"Timed out connecting to restconf endpoint: %s\", str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e", "def credential(self, credential):\n\n self._credential = credential", "def credential(self, credential):\n\n self._credential = credential", "async def set_profile_description(self, ctx, *, description: str):\n max_words = self.plugin.data.profile.max_description_length\n if len(description) > max_words:\n res = f\"{ctx.emotes.web_emotion.xx} Sorry but profile description cannot exceed {max_words} word limit.\"\n return await ctx.send_line(res)\n profile = await self.cache.get_profile(ctx.author.id)\n await profile.set_description(description)\n embed = self.bot.theme.embeds.primary(title=\"✅ Your Profile Description has been updated to:\")\n embed.set_author(name=ctx.author.name, icon_url=ctx.author.avatar_url)\n embed.description = profile.description\n await ctx.send(\"\", embed=embed)", "def update_description(self, option, desc):\n _, command = self.__options[option]\n self.__options[option] = (desc, command)", "def update_experiment_description(self, experiment_id, description):\n return self.dbclient.update_by_id(Tables.EXPERIMENTS, experiment_id, {\n ExperimentAttr.DESC: description\n })", "def challenge_description(self, challenge_description):\n\n self._challenge_description = challenge_description", "def description(self, new_description):\r\n self.set({\"description\": new_description})", "def set_desc(self, item_desc):\r\n self.description = item_desc", "async def store_credential(\n self, cred_ex_record: V20CredExRecord, cred_id: str = None\n ) -> None:", "async def 
receive_credential(\n self, cred_ex_record: V20CredExRecord, cred_issue_message: V20CredIssue\n ) -> None:", "def set_description(desc):\n global last_description\n last_description = desc", "def set_cred(self, cred):\n self.cred = cred\n self.dirty = False", "def add_credential(self, authenticator_id, credential):\n pass", "def request_description_update():\n global should_update_description\n should_update_description = True", "async def slashtag_edit_description(\n self, ctx: commands.Context, tag: GuildTagConverter, *, description: str\n ):\n await ctx.send(await tag.edit_description(description))", "async def issue_credential(\n self, cred_ex_record: V20CredExRecord, retries: int = 5\n ) -> CredFormatAttachment:", "def set_description(self, desc: str) -> None:\n self.metadata.data[\"description\"] = desc", "def description(self, description) :\n\t\ttry :\n\t\t\tself._description = description\n\t\texcept Exception as e:\n\t\t\traise e", "def update_password(self, username, password):\n self.update(('Password', password), username)", "def set_edit_text(self, newtext, md5):\n # convert to CRLF line endings\n newtext = convert_endings(newtext, 'CRLF')\n newtext = newtext.encode('utf8')\n edit_info = {'plan': newtext,\n 'edit_text_md5': md5,\n 'submit': 'Change Plan'}\n response = self._get_page('edit.php', post=edit_info)\n soup = bs4.BeautifulSoup(response.text, \"html5lib\")\n alert = soup.find('div', {'class': 'alertmessage'})\n info = soup.find('div', {'class': 'infomessage'})\n if alert is not None:\n # some kind of error\n msg = self._parse_message(alert)\n raise PlansError(msg['body'])\n elif info is None:\n raise PlansError('Plans did not verify update')\n else:\n # probably success\n msg = self._parse_message(info)\n return msg['body']", "def set_credentials():", "def add_description(self, desc):\n self.description = desc", "def update(challenge, request):\n challenge.name = request.form['name']\n challenge.description = request.form['description']\n challenge.value = int(request.form.get('value', 0)) if request.form.get('value', 0) else 0\n challenge.max_attempts = int(request.form.get('max_attempts', 0)) if request.form.get('max_attempts', 0) else 0\n challenge.unlock_at = int(request.form.get('unlock_at', 0)) if request.form.get('unlock_at', 0) else 0\n challenge.category = request.form['category']\n challenge.hidden = 'hidden' in request.form\n db.session.commit()\n db.session.close()", "def credential(self, value):\n credential = self.organization.get_credential_by_name_with_type_id(value,\n self.credential._data.get('credential_type'))\n if not credential:\n raise InvalidCredential(value)\n self._update_values('credential', credential.id)", "async def create_credential_offer(self, credential_definition_id: str) -> str:\n try:\n async with self._profile.session() as session:\n cred_def = await session.handle.fetch(\n CATEGORY_CRED_DEF, credential_definition_id\n )\n key_proof = await session.handle.fetch(\n CATEGORY_CRED_DEF_KEY_PROOF, credential_definition_id\n )\n except AskarError as err:\n raise IndyIssuerError(\"Error retrieving credential definition\") from err\n if not cred_def or not key_proof:\n raise IndyIssuerError(\n \"Credential definition not found for credential offer\"\n )\n try:\n # The tag holds the full name of the schema,\n # as opposed to just the sequence number\n schema_id = cred_def.tags.get(\"schema_id\")\n cred_def = CredentialDefinition.load(cred_def.raw_value)\n\n credential_offer = CredentialOffer.create(\n schema_id or cred_def.schema_id,\n 
cred_def,\n key_proof.raw_value,\n )\n except CredxError as err:\n raise IndyIssuerError(\"Error creating credential offer\") from err\n\n return credential_offer.to_json()", "def update(self, customerguid, name=\"\", login=\"\", password=\"\", email=\"\", address=\"\", vat=\"\", jobguid=\"\", executionparams=None):", "def set_desc(self, desc: str):\n self._desc = desc", "def SetDescription(self, description):\n self.description = str(description)", "def edit_description(self, task, new_description):\n raise ValueError(\"cannot edit description in 'In Progress' status\")", "def set_description(self, sNewShareDescription):\n\t\tcall_sdk_function('PrlShare_SetDescription', self.handle, sNewShareDescription)", "def update(\n self,\n email,\n company_name,\n location,\n job_profile,\n salary,\n username,\n password,\n security_question,\n security_answer,\n notes,\n date_applied,\n status,\n):", "def put(self, url, credential, email):\n if not credential:\n return 1\n auths = self._read_all()\n auths[url] = {\"auth\": credential, \"email\": email, }\n self._shred()\n return self._write_all(auths)", "def save_credentials(credentials):\n credentials. save_details()", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def SetPassword(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def credential_id(self, credential_id):\n\n self._credential_id = credential_id", "def SetUp(self) :\n self.new_credential = Credential(\"snapchat\", \"chat@gmail.com\", \"Chat001\") # new credential", "def set_description(self, description):\n self.description = description\n if not self.record:\n return\n self.mdb.results.update({'_id':self.result_id}, \n {'$set':{'test_case':description}})", "def set_description(self, sNewDescription):\n\t\tcall_sdk_function('PrlVmDev_SetDescription', self.handle, sNewDescription)", "def set_description(self, description):\n self.description = description", "def set_description(self, description):\r\n self.__description = description", "def set_description(self, room_description):\n self.description = room_description", "async def set_chat_description(self, chat_id: typing.Union[base.Integer, base.String],\n description: typing.Union[base.String, None] = None) -> base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SET_CHAT_DESCRIPTION, payload)\n\n return result", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = 
description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def set_description(self, data):\n self._description = self._uni(data)", "def setDescription(self, valueName, valueDescription):\n\t\tself.settings[valueName][1] = valueDescription", "def description(self, description):\n\n self._set_field(\"description\", description)", "def description(request):\n if request.method != 'POST':\n description = request.issue.description or \"\"\n return HttpTextResponse(description)\n if not request.issue.edit_allowed:\n if not IS_DEV:\n return HttpTextResponse('Login required', status=401)\n issue = request.issue\n issue.description = request.POST.get('description')\n issue.put()\n return HttpTextResponse('')", "def _append_descriptions(self, issue, dep_name, dep_latest_version):\n logging.info(\"Updating JIRA issue {0} to track {1} upgrade process\".format(\n issue.key,\n dep_name))\n description = issue.fields.description + \"\"\"\\n\\n{0}\\n\n Please review and upgrade the {1} to the latest version {2} \\n \n cc: \"\"\".format(\n datetime.today(),\n dep_name,\n dep_latest_version\n )\n _, owners = self._find_owners(dep_name)\n for owner in owners:\n description += \"[~{0}], \".format(owner)\n try:\n self.jira.update_issue(issue, description=description)\n except Exception as e:\n traceback.print_exc()\n logging.error(\"Failed updating issue: \"+ str(e))", "def setServiceDescription(self, description):\n 
with self.zeroconf.lock:\n self.zeroconf.outbox.put(description)" ]
[ "0.6017124", "0.5905993", "0.57960504", "0.5608559", "0.551452", "0.54189515", "0.54140085", "0.54125625", "0.5373373", "0.5373373", "0.5362979", "0.52861714", "0.5260703", "0.5092259", "0.50158775", "0.49928087", "0.49136016", "0.49073347", "0.48982912", "0.48782173", "0.48606429", "0.4858133", "0.48535436", "0.48431158", "0.48224095", "0.48135018", "0.4807112", "0.47960526", "0.47939304", "0.47784913", "0.47728783", "0.4758105", "0.4755274", "0.47433016", "0.47403654", "0.47316313", "0.4708496", "0.46996537", "0.46971458", "0.46964005", "0.46935272", "0.46766517", "0.46766517", "0.46766517", "0.46766517", "0.46747887", "0.46736124", "0.46692005", "0.46613714", "0.46533507", "0.46526065", "0.46416327", "0.46312132", "0.4624938", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46182138", "0.46177736", "0.46130028", "0.46020412", "0.45924947", "0.45911086", "0.4590221" ]
0.56239367
3
Updates the specified tag definition. Setting `validator` determines the value type. Tags can use either a static value or a list of possible values. Static values are entered by a user applying the tag to a resource. Lists are created by you and the user must apply a value from the list. On update, any values in a list that were previously set do not change, but new values must pass validation. Values already applied to a resource do not change. You cannot remove list values that appear in a TagDefault. To remove a list value that appears in a TagDefault, first update the TagDefault to use a different value.
def update_tag(self, tag_namespace_id, tag_name, update_tag_details, **kwargs): resource_path = "/tagNamespaces/{tagNamespaceId}/tags/{tagName}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_tag got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagNamespaceId": tag_namespace_id, "tagName": tag_name } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_tag_details, response_type="Tag") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_tag_details, response_type="Tag")
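The query text above describes the validator semantics; the sketch below shows one way to switch a tag definition to a list of allowed values through the same client. It assumes the standard `oci` package, and the namespace OCID, tag name, and value list are example placeholders.

# A minimal sketch of updating a tag definition to use an enum validator,
# assuming the standard OCI Python SDK setup; names are placeholders.
import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# EnumTagDefinitionValidator restricts users to values from this list;
# per the docs above, values already applied to resources do not change.
details = oci.identity.models.UpdateTagDetails(
    description="Cost-center tag with a fixed value list",
    validator=oci.identity.models.EnumTagDefinitionValidator(
        values=["finance", "engineering", "operations"]
    ),
)

response = identity.update_tag(
    tag_namespace_id="ocid1.tagnamespace.oc1..exampleuniqueID",
    tag_name="cost-center",
    update_tag_details=details,
)
print(response.data.validator)  # response.data is a Tag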
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validator(self, value: Optional[Dict[str, Any]]):\n self._validator = value", "def clean_value(tag):\n value = tag.get('v')\n if is_street_name(tag):\n return update_name(value, mapping)\n elif is_state(tag):\n return update_state(value)\n elif is_phone(tag):\n return update_phone(value)\n elif is_postcode(tag):\n return update_postcode(value)\n else:\n return value", "def update_tag(tag):\n remove_tag(tag)\n add_tag(tag)", "def validate(self):\n if self.validator is None:\n raise NoValidatorError('Field %s has no validator assigned.' %\n self.id)\n self.value = self.validator(self.value)", "def update(self, instance, validated_data):", "def update(self, instance, validated_data):\n pass", "def __set__(self, message_instance, value):\n # Reaches in to message instance directly to assign to private tags.\n if value is None:\n if self.repeated:\n raise ValidationError(\n 'May not assign None to repeated field %s' % self.name)\n else:\n message_instance._Message__tags.pop(self.number, None)\n else:\n if self.repeated:\n value = FieldList(self, value)\n else:\n value = self.validate(value)\n message_instance._Message__tags[self.number] = value", "def __set__(self, message_instance, value):\n # Reaches in to message instance directly to assign to private tags.\n if value is None:\n if self.repeated:\n raise ValidationError(\n 'May not assign None to repeated field %s' % self.name)\n else:\n message_instance._Message__tags.pop(self.number, None)\n else:\n if self.repeated:\n value = FieldList(self, value)\n else:\n value = self.validate(value)\n message_instance._Message__tags[self.number] = value", "def update_tags(self, tags, **kwargs):\n request = RequestMiddleware.get_request()\n is_admin = request.user and request.user.is_admin\n # Keep all tags that start with pf: because they are reserved.\n preserved = [tag for tag in self.tags if tag.startswith('pf:')]\n if is_admin:\n remove = [tag[1:] for tag in tags if tag.startswith('-pf:')]\n preserved = [tag for tag in preserved if tag not in remove]\n\n # Filter out new tags that are invalid or reserved.\n accepted = [tag for tag in tags\n if TAG_REGEX_COMPILED.match(tag)\n and (is_admin or not tag.startswith('pf:'))]\n # Limit the number of tags per entity.\n if len(accepted + preserved) > settings.MAX_TAGS_PER_ENTITY:\n accepted = accepted[:settings.MAX_TAGS_PER_ENTITY - len(preserved)]\n self.tags = list(set(accepted + preserved))", "def update_tag_default(self, tag_default_id, update_tag_default_details, **kwargs):\n resource_path = \"/tagDefaults/{tagDefaultId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\",\n \"opc_request_id\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_tag_default got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagDefaultId\": tag_default_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing),\n \"opc-request-id\": kwargs.get(\"opc_request_id\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not 
missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_tag_default_details,\n response_type=\"TagDefault\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_tag_default_details,\n response_type=\"TagDefault\")", "def _validate_tags(\n instance: typing.Dict[str, typing.Any],\n schema: typing.Dict[str, typing.Any], path: typing.List[str],\n strict: bool = False\n) -> None:\n if not isinstance(instance, dict):\n raise ValidationError('instance must be dict', path)\n valid_keys = {'_type', 'tags'}\n required_keys = valid_keys\n schema_keys = set(instance.keys())\n invalid_keys = schema_keys - valid_keys - opt_federation_keys\n if invalid_keys:\n raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)\n missing_keys = required_keys - schema_keys\n if missing_keys:\n raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)\n if instance['_type'] != 'tags':\n raise ValidationError('expected _type \"tags\"', path)\n if not isinstance(instance['tags'], list):\n raise ValidationError('tags must be list', path)\n errors = []\n tags = []\n for index, item in enumerate(instance['tags']):\n if not isinstance(item, str):\n errors.append(ValidationError('invalid tag type: {}'.format(type(item)), path + ['tags', str(index)]))\n elif item in tags:\n errors.append(ValidationError('duplicate tag: {}'.format(item), path + ['tags', str(index)]))\n elif item.lower() != item:\n errors.append(ValidationError('tag not lowercase: {}'.format(item), path + ['tags', str(index)]))\n elif any(c not in 'abcdefghijklmnopqrstuvwxyz0123456789_-äöüß' for c in item):\n errors.append(ValidationError('tag contains invalid character: {}'.format(item), path + ['tags', str(index)]))\n elif strict and all(c in string.digits for c in item) and not flask.current_app.config['ENABLE_NUMERIC_TAGS']:\n errors.append(ValidationError('numeric tags are not supported', path + ['tags', str(index)]))\n else:\n tags.append(item)\n\n if len(errors) == 1:\n raise errors[0]\n elif len(errors) > 1:\n raise ValidationMultiError(errors)", "def update(self, instance, validated_data):\n\n # Update the Attribute instance\n instance.name = validated_data.get('name', instance.name)\n instance.save()\n\n # If there is no supplied values then do nothing with it\n if validated_data.get('values'):\n # Delete any AttributeValue not included in the request\n value_ids = [item.get('id') for item in validated_data['values']]\n for value in instance.values.all():\n if value.id not in value_ids:\n value.delete()\n\n # Create or update AttributeValue instances that are in the request\n for item in validated_data['values']:\n value = AttributeValue(\n id=item.get('id'),\n name=item['name'],\n value=item['value'],\n attribute=instance)\n value.save()\n\n return instance", "def update(self, tag, params={}, **options):\n path = \"/tags/%s\" % (tag)\n return self.client.put(path, params, **options)", "def cb_receive_tag_set_values(cls, session, node_id, tg_id, tag_id, value):\n tag = super(AvatarHeight, cls).cb_receive_tag_set_values(session, node_id, tg_id, tag_id, value)\n update_3dview(tag.tg.node)\n 
return tag", "def validate_default(self, value):\n return self.__validate(value, self.validate_default_element)", "def update(self, instance, validated_data):\n instance.cat_name = validated_data.get('cat_name', instance.cat_name)\n instance.img = validated_data.get('img', instance.img)\n instance.desc = validated_data.get('desc', instance.desc)\n instance.save()\n return instance", "def run_validator(self, validator):\n if validator is None:\n return\n\n value = self.value\n\n # Boolean validator\n if validator is bool:\n # Value must \"look like\" a boolean value\n if InvenTree.helpers.is_bool(value):\n # Coerce into either \"True\" or \"False\"\n value = InvenTree.helpers.str2bool(value)\n else:\n raise ValidationError({\n 'value': _('Value must be a boolean value')\n })\n\n # Integer validator\n if validator is int:\n\n try:\n # Coerce into an integer value\n value = int(value)\n except (ValueError, TypeError):\n raise ValidationError({\n 'value': _('Value must be an integer value'),\n })\n\n # If a list of validators is supplied, iterate through each one\n if type(validator) in [list, tuple]:\n for v in validator:\n self.run_validator(v)\n\n if callable(validator):\n # We can accept function validators with a single argument\n\n if self.is_bool():\n value = self.as_bool()\n\n if self.is_int():\n value = self.as_int()\n\n validator(value)", "def validate_default_element(self, value):\n return self.validate_element(value)", "def update(self, instance, validated_data):\n if 'status' in validated_data:\n instance.status = validated_data['status']\n if 'description' in validated_data:\n instance.description = validated_data['description']\n if 'was_deleted' in validated_data:\n instance.was_deleted = validated_data['was_deleted']\n instance.save()\n return instance", "def update(self, instance, validated_data):\n if 'status' in validated_data:\n instance.status = validated_data['status']\n if 'description' in validated_data:\n instance.description = validated_data['description']\n if 'was_deleted' in validated_data:\n instance.was_deleted = validated_data['was_deleted']\n instance.save()\n return instance", "def _update_default(self, default_value):\n if self.type == \"uri_folder\" or self.type == \"uri_file\":\n self.default = default_value\n return\n else:\n if isinstance(default_value, float) and not math.isfinite(default_value):\n # Since nan/inf cannot be stored in the backend, just ignore them.\n # logger.warning(\"Float default value %r is not allowed, ignored.\" % default_value)\n return\n \"\"\"Update provided default values.\n Here we need to make sure the type of default value is allowed or it could be parsed..\n \"\"\"\n if default_value is not None and not isinstance(default_value, self._allowed_types):\n try:\n default_value = self._parse(default_value)\n except Exception as e:\n if self.name is None:\n msg = \"Default value of %s Input cannot be parsed, got '%s', type = %s.\" % (\n self.type,\n default_value,\n type(default_value),\n )\n else:\n msg = \"Default value of %s Input '%s' cannot be parsed, got '%s', type = %s.\" % (\n self.type,\n self.name,\n default_value,\n type(default_value),\n )\n raise MldesignerComponentDefiningError(cause=msg) from e\n self.default = default_value", "def post_validated(self, struct, item, value):\n return value", "def cb_receive_tag_set_values(cls, session, node_id, tg_id, tag_id, value):\n tag = super(AvatarWidth, cls).cb_receive_tag_set_values(session, node_id, tg_id, tag_id, value)\n update_3dview(tag.tg.node)\n return tag", "def 
post(self):\n args = change_tag_or_sentiment_parser.parse_args() \n sentence = args[\"sentence\"]\n value = args[\"value\"]\n whether_allowed = args[\"whether_allowed\"]\n\n if not whether_allowed:\n return {\"success\": False,\n \"error\": True,\n \"messege\": \"Right now, Updating Tags or sentiments are not allowed\",\n }\n\n\n tag_list = [\"food\", \"service\", \"cost\", \"null\", \"ambience\", \"overall\"]\n sentiment_list = [\"positive\", \"super-positive\", \"neutral\", \"negative\", \"super-negative\", \"mixed\"]\n\n print value, sentence\n if not value in (tag_list+sentiment_list):\n return {\"success\": False,\n \"error\": True,\n \"messege\": \"Error occured\",\n }\n\n if value in [\"food\", \"service\", \"cost\", \"null\", \"ambience\", \"overall\"]:\n training_tag_collection.update({\"sentence\": sentence}, {\"$set\": {\n \"review_id\": \"misc\",\n \"tag\": value, }}, upsert=True)\n print \"tag updated\"\n\n if value in [\"positive\", \"super-positive\", \"neutral\", \"negative\", \"super-negative\"]:\n training_sentiment_collection.update({\"sentence\": sentence}, {\"$set\": {\n \"review_id\": \"misc\",\n \"sentiment\": value,\n }}, upsert=True)\n print \"sentiment updated\"\n return {\"success\": True,\n \"error\": False,\n \"messege\": \"Updated!!!\",\n }", "def set_definition(self, definition):\n return self.client._perform_json(\n \"PUT\", \"/admin/groups/%s\" % self.name,\n body = definition)", "def set_tag(self, val):\n self.__tag__ = val", "def validate_input(self, definition):\n \"\"\"Implement your own validation logic to validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n opt_labels = definition.parameters.get('label', None)\n pass", "def validate_attribute_values(tag,\n attribute_name,\n attribute_value,\n default_values):\n if not attribute_value:\n return\n\n if attribute_value not in default_values:\n raise TagAttributeError('<{tag}>: {attribute_name} attribute '\n 'values should be one of these: {values}'\n .format(tag=tag,\n attribute_name=attribute_name,\n values=','.join(default_values)))", "def set_tagtype(tagvec, tagtype, tag, lang_name):\n\n if tagtype == \"Definiteness\" and tagvec[0] is None:\n set_tagtype(tagvec, \"POS\", \"DET\", lang_name)\n\n i = TAGTYPE_INDICES[tagtype]\n\n if tagvec[i] is not None and tagvec[i] != tag and tagvec[i] != \"DET\":\n if lang_name in UNIMORPH_CONFLICT_RESOLUTION:\n for ordering in (tag, tagvec[i]), (tagvec[i], tag):\n if ordering in UNIMORPH_CONFLICT_RESOLUTION[lang_name]:\n tagvec[i] = UNIMORPH_CONFLICT_RESOLUTION[lang_name][ordering]\n return\n\n raise ValueError(f\"Warning: setting {tagtype} to {tag}, already set to {tagvec[i]}\")\n\n tagvec[i] = tag", "def update(self, instance: Snippet, validated_data: dict) -> Snippet:\n instance.title = validated_data.get('title', default=instance.title)\n instance.code = validated_data.get('code', default=instance.code)\n instance.language = validated_data.get('language', default=instance.language)\n instance.style = validated_data.get('style', default=instance.style)\n instance.save()\n return instance", "def cb_receive_tag_set_values(cls, session, node_id, tg_id, tag_id, value):\n tag = super(AvatarLens, cls).cb_receive_tag_set_values(session, node_id, tg_id, tag_id, value)\n update_3dview(tag.tg.node)\n return tag", "def validate_default_element(self, value):\n if isinstance(value, (six.string_types, six.integer_types)):\n # Validation of the value does not happen for delayed resolution\n # enumerated types. 
Ignore if type is not yet resolved.\n if self.__type:\n self.__type(value)\n return value\n\n return super(EnumField, self).validate_default_element(value)", "def update_widget_attribute(self, pyte_widget: Pyted_Widget_Type, attr: str,\n new_value: Union[str, bool, List[str]],\n init=False) -> Union[None, tuple]:\n\n old_value = getattr(pyte_widget, attr)\n\n if not init:\n setattr(pyte_widget, attr, new_value)\n\n try:\n tk_widget = pyte_widget.tk_name\n except AttributeError:\n tk_widget = None\n\n attr_template = pyte_widget.get_code_template(attr)\n\n if attr_template == pyted_widget_types.CONFIG_CODE:\n tk_widget[attr] = getattr(pyte_widget, attr)\n\n elif attr_template == pyted_widget_types.TITLE_CODE:\n return\n\n elif attr_template == pyted_widget_types.GRID_CODE:\n if init:\n # when user form is drawn grid placement will be handled by user form initialisation code\n return\n try:\n old_position = {'row': tk_widget.grid_info()['row'], 'column': tk_widget.grid_info()['column']}\n new_position = {'row': tk_widget.grid_info()['row'], 'column': tk_widget.grid_info()['column']}\n except KeyError:\n # widget has remove set true so no need to update tk_widget\n return\n new_attr_val = getattr(pyte_widget, attr)\n new_position[attr] = new_attr_val\n if (int(new_position['row']) >= int(self.widgets.find_pyte_parent(pyte_widget).number_rows) or\n int(new_position['column']) >= int(self.widgets.find_pyte_parent(pyte_widget).number_columns)):\n # pyte_widget.row = old_position['row']\n # pyte_widget.column = old_position['column']\n pyte_widget.remove = True\n pyte_widget.tk_name.grid_remove()\n self.handles.remove_selected_widget_handles()\n self.user_form.new_filler_label(self.widgets.find_tk_parent(pyte_widget),\n old_position['column'], old_position['row'])\n messagebox.showwarning('Widget being moved off grid',\n 'Row or column greater than grid size. Widget has been removed. '\n 'To get widget back move back onto grid and set remove to false')\n else:\n\n filler_widget = self.widgets.find_tk_parent(pyte_widget).grid_slaves(row=new_position['row'],\n column=new_position['column'])[0]\n if filler_widget not in self.user_form.filler_labels and filler_widget != pyte_widget.tk_name:\n # trying to move widget onto existing widget\n pyte_widget.remove = True\n pyte_widget.tk_name.grid_remove()\n self.handles.remove_selected_widget_handles()\n self.user_form.new_filler_label(self.widgets.find_tk_parent(pyte_widget),\n old_position['column'], old_position['row'])\n messagebox.showwarning('Widget being moved onto existing widget',\n 'Row and column the same as another widget. Widget has been removed. 
'\n 'To get widget back move back onto empty slot and set remove to false')\n return\n filler_widget.grid(row=old_position['row'], column=old_position['column'])\n tk_widget.grid({attr: new_attr_val})\n self.handles.place_selected_widget_handles(pyte_widget.tk_name)\n\n elif attr_template == pyted_widget_types.GRID_SIZE_CODE:\n if init:\n # when user form is drawn the widget parent will be handled by user form initialisation code\n return\n self.user_form.empty_tk_container_widget(pyte_widget)\n self.user_form.fill_tk_container_frame(pyte_widget)\n self.handles.place_selected_widget_handles(pyte_widget.tk_name)\n\n elif attr_template == pyted_widget_types.ROW_CONFIGURE or attr_template == pyted_widget_types.COLUMN_CONFIGURE:\n # row and column configuration handled elsewhere in program\n pass\n\n elif attr_template == pyted_widget_types.BESPOKE_CODE and attr == 'remove':\n if init:\n # when user form is drawn grid_remove will be handled by user form initialisation code\n return\n\n tk_widget_in_grid = not(len(pyte_widget.tk_name.grid_info()) == 0)\n if getattr(pyte_widget, 'remove'):\n if tk_widget_in_grid:\n widget_to_hide = pyte_widget\n self.user_form.new_filler_label(self.widgets.find_tk_parent(widget_to_hide), widget_to_hide.column,\n widget_to_hide.row)\n widget_to_hide.tk_name.grid_remove()\n self.handles.remove_selected_widget_handles()\n else:\n # remove attribute is false, if widget not displayed then try to display it\n if not tk_widget_in_grid:\n # check that the widget is on the grid\n if (int(pyte_widget.row) >= int(self.widgets.find_pyte_parent(pyte_widget).number_rows) or\n int(pyte_widget.column) >= int(self.widgets.find_pyte_parent(pyte_widget).number_columns)):\n messagebox.showwarning('Widget off grid',\n 'Row or column greater than grid size. '\n 'To get widget back move back onto grid and set remove to false')\n setattr(pyte_widget, 'remove', True)\n return\n # check that there is not a widget already visible\n filler_widget = self.widgets.find_tk_parent(pyte_widget).grid_slaves(row=pyte_widget.row,\n column=pyte_widget.column)[0]\n if filler_widget not in self.user_form.filler_labels:\n pyte_widget.remove = True\n pyte_widget.tk_name.grid_remove()\n # self.remove_selected_widget_handles()\n messagebox.showwarning('Existing widget at grid location',\n 'Row and column the same as another widget. 
'\n 'To get widget back move onto empty slot and set remove to false')\n return\n # remove filler label and show user widget\n filler_widget = self.widgets.find_tk_parent(pyte_widget).grid_slaves(row=pyte_widget.row,\n column=pyte_widget.column)[0]\n filler_widget.grid_forget()\n filler_widget.destroy()\n pyte_widget.tk_name.grid(row=pyte_widget.row, column=pyte_widget.column)\n self.handles.place_selected_widget_handles(pyte_widget.tk_name)\n\n elif attr_template == pyted_widget_types.BESPOKE_CODE and attr == 'name':\n if init:\n # when user form is drawn the widget name will be handled by user form initialisation code\n return\n # check name is really changed\n if new_value == old_value:\n return\n # check name is not already taken\n for i_pyte_widget in self.widgets.widget_list:\n if i_pyte_widget != pyte_widget:\n if pyte_widget.name == i_pyte_widget.name:\n # can't messagebox here as this would move focus out of entry box and cause binding to run again\n # messagebox.showwarning('Renaming problem',\n # 'Name already exists for another widget and Name not changed')\n setattr(pyte_widget, attr, old_value)\n return 'Renaming problem', 'Name already exists for another widget and Name not changed'\n for i_pyte_widget in self.widgets.widget_list:\n if i_pyte_widget.parent == old_value:\n i_pyte_widget.parent = new_value\n # self.update_navigator_tree()\n self.navigator_tree_obj.navigator_tree_change_item_name(pyte_widget, old_value)\n # raise Exception(f'renaming widget not yet implemented')\n\n elif attr_template == pyted_widget_types.BESPOKE_CODE and (attr == 'comment'):\n if init:\n # when user form is drawn the tk_name will be handled by user form initialisation code\n return\n return\n\n elif attr_template == pyted_widget_types.BESPOKE_CODE and (attr == 'win_close'):\n if init:\n # when user form is drawn the tk_name will be handled by user form initialisation code\n return\n return\n\n elif attr_template == pyted_widget_types.BESPOKE_CODE and (attr == 'tab_text'):\n if init:\n # when user form is drawn the tk_name will be handled by user form initialisation code\n return\n tk_parent = self.widgets.find_tk_parent(pyte_widget)\n if isinstance(tk_parent, ttk.Notebook):\n tk_parent.tab(pyte_widget.tk_name, text=new_value)\n # self.widgets.find_tk_parent(pyte_widget).tab(pyte_widget.tk_name, text=new_value)\n return\n\n elif attr_template == pyted_widget_types.BESPOKE_CODE and attr == 'tk_name':\n if init:\n # when user form is drawn the tk_name will be handled by user form initialisation code\n return\n raise Exception(f'renaming tk_name for widget should not occur')\n\n elif attr_template == pyted_widget_types.BESPOKE_CODE and attr == 'parent':\n # not used as parent attribute not shown in attribute edit frame\n if init:\n # when user form is drawn the widget parent will be handled by user form initialisation code\n return\n raise Exception(f'renaming widget parent not yet implemented')\n\n elif attr_template == pyted_widget_types.VAR_SET_CODE:\n setattr(pyte_widget, pyted_widget_types.VAR_SET_CODE, new_value)\n\n elif attr_template.startswith('<'):\n if init:\n # when user form is drawn the widget parent will be handled by user form initialisation code\n return\n return\n\n else:\n raise Exception(f'attr_template \"{attr_template}\" for \"{attr}\" not yet configured')\n # print(f'attr_template {attr_template} not yet implemented for {attr}')", "def update(self, instance, validated_data):\n instance.title = validated_data.get('title', instance.title)\n instance.inspection_tag = 
validated_data.get('inspection_tag', instance.code)\n instance.content = validated_data.get('content', instance.language)\n instance.status = validated_data.get('status', instance.style)\n instance.save()\n return instance", "def update(self, instance, validated_data):\n instance.title = validated_data.get('title', instance.title)\n instance.inspection_tag = validated_data.get('inspection_tag', instance.code)\n instance.content = validated_data.get('content', instance.language)\n instance.status = validated_data.get('status', instance.style)\n instance.save()\n return instance", "def make_generic_v1_field_validator(validator: V1Validator) -> core_schema.FieldValidatorFunction:\n sig = signature(validator)\n\n needs_values_kw = False\n\n for param_num, (param_name, parameter) in enumerate(sig.parameters.items()):\n if can_be_keyword(parameter) and param_name in ('field', 'config'):\n raise PydanticUserError(\n 'The `field` and `config` parameters are not available in Pydantic V2, '\n 'please use the `info` parameter instead.',\n code='validator-field-config-info',\n )\n if parameter.kind is Parameter.VAR_KEYWORD:\n needs_values_kw = True\n elif can_be_keyword(parameter) and param_name == 'values':\n needs_values_kw = True\n elif can_be_positional(parameter) and param_num == 0:\n # value\n continue\n elif parameter.default is Parameter.empty: # ignore params with defaults e.g. bound by functools.partial\n raise PydanticUserError(\n f'Unsupported signature for V1 style validator {validator}: {sig} is not supported.',\n code='validator-v1-signature',\n )\n\n if needs_values_kw:\n # (v, **kwargs), (v, values, **kwargs), (v, *, values, **kwargs) or (v, *, values)\n val1 = cast(V1ValidatorWithValues, validator)\n\n def wrapper1(value: Any, info: core_schema.FieldValidationInfo) -> Any:\n return val1(value, values=info.data)\n\n return wrapper1\n else:\n val2 = cast(V1OnlyValueValidator, validator)\n\n def wrapper2(value: Any, _: core_schema.FieldValidationInfo) -> Any:\n return val2(value)\n\n return wrapper2", "def validate_updates(taglist):\n preexisting_keys = list_of_keys_of(taglist.current_list)\n keys_of_tags_to_update = unicode_decode_keys(list_of_keys_of(taglist.updates))\n\n non_existent_key_set = list(set(keys_of_tags_to_update) - set(preexisting_keys))\n\n if non_existent_key_set:\n raise_validation_error(\n problematic_key_set=non_existent_key_set,\n problem_message=strings['tags.tag_keys_dont_exist_for_update'],\n exception_class=InvalidAttemptToModifyTagsError\n )", "def update(context, namespace_name, id, values, session):\n namespace_api.get(context, namespace_name, session)\n\n metadata_tag = _get(context, id, session)\n metadef_utils.drop_protected_attrs(models.MetadefTag, values)\n # values['updated_at'] = timeutils.utcnow() - done by TS mixin\n try:\n metadata_tag.update(values.copy())\n metadata_tag.save(session=session)\n except db_exc.DBDuplicateEntry:\n LOG.debug(\"Invalid update. 
It would result in a duplicate\"\n \" metadata tag with same name=%(name)s\"\n \" in namespace=%(namespace_name)s.\",\n {'name': values['name'],\n 'namespace_name': namespace_name})\n raise exc.MetadefDuplicateTag(\n name=values['name'], namespace_name=namespace_name)\n\n return metadata_tag.to_dict()", "def add_validator(self, validator: Callable, name: str) -> None:\n if self._get_hcell().get(\"UNTRANSLATED\"):\n raise AttributeError(\n \"Cannot invoke Cell.add_validator: cell must be translated first\"\n )\n return self.handle.add_validator(validator, name=name)", "def validate_default_element(self, value):\n return self.validate_element(value)", "def update(self, instance, validated_data):\n instance.description = validated_data.get('description', instance.description)\n instance.tender_number = validated_data.get('tender_number', instance.tender_number)\n instance.start_date = validated_data.get('start_date', instance.start_date)\n instance.end_date = validated_data.get('end_date', instance.end_date)\n instance.save()\n return instance", "def processTags(request, media, form, update):\n if update:\n if 'tags' in request.POST:\n tag_names = form.cleaned_data['tags'].split(',')\n media.tag_set.clear()\n for tag_name in tag_names:\n tag, dummy = Tag.objects.get_or_create(name=tag_name.strip())\n media.tag_set.add(tag)\n media.save()\n else:\n if 'tags' in request.POST:\n tag_names = form.cleaned_data['tags'].split(',')\n for tag_name in tag_names:\n tag, dummy = Tag.objects.get_or_create(name=tag_name.strip())\n media.tag_set.add(tag)\n media.save()", "def update_tag(self, tag):\n resp = self.put(_u.build_uri(\"tag\", domain=self.domain),\n data={'tag': tag})\n return utils.handle_response(resp)", "def validate_batch_jobdef(jobdef_name, jobdef_meta):\n jobdef_config = [\n {\n 'field_name': 'job_definition_name',\n 'field_value': jobdef_name,\n 'prefix': '',\n 'validators': [\n _validate_required_field\n ]\n },\n {\n 'field_name': 'job_definition_type',\n 'field_value': jobdef_meta.get('job_definition_type'),\n 'field_options': JOB_DEFINITION_TYPES,\n 'prefix': '',\n 'required': True,\n 'validators': [\n _validate_options_field\n ]\n },\n {\n 'field_name': 'parameters',\n 'field_value': jobdef_meta.get('parameters'),\n 'prefix': '',\n 'required_type': dict,\n 'validators': [\n _validate_field_type\n ]\n },\n {\n 'field_name': 'retry_strategy',\n 'field_value': jobdef_meta.get('retry_strategy'),\n 'prefix': '',\n 'required_type': dict,\n 'validators': [\n _validate_field_type\n ]\n },\n {\n 'field_name': 'propagate_tags',\n 'field_value': jobdef_meta.get('propagate_tags'),\n 'prefix': '',\n 'required_type': bool,\n 'validators': [\n _validate_field_type\n ]\n },\n {\n 'field_name': 'timeout',\n 'field_value': jobdef_meta.get('timeout'),\n 'prefix': '',\n 'required_type': dict,\n 'validators': [\n _validate_field_type\n ]\n },\n {\n 'field_name': 'tags',\n 'field_value': jobdef_meta.get('tags'),\n 'prefix': '',\n 'required_type': dict,\n 'validators': [\n _validate_field_type\n ]\n },\n {\n 'field_name': 'platform_capabilities',\n 'field_value': jobdef_meta.get('platform_capabilities'),\n 'prefix': '',\n 'required_type': list,\n 'validators': [\n _validate_field_type\n ]\n }\n ]\n\n job_definition_type = jobdef_meta.get('job_definition_type')\n container_properties = jobdef_meta.get('container_properties')\n node_properties = jobdef_meta.get('node_properties')\n\n _process_config(jobdef_config)\n\n if job_definition_type == 'container':\n if not container_properties and not 
node_properties:\n raise AssertionError(\n \"Either 'container_properties' or 'node_properties' must be specified \"\n \"for 'container' job definition type.\"\n )\n if container_properties:\n _validate_container_properties(container_properties)\n if node_properties:\n _validate_node_properties(node_properties)", "def cb_receive_tag_set_values(cls, session, node_id, tg_id, tag_id, value):\n tag = super(AvatarDistance, cls).cb_receive_tag_set_values(session, node_id, tg_id, tag_id, value)\n update_3dview(tag.tg.node)\n return tag", "def _update(self, data=None, _validate=False):\n if data is None:\n # If no data is passed, take no action.\n pass\n elif _mapping_resolver.get_type(data) == \"MAPPING\":\n with self._suspend_sync:\n for key, new_value in data.items():\n try:\n # The most common usage of SyncedCollections is with a\n # single object referencing an underlying resource at a\n # time, so we should almost always find that elements\n # of data are already contained in self._data, so EAFP\n # is the best choice for performance.\n existing = self._data[key]\n except KeyError:\n # If the item wasn't present at all, we can simply\n # assign it.\n if not _validate:\n self._validate({key: new_value})\n self._data[key] = self._from_base(new_value, parent=self)\n else:\n if new_value == existing:\n continue\n if _sc_resolver.get_type(existing) == \"SYNCEDCOLLECTION\":\n try:\n existing._update(new_value)\n continue\n except ValueError:\n pass\n\n # Fall through if the new value is not identical to the\n # existing value and\n # 1) The existing value is not a SyncedCollection\n # (in which case we would have tried to update it), OR\n # 2) The existing value is a SyncedCollection, but\n # the new value is not a compatible type for _update.\n if not _validate:\n self._validate({key: new_value})\n self._data[key] = self._from_base(new_value, parent=self)\n\n to_remove = [key for key in self._data if key not in data]\n for key in to_remove:\n del self._data[key]\n else:\n raise ValueError(\n \"Unsupported type: {}. The data must be a mapping or None.\".format(\n type(data)\n )\n )", "def on_widget_val_changed(self, val: Data):\n self.update_node_input(val, silent=self._prevent_update.blocked)", "def _update_entry(self, section, key, value):\n entries = section[key] if key in section else []\n if type(entries) != list:\n entries = [entries]\n if len(entries) < self.dom_id - 2:\n raise ValueError('Cannot set namelist value for domain %d, previous domains not filled out.' 
% self.dom_id)\n if len(entries) <= self.dom_id - 1:\n entries.append(value)\n else:\n entries[self.dom_id-1] = value\n section[key] = entries", "def listfield_validate_method(self, value_list, model_instance):\r\n if not self.editable:\r\n # Skip validation for non-editable fields\r\n return\r\n #Validate choices\r\n if self.choices:\r\n valid_values = []\r\n for choice in self.choices:\r\n if isinstance(choice[0], (list, tuple)):\r\n #this is an optgroup, so look inside it for the options\r\n for optgroup_choice in choice[0]:\r\n valid_values.append(optgroup_choice[0])\r\n else:\r\n valid_values.append(choice[0])\r\n for value in value_list:\r\n if value not in value_list:\r\n #TODO: if there is more than 1 invalid value then this should show all of the invalid values\r\n raise ValidationError(self.error_messages['invalid_choice'] % value)\r\n #Validate null-ness\r\n if value_list is None and not self.null:\r\n raise ValidationError(self.error_messages['null'])\r\n\r\n if not self.blank and value in EMPTY_VALUES:\r\n raise ValidationError(self.error_messages['blank'])", "def update_tag(request_form, tag_id):\n values = {'tag': request_form.get('tag').lower()}\n db_session.query(Tags).filter_by(id=tag_id).update(values)\n db_session.commit()\n return 'Updated tag #%s: %s.' % (tag_id, values['tag']), 'success'", "def updateWidget(self, name, pos, value):\n try:\n self.widgets[\"%s%d.entry\" % (name, pos)].delete(0,'end')\n self.widgets[\"%s%d.entry\" % (name, pos)].insert(0,value)\n except: pass", "def update_existing_entry(client, list_id, mail_addr, merge_fields, l_tags):\n # hash mail address \n mail_h = hash_string(mail_addr)\n # send entry\n try:\n response = client.lists.set_list_member(list_id, mail_h,\n {\"email_address\": mail_addr, \"status_if_new\": \"subscribed\",\n \"status\": \"subscribed\", \"merge_fields\": merge_fields})\n print(response)\n except ApiClientError as error:\n print(\"Error on mail address {}: {}\".format(mail_addr, error.text))\n for tag in l_tags:\n try:\n response = client.lists.update_list_member_tags(list_id, mail_h, \n {\"tags\": [{\"name\": tag, \"status\": \"active\"}]})\n print(response)\n except ApiClientError as error:\n print(\"Error on updating tag '{}' for mail address {}: {}\".format(tag, mail_addr, error.text))", "def __init__(self, name=None, values=None, default_value=None):\n self.swagger_types = {\n 'name': 'str',\n 'values': 'list[TagPropertyAllowedValue]',\n 'default_value': 'str'\n }\n\n self.attribute_map = {\n 'name': 'name',\n 'values': 'values',\n 'default_value': 'defaultValue'\n }\n\n self._name = name\n self._values = values\n self._default_value = default_value", "def _update_default(self, default_value):\n enum_val = self._parse(default_value)\n if self._enum_class and isinstance(enum_val, self._enum_class):\n enum_val = enum_val.value\n self.default = enum_val", "def _update_default(self, default_value):\n enum_val = self._parse(default_value)\n if self._enum_class and isinstance(enum_val, self._enum_class):\n enum_val = enum_val.value\n self.default = enum_val", "def tag_updater(self, tags):\n for tag in tags:\n #check if the tag exists\n exists = False\n tag = self.tags.find_one({'TagName': tag})\n if tag is not None:\n self.tags.update_one({'TagName': tag}, {'$set': {'Count': tag['Count']+1}}) \n else:\n #insert new tag\n Id = self.id_generator(self.tags)\n self.tags.insert_one({\"Id\":Id, \"TagName\":tag, \"Count\":0})", "def update(self, instance, validated_data):\n instance.unit_system = validated_data.get(\n 'system',\n 
instance.unit_system)\n instance.code = validated_data.get('code', instance.code)\n instance.value = validated_data.get('name', instance.name)\n instance.dimension = validated_data.get('date_obj', instance.dimension)\n return instance", "def tags(self, val: list):\n self._tags = []\n if val is not None:\n for item in val:\n self._tags.append(item)", "def validate_default(self, value):\n return self.__validate(value, self.validate_default_element)", "def validate(self):\n for search_tag_name in self.get_search_tag_names():\n search_tag_obj = Tag(search_tag_name)\n for search_tag_value in self.get_search_tag_values(search_tag_name):\n for new_tag_name in self.get_new_tag_names(search_tag_name, search_tag_value):\n new_tag_obj = Tag(new_tag_name)\n new_tag_value = self.get_new_tag_value(search_tag_name, search_tag_value, new_tag_name)\n if new_tag_obj.repeatable:\n if not isinstance(new_tag_value, list):\n raise KeyError('%s needs a list'%(new_tag_name))\n else:\n if isinstance(new_tag_value, list):\n raise KeyError('%s needs a scalar value'%(new_tag_name))", "def run_validators(self, value):\r\n for validator in self.validators:\r\n if isinstance(validator, validators.UniqueTogetherValidator):\r\n self.validators.remove(validator)\r\n super(ProfileSerializer, self).run_validators(value)", "def update(self, instance, validated_data):\n if 'tags' in validated_data:\n tags_data = validated_data.pop('tags')\n for tag_data in tags_data:\n instance.tags.add(tag_data)\n\n super(ProjectSerializer, self).update(instance, validated_data)\n return instance", "def _entry_tag_is_valid(entry: _LexiconEntry) -> None:\n tag = _tag_of(entry)\n\n if tag not in tags.VALID_TAGS:\n raise InvalidLexiconEntryError(\n \"Entry 'tag' field has invalid value. It can only be one of the valid\"\n \" tags that are defined in 'morphotactics_compiler/tags.py'.\")", "def _list4_validator(_: object, attrib: 'attrs.Attribute[List[Vec]]', value: object) -> None:\n if not isinstance(value, list):\n raise TypeError(attrib.name + ' should be a list!')\n if len(value) != 4:\n raise ValueError(attrib.name + ' must have 4 values!')", "def validator_info(self, validator_info):\n\n self._validator_info = validator_info", "def update(self, instance, validated_data):\n instance.title = validated_data.get('title', instance.title)\n instance.code = validated_data.get('code', instance.code)\n instance.linenos = validated_data.get('linenos', instance.linenos)\n instance.language = validated_data.get('language', instance.language)\n instance.style = validated_data.get('style', instance.style)\n instance.save()\n return instance", "def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':\n sig = signature(validator)\n args = list(sig.parameters.keys())\n first_arg = args.pop(0)\n if first_arg == 'self':\n raise ConfigError(\n f'Invalid signature for validator {validator}: {sig}, \"self\" not permitted as first argument, '\n f'should be: (cls, value, values, config, field), \"values\", \"config\" and \"field\" are all optional.'\n )\n elif first_arg == 'cls':\n # assume the second argument is value\n return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:])))\n else:\n # assume the first argument was value which has already been removed\n return wraps(validator)(_generic_validator_basic(validator, sig, set(args)))", "def validate(self, list_value):\n errors = DataDefinition.validate(self, list_value)\n if errors:\n return errors\n\n for index, value in enumerate(list_value):\n errors = 
self.element_type.validate(value)\n if errors:\n msg = message_factory.get_message(\n 'vapi.data.list.invalid.entry',\n str(value), index)\n return [msg] + errors\n\n return None", "def pre_validated(self, struct, item, value):\n return value", "def updateTag(self, authenticationToken, tag):\r\n pass", "def update(self, val, feats):\n raise NotImplementedError", "def update(self, instance, validated_data):\n instance.title = validated_data.get('title', instance.title)\n instance.owner = validated_data.get('owner', instance.owner)\n instance.language = validated_data.get('language', instance.language)\n instance.classes = validated_data.get('classes', instance.classes)\n instance.methods = validated_data.get('methods', instance.methods)\n instance.style = validated_data.get('style', instance.style)\n instance.example = validated_data.get('example', instance.example)\n instance.save()\n return instance", "def setValue(self, value):\n if value is None or ((type(value) in [list,tuple]) and not(value)):\n self.clearValue()\n else:\n if isinstance(self.callback, collections.Callable):\n value = self.callback(self.cast(value))\n self.data = self.validate(value)", "def updateWidget(self, name, pos, value):\n try:\n self.widgets[\"%s%d.entry\" % (name, pos)].delete(0,'end')\n self.widgets[\"%s%d.entry\" % (name, pos)].insert(0,str(value))\n except: pass", "def set_definition(self, definition):\n return self.client._perform_json(\n \"PUT\", \"/admin/users/%s\" % self.login,\n body = definition)", "def _validate_optional_value(dictionary, value, classinstance, type_name, yaml_file):\n\n if value in dictionary:\n validate_type(dictionary[value], value, classinstance, type_name, yaml_file)\n del dictionary[value]", "def validate_tags_or_list(x):\n from .. import AWSHelperFn\n\n if isinstance(x, (AWSHelperFn, Tags, list)):\n return x\n\n raise ValueError(f\"Value {x} of type {type(x)} must be either Tags or list\")", "def validate_tag(tag=None):\n if not tag:\n raise AttributeError('Tag cannot be empty')\n\n if tag not in TAGS:\n raise ValueError('{0} tag is not supported')", "def __set__(self, instance, value):\n # Run process for the nested field type for each value in list\n instance._values[self.name] = [self.field.process(v) for v in value]", "def validate(self, instance, value):", "def validate(self, instance, value):", "def edit_tags(self):\n os.system(\"clear\")\n while True:\n tag_categories = [\"meal\", \"genre\", \"complexity\", \"course\", \"no change\"]\n _, key = _num_select(\"Which tag would you like to edit\", tag_categories)\n if key == \"meal\":\n _, value = _num_select(\"Which tag would you like to apply\",\n [\"breakfast\", \"lunch\", \"dinner\"])\n self.tags[key]=value\n elif key == \"genre\":\n genres = [\"american\", \"italian\", \"mexican\", \"asian\", \"indian\", \"misc\"]\n _, value = _num_select(\"Which tag would you like to apply\",\n genres)\n elif key == \"complexity\":\n _, value = _num_select(\"Which tag would you like to apply\",\n [\"simple\", \"intermediate\", \"complicated\"])\n elif key == \"course\":\n _, value = _num_select(\"Which tag would you like to apply\",\n [\"appetizer\", \"salad\", \"side\", \"main\", \"dessert\"])\n else:\n return", "def remove(self, value):\n tags = self.__all_tags()\n if value in tags:\n tags.remove(value)\n self.__post_changes(tags)", "def cb_receive_tag_set_values(cls, session, node_id, tg_id, tag_id, value):\n tag = super(AvatarScene, cls).cb_receive_tag_set_values(session, node_id, tg_id, tag_id, value)\n 
update_3dview(tag.tg.node)\n return tag", "def register_validator(self, validator, validator_advice):\n self.validators.append(validator)\n self.advice_functions[validator] = validator_advice", "def setAddTags(self,value):\n self.PDFreactorConfiguration.in1[\"addTags\"] = value", "def addValidator(self, *args):\n return _libsbml.SBMLDocument_addValidator(self, *args)", "def validate(self, value):\n return self.__validate(value, self.validate_element)", "def define(self, key, default, validator=None, hook=None, strict=False):\n # type: (str, Any, Callable, Callable, bool) -> ConfigValue\n self._check_key(key)\n npath = self._active_path + (key,)\n\n if npath in self._config:\n raise ValueError('Attempted to set existing entry at ' + str(npath))\n\n cfg_value = ConfigValue(self._tmp_comment, self._tmp_docstring, default, npath,\n validator, hook, strict)\n self._config[npath] = cfg_value\n self._tmp_comment = ''\n self._tmp_docstring = ''\n return cfg_value", "def update_tag_with_http_info(self, tag_id, body, **kwargs):\n\n all_params = ['tag_id', 'body']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method update_tag\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'tag_id' is set\n if ('tag_id' not in params) or (params['tag_id'] is None):\n raise ValueError(\"Missing the required parameter `tag_id` when calling `update_tag`\")\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `update_tag`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'tag_id' in params:\n path_params['tag_id'] = params['tag_id']\n\n query_params = []\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['X-Token']\n\n return self.api_client.call_api('/tags/{tag_id}', 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Tag',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def update(self, instance, validated_data):\n instance.config_type = validated_data.get('config_type', instance.config_type)\n instance.upload.delete(False) # delete the old file..\n instance.upload = validated_data.get('upload', instance.upload)\n instance.name = validated_data.get('name', instance.name)\n instance.filename = validated_data.get('filename', instance.filename)\n instance.content_type = validated_data.get('content_type', instance.content_type)\n instance.updated_at = timezone.now()\n instance.save()\n return instance", "def cb_receive_tag_set_values(cls, session, node_id, tg_id, 
tag_id, value):\n tag = super(AvatarLocation, cls).cb_receive_tag_set_values(session, node_id, tg_id, tag_id, value)\n update_3dview(tag.tg.node)\n return tag", "def update_definition(self, course_key, definition):\n bulk_write_record = self._get_bulk_ops_record(course_key)\n if bulk_write_record.active:\n bulk_write_record.definitions[definition['_id']] = definition\n else:\n self.db_connection.insert_definition(definition, course_key)", "def remove_attributes_with_default_values(self, cls: Type) -> None:\n def matches(value_node: yaml.Node, default: Any) -> bool:\n if value_node.tag == 'tag:yaml.org,2002:null':\n return default is None\n\n if value_node.tag == 'tag:yaml.org,2002:int':\n return int(value_node.value) == int(default)\n\n if value_node.tag == 'tag:yaml.org,2002:float':\n return float(value_node.value) == float(default)\n\n if value_node.tag == 'tag:yaml.org,2002:bool':\n if default is False:\n return (\n str(value_node.value).lower() == 'n' or\n str(value_node.value).lower() == 'no' or\n str(value_node.value).lower() == 'false' or\n str(value_node.value).lower() == 'off')\n elif default is True:\n return (\n str(value_node.value).lower() == 'y' or\n str(value_node.value).lower() == 'yes' or\n str(value_node.value).lower() == 'true' or\n str(value_node.value).lower() == 'on')\n return False\n\n return bool(value_node.value == default)\n\n defaults = defaulted_attributes(cls)\n\n self.yaml_node.value = [\n (name_node, value_node)\n for name_node, value_node in self.yaml_node.value\n if (\n name_node.value not in defaults or\n not matches(value_node, defaults[name_node.value]))]", "def validate(self, value):\n errors = DataDefinition.validate(self, value)\n if errors:\n return errors\n\n if value.value is None:\n # None is a valid value for optional\n pass\n elif value.is_set():\n errors = self.element_type.validate(value.value)\n if errors:\n msg = message_factory.get_message('vapi.data.optional.validate')\n return [msg] + errors\n\n return None", "def update(self, instance, validated_data):\n instance.system = validated_data.get('system', instance.system)\n instance.unit = validated_data.get('unit', instance.unit)\n instance.value = validated_data.get('value', instance.value)\n instance.date_obj = validated_data.get('date_obj', instance.date_obj)\n return instance", "def is_tag_list(value):\r\n for tag_name in parse_tag_input(value):\r\n if len(tag_name) > settings.MAX_TAG_LENGTH:\r\n raise forms.ValidationError(\r\n _('Each tag may be no more than %s characters long.') % settings.MAX_TAG_LENGTH)\r\n return value", "def build_value_type(validator: Validator) -> Callable[[Any, Any], Any]:\n return functools.partial(ValueTypeDefinition, validator)", "def _validate(self, instance, value):", "def update_schema(cls,yaml_str,dpath=\"properties\",update=True):\n sub_schema=yaml_manager.readstring(yaml_str)\n orig_schema=cls._validation_schema\n new_schema=orig_schema.get_copy()\n if update:\n new_schema.update_nested(dpath,sub_schema)\n else:\n new_schema.set_nested(dpath,sub_schema)\n return new_schema", "def definition(self, definition: List[PipelineDefinition]):\r\n self._definition = definition" ]
[ "0.47837424", "0.46425956", "0.46075892", "0.45793426", "0.4565419", "0.4549842", "0.4544998", "0.4527444", "0.44851163", "0.44719568", "0.44582808", "0.44389117", "0.44280824", "0.44129667", "0.43951792", "0.43817663", "0.43775243", "0.43701255", "0.4363229", "0.4363229", "0.4351574", "0.43394598", "0.4327502", "0.4317291", "0.43040946", "0.42756826", "0.42714065", "0.42675135", "0.42558315", "0.42465365", "0.42424682", "0.42310083", "0.42306498", "0.42177236", "0.42177236", "0.42115575", "0.4205719", "0.42052054", "0.4202319", "0.41874394", "0.41825625", "0.41820607", "0.41753134", "0.41730753", "0.4151294", "0.41468796", "0.4141396", "0.41398376", "0.41363958", "0.41334078", "0.41330263", "0.41302556", "0.41298676", "0.41279054", "0.41279054", "0.41237593", "0.41219875", "0.41149494", "0.41147935", "0.41069955", "0.4105849", "0.41044778", "0.4095366", "0.4094859", "0.40824568", "0.407958", "0.4070771", "0.40680158", "0.4065646", "0.4061853", "0.40583894", "0.40456623", "0.40400922", "0.40354842", "0.40350497", "0.4033224", "0.40300846", "0.40269762", "0.40183526", "0.4011612", "0.4011612", "0.40116113", "0.4009207", "0.4006673", "0.40025437", "0.40016887", "0.400018", "0.3999583", "0.3996606", "0.39948675", "0.39947522", "0.39944023", "0.3989897", "0.39862812", "0.39787576", "0.3968828", "0.396378", "0.39623377", "0.39622355", "0.39603594", "0.39522955" ]
0.0
-1
Updates the specified tag default. If you specify that a value is required, a value is set during resource creation (either by the user creating the resource or another tag default). If no value is set, resource creation is blocked. If the `isRequired` flag is set to \"true\", the value is set during resource creation. If the `isRequired` flag is set to \"false\", the value you enter is set during resource creation.
def update_tag_default(self, tag_default_id, update_tag_default_details, **kwargs): resource_path = "/tagDefaults/{tagDefaultId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match", "opc_request_id" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_tag_default got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagDefaultId": tag_default_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing), "opc-request-id": kwargs.get("opc_request_id", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_tag_default_details, response_type="TagDefault") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_tag_default_details, response_type="TagDefault")
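A minimal usage sketch for the method above, assuming the surrounding class is the OCI Python SDK's `IdentityClient` and that a valid config exists at the default `~/.oci/config` location; the tag default OCID below is a hypothetical placeholder, not a real resource:

import oci

config = oci.config.from_file()  # assumes ~/.oci/config with a DEFAULT profile
identity = oci.identity.IdentityClient(config)

# UpdateTagDefaultDetails carries the new value and the isRequired flag
# described in the query text above.
details = oci.identity.models.UpdateTagDefaultDetails(
    value="finance",
    is_required=False,  # True would block resource creation until a value is set
)

# "ocid1.tagdefault.oc1..example" is a placeholder OCID used for illustration.
response = identity.update_tag_default(
    tag_default_id="ocid1.tagdefault.oc1..example",
    update_tag_default_details=details,
)
print(response.data)  # the updated TagDefault model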
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_default(self, name, default, group=None):\n opt_info = self._get_opt_info(name, group)\n opt_info['default'] = self._get_enforced_type_value(\n opt_info['opt'], default)\n opt_info['location'] = LocationInfo(\n Locations.set_default,\n _get_caller_detail(3), # this function has a decorator to skip\n )", "def _update_default(self, default_value):\n if self.type == \"uri_folder\" or self.type == \"uri_file\":\n self.default = default_value\n return\n else:\n if isinstance(default_value, float) and not math.isfinite(default_value):\n # Since nan/inf cannot be stored in the backend, just ignore them.\n # logger.warning(\"Float default value %r is not allowed, ignored.\" % default_value)\n return\n \"\"\"Update provided default values.\n Here we need to make sure the type of default value is allowed or it could be parsed..\n \"\"\"\n if default_value is not None and not isinstance(default_value, self._allowed_types):\n try:\n default_value = self._parse(default_value)\n except Exception as e:\n if self.name is None:\n msg = \"Default value of %s Input cannot be parsed, got '%s', type = %s.\" % (\n self.type,\n default_value,\n type(default_value),\n )\n else:\n msg = \"Default value of %s Input '%s' cannot be parsed, got '%s', type = %s.\" % (\n self.type,\n self.name,\n default_value,\n type(default_value),\n )\n raise MldesignerComponentDefiningError(cause=msg) from e\n self.default = default_value", "def Option(name: str, value: Union[str, int], default: Optional[bool] = None) -> Dict:\n doc = {'name': name, 'value': value}\n if default is not None:\n doc['isDefault'] = default\n return doc", "def default(self, default):\n\n self._set_field(\"value\", default)", "def SetDefaultVersion(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def set_default(self, default):\n\n\t\tif default is not None and not isinstance(default, bool):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: default EXPECTED TYPE: bool', None, None)\n\t\t\n\t\tself.__default = default\n\t\tself.__key_modified['default'] = 1", "def _set_default(name, value, context):\n if name not in context:\n context[name] = value", "def delete_tag_default(self, tag_default_id, **kwargs):\n resource_path = \"/tagDefaults/{tagDefaultId}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_request_id\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_tag_default got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagDefaultId\": tag_default_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-request-id\": kwargs.get(\"opc_request_id\", missing),\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n 
resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def _update_annotation_with_default(anno, name, default):\n # Create instance if is type class\n complete_annotation = anno\n if _is_dsl_type_cls(anno):\n complete_annotation = anno()\n complete_annotation.name = name\n if default is Input._EMPTY:\n return complete_annotation\n if isinstance(complete_annotation, Input):\n # Non-parameter Input has no default attribute\n if complete_annotation._is_parameter_type and complete_annotation.default is not None:\n # logger.warning(\n # f\"Warning: Default value of f{complete_annotation.name!r} is set twice: \"\n # f\"{complete_annotation.default!r} and {default!r}, will use {default!r}\"\n # )\n pass\n complete_annotation._update_default(default)\n return complete_annotation", "def setdefault(self, k, d=None): # real signature unknown; restored from __doc__\n pass", "def create_tag_default(self, create_tag_default_details, **kwargs):\n resource_path = \"/tagDefaults\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\",\n \"opc_request_id\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_tag_default got unknown kwargs: {!r}\".format(extra_kwargs))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing),\n \"opc-request-id\": kwargs.get(\"opc_request_id\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_tag_default_details,\n response_type=\"TagDefault\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_tag_default_details,\n response_type=\"TagDefault\")", "def default_value(self, value: Any) -> None:\n self.sdc_resource.set_input_default_value(self, value)\n self._default_value = value", "def _update_annotation_with_default(\n anno: Union[Annotation, Input, Output], name: str, default: Any\n ) -> Union[Annotation, Input, Output]:\n # Create instance if is type class\n complete_annotation = anno\n if _is_dsl_type_cls(anno):\n complete_annotation = anno()\n complete_annotation._port_name = name\n if default is Input._EMPTY:\n return complete_annotation\n if isinstance(complete_annotation, Input):\n # Non-parameter Input has no default attribute\n if complete_annotation._is_primitive_type and complete_annotation.default is not None:\n # logger.warning(\n # f\"Warning: Default value of f{complete_annotation.name!r} is set twice: \"\n # f\"{complete_annotation.default!r} and {default!r}, will use {default!r}\"\n # )\n pass\n complete_annotation._update_default(default)\n if isinstance(complete_annotation, Output) and default is not None:\n msg = 
(\n f\"Default value of Output {complete_annotation._port_name!r} cannot be set:\"\n f\"Output has no default value.\"\n )\n raise UserErrorException(msg)\n return complete_annotation", "def _default(self, section, option, default):\r\n if not self.has_section(section):\r\n self.add_section(section)\r\n if not self.has_option(section, option):\r\n self.set(section, option, default)\r\n self.save()", "def default(self, default):\n\n self._default = default", "def f_default(self, default = 1) :\n pass", "def default(default_value, force=False):\n def default_setter(value):\n \"\"\"\n Sets the value to the given default value, assuming the original value\n is not set or the default value is set to forced.\n\n :param Any value: Injected by CKAN core\n :rtype: Any\n \"\"\"\n return value if value and not force else default_value\n\n return default_setter", "def setdefault(self, name, default):\n return self.data.setdefault(name, default)", "def validate_default(self, value):\n return self.__validate(value, self.validate_default_element)", "def _defaulted(cls, value, default):\n return default if value is None else value", "def default(self, value):\n # save {value} as the default\n self._default = value\n # all done\n return", "def default_value(self, default_value):\n\n self._default_value = default_value", "def default_value(self, default_value):\n\n self._default_value = default_value", "def default_value(self, default_value):\n\n self._default_value = default_value", "def get_tag_default(self, tag_default_id, **kwargs):\n resource_path = \"/tagDefaults/{tagDefaultId}\"\n method = \"GET\"\n\n expected_kwargs = [\"retry_strategy\"]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"get_tag_default got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagDefaultId\": tag_default_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\"\n }\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"TagDefault\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n response_type=\"TagDefault\")", "def set_default_to_meta(meta, key, default_value):\n if callable(default_value):\n default_value = default_value()\n\n meta.setdefault(key, default_value)", "def register_option_pair(key, default_value):\n\n _OPTION_TEMPLATE[key] = default_value", "def _update_default(self, default_value):\n enum_val = self._parse(default_value)\n if self._enum_class and isinstance(enum_val, self._enum_class):\n enum_val = enum_val.value\n self.default = enum_val", "def _update_default(self, default_value):\n enum_val = self._parse(default_value)\n if self._enum_class and isinstance(enum_val, self._enum_class):\n enum_val = enum_val.value\n self.default = enum_val", "def SetDefaultVersion(self, request, 
context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def default_setter(value):\n return value if value and not force else default_value", "def set_statement_default_value(self, value):\n self.set_value_into_input_field(self.statement_default_value_textbox_locator, value)\n self.click_element(self.statement_fields_bulk_edit_popup_title_locator)", "def setDefault(self):\n for attr in self._filter():\n if attr.attrName() in ('tx', 'ty', 'tz', 'rx', 'ry', 'rz'):\n defaultValue = 0\n elif attr.attrName() in ('sx', 'sy', 'sz', 'v'):\n defaultValue = 1\n else:\n defaultValue = pm.addAttr(attr, query=True, defaultValue=True)\n\n if not (defaultValue is None):\n try:\n attr.set(defaultValue)\n except RuntimeError as message:\n pm.warning(message.message[:-1])", "def set_defaults(self):\r\n for name, option in self.options.iteritems():\r\n if not option.is_required():\r\n self.set_value(name, option, option.default)", "def set_default_values_as_needed(self):\n if self.verbose:\n click.echo('Updating required default values')\n for field in ARGUMENTS_DEFAULT_VALUES:\n if self.__class__.__name__ in ARGUMENTS_DEFAULT_VALUES[field][1]:\n self.data[field] = ARGUMENTS_DEFAULT_VALUES[field][0]", "def validate_default(self, value):\n return self.__validate(value, self.validate_default_element)", "def check_default(self, sec, name, default):\n if default is None:\n raise\n\n # print debug message saying default value was used\n if not default:\n default_text = 'empty string'\n else:\n default_text = default\n\n msg = \"Setting [{}] {} to default value ({})\".format(sec,\n name,\n default_text)\n if self.logger:\n self.logger.debug(msg)\n else:\n print('DEBUG: {}'.format(msg))\n\n # set conf with default value so all defaults can be added to\n # the final conf and warning only appears once per conf item\n # using a default value\n self.set(sec, name, default)", "def default(self, default):\n self._default = default\n return self", "def setdefault(self, key):\n pass", "def set_value_to_default(self):\n self.setValue(self.default_value)", "def default(self, value):\n # also easy\n self._default = value\n # all done\n return", "def default(value, replacement):\n return value if value is not None else replacement", "def set_default(self, section, option, value=None):\r\n self.defaults[section, option] = value", "def validate_default_value(self):\n if self.has_default_value:\n if not self.is_valid_value(self.default):\n raise AttributeSchemaError(\n \"Default value '%s' is not compliant with the schema\"\n )", "def set_attrs_default(input_object, attr_name_list = None):\n if attr_name_list is None:\n attr_name_list = []\n if len(attr_name_list) > 0:\n attr_list = [input_object.attr(attr_name) for attr_name in attr_name_list]\n else:\n attr_list = general.get_channelbox_attributes(input_object)\n\n for attr in attr_list:\n current_val = attr.get()\n if hasattr(attr, 'addAttr'):\n attr.addAttr(e = True, defaultValue = current_val)", "def input_with_default(prompt, default):\n response = raw_input(\"%s (Default %s) \"%(prompt, default))\n if not response:\n return default\n return response", "def default_arg(default):\n class DefaultArg(argparse.Action):\n def __call__(self, parser, namespace, value, option_string):\n if value is None:\n setattr(namespace, self.dest, default)\n else:\n setattr(namespace, self.dest, value)\n\n return DefaultArg", "def is_default(self, is_default):\n\n self._is_default = 
is_default", "def update_with_defaults(**kwargs):\n # Update the defaults with the input values\n with open(DEFAULTS, \"r\") as f:\n defaults = json.load(f)\n return _update(kwargs, defaults)", "def set_is_default(self):\n self.is_default = True", "def set_is_default(self):\n self.is_default = True", "def help_default_values():\n click.echo_via_pager(docgen.generate_default_value_help())", "def setdefault(pb_or_dict, key, value):\n if not get(pb_or_dict, key, default=None):\n set(pb_or_dict, key, value)", "def get_default_value(self, tag, primitive_type, hint=None):\n # initialize\n default_value = self.get_default_value_of_type(primitive_type)\n\n # use example value as default (if exist)\n if self.use_examples_for_default and self.get_examples_values:\n examples_values = self.get_examples_values(tag)\n if examples_values:\n default_value = list(examples_values)[0]\n\n # use response value as default (if exist)\n if self.use_response_for_default and self.get_response_values:\n response_values = self.get_response_values(tag, hint)\n if response_values:\n default_value = response_values[0]\n\n return default_value", "def set_defaults(context: CreateCommandsContext):\n job_default_parameters: List[\n Parameter\n ] = context.settings.job_default_parameters\n logger.info(\n \"Please set default rows current value shown in [brackets]. Pressing enter\"\n \" without input will keep current value\"\n )\n try:\n project_name = click.prompt(\n \"Please enter default IDIS project name:\",\n show_default=True,\n default=job_default_parameters.project_name,\n )\n\n destination_path = click.prompt(\n \"Please enter default job destination directory:\",\n show_default=True,\n default=job_default_parameters.destination_path,\n )\n except Abort:\n logger.info(\"Cancelled\")\n\n job_default_parameters.project_name = project_name\n job_default_parameters.destination_path = destination_path\n context.settings.save_to()\n logger.info(\"Saved\")", "def set_default_version(self) -> Optional[bool]:\n return pulumi.get(self, \"set_default_version\")", "def addDefault(self, name, object):\n if name is None:\n raise ValueError(\"Name cannot be None\")\n self.defaultChoice = name\n self.addObject(name, object)", "def _add_default_tags(self):\n self.tags.add_tag('ban', required=True)", "def create_object_parameter_from_default(obj, default):\n values = []\n if default.enum:\n for v in DefaultParameterVl.objects.filter(parameter=default).all():\n values.append({'value' : v.value,\n 'caption' : v.caption})\n return create_object_parameter(obj, 'user', False,\n tp = default.tp,\n name=default.name,\n descr=default.descr,\n values=values)", "def _set_default_attr(self, default_attr):\n for attr, val in six.iteritems(default_attr):\n if getattr(self, attr, None) is None:\n setattr(self, attr, val)", "def default(prompt, default, validator=(lambda x: True), hint=None):\n user_input = input(\"{0} [{1}]\".format(prompt, default))\n while not validator(user_input):\n user_input = input(\"{0} [{1}]\".format(prompt, default))\n return user_input or default", "def default_package_version(self, default_package_version):\n self._default_package_version = default_package_version", "def setdefault(self, key, default=None):\n if key in self:\n return self[key]\n else:\n self[key] = default\n return default", "def is_default(self):\n return self._tag == 'default'", "def setdefault_key_value(self):\n raise NotImplementedError", "def setdefault(self, key, default=None):\n val = self.get(key, _MISSING)\n if val is _MISSING:\n val = 
default\n self[key] = default\n return val", "def use_defaults(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_defaults\")", "def use_defaults(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_defaults\")", "def set_default_value(self, field):\n if field._default is not None:\n if callable(field._default):\n default = field._default()\n else:\n default = field._default\n self.defaults[field.name] = default", "def _add_serve_metric_default_tags(default_tags: Dict[str, str]):\n if context.get_internal_replica_context() is None:\n return default_tags\n if DEPLOYMENT_TAG in default_tags:\n raise ValueError(f\"'{DEPLOYMENT_TAG}' tag is reserved for Ray Serve metrics\")\n if REPLICA_TAG in default_tags:\n raise ValueError(f\"'{REPLICA_TAG}' tag is reserved for Ray Serve metrics\")\n if APPLICATION_TAG in default_tags:\n raise ValueError(f\"'{APPLICATION_TAG}' tag is reserved for Ray Serve metrics\")\n replica_context = context.get_internal_replica_context()\n # TODO(zcin): use replica_context.deployment for deployment tag\n default_tags[DEPLOYMENT_TAG] = replica_context.deployment\n default_tags[REPLICA_TAG] = replica_context.replica_tag\n if replica_context.app_name:\n default_tags[APPLICATION_TAG] = replica_context.app_name\n return default_tags", "def replace_defaults(d):\n\n # remove the defaults section\n defaults = d.pop('.defaults')\n\n # look for default tags and replace them\n for k, v in defaults.items():\n recursive_search_replace(d, '!' + k + '!', v)", "def setdefault(self, k, default=_MISSING):\n if not super(OrderedMultiDict, self).__contains__(k):\n self[k] = None if default is _MISSING else default\n return self[k]", "def _default_value(self):\n raise NotImplementedError", "def SetDefaultVersion(self, request, timeout, metadata=None, with_call=False, protocol_options=None):\n raise NotImplementedError()", "def setdefault(self, value: Any) -> None:\n self.default_factory = value \n return", "def update_default_from_dict(self, key, value):\n pass", "def validate_default_element(self, value):\n return self.validate_element(value)", "def defaultStatus(self, value=None):\n raise NotImplementedError", "def default(self, option: str, default: Any = None) -> Any:\n if option == 'pull':\n return self.get('force-pull', default=default)\n\n return super().default(option, default=default)", "def default(self, name, new=None, erase=False):\n # Check existence\n if name not in self._defaults:\n raise tools.UnavailableException(self._defaults, name, what=\"model default\")\n # Get current\n old = self._defaults[name]\n # Set if needed\n if erase or new is not None:\n self._defaults[name] = new\n # Return current/old\n return old", "def test_string_default(self):\n tag = Tag()\n self.assertEqual(tag.value, 'default')", "def test_set_default(self):\n result = self.param_dict.get_config()\n self.assertEquals(result[\"foo\"], None)\n self.param_dict.set_default(\"foo\")\n self.assertEquals(self.param_dict.get(\"foo\"), 10)\n self.param_dict.update(\"foo=1000\")\n self.assertEquals(self.param_dict.get(\"foo\"), 1000)\n self.param_dict.set_default(\"foo\")\n self.assertEquals(self.param_dict.get(\"foo\"), 10)\n \n self.assertRaises(ValueError, self.param_dict.set_default, \"qux\")", "def argument(arg, default):\n return \"{0}={1}\".format(arg, default) if default else arg", "def _default_value(self):\n return None", "def default_input(prompt, default_value):\r\n item = input(prompt + \"[Enter for \" + default_value + \"]: \").lower()\r\n if item == 
\"\":\r\n item = default_value\r\n return item", "def setdefault(self, value: Any) -> None: # type: ignore\n self.default_factory = value \n return", "def is_default(self, is_default: bool):\n\n self._is_default = is_default", "def update(self, instance, validated_data):\r\n default_task = instance.task\r\n default_group = instance.group\r\n result = super().update(instance, validated_data)\r\n result.task = default_task\r\n result.group = default_group\r\n return result", "def setdefault(self, key: str, default: Any = None) -> Any:\n try:\n return self[key]\n except KeyError:\n self[key] = default\n return self[key]", "def createDevIDAttr(shapefileName, defaultVal):\n\n inputds = ogr.Open(shapefileName,update=True)\n if not inputds:\n sys.exit(\"Unable to open input file '{0}'\".format(shapefileName))\n\n inputlyr = inputds.GetLayer()\n\n # Create field definition(s)\n # Add input Layer Fields to the output Layer if defined in field_names arg.\n inLayerDefn = inputlyr.GetLayerDefn()\n if inLayerDefn.GetFieldIndex(cc.DEV_LAYER_ATTRIBUTE_NAME) == -1:\n print(\"\\tCreating an Attribute '{0}' in vector file '{1}'\".format(cc.DEV_LAYER_ATTRIBUTE_NAME,shapefileName))\n\n inputlyr.CreateField(ogr.FieldDefn(cc.DEV_LAYER_ATTRIBUTE_NAME, ogr.OFTInteger))\n\n for inFeature in inputlyr:\n inFeature.SetField(cc.DEV_LAYER_ATTRIBUTE_NAME,defaultVal)\n inputlyr.SetFeature(inFeature)\n\n inputds.Destroy()\n print(\"\\tCreated an Attribute '{0}' in vector file '{1}'\".format(cc.DEV_LAYER_ATTRIBUTE_NAME,shapefileName))", "def test_with_default() -> None:\n soup = generate_case(\"with_default\")\n\n tests.html_schema_doc_asserts.assert_default_values(soup, ['\"Linux\"', '[\"white\", \"blue\"]', \"2\"])", "def form_RadioChoiceDefault(request):\n schema = schemaish.Structure()\n schema.add('myRadio', schemaish.Integer())\n options = [(1,'a'),(2,'b'),(3,'c')]\n\n form = formish.Form(schema, 'form')\n form['myRadio'].widget = formish.RadioChoice(options)\n form['myRadio'].default = 2\n return form", "def validate_default_element(self, value):\n return self.validate_element(value)", "def default_value(self, val):\n self.set_property(\"DefaultValue\", val)", "def _default_value(self, addr, size, name=None, inspect=True, events=True, key=None, **kwargs):\n pass", "def _validate_usage_of_optional(self) -> None:\n # Because None can be the default value, None cannot be used to to indicate no default. This is why we need the optional field. This check prevents users of InputSpec from setting these two values to an inconsistent state, forcing users of InputSpec to be explicit about optionality.\n if self.optional is False and self.default is not None:\n raise ValueError(\n f'`optional` argument to {self.__class__.__name__} must be True if `default` is not None.'\n )", "def default():", "def get_default_value(self, request):\n\n return self.default_value", "def is_default(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"is_default\")", "def prompt(name, default):\n value = raw_input('%s [%s]: ' %(name, default))\n if not value:\n value = default\n return value" ]
[ "0.6250223", "0.62021977", "0.6062567", "0.60212696", "0.59388936", "0.5849943", "0.58077544", "0.5761992", "0.5740362", "0.57305574", "0.5730093", "0.57221216", "0.5609175", "0.5585611", "0.5582013", "0.5553106", "0.5544306", "0.55127645", "0.55110776", "0.5498642", "0.5480265", "0.546484", "0.546484", "0.546484", "0.54296577", "0.53810877", "0.5378663", "0.5361359", "0.5361359", "0.5359547", "0.53293985", "0.53279024", "0.5325579", "0.5324184", "0.53097934", "0.530413", "0.5294968", "0.5290877", "0.52771276", "0.52578956", "0.5245251", "0.52323735", "0.52284914", "0.52112377", "0.52056193", "0.5200164", "0.5185198", "0.5184831", "0.5182544", "0.51776236", "0.51776236", "0.5170183", "0.5166872", "0.51658607", "0.5161732", "0.5154837", "0.51480526", "0.51345515", "0.5129933", "0.5122746", "0.511219", "0.5100005", "0.5048771", "0.5046439", "0.50462735", "0.504453", "0.5037626", "0.5037626", "0.5033232", "0.5025753", "0.5021156", "0.5017397", "0.5012647", "0.50098777", "0.5001173", "0.49952722", "0.49806464", "0.49738774", "0.4972458", "0.49579772", "0.4944069", "0.49386165", "0.4937359", "0.49239823", "0.4919169", "0.49165216", "0.49137786", "0.491059", "0.4910117", "0.49082544", "0.48797634", "0.48791328", "0.4875552", "0.4842832", "0.48341814", "0.481782", "0.4805036", "0.4799003", "0.47978172", "0.47963566" ]
0.7059619
0
Updates the specified tag namespace. You can't update the namespace name. Updating `isRetired` to 'true' retires the namespace and all the tag definitions in the namespace. Reactivating a namespace (changing `isRetired` from 'true' to 'false') does not reactivate tag definitions. To reactivate the tag definitions, you must reactivate each one individually after you reactivate the namespace.
def update_tag_namespace(self, tag_namespace_id, update_tag_namespace_details, **kwargs): resource_path = "/tagNamespaces/{tagNamespaceId}" method = "PUT" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_tag_namespace got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagNamespaceId": tag_namespace_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_tag_namespace_details, response_type="TagNamespace") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_tag_namespace_details, response_type="TagNamespace")
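A matching sketch for retiring a namespace with the method above, under the same assumptions (OCI Python SDK `IdentityClient`, placeholder OCID); note, per the query text, that flipping `isRetired` back to false later will not reactivate the namespace's tag definitions:

import oci

config = oci.config.from_file()
identity = oci.identity.IdentityClient(config)

# Setting is_retired=True retires the namespace and every tag definition in it.
details = oci.identity.models.UpdateTagNamespaceDetails(is_retired=True)

response = identity.update_tag_namespace(
    tag_namespace_id="ocid1.tagnamespace.oc1..example",  # hypothetical OCID
    update_tag_namespace_details=details,
)
print(response.data.is_retired)  # True once the namespace is retired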
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update():\n for namespace in metadata.get_namespaces():\n logging.info('Switching namespace: \\'%s\\'', namespace)\n namespace_manager.set_namespace(namespace)\n update_per_namespace()\n\n namespace_manager.set_namespace('')\n return ('', 204)", "def updateSBMLNamespace(self, *args):\n return _libsbml.SBMLDocument_updateSBMLNamespace(self, *args)", "def ReplaceNamespace(self, request, global_params=None):\n config = self.GetMethodConfig('ReplaceNamespace')\n return self._RunMethod(\n config, request, global_params=global_params)", "def ReplaceNamespace(self, request, global_params=None):\n config = self.GetMethodConfig('ReplaceNamespace')\n return self._RunMethod(\n config, request, global_params=global_params)", "def update_overrides(self, app, name, namespace,\n flag='reset', override_values=None):\n if override_values is None:\n override_values = {}\n body = {'flag': flag, 'values': override_values, 'attributes': {}}\n return self._update(self._path(app) +\n '?name=' + name +\n '&namespace=' + namespace, body)", "def post_namespace_update(self, resource_id, resource_dict):\n pass", "def remove_namespace(self, doc, namespace):\r\n ns = u'{%s}' % namespace\r\n nsl = len(ns)\r\n for elem in doc.getiterator():\r\n if elem.tag.startswith(ns):\r\n elem.tag = elem.tag[nsl:]\r\n else:\r\n pass", "def updateNamespace(self):\n import addict\n self.namespace['config'] = addict.Dict(self.namespace['config'])", "def remove_namespace(doc, namespace=u\"{http://www.EcoInvent.org/EcoSpold02}\"):\n ns = u'{}'.format(namespace)\n nsl = len(ns)\n for elem in doc.getiterator():\n if elem.tag.startswith(ns):\n elem.tag = elem.tag[nsl:]", "def patch_well_known_namespaces(etree_module):\n etree_module._namespace_map.update({\n \"http://www.w3.org/1999/02/22-rdf-syntax-ns#\": \"rdf\", \n \"http://purl.org/rss/1.0/\": \"rss\", \n \"http://purl.org/rss/1.0/modules/taxonomy/\": \"taxo\", \n \"http://purl.org/dc/elements/1.1/\": \"dc\", \n \"http://purl.org/rss/1.0/modules/syndication/\": \"syn\", \n \"http://www.w3.org/2003/01/geo/wgs84_pos#\": \"geo\"})", "def fix(self):\n for namespace in pm.listNamespaces():\n for elem in namespace.ls():\n elem.rename(elem.split(\":\")[-1])\n namespace.remove()\n\n self.run()", "def setElementNamespace(self, *args):\n return _libsbml.ASTBasePlugin_setElementNamespace(self, *args)", "def update(context, namespace_name, id, values, session):\n namespace_api.get(context, namespace_name, session)\n\n metadata_tag = _get(context, id, session)\n metadef_utils.drop_protected_attrs(models.MetadefTag, values)\n # values['updated_at'] = timeutils.utcnow() - done by TS mixin\n try:\n metadata_tag.update(values.copy())\n metadata_tag.save(session=session)\n except db_exc.DBDuplicateEntry:\n LOG.debug(\"Invalid update. 
It would result in a duplicate\"\n \" metadata tag with same name=%(name)s\"\n \" in namespace=%(namespace_name)s.\",\n {'name': values['name'],\n 'namespace_name': namespace_name})\n raise exc.MetadefDuplicateTag(\n name=values['name'], namespace_name=namespace_name)\n\n return metadata_tag.to_dict()", "def setElementNamespace(self, *args):\n return _libsbml.SBasePlugin_setElementNamespace(self, *args)", "def reconciliateNs(self, doc):\n if doc is None: doc__o = None\n else: doc__o = doc._o\n ret = libxml2mod.xmlReconciliateNs(doc__o, self._o)\n return ret", "def pre_namespace_update(self, resource_id, resource_dict):\n pass", "def test_update_xmlns(self):\n namespace, xmlns = utils.update_xmlns({})\n self.assertEqual(\n namespace, utils.DEFAULT_NCNS\n )\n self.assertEqual(\n xmlns, {\n utils.DEFAULT_NCNS: utils.NETCONF_NAMESPACE\n }\n )", "def setNamespaces(self, *args):\n return _libsbml.XMLToken_setNamespaces(self, *args)", "def replace_namespaced_net_namespace(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method replace_namespaced_net_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `replace_namespaced_net_namespace`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `replace_namespaced_net_namespace`\")\n\n resource_path = '/oapi/v1/netnamespaces/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1NetNamespace',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def test_patch_net_namespace(self):\n pass", "def sync_namespace(alias, reg_code, authToken, space=None, action=None):\n if space == None:\n action = 'get'\n print(\" ACTION: GET\")\n elif action == None:\n if 'aeskey' not in space:\n print(\"Space not encrypted\")\n quit()\n action = 'update'\n print(\" ACTION: UPDATE\")\n elif action == 'delete':\n print(\" ACTION: DELETE\")\n url = endpoint('namespace')\n headers={'authorizationToken': authToken}\n data = json.dumps({'action': action, 'alias': alias, 'reg_code': reg_code, 'namespace': space})\n payload_size = sys.getsizeof(data)\n print(\" Size of payload is: %s\" % (convert_size(payload_size)))\n print(\" Max payload is: %s\" % 
(convert_size(max_payload_size)))\n if payload_size >= max_payload_size:\n print(\" OVER MAX PAYLOAD: %s\" % (convert_size(max_payload_size)))\n quit()\n r = requests.post(url, headers=headers, data=data) \n print(\" Request made\")\n if r.status_code == 403:\n print(\" Invalid registration code, exiting\")\n quit()\n elif r.status_code == 406:\n print(\" Namespace mismatch\")\n quit()\n else:\n print(\" └──statusCode:\" + str(r.status_code) )\n return r", "def replace_namespaced_namespace_status(self, body, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.replace_namespaced_namespace_status_with_http_info(body, name, **kwargs)\n else:\n (data) = self.replace_namespaced_namespace_status_with_http_info(body, name, **kwargs)\n return data", "async def namespace_watcher(patch,logger,meta,body,event,**kwargs):\n new_ns = meta['name']\n logger.debug(f\"New namespace created: {new_ns} re-syncing\")\n v1 = client.CoreV1Api()\n \n for k,v in csecs.items():\n obj_body = v['body']\n #logger.debug(f'k: {k} \\n v:{v}')\n matcheddns = v['syncedns']\n logger.debug(f\"Old matched namespace: {matcheddns} - name: {v['body']['metadata']['name']}\")\n ns_new_list=get_ns_list(logger,obj_body,v1)\n logger.debug(f\"new matched list: {ns_new_list}\")\n if new_ns in ns_new_list:\n logger.debug(f\"Clonning secret {v['body']['metadata']['name']} into the new namespace {new_ns}\")\n create_secret(logger,new_ns,v['body'],v1)\n # if there is a new matching ns, refresh memory\n v['syncedns'] = ns_new_list\n \n # update ns_new_list on the object so then we also delete from there\n return {'syncedns': ns_new_list}", "def setNamespaces(self, *args):\n return _libsbml.SBase_setNamespaces(self, *args)", "def update_namespaces_info(self):\n namespaces = BlockDev.nvdimm_list_namespaces(idle=True)\n\n self._namespaces = dict((namespace.dev, namespace) for namespace in namespaces)", "def test_replace_namespaced_deployment_config_status(self):\n pass", "def replace_namespaced_namespace(self, body, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.replace_namespaced_namespace_with_http_info(body, name, **kwargs)\n else:\n (data) = self.replace_namespaced_namespace_with_http_info(body, name, **kwargs)\n return data", "def set_namespace(self, namespace: str) -> None:\n self._namespace = namespace", "def change_tag_namespace_compartment(self, tag_namespace_id, change_tag_namespace_compartment_detail, **kwargs):\n resource_path = \"/tagNamespaces/{tagNamespaceId}/actions/changeCompartment\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"change_tag_namespace_compartment got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagNamespaceId\": tag_namespace_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v 
is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=change_tag_namespace_compartment_detail)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=change_tag_namespace_compartment_detail)", "def updateNamespace(self):\n self.namespace['pipeline_manager'] = self", "def gen_namespace(self, node):\n node.functions = self.define_function_suffix(node.functions)\n for ns in node.namespaces:\n self.gen_namespace(ns)", "def test_patch_namespaced_deployment_config(self):\n pass", "def update_tag(tag):\n remove_tag(tag)\n add_tag(tag)", "def __setattr__(self, name, value):\n if not isinstance(name, str):\n raise ValueError('Namespace label must be a string')\n if name.startswith('_'):\n raise ValueError('Namespace cannot start with an underscore')\n\n if name in self._namespaces:\n raise ValueError('Namespaces cannot be redefined')\n\n self._namespaces[name] = Namespace(name, label=value)", "def reconciliateNs(self, tree):\n if tree is None: tree__o = None\n else: tree__o = tree._o\n ret = libxml2mod.xmlReconciliateNs(self._o, tree__o)\n return ret", "def set_test_namespace_value(namespace_name=None):\r\n global namespace_value\r\n namespace_value = namespace_name", "def patch_namespaced_net_namespace(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method patch_namespaced_net_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `patch_namespaced_net_namespace`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `patch_namespaced_net_namespace`\")\n\n resource_path = '/oapi/v1/netnamespaces/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'PATCH',\n path_params,\n query_params,\n header_params,\n body=body_params,\n 
post_params=form_params,\n files=local_var_files,\n response_type='V1NetNamespace',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def replace_namespaced_namespace_with_http_info(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method replace_namespaced_namespace\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `replace_namespaced_namespace`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `replace_namespaced_namespace`\")\n\n resource_path = '/api/v1/namespaces/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n return self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1Namespace',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'))", "def set_namespace(key, dic):\n\tnew_namespace(key)\n\tREGISTRY[key] = Namespace(dic)", "def test_patch_namespaced_deployment_config_status(self):\n pass", "def remove_namespace(namespace, response, resp_keys=[]):\n if isinstance(namespace, str):\n namespace = str.encode(namespace)\n for key in resp_keys:\n response[key] = remove_namespace(namespace, response[key])\n if isinstance(response, (int, float, bool)):\n pass\n elif isinstance(response, bytes):\n response = response.replace(namespace, b'', 1)\n elif isinstance(response, (tuple, list)):\n response = tuple([remove_namespace(namespace, x) for x in response])\n return response", "def setNs(self, ns):\n if ns is None: ns__o = None\n else: ns__o = ns._o\n libxml2mod.xmlSetNs(self._o, ns__o)", "def namespace_name(self, namespace_name):\n\n self._namespace_name = namespace_name", "def set_target_namespace(self, namespace):\n # do shit\n self.target_namespace = namespace.strip(\":\")", "def namespace(self, namespace):\n\n self._namespace = namespace", "def namespace(self, namespace):\n\n self._namespace = namespace", "def register_variable_namespace(self, name, namespace, t=None):\n if name in self.variable_namespace:\n old = self.variable_namespace[name]\n if old != namespace:\n msg = (\"overwriting namespace for variable {0}:\\n\"\n \" old: {1}\\n\"\n \" new: {2}\")\n warn(msg.format(name, old, namespace), 
RuntimeWarning)\n self.variable_namespace[name] = namespace\n if self.isenum(t):\n t = self.canon(t)\n for n, _ in t[1][2][2]:\n self.register_variable_namespace(n, namespace)", "def test_replace_net_namespace(self):\n pass", "def resetElementNamespace(self, *args):\n return _libsbml.ListOfLayouts_resetElementNamespace(self, *args)", "def addNamespace(self, *args):\n return _libsbml.SBMLNamespaces_addNamespace(self, *args)", "def test_replace_namespaced_deployment_config(self):\n pass", "def modify_package_state(self):\n ns_inst = NSInstModel.objects.filter(id=self.ns_inst_id)\n ns_insts = NSInstModel.objects.filter(nspackage_id=ns_inst[0].nspackage_id)\n if len(ns_insts) == 1:\n sdc_run_catalog.modify_nsd_state(ns_inst[0].nspackage_id, 0)", "def removePackageNamespace(self, *args):\n return _libsbml.SBMLNamespaces_removePackageNamespace(self, *args)", "def addPackageNamespace(self, *args):\n return _libsbml.SBMLNamespaces_addPackageNamespace(self, *args)", "def addNamespace(self, *args):\n return _libsbml.XMLToken_addNamespace(self, *args)", "async def save_namespace(self, **kwargs) -> None:\n namespace = self._get_namespace(**kwargs)\n await self.AD.state.save_namespace(namespace)", "def removeNamespace(self, *args):\n return _libsbml.SBMLNamespaces_removeNamespace(self, *args)", "def setSBMLNamespaces(self, *args):\n return _libsbml.XMLInputStream_setSBMLNamespaces(self, *args)", "def namespace(self, namespace: str):\n\n self._namespace = namespace", "def test_replace_namespaced_policy_binding(self):\n pass", "def setSBMLNamespaces(self, *args):\n return _libsbml.XMLOutputStream_setSBMLNamespaces(self, *args)", "def update(self, xact, path, msg, flags=rwdts.Flag.REPLACE):\n self._log.debug(\"Updating NSR xact = %s, %s:%s regh = %s\", xact, path, msg, self.regh)\n self.regh.update_element(path, msg, flags)\n self._log.debug(\"Updated NSR xact = %s, %s:%s\", xact, path, msg)", "def update(self, namespace_dict=None, **kwargs):\n if namespace_dict is not None:\n self.__dict__.update(namespace_dict)\n self.__dict__.update(kwargs)", "def patch_namespaced_namespace(self, body, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.patch_namespaced_namespace_with_http_info(body, name, **kwargs)\n else:\n (data) = self.patch_namespaced_namespace_with_http_info(body, name, **kwargs)\n return data", "def test_replace_namespaced_route_status(self):\n pass", "def replace_namespaced_service(self, body, namespace, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.replace_namespaced_service_with_http_info(body, namespace, name, **kwargs)\n else:\n (data) = self.replace_namespaced_service_with_http_info(body, namespace, name, **kwargs)\n return data", "def remove(self, *args):\n return _libsbml.XMLNamespaces_remove(self, *args)", "def _remove_namespace(self, xml):\n response = self.re_ns_decl.sub('', xml) # Remove namespace declarations\n response = self.re_ns_open.sub('<', response) # Remove namespaces in opening tags\n response = self.re_ns_close.sub('/', response) # Remove namespaces in closing tags\n return response", "def _remove_namespace(self, xml):\n response = self.re_ns_decl.sub('', xml) # Remove namespace declarations\n response = self.re_ns_open.sub('<', response) # Remove namespaces in opening tags\n response = self.re_ns_close.sub('/', response) # Remove namespaces in closing tags\n return response", "def removeNamespace(self, *args):\n return _libsbml.XMLToken_removeNamespace(self, *args)", 
"def update_tag(self, tag_namespace_id, tag_name, update_tag_details, **kwargs):\n resource_path = \"/tagNamespaces/{tagNamespaceId}/tags/{tagName}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_tag got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tagNamespaceId\": tag_namespace_id,\n \"tagName\": tag_name\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_tag_details,\n response_type=\"Tag\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_tag_details,\n response_type=\"Tag\")", "def declareNamespace (self, namespace, prefix=None, add_to_map=False):\n if not isinstance(namespace, pyxb.namespace.Namespace):\n raise pyxb.UsageError('declareNamespace: must be given a namespace instance')\n if namespace.isAbsentNamespace():\n raise pyxb.UsageError('declareNamespace: namespace must not be an absent namespace')\n if prefix is None:\n prefix = namespace.prefix()\n if prefix is None:\n pfxs = self.__inScopePrefixes.get(namespace)\n if pfxs:\n prefix = next(iter(pfxs))\n while prefix is None:\n self.__namespacePrefixCounter += 1\n candidate_prefix = 'ns%d' % (self.__namespacePrefixCounter,)\n if not (candidate_prefix in self.__inScopeNamespaces):\n prefix = candidate_prefix\n ns = self.__inScopePrefixes.get(prefix)\n if ns:\n if ns != namespace:\n raise pyxb.LogicError('Prefix %s is already in use for %s' % (prefix, ns))\n return prefix\n if not self.__mutableInScopeNamespaces:\n self.__clonePrefixMap()\n self.__mutableInScopeNamespaces = True\n self.__addPrefixMap(prefix, namespace)\n return prefix", "def clean_up_namespaces(node, namespace=None):\n if namespace is not None:\n Namespaces.delete_namespace(node, namespace)\n return\n\n namespace_copy = deepcopy(Namespaces.__namespaces)\n for namespace_name in namespace_copy:\n Namespaces.delete_namespace(node, namespace_name)", "def reset(self, state, namespace='*'):\n with self.lock:\n if namespace == '*':\n for cur_namespace in self.camera_namespaces:\n for camera in self.camera_namespaces[cur_namespace]:\n camera.reset_pose(state)\n else:\n for camera in self.camera_namespaces[namespace]:\n camera.reset_pose(state)", "def setSBMLNamespacesAndOwn(self, *args):\n return _libsbml.SBase_setSBMLNamespacesAndOwn(self, *args)", "def reactivate(self):\r\n self.require_item()\r\n\r\n url = '{0}/reactivate'.format(self.get_url())\r\n request = http.Request('PUT', url)\r\n\r\n 
return request, parsers.parse_empty", "def create_tag_namespace(self, create_tag_namespace_details, **kwargs):\n resource_path = \"/tagNamespaces\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_tag_namespace got unknown kwargs: {!r}\".format(extra_kwargs))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_tag_namespace_details,\n response_type=\"TagNamespace\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n header_params=header_params,\n body=create_tag_namespace_details,\n response_type=\"TagNamespace\")", "def reset (self):\n self.__inScopeNamespaces = self.__initialScopeNamespaces\n self.__inScopePrefixes = self.__initialScopePrefixes\n self.__mutableInScopeNamespaces = False\n self.__namespacePrefixCounter = 0", "def reconfigure_namespace(self, namespace, mode, **kwargs):\n\n if namespace not in self.namespaces.keys():\n raise ValueError(\"Namespace '%s' doesn't exist.\" % namespace)\n\n info = self.namespaces[namespace]\n\n sector_size = kwargs.get(\"sector_size\", None)\n map_location = kwargs.get(\"map_location\", None)\n\n if sector_size and mode != \"sector\":\n raise ValueError(\"Sector size cannot be set for selected mode '%s'.\" % mode)\n\n if map_location and mode != \"memory\":\n raise ValueError(\"Map location cannot be set for selected mode '%s'.\" % mode)\n\n mode_t = BlockDev.nvdimm_namespace_get_mode_from_str(mode)\n\n if sector_size:\n extra = {\"-l\": str(sector_size)}\n elif map_location:\n extra = {\"-M\": map_location}\n else:\n extra = None\n\n BlockDev.nvdimm_namespace_reconfigure(namespace, mode_t, info.enabled, extra)\n\n # and update our namespaces info \"cache\"\n self.update_namespaces_info()", "def updateSpreads (self, namespaceguid, jobguid = \"\", executionparams = {}):\n params =dict()\n params['namespaceguid'] = namespaceguid\n executionparams['rootobjecttype'] = 'dssnamespace'\n\n \n return q.workflowengine.actionmanager.startRootobjectAction('dssnamespace', 'updateSpreads', params, jobguid=jobguid, executionparams=executionparams)", "def setNsProp(self, ns, name, value):\n if ns is None: ns__o = None\n else: ns__o = ns._o\n ret = libxml2mod.xmlSetNsProp(self._o, ns__o, name, value)\n if ret is None:raise treeError('xmlSetNsProp() failed')\n __tmp = xmlAttr(_obj=ret)\n return __tmp", "def addPackageNamespaces(self, *args):\n return _libsbml.SBMLNamespaces_addPackageNamespaces(self, *args)", "def test_replace_namespaced_policy(self):\n pass", "def setNs(self, node):\n if node is None: node__o = None\n else: node__o = node._o\n libxml2mod.xmlSetNs(node__o, self._o)", "def test_patch_namespaced_policy_binding(self):\n pass", "async def 
add_namespace(self, namespace: str, **kwargs) -> Union[str, None]:\n if namespace == self.get_namespace(): # if it belongs to this app's namespace\n raise ValueError(\"Cannot add namespace with the same name as operating namespace\")\n\n writeback = kwargs.get(\"writeback\", \"safe\")\n persist = kwargs.get(\"persist\", True)\n\n return await self.AD.state.add_namespace(namespace, writeback, persist, self.name)", "def test_patch_namespaced_policy(self):\n pass", "def xmlrpc_namespace():", "def set_doc_namespace(self, doc, namespace):\n if not self.doc_namespace_set:\n self.doc_namespace_set = True\n if validations.validate_doc_namespace(namespace):\n doc.namespace = namespace\n return True\n else:\n raise SPDXValueError('Document::Namespace')\n else:\n raise CardinalityError('Document::Comment')", "def __init__ (self,\n dom_node=None,\n parent_context=None,\n including_context=None,\n recurse=True,\n default_namespace=None,\n target_namespace=None,\n in_scope_namespaces=None,\n expanded_name=None,\n finalize_target_namespace=True): # MUST BE True for WSDL to work with minidom\n from pyxb.namespace import builtin\n\n if dom_node is not None:\n try:\n assert dom_node.__namespaceContext is None\n except AttributeError:\n pass\n dom_node.__namespaceContext = self\n\n self.__defaultNamespace = default_namespace\n self.__targetNamespace = target_namespace\n if self.__InitialScopeNamespaces is None:\n self.__BuildInitialPrefixMap()\n self.__inScopeNamespaces = self.__InitialScopeNamespaces\n self.__inScopePrefixes = self.__InitialScopePrefixes\n self.__mutableInScopeNamespaces = False\n self.__namespacePrefixCounter = 0\n\n if parent_context is not None:\n self.__inScopeNamespaces = parent_context.__inScopeNamespaces\n self.__inScopePrefixes = parent_context.__inScopePrefixes\n if parent_context.__mutableInScopeNamespaces:\n self.__clonePrefixMap()\n self.__defaultNamespace = parent_context.defaultNamespace()\n self.__targetNamespace = parent_context.targetNamespace()\n self.__fallbackToTargetNamespace = parent_context.__fallbackToTargetNamespace\n if in_scope_namespaces is not None:\n self.__clonePrefixMap()\n self.__mutableInScopeNamespaces = True\n for (pfx, ns) in six.iteritems(in_scope_namespaces):\n self.__removePrefixMap(pfx)\n self.__addPrefixMap(pfx, ns)\n\n # Record a copy of the initial mapping, exclusive of namespace\n # directives from C{dom_node}, so we can reset to that state.\n self.__initialScopeNamespaces = self.__inScopeNamespaces\n self.__initialScopePrefixes = self.__inScopePrefixes\n self.__mutableInScopeNamespaces = False\n\n if self.__targetNamespace is None:\n self.__pendingReferencedNamespaces = set()\n attribute_map = {}\n if dom_node is not None:\n if expanded_name is None:\n expanded_name = pyxb.namespace.ExpandedName(dom_node)\n for ai in range(dom_node.attributes.length):\n attr = dom_node.attributes.item(ai)\n if builtin.XMLNamespaces.uri() == attr.namespaceURI:\n prefix = attr.localName\n if 'xmlns' == prefix:\n prefix = None\n self.processXMLNS(prefix, attr.value)\n else:\n if attr.namespaceURI is not None:\n uri = utility.NamespaceForURI(attr.namespaceURI, create_if_missing=True)\n key = pyxb.namespace.ExpandedName(uri, attr.localName)\n else:\n key = pyxb.namespace.ExpandedName(None, attr.localName)\n attribute_map[key] = attr.value\n\n if finalize_target_namespace:\n tns_uri = None\n tns_attr = self._TargetNamespaceAttribute(expanded_name)\n if tns_attr is not None:\n tns_uri = attribute_map.get(tns_attr)\n self.finalizeTargetNamespace(tns_uri, 
including_context=including_context)\n\n # Store in each node the in-scope namespaces at that node;\n # we'll need them for QName interpretation of attribute\n # values.\n if (dom_node is not None) and recurse:\n from xml.dom import Node\n assert Node.ELEMENT_NODE == dom_node.nodeType\n for cn in dom_node.childNodes:\n if Node.ELEMENT_NODE == cn.nodeType:\n NamespaceContext(dom_node=cn, parent_context=self, recurse=True)", "def replace_namespaced_namespace_finalize(self, body, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.replace_namespaced_namespace_finalize_with_http_info(body, name, **kwargs)\n else:\n (data) = self.replace_namespaced_namespace_finalize_with_http_info(body, name, **kwargs)\n return data", "def updateFromNamespace(self, args: argparse.Namespace, *, remove: bool = False):\n undefined = object()\n\n for field in dataclasses.fields(self):\n # In case a user wants to overwrite a field with None,\n # we use not None but `undefined` as the default value\n member = getattr(args, field.name, undefined)\n if member is undefined:\n continue\n setattr(self, field.name, member)\n if remove:\n delattr(args, field.name)", "def replace_namespaced_service_status(self, body, namespace, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.replace_namespaced_service_status_with_http_info(body, namespace, name, **kwargs)\n else:\n (data) = self.replace_namespaced_service_status_with_http_info(body, namespace, name, **kwargs)\n return data", "def ReplaceServiceAccount(self, request, global_params=None):\n config = self.GetMethodConfig('ReplaceServiceAccount')\n return self._RunMethod(\n config, request, global_params=global_params)", "def ReplaceServiceAccount(self, request, global_params=None):\n config = self.GetMethodConfig('ReplaceServiceAccount')\n return self._RunMethod(\n config, request, global_params=global_params)", "def set_ns_prefix(self, ns_for_name: Dict[str, Tuple[str, str]]) -> None:\n self.c_prefix, self.f_prefix = ns_for_name[self.class_name]\n self.ret_type.set_ns_prefix(ns_for_name, self.c_prefix, self.f_prefix)\n for param in self.params:\n param.set_ns_prefix(ns_for_name, self.c_prefix, self.f_prefix)", "def updateUniProtTaxonomy(self):\n logger.info(\"Running updateUniProtTaxonomy...\")\n ok = False\n try:\n ptsW = ProteinTargetSequenceWorkflow(self.__cfgOb, self.__cachePath)\n ok = ptsW.updateUniProtTaxonomy()\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n return ok", "def namespace(*args, absoluteName: bool=True, addNamespace: AnyStr=\"\", collapseAncestors:\n AnyStr=\"\", deleteNamespaceContent: bool=True, exists: Union[AnyStr, bool]=\"\",\n force: bool=True, isRootNamespace: Union[AnyStr, bool]=\"\",\n mergeNamespaceWithOther: AnyStr=\"\", mergeNamespaceWithParent: bool=True,\n mergeNamespaceWithRoot: bool=True, moveNamespace: List[AnyStr, AnyStr]=None,\n parent: AnyStr=\"\", recurse: bool=True, relativeNames: bool=True, removeNamespace:\n AnyStr=\"\", rename: List[AnyStr, AnyStr]=None, setNamespace: AnyStr=\"\",\n validateName: AnyStr=\"\", q=True, query=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def replace_namespaced_namespace_status_with_http_info(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an 
unexpected keyword argument '%s'\"\n \" to method replace_namespaced_namespace_status\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `replace_namespaced_namespace_status`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `replace_namespaced_namespace_status`\")\n\n resource_path = '/api/v1/namespaces/{name}/status'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n return self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1Namespace',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'))", "def setPackageVersion(self, *args):\n return _libsbml.ISBMLExtensionNamespaces_setPackageVersion(self, *args)" ]
[ "0.6844458", "0.60155666", "0.5898544", "0.5898544", "0.56981826", "0.5425525", "0.53448004", "0.5315865", "0.52691925", "0.52540755", "0.5249237", "0.522236", "0.51815814", "0.51776683", "0.51709676", "0.507831", "0.50578856", "0.49961218", "0.49445054", "0.49280253", "0.4923932", "0.49225587", "0.4905982", "0.48991808", "0.48775885", "0.48559847", "0.48417673", "0.47709802", "0.47590753", "0.47578782", "0.47542632", "0.47526205", "0.47522646", "0.4745279", "0.47392672", "0.47303414", "0.47301093", "0.47285792", "0.4714662", "0.4696622", "0.46894646", "0.46852466", "0.46714023", "0.46702176", "0.46512225", "0.46512225", "0.46477565", "0.4645903", "0.46433014", "0.46400085", "0.46305588", "0.46238995", "0.461423", "0.46107322", "0.46106243", "0.46010256", "0.4592156", "0.45852736", "0.4561131", "0.4544733", "0.45420453", "0.45403028", "0.45387036", "0.45385978", "0.4533349", "0.45232123", "0.45205325", "0.45178676", "0.45178676", "0.45165125", "0.45101163", "0.4509844", "0.45098254", "0.45016986", "0.45010826", "0.44942617", "0.4494104", "0.44919664", "0.44848803", "0.44791085", "0.44760796", "0.44719616", "0.44688424", "0.44606304", "0.44570124", "0.44556722", "0.44525954", "0.44444543", "0.44416663", "0.44404846", "0.44368276", "0.44264683", "0.44068712", "0.4401468", "0.4401468", "0.43962282", "0.43735063", "0.43444476", "0.4332751", "0.43287617" ]
0.54768914
5
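One triplet record closes here (negatives, negative_scores, document_score, document_rank) before the next query/document pair begins. Below is a minimal sketch of how records with this shape could be consumed, assuming the dump corresponds to one JSON object per line in a JSONL file; the file name triplets.jsonl and both helper names are hypothetical placeholders, not part of the dataset.

import json

def iter_triplets(path):
    # Yield (query, positive_document, negatives) from one JSON record per line.
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            rec = json.loads(line)
            yield rec["query"], rec["document"], rec["negatives"]

def hardest_negatives(record, k=3):
    # Scores are stored as strings in these records (e.g. "0.54768914"),
    # so cast to float before comparing. The negatives whose scores sit
    # closest to the positive document's score are the hardest distractors.
    doc_score = float(record["document_score"])
    scored = zip(record["negatives"], map(float, record["negative_scores"]))
    ranked = sorted(scored, key=lambda sn: abs(sn[1] - doc_score))
    return [snippet for snippet, _ in ranked[:k]]

if __name__ == "__main__":
    for query, positive, negatives in iter_triplets("triplets.jsonl"):
        print(query, "->", len(negatives), "negatives")
        break

Selecting negatives by score proximity in this way is one common choice for contrastive training on such triplets, since near-miss snippets teach a retriever more than randomly sampled ones; the metadata.objective.triplet field in each record suggests that pairing is the intended use.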
Updates the description of the specified user.
def update_user(self, user_id, update_user_details, **kwargs): resource_path = "/users/{userId}" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_user got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_user_details, response_type="User") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_user_details, response_type="User")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self, user: U) -> None:\n ...", "def update_user():", "def update_user():\n #TODO user update \n pass", "async def set_profile_description(self, ctx, *, description: str):\n max_words = self.plugin.data.profile.max_description_length\n if len(description) > max_words:\n res = f\"{ctx.emotes.web_emotion.xx} Sorry but profile description cannot exceed {max_words} word limit.\"\n return await ctx.send_line(res)\n profile = await self.cache.get_profile(ctx.author.id)\n await profile.set_description(description)\n embed = self.bot.theme.embeds.primary(title=\"✅ Your Profile Description has been updated to:\")\n embed.set_author(name=ctx.author.name, icon_url=ctx.author.avatar_url)\n embed.description = profile.description\n await ctx.send(\"\", embed=embed)", "def test_040_update_user(self):\n\n testflow.step(\"Updating user %s\", TEST_USER2)\n assert USER_CLI.run(\n 'edit',\n TEST_USER2,\n attribute='firstName=userX2',\n )[0]", "def about_me(user_id, text):\n UserProfile.objects.filter(pk=user_id).update(about_me=text)", "def update_profile(username):\n\n description = request.json.get('description')\n token = request.headers.get('token')\n\n if description is None:\n return jsonify({'message': 'New description not provided'}), 404\n\n # Token Validation\n token_valid, response = is_token_valid(token)\n if not token_valid:\n return response\n token_username = response\n\n # Privilege handling\n if token_username != username:\n return jsonify({'message': \"You may not edit others profiles\"}), 404\n\n if username not in Profiles.keys():\n return jsonify({'message': 'User {} not found'.format(username)}), 404\n\n Profiles[username]['description'] = description\n return Profiles[username]", "def _update_user(cursor, user_id, user):\n # Create a tuple with user fields\n user_data = (user[User.PROPERTIES.FOLLOWERS],\n user[User.PROPERTIES.FOLLOWING],\n user[User.PROPERTIES.DESIGNS],\n user[User.PROPERTIES.COLLECTIONS],\n user[User.PROPERTIES.MAKES],\n user[User.PROPERTIES.LIKES],\n user[User.PROPERTIES.SKILL_LEVEL],\n user_id)\n\n cursor.execute(dbq.UPDATE_USER, user_data)\n logger.debug(\"user_id {} updated\".format(user_id))", "def update_user(mysql: MySQL, uid: int, name: str,\n description: str, contact: str) -> str:\n dao_user = DAOUser(mysql)\n user_to_update = dao_user.get_user_by_uid(uid)\n if user_to_update is None:\n raise ObjectNotExistsError(\"The user\")\n else:\n user_to_update.name = name\n user_to_update.description = description\n user_to_update.contact = contact\n dao_user.update_user(user_to_update)\n return \"User info updated.\"", "def updateUser(self, uID, ufirstname, ulastname, udescription, urole, uclassification):\n cursor = self.conn.cursor()\n query= \"UPDATE Users \"\\\n \"SET ufirstname= %s, ulastname= %s, udescription= %s, urole= %s, uclassification= %s \"\\\n \"WHERE uID= %s; \"\n cursor.execute(query,(ufirstname, ulastname, udescription, urole, uclassification,uID,))\n self.conn.commit()\n return uID", "def put(self, user_id):\r\n return update_user(request, user_id)", "def update_user(self, user):\n query = TABELLE['id_users']['update']\n return self.execute(query,\n (user['admin'], user['tester'], user['loot_user'], user['loot_admin'], user['banned'],\n user['id']))", "def update_user_data(self, new_user: User):\n self.user_data.update_user_data(new_user)", "def update(self, user_id, first_name=None, last_name=None, email=None, title=None,\n dept=None, notes=None, admin_role=None, app_role=None, email_notification=None):\n\n url = 
\"{0}/users/{1}\".format(self.base_url, user_id)\n url = self._add_token_to_url(url)\n payload = self.get(user_id)\n\n # get rid of fields that aren't required for PUT\n pop_fields = ['complete_json',\n 'entity_type',\n 'id',\n 'image',\n 'is_deleted',\n 'tags',\n 'username']\n for field in pop_fields:\n payload.pop(field)\n\n # replace fields with updated ones from kwargs\n if first_name:\n payload[\"first_name\"] = first_name\n if last_name:\n payload[\"last_name\"] = last_name\n if email:\n payload[\"email\"] = email\n if title:\n payload[\"title\"] = title\n if dept:\n payload[\"dept\"] = dept\n if notes:\n payload[\"notes\"] = notes\n if app_role:\n payload[\"user_type\"] = app_role\n if email_notification is not None:\n payload[\"subscribed_to_emails\"] = email_notification\n\n # Logic for setting admin status is slightly more complicated:\n if admin_role is None:\n pass\n elif admin_role == \"app_admin\":\n payload[\"admin\"] = True\n payload[\"roles\"] = \"\"\n elif admin_role == \"data_admin\":\n payload[\"admin\"] = False\n payload[\"roles\"] = \"data_admin\"\n else:\n payload[\"admin\"] = False\n payload[\"roles\"] = \"\"\n\n self.logger.debug(\"Sending the user information {0} to {1}\".format(json.dumps(payload), url))\n self.session.headers.update({\"Content-Type\": \"application/json\"}) # Set special header for this post\n response = self.session.put(url, data=json.dumps(payload), verify=False)\n self.logger.debug(\"Received response code {0} with reason {1}...\".format(response.status_code, response.reason))\n self.session.headers.pop(\"Content-Type\") # Remove header, as it affects other tests\n return response.json()['response']", "def modify_user(user_data):\r\n raise NotImplementedError()", "def do_user_update():\n targetUsers = User.query.filter_by(id=request.form['id']).all()\n if not any(targetUsers):\n return user_list(\"Unknown user.\")\n\n targetUser = targetUsers[0]\n\n targetUser.first_name = request.form['first_name']\n targetUser.name = request.form['name']\n targetUser.nick = request.form['nick']\n targetUser.mail = request.form['mail']\n targetUser.role = request.form['role']\n targetUser.state = request.form['state']\n targetUser.gender = request.form['gender']\n targetUser.meter_id = request.form['meter_id']\n targetUser.group_id = request.form['group_id']\n\n db.session.commit()\n return user_list(\"Updated user \" + targetUser.name)", "async def profile_description(self, ctx):\n profile = await self.cache.get_profile(ctx.author.id)\n embed = self.bot.theme.embeds.primary()\n embed.set_author(name=f\"{ctx.author.name}'s Profile Description\", icon_url=ctx.author.avatar_url)\n embed.description = profile.description\n await ctx.send(embed=embed)", "def request_description_update():\n global should_update_description\n should_update_description = True", "def fusion_api_edit_user(self, body, uri, api=None, headers=None):\n return self.user.update(body, uri, api, headers)", "def _edit_user(self):\n users = fileIO.load_json(\"users.json\")\n print(\"The list of users is as follows: \")\n for i in users:\n print(users[i][\"name\"])\n #List specific user's settings and get user id\n userID = self._list_user_settings(users)\n #Loop until valid option given\n option = False\n while not option:\n option = input(\"Please enter the setting you would like to change: \")\n if option not in users[userID]:\n option = False\n print(\"That setting is not valid.\")\n #Get input for new setting\n args = input(\"Please enter what you would like to change that setting to: 
\")\n #Output\n command = \"edit_user {0} {1} {2}\\r\\n\".format(userID, option, args)\n return(command)", "def updateUser(self, payload):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\treturn(putUrl(url, self.credentials, payload))", "def update_user(self, u, p):\r\n\t\tlogger.debug(\"Entering\")\r\n\t\tval, msg = self.add_user(u, p)\r\n\t\t\r\n\t\tif val:\r\n\t\t\tmsg = \"%s has been updated.\" % u\r\n\t\t\r\n\t\tlogger.debug(\"Exiting\")\r\n\t\treturn val, msg", "def change_user_info(uid, name, description):\r\n session = tables.get_session()\r\n if session is None:\r\n return {'success': False}\r\n response = {}\r\n try:\r\n user_account = UserAccount()\r\n response['success'] = user_account.update_user_basic_info(uid, name, description, session)\r\n session.commit()\r\n except SQLAlchemyError as err:\r\n LOGGER.error('Check user password failed: %s', err)\r\n return response\r\n finally:\r\n session.close()\r\n return response", "def edit_user(user_id):\n\n db_user = User.query.get_or_404(user_id)\n\n return render_template(\"edit_user.html\",\n headline=f\"Edit Blogly {db_user.get_full_name()}\",\n user=db_user)", "def update_profile_data(self, **kwargs):\n # TODO: double check that the following will actually check if the user is not logged in, unit test\n if not self.uprofile:\n return None\n desc = kwargs.get('description', self.uprofile.description)\n self.uprofile.description = desc\n self.uprofile.save()\n return self.uprofile", "async def update_user(new_data: UpdateUser, user_id: str = Path(..., description=\"ID value of the desired user\"),\n db_handler: DBHandler = Depends(database_dependency)):\n try:\n updated_record = await db_handler.update_user(user_id=user_id, new_data=new_data)\n updated_record = init_BaseUser(updated_record)\n except DBHandlerException as e:\n return JSONResponse(status_code=400)\n\n return updated_record", "def description(self, new_description):\r\n self.set({\"description\": new_description})", "def update_user(id):\n pass", "def update_user(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload=user, request_type=self.REQUEST_PUT, version=\"v1\")", "def update_user(context, params):\n\n user = User.objects.filter(id=params.get('id')).first()\n if not user:\n raise ValueError(\"user not found\")\n user.language = Language.objects.filter(id=params.get('language_id', None)).first()\n user.deputy = User.objects.filter(id=params.get('deputy_id', None)).first()\n # user.edited_by = context.user\n\n user.save()\n\n update_person(context, user, params)\n\n user.save()\n return user", "def update(self, description=None, command=None, password=None):\n self._router_request(\n self._make_request_data(\n 'updateUserCommand',\n data=dict(\n params=dict(\n uid=self.parent,\n id=self.id,\n description=description if description else self.description,\n command=command if command else self.command,\n password=password,\n )\n )\n )\n )\n\n uc_data = self.list_user_commands(self.parent)\n for uc in uc_data:\n if uc['id'] == self.id:\n self.__init__(self.api_url, self.api_headers, self.ssl_verify, uc, parent=self.parent)\n\n return True", "def update_user(self, user_id, **kwargs):\n user = self.get(user_id, raise_error=True)\n if 'display_name' in kwargs:\n user.display_name = kwargs['display_name']\n if 'email' in kwargs:\n user.email = kwargs['email']\n if 'verified' in kwargs:\n user.verified = kwargs['verified']\n self.session.add(user)", "def 
update_user(self, userId, newName, newPhone):\n\n try:\n query = \"update user set userName = '{}', phone='{}' where userId ={}\".format(newName, newPhone, userId)\n print(query)\n cur = self.con.cursor()\n cur.execute(query)\n self.con.commit()\n\n logger.info(\"updated\")\n except Exception as e:\n logger.error(\"Error occured at data Update \", e)", "def update_description(self, host, baseUrl, description):\n self._host = host\n self._urlBase = baseUrl\n self._description = description\n return", "def updateUser(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_description_debounced(self, value):\n self.update_description(value)", "def update_user(user_id, data):\n logging.debug(\"Uptating user: user_id={}\".format(user_id))\n return ask('appusers/{0}'.format(user_id), data, 'put')", "def update_user(self, username, password, fullname, description, email):\n params = {\n \"f\" : \"json\",\n \"username\" : username\n }\n if password is not None:\n params['password'] = password\n if fullname is not None:\n params['fullname'] = fullname\n if description is not None:\n params['description'] = description\n if email is not None:\n params['email'] = email\n uURL = self._url + \"/users/update\"\n return self._con.post(path=uURL, postdata=params)", "def addUser(self, description, user_uuid):\n # removes the request type in the description\n if 'type' in description.keys():\n del description['type']\n # add user debug log\n log(logging.DEBUG, \"\\\"%s\\\": %s\" % (user_uuid, description))\n # add new user with next uid to registry users and increment uid\n user = UserDescription(self.uid_counter, description)\n self.uid_counter += 1\n self.users[user_uuid] = user\n # creates corresponding message and receipt boxes\n for path in [self.userMessageBox(user_uuid), self.userReceiptBox(user_uuid)]:\n try:\n os.mkdir(path)\n except:\n logging.exception(\"Cannot create directory \" + path)\n sys.exit(1)\n # create user description file in the corresponding message box\n try:\n path = os.path.join(MBOXES_PATH, user_uuid, DESC_FILENAME)\n log(logging.DEBUG, \"Adding user description on \" + path)\n self.saveOnFile(path, json.dumps(description))\n except:\n logging.exception(\"Cannot create description file\")\n sys.exit(1)\n # if successful returns the user\n return user", "def add_description(self, desc):\n self.description = desc", "def set_desc(self, item_desc):\r\n self.description = item_desc", "def update_user(self, instance, user, name=None, password=None, host=None):\n return instance.update_user(user, name=name, password=password,\n host=host)", "def update_user(self, user_name, new_user_name=None, new_path=None):\r\n params = {'UserName' : user_name}\r\n if new_user_name:\r\n params['NewUserName'] = new_user_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateUser', params)", "def desc_user(self):\n print(f\"\\tF.N: {self.first_name.title()}\\n\\tL.N:{self.last_name.title()}\\n\\tEmail: {self.email}\\n\\tage:{self.age}\")", "def put(self, user_id):\n data = request.json\n return update_user(data, user_id)", "def update_user_info(user, save=True):\n p = bayou.Person.from_default_services(user.username)\n\n user.email = p.email if p.email else user.email\n user.first_name = p.first_name if p.first_name else user.first_name\n user.last_name = p.surname if p.surname else user.last_name\n\n if save:\n user.save()\n\n 
return user", "def set_description(self, desc: str) -> None:\n self.metadata.data[\"description\"] = desc", "def edit_user(user_id):\n user = User.query.get_or_404(user_id)\n\n return render_template('edit-user.html', user=user)", "def user_edit(request):\n\n if request.method != 'POST':\n return HttpResponseNotAllowed(['POST'])\n\n data = json.loads(request.body.decode('utf-8'))\n\n auth_token = str(data.get('auth_token', ''))\n edit_user_info = data.get('edit_user_info', '')\n username = str(edit_user_info.get('username', ''))\n\n try:\n if not verify_admin(auth_token):\n raise PlantalyticsAuthException(ADMIN_INVALID)\n\n message = (\n 'Attempting to edit info for user: {}.'\n ).format(username)\n logger.info(message)\n check_user_parameters(edit_user_info)\n cassy.edit_user(edit_user_info)\n message = (\n 'Successfully edited info for user: {}.'\n ).format(username)\n logger.info(message)\n body = {\n 'errors': {}\n }\n return HttpResponse(\n json.dumps(body),\n content_type='application/json'\n )\n except PlantalyticsException as e:\n message = (\n 'Error attempting to edit user info. Error code: {}'\n ).format(str(e))\n logger.warn(message)\n error = custom_error(str(e))\n return HttpResponseForbidden(error, content_type='application/json')\n except Exception as e:\n message = (\n 'Unknown error occurred while attempting to edit user info:'\n )\n logger.exception(message)\n error = custom_error(UNKNOWN, str(e))\n return HttpResponseServerError(error, content_type='application/json')", "def _update_context_with_user_info(context, user, user_certificate):\n user_fullname = get_preferred_certificate_name(user)\n\n context['username'] = user.username\n context['course_mode'] = user_certificate.mode\n context['accomplishment_user_id'] = user.id\n context['accomplishment_copy_name'] = user_fullname\n context['accomplishment_copy_username'] = user.username\n\n context['accomplishment_more_title'] = _(\"More Information About {user_name}'s Certificate:\").format(\n user_name=user_fullname\n )\n # Translators: This line is displayed to a user who has completed a course and achieved a certification\n context['accomplishment_banner_opening'] = _(\"{fullname}, you earned a certificate!\").format(\n fullname=user_fullname\n )\n\n # Translators: This line congratulates the user and instructs them to share their accomplishment on social networks\n context['accomplishment_banner_congrats'] = _(\"Congratulations! This page summarizes what \"\n \"you accomplished. 
Show it off to family, friends, and colleagues \"\n \"in your social and professional networks.\")\n\n # Translators: This line leads the reader to understand more about the certificate that a student has been awarded\n context['accomplishment_copy_more_about'] = _(\"More about {fullname}'s accomplishment\").format(\n fullname=user_fullname\n )", "def test_update_the_created_user():\n pytest.test_user.name += \"Updated\"\n response = api_helper.update_user(pytest.test_user)\n assert response.status_code == 200", "def mod_user(self, username, data):\n headers = {\"user-agent\": self.u_agent}\n req_url = self.normalize_admin_url(u\"users/{}\".format(username))\n res = requests.put(\n req_url,\n headers=headers,\n auth=self.auth,\n data=json.dumps(data),\n verify=False,\n )\n if res.status_code == 200:\n return Response(0, u\"User {} has been modified\".format(username))\n else:\n return Response(res.status_code, res)", "def update_user(cls, **kwargs):\n return cls._do_call(\n 'PUT', cls.api_endpoint + 'users', params=kwargs)", "def describe_user(self):\n message = f\"This user's full name is {self.first_name.title()} \"\n message += f\"{self.last_name.title()}. {self.first_name.title()} is \"\n message += f\"{self.age} years old, lives in \"\n message += f\"{self.location.title()} and uses {self.username} \"\n message += f\"as username.\"\n print(message)", "def clean_user_desc(self):\n desc = self.data['user']['description']\n if desc is not None:\n desc = ' '.join(re.sub(\"(RT : )|(@[\\S]+)|(&\\S+)|(http\\S+)\", \" \", desc).split())\n desc = \" \".join(re.sub(\"(#\\S+)\", ' ', desc).split())\n desc = ''.join(list(filter(lambda x: x.isalpha() or x is ' ',\n desc))).replace(' ', ' ').replace(' ', ' ').lower().strip()\n return {'plain_desc': desc}", "def edit_user(user_id):\n user = User.query.get_or_404(user_id)\n return render_template(\"users/edit_user.html\", user=user)", "def AddUser(parser, help_text):\n parser.add_argument('--user', help=help_text)", "def edit_user_name(self, dto):\n user_id = dto[\"user_id\"]\n user_pin = dto[\"pin\"]\n new_user_name = dto[\"new_user_name\"]\n user = self._find_user_by_id_and_pin(user_id, user_pin)\n self.validate_user_name(new_user_name)\n user[\"user_name\"] = new_user_name\n self._user_dao.save_user(user)", "def testUpdateUser(self):\n UserAPI().create([(u'test', u'secret', u'name', u'name@example.com')])\n user = getUser(u'test')\n passwordHash = user.passwordHash\n self.store.commit()\n info = TUserUpdate(u'test', u'password', u'new-name',\n u'new-name@example.com')\n with login(u'fluiddb', self.admin.objectID, self.transact) as session:\n yield self.facade.updateUser(session, info)\n\n self.store.rollback()\n self.assertEqual(u'test', user.username)\n self.assertNotEqual(passwordHash, user.passwordHash)\n self.assertEqual(u'new-name', user.fullname)\n self.assertEqual(u'new-name@example.com', user.email)", "def set_user_suggestion(user_id: int):\n session = Session()\n\n user = get_user_by_id(user_id)\n\n user.suggestion_expiry = properties.vote_expiration()\n\n session.merge(user)\n session.commit()\n session.close()", "def update_user_profile(user_info):\n user_id = user_info[\"USER_ID\"]\n user_collection.find_one_and_update(\n {\"_id\": user_id},\n {\n \"$set\": {\n \"username\": user_info[\"username\"],\n \"email\": user_info[\"email\"],\n \"avatar\": user_info[\"avatar\"],\n \"githubURL\": user_info[\"githubURL\"],\n \"linkedinURL\": user_info[\"linkedinURL\"],\n \"stackoverflowURL\": user_info[\"stackoverflowURL\"],\n \"skills\": 
user_info[\"skills\"],\n }\n },\n upsert=False,\n )", "def update_user(user_id):\n\n user = User.query.get_or_404(user_id)\n user.first_name = request.form[\"edit_first_name\"]\n user.last_name = request.form[\"edit_last_name\"]\n user.image_url = request.form[\"edit_image_url\"]\n\n db.session.add(user)\n db.session.commit()\n return redirect(\"/users\")", "def description(self, value):\n self._update_values('description', value)", "def edit_show_user(user_id):\n edited_user = User.query.get_or_404(user_id)\n\n edited_user.first_name = request.form['first_name']\n edited_user.last_name = request.form['last_name']\n edited_user.image_url = request.form['image_url']\n\n db.session.add(edited_user)\n db.session.commit()\n\n return redirect('/')", "def describe_user(self):\n print(\"We have stored next information about user \" +\n self.first_name.title() + \" \" + self.last_name.title() +\n \":\")\n print(\"- Username: \" + self.username)\n print(\"- Age: \" + str(self.age))\n print(\"- Location: \" + self.location.title())", "def update_description(self, option, desc):\n _, command = self.__options[option]\n self.__options[option] = (desc, command)", "def update_user(self, user_id, new_user_info):\n for user in self.get_all_dbusers():\n if user.get(\"user_id\") == user_id:\n self.ireporter_db.update_data_user_role(user_id, new_user_info.get(\"is_admin\"))\n return user\n return None", "def update_user(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.image_url = request.form['image_url']\n\n\n db.session.add(user)\n db.session.commit()\n flash(f\"{user.full_name} user has been edited.\")\n\n return redirect(\"/users\")", "async def slashtag_edit_description(\n self, ctx: commands.Context, tag: GuildTagConverter, *, description: str\n ):\n await ctx.send(await tag.edit_description(description))", "def user_changes(self, user, what=None):\n pass", "def set_description(self, data):\n self._description = self._uni(data)", "def set_description(self, description):\n self.description = description", "async def rep_user(self, ctx, *, user: discord.Member = None):\n if user and user.bot:\n return await ctx.send_line(\"😔 Sorry but I just can't do that.\")\n if user and user.id == ctx.author.id:\n return await ctx.send_line(\"🙂 Nice try but wouldn't that be unfair?\")\n author_profile = await self.cache.get_profile(ctx.author.id)\n if user is None:\n if author_profile.can_rep:\n res = \"👌 You can rep someone now.\"\n else:\n res = f\"⏳ You can rep again {author_profile.next_rep.humanize()}.\"\n return await ctx.send_line(res)\n\n if author_profile.can_rep:\n target_profile = await self.cache.get_profile(user.id)\n if not target_profile:\n res = self.plugin.data.responses.no_profile.format(user_name=user.name)\n return await ctx.send_line(res)\n await target_profile.rep(author_profile)\n res = f\"You added one reputation point to {user.name}.\"\n await ctx.send_line(res, ctx.author.avatar_url)\n else:\n res = f\"⏳ You can rep again {author_profile.next_rep.humanize()}.\"\n await ctx.send_line(res)", "def update(self, oid, name=None, email=None, default_project=None, \n domain=None, password=None, enabled=None, description=None):\n data = {\"user\": {}}\n \n if name is not None:\n data['user']['name'] = name\n if email is not None:\n data['user']['email'] = email \n if default_project is not None:\n data['user']['default_project_id'] = default_project\n if domain is not None:\n data['user']['domain_id'] = 
domain\n if password is not None:\n data['user']['password'] = password\n if enabled is not None:\n data['user']['enabled'] = enabled\n if description is not None:\n data['user']['description'] = description \n \n path = '/users/%s' % oid\n res = self.client.call(path, 'PATCH', data=json.dumps(data), \n token=self.manager.identity.token)\n self.logger.debug('Update openstack user: %s' % truncate(res))\n return res[0]['user']", "def edit_item(self, item_id, name, description, category_id, user_id):\r\n item = self._db_manager.get_item(item_id)\r\n if item is None:\r\n flash(\"Invalid item.\")\r\n return\r\n if item[\"user_id\"] != user_id:\r\n flash(\"Only the original creator can edit an item.\")\r\n return\r\n category = self._db_manager.get_category(category_id)\r\n if category is None:\r\n flash(\"Invalid category.\")\r\n return\r\n if category[\"user_id\"] != user_id:\r\n flash(\"You can only add items to categories you created.\")\r\n return\r\n flash(self._db_manager.edit_item(\r\n item_id=item_id,\r\n name=name,\r\n description=description,\r\n category_id=category_id\r\n ))", "def _set_desc(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0 .. 64']}), default=unicode(\"\"), is_leaf=True, yang_name=\"desc\", rest_name=\"desc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u\"Description of the user (default='')\", u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"desc must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0 .. 
64']}), default=unicode(\"\"), is_leaf=True, yang_name=\"desc\", rest_name=\"desc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u\"Description of the user (default='')\", u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__desc = t\n if hasattr(self, '_set'):\n self._set()", "def put(self, user_id):\n\n user_data, error = user_schema.load(api.payload['data'])\n\n user = User.objects.get_or_404(public_id=user_id)\n user.update(updated_at=datetime.utcnow, **user_data)\n \n return user_schema.dump(user)", "def update_item(self, id: str, user: User, **kwargs) -> None:", "def edit_user(user_id):\n\n user = User.query.get_or_404(user_id)\n return render_template('users/edit.html', user=user)", "def save_with_metadata(self, user):\r\n self.save()\r\n self.runtime.modulestore.update_item(self, user.id if user else None)", "def update_user(self,\r\n olduser,\r\n newuser,\r\n entrylist=None):\r\n\r\n if entrylist is None:\r\n entrylist = self.apply_limit(self.find_within(indexfrom=0,orequal=True))\r\n\r\n if not isinstance(entrylist[0], str):\r\n entrylist = [str(a_temp)\r\n for a_temp in entrylist]\r\n\r\n for i in entrylist:\r\n if i in self.indexes():\r\n if self.get_metadata_from_note(i)['user'] == olduser:\r\n tempnote = self.get_note(i).change_user(newuser)\r\n self.add_note(i,note=tempnote)", "def update(\n self, name: str = None, company: str = None, bio: str = None, avatar: str = None\n ):\n query = gql(\n \"\"\"\n mutation UserUpdate($user: UserUpdateInput!) {\n userUpdate(user: $user)\n }\n \"\"\"\n )\n params = {\"name\": name, \"company\": company, \"bio\": bio, \"avatar\": avatar}\n\n params = {\"user\": {k: v for k, v in params.items() if v is not None}}\n\n if not params[\"user\"]:\n return SpeckleException(\n message=\"You must provide at least one field to update your user profile\"\n )\n\n return self.make_request(\n query=query, params=params, return_type=\"userUpdate\", parse_response=False\n )", "def sipserver_user_update(self, user: str, password: str) -> None:\n self.update_endpoint_in_sipserver(endpoint=user, password=password)", "def describe_user(self):\n print(\"\\nThis is \" + self.first_name + \" \" +\n self.last_name + \"'s user details:\")\n print(\"Username: \" + self.username)\n print(\"Email: \" + self.email)", "def describe_user(self):\n print(\"\\nThis is \" + self.first_name + \" \" +\n self.last_name + \"'s user details:\")\n print(\"Username: \" + self.username)\n print(\"Email: \" + self.email)", "async def setuserinfo(self, ctx, server: str, user_uuid: str, user_intid: str):\n self.settings.setUserInfo(server, user_uuid, user_intid)\n await ctx.send(inline('Done'))", "def set_description(self, description):\r\n self.__description = description", "def edit_user_process(user_id):\n\n # extract form data, edit, commit, then redirect to /users\n first_name = request.form[\"first-name\"].strip()\n last_name = request.form[\"last-name\"].strip()\n image_url = request.form[\"image-url\"].strip()\n\n msg = db_edit_user(user_id, first_name, last_name, image_url)\n\n flash(msg[\"text\"], msg[\"severity\"])\n\n return redirect(f\"/users/{user_id}\")", "async def nick(\n self, context: Context, user: discord.User, *, nickname: str = None\n ) -> None:\n member = context.guild.get_member(user.id) or await context.guild.fetch_member(\n user.id\n )\n try:\n await 
member.edit(nick=nickname)\n embed = discord.Embed(\n description=f\"**{member}'s** new nickname is **{nickname}**!\",\n color=0x9C84EF,\n )\n await context.send(embed=embed)\n except:\n embed = discord.Embed(\n description=\"An error occurred while trying to change the nickname of the user. Make sure my role is above the role of the user you want to change the nickname.\",\n color=0xE02B2B,\n )\n await context.send(embed=embed)", "async def update(self):\n self.data = await self.api.user.get()", "def test_that_a_user_can_edit_their_profile(self):\n self.authorize_user(self.user_login_details)\n url = self.profiles_url + \\\n '{}'.format(self.user['user']['username']) + \"/\"\n response = self.client.patch(url, data=self.user_bio)\n self.assertEqual(response.data['bio'], \"You are a peculiar man.\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def updateUserElder(self, address, newUser):\n try:\n DatabaseCollections.userCollection.update_one(\n {\n \"address\": address\n },\n {\"$set\": {\n \"newUser\": newUser\n }})\n return True\n except IOError:\n return False", "def update_db_with_user_edits(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.img_url = request.form['img_url']\n\n db.session.add(user)\n db.session.commit()\n\n return redirect('/users')", "def edit_user(user_id):\n\n user = User.query.get_or_404(user_id)\n \n first = request.form['first_name']\n last = request.form['last_name']\n image = request.form['image_url']\n \n if not first or not last:\n flash(\"Please enter first and last name.\")\n return redirect(f\"/users/{user.id}/edit\")\n \n user.first_name = first\n user.last_name = last\n \n if image:\n user.image_url = image\n\n db.session.add(user)\n db.session.commit()\n\n return redirect(\"/users\")", "def request_user_update():\n target_user = User.query.filter_by(id=request.args['id']).first()\n if target_user is None:\n return user_list(\"Unknown user.\")\n\n return Response(render_template('admin/user/create-update.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/admin/user/update\",\n genders=list(GenderType),\n states=list(StateType),\n groups=Group.query.all(),\n roles=list(RoleType),\n id=target_user.id,\n gender=target_user.gender,\n first_name=target_user.first_name,\n name=target_user.name,\n nick=target_user.nick,\n mail=target_user.mail,\n meter_id=target_user.meter_id,\n group_id=target_user.group_id,\n role=target_user.role,\n state=target_user.state),\n mimetype='text/html')", "def _save_user(self, user):\n self.firebase.patch(f'/{self.USERS_KEY}', {str(user.id): user.username})", "def set_description(desc):\n global last_description\n last_description = desc", "def edit_user(user_id):\n user = User.query.get_or_404(user_id)\n return render_template('/users/edit_page.html', user=user)", "def update(self, user, name=None, password=None, host=None):\n if not any((name, password, host)):\n raise exc.MissingDBUserParameters(\"You must supply at least one of \"\n \"the following: new username, new password, or new host \"\n \"specification.\")\n if not isinstance(user, CloudDatabaseUser):\n # Must be the ID/name\n user = self.get(user)\n dct = {}\n if name and (name != user.name):\n dct[\"name\"] = name\n if host and (host != user.host):\n dct[\"host\"] = host\n if password:\n dct[\"password\"] = password\n if not dct:\n raise exc.DBUpdateUnchanged(\"You must supply at least one changed \"\n \"value when updating a 
user.\")\n uri = \"/%s/%s\" % (self.uri_base, user.name)\n body = {\"user\": dct}\n resp, resp_body = self.api.method_put(uri, body=body)\n return None", "def view_update_user(self, user, username, password):\r\n user.realm._checker.passwd(username, password, True)", "def update( self, trans, id, payload, **kwd ):\n current_user = trans.user\n user_to_update = self.user_manager.by_id( self.decode_id( id ) )\n\n # only allow updating other users if they're admin\n editing_someone_else = current_user != user_to_update\n is_admin = trans.api_inherit_admin or self.user_manager.is_admin( current_user )\n if editing_someone_else and not is_admin:\n raise exceptions.InsufficientPermissionsException( 'you are not allowed to update that user', id=id )\n\n self.user_deserializer.deserialize( user_to_update, payload, user=current_user, trans=trans )\n return self.user_serializer.serialize_to_view( user_to_update, view='detailed' )" ]
[ "0.67517585", "0.6600952", "0.6573515", "0.6480376", "0.6474432", "0.63814116", "0.63341343", "0.6327405", "0.61766285", "0.6165234", "0.6114876", "0.61027694", "0.60700554", "0.605967", "0.60183775", "0.60172874", "0.5994123", "0.59738964", "0.5973046", "0.59612536", "0.595875", "0.5957942", "0.59511316", "0.5908265", "0.58833355", "0.5879638", "0.5848835", "0.584457", "0.5781585", "0.57758564", "0.57754594", "0.57700574", "0.57660156", "0.5761796", "0.57588804", "0.57375795", "0.5717507", "0.5715698", "0.5702241", "0.5689819", "0.5677928", "0.5672659", "0.5670311", "0.5665661", "0.56644994", "0.56536627", "0.5637176", "0.56241316", "0.5608866", "0.560593", "0.5604933", "0.55928844", "0.5587372", "0.55803305", "0.5578308", "0.55782795", "0.55777955", "0.5549047", "0.5544633", "0.5540389", "0.5539073", "0.55383503", "0.5537792", "0.5521526", "0.55159515", "0.5514963", "0.55133164", "0.551235", "0.55092055", "0.5505746", "0.5505615", "0.5495422", "0.5494098", "0.54735696", "0.5467237", "0.54648167", "0.5464492", "0.5455819", "0.54521865", "0.54520196", "0.5450917", "0.544489", "0.54434043", "0.5442086", "0.5442086", "0.5440711", "0.54406685", "0.5439118", "0.5434324", "0.5427454", "0.5425625", "0.5422685", "0.5421616", "0.5417035", "0.5410761", "0.5407208", "0.54061526", "0.5405714", "0.53926194", "0.5392327", "0.53918976" ]
0.0
-1
Updates the capabilities of the specified user.
def update_user_capabilities(self, user_id, update_user_capabilities_details, **kwargs): resource_path = "/users/{userId}/capabilities" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_user_capabilities got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_user_capabilities_details, response_type="User") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_user_capabilities_details, response_type="User")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_user():", "def update(self, user: U) -> None:\n ...", "def update(self, user: 'User', privileges: 'Optional[List[str]]' = None) -> 'Optional[User]':\n return self._update(schema=UserSchema(), entity=user, privileges=privileges)", "def getCapabilities4User(session_key, user=None):\n\n roles = []\n capabilities = []\n\n # Get user info\n if user is not None:\n logger.debug('Retrieving role(s) for current user: %s', user)\n userEntities = entity.getEntities('authentication/users/%s' % user, count=-1, sessionKey=session_key)\n\n for stanza, settings in userEntities.items():\n if stanza == user:\n for key, val in settings.items():\n if key == 'roles':\n logger.debug('Successfully retrieved role(s) for user: %s', user)\n roles = val\n\n # Get capabilities\n for role in roles:\n logger.debug('Retrieving capabilities for current user: %s', user)\n roleEntities = entity.getEntities('authorization/roles/%s' % role, count=-1, sessionKey=session_key)\n\n for stanza, settings in roleEntities.items():\n if stanza == role:\n for key, val in settings.items():\n if key == 'capabilities' or key == 'imported_capabilities':\n logger.debug('Successfully retrieved %s for user: %s', key, user)\n capabilities.extend(val)\n\n return capabilities", "def update_user_entitlement(self, document, user_id):\n route_values = {}\n if user_id is not None:\n route_values['userId'] = self._serialize.url('user_id', user_id, 'str')\n content = self._serialize.body(document, '[JsonPatchOperation]')\n response = self._send(http_method='PATCH',\n location_id='8480c6eb-ce60-47e9-88df-eca3c801638b',\n version='6.0-preview.3',\n route_values=route_values,\n content=content,\n media_type='application/json-patch+json')\n return self._deserialize('UserEntitlementsPatchResponse', response)", "def set_capabilities(self, capabilities: WlSeat.capability) -> None:\n lib.wlr_seat_set_capabilities(self._ptr, capabilities)", "def update_caps(self, caps, source):\n return ObjectCapabilities.update_capabilities(self, caps, source)", "def update_user():\n #TODO user update \n pass", "def user_capacity(self, user_capacity: SmartSsdUserCapacity):\n\n self._user_capacity = user_capacity", "def update_capabilities(self):\n LOG.debug((\"Store %s doesn't support updating dynamic \"\n \"storage capabilities. 
Please overwrite \"\n \"'update_capabilities' method of the store to \"\n \"implement updating logics if needed.\") %\n reflection.get_class_name(self))", "def update_user(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload=user, request_type=self.REQUEST_PUT, version=\"v1\")", "def update_user(self, user):\n query = TABELLE['id_users']['update']\n return self.execute(query,\n (user['admin'], user['tester'], user['loot_user'], user['loot_admin'], user['banned'],\n user['id']))", "def set_capabilities(self, *dynamic_capabilites):\n for cap in dynamic_capabilites:\n self._capabilities |= int(cap)", "def updateUser(self, payload):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\treturn(putUrl(url, self.credentials, payload))", "def sipserver_user_update(self, user: str, password: str) -> None:\n self.update_endpoint_in_sipserver(endpoint=user, password=password)", "def modify_user(user_data):\r\n raise NotImplementedError()", "def update_user_data(self, new_user: User):\n self.user_data.update_user_data(new_user)", "def update_user(user_id, data):\n logging.debug(\"Uptating user: user_id={}\".format(user_id))\n return ask('appusers/{0}'.format(user_id), data, 'put')", "def update_user_metrics(self,user_id:int)->None:\n with connection.cursor() as cursor:\n cursor.execute(f\"SELECT update_user_metrics({user_id})\")\n ##TODO: this should return something ", "def update_user(self, instance, user, name=None, password=None, host=None):\n return instance.update_user(user, name=name, password=password,\n host=host)", "def update(self, user):\n\n\t\tif self == user.classroom:\n\t\t\treturn\n\n\t\tself.size += user.classroom.size\n\t\tuser.set_classroom(self)", "def update_user(cls, **kwargs):\n return cls._do_call(\n 'PUT', cls.api_endpoint + 'users', params=kwargs)", "def put(self, user_id):\r\n return update_user(request, user_id)", "def do_user_update():\n targetUsers = User.query.filter_by(id=request.form['id']).all()\n if not any(targetUsers):\n return user_list(\"Unknown user.\")\n\n targetUser = targetUsers[0]\n\n targetUser.first_name = request.form['first_name']\n targetUser.name = request.form['name']\n targetUser.nick = request.form['nick']\n targetUser.mail = request.form['mail']\n targetUser.role = request.form['role']\n targetUser.state = request.form['state']\n targetUser.gender = request.form['gender']\n targetUser.meter_id = request.form['meter_id']\n targetUser.group_id = request.form['group_id']\n\n db.session.commit()\n return user_list(\"Updated user \" + targetUser.name)", "def test_040_update_user(self):\n\n testflow.step(\"Updating user %s\", TEST_USER2)\n assert USER_CLI.run(\n 'edit',\n TEST_USER2,\n attribute='firstName=userX2',\n )[0]", "def update_user(self, user, name=None, password=None, host=None):\n return self._user_manager.update(user, name=name, password=password,\n host=host)", "def update_user(self):\n self.client.force_authenticate(user=self.user)\n self.response = self.client.patch(\n reverse(\n 'edit_account',kwargs={ 'pk': self.user.id}),\n self.updated_data, format='json'\n )\n self.user = CustomUser.objects.get(username=self.user.username)", "def update_user(user_id):\n update_usr = request.get_json()\n if not update_usr:\n abort(400, {'Not a JSON'})\n usr = storage.get(User, user_id)\n if not usr:\n abort(404)\n else:\n for key, value in update_usr.items():\n setattr(usr, key, value)\n storage.save()\n return jsonify(usr.to_dict())", "def 
update_user_affinity(self, user_id, candidate_with_feedback):\n # Update only user's neighbor that is Candidate with feedback\n neigh = candidate_with_feedback.neighbor_id_rated\n self.user_affinity.update_preference(elem1=user_id,\n elem2=neigh,\n feedback=candidate_with_feedback.feedback)", "def put(self, user_id):\n data = request.json\n return update_user(data, user_id)", "def update_usermenity(user_id):\n user = storage.get(User, user_id)\n\n if user is None:\n abort(404)\n\n put_data = request.get_json()\n if not put_data:\n abort(400, 'Not a JSON')\n\n for k, v in put_data.items():\n if k not in ['id', 'email', 'created_at', 'updated_at']:\n setattr(user, k, v)\n else:\n continue\n user.save()\n storage.save()\n return make_response(jsonify(user.to_dict()), 200)", "def mod_user(self, username, data):\n headers = {\"user-agent\": self.u_agent}\n req_url = self.normalize_admin_url(u\"users/{}\".format(username))\n res = requests.put(\n req_url,\n headers=headers,\n auth=self.auth,\n data=json.dumps(data),\n verify=False,\n )\n if res.status_code == 200:\n return Response(0, u\"User {} has been modified\".format(username))\n else:\n return Response(res.status_code, res)", "def test_add_capability_with_extras():\n mock = MagicMock()\n with patch.dict(dism.__salt__, {\"cmd.run_all\": mock}):\n with patch.dict(dism.__grains__, {\"osversion\": 10}):\n dism.add_capability(\"test\", \"life\", True)\n mock.assert_called_once_with(\n [\n dism.bin_dism,\n \"/Quiet\",\n \"/Online\",\n \"/Add-Capability\",\n \"/CapabilityName:test\",\n \"/Source:life\",\n \"/LimitAccess\",\n \"/NoRestart\",\n ]\n )", "def update_user_async(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}\".format(self.API_URL, self.USER_ENDPOINT)\n return self.__create_request(payload=user, request_type=self.REQUEST_PUT, version=\"v2\")", "def update_user_profile(IamUserArn=None, SshUsername=None, SshPublicKey=None, AllowSelfManagement=None):\n pass", "def wmUpdateUser(self):\n user_role = uiCommon.GetSessionUserRole()\n if user_role != \"Administrator\":\n raise Exception(\"Only Administrators can edit user accounts.\")\n\n args = uiCommon.getAjaxArgs()\n\n u = catouser.User()\n u.FromID(args[\"ID\"])\n\n if u.ID:\n # these changes are done BEFORE we manipulate the user properties for update.\n\n new_pw = uiCommon.unpackJSON(args.get(\"Password\"))\n random_pw = args.get(\"NewRandomPassword\")\n\n # if a password was provided, or the random flag was set...exclusively\n if new_pw:\n # if the user requesting the change *IS* the user being changed...\n # set force_change to False\n force = True\n if u.ID == uiCommon.GetSessionUserID():\n force = False\n\n u.ChangePassword(new_password=new_pw, force_change=force)\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.FullName, \"Password changed.\")\n elif random_pw:\n u.ChangePassword(generate=random_pw)\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.FullName, \"Password reset.\")\n\n # now we can change the properties\n u.LoginID = args.get(\"LoginID\")\n u.FullName = args.get(\"FullName\")\n u.Status = args.get(\"Status\")\n u.AuthenticationType = args.get(\"AuthenticationType\")\n u.ForceChange = args.get(\"ForceChange\")\n u.Email = args.get(\"Email\")\n u.Role = args.get(\"Role\")\n u.FailedLoginAttempts = args.get(\"FailedLoginAttempts\")\n u.Expires = args.get(\"Expires\")\n\n u._Groups = args.get(\"Groups\")\n\n if u.DBUpdate():\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.ID, \"User 
updated.\")\n\n return json.dumps({\"result\": \"success\"})", "def updateUser(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def _update_user(cursor, user_id, user):\n # Create a tuple with user fields\n user_data = (user[User.PROPERTIES.FOLLOWERS],\n user[User.PROPERTIES.FOLLOWING],\n user[User.PROPERTIES.DESIGNS],\n user[User.PROPERTIES.COLLECTIONS],\n user[User.PROPERTIES.MAKES],\n user[User.PROPERTIES.LIKES],\n user[User.PROPERTIES.SKILL_LEVEL],\n user_id)\n\n cursor.execute(dbq.UPDATE_USER, user_data)\n logger.debug(\"user_id {} updated\".format(user_id))", "def updateUserRating(definition, increase):\n user = mongo.db.users.find_one({\"_id\": definition[\"submitted_by\"]})\n mongo.db.users.update_one(\n {\"_id\": user[\"_id\"]},\n {\"$inc\": {\"total_rating\": increase}})", "def control_capabilities(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"control_capabilities\"), kwargs)", "def update_tag_user_acl(session, tag_id=None, user_id=None,\n allow_install=False, allow_uninstall=False, allow_reboot=False,\n allow_schedule=False, allow_wol=False, allow_snapshot_creation=False,\n allow_snapshot_removal=False, allow_snapshot_revert=False,\n allow_tag_creation=False, allow_tag_removal=False, allow_read=False,\n date_modified=datetime.now(),\n username='system_user'\n ):\n session = validate_session(session)\n user = None\n\n if user_id and tag_id:\n user = session.query(TagUserAccess).\\\n filter(TagUserAccess.user_id == user_id).\\\n filter(TagUserAccess.tag_id == tag_id).first()\n if user:\n try:\n user.allow_install = allow_install\n user.allow_uninstall = allow_uninstall\n user.allow_reboot = allow_reboot\n user.allow_schedule = allow_schedule\n user.allow_wol = allow_wol\n user.allow_snapshot_creation = allow_snapshot_creation\n user.allow_snapshot_removal = allow_snapshot_removal\n user.allow_snapshot_revert = allow_snapshot_revert\n user.allow_tag_creation = allow_tag_creation\n user.allow_tag_removal = allow_tag_removal\n user.allow_read = allow_read\n user.date_modified = date_modified\n session.commit()\n return({\n 'pass': True,\n 'message': 'ACL for User %s was modified for Tag %s' % \\\n (user_id, tag_id)\n })\n except Exception as e:\n session.rollback()\n return({\n 'pass': False,\n 'message': 'Failed to modify ACL for User %s on Tag %s' % \\\n (user_id, tag_id)\n })\n else:\n return({\n 'pass': False,\n 'message': 'Invalid user_id %s and or tag_id' % \\\n (user_id, tag_id)\n })", "def update_user(BrokerId=None, ConsoleAccess=None, Groups=None, Password=None, Username=None):\n pass", "def _save_user(self, user):\n self.firebase.patch(f'/{self.USERS_KEY}', {str(user.id): user.username})", "async def set_mod(request: Request, user: User) -> Message:\n user_id = user.user_id\n conn: Connection = request.state.db_conn\n async with conn.transaction():\n user_state = await conn.fetchrow(\"SELECT is_mod FROM users WHERE user_id = $1\", user_id)\n if user_state is None:\n return Message(message=f\"User with user_id {user_id} does not exist.\")\n elif user_state['is_mod']:\n return Message(message=f\"User with user_id {user_id} is already a mod.\")\n\n await conn.execute(\"UPDATE users SET is_mod = true WHERE user_id = $1\", user_id)\n return Message(message=f\"Successfully set user with user_id {user_id} to mod.\")", "def promote_user(self, username):\n parser_promote.add_argument('isadmin', choices=[\"True\", 
\"False\"],\n required=True, nullable=False,\n help=\"(Accepted values: True, False)\"\n )\n args = parser_promote.parse_args()\n isAdmin = request.json.get('isadmin')\n\n query = \"\"\"UPDATE users SET isadmin=%s WHERE username=%s\"\"\"\n values = isAdmin, username\n\n conn = self.db\n cursor = conn.cursor()\n cursor.execute(query, values)\n conn.commit()\n return True", "def add_user_to_group(user, group):\n Command.run(['usermod', '-a', '-G', user, group])", "def update_user(cursor, username, attr, value):\n if attr not in ['username', 'password', 'email', 'groups']:\n raise ValueError(f\"{attr!r} is not a valid user attribute\")\n if attr == 'password':\n value = argon2.hash(value)\n elif attr == 'groups':\n current = get_usergroups(cursor, username)\n for group in current.difference(value):\n remove_usergroup(cursor, username, group)\n for group in value.difference(current):\n create_usergroup(cursor, username, group)\n return \n\n cursor.execute(f\"\"\"\n UPDATE users\n SET\n {attr} = ?\n WHERE\n username = ?\n \"\"\", (value, username))", "def user_update(user_id, user_info):\n user = lookup_user_by_id(user_id)\n for (key, value) in user_info.iteritems():\n if key == \"first_name\" and value is not None:\n user.first_name = value\n elif key == \"last_name\" and value is not None:\n user.last_name = value\n elif key == \"email\" and value is not None:\n try:\n lookup_user_by_email(value)\n except:\n user.email = value\n elif key == \"password\" and value is not None:\n user.set_password(value)\n elif key == \"active\" and value is not None:\n if value:\n user.activate()\n else:\n user.deactivate()\n send_activation_mail.delay(user_id)\n elif key == \"social\" and value is not None:\n user.meta['social'] = value\n elif key == \"address\" and value is not None:\n user.meta['address'] = value\n elif key == \"crm\" and value is not None:\n user.meta['crm'] = value\n elif key == \"local\" and value is not None:\n user.meta['local'] = value\n return user_to_dict(user)", "def set_user(self, name, password, user_level):\n params = {'Username': name, 'Password': password, 'UserLevel': user_level}\n return self.mycam.devicemgmt.SetUser(params)", "def update_user(self, user_id, name, passwd):\n\n # find the user\n um = User(self.settings)\n user = um.find_user(user_id)\n\n # update it\n status, user = user.update(name, passwd)\n\n # json representation\n json_user = user.to_json()\n \n # return\n return status, json_user", "def promote_user(server_object, client, address, command_args):\n\n\t#: Get the information for who's changing the permission of whose client,\n\t#: and to what permission.\n\tchanger = server_object.usrs[address]\n\tchangee = command_args[1]\n\tnew_permission = command_args[2]\n\n\t#: Get the client's current permissino level.\n\tcur_permission = server_object.permissions[server_object.get_ip(changee)].permission\n\n\t#: Log to the server console that the client's permission has been changed.\n\tprint(\"{} has changed {}'s permission from {} to {}\".format(\n\t\t\tchanger,\n\t\t\tchangee,\n\t\t\tcur_permission,\n\t\t\tnew_permission\n\t\t)\n\t)\n\n\t#: Attempt to change the permission, and recieve the error code.\n\tcode = server_object.change_permissions(server_object.get_ip(changee), new_permission)\n\n\t#: If the code is -1, then an invalid permission type was passed.\n\tif code == -1:\n\t\tclient.send(\"{} is not a valid permission type.\".format(new_permission).encode())\n\n\t#: If the code is 0, then the user was already that permission level\n\telif code == 
0:\n\t\tclient.send(\"{} is already {}\".format(changee, new_permission).encode())\n\n\t#: Otherwise no error was encountered, and the user's permission was changed.\n\telse:\n\t\tclient.send(\"{}'s permission has been updated\".format(changee).encode())", "def update_user(user_id):\n netAdminToolDB = app.config['DATABASE']\n user = netAdminToolDB.get_user(user_id)\n if user == None:\n return jsonify({'error': 'User_id not found'}), 404\n\n input = request.get_json()\n\n if input == None:\n return jsonfiy({'error': 'Invalid PUT request'}), 400\n\n # Send input directly to update_user function, which checks each key\n netAdminToolDB.update_user(user_id, **input)\n user = netAdminToolDB.get_user(user_id)\n userDict = dict(user)\n uri = url_for('get_user', user_id=user.id, _external=True)\n userDict['uri'] = uri\n\n return jsonify({'user': userDict}), 200", "def update(self, user_id, first_name=None, last_name=None, email=None, title=None,\n dept=None, notes=None, admin_role=None, app_role=None, email_notification=None):\n\n url = \"{0}/users/{1}\".format(self.base_url, user_id)\n url = self._add_token_to_url(url)\n payload = self.get(user_id)\n\n # get rid of fields that aren't required for PUT\n pop_fields = ['complete_json',\n 'entity_type',\n 'id',\n 'image',\n 'is_deleted',\n 'tags',\n 'username']\n for field in pop_fields:\n payload.pop(field)\n\n # replace fields with updated ones from kwargs\n if first_name:\n payload[\"first_name\"] = first_name\n if last_name:\n payload[\"last_name\"] = last_name\n if email:\n payload[\"email\"] = email\n if title:\n payload[\"title\"] = title\n if dept:\n payload[\"dept\"] = dept\n if notes:\n payload[\"notes\"] = notes\n if app_role:\n payload[\"user_type\"] = app_role\n if email_notification is not None:\n payload[\"subscribed_to_emails\"] = email_notification\n\n # Logic for setting admin status is slightly more complicated:\n if admin_role is None:\n pass\n elif admin_role == \"app_admin\":\n payload[\"admin\"] = True\n payload[\"roles\"] = \"\"\n elif admin_role == \"data_admin\":\n payload[\"admin\"] = False\n payload[\"roles\"] = \"data_admin\"\n else:\n payload[\"admin\"] = False\n payload[\"roles\"] = \"\"\n\n self.logger.debug(\"Sending the user information {0} to {1}\".format(json.dumps(payload), url))\n self.session.headers.update({\"Content-Type\": \"application/json\"}) # Set special header for this post\n response = self.session.put(url, data=json.dumps(payload), verify=False)\n self.logger.debug(\"Received response code {0} with reason {1}...\".format(response.status_code, response.reason))\n self.session.headers.pop(\"Content-Type\") # Remove header, as it affects other tests\n return response.json()['response']", "def update_node_user_acl(session, node_id=None, user_id=None,\n allow_install=False, allow_uninstall=False, allow_reboot=False,\n allow_schedule=False, allow_wol=False, allow_snapshot_creation=False,\n allow_snapshot_removal=False, allow_snapshot_revert=False,\n allow_tag_creation=False, allow_tag_removal=False, allow_read=False,\n date_modified=datetime.now(), username='system_user'\n ):\n session = validate_session(session)\n user = None\n if user_id and node_id:\n user = session.query(NodeUserAccess).\\\n filter(NodeUserAccess.user_id == user_id).\\\n filter(NodeUserAccess.node_id == node_id).first()\n if user:\n try:\n user.allow_install = allow_install\n user.allow_uninstall = allow_uninstall\n user.allow_reboot = allow_reboot\n user.allow_schedule = allow_schedule\n user.allow_wol = allow_wol\n 
user.allow_snapshot_creation = allow_snapshot_creation\n user.allow_snapshot_removal = allow_snapshot_removal\n user.allow_snapshot_revert = allow_snapshot_revert\n user.allow_tag_creation = allow_tag_creation\n user.allow_tag_removal = allow_tag_removal\n user.allow_read = allow_read\n user.date_modified = date_modified\n session.commit()\n return({\n 'pass': True,\n 'message': 'ACL for User %s was modified for Node %s' % \\\n (user_id, node_id)\n })\n except Exception as e:\n session.rollback()\n return({\n 'pass': False,\n 'message': 'Failed to modify ACL for User %s on Node %s' % \\\n (user_id, node_id)\n })\n else:\n return({\n 'pass': False,\n 'message': 'Invalid user_id %s and or node_id %s' % \\\n (user_id, node_id)\n })", "def updateSkillForPlayer(self, userid, name, level):\r\n if not isinstance(userid, int):\r\n userid = self.getUserIdFromSteamId(userid)\r\n self.execute(\"UPDATE Skill SET level=? WHERE UserID=? AND name=?\", level, userid, name)", "def upgrade(message, target, num):\n return\n users = hf.get_users()\n\n for user in users:\n if user[\"name\"] != target:\n continue\n try:\n user[\"approval_level\"] = int(num)\n except Exception:\n message.reply(\":x: That's not a number, ya dingus. :)\")\n return\n\n hf.save_users(users)\n\n message.reply(\"Successfully upgraded user {} to approval level \"\n \"{}.\".format(target, num))", "def update_user(id):\n with app.app_context():\n user = User.query.get(id)\n if user is None:\n return \"User not found\", 404\n skills = validate_skills(request.get_json().get(\"skills\"))\n if not skills:\n return \"Invalid skills\", 400\n\n for skill in skills:\n skill_db = Skill.query.filter_by(name=skill).first()\n if skill_db is None:\n skill_db = Skill(name=skill)\n db.session.add(skill_db)\n \n user.skills = [\n skill for skill in Skill.query.filter(Skill.name.in_(skills)).all()\n ]\n \n users_response = UsersResponse(\n users=[\n {\n \"id\": user.id,\n \"name\": user.name,\n \"skills\": [skill.name for skill in user.skills]\n }\n ]\n )\n db.session.commit()\n return users_response.json(), 200", "def update_user_opt_in_status(self, user_id, channel_name):\n # type: (str, str) -> dict\n self.request_url = \"{0}/{1}/{2}/{3}?channelType={4}\".format(\n self.API_URL, self.USER_ENDPOINT, user_id, self.OPTOUT_ENDPOINT, channel_name\n )\n return self.__create_request(payload={\"optOut\": False}, request_type=self.REQUEST_PUT, version=\"v1\")", "def update_user(self, user_id, **kwargs):\n user = self.get(user_id, raise_error=True)\n if 'display_name' in kwargs:\n user.display_name = kwargs['display_name']\n if 'email' in kwargs:\n user.email = kwargs['email']\n if 'verified' in kwargs:\n user.verified = kwargs['verified']\n self.session.add(user)", "def update_user_profile(req_data):\n logger.debug(\"entering function update_user_profile\")\n\n update_fields = {}\n for field in req_data:\n update_fields[field] = req_data[field]\n if \"password\" in req_data:\n update_fields[\"password\"] = generate_password_hash(req_data[\"password\"])\n\n find_query = {\"user_id\": current_user.id}\n update_query = {\"$set\": update_fields}\n run_update_one_query(config.USERS_COL, find_query, update_query,\n error=True, error_msg=PROFILE_UPDATE_FAILED_ERR_MSG)\n logger.info(\"Profile update success for %s\", current_user.id)\n\n logger.debug(\"exiting function update_user_profile\")\n return get_success_response(PROFILE_UPDATE_SUCCESS_MSG)", "def update_user_login(sender, user, **kwargs):\n user.userlogin_set.create(timestamp=timezone.now())\n user.save()\n\n 
bonus_wallet = BonusWallet.objects.filter(user=user)\n if not bonus_wallet.exists():\n bonus_wallet = BonusWallet.objects.create(user=user)\n bonus_wallet.save()\n else:\n bonus_wallet = bonus_wallet[0]\n\n login_bonus = LoginBonus.objects.create(wallet=bonus_wallet)\n bonus_wallet.value += Decimal(login_bonus.value)\n bonus_wallet.save()", "def add_user_entitlement(self, user_entitlement):\n content = self._serialize.body(user_entitlement, 'UserEntitlement')\n response = self._send(http_method='POST',\n location_id='387f832c-dbf2-4643-88e9-c1aa94dbb737',\n version='6.0-preview.3',\n content=content)\n return self._deserialize('UserEntitlementsPostResponse', response)", "def test_add_capability():\n mock = MagicMock()\n with patch.dict(dism.__salt__, {\"cmd.run_all\": mock}):\n with patch.dict(dism.__grains__, {\"osversion\": 10}):\n dism.add_capability(\"test\")\n mock.assert_called_once_with(\n [\n dism.bin_dism,\n \"/Quiet\",\n \"/Online\",\n \"/Add-Capability\",\n \"/CapabilityName:test\",\n \"/NoRestart\",\n ]\n )", "async def put(\n self, user_id: str, /, data: UpdateAdministratorRoleRequest\n ) -> Union[r200[UserResponse], r404]:\n\n if user_id == self.request[\"client\"].user_id:\n raise HTTPBadRequest(text=\"Cannot change own role\")\n\n try:\n administrator = await get_data_from_req(\n self.request\n ).administrators.set_administrator_role(user_id, data.role)\n except ResourceNotFoundError:\n raise NotFound()\n\n return json_response(administrator, status=200)", "def test_users_update(mocker):\r\n mocker.patch('subprocess.call')\r\n users.update(user_dict)\r\n subprocess.call.assert_called_with([\r\n 'usermod',\r\n '-p',\r\n password,\r\n '-G',\r\n 'wheel,dev',\r\n 'kevin',\r\n ])", "def api_extend_account():\n user_id = request.form.get('user_id')\n today_plus_180 = get_expiration_date(180)\n user = UserEntity.get_by_id(user_id)\n user = UserEntity.update(user, access_expires_at=today_plus_180)\n return jsonify_success(\n {\"message\": \"Updated expiration date to {}\".format(today_plus_180)})", "def sync_keycloak_user(oidc_user: OIDCUser, claims: dict):\n oidc_user.user.universal_id = claims.get(\"universal-id\")\n oidc_user.user.authorization_id = claims.get(\"sub\")\n oidc_user.user.first_name = claims.get(\"given_name\") or \"\"\n oidc_user.user.last_name = claims.get(\"family_name\") or \"\"\n oidc_user.user.display_name = claims.get(\"display_name\") or \"\"\n oidc_user.user.email = claims.get(\"email\")\n oidc_user.user.has_efiling_early_adopters = \"/efiling-early-adopters\" in claims.get(\"groups\", {})\n oidc_user.user.save()", "def promote_user(username):\n user = User.get_user_by_username(username)\n user.is_admin = True\n user.save()", "def update_user(user_id):\n\n user = storage.get(\"User\", user_id)\n\n if user is None:\n abort(404)\n\n json_input = request.get_json()\n\n if json_input is None:\n abort(400, \"Not a JSON\")\n\n for key, value in json_input.items():\n if key not in ['id', 'email', 'created_at', 'updated_at']:\n setattr(user, key, value)\n user.save()\n return jsonify(user.to_dict())", "def permit_user(self, perm_name, user):\n try:\n perm_set = self.permissions[perm_name]\n except KeyError:\n raise PermissionError(\"Permission does not Exists\")\n else:\n if user.username not in self.authenticator.users:\n raise UsernameNotFoundError\n perm_set.add(user.username)\n if 'add' and 'property' in perm_name:\n user.can_add_property = True", "def update_user(user_id):\n body = request.get_json(silent=True)\n if body is None:\n abort(400, 
jsonify(error=\"Not a JSON\"))\n user = models.storage.get('User', user_id)\n if user is None:\n abort(404)\n for key, value in body.items():\n if key not in ('id', 'email', 'created_at', 'updated_at'):\n setattr(user, key, value)\n user.save()\n return jsonify(user.to_dict())", "def updatePassword(con, options, dbName, userName, userInfo):\n if checkUsername(userName):\n trace(\"For dbName='%s', alter user '%s' password\" % (dbName, userName))\n userPassword = userInfo[\"password\"]\n optionalDbExecute(con, options, \"alter user %s with password '%s'\" % (userName, userPassword))", "def update_db_with_user_edits(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.img_url = request.form['img_url']\n\n db.session.add(user)\n db.session.commit()\n\n return redirect('/users')", "def test_users_update(mocker):\n mocker.patch('subprocess.call')\n users.update(user_dict)\n subprocess.call.assert_called_with([\n 'usermod',\n '-p',\n password,\n '-G',\n 'wheel,dev',\n 'kevin',\n ])", "def update_user(self, user_id, new_user_info):\n for user in self.get_all_dbusers():\n if user.get(\"user_id\") == user_id:\n self.ireporter_db.update_data_user_role(user_id, new_user_info.get(\"is_admin\"))\n return user\n return None", "def test_update_hyperflex_capability_info(self):\n pass", "def change_user_password(self, user, new_pass):\n return self.update(user, password=new_pass)", "def update_user(id):\n pass", "def set_user(self, user):\n self._user = user", "def update_user(self, queue: SubnetQueue, *args):", "def update_user_feature(\n train, user_features, item_features, lambda_user,\n nz_user_itemindices, I):\n for d, user_d in enumerate(nz_user_itemindices): # iterate over non zero users\n nnz_items_per_user = len(user_d[1]) # Number of items user d has rated\n if (nnz_items_per_user == 0): nnz_items_per_user = 1\n \n # Least squares solution\n A_d = np.dot(item_features[user_d[1]].T, item_features[user_d[1]]) + lambda_user * nnz_items_per_user * I\n V_d = np.dot(item_features[user_d[1]].T, train[user_d[1],user_d[0]].todense())\n user_features[:,user_d[0]] = np.linalg.solve(A_d,V_d)", "def test_manage_user_roles__manage_subset(self, appbuilder, user, role, monkeypatch):\n sm = appbuilder.sm\n\n monkeypatch.setattr(sm, 'roles_to_manage', {'Admin', 'Viewer', 'Op', 'User'})\n user.roles.append(role('Other'))\n user.roles.append(role('Viewer'))\n\n sm.manage_user_roles(user, ['Admin', 'User'])\n\n assert {r.name for r in user.roles} == {'Admin', 'User', 'Other'}", "def test_update_user_profile(setup_client, setup_user):\n client = setup_client\n user = setup_user\n payload = {\n \"name\": \"New name\",\n \"role\": \"Purchaser\",\n \"password\": \"New password\"\n }\n res = client.patch(ME_URL, payload)\n user.refresh_from_db()\n assert res.status_code == status.HTTP_200_OK\n assert user.name == payload[\"name\"]\n assert user.role == payload[\"role\"]\n assert user.check_password(payload[\"password\"])\n assert res.status_code == status.HTTP_200_OK", "def patch_user(user_id):\n success = True\n try:\n usr = db.session.query(User).get(user_id)\n for item in request.json:\n if item == 'username':\n usr.username = request.json['username']\n elif item == 'email':\n usr.username = request.json['email']\n db.session.commit()\n except:\n success = False\n return jsonify(success=success)", "def update_user(self, user_name, new_user_name=None, new_path=None):\r\n params = {'UserName' : user_name}\r\n if 
new_user_name:\r\n params['NewUserName'] = new_user_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateUser', params)", "def update_user(username):\n name = request.get_json().get(\"name\", None)\n role = request.get_json().get(\"role\", None)\n email = request.get_json().get(\"email\", None)\n return jsonify(\n admin.update_user(current_app.scoped_session(), username, role, email, name)\n )", "def update_field(self, uid, body, user_auth):\n user_record = UserRecord.get_user(uid, auth=admin_sdk.auth)\n user = Admin.query.filter_by(uid=user_record.uid).first()\n\n if not user_record or not user:\n raise HandlerException(404, \"Not found user\")\n\n if user_auth[\"uid\"] == uid:\n raise HandlerException(\n 401, \"Logged user can't modify own profile in this endpoint\"\n )\n\n if not user_auth[\"b\"].has_access(user.privileges, True):\n raise HandlerException(\n 401,\n \"Logged user doesn't have sufficient permissions \\\n to update a user with equal or higher privileges\",\n )\n\n user_record.serialize(body)\n user_record.update_user()\n\n if \"privileges\" in body:\n user_record.make_claims(\n {\"admin\": True, \"access_level\": body[\"privileges\"]}\n )\n\n user.serialize(body)\n user.save()\n\n return {\"uid\": user_record.uid, \"a\": user_record, \"b\": user}", "def put(self, user_id):\n\n user_data, error = user_schema.load(api.payload['data'])\n\n user = User.objects.get_or_404(public_id=user_id)\n user.update(updated_at=datetime.utcnow, **user_data)\n \n return user_schema.dump(user)", "def release(self, user):\n ret = self._communicate('put %s' % user)\n return ret == 'okay'", "def update_user(user_id):\n new_dict = request.get_json(silent=True)\n if type(new_dict) is dict:\n user_obj = storage.get(\"User\", user_id)\n if user_obj is None:\n abort(404)\n for k, v in new_dict.items():\n if k not in [\"id\", \"email\", \"created_at\", \"updated_at\"]:\n setattr(user_obj, k, v)\n user_obj.save()\n return jsonify(user_obj.to_dict()), 200\n else:\n response = jsonify({\"error\": \"Not a JSON\"}), 400\n return response", "def update(self, uid, body, user_auth):\n user_record = UserRecord.get_user(uid, auth=admin_sdk.auth)\n user = Admin.query.filter_by(uid=user_record.uid).first()\n\n if not user_record or not user:\n raise HandlerException(404, \"Not found user\")\n\n if user_auth[\"uid\"] == uid:\n raise HandlerException(\n 401, \"Logged user can't modify own profile in this endpoint\"\n )\n\n if not user_auth[\"b\"].has_access(user.privileges, True):\n raise HandlerException(\n 401,\n \"Logged user doesn't have sufficient permissions \\\n to create a user with equal or higher privileges\",\n )\n\n user_record.serialize(body)\n user_record.update_user()\n\n if \"privileges\" in body:\n user_record.make_claims(\n {\"admin\": True, \"access_level\": body[\"privileges\"]}\n )\n\n user.serialize(body)\n user.save()\n\n return {\"uid\": user_record.uid, \"a\": user_record, \"b\": user}", "def update_user(user_id):\n\n user = User.query.get_or_404(user_id)\n user.first_name = request.form[\"edit_first_name\"]\n user.last_name = request.form[\"edit_last_name\"]\n user.image_url = request.form[\"edit_image_url\"]\n\n db.session.add(user)\n db.session.commit()\n return redirect(\"/users\")", "def updateUserElder(self, address, newUser):\n try:\n DatabaseCollections.userCollection.update_one(\n {\n \"address\": address\n },\n {\"$set\": {\n \"newUser\": newUser\n }})\n return True\n except IOError:\n return False", "def view_update_user(self, user, username, 
password):\r\n user.realm._checker.passwd(username, password, True)", "def update_user_key(self, key, obj, validity = 0, max_queries_per_ip_per_hour = 0, max_hits_per_query = 0, indexes = None):\n if obj is dict:\n params = obj\n else:\n params = {\"acl\": obj}\n if validity != 0:\n params[\"validity\"] = validity\n if max_queries_per_ip_per_hour != 0:\n params[\"maxQueriesPerIPPerHour\"] = max_queries_per_ip_per_hour\n if max_hits_per_query != 0:\n params[\"maxHitsPerQuery\"] = max_hits_per_query\n if not indexes is None:\n params['indexes'] = indexes\n return AlgoliaUtils_request(self.headers, self.write_hosts, \"PUT\", \"/1/keys/\" + key, self.timeout, params)", "def test_resource_user_resource_change_user_patch(self):\n pass", "def assign_user_features(self, user_data):\n assert \"user\" in user_data.columns, \"Data must contain `user` column.\"\n user_data = user_data.drop_duplicates(subset=[\"user\"], keep=\"last\")\n user_row_idx, user_id_mask = get_row_id_masks(\n user_data[\"user\"], self.user_unique_vals\n )\n self.user_sparse_unique = update_new_sparse_feats(\n user_data,\n user_row_idx,\n user_id_mask,\n self.user_sparse_unique,\n self.sparse_unique_vals,\n self.multi_sparse_unique_vals,\n self.user_sparse_col,\n self.col_name_mapping,\n self.sparse_offset,\n )\n self.user_dense_unique = update_new_dense_feats(\n user_data,\n user_row_idx,\n user_id_mask,\n self.user_dense_unique,\n self.user_dense_col,\n )", "def addCapability(self, capability):\n self.capabilities.add(capability)", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n self._user = user" ]
[ "0.5838249", "0.5771097", "0.57477796", "0.57301706", "0.5712233", "0.56142646", "0.5583086", "0.55510235", "0.5524724", "0.5517849", "0.5512739", "0.55080456", "0.54812056", "0.5419713", "0.54144186", "0.5388361", "0.5363638", "0.5359393", "0.5356132", "0.53485364", "0.5303966", "0.52659965", "0.52228224", "0.52068657", "0.5206759", "0.51840377", "0.51431036", "0.51288265", "0.5102041", "0.50938725", "0.507958", "0.50671023", "0.5036121", "0.50218624", "0.5016121", "0.5010207", "0.5003651", "0.49881467", "0.49758038", "0.49712518", "0.49700248", "0.496984", "0.49572125", "0.49511427", "0.49354744", "0.49326822", "0.49324507", "0.49188676", "0.49152085", "0.49101895", "0.48949033", "0.48842824", "0.48827073", "0.48681825", "0.48670036", "0.48526928", "0.48490328", "0.48476794", "0.48400307", "0.4839244", "0.48288932", "0.481126", "0.48080513", "0.48063236", "0.480434", "0.48030886", "0.47986722", "0.4796854", "0.47883266", "0.47667253", "0.47659382", "0.47569412", "0.4753186", "0.4745688", "0.47437415", "0.47378737", "0.47221506", "0.47210893", "0.4716099", "0.47158018", "0.47155207", "0.4709364", "0.47087705", "0.47075287", "0.47021225", "0.47003713", "0.46879154", "0.46856052", "0.4671689", "0.4668018", "0.46669102", "0.4665105", "0.46606052", "0.46524426", "0.46460223", "0.46431774", "0.46421126", "0.46339834", "0.4633188", "0.4633188" ]
0.71365345
0
Updates the state of the specified user.
def update_user_state(self, user_id, update_state_details, **kwargs): resource_path = "/users/{userId}/state" method = "PUT" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "update_user_state got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_state_details, response_type="User") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=update_state_details, response_type="User")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_user():", "def update_user():\n #TODO user update \n pass", "def update(self, user: U) -> None:\n ...", "def set_state_of_user(user_id, state):\n if not ValidStates.has_value(state):\n raise RuntimeError(\"Invalid event alteration state reached\")\n UserEventAlterationMachine.state_dict[user_id] = state", "def update_user_data(self, new_user: User):\n self.user_data.update_user_data(new_user)", "def update_user(self, user):\n query = TABELLE['id_users']['update']\n return self.execute(query,\n (user['admin'], user['tester'], user['loot_user'], user['loot_admin'], user['banned'],\n user['id']))", "def put(self, user_id):\r\n return update_user(request, user_id)", "async def update(self, ctx, user: discord.Member=None):\n\n if not user:\n user = ctx.message.author\n\n steam_id = get_user_steam_id(user.id)\n key = get_steam_key()\n\n if not steam_id:\n await self.bot.say(\"{}, your Discord ID is not yet connected to a Steam profile. Use `{}game steamlink` to link them.\".format(user.mention, ctx.prefix))\n return\n\n if key:\n set_steam_games(steam_id, user.id)\n await self.bot.say(\"{}, your Steam games have been updated!\".format(user.mention))\n else:\n await self.bot.say(\"Sorry, you need a Steam API key to make requests to Steam. Use `{}game steamkey` for more information.\".format(ctx.prefix))", "def update_user(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload=user, request_type=self.REQUEST_PUT, version=\"v1\")", "def do_user_update():\n targetUsers = User.query.filter_by(id=request.form['id']).all()\n if not any(targetUsers):\n return user_list(\"Unknown user.\")\n\n targetUser = targetUsers[0]\n\n targetUser.first_name = request.form['first_name']\n targetUser.name = request.form['name']\n targetUser.nick = request.form['nick']\n targetUser.mail = request.form['mail']\n targetUser.role = request.form['role']\n targetUser.state = request.form['state']\n targetUser.gender = request.form['gender']\n targetUser.meter_id = request.form['meter_id']\n targetUser.group_id = request.form['group_id']\n\n db.session.commit()\n return user_list(\"Updated user \" + targetUser.name)", "def update_user(self, instance, user, name=None, password=None, host=None):\n return instance.update_user(user, name=name, password=password,\n host=host)", "def update_user(id):\n pass", "def updateUser(self, payload):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\treturn(putUrl(url, self.credentials, payload))", "def update_user_state(mess_chat_id, state):\n connection = connection_to_db()\n cursor = connection.cursor()\n\n cursor.execute(\n \"UPDATE user_state SET state = {0} \"\n \"WHERE user_id = {1};\".format(state, mess_chat_id)\n )\n\n connection.commit()", "def put(self, user_id):\n data = request.json\n return update_user(data, user_id)", "def update_user(cls, **kwargs):\n return cls._do_call(\n 'PUT', cls.api_endpoint + 'users', params=kwargs)", "def update_user(self):\n self.client.force_authenticate(user=self.user)\n self.response = self.client.patch(\n reverse(\n 'edit_account',kwargs={ 'pk': self.user.id}),\n self.updated_data, format='json'\n )\n self.user = CustomUser.objects.get(username=self.user.username)", "def _update_user(cursor, user_id, user):\n # Create a tuple with user fields\n user_data = (user[User.PROPERTIES.FOLLOWERS],\n user[User.PROPERTIES.FOLLOWING],\n user[User.PROPERTIES.DESIGNS],\n user[User.PROPERTIES.COLLECTIONS],\n 
user[User.PROPERTIES.MAKES],\n user[User.PROPERTIES.LIKES],\n user[User.PROPERTIES.SKILL_LEVEL],\n user_id)\n\n cursor.execute(dbq.UPDATE_USER, user_data)\n logger.debug(\"user_id {} updated\".format(user_id))", "def updateUser(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_user(self, user_id, **kwargs):\n user = self.get(user_id, raise_error=True)\n if 'display_name' in kwargs:\n user.display_name = kwargs['display_name']\n if 'email' in kwargs:\n user.email = kwargs['email']\n if 'verified' in kwargs:\n user.verified = kwargs['verified']\n self.session.add(user)", "def set_user(self, user):\r\n self.user = user", "def update_user(user_id, data):\n logging.debug(\"Uptating user: user_id={}\".format(user_id))\n return ask('appusers/{0}'.format(user_id), data, 'put')", "def modify_user(user_data):\r\n raise NotImplementedError()", "def test_040_update_user(self):\n\n testflow.step(\"Updating user %s\", TEST_USER2)\n assert USER_CLI.run(\n 'edit',\n TEST_USER2,\n attribute='firstName=userX2',\n )[0]", "def sipserver_user_update(self, user: str, password: str) -> None:\n self.update_endpoint_in_sipserver(endpoint=user, password=password)", "def update_user(self, u, p):\r\n\t\tlogger.debug(\"Entering\")\r\n\t\tval, msg = self.add_user(u, p)\r\n\t\t\r\n\t\tif val:\r\n\t\t\tmsg = \"%s has been updated.\" % u\r\n\t\t\r\n\t\tlogger.debug(\"Exiting\")\r\n\t\treturn val, msg", "def update_user(self, user, name=None, password=None, host=None):\n return self._user_manager.update(user, name=name, password=password,\n host=host)", "def wmUpdateUser(self):\n user_role = uiCommon.GetSessionUserRole()\n if user_role != \"Administrator\":\n raise Exception(\"Only Administrators can edit user accounts.\")\n\n args = uiCommon.getAjaxArgs()\n\n u = catouser.User()\n u.FromID(args[\"ID\"])\n\n if u.ID:\n # these changes are done BEFORE we manipulate the user properties for update.\n\n new_pw = uiCommon.unpackJSON(args.get(\"Password\"))\n random_pw = args.get(\"NewRandomPassword\")\n\n # if a password was provided, or the random flag was set...exclusively\n if new_pw:\n # if the user requesting the change *IS* the user being changed...\n # set force_change to False\n force = True\n if u.ID == uiCommon.GetSessionUserID():\n force = False\n\n u.ChangePassword(new_password=new_pw, force_change=force)\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.FullName, \"Password changed.\")\n elif random_pw:\n u.ChangePassword(generate=random_pw)\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.FullName, \"Password reset.\")\n\n # now we can change the properties\n u.LoginID = args.get(\"LoginID\")\n u.FullName = args.get(\"FullName\")\n u.Status = args.get(\"Status\")\n u.AuthenticationType = args.get(\"AuthenticationType\")\n u.ForceChange = args.get(\"ForceChange\")\n u.Email = args.get(\"Email\")\n u.Role = args.get(\"Role\")\n u.FailedLoginAttempts = args.get(\"FailedLoginAttempts\")\n u.Expires = args.get(\"Expires\")\n\n u._Groups = args.get(\"Groups\")\n\n if u.DBUpdate():\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.ID, \"User updated.\")\n\n return json.dumps({\"result\": \"success\"})", "def set_user(self, user):\n self._user = user", "def update(self, user):\n\n\t\tif self == user.classroom:\n\t\t\treturn\n\n\t\tself.size += user.classroom.size\n\t\tuser.set_classroom(self)", "def update(self, 
request, *args, **kwargs):\n return super(UserViewSet, self).update(request, *args, **kwargs)", "def receive_state_of_user(user_id):\n if not UserEventAlterationMachine.state_dict or not UserEventAlterationMachine.state_dict[user_id]:\n UserEventAlterationMachine.state_dict[user_id] = 0\n return UserEventAlterationMachine.state_dict[user_id]", "def user_update(user, action, change, data={}):\n return user, action, change()", "def _save_user(self, user):\n self.firebase.patch(f'/{self.USERS_KEY}', {str(user.id): user.username})", "def update_usermenity(user_id):\n user = storage.get(User, user_id)\n\n if user is None:\n abort(404)\n\n put_data = request.get_json()\n if not put_data:\n abort(400, 'Not a JSON')\n\n for k, v in put_data.items():\n if k not in ['id', 'email', 'created_at', 'updated_at']:\n setattr(user, k, v)\n else:\n continue\n user.save()\n storage.save()\n return make_response(jsonify(user.to_dict()), 200)", "def change_user_status(self, status, client):\n if self.verify_status(status, client):\n client.set_status(status)\n self.send_message('Estado actualizado exitosamente.', client.get_socket())", "def user_update(user_id, user_info):\n user = lookup_user_by_id(user_id)\n for (key, value) in user_info.iteritems():\n if key == \"first_name\" and value is not None:\n user.first_name = value\n elif key == \"last_name\" and value is not None:\n user.last_name = value\n elif key == \"email\" and value is not None:\n try:\n lookup_user_by_email(value)\n except:\n user.email = value\n elif key == \"password\" and value is not None:\n user.set_password(value)\n elif key == \"active\" and value is not None:\n if value:\n user.activate()\n else:\n user.deactivate()\n send_activation_mail.delay(user_id)\n elif key == \"social\" and value is not None:\n user.meta['social'] = value\n elif key == \"address\" and value is not None:\n user.meta['address'] = value\n elif key == \"crm\" and value is not None:\n user.meta['crm'] = value\n elif key == \"local\" and value is not None:\n user.meta['local'] = value\n return user_to_dict(user)", "def update_user(self, user_id, name, passwd):\n\n # find the user\n um = User(self.settings)\n user = um.find_user(user_id)\n\n # update it\n status, user = user.update(name, passwd)\n\n # json representation\n json_user = user.to_json()\n \n # return\n return status, json_user", "def testUpdateUser(self):\n UserAPI().create([(u'test', u'secret', u'name', u'name@example.com')])\n user = getUser(u'test')\n passwordHash = user.passwordHash\n self.store.commit()\n info = TUserUpdate(u'test', u'password', u'new-name',\n u'new-name@example.com')\n with login(u'fluiddb', self.admin.objectID, self.transact) as session:\n yield self.facade.updateUser(session, info)\n\n self.store.rollback()\n self.assertEqual(u'test', user.username)\n self.assertNotEqual(passwordHash, user.passwordHash)\n self.assertEqual(u'new-name', user.fullname)\n self.assertEqual(u'new-name@example.com', user.email)", "async def update(self):\n self.data = await self.api.user.get()", "def update_user(user_id):\n update_usr = request.get_json()\n if not update_usr:\n abort(400, {'Not a JSON'})\n usr = storage.get(User, user_id)\n if not usr:\n abort(404)\n else:\n for key, value in update_usr.items():\n setattr(usr, key, value)\n storage.save()\n return jsonify(usr.to_dict())", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n 
self._user = user", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n self._user = user", "def user(self, user):\n\n self._user = user", "def update_user_async(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}\".format(self.API_URL, self.USER_ENDPOINT)\n return self.__create_request(payload=user, request_type=self.REQUEST_PUT, version=\"v2\")", "def user(self, user):\n self.user_id = user.get_id()", "def update(self, request, *args, **kwargs):\n username = kwargs.get(\"user\")\n response = super().update(request, *args, **kwargs)\n cache.set(f\"{USER_PROFILE_PREFIX}{username}\", response.data)\n return response", "async def set_mod(request: Request, user: User) -> Message:\n user_id = user.user_id\n conn: Connection = request.state.db_conn\n async with conn.transaction():\n user_state = await conn.fetchrow(\"SELECT is_mod FROM users WHERE user_id = $1\", user_id)\n if user_state is None:\n return Message(message=f\"User with user_id {user_id} does not exist.\")\n elif user_state['is_mod']:\n return Message(message=f\"User with user_id {user_id} is already a mod.\")\n\n await conn.execute(\"UPDATE users SET is_mod = true WHERE user_id = $1\", user_id)\n return Message(message=f\"Successfully set user with user_id {user_id} to mod.\")", "def patch(self, user_id):\n\n data = request.get_json()\n\n res = self._user.update_user(user_id, data)\n\n if res:\n return {\n \"status\": 200,\n \"data\": [{\n \"id\": res[\"id\"],\n \"message\": \"user record has been updated\"\n }]\n }, 200\n else:\n return {\n \"status\": 404,\n \"error\": \"Not found for id {}\".format(user_id)\n }, 404", "def view_update_user(self, user, username, password):\r\n user.realm._checker.passwd(username, password, True)", "def update_user(context, params):\n\n user = User.objects.filter(id=params.get('id')).first()\n if not user:\n raise ValueError(\"user not found\")\n user.language = Language.objects.filter(id=params.get('language_id', None)).first()\n user.deputy = User.objects.filter(id=params.get('deputy_id', None)).first()\n # user.edited_by = context.user\n\n user.save()\n\n update_person(context, user, params)\n\n user.save()\n return user", "def update_user(self, queue: SubnetQueue, *args):", "def update_user(request_form, user_id, password_hash=None):\n values = {'login': request_form.get('login').strip(),\n 'password': password_hash if password_hash else request_form.get('password').strip()}\n db_session.query(Users).filter_by(id=user_id).update(values)\n db_session.commit()\n return 'Updated user #%s: %s.' 
% (user_id, values['login']), 'success'", "def user_update(sender, instance, created, **kwargs):\n payload = DiscordUserSerializer(instance).data\n ws.send_notification(ws.types.USER_UPDATE, payload)", "def set_balance(self, user, to):\n to_exec = \"UPDATE users SET balance = %s WHERE snowflake_pk = %s\"\n self.__cursor.execute(to_exec, (to, user.id,))\n self.__connection.commit()", "def user_changes(self, user, what=None):\n pass", "def update_user_login(sender, user, **kwargs):\n user.userlogin_set.create(timestamp=timezone.now())\n user.save()\n\n bonus_wallet = BonusWallet.objects.filter(user=user)\n if not bonus_wallet.exists():\n bonus_wallet = BonusWallet.objects.create(user=user)\n bonus_wallet.save()\n else:\n bonus_wallet = bonus_wallet[0]\n\n login_bonus = LoginBonus.objects.create(wallet=bonus_wallet)\n bonus_wallet.value += Decimal(login_bonus.value)\n bonus_wallet.save()", "def set_user(self, user: User):\n self.__user = user", "def update_user(user_id):\n netAdminToolDB = app.config['DATABASE']\n user = netAdminToolDB.get_user(user_id)\n if user == None:\n return jsonify({'error': 'User_id not found'}), 404\n\n input = request.get_json()\n\n if input == None:\n return jsonfiy({'error': 'Invalid PUT request'}), 400\n\n # Send input directly to update_user function, which checks each key\n netAdminToolDB.update_user(user_id, **input)\n user = netAdminToolDB.get_user(user_id)\n userDict = dict(user)\n uri = url_for('get_user', user_id=user.id, _external=True)\n userDict['uri'] = uri\n\n return jsonify({'user': userDict}), 200", "def mod_user(self, username, data):\n headers = {\"user-agent\": self.u_agent}\n req_url = self.normalize_admin_url(u\"users/{}\".format(username))\n res = requests.put(\n req_url,\n headers=headers,\n auth=self.auth,\n data=json.dumps(data),\n verify=False,\n )\n if res.status_code == 200:\n return Response(0, u\"User {} has been modified\".format(username))\n else:\n return Response(res.status_code, res)", "def request_user_update():\n target_user = User.query.filter_by(id=request.args['id']).first()\n if target_user is None:\n return user_list(\"Unknown user.\")\n\n return Response(render_template('admin/user/create-update.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/admin/user/update\",\n genders=list(GenderType),\n states=list(StateType),\n groups=Group.query.all(),\n roles=list(RoleType),\n id=target_user.id,\n gender=target_user.gender,\n first_name=target_user.first_name,\n name=target_user.name,\n nick=target_user.nick,\n mail=target_user.mail,\n meter_id=target_user.meter_id,\n group_id=target_user.group_id,\n role=target_user.role,\n state=target_user.state),\n mimetype='text/html')", "def view_update_user(self, user, new_pw, old_pw):\r\n user.realm._checker.passwd(user.userID, new_pw, old_pw)", "async def update(self, ctx):\n if is_support_guild(ctx.guild.id):\n await ctx.send('Sorry, this discord does not allow update, saveid, '\n 'leaderboard, and series commands so as not to overload me. 
'\n 'Try `!careerstats` or `!yearlystats` with your customer ID to test '\n 'or go to #invite-link to bring the bot to your discord for all functionality')\n return\n await ctx.send(f'Updating user: {ctx.author.name}, this may take a minute')\n log.info(f'Updating user: {ctx.author.name}')\n await self.updater.update_member(ctx)", "def UpdateFromServer(self):\n self.status = GetUserStatus(self.accesskey)", "def fusion_api_edit_user(self, body, uri, api=None, headers=None):\n return self.user.update(body, uri, api, headers)", "async def update_internal_user(self, internal_user: InternalUser) -> InternalUser:\n\t\t...", "def update_user(user_id):\n\n user = User.query.get_or_404(user_id)\n user.first_name = request.form[\"edit_first_name\"]\n user.last_name = request.form[\"edit_last_name\"]\n user.image_url = request.form[\"edit_image_url\"]\n\n db.session.add(user)\n db.session.commit()\n return redirect(\"/users\")", "def update_login_data(self, user: IUser):\n request = self.request\n if not user.last_login_at:\n e = events.FirstLogin(request, user)\n request.registry.notify(e)\n\n # Update user security details\n user.last_login_at = now()\n user.last_login_ip = request.client_addr", "def toggle_active(self, user):\n user.active = not user.active\n # noinspection PyUnresolvedReferences\n self.save(user)\n return True", "def update_user(self, user_id, new_user_info):\n for user in self.get_all_dbusers():\n if user.get(\"user_id\") == user_id:\n self.ireporter_db.update_data_user_role(user_id, new_user_info.get(\"is_admin\"))\n return user\n return None", "def SetUserInformation(self, user_info):\n self._session[_USER_INFO_KEY] = user_info", "def update( self, trans, id, payload, **kwd ):\n current_user = trans.user\n user_to_update = self.user_manager.by_id( self.decode_id( id ) )\n\n # only allow updating other users if they're admin\n editing_someone_else = current_user != user_to_update\n is_admin = trans.api_inherit_admin or self.user_manager.is_admin( current_user )\n if editing_someone_else and not is_admin:\n raise exceptions.InsufficientPermissionsException( 'you are not allowed to update that user', id=id )\n\n self.user_deserializer.deserialize( user_to_update, payload, user=current_user, trans=trans )\n return self.user_serializer.serialize_to_view( user_to_update, view='detailed' )", "def update_user(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.image_url = request.form['image_url']\n\n\n db.session.add(user)\n db.session.commit()\n flash(f\"{user.full_name} user has been edited.\")\n\n return redirect(\"/users\")", "def update_user(self) -> db.User:\n log.debug(\"Fetching updated user data from the database\")\n self.user = self.session.query(db.User).filter(db.User.user_id == self.chat.id).one_or_none()\n return self.user", "def update_item(self, id: str, user: User, **kwargs) -> None:", "def update_users(self):\n conn = sqlite3.connect(self.__DB)\n cursor = conn.cursor()\n\n users_data = []\n unsaved_histories_data = []\n for key, user in self.__users.items(): # here, key it's actually users id\n users_data.append((user.get_balance(), key))\n for register in user.get_history():\n register_str, is_saved = register\n if not is_saved:\n unsaved_histories_data.append((register_str, key))\n\n cursor.executemany('''\n UPDATE users\n SET balance=?\n WHERE id=?;\n ''', users_data)\n\n cursor.executemany('''\n INSERT INTO history (register, owner)\n VALUES (?, ?);\n ''', 
unsaved_histories_data)\n\n conn.commit()\n conn.close()\n\n self.load_users() # RELOADING!!! Pew, pew, pew, pew, pew...", "def set_state(self, service, key, value, context=None):\n return self._client.call_method(\n 'UserAndJobState.set_state',\n [service, key, value], self._service_ver, context)", "def update_db_with_user_edits(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.img_url = request.form['img_url']\n\n db.session.add(user)\n db.session.commit()\n\n return redirect('/users')", "def test_set_user_status(self):\n pass", "def updateBuddy(self,username,online,evilness,signontime,idletime,userclass,away):\n print \"status changed for\",username", "def update(self, user_id, first_name=None, last_name=None, email=None, title=None,\n dept=None, notes=None, admin_role=None, app_role=None, email_notification=None):\n\n url = \"{0}/users/{1}\".format(self.base_url, user_id)\n url = self._add_token_to_url(url)\n payload = self.get(user_id)\n\n # get rid of fields that aren't required for PUT\n pop_fields = ['complete_json',\n 'entity_type',\n 'id',\n 'image',\n 'is_deleted',\n 'tags',\n 'username']\n for field in pop_fields:\n payload.pop(field)\n\n # replace fields with updated ones from kwargs\n if first_name:\n payload[\"first_name\"] = first_name\n if last_name:\n payload[\"last_name\"] = last_name\n if email:\n payload[\"email\"] = email\n if title:\n payload[\"title\"] = title\n if dept:\n payload[\"dept\"] = dept\n if notes:\n payload[\"notes\"] = notes\n if app_role:\n payload[\"user_type\"] = app_role\n if email_notification is not None:\n payload[\"subscribed_to_emails\"] = email_notification\n\n # Logic for setting admin status is slightly more complicated:\n if admin_role is None:\n pass\n elif admin_role == \"app_admin\":\n payload[\"admin\"] = True\n payload[\"roles\"] = \"\"\n elif admin_role == \"data_admin\":\n payload[\"admin\"] = False\n payload[\"roles\"] = \"data_admin\"\n else:\n payload[\"admin\"] = False\n payload[\"roles\"] = \"\"\n\n self.logger.debug(\"Sending the user information {0} to {1}\".format(json.dumps(payload), url))\n self.session.headers.update({\"Content-Type\": \"application/json\"}) # Set special header for this post\n response = self.session.put(url, data=json.dumps(payload), verify=False)\n self.logger.debug(\"Received response code {0} with reason {1}...\".format(response.status_code, response.reason))\n self.session.headers.pop(\"Content-Type\") # Remove header, as it affects other tests\n return response.json()['response']", "def setUser(self, value):\n return self._set(user=value)", "def set_user_params(self, **params):\n self.workflow.user_params.update(params)\n return self", "def fusion_api_switch_active_user(self, user):\n # logger._log_to_console_and_log_file(\"Switched to user: %s\" % user)\n return self.loginsession.switch_active_user(user)", "def save(self, context=None):\n updates = self.obj_get_changes()\n self.dbapi.update_user(context, self.id, updates)\n self.obj_reset_changes()", "def update_user(request):\n post = request.POST.dict()\n user = post.get('user_id')\n if user is None:\n response = {'status':-1, 'status_message':'No user_id specified'}\n return HttpResponse(json.dumps(response))\n try:\n user_obj = User.objects.get(id=user)\n except User.DoesNotExist:\n response = {'status':-1, 'status_message':'Invalid user_id: {}'.format(user)}\n return HttpResponse(json.dumps(response))\n user_obj.first_name = 
post.get('first_name')\n user_obj.last_name = post.get('last_name')\n password = post.get('password')\n if password and password != \"**********\":\n # update the password\n user_obj.set_password(password)\n if post.get('username'):\n user_obj.username = post['username']\n user_obj.email = post.get('email')\n user_obj.is_superuser = json.loads(post.get('is_admin', 'false'))\n user_obj.is_active = json.loads(post.get('is_enabled', 'false'))\n user_obj.save()\n response = {'status': 1, 'status_message': 'Success'}\n return HttpResponse(json.dumps(response))", "def updateUserElder(self, address, newUser):\n try:\n DatabaseCollections.userCollection.update_one(\n {\n \"address\": address\n },\n {\"$set\": {\n \"newUser\": newUser\n }})\n return True\n except IOError:\n return False", "def save(self, **kwargs):\n payload = self.context['payload']\n user = User.objects.get(username=payload['user'])\n user.is_verified = True\n user.save()", "def update_user(self, user_name, new_user_name=None, new_path=None):\r\n params = {'UserName' : user_name}\r\n if new_user_name:\r\n params['NewUserName'] = new_user_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateUser', params)", "def update_state(self, context):\n pass", "def update_user(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.image_url = request.form['image_url']\n\n db.session.add(user)\n db.session.commit()\n\n return redirect(\"/users\")", "def update_user(BrokerId=None, ConsoleAccess=None, Groups=None, Password=None, Username=None):\n pass", "def put(self, user_id):\n\n current_app.logger.info(\"PUT: {}\".format(request.full_path))\n\n args = self.put_parser.parse_args()\n updated_bill = UserBillsModel.change_user_bill(user_id, args['price'])\n\n # may be check on 404 ?\n\n return {'bill': updated_bill}, 200", "def save(self):\n payload = self.context['payload']\n user = User.objects.get(username=payload['user'])\n user.is_verified = True\n user.save()" ]
[ "0.76819336", "0.7584826", "0.74265414", "0.72629994", "0.69820327", "0.69404083", "0.68895626", "0.6865466", "0.67882496", "0.672065", "0.6654662", "0.66312045", "0.6603028", "0.65605366", "0.65469825", "0.65042037", "0.64609027", "0.6450725", "0.64290047", "0.6422216", "0.6411004", "0.6398283", "0.6384868", "0.63751197", "0.6346387", "0.6333635", "0.6291247", "0.6280495", "0.62479246", "0.6214911", "0.621087", "0.62077034", "0.61899793", "0.61851776", "0.6166341", "0.6158883", "0.6151279", "0.6145799", "0.6143463", "0.61357903", "0.6128575", "0.61183465", "0.61183465", "0.61183465", "0.61183465", "0.61183465", "0.61183465", "0.61183465", "0.61183465", "0.61183465", "0.61147225", "0.6106829", "0.6096958", "0.6092119", "0.6082352", "0.6073132", "0.6069147", "0.6067234", "0.60520214", "0.6035371", "0.6021454", "0.6008905", "0.60075814", "0.60033387", "0.60026306", "0.5976611", "0.5973883", "0.59652674", "0.59625304", "0.59598756", "0.5954703", "0.5946993", "0.5933559", "0.5921385", "0.5920945", "0.5914612", "0.58819103", "0.5874926", "0.587308", "0.58637005", "0.5863417", "0.5861009", "0.58601695", "0.585853", "0.58505946", "0.58449346", "0.58421445", "0.5834157", "0.5828763", "0.5828733", "0.5827637", "0.5826252", "0.58257926", "0.5825279", "0.5824639", "0.5824143", "0.5823614", "0.582304", "0.5821648", "0.58211946" ]
0.64827514
16
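The negatives for the entry above collect many variants of the same user-update routine (setattr loops over a JSON payload, REST PUT handlers, raw SQL updates). A minimal sketch of the recurring pattern, assuming a hypothetical storage API and an illustrative protected-field set, neither of which is defined by this dataset:

# Minimal sketch of the setattr-based update pattern that recurs in the
# negatives above; the storage object, model name, and protected fields
# are assumptions for illustration only.
PROTECTED = {"id", "email", "created_at", "updated_at"}

def update_user(storage, user_id, payload):
    user = storage.get("User", user_id)
    if user is None:
        raise KeyError(user_id)            # callers typically map this to a 404
    for key, value in payload.items():
        if key not in PROTECTED:           # leave immutable fields untouched
            setattr(user, key, value)
    storage.save()
    return user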
Uploads an API signing key for the specified user. Every user has permission to use this operation to upload a key for their own user ID. An administrator in your organization does not need to write a policy to give users this ability. To compare, administrators who have permission to the tenancy can use this operation to upload a key for any user, including themselves.
def upload_api_key(self, user_id, create_api_key_details, **kwargs): resource_path = "/users/{userId}/apiKeys" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "upload_api_key got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=create_api_key_details, response_type="ApiKey") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=create_api_key_details, response_type="ApiKey")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def api_key( self, trans, user_id, **kwd ):\n user = self.get_user( trans, user_id )\n key = self.create_api_key( trans, user )\n return key", "def upload_key():\n data = check_args(('cloudProvider', 'key'))\n provider = jobs.init_provider(data, True)\n key = decrypt_key(data['key'], data['username'])\n provider.save_key(key)\n return make_response()", "def upload_signing_cert(self, cert_body, user_name=None):\r\n params = {'CertificateBody' : cert_body}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('UploadSigningCertificate', params,\r\n verb='POST')", "def create_access_key(self, user_name=None):\r\n params = {'UserName' : user_name}\r\n return self.get_response('CreateAccessKey', params)", "def get_key(self, user, api_key):\n return True", "def add_user_key(self, obj, validity = 0, max_queries_per_ip_per_hour = 0, max_hits_per_query = 0):\n if obj is dict:\n params = obj\n else:\n params = {\"acl\": obj}\n if validity != 0:\n params[\"validity\"] = validity\n if max_queries_per_ip_per_hour != 0:\n params[\"maxQueriesPerIPPerHour\"] = max_queries_per_ip_per_hour\n if max_hits_per_query != 0:\n params[\"maxHitsPerQuery\"] = max_hits_per_query\n return AlgoliaUtils_request(self.client.headers, self.write_hosts, \"POST\", \"/1/indexes/%s/keys\" % self.url_index_name, self.client.timeout, params)", "def add_user_key(self, obj, validity = 0, max_queries_per_ip_per_hour = 0, max_hits_per_query = 0, indexes = None):\n if obj is dict:\n params = obj\n else:\n params = {\"acl\": obj}\n if validity != 0:\n params[\"validity\"] = validity\n if max_queries_per_ip_per_hour != 0:\n params[\"maxQueriesPerIPPerHour\"] = max_queries_per_ip_per_hour\n if max_hits_per_query != 0:\n params[\"maxHitsPerQuery\"] = max_hits_per_query\n if not indexes is None:\n params['indexes'] = indexes\n return AlgoliaUtils_request(self.headers, self.write_hosts, \"POST\", \"/1/keys\", self.timeout, params)", "def key_upload(self, key=None):\n raise NotImplementedError", "def api_key_set(self, api_key):\n self.request('/v1.1/auth_key', 'POST', body={'auth_key': api_key})", "def generate_user_api_key(user):\n now = datetime.datetime.utcnow()\n payload = {\n 'iss': 'minesweeper-api',\n 'aud': 'client',\n 'iat': now,\n 'nbf': now,\n 'exp': now + _get_api_token_exp_from_config(),\n 'user_id': str(user.id),\n 'is_admin': user.is_admin,\n }\n bytestring = jwt.encode(payload, _get_api_key_from_config())\n token = bytestring.decode('utf-8')\n return token", "def create_api_key(sender, **kwargs):\r\n if kwargs.get('created') is True:\r\n ApiKey.objects.create(user=kwargs.get('instance'))", "def send_api_data(apiuser_with_custom_defaults, simple_sig_auth_credentials):\n content_types = {\n 'put': 'application/json',\n 'patch': 'application/json-patch+json',\n 'post': 'application/json'\n }\n\n def _send_api_data(api_client, url, req_body, method, content_type=None):\n test_cls = apiuser_with_custom_defaults()\n api_user = test_cls.objects.create_user('test', 'sec', password='pw',\n email='test@test.com',\n first_name='F', last_name='L')\n content_type = content_type or content_types[method]\n api_client.credentials(**simple_sig_auth_credentials(api_user,\n req_body))\n do_send = getattr(api_client, method)\n resp = do_send(url, req_body, content_type=content_type)\n api_client.credentials()\n return resp\n return _send_api_data", "def put(self, **kwargs):\n contract = {\n \"pushRegKey\": [\"id\",\"+\"]\n }\n try:\n self.check_params_conform(contract)\n except ValidatorException:\n return\n\n 
user_id = kwargs[\"id\"]\n user = models.User.get_by_id(user_id)\n if user is None:\n self.abort(422, \"Could not find user\")\n\n user.pushRegKey = self.get_param(\"pushRegKey\")\n user.put()\n\n self.set_default_success_response()\n self.send_response()", "def update_user_key(self, key, obj, validity = 0, max_queries_per_ip_per_hour = 0, max_hits_per_query = 0):\n if obj is dict:\n params = obj\n else:\n params = {\"acl\": obj}\n if validity != 0:\n params[\"validity\"] = validity\n if max_queries_per_ip_per_hour != 0:\n params[\"maxQueriesPerIPPerHour\"] = max_queries_per_ip_per_hour\n if max_hits_per_query != 0:\n params[\"maxHitsPerQuery\"] = max_hits_per_query\n return AlgoliaUtils_request(self.client.headers, self.write_hosts, \"PUT\", \"/1/indexes/%s/keys/%s\" % (self.url_index_name, key), self.client.timeout, params)", "def store_apikey_in_keyring(platform_id='public', # type: str\n base_url=None, # type: str\n keyring_entries_username=KR_DEFAULT_USERNAME, # type: str\n apikey=None, # type: str\n ):\n client = ODSClient(platform_id=platform_id, base_url=base_url, keyring_entries_username=keyring_entries_username)\n client.store_apikey_in_keyring(apikey=apikey)", "def api_key(request):\r\n user_acct = request.user\r\n return _api_response(request, {\r\n 'api_key': user_acct.api_key,\r\n 'username': user_acct.username\r\n })", "def update_user_key(self, key, obj, validity = 0, max_queries_per_ip_per_hour = 0, max_hits_per_query = 0, indexes = None):\n if obj is dict:\n params = obj\n else:\n params = {\"acl\": obj}\n if validity != 0:\n params[\"validity\"] = validity\n if max_queries_per_ip_per_hour != 0:\n params[\"maxQueriesPerIPPerHour\"] = max_queries_per_ip_per_hour\n if max_hits_per_query != 0:\n params[\"maxHitsPerQuery\"] = max_hits_per_query\n if not indexes is None:\n params['indexes'] = indexes\n return AlgoliaUtils_request(self.headers, self.write_hosts, \"PUT\", \"/1/keys/\" + key, self.timeout, params)", "def get_api_key_from_user_id(self, user_id: str) -> str:\n response = self.get(self.url + \"/my-account\", params={\"id\": user_id})\n return self.get_api_key_from_response(response)", "def key_upload(self, key=None):\n\n name = key[\"name\"]\n cloud = self.cloud\n Console.msg(f\"upload the key: {name} -> {cloud}\")\n try:\n r = self.cloudman.create_keypair(name, key['public_key'])\n except: # openstack.exceptions.ConflictException:\n raise ValueError(f\"key already exists: {name}\")\n\n return r", "def create_apikey(self, username, api_key):\r\n return 'ApiKey %s:%s' % (username, api_key)", "def user_put(user_id):\n user = storage.get(\"User\", user_id)\n if user is None:\n abort(404)\n us = request.get_json()\n if us is None:\n abort(400, \"Not a JSON\")\n else:\n for key, value in us.items():\n if key in ['id'] and key in ['created_at']\\\n and key in ['email'] and key in ['updated_at']:\n pass\n else:\n setattr(user, key, value)\n storage.save()\n resp = user.to_dict()\n return jsonify(resp), 200", "def create_key(iam_username):\n\n try:\n response = iam.create_access_key(UserName=iam_username)\n access_key = response[\"AccessKey\"][\"AccessKeyId\"]\n secret_key = response[\"AccessKey\"][\"SecretAccessKey\"]\n json_data = json.dumps({\"AccessKey\": access_key, \"SecretKey\": secret_key})\n secretmanager.put_secret_value(SecretId=iam_username, SecretString=json_data)\n\n \n emailmsg = (\n \"Hello,\\n\\n\"\n \"A new access key has been created for key rotation. 
\\n\\n\"\n f\"Access Key Id: {access_key}\\n\"\n f\"Secrets Manager Secret Id: {iam_username}\"\n )\n\n emailmsg = (\n f\"{emailmsg}\\n\\n\"\n f\"Please obtain the new access key information from \"\n \"secrets manager using the secret Id provided above in \"\n f\"{AWS_REGION_NAME} and update your application within 14 days \"\n \"to avoid interruption.\\n\"\n )\n\n sns.publish(\n TopicArn=SNS_TOPIC_ARN,\n Message=emailmsg,\n Subject=f\"AWS Access Key Rotation: New key is available for \"\n f\"{iam_username}\",\n )\n print(f\"New access key has been created for {iam_username}\")\n return {\"status\": 200}\n except ClientError as e:\n print(e)\n return {\"status\": 500}", "def post(self):\n user = users.get_current_user()\n if not user or not users.is_current_user_admin():\n self.abort(400)\n key_data = self.request.POST.get('default_public_key').value\n email = self.request.POST.get('email', '')\n key_name = \"default encryption key\"\n key_description = \"This is the default encryption key used when no user encryption key found.\"\n is_default_key = True\n if len(email) > 0:\n is_default_key = False\n user = users.User(email)\n key_name = \"encryption key for %s\" % user.nickname()\n key_description = \"The encryption key used for encrypting data uploaded by %s\" % user.nickname()\n is_success = KeyUtils.save_publickey(key_data, key_name, key_description, is_default_key, user) \n self.response.write({'status' : 'success' if is_success else 'failure'})", "def get_key(self, user, api_key):\r\n from delicious_cake.models import ApiKey\r\n\r\n try:\r\n ApiKey.objects.get(user=user, key=api_key)\r\n except ApiKey.DoesNotExist:\r\n return self._unauthorized()\r\n\r\n return True", "def test_with_other_signer(self):\n user_key = Key()\n user_id = user_key.public_key\n manager_key = Key()\n manager_id = manager_key.public_key\n other_key = Key()\n name = self.test.user.name()\n\n message = protobuf.user_transaction_pb2.CreateUser(\n user_id=user_id, name=name, metadata=None, manager_id=manager_id\n )\n inputs, outputs = self.rbac.user.make_addresses(\n message=message, signer_keypair=other_key\n )\n payload = self.rbac.user.batch.make_payload(\n message=message,\n message_type=self.rbac.user.message_type,\n inputs=inputs,\n outputs=outputs,\n )\n _, status = self.rbac.user.send(signer_keypair=other_key, payload=payload)\n self.assertStatusInvalid(status)", "def put(self, user_id):\n\n user_data, error = user_schema.load(api.payload['data'])\n\n user = User.objects.get_or_404(public_id=user_id)\n user.update(updated_at=datetime.utcnow, **user_data)\n \n return user_schema.dump(user)", "def handle_image(data, user_id):\n\n import boto3\n\n s3_client = boto3.client('s3')\n bucket_name = 'grouplite-bucket'\n key = str(user_id)+'-'+str(datetime.now())\n\n s3_client.put_object(Bucket=bucket_name, Body=data, Key=key)\n return key", "def get_key(self, user):\r\n from delicious_cake.models import ApiKey\r\n\r\n try:\r\n key = ApiKey.objects.get(user=user)\r\n except ApiKey.DoesNotExist:\r\n return False\r\n\r\n return key.key", "def add_user():\n load_jws_from_request(request)\n if not hasattr(request, 'jws_header') or request.jws_header is None:\n return \"Invalid Payload\", 401\n username = request.jws_payload['data'].get('username')\n address = request.jws_header['kid']\n user = SLM_User(username=username)\n ses.add(user)\n try:\n ses.commit()\n except Exception as ie:\n current_app.logger.exception(ie)\n ses.rollback()\n ses.flush()\n return 'username taken', 400\n userkey = UserKey(key=address, 
keytype='public', user_id=user.id,\n last_nonce=request.jws_payload['iat']*1000)\n ses.add(userkey)\n try:\n ses.commit()\n except Exception as ie:\n current_app.logger.exception(ie)\n ses.rollback()\n ses.flush()\n #ses.delete(user)\n #ses.commit()\n return 'username taken', 400\n jresult = jsonify2(userkey, 'UserKey')\n current_app.logger.info(\"registered user %s with key %s\" % (user.id, userkey.key))\n return current_app.bitjws.create_response(jresult)", "def key_request(self, user):\n\t\tclient_log.debug(f'Запрос публичного ключа для {user}')\n\t\treq = {\n\t\t\tACTION: PUBLIC_KEY_REQUEST,\n\t\t\tTIME: time.time(),\n\t\t\tACCOUNT_NAME: user\n\t\t}\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tif RESPONSE in ans and ans[RESPONSE] == 511:\n\t\t\treturn ans[DATA]\n\t\telse:\n\t\t\tclient_log.error(f'Не удалось получить ключ собеседника{user}.')", "def save_symmetric_key(self, key, user):\n self.temp_passphrase = key\n self.send_request(user, self.KM_TEMP_KEY_ACK)", "def create_user_key_file(username: str):\n\n user: User = UserModel().get_user(username=username)\n user_key: Key = user.public_key\n\n public_key: bytes = user_key.public_key\n\n if not os.path.exists(\"./ssh_ca\"):\n os.mkdir(\"./ssh_ca\")\n\n with open(f\"./ssh_ca/{username}.pub\") as public_key_file:\n public_key_file.write(public_key.decode())", "def get_activation_key(self, user):\n return signing.dumps(obj=user.get_username(), salt=REGISTRATION_SALT)", "def add_user_grant(self, permission, user_id, recursive=False,\r\n headers=None, display_name=None):\r\n if permission not in S3Permissions:\r\n raise self.connection.provider.storage_permissions_error(\r\n 'Unknown Permission: %s' % permission)\r\n policy = self.get_acl(headers=headers)\r\n policy.acl.add_user_grant(permission, user_id,\r\n display_name=display_name)\r\n self.set_acl(policy, headers=headers)\r\n if recursive:\r\n for key in self:\r\n key.add_user_grant(permission, user_id, headers=headers,\r\n display_name=display_name)", "def generate_api_key(self, **kwargs):\n\n all_params = []\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method generate_api_key\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/apikeys/_generate'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['privileges', 'apikey']\n\n response = self.api_client.call_api(resource_path, 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='ApiKey',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def put(self, user_id):\n self.conn = pecan.request.db_conn\n self.conn.change_billing_owner(request.context,\n project_id=self.project_id,\n user_id=user_id)", "def sign(self, body, external_aad, private_key):", "def upload_file_to_s3(file, user_id, type_data, 
name):\n b = boto_init_s3(\"yapster\")\n path_bucket = \"\"\n if b:\n if type_data == \"cover\":\n path_bucket = \"yapsterusers/uid/\" + user_id + \"/cover/cover\"\n\n if type_data == \"profile\":\n path_bucket = \"yapsterusers/uid/\" + user_id + \"/profile/profile\"\n\n if type_data == \"yap_audio\":\n path_bucket = \"yapsterusers/uid/\" + user_id + \"/yaps/1/audio/\" + name\n\n if type_data == \"yap_image\":\n path_bucket = \"yapsterusers/uid/\" + user_id + \"/yaps/1/image/\" + name\n\n if type_data == \"original\":\n path_bucket = \"yapsterusers/uid/\" + user_id + \"/yaps/1/audio/original\"\n\n k = b.get_key(path_bucket)\n if not k:\n try:\n k = b.new_key(path_bucket)\n except:\n return \"error occured\"\n k.set_contents_from_file(file)\n\n return path_bucket", "def get_api_key_params(user):\n if user and user.is_authenticated():\n api_key, _ = APIKey.objects.get_or_create(user=user)\n return urlencode({'user': user.pk, 'key': api_key.key})\n return ''", "def add_ssh_key(self, user_id, title, ssh_key):\n _gu = self.get_user(user_id)\n if _gu is None:\n return None\n\n # build URL and make request\n return self._post(\n '/users/{0}/keys'.format(_gu['id']),\n data={'title': title, 'key': ssh_key},\n )", "def add(self, user):\r\n url = '{0}/{1}'.format(self.get_url(), user)\r\n\r\n # include a body, because requests does not send content-length when no\r\n # body is present, and that makes GitHub respond with HTTP 411\r\n return http.Request('PUT', url, '*'), parsers.parse_empty", "def add(ctx: CLIContext, user_id, resource_policy, admin, inactive, rate_limit):\n with Session() as session:\n try:\n data = session.KeyPair.create(\n user_id,\n is_active=not inactive,\n is_admin=admin,\n resource_policy=resource_policy,\n rate_limit=rate_limit)\n except Exception as e:\n ctx.output.print_mutation_error(\n e,\n item_name='keypair',\n action_name='add',\n )\n sys.exit(1)\n if not data['ok']:\n ctx.output.print_mutation_error(\n msg=data['msg'],\n item_name='keypair',\n action_name='add',\n )\n sys.exit(1)\n ctx.output.print_mutation_result(\n data,\n item_name='keypair',\n extra_info={\n 'access_key': data['keypair']['access_key'],\n 'secret_key': data['keypair']['secret_key'],\n },\n )", "def api_key(self, value):\n self.__creds.api_key_v2 = value", "def add_key(mu_key):\n params['key'] = mu_key", "def generate_access_key(self):\n\t\tfrom app import app\n\t\ts = JSONWebSignatureSerializer(app.config['SECRET_KEY'])\n\t\taccess_key = s.dumps({'username': self.username}) \n\t\tself.access_key = access_key", "async def apikey_bing(self, ctx, key):\n settings = loadauth()\n settings['apikey'] = key\n saveauth(settings)\n return await self.bot.say(\"Bing API key saved.\")", "def call_api(self, user, data):\n data = json.dumps(data)\n headers = self.build_jwt_headers(user)\n return self.client.post(self.url, data, content_type='application/json', **headers)", "def encoded_jwt(private_key, user):\n kid = JWT_KEYPAIR_FILES.keys()[0]\n scopes = ['openid']\n return generate_signed_access_token(\n kid, private_key, user, 3600, scopes, forced_exp_time=None)", "def update_keys(user_id):\n\n if not request.json:\n abort(400)\n\n new_pub_keys = request.json[\"public_keys\"]\n\n db_conn = sqlite3.connect(db_path)\n db = db_conn.cursor()\n db_pub_keys = []\n try:\n for row in db.execute(\"SELECT public_key FROM public_keys WHERE username=? 
AND status=?;\", [user_id, PK_STATUS_OK]):\n db_pub_keys.append(row[0])\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n\n to_add = []\n to_revoke = []\n\n # Put the keys not present in the database in the list of keys to add\n for new_key in new_pub_keys:\n if(new_key not in db_pub_keys):\n to_add.append((user_id, new_key, PK_STATUS_OK))\n # Put the keys not in the new list in the list of keys to revoke\n for db_key in db_pub_keys:\n if(db_key not in new_pub_keys):\n to_revoke.append((PK_STATUS_REVOKED, user_id, db_key))\n\n try:\n db.executemany('INSERT INTO public_keys (username, public_key, status) VALUES (?,?,?);', to_add)\n db.executemany('UPDATE public_keys SET status=? WHERE username=? AND public_key=?;', to_revoke)\n db_conn.commit()\n db_conn.close()\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n\n return jsonify({'status':True})", "def get_activation_key(self, user):\n\t\treturn signing.dumps(\n\t\t\tobj=getattr(user, user.USERNAME_FIELD),\n\t\t\tsalt=REGISTRATION_SALT\n\t\t)", "def put(self, user_id):\n data = request.json\n return update_user(data, user_id)", "def write_key(api_key, output_path, client_module=pyalveo):\n client = client_module.Client(api_key, API_URL, use_cache=False)\n outfile = open(output_path, 'w')\n outfile.write(api_key)\n outfile.close()", "def test_upload(self):\n fake_key_name = 'fake_key_name'\n fake_file_name = 'fake_file_name'\n\n with patch('iceit.backends.Key', spec=True) as mock_key:\n mock_key.return_value = mock_key\n backend = self.test_init_valid()\n\n backend.upload(fake_key_name, fake_file_name)\n mock_key.assert_called_once_with(backend.bucket, fake_key_name)\n self.assertTrue(mock_key.encrypted)\n self.assertTrue(mock_key.set_contents_from_filename.called)\n mock_key.set_acl.assert_called_once_with(\"private\")", "def put_user_id(user_id):\r\n obj = storage.get(User, user_id)\r\n if obj is None:\r\n abort(404)\r\n user = request.get_json()\r\n if user is None:\r\n abort(400, \"Not a JSON\")\r\n for key, value in user.items():\r\n if key not in ['id', 'email', 'created_at', 'updated_at']:\r\n setattr(obj, key, value)\r\n obj.save()\r\n return jsonify(obj.to_dict()), 200", "def _get_api_key(self):\n self.api.apikey = self.api.action.user_show(id=self.username)['apikey']", "def update_public_key(self, uid: str, hex_bytes: str) -> str:\n return self.context.put(\n \"/dsum/public_key\", {\"uid\": uid, \"key\": hex_bytes}, None,\n \"DSum: failed updating the Curve 25519 public key with uid: %s\" % uid)['uid']", "def create_api_keys(self, **kwargs):\n\n all_params = ['api_key']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method create_api_keys\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/apikeys'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'api_key' in params:\n body_params = params['api_key']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['privileges', 'apikey']\n\n response = 
self.api_client.call_api(resource_path, 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='ApiKeyWithPrivileges',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def put_user(user_id=None):\n result = request.get_json()\n if not result:\n abort(400, {\"Not a JSON\"})\n obj = storage.get('User', user_id)\n if obj is None:\n abort(404)\n invalid_keys = [\"id\", \"email\", \"created_at\", \"updated_at\"]\n for key, value in result.items():\n if key not in invalid_keys:\n setattr(obj, key, value)\n storage.save()\n return jsonify(obj.to_dict()), 200", "def upload_fileobj(self, bucket_name, file_obj, key):\n self._client.upload_fileobj(Fileobj=file_obj, Bucket=bucket_name, Key=key)", "def with_user(data_builder, randstr, as_public):\n api_key = randstr()\n user = data_builder.create_user(api_key=api_key, root=False)\n session = copy.deepcopy(as_public)\n session.headers.update({'Authorization': 'scitran-user ' + api_key})\n return attrdict.AttrDict(user=user, api_key=api_key, session=session)", "def add_user():\n if not request.json:\n abort(400)\n\n db_conn = sqlite3.connect(db_path)\n db = db_conn.cursor()\n\n username = request.json['username']\n public_key = request.json['public_key']\n\n try:\n db.execute(\"INSERT INTO users (username) VALUES (?)\", [username])\n db.execute(\"INSERT INTO public_keys (username, public_key, status) VALUES (?,?,?)\", [username, public_key, PK_STATUS_OK])\n db_conn.commit()\n db_conn.close()\n except sqlite3.IntegrityError:\n db_conn.close()\n abort(400)\n return jsonify({'success':True})", "def create_account(self, user):\n tx = self.iroha.transaction(\n [\n self.iroha.command(\n \"CreateAccount\",\n account_name=user.gov_id,\n domain_id=\"afyamkononi\",\n public_key=user.public_key,\n )\n ]\n )\n IrohaCrypto.sign_transaction(tx, self.creator_account_details.private_key)\n return self.send_transaction_and_return_status(tx)", "def generate_secured_api_key(self, private_api_key, tag_filters, user_token = None):\n if type(tag_filters) is list:\n tag_filters = ','.join(map(lambda t: ''.join(['(', ','.join(t), ')']) if type(t) is list else str(t), tag_filters))\n if type(tag_filters) is dict:\n try:\n iteritems = tag_filters.iteritems(); #Python3.X Fix\n except AttributeError:\n iteritems = tag_filters.items();\n tag_filters = {}\n for k, v in iteritems:\n if isinstance(v, (list, dict, tuple, bool)):\n tag_filters[k] = json.dumps(v)\n else:\n tag_filters[k] = v\n tag_filters = urlencode(tag_filters)\n return hmac.new(str.encode(private_api_key), str.encode(''.join([str(tag_filters), str(user_token or '')])), hashlib.sha256).hexdigest()", "def perform_create(self, serializer):\n km_user = KMUser.objects.get(pk=self.kwargs.get(\"pk\"))\n\n return serializer.save(km_user=km_user)", "def perform_create(self, serializer):\n km_user = KMUser.objects.get(pk=self.kwargs.get(\"pk\"))\n\n return serializer.save(km_user=km_user)", "def perform_create(self, serializer):\n km_user = KMUser.objects.get(pk=self.kwargs.get(\"pk\"))\n\n return serializer.save(km_user=km_user)", "def upload():\n\n # TODO: decorator to check token\n token = request.headers.get(\"Authorization\")\n\n has_text = bool(request.get_json())\n has_file = request.files and request.files[\"file\"]\n if not has_text and not has_file:\n error = \"No text input and no file provided\"\n return jsonify({\"success\": False, \"message\": error})\n\n filename, error = 
save_text(request)\n if error:\n return jsonify({\"success\": False, \"message\": error})\n\n job_id = schedule(filename, token)\n add_user_job(job_id, token)\n\n return jsonify({\"success\": True, \"data\": {\"jobId\": job_id}})", "def user_key(user_name=DEFAULT_USER_NAME):\n return ndb.Key('User', user_name)", "def create_keys(self):\n crypto_tool = CryptoTools()\n # creating RSA keys for the signer user\n public_key, private_key = crypto_tool.create_key_with_entropy()\n self.priv_key = crypto_tool.get_pem_format(private_key).decode(\"utf-8\")\n self.pub_key = crypto_tool.get_pem_format(public_key).decode(\"utf-8\")", "def addPubKey(User, pubkey):\n with cd('~%s' % (User)):\n sudo('mkdir -p .ssh && chmod 700 .ssh', user=User)\n # add key if it doesn't already exist #\n _hazKey = 'no'\n _hazFile = sudo(\"[ -f .ssh/authorized_keys ] && echo 'yes' || echo 'no'\", user=User)\n if _hazFile == 'yes':\n # authorized_keys exist - check if the key already exists\n _hazKey = sudo(\"grep '%s' .ssh/authorized_keys >/dev/null 2>&1 && echo 'yes'\" % (pubkey), user=User)\n if _hazKey == 'no':\n sudo(\"echo '%s' >> .ssh/authorized_keys\" % (pubkey), user=User)\n else:\n print \"[Info] User '%s' key already exists on host '%s'\" % (User, env.host_string)", "def test_create_api_key(self):\n pass", "def upload_local_public_key(use_poweruser=False,\n local_pub_key_path='~/.ssh/id_rsa.pub'):\n target_user = env.user\n target_home = '.'\n acting_user = env.user\n remote_run = run\n use_sudo = False\n\n if use_poweruser:\n use_sudo = True\n remote_run = sudo\n acting_user = env.poweruser\n # switch to power user to login and create key file\n # (we do not allow unprivileged user login with password)\n with settings(hide('everything'), user=acting_user, warn_only=True):\n target_home = run(\"getent passwd {}\"\n \"|awk -F: '{{print $6}}'\".format(target_user))\n if not exists(target_home):\n print(red(\"User's home directory does not exist\"))\n return\n\n pubkey_path = os.path.expanduser(local_pub_key_path)\n if not os.path.exists(pubkey_path):\n print(red(\"Local public key not found: {}\".format(pubkey_path)))\n return\n\n key = ' '.join(open(pubkey_path).read().strip().split(' ')[:2])\n with settings(user=acting_user), cd(target_home):\n remote_run('mkdir -p .ssh')\n # 'append' with use_sudo duplicates lines within 'cd'.\n # https://github.com/fabric/fabric/issues/703\n # Passing 'shell=True' to append() (which is supported in\n # Fabric 1.6) fixes this issue.\n append('.ssh/authorized_keys', key, partial=True, shell=True,\n use_sudo=use_sudo)\n remote_run('chmod 600 .ssh/authorized_keys')\n remote_run('chmod 700 .ssh')\n remote_run('chown -R {0}:{0} .ssh'.format(target_user))", "def add_user_grant(self, permission, user_id, recursive=False, headers=None):\r\n if permission not in GSPermissions:\r\n raise self.connection.provider.storage_permissions_error(\r\n 'Unknown Permission: %s' % permission)\r\n acl = self.get_acl(headers=headers)\r\n acl.add_user_grant(permission, user_id)\r\n self.set_acl(acl, headers=headers)\r\n if recursive:\r\n for key in self:\r\n key.add_user_grant(permission, user_id, headers=headers)", "def user_id_put(user_id):\n user = storage.get(\"User\", user_id)\n json_string_dict = request.get_json()\n\n if user is None:\n abort(404)\n if json_string_dict is None:\n return make_response(jsonify('Not a JSON'), 400)\n\n for key, value in json_string_dict.items():\n if key not in ['id', 'email', 'created_at', 'updated_at']:\n setattr(user, key, value)\n user.save()\n return 
jsonify(user.to_dict())", "def create_user(user_id, password_16char, public_key_32char):\n headers = {'Content-type': 'application/json'}\n payload = {'user_id': user_id\n , 'user_password': password_16char\n , 'public_key': public_key_32char}\n response = requests.post(\"http://localhost:5000/user/createUser\", data=json.dumps(payload), headers=headers)\n return response.text", "async def add_key(request: web.Request) -> web.Response:\n if not request.can_read_body:\n return web.json_response({'message': \"Must upload key file\"},\n status=400)\n data = await request.post()\n keyfile = data.get('key')\n if not keyfile:\n return web.json_response(\n {'message': \"No key 'key' in request\"}, status=400)\n\n add_key_result = wifi.add_key(keyfile.filename, keyfile.file.read())\n\n response_body = {\n 'uri': '/wifi/keys/{}'.format(add_key_result.key.directory),\n 'id': add_key_result.key.directory,\n 'name': os.path.basename(add_key_result.key.file)\n }\n if add_key_result.created:\n return web.json_response(response_body, status=201)\n else:\n response_body['message'] = 'Key file already present'\n return web.json_response(response_body, status=200)", "def upload(bucket, key, content, extra_agrs):\n # validate_content(content)\n validate_bucket_name(bucket)\n validate_key_name(key)\n client = get_client()\n if extra_agrs:\n client.put_object(Body=content, Bucket=bucket, Key=key, ContentType=extra_agrs['ContentType'])\n else:\n client.put_object(Body=content, Bucket=bucket, Key=key)", "def create_keypair(self, username):\n msg = \"create_keypair not implemented\"\n raise NotImplementedError(msg)", "def _get_private_key(self, user_obj):\n return user_obj.private_key.encode('utf-8')", "def put(self, id=None):\n key_data = self.request.get('key_data', None)\n if id and key_data:\n id = str(urllib.unquote(id))\n public_key = PublicKey.get_by_id(long(id))\n if public_key:\n public_key.publickey = key_data\n public_key.put()\n self.response.write({'status' : 'success'})\n return\n else:\n self.abort(404)\n self.abort(400)", "def create_access_key(self, user_name=None, delegate_account=None):\n self.log.debug(\"Creating access key for \" + user_name )\n params = {'UserName': user_name}\n if delegate_account:\n params['DelegateAccount'] = delegate_account\n response = self.connection.get_response('CreateAccessKey', params)\n access_tuple = {}\n access_tuple['access_key_id'] = response['create_access_key_response']\\\n ['create_access_key_result']['access_key']['access_key_id']\n access_tuple['secret_access_key'] = response['create_access_key_response']\\\n ['create_access_key_result']['access_key']['secret_access_key']\n return access_tuple", "def set_APIKey(self, value):\n super(UpdateTriggerInputSet, self)._set_input('APIKey', value)", "def upload_file(self, path, log_as_output: bool = True):\n\n if os.path.isfile(path):\n remote_path = os.path.join(self.key, file_utils.get_filename(path, exclude_extension=False))\n res_key = self._env.upload_file(path,\n self._env.DataType.EXPERIMENT,\n file_name=remote_path,\n track_event=False)\n if log_as_output:\n # Track as output\n self.log_output_file(res_key)\n return res_key\n elif os.path.isdir(path):\n remote_path = os.path.join(self.key, file_utils.get_folder_name(path) + \".zip\")\n res_key = self._env.upload_folder(path,\n self._env.DataType.EXPERIMENT,\n file_name=remote_path,\n track_event=False)\n if log_as_output:\n # Track as output\n self.log_output_file(res_key)\n return res_key\n else:\n self.log.warning(\"Provided path is not a file or 
folder. \" + str(path))\n return None", "def encode_auth_token(self, user_id):\n try:\n payload = {\n 'exp': datetime.datetime.utcnow() + datetime.timedelta(days=1, seconds=0),\n 'iat': datetime.datetime.utcnow(),\n 'sub': user_id\n }\n return jwt.encode(\n payload,\n app.config.get('SECRET_KEY'),\n algorithm='HS256'\n )\n except Exception as e:\n return e", "def update_access_key(self, access_key_id, status, user_name=None):\r\n params = {'AccessKeyId' : access_key_id,\r\n 'Status' : status}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('UpdateAccessKey', params)", "def delete_api_key(self, user_id, fingerprint, **kwargs):\n resource_path = \"/users/{userId}/apiKeys/{fingerprint}\"\n method = \"DELETE\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"delete_api_key got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"userId\": user_id,\n \"fingerprint\": fingerprint\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params)", "def send_key(key_id):\n\tkey_id = str(key_id)\n\tGPG.send_keys(KEYSERVER, key_id)\n\tif key_id == GPG.search_keys(key_id, KEYSERVER)[0]['keyid']:\n\t\treturn key_id\n\telse:\n\t\terror = 'Error uploading key ', key_id\n\t\treturn error", "def _save_user(self, user):\n self.firebase.patch(f'/{self.USERS_KEY}', {str(user.id): user.username})", "def send_key(key_id):\n key_id = str(key_id)\n GPG.send_keys(KEYSERVER, key_id)\n if key_id == GPG.search_keys(key_id, KEYSERVER)[0]['keyid']:\n return key_id\n else:\n error = 'Error uploading key ', key_id\n return error", "def put(self, user_id):\r\n return update_user(request, user_id)", "def authorize_user(case: APITestCase, user: User):\n\n token = Token.objects.create(user=user)\n case.client.credentials(HTTP_AUTHORIZATION=f'Token {token}')", "def record_ingredient_request_for_user(self, ingredient_doc, user_doc):\n try:\n self.client.connect()\n # get latest user\n latest_user_doc = self.client[self.db_name][user_doc['_id']]\n # see if user has an array of ingredients, if not create it\n if 'ingredients' not in latest_user_doc.keys():\n latest_user_doc['ingredients'] = []\n # find the ingredient that matches the name of the passed in ingredient\n # if it doesn't exist create it\n user_ingredients = list(filter(lambda x: x['name'] == ingredient_doc['name'], latest_user_doc['ingredients']))\n if len(user_ingredients) > 0:\n 
user_ingredient = user_ingredients[0]\n else:\n user_ingredient = {'name': ingredient_doc['name']}\n latest_user_doc['ingredients'].append(user_ingredient)\n # see if the user_ingredient exists, if not create it\n if 'count' not in user_ingredient.keys():\n user_ingredient['count'] = 0\n # increment the count on the user_ingredient\n user_ingredient['count'] += 1\n # save the user doc\n latest_user_doc.save()\n # add a new doc with the user/ingredient details\n user_ingredient_doc = {\n 'type': 'userIngredientRequest',\n 'user_id': user_doc['_id'],\n 'user_name': user_doc['name'],\n 'ingredient_id': ingredient_doc['_id'],\n 'ingredient_name': ingredient_doc['name'],\n 'date': int(time.time()*1000)\n }\n db = self.client[self.db_name]\n db.create_document(user_ingredient_doc)\n finally:\n self.client.disconnect()", "def set_APIKey(self, value):\n super(RetrieveUserDashboardInputSet, self)._set_input('APIKey', value)", "def add_key(self, device, key):\n if not self.enabled:\n return\n self.keys[device] = key\n fh = open(self.path, \"w\")\n json.dump(self.keys, fh)\n fh.close()\n os.chmod(self.path, 0o600)", "def send_exchange_request(self, user):\n self.current['user'] = user\n self.send_request(user, self.KM_REQUEST_KEY)", "def user_key(user):\n return ndb.Key('User', user.email())", "def set_APIKey(self, value):\n super(ImageInputSet, self)._set_input('APIKey', value)", "def test_get_user_api_keys(self):\n pass", "async def steamkey(self, ctx, key):\n\n set_steam_key(key)\n await self.bot.say(\"The Steam API key has been successfully added! Delete the previous message for your own safety!\")", "def upload_file(self, bucket_name, file_path, key):\n self._client.upload_file(Filename=file_path, Bucket=bucket_name, Key=key)" ]
[ "0.67768776", "0.65675324", "0.609125", "0.595348", "0.5758476", "0.5749899", "0.5690145", "0.55707395", "0.55044675", "0.54585487", "0.54316515", "0.5418301", "0.5411513", "0.54110056", "0.5376245", "0.53506505", "0.53419787", "0.53266", "0.5320789", "0.5302657", "0.52935576", "0.52846843", "0.52385896", "0.5234902", "0.52131903", "0.5200968", "0.51774526", "0.5163708", "0.5158022", "0.51535976", "0.5139492", "0.5133074", "0.513259", "0.513095", "0.5108834", "0.5098053", "0.5080783", "0.5072281", "0.50670826", "0.5061424", "0.5038479", "0.501928", "0.5013588", "0.49973756", "0.49957785", "0.49896064", "0.49885812", "0.49791422", "0.49571648", "0.49499023", "0.49494296", "0.49406543", "0.49232537", "0.49154195", "0.48870277", "0.4884965", "0.4871852", "0.48717543", "0.48661262", "0.48567143", "0.4854648", "0.48451775", "0.48450637", "0.48367006", "0.48367006", "0.48367006", "0.48268133", "0.48215315", "0.48128363", "0.48118326", "0.48068956", "0.479876", "0.47973734", "0.479647", "0.479241", "0.4786341", "0.4783291", "0.47831678", "0.47763708", "0.47678882", "0.47637573", "0.4759652", "0.472505", "0.4724867", "0.4724721", "0.4708907", "0.47083375", "0.4708079", "0.470737", "0.47045273", "0.46933788", "0.46895596", "0.4677052", "0.4674536", "0.46740508", "0.46699435", "0.46687225", "0.46686918", "0.46652365", "0.46520215" ]
0.651776
2
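The document in the entry above matches the Oracle Cloud Infrastructure (OCI) Python SDK's IdentityClient.upload_api_key. A minimal caller-side sketch, assuming the standard oci package and a PEM public-key file whose path is hypothetical:

# Usage sketch, assuming the OCI Python SDK ("oci" package) and a default
# ~/.oci/config profile; the key-file path is an illustrative assumption.
import oci

config = oci.config.from_file()                      # reads ~/.oci/config by default
identity = oci.identity.IdentityClient(config)

with open("api_key_public.pem") as f:                # hypothetical PEM public key
    details = oci.identity.models.CreateApiKeyDetails(key=f.read())

# Users may upload a key for their own user OCID; tenancy admins may target any user.
response = identity.upload_api_key(config["user"], details)
print(response.data.fingerprint)                     # fingerprint of the new key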
Return the classroom that has the given classroomId; otherwise, return None.
def getClassroomById(classroomId): for classroom in classroomEntities: if classroom["classroomId"] == classroomId: return classroom.copy() return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_room(self, name=None, id=None):\n \n if(name):\n return self.rooms[name] if name in self.rooms else None\n if(id):\n return next((v for (k,v) in self.rooms.items() if v.id == id), None)\n return None", "def getRoomById(self, id):\n for room in self.rooms:\n if room.id == id:\n return room\n\n return None", "def GetRoom(self, id):\n try:\n return self._rooms[id]\n except:\n return None", "def find_general_class(self, class_id):\n for class_ in my_classes:\n if class_.class_id == class_id:\n return class_\n\n return None", "def get_room_by_id(self, id):\n if not isinstance(id, int):\n id = int(id)\n if self.rooms.has_key(id):\n return self.rooms[id]\n raise RuntimeError, \"Room not known\"", "def get_skill_class(cursor, _class):\n cursor.execute('SELECT id FROM classes WHERE temp_id = ?', (_class,))\n data = cursor.fetchone()\n try:\n return data[0]\n except TypeError:\n l.error(\"The Class {} doesn't exists.\".format(_class))", "def get_course(self, id):\n id = str(id)\n for i in range(len(self.courses)):\n if self.courses[i].id == id:\n return self.courses[i]", "def deleteClassroom(classroomId):\n for classroom in classroomEntities:\n if classroom[\"classroomId\"] == classroomId:\n selectedClassroom = classroom\n classroomEntities.remove(selectedClassroom)\n return True\n return False", "def get_room(self, roomName):\n for room in self.rooms:\n if roomName == room.get_name():\n return room", "def get_room(self, room_name):\r\n try:\r\n return self._rooms[room_name]\r\n except KeyError:\r\n return None", "def find_category(category_id: TourneyCategoryID) -> Optional[TourneyCategory]:\n return TourneyCategory.query.get(category_id)", "def find_by_id(self, id_):\n return self.by_id.get(id_)", "def get_for_type(class_, vehicle):\n Category = class_\n found = session.query(Category).filter_by(name=vehicle.get_category_id()).first()\n return found", "def get_cell(self, cell_id: str) -> Optional[Cell]:\n\n for cell in self.cells:\n if cell.id == cell_id:\n return cell\n return None", "def _find_room_helper(room_obj_list, room):\n\tfor r_obj in room_obj_list:\n\t\tif r_obj[0] == room:\n\t\t\treturn r_obj[1]\n\treturn None", "def helpClassroom(classroomId):\n selectedClassroomCopy = getClassroomById(classroomId)\n print(\"Class Id: \" + selectedClassroomCopy[\"classroomId\"])\n print(\"Name: \" + selectedClassroomCopy[\"classroomName\"])\n print(\"Capacity: \" + selectedClassroomCopy[\"capacity\"])\n print(\"Location: \" + selectedClassroomCopy[\"location\"])\n return True", "def by_id(cls, id):\n try:\n return DBSession.query(cls).filter(cls.id == id).one()\n except (NoResultFound, MultipleResultsFound):\n return None", "def get_trainer_by_id(self, id):\n # Validates id\n TrainerManager._int_validator(id)\n # Database Query\n session = self._db_session()\n existing_trainer = session.query(RegularTrainer).filter(\n RegularTrainer.trainer_id == id).first()\n if isinstance(\n existing_trainer,\n AbstractTrainer) and existing_trainer.type == \"Gym Leader\":\n existing_trainer = None\n if existing_trainer is None:\n existing_trainer = session.query(GymLeader).filter(\n GymLeader.trainer_id == id).first()\n session.close()\n\n return existing_trainer", "def _get_traj_by_id(self, itsid):\n for traj in self._trajlist:\n if traj.id == itsid:\n return traj\n return None", "def get_room(room_id):\n try:\n room_id = int(room_id)\n room_entry = read_criteria(Room,{\"id\":room_id},session)\n except ValueError:\n room_entry = None\n # if the provided id doesn't match any room in the db, return -1 to 
indicate not found\n if room_entry is None:\n room = {\"roomId\":-1}\n status_code = 404\n else:\n status_code = 200\n room = room_json(room_entry, session,app.config[\"OFFLINE_TESTING\"], login_session)\n return generate_response(room,status_code)", "def what_is(self, _id):\n for g in self.groups:\n if _id in self.h_group_ids[g]:\n return g\n return None", "def get_rel_thread(self, org_id, rel_id):\n for thread in self.get_org_question(org_id).iter('Thread'):\n if thread.attrib['THREAD_SEQUENCE'] == org_id + \"_\" + rel_id:\n return thread\n return None", "def get_by_id(cls, id):\n return cls.query().get(id)", "def get_course_by_id(course_id):\n course = Courses.query. \\\n filter_by(id=course_id). \\\n first_or_404()\n\n return course", "def modifyClassroom(classroomId, classroomName, capacity,location):\n for classroom in classroomEntities:\n if classroom[\"classroomId\"] == classroomId:\n selectedClassroom = classroom\n selectedClassroom[\"classroomName\"] = classroomName\n selectedClassroom[\"capacity\"] = capacity\n selectedClassroom[\"location\"] = location\n return True\n return False", "async def get_category(cls, session: AsyncSession, id: int) -> Optional[Category]:\n\n stmt = select(Category).where(Category.id == id)\n result = await session.execute(stmt)\n return result.scalars().first()", "def get_project(self, id):\n for project in self.projects:\n if project.id == int(id):\n ret_val = project\n break\n else:\n ret_val = None\n\n return ret_val", "def get_course_by_id(course_key, depth=0):\r\n course = modulestore().get_course(course_key, depth=depth)\r\n if course:\r\n return course\r\n else:\r\n raise Http404(\"Course not found.\")", "def get_room_name(dungeon, room):\n for n in dungeon:\n if dungeon[n] == room:\n return n\n return None", "def get_by_id(cls, item_id):\n return db_session.query(cls).filter(cls.id == item_id).first()", "def get_node_by_id(self, id):\r\n for n in self.nodes:\r\n if n.id==id:\r\n return n\r\n return None", "def get_entrance_junction(self, id):\n return self.sections[id][0]", "def get_room(self, name):\n for i in self.rooms:\n if self.rooms[i].name == name:\n return self.rooms[i]\n raise RuntimeError, \"Room '%s' not known\" % name", "def station_for_id(station_id: int) -> Optional[Dict]:\n for s in STATIONS:\n if s[\"id\"] == station_id:\n return s\n return None", "def get(self, cls, idvalue):\n result = self.imap.get(cls, idvalue)\n if result is None:\n result = self.find(cls, dict(_id=idvalue)).first()\n return result", "def get_element_from_id(self, identifier):\n classification, org, rel, com = classify_id(identifier)\n if classification == id_classification.org:\n return self.get_org_question(org)\n elif classification == id_classification.rel:\n return self.get_rel_question(org, rel)\n elif classification == id_classification.com:\n return self.get_rel_comment(org, rel, com)\n return None", "def _get_section(self, sections, section_id):\n for section in sections:\n\t if section['section_id'] == section_id:\n\t return section", "def _get_section(self, sections, section_id):\n for section in sections:\n\t if section['section_id'] == section_id:\n\t return section", "def by_id(cls, _id):\n return dbsession.query(cls).filter_by(id=_id).first()", "def by_id(cls, _id):\n return dbsession.query(cls).filter_by(id=_id).first()", "def by_id(cls, _id):\n return dbsession.query(cls).filter_by(id=_id).first()", "def getParticipant(self, discordId):\n if discordId in participants:\n return participants[discordId]\n else:\n return None", "def 
get_cab_route_by_id(self, id):\n cab_route = self.admin_repository.get_cab_route_by_id(id)\n if cab_route:\n print(\"Cab Number : {}\".format(cab_route[1]))\n print(\"Route Id : {}\".format(cab_route[2]))\n print(\"Stop Name : {}\".format(cab_route[3]))\n print(\"Stop stage : {}\".format(cab_route[4]))\n print(\"Timings : {}\".format(cab_route[5]))\n return cab_route\n else:\n print(\"Invalid Input\")\n return False", "def get_course(self):\n bib = self.get_bib()\n obj = race()\n course = find(obj.courses, name=str(bib))\n if course:\n return course\n\n # get course via group\n person = self.get_person()\n if person and isinstance(person, Person):\n if person.group:\n return person.group.course\n\n return None", "def get_thermostat_type(self, home_id: str, room_id: str) -> Optional[str]:\n for module in self.modules.get(home_id, {}).values():\n if module.get(\"room_id\") == room_id:\n return module.get(\"type\")\n\n return None", "def get_user_type_rec(_id, _class):\n try:\n return _class.objects.get(user_id=_id)\n except Exception as e:\n return None", "def find_instance(cls, identifier):\r\n for instance in cls.all:\r\n if instance.identifier == identifier:\r\n return instance\r\n return None", "def find(cls, id=None):\n return cls.query.filter_by(id=id).one_or_none()", "async def get_match_from_id(match_id: int) -> Match or None:\n if match_id is None:\n return None\n\n if match_id in match_library:\n return match_library[match_id]\n\n raw_data = await matchdb.get_raw_match_data(match_id)\n if raw_data is not None:\n return await make_match_from_raw_db_data(raw_data)\n else:\n return None", "def get_physics_object_from_id(self, id):\n for p in self.physics_objects:\n if p.canvas_id == id:\n return p", "def get_thermostat(self, room_id: str) -> Dict:\n for key, value in self.thermostats.items():\n if value[\"id\"] == room_id:\n return self.thermostats[key]\n\n raise InvalidRoom(\"No room with ID %s\" % room_id)", "def get_by_id(cls, id):\n return db.session.query(cls).get(id)", "def get_session(self, id):\n target_session = None\n for s in self.current_sessions:\n if s.game_id == id:\n target_session = s\n return target_session\n # return self.current_sessions[id]", "def get_room_by_name(name: str, context: 'GameContext') -> Room | None:\n for room in context.rooms:\n if room.name == name:\n return room\n\n # return None # default", "def get_by_id(cls, id):\n try:\n return cls.objects.get(id=id)\n except(IntegrityError, OperationalError):\n return None", "def getTeacherById(teacherId):\n for teacher in teacherEntities:\n if teacher[\"teacherId\"] == teacherId:\n return teacher.copy()\n return None", "def get_colony(self, cid):\n found = False\n for colony in self.trees:\n if colony.contains(cid):\n found = True\n break\n if found:\n return colony\n else:\n msg = \"There's no colony corresponding to {}\".format(cid)\n msg += \" in this container {}\".format(self.label)\n raise ParsingContainerError(msg)", "def get_course(self, course_key, depth=None):\r\n assert(isinstance(course_key, CourseKey))\r\n store = self._get_modulestore_for_courseid(course_key)\r\n try:\r\n return store.get_course(course_key, depth=depth)\r\n except ItemNotFoundError:\r\n return None", "def get_room_or_error(room_id, user):\n # Check if the user is logged in\n if not user.is_authenticated:\n raise ClientError(\"USER_HAS_TO_LOGIN\")\n # Find the room they requested (by ID)\n try:\n room = NotifRoom.objects.get(pk=room_id)\n except NotifRoom.DoesNotExist:\n raise ClientError(\"ROOM_INVALID\")\n # Check 
permissions\n if room.staff_only and not user.is_staff:\n raise ClientError(\"ROOM_ACCESS_DENIED\")\n return room", "def get_route_by_id(self, route_id):\n route = self.admin_repository.get_route_by_id(route_id)\n if route:\n print('''Route Id: {}\\nRoute: {}\\n\n '''.format(route[0], route[1]))\n return route\n else:\n print(\"Invalid Route Id\")\n return False", "def get_org_question(self, org_id):\n for question in self.merged_root.iter('OrgQuestion'):\n if question.attrib['ORGQ_ID'] == org_id:\n return question\n return None", "def lookup_by_class(dict_,class_):\n v = None\n for c in classlist(class_)[::-1]:\n if c in dict_:\n v = dict_[c]\n break\n return v", "def get_object(self, ObjectClass, id):\n try:\n object = ObjectClass.objects.get(id=id)\n except (ObjectClass.DoesNotExist, ObjectClass.MultipleObjectsReturned):\n object = None\n return object", "def _internal_lookup_device_by_keyid(self, keyid) -> Optional[LandscapeDevice]:\n\n self.landscape_lock.acquire()\n try:\n device = None\n if keyid in self._all_devices:\n device = self._all_devices[keyid]\n finally:\n self.landscape_lock.release()\n\n return device", "def find_project_for_story(story_id):\n\n for project in Project.all():\n story = project.load_story(story_id)\n if story is not None:\n return project\n\n #Not found\n print \"No project found for story: #{}\".format(story_id)\n return None", "def get_course(self, course_key, depth=None):\r\n assert(isinstance(course_key, SlashSeparatedCourseKey))\r\n location = course_key.make_usage_key('course', course_key.run)\r\n try:\r\n return self.get_item(location, depth=depth)\r\n except ItemNotFoundError:\r\n return None", "def from_id(self, id_):\n return next((season for season in self._seasons if season.id == id_), None)", "def get_by_id(c_id):\n return cr.get_by_id(c_id)", "def get_param_by_id(self, param_id):\n try:\n return list(filter(lambda param: param.id == param_id, self.params))[0]\n except IndexError:\n return None", "def by_oid(oid, cls=None):\n\n if cls is None:\n cls = base.Point\n\n if cls.oid == oid:\n return cls\n\n for c in cls.__subclasses__():\n cc = by_oid(oid, c)\n if cc is not None:\n return cc\n\n return None", "def get_object(self, pk):\n try:\n # x = category__job_title__program_id\n return JobCatalog.objects.get(Q(id=pk) | Q(uid=pk))\n except JobCatalog.DoesNotExist:\n raise Http404", "def get(cls, id):\n\n return cls.query.get(id)", "def get(cls, id):\n\n return cls.query.get(id)", "def find_project_for_story(story_id):\r\n\r\n for project in Project.all():\r\n story = project.load_story(story_id)\r\n if story is not None:\r\n return project\r\n\r\n #Not found\r\n print \"No project found for story: #{}\".format(story_id)\r\n return None", "def getContactOrRoomOrGroupById(self, id):\n return self.getContactById(id)\\\n or self.getRoomById(id)\\\n or self.getGroupById(id)", "def get_project(db, id):\n \n for element in db:\n if element['project_no'] == id:\n return element\n return None", "def get_place_by_id(id):\n rv = query_db('select * from places where place_id = ?',\n [id])\n return rv[0] if rv else None", "def get_object(self, id_):\n return self._objects.get(id_, None)", "async def matchStrandToClass(pdbid:str, strand_id:str)->Union[str, None]:\n CYPHER=\"\"\"match (r:RibosomeStructure{{_rcsb_id: \"{}\"}})-[]-(rp:RibosomalProtein{{entity_poly_strand_id:\"{}\"}})-[]-(n:NomenclatureClass)\n return n.class_id\"\"\".format(pdbid.upper(), strand_id)\n resp = _neoget(CYPHER)\n if len(resp) > 0:\n return resp[0]\n else:\n return None", "def 
find_by_id(cls, iid: int):\n return cls.query.filter_by(id=iid).first()", "def container_by_id(self, id):\n if not id:\n return None\n return next((container for container in self.containers(all=True)\n if container['Id'] == id), None)", "def at(cls, _id):\n return cls.where(cls.primarykey == _id)", "def class_id(self):\n return self._class_id", "def get_param_by_id(self, id_):\n try:\n return list(filter(lambda param: param.id == id_, self.params))[0]\n except IndexError:\n return None", "def course_from_id(course_id):\r\n return modulestore().get_course(course_id)", "def get_course(self):\n db = Course._file.read_db()\n courses = db[\"courses\"]\n for crs in courses:\n if crs[\"course_name\"] == self._course_name:\n return Course(**crs)\n break", "def findLocationById(cls, id):\r\n return cls.query.filter_by(id=id).first()", "def getAny(self, classname):\n data = silent_request(conn, 'GET', '/%s/' % classname)\n if data: return data[0]['id']\n else: return None", "def find(id: Union[int, str]) -> 'Wall':\n pass", "def find_category(category_id: TicketCategoryID) -> Optional[TicketCategory]:\n category = DbCategory.query.get(category_id)\n\n if category is None:\n return None\n\n return _db_entity_to_category(category)", "def get_question(self, id):\n\t\tif id < len(self.questions) and id >= 0:\n\t\t\treturn self.questions[id]\n\t\telse:\n\t\t\treturn None", "async def fetch(cls, id: Union[str, int]) -> Optional[\"Role\"]:\n query = \"\"\"SELECT * FROM roles WHERE id = $1;\"\"\"\n role = await cls.pool.fetchrow(query, int(id))\n\n if role is not None:\n role = cls(**role)\n\n return role", "def get_classifier(self, classifier_id):\n return self.session.query(self.Classifier).get(classifier_id)", "def get_person(self, id):\n if self.people is None:\n self.people = self.get_people()\n\n for person in self.people:\n if person['person']['id'] == id:\n return person['person']\n\n return None", "def get_or_none(classmodel, **kwargs):\n\ttry:\n\t\treturn classmodel.objects.get(**kwargs)\n\texcept classmodel.DoesNotExist:\n\t\treturn None", "def get_by_id(cls, id):\n e = api.get([key.Key(cls.__name__, id)])\n if e:\n return cls.from_entity(e[0])\n raise ObjectDoesNotExist", "def convthread(self, convthread_id):\n\n df = self.dfs[\"convthreads\"]\n tag_records = df[df.id == convthread_id]\n if 1 == len(tag_records): \n return tag_records.values[0]\n elif 1 < len(tag_records): \n raise Exception(\"More than one record exist by convthread_id\")\n else :\n import warnings\n warnings.warn(\"No record matched with convthread_id\", Warning)\n return None", "def get_skill_from_id(skill_id):\n return Skill.query.filter_by(id=skill_id).first()", "def getCamp(self, id):\n return self.__camps[id];", "def find_child(self, data):\n for c in self.traverse():\n if c == self: continue\n if c.data == data: return c\n return None" ]
[ "0.62929714", "0.62483174", "0.61793196", "0.6084332", "0.6035611", "0.5742967", "0.569907", "0.5663777", "0.56626433", "0.5645693", "0.5644132", "0.5593693", "0.55252326", "0.54916966", "0.54792434", "0.5459054", "0.54231", "0.5395465", "0.5352263", "0.5347712", "0.5322388", "0.530201", "0.5295201", "0.52760977", "0.5248419", "0.524741", "0.5246495", "0.5236366", "0.5229425", "0.5194463", "0.5186899", "0.51754206", "0.5173287", "0.5143409", "0.5134897", "0.51304895", "0.5121366", "0.5121366", "0.5120183", "0.5120183", "0.5120183", "0.51125836", "0.51107675", "0.5109214", "0.51031744", "0.5083707", "0.50722444", "0.5071624", "0.5056647", "0.5053996", "0.5051392", "0.50486326", "0.5047235", "0.5034744", "0.50199616", "0.5015312", "0.50057167", "0.49975964", "0.4985305", "0.49696082", "0.49682173", "0.4965422", "0.49423954", "0.4940544", "0.49329287", "0.49298817", "0.49240202", "0.49229035", "0.49224216", "0.49084386", "0.4907865", "0.4898519", "0.4898519", "0.48973244", "0.4897153", "0.48969358", "0.4894232", "0.48887542", "0.48876593", "0.4887182", "0.4877162", "0.48740277", "0.4872237", "0.48645827", "0.48644102", "0.4858271", "0.48539013", "0.48517454", "0.48513478", "0.48377094", "0.4835663", "0.48347464", "0.48312318", "0.48285803", "0.48272407", "0.48206556", "0.48120937", "0.4808427", "0.4804494", "0.47983766" ]
0.83576775
0
Store the classroom inside the classroom data list. Return True if the operation is successful
def addClassroom(classroomName, capacity, location):
    for classroom in classroomEntities:
        if classroom["classroomName"] == classroomName:
            print("Two classrooms can not have same name")
            return False

    if classroomEntities == []:
        lastSavedIdNumber = "0"
    else:
        lastSavedId = classroomEntities[-1]["classroomId"]  # update classroomId as first element in classroomEntities list
        lastSavedIdNumber = lastSavedId[2:]
    numberOfDigitsInID = 3
    if lastSavedIdNumber == "9" * len(lastSavedIdNumber):
        numberOfDigitsInID = len(lastSavedIdNumber) + 1
    classroomId = "CR" + str(int(lastSavedIdNumber) + 1).rjust(numberOfDigitsInID, "0")

    # add the new Classroom
    newClassroom = {}
    newClassroom["classroomId"] = classroomId
    newClassroom["classroomName"] = classroomName
    newClassroom["capacity"] = capacity
    newClassroom["location"] = location
    classroomEntities.append(newClassroom)
    print(f"Class Room is added into the system, Class Room id is {classroomId}.")
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modifyClassroom(classroomId, classroomName, capacity,location):\n for classroom in classroomEntities:\n if classroom[\"classroomId\"] == classroomId:\n selectedClassroom = classroom\n selectedClassroom[\"classroomName\"] = classroomName\n selectedClassroom[\"capacity\"] = capacity\n selectedClassroom[\"location\"] = location\n return True\n return False", "def save(self, force_insert=False, force_update=False, using=None,\n\t\t\t update_fields=None):\n\t\tif (self.capacity - self.occupied_sits) < 0:\n\t\t\traise ValueError(\"all sits in this classroom are occupied try other classes\")\n\t\telse:\n\t\t\tsuper(ClassRoom, self).save()", "def saveClassroomData():\n with open(\"ClassRoomData.txt\",\"wb\") as classroomData:\n pickle.dump(classroomEntities,classroomData)", "def store_data(self, data):\n self.data.append(data)", "def class_to_db(self):", "def save_data(self):\n db.session.add(self)\n db.session.commit( )", "def store_all_to_database(self, session):\n\n description = 'Established in 1974, JSM is a family-owned provider of quality apartments. We offer a variety of units from studios to five bedrooms with every location benefitting from our award winning amenities, responsive 24 hour maintenance, and friendly property management staff. JSM Development began in Champaign, IL, and manages roughly 1,500 apartments and 450,000 sq/ft of commercial space. JSM has been a major contributor to the development of Campustown in Champaign and the East Campus area in Urbana at the University of Illinois. These popular locations are now home to major national retailers such as Urban Outfitters, Chipotle, Panera, Cold Stone Creamery, and Noodles & Co.'\n\n # Insert a JSM company instance into the database\n current_company = Company(\n name='JSM',\n baseurl='https://apartments.jsmliving.com/',\n description = description\n )\n session.add(current_company)\n\n # Iterate over the apartments, storing each in the database\n for apartment in self.apartment_data:\n logging.info(\"Inserting %s to database\", apartment['name'])\n new_apartment = Apartment(\n company=current_company,\n url=apartment['url'],\n name=apartment['name'],\n bedrooms=apartment['bedrooms'],\n bathrooms=apartment['bathrooms'],\n price=apartment['price'],\n leasing_period=apartment['leasing_period'],\n description=apartment['description'],\n address=apartment['address'],\n lat=apartment['lat'],\n lng=apartment['lng']\n )\n session.add(new_apartment)\n\n # Insert images for the given apartment\n for index, image_url in enumerate(apartment['image_urls']):\n new_image = Image(\n url=image_url,\n apartment_id=new_apartment.id,\n type=0,\n image_index=index\n )\n session.add(new_image)\n\n # Connect images to apartment\n new_apartment.images.append(new_image)\n\n # Insert floorplan image, if it exists\n if apartment['floorplan_url'] != 0:\n new_floorplan_image = Image(\n url=apartment['floorplan_url'],\n apartment_id=new_apartment.id,\n type=1,\n image_index=len(apartment['image_urls'])\n )\n session.add(new_floorplan_image)\n\n # Connect images to apartment\n new_apartment.images.append(new_floorplan_image)\n\n # Insert amenities for the given apartment\n for amenity in apartment['amenities']:\n new_amenity = Amenity(\n apartment_id=new_apartment.id,\n amenity=amenity\n )\n session.add(new_amenity)\n\n # Connect amenity to apartment\n new_apartment.amenities.append(new_amenity)\n\n # Write all queries to the database\n session.commit()", "def deleteClassroom(classroomId):\n for classroom in classroomEntities:\n if 
classroom[\"classroomId\"] == classroomId:\n selectedClassroom = classroom\n classroomEntities.remove(selectedClassroom)\n return True\n return False", "def add_room(self, data):\n room_id = data['room_id']\n x, y = literal_eval(data['coordinates'])\n room_data = {'id': data['room_id'],\n 'title': data['title'],\n 'description' : data['description'],\n 'coordinates': literal_eval(data['coordinates']),\n 'elevation': data['elevation'],\n 'terrain': data['terrain'],\n 'exits' : {direction: '?' for direction in data['exits']}\n }\n self.rooms.setdefault(room_id, room_data)", "def store(self):\n\n pass", "def test_if_data_can_be_saved(self):\n object_count = Room.query.count()\n\n room = Room(name='Jinja', room_type='meeting',\n capacity=5,\n location_id=1,\n calendar_id='andela.com_3836323338323230343935@resource.calendar.google.com', # noqa: E501\n image_url=\"https://www.officelovin.com/wp-content/uploads/2016/10/andela-office-main-1.jpg\") # noqa: E501\n room.save()\n\n new_count = Room.query.count()\n\n self.assertNotEquals(object_count, new_count)\n assert object_count < new_count", "def save_room(self, room_name, room_no_of_members, this_room_type):\n cursor = self.cur()\n cursor.execute('INSERT INTO room (name, no_of_members, room_type) VALUES(?, ?, ?)', (room_name, room_no_of_members, this_room_type)\n )", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='L'\n )", "def save(self, case) -> bool:\n if case:\n key = case_key(case)\n case.key = key\n self.cases[key] = case\n the_redis = DARedis()\n the_redis.set_data(self.user_cases_key, self.cases)\n return True", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='O'\n )", "async def save(self) -> None:\n if not hasattr(self, 'errors'):\n raise RuntimeError('you must call is_valid() before save instance')\n if self.errors:\n raise RoomValidationError(self.errors)\n if hasattr(self, '_id'):\n data = self.loads()\n room_id = data.pop('_id')\n await room_collection.replace_one({'_id': room_id}, data)\n else:\n result = await room_collection.insert_one(self.loads())\n self._id = result.inserted_id", "def callback_object(self, data):\n\n try:\n # TODO support multiple of the same object\n # Save an array of object locations\n self.redis.set(self.prefix+\"_\"+data.name, json.dumps([{\n \"name\": data.name,\n \"time\": data.time,\n \"x\": data.x,\n \"y\": data.y,\n \"z\": data.z\n }]))\n except:\n rospy.logerr(\"Cannot insert row\")", "def save(self, data):\n data['id'] = self.id\n\n self.db.append(data)", "def test_PUT_room(self):\n\t\t# 1)\n\t\tself.POST_room()\n\t\t# 2)\n\t\tNEW_ROOM_DATA = {'count': '3', 'name': 'NEW-ROOM-NAME'}\n\t\trv = self.PUT_data('/api/room/' + self.room_id, NEW_ROOM_DATA)\n\t\t# 3)\n\t\tdata = self.GET_data('/api/room/' + self.room_id)\n\t\tself.assertDataMatch(TEST_ROOM_DATA, data, ['type'])\n\t\t# 4)\n\t\tself.assertDataMatch(NEW_ROOM_DATA, data, NEW_ROOM_DATA.keys())\n\t\tself.validate_last_modified(data)", "def put(self,data):\n\n \n try:\n\n db = getDatabase()\n connection = db.connect()\n \n connection.put(self,data)\n except Exception as e:\n raise e\n finally:\n db.dispose()", "def store(self) -> None:\n # Store the centroids\n if self._centroids != {}:\n with open(self._path_model / f\"{self}\", 'w') as file:\n json.dump({k: v.tolist() for k, v in self._centroids.items()}, file, sort_keys=True)\n else:\n print(\"No centroids created yet to store!\")\n \n # Store the (validation) 
clusters\n with open(self._path_data / f\"{self}-train\", 'w') as file:\n json.dump(self._clusters, file, indent=2, sort_keys=True)\n with open(self._path_data / f\"{self}-val\", 'w') as file:\n json.dump(self._clusters_val, file, indent=2, sort_keys=True)", "def save(self):\n self.lock.acquire()\n try:\n self.xml.set(\"name\",self.name)\n self.xml.set(\"room\",self.room)\n self.xml.set(\"type\",self.type)\n self.xml.find(\"address\").text = \":\".join([str(x) for x in self.address])\n if self.pos is not None:\n self.xml.find(\"pos\").text = \" \".join([str(x) for x in self.pos])\n self.xml.find(\"icon\").text = self.icon\n \n finally:\n self.lock.release()\n \n self.house.save_devices()", "def save_data(self, new):\n db = self.check_db()\n db.append(new)\n\n return db", "def put(data):", "def test_insert(self):\n c = city.City(name=\"Freiburg\")\n p1 = city.Citizen(name=\"Peter\")\n p2 = city.Citizen(name=\"Georg\")\n c.add(p1, p2, rel=city.hasInhabitant)\n\n with DataspaceSession(URI) as session:\n wrapper = city.CityWrapper(session=session)\n wrapper.add(c)\n session.commit()\n\n check_state(self, c, p1, p2, db=DB)", "def booking(self, customer, room):\n self.room[room] = customer\n return True", "def _provision(self, data):\n count = 0\n for (key, value) in {**data}.items():\n if hasattr(self, key):\n count += 1\n setattr(self, key, data.pop(key))\n return count > 0", "def store_if_new(self, act_list):\n self.create_connection()\n c = self.get_db_cursor()\n for act in act_list:\n strava_id = act.get_strava_id()\n ride_data = (strava_id, act.get_athlete(), act.get_name(),\n act.get_gmt_date(), act.get_elapsed_time(), act.get_distance(),\n act.get_elevation(), act.get_ride_type(), act.get_trainer_ride())\n sql = 'INSERT INTO rides VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
'\n sql += ' WHERE NOT EXISTS(SELECT id FROM rides WHERE rides.id = %s' % strava_id\n c.execute(sql, ride_data)\n self.commit_and_close()", "def add(self, workout, database):\n if not database.session:\n logger.error(\"no database session\")\n return False\n\n self.cleanup_sportstype(workout)\n self.associate_sport(database)\n id = database.session.query(SportsType.id).filter(\n SportsType.name == self.name).first()\n if id:\n self.id = id[0]\n return False\n else:\n try:\n database.session.add(self)\n database.session.flush()\n except exc.SQLAlchemyError as e:\n logger.error(\"Database error: {}\".format(e.args))\n return False\n logger.info(\"Adding new sportstype '{}' id {} of sport {}\".format(\n self.name, self.id, self.sport_id))\n return True", "def _update_class(self, course, semester, year):\n\n if cache_result := cache.get(f'no classes {course.id}'):\n print(f'no classes found for course {course.id} at {cache_result}')\n return\n\n # Get response from SIS class resource\n response = sis_class_resource.get(\n semester=semester,\n year=year,\n course_id=course.id,\n abbreviation=course.abbreviation,\n course_number=course.course_number,\n )\n\n if len(response) == 0:\n cache.add(f'no classes {course.id}', datetime.datetime.now(), timeout=7 * 24 * 60 * 60)\n print(f'no classes found for course {course.id}')\n return\n\n updated_section_ids = set()\n primary_sect_id_to_sections = defaultdict(list)\n\n # Map response to Section and Enrollment objects and persist to database\n section_extras = {\n 'course_id': int(course.id),\n 'abbreviation': course.abbreviation,\n 'course_number': course.course_number,\n 'semester': semester,\n 'year': year,\n }\n for sect in response:\n if not sect:\n continue\n section_dict = section_mapper.map(sect, extras=section_extras)\n section, created = self.update_or_create_from_dict(section_dict)\n if not section:\n continue\n\n updated_section_ids.add(section.id)\n\n if section_dict['primary_section']:\n primary_sect_id_to_sections[section_dict['primary_section']].append(section)\n\n # Update enrollment\n if semester != 'summer' and section.is_primary and not section.disabled:\n enrollment_dict = enrollment_mapper.map(sect, extras={'section_id': section.id})\n enrollment_service.update_or_create_from_dict(enrollment_dict)\n\n # Add associations between primary and non-primary sections\n for related_sections in primary_sect_id_to_sections.values():\n primary_section = [s for s in related_sections if s.is_primary][0]\n other_sections = [s for s in related_sections if not s.is_primary]\n primary_section.associated_sections.add(*other_sections)\n for section in related_sections:\n section.save()\n\n if len(updated_section_ids) > 0:\n print({\n 'message': 'Updated sections for course',\n 'course': course,\n 'sections updated': len(updated_section_ids),\n })\n\n # Disable existing section if data not found in response\n sections_to_disable = Section.objects.filter(\n course_id=course.id,\n semester=semester,\n year=year,\n ).exclude(id__in=updated_section_ids)\n for section in sections_to_disable:\n if not section.disabled:\n section.disabled = True\n section.save()\n print({\n 'message': 'Disabling section not in API response.',\n 'section': section,\n })\n\n # Update derived enrollment fields in course object\n course_service._update_derived_enrollment_fields(course)", "def store(self, key, a):\n if key in self.SMGData.keys():\n self.SMGData[key] = a\n else:\n raise Exception('Key does not exist in the data structure')", "def save(self):\n 
self.logger.debug(\"In save.\")\n\n if not self.is_valid():\n self.logger.error(\"Cannot save, data is invalid\")\n return False\n\n session = iHMPSession.get_session()\n self.logger.info(\"Got iHMP session.\")\n\n success = False\n\n if self.id is None:\n # The document has not yet been saved\n prep_data = self._get_raw_doc()\n self.logger.info(\"Got the raw JSON document.\")\n\n try:\n self.logger.info(\"Attempting to save a new node.\")\n node_id = session.get_osdf().insert_node(prep_data)\n self.logger.info(\"Save for HostSeqPrep %s successful.\", node_id)\n self.logger.info(\"Setting ID for HostSeqPrep %s.\", node_id)\n\n self._set_id(node_id)\n self._version = 1\n success = True\n except Exception as insert_exception:\n self.logger.error(\"An error occurred while inserting \" + \\\n \"%s %s. Reason: %s\", __name__, self._id,\n insert_exception\n )\n else:\n prep_data = self._get_raw_doc()\n\n try:\n self.logger.info(\"Attempting to update %s with ID: %s.\", __name__, self._id)\n session.get_osdf().edit_node(prep_data)\n self.logger.info(\"Update for %s %s successful.\", __name__, self._id)\n success = True\n except Exception as edit_exception:\n self.logger.error(\"An error occurred while updating %s \" + \\\n \" %s. Reason: %s\", __name__, self._id,\n edit_exception\n )\n\n return success", "def sync_venture_role_to_ralph3(data):\n creating = False\n try:\n conf_class = ImportedObjects.get_object_from_old_pk(\n ConfigurationClass, data['id']\n )\n except ImportedObjectDoesNotExist:\n creating = True\n conf_class = ConfigurationClass()\n logger.info(\n 'Configuration class {} ({}) not found - creating new one'.format(\n data['name'], data['id']\n )\n )\n\n try:\n conf_class.module = ImportedObjects.get_object_from_old_pk(\n ConfigurationModule, data['venture']\n )\n except ImportedObjectDoesNotExist:\n logger.error(\n 'Venture with old_pk={} not found for role {}'.format(\n data['venture'], data['id']\n )\n )\n return\n\n conf_class.class_name = data['name']\n conf_class.save()\n if creating:\n ImportedObjects.create(conf_class, data['id'])\n logger.info('Synced configuration class {}'.format(conf_class))", "def add(self, data, check_exists=True): # pragma: no cover\n raise NotImplementedError", "def save(self):\n data = self.serialize()\n\n self.validate(data)\n\n saved_data = DATABASE_CONNECTION.insert(self.__class__.__name__, data)\n\n self.__dict__.update(saved_data)", "def save_class_list():\r\n try:\r\n classStringList.clear() #clear the classString List\r\n for i in range(0,len(classes)):\r\n classStringList.append(classes[i].csvRow()) #enter classes to the classStringList from the classes\r\n f = open(\"mySchedule.csv\", 'w', newline ='')\r\n csv.writer(f).writerow([\"Day\", \"Class\", \"Start Time\", \"End Time\"])\r\n for classCSVString in classStringList:\r\n csv.writer(f).writerow(classCSVString)\r\n f.close()\r\n except Exception as e:\r\n print(\"Exception found:\" + e)", "def store_object(self, _object):\n\n # replace an existing list member, else, append\n\n index = [self.object_store.index(_object_) for _object_ in self.object_store if _object_.LocalID == _object.LocalID]\n\n if index != []:\n\n self.object_store[index[0]] = _object\n\n #if self.settings.LOG_VERBOSE: logger.debug('Updating a stored object: %s in region \\'%s\\'' % (_object.FullID, self.region.SimName))\n\n else:\n\n self.object_store.append(_object)\n\n #if self.settings.LOG_VERBOSE: logger.debug('Stored a new object: %s in region \\'%s\\'' % (_object.LocalID, self.region.SimName))", "def store(self, 
subj):\n if subj in self.__lst:\n raise ValueError('Disciplina exista deja')\n self.__lst.append(subj)", "def saveDB(self):\n job_obj = JobData.objects.all()\n new_id = len(job_obj)+1\n print new_id\n\n newjob = JobData()\n\n for job in self.job_list:\n newjob.job_id = new_id\n newjob.tool_name = job['jobname']\n newjob.tool_id = job['job_id']\n newjob.save()\n\n return", "def save_data(self):\n data = self.data\n if data is not None:\n data = base64.encodestring(pickle.dumps(data))\n connection = self._open_db()\n cursor = connection.cursor()\n cursor.execute('UPDATE sessions SET data = ? WHERE id = ?;',\n (data, self.sid))\n cursor.close()\n connection.commit()\n connection.close()", "def storeMolecule():\n pass", "def store_data(job_list):\n\n if not job_list:\n raise ValueError('Job list is empty. To proceed, it must contain at least one item.')\n \n if not isfile('/data/visited_jobs.db'):\n print('DB not found')\n ds.create_db()\n\n \n accepted, not_accepted = 0, 0\n\n for job in job_list:\n job.hash = h.get_hash(job.description)\n if h.is_seen(job.hash):\n not_accepted += 1\n continue\n elif f.accepted_title(job.title) and f.accepted_level(job.level) and f.accepted_description(job.description):\n job.accepted = True\n ds.insert_job(job)\n accepted += 1\n else:\n job.accepted = False\n ds.insert_job(job)\n not_accepted += 1\n print(f'Jobs accepted: {accepted}\\nJobs not accepted: {not_accepted}')", "def test_update(self):\n c = city.City(name=\"Paris\")\n p1 = city.Citizen(name=\"Peter\")\n c.add(p1, rel=city.hasInhabitant)\n\n with DataspaceSession(URI) as session:\n wrapper = city.CityWrapper(session=session)\n cw = wrapper.add(c)\n session.commit()\n\n p2 = city.Citizen(name=\"Georg\")\n cw.add(p2, rel=city.hasInhabitant)\n cw.name = \"Freiburg\"\n session.commit()\n\n check_state(self, c, p1, p2, db=DB)", "def insert(self, val):\n if val in self.record:\n return False\n \n self.record[val] = len(self.data)\n self.data.append(val)\n return True", "def testAssignClassifications(self):\n classifications = [c.UID for c in self.directory.getClassifications()]\n self.person.setClassifications(classifications)\n for c in self.person.getClassifications():\n self.failUnless(c.id in ['faculty', 'staff', 'grad-students'])\n self.failUnlessEqual(c.Type(), 'Classification')", "def save(self):\n if not connection.connected:\n raise Exception('Not connected to the database.')\n if not self._retrieved:\n self.insert()\n self._retrieved = True\n else:\n self.update()", "def save_data(self):\n pass", "def write_new_club(name, description, categories):\n clubs = read_json()\n\n if name in [club[\"name\"] for club in clubs]: # if club already exists, update it\n\n for i, club in enumerate(clubs):\n if name == club[\"name\"]:\n updated_club = clubs[i]\n updated_club[\"name\"] = name\n updated_club[\"description\"] = description\n updated_club[\"categories\"] = categories\n del clubs[i]\n clubs.append(updated_club)\n break # stop when correct club is found\n\n write_json(clubs)\n return True\n else: \n club_json = {\"name\": name, \"categories\": categories, \"description\": description,\n \"favourites\": 0}\n clubs.append(club_json) # add new club if it doesn't exist\n write_json(clubs)\n\n existing_comments = get_all_comments()\n existing_comments[name] = [] # add the new club to the comments JSON file.\n\n return False", "def assign_room(self, person, room):\n if self.all_rooms[room]['room'].room_type == \"OfficeSpace\":\n person.set_office(room)\n occupant = person.name + \"\\t\" + person.email\n 
self.offices[room]['room'].allocate_room_space()\n self.offices[room]['occupants'].append(occupant)\n\n elif self.all_rooms[room]['room'].room_type == \"LivingSpace\":\n if not person.set_livingspace(self.living_spaces[room]['room'].name) == -1:\n occupant = person.name + \"\\t\" + person.email\n self.living_spaces[room]['room'].allocate_room_space()\n self.living_spaces[room]['occupants'].append(occupant)", "def __setitem__(self, (essid, key), results):\n with SessionContext(self.SessionClass) as session:\n q = session.query(ESSID_DBObject)\n essid_obj = q.filter(ESSID_DBObject.essid == essid).one()\n session.add(PYR2_DBObject(essid_obj, key, results))\n try:\n session.commit()\n except sql.exc.IntegrityError:\n # Assume we hit a concurrent insert that causes\n # a constraint-error on (essid-key).\n session.rollback()\n q = session.query(PYR2_DBObject).join(ESSID_DBObject)\n q = q.filter(sql.and_( \\\n ESSID_DBObject.essid == essid_obj.essid, \\\n PYR2_DBObject.key == key))\n result_obj = q.one()\n result_obj.pack(results)\n session.commit()", "def add_record(self, data):\n if self.current_trip is None:\n print \"no trip to add data\"\n return\n self.current_trip.store_data(data)", "def save(self):\n try:\n db.session.add(self)\n db.session.commit()\n return True\n except SQLAlchemyError as e:\n db.session.rollback()\n logger.error(\"database operation error: \", e)\n return False", "def insert_data(self):\n\n pass", "def add_data(self, data):\n for i, row in enumerate(self._grid):\n for j, column in enumerate(row):\n if self._grid[i][j] is None:\n self._grid[i][j] = data\n return True\n return False", "def save_data(self, record):\n self.dbm.addRecord(record)", "def test_insert_data(self):\n self.engine.insert_data(self.correct_camper_data)\n self.assertDictEqual(\n self.ds.store,\n {\n 3: Camper(**{\n \"id\": 3,\n \"latitude\": 38.7436883,\n \"longitude\": -9.1952226,\n \"price_per_day\": 85.5,\n \"weekly_discount\": 0.25\n })\n })", "def add():\n prev_courses = Course._file.read_db()\n course_name = input(\"Please, type course name >\")\n # check course for uniqueness/ instantiating blank class with one attribute\n c = Course(course_name)\n if c.is_course_exists():\n print(\"{} is already exists\".format(course_name))\n return\n\n prev_courses[\"courses\"].append({\n \"course_name\": course_name,\n \"teacher\": input(\"Please, type teacher's email >\"),\n \"total_place\": int(input(\"Please, type total enrolled number >\")),\n \"students\": []\n })\n Course._file.write_db(prev_courses)\n print(\"New course - {} is added\".format(course_name))\n return", "def update(self, user):\n\n\t\tif self == user.classroom:\n\t\t\treturn\n\n\t\tself.size += user.classroom.size\n\t\tuser.set_classroom(self)", "def save(self):\n try:\n db.session.add(self)\n db.session.commit()\n return True\n except SQLAlchemyError as error_message:\n app_logger.error(error_message)\n return False", "def save(cls):\n playerdata = getAttributes(cls)\n Data.object_dump(playerdata, \"savedata.dat\")\n del playerdata", "def saveData(self):\n pass", "def add_vertex(self, room):\r\n if room['room_id'] not in self.rooms:\r\n self.rooms[room['room_id']] = room\r\n # self.rooms[room['room_id']]['exits'] = {\r\n # d: '?' 
for d in room['exits']}\r", "def insert_chromosome(self, chromosome, index):\n if chromosome is None:\n #No class is schedule in that time slot \n if self.chromo_list[index] is None:\n #Create new list with empty chromosome object\n new_list = [Chromsome()]\n #Assigns new_chromo the empty chromosome object\n new_chromo = new_list[0]\n #Inserts the new list into the master chromosome list\n self.chromo_list.insert(index,new_list)\n \n \n #Class is already scheduled in the time slot\n else:\n #Get the existing list\n exist_list = self.chromo_list[index]\n #Append empty chromosome object to end of list\n exist_list.append(Chromosome())\n #Assigns new_chromo the empty chromosome object\n new_chromo = exist_list[-1]\n #Sets overlap to be true because another class is\n #scheduled at the same time\n new_chromo.overlap = True\n #Reassigns the list in chromo_list\n self.chromo_list[index] = exist_list\n\n #Returns pointer to the inserted chromosome\n return new_chromo\n\n else:\n #No class is schedule in that time slot \n if self.chromo_list[index] is None:\n #Inserts the existing chromosome into an empty list\n new_list = [chromosome]\n #Assigns new_chromo to the inserted object\n new_chromo = new_list[0]\n #Inserts the new list into the master chromosome list\n self.chromo_list[index] = new_list\n\n #Class is already scheduled in the time slot\n else:\n #Gets the existing list\n exist_list = self.chromo_list[index]\n #Adds the existing chromosome to the end of the existing list\n exist_list.append(chromosome)\n #Assigns new_chromo to the newly inserted chromosome\n new_chromo = exist_list[-1]\n #Inserts the existing list back into the master chromosome list\n self.chromo_list[index] = exist_list\n\n #Returns pointer to the chromosome that has been inserted\n return new_chromo", "def insert(self, data):\r\n pass", "def _write(self, data):\n self.db.append(data)\n\n with open(self.DB_FILE, 'w') as outfile:\n json.dump(self.db, outfile)", "def save_to_users(self):\n Data.add_data(self.user_data())", "def insert(self, row):\n if not self.loaded:\n print(\"Database is not loaded\")\n return False\n\n self.rows.append(row)\n return True", "def put(self):\n pass", "def put(self):\n pass", "def upsert_location(self, location):", "def _put(self, key, data):\n path = self._get_key_path(key)\n with open(path, \"wb\") as pickle_file:\n pickle.dump(data, pickle_file)", "def save(self):\n self.remove()\n self._data.append(self)", "def helpClassroom(classroomId):\n selectedClassroomCopy = getClassroomById(classroomId)\n print(\"Class Id: \" + selectedClassroomCopy[\"classroomId\"])\n print(\"Name: \" + selectedClassroomCopy[\"classroomName\"])\n print(\"Capacity: \" + selectedClassroomCopy[\"capacity\"])\n print(\"Location: \" + selectedClassroomCopy[\"location\"])\n return True", "def put(self):\n return", "def service_mange(self, room, service):\n self.room[room] = service\n return True", "def test_user_enrolled_to_classroom(self):\n self.client.force_authenticate(self.global_user_1)\n data = self.client.get(self.api_classroom_detail_url, {'user': 'current'}).data\n\n self.assertTrue(data.get('enrolled'))", "def store(self, idCust, idBook, flag, id):\n allR=self.__loadFromFile()\n\n rt=Rent( idBook,idCust, flag, id)\n if rt in allR:\n raise RepositoryExceptionRent(\"\\n Duplicated id \\n\".upper())\n\n\n allR.append(rt)\n self.__storeToFile(allR)", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def 
save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def _update(self, course_name: str, newdata: ParseType) -> None:\n\n self.courses[course_name] = newdata", "def save(self):\n self.logger.debug(\"In save.\")\n\n # If node previously saved, use edit_node instead since ID\n # is given (an update in a way)\n # can also use get_node to check if the node already exists\n if not self.is_valid():\n self.logger.error(\"Cannot save, data is invalid.\")\n return False\n\n session = iHMPSession.get_session()\n self.logger.info(\"Got iHMP session.\")\n\n osdf = session.get_osdf()\n\n success = False\n\n if self._id is None:\n self.logger.info(\"About to insert a new %s OSDF node.\", __name__)\n\n # Get the JSON form of the data and load it\n self.logger.debug(\"Converting %s to parsed JSON form.\", __name__)\n data = json.loads(self.to_json())\n\n try:\n node_id = osdf.insert_node(data)\n\n self._set_id(node_id)\n self._version = 1\n success = True\n except Exception as save_exception:\n self.logger.exception(save_exception)\n self.logger.error(\"An error occurred when saving %s.\", self)\n else:\n self.logger.info(\"%s already has an ID, so we \" + \\\n \"do an update (not an insert).\", __name__)\n\n try:\n node_data = self._get_raw_doc()\n self.logger.info(\"%s already has an ID, so we do an \" + \\\n \"update (not an insert).\", __name__)\n node_id = self._id\n self.logger.debug(\"%s OSDF ID to update: %s.\", __name__, node_id)\n osdf.edit_node(node_data)\n\n node_data = osdf.get_node(node_id)\n latest_version = node_data['ver']\n\n self.logger.debug(\"The version of this %s is now: %s\",\n __name__, latest_version\n )\n self._version = latest_version\n success = True\n except Exception as update_exception:\n self.logger.exception(update_exception)\n self.logger.error(\"An error occurred when updating %s.\", self)\n\n return success", "def test_0_put(self):\n self.assertIsNotNone(save_node_info(self.node.name, self.node))", "def put(self, data: Analyzable, data_category: DatasetCategory = None):\n\n if self.finalized:\n raise RuntimeError(\"Cannot add more elements to a finalized database.\")\n\n # If full\n if self.__get_total_size() >= self.max_data:\n return\n\n # Automatically split sets using split ratio\n if data_category is None:\n if self.__get_training_size() == 0:\n self.__add_to_training(data)\n elif self.__get_testing_size() == 0:\n self.__add_to_testing(data)\n else:\n self.__add_to_training(data) \\\n if self.__get_current_split_ratio() <= self.split_ratio \\\n else self.__add_to_testing(data)\n\n # Manually assign data\n elif data_category == DatasetCategory.TRAINING:\n self.__add_to_training(data)\n elif data_category == DatasetCategory.TESTING:\n self.__add_to_testing(data)", "async def put(self, collection, key, data):\n _LOGGER.debug(\"Putting %s to memory\", collection, key)\n if self.databases:\n for database in self.databases:\n await database.put(collection, key, data)", "async def put(self, collection, key, data):\n raise NotImplementedError", "def POST_room(self):\n\t\tif not self.list_id:\n\t\t\tself.POST_list()\n\t\trv = self.POST_data('/api/list/' + self.list_id + '/room', data=TEST_ROOM_DATA)\n\t\tself.assertEqual(rv.status_code, 200)\n\t\tself.room_id = json.loads(rv.data)['_id']", "def test_change_classroom_specific_for_coach_pt1(self):\n self.assertTrue(self.coach2.has_perm('auth.change_classroom', self.classrooms[1]))", "def Persist(self) -> bool:", "def Persist(self) -> bool:", 
"def _persist(self):\n trunk.set(self.uuid, self.json)", "def update(self):\n self.haveClub = len(self.clubs()) > 0", "def save(self) -> bool:\n title = self.line_edit_title.text().strip()\n if title == \"\":\n QMessageBox.information(self,\n self.tr(\"Information\"),\n self.tr(\"Title field is empty\"))\n return False\n\n lecturer = self.line_edit_lecturer.text().strip()\n if lecturer == \"\":\n QMessageBox.information(self,\n self.tr(\"Information\"),\n self.tr(\"Lecturer field is empty\"))\n return False\n\n classes = self.line_edit_classes.text().strip()\n if classes == \"\":\n QMessageBox.information(self,\n self.tr(\"Information\"),\n self.tr(\"Classes field is empty\"))\n return False\n\n pair_type = self.combo_box_type.currentData(Qt.UserRole)\n subgroup = self.combo_box_subgroup.currentData(Qt.UserRole)\n\n start_time = self.combo_box_start.currentText()\n end_time = self.combo_box_end.currentText()\n\n if self.list_widget_date.count() == 0:\n QMessageBox.information(self,\n self.tr(\"Information\"),\n self.tr(\"No dates\"))\n return False\n\n new_pair = StudentPair()\n new_pair[\"title\"].set_title(title)\n new_pair[\"lecturer\"].set_lecturer(lecturer)\n new_pair[\"type\"].set_type(pair_type)\n new_pair[\"classroom\"].set_classroom(classes)\n new_pair[\"subgroup\"].set_subgroup(subgroup)\n new_pair[\"time\"].set_time(start_time, end_time)\n for date in self._dates:\n new_pair[\"dates\"].add_date(date)\n\n self._edit_pair = new_pair\n\n return True", "def store_data(self, data):\n if not self.light.hasAttr(self.custom_data_storage_attr_name):\n pm.addAttr(\n self.light,\n ln=self.custom_data_storage_attr_name,\n dt='string'\n )\n\n self.light.setAttr(self.custom_data_storage_attr_name, data)", "def test_POST_room(self):\n\t\tself.POST_list()\n\t\trooms_list_before = self.GET_data('/api/list/' + self.list_id)['rooms']\n\n\t\t# after posting list, cleaner's lists should contain id of posted list\n\t\trv = self.app.post('/api/list/' + self.list_id + '/room')\n\t\troom_id = json.loads(rv.data)[\"_id\"]\n\t\trooms_list_after = self.GET_data('/api/list/' + self.list_id)['rooms']\n\t\tself.assertEqual(len(rooms_list_before) + 1, len(rooms_list_after))\n\t\tself.assertTrue(room_id in rooms_list_after)", "def enqueue(self, data):\n # Checking to avoid duplicate entry (not mandatory)\n if data not in self.queue:\n self.queue.insert(0, data)\n return True\n return False", "def do_update(self, args):\n args = args.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n if len(args) == 2:\n print(\"** attribute name missing **\")\n return\n if len(args) == 3:\n print(\"** value missing **\")\n return\n if args[0] not in HBNBCommand.valid_classes.keys():\n print(\"** class doesn't exist **\")\n return\n all_objs = storage.all(args[0])\n for k, v in all_objs.items():\n if k == args[1]:\n setattr(v, args[2], args[3])\n storage.save()\n return\n print(\"** no instance found **\")", "def save_config(self):\n try:\n print(\"Clearing active users\")\n for room in self.rooms:\n room.room_attrbts['active'].clear()\n print('Saving config...')\n print(\"Known clients:\")\n self.pp.pprint(self.clients)\n print(\"Known rooms:\")\n for room in self.rooms: \n self.pp.pprint(room.name)\n self.pp.pprint(room.room_attrbts)\n path = os.environ.get('HOME') + '/.tinyserver'\n roomJSON = jsonpickle.encode(self.rooms)\n with open(path, 'w') as f:\n json.dump(roomJSON, f)\n except Exception as e:\n print(\"Error saving config!! 
{0}\".format(e))", "def add(self, data):\n if self._filter(data):\n id = self.db._generate_id(data)\n \n if not id == None:\n if self.db._store:\n self.db.append(id, str(data))\n print id, \"stored to\", self.db._generate_path(id)\n else:\n print id\n print data.show2()" ]
[ "0.6729376", "0.6159639", "0.6049469", "0.5342558", "0.5292166", "0.52788186", "0.5239066", "0.5235893", "0.5207037", "0.52025837", "0.5175275", "0.5164154", "0.51413965", "0.5103515", "0.51000875", "0.50629807", "0.5052926", "0.5046601", "0.5038495", "0.50214577", "0.49945176", "0.49926698", "0.4989717", "0.49747926", "0.4972683", "0.49703386", "0.49688008", "0.49534214", "0.4934908", "0.49208352", "0.49129063", "0.4912398", "0.49059764", "0.48936155", "0.48828727", "0.4870689", "0.48503911", "0.48428732", "0.48304307", "0.48286656", "0.482229", "0.48168722", "0.48014113", "0.47995642", "0.47973287", "0.4792908", "0.47733656", "0.476405", "0.47635403", "0.47612503", "0.47602737", "0.4752108", "0.4734987", "0.47296786", "0.47226807", "0.47225302", "0.4719795", "0.47164884", "0.47108048", "0.47091863", "0.47056615", "0.47023827", "0.4702106", "0.46979946", "0.46966076", "0.4691684", "0.4680219", "0.4677776", "0.4677776", "0.4666979", "0.4663788", "0.46619245", "0.46578616", "0.46519375", "0.46499875", "0.46466786", "0.46450567", "0.46429247", "0.46429247", "0.46429247", "0.46429247", "0.46412945", "0.46389568", "0.46376866", "0.46308511", "0.4622808", "0.46200833", "0.46175286", "0.46127576", "0.46112645", "0.46112645", "0.46074688", "0.46051687", "0.45924056", "0.458859", "0.45881793", "0.45865571", "0.45839104", "0.45777318", "0.4572476" ]
0.75351626
0
Modify the content of an already stored classroom. Return True if the operation is successful
def modifyClassroom(classroomId, classroomName, capacity, location):
    for classroom in classroomEntities:
        if classroom["classroomId"] == classroomId:
            selectedClassroom = classroom
            selectedClassroom["classroomName"] = classroomName
            selectedClassroom["capacity"] = capacity
            selectedClassroom["location"] = location
            return True
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def addClassroom(classroomName, capacity,location):\n for classroom in classroomEntities:\n if classroom[\"classroomName\"] == classroomName:\n print(\"Two classrooms can not have same name\")\n return False\n\n if classroomEntities==[]:\n lastSavedIdNumber = \"0\"\n else:\n lastSavedId=classroomEntities[-1][\"classroomId\"] #update classroomId as first element in classroomEntities list\n lastSavedIdNumber=lastSavedId[2:]\n numberOfDigitsInID = 3\n if lastSavedIdNumber == \"9\" * len(lastSavedIdNumber):\n numberOfDigitsInID = len(lastSavedIdNumber) + 1\n classroomId=\"CR\"+str(int(lastSavedIdNumber)+1).rjust(numberOfDigitsInID,\"0\")\n\n # add the new Classroom\n newClassroom = {}\n newClassroom[\"classroomId\"] = classroomId\n newClassroom[\"classroomName\"] = classroomName\n newClassroom[\"capacity\"] = capacity\n newClassroom[\"location\"] = location\n classroomEntities.append(newClassroom)\n print(f\"Class Room is added into the system, Class Room id is {classroomId}.\")\n return True", "def save(self, force_insert=False, force_update=False, using=None,\n\t\t\t update_fields=None):\n\t\tif (self.capacity - self.occupied_sits) < 0:\n\t\t\traise ValueError(\"all sits in this classroom are occupied try other classes\")\n\t\telse:\n\t\t\tsuper(ClassRoom, self).save()", "def update(self):\n return True", "def test_update(self):\n c = city.City(name=\"Paris\")\n p1 = city.Citizen(name=\"Peter\")\n c.add(p1, rel=city.hasInhabitant)\n\n with DataspaceSession(URI) as session:\n wrapper = city.CityWrapper(session=session)\n cw = wrapper.add(c)\n session.commit()\n\n p2 = city.Citizen(name=\"Georg\")\n cw.add(p2, rel=city.hasInhabitant)\n cw.name = \"Freiburg\"\n session.commit()\n\n check_state(self, c, p1, p2, db=DB)", "def update_content(self):\n raise NotImplementedError", "def update(self, user):\n\n\t\tif self == user.classroom:\n\t\t\treturn\n\n\t\tself.size += user.classroom.size\n\t\tuser.set_classroom(self)", "def do_update(self, arg):\n if len(arg) == 0:\n print(\"** class name missing **\")\n return\n coms = tuple(arg.split())\n if coms[0] not in self.cls:\n print(\"** class doesn't exist **\")\n elif len(coms) < 2:\n print(\"** instance id missing **\")\n return\n obj = coms[0] + \".\" + coms[1]\n if obj not in storage.all().keys():\n print(\"** no instance found **\")\n elif len(coms) < 3:\n print(\"** attribute name missing **\")\n elif len(coms) < 4:\n print(\"** value missing **\")\n else:\n typecast = type(eval(coms[3]))\n form = coms[3].strip('\"')\n form = form.strip(\"'\")\n setattr(storage.all()[obj], coms[2], typecast(form))", "def do_update(self, arg):\n arg = arg.split()\n try:\n h = arg[0] + \".\" + arg[1]\n except:\n pass\n objects = storage.all()\n if len(arg) is 0:\n print(\"** class name missing **\")\n elif len(arg) == 1 and arg[0] in self.dict.keys():\n print(\"** instance id missing **\")\n elif arg[0] not in self.dict.keys():\n print(\"** class doesn't exist **\")\n elif h not in objects.keys():\n print(\"** no instance found **\")\n elif len(arg) <= 2:\n print(\"** attribute name missing **\")\n elif len(arg) <= 3:\n print(\"** value missing **\")\n else:\n setattr(objects[h], arg[2], arg[3])\n storage.save()", "def do_update(self, args):\n args = shlex.split(args)\n if len(args) == 0:\n print(\"** class name missing **\")\n return False\n elif args[0] in classes:\n if len(args) > 1:\n k = args[0] + \".\" + args[1]\n if k in models.storage.all():\n if len(args) > 2:\n if len(args) > 3:\n try:\n if isinstance(args[2], datetime) is True:\n pass\n if args[0] in 
classes:\n if isinstance(args[2], ints) is True:\n args[3] = int(args[3])\n elif isinstance(args[2], floats) is True:\n args[3] = float(args[3])\n except:\n pass\n setattr(models.storage.all()[k], args[2], args[3])\n models.storage.all()[k].save()\n else:\n print(\"** value missing **\")\n else:\n print(\"** attribute name missing **\")\n else:\n print(\"** no instance found **\")\n else:\n print(\"** instance id missing **\")\n else:\n print(\"** class doesn't exist **\")", "def do_update(self, *args):\n if len(args) == 1:\n args = [ele for ele in args[0].split(' ')]\n if args[0] == '':\n print(\"** class name missing **\")\n return\n if args[0] not in self.list_classes:\n print(\"** class doesn't exist **\")\n return\n if len(args) < 2:\n print(\"** instance id missing **\")\n return\n elif len(args) < 3:\n print(\"** attribute name missing **\")\n return\n elif len(args) < 4:\n print(\"** value missing **\")\n return\n\n storage.reload()\n dict_objs = storage.all()\n if dict_objs is None or dict_objs == []:\n print(\"** no instance found **\")\n return\n\n key = \"{}.{}\".format(args[0], args[1])\n if key in dict_objs.keys():\n obj = dict_objs[key]\n if args[2] in obj.__class__.__dict__:\n obj.__dict__[args[2]] =\\\n type(obj.__class__.__dict__[args[2]])(args[3])\n else:\n obj.__dict__[args[2]] = args[3]\n storage.save()\n else:\n print(\"** no instance found **\")", "def do_update(self, args):\n args = args.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n if len(args) == 2:\n print(\"** attribute name missing **\")\n return\n if len(args) == 3:\n print(\"** value missing **\")\n return\n if args[0] not in HBNBCommand.valid_classes.keys():\n print(\"** class doesn't exist **\")\n return\n all_objs = storage.all(args[0])\n for k, v in all_objs.items():\n if k == args[1]:\n setattr(v, args[2], args[3])\n storage.save()\n return\n print(\"** no instance found **\")", "def do_update(self, line):\n if line:\n args = shlex.split(line)\n if len(args) < 2:\n print(\"** instance id missing **\")\n return False\n elif len(args) < 3:\n print(\"** attribute name missing **\")\n return False\n elif len(args) == 3:\n print(\"** value missing **\")\n return False\n else:\n obj_name, obj_id, obj_attr, obj_value = args\n obj_repr = \"{}.{}\".format(obj_name, obj_id)\n data = FileStorage()\n data.reload()\n data_loaded = data.all()\n for key, value in data_loaded.items():\n if key == obj_repr:\n obj = eval(obj_name)(**value.to_dict())\n if obj_name in obj.__dict__.keys():\n obj[obj_name] = obj_value\n else:\n setattr(obj, obj_attr, obj_value)\n d = {}\n for s_key, s_value in data_loaded.items():\n d[s_key] = s_value.to_dict()\n with open(data.path(), mode='w', encoding=\"utf-8\") as file:\n file.write(json.dumps(d))\n break\n else:\n print(\"** class doesn't exist **\")\n else:\n print(\"** class name missing **\")", "def saveClassroomData():\n with open(\"ClassRoomData.txt\",\"wb\") as classroomData:\n pickle.dump(classroomEntities,classroomData)", "def do_update(self, arg):\n args = arg.split()\n object_dict = storage.all()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if args[0] in self.class_dict:\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n elif len(args) == 2:\n print(\"** attribute name missing **\")\n return\n elif len(args) == 3:\n print(\"** value missing **\")\n return\n else:\n print(\"** class doesn't exist **\")\n return\n\n for i in 
range(len(args)):\n if args[i].startswith('\"') and args[i].endswith('\"'):\n args[i] = args[i][1:-1]\n\n for full_key in object_dict.keys():\n key = full_key.split('.')\n key_id = key[1]\n if args[0] in self.class_dict:\n if args[1] == object_dict[full_key].id:\n setattr(object_dict[full_key], args[2], args[3])\n setattr(object_dict[full_key], \"updated_at\",\n datetime.now())\n storage.save()\n return\n else:\n print(\"** class doesn't exist **\")\n return\n print(\"** no instance found **\")", "def Persist(self) -> bool:", "def Persist(self) -> bool:", "def update_has_data(self):\n self.main()", "def class_to_db(self):", "def do_update(self, args):\n args = shlex.split(args)\n dicti = storage.all()\n if not args:\n print(\"** class name missing **\")\n elif not args[0] in name_of_class:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif not \"{}.{}\".format(args[0], args[1]) in dicti:\n print(\"** no instance found **\")\n elif len(args) == 2:\n print(\"** attribute name missing **\")\n elif len(args) == 3:\n print(\"** value missing **\")\n else:\n key = dicti[\"{}.{}\".format(args[0], args[1])]\n setattr(key, args[2], args[3])\n key.save()", "def save(self):\n if not connection.connected:\n raise Exception('Not connected to the database.')\n if not self._retrieved:\n self.insert()\n self._retrieved = True\n else:\n self.update()", "def save(self):\n self.logger.debug(\"In save.\")\n\n if not self.is_valid():\n self.logger.error(\"Cannot save, data is invalid\")\n return False\n\n session = iHMPSession.get_session()\n self.logger.info(\"Got iHMP session.\")\n\n success = False\n\n if self.id is None:\n # The document has not yet been saved\n prep_data = self._get_raw_doc()\n self.logger.info(\"Got the raw JSON document.\")\n\n try:\n self.logger.info(\"Attempting to save a new node.\")\n node_id = session.get_osdf().insert_node(prep_data)\n self.logger.info(\"Save for HostSeqPrep %s successful.\", node_id)\n self.logger.info(\"Setting ID for HostSeqPrep %s.\", node_id)\n\n self._set_id(node_id)\n self._version = 1\n success = True\n except Exception as insert_exception:\n self.logger.error(\"An error occurred while inserting \" + \\\n \"%s %s. Reason: %s\", __name__, self._id,\n insert_exception\n )\n else:\n prep_data = self._get_raw_doc()\n\n try:\n self.logger.info(\"Attempting to update %s with ID: %s.\", __name__, self._id)\n session.get_osdf().edit_node(prep_data)\n self.logger.info(\"Update for %s %s successful.\", __name__, self._id)\n success = True\n except Exception as edit_exception:\n self.logger.error(\"An error occurred while updating %s \" + \\\n \" %s. 
Reason: %s\", __name__, self._id,\n edit_exception\n )\n\n return success", "def put(self):\n pass", "def put(self):\n pass", "def put(self):\n return", "def save(self):\n self.logger.debug(\"In save.\")\n\n # If node previously saved, use edit_node instead since ID\n # is given (an update in a way)\n # can also use get_node to check if the node already exists\n if not self.is_valid():\n self.logger.error(\"Cannot save, data is invalid.\")\n return False\n\n session = iHMPSession.get_session()\n self.logger.info(\"Got iHMP session.\")\n\n osdf = session.get_osdf()\n\n success = False\n\n if self._id is None:\n self.logger.info(\"About to insert a new %s OSDF node.\", __name__)\n\n # Get the JSON form of the data and load it\n self.logger.debug(\"Converting %s to parsed JSON form.\", __name__)\n data = json.loads(self.to_json())\n\n try:\n node_id = osdf.insert_node(data)\n\n self._set_id(node_id)\n self._version = 1\n success = True\n except Exception as save_exception:\n self.logger.exception(save_exception)\n self.logger.error(\"An error occurred when saving %s.\", self)\n else:\n self.logger.info(\"%s already has an ID, so we \" + \\\n \"do an update (not an insert).\", __name__)\n\n try:\n node_data = self._get_raw_doc()\n self.logger.info(\"%s already has an ID, so we do an \" + \\\n \"update (not an insert).\", __name__)\n node_id = self._id\n self.logger.debug(\"%s OSDF ID to update: %s.\", __name__, node_id)\n osdf.edit_node(node_data)\n\n node_data = osdf.get_node(node_id)\n latest_version = node_data['ver']\n\n self.logger.debug(\"The version of this %s is now: %s\",\n __name__, latest_version\n )\n self._version = latest_version\n success = True\n except Exception as update_exception:\n self.logger.exception(update_exception)\n self.logger.error(\"An error occurred when updating %s.\", self)\n\n return success", "def deleteClassroom(classroomId):\n for classroom in classroomEntities:\n if classroom[\"classroomId\"] == classroomId:\n selectedClassroom = classroom\n classroomEntities.remove(selectedClassroom)\n return True\n return False", "def do_update(self, args):\n args = shlex.split(args)\n if len(args) == 0:\n print(\"** class name missing **\")\n elif not args[0] in class_type:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif (\"{}.{}\".format(args[0], args[1]) not in storage.all().keys()):\n print(\"** no instance found **\")\n elif len(args) == 2:\n print(\"** attribute name missing **\")\n elif len(args) == 3:\n print(\"** value missing **\")\n else:\n new_dict = models.storage.all()\n tmp = \"{}.{}\".format(args[0], args[1])\n if tmp in new_dict.keys():\n attr = getattr(new_dict[tmp], args[2], \"\")\n setattr(new_dict[tmp], args[2], type(attr)(args[3]))\n new_dict[tmp].save()", "def save(self):\n self.session.modified = True", "def update(self):\n db.session.commit()", "def update(self):\n db.session.commit()", "def update_course(self):\n # ensure that updating course is exists\n if self.is_course_exists():\n db = Course._file.read_db()\n for crs_i in range(len(db[\"courses\"])):\n if db[\"courses\"][crs_i][\"course_name\"] == self._course_name:\n\n # ensuring that user does not provided less number of limited places\n if db[\"courses\"][crs_i][\"total_place\"] > self._total_place:\n print(\"{} course's limited places number must be more than {}\".format(\n self._course_name,\n db[\"courses\"][crs_i][\"total_place\"]\n ))\n return\n\n db[\"courses\"][crs_i][\"teacher\"] = self._teacher\n 
db[\"courses\"][crs_i][\"total_place\"] = self._total_place\n break\n self._file.write_db(db)\n print(\"The course - {} is updated\".format(self._course_name))\n return self.get_course().course_info()", "def put(self):\n self._val = True", "def _update_class(self, course, semester, year):\n\n if cache_result := cache.get(f'no classes {course.id}'):\n print(f'no classes found for course {course.id} at {cache_result}')\n return\n\n # Get response from SIS class resource\n response = sis_class_resource.get(\n semester=semester,\n year=year,\n course_id=course.id,\n abbreviation=course.abbreviation,\n course_number=course.course_number,\n )\n\n if len(response) == 0:\n cache.add(f'no classes {course.id}', datetime.datetime.now(), timeout=7 * 24 * 60 * 60)\n print(f'no classes found for course {course.id}')\n return\n\n updated_section_ids = set()\n primary_sect_id_to_sections = defaultdict(list)\n\n # Map response to Section and Enrollment objects and persist to database\n section_extras = {\n 'course_id': int(course.id),\n 'abbreviation': course.abbreviation,\n 'course_number': course.course_number,\n 'semester': semester,\n 'year': year,\n }\n for sect in response:\n if not sect:\n continue\n section_dict = section_mapper.map(sect, extras=section_extras)\n section, created = self.update_or_create_from_dict(section_dict)\n if not section:\n continue\n\n updated_section_ids.add(section.id)\n\n if section_dict['primary_section']:\n primary_sect_id_to_sections[section_dict['primary_section']].append(section)\n\n # Update enrollment\n if semester != 'summer' and section.is_primary and not section.disabled:\n enrollment_dict = enrollment_mapper.map(sect, extras={'section_id': section.id})\n enrollment_service.update_or_create_from_dict(enrollment_dict)\n\n # Add associations between primary and non-primary sections\n for related_sections in primary_sect_id_to_sections.values():\n primary_section = [s for s in related_sections if s.is_primary][0]\n other_sections = [s for s in related_sections if not s.is_primary]\n primary_section.associated_sections.add(*other_sections)\n for section in related_sections:\n section.save()\n\n if len(updated_section_ids) > 0:\n print({\n 'message': 'Updated sections for course',\n 'course': course,\n 'sections updated': len(updated_section_ids),\n })\n\n # Disable existing section if data not found in response\n sections_to_disable = Section.objects.filter(\n course_id=course.id,\n semester=semester,\n year=year,\n ).exclude(id__in=updated_section_ids)\n for section in sections_to_disable:\n if not section.disabled:\n section.disabled = True\n section.save()\n print({\n 'message': 'Disabling section not in API response.',\n 'section': section,\n })\n\n # Update derived enrollment fields in course object\n course_service._update_derived_enrollment_fields(course)", "def sync_venture_role_to_ralph3(data):\n creating = False\n try:\n conf_class = ImportedObjects.get_object_from_old_pk(\n ConfigurationClass, data['id']\n )\n except ImportedObjectDoesNotExist:\n creating = True\n conf_class = ConfigurationClass()\n logger.info(\n 'Configuration class {} ({}) not found - creating new one'.format(\n data['name'], data['id']\n )\n )\n\n try:\n conf_class.module = ImportedObjects.get_object_from_old_pk(\n ConfigurationModule, data['venture']\n )\n except ImportedObjectDoesNotExist:\n logger.error(\n 'Venture with old_pk={} not found for role {}'.format(\n data['venture'], data['id']\n )\n )\n return\n\n conf_class.class_name = data['name']\n conf_class.save()\n if 
creating:\n ImportedObjects.create(conf_class, data['id'])\n logger.info('Synced configuration class {}'.format(conf_class))", "def update(cls) -> None:\n raise NotImplementedError", "def do_update(self, line):\n args = shlex.split(line)\n size = len(args)\n db = models.storage.all()\n if size == 0:\n print(\"** class name missing **\")\n elif not args[0] in self.__names:\n print(\"** class doesn't exist **\")\n elif size == 1:\n print(\"** instance id missing **\")\n elif not (args[0] + \".\" + args[1]) in db:\n print(\"** no instance found **\")\n elif size == 2:\n print(\"** attribute name missing **\")\n elif size == 3:\n print(\"** value missing **\")\n else:\n new_dict = db[args[0] + \".\" + args[1]].to_dict()\n val = args[3]\n if self.is_int(val):\n val = int(val)\n elif self.is_float(val):\n val = float(val)\n new_dict[args[2]] = val\n obj = self.__names[args[0]](**new_dict)\n db[args[0] + \".\" + args[1]] = obj\n models.storage.save()", "def _update_single(self, disc, class_num):\n self.cursor.execute(self.UPDATE, (class_num, disc))\n self.conn.commit()", "def _update(self, course_name: str, newdata: ParseType) -> None:\n\n self.courses[course_name] = newdata", "def save(self, case) -> bool:\n if case:\n key = case_key(case)\n case.key = key\n self.cases[key] = case\n the_redis = DARedis()\n the_redis.set_data(self.user_cases_key, self.cases)\n return True", "def changeClass(self, newClass):\n\t\turl = \"https://habitica.com/api/v3/user/change-class?class=\" + newClass\n\t\treturn(postUrl(url, self.credentials))", "def update(self, data):\n self.content = data", "def modify_element_in_store(entry_sequence, modified_element, is_propagated_call = False):\n\t\tglobal board, node_id\n\t\tsuccess = False\n\t\ttry:\n\t\t\tboard[int(entry_sequence)] = modified_element\n\t\t\tsuccess = True\n\t\texcept Exception as e:\n\t\t\tprint e\n\t\treturn success", "def save(self, content):\n raise NotImplemented()", "def update(self):\n\n pass", "def _update(self):\n if self._dirty:\n return self._to_flattr_dict()\n return False", "def do_update(self, line):\n\n args = line.split()\n\n if not args:\n print(\"** class name missing **\")\n elif args[0] not in HBNBCommand.class_list:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif len(args) > 1:\n key = args[0] + \".\" + args[1]\n dict_objects = storage.all()\n obj = dict_objects.get(key)\n if obj is None:\n print(\"** no instance found **\")\n else:\n if len(args) == 2:\n print(\"** attribute name missing **\")\n elif len(args) == 3:\n print(\"** value missing **\")\n else:\n setattr(obj, args[2], str(args[3].replace('\"', '')))\n storage.save()", "def update(self):\n data = self.serialize()\n\n self.validate(data)\n\n saved_data = DATABASE_CONNECTION.update(self.__class__.__name__, data['id'], data)\n\n self.__dict__.update(saved_data)", "def test_PUT_room(self):\n\t\t# 1)\n\t\tself.POST_room()\n\t\t# 2)\n\t\tNEW_ROOM_DATA = {'count': '3', 'name': 'NEW-ROOM-NAME'}\n\t\trv = self.PUT_data('/api/room/' + self.room_id, NEW_ROOM_DATA)\n\t\t# 3)\n\t\tdata = self.GET_data('/api/room/' + self.room_id)\n\t\tself.assertDataMatch(TEST_ROOM_DATA, data, ['type'])\n\t\t# 4)\n\t\tself.assertDataMatch(NEW_ROOM_DATA, data, NEW_ROOM_DATA.keys())\n\t\tself.validate_last_modified(data)", "def update_class(self, class_info):\n SchemaValidator(self.schema_extension_only, self.schema_nx).validate_class_schema(class_info)\n self.schema[\"@graph\"].append(class_info)\n self.load_schema(self.schema)\n 
print(\"Updated the class {} successfully!\".format(class_info[\"rdfs:label\"]))", "def update(self, data):\n pass", "def update(self, data):\n pass", "def update(self, data):\n pass", "def update(self, data):\n pass", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='L'\n )", "def update(self) -> py_trees.common.Status:\n self.logger.debug(\"%s.update()\" % (self.__class__.__name__))\n self.blackboard.foo.bar.wow = \"colander\"\n\n return py_trees.common.Status.SUCCESS", "def test_change_classroom_specific_for_coach_pt1(self):\n self.assertTrue(self.coach2.has_perm('auth.change_classroom', self.classrooms[1]))", "def save(self):\n # TODO (Pierre): code", "def update_contact_in_db(self):\n self.init_db(self._testing)\n\n # making sure that the object is in the db\n assert not self.uid == \"\"\n\n self._update_row_in_db(Contact.table_name, Contact.columns, self.values_with_uid)", "def put(self,data):\n\n \n try:\n\n db = getDatabase()\n connection = db.connect()\n \n connection.put(self,data)\n except Exception as e:\n raise e\n finally:\n db.dispose()", "def test_update_overwrite(self):\n existing_mode = self.course_mode\n existing_masters_mode = CourseMode.objects.create(\n course_id=self.course.id,\n mode_slug='masters',\n min_price=10000,\n currency='USD',\n sku='DEF456',\n bulk_sku='BULK-DEF456'\n )\n new_mode = CourseMode(\n course_id=self.course.id,\n mode_slug='credit',\n min_price=500,\n currency='USD',\n sku='ABC123',\n bulk_sku='BULK-ABC123'\n )\n\n path = reverse('commerce_api:v1:courses:retrieve_update', args=[str(self.course.id)])\n data = json.dumps(self._serialize_course(self.course, [new_mode]))\n response = self.client.put(path, data, content_type=JSON_CONTENT_TYPE)\n assert response.status_code == 200\n\n # Check modes list in response, disregarding its order.\n expected_dict = self._serialize_course(self.course, [new_mode])\n expected_items = expected_dict['modes']\n actual_items = json.loads(response.content.decode('utf-8'))['modes']\n self.assertCountEqual(actual_items, expected_items)\n\n # The existing non-Masters CourseMode should have been removed.\n assert not CourseMode.objects.filter(id=existing_mode.id).exists()\n\n # The existing Masters course mode should remain.\n assert CourseMode.objects.filter(id=existing_masters_mode.id).exists()", "def helpClassroom(classroomId):\n selectedClassroomCopy = getClassroomById(classroomId)\n print(\"Class Id: \" + selectedClassroomCopy[\"classroomId\"])\n print(\"Name: \" + selectedClassroomCopy[\"classroomName\"])\n print(\"Capacity: \" + selectedClassroomCopy[\"capacity\"])\n print(\"Location: \" + selectedClassroomCopy[\"location\"])\n return True", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='O'\n )", "async def save(self) -> None:\n if not hasattr(self, 'errors'):\n raise RuntimeError('you must call is_valid() before save instance')\n if self.errors:\n raise RoomValidationError(self.errors)\n if hasattr(self, '_id'):\n data = self.loads()\n room_id = data.pop('_id')\n await room_collection.replace_one({'_id': room_id}, data)\n else:\n result = await room_collection.insert_one(self.loads())\n self._id = result.inserted_id", "def save(self):\n try:\n db.session.add(self)\n db.session.commit()\n return True\n except SQLAlchemyError as e:\n db.session.rollback()\n logger.error(\"database operation error: \", e)\n return False", "def save(self):\n ret = False\n\n # we will only use the 
primary key if it hasn't been modified\n pk = None\n if self.schema.pk.name not in self.modified_fields:\n pk = self.pk\n\n if pk:\n ret = self.update()\n else:\n ret = self.insert()\n\n return ret", "def modificacion(self, socio):\n\n aux = self.buscar(socio.id)\n print('El socio a modificar en capa de datos:', aux.id, aux.nombre)\n\n if aux == None:\n return False\n else:\n #persona = session.query(Socio).filter(Socio.dni == aux.id)\n aux.nombre = socio.nombre\n aux.apellido = socio.apellido\n aux.dni = socio.dni\n\n session.commit()\n\n return aux", "def update(self):\n raise NotImplementedError", "def update(self) -> None:\n ...", "def do_update(self, arg):\n if type(arg) == str:\n arg_list = shlex.shlex(arg)\n arg_list.wordchars += \"-\"\n arg_list = list(arg_list)\n try:\n idx_start = arg_list.index(\"[\")\n idx_end = arg_list.index(\"]\")\n list_str = \"\".join(arg_list[idx_start:idx_end + 1])\n list_str = eval(list_str)\n list_start = arg_list[:idx_start]\n list_end = arg_list[idx_end + 1:]\n arg_list = list_start\n arg_list.append(list_str)\n arg_list.extend(list_end)\n except ValueError:\n pass\n else:\n arg_list = arg\n if not arg:\n print(\"** class name missing **\")\n return\n if arg_list[0] not in HBNBCommand.class_list:\n print(\"** class doesn't exist **\")\n return\n if len(arg_list) < 2:\n print(\"** instance id missing **\")\n return\n key = arg_list[0] + \".\" + arg_list[1]\n if key not in storage.all():\n print(\"** no instance found **\")\n return\n if len(arg_list) == 3 and type(arg_list[2]) == dict:\n obj = storage.all()[key]\n for key, val in arg_list[2].items():\n setattr(obj, key, val)\n obj.save()\n return\n if len(arg_list) < 3:\n print(\"** attribute name missing **\")\n return\n if len(arg_list) < 4:\n print(\"** value missing **\")\n return\n obj = storage.all()[key]\n if type(arg_list[3]) != list:\n arg_list[3].replace('\"', \"\").replace(\"'\", \"\")\n setattr(obj, arg_list[2].replace('\"', \"\").replace(\"'\", \"\"),\n arg_list[3])\n obj.save()", "def update(self, key, val):\n if key in self._datastore:\n self._datastore[key] = val\n return True\n else:\n raise KeyError(\n \"Tried to update a non existing record\"\n )", "def update(self):\n self.haveClub = len(self.clubs()) > 0", "def do_update(self, line):\n try:\n tokens = split(line)\n except ValueError:\n return None\n if len(tokens) < 1:\n print(\"** class name missing **\")\n else:\n objects = models.storage.all()\n cls = models.getmodel(tokens[0])\n if cls is None:\n print(\"** class doesn't exist **\")\n elif len(tokens) < 2:\n print(\"** instance id missing **\")\n elif \".\".join(tokens[:2]) not in objects:\n print(\"** no instance found **\")\n elif len(tokens) < 3:\n print(\"** attribute name missing **\")\n elif len(tokens) < 4:\n print(\"** value missing **\")\n else:\n obj = objects[\".\".join(tokens[:2])]\n for key, value in zip(tokens[2::2], tokens[3::2]):\n try:\n setattr(obj, key, int(value))\n except ValueError:\n try:\n setattr(obj, key, float(value))\n except ValueError:\n try:\n setattr(obj, key, str(value))\n except ValueError:\n pass\n obj.save()", "def put(self, data):\n if 'content' in data:\n self.context.content = data['content']\n self.context.mtime = datetime.datetime.now()\n self.db.flush()\n return self.context.as_dict(self.user)", "def put(self):\n raise NotImplementedError()", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", 
"def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update(self):\n pass", "def update_knowledge(self):\n pass", "def writable(self):\n return True", "def save(self):\n # type: () -> bool\n\n return self.query.commit(self.id, self)", "def save(self):\n try:\n db.session.add(self)\n db.session.commit()\n return True\n except SQLAlchemyError as error_message:\n app_logger.error(error_message)\n return False", "def updatestatus(self):\n self.status = self.query()\n if self.status['success']:\n return True\n else:\n return False", "def update(self) -> None:\n pass", "def update(self) -> None:\n pass", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError", "def save(self):\n self.save_to_db()\n if hasattr(self, 'id'):\n self.status_code = 201\n return True\n else:\n self.errors['messages'].append(\"DataBase Error, Please Try again\")\n self.status_code = 500\n return False" ]
[ "0.642563", "0.5946925", "0.54554087", "0.5389277", "0.5383477", "0.5381614", "0.53707135", "0.536546", "0.5361684", "0.5358011", "0.5356025", "0.5323734", "0.5304585", "0.52719533", "0.5269355", "0.5269355", "0.52631587", "0.5255712", "0.5246466", "0.5222454", "0.5217273", "0.5204923", "0.5204923", "0.5193499", "0.51909333", "0.5182403", "0.5150957", "0.51488924", "0.5134032", "0.5134032", "0.51322514", "0.5120766", "0.51098144", "0.50914276", "0.50875694", "0.50833035", "0.5077261", "0.50553864", "0.50507873", "0.5032144", "0.5021279", "0.5018853", "0.50125474", "0.4999562", "0.49849746", "0.49848694", "0.49795002", "0.49763396", "0.49656215", "0.49501872", "0.49501872", "0.49501872", "0.49501872", "0.4948974", "0.4943851", "0.49299452", "0.49284795", "0.49214032", "0.49106228", "0.49039614", "0.4894774", "0.48915717", "0.48860458", "0.4881629", "0.48813874", "0.48785576", "0.4877737", "0.48764345", "0.4853422", "0.48522687", "0.48466837", "0.48415062", "0.48355836", "0.4833217", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48325628", "0.48283026", "0.48180005", "0.48164144", "0.48095366", "0.48030716", "0.48028135", "0.48028135", "0.47985947", "0.47985947", "0.47985947", "0.47886553" ]
0.697157
0
Delete a classroom from the system. Return True if the operation is successful.
def deleteClassroom(classroomId):
    for classroom in classroomEntities:
        if classroom["classroomId"] == classroomId:
            selectedClassroom = classroom
            classroomEntities.remove(selectedClassroom)
            return True
    return False
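A minimal usage sketch of the document above. The record does not define `classroomEntities`, so the module-level list and the sample entry below are assumptions for illustration only:

classroomEntities = [
    {"classroomId": "CR001", "classroomName": "Physics Lab",
     "capacity": "30", "location": "Block A"},
]

def deleteClassroom(classroomId):
    for classroom in classroomEntities:
        if classroom["classroomId"] == classroomId:
            selectedClassroom = classroom
            classroomEntities.remove(selectedClassroom)
            return True
    return False

print(deleteClassroom("CR001"))  # True: the matching entry is removed in place
print(deleteClassroom("CR999"))  # False: no classroom with this id exists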
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_remove_classroom_specific_for_coach_pt1(self):\n self.assertTrue(self.coach1.has_perm('auth.remove_classroom', self.classrooms[0]))", "def test_remove_classroom_specific_for_learner(self):\n self.assertFalse(self.learner1.has_perm('auth.remove_classroom', self.classrooms[1]))", "def delete_room(context):\n\n room = context.get('spark.room')\n bearer = context.get('spark.CISCO_SPARK_PLUMBERY_BOT')\n\n print(\"Deleting Cisco Spark room '{}'\".format(room))\n\n url = 'https://api.ciscospark.com/v1/rooms'\n headers = {'Authorization': 'Bearer '+bearer}\n response = requests.get(url=url, headers=headers)\n\n if response.status_code != 200:\n print(response.json())\n raise Exception(\"Received error code {}\".format(response.status_code))\n\n actual = False\n for item in response.json()['items']:\n\n if room in item['title']:\n print(\"- found it\")\n print(\"- DELETING IT\")\n\n url = 'https://api.ciscospark.com/v1/rooms/{}'.format(item['id'])\n headers = {'Authorization': 'Bearer '+bearer}\n response = requests.delete(url=url, headers=headers)\n\n if response.status_code != 204:\n raise Exception(\"Received error code {}\".format(response.status_code))\n\n actual = True\n\n if actual:\n print(\"- room will be re-created in Cisco Spark\")\n else:\n print(\"- no room with this name yet\")\n\n context.set('spark.room_id', None)", "def test_remove_classroom_specific_for_coach_pt2(self):\n self.assertFalse(self.coach1.has_perm('auth.remove_classroom', self.classrooms[1]))", "def unspawn(self):\n global NodeTypeclass\n if not NodeTypeclass:\n from .room import XYZRoom as NodeTypeclass\n\n xyz = (self.X, self.Y, self.Z)\n\n try:\n nodeobj = NodeTypeclass.objects.get_xyz(xyz=xyz)\n except django_exceptions.ObjectDoesNotExist:\n # no object exists\n pass\n else:\n nodeobj.delete()", "def delete():", "def do_destroy(self, arg):\n if len(arg) == 0:\n print(\"** class name missing **\")\n return\n coms = tuple(arg.split())\n if coms[0] not in self.cls:\n print(\"** class doesn't exist **\")\n elif len(coms) < 2:\n print(\"** instance id missing **\")\n else:\n obj = coms[0] + \".\" + coms[1]\n if obj not in storage.all().keys():\n print(\"** no instance found **\")\n else:\n del storage.all()[obj]\n storage.save()", "def room_delete(room_id):\n room = Room.query.get(room_id)\n if room is None:\n abort(404, 'room not found')\n\n get_db().delete(room)\n get_db().commit()\n\n return '', 204", "def delete(room_id):\n\n entry = Room.objects.filter(room_id=room_id).first()\n if entry is not None:\n entry.delete()\n\n entries = Players.objects.filter(room_id=room_id)\n if entries.count():\n entries.delete()\n\n round.dialog.delete_rounds(room_id=room_id, called_from=__path__+\":\"+utils.fname())", "def do_destroy(self, args):\n args = shlex.split(args)\n if len(args) == 0:\n print(\"** class name missing **\")\n return False\n if args[0] in classes:\n if len(args) > 1:\n key = args[0] + \".\" + args[1]\n if key in models.storage.all():\n models.storage.all().pop(key)\n models.storage.save()\n else:\n print(\"** no instance found **\")\n else:\n print(\"** instance id missing **\")\n else:\n print(\"** class doesn't exist **\")", "def test_delete(self):\n c = city.City(name=\"Freiburg\")\n p1 = city.Citizen(name=\"Peter\")\n p2 = city.Citizen(name=\"Georg\")\n p3 = city.Citizen(name=\"Hans\")\n c.add(p1, p2, p3, rel=city.hasInhabitant)\n\n with DataspaceSession(URI) as session:\n wrapper = city.CityWrapper(session=session)\n cw = wrapper.add(c)\n session.commit()\n\n cw.remove(p3.uid)\n 
session.prune()\n session.commit()\n\n check_state(self, c, p1, p2, db=DB)", "def delete(self, registration):\n return Car.delete(registration)", "def delete():\n Course.print_all_crs()\n course_name = input(\"Please, type course name >\")\n c = Course(course_name)\n if c.is_course_exists():\n db = Course._file.read_db()\n for crs_i in range(len(db[\"courses\"])):\n if db[\"courses\"][crs_i][\"course_name\"] == course_name:\n del db[\"courses\"][crs_i]\n break\n Course._file.write_db(db)\n print(\"{} course is deleted\".format(course_name))\n else:\n print(\"Failed. {} course does not exist\".format(course_name))", "def do_destroy(self, arg):\n arg_list = arg.split(\" \") if type(arg) == str else arg\n if not arg:\n print(\"** class name missing **\")\n return\n if arg_list[0] not in HBNBCommand.class_list:\n print(\"** class doesn't exist **\")\n return\n if len(arg_list) < 2:\n print(\"** instance id missing **\")\n return\n key = arg_list[0] + \".\" + arg_list[1]\n if key in storage.all():\n del storage.all()[key]\n storage.save()\n return\n print(\"** no instance found **\")", "def do_destroy(self, arg):\n args = shlex.split(arg)\n if len(args) == 0:\n print(\"** class name missing **\")\n elif args[0] in class_type:\n if len(args) > 1:\n key = args[0] + \".\" + args[1]\n if key in models.storage.all():\n models.storage.all().pop(key)\n models.storage.save()\n else:\n print(\"** no instance found **\")\n else:\n print(\"** instance id missing **\")\n else:\n print(\"** class doesn't exist **\")", "def do_destroy(self, arg):\n args = arg.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n elif len(args) < 2 and args[0] in self.class_dict:\n print(\"** instance id missing **\")\n return\n elif len(args) < 2:\n print(\"** class name missing **\")\n return\n\n object_dict = storage.all()\n if args[0] in self.class_dict:\n for full_key in object_dict:\n key = full_key.split(\".\")\n if key[1] == args[1]:\n del object_dict[full_key]\n storage.save()\n return\n print(\"** no instance found **\")\n else:\n print(\"** class doesn't exist **\")", "def delete(self):\n\n\n try:\n db = getDatabase()\n connection = db.connect()\n\n connection.delete(self)\n except Exception as e:\n raise e\n finally:\n db.dispose()", "def delete(self, name):\n instance = self.get_one_instance('name', name)\n\n if type(instance) != self.Component:\n set_session_var('errors', str(instance))\n return None\n\n res = delete_in_db(instance)\n\n if res != 'deleted':\n set_session_var('errors', str(res))\n else:\n set_session_var('success', res)\n\n return True", "def test_delete_lecture(lecture_class, course, valid_datetime):\n id = lecture_class.create_lecture(course, valid_datetime)\n assert id != None\n assert lecture_class.delete_lecture()", "def do_destroy(self, args):\n args = args.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n if args[0] not in HBNBCommand.class_check:\n print(\"** class doesn't exist **\")\n return\n\n all_objs = storage.all()\n key = args[0] + '.' 
+ args[1]\n if key in all_objs:\n all_objs.pop(key)\n storage.save()\n else:\n print(\"** no instance found **\")", "def delete(self):\n db.session.delete(self)\n try:\n db.session.commit()\n return True\n except Exception as error:\n db.session.rollback()\n print(error.args)\n return False", "def delete(self):\n ...", "def test_remove_students():\n classroom = setup_for_test()\n student = Student(\"Andrew Tsukuda\")\n classroom.add_student(student)\n assert len(classroom.student_dir) == 1\n assert classroom.student_dir[0].ID == 1\n classroom.remove_student(\"Andrew Tsukuda\")\n assert len(classroom.student_dir) == 0", "def do_destroy(self, *args):\n args = [ele for ele in args[0].split(' ')]\n if args[0] == '':\n print(\"** class name missing **\")\n return\n if args[0] not in self.list_classes:\n print(\"** class doesn't exist **\")\n return\n if len(args) != 2:\n print(\"** instance id missing **\")\n return\n\n storage.reload()\n dict_objs = storage.all()\n if dict_objs is None or dict_objs == []:\n print(\"** no instance found **\")\n return\n\n key = \"{}.{}\".format(args[0], args[1])\n if key in dict_objs.keys():\n del dict_objs[key]\n storage.save()\n else:\n print(\"** no instance found **\")", "def delete(self):\n # gid must be specified for deletion\n gid = self.get_query_argument('gid')\n self.write(self._rpc.aria2.remove(self._token, gid))", "def do_destroy(self, arg):\n arg = arg.split()\n try:\n args = arg[0] + \".\" + arg[1]\n except:\n pass\n objects = storage.all()\n if len(arg) is 0:\n print(\"** class name missing **\")\n elif len(arg) == 1 and arg[0] in self.dict.keys():\n print(\"** instance id missing **\")\n elif arg[0] not in self.dict.keys():\n print(\"** class doesn't exist **\")\n elif args not in objects:\n print(\"** no instance found **\")\n else:\n del objects[args]\n storage.save()", "def delete_game(self, room_code: str) -> None:\n self.games_table.delete_item(Key={\"room_code\": room_code})", "async def delete(self):\r\n try:\r\n data = await self.request.json()\r\n agent_uuid = data.get(\"agent_uuid\")\r\n agent_to_delete = Agent.filter(Agent.uuid == agent_uuid).first()\r\n sys_id = (\r\n System.select().where(System.agent_uuid == agent_to_delete).execute()\r\n )\r\n if sys_id:\r\n logger.error(\"Agent not deleted\")\r\n return web.Response(text=\"Agent not deleted.\")\r\n else:\r\n agent_to_delete.delete_instance()\r\n logger.info(\"Agent deleted successfully\")\r\n return web.Response(text=\"Agent deleted successfully.\")\r\n except Exception as ex:\r\n error_message = str(ex)\r\n logger.error(error_message)\r\n return web.Response(text=error_message, status=500)", "def delete(self):\n DBSESSION.delete(self)\n DBSESSION.commit()\n LOG.info(f\"Register of {self.str_representation} with id = {self.id} was successfully deleted.\")", "def test_remove_coach_specific_for_coach_pt1(self):\n self.assertTrue(self.coach2.has_perm(self.AUTH_REMOVE_COACH, self.classrooms[1]))", "def delete(self):\r\n s = self.get_session()\r\n s.delete(self)\r\n s.commit()", "def delete(self, instance: BaseModel):\n with rconnect() as conn:\n id = str(instance.id)\n try:\n query = self.q.get(id).delete()\n rv = query.run(conn)\n except Exception as e:\n console.warn(e)\n raise\n else:\n return True", "def delete(cls, pk):\n ins = cls.get(pk)\n if ins is None:\n return False\n DBSession().delete(ins)\n return True", "def delete(self, room_id):\n # Check for Authorization header in the form of \"Bearer <token>\"\n if \"Authorization\" not in request.headers:\n raise 
AuthorizationError(\"No password specified\")\n temp_pass = request.headers.get(\"Authorization\").split(\" \")[1]\n\n room = redis_store.get(room_id)\n if room is not None:\n room = loads(room)\n\n if room.get('delete_password') == temp_pass:\n redis_store.delete(room_id)\n return {'success': True}\n else:\n raise AuthorizationError(\"Wrong one time password for host.\")\n else:\n # raise 404\n raise ResourceDoesNotExist('Resource not found.')", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete_leader(self):", "def remove(self):\n instance = self.get_object() \n instance.delete() \n return self.response(status='Successfully Delete')", "def delete(self):\r\n db.session.delete(self)\r\n db.session.commit()", "def test_remove_classroom_optional_object_error(self):\n with self.assertRaises(InvalidPermission):\n self.admin.has_perm('auth.remove_classroom', {})", "def delete(self) -> bool:\n return False", "def delete_vehicles(world):\n actors = world.get_actors()\n for actor in actors:\n # if actor.type_id == vehicle.*\n id = actor.type_id\n actor_type = id.split(\".\")\n\n # destroy vehicle\n if actor_type[0] == \"vehicle\":\n actor.destroy()\n print(\"vehicles are deleted\")", "def delete(self, application_id):", "def do_destroy(self, line):\n args = line.split()\n\n if not args:\n print(\"** class name missing **\")\n elif args[0] not in HBNBCommand.class_list:\n print(\"** class doesn't exist **\")\n elif len(args) < 2:\n print(\"** instance id missing **\")\n else:\n key = args[0] + \".\" + args[1]\n dict_objects = storage.all()\n obj = dict_objects.get(key)\n if obj:\n dict_objects.pop(key)\n storage.save()\n else:\n print(\"** no instance found **\")", "def delete(self, *args, **kwargs):\n self.portrait.delete()\n super(Giza, self).delete(*args, **kwargs)", "def destroy(self):\n db.session.delete(self)\n db.session.commit()\n return True", "def do_destroy(self, args):\n args = shlex.split(args)\n dicti = storage.all()\n if not args:\n print(\"** class name missing **\")\n elif not args[0] in name_of_class:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif \"{}.{}\".format(args[0], args[1]) in dicti:\n dicti.pop(\"{}.{}\".format(args[0], args[1]))\n storage.save()\n else:\n print(\"** no instance found **\")", "def handle_delete_room(self, lobby_command, client_socket):\n user = self.clients[client_socket]['data'].decode('utf-8')\n roomname = lobby_command.split()[1]\n msg = f\"Handling room deletion of {roomname} by {user}\"\n print(msg)\n for _room in self.rooms:\n if _room.name == roomname and user in _room.room_attrbts['admins']:\n msg = f\"Room {roomname} is being deleted by admin {user}\"\n self.rooms.remove(_room)\n self.log_and_send(client_socket, msg)\n return\n msg = f\"Room {roomname} was not found or user is not permitted to delete\"\n self.log_and_send(client_socket, msg)", "def delete(self):\n try:\n db.session.delete(self)\n db.session.commit()\n return True\n except Exception as e:\n db.session.rollback()\n return {\n \"message\": \"Error encountered during deletion.\",\n \"help\": \"Ensure the database is running properly.\",\n \"exception\": str(e)\n }", "def test_delete(self):\n person = Person('test_person_b')\n person.delete()\n with database() as db:\n results = db.query(\"SELECT * FROM persons WHERE person_name = 'test_person_b'\")\n self.assertEqual(results, [])", "def delete(self):\n try:\n db.session.delete(self)\n 
db.session.commit()\n return True\n except SQLAlchemyError as error_message:\n app_logger.error(error_message)\n return False", "def delete(self):\n return self.parent.delete_instance(self.name)", "def delete(self, app, role, privilege):\n \n # check user's privileges\n h.checkAccess('delete')\n\n model = RolesModel()\n model.deletePrivilege( app, role, privilege )\n\n # returns empty reply", "def station_delete(request, station):\n st = connection.Station.find_one({'name': station})\n if st and not st.get('lock'):\n st.delete()\n return HttpResponseRedirect(reverse('deliverycenters.views.station_list'))", "def delete():\n # Must be logged in to perform any delete commands.\n auth_required()\n pass", "def delete(self, cls, id):\n pass", "def delete(self):\n self.logger.debug(\"In delete.\")\n\n if self._id is None:\n self.logger.warn(\"Attempt to delete a %s with no ID.\", __name__)\n raise Exception(\"{} does not have an ID.\".format(__name__))\n\n id = self._id\n\n session = iHMPSession.get_session()\n self.logger.info(\"Got iHMP session.\")\n\n # Assume failure\n success = False\n\n try:\n self.logger.info(\"Deleting %s with ID %s.\", __name__, id)\n session.get_osdf().delete_node(id)\n success = True\n except Exception as delete_exception:\n self.logger.exception(delete_exception)\n self.logger.error(\"An error occurred when deleting %s.\", self)\n\n return success", "def do_delete(self, arg):\n \treturn False", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def test_03_delete(self, application=None):\n self.direct_login_user_1()\n self.my_context_dict[\"new_application\"].delete()\n\n if self.my_context_dict[\"new_application\"] in self._objects_to_delete:\n ## no need to delete it in tearDownClass if delete succeeded\n self._objects_to_delete.remove(self.my_context_dict[\"new_application\"])", "def delete(self):\n self.model.remove_agents(self)", "def delete_station(pool, latitude, longitude, station_type):\n\n connection = pool.connection()\n try:\n initial_value = str(station_type.value)\n\n if len(initial_value)==6:\n pattern = \"{}_____\".format(initial_value[0])\n elif len(initial_value)==7:\n pattern = \"{}{}_____\".format(initial_value[0], initial_value[1])\n\n with connection.cursor() as cursor:\n sql_statement = \"DELETE FROM `station` WHERE `id` like %s and `latitude`=%s and `longitude`=%s\"\n row_count = cursor.execute(sql_statement, (pattern, latitude, longitude))\n connection.commit()\n if row_count > 0:\n return True\n else:\n logger.info(\"There's no record of station in the database with latitude={}, \"\n \"longitude={}, and station_type{}\".format(latitude, longitude, station_type))\n return False\n except Exception as exception:\n connection.rollback()\n error_message = \"Deleting station with latitude={}, longitude={}, and station_type{} failed.\"\\\n .format(latitude, longitude, station_type)\n logger.error(error_message)\n traceback.print_exc()\n raise exception\n finally:\n if connection is not 
None:\n connection.close()", "def delete(self, _id):", "def delete(self):\n self.manager.delete(self.name)", "def delete(self):\n self.manager.delete(self.name)", "def delete(isvgAppliance, uuid, check_mode=False, force=False):\n if force is True or _exists(isvgAppliance, uuid) is True:\n if check_mode is True:\n return isvgAppliance.create_return_object(changed=True)\n else:\n return isvgAppliance.invoke_delete(\n \"Delete a snmp object\",\n \"/rsp_snmp_objs/{0}\".format(uuid))\n\n return isvgAppliance.create_return_object()", "def delete(self, class_name):\n\n return self.client.delete(Classes.PATH_CLASS_TEMPLATE.format(class_name=class_name))", "def test_delete_route(self):\n\n delete = {\n 'ip': 'test_ip',\n 'next_hop': 'test_nexthop',\n 'communities': 'test_commu'\n }\n route_id = self.database.add_route(delete)\n self.database.delete_route({'_id': route_id})\n route = self.database.route.find_one({'_id': route_id})\n self.assertEqual(route, None, 'deletion failed')", "def __delete__(self, instance):\n self.session.close()", "def delete(self):\n self._client.delete(self)", "def delete(self):\n\n raise NotImplementedError('Must be implemented by subclasses')", "def do_destroy(self, line):\n list_line = line.split(' ')\n if line == \"\":\n print(\"** class name missing **\")\n elif list_line[0] not in HBNBCommand.classes.keys():\n print(\"** class doesn't exist **\")\n elif len(list_line) < 2:\n print(\"** instance id missing **\")\n elif list_line[0] + '.' + list_line[1] not in \\\n models.storage.all().keys():\n print(\"** no instance found **\")\n else:\n models.storage.all().pop(list_line[0] + '.' + list_line[1], None)\n models.storage.save()", "def delete_associated_subclass(listing):\n if veh_models.Sedan.objects.filter(vehicle=listing.vehicle).exists():\n veh_models.Sedan.objects.filter(vehicle=listing.vehicle).delete()\n elif veh_models.Truck.objects.filter(vehicle=listing.vehicle).exists():\n veh_models.Truck.objects.filter(vehicle=listing.vehicle).delete()\n elif veh_models.Coupe.objects.filter(vehicle=listing.vehicle):\n veh_models.Coupe.objects.filter(vehicle=listing.vehicle).delete()\n elif veh_models.SUV.objects.filter(vehicle=listing.vehicle).exists():\n veh_models.SUV.objects.filter(vehicle=listing.vehicle).delete()", "def help_destroy(self):\n print(\"delete an instance based on the class name and id\")", "def delete(self):\n os.system(\"rm \"+self._name)", "def delete(self, session: Session) -> None:\n session.query(Match).filter_by(id=self.id).delete()", "def delete_room(\n self,\n uuid: str,\n delete_room_input: DeleteRoomInput\n ) -> bool:\n\n # setup query parameters\n parameters = dict()\n parameters[\"uuid\"] = GraphQLParam(uuid, \"UUID\", True)\n parameters[\"input\"] = GraphQLParam(\n delete_room_input,\n \"DeleteLabInput\",\n False\n )\n\n # make the request\n response = self._mutation(\n name=\"deleteRoom\",\n params=parameters,\n fields=None\n )\n\n # response is a bool\n return response", "def test_forum_delete(forum):\n forum.delete()\n\n forum = Forum.query.filter_by(id=forum.id).first()\n\n assert forum is None", "def eliminar_rs(self):\n group = Group.objects.get(name=self.nombre)\n if self.es_utilizado():\n return False\n else:\n group.delete()\n self.delete()\n return True", "def testDelete(self):\n self.assertEqual(SequencingMachine.objects.count(), 1)\n response = self.runDelete(self.root, sequencer=self.hiseq2000.sodar_uuid)\n self.response_204(response)\n self.assertEqual(SequencingMachine.objects.count(), 0)", "def remove(self):\n 
self._switch.odlclient._request(self._path, method=\"delete\")", "def delete(self):\n raise NotImplementedError", "def delete(self):\n\n raise NotImplementedError()", "async def delete(self):\r\n\r\n data = await self.request.json()\r\n system_uuid = data.get(\"sys_id\")\r\n sys_del = System.get(System.uuid == system_uuid)\r\n if not sys_del:\r\n response_obj = {\"status\": \"failed\", \"reason\": \"System not Present\"}\r\n return web.Response(text=str(response_obj), status=500)\r\n try:\r\n sys_del.delete_instance()\r\n logger.info(\"System deleted successfully!!!\")\r\n return web.Response(text=\"Successful\", status=200)\r\n except Exception as ex:\r\n response_obj = {\"status\": \"failed\", \"reason\": str(ex)}\r\n error_message = str(ex)\r\n logger.error(error_message)\r\n return web.Response(text=str(response_obj), status=500)", "def deleteDevice(serial):\n swDB = switchdb.DB()\n swDB.deleteBySerial(serial)\n swDB.close()", "def test_delete_cluster_role(self):\n pass", "def test_delete(self):\n scenario = factories.Scenario(config='', status=Scenario.Status.INACTIVE)\n scenario.delete()\n self.assertEqual(scenario.status, Scenario.Status.INACTIVE)", "def delete(self):\n try:\n db.session.delete(self)\n db.session.commit()\n return True\n except SQLAlchemyError:\n db.session.rollback()\n return False", "def delete(self):\n DATABASE_CONNECTION.delete(self.__class__.__name__, self.id)", "def delete_cluster(self):" ]
[ "0.6959816", "0.6681781", "0.6642126", "0.64679796", "0.6322488", "0.6272679", "0.62258446", "0.62023646", "0.6199112", "0.6172018", "0.6158727", "0.6144896", "0.61320245", "0.60920924", "0.60902715", "0.6075575", "0.6064415", "0.6015804", "0.6012518", "0.5992586", "0.59777194", "0.5955315", "0.5931716", "0.5928342", "0.59209853", "0.59111434", "0.58980626", "0.5897864", "0.58675337", "0.5856675", "0.5838597", "0.58382267", "0.58339655", "0.5824454", "0.58218384", "0.58218384", "0.58218384", "0.58218384", "0.5808379", "0.5807718", "0.5774442", "0.5758021", "0.57507336", "0.57379615", "0.57365274", "0.57326233", "0.57302177", "0.5720941", "0.56943613", "0.5690146", "0.5689385", "0.5688112", "0.56851923", "0.56843704", "0.5681505", "0.5680304", "0.5668761", "0.56485", "0.5645056", "0.5641688", "0.5629321", "0.5629321", "0.5629321", "0.5629321", "0.5629321", "0.5629321", "0.5629321", "0.5629321", "0.5629321", "0.5621702", "0.56197923", "0.5616221", "0.56131107", "0.5603181", "0.5603181", "0.5600093", "0.5593112", "0.55870616", "0.55792785", "0.5573066", "0.55539024", "0.5551166", "0.55427486", "0.5541362", "0.5540884", "0.55380565", "0.5536752", "0.55333906", "0.5532777", "0.55212075", "0.5520027", "0.5513111", "0.550926", "0.5487715", "0.54783934", "0.5474188", "0.54708004", "0.5462531", "0.5459344", "0.54592335" ]
0.7279858
0
Prints a dictionary with classroom details.
def helpClassroom(classroomId):
    selectedClassroomCopy = getClassroomById(classroomId)
    print("Class Id: " + selectedClassroomCopy["classroomId"])
    print("Name: " + selectedClassroomCopy["classroomName"])
    print("Capacity: " + selectedClassroomCopy["capacity"])
    print("Location: " + selectedClassroomCopy["location"])
    return True
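A runnable sketch for the document above. `getClassroomById` is not shown in the record, so the helper below is an assumed stand-in that returns the first matching dict from the same assumed `classroomEntities` list; note the document's function would raise a TypeError if the id is absent, since it subscripts the helper's None result:

classroomEntities = [
    {"classroomId": "CR001", "classroomName": "Physics Lab",
     "capacity": "30", "location": "Block A"},
]

def getClassroomById(classroomId):
    # Assumed helper: return the first entry whose id matches (None if absent).
    for classroom in classroomEntities:
        if classroom["classroomId"] == classroomId:
            return classroom
    return None

def helpClassroom(classroomId):
    selectedClassroomCopy = getClassroomById(classroomId)
    print("Class Id: " + selectedClassroomCopy["classroomId"])
    print("Name: " + selectedClassroomCopy["classroomName"])
    print("Capacity: " + selectedClassroomCopy["capacity"])
    print("Location: " + selectedClassroomCopy["location"])
    return True

helpClassroom("CR001")  # prints the four detail lines and returns True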
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_print_room_data(self, room):\n habitants = self.all_rooms[room]['occupants']\n\n return {'room': room, 'names': habitants}", "def print_room(room):\r\n cls()\r\n print(\"\\n\" + room[\"name\"].upper() + \"\\n\")\r\n wrap_print(room[\"description\"] + print_room_items(room) + print_room_entities(room))", "def __repr__(self):\n return \"Classroom('{}', {}, {})\".format(self.number, self.capacity,\n str(self.equipment))", "def __str__(self):\n print(\"Welcome to our house\")\n for room in self.rooms:\n print(room.name, room.sqr_ft)", "def get_details(self):\n print(self.name)\n print(10 * \"-\" + \"\\n\")\n print(self.description)\n for direction in self.linked_rooms:\n room = self.linked_rooms[direction]\n print(\"The \" + room.get_name() + \" is \" + direction)\n print(\"\\n\")", "def map_room_list():\n for room in map_rooms:\n print(f\"{room}: \")\n for description in map_rooms[room]:\n print(f\"{description} - {map_rooms[room][description]}\")", "def __str__(self):\n return 'Classroom {} has a capacity of {} persons and ' \\\n 'has the following equipment: {}.'.format(\n self.number, str(self.capacity), ', '.join(self.equipment))", "def show_info(self): \n color= Fore.WHITE\n print(f\"\"\" {color} \nNombre: {self.name} \nRuta: {self.route }\nFecha de salida: {self.departure_date}\"\"\")\n print(\"<\"*8, \">\"*8)\n print(\"El precio por habitacion es:\")\n for key, value in self.prize.items():\n color_value= (Fore.GREEN + str(value))\n color_key= Fore.WHITE + \"Habitacion\" + \" \" + key\n print(f\"\"\" {color_key} : {color_value}$ \"\"\")\n \n print(Fore.WHITE + \"<\"*8, \">\"*8)\n for floor, info in self.floors_info.items():\n piso=(Fore.WHITE + floor)\n print(f\" {piso}:{info} \")\n \n \n print(\"<\"*8, \">\"*8)\n print(\"Capacidad por tipo de habitacion: \")\n for key, value in self.room_capacity.items():\n print(f\"Habitacion {key}: {value} personas \",\"\\t\")\n return \"\"", "def show_classes():\n for obj in Classes.get_all_obj_list():\n print('\\033[33;1m[%s] [%s]校区 [%s]班级 学费[%s]\\033[0m'.center(60, '-') \\\n % (obj.school_nid.get_obj_by_uuid().name, obj.school_nid.get_obj_by_uuid().addr, \\\n obj.name, obj.tuition))", "def __repr__(self):\r\n return {'name':self.name, 'weight':self.organ_weight_grams, 'vital organ': self.vital_organ, 'organ system': self.organ_system}", "def printDict(self):\n print str(self)", "def print_occupants(self):\n for num, member in enumerate(self.occupants, start=1):\n print(num, member.name)", "def display(self):\r\n\t\tfor key, value in self.__dict__.items():\r\n\t\t\tprint(key.upper(), value, sep=': ')\r\n\r\n\t\tprint(\"\")", "def __repr__(self):\r\n return {'name':self.name, 'weight':self.organ_weight_grams, 'vital organ': self.vital_organ, 'organ system': self.organ_system, 'brain volume': self.brain_volume}", "def print(self):\n for fiction in self.fictions:\n print(fiction.__dict__)", "def info():\n # -------- Task 1 -------------------------\n # Please complete the following information\n\n return {\"agent name\": \"?\", # COMPLETE HERE\n \"student name\": [\"?\"], # COMPLETE HERE\n \"student number\": [\"?\"]} # COMPLETE HERE", "def show_data(self, ):\r\n return print('society_name : {}\\n'\r\n 'flat : {}\\n'\r\n 'house_no : {}\\n'\r\n 'no_of_members : {}\\n'\r\n 'income : {}\\n '\r\n .format(self.society_name, self.flat, self.house_no, self.no_of_members, self.income))", "def __repr__(self):\r\n return {'name':self.name, 'weight':self.organ_weight_grams, 'vital organ': self.vital_organ, 'organ system': self.organ_system, 'heart 
thickness': self.heart_thickness_cm, 'heart breadth': self.heart_breadth_cm, \"heart length\": self.heart_length_cm}", "def print_room(self, room_name):\n pass", "def printMajors(self):\n import mush\n skills = mush.combatSkills+mush.magicSkills+mush.stealthSkills\n for className, stats in sorted(self.classStats.items()):\n print className,'-------------------------------'\n skillStats = [(key,value) for key,value in stats.items() if key in skills]\n skillStats.sort(key=lambda a: a[1][1],reverse=True)\n for low,high in ((0,5),(5,10)):\n for skill,stat in sorted(skillStats[low:high]):\n print '%-13s %3d' % (skill,stat[1])\n print", "def print_state(self):\n print(self.identifier, \n self.gender, \n self.age,\n self.sexual_activity,\n self.disease_status,\n self.time_since_infection,\n self.number_of_partners,\n self.current_partners)", "def print_details(self):\n print(\"[{}]\".format(self.name))\n print(\"ID: \" + str(self.id))\n print(\"name: %s\" % self.name)\n print(\"URL: %s\" % self.url)\n print(\"CPUs: \" + str(self.cpus) + \" cores\")\n print(\"Mem: \" + self.memory_str)\n print(\"Tasks: \" + str(self.tasks_len))\n print(\"Uptime %s\" + self.uptime)\n print(\"Uptime Descriptive %s\" + self.uptime_descriptive)\n print(\" \")", "def printSchedule():\r\n print(\"{0:^45}\".format(\"Your Schedule:\\n\"))\r\n print(\" Day Class Time\")\r\n if(len(classes) == 0):\r\n print(\"\\nThere are no classes\\n\")\r\n return\r\n for class_ in classes:\r\n print(class_.scheduleString())\r\n print()", "def print_scoreboard(self):\n output = ''\n # parallel dictionaries with innings and scores\n innings = []\n away = []\n home = []\n for x in self:\n innings.append(x['inning'])\n away.append(x['away'])\n home.append(x['home'])\n # go through all the information and make a nice output\n # that looks like a scoreboard\n output += 'Inning\\t'\n for x in innings:\n output += str(x) + ' '\n output += '\\n'\n for x in innings:\n output += '---'\n output += '\\nAway\\t' + self.__enumerate_scoreboard(away)\n output += '\\nHome\\t' + self.__enumerate_scoreboard(home)\n return output", "def print_room_entities(room):\r\n room_entities = room[\"entities\"]\r\n if (len(room_entities) != 0):\r\n entity_descriptions = \"\"\r\n for entity in room_entities.values():\r\n if entity[\"alive\"]:\r\n entity_descriptions += entity[\"summary\"]\r\n else:\r\n entity_descriptions += \"A \" + entity[\"name\"] + \" corpse lies on the ground.\"\r\n return \" \" + entity_descriptions\r\n else:\r\n return \" You are by yourself in here.\"", "def print_class_dict_rough(self):\n for tag in self.post_div.find_all(\"b\"):\n if tag.next_sibling is not None and tag.next_sibling.name == \"br\":\n text = str(tag.text).lower()\n while \" \" in text:\n text = text.replace(\" \", \"-\")\n i = 0\n while i < len(text):\n if not text[i].isalpha() and text[i] != \"-\":\n text = text[:i] + text[i + 1:]\n else:\n i += 1\n if len(text) > 0:\n if tag.find_next(\"a\") is not None:\n link = tag.find_next(\"a\")[\"href\"]\n else:\n link = \"\"\n print(\"\\\"\" + text + \"\\\":\\\"\" + link + \"\\\",\")", "def __str__(self):\n print_info = f\"\\nStudent ID: {self._id}, Name: {self._name}, \" \\\n f\"Year: {self._year} \\nPhone: {str(self._phone)}, \" \\\n f\"Address: {str(self._address)} \" \\\n f\"\\nClasses: {str(self._classes)}\" \\\n f\"\\nBirth Date: {self._date}\"\n return print_info", "def print_league_attributes(league):\n output_str = \"\"\n for team in league:\n output_str += f\"{team}: \" + '\\t'\n for position in league[team]:\n output_str += 
f\"{position}: {league[team][position]} \"\n output_str += \"\\n\"\n print(output_str)", "def __str__(self):\n #{{{ Nicely print of elements in class.\n\n if config.verbose: print \"Stations():\"\n\n for st in self.stachan_cache.keys():\n chans = self.stachan_cache[st].keys()\n print \"\\t%s: %s\" % (st,chans)", "def print_rooms():\n for room_choice in rooms:\n print(f\" - {room_choice.upper()}\")", "def show_all_information(self):\n return self.__dict__\n # print(self.first_name)\n # print(self.last_name)\n # print(self.age)\n # print(self.name)\n # print(self.gender)\n # print(self.number_of_children)", "def __repr__(self):\n dic = self.toDict()\n body = '<gbif.taxonomy.Presences instance: \\n'\n for key,value in dic.iteritems():\n c = 'key %s : %i \\n' %(key,int(value))\n body += c\n body += '\\>'\n return body", "def pprint(self):\r\n for i in self.items():\r\n print '%s => %r'%i", "def display(self):\n logging.info(\"Display Carte : {}\".format(self.name))\n for row in self.map:\n #print(row)\n for cell in row:\n print(cell, end = \"\")\n print(\"\")", "def format_room(room):\n new = {}\n new[\"name\"] = \"Room\"\n new[\"info\"] = {}\n for key in room:\n if key != \"containedSpaces\" and key != \"topLevelSpace\" and key != \"parentSpace\" and key != 'description' and key != 'id' and key!= 'type':\n new[\"info\"][key] = room[key]\n return new", "def print_car(car):\n for key, value in car.items():\n print(f\"{key}: {value}\")", "def get_info(self):\n if self.own_home:\n return print(f'Hi! My name is {self.name}, I\\'m {self.age}. Currently I have {self.own_home} house')\n return print(f'Hi! My name is {self.name}, I\\'m {self.age}. I don\\'t have any home now!')", "def details(self) -> str:\n return f\"- **language**: [{self.language}]\\n\" \\\n f\"- **opengame**: [{self.opengame}]\\n\" \\\n f\"- **system**: [{self.system}]\\n\" \\\n f\"- **mode**: [{self.mode}]\\n\" \\\n f\"- **attributes**: [{self.attributes}]\\n \" \\\n f\"- **score_threshold**: [{self.score_threshold}]\\n \" \\\n f\"- **monsters**: [{self.monsters}]\\n\"", "def print_state(self):\n print(self.type,\n self.persons[0].identifier,\n self.persons[1].identifier)", "def print_dict(self):\n print(self.__dict__)", "def printResults(contact_map):\n print(\"----\")\n for participant in contact_map.values():\n print participant.getName()\n print \"Messages: \", participant.getMessageCount()\n print \"Words: \", participant.getWordCount()\n print \"Avg Words: \", participant.avgWords()\n print \"Messages initiaited: \", participant.getFirstMessageCount()\n print \"Hourly count: \", participant.getHourlyMessageCount()\n print \"Daily count: \", participant.getDailyMessageCount()\n print \"Monthly count: \", participant.getMonthlyMessageCount()\n print \"Most common word: \", participant.getMostCommonWord()\n print \"----\"", "def print_room(room):\r\n\r\n for row in room:\r\n for cell in row:\r\n if cell == \"obstacle\":\r\n print(\"O\", end=\"\")\r\n elif cell == \"robot\":\r\n print(\"R\", end=\"\")\r\n elif cell == \"empty\":\r\n print(\" \", end=\"\")\r\n elif cell == \"dirt\":\r\n print(\"*\", end=\"\")\r\n\r\n print()", "def printResults(self):\n for key in self.mDict.keys():\n print ('for {:d}, entries = {:d} and exits = {:d}'.format (key, self.mDict.get(key).get ('entries'), self.mDict.get(key).get ('exits')))", "def __str__(self):\n out = \"\"\n for key, val in self.parking_license_pairs.items():\n out += 'Parking {}: {}\\n'.format(key, val)\n\n return out", "def __str__(self):\n\t\t\n\t\tstring = \"{Jokers: 
\"\n\t\tfor card in self.jokers:\n\t\t\tstring += str(card)+\", \"\n\n\t\tstring += \"}\"\n\n\t\tfor i in range(len(self.grps)):\n\t\t\tstring += \", {group \"+str(i+1)+\": \"\n\t\t\tfor card in self.grps[i]:\n\t\t\t\tstring += str(card)+\", \"\n\t\t\tstring += \"}\"\n\t\treturn string", "def course_info(self):\n print(\"Course name: {}\".format(self._course_name))\n print(\"Lead teacher: {}\".format(self._teacher))\n\n if len(self._students) == 0:\n print(\"Course does not enrolled by any student\")\n else:\n print(\"Enrolled: {}/{}\".format(len(self._students), self._total_place))", "def view_map():\n print(\"\"\"\n ____________________________________Client Rooms______________________\n| |1 Locker Rooms 2| 1 | 2 | |\n| |_________ ________| | | Dance |\n| | | |__| |__| Floor |\n| | | Hall |\n| Garage | Front | _______|_______ |\n| | Lobby | | |_ ____________|\n| | | | Storage |\n| | | Lounge |______________|\n| ______________ Car\n|___________________Front Entrance______________________| Allyway\n\"\"\")", "def printDetails(self):\n print str(self.number) + \": \" + self.title\n print \"URL: \" + self.URL\n print \"domain: \" + self.domain\n print \"score: \" + str(self.score) + \" points\"\n print \"submitted by: \" + self.submitter\n print \"# of comments: \" + str(self.commentCount)\n print \"'discuss' URL: \" + self.commentsURL\n print \"HN ID: \" + str(self.id)\n print \" \"", "def get_info(self):\n out = ''\n for k in sorted(self.components.keys()):\n out += '{:s}: {:s}'.format(k, self.info[k]) + '\\n'\n return(out)", "def printStructureChatbotDict(self):\n if self.dictChatBots == {}:\n self.output.exec('No hay chatbots creados.')\n else:\n result = \", \".join(str(value.name) for key, value in self.dictChatBots.items()) # une los nombres de los chatbots\n self.output.exec('Los chatbot creados son: '+ result)", "def showInfo(self):\n print(\"dotAstro ID: \" + str(self.id) +\n \"Num LCs: \" + str(len(self.lcs)))", "def print_loc_acrnym():\n\n #Method2\n val = College.objects.values('acronym','contact')\n for i in val:\n print(i['acronym'],i['contact'])", "def affichage(carnet: List[Dict]) -> None:\n for membre in carnet:\n for cle in membre.keys():\n print(\"{} : {}\".format(cle, membre[cle]), end=\"\\t\\t\")\n print(\"\\n\")", "def info(self):\n return (f\"Match id: {self._id}\\n\"\n f\"dire_score: {self.dire_score}\\n\"\n f\"dire_team: {self.dire_team}\\n\"\n f\"duration: {self.duration}\\n\"\n f\"game_mode: {self.game_mode}\\n\"\n f\"patch: {self.patch}\\n\"\n f\"radiant_score: {self.radiant_score}\\n\"\n f\"radiant_team: {self.radiant_team}\\n\"\n f\"radiant_win: {self.radiant_win}\\n\"\n f\"skill: {self.skill}\\n\"\n f\"start_time: {self.start_time}\\n\")", "def show_map(self):\n print(self.__str__())", "def print_job_classes_info(self, class_list, show_jobs_flag=False):\n\n job_classes_dict = {}\n for job in self.jobs:\n\n classes_job_belongs_to = job.get_class_name(class_list)\n\n # print job-class info only if\n if show_jobs_flag:\n print(\"job name: {}\".format(job.label))\n print(\"-----> belongs to classes: {}\".format(classes_job_belongs_to))\n\n for job_class_name in classes_job_belongs_to:\n job_classes_dict.setdefault(job_class_name, []).append(job)\n\n print(\"============ SIM: {} ===============\".format(self.name))\n\n total_jobs_in_classes = 0\n for k,v in job_classes_dict.items():\n print(\"CLASS: {}, contains {} jobs\".format(k, len(v)))\n total_jobs_in_classes += len(v)\n\n print(\"total n jobs {}\".format(len(self.jobs)))\n print(\"total n in classes 
{}\".format(total_jobs_in_classes))", "def json(self):\n return {'name': self.neighbourhood_group, 'neighbourhood': self.room_type}", "def print_room(dungeon, room):\n if not isinstance(room, str):\n name = get_room_name(dungeon, room)\n else:\n name = room\n room = dungeon[name]\n w, h = room_wh(room)\n room_str = ''\n for y in range(h):\n row = ''\n for x in range(w):\n #print room\n tile = room[x][y]\n if isinstance(tile, list):\n row = row + room[x][y][0]\n else:\n row = row + room[x][y]\n #print row\n room_str = room_str + '\\n' + row\n room_str = room_str + '\\n\\n'\n #clear()\n #print \"Room: \", name\n #print \"Hp: \", player['hp']\n #print room\n #stdout.write(\"Room: %s\" % name)\n #stdout.write(\"%s\" % room_str)\n #stdout.flush()\n print room_str", "def print_state(self):\n print(\"n\\tg\\to\\ta\\tc\\ts\\ttau\\td\\tN\")\n for p in self.persons:\n p.print_state()\n print(\"type\\tpersons\")\n for ps in self.partnerships:\n ps.print_state()", "def print(self):\n tiles = list(map(list, zip(*self.tiles))) # transposed\n print('tiles = [')\n for row in tiles:\n print('\\t' + repr(row))\n print(']')\n print('props = [')\n for prop in self.props:\n print('\\t' + repr(prop))\n print(']')", "def room_wall(self):\n print(f\"You are in {self.room}.\")\n print(\"Which wall would you like to look at?\")\n for key, walls in wall.items():\n print(f\" - {key}: {walls}\")", "def club_info(self, cid):\r\n headers = {\"Content-type\": \"application/x-www-form-urlencoded\", \"Accept\": \"text/plain\",\r\n 'Referer': 'http://' + self.domain + '/', \"User-Agent\": user_agent}\r\n req = self.session.get('http://' + self.domain + '/clubInfo.phtml?cid=' + cid, headers=headers).content\r\n soup = BeautifulSoup(req, \"html.parser\")\r\n plist = list()\r\n for i in soup.find('table', cellpadding=2).find_all('tr')[1:]:\r\n plist.append('%s\\t%s\\t%s\\t%s\\t%s' % (\r\n i.find_all('td')[0].text, i.find_all('td')[1].text, i.find_all('td')[2].text, i.find_all('td')[3].text,\r\n i.find_all('td')[4].text))\r\n return soup.title.text, plist", "def print_people_strategies():\n\t\tfor person in sorted(Simulation.community):\n\t\t\tSimulation.community[person].print_info()\n\t\tPerson.person_progression.write(\"--------------- END OF WEEK ---------------\" + \"\\n\")", "def __str__(self):\r\n\r\n for att in self.__dict__:\r\n print(\"%s: %r\" % (att, getattr(self, att)))\r\n\r\n return \"Planet Population class object attributes\"", "async def roominfo(self, ctx: Message):\n\t\tawait self.send(\n\t\t f\"Name: {self.room.name} • Description: {self.room.description} • ID: {self.room.id} • Member Count: {self.room.count} • Created at: {self.room.created_at} • Is Private?: {self.room.is_private}\"\n\t\t)", "def __str__(self):\n return f\"{self.semester} | {self.school} | {self.position} | {self.class_name}\"", "def __str__(self):\n return_string = self.name + \"\\n\" + str(self.traits)\n\n return return_string", "def print_arch(self):\n \n print(\"\\rAgent [#{}]\".format(self.agent_id), end=\"\\n\\n\")\n \n print(\"\\rActor (Local):\")\n print(self.actor_local)\n \n print(\"\\rActor (Target):\")\n print(self.actor_target)\n \n print(\"\\rCritic (Local):\")\n print(self.critic_local)\n \n print(\"\\rCritic (Target):\")\n print(self.critic_target)\n \n if self.agent_id != NUM_AGENTS:\n print(\"\\r_______________________________________________________________\", end=\"\\n\\n\")", "def __str__(self):\n output_dict = {\n 'first_name': self.first_name,\n 'last_name': self.last_name,\n 'year': self.year,\n 'major': 
self.major,\n 'academic_interests': self.academic_interests,\n 'post_grad_goal': self.post_grad_goal,\n 'software_experience': self.software_experience,\n 'hobbies': self.hobbies\n }\n return str(output_dict)", "def pprint(self):\n # just here for defining the interface; work is done in subclasses\n pass", "def print_info(self):\n\n n_metabolites = len(self.metabolites)\n n_reactions = len(self.reactions)\n n_constraints = len(self.constraints)\n n_variables = len(self.variables)\n\n info = pd.DataFrame(columns=['value'])\n info.loc['name'] = self.name\n info.loc['description'] = self.description\n info.loc['num constraints'] = n_constraints\n info.loc['num variables'] = n_variables\n info.loc['num metabolites'] = n_metabolites\n info.loc['num reactions'] = n_reactions\n info.index.name = 'key'\n\n print(info)", "def print_info(c, timestamp):\r\n print(f\"\\n[{timestamp}] [{id(c)}] [Fitness: {c.fitness()}]\\n \" +\r\n f\"Age: {c.age} seconds, F.Eaten: {c.food_eaten}, P.Eaten: {c.poison_eaten}\\n\" +\r\n f\"currHP: {c.health}, Gen: {c.gen}, Childs: {c.childs}\\n\" +\r\n f\"DNA: {c.dna}\\n\" +\r\n f\"FoodAttr: {c.food_attraction}, PoisonAttr: {c.poison_attraction}\\n\" +\r\n f\"FoodDist: {c.food_dist}, PoisonDist: {c.poison_dist}\\n\" +\r\n f\"MaxHealth: {c.max_health}, MaxVel: {c.max_vel}, Size: {c.size}\\n\" +\r\n f\"MaxSteer: {c.max_steer_force}, DirAngleMult: {c.dir_angle_mult}\\n\")", "def print_parking(type):\r\n if (type=='Regular' or type=='Priority' or type=='VIP'):\r\n tempList=[]\r\n for i in carsSeq:\r\n if i[1]==type:\r\n tempList.append(i)\r\n for i in range(len(tempList)):\r\n if i==len(tempList)-1:\r\n print(\"car: {0}, parking time: {1}\".format(tempList[i][0],tempList[i][2])) \r\n else:\r\n print(\"Unknown parking lot type\")", "def info(self) -> dict:", "def print_pairing_info(melon_types):\n\n # melon_types is the list of class instances\n # melon is ONE of the class instance \n for melon in melon_types:\n # getting the each instance and it's instance pairing list\n print(f\"{melon.name} pairs with: \\n - {melon.pairings}\")", "def printClassifier(self):\n classifier_info = \"\"\n for att in range(cons.env.format_data.numb_attributes):\n att_info = cons.env.format_data.attribute_info[att]\n if att in self.specified_attributes: #If the attribute was specified in the rule\n i = self.specified_attributes.index(att)\n #-------------------------------------------------------\n # CONTINUOUS ATTRIBUTE\n #-------------------------------------------------------\n if att_info[0]:\n classifier_info += str(self.condition[i][0])+';'+str(self.condition[i][1]) + \"\\t\"\n #-------------------------------------------------------\n # DISCRETE ATTRIBUTE\n #-------------------------------------------------------\n else:\n classifier_info += str(self.condition[i]) + \"\\t\"\n else: # Attribute is wild.\n classifier_info += '#' + \"\\t\"\n #-------------------------------------------------------------------------------\n specificity = len(self.condition) / float(cons.env.format_data.numb_attributes)\n\n if cons.env.format_data.discrete_action:\n classifier_info += str(self.action)+\"\\t\"\n else:\n classifier_info += str(self.action[0])+';'+str(self.action[1])+\"\\t\"\n #------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\n classifier_info += 
'{:.1f}'.format(self.prediction)+\"\\t\"+'{:.2f}'.format(self.error)+\"\\t\"+'{:.2f}'.format(self.fitness)+\"\\t\"+str(self.numerosity)+\"\\t\"+str(self.ga_count)+\"\\t\"\n classifier_info += '{:.1f}'.format(self.mean_actionset_sz)+\"\\t\\t\"+str(self.ga_timestamp)+\"\\t\\t\"+str(self.init_timestamp)+\"\\t\\t\"+'{:.2f}'.format(specificity)+\"\\t\\t\"\n classifier_info += '{:.1f}'.format(self.delete_vote)+\"\\t\\t\"+str(self.action_cnt)+\"\\n\"\n\n #------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\n return classifier_info", "def printsection(section):\n print('===')\n for key in section.keys():\n print(\"Key: %s\" % key)\n for item in section[key]:\n print(' %s' % item)", "def print_out_dict(word_dict):\n for word_class in WORD_CLASSES:\n \n last_word = \"\"\n if(word_class in word_dict):\n print(f\"{word_class}:\")\n for word in sorted(word_dict[word_class]):\n if(word != last_word):\n spacing = \" \"*(20-len(word))\n print(spacing+word)\n last_word = word", "def __str__(self):\n return \"{\" + (\", \".join(\"%s: %s\"%(ngram, value) for (ngram, value) in self.items())) + \"}\"", "def __str__(self):\n return '{0}: {1} \"{2}\" {3}'.format(\n self.race, self.name, self.nick, self.surname)", "def __str__(self):\n return \"CLA obj [\" + self.marathonURL + \", \" + self.appid + \", \" + str(self.totalports) + \"]\"", "def print_individuals(self):\n pt = PrettyTable()\n pt.field_names = ['ID', 'Name', 'Gender', 'Birthday', 'Age', 'Alive', 'Death', 'Child', 'Spouse']\n for i in self.individuals.values():\n pt.add_row(i.get_values())\n print(pt)", "def __str__(self):\n return self.room_name", "def print_player_info(self):\n\t\tclear_screen()\n\n\t\tprint(\"# PLAYER INFO #\\n\")\n\t\tprint(\"Name{:.>17} \".format(self.info['Name']))\n\t\tprint(\"Race{:.>17} \".format(self.info['Race']))\n\t\tprint(\"Level{:.>16} \".format(self.stats['Level']))\n\t\tprint(\"Hit Points{:.>11} \".format(self.stats['HP']))\n\t\tprint(\"Gold Pieces{:.>10} \".format(self.stats['GOLD']))\n\t\n\t\tpress_enter()", "def display(self):\n # type: ()->None\n print('============')\n for key, value in self._ifAttributes.items():\n if isinstance(value, list):\n print(key + ': ')\n for item in value:\n print('\\t' + item)\n elif isinstance(value, dict):\n print(key + ': ')\n for item in value.keys():\n print('\\t' + item + ': ' + value[item])\n else:\n print(key + ': ' + str(value))\n print('============')", "def print_dd_dict( self, ):\n print( self._dd_dict )", "def print_problem(self):\n print('\\n*****************')\n print('PROBLEM: ' + self.problem)\n print('OBJECTS: ' + str(self.objects))\n print('INIT: ' + str(self.init))\n print('GOAL: ' + str(self.goal))\n print('AGENTS: ' + str(self.agents))\n print('****************')", "def __repr__(self):\n out = ''\n out += f'\\nPlayer {self.number}: {self.name}\\n'\n\n # checks for trail options before printing.\n if len(self.trail_options) > 0:\n out += f'\\nTrail Options:\\n'\n for item in self.trail_options:\n out += f' {item}'\n else:\n out += f'\\nSadly, {self.name} is out of trail options.\\n'\n\n # checks for supplies before printing.\n if len(self.supplies) > 0:\n out += f'\\nSupplies:\\n'\n for item in self.supplies:\n out += f' {item[0]}\\n'\n else:\n out += f'\\nSadly, {self.name} is out of supplies.\\n'\n\n return out", "def display_info(club_data):\n\n for item in club_data:\n if \":\" in item:\n print(f\" {item}\")\n else:\n 
print(f\"\\nCLUB NAME: {item}\")", "def print_schedule():\n clear_screen()\n print(\"====Current Schedule====\")\n days = ['sun', 'mon', 'tues', 'wed', 'thurs', 'fri', 'sat']\n with open('current_courses.json', 'r') as current_file:\n schedule = json.load(current_file)\n for day in days:\n for val, val2 in schedule.items():\n if day in val2[0]:\n print(day, val, str(val2[1])+'-'+str(val2[2])+\" Presumed Grade: \"+ val2[3])\n return 0", "def __str__(self):\n\n # Seven tag roster list\n strl = [\"Event\",\"Site\",\"Date\",\"Round\",\"White\",\"Black\",\"Result\"]\n out = \"\"\n\n # We first print in order of STR, then any others\n for k in strl:\n if k in self.keys():\n out += '[{} \"{}\"]\\n'.format(k, self[k])\n\n for k in self.keys():\n if k not in strl:\n out += '[{} \"{}\"]\\n'.format(k, self[k])\n\n # If there are no tag pairs, the extra newline is not needed\n if out:\n out += \"\\n\"\n return out", "def Print(self):\n print(self.__dict__)", "def print_agent(agent):\n agent_string = \"FOUND:\\n\"\n for key in place_detail_keys:\n agent_string += \"\\t%s: %s\\n\" % (key, agent[key])\n log.debug(agent_string)", "def show_priveleges(self):\n print(\"This user:\")\n for privelege in self.priveleges:\n print(privelege)", "def game_info(tytuł, gatunek, **cechy):\r\n\tgra = {}\r\n\tgra['Tytuł'] = tytuł\r\n\tgra['Gatunek'] = gatunek\r\n\t\r\n\tfor k, v in cechy.items():\r\n\t\tgra[k] = v\r\n\t\r\n\treturn gra", "def print_item(group):\n print(\"\\tName: {}\".format(group.name))\n print(\"\\tId: {}\".format(group.id))\n print(\"\\tLocation: {}\".format(group.location))\n print(\"\\tTags: {}\".format(group.tags))\n if hasattr(group, 'status'):\n print(\"\\tStatus: {}\".format(group.status))\n if hasattr(group, 'state'): # Site\n print(\"\\tStatus: {}\".format(group.state))\n if hasattr(group, 'properties'):\n print_properties(group.properties)\n print(\"\\n\\n\")", "def _player_info(self):\n return \"%r %s seat:%s m:%r c:%s b:%s \" % (self.name, self.serial, self.seat, self.money, self._chips, self._bet)", "def __repr__(self) -> str:\n # start the textual representation off with a newline\n weekly_schedule = \"\\n\"\n # iterate through each day of the week in the schedule\n for day in self.schedule.keys():\n # add the name of the current day of the week\n weekly_schedule += day + \":\\n\\n\\t\"\n # access the schedule for the current day of the week\n schedule_for_day = self.schedule[day]\n # add all of the classes of the current day of the week\n weekly_schedule += \"\\n\\t\".join(map(str, schedule_for_day))\n # for course in schedule_for_day:\n # weekly_schedule += str(course) + \"\\n\"\n # if not processing the last day of the week, add two newlines\n if day != list(self.schedule.keys())[-1]:\n weekly_schedule += \"\\n\\n\"\n # if processing the last day of the week, add one newline\n else:\n weekly_schedule += \"\\n\"\n # return a string that displays all courses taught in a week\n return weekly_schedule", "def _print_results(results, title=''):\n pstr = '[' + title + ']: ' if title else ''\n for k, v in results.items():\n pstr += '\\t{}: {}'.format(k, v)\n print(pstr)", "def info(self):\n self.update_info()\n print('Number of electrodes: ' + str(self.n_elecs))\n print('Recording time in seconds: ' + str(self.dur))\n print('Sample Rate in Hz: '+ str(self.sample_rate))\n print('Number of sessions: ' + str(self.n_sessions))\n print('Date created: ' + str(self.date_created))\n print('Meta data: ' + str(self.meta))" ]
[ "0.63964313", "0.63553905", "0.6345478", "0.6339327", "0.62925047", "0.62898177", "0.6135573", "0.6135079", "0.6008951", "0.5964196", "0.58336455", "0.5817201", "0.5811558", "0.576086", "0.57605565", "0.5755586", "0.57405245", "0.5738328", "0.57352346", "0.57244223", "0.5717494", "0.5688538", "0.567821", "0.5660881", "0.5656806", "0.5641036", "0.5625916", "0.5624289", "0.56238544", "0.5621007", "0.56177616", "0.5611888", "0.56095535", "0.5607088", "0.5580512", "0.5562094", "0.55478376", "0.5543433", "0.5542418", "0.55242604", "0.5508665", "0.55075705", "0.5506912", "0.54842407", "0.5482432", "0.54795957", "0.54749584", "0.54595894", "0.5459532", "0.54521143", "0.5446798", "0.54458445", "0.54395914", "0.5429603", "0.54254735", "0.5413626", "0.5393518", "0.53904986", "0.53822756", "0.53793013", "0.5372805", "0.5370309", "0.53665507", "0.5360685", "0.5352967", "0.5344061", "0.5343022", "0.53419894", "0.5340219", "0.53397954", "0.53296846", "0.53293854", "0.53237885", "0.53165287", "0.53090805", "0.53012186", "0.52985287", "0.52907896", "0.5289371", "0.52855337", "0.5282206", "0.5279192", "0.5274084", "0.527327", "0.527005", "0.52603316", "0.5252593", "0.52387756", "0.5238149", "0.5233721", "0.5226368", "0.52252144", "0.5220643", "0.521067", "0.5208772", "0.5206696", "0.5206215", "0.51975226", "0.51967967", "0.519641" ]
0.56665343
23
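Each record in this dump pairs a query with one positive document and a list of mined negatives, followed by the negatives' scores, the positive's score, and its rank. Below is a minimal sketch of unpacking one parsed record into training triplets under the metadata's triplet objective; the record dict and the iter_triplets name are hypothetical stand-ins, not part of the dataset itself:

def iter_triplets(record):
    # metadata.objective.triplet is [["query", "document", "negatives"]],
    # i.e. one (query, positive, negative) triplet per mined negative.
    query = record["query"]
    positive = record["document"]
    for negative, score in zip(record["negatives"], record["negative_scores"]):
        # scores are serialized as strings in this dump, e.g. "0.63964313"
        yield query, positive, negative, float(score)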
saves classroomEntities in the ClassRoomData file
def saveClassroomData():
    with open("ClassRoomData.txt", "wb") as classroomData:
        pickle.dump(classroomEntities, classroomData)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save(cls):\n playerdata = getAttributes(cls)\n Data.object_dump(playerdata, \"savedata.dat\")\n del playerdata", "def class_to_db(self):", "def saveTeachersData():\n with open(\"TeacherData.txt\",\"wb\") as teacherData:\n pickle.dump(teacherEntities,teacherData)", "def save():", "def persist(self):\n pass", "def save_data(self):\n db.session.add(self)\n db.session.commit( )", "def save(self):\n # TODO (Pierre): code", "def save_data(self):\n pass", "def addClassroom(classroomName, capacity,location):\n for classroom in classroomEntities:\n if classroom[\"classroomName\"] == classroomName:\n print(\"Two classrooms can not have same name\")\n return False\n\n if classroomEntities==[]:\n lastSavedIdNumber = \"0\"\n else:\n lastSavedId=classroomEntities[-1][\"classroomId\"] #update classroomId as first element in classroomEntities list\n lastSavedIdNumber=lastSavedId[2:]\n numberOfDigitsInID = 3\n if lastSavedIdNumber == \"9\" * len(lastSavedIdNumber):\n numberOfDigitsInID = len(lastSavedIdNumber) + 1\n classroomId=\"CR\"+str(int(lastSavedIdNumber)+1).rjust(numberOfDigitsInID,\"0\")\n\n # add the new Classroom\n newClassroom = {}\n newClassroom[\"classroomId\"] = classroomId\n newClassroom[\"classroomName\"] = classroomName\n newClassroom[\"capacity\"] = capacity\n newClassroom[\"location\"] = location\n classroomEntities.append(newClassroom)\n print(f\"Class Room is added into the system, Class Room id is {classroomId}.\")\n return True", "def save(self):\n\n pass", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save (self):\n pass", "def save(self):\n with open(self.__file_path, \"w\", encoding=\"UTF-8\") as file:\n parsed_dict = {\n key: value.to_dict()\n for key, value in self.__objects.items()\n }\n save_data(parsed_dict, file)", "def save_class(self):\n with open(self.savefile, \"w\") as f:\n data = {\"name\": self.name, \"host\": self.host, \"port\": self.port}\n json.dump(data, f)", "def save_file(self):\n # paginate over deputies and senators getting their fields\n fieldnames = set([])\n congressmen = self.deputies + self.senators\n for data in congressmen:\n fieldnames = fieldnames.union(data.dump().keys())\n\n\n with open(IDENTITY_FILE_UPDATED, 'a') as csvfile:\n writer = csv.DictWriter(csvfile, fieldnames=list(fieldnames), delimiter=';')\n writer.writeheader()\n\n for data in congressmen:\n writer.writerow(data.dump())", "def saveData(self):\n pass", "def persist(self, file_name, model_dir):\n pass", "def save_db(self) -> None:", "def save(self):\n raise NotImplementedError()", "def save(self):\n joblib.dump(\n self.classifier, \"data/models/badlymappedfinder/badlymappedfinder.joblib\",\n )", "def save():\n pass", "def persist(self, file_name, model_dir):\n\n pass", "def save(self):\n data = (\n self.Joints,\n self.Links,\n self.joint_syms,\n self.global_syms,\n self.name,\n self.sym_prefix,\n )\n cloudpickle.dump(data, open(self.save_filename, \"wb\"))", "def save(self):\n joblib.dump(\n self.classifier, \"data/models/repeatsfinder/repeatsfinder.joblib\",\n )", "def save(self):\n self.lock.acquire()\n try:\n self.xml.set(\"name\",self.name)\n self.xml.set(\"room\",self.room)\n self.xml.set(\"type\",self.type)\n self.xml.find(\"address\").text = \":\".join([str(x) for x in self.address])\n if self.pos is not None:\n self.xml.find(\"pos\").text = \" 
\".join([str(x) for x in self.pos])\n self.xml.find(\"icon\").text = self.icon\n \n finally:\n self.lock.release()\n \n self.house.save_devices()", "def _save(self):\n self.logger.debug(\"Saving to persistence\")\n try:\n data = self.persistence_serialize()\n except NotImplementedError:\n # allow backwards compatibility or persisted_values way\n # generate item to be persisted by gathering all variables\n # to be persisted into a dictionary\n data = {persisted_var: getattr(self, persisted_var)\n for persisted_var in self.persisted_values()}\n\n # save generated dictionary under block's id\n self._persistence.save(data, self.id())", "def persist(self) -> None:\n logger.info('Generating or Updating meta data file {}'.format(self.file_path))\n with open(self.file_path, 'w', encoding='utf-8') as meta_file:\n meta_file.write(json.dumps(self, default=lambda value: value.__dict__))", "def save(self) -> None:\n pass", "def save(self) -> None:\n pass", "def save(self) -> None:\n pass", "def do_save(self):\n id_client = json.loads(request.data.decode('UTF-7'))['id']\n g_list_of_classifier.save_in_file(CLASSIFIER_PATH + str(id_client) + \n '.cls')\n return ''", "def save(self):\n store = datastore.DataStore()\n store.connect()\n store.setup()\n store.put(self.as_doc())", "def save_annotations(self):\n r = requests.get(\n f'{self.api_host}/v1/entity-annotations?'\n f'annotation_type=Source reliability (binary)&size=100',\n headers=self.get_request_headers()\n )\n\n entity_annotations = r.json().get('entity_annotations')\n\n for annotation in entity_annotations:\n annotation_id = annotation.get('entity_id')\n with open(\n f'{self.data_folder}/annotations/{annotation_id}.json',\n 'w'\n ) as f:\n json.dump(annotation, f)", "def save(self, force_insert=False, force_update=False, using=None,\n\t\t\t update_fields=None):\n\t\tif (self.capacity - self.occupied_sits) < 0:\n\t\t\traise ValueError(\"all sits in this classroom are occupied try other classes\")\n\t\telse:\n\t\t\tsuper(ClassRoom, self).save()", "def save(self):\r\n # os.mkdirs(DATADIR, exist_ok=True)\r\n savefile = os.path.join(wg.DATADIR, str(self.guild.id) + \".json\")\r\n\r\n savedata = {\r\n 'userchars': {id:self.usercharacters[id].to_data() for id in self.usercharacters},\r\n 'guildid': self.guild.id,\r\n 'last_known_name': self.guild.name,\r\n }\r\n\r\n with tempfile.NamedTemporaryFile(mode=\"w\", dir=wg.DATADIR) as outf:\r\n json.dump(savedata, outf, indent=1)\r\n if os.path.exists(savefile):\r\n os.unlink(savefile)\r\n os.link(outf.name, savefile)\r\n\r\n wg.log.info(f'Guild {debug_id(guild=self.guild)} saved. 
'\r\n f'{len(self.usercharacters)} user chars and {len(self.npcs)} npcs.')\r\n\r\n pass", "def save(self, data):\n self.write(data)", "def save(self):\n self.session.commit()", "def save(self, path):\n save(self.actor_net.state_dict(), path + '_actor.pkl')\n save(self.critic_net.state_dict(), path + '_critic.pkl')", "def save(self) -> None:\n self.save_metadata()\n self.save_files()", "def store(self) -> None:\n # Store the centroids\n if self._centroids != {}:\n with open(self._path_model / f\"{self}\", 'w') as file:\n json.dump({k: v.tolist() for k, v in self._centroids.items()}, file, sort_keys=True)\n else:\n print(\"No centroids created yet to store!\")\n \n # Store the (validation) clusters\n with open(self._path_data / f\"{self}-train\", 'w') as file:\n json.dump(self._clusters, file, indent=2, sort_keys=True)\n with open(self._path_data / f\"{self}-val\", 'w') as file:\n json.dump(self._clusters_val, file, indent=2, sort_keys=True)", "def write(self):\n db_handle = open(settings.DATA_PATH, 'wb')\n cPickle.dump(dict(self), db_handle)\n db_handle.close()", "def save(self):\n\n # TODO:Find place to save data, write logic to save images(Filter out video?)", "def save(self):\n # TODO: save the file", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='O'\n )", "def _save(self):\n with open(self.file_path, 'w') as fid:\n json.dump(self.data, fid, indent=4, sort_keys=True)", "def save(self):\n self.rpc.call(MsfRpcMethod.CoreSave)", "def save(self):\n data = self.serialize()\n\n self.validate(data)\n\n saved_data = DATABASE_CONNECTION.insert(self.__class__.__name__, data)\n\n self.__dict__.update(saved_data)", "def save(self):\n\n toStore = {\n key: obj.to_dict()\n for key, obj in FileStorage.__objects.items()\n }\n with open(FileStorage.__file_path, 'wt') as file:\n json.dump(toStore, file)", "def save(self):\n\n self.__session.commit()", "def save(self):\n\n self.__session.commit()", "def save(self):\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n # Ensure store path exists\n store_path = self.manager.store_path\n if not os.path.exists(store_path):\n os.makedirs(store_path)\n \n # Get filepath\n filename = self._filename\n \n # Write into file\n raw = self.to_json()\n self.service.log.store('Saving %s' % filename)\n f = open(filename, 'w')\n f.write(raw)\n f.close()", "def save(self):\n with open(self.file, 'w', encoding='utf-8') as self.contacts_file:\n self.contacts_file.seek(0)\n for line in self.contacts:\n self.contacts_file.write(\",\".join(line))\n self.contacts_file.write(\"\\n\")\n self.contacts_file.truncate()\n self.contacts_file.close()", "def save(self):\r\n db.session.add(self)\r\n db.session.commit()", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='L'\n )", "def save(self):\n if PYTHON3:\n fileobj = open(self.filename, 'w', encoding=self.ENCODING, errors=\"replace\")\n else:\n fileobj = open(self.filename, 'w')\n self.save_to_fileobj(fileobj)\n fileobj.close()", "def save(self, fname):\n pass", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", 
"def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save_world(self):\n pass", "def save(self):\n \n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n self.db.commit()", "def save(self):\n if self.loaded:\n list_embeddingNames = [self.embeddings.vsm_name, self.synset_embeddings.vsm_name, self.imagined_embeddings.vsm_name]\n full_file_name = self.resource_manager.get_multimodal_dataset(self.corpus, list_embeddingNames)\n logging.info('Saving dataset to [%s]', full_file_name)\n with lzma.open(full_file_name, 'wb') as f:\n pickle.dump(self, f)\n else:\n logging.error('Dataset not loaded, call \"build\" method first!')", "def save(self):\n with open(self.fkit.path) as fp:\n for processor in self._field.processors:\n fp = processor(fp)\n storage = FileStorage(fp)\n storage.filename = self.get_filename()\n self.uset.save(storage, folder=self.folder, name=self.get_filename())", "def save(self,outPath=None):\n if (not self.canSave): raise StateError(_(\"Insufficient data to write file.\"))\n if not outPath:\n fileInfo = self.fileInfo\n outPath = os.path.join(fileInfo.dir,fileInfo.name)\n out = file(outPath,'wb')\n #--Tes3 Record\n self.tes3.setChanged()\n self.tes3.hedr.setChanged()\n self.tes3.hedr.numRecords = len(self.records) #--numRecords AFTER TES3 record\n self.tes3.getSize()\n self.tes3.dump(out)\n #--Other Records\n for record in self.records:\n record.getSize()\n record.dump(out)\n out.close()", "def save(self):\n #--Data file exists?\n filePath = self.path\n if os.path.exists(filePath):\n ins = open(filePath)\n outData = compat.uncpickle(ins)\n ins.close()\n #--Delete some data?\n for key in self.deleted:\n if key in outData:\n del outData[key]\n else:\n outData = {}\n #--Write touched data\n for key in self.changed:\n outData[key] = self.data[key]\n #--Pickle it\n tempPath = filePath+'.tmp'\n cPickle.dump(outData,open(tempPath,'w'))\n renameFile(tempPath,filePath,True)", "def store_all_to_database(self, session):\n\n description = 'Established in 1974, JSM is a family-owned provider of quality apartments. We offer a variety of units from studios to five bedrooms with every location benefitting from our award winning amenities, responsive 24 hour maintenance, and friendly property management staff. JSM Development began in Champaign, IL, and manages roughly 1,500 apartments and 450,000 sq/ft of commercial space. JSM has been a major contributor to the development of Campustown in Champaign and the East Campus area in Urbana at the University of Illinois. 
These popular locations are now home to major national retailers such as Urban Outfitters, Chipotle, Panera, Cold Stone Creamery, and Noodles & Co.'\n\n # Insert a JSM company instance into the database\n current_company = Company(\n name='JSM',\n baseurl='https://apartments.jsmliving.com/',\n description = description\n )\n session.add(current_company)\n\n # Iterate over the apartments, storing each in the database\n for apartment in self.apartment_data:\n logging.info(\"Inserting %s to database\", apartment['name'])\n new_apartment = Apartment(\n company=current_company,\n url=apartment['url'],\n name=apartment['name'],\n bedrooms=apartment['bedrooms'],\n bathrooms=apartment['bathrooms'],\n price=apartment['price'],\n leasing_period=apartment['leasing_period'],\n description=apartment['description'],\n address=apartment['address'],\n lat=apartment['lat'],\n lng=apartment['lng']\n )\n session.add(new_apartment)\n\n # Insert images for the given apartment\n for index, image_url in enumerate(apartment['image_urls']):\n new_image = Image(\n url=image_url,\n apartment_id=new_apartment.id,\n type=0,\n image_index=index\n )\n session.add(new_image)\n\n # Connect images to apartment\n new_apartment.images.append(new_image)\n\n # Insert floorplan image, if it exists\n if apartment['floorplan_url'] != 0:\n new_floorplan_image = Image(\n url=apartment['floorplan_url'],\n apartment_id=new_apartment.id,\n type=1,\n image_index=len(apartment['image_urls'])\n )\n session.add(new_floorplan_image)\n\n # Connect images to apartment\n new_apartment.images.append(new_floorplan_image)\n\n # Insert amenities for the given apartment\n for amenity in apartment['amenities']:\n new_amenity = Amenity(\n apartment_id=new_apartment.id,\n amenity=amenity\n )\n session.add(new_amenity)\n\n # Connect amenity to apartment\n new_apartment.amenities.append(new_amenity)\n\n # Write all queries to the database\n session.commit()", "def save(self):\n\n for i in self.bots:\n try:\n i.save()\n except Exception, ex:\n handle_exception()", "def save(self):\n for t in self.ace_types:\n self.api.api_request(\"PUT\", self.url + t, data={t: self[t]})", "def save(self, *args, **kwargs):\n pass", "def modifyClassroom(classroomId, classroomName, capacity,location):\n for classroom in classroomEntities:\n if classroom[\"classroomId\"] == classroomId:\n selectedClassroom = classroom\n selectedClassroom[\"classroomName\"] = classroomName\n selectedClassroom[\"capacity\"] = capacity\n selectedClassroom[\"location\"] = location\n return True\n return False", "def save(self):\n d1 = {}\n with open(self.__file_path, mode=\"w\") as f:\n for k, v in self.__objects.items():\n d1[k] = v.to_dict()\n json.dump(d1, f)" ]
[ "0.66771996", "0.66205716", "0.6545138", "0.60703945", "0.60378635", "0.60299575", "0.6015636", "0.5970347", "0.596366", "0.590775", "0.58763117", "0.58763117", "0.58763117", "0.58696264", "0.58696264", "0.58696264", "0.58696264", "0.58696264", "0.5866105", "0.5827522", "0.5819063", "0.58154714", "0.5800954", "0.5773427", "0.57664865", "0.5759937", "0.575428", "0.5740315", "0.57371706", "0.57309514", "0.57258546", "0.57079804", "0.5644727", "0.5632527", "0.56291336", "0.56291336", "0.56291336", "0.56171906", "0.5600447", "0.5599877", "0.5593998", "0.5582601", "0.5575891", "0.5567979", "0.55528", "0.5543081", "0.55337507", "0.55318445", "0.5530113", "0.552907", "0.5490633", "0.547235", "0.5469896", "0.54666334", "0.5465158", "0.54635787", "0.54635787", "0.5460901", "0.54564863", "0.54564863", "0.54564863", "0.54564863", "0.54455614", "0.5437395", "0.5436752", "0.5430434", "0.5425933", "0.5422957", "0.54214644", "0.54214644", "0.54214644", "0.54214644", "0.54214644", "0.54214644", "0.54214644", "0.54214644", "0.5416008", "0.54081964", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.54021025", "0.53940225", "0.53886056", "0.5387567", "0.53830975", "0.53778434", "0.5370457", "0.53692466", "0.5366557", "0.5362933", "0.5361305", "0.53608865" ]
0.8425549
0
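The saveClassroomData document above writes the module-level classroomEntities list with pickle; here is a minimal load counterpart for round-tripping the same file, assuming the snippet's file name and binary mode (loadClassroomData is a hypothetical name, not part of the record):

def loadClassroomData():
    # Read back whatever saveClassroomData() pickled; returning an empty
    # list when the file does not exist yet is an assumption.
    import pickle
    try:
        with open("ClassRoomData.txt", "rb") as classroomData:
            return pickle.load(classroomData)
    except FileNotFoundError:
        return []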
Get the escape code associated with `name`. `name` can be either a variable_name, a capname, or a tcap code. See man terminfo(5) for which names are available. If the name is not supported, None is returned. If the name isn't present in the database, an exception is raised.
def get(self, name):
    # `name` is most likely a capname, so we try that first
    for i in (self._by_capname, self._by_var, self._by_tcap_code):
        if i.get(name):
            return i.get(name)
    else:
        raise TerminfoError("'%s' is not a valid terminfo entry" % name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_code_by_name(self, name):\n raise NotImplementedError()", "def _find_ebcdic_codec(code_name):\n return _codec_name_to_info_map.get(code_name)", "def get_sup_code_by_name(self, name):\n raise NotImplementedError()", "def getEncoder (name):\n for m in _registeredEncoders:\n if m.name () == name:\n return m\n return None", "def get_postal_code_by_name(self, name):\n raise NotImplementedError()", "def get_card_by_name(self,name):\n try:\n card_id = self._category2id['name'][name].values()\n except KeyError:\n print \"No card by given name! [{}]\".format(name)\n return None\n\n if len(card_id) > 1:\n print \"Multiple cards match name, returning first...\"\n\n return self._id2database[card_id[0]]", "def GetEncodingFromName(*args, **kwargs):\n return _gdi_.FontMapper_GetEncodingFromName(*args, **kwargs)", "def lookup(codec_name):\n result = _find_ebcdic_codec(codec_name)\n if result is None:\n raise LookupError(\n \"EBCDIC codec is %r but must be one of: %s\" % (codec_name, codec_names)\n )\n return result", "def getByName(name: str) -> AugmentationType | None:\n if name.upper() in [aug.name for aug in AugmentationType]:\n return AugmentationType[name.upper()]\n else:\n return None", "def get_employeeOnName(self, name):\n from Employee import Employee\n cursor = self.dbconnect.get_cursor()\n cursor.execute('SELECT * FROM employee WHERE name=%s ', (name,))\n if (cursor.rowcount != 0):\n row = cursor.fetchone()\n return Employee(row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8])\n else:\n return None", "def get_seq_by_name(self, name: str) -> Optional['Sequencer']:\n for seq in self.Sequencers:\n if seq.Name.lower() == name.lower():\n return seq\n return None", "def convert_charref(self, name):\r\n try:\r\n n = int(name)\r\n except ValueError:\r\n return\r\n if not 0 <= n <= 127 : # ASCII ends at 127, not 255\r\n return\r\n return self.convert_codepoint(n)", "def get_summoner_by_name(self, name: str) -> Optional[Summoner]:\n self.logger.info(\"Getting Summoner: {}\".format(name))\n # Check db first\n q = self.db.session.query(Summoner).filter(Summoner.name.ilike('%{}%'.format(name)))\n summoner = q.first()\n if summoner:\n self.logger.info(\"Summoner found in db: {}\".format(summoner))\n return summoner\n else:\n self.logger.info(\"Summoner not found in db: {}\".format(summoner))\n return self.insert_or_update_summoner(name)", "def FindName(*args, **kwargs):\n return _gdi_.ColourDatabase_FindName(*args, **kwargs)", "def get_code (self, name):\n containment = self.containments.get (name)\n if containment is None:\n raise ImportError ('No such module: \\'{}\\''.format (name))\n return compile (containment [0], containment [1], 'exec')", "def get(self, coder_name):\n coder = self.coders.get(coder_name)\n return coder", "def get_typecode(self, name):\n return self.codes['type_codes'][name]", "def get_author_by_name(self, name):\n\n cur = self.conn.cursor()\n query = 'SELECT author_id , name FROM author WHERE name = ? 
'\n cur.execute(query, (name,))\n return row_to_dict_or_false(cur)", "def find_byname(self, name):\n name = name.lower()\n try:\n return self.__byname[name]\n except KeyError:\n raise BadColor(name)", "def get_by_name(self, name):\n return self.by_name.get(name.upper())", "def getPlayerIDFromName(name):\n\n # Connect to the database.\n conn, c = main.connect()\n\n # Select the player that matches the name.\n SQL = \"SELECT playerID FROM player WHERE playerName=%s\"\n data = (name, )\n c.execute(SQL, data)\n\n toReturn = c.fetchone()\n\n conn.commit()\n conn.close()\n\n # Only return the first result\n return toReturn[0]", "def get_character_info(self, name):\n url = \"%s?%s\" % (self._base_url, urlencode({'name': name}))\n q = Request(url)\n q.add_header('User-Agent', 'curl/7.51.0')\n q.add_header('Accept', 'application/json')\n\n result = urlopen(q).read().decode('utf-8')\n data = json.loads(result)\n\n return data", "def decodeName(self, nameid):\n if nameid in self.invnames:\n return self.invnames[nameid]\n else:\n return None", "def _get_qiskit_gate_from_name(name):\n gates = {\n 'H': HGate,\n 'X': XGate,\n 'Y': YGate,\n 'Z': ZGate,\n 'SWAP': SwapGate,\n 'I': IGate,\n 'S': SGate,\n 'D-S': SdgGate,\n 'T': TGate,\n 'D-T': TdgGate,\n 'RX': RXGate,\n 'RY': RYGate,\n 'RZ': RZGate,\n 'C-H': CHGate,\n 'CNOT': CnotGate,\n 'C-Y': CYGate,\n 'CSIGN': CZGate,\n 'C-RZ': CRZGate,\n 'CCNOT': CCXGate,\n 'C-SWAP': CSwapGate,\n 'U': U3Gate,\n 'U3': U3Gate,\n 'U2': U2Gate,\n 'U1': U1Gate,\n 'U0': IGate,\n 'PH': RZGate,\n 'RXX': RXXGate,\n 'RZZ': RZZGate,\n 'R': RGate,\n 'MS': MSGate\n }\n try:\n gate = gates[name]\n except:\n gate = None\n return gate", "def FontMapper_GetEncodingFromName(*args, **kwargs):\n return _gdi_.FontMapper_GetEncodingFromName(*args, **kwargs)", "def exe(self, name):\n\n return name", "def what_can_it_do(self, name):\n stmt = \"MATCH (n:Concept { name: '%s' })-[:can]->(neighbors) RETURN neighbors.name as name\" % name\n return map(lambda result: result.name.encode('utf-8'), self.graph.cypher.execute(stmt))", "def by_name(cls, name):\n return dbsession.query(cls).filter_by(_name=str(name)).first()", "def by_name(cls, name):\n return dbsession.query(cls).filter_by(_name=str(name)).first()", "def by_name(cls, name):\n return dbsession.query(cls).filter_by(_name=str(name)).first()", "def ShortName(name):\n if name in CODEC_MAP:\n return CODEC_MAP[name].shortname\n raise encoder.Error('Unrecognized codec name %s' % name)", "def get_symbol(self, name): # pylint: disable=no-self-use,unused-argument\n if name in self._symbol_cache:\n return self._symbol_cache[name]\n return None", "def selection(self, name):\n try:\n return self._selections[name]\n except KeyError:\n raise Pype9NameError(\n \"No selection named '{}' (possible '{}')\"\n .format(name, \"', '\".join(self.selection_names)))", "def LongName(name):\n if name in CODEC_MAP:\n return CODEC_MAP[name].longname\n raise encoder.Error('Unrecognized codec name %s' % name)", "def get_name_by_collation(self, colname):\n for cs in self.charset_map:\n if cs[COLLATION_NAME] == colname:\n return cs[CHARACTER_SET_NAME]\n return None", "def _ident(name: Optional[Union[quoted_name, str]]) -> Optional[str]:\n if name is None:\n return name\n elif isinstance(name, quoted_name):\n return str(name)\n elif isinstance(name, str):\n return name", "def get_contract_by_name(self, name):\n return next((contract for contract in self.contracts\n if contract.name.lower() == name.lower()), None)", "def get_by_name(cls, name):\n return 
cls.query.filter(cls.name == name).first()", "def _get_guc(self, name):\n with self._lock:\n query = 'SHOW %s' % name\n\n if _green_callback:\n pgres = self._execute_green(query)\n else:\n pgres = libpq.PQexec(self._pgconn, ascii_to_bytes(query))\n\n if not pgres or libpq.PQresultStatus(pgres) != libpq.PGRES_TUPLES_OK:\n raise exceptions.OperationalError(\"can't fetch %s\" % name)\n rv = bytes_to_ascii(ffi.string(libpq.PQgetvalue(pgres, 0, 0)))\n libpq.PQclear(pgres)\n return rv", "def GetCommand(name, database):\n value = database.GetValue(name)\n if(value == None):\n return \"Name not found\"\n else:\n return value", "def get_func_by_name(self, name):\n if(name == self.name):\n res = self\n else:\n res = None\n return res", "def get_stock_code(stock_name):\n\n Stocks = get_db()['Stocks']\n stock = Stocks.find_one({'name': stock_name})\n if stock:\n return stock['code']\n else:\n raise StockNotFoundException(\n f\"Stock code is not found for stock name {stock_name}\")", "def get_macro(self, name: str) -> str:\n data = struct.pack('<HH{}s'.format(len(name)), 0, len(name), name.encode())\n return self.__intf(2, data)[2:].decode()", "def label(self, name: str) -> Optional[str]:\n _args = [\n Arg(\"name\", name),\n ]\n _ctx = self._select(\"label\", _args)\n return _ctx.execute_sync(Optional[str])", "def read_vocabulary_by_name(name):\n res = (\n select([vocabulary_table])\n .where(vocabulary_table.c.name == name)\n .execute().first())\n if res is not None:\n return dict(res)", "def get_region_by_name(self, name):\n raise NotImplementedError()", "def get_by_name(name: str):\n logger.debug('Retrieving employee by name %s.', name)\n try:\n query = db.session.query(Employee)\n employee = query.filter(\n Employee.name == name\n ).scalar()\n except Exception as exception:\n logger.error('An error occurred while retrieving employee by name %s.'\n ' Exception: %s', name, str(exception))\n db.session.rollback()\n raise\n db.session.commit()\n logger.info('Successfully retrieved employee by name %s.', name)\n return employee", "def GetEncodingName(*args, **kwargs):\n return _gdi_.FontMapper_GetEncodingName(*args, **kwargs)", "def desc(name):\n\n # This includes several different methods to try and get the desc. 
If\n # one fails, try the next.\n try:\n with open(os.path.join('/sys/class/input', name, 'device/name'), 'r') as f:\n return utils.readstr(f)\n except IOError as error:\n if error.errno == 2:\n with open(_INPUT_PATH(name), 'r') as f:\n try:\n name = fcntl.ioctl(f, _EVIOCGNAME, chr(0) * 256)\n return name.replace(chr(0), '')\n except: # pylint: disable=bare-except\n pass\n else:\n raise error\n\n return None", "def handle_entityref(self, name):\n text = six.unichr(name2codepoint[name])\n self.result.append(text)\n return text", "def n(name):\n return objc.sel_registerName(_utf8(name))", "def get_platform(self, name):\n if name in self.platforms:\n return name\n else:\n try:\n p = self.platforms['name'] = Platform.load(self, name)\n return p\n except IOError as e:\n print('Failed loading platform: {0}'.format(str(e)))\n return None", "def get(self, name):\n for func in (self.getarg, self.getflag, self.getcmd):\n try:\n return func(name)\n except KeyError:\n pass\n return None", "def get_zone(cls, name):\n\n def get_closest(n):\n \"\"\"\n Return closest matching zone\n \"\"\"\n while n:\n try:\n return DNSZone.objects.get(name=n)\n except DNSZone.DoesNotExist:\n pass\n n = \".\".join(n.split(\".\")[1:])\n return None\n\n if not name:\n return None\n if is_ipv4(name):\n # IPv4 zone\n n = name.split(\".\")\n n.reverse()\n return get_closest(\"%s.in-addr.arpa\" % (\".\".join(n[1:])))\n elif is_ipv6(name):\n # IPv6 zone\n d = IPv6(name).digits\n d.reverse()\n c = \".\".join(d)\n return get_closest(\"%s.ip6.arpa\" % c) or get_closest(\"%s.ip6.int\" % c)\n else:\n return get_closest(name)", "def get_card(self, name):\n for card in self.cards:\n if card.name == name:\n return card\n\n return None", "def name(self) -> str:\n _args: list[Arg] = []\n _ctx = self._select(\"name\", _args)\n return _ctx.execute_sync(str)", "def get_class_decoder_function_name(name):\n name = get_class_functional_name(name)\n return 'decode_{0}'.format(name)", "def find_by_name(name, engines=None):\n if engines is None:\n engines = ENGINES\n\n for egn in engines:\n if egn.name() == name:\n return egn\n\n return None", "async def rocketbyname(self, ctx, name, *args):\n if not can_answer(ctx):\n return\n rockets = launchlibrary.Rocket.fetch(api, name=name)\n if rockets:\n rocket = rockets[0]\n rocketname = rocket.name\n msg = '**__{0}__**\\n'\n msg = msg.format(rocketname)\n for arg, formatter in (('-id', id), ('-fid', familyid), ('-aid', agencyid), ('-p', padids), ('-w', rocketwikiurl)):\n if arg in args:\n msg = formatter(msg, rocket)\n else:\n msg = \"No rocket found with name provided.\"\n await send(ctx, msg, args)", "def get_name(self, chr_id):\n for cs in self.charset_map:\n if int(chr_id) == int(cs[ID]):\n return cs[CHARACTER_SET_NAME]\n return None", "async def get_demon(self, ctx, game: str, name: str):\n\n name = await self.nearest_spelling(ctx, name.lower(), self.names[game])\n if name is not None:\n name = \" \".join([i.capitalize() for i in name.split()])\n return name", "def searchDatabase(self, name: str) -> Database:\n for db in self._typeCheckerList:\n if db.name.lower() == name.lower():\n return db\n return None", "def name(self):\n name_length = self.unpack_word(0x48)\n unpacked_string = self.unpack_string(0x4C, name_length)\n if self.has_ascii_name():\n return unpacked_string.decode(\"windows-1252\")\n return unpacked_string.decode(\"utf-16le\")", "def getbyname(self,name):\n\t\t_result = None\n\t\tfor i in self._protocols.items():\n\t\t\tif i[1] == name: # re.match?\n\t\t\t\t_result = 
i[0]\n\n\t\treturn _result", "def search(self, name):\n\n name = name.lower().strip()\n exact_names = get_close_matches(name, self.possible_names, n=1)\n if not exact_names:\n return None\n else:\n exact_name = exact_names[0]\n id = self.df_possible_names[self.df_possible_names['name'] == exact_name].index[0] \n return self.df_possible_names.loc[id, 'id']", "def get_backend(self, name):\n if name == DATABASE_TYPE_MYSQL:\n ret = 2\n elif name == DATABASE_TYPE_POSTGRESQL:\n ret = 3\n elif name == DATABASE_TYPE_SQLITE:\n ret = 4\n # sqlcoder: this assignment fixes unicode problems for me with sqlite (windows, cp1252)\n # feel free to remove or improve this if you understand the problems\n # better than me (not hard!)\n Charset.not_needed1, Charset.not_needed2, Charset.not_needed3 = True, True, True\n else:\n raise ValueError('Unsupported database backend: %s' % self.supported_databases[name].db_server)\n\n return ret", "def _get_domain_for_name(self, name):\n domain = self.connection.lookupByName(name)\n return domain", "def get_character(self, name=None, id=None):\n\n if name is None and id is None:\n if len(self.characters) > 1:\n raise TooManyCharactersError(\"There are too many characters to choose from\")\n if len(self.characters) is 0:\n return None\n return self.characters[0]\n\n for character in self.characters:\n if name is not None and character.name == name:\n print(\"{} is {}\".format(character.name, name))\n return character\n \n if id is not None and character.id == id:\n print(\"{} is {}\".format(character.id, id))\n return character\n \n return None", "def get_control(self, name):\n\n return self._control_manager.get_control(name)", "def _byname(self, name):\n query = \"\"\"SELECT * \n FROM ppmxl \n WHERE id = '%s';\"\"\" % name\n result = self.corot.query(query)\n return result", "def color(name):\n\tif name not in colors:\n\t\traise ValueError('Bad color %s' % repr(name))\n\treturn u'§' + colors[name]", "def get_by_name_internal(self, name: str):\n name_bytes = str_to_bytes_pad(name, MAX_NAME_LEN)\n self.dev.apdu_exchange(0x0d, name_bytes)", "def name(self, name):\n return self[self.name_cache[name]]", "def by_name(cls, name):\n return cls.all().filter('name =', name).get()", "def codes(self, name):\n return self._get_valuemap(name, non_mapped='codes')", "def get_account_by_name(self, name):\n return next((account for account in self.accounts\n if account.ynab_account_name.lower() == name.lower()), None)", "def decode(name, f):\n data = post_file(ZX_URL, [\n ('f', name, f.read(),)\n ])\n try:\n tree = etree.fromstring(data)\n return tree.xpath(\"/html/body/div/table/tr[5]/td[2]/pre\")[0].text\n except:\n return None", "def from_name(self, name):\n return self._name_to_operator.get(name.lower())", "def get_by_name(name: str):\n return Category.query.filter_by(name=name).first()", "def get_by_name(self, name: str) -> Tuple[str, str]:\n name_bytes = str_to_bytes_pad(name, MAX_NAME_LEN)\n r = self.dev.apdu_exchange(0x05, name_bytes)\n login = bytes_to_str(r[:32])\n password = bytes_to_str(r[32:32+64])\n return (login, password)", "def get_machine(self, name):\n\n return self._machine_manager.get_machine(name)", "def get_package_decoder_file_name(name):\n name = get_package_name(name)\n return 'decoder_for_{0}_package'.format(name)", "def get_conv(name):\n trans_funs = {\n 'mbconv_transform': MBConv,\n 'mbtalkconv_transform': MBTalkConv,\n }\n assert name in trans_funs.keys(), \\\n 'Transformation function \\'{}\\' not supported'.format(name)\n return trans_funs[name]", "def 
get_board_by_name(self, name: str, *args):\n \n # Hard configure the pagination rate.\n page = 1\n page_limit = 500\n record_count = 500\n\n while record_count >= page_limit:\n boards_data = api.get_boards(\n 'id', 'name',\n api_key=self.__creds.api_key_v2, \n limit=page_limit,\n page=page)\n \n try:\n target_board = [board for board in boards_data if board['name'].lower() == name.lower()][0]\n return self.get_board_by_id(target_board['id'], *args)\n except IndexError:\n page += 1\n record_count = len(boards_data)\n continue\n raise MondayClientError('board_not_found', 'Could not find board with name \"{}\".'.format(name))", "def check_cross_match_info_by_name(name: str, logger=None):\n return get_cross_match_info(\n raw=ampel_api_name(name, with_history=False, logger=logger)[0], logger=logger\n )", "def get_by_name(self, name):\n ksat = Ksat.query.filter_by(name=name).first()\n\n return ksat", "def get_coding(text):\r\n for line in text.splitlines()[:2]:\r\n result = CODING_RE.search(line)\r\n if result:\r\n return result.group(1)\r\n return None", "def FontMapper_GetEncodingName(*args, **kwargs):\n return _gdi_.FontMapper_GetEncodingName(*args, **kwargs)", "def find_program(name):\r\n return name", "def get_pos_name(code, name='parent', english=True):\n return _get_pos_name(code, name, english)", "def find_cinema_by_name(name):\n return Cinema.objects.filter(name=name).first()", "def get_name():\n\n return character['Name']", "def find_by_name(name):\n return repository.find_by_name(name)", "def get_character_detail(chara_name: str) -> dict:\n\n chara_misc_json = load_characters_config()\n chara_details = list(filter(lambda x: (x['name'] == chara_name), chara_misc_json))\n\n if chara_details:\n return chara_details[0]\n else:\n return None", "def handle_charref(self,name):\r\n self.handle_data(unichr(int(name)))\r\n #self.handle_data(\"(charref %s)\" % name)\r\n #print \"handle_charref\", name\r\n #raise NotImplemented\r", "def get_resource_from_name(name):\n return _name_to_resources.get(name, None)", "def container_by_name(self, name):\n if not name:\n return None\n\n # docker prepends a '/' to container names in the container dict\n name = '/'+name\n return next((container for container in self.containers(all=True)\n if name in container['Names']), None)", "def name(self):\n if not self.has_name():\n return \"\"\n name_length = self.unpack_word(0x2)\n unpacked_string = self.unpack_string(0x14, name_length)\n if self.has_ascii_name():\n return unpacked_string.decode(\"windows-1252\")\n return unpacked_string.decode(\"utf-16le\")", "def get(self, name_or_id):\n \n r = self.library.database.get_name(name_or_id)\n\n if not r[0]:\n r = self.library.database.get_id(name_or_id)\n \n return r", "def find_by_name(self, name):\n return self.get(name)" ]
[ "0.6151477", "0.5700235", "0.54649127", "0.53456575", "0.51838666", "0.51424867", "0.50688034", "0.50449085", "0.50296956", "0.49381745", "0.49306858", "0.49157912", "0.4901924", "0.48943734", "0.48937386", "0.48490885", "0.48409435", "0.48289338", "0.4797155", "0.477383", "0.47610682", "0.4759655", "0.47535056", "0.4737278", "0.4727856", "0.47275203", "0.46980697", "0.46885777", "0.46885777", "0.46885777", "0.46708134", "0.46654618", "0.46609682", "0.46579123", "0.46552053", "0.464071", "0.4639803", "0.46257934", "0.46235856", "0.46074668", "0.46041685", "0.45959342", "0.45856968", "0.45802075", "0.45798457", "0.4574561", "0.457162", "0.45685607", "0.4567844", "0.4567201", "0.45664844", "0.45649955", "0.45580885", "0.45476142", "0.45442003", "0.4538684", "0.4524656", "0.45175204", "0.4504168", "0.4502425", "0.4502328", "0.44948274", "0.44931123", "0.44914332", "0.44887674", "0.44882545", "0.44767922", "0.44720256", "0.44633973", "0.4457008", "0.4455691", "0.44545415", "0.44509596", "0.44485092", "0.44412857", "0.4431271", "0.4430892", "0.44277358", "0.4426733", "0.4411253", "0.44086087", "0.4404823", "0.44047484", "0.4401613", "0.43999118", "0.43997914", "0.43949866", "0.43942013", "0.4392647", "0.43908793", "0.43869004", "0.43829444", "0.43829146", "0.43825206", "0.43799436", "0.4376124", "0.43652114", "0.43604544", "0.43568403", "0.43553406" ]
0.49186072
11
If the environment variable TERM is unset, try `fallback` if it is not empty. vt100 is a popular terminal supporting ANSI X3.64.
def load_terminfo(terminal_name=None, fallback='vt100'):
    if terminal_name is None:
        # no explicit name given: consult the environment
        terminal_name = os.getenv('TERM')
    if not terminal_name:
        if not fallback:
            raise TerminfoError('Environment variable TERM is unset and no fallback was requested')
        else:
            terminal_name = fallback

    if os.getenv('TERMINFO'):
        # from man terminfo(5):
        # if the environment variable TERMINFO is set,
        # only that directory is searched
        terminfo_locations = [os.getenv('TERMINFO')]
    else:
        terminfo_locations = []
        # from most to least important
        if os.getenv('TERMINFO_DIRS'):
            for i in os.getenv('TERMINFO_DIRS').split(':'):
                # from man terminfo(5):
                # An empty directory name is interpreted as /usr/share/terminfo.
                terminfo_locations.append(i or '/usr/share/terminfo')

        terminfo_locations += [
            os.path.expanduser('~/.terminfo'),
            '/etc/terminfo',
            '/usr/local/ncurses/share/terminfo',
            '/lib/terminfo',
            '/usr/share/terminfo'
        ]

        # remove duplicates preserving order
        terminfo_locations = list(OrderedDict.fromkeys(terminfo_locations))

    terminfo_path = None
    for dirpath in terminfo_locations:
        path = os.path.join(dirpath, terminal_name[0], terminal_name)
        if os.path.exists(path):
            terminfo_path = path
            break

    if not terminfo_path:
        raise TerminfoError("Couldn't find a terminfo file for terminal '%s'" % terminal_name)

    from terminfo_index import BOOLEAN_CAPABILITIES, NUMBER_CAPABILITIES, STRING_CAPABILITIES

    with open(terminfo_path, 'rb') as terminfo_file:
        data = terminfo_file.read()

    # header (see man term(5), STORAGE FORMAT)
    header = struct.unpack('<hhhhhh', data[:12])  # 2 bytes == 1 short integer
    magic_number = header[0]   # the magic number (octal 0432)
    size_names = header[1]     # the size, in bytes, of the names section
    size_booleans = header[2]  # the number of bytes in the boolean section
    num_numbers = header[3]    # the number of short integers in the numbers section
    num_offsets = header[4]    # the number of offsets (short integers) in the strings section
    size_strings = header[5]   # the size, in bytes, of the string table

    if magic_number != 0o432:
        raise TerminfoError('Bad magic number')

    # sections indexes
    idx_section_names = 12
    idx_section_booleans = idx_section_names + size_names
    idx_section_numbers = idx_section_booleans + size_booleans
    if idx_section_numbers % 2 != 0:
        idx_section_numbers += 1  # must start on an even byte
    idx_section_strings = idx_section_numbers + 2 * num_numbers
    idx_section_string_table = idx_section_strings + 2 * num_offsets

    # terminal names
    terminal_names = data[idx_section_names:idx_section_booleans].decode('ascii')
    terminal_names = terminal_names[:-1].split('|')  # remove ASCII NUL and split
    terminfo = Terminfo(terminal_names[0], terminal_names[1:])

    # booleans: one byte per capability, 1 means the capability is present
    for i, idx in enumerate(range(idx_section_booleans, idx_section_booleans + size_booleans)):
        cap = BooleanCapability(*BOOLEAN_CAPABILITIES[i], value=data[idx:idx + 1] == b'\x01')
        terminfo.booleans[cap.variable] = cap

    # numbers
    numbers = struct.unpack('<' + 'h' * num_numbers, data[idx_section_numbers:idx_section_strings])
    for i, strnum in enumerate(numbers):
        cap = NumberCapability(*NUMBER_CAPABILITIES[i], value=strnum)
        terminfo.numbers[cap.variable] = cap

    # strings: each offset points into the string table; -1 marks an absent capability
    offsets = struct.unpack('<' + 'h' * num_offsets, data[idx_section_strings:idx_section_string_table])
    idx = 0
    for offset in offsets:
        k = 0
        string = []
        while offset != -1:
            char = data[idx_section_string_table + offset + k:idx_section_string_table + offset + k + 1]
            if char == b'\x00':  # strings are NUL-terminated
                break
            string.append(char.decode('iso-8859-1'))
            k += 1
        string = u''.join(string)
        cap = StringCapability(*STRING_CAPABILITIES[idx], value=string)
        terminfo.strings[cap.variable] = cap
        idx += 1

    terminfo._reset_index()
    return terminfo
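A minimal usage sketch for the loader above, assuming the Terminfo, BooleanCapability, NumberCapability and StringCapability classes plus the terminfo_index module referenced in the snippet are importable; the capability keys shown ('auto_right_margin', 'max_colors', 'clear_screen') are the standard terminfo long variable names and only resolve if terminfo_index uses them too:

    # load the entry for $TERM, or vt100 when TERM is unset/empty
    ti = load_terminfo()
    if ti.booleans['auto_right_margin'].value:
        print('terminal wraps at the right margin')
    print('colors supported: %d' % ti.numbers['max_colors'].value)
    print('clear-screen sequence: %r' % ti.strings['clear_screen'].value)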
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _init_term(fullterm):\n if platform == 'win32':\n return True\n elif platform in ('darwin', 'linux'):\n global _STATIC_VARS\n fd = stdin.fileno()\n if not isatty(fd):\n return\n old = tcgetattr(fd)\n _STATIC_VARS.term_config = (fd, old)\n new = tcgetattr(fd)\n new[3] = new[3] & ~ICANON & ~ECHO\n new[6][VMIN] = 1\n new[6][VTIME] = 0\n if fullterm:\n new[6][VINTR] = 0\n new[6][VSUSP] = 0\n tcsetattr(fd, TCSANOW, new)\n # terminal modes have to be restored on exit...\n register(cleanup_console)\n return True\n else:\n return True", "def auto_color(stream=sys.stdin):\n term_name = os.environ.get(\"TERM\", \"\").lower()\n if (stream.isatty()\n and (term_name in KNOWN_TERMINAL_TYPES or \"xterm\" in term_name)):\n return VtColor()\n return NoColor()", "def get_term_colors():\n term = getenv('TERM')\n if not is_term() or not term:\n return 1\n if term in ('xterm-color', 'ansi', 'screen'):\n return 16\n if term in ('xterm-256color'):\n return 256\n return 1", "def term_support_color():\n return OS_VERSION[0] == \"Linux\" or OS_VERSION[0] == \"Darwin\"", "def set_terminal_encoding(encoding='utf_8'):\n sys.stdin = codecs.getreader(encoding)(sys.stdin)\n sys.stdout = codecs.getwriter(encoding)(sys.stdout)\n sys.stderr = codecs.getwriter(encoding)(sys.stderr)", "def fallback(self, kw):\n print(self.fallback_text.format(kw))\n return self.ask()", "def terminal_configured():\n return lnp.userconfig.get('terminal_type') is not None", "def reset_term_colors():\n sys.stdout.write(ENDC)", "def terminal_supports_color():\n plat = sys.platform\n supported_platform = plat != \"Pocket PC\" and (\n plat != \"win32\" or \"ANSICON\" in os.environ\n )\n # isatty is not always implemented, #6223.\n is_a_tty = hasattr(sys.stdout, \"isatty\") and sys.stdout.isatty()\n if not supported_platform or not is_a_tty:\n return False\n return True", "def preferredRenderer(*args, fallback: Union[AnyStr, bool]=\"\", makeCurrent: bool=True, q=True,\n query=True, **kwargs)->Union[None, Any]:\n pass", "def _get_terminal_exec(self):\n\n terminal = None\n\n try:\n with open(CONFIG_FILE_PATH) as conffile:\n config = yaml.load(conffile, yaml.SafeLoader)\n terminal = config.get('terminal', None)\n except yaml.YAMLError:\n print(\"Nautiterm: invalid configuration file at {path}, falling back\" +\n \" to {d}\".format(path=CONFIG_FILE_PATH, d=DEFAULT_TERMINAL_EXEC),\n file=sys.stderr)\n except IOError as ioe:\n # catch-all for permission errors and file not founds to be compatible\n # with Python 2 which doesn't have FileNotFoundError or PermissionError\n pass\n\n if not terminal:\n terminal = DEFAULT_TERMINAL_EXEC\n\n return terminal", "def _default_color_enabled() -> bool:\n import platform\n\n # If we're not attached to a terminal, go with no-color.\n if not sys.__stdout__.isatty():\n return False\n\n # On windows, try to enable ANSI color mode.\n if platform.system() == 'Windows':\n return _windows_enable_color()\n\n # We seem to be a terminal with color support; let's do it!\n return True", "def reset_terminal():\n if not mswin:\n subprocess.call([\"tset\", \"-c\"])", "def get_configured_terminal():\n s = lnp.userconfig.get_string('terminal_type')\n terminals = get_valid_terminals()\n for t in terminals:\n if s == t.name:\n return t\n return CustomTerminal", "def supports_color():\n plat = sys.platform\n supported_platform = plat != 'Pocket PC' and (plat != 'win32'\n or 'ANSICON' in os.environ)\n # isatty is not always implemented, #6223.\n is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()\n return 
supported_platform and is_a_tty", "def test_term_chars_default(self, instrument):\n assert instrument.term_chars == b'\\r'", "def defaultProcessOutputEncodingDecider(context, executable, **forfutureuse):\n\treturn __DEFAULT_PROCESS_ENCODING # stdout encoding will be None unless in a terminal", "def consolePrompt(prompt:str, nl:bool = True, default:str = None) -> str:\n\t\tanswer = None\n\t\ttry:\n\t\t\tanswer = Prompt.ask(f'[{Logging.terminalStyle}]{prompt}', console = Logging._console, default = default)\n\t\t\tif nl:\n\t\t\t\tLogging.console()\n\t\texcept KeyboardInterrupt as e:\n\t\t\tpass\n\t\texcept Exception:\n\t\t\tpass\n\t\treturn answer", "def stdout_supports_color():\r\n plat = sys.platform\r\n supported_platform = plat != 'Pocket PC' and (plat != 'win32' or\r\n 'ANSICON' in os.environ)\r\n\r\n is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()\r\n if not supported_platform or not is_a_tty:\r\n return False\r\n return True", "def supports_color():\n plat = sys.platform\n supported_platform = plat != 'Pocket PC' and \\\n (plat != 'win32' or 'ANSICON' in os.environ)\n\n is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()\n if not supported_platform or not is_a_tty:\n return False\n return True", "def get_custom_terminal_cmd():\n return lnp.userconfig.get_string('terminal')", "def setup_locale_environment(locale=None, text_mode=False, prefer_environment=False):\n\n # pylint: disable=environment-modify\n\n # Look for a locale in the environment. If the variable is setup but\n # empty it doesn't count, and some programs (KDE) actually do this.\n # If prefer_environment is set, the environment locale can override\n # the parameter passed in. This can be used, for example, by initial-setup,\n # to prefer the possibly-more-recent environment settings before falling back\n # to a locale set at install time and saved in the kickstart.\n if not locale or prefer_environment:\n for varname in (\"LANGUAGE\", \"LC_ALL\", \"LC_MESSAGES\", \"LANG\"):\n if varname in os.environ and os.environ[varname]:\n locale = os.environ[varname]\n break\n\n # Look for a locale in the firmware if there was nothing in the environment\n if not locale:\n locale = get_firmware_language(text_mode)\n\n # parse the locale using langtable\n if locale:\n env_langs = get_language_locales(locale)\n if env_langs:\n # the first langauge is the best match\n locale = env_langs[0]\n else:\n log.error(\"Invalid locale '%s' given on command line, kickstart or environment\", locale)\n locale = None\n\n # If langtable returned no locales, or if nothing was configured, fall back to the default\n if not locale:\n locale = constants.DEFAULT_LANG\n\n # Save the locale in the environment\n os.environ[\"LANG\"] = locale\n\n # Cleanup the rest of the environment variables\n for varname in (\"LANGUAGE\", \"LC_ALL\", \"LC_MESSAGES\"):\n if varname in os.environ:\n del os.environ[varname]", "def is_using_terminal(self):\n return self.using_terminal", "def tty_supports_color():\r\n\t\t\r\n\t\tplat = sys.platform\r\n\r\n\t\tif plat == \"win32\":\r\n\t\t\treturn False\r\n\t\telse:\r\n\t\t\tsupported_platform = plat != 'Pocket PC' and (plat != 'win32' or\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t 'ANSICON' in os.environ)\r\n\t\t# isatty is not always implemented, #6223.\r\n\t\t\tis_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()\r\n\t\t\treturn supported_platform and is_a_tty", "def supports_color(): # pragma: no cover # noqa\n plat = sys.platform\n supported_platform = plat != 'Pocket PC' and (\n plat != 'win32' or 
'ANSICON' in os.environ\n )\n\n # isatty is not always implemented, #6223.\n is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()\n if not supported_platform or not is_a_tty:\n return False\n return True", "def xterm_title(value, max_length=74, bypass_term_check=False):\n TERM = os.getenv('TERM')\n if not bypass_term_check and TERM not in TERM_TITLE_SUPPORTED:\n return\n sys.stderr.write('\\033]2;'+value[:max_length]+'\u0007')\n sys.stderr.flush()", "def configure_terminal(termname):\n lnp.userconfig['terminal_type'] = termname\n lnp.userconfig.save_data()", "def getpreferredencoding() -> str:\n return locale.getpreferredencoding() or \"UTF-8\"", "def supports_color():\n unsupported_platform = (sys.platform in ('win32', 'Pocket PC'))\n # isatty is not always implemented, #6223.\n is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()\n if unsupported_platform or not is_a_tty:\n return False\n return True", "def GetEnvironFallback(var_list, default):\n for var in var_list:\n if var in os.environ:\n return os.environ[var]\n return default", "def getpreferredencoding():\n default_encoding = locale.getpreferredencoding() or \"UTF-8\"\n\n # On Windows, we mimic git/linux by trying to read the LC_ALL, LC_CTYPE, LANG env vars manually\n # (on Linux/MacOS the `getpreferredencoding()` call will take care of this).\n # We fallback to UTF-8\n if PLATFORM_IS_WINDOWS:\n default_encoding = \"UTF-8\"\n for env_var in [\"LC_ALL\", \"LC_CTYPE\", \"LANG\"]:\n encoding = os.environ.get(env_var, False)\n if encoding:\n # Support dotted (C.UTF-8) and non-dotted (C or UTF-8) charsets:\n # If encoding contains a dot: split and use second part, otherwise use everything\n dot_index = encoding.find(\".\")\n if dot_index != -1:\n default_encoding = encoding[dot_index + 1:]\n else:\n default_encoding = encoding\n break\n\n return default_encoding", "def resetTerminal():\n sys.stdout.write('\\n\\n') # add a few blank lines\n sys.stdout.flush()\n if os.name == 'nt':\n os.system('cls')\n else:\n os.system('clear')", "def is_colorterm():\n global _STATIC_VARS\n if 'colorterm' not in _STATIC_VARS:\n terms = ['ansi', 'xterm-color', 'xterm-256color', 'screen']\n _STATIC_VARS.colorterm = _STATIC_VARS.term and \\\n getenv('TERM') in terms\n return _STATIC_VARS.colorterm", "def test_term_chars_default(self, instrument):\n assert instrument.term_chars is None", "def terminal_init(self):\n pass", "def get_terminal_command(cmd, force_custom=False):\n log.d(\"Preparing terminal command for command line %s\", cmd)\n if not isinstance(cmd, list):\n cmd = [cmd, ]\n if sys.platform == 'darwin':\n return ['open', '-a', 'Terminal.app'] + cmd\n elif sys.platform.startswith('linux'):\n if force_custom:\n term = CustomTerminal.get_command_line()\n log.d(\"Using custom terminal: %s\", term)\n else:\n term = get_configured_terminal().get_command_line()\n log.d(\n \"Using configured terminal: %s, command line %s\", term,\n get_configured_terminal().name)\n if not term:\n raise Exception(\n 'No terminal configured! 
Use File > Configure Terminal.')\n if \"$\" in term:\n c = []\n for s in term:\n if s == '$':\n c += cmd\n else:\n c.append(s)\n return c\n else:\n return term + cmd\n raise Exception('No terminal launcher for platform: ' + sys.platform)", "def _is_terminal(self):\n raise NotImplementedError", "def get_terminal_width(default_width=80):\n try:\n return int(os.environ[\"COLUMNS\"])\n except (KeyError, ValueError):\n return default_width", "def __termcode(num):\r\n return \"\\033[%sm\" % num", "def reset_color():\n sys.stdout.write(\"\\033[0m\")", "def isTerminal(self) -> bool:\n ...", "def set_console_font(font):\n log.debug(\"setting console font to %s\", font)\n rc = execWithRedirect(\"setfont\", [font])\n if rc == 0:\n log.debug(\"console font set successfully to %s\", font)\n return True\n else:\n log.error(\"setting console font to %s failed\", font)\n return False", "def set_terminal_theme(theme: ApplicationTheme):\n # Change current mode.\n path = theme.path\n try:\n with open(path, 'r', encoding='utf-8') as f:\n pass\n except FileNotFoundError:\n path = theme.windows_path\n try:\n with open(path, 'r+', encoding='utf-8') as f:\n settings_string = ''\n key = theme.keys.split(theme.settings_delimiter)[-1]\n toggled_theme_name = theme.names[theme.mode == ThemeMode.light]\n\n # Search for line with key.\n current_theme_name = ''\n for line in f:\n if not current_theme_name and key in line and any(\n name in line for name in theme.names\n ):\n # Replace current theme with toggled theme.\n # Requires that theme name is either dark or\n # light theme. Else keep \"invalid\" theme rather\n # than raising an exception.\n current_theme_name = theme.names[theme.light_name in line]\n line = line.replace(current_theme_name, toggled_theme_name)\n settings_string += line\n if current_theme_name:\n f.seek(0)\n f.truncate()\n # Write new settings to file.\n f.write(settings_string)\n\n print('Set Terminal theme to:', toggled_theme_name)\n else:\n raise ValueError(\n 'Failed to find valid current theme for Terminal.')\n except Exception as e:\n print('Failed to set Terminal theme.')\n print(repr(e))", "def restore():\n global TERMATTRS\n termios.tcsetattr(stdin, termios.TCSANOW, TERMATTRS)", "def fall_back_message():\r\n card_title = \"Fallback Message\"\r\n fallback_string = \"Sorry. I couldn't understood it. 
Please say again.\"\r\n should_end_session = False\r\n session_attributes = { \r\n \"speech_output\": fallback_string,\r\n \r\n \r\n }\r\n\r\n return build_response(session_attributes, build_speechlet_response(card_title, fallback_string, \"Ask me to say hello...\", should_end_session))", "def on_xTerm(self):\n path = os.path.normpath(self.pXterm)\n os.system('start \"Toto\" /d \"%s\"' % path)", "def term():\n curses.endwin()\n unicornhathd.off()", "def stderr_log_scheme():\r\n if LogOptions._STDOUT_LOG_SCHEME is None:\r\n LogOptions.set_stderr_log_level(app.get_options().twitter_common_log_stderr_log_level)\r\n return LogOptions._STDOUT_LOG_SCHEME", "def set_unique_prompt (self, optional_prompt=None):\r\n if optional_prompt is not None:\r\n self.prompt = optional_prompt\r\n self.sendline (self.PROMPT_SET_SH) # sh-style\r\n i = self.expect ([TIMEOUT, self.PROMPT], timeout=10)\r\n if i == 0: # csh-style\r\n self.sendline (self.PROMPT_SET_CSH)\r\n i = self.expect ([TIMEOUT, self.PROMPT], timeout=10)\r\n if i == 0:\r\n return 0\r\n return 1", "def supports_color():\n\n sys_platform = sys.platform\n supported = sys_platform != \"Pocket PC\" and (\n sys_platform != \"win32\" or \"ANSICON\" in os.environ\n )\n\n atty_connected = hasattr(sys.stdout, \"isatty\") and sys.stdout.isatty()\n return supported and atty_connected", "def set_base_prompt(\n self, pri_prompt_terminator=\"#\", alt_prompt_terminator=\">\", delay_factor=1\n ):\n prompt = self.find_prompt(delay_factor=delay_factor)\n if not prompt[-1] in (pri_prompt_terminator, alt_prompt_terminator):\n raise ValueError(f\"Router prompt not found: {repr(prompt)}\")\n\n prompt = prompt.strip()\n if len(prompt) == 1:\n self.base_prompt = prompt\n else:\n # Strip off trailing terminator\n self.base_prompt = prompt[:-1]\n return self.base_prompt", "def render_defaults(stdscr):\n max_y = stdscr.getmaxyx()[0] - 1\n if superglobals.information_enabled:\n stdscr.addstr(0, 0, uname().system)\n stdscr.addstr(1, 0, uname().machine)\n \n for i in range(0, max_y + 1):\n stdscr.addstr(i, 43, \"│\") # Barrier that protects program from user input.", "def send_charset(self, offered):\n selected = None\n for offer in offered:\n try:\n codec = codecs.lookup(offer)\n except LookupError as err:\n self.log.info('LookupError: {}'.format(err))\n else:\n if (codec.name == self.env['CHARSET'] or not selected):\n self.env['CHARSET'] = codec.name\n selected = offer\n if selected:\n self.log.debug('Encoding negotiated: {env[CHARSET]}.'\n .format(env=self.env))\n return selected\n self.log.info('No suitable encoding offered by server: {!r}.'\n .format(offered))\n return None", "def detect_backend():\n try:\n from termpixels.unix import UnixBackend\n return UnixBackend()\n except:\n try:\n from termpixels.win32_vt import Win32VtBackend\n return Win32VtBackend()\n except Exception as e:\n raise e\n from termpixels.win32 import Win32Backend\n return Win32Backend()", "def get_console_width(fallback=75):\n if test_if_ipython():\n return fallback\n try:\n _, width = subprocess.check_output(['stty', 'size'], stderr=subprocess.PIPE).split()\n except:\n width = fallback\n width = int(width)\n return width", "def default(self, line):\n cmd,args,line = self.parseline(line)\n try:\n background = ampersand(line)\n if background:\n bg = BackProc(cmd,args)\n else:\n self.subproc(line)\n except:\n sys.stdout.write(\"Unknown command '{}' returning to shell...\\n\".format(line))", "def LCD_contrast(self, contrast):\n self.send_packet('\\x13' + str([contrast]))", "def setDefaultTerm(self, 
*args):\n return _libsbml.Transition_setDefaultTerm(self, *args)", "def clean_up_terminal(self) -> None:\n if self.stdscr:\n # Disable the Keypad mode\n self.stdscr.keypad(False)\n # Renable caracters echoing\n curses.echo()\n # Disable the interrupts\n curses.nocbreak()\n # Restore the terimnal to it's orginial operating mode\n curses.endwin()", "def isSpecial(ansiCode,string):\n if IS_TERMINAL and not IS_WIN32: return ansiCode+string+ANSI_END\n else: return string", "def stderrConnectedToTerm():\n return sys.stderr.isatty()", "def locale_supported_in_console(locale):\n\n locale_scripts = get_locale_scripts(locale)\n return set(locale_scripts).issubset(SCRIPTS_SUPPORTED_BY_CONSOLE)", "def no_color():\n return (bool(_environ.get(\"ACCELPY_NO_COLOR\", False)) and\n _system() != 'Windows')", "def init():\n global _default_foreground, _default_background, _default_style\n try:\n attrs = GetConsoleScreenBufferInfo().wAttributes\n except (ArgumentError, WindowsError):\n _default_foreground = GREY\n _default_background = BLACK\n _default_style = NORMAL\n else:\n _default_foreground = attrs & 7\n _default_background = (attrs >> 4) & 7\n _default_style = attrs & BRIGHT", "def default_locale(category: str | None = None, aliases: Mapping[str, str] = LOCALE_ALIASES) -> str | None:\n varnames = (category, 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG')\n for name in filter(None, varnames):\n locale = os.getenv(name)\n if locale:\n if name == 'LANGUAGE' and ':' in locale:\n # the LANGUAGE variable may contain a colon-separated list of\n # language codes; we just pick the language on the list\n locale = locale.split(':')[0]\n if locale.split('.')[0] in ('C', 'POSIX'):\n locale = 'en_US_POSIX'\n elif aliases and locale in aliases:\n locale = aliases[locale]\n try:\n return get_locale_identifier(parse_locale(locale))\n except ValueError:\n pass\n return None", "def _windows_enable_color() -> bool:\n # pylint: disable=invalid-name, import-error, undefined-variable\n # Pulled from: https://bugs.python.org/issue30075\n import msvcrt\n import ctypes\n from ctypes import wintypes\n kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) # type: ignore\n\n ERROR_INVALID_PARAMETER = 0x0057\n ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004\n\n def _check_bool(result: Any, _func: Any, args: Any) -> Any:\n if not result:\n raise ctypes.WinError(ctypes.get_last_error()) # type: ignore\n return args\n\n LPDWORD = ctypes.POINTER(wintypes.DWORD)\n kernel32.GetConsoleMode.errcheck = _check_bool\n kernel32.GetConsoleMode.argtypes = (wintypes.HANDLE, LPDWORD)\n kernel32.SetConsoleMode.errcheck = _check_bool\n kernel32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)\n\n def set_conout_mode(new_mode: int, mask: int = 0xffffffff) -> int:\n # don't assume StandardOutput is a console.\n # open CONOUT$ instead\n fdout = os.open('CONOUT$', os.O_RDWR)\n try:\n hout = msvcrt.get_osfhandle(fdout)\n old_mode = wintypes.DWORD()\n kernel32.GetConsoleMode(hout, ctypes.byref(old_mode))\n mode = (new_mode & mask) | (old_mode.value & ~mask)\n kernel32.SetConsoleMode(hout, mode)\n return old_mode.value\n finally:\n os.close(fdout)\n\n def enable_vt_mode() -> int:\n mode = mask = ENABLE_VIRTUAL_TERMINAL_PROCESSING\n try:\n return set_conout_mode(mode, mask)\n except WindowsError as exc:\n if exc.winerror == ERROR_INVALID_PARAMETER:\n raise NotImplementedError from exc\n raise\n\n try:\n enable_vt_mode()\n return True\n except NotImplementedError:\n return False", "def quit_alternate_screen(self) -> None:\n if 
self._in_alternate_screen:\n stdout = HANDLE(\n self._winapi(windll.kernel32.GetStdHandle, STD_OUTPUT_HANDLE)\n )\n self._winapi(windll.kernel32.SetConsoleActiveScreenBuffer, stdout)\n self._winapi(windll.kernel32.CloseHandle, self.hconsole)\n self.hconsole = stdout\n self._in_alternate_screen = False", "def _ask_prompt(question: str,\n console: io.IO,\n validate: Optional[Callable[[str], None]] = None,\n default: Optional[str] = None) -> str:\n validate = validate or (lambda x: None)\n while True:\n answer = console.ask(question)\n if default and not answer:\n answer = default\n try:\n validate(answer)\n break\n except ValueError as e:\n console.error(e)\n\n return answer", "def setFontFallback(self,value):\n self.PDFreactorConfiguration.in1[\"fontFallback\"] = value", "def get_charset(self, default: str) -> str:\n ...", "def cursor_set():\n print(\"\\033[0;0H\")", "def is_terminal(self) -> bool:\n pass", "def set_tty(self):\n try:\n # Increase the width of tty to enable long line grep in check_server()\n self.connector.setwinsize(24, 256)\n # echo default is False\n self.connector.setecho(False)\n return True\n except:\n error_log(\"set tty failed: exception type[%s], value[%s]\",\n sys.exc_info()[0], sys.exc_info()[1])\n return False", "def set_encoding(cls, encoding):\n import sys\n default_stdout = sys.stdout\n default_stderr = sys.stderr\n #importlib.reload(sys)\n #sys.setdefaultencoding(encoding)\n sys.stdout = default_stdout\n sys.stderr = default_stderr", "def choose_reboot():\n while True:\n choice = input(\"Would you like to reboot now ? [y\\\\N] \")\n if choice.lower() == 'n' or choice == '':\n return\n elif choice.lower() == 'y':\n break\n else:\n continue\n\n if os.name == 'nt':\n call('shutdown /r /t 00')\n else:\n call('reboot')", "def on_exit():\n print(\"\\033[0m\", end = \"\")", "def test_tabulate_ansi_escape_in_default_value():\n\n data = [[\"1\", None], [\"2\", \"Sam\"], [\"3\", \"Joe\"]]\n headers = [\"id\", \"name\"]\n\n styled = format_output(\n iter(data),\n headers,\n format_name=\"psql\",\n missing_value=\"\\x1b[38;5;10mNULL\\x1b[39m\",\n )\n unstyled = format_output(\n iter(data), headers, format_name=\"psql\", missing_value=\"NULL\"\n )\n\n stripped_styled = [strip_ansi(s) for s in styled]\n\n assert list(unstyled) == stripped_styled", "def _set_default_font(cls):\n if platform.system() == \"Linux\":\n for family in (\"DejaVu Sans\", \"Noto Sans\", \"Nimbus Sans\"):\n if family in tk.font.families():\n logger.debug(\"Setting default font to: '%s'\", family)\n tk.font.nametofont(\"TkDefaultFont\").configure(family=family)\n tk.font.nametofont(\"TkHeadingFont\").configure(family=family)\n tk.font.nametofont(\"TkMenuFont\").configure(family=family)\n break\n return tk.font.nametofont(\"TkDefaultFont\").configure()[\"family\"]", "def setup_locale(locale, lang=None, text_mode=False):\n\n if lang:\n lang.lang = locale\n\n # not all locales might be displayable in text mode\n if text_mode:\n # check if the script corresponding to the locale/language\n # can be displayed by the Linux console\n # * all scripts for the given locale/language need to be\n # supported by the linux console\n # * otherwise users might get a screen full of white rectangles\n # (also known as \"tofu\") in text mode\n # then we also need to check if we have information about what\n # font to use for correctly displaying the given language/locale\n\n script_supported = locale_supported_in_console(locale)\n log.debug(\"scripts found for locale %s: %s\", locale, get_locale_scripts(locale))\n\n 
console_fonts = get_locale_console_fonts(locale)\n log.debug(\"console fonts found for locale %s: %s\", locale, console_fonts)\n\n font_set = False\n if script_supported and console_fonts:\n # try to set console font\n for font in console_fonts:\n if set_console_font(font):\n # console font set successfully, skip the rest\n font_set = True\n break\n\n if not font_set:\n log.warning(\"can't set console font for locale %s\", locale)\n # report what exactly went wrong\n if not(script_supported):\n log.warning(\"script not supported by console for locale %s\", locale)\n if not(console_fonts): # no fonts known for locale\n log.warning(\"no console font found for locale %s\", locale)\n if script_supported and console_fonts:\n log.warning(\"none of the suggested fonts can be set for locale %s\", locale)\n log.warning(\"falling back to the English locale\")\n locale = constants.DEFAULT_LANG\n os.environ[\"LANG\"] = locale # pylint: disable=environment-modify\n\n # set the locale to the value we have selected\n log.debug(\"setting locale to: %s\", locale)\n setenv(\"LANG\", locale)\n locale_mod.setlocale(locale_mod.LC_ALL, locale)", "def test_prompt_init_default_instr_valid(self):\n self.assertEqual(self._getPrompt()._instr, '/dev/tty')", "def detect_shell() -> Optional[str]:\n shell_var = os.environ.get('SHELL')\n if shell_var:\n return os.path.basename(shell_var)\n return None", "def fg(r: int, g: int, b: int) -> str:\n return f\"\\033[38;2;{r};{g};{b}m\"", "def show_terminal_warning():\n\n # clear terminal\n nuqql.win.MAIN_WINS[\"screen\"].clear()\n\n # check if terminal is big enough for at least one character\n max_y, max_x = nuqql.win.MAIN_WINS[\"screen\"].getmaxyx()\n if max_y < 1:\n return\n if max_x < 1:\n return\n\n # print as much of the error message as possible\n msg = \"Invalid terminal size. Please resize.\"[:max_x - 1]\n nuqql.win.MAIN_WINS[\"screen\"].addstr(0, 0, msg)", "def __alt_prompt(self, prompt_text: str):\r\n if self.__use_windows_prompt:\r\n sys.stdout.write(prompt_text)\r\n sys.stdout.flush()\r\n i = sys.stdin.readline()\r\n return i.strip()\r\n return input(prompt_text)", "def get_system_encoding():\n try:\n encoding = locale.getdefaultlocale()[1] or 'ascii'\n codecs.lookup(encoding)\n except Exception as _:\n del _\n encoding = 'ascii'\n return encoding", "def get_system_encoding():\n try:\n encoding = locale.getdefaultlocale()[1] or 'ascii'\n codecs.lookup(encoding)\n except Exception as _:\n del _\n encoding = 'ascii'\n return encoding", "def set_console(stream=STDOUT, foreground=None, background=None, style=None):\n if foreground is None:\n foreground = _default_foreground\n if background is None:\n background = _default_background\n if style is None:\n style = _default_style\n attrs = get_attrs(foreground, background, style)\n SetConsoleTextAttribute(stream, attrs)", "def configure_terminal():\n fd = sys.stdin.fileno()\n # save old state\n flags_save = fcntl.fcntl(fd, fcntl.F_GETFL)\n attrs_save = termios.tcgetattr(fd)\n # make raw - the way to do this comes from the termios(3) man page.\n attrs = list(attrs_save) # copy the stored version to update\n # iflag\n attrs[0] &= ~(termios.IGNBRK | termios.BRKINT | termios.PARMRK \n | termios.ISTRIP | termios.INLCR | termios. IGNCR \n | termios.ICRNL | termios.IXON )\n # oflag\n attrs[1] &= ~termios.OPOST\n # cflag\n attrs[2] &= ~(termios.CSIZE | termios. 
PARENB)\n attrs[2] |= termios.CS8\n # lflag\n attrs[3] &= ~(termios.ECHONL | termios.ECHO | termios.ICANON\n | termios.ISIG | termios.IEXTEN)\n termios.tcsetattr(fd, termios.TCSANOW, attrs)\n # turn on non-blocking\n fcntl.fcntl(fd, fcntl.F_SETFL, flags_save | os.O_NONBLOCK)\n\n return dict(flags_save=flags_save, attrs_save=attrs_save)", "def setquit():\r\n if os.sep == ':':\r\n eof = 'Cmd-Q'\r\n elif os.sep == '\\\\':\r\n eof = 'Ctrl-Z plus Return'\r\n else:\r\n eof = 'Ctrl-D (i.e. EOF)'\r\n\r\n class Quitter(object):\r\n def __init__(self, name):\r\n self.name = name\r\n def __repr__(self):\r\n return 'Use %s() or %s to exit' % (self.name, eof)\r\n def __call__(self, code=None):\r\n # Shells like IDLE catch the SystemExit, but listen when their\r\n # stdin wrapper is closed.\r\n try:\r\n sys.stdin.close()\r\n except:\r\n pass\r\n raise SystemExit(code)\r\n __builtin__.quit = Quitter('quit')\r\n __builtin__.exit = Quitter('exit')", "def configure_custom_terminal(new_path):\n lnp.userconfig['terminal'] = new_path\n lnp.userconfig.save_data()", "def parse_ansi(self, string, strip_ansi=False, xterm256=False, mxp=False):\n if hasattr(string, \"_raw_string\"):\n if strip_ansi:\n return string.clean()\n else:\n return string.raw()\n\n if not string:\n return \"\"\n\n # check cached parsings\n global _PARSE_CACHE\n cachekey = \"%s-%s-%s-%s\" % (string, strip_ansi, xterm256, mxp)\n if cachekey in _PARSE_CACHE:\n return _PARSE_CACHE[cachekey]\n\n # pre-convert bright colors to xterm256 color tags\n string = self.brightbg_sub.sub(self.sub_brightbg, string)\n\n def do_xterm256_fg(part):\n return self.sub_xterm256(part, xterm256, \"fg\")\n\n def do_xterm256_bg(part):\n return self.sub_xterm256(part, xterm256, \"bg\")\n\n def do_xterm256_gfg(part):\n return self.sub_xterm256(part, xterm256, \"gfg\")\n\n def do_xterm256_gbg(part):\n return self.sub_xterm256(part, xterm256, \"gbg\")\n\n in_string = utils.to_str(string)\n\n # do string replacement\n parsed_string = []\n parts = self.ansi_escapes.split(in_string) + [\" \"]\n for part, sep in zip(parts[::2], parts[1::2]):\n pstring = self.xterm256_fg_sub.sub(do_xterm256_fg, part)\n pstring = self.xterm256_bg_sub.sub(do_xterm256_bg, pstring)\n pstring = self.xterm256_gfg_sub.sub(do_xterm256_gfg, pstring)\n pstring = self.xterm256_gbg_sub.sub(do_xterm256_gbg, pstring)\n pstring = self.ansi_sub.sub(self.sub_ansi, pstring)\n parsed_string.append(\"%s%s\" % (pstring, sep[0].strip()))\n parsed_string = \"\".join(parsed_string)\n\n if not mxp:\n parsed_string = self.strip_mxp(parsed_string)\n\n if strip_ansi:\n # remove all ansi codes (including those manually\n # inserted in string)\n return self.strip_raw_codes(parsed_string)\n\n # cache and crop old cache\n _PARSE_CACHE[cachekey] = parsed_string\n if len(_PARSE_CACHE) > _PARSE_CACHE_SIZE:\n _PARSE_CACHE.popitem(last=False)\n\n return parsed_string", "def _prompt(letters='yn', default=None):\n while True:\n try:\n input_text = sys.stdin.readline().strip()\n except KeyboardInterrupt:\n sys.exit(0)\n if input_text and input_text in letters:\n return input_text\n if default is not None and input_text == '':\n return default\n print('Come again?')", "def async_timeout_callback(self) -> None:\n\n # Use detected output encoding\n encoding = self.env.get('ENCODING', 'ascii')\n\n # Pull final encoding detected into session\n self.__telnet_startup.set_encoding()\n self.async_write(bytes(color(\n 'Terminal Detection Completed.\\r\\n', fg='blue', style='bold'), encoding=encoding))\n\n self.__is_detection_completed = 
True\n\n # Print out initial detection\n str_detect = b'\\r\\n'\n str_detect += b'term : %s \\r\\n' \\\n % bytes(self.env.get('TERM', None), encoding=encoding)\n str_detect += b'encoding : %s \\r\\n' \\\n % bytes(self.env.get('ENCODING', None), encoding=encoding)\n str_detect += b'lines : %s \\r\\n' \\\n % bytes(self.env.get('LINES', None), encoding=encoding)\n str_detect += b'cols : %s \\r\\n\\r\\n' \\\n % bytes(self.env.get('COLUMNS', None), encoding=encoding)\n self.async_write(str_detect)", "def get_fallback_response():\n\n speech_output = FALLBACK_MESSAGE\n return response(speech_response(speech_output, False))", "def get_fallback_response():\n\n speech_output = FALLBACK_MESSAGE\n return response(speech_response(speech_output, False))", "def test_prompt_msg_noask_default_fails(self):\n self.expected['failed'] = True\n self.expected['msg'] = \"Unexpected 'default' in non-question prompt.\"\n\n self.assertEquals(\n self.prompt._prompt(self.response, {\n \"say\": \"Hello World\",\n \"default\": \"foobar\"\n }),\n self.expected\n )", "def input_with_default(prompt, default):\n response = raw_input(\"%s (Default %s) \"%(prompt, default))\n if not response:\n return default\n return response", "def getCurrentDisplay():\n # Windows reports the active display as 'console', so we hardcode it\n return \"console\"", "def fg(value: int) -> str:\n return f\"\\033[38;5;{value}m\"", "def _make_term_color(fg, bg, bold=False, reverse=False):\n return '%s%02d;%02d;%02d;%02d%s' % \\\n (CSI, bold and 1 or 22, reverse and 7 or 27, 30 + fg, 40 + bg, END)" ]
[ "0.5692326", "0.53151155", "0.5231032", "0.52155876", "0.51974976", "0.51403886", "0.5131423", "0.50917715", "0.50907224", "0.5077649", "0.4965872", "0.48994294", "0.4885659", "0.48755345", "0.48475754", "0.48131225", "0.480396", "0.47808626", "0.4763937", "0.47169244", "0.47124702", "0.4706323", "0.4693541", "0.468458", "0.4677504", "0.46382722", "0.46357965", "0.46302018", "0.46000203", "0.4575656", "0.45679042", "0.45512155", "0.4543103", "0.45199847", "0.45117056", "0.4507327", "0.44877476", "0.44844136", "0.4440564", "0.44323564", "0.44137925", "0.44106454", "0.44091526", "0.44016168", "0.43937337", "0.43718493", "0.43542343", "0.43395895", "0.43309394", "0.43221644", "0.43213668", "0.43032232", "0.43007627", "0.43002522", "0.42992055", "0.42935008", "0.42829877", "0.426668", "0.4265931", "0.4257371", "0.42377317", "0.4226559", "0.4220392", "0.4219108", "0.42158777", "0.42134443", "0.4206225", "0.42003757", "0.41976523", "0.41708997", "0.41681623", "0.41633946", "0.4157863", "0.41533834", "0.41482174", "0.4148017", "0.41455787", "0.4145161", "0.4143176", "0.4139848", "0.41337028", "0.41334674", "0.411299", "0.4108739", "0.4099624", "0.4099624", "0.40916815", "0.4090361", "0.40902537", "0.40844476", "0.40818396", "0.4078587", "0.40724877", "0.40574798", "0.40574798", "0.40545264", "0.40453756", "0.40448505", "0.4041682", "0.40415606" ]
0.5457427
1
This function will create a foreign table under the existing dummy schema.
def create_foreign_table(server, db_name, schema_name, fsrv_name, foreign_table_name): try: connection = get_db_connection(db_name, server['username'], server['db_password'], server['host'], server['port']) old_isolation_level = connection.isolation_level connection.set_isolation_level(0) pg_cursor = connection.cursor() pg_cursor.execute( "CREATE FOREIGN TABLE " + schema_name + "." + foreign_table_name + "(emp_name text NULL) SERVER %s" % fsrv_name) connection.set_isolation_level(old_isolation_level) connection.commit() # Get 'oid' from newly created foreign table pg_cursor.execute( "SELECT ftrelid FROM pg_foreign_table WHERE ftserver = " "(SELECT oid FROM pg_foreign_server WHERE srvname = '%s') ORDER BY " "ftrelid ASC limit 1" % fsrv_name) oid = pg_cursor.fetchone() ft_id = '' if oid: ft_id = oid[0] connection.close() return ft_id except Exception: traceback.print_exc(file=sys.stderr)
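A hedged usage sketch for the helper above; the server dict keys match what the function indexes, while the database, schema, foreign-server and table names are illustrative placeholders (a foreign server with the given name must already exist, and get_db_connection is the same helper the snippet relies on):

    server = {'username': 'postgres', 'db_password': 'secret',
              'host': 'localhost', 'port': 5432}
    ft_oid = create_foreign_table(server, 'test_db', 'test_schema',
                                  'test_fsrv', 'test_foreign_table')
    if ft_oid:
        print('created foreign table with oid %s' % ft_oid)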
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_table(self):\n pass", "def create_tables():\n db.create_all()", "def create_tables():\n db.create_all()", "def create_table(self, cursor: sqlite3.Cursor) -> None:\n\n if self.created:\n return\n\n # Preset this to true to work with Foreign key loops.\n self.created = True\n\n for _, model in self.foreigners.values():\n model.create_table(cursor)\n\n compiled_sql = self._create_table_sql()\n\n _LOGGER.debug(compiled_sql)\n\n cursor.execute(compiled_sql)\n\n for smodel in self.submodels.values():\n smodel.model.create_table(cursor)", "def create_tables():\n db.create_all()", "def create_example_test_table(conn):\n execute_sql_script(conn, \"06_create_example_test_table.sql\")", "def setup_tables(self):\n try:\n self.cursor.execute('CREATE SCHEMA sandbox')\n self.cursor.execute(\"DROP TABLE sandbox.dvds_rdbhdb_super;\")\n except (db.ProgrammingError, db.OperationalError), e:\n # sandbox may not exist\n pass #raise\n\n try:\n self.cursor.execute(\n \"\"\"CREATE TABLE sandbox.dvds_rdbhdb_super(\n id SERIAL PRIMARY KEY,\n name varchar(40) NOT NULL,\n rating float,\n UNIQUE(name)\n );\n \"\"\" )\n except db.ProgrammingError, e:\n if e[0] != '42P07':\n raise", "def test_dummydb_new_table(self):\n db = DummyDB()\n columns = {\n \"one\": int,\n \"two\": str,\n \"three\": bool,\n }\n db.create_table(\"new_table\", columns)", "def _create_tables():\n from Model.DataAccessor.DbAccessor.DbOrmAccessor import db\n db.create_tables([SubjectType, SubjectRegion, Subject])", "def _create_table(self) :\n\n cur = self.con.cursor()\n delete_sql = 'DROP TABLE IF EXISTS \"%s\"' % self.name\n cur.execute(delete_sql)\n\n col_sql = ','.join(['\"%s\" %s' % (self.cols[i], self.types[i])\n for i in range(len(self.cols))])\n create_sql = 'CREATE TABLE \"%s\" ( %s );' % (self.name, col_sql)\n cur.execute(create_sql)", "def db_create_table(db_in, tablename):\n connection = db_in.connection.cursor()\n connection.execute('CREATE TABLE IF NOT EXISTS %s(id INTEGER PRIMARY KEY);' % tablename)", "def test_dummydb_new_table_duplicate_name(self):\n db = DummyDB()\n columns = {\n \"one\": int,\n \"two\": str,\n \"three\": bool,\n }\n db.create_table(\"new_table\", columns)\n db.create_table(\"new_table\", columns)", "def create_base_table(self, table_name):\n print('new')\n # Create table at first.\n select_stm = self.construct_base_table()\n exec_query('DROP TABLE IF EXISTS %s;' % table_name) \n sql = \"\"\"\n CREATE TABLE %s AS\n %s\n \"\"\" % (table_name, select_stm)\n exec_query(sql)", "def create_table(self, name: str, columns=None, foreigns=None) -> None:\n\n if foreigns is None:\n foreigns = []\n\n if columns is None:\n columns = []\n\n sql = 'CREATE TABLE ' + name + ' ('\n\n for column in columns:\n sql += column.to_sql() + ','\n\n for foreign in foreigns:\n sql += foreign.to_sql()\n sql += ','\n\n sql = sql[:-1] + ');'\n\n self.cursor.execute(sql)", "def imp_create_tables():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(*config['CLUSTER'].values()))\n cur = conn.cursor()\n \n # Drop the tables (uncomment if necessary)\n #drop_tables(cur, conn)\n\n # Create the tables\n create_tables(cur, conn)\n\n conn.close()", "def create_questions_table(conn):\n execute_sql_script(conn, \"04_create_questions_table.sql\")", "def create_table(self, create_table_sql):\n print('connect')\n conn = psycopg2.connect(self.name, sslmode='require')\n c = conn.cursor()\n c.execute(create_table_sql)\n conn.close()", "def 
check_and_create_table(self) -> None:\n table_ids = [t.table_id for t in self.instance.list_tables()]\n\n if not self.table_id in table_ids:\n self.table.create()\n f = self.table.column_family(self.family_id)\n f.create()\n\n f_inc = self.table.column_family(self.incrementer_family_id,\n gc_rule=MaxVersionsGCRule(1))\n f_inc.create()\n\n f_log = self.table.column_family(self.log_family_id)\n f_log.create()\n\n f_ce = self.table.column_family(self.cross_edge_family_id,\n gc_rule=MaxVersionsGCRule(1))\n f_ce.create()\n\n print(\"Table created\")", "def create_tables(self):\n\n self.cur.execute('''CREATE TABLE IF NOT EXISTS my_business_entry\n (\n id SERIAL PRIMARY KEY,\n url_yes_no boolean,\n url TEXT,\n phone_yes_no boolean,\n phone TEXT,\n rating TEXT,\n nr_of_ratings TEXT,\n myBusiness boolean,\n company TEXT\n );''')\n\n self.connection.commit()", "def create_table(conn, create_table_sql):\r\n try:\r\n c = conn.cursor()\r\n c.execute(create_table_sql)\r\n except Error as e:\r\n print(e)", "def create_table(self, conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table():\n conn = psycopg2.connect(host=\"localhost\", database=\"integration\", user=\"postgres\", password=\"postgres\")\n cursor = conn.cursor()\n cursor.execute(CREATE_TABLE)\n conn.commit()\n cursor.close()", "def create_table(self, create_table_sql):\n connection = self.__create_connection()\n try:\n c = connection.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_dataBase(conn, create_cmd):\n if conn:\n cursor = conn.cursor()\n cursor.execute(create_cmd)\n conn.commit()\n #print '[sql management] Table Created...'", "def create_train_table(conn):\n execute_sql_script(conn, \"03_create_train_table.sql\")", "def create_example_sample_submission_table(conn):\n execute_sql_script(conn, \"07_create_example_sample_submission_table.sql\")", "def create_table(conn, sql_create_table):\n try:\n c = conn.cursor()\n c.execute(sql_create_table)\n except Error as e:\n print(e)", "def create_table(conn, sql_create_table):\n try:\n c = conn.cursor()\n c.execute(sql_create_table)\n except Error as e:\n print(e)", "def create_table(conn, sql_create_table):\n try:\n c = conn.cursor()\n c.execute(sql_create_table)\n except Error as e:\n print(e)", "def create_table(self):\n Engine.create_table(self)\n self.connection.commit()", "def create_tables(): \n \n pk_contraint = \"CONSTRAINT {}_pk PRIMARY KEY ({})\"\n uq_contraint = \"CONSTRAINT {}_uq UNIQUE ({})\"\n fk_query = \"\"\"CONSTRAINT {}_fk_{} \n FOREIGN KEY ({}) \n REFERENCES {}({}) \n ON UPDATE CASCADE \n ON DELETE RESTRICT\n \"\"\"\n \n create_dict = {}\n index = 1\n\n\n ############################## public SCHEMA ##############################\n \n schema = 'public'\n create_schema(schema)\n\n #################### site ####################\n table_name = 'site'\n pk_id = 'site_id'\n uq_list = ['site_code']\n fk_dict = {}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n site_code CHAR(3),\n purok VARCHAR,\n sitio VARCHAR,\n barangay VARCHAR,\n municipality VARCHAR,\n province VARCHAR,\n region VARCHAR,\n psgc INTEGER,\n active BOOLEAN NOT NULL DEFAULT TRUE,\n season SMALLINT,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n\n ############################## spatial SCHEMA ##############################\n \n schema = 
'spatial'\n create_schema(schema)\n \n #################### exposure ####################\n table_name = 'exposure'\n pk_id = 'exp_id'\n uq_list = ['exp_name']\n fk_dict = {}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n exp_name VARCHAR,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n \n #################### site_exposure ####################\n table_name = 'site_exposure'\n pk_id = 'se_id'\n uq_list = ['site_id', 'exp_id', 'geom']\n fk_dict = {'site_id': {'ref_schema': 'public', 'ref_table': 'site'},\n 'exp_id': {'ref_schema': 'spatial', 'ref_table': 'exposure'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n site_id INTEGER,\n exp_id INTEGER,\n label_name VARCHAR,\n geom GEOMETRY,\n activated DATE NOT NULL DEFAULT CURRENT_DATE,\n deactivated DATE,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n \n #################### feature ####################\n table_name = 'feature'\n pk_id = 'feat_id'\n uq_list = ['feat_name']\n fk_dict = {}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n feat_name VARCHAR,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### site_feature ####################\n table_name = 'site_feature'\n pk_id = 'sf_id'\n uq_list = ['site_id', 'feat_id', 'geom']\n fk_dict = {'site_id': {'ref_schema': 'public', 'ref_table': 'site'},\n 'feat_id': {'ref_schema': 'spatial', 'ref_table': 'feature'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n site_id INTEGER,\n feat_id INTEGER,\n geom GEOMETRY,\n activated DATE NOT NULL DEFAULT CURRENT_DATE,\n deactivated DATE,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### hazard_zone ####################\n table_name = 'hazard_zone'\n pk_id = 'hz_id'\n uq_list = ['site_id, geom']\n fk_dict = {'site_id': {'ref_schema': 'public', 'ref_table': 'site'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n site_id INTEGER,\n geom GEOMETRY,\n activated DATE NOT NULL DEFAULT CURRENT_DATE,\n deactivated DATE,\n {}, {} {}\n );\n \"\"\"\n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### monitoring ####################\n table_name = 'monitoring'\n pk_id = 'mon_id'\n uq_list = ['mon_name']\n fk_dict = {}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n mon_name VARCHAR,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### site_monitoring ####################\n table_name = 'site_monitoring'\n pk_id = 'sm_id'\n uq_list = ['site_id', 'mon_id', 'geom']\n fk_dict = {'site_id': {'ref_schema': 'public', 'ref_table': 'site'},\n 'mon_id': {'ref_schema': 'spatial', 'ref_table': 'monitoring'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, 
\n site_id INTEGER,\n mon_id INTEGER,\n label_name VARCHAR,\n geom GEOMETRY,\n activated DATE NOT NULL DEFAULT CURRENT_DATE,\n deactivated DATE,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n\n ############################### comm SCHEMA ###############################\n \n schema = 'comm'\n create_schema(schema)\n\n #################### gsm_server ####################\n table_name = 'gsm_server'\n pk_id = 'server_id'\n uq_list = ['server_name']\n fk_dict = {}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n server_name VARCHAR,\n platform_type VARCHAR,\n version SMALLINT,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### server_port ####################\n table_name = 'server_port'\n pk_id = 'port_id'\n uq_list = ['server_id', 'port']\n fk_dict = {'server_id': {'ref_schema': 'comm', 'ref_table': 'gsm_server'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n server_id INTEGER,\n port BOOLEAN,\n ser_port VARCHAR,\n pwr_on_pin SMALLINT,\n ring_pin SMALLINT,\n module_type SMALLINT,\n {}, {} {}\n );\n \"\"\"\n query += \"\"\" COMMENT ON TABLE {}.{} IS \n '0- left\n 1- right'\n ;\"\"\".format(schema, table_name)\n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### network_type ####################\n table_name = 'network_type'\n pk_id = 'prefix'\n uq_list = ['prefix']\n fk_dict = {}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} VARCHAR(3), \n carrier SMALLINT,\n {}, {} {}\n );\n \"\"\"\n query += \"\"\" COMMENT ON TABLE {}.{} IS \n '1- globe\n 2- smart\n 3- landline'\n ;\"\"\".format(schema, table_name)\n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### gsm_module ####################\n table_name = 'gsm_module'\n pk_id = 'gsm_id'\n uq_list = ['prefix', 'num', 'activated']\n fk_dict = {'prefix': {'ref_schema': 'comm', 'ref_table': 'network_type'},\n 'port_id': {'ref_schema': 'comm', 'ref_table': 'server_port'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n prefix VARCHAR(3),\n num CHAR(7),\n activated DATE NOT NULL DEFAULT CURRENT_DATE,\n port_id INTEGER,\n {}, {} {}\n );\n \"\"\"\n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n\n ############################# temporal SCHEMA #############################\n \n schema = 'temporal'\n create_schema(schema)\n\n #################### marker_observation ####################\n table_name = 'marker_observation'\n pk_id = 'mo_id'\n uq_list = ['site_id', 'ts']\n fk_dict = {'site_id': {'ref_schema': 'public', 'ref_table': 'site'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n site_id INTEGER,\n ts TIMESTAMP,\n meas_type VARCHAR(7),\n weather VARCHAR,\n observer_name VARCHAR,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n 
#################### marker_history ####################\n table_name = 'marker_history'\n pk_id = 'hist_id'\n uq_list = ['sm_id', 'ts', 'event']\n fk_dict = {'sm_id': {'ref_schema': 'spatial', 'ref_table': 'site_monitoring'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL,\n sm_id BIGINT,\n ts TIMESTAMP,\n event BOOLEAN,\n label_name VARCHAR,\n {}, {} {}\n );\n \"\"\"\n query += \"\"\" COMMENT ON TABLE {}.{} IS \n '0- rename\n 1- reposition'\n ;\"\"\".format(schema, table_name)\n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### marker_data ####################\n table_name = 'marker_data'\n pk_id = 'data_id'\n uq_list = ['sm_id', 'mo_id']\n fk_dict = {'sm_id': {'ref_schema': 'spatial', 'ref_table': 'site_monitoring'},\n 'mo_id': {'ref_schema': 'temporal', 'ref_table': 'marker_observation'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL,\n mo_id BIGINT,\n sm_id BIGINT,\n measurement NUMERIC(5,1),\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### marker_alert ####################\n table_name = 'marker_alert'\n pk_id = 'alert_id'\n uq_list = ['data_id']\n fk_dict = {'data_id': {'ref_schema': 'temporal', 'ref_table': 'marker_data'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL,\n data_id BIGINT,\n displacement NUMERIC(4,1),\n time_delta FLOAT,\n alert_level SMALLINT,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### logger_model ####################\n table_name = 'logger_model'\n pk_id = 'model_id'\n uq_list = ['has_tilt', 'has_rain', 'has_piezo', 'has_soms', 'logger_type']\n fk_dict = {}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n has_tilt BOOLEAN,\n has_rain BOOLEAN,\n has_piezo BOOLEAN,\n has_soms BOOLEAN,\n logger_type SMALLINT,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n #################### logger ####################\n table_name = 'logger'\n pk_id = 'logger_id'\n uq_list = ['sm_id']\n fk_dict = {'sm_id': {'ref_schema': 'spatial', 'ref_table': 'site_monitoring'},\n 'model_id': {'ref_schema': 'temporal', 'ref_table': 'logger_model'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL, \n sm_id BIGINT,\n model_id INTEGER,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n \n #################### logger_mobile ####################\n table_name = 'logger_mobile'\n pk_id = 'mobile_id'\n uq_list = ['logger_id', 'activated']\n fk_dict = {'logger_id': {'ref_schema': 'temporal', 'ref_table': 'logger'},\n 'gsm_id': {'ref_schema': 'comm', 'ref_table': 'gsm_module'}}\n query = \"\"\"CREATE TABLE IF NOT EXISTS {}.{} (\n {} SERIAL,\n logger_id INTEGER,\n activated DATE NOT NULL DEFAULT CURRENT_DATE,\n sim_num VARCHAR(12),\n gsm_id INTEGER,\n {}, {} {}\n );\n \"\"\" \n create_dict[index] = {'schema': schema,\n 'table_name': table_name,\n 'query': query,\n 'pk_id': 
pk_id,\n 'uq_list': uq_list,\n 'fk_dict': fk_dict}\n index += 1\n\n\n #################### EXECUTE QUERY TO CREATE TABLES ####################\n for index in create_dict.keys():\n dct = create_dict[index]\n schema = dct['schema']\n table_name = dct['table_name']\n query = dct['query']\n pk_id = dct['pk_id']\n uq_list = dct['uq_list']\n fk_dict = dct['fk_dict']\n if len(fk_dict.keys()) == 0:\n fk_constraint = ''\n else:\n fk_constraint_list = ['']\n for fk_id in fk_dict.keys():\n ref_schema = fk_dict.get(fk_id)['ref_schema']\n ref_table = fk_dict.get(fk_id)['ref_table']\n fk_part = fk_query.format(table_name, ref_table, fk_id,\n \"{}.{}\".format(ref_schema, ref_table),\n fk_id)\n fk_constraint_list.append(fk_part)\n fk_constraint = ', '.join(fk_constraint_list)\n \n query = query.format(schema, table_name, pk_id, \n pk_contraint.format(table_name, pk_id),\n uq_contraint.format(table_name, ', '.join(uq_list)),\n \"{}\".format(fk_constraint))\n qdb.execute(query)", "def init_tables(self) -> None:\n # TODO(#93) maybe raise flag when the schema of existing tables isn't what we expect\n # it to be?\n # \"How to know that schema changes?\"\n # logger.warning(\"some message\")\n with self.table_access_condition:\n conn = self._get_connection()\n conn.execute(\"PRAGMA foreign_keys = 1\")\n with conn:\n c = conn.cursor()\n c.execute(CREATE_PROJECTS_TABLE)\n c.execute(CREATE_TASKS_TABLE)\n c.execute(CREATE_REQUESTERS_TABLE)\n c.execute(CREATE_TASK_RUNS_TABLE)\n c.execute(CREATE_ASSIGNMENTS_TABLE)\n c.execute(CREATE_UNITS_TABLE)\n c.execute(CREATE_WORKERS_TABLE)\n c.execute(CREATE_AGENTS_TABLE)\n c.execute(CREATE_QUALIFICATIONS_TABLE)\n c.execute(CREATE_GRANTED_QUALIFICATIONS_TABLE)\n c.execute(CREATE_ONBOARDING_AGENTS_TABLE)", "def create_db_tables():\n\n try:\n webapp.dbsql.create_all()\n webapp.dbsql.session.commit()\n except Exception as e:\n # TODO: melhorar o informe do erro\n raise e", "def create_table(create_table_sql):\n conn = DbUtil.connection\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def make_new_tbl(self):\n debug = False\n default_dd = getdata.get_default_db_dets()\n con, cur = default_dd.con, default_dd.cur\n oth_name_types = getdata.get_oth_name_types(self.settings_data)\n tblname = self.tblname_lst[0]\n if debug: print(f'DBE in make_new_tbl is: {default_dd.dbe}')\n getdata.make_sofa_tbl(\n con, cur, tblname, oth_name_types, headless=False)\n wx.MessageBox(\n _('Your new table has been added to the default SOFA database'))", "def create_table(self, schema: str, table: str, col_types: dict, non_null_columns: List[str]):\n return", "def setup_schema(self):\n models.Base.metadata.create_all(self.session.bind)", "def create_table_in_sqlite_db(self):\n with self.con:\n cur = self.con.cursor()\n cur.execute(\"\"\"DROP TABLE IF EXISTS {};\"\"\".format(self.table_name))\n base_create_query = \"\"\"CREATE TABLE {}({}, PRIMARY KEY ({}));\"\"\"\n columns = ','.join(['{} {}'.format(col, self.columns_types[col]) for col in self.table_columns])\n primary_keys = ','.join(['{}'.format(col) for col in self.table_primary_keys])\n create_query = base_create_query.format(self.table_name, columns, primary_keys)\n cur.execute(create_query)\n self.con.commit()", "def create_tables(cursor):\n\n cursor.execute(\"\"\"CREATE TABLE IF NOT EXISTS\n pencarian(\n timestamp DATETIME NOT NULL,\n query TEXT NOT NULL,\n id INTEGER PRIMARY KEY AUTOINCREMENT)\n \"\"\")\n cursor.execute(\"\"\"\n CREATE TABLE IF NOT EXISTS\n hasil(\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n 
query_id INTEGER,\n label TEXT NOT NULL,\n arti TEXT NOT NULL,\n FOREIGN KEY(query_id) REFERENCES pencarian(id))\n \"\"\")", "def create_table():\n\tCURSOR.execute(\"\"\"CREATE TABLE IF NOT EXISTS {} (\n\t\t\t[ID] NVARCHAR NOT NULL PRIMARY KEY,\n\t\t\t[Name] NVARCHAR,\n\t\t\t[Definition] NVARCHAR)\"\"\".format(TABLE_NAME))", "def create_tables(engine):\n BASE.metadata.create_all(engine)", "def create_tables():\n # Depending on your local settings, you may need to specify a user and password, e.g.\n # conn = psycopg2.connect(dbname=DBNAME, user=\"postgres\", password=\"password\")\n conn = psycopg2.connect(dbname=DBNAME)\n\n create_train_table(conn)\n create_questions_table(conn)\n create_lectures_table(conn)\n create_example_test_table(conn)\n create_example_test_table(conn)\n\n conn.close()", "def create_table(conn, create_table_sql):\n\ttry:\n\t\tc = conn.cursor()\n\t\tc.execute(create_table_sql)\n\texcept Error as e:\n\t\tprint(e)", "def make_redesigned_tbl(final_name, oth_name_types):\n debug = False\n dd = mg.DATADETS_OBJ\n if debug: print(f'DBE in make_redesigned_tbl is: {dd.dbe}')\n tmp_name = getdata.tblname_qtr(mg.DBE_SQLITE, mg.STRICT_TMP_TBL)\n unquoted_final_name = final_name\n final_name = getdata.tblname_qtr(mg.DBE_SQLITE, final_name)\n create_fld_clause = getdata.get_create_flds_txt(oth_name_types, \n strict_typing=False, inc_sofa_id=True)\n if debug: print(create_fld_clause)\n dd.con.commit()\n if debug:\n print(f'About to drop {final_name}')\n SQL_get_tbls = \"\"\"SELECT name\n FROM sqlite_master\n WHERE type = 'table'\n ORDER BY name\"\"\"\n dd.cur.execute(SQL_get_tbls)\n tbls = [x[0] for x in dd.cur.fetchall()]\n tbls.sort(key=lambda s: s.upper())\n print(tbls)\n wipe_tbl(unquoted_final_name)\n if debug:\n print(f'Supposedly just dropped {final_name}')\n SQL_get_tbls = \"\"\"SELECT name\n FROM sqlite_master\n WHERE type = 'table'\n ORDER BY name\"\"\"\n dd.cur.execute(SQL_get_tbls)\n tbls = [x[0] for x in dd.cur.fetchall()]\n tbls.sort(key=lambda s: s.upper())\n print(tbls)\n SQL_make_redesigned_tbl = f'CREATE TABLE {final_name} ({create_fld_clause})'\n dd.cur.execute(SQL_make_redesigned_tbl)\n dd.con.commit()\n oth_names = [objqtr(x[0]) for x in oth_name_types]\n null_plus_oth_flds = ' NULL, ' + ', '.join(oth_names)\n SQL_insert_all = (f'INSERT INTO {final_name} SELECT {null_plus_oth_flds} '\n f'FROM {tmp_name}')\n if debug: print(SQL_insert_all)\n dd.con.commit()\n dd.cur.execute(SQL_insert_all)\n dd.con.commit()", "def create_database_tables():\n with APP.app_context():\n DB.create_all()", "def create_table(conn, create_table_sql):\r\n try:\r\n c = conn.cursor()\r\n c.execute(create_table_sql)\r\n except Error as e:\r\n print(e)", "def _create_table_if_not_exists(self) -> None:\n COLUMN_DEFINITIONS = 'definitions'\n COLUMN_TYPE = 'type'\n\n KEY_REF = '$ref'\n\n TYPE_LOOKUP = {\n 'string': 'VARCHAR(255)',\n 'integer': 'INTEGER',\n 'boolean': 'BOOLEAN',\n 'number': 'INTEGER',\n }\n\n def ref_lookup(\n property: Dict[str, Any], fields: Dict[str, Any]\n ) -> Dict[str, Any]:\n ref = property[KEY_REF]\n property_lookup_name = ref[ref.rfind('/') + 1 :]\n return fields[COLUMN_DEFINITIONS][property_lookup_name]\n\n field_queries = []\n fields = json.loads(self.schema.schema_json())\n\n del fields[Keywords.Properties.value][\n Keywords.ID.value\n ] # Remove primary key field. 
It is handled with auto increment below.\n\n for property_name, property in fields[Keywords.Properties.value].items():\n if KEY_REF in property:\n property = ref_lookup(property, fields)\n field_queries.append(\n f'{property_name} {TYPE_LOOKUP[property[COLUMN_TYPE]]}'\n )\n table_columns = ', '.join(field_queries)\n\n with connect(**BaseModel.db_settings) as connection:\n cursor = connection.cursor()\n cursor.execute(\n f'CREATE TABLE IF NOT EXISTS {self.table_name} (ID INTEGER PRIMARY KEY AUTO_INCREMENT, {table_columns})'\n )\n self._table_created[self.table_name] = True", "def init_tables(self) -> None:\n with self.table_access_condition:\n conn = self._get_connection()\n conn.execute(\"PRAGMA foreign_keys = 1\")\n c = conn.cursor()\n c.execute(tables.CREATE_STUDIES_TABLE)\n c.execute(tables.CREATE_SUBMISSIONS_TABLE)\n c.execute(tables.CREATE_REQUESTERS_TABLE)\n c.execute(tables.CREATE_UNITS_TABLE)\n c.execute(tables.CREATE_WORKERS_TABLE)\n c.execute(tables.CREATE_RUNS_TABLE)\n c.execute(tables.CREATE_RUN_MAP_TABLE)\n c.execute(tables.CREATE_PARTICIPANT_GROUPS_TABLE)\n c.execute(tables.CREATE_PARTICIPANT_GROUP_QUALIFICATIONS_MAPPING_TABLE)\n conn.commit()", "def create_tables() -> None:\n print(\"Creating database tables using SQLAlchemy ORM\")\n Base.metadata.create_all(engine)\n print(\"Done creating tables\")", "def create_table(conn, create_table_sql):\r\n try:\r\n c = conn.cursor()\r\n c.execute(create_table_sql)\r\n conn.commit()\r\n except Error as e:\r\n print(e)", "async def _create_tables_declarative(self, base, engine):\n if hasattr(base, 'metadata'):\n base.metadata.create_all(bind=engine, checkfirst=True)\n return", "def new_table(self):\n self.c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS {table} (\n id integer primary key,\n {event} integer,\n {desc} text,\n {date} text,\n {link} text)\n \"\"\".format(\n table=TABLE,\n event=EVENT,\n desc=DESC,\n date=DATE,\n link=LINK,\n )\n )", "def _create_schema(self):\n self._conn.executescript(self._db_schema)", "def setup_table(self):\n\n self.setup.create_basic_table_in_dev()\n self.setup.insert_random_records_into_dev()", "def data_table_creation(cursor, connection_to_db):\n\n cursor.execute(\"\"\"\n\n CREATE TABLE IF NOT EXISTS data(\n question TEXT NOT NULL,\n answer TEXT NULL,\n question_type TEXT NOT NULL,\n question_type_answers TEXT NULL,\n PRIMARY KEY(question)\n );\n\n \"\"\")\n\n connection_to_db.commit()", "def create_table(self, name: str, fields: Iterable[Field]) -> DbTable:", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Exception as e:\n print(e)", "def test_create_edge_table(populated_workspace, managed_user, server):\n workspace, _, node_table, edge_table = populated_workspace\n\n aql = f\"FOR doc in {edge_table} RETURN doc\"\n new_table_name = \"new_table\"\n\n with conftest.login(managed_user, server):\n resp = server.post(\n f\"/api/workspaces/{workspace}/tables\",\n data=aql,\n query_string={\"table\": new_table_name},\n )\n\n assert resp.status_code == 200\n assert resp.data.decode() == new_table_name", "def create_table(self):\n self.db.query(f\"\"\"\n CREATE TABLE IF NOT EXISTS {self.table} (\n id INT UNSIGNED NOT NULL AUTO_INCREMENT,\n name VARCHAR(140) NOT NULL,\n PRIMARY KEY (id)\n )\n \"\"\")", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as 
e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n\tconn.commit()\n except Error as e:\n print(e)", "def create_table(self):\n self.db.query(f\"\"\"\n CREATE TABLE IF NOT EXISTS {self.table} (\n id INT UNSIGNED NOT NULL AUTO_INCREMENT,\n name VARCHAR(140) NOT NULL,\n PRIMARY KEY (id)\n )\n \"\"\")\n\n self.db.query(\"\"\"\n CREATE TABLE IF NOT EXISTS product_category (\n product_id bigint unsigned,\n category_id int unsigned,\n CONSTRAINT pfk_product_2\n FOREIGN KEY (product_id)\n REFERENCES product(id),\n CONSTRAINT pfk_category_2\n FOREIGN KEY (category_id)\n REFERENCES category(id),\n PRIMARY KEY (product_id, category_id)\n )\n \"\"\")", "def create_tables(self):\n for query in table_create_sql:\n self.cursor.execute(query)\n\n self.commit()", "def create_all_tables():\n\tcommon_db.create_all_tables()", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n\tconn.commit()\n\tconn.close\n except Error as e:\n print(e)", "def create_database_structure(self):\n Base.metadata.create_all(self.engine)", "def create_table(conn, create_table_sql):\n\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def create_tables():\n with db.connect() as conn:\n conn.execute(\n \"CREATE TABLE IF NOT EXISTS url_list \"\n \"(url_id VARCHAR(20) NOT NULL UNIQUE, url_data VARCHAR(2083) NOT NULL);\"\n )", "def _init_db(self):\n cursor = self._main_connection.cursor()\n cursor.execute(self.sql[\"create_table\"])\n self._main_connection.commit()", "def create_table(conn, create_table_sql):\n try:\n c = conn.cursor()\n c.execute(create_table_sql)\n except sqlite3.Error as e:\n print(e)", "def create_table(drop_if_exists=True) -> None:\r\n connection = 
ConnectDB.connect()\r\n additional_types_query = \"\"\"DO\r\n BEGIN\r\n IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'gender') THEN\r\n CREATE TYPE Gender AS ENUM ('female', 'man');\r\n END IF;\r\n IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'ter') THEN\r\n CREATE TYPE ter AS ENUM ('city', 'village');\r\n END IF;\r\n END;\"\"\"\r\n\r\n ZnoData_drop_query = \"DROP TABLE IF EXISTS ZnoData;\"\r\n\r\n ZnoData_create_query = 'CREATE TABLE IF NOT EXISTS ZnoData ('\r\n fields = ''\r\n for col, t in table_columns_types.items():\r\n if fields == '':\r\n fields += (f'{col} {t}') + ' PRIMARY KEY, '\r\n else:\r\n fields += (f'{col} {t}')+ ', '\r\n ZnoData_create_query += fields[:-2] + ');'\r\n \r\n with connection.cursor() as cursor:\r\n cursor.execute(additional_types_query)\r\n if drop_if_exists:\r\n cursor.execute(ZnoData_drop_query)\r\n print('Table was droped.')\r\n cursor.execute(ZnoData_create_query)\r\n print('Table was created.')\r\n ConnectDB.disconnect(connection)", "def create_all_tables(self):\n pass", "def create_schema(engine):\n Base.metadata.create_all(bind=engine)", "def create_table(self, conn, create_table_sql):\n try:\n # create a Cursor object and call its .execute() method to perform SQL queries\n c = conn.cursor()\n # execute SQL queries: create a table named card\n c.execute(create_table_sql)\n except Error as e:\n print(e)", "def _create_table(self, table_name: str, column_name_type: List[tuple]):\n self.cursor.execute(f'''CREATE TABLE {table_name}\n ({', '.join([f\"{column} {column_type}\" for column, column_type in column_name_type])})''')\n self.conn.commit()", "def create_schema(query_root, host, port, db_name, user, password):\n try:\n conn = PGDB(host, port, db_name, user, password)\n try:\n conn.executeQueryFromFile(os.path.join(query_root, PREP_QUERY_DIR, \"create_tbl.sql\"))\n except Exception as e:\n print(\"unable to run create tables. %s\" % e)\n return 1\n conn.commit()\n conn.close()\n except Exception as e:\n print(\"unable to connect to the database. 
%s\" % e)\n return 1", "def create(self):\n self.execute(self.commands.create_table(self.name, self.primary_key_column))\n return self.commit()", "def test_table_definition(self):\n create_table(LowercaseKeyModel)\n create_table(CapitalizedKeyModel)\n\n delete_table(LowercaseKeyModel)\n delete_table(CapitalizedKeyModel)", "def create_tables(engine):\n Base.metadata.drop_all(bind=engine)\n Base.metadata.create_all(engine)", "def _create_labels_table(table_name, fk_column, fk_refcolumn, fk_index):\n op.create_table(\n table_name,\n sa.Column('id',\n sa.Integer(),\n autoincrement=True,\n nullable=False),\n sa.Column('key', sa.Text(), nullable=False),\n sa.Column('value', sa.Text(), nullable=False),\n sa.Column(fk_column, sa.Integer(), nullable=False),\n sa.Column('created_at', UTCDateTime(), nullable=False),\n sa.Column('_creator_id', sa.Integer(), nullable=False),\n sa.ForeignKeyConstraint(\n [fk_column],\n [fk_refcolumn],\n name=op.f('{0}_{1}'.format(table_name, fk_column)),\n ondelete='CASCADE'),\n sa.ForeignKeyConstraint(\n ['_creator_id'],\n [u'users.id'],\n name=op.f('{0}__creator_id_fkey'.format(table_name)),\n ondelete='CASCADE'),\n sa.PrimaryKeyConstraint(\n 'id',\n name=op.f('{0}_pkey'.format(table_name))),\n sa.UniqueConstraint(\n 'key', 'value', fk_column, name=op.f('{0}_key_value_key'))\n )\n op.create_index(op.f('{0}_created_at_idx'.format(table_name)),\n table_name,\n ['created_at'],\n unique=False)\n op.create_index(op.f('{0}__creator_id_idx'.format(table_name)),\n table_name,\n ['_creator_id'],\n unique=False)\n op.create_index(op.f('{0}_key_idx'.format(table_name)),\n table_name,\n ['key'],\n unique=False)\n op.create_index(op.f('{0}_{1}'.format(table_name, fk_index)),\n table_name,\n [fk_column],\n unique=False)", "def createTables(self):\n metadata = Base.metadata\n metadata.create_all(self._engine)\n return", "def create_tables(self):\n for name, attribute in self.__dict__.items():\n if hasattr(attribute, 'create_table_in_sqlite_db'):\n attribute.create_table_in_sqlite_db()", "def create_dtable(f):\n\n conn, cur = Database.connect(f, ErrorHandle.raise_handler)\n Database.execute(cur, 'CREATE TABLE IF NOT EXISTS dtables ('\n 'tname CHARACTER(32), '\n 'nodedriver CHARACTER(64), ' # Unused here.\n 'nodeurl CHARACTER(128), '\n 'nodeuser CHARACTER(16), ' # Unused here.\n 'nodepasswd CHARACTER(16), ' # Unused here.\n 'partmtd INT, '\n 'nodeid INT, '\n 'partcol CHARACTER(32), '\n 'partparam1 CHARACTER(128), '\n 'partparam2 CHARACTER(128)); ', ErrorHandle.raise_handler)", "def create_db(db, schema_json):\n with open(schema_json) as of:\n schema = json.load(of, object_pairs_hook=OrderedDict)\n # OrderedDict so that tables are created in the order specified,\n # allowing foreign keys to reference previously defined tables\n\n for table_name, columns in schema.items():\n col_types = columns.items() # dict -> tuple\n make_table(db, table_name, col_types)", "def create_tables(self):\n\n cur = self.conn.cursor()\n cur.execute('CREATE TABLE blog(blog_id INTEGER PRIMARY KEY, '\n ' title TEXT, subtitle TEXT, content TEXT, date TEXT, '\n ' author_id INTEGER, '\n 'FOREIGN KEY (author_id) REFERENCES author(author_id)) ')\n\n cur.execute('CREATE TABLE author(author_id INTEGER PRIMARY KEY, '\n ' name TEXT UNIQUE) ')\n\n cur.execute('CREATE TABLE password(password_id INTEGER PRIMARY KEY,'\n ' author_id INTEGER, '\n ' password TEXT, '\n 'FOREIGN KEY (author_id) REFERENCES author(author_id)) ')\n\n self.conn.commit()", "def _create_table(self):\n query = '''\n CREATE TABLE IF NOT EXISTS {} (\n id 
integer PRIMARY KEY,\n url text NOT NULL UNIQUE)\n '''.format(\n self.tablename\n )\n\n self.conn.execute(query)" ]
[ "0.68531334", "0.6769567", "0.6769567", "0.67081726", "0.6704613", "0.6654913", "0.6629334", "0.66245294", "0.6596241", "0.65905637", "0.6554986", "0.6512326", "0.64604354", "0.64442676", "0.6398214", "0.63658255", "0.63251376", "0.6320983", "0.6311493", "0.6310929", "0.6301367", "0.6300838", "0.629358", "0.62701035", "0.6254469", "0.6234495", "0.62333256", "0.62333256", "0.62333256", "0.6221567", "0.6221214", "0.62204593", "0.62155914", "0.62055963", "0.62000525", "0.6195262", "0.61832476", "0.6172452", "0.6168732", "0.61577475", "0.61542165", "0.6152565", "0.6146924", "0.6134763", "0.6133767", "0.61325514", "0.6130519", "0.6125872", "0.6119227", "0.6115018", "0.61088514", "0.61043334", "0.6083472", "0.60804695", "0.60743535", "0.60722595", "0.60621244", "0.60559744", "0.6055301", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60531527", "0.60491884", "0.6037543", "0.60369474", "0.6033939", "0.6009593", "0.5984387", "0.5984332", "0.59771687", "0.5975343", "0.5972272", "0.59684044", "0.5965329", "0.59549993", "0.59457517", "0.59386665", "0.59298944", "0.5923972", "0.59166324", "0.59076107", "0.5904326", "0.590403", "0.58953065", "0.5893626", "0.5890209", "0.5886892", "0.5882945" ]
0.7019781
0
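
The negatives in the record above revolve around one pattern: assemble a CREATE TABLE statement from per-table metadata (a primary-key column, a list of unique columns, and a foreign-key mapping) and execute the statements in a single loop. The sketch below distills that pattern into a self-contained, runnable form. It is a minimal illustration using the standard-library sqlite3 module; the two table definitions and the TABLES metadata dict are hypothetical stand-ins, not rows taken from the record.

import sqlite3

# Hypothetical per-table metadata mirroring the create_dict pattern above:
# each entry carries column DDL plus the primary-key, unique, and
# foreign-key information needed to assemble the constraints.
TABLES = {
    "site": {
        "columns": "site_id INTEGER, site_code VARCHAR",
        "pk": "site_id",
        "uq": ["site_code"],
        "fk": {},  # {column: (referenced_table, referenced_column)}
    },
    "marker_observation": {
        "columns": "mo_id INTEGER, site_id INTEGER, ts TIMESTAMP",
        "pk": "mo_id",
        "uq": ["site_id", "ts"],
        "fk": {"site_id": ("site", "site_id")},
    },
}

def build_create(table_name, meta):
    """Assemble one CREATE TABLE statement from a metadata entry."""
    constraints = ["PRIMARY KEY ({})".format(meta["pk"])]
    if meta["uq"]:
        constraints.append("UNIQUE ({})".format(", ".join(meta["uq"])))
    for col, (ref_table, ref_col) in meta["fk"].items():
        constraints.append(
            "FOREIGN KEY ({}) REFERENCES {}({})".format(col, ref_table, ref_col))
    return "CREATE TABLE IF NOT EXISTS {} ({}, {})".format(
        table_name, meta["columns"], ", ".join(constraints))

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = 1")  # SQLite only enforces FKs when asked
for name, meta in TABLES.items():
    conn.execute(build_create(name, meta))
conn.commit()
# Usage check: list the tables that were just created.
print([row[0] for row in conn.execute(
    "SELECT name FROM sqlite_master WHERE type='table'")])

Keeping the constraint assembly in one helper, as above, is what lets the execute loop in the record stay uniform across schemas; only the metadata dict changes per table.
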
This function will verify the current foreign table.
def verify_foreign_table(server, db_name, fsrv_name):
    try:
        connection = get_db_connection(db_name,
                                       server['username'],
                                       server['db_password'],
                                       server['host'],
                                       server['port'])
        pg_cursor = connection.cursor()
        pg_cursor.execute(
            "SELECT ftrelid FROM pg_foreign_table WHERE ftserver = "
            "(SELECT oid FROM pg_foreign_server WHERE srvname = '%s') ORDER BY "
            "ftrelid ASC limit 1" % fsrv_name)
        fts = pg_cursor.fetchone()
        connection.close()
        return fts
    except Exception:
        traceback.print_exc(file=sys.stderr)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def foreign_key_check(self):\n # MyRocks doesn't support foreign key\n if self.is_myrocks_table:\n log.info(\n \"SKip foreign key check because MyRocks doesn't support \" \"this yet\"\n )\n return True\n foreign_keys = self.query(\n sql.foreign_key_cnt,\n (\n self.table_name,\n self._current_db,\n self.table_name,\n self._current_db,\n ),\n )\n if foreign_keys:\n fk = \"CONSTRAINT `{}` FOREIGN KEY (`{}`) REFERENCES `{}` (`{}`)\".format(\n foreign_keys[0][\"constraint_name\"],\n foreign_keys[0][\"col_name\"],\n foreign_keys[0][\"ref_tab\"],\n foreign_keys[0][\"ref_col_name\"],\n )\n raise OSCError(\n \"FOREIGN_KEY_FOUND\",\n {\"db\": self._current_db, \"table\": self.table_name, \"fk\": fk},\n )", "def verify_table(self):\n metadata = MetaData()\n metadata.reflect(bind = StatusSource.engine)\n mine = str(self.table.columns)\n verified = str(metadata.tables[self.tablename].columns)\n if mine != verified:\n raise DbException(\"Table '%s' in the database has schema %s whereas the query's schema is %s\" % (self.tablename, verified, mine))", "def verify_table(self):\r\n metadata = MetaData()\r\n metadata.reflect(bind = DbInsertStatusHandler.engine)\r\n mine = str(self.table.columns)\r\n verified = str(metadata.tables[self.tablename].columns)\r\n if mine != verified:\r\n raise DbException(\"Table '%s' in the database has schema %s whereas the query's schema is %s\" % (self.tablename, verified, mine))", "def check_foreign_key_exists(self, table_name, column_name, referenced_table, referenced_column):\n ans = self.execute(self.commands.foreign_key_exists(self.db.name, table_name, column_name, referenced_table, referenced_column))\n if not ans:\n return False\n return True", "def validate_table(self, table, table_struct, verbose=True):\n \n assert(self.connected)\n try: \n assert(self.check_table(table, verbose=False)) \n except AssertionError: \n raise TableNotFoundError\n \n GET_SCHEMA_INFORMATION_COMMAND = \"SELECT ORDINAL_POSITION, COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_KEY, EXTRA \" \\\n \t \"FROM INFORMATION_SCHEMA.COLUMNS \" \\\n \t \"WHERE TABLE_NAME='{0}' ORDER BY ORDINAL_POSITION\".format(table)\n \n GET_SCHEMA_FK_INFORMATION_COMMAND = \"SELECT COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME \" \\\n \"FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE \" \\\n \"WHERE REFERENCED_TABLE_SCHEMA = '{0}' AND TABLE_NAME = '{1}' AND COLUMN_NAME = '{2}'\"\n \n CHANGE_TYPE_COMMAND = \"ALTER TABLE {0} MODIFY {1} {2} {3}\"\n \n ADD_FK_COMMAND = \"ALTER TABLE {0} ADD FOREIGN KEY ({1}) REFERENCES {2}({3})\" \n \n DROP_FK_CONSTRAINT_COMMAND = \"ALTER TABLE {0} DROP FOREIGN KEY {1}\" \n \n \n self.cursor.execute(GET_SCHEMA_INFORMATION_COMMAND)\n \n # load all column info from the database \n columns = {}\n for c in self.cursor:\n columns[c[1]] = c\n \n for column,db_col in zip(table_struct,columns):\n \n # load parameter values from the DB \n (ord_pos, name, col_type, isnull, key_type, extra) = columns[db_col]\n \n isnull = isnull == 'YES'\n auto_increment = extra == 'auto_increment'\n foreign_key = key_type == 'MUL'\n \n # parse new parameter values\n struct_type = table_struct[column][0]\n parameters = table_struct[column][1] if ( len(table_struct[column]) > 1) else None\n \n # get parameters values in boolean format\n if (parameters == None):\n new_isnull = True\n new_auto_increment = False\n new_foreign_key = False\n else:\n if 'not_null' in parameters: new_isnull = not parameters['not_null']\n else: new_isnull = True\n \n if 'auto_increment' in parameters: 
new_auto_increment = parameters['auto_increment']\n else: new_auto_increment = False\n \n if 'foreign_key' in parameters: new_foreign_key = parameters['foreign_key']\n else: new_foreign_key = False\n \n \n \n \n if verbose: \n print(\"\\n---\\n\\nChecking column '{0}'...\".format(column))\n \n # check name, type and each parameter \n if name == column:\n \n # if something doesn't match, change within the database\n if ( col_type != struct_type ): \n if verbose:\n print(\"Column '{0}' found in the correct position with the incorrect type.\".format(column))\n print(\"Changing the type of '{0}' from '{1}' to '{2}'\".format(column, col_type, struct_type),)\n \n cmd = CHANGE_TYPE_COMMAND.format(table, column, struct_type.upper(), '')\n \n if verbose: print(\"\\t\" + cmd)\n \n self.cursor.execute(cmd) \n \n if ( isnull != new_isnull ):\n \n if verbose:\n print(\"Column '{0}' found in the correct position an incorrect parameter.\".format(column))\n print(\"Changing the type of '{0}' from '{1}' to '{2}'\".format(column, \"NOT NULLABLE\" if new_isnull else \"NULLABLE\", \"NULLABLE\" if new_isnull else \"NOT NULLABLE\"))\n \n \n cmd = CHANGE_TYPE_COMMAND.format(table, column, struct_type.upper(), \"NOT NULL\" if not new_isnull else \"\" )\n \n if verbose: print(\"\\t\" + cmd)\n \n \n self.cursor.execute(cmd)\n \n if ( auto_increment != new_auto_increment ):\n \n if verbose:\n print(\"Column '{0}' found in the correct position an incorrect parameter.\".format(column))\n print(\"Changing the type of '{0}' from '{1}' to '{2}'\".format(column, \"AUTO INCREMENT\" if new_auto_increment else \"none\", \"none\" if new_auto_increment else \"AUTO INCREMENT\"))\n \n \n cmd = CHANGE_TYPE_COMMAND.format(table, column, struct_type.upper(), \"AUTO INCREMENT\" if new_auto_increment else \"\" )\n \n if verbose: print(\"\\t\" + cmd)\n \n \n self.cursor.execute(cmd)\n \n \n if ( foreign_key != new_foreign_key ):\n \n \n if verbose:\n print(\"Column '{0}' found in the correct position an incorrect parameter.\".format(column))\n print(\"Changing the type of '{0}' from '{1}' to '{2}'\".format(column, \"FOREIGN KEY\" if new_auto_increment else \"none\", \"none\" if new_auto_increment else \"FOREIGN KEY\"))\n \n \n \n if ('foreign_key' in parameters and parameters['foreign_key']):\n \n referenced_table = parameters['references'].split('(')[0]\n referenced_column = parameters['references'].split('(')[1][:-1] \n \n \n if (not self.check_table(referenced_table, verbose=False)):\n raise(TableNotFoundError)\n \n \n if (not self.check_column(referenced_column, referenced_table, verbose=False)):\n raise(ColumnNotFoundError)\n \n \n cmd = ADD_FK_COMMAND.format(table,column,referenced_table, referenced_column)\n\n \n \n if verbose: print(\"\\t\" + cmd)\n \n try:\n self.cursor.execute(cmd) \n except:\n print(\" > Error: Cannot add foreign key constraint to column '{0}' in the table '{1}'. 
You must remove all data from\\n > this column using the clear_column() command first.\".format(column, table))\n \n else:\n \n # check if column has a foreign key constraint\n \n cmd = GET_SCHEMA_FK_INFORMATION_COMMAND.format(self.config['database'], table, column)\n \n self.cursor.execute(cmd)\n \n fk_name = None\n for row in self.cursor:\n fk_name = row[1]\n break\n \n if fk_name != None:\n cmd = DROP_FK_CONSTRAINT_COMMAND.format(table, fk_name)\n \n if verbose: \n print(\"Column '{0}' involved in foreign key constraint '{1}'\".format(column, fk_name))\n print(\"Dropping foreign key constraint '{0}'\".format(fk_name))\n print(\"\\t\" + cmd)\n \n self.cursor.execute(cmd)\n\n \n \n if verbose: print(\"Done.\")\n \n \n if (len(columns) > len(table_struct)):\n \n if verbose: print(\"\\n---\\n\\nExtra columns found in database\")\n \n for col in columns:\n if (col not in table_struct): \n \n if verbose:\n print(\"Column '{0}' found in the database but not found in the configuration.\".format(col))\n \n self.delete_column(col, table)\n \n \n elif(len(table_struct) > len(columns)):\n \n if verbose: print(\"\\n---\\n\\nExtra columns found in configuration. \")\n\n for col in table_struct:\n if col not in columns:\n if verbose: print(\"Column '{0}' found in configuration but not in database\".format(col))\n self.insert_column(col, table_struct[col][0], table, params = table_struct[col][1] if ( len(table_struct[col]) > 1) else None)", "def _is_foreign_key(self, key):\n return self._in_keys(key, self._foreign_keys)", "def check_table(cur, table: str) -> bool:\n table_data = cur.execute(f\"\"\"\n SELECT name \n FROM sqlite_master \n WHERE type='table' \n AND name='{table}'\n \"\"\")\n table_fetch = table_data.fetchall()\n if table_fetch:\n return True\n return False", "def isForeignKey(self):\n return self._foreignKey", "def test_foreign_column(self):\n\n table2 = self.h5file.create_table('/', 'other', self.tableDescription)\n self.assertRaises(ValueError, self.table.where,\n 'c_int32_a + c_int32_b > 0',\n {'c_int32_a': self.table.cols.c_int32,\n 'c_int32_b': table2.cols.c_int32})", "def table_check(tablename, path):\n instance = arcno(path)\n tablelist = [i for i,j in instance.actual_list.items()]\n return True if tablename in tablelist else False", "def enforce_foreign_keys(self):\n with self._get_db_connection() as conn:\n try:\n c = conn.cursor()\n c.execute('PRAGMA foreign_keys=ON')\n except Exception:\n conn.rollback()\n raise Exception(sys.exc_info())\n else:\n conn.commit()", "def check_table(self, table_name: str) -> bool:\n try:\n if self.engine.dialect.has_table(self.engine.connect(), table_name):\n return self.get_input(table_name)\n return False\n except Exception as err:\n logger.error(\"check_table [error] -> %s\" % err)\n return False", "def isForeignKey(cls, _field):\n return isinstance(_field, fields.ForeignKeyField)", "def test_foreign_key_through_pk(self):\n metadata = MetaData(schema=\"unique\")\n sa_models = construct_models(metadata)\n sa_model = sa_models[RelatedToItemViaPrimaryKey]\n table = sa_model.__table__\n self.assertEqual(len(table.foreign_keys), 1)\n foreign_key, *_ = table.foreign_keys\n foreign_column = foreign_key.column\n item_table = sa_models[Item].__table__\n self.assertIs(foreign_column.table, item_table)\n self.assertEqual(foreign_column.name, \"id\")\n self.assertEqual(foreign_column.type, item_table.c.id.type)", "def collect_drop_fk(self):\n try:\n conn = self.engine.connect()\n transactional = conn.begin()\n inspector = 
reflection.Inspector.from_engine(self.engine)\n\n for table_name in inspector.get_table_names():\n if table_name in self.table_list:\n for fk in inspector.get_foreign_keys(table_name):\n if not fk[\"name\"]:\n continue\n self.dest_fk.append(ForeignKeyConstraint((), (), name=fk[\"name\"]))\n self.contraints_columns[table_name].add(*fk[\"constrained_columns\"])\n transactional.commit()\n except Exception as err:\n logger.error(\"collect_drop_fk [error] -> %s\" % err)\n return False\n finally:\n conn.close()", "def checkTable(self, in_table_name):\n phrase1 = \"SELECT count(*) FROM sqlite_master\"\n phrase2 = \"type='table' AND name='{}';\".format(in_table_name)\n self.cursor.execute(\"{} WHERE {}\".format(phrase1, phrase2))\n return self.cursor.fetchone()[0] == 1", "def _check_foreign_cols(\n self, join_condition: ColumnElement[bool], primary: bool\n ) -> None:\n\n can_sync = False\n\n foreign_cols = self._gather_columns_with_annotation(\n join_condition, \"foreign\"\n )\n\n has_foreign = bool(foreign_cols)\n\n if primary:\n can_sync = bool(self.synchronize_pairs)\n else:\n can_sync = bool(self.secondary_synchronize_pairs)\n\n if (\n self.support_sync\n and can_sync\n or (not self.support_sync and has_foreign)\n ):\n return\n\n # from here below is just determining the best error message\n # to report. Check for a join condition using any operator\n # (not just ==), perhaps they need to turn on \"viewonly=True\".\n if self.support_sync and has_foreign and not can_sync:\n err = (\n \"Could not locate any simple equality expressions \"\n \"involving locally mapped foreign key columns for \"\n \"%s join condition \"\n \"'%s' on relationship %s.\"\n % (\n primary and \"primary\" or \"secondary\",\n join_condition,\n self.prop,\n )\n )\n err += (\n \" Ensure that referencing columns are associated \"\n \"with a ForeignKey or ForeignKeyConstraint, or are \"\n \"annotated in the join condition with the foreign() \"\n \"annotation. 
To allow comparison operators other than \"\n \"'==', the relationship can be marked as viewonly=True.\"\n )\n\n raise sa_exc.ArgumentError(err)\n else:\n err = (\n \"Could not locate any relevant foreign key columns \"\n \"for %s join condition '%s' on relationship %s.\"\n % (\n primary and \"primary\" or \"secondary\",\n join_condition,\n self.prop,\n )\n )\n err += (\n \" Ensure that referencing columns are associated \"\n \"with a ForeignKey or ForeignKeyConstraint, or are \"\n \"annotated in the join condition with the foreign() \"\n \"annotation.\"\n )\n raise sa_exc.ArgumentError(err)", "def __call__(self):\n try:\n _ = self.engine.table_names()\n except OperationalError:\n return False\n else:\n return True", "def verify(self):\n for col in self.columns:\n if col not in self.table_obj.columns.keys():\n raise Exception('{} column not found in {}'.format(\n col, self.table_obj))", "def verify(self):\n for col in self._columns:\n if col not in self._table_obj.columns.keys():\n raise GaiaException('{} column not found in {}'.format(\n col, self._table_obj))", "def test_foreign_key_through_unique_field(self):\n metadata = MetaData(schema=\"unique\")\n sa_models = construct_models(metadata)\n sa_model = sa_models[RelatedToItemViaUniqueField]\n table = sa_model.__table__\n self.assertEqual(len(table.foreign_keys), 1)\n foreign_key, *_ = table.foreign_keys\n foreign_column = foreign_key.column\n item_table = sa_models[Item].__table__\n self.assertIs(foreign_column.table, item_table)\n self.assertEqual(foreign_column.name, \"legacy_id\")\n self.assertEqual(foreign_column.type, item_table.c.legacy_id.type)", "def verify_database(self):\n super().verify_database(names=schema.zalert_names,\n script=schema.schema)", "def is_key_failure(e: sqlite3.IntegrityError) -> bool:\n return str(e) == \"FOREIGN KEY constraint failed\"", "def process(self):\n try:\n # self.alter_columns()\n self.collect_drop_fk()\n self.update_table()\n self.create_tables()\n self.db_operations.create_fk_constraint(self.fk_constraints, self.contraints_columns)\n return True\n except Exception as err:\n logger.error(\"create_tables [error] -> %s\" % err)", "def check_table(schemaname=settings.DEFAULT_SCHEMA, tablename=settings.STATES):\n\n conn = None\n cur = None\n\n try:\n\n conn = utils.pgconnect(**settings.DEFAULT_CONNECTION)\n cur = conn.cursor()\n cur.execute(\"\"\"SELECT to_regclass('%s.%s');\"\"\", (AsIs(schemaname), AsIs(tablename)))\n result = cur.fetchone()[0]\n\n return (True if result else False)\n\n except Exception as e:\n raise Exception(e)\n\n finally:\n if conn: conn = None\n if cur: cur = None", "def test_table_false_positives(self):\n pass", "def check_constraints(self, table_names=None):\r\n if self.connection:\r\n cursor = self.connection.cursor()\r\n else:\r\n cursor = self._cursor()\r\n if not table_names:\r\n cursor.execute('DBCC CHECKCONSTRAINTS WITH ALL_CONSTRAINTS')\r\n if cursor.description:\r\n raise DjangoIntegrityError(cursor.fetchall())\r\n else:\r\n qn = self.ops.quote_name\r\n for name in table_names:\r\n cursor.execute('DBCC CHECKCONSTRAINTS({0}) WITH ALL_CONSTRAINTS'.format(\r\n qn(name)\r\n ))\r\n if cursor.description:\r\n raise DjangoIntegrityError(cursor.fetchall())", "def _relation_check(self):\n seen = set()\n for entity in self.get_entities():\n for field in entity.fields.itervalues():\n if field.is_relation():\n seen.add(field.remote_name)\n missing = seen - set(self.entities.keys())\n if missing:\n raise exceptions.SchemaError(\n 'undefined entities referenced in relations: %s' % 
(\n ', '.join(missing)))", "def is_pure_binary(self, table, follow_naming_convention=True):\n\n # table has only two foreign_key constraints.\n # Each constraint is over only one column.\n if not (len(table.foreign_keys) == 2 and\n len(table.foreign_keys[0].foreign_key_columns) == 1 and\n len(table.foreign_keys[1].foreign_key_columns) == 1):\n return False\n\n fk0 = table.foreign_keys[0].foreign_key_columns[0]['column_name']\n fk1 = table.foreign_keys[1].foreign_key_columns[0]['column_name']\n\n # There is a uniqeness constraint on the pair of fkey columns.\n f = filter(lambda x: len(x.unique_columns) == 2 and fk0 in x.unique_columns and fk1 in x.unique_columns,\n table.keys)\n\n if len(list(f)) != 1:\n return False\n\n # Null is not allowed on the column.\n if table.column_definitions[fk0].nullok or table.column_definitions[fk1].nullok:\n return False\n\n if follow_naming_convention and not (fk0 in table.name and fk1 in table.name):\n return False\n\n return True", "def check_table(table_name, engine):\n sql = (\"SELECT \"\n \"* \"\n \"FROM information_schema.tables \"\n \"WHERE table_name = '{}'\".format(table_name)\n )\n result = engine.execute(sql)\n\n if len(result.fetchall()) > 0:\n return True\n else:\n return False", "def clean_table(self):\n return False", "def self_check(self, fqn, errors):\n\t\terrors.append(\"{}: self_check() must be implemented for SchemaBase derived classes.\".format(self.__class__.__name__))\n\t\treturn False", "def ofTable(self, tablename):\n return tablename == self._table.name", "def _refers_to_parent_table(self) -> bool:\n pt = self.parent_persist_selectable\n mt = self.child_persist_selectable\n result = False\n\n def visit_binary(binary: BinaryExpression[Any]) -> None:\n nonlocal result\n c, f = binary.left, binary.right\n if (\n isinstance(c, expression.ColumnClause)\n and isinstance(f, expression.ColumnClause)\n and pt.is_derived_from(c.table)\n and pt.is_derived_from(f.table)\n and mt.is_derived_from(c.table)\n and mt.is_derived_from(f.table)\n ):\n result = True\n\n visitors.traverse(self.primaryjoin, {}, {\"binary\": visit_binary})\n return result", "def _checktables(self):\n self._cur.execute(\"PRAGMA TABLE_INFO(NODES)\")\n if (self._cur.fetchone() == None):\n # table doesn't exist, create it\n # SQLite does have constraints implemented at the moment\n # so datatype will just be a string\n self._cur.execute(\"CREATE TABLE NODES\"\n + \"(ID INTEGER PRIMARY KEY AUTOINCREMENT,\"\n + \"DATA BLOB NOT NULL)\")\n self._cur.execute(\"CREATE TABLE TAGS\"\n + \"(ID INTEGER PRIMARY KEY AUTOINCREMENT,\"\n + \"DATA BLOB NOT NULL UNIQUE)\")\n self._cur.execute(\"CREATE TABLE LOOKUP\"\n + \"(NODE INTEGER NOT NULL, TAG INTEGER NOT NULL,\"\n + \" PRIMARY KEY(NODE, TAG))\")\n\n self._cur.execute(\"CREATE TABLE KEY\"\n + \"(THEKEY TEXT NOT NULL DEFAULT '')\");\n self._cur.execute(\"INSERT INTO KEY VALUES('')\");\n \n try:\n self._con.commit()\n except DatabaseError, e:\n self._con.rollback()\n raise e", "def check_and_create_table(self) -> None:\n table_ids = [t.table_id for t in self.instance.list_tables()]\n\n if not self.table_id in table_ids:\n self.table.create()\n f = self.table.column_family(self.family_id)\n f.create()\n\n f_inc = self.table.column_family(self.incrementer_family_id,\n gc_rule=MaxVersionsGCRule(1))\n f_inc.create()\n\n f_log = self.table.column_family(self.log_family_id)\n f_log.create()\n\n f_ce = self.table.column_family(self.cross_edge_family_id,\n gc_rule=MaxVersionsGCRule(1))\n f_ce.create()\n\n print(\"Table created\")", "def 
check_integrity(self):\n curves = list()\n data = list()\n schema = list()\n try:\n with sqlite3.connect(self.db_file) as conn:\n cur = conn.cursor()\n cur.execute(\"\"\"SELECT name\n FROM sqlite_master\n WHERE type='table'\"\"\")\n schema = cur.fetchall()\n # if len(schema) == 0:\n if not schema:\n raise Exception(\"No table in database.\")\n with sqlite3.connect(self.db_file) as conn:\n cur = conn.cursor()\n cur.execute(\"SELECT * FROM curves\")\n curves = cur.fetchall()\n cur.execute(\"PRAGMA table_info('data')\")\n data = cur.fetchall()\n if len(curves) + 1 != len(data):\n raise Exception(\"Mismatch between curves data and data table.\")\n return True\n except Exception as inst:\n print(inst.args[0])\n return False", "def test_db_table_creation_check(self):\n mock_cursor = Mock()\n mock_cursor.configure_mock(**{\"cursor.return_value.fetchone.return_value\": (\"vnf_table_2\")})\n status = misshtbtd.db_table_creation_check(mock_cursor, \"vnf_table_2\")\n self.assertEqual(status, True)", "def check_db(self):\n self.cursor.execute(\"\"\"PRAGMA table_info( birthday );\"\"\")\n if not self.cursor.fetchone():\n self.create_tables()", "def test_read_foreign_device_table(self):\n if _debug: TestAnnexJCodec._debug(\"test_read_foreign_device_table\")\n\n # read returns an empty table\n pdu_bytes = xtob('81.06.0004')\n\n self.request(ReadForeignDeviceTable())\n self.indication(pduData=pdu_bytes)\n\n self.response(PDU(pdu_bytes))\n self.confirmation(ReadForeignDeviceTable)", "def exists(self, table, cursor):\n cursor.execute(f\"SELECT name FROM sqlite_master WHERE type='table' AND name='{table}'\")\n res = cursor.fetchone()\n return True if res else False", "def __check_table(input_table):\n\n try:\n table = TABLE_TYPES[input_table]\n return table\n except KeyError:\n raise InvalidTableType(input_table)", "def _verify(self):\n pass", "def test_equality(self):\n foreign_object = Membership._meta.get_field(\"person\")\n self.assertEqual(\n foreign_object.path_infos,\n foreign_object.get_path_info(),\n )\n self.assertEqual(\n foreign_object.reverse_path_infos,\n foreign_object.get_reverse_path_info(),\n )", "def test_db_schema(client):\n table_names = [\"user\", \"house\", \"user_role\", \"model_param\"]\n with db.engine.connect() as connexion:\n for table_name in table_names:\n assert db.engine.dialect.has_table(connexion, table_name) == True", "def test_foreign_key(self):\r\n parent1 = Parent.objects.create(pk=1)\r\n child1 = Child.objects.create(parent=parent1)\r\n child2 = Child.objects.create(parent=None)\r\n self.assertEqual(child1.parent, parent1)\r\n self.assertEqual(child2.parent, None)\r\n self.assertEqual(Child.objects.get(parent=parent1), child1)\r\n self.assertEqual(Child.objects.get(parent=1), child1)\r\n self.assertEqual(Child.objects.get(parent='1'), child1)\r\n with self.assertRaises(ValueError):\r\n Child.objects.get(parent='a')\r\n self.assertEqual(Child.objects.get(parent=None), child2)", "def check_db_schema(self):\n if not self.db.get_tables():\n self.create_db_schema()", "def _init_check_database(self):\n # FIXME add additional checks, for example that columns in BY,\n # ACROSS, ON are not the same ? 
(see task structure notes)\n # also that location columns are not used\n if self.verbose:\n print('checking input database {}'.format(self.database))\n\n # check that required columns are present\n cols = set(self.db.columns)\n message = (\n ' argument is invalid, check that all the provided attributes '\n 'are defined in the database {}'.format(self.database))\n # the argument of issuperset needs to be a list ...\n assert cols.issuperset(self.on), 'ON' + message\n assert cols.issuperset(self.across), 'ACROSS' + message\n assert cols.issuperset(self.by), 'BY' + message\n\n for col in cols:\n assert '_' not in col, \\\n col + ': you cannot use underscore in column names'\n assert '#' not in col, \\\n col + ': you cannot use \\'#\\' in column names'\n\n if self.verbose:\n print(\"input database verified\")", "def has_table(self, name: str) -> bool:\n try:\n self.execute(\"select * from {table} limit 1\", name)\n return True\n except sqlite3.OperationalError:\n return False", "def table_exist(self, conn, tbl_name):\n name, schema = tuple(tbl_name.split('.'))\n df = conn.table_owner(name, schema)\n return (True, df['owner_name'][0]) if len(df) > 0 else (False, None)", "def test_fk_invalidation(self):\r\n a = Addon.objects.get(id=1)\r\n assert User.objects.get(name='clouseroo').from_cache is False\r\n a.save()\r\n\r\n assert User.objects.get(name='clouseroo').from_cache is False", "def has_table(self, table):\n return table in self.get_table_list(\".\" in table)", "def db_status_ok():\n for Model in apps.get_models():\n table_name = Model._meta.db_table\n if not db_table_exists(table_name):\n return False\n return True", "def _check_transactional_ddl(self):\n table_name = \"yoyo_tmp_{}\".format(utils.get_random_string(10))\n table_name_quoted = self.quote_identifier(table_name)\n sql = self.create_test_table_sql.format(table_name_quoted=table_name_quoted)\n with self.transaction() as t:\n self.execute(sql)\n t.rollback()\n try:\n with self.transaction():\n self.execute(\"DROP TABLE {}\".format(table_name_quoted))\n except self.DatabaseError:\n return True\n return False", "def check_database(self):\r\n self.logger.log(logger.LogLevel.INFO, 'Checking database tables')\r\n table_stats = self.check_table(query.TABLE_STATS, query.QUERY_CREATE_TABLE_STATS())\r\n if table_stats is False:\r\n return False\r\n table_tweets = self.check_table(query.TABLE_TWEETS, query.QUERY_CREATE_TABLE_TWEETS())\r\n if table_tweets is False:\r\n return False\r\n table_posts = self.check_table(query.TABLE_POSTS, query.QUERY_CREATE_TABLE_POSTS())\r\n if table_posts is False:\r\n return False\r\n table_follows = self.check_table(query.TABLE_FOLLOWS, query.QUERY_CREATE_TABLE_FOLLOWS())\r\n if table_follows is False:\r\n return False\r\n return True", "def _schema_valid_prod(self, table: TableSchema) -> bool:\n disk_schema = self._get_stored_schema(table['name'])\n if not disk_schema:\n return False\n \n # Column and field order will probably not match\n # TODO don't call update_table_schema twice\n _, alter_reqs = update_table_schema(disk_schema, table)\n return len(alter_reqs) == 0", "def has_table(self, table_name, timeout):\n _abstract()", "def has_table(self, table_name, timeout):\n _abstract()", "def tableExists(self):\n return self.entity in self.db.table_list().run(self.r)", "def test_foreign_key_backwards(self):\r\n parent = CharParent.objects.create(id=1)\r\n child = CharChild.objects.create(parent=parent)\r\n self.assertEqual(list(parent.charchild_set.all()), [child])\r\n\r\n parent = 
IntegerParent.objects.create(id=1)\r\n child = IntegerChild.objects.create(parent=parent)\r\n self.assertEqual(list(parent.integerchild_set.all()), [child])", "def check_table(table_name = None):\n\n if table_name is None:\n table_name = config[\"default-table\"]\n\n conn, tunnel = create_db_conn()\n \n result = None\n\n try:\n cur = conn.cursor()\n cur.execute(\"\"\"\n USE %s\n \"\"\"%(config['db'], ))\n\n cur.execute(\"\"\"\n SHOW TABLES;\n \"\"\")\n \n all_tables = cur.fetchall()\n if (table_name,) in all_tables:\n result = True\n else:\n result = False\n except Exception as e:\n print(\"check_table FAILED\")\n print(e)\n\n conn.close()\n tunnel.close()\n return result", "def check_schema(\n self, client: Any, dataframe: DataFrame, table_name: str, database: str = None\n ) -> DataFrame:", "def test_facility_fk(self):\n self.entity1.facility_id = 999\n\n with self.assertRaises(ValidationError):\n self.entity1.save()\n\n transaction.rollback()", "def verify(self):\r\n pass", "def verify(self):\n pass", "def verify(self):\n pass", "def test_instance(self):\n self.assertEqual(True, type(self.Test.defined_associations['thing']) is pyperry.association.HasOne)", "def allow_relation(self, obj1, obj2, **hints):\n\n if obj1._state.db == obj2._state.db:\n return True\n return False", "def check_table(self, table_name):\n table_exists = False\n connected = False\n if not self.connected:\n self.connect()\n connected = True\n\n # Check if the database already exists\n if self.dbmi.__name__ == \"sqlite3\":\n\n self.cursor.execute(\"SELECT name FROM sqlite_master WHERE \"\n \"type='table' AND name='%s';\" % table_name)\n name = self.cursor.fetchone()\n if name and name[0] == table_name:\n table_exists = True\n else:\n # Check for raster_base table\n self.cursor.execute(\"SELECT EXISTS(SELECT * FROM information_schema.tables \"\n \"WHERE table_name=%s)\", ('%s' % table_name,))\n if self.cursor.fetchone()[0]:\n table_exists = True\n\n if connected:\n self.close()\n\n return table_exists", "def linked_tables(self, table):\n fk0 = (table.foreign_keys[0].referenced_columns[0]['schema_name'],\n table.foreign_keys[0].referenced_columns[0]['table_name'])\n fk1 = (table.foreign_keys[1].referenced_columns[0]['schema_name'],\n table.foreign_keys[1].referenced_columns[0]['table_name'])\n return [fk0, fk1]", "def test_returns_correct_relation(self):\n rel = self.Test.current_scope()\n assert not rel\n scoped = self.Test.relation().clone()\n self.Test._scoped_methods = [scoped]\n self.assertEqual(self.Test.current_scope(), scoped)", "def has_table(self, table):\n con = self.connection\n cur = con.cursor()\n res = cur.execute(\"\"\"SELECT COUNT(*) FROM sqlite_master\n WHERE type='table' AND name='%s'\"\"\" % table)\n tcnt = cur.fetchall()\n cur.close()\n if tcnt[0][0] > 0:\n return True\n else:\n return False", "def testTable(self):\n self.assertGreater(len(self.auth.table(self.dataset, self.table)), 0)", "def cross_schema_fk_reflection(self):\n return exclusions.closed()", "def _table_exists(conn, table_name):\n # Query for the table.\n with conn:\n cur = conn.cursor()\n cur.execute(('SELECT name FROM sqlite_master'\n ' WHERE type=\"table\" AND name=\"%s\"') % table_name)\n return len(cur.fetchall()) == 1", "def test_at_least_one_non_primary_key_column_is_required(self):", "def test_defined_table_names(model):\n required_tables = {\"user\", \"service\", \"role\", \"permission\", \"instance\"}\n\n defined_tables = []\n for model in defined_models:\n defined_tables.append(model.__tablename__)\n\n assert 
len(required_tables.difference(defined_tables)) == 0", "def checkModel(self, model):\n # TODO", "def _check_file(self, name):\n self.assertTrue(os.path.exists(name), \"Could not find table %s.\" % name)", "def db_table_exists(table_name):\n return table_name in connection.introspection.table_names()", "def _validate_table_schema_has_correct_collation(\n instance: Recorder,\n table_object: type[DeclarativeBase],\n) -> set[str]:\n schema_errors: set[str] = set()\n # Mark the session as read_only to ensure that the test data is not committed\n # to the database and we always rollback when the scope is exited\n with session_scope(session=instance.get_session(), read_only=True) as session:\n table = table_object.__tablename__\n metadata_obj = MetaData()\n connection = session.connection()\n metadata_obj.reflect(bind=connection)\n dialect_kwargs = metadata_obj.tables[table].dialect_kwargs\n # Check if the table has a collation set, if its not set than its\n # using the server default collation for the database\n\n collate = (\n dialect_kwargs.get(\"mysql_collate\")\n or dialect_kwargs.get(\n \"mariadb_collate\"\n ) # pylint: disable-next=protected-access\n or connection.dialect._fetch_setting(connection, \"collation_server\") # type: ignore[attr-defined]\n )\n if collate and collate != \"utf8mb4_unicode_ci\":\n _LOGGER.debug(\n \"Database %s collation is not utf8mb4_unicode_ci\",\n table,\n )\n schema_errors.add(f\"{table}.utf8mb4_unicode_ci\")\n return schema_errors", "def confirm_schema_match():\n\n db_version = get_db_version()\n if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION:\n logging.error('The stored db schema version of %s is incompatible with required version %s',\n str(db_version), CURRENT_DATABASE_VERSION)\n sys.exit(43)\n elif db_version < CURRENT_DATABASE_VERSION:\n sys.exit(42)\n else:\n sys.exit(0)", "def enable_constraint_checking(self):\r\n if self.connection:\r\n cursor = self.connection.cursor()\r\n else:\r\n cursor = self._cursor()\r\n # don't check the data, just turn them on\r\n cursor.execute('EXEC sp_MSforeachtable \"ALTER TABLE ? 
WITH NOCHECK CHECK CONSTRAINT all\"')", "def CheckTblNameExist(tblname):\n if not engine.dialect.has_table(engine, tblname):\n return False\n else:\n return True", "def test_returns_correct_relation(self):\n self.assertEqual(type(self.Test.relation()).__name__, 'Relation')\n self.assertEqual(self.Test.relation().klass, self.Test)", "def test_father(self):\n user1 = User()\n self.assertTrue(issubclass(user1.__class__, BaseModel))", "def check_table(self, table, verbose=True): \n \n assert(self.connected)\n\n CHECK_TABLE_COMMAND = \"SHOW TABLES LIKE '{0}'\".format(table)\n \n \n self.cursor.execute(CHECK_TABLE_COMMAND)\n \n exists = False\n for row in self.cursor:\n exists = True\n break\n \n if verbose and exists: print(\"Table with name '{0}' found.\".format(table))\n elif verbose: print(\"Table with name '{0}' not found.\".format(table))\n \n return exists", "def test_instance(self):\n self.assertEqual(True, type(self.Test.defined_associations['something']) is pyperry.association.BelongsTo)", "def has_far_table(self):\r\n if (self.far_spherical_right is not None or self.far_cylinder_right is not None\r\n or self.far_axis_right is not None or self.far_av_right is not None or\r\n self.far_dnp_right is not None\r\n or self.far_spherical_left is not None or self.far_cylinder_left is not None\r\n or self.far_axis_left is not None or self.far_av_left is not None or\r\n self.far_dnp_left is not None):\r\n return True\r\n return False", "def verify(self):", "def create_foreign_table(server, db_name, schema_name, fsrv_name,\n foreign_table_name):\n\n try:\n connection = get_db_connection(db_name,\n server['username'],\n server['db_password'],\n server['host'],\n server['port'])\n old_isolation_level = connection.isolation_level\n connection.set_isolation_level(0)\n pg_cursor = connection.cursor()\n\n pg_cursor.execute(\n \"CREATE FOREIGN TABLE \" + schema_name + \".\" + foreign_table_name +\n \"(emp_name text NULL) SERVER %s\" % fsrv_name)\n\n connection.set_isolation_level(old_isolation_level)\n connection.commit()\n\n # Get 'oid' from newly created foreign table\n pg_cursor.execute(\n \"SELECT ftrelid FROM pg_foreign_table WHERE ftserver = \"\n \"(SELECT oid FROM pg_foreign_server WHERE srvname = '%s') ORDER BY \"\n \"ftrelid ASC limit 1\" % fsrv_name)\n\n oid = pg_cursor.fetchone()\n ft_id = ''\n if oid:\n ft_id = oid[0]\n connection.close()\n return ft_id\n except Exception:\n traceback.print_exc(file=sys.stderr)", "def test_(schema, schemas, expected_valid):\n name = \"Schema\"\n association = association_helper.TParentPropertySchema(\n parent=types.TNameSchema(name=\"ParentSchema\", schema={}),\n property=types.TNameSchema(name=\"PropertySchema\", schema={}),\n )\n\n returned_result = validation.association._check_2_or_fewer_primary_key(\n name=name, schema=schema, association=association, schemas=schemas\n )\n\n assert returned_result.valid == expected_valid\n if not expected_valid:\n expected_reasons = (\n name,\n association.parent.name,\n association.property.name,\n \"too many\",\n \"primary key\",\n )\n\n for reason in expected_reasons:\n assert reason in returned_result.reason", "def verify(self):\n\n errors = super(FragmentDataSourceModel, self).verify()\n errors += self._file_check(self.topology, 'itp')\n errors += self._file_check(self.coordinate, 'gro')\n\n return errors", "def modelfor(model, table):\n try:\n return model.__tablename__ == table\n except AttributeError:\n return False", "def test_joined_name(self):\n\n app = Zask(__name__)\n app.config['SQLALCHEMY_DATABASE_URI'] = 
'sqlite://'\n db = sqlalchemy.SQLAlchemy(app)\n\n class Duck(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n\n class Donald(Duck):\n id = db.Column(\n db.Integer, db.ForeignKey(\n Duck.id), primary_key=True)\n\n self.assertEqual(Donald.__tablename__, 'donald')", "def check_table(self):\n self.missing()\n return self._table(self._data_list)", "def test_table_definition(self):\r\n create_table(LowercaseKeyModel)\r\n create_table(CapitalizedKeyModel)\r\n\r\n delete_table(LowercaseKeyModel)\r\n delete_table(CapitalizedKeyModel)", "def test_validation_column(self):\n assert self.check.validation_column == \"foo_bar_is_unique_identifier\"", "def test_instance(self):\n self.assertEqual(True, type(self.Test.defined_associations['things']) is pyperry.association.HasMany)", "def check_schema_initialized(self) -> bool:\n existing_tables = self.list_tables()\n exists = (\n self.gene_concepts_table in existing_tables\n and self.gene_metadata_table in existing_tables\n )\n if not exists:\n logger.info(\"Gene tables are missing or unavailable.\")\n return exists" ]
[ "0.73069215", "0.65584505", "0.65232533", "0.65034956", "0.6413673", "0.61686987", "0.6062092", "0.60607314", "0.5977171", "0.5951747", "0.59400225", "0.5912802", "0.5910147", "0.5875509", "0.5874834", "0.58528465", "0.5831443", "0.5819649", "0.5797078", "0.57933843", "0.57436293", "0.5734158", "0.5723164", "0.5646982", "0.56053764", "0.5597663", "0.55815846", "0.5563194", "0.55134875", "0.55132604", "0.55013317", "0.5499714", "0.546902", "0.5462401", "0.5451748", "0.5447974", "0.5444004", "0.5385337", "0.5383787", "0.5353591", "0.5351126", "0.5345768", "0.5344469", "0.53399044", "0.5326065", "0.5316584", "0.53153664", "0.53069836", "0.5304021", "0.53009194", "0.5298343", "0.529438", "0.5291147", "0.5286826", "0.5273625", "0.52730757", "0.5270136", "0.5270136", "0.5267381", "0.5254343", "0.52426636", "0.5240728", "0.5217672", "0.51963824", "0.51921993", "0.51921993", "0.5165361", "0.5161024", "0.5160803", "0.515896", "0.51519406", "0.51501894", "0.51436716", "0.51323104", "0.5129958", "0.5124632", "0.5119309", "0.51176995", "0.51136917", "0.51118064", "0.5097692", "0.5095469", "0.50853723", "0.50750005", "0.5074416", "0.5073943", "0.50685436", "0.5063407", "0.5062153", "0.505636", "0.5054992", "0.5054643", "0.5051816", "0.5038807", "0.50377566", "0.5036097", "0.50311816", "0.50301915", "0.5022539", "0.50212264" ]
0.69198555
1
Returns last blockchain value
def get_last_block():
    if namoto_length < 1:
        return None
    return namoto_blockchain[-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_last_blockchain_value():\n return blockchain[-1]", "def last_value():\n return blockchain[-1]", "def get_last_blockchain_value():\n if len(blockchain)< 1:\n return None\n return blockchain[-1]", "def get_last_blockchain_value():\n if len(blockchain) < 1:\n return None\n return blockchain[-1]", "def get_last_blockchainvalue(self):\n if len(self.__chain) < 1:\n return None\n return self.__chain[-1]", "def get_last_blockchain_value(self):\n if len(self.__chain) < 1:\n return None\n return self.__chain[-1]", "def get_last_blockchain_value(self):\n if len(self.__chain) < 1:\n return None\n return self.__chain[-1]", "def get_last_blockchain_value(self):\n if len(self.__chain) < 1:\n return None\n return self.__chain[-1]", "def get_last_blockchain_value(self):\n if len(self.__chain) < 1:\n return None\n return self.__chain[-1]", "def get_last_blockchain_value(self):\n if len(self.__chain) < 1:\n return None\n return self.__chain[-1]", "def get_last_blockchain_value(self):\n # chekking if the blockchian is empty or not\n if len(self.__chain) < 1:\n return None\n return self.__chain[-1]", "def get_value(self):\n return self.last_value", "def last_value(self):\n return self._last_value", "def last_value(self):\n return self._value", "def last_value(self):\n return 0", "def get_last_block(self) -> Block:\n return self.blockchain[-1]", "def get_last(self):\n return self.get_block(len(self.chain)-1)", "def _get_previous_hash(self):\n return self.blockchain[-1]['hash']", "def get_last_block_hash(self):\n cmd = \"\"\" SELECT %s FROM %s WHERE %s = (SELECT MAX(%s) FROM %s); \"\"\" %(\n COL_BLOCKCHAIN_BLOCK_HASH, TABLE_BLOCKCHAIN, COL_BLOCKCHAIN_BLOCKID,\n COL_BLOCKCHAIN_BLOCKID, TABLE_BLOCKCHAIN)\n\n self.__dbcursor.execute(cmd)\n return self.__dbcursor.fetchone()", "def last_block(self):\n return self.chain[len(self.chain) - 1]", "def get_last_hash(self):\n return self.get_last().hash_block()", "def getLastBlock(self):\n if (len(self.chain) == 0):\n return -1\n else:\n return self.chain[len(self.chain)-1]", "def get_last_block(self):\n cmd = \"\"\" SELECT * FROM %s WHERE %s = (SELECT MAX(%s) FROM %s); \"\"\" %(\n TABLE_BLOCKCHAIN, COL_BLOCKCHAIN_BLOCKID, COL_BLOCKCHAIN_BLOCKID,\n TABLE_BLOCKCHAIN)\n\n self.__dbcursor.execute(cmd)\n return self.__dbcursor.fetchone()", "def last_block(self):\n return self.chain[-1]", "def last_block(self):\n return self.chain[-1]", "def latestValue(self):\n if len(self.values) > 0:\n return self.values[-1]\n else:\n return 0", "def last_value(self):\n return self._waveforms[-1].last_value", "def last_block(self):\n return self.client.call('GET', self.name + 'last-block')", "def last_value(self):\n return self._stop", "def getLast(self):\r\n return self._data[-1]", "def get_last_price_tmp(market):\n\trequest = api.get_ticker(market)\n\tif not request['message']:\n\t\tlast = str(request['result']['Last'])\n\t\treturn (last)\n\telse:\n\t\tprint(request['message'])\n\t\tsys.exit(0)", "def last_value(self):\n return self.samples[-1]", "def get_last_result(self):\n return self.last_result", "def lastTransaction(self):\n return self._ltid", "def getLast(self):\n return self.dataBuffer[len(self.dataBuffer) - 1]", "def last(self):\n diff = self.cost - self._last\n self._last = self.cost\n return diff", "def get_k_last(self, pair):\n pair_contract = self.conn.eth.contract(\n address=Web3.toChecksumAddress(pair), abi=SushiswapClient.PAIR_ABI)\n return pair_contract.functions.kLast().call()", "def last(self):\n return self.last and self.last.value or None", "def 
get_last_price(args):\n\tmarket = get_market(args)\n\trequest = api.get_ticker(market)\n\tif not request['message']:\n\t\tlast = str(request['result']['Last'])\n\t\treturn (last)\n\telse:\n\t\tprint(request['message'])\n\t\tsys.exit(0)", "def get_last_last_cost_val(self, var_name):\n if not var_name in self.cost.keys():\n raise ValueError('Error:' + var_name + ' are not stored in the keys')\n l = len(self.cost[var_name])\n if l < 2: raise ValueError('Error: out of length!')\n return self.get_cost_val(var_name, l - 2)", "def getCurrentPrice(self,primary,secondary):\n pair = self.getTradedPair(primary,secondary)\n uri = \"https://bittrex.com/api/v1.1/public/getticker?market=\"+pair\n jsonResponse = self.getJson(uri)\n currentPrice = jsonResponse[\"result\"][\"Last\"]\n return currentPrice", "def get_last_cost_val(self, var_name):\n if not var_name in self.cost.keys():\n raise ValueError('Error:' + var_name + ' are not stored in the keys')\n pos = len(self.cost[var_name]) - 1\n return self.get_cost_val(var_name, pos)", "def fetch_last_height(self, cb):\r\n self.send_command('blockchain.fetch_last_height', cb=cb)", "def get_last(network):\r\n\treturn IPAddress(IPNetwork(network).last - 1)", "def last_candle(self):\r\n if self.length() > 0:\r\n return self.candles[0]\r\n else:\r\n return None", "def last_percept(self):\n return self.percept", "def result(self):\n # most pythonic way to get last in last is -1\n return self.history[-1]", "def getLastUpdate():\n swDB = switchdb.DB()\n lastupdate = swDB.getLastUpdate()\n swDB.close()\n return lastupdate", "def last(self):\r\n if self.tail == None: #check if last(tail) node is empty\r\n return 'null' #if yes, then return null\r\n else: #if it is not empty\r\n return self.tail.data #return the data of tail node\r", "def last_state(self):\n return self._simstate", "def lastsave(self):\n self.connect()\n self._write('LASTSAVE\\r\\n')\n return self._get_numeric_response()", "def last(self):\n return Null", "def last(self):\n data = self._http_get(\"last\")\n return data.json()", "def get_last_cost_change(self, var_name):\n pos = len(self.cost_change[var_name]) - 1\n return self.get_cost_change_value(var_name, pos)", "def getLastData(self) -> ghidra.program.model.listing.Data:\n ...", "def last_update(self, value):\n if self._last_update != value:\n self._last_update = value\n return self._last_update", "def last_provided(self):\n last = self.current()\n if last == 0:\n return None\n return self._cnt2bc(last - 1)", "def get_first_block(blockchain):\n response = requests.get('https://api.blockcypher.com/v1/%s/main' % blockchain)\n if response.status_code == 200:\n return int(json.loads(response.content.decode('latin1'))['height'])\n elif response.status_code == 429:\n print('Too many requests')\n return -1", "def testnet_receive_coin(self):\n try:\n datas = get_transaction_details(tx_hash=self.tx_hash,\n coin_symbol='bcy')\n except:\n raise ValidationError('Hash da transacao invalido ou nao '\n 'identificado.')\n if datas.get('error'):\n raise ValidationError('Transacao nao encontrada.')\n vals = {'name': datas.get('hash')}\n if datas.get('confirmations') >= 2:\n vals.update({'confirmation': datas.get('confirmations'),\n 'date_time': str(datas.get('confirmed')),\n 'state': 'D',\n 'satoshi': datas.get('outputs')[0].get('value')})\n self.write(vals)\n return datas.get('hash')", "def get_price_1_cumulative_last(self, pair):\n pair_contract = self.conn.eth.contract(\n address=Web3.toChecksumAddress(pair), abi=SushiswapClient.PAIR_ABI)\n return 
pair_contract.functions.price1CumulativeLast().call()", "def last(self, trace):\n return trace[-1]", "def getCurrentPrice(self,primary,secondary):\n pair = self.getTradedPair(primary,secondary)\n uri = \"https://www.bitstamp.net/api/v2/ticker/\"\n requestUrl = uri + pair\n jsonResponse = self.getJson(requestUrl)\n currentPrice = jsonResponse[\"last\"]\n return currentPrice", "def get_last_modified_value(self):\n return self.get_text_from_element(self.last_modified_value_locator)", "def get_latest_transaction():\n\n transaction = get_db().execute('SELECT * FROM transactions ORDER BY id DESC').fetchone()\n\n return transaction", "def get_price_0_cumulative_last(self, pair):\n pair_contract = self.conn.eth.contract(\n address=Web3.toChecksumAddress(pair), abi=SushiswapClient.PAIR_ABI)\n return pair_contract.functions.price0CumulativeLast().call()", "def last(self) -> 'outputs.CommitmentPeriodResponse':\n return pulumi.get(self, \"last\")", "def get_last_measurement(self, param):\n return self.__buffer[param][-1]", "def latest_state_data(self):\n if not self.state_list:\n return None\n if not self.state_list[-1]:\n return None\n return self.state_list[-1]", "def _get_last_json_result_string(self):\n return self.last_json_result", "def get(self):\n now = datetime.datetime.utcnow()\n if now > self.time_of_next_update:\n self._update_value()\n return self.value", "def get_last_solution(self):\n return self.last_result", "def get_last(self):\n self.accumulated_time_last = pg.time.get_ticks() - self.start_time_last\n return self.accumulated_time_last", "def get_balance(self):\r\n return self.balance", "def get_last_event(self):\n return self.last_event_code", "def last_close(self):\n return self.data.last('1D').close.iat[0]", "def get_last_time(self):\n \n return self._last", "async def get_change(sochain_url, value_out, network, address):\n try:\n balance = await sochain_api.get_balance(sochain_url, network, address)\n balance = round(balance[0].amount * 10 ** 8)\n change = 0\n if balance - value_out > DUST_THRESHOLD:\n change = balance - value_out\n return change\n except Exception as err:\n raise Exception(str(err))", "def _get_last_read_id():\n webservice_url_initial = 'http://' + host_cmr + ':' + str(host_cmr_port) + '/rest/data/invocations/overview?latestReadId=' \\\n + str(MaxIdInDB[0])\n\n print('Web Service Url Initial for Last Read id is ', webservice_url_initial)\n response_summary = requests.get(webservice_url_initial)\n\n data = response_summary.json()\n df = pd.DataFrame(json_normalize(data))\n lastreadid_max = df[['id']].max()\n lastreadid_min = df[['id']].min()\n print('Last Read id VALUE in apm is ', lastreadid_max['id'])\n print('the min id VALUE in apm this json ', lastreadid_min['id'])\n\n if int(lastreadid_max) >= MaxIdInDB[0]:\n print(\"Send data to influx and MaxIDINDB[0] is from \", MaxIdInDB[0], ' to LastReadId:', int(lastreadid_max))\n a = lastreadid_max['id']\n print('a is ', a)\n return a\n time.sleep(1)", "def query_last_price(market_data):\n print(\"Consultando el último precio\")\n if market_data[\"marketData\"][\"LA\"]:\n last_price = market_data[\"marketData\"][\"LA\"][\"price\"]\n print(\n f\"Último precio operado: ${last_price:,.2f}\".replace('.', ','))\n return last_price\n print(\"Último precio operado: No hay datos disponibles\")\n return None", "def last_update(self):\n return self._last_update", "def last_update(self):\n return self._last_update", "def latest_data(self):\n if self._data:\n return self._data[0]\n return None", "def 
get_previous_block(self):\r\n return self.chain[-1] # Return the previous block\r", "def add_value(transaction_amount, last_transaction=[1]):\n blockchain.append([last_transaction, transaction_amount])", "def latest(self):\n return self.journal_data[self.latest_id]", "def latest(self):\n return self.series.tail(1)[0]", "def get_crypto_balance():\n return get_balance(CONF.base)", "def get_last(self, count):", "def getLast(self):\n\n if self.firstItem == None:\n raise Exception(\"cannot getLast - linked list is empty\")\n\n # 1. Find the last item\n lastItem = self.firstItem\n while lastItem.next != None:\n lastItem = lastItem.next\n\n # 2. Return the value\n return lastItem", "def get_latest(self, name):\n return self._scalar_history.get_latest(name)[1]", "def last_success(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"last_success\")", "def get_last_prices(self):\n return self.last_results", "def getCurrentPrice(self,primary,secondary):\n pair = self.getTradedPair(primary,secondary)\n jsonResponse = self.getJson(\"https://poloniex.com/public?command=returnTicker\")\n currentPrice = jsonResponse[pair][\"last\"]\n return currentPrice", "def get_last_save(self) -> Optional[int]:\n return self._bin_iter.get_last_save()", "def freshest_crl_value(self):\n\n if self._processed_extensions is False:\n self._set_extensions()\n return self._freshest_crl_value", "def getEnd(self) -> long:\n ...", "def GetLastOrderQuantity(self):\r\n return self.lastOrderQuantity", "def get_last(dev):\n\n dev = proc_dev_to_sysfs_dev(dev)\n try:\n with open('/tmp/check_disk_latency.{}.json'.format(dev)) as historyfd:\n return json.loads(historyfd.read())\n except IOError:\n return None", "def last_key(self):\n return self._last_key", "def get_last_update(self):\n return self.ticker.all().order_by('-created').first()" ]
[ "0.9357195", "0.92896146", "0.89885485", "0.8950827", "0.89315474", "0.8893679", "0.8893679", "0.8893679", "0.8893679", "0.8893679", "0.88605654", "0.74856514", "0.74814206", "0.7459868", "0.74472046", "0.7285969", "0.7243078", "0.7127286", "0.7126116", "0.7078929", "0.70271677", "0.7025945", "0.6987105", "0.6961448", "0.6961448", "0.6957076", "0.6880463", "0.68551946", "0.68416184", "0.6814621", "0.6795889", "0.6768686", "0.66324776", "0.65181416", "0.65092456", "0.6496973", "0.64715683", "0.6471297", "0.6415112", "0.6346825", "0.63402224", "0.63254714", "0.6262818", "0.6250334", "0.62211406", "0.61805016", "0.6174331", "0.6164831", "0.61315525", "0.61234033", "0.6103235", "0.61031675", "0.6093441", "0.60862297", "0.6085428", "0.6070356", "0.60676444", "0.6056266", "0.60465455", "0.60356385", "0.60344404", "0.60295075", "0.60211205", "0.60192984", "0.60185474", "0.6008175", "0.6007245", "0.5989071", "0.59728426", "0.596838", "0.59316057", "0.5912952", "0.5893037", "0.5882331", "0.587683", "0.58684015", "0.58600265", "0.58356166", "0.58310986", "0.5828194", "0.5828194", "0.58211833", "0.5805657", "0.58027315", "0.5796145", "0.5789864", "0.57819444", "0.578055", "0.5780181", "0.5779998", "0.5779352", "0.57677543", "0.57671034", "0.5738856", "0.5738419", "0.5738236", "0.5738083", "0.57368183", "0.5735299", "0.5723883" ]
0.7415065
15
Checks if previous blocks have been unchanged
def verify_chain():
    block_index = 0
    is_unchanged = True
    if namoto_length < 1:
        print('Blockchain is empty!')
        return None
    for block in namoto_blockchain:
        if block[0] == namoto_blockchain[block_index - 1]:
            is_unchanged = True
            block_index += 1
        else:
            is_unchanged = False
            break
    return is_unchanged
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _blocks_changed(block1, block2):\n if block1.name != block2.name:\n return True\n\n # Check for any changed blocks (symmetric difference operation of sets)\n block_diff = set(block1.to_dict().items()) ^ set(block2.to_dict().items())\n if len(block_diff) > 0:\n return True\n\n return False", "def blocks_changed(self):\n return self._blocks_in_top_level_config_changed() \\\n or self._blocks_in_components_changed() \\\n or self._blocks_removed_from_top_level_config() \\\n or self._new_components_containing_blocks() \\\n or self._removed_components_containing_blocks()", "def _check_for_added_blocks(old_components, new_components):\n for new_component_name, new_component in new_components.items():\n if new_component_name not in old_components and len(new_component.blocks) != 0:\n return True\n return False", "def changed_block(self, old_block, new_block):", "def changed(self):\r\n return self.value != self.previous_value", "def changed(self):\r\n return self.value != self.previous_value", "def _blocks_in_components_changed(self):\n for name, component in self._components.items():\n if name in self._cached_components \\\n and _blocks_changed_in_config(self._cached_components[name], self._components[name]):\n return True\n return False", "def changed(self) -> bool:\n for chunk_location, chunk in self._chunk_cache.items():\n if chunk is None:\n # if the chunk is None and the saved record is not None, the chunk has changed.\n if chunk_location not in self._chunk_index:\n return True\n _, save_chunk_index = self._chunk_index[chunk_location]\n chunk_storage = self._chunk_history[chunk_location]\n if chunk_storage[save_chunk_index] is not None:\n return True\n elif chunk.changed:\n return True\n for chunk_index, save_chunk_index in self._chunk_index.values():\n if chunk_index != save_chunk_index:\n return True\n return False", "def verify_chain(self, new_block=None):\n if new_block and (not new_block.is_valid()\n or self.get_last().hash_block() != new_block.prev_block_hash):\n return False, -2\n\n i = len(self.chain)-1\n for block in reversed(self.chain):\n prev_hash = self.chain[i-1].hash_block()\n if block.index == 0 or i == 0:\n break\n # block's header_hash property is already recalculated in is_valid() method\n elif block.is_valid() and prev_hash == block.prev_block_hash:\n i -= 1\n else:\n return False, block.index\n\n return True, -1", "def chainIsValid(self):\n for i in range(1, len(self.blocks)):\n prev_block = self.blocks[i-1]\n cur_block = self.blocks[i]\n if cur_block.header['prevBlockH'] != getHashBlock(prev_block):\n return False\n return True", "def _blocks_changed_in_config(old_config, new_config, block_comparator=_blocks_changed):\n\n for block_name in new_config.blocks.keys():\n # Check to see if there are any new blocks\n if block_name not in old_config.blocks.keys() or \\\n block_comparator(old_config.blocks[block_name], new_config.blocks[block_name]):\n return True\n\n for block_name in old_config.blocks.keys():\n if block_name not in new_config.blocks.keys() \\\n or block_comparator(old_config.blocks[block_name], new_config.blocks[block_name]):\n return True\n\n return False", "def validateBlock(self, currentBlock, previousBlock): \n \n # Check the block index\n if currentBlock.index != previousBlock.index + 1:\n return False\n if currentBlock.previousHash != previousBlock.hash:\n return False\n if currentBlock.hash != currentBlock.hashBlock():\n return False\n if not self.validateNonce(previousBlock.nonce, previousBlock.hash, currentBlock.nonce):\n return False\n return 
True", "def has_changed(self):\n return self.get_old_value() != self.get_current_value()", "def is_modified(self):\n return self._original_sections != self._sections", "def check_previousBlockH(self, previousBlockH):\n if getHashBlock(self.get_topBlock()) == previousBlockH:\n return True\n else:\n return False", "def changed(self):\n if self.exists():\n return self.current_content != self.content\n else:\n return True", "def valid_chain(self, block, prev_block):\n self.stop_mine()\n\n print('\\n //// MINING STOPPED\\n')\n\n print('\\n //// block entering valid_chain')\n pprint(block)\n\n if block is not None and block['message'] != 'mining stopped':\n if block['previous_hash'] == self.hash(prev_block):\n \n # Check that the Proof of Work is correct\n if self.valid_proof(prev_block['proof'], block['proof']):\n if block['index'] == self.last_block['index']:\n if self.last_block['timestamp'] > block['timestamp']:\n del self.chain[-1]\n self.chain.append(block)\n print('\\n //// true from equal index but older timestamp')\n return True\n\n elif self.last_block['timestamp'] == block['timestamp']:\n print('\\n //// true from timestamps are equal block isnt added')\n return True\n else:\n print('\\n //// true timestamp is newer not added but sending false')\n return False\n\n elif block['index'] > self.last_block['index']:\n print('\\n //// true from index is greater and block is added')\n self.chain.append(block)\n return True\n else:\n print('\\n //// false from adding block had index less than block already there')\n else:\n print('\\n //// false from not a valid proof')\n\n else:\n print('\\n //// false from hashes arent equal')\n if (block['timestamp'] < self.last_block['timestamp']):\n if (block['index'] == self.last_block['index']):\n print('\\n //// hashes arent equal but block is older, subtracting and adding')\n del self.chain[-1]\n self.chain.append(block)\n return True\n\n elif (block['timestamp'] > self.last_block['timestamp']):\n if(block['index'] > self.last_block['index']):\n self.chain.append(block)\n return True\n else:\n return True\n\n return False\n\n else:\n return 'reject'", "def is_changed(self, new_grid):\n for row in range(self._grid_height):\n for col in range(self._grid_width):\n if self.get_tile(row,col) != new_grid[row][col]:\n return True\n return False", "def dirty(self):\n return self._orig_line is not None", "def is_changed(self) -> bool:\n return self.selected_vms != self._initial_vms", "def is_blockchain_valid(self, last_block=[]):\n if last_block:\n last_block = [last_block.get_block_obj(True)]\n if len(self.blockchain) == 0:\n return False\n i = 0\n for block in self.blockchain + last_block:\n if block[\"hash\"] == \"0\":\n # the first block\n continue\n if self.blockchain[i][\"hash\"] != block[\"previous_hash\"]:\n return False\n i += 1\n return True", "def isUpdated(self):\n seq = self.readSeq()\n\n if (seq != self.seq):\n self.seq = seq\n return True\n else:\n return False", "def check_unstaged_changes(self):\n pass", "def is_changed(self, include_md: bool = True) -> bool:\n current = self.calculate_hash(include_md=include_md)\n stored = self.hash if include_md else self.stub_hash\n log.trace(f\"changed = {self.hash != current} | Stored: {stored} | Current: {current}\")\n return stored != current", "def check_structure_is_modified(self):\n if not self.structure_has_been_modified: \n print('NEED TO MODIFY STRUCTURE BEFORE PROCEEDING FURTHER!')\n sys.exit()", "def checkChanges(self):\n results = [\n self.values[1],\n self.values[f\"-{self.values[1]}-\"],\n 
self.values[\"-TOGGLE-ALL-\"],\n self.values[\"-INVITED-\"],\n self.values[\"-ASSIGNED-\"],\n self.values[\"-GRADED-\"],\n self.values[\"-BLOCKED-\"] ]\n\n if results == self.oldResults[1::]:\n self.oldResults = [False] + results\n\n elif (self.values[f\"-{self.values[1]}-\"] == [] and \\\n self.values[\"-TOGGLE-ALL-\"] == False and \\\n results[0] != self.oldResults[1]):\n self.window['-OUTPUT-'].update('')\n self.oldResults = [False] + results\n\n else:\n self.oldResults = [True] + results", "def changed(self):\n return True", "def test_missing_previous(self):\n db = MockDatabase()\n\n G = db.owner\n\n A1 = TestBlock(\n block_type=BlockTypes.CHECKPOINT,\n transaction={'balance': 0},\n links=G\n )\n result, errors = A1.validate_transaction(db)\n self.assertEqual(result, ValidationResult.valid)\n self.assertEqual(errors, [])\n # db.add_block(A1) MISSING!!!\n\n A2 = TestBlock(\n block_type=\"NonEurotoken\",\n transaction={},\n previous=A1\n )\n db.add_block(A2)\n\n A3 = TestBlock(\n block_type=BlockTypes.CHECKPOINT,\n transaction={'balance': 0},\n previous=A2,\n links=G\n )\n result, errors = A3.validate_transaction(db)\n self.assertEqual(result, ValidationResult.missing)\n self.assertEqual(errors, [BlockRange(A1.public_key, A1.sequence_number, A1.sequence_number)])", "def has_changed(self) -> bool:\n # TODO: Add in change logic here\n state = None\n if state != self._file_state:\n self._changed_flag = True\n self._file_state = state\n return self._changed_flag", "def has_previous(self):\n if self.idx < len(self.nodes):\n return True\n else:\n return False", "def dirty(self):\n return not self.consistent", "def check_block(self, block):\n pass", "def hasChanged(self):\n return ((self.mtime != getmtime(self.path)) or\n (self.size != os.path.getsize(self.path)) )", "def check_fixedblock(self):\n print('This will read the fixed block then display changes as they')\n print('occur. Typically the most common change is the incrementing')\n print('of the data pointer, which happens whenever readings are saved')\n print('to the station memory. 
For example, if the logging interval')\n print('is set to 5 minutes, the fixed block should change at least')\n print('every 5 minutes.')\n raw_fixed = self.station.get_raw_fixed_block()\n while True:\n new_fixed = self.station.get_raw_fixed_block(unbuffered=True)\n for ptr in range(len(new_fixed)):\n if new_fixed[ptr] != raw_fixed[ptr]:\n print(datetime.datetime.now().strftime('%H:%M:%S'), end=' ')\n print(' %04x (%d) %02x -> %02x' % (\n ptr, ptr, raw_fixed[ptr], new_fixed[ptr]))\n raw_fixed = new_fixed\n time.sleep(0.5)", "def block_seen(self):\n self.blocklist.update(self.mapping.values())\n self.mapping = dict()", "def has_state_changed(self) -> bool:\r\n ...", "def _removed_components_containing_blocks(self):\n\n # Check for removed blocks == check for added blocks in the other direction.\n return ActiveConfigHolder._check_for_added_blocks(self._components, self._cached_components)", "def partial_change(self):\n return self.attempted_change() and not all(self._get_field_data())", "def has_unsaved_changes(self):\n return self._file_content != self.buffer.text", "def valid_chain(self, chain):\n last_block = chain[0]\n current_index = 1\n\n while current_index < len(chain):\n block = chain[current_index]\n print(last_block)\n print(block)\n print(\"\\n--------\\n\")\n \n #check that the hash of the previous block is correct\n\n if block[\"previous_hash\"] != self.hash(last_block):\n print(\"Previous hash does not match\")\n return False\n\n if not self.valid_proof(block):\n print(\"Block proof of work is invalid\")\n return False\n\n last_block = block\n current_index += 1\n\n return True", "def hasChanged(self):\r\n if self.is_updated:\r\n self.is_updated = False\r\n return True\r\n else:\r\n return False\r\n\r\n # if not self.hasBeenUpdatedOnce:\r\n # self.hasBeenUpdatedOnce = True\r\n # return True\r\n # else:\r\n # if BLENDER_MODE == 'BPY':\r\n # # for e in dir(self.obj): print(e)\r\n # # print(self.obj, self.obj.name, self.obj.is_updated, self.obj.is_updated_data)\r\n # # return self.obj.is_updated # DOESN't UPDATE A THING!\r\n # # return True\r\n # return self.is_updated\r\n\r\n # return False # no update in BGE mode\r", "def has_position_changed(self, previous_matchday_standing):\n return \\\n self.position != previous_matchday_standing.position and \\\n self.played_games > previous_matchday_standing.played_games", "def has_changed(self):\n return bool(self.changed_data)", "def guard_liberate_transition(self):\n if self.get_free_positions:\n return True", "def hadChanged(self):\n return self.changed", "def _blocks_in_top_level_config_changed(self):\n return _blocks_changed_in_config(self._cached_config, self._config)", "def changed(self):\n if self.readable:\n old_data = self.data[:]\n data = self.tag.read_binary(0, self._max_le)\n size = data[0] * 256 + data[1] + 2\n tail = max(0, size - len(data))\n while len(data) < size:\n count = min(self._max_lc, size - len(data))\n data += self.tag.read_binary(len(data), count)\n self.data = str(data[2:size])\n return self.data != old_data\n return False", "def is_fixed_state( previous_live, live_cells ):\n fixed = False\n if previous_live[0].size == live_cells[0].size:\n if previous_live[1].size == live_cells[1].size:\n if (previous_live[0]==live_cells[0]).all():\n if (previous_live[1]==live_cells[1]).all():\n fixed = True\n return fixed", "def changed(self):\n\t\tpass", "def check_change(self, state_variables):\n for control in self.__control_list:\n if control[0] != 'control':\n\t\t\t\t# sum of values of state variables of interest in 
the previous and the current interval of time\n sum1 = np.matmul(control[1], state_variables[:,0])\n sum2 = np.matmul(control[1], state_variables[:,1])\n\n if (np.sign(sum1 - control[2]) != np.sign(sum2 - control[2])):\n self.__active_control = control\n return True\t\n return False", "def isValidBlock(self, block, unSpentTransactions):\n\n prevBlock = self.getBlock(self.tailBlockHash)\n if prevBlock.index+1 != block.index:\n return False\n elif prevBlock.currHash != block.prevHash:\n return False\n elif block.calculateHash() != block.currHash:\n return False\n return block.isValid(unSpentTransactions)", "def is_valid_block(self, first):\n return (self.a_cursor > first.a and\n self.b_cursor > first.b)", "def equals(self, state):\r\n return self.blockList == state.blockList", "def confirm_unchanged(packet, original, protocol, changed):\n for header in packet.layers:\n if packet.layers[header].protocol != protocol:\n continue\n for field in packet.layers[header].fields:\n # Skip checking the field we just changed\n if field in changed or field == \"load\":\n continue\n assert packet.get(protocol.__name__, field) == original.get(protocol.__name__, field), \"Tamper changed %s field %s.\" % (str(protocol), field)\n return True", "def deduce_new_block_origins(line, hints, block_origins):\n block_origins = copy(block_origins)\n # Storing information whether function deduced anything new\n sth_changed = False\n\n # forward loop\n i = 0\n while i < len(hints):\n # Situation when there is filled cell just before the block need not to\n # be checked, due to use of push_block_origins\n\n # check for empty space blocking placing\n changed1, block_origins = check_no_empty_cell_inside(\n line, hints, block_origins, i,\n )\n\n # check for filled space enforcing push of block origin\n changed2, block_origins = check_filled_cell_from_right(\n line, hints, block_origins, i,\n )\n\n if changed1 or changed2:\n sth_changed = True\n else:\n i += 1\n\n # backward loop analysis\n changed, block_origins = pull_block_origins(line, hints, block_origins)\n sth_changed = sth_changed or changed\n\n return sth_changed, block_origins", "def has_been_changed(self):\n return self._changed", "def dirty(self) -> bool:\n return len(self.detect_changed_files()) != 0", "def need_update(self):\n self.logging.debug( \"need_update()\" )\n\n for name in self.tables:\n\n md5 = self.dbs_tables[name]['md5']\n test = get_md5(self.dbs_tables[name]['path'])\n\n self.logging.debug('(%s) table:%s md5:[old: %s new: %s]' % \\\n (self.db,name,md5,test) )\n\n if test != md5: return True\n\n return False", "def status_change(previous, current):\n if previous in look_for:\n return current not in look_for\n elif current in look_for:\n return previous not in look_for", "def has_changed(self):\n timestamp = os.stat(self.filename).st_mtime\n if timestamp > self.last_timestamp:\n self.last_timestamp = timestamp\n return True\n return False", "def has_unsaved_changes(self):\n # TODO\n pass", "def hasPrevFrame(self):\n self.deleteDouble()\n return (len(self.activeFrames) > 1)", "def changed_chunks(self) -> Generator[DimensionCoordinates, None, None]:\n changed_chunks = set()\n for chunk_location, chunk in self._chunk_cache.items():\n if chunk is None:\n # if the chunk is None and the saved record is not None, the chunk has changed.\n if chunk_location in self._chunk_index:\n _, save_chunk_index = self._chunk_index[chunk_location]\n chunk_storage = self._chunk_history[chunk_location]\n if chunk_storage[save_chunk_index] is not None:\n 
changed_chunks.add(chunk_location)\n yield chunk_location\n else:\n changed_chunks.add(chunk_location)\n yield chunk_location\n\n elif chunk.changed:\n changed_chunks.add(chunk_location)\n yield chunk_location\n for chunk_location, (index, save_index) in self._chunk_index.items():\n if index != save_index and chunk_location not in changed_chunks:\n yield chunk_location", "def should_update_db(prev_event, current_event):\n return (current_event[\"has_clip\"] or current_event[\"has_snapshot\"]) and (\n prev_event[\"top_score\"] != current_event[\"top_score\"]\n or prev_event[\"entered_zones\"] != current_event[\"entered_zones\"]\n or prev_event[\"thumbnail\"] != current_event[\"thumbnail\"]\n or prev_event[\"has_clip\"] != current_event[\"has_clip\"]\n or prev_event[\"has_snapshot\"] != current_event[\"has_snapshot\"]\n )", "def modified(self):\r\n\t\treturn self.last_modified > self.last_processed", "def modified(self):\n\t\treturn self.last_modified > self.last_processed", "def update_blocks_closure(self, ln, block, fail_bool):\n\n if ln == Line.LINE_GREEN:\n # Check that block isnt already in that state\n if self.blocks_green_arr[block - 1].open == (not fail_bool):\n if fail_bool == True:\n self.blocks_green_arr[block - 1].num_faliures += 1\n else:\n self.blocks_green_arr[block - 1].num_faliures -= 1\n else:\n if fail_bool == True:\n self.blocks_green_arr[block - 1].num_faliures += 1\n else:\n self.blocks_green_arr[block - 1].num_faliures -= 1\n\n\n # Update block if fail\n if self.blocks_green_arr[block - 1].num_faliures > 0:\n if self.blocks_green_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_green_arr[block - 1].open = False\n else:\n if not self.blocks_green_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_green_arr[block - 1].open = True\n\n elif ln == Line.LINE_RED:\n # Check that block isnt already in that state\n if self.blocks_red_arr[block - 1].open == (not fail_bool):\n if fail_bool == True:\n self.blocks_red_arr[block - 1].num_faliures += 1\n else:\n self.blocks_red_arr[block - 1].num_faliures -= 1\n else:\n if fail_bool == True:\n self.blocks_red_arr[block - 1].num_faliures += 1\n else:\n self.blocks_red_arr[block - 1].num_faliures -= 1\n\n # Update block if fail\n if self.blocks_red_arr[block - 1].num_faliures > 0:\n if self.blocks_red_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_red_arr[block - 1].open = False\n else:\n if not self.blocks_red_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_red_arr[block - 1].open = True\n\n else:\n raise Exception(\"CTC : UPDATE BLOCK CLOSURES (maint. mode from SWTrack \\\n Cont. 
Send INVALID Line\")", "def is_map_updated(self):\r\n self.old_obs_len =0\r\n if len(self.obs_ls[0])!= self.old_obs_len:\r\n self.old_obs_len =len(self.obs_ls[0])\r\n return True\r\n return False", "def test_block_branch_not_changed_by_preview_handler(self, default_store):\n client = Client()\n client.login(username=self.user.username, password=self.user_password)\n\n with self.store.default_store(default_store):\n course = CourseFactory.create()\n\n block = ItemFactory.create(\n parent_location=course.location,\n category=\"problem\"\n )\n\n url = reverse_usage_url(\n 'preview_handler',\n block.location,\n kwargs={'handler': 'xmodule_handler/problem_check'}\n )\n response = client.post(url)\n self.assertEqual(response.status_code, 200)\n self.assertFalse(modulestore().has_changes(modulestore().get_item(block.location)))", "def mine(self):\n if not self.unconfirmed_transactions: \n return False\n \n last_block = self.last_block\n \n new_block = Block(index= last_block.index + 1, \n transactions = self.unconfirmed_transactions,\n timestamp = time.time(),\n previous_hash = last_block.hash)\n\n proof = self.proof_of_work(new_block)\n self.add_block(new_block, proof)\n self.unconfirmed_transactions = []\n return new_block.index", "def _check_for_change(self, sender, instance, created, **kwargs):\n previous = []\n current = []\n for i, f in enumerate(self.fields_to_track):\n previous.append(getattr(instance, self.tracker_attnames[i], None))\n current.append(self.get_tracked_value(instance, i))\n if created:\n previous = None\n if previous != current: # short circuit if nothing has changed\n sources, sinks = self.states_to_stocks_func(previous, current)\n for source, sink in zip(sources, sinks):\n if source is not sink: # short circuit if no change in state/stock\n self.create_flow_event(source, sink, instance)", "def changed(self) -> bool:\n return self._changed", "def check_guess_if_previous(self): # is a helper function to add_previous_guess()\n if self.guess in self.past_guesses:\n return False\n else:\n return True", "def check_modified(self) -> bool:\n return bool(self._modified)", "def analyze_state_changes(self):\n graph = self._graph\n lost_chunks = set(self._lost_chunks)\n op_states = self._op_states\n\n # mark lost virtual nodes as lost when some preds are lost\n for n in graph:\n if not isinstance(n.op, VirtualOperand) \\\n or op_states.get(n.op.key) == OperandState.UNSCHEDULED:\n continue\n if any(pred.key in lost_chunks for pred in graph.iter_predecessors(n)):\n lost_chunks.add(n.key)\n\n # collect operands with lost data\n op_key_to_chunks = defaultdict(list)\n lost_ops = set()\n for n in graph:\n op_key_to_chunks[n.op.key].append(n)\n if n.key in lost_chunks:\n lost_ops.add(n.op.key)\n\n # check data on finished operands. when data lost, mark the operand\n # and its successors as affected.\n affected_op_keys = set()\n for op_key in lost_ops:\n affected_op_keys.add(op_key)\n for n in op_key_to_chunks[op_key]:\n affected_op_keys.update(succ.op.key for succ in graph.iter_successors(n))\n\n # scan the graph from bottom and reassign new states\n new_states = dict()\n for chunk in graph.topological_iter(reverse=True):\n op_key = chunk.op.key\n if chunk.op.key not in affected_op_keys:\n continue\n\n can_be_ready = True\n stop_spread_states = (OperandState.RUNNING, OperandState.FINISHED)\n for pred in graph.iter_predecessors(chunk):\n pred_op_key = pred.op.key\n # mark affected, if\n # 1. data of the operand is lost\n # 2. 
state does not hold data, or data is lost,\n # for instance, operand is freed.\n if pred.key in lost_chunks or op_states.get(pred_op_key) not in stop_spread_states:\n affected_op_keys.add(pred_op_key)\n can_be_ready = False\n\n # update state given data preservation of prior nodes\n chunk_op_state = op_states.get(op_key)\n if can_be_ready and chunk_op_state != OperandState.READY:\n new_states[op_key] = OperandState.READY\n elif not can_be_ready and chunk_op_state != OperandState.UNSCHEDULED:\n new_states[op_key] = OperandState.UNSCHEDULED\n\n op_states.update(new_states)\n return new_states", "def is_dirty(self):\n return True in [n.is_dirty for n in self.nodes]", "def BlockheightCheck(self):\n if self.CurrentBlockheight == BC.Default().Height:\n if len(self.Peers) > 0:\n logger.debug(\"Blockheight is not advancing ...\")\n next_hash = BC.Default().GetHeaderHash(self.CurrentBlockheight + 1)\n culprit_found = False\n for peer in self.Peers:\n if next_hash in peer.myblockrequests:\n culprit_found = True\n peer.Disconnect()\n break\n\n # this happens when we're connecting to other nodes that are stuck themselves\n if not culprit_found:\n for peer in self.Peers:\n peer.Disconnect()\n else:\n self.CurrentBlockheight = BC.Default().Height", "def is_valid_block(last_block, block):\n if block.last_hash != last_block.hash:\n raise Exception('Incorrect last_hash')\n if hex_to_binary(block.hash)[0:block.difficulty] != '0' * block.difficulty:\n raise Exception('Proof of Work not fulfilled')\n if abs(block.difficulty - last_block.difficulty) > 1:\n raise Exception('Block difficulty must only adjust by 1')\n\n reconstructed_hash = crypto_hash(\n block.timestamp,\n block.last_hash,\n block.data,\n block.nonce,\n block.difficulty\n )\n\n if block.hash != reconstructed_hash:\n raise Exception('Incorrect Block hash')", "def validateChain(self, toValidateChain):\n # First validate both firsts blocks\n if toValidateChain[0].hashBlock() != self.__chain[0].hashBlock():\n return False\n\n # Then compare each block with previous \n for x in range(1, len(toValidateChain)):\n if not self.validateBlock(toValidateChain[x], toValidateChain[x - 1]):\n return False\n\n return True", "def change_valid(self, dx=0, dy=0, dr=0):\n ## Create list to hold now coords\n new_coords = []\n\n ## Calculate new rotation value \n new_r = (self.rotate+dr)%4\n\n ## Get rel coords for each block element\n for rel_x, rel_y in self.block_elements:\n ## rotate rel coords using new rotations value\n if new_r == 1:\n rel_x, rel_y = -rel_y, rel_x\n elif new_r == 2:\n rel_x, rel_y = -rel_x, -rel_y\n elif new_r == 3:\n rel_x, rel_y = rel_y, -rel_x \n\n ## Calculate new abs coords\n new_x = int(self.x + rel_x + dx)\n new_y = int(self.y + rel_y + dy)\n \n ## Append new coords to new_coords list\n new_coords.append((new_x, new_y))\n\n ## Check for border collisions\n max_x = self.parent.block_width - 1\n border_collision = any((\n ## Wall collision checks\n any([x < 0 for x,y in new_coords]),\n any([x > max_x for x,y in new_coords]),\n ## Floor collision\n any([y < 0 for x,y in new_coords]) \n ))\n\n ## If border collision, return false\n if border_collision:\n return False\n\n ## Check for collisions with crumble\n crumble = self.parent.crumble\n block_collision = any([crumble[x, y] for x,y in new_coords])\n\n ## If block collision, return false\n if block_collision:\n return False\n\n ## If no collision return True\n return True", "def _has_blocks_to_place(self, exclude=None):\n for block_ in self._inventory:\n if block_ != exclude:\n return 
True\n return False", "def check_if_legal(row, blocks):\n counter = 0\n compare_lst = []\n for square in row:\n if square == Black:\n counter += 1\n else:\n if counter > 0:\n compare_lst.append(counter)\n counter = 0\n if counter > 0:\n compare_lst.append(counter)\n if compare_lst == blocks:\n return True\n return False", "def isValid(self):\n currBlock = self.getBlock(self.tailBlockHash)\n while currBlock != self.genesisBlock:\n if not self.isValidBlock(currBlock):\n return False\n currBlock = self.getBlock(currBlock.prevHash)\n return True", "def hasInputsChanged(self):\n return False\n\n # XXX\n _parameters = None\n lastConfigChange = self.findLastConfigureOperation()\n if lastConfigChange:\n changeset = self._manifest.loadConfigChange(lastConfigChange)\n _parameters = changeset.inputs\n if not _parameters:\n return not not self.inputs\n\n # isn't it too early for this??\n inputs = self.getCurrentInputs(lastConfigChange)\n if set(inputs.keys()) != set(_parameters.keys()):\n return True # params were added or removed\n\n # XXX calculate and compare digests\n return False", "def isDirty(self):\n\t#@DEBUG christophe have to fix denoising optionnal issue prior to set isDirty() to True\n return False", "def test_value_change(self):\n before = self.data.diffusion_data[:, :, 0, 0]\n after = module_05.run_module(self.data).diffusion_data[:, :, 0, 0]\n self.assertFalse(np.all(before == after))", "def is_outdated(self):\n\n if not self.is_done:\n return False\n elif not (self.input_files and self.output_files):\n return False\n\n return fileutils.modified_after(self.input_files, self.output_files)", "def check_reorg(provider=None):\n web3 = None\n saved_block_number = Daemon.get_solo().block_number\n\n try:\n web3 = Web3Service(provider=provider).web3\n if web3.isConnected():\n current_block_number = web3.eth.blockNumber\n else:\n raise Exception()\n except:\n raise NetworkReorgException('Unable to get block number from current node. 
Check the node is up and running.')\n\n if current_block_number >= saved_block_number:\n # check last saved block hash haven't changed\n blocks = Block.objects.all().order_by('-block_number')\n if blocks.count():\n # check if there was reorg\n for block in blocks:\n try:\n node_block_hash = remove_0x_head(web3.eth.getBlock(block.block_number)['hash'])\n except:\n raise UnknownBlockReorg\n if block.block_hash == node_block_hash:\n # if is last saved block, no reorg\n if block.block_number == saved_block_number:\n return False, None\n else:\n # there was a reorg from a saved block, we can do rollback\n return True, block.block_number\n\n # Exception, no saved history enough\n errors = {\n 'saved_block_number': saved_block_number,\n 'current_block_number': current_block_number,\n 'las_saved_block_hash': blocks[0].block_hash\n }\n raise NoBackup(message='Not enough backup blocks, reorg cannot be rollback', errors=errors)\n\n else:\n # No backup data\n return False, None\n else:\n # check last common block hash haven't changed\n blocks = Block.objects.filter(block_number__lte=current_block_number).order_by('-block_number')\n if blocks.count():\n # check if there was reorg\n for block in blocks:\n try:\n node_block_hash = remove_0x_head(web3.eth.getBlock(block.block_number)['hash'])\n except:\n raise UnknownBlockReorg\n if block.block_hash == node_block_hash:\n # if is last saved block, no reorg\n if block.block_number == saved_block_number:\n return False, None\n else:\n # there was a reorg from a saved block, we can do rollback\n return True, block.block_number\n\n # Exception, no saved history enough\n errors = {\n 'saved_block_number': saved_block_number,\n 'current_block_number': current_block_number,\n 'las_saved_block_hash': blocks[0].block_hash\n }\n raise NoBackup(message='Not enough backup blocks, reorg cannot be rollback', errors=errors)\n else:\n # No backup data\n return False, None", "def updated(self):\n return self._dict_hash != self.gen_model_hash(self.json(sort_keys=True))", "def _blocks_removed_from_top_level_config(self):\n return any(name not in self._config.blocks for name in self._cached_config.blocks)", "def original_modified(self):\n if self.modified > self.created:\n return True\n else:\n return False", "def is_chain_valid(self, chain):\r\n previous_block = chain[0]\r\n block_index = 1\r\n while block_index < len(chain):\r\n block = chain[block_index]\r\n if block['previous_hash'] != self.hash(previous_block):\r\n return False\r\n previous_proof = previous_block['proof']\r\n proof = block['proof']\r\n hash_operation = self.hash(block)\r\n if hash_operation[:4] != '0000':\r\n return False\r\n previous_block = block\r\n block_index += 1\r\n return True", "def detect_paramchange(self,t_final):\n id1 = np.searchsorted(self.shift_times,t_final)-1\n if id1 != self.current_region:\n return True\n else:\n return False", "def mapCheck(block, posMap, changeX, changeY):\n\n # remove block from posMap\n mapDel(block, posMap)\n for (x, y) in block.coords:\n\n # check if there will not be index error\n if x + block.x + changeX < len(posMap[0]) and y + block.y + changeY < len(posMap):\n\n \"\"\" if a block exists in (x + block.x + changeX, y + block.y + changeY)\n add back to posMap\n return True\n \"\"\"\n if posMap[y + block.y + changeY][x + block.x + changeX]:\n mapAdd(block, posMap)\n return True\n else: # if index error -> return True\n return True\n\n mapAdd(block, posMap)\n return False", "def valid_chain(self, chain):\n\n last_block = chain[0]\n current_index = 1\n\n while 
current_index < len(chain):\n block = chain[current_index]\n print(f'{last_block}')\n print(f'{block}')\n print(\"\\n-----------\\n\")\n # Check that the hash of the block is correct\n last_block_hash = self.hash(last_block)\n if block['previous_hash'] != last_block_hash:\n return False\n\n # Check that the Proof of Work is correct\n if not self.valid_proof(last_block['proof'], block['proof'], last_block_hash):\n return False\n\n last_block = block\n current_index += 1\n\n return True", "def isChanged(self, p_int): # real signature unknown; restored from __doc__\n return False", "def has_been_modified(self):\n return self._has_been_modified", "def _check_by_changing():\n current_settings = read_from_archive(\n archive_path, TRAINING_SETTINGS_FILENAME\n )\n\n is_changed = False\n\n for key, obj in current_settings.items():\n if key == \"mark_up_source\":\n if obj != training_settings[key]:\n is_changed = True\n break\n elif key == \"bug_resolution\":\n current_metrics = {resolution[\"value\"] for resolution in obj}\n new_metrics = {\n resolution[\"value\"]\n for resolution in training_settings[\"bug_resolution\"]\n }\n if current_metrics.difference(new_metrics):\n is_changed = True\n break\n else:\n old_areas_of_testing = {\n entity[\"area_of_testing\"]: entity[\"entities\"]\n for entity in obj\n }\n new_areas_of_testing = {\n entity[\"area_of_testing\"]: entity[\"entities\"]\n for entity in training_settings[key]\n }\n for iteration, key_ in enumerate(old_areas_of_testing, 1):\n if key_ not in new_areas_of_testing or set(\n old_areas_of_testing[key_]\n ).difference(set(new_areas_of_testing[key_])):\n is_changed = True\n break\n\n if is_changed:\n delete_training_data(archive_path)", "def verify_chain():\n for (index, block) in enumerate(blockchain):\n if index == 0:\n continue\n if block['previous_hash'] != hash_block(blockchain[index - 1]):\n return False\n # Here [:-1] excludes the reward from being a part of validation\n if not valid_proof(block['transactions'][:-1], block['previous_hash'], block['proof']):\n print('Proof of work is invalid.')\n return False\n return True", "def valid_chain(self, chain):\n last_block = chain[0]\n current_index = 1\n\n while current_index < len(chain):\n block = chain[current_index]\n # print(f'{last_block}')\n # print(f'{block}')\n # print(\"\\n-----------\\n\")\n # Check that the hash of the block is correct\n last_block_hash = self.hash(last_block)\n if block['previous_hash'] != self.hash(last_block):\n return False\n\n # Check that the Proof of Work is correct\n if not self.valid_proof(last_block['proof'], block['proof'], last_block_hash):\n return False\n\n last_block = block\n current_index += 1\n\n return True" ]
[ "0.74993265", "0.72713685", "0.71471745", "0.69462794", "0.6817872", "0.6817872", "0.6773962", "0.67605287", "0.6748862", "0.6742866", "0.6739253", "0.6687307", "0.6665253", "0.6467874", "0.64111555", "0.6352307", "0.6308329", "0.6284981", "0.62747335", "0.62637866", "0.62513506", "0.6191631", "0.619035", "0.6181089", "0.6150997", "0.61486435", "0.6116824", "0.6093346", "0.6073372", "0.6063434", "0.60451984", "0.60082483", "0.60001355", "0.59979284", "0.5982589", "0.59482396", "0.5945424", "0.5932031", "0.59282464", "0.59163153", "0.5911072", "0.5902259", "0.58716255", "0.5867321", "0.58519006", "0.5841291", "0.58390915", "0.5821051", "0.5815257", "0.5809011", "0.580505", "0.5800269", "0.5786991", "0.5760464", "0.5743998", "0.57359034", "0.57319957", "0.57225823", "0.5718428", "0.57166463", "0.57108104", "0.5710156", "0.5685464", "0.5673052", "0.5668076", "0.56569064", "0.565638", "0.565476", "0.5654035", "0.56526816", "0.5640146", "0.5637926", "0.5637616", "0.5631291", "0.56311464", "0.5617511", "0.5617197", "0.56146073", "0.5613381", "0.56078464", "0.5606888", "0.5599084", "0.55933243", "0.55897796", "0.55894697", "0.55873424", "0.55848145", "0.5583909", "0.5579972", "0.5568794", "0.5566323", "0.55633426", "0.55620337", "0.55515194", "0.5540359", "0.5536778", "0.5536345", "0.55292094", "0.55231255", "0.5519332" ]
0.59465235
36
Randomly split a dataset into nonoverlapping new datasets of given lengths.
def transform_random_split(dataset, lengths, transforms=None):
    if sum(lengths) != len(dataset):
        raise ValueError("Sum of input lengths does not equal the length of the input dataset!")
    if transforms is None:
        transforms = [None] * len(lengths)
    indices = torch.randperm(sum(lengths))
    return [TransformSubset(dataset, indices[offset - length:offset], transform)
            for offset, length, transform in zip(torch._utils._accumulate(lengths), lengths, transforms)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_split(dataset, lengths):\n if sum(lengths) != len(dataset):\n raise ValueError(\"Sum of input lengths does not equal the length of the input dataset!\")\n\n indices = randperm(sum(lengths))\n return [Subset(dataset, indices[offset - length:offset]) for offset, length in zip(_accumulate(lengths), lengths)]", "def train_test_split(dataset, split):\r\n train = list()\r\n train_size = split * len(dataset)\r\n dataset_copy = list(dataset) \r\n while len(train) < train_size:\r\n index = randrange(len(dataset_copy))\r\n train.append(dataset_copy.pop(index))\r\n return train, dataset_copy", "def randsplit(data, sections=2):\n ret = [[] for i in range(sections)]\n for item in data:\n ret[random.randrange(sections)].append(item)\n return ret", "def split_dataset(dataset, n, seed=0):\n assert n <= len(dataset)\n keys = list(range(len(dataset)))\n np.random.RandomState(seed).shuffle(keys)\n keys_1 = keys[:n]\n keys_2 = keys[n:]\n return _SplitDataset(dataset, keys_1), _SplitDataset(dataset, keys_2)", "def split_dataset(dataset, n, seed=0):\n assert(n <= len(dataset))\n keys = list(range(len(dataset)))\n np.random.RandomState(seed).shuffle(keys)\n keys_1 = keys[:n]\n keys_2 = keys[n:]\n return _SplitDataset(dataset, keys_1), _SplitDataset(dataset, keys_2)", "def split_data(self):\r\n print('split data')\r\n np.random.shuffle(self.dataList)\r\n l = len(self.dataList)/self.fold\r\n self.dataList = [self.dataList[i*l: (i+1)*l] for i in range(self.fold-1)] + [self.dataList[(self.fold-1)*l:]] # each element in the list is splitted data list\r", "def split_data(dataset, test_size=0.5):\n shuffled_data = np.random.RandomState(seed=721).permutation(dataset)\n train_set = shuffled_data[: int(len(dataset) * (1 - test_size)), :]\n test_set = shuffled_data[int(len(dataset) * (1 - test_size)):, :]\n return train_set, test_set", "def split_dataset(dataset, eval_proportion, shuffle=False):\n split_sizes = [1. 
- eval_proportion, eval_proportion]\n split_frames = []\n split_demos = []\n num_demos = dataset.get_num_demos()\n split_num_demos = [int(fraction * num_demos) for fraction in split_sizes]\n split_num_demos[0] += num_demos - sum(split_num_demos)\n num_instances = len(dataset)\n demos = list(range(num_demos))\n if shuffle:\n np.random.shuffle(demos)\n start_idx = 0\n for split_idx in range(len(split_sizes)):\n if split_sizes[split_idx] == 0:\n split_frames.append(None)\n continue\n split_frames.append([])\n split_demos.append(range(start_idx, start_idx + split_num_demos[split_idx]))\n for demo_idx in split_demos[split_idx]:\n demo_slice = dataset.get_demo_frame_idxs(demos[demo_idx])\n split_frames[split_idx].extend(\n list(range(demo_slice.start, demo_slice.stop)))\n start_idx += split_num_demos[split_idx]\n # Check if the split indices are unique\n assert len(set(split_frames[split_idx])) == len(split_frames[split_idx])\n\n if eval_proportion > 0:\n # Check that splits do not intersect\n for split_idx in range(len(split_frames)):\n for split_idx2 in range(split_idx + 1, len(split_frames)):\n assert len(set(split_frames[split_idx]).intersection(split_frames[split_idx2])) == 0\n assert sum([len(s) for s in split_frames]) == num_instances\n\n split_datasets = [Subset(dataset, split) if split is not None else None for split in split_frames]\n return split_datasets", "def splitData(data, class_label, seed, ratio):\n\t\n\trandom.seed(seed)\n\tsubset = data.clone()\n\tsize_data = subset.data.shape[0]\n\tn = int(np.floor(size_data * ratio)) # number of datasets in train\n\tindex = random.sample(range(1, size_data), n)\n\tsplit_list = [item for item in [0] for i in range(size_data)]\n\t\n\tfor i in index:\n\t\tsplit_list[i]=1\n\t\n\treturn split_list #returns list of indeces where 0 is test and 1 is training data ", "def resplit_datasets(dataset, other_dataset, random_seed=None, split=None):\n # Prevent circular dependency\n from torchnlp.datasets import Dataset\n\n concat = dataset.rows + other_dataset.rows\n shuffle(concat, random_seed=random_seed)\n if split is None:\n return Dataset(concat[:len(dataset)]), Dataset(concat[len(dataset):])\n else:\n split = max(min(round(len(concat) * split), len(concat)), 0)\n return Dataset(concat[:split]), Dataset(concat[split:])", "def random_split(self, nr_agents):\n np.random.seed(self.random_seed)\n # Get random indices\n indices = sorted(np.random.randint(0, high=self.samples.shape[0], size=nr_agents - 1).tolist())\n indices = [0] + indices\n indices += [self.samples.shape[0]]\n\n self.samples = self.partition(self.samples, indices, nr_agents)\n self.labels = self.partition(self.labels, indices, nr_agents)", "def split_dataset(df_playlists, df_interactions):\n df_train_pl, cat_pids = generate_train(df_playlists)\n df_test_pl, df_test_itr, df_eval_itr, df_train_itr = generate_test(cat_pids, df_playlists, df_interactions)\n\n return df_train_pl, df_train_itr, df_test_pl, df_test_itr, df_eval_itr", "def partition(data, n):\n splits = []\n remaining = data.copy(deep=True)\n for i in range(n):\n split = remaining.sample(frac=1/(n-i), random_state=10)\n splits.append(split)\n remaining = remaining.drop(split.index)\n return splits", "def split_dataset(dataset, test_size):\n train_data = dataset.skip(test_size).shuffle(SHUFFLE_BUFFER_SIZE)\n train_data = train_data.padded_batch(BATCH_SIZE)\n \n test_data = dataset.take(test_size)\n test_data = test_data.padded_batch(BATCH_SIZE)\n \n return train_data, test_data", "def assign_segments_to_random_blocks(shape, 
seg_lengths):\n seg_mask = np.zeros(shape, dtype='int')\n seg_lengths_sorted = sorted(seg_lengths, reverse=True)\n for i, seg_len in enumerate(seg_lengths_sorted):\n loc = np.random.randint(0, seg_mask.size)\n while (sum(seg_mask[loc:loc + seg_len]) != 0) or (loc + seg_len > seg_mask.size): # ensure that the segment\n loc = np.random.randint(0, seg_mask.size)\n seg_mask[loc:loc + seg_len] = i + 1\n return seg_mask", "def split_data(test_data, split_ratio):\n split_index = int(split_ratio * len(test_data))\n \n # randomly permute the values in place\n random.shuffle(test_data)\n \n # take slices of the determined size\n training_set = copy.copy(test_data[:split_index])\n test_data = copy.copy(test_data[split_index:])\n\n return training_set, test_data", "def split_datasets(img_lst):\n num = len(img_lst)\n\n idx = np.random.permutation(num)\n train_lst = np.array(img_lst)[idx[:int(num * .8)]] # 80/20 split\n validation_lst = np.array(img_lst)[idx[int(num * .8):int(num * .9)]]\n test_lst = np.array(img_lst)[idx[int(num * .9):]]\n return train_lst, validation_lst, test_lst", "def subsampleData(self, count):\n size = 0\n for block in self.blocks: size += len(block[1])\n subset = numpy.random.permutation(size)[:count]\n subset.sort()\n\n pos = 0\n index = 0\n ret = Dataset()\n for block in self.blocks:\n while subset[index]<(pos+len(block[1])):\n loc = subset[index] - pos\n ret.add(block[0][loc,:], block[1][loc])\n index += 1\n if index==subset.shape[0]: return ret\n pos += len(block[1])\n \n return ret", "def split_dataset(samples, values, ndata1):\n assert ndata1 <= samples.shape[1]\n assert values.shape[0] == samples.shape[1]\n II = np.random.permutation(samples.shape[1])\n samples1 = samples[:, II[:ndata1]]\n samples2 = samples[:, II[ndata1:]]\n values1 = values[II[:ndata1], :]\n values2 = values[II[ndata1:], :]\n return samples1, samples2, values1, values2", "def split_dataset(X, Y, train_size=0.8):\n if train_size != 1.0:\n return train_test_split(\n X, Y,\n train_size=train_size,\n stratify=Y\n )\n else:\n X_, Y_ = shuffle(\n X, Y\n )\n return X_, [], Y_, []", "def test_split(self):\n array = np.arange(1000)\n df = DataFlow.from_numpy(array)\n\n # first, test throw errors on invalid arguments\n def assert_invalid_arg(**kwargs):\n with self.assertRaises(ValueError):\n df.split(**kwargs)\n assert_invalid_arg(partitions=[])\n assert_invalid_arg(partitions=[1000, 1])\n assert_invalid_arg(partitions=[1000, -1])\n assert_invalid_arg(partitions=[1, 2])\n assert_invalid_arg(portions=[])\n assert_invalid_arg(portions=[1.0, 0.1])\n assert_invalid_arg(portions=[1.0, -1])\n assert_invalid_arg(portions=[0.1, 0.2])\n\n # next, test split without shuffling\n df1, df2, df3 = df.split(partitions=[700, 200, 100])\n np.testing.assert_array_equal(df1.all()[0], array[:700])\n np.testing.assert_array_equal(df2.all()[0], array[700:900])\n np.testing.assert_array_equal(df3.all()[0], array[900:1000])\n df1, df2, df3 = df.split(portions=[-1, 0.2, 0.1])\n np.testing.assert_array_equal(df1.all()[0], array[:700])\n np.testing.assert_array_equal(df2.all()[0], array[700:900])\n np.testing.assert_array_equal(df3.all()[0], array[900:1000])\n\n # finally, test split with shuffling\n df1, df2 = df.split(portions=[0.5, -1], shuffle=True)\n self.assertEquals(len(df1), 500)\n self.assertEquals(len(df2), 500)\n df_array = np.concatenate([df1.all()[0], df2.all()[0]], axis=0)\n self.assertFalse(np.all(df_array == array))\n np.testing.assert_array_equal(np.sort(df_array), array)", "def split_data(data, labels, proportion):\n size 
= data.shape[0]\n np.random.seed(42)\n s = np.random.permutation(size)\n split_idx = int(proportion * size)\n return (data[s[:split_idx]], data[s[split_idx:]], labels[s[:split_idx]], labels[s[split_idx:]])", "def shuffle_and_split_data(X_genesets, y, train_size, validate_size):\n permutation = np.random.permutation(y.size)\n y_permuted = y[permutation]\n X_genesets_permuted = [Xg[permutation, :] for Xg in X_genesets]\n X_groups_train = [Xg[0:train_size, :] for Xg in X_genesets_permuted]\n X_groups_validate = [Xg[train_size: validate_size + train_size, :] for Xg in X_genesets_permuted]\n X_groups_test = [Xg[validate_size + train_size:, :] for Xg in X_genesets_permuted]\n y_train = y_permuted[0:train_size]\n y_validate = y_permuted[train_size: validate_size + train_size]\n y_test = y_permuted[validate_size + train_size:]\n return X_groups_train, y_train, X_groups_validate, y_validate, X_groups_test, y_test", "def split_data(data, labels, val_size):\n # Shuffle index\n index = np.random.permutation(len(data))\n\n # Split into Datasets\n X_val = data[index][-val_size:]\n X_train = data[index][:-val_size]\n y_val = labels[index][-val_size:].ravel()\n y_train = labels[index][:-val_size].ravel()\n\n return X_train, X_val, y_train, y_val", "def uniform_split(self, nr_agents):\n indices = np.linspace(start=0, stop=self.samples.shape[0], num=nr_agents + 1, dtype=int).tolist()\n\n self.samples = self.partition(self.samples, indices, nr_agents)\n self.labels = self.partition(self.labels, indices, nr_agents)", "def split_data(df: pd.DataFrame):\n size = int(df.shape[0] * 0.8)\n indexes = np.random.choice(df.index, size, replace=False)\n train_set = df.loc[indexes]\n test_set = df.loc[~df.index.isin(indexes)]\n return train_set, test_set", "def create_dataset_splits(n, p=1.0):\n\tperm = np.random.permutation(n).tolist()\n\tidx = int(p * n)\n\treturn perm[:idx]", "def split_simplified_json_acceptor_dataset(dataset: SimpleJsonAcceptorDataset, split_list):\n import numpy as np\n # create a list of lengths [0.1, 0.4, 0.5] -> [100, 500, 1000(=len_data)]\n split_list = np.multiply(np.cumsum(split_list), len(dataset)).astype(\"int\").tolist()\n # list of shuffled indices to sample randomly\n shuffled_idx = list(range(len(dataset)))\n shuffle(shuffled_idx)\n # split the data itself\n new_data = [[] for _ in range(len(split_list))]\n for sub_data_idx, (start, end) in enumerate(zip([0] + split_list[:-1], split_list)):\n for i in range(start, end):\n new_data[sub_data_idx].append(dataset.__getitem__(shuffled_idx[i]))\n # create sub sets\n sub_datasets = []\n for i in range(len(new_data)):\n ready_dict = {\n \"_idx_to_chr\": dataset._idx_to_chr,\n \"_chr_embed\": dataset._chr_embed,\n \"data\": new_data[i]\n }\n sub_datasets.append(SimpleJsonAcceptorDataset(dataset._size, ready=ready_dict))\n return sub_datasets", "def split_data(x, y, ratio, seed=1):\n # set seed\n np.random.seed(seed)\n packed = np.vstack([y,x]).T\n np.random.shuffle(packed)\n N = y.shape[0]\n eightyN = int(ratio*N)\n xTrain = packed[0:eightyN,1]\n yTrain = packed[0:eightyN,0]\n xTest = packed[eightyN:N, 1]\n yTest = packed[eightyN:N,0]\n # ***************************************************\n # INSERT YOUR CODE HERE\n # split the data based on the given ratio: TODO\n # ***************************************************\n return xTrain, yTrain, xTest, yTest", "def split_dataset(self, split):\n trunk_pos_size = math.ceil((1 - split) * len(self.Pos))\n trunk_neg_size = math.ceil((1 - split) * len(self.Neg))\n trunk_num = int(1 / (1 - split))\n 
pos_temp = list()\n neg_temp = list()\n for index in range(trunk_num):\n pos_temp.append(self.Pos[index * trunk_pos_size:(index + 1) *\n trunk_pos_size])\n neg_temp.append(self.Neg[index * trunk_neg_size:(index + 1) *\n trunk_neg_size])\n self.test = pos_temp.pop(2) + neg_temp.pop(2)\n # self.train = [i for item in pos_temp + neg_temp for i in item]\n self.train = []\n for item in pos_temp + neg_temp:\n for i in item:\n self.train.append(i)\n\n random.shuffle(self.train)\n random.shuffle(self.test)", "def mnist_custom_split(split_ratio=0.8, random_seed=0, shuffle_dataset=True, dataset='mnist'):\n if dataset[:5] == 'mnist':\n dataset = datasets.MNIST(definitions.DATA_PATH)\n elif dataset[:6] == 'hmnist':\n dataset = datasets.DatasetFolder(definitions.HMNIST_DATA_FOLDER, data_loader, ALL_EXTS),\n elif dataset[:8] == 'diamonds':\n dataset = datasets.DatasetFolder(definitions.DIAMONDS_DATA_FOLDER, data_loader, ALL_EXTS),\n else:\n print('[ERROR] Unknown dataset for split_and_train! => %s' % dataset)\n exit(1)\n\n dataset_size = len(dataset)\n\n indices = list(range(dataset_size))\n split = int(np.floor(split_ratio * dataset_size))\n logger.debug('Split dataset {}'.format(split))\n if shuffle_dataset:\n np.random.seed(random_seed)\n np.random.shuffle(indices)\n # ==> Mistakes\n # train_indices, val_indices = indices[split:], indices[:split]\n train_indices, val_indices = indices[:split], indices[split:]\n\n # Creating PT data samplers and loaders:\n train_sampler = torch.utils.data.SubsetRandomSampler(train_indices)\n valid_sampler = torch.utils.data.SubsetRandomSampler(val_indices)\n\n return train_sampler, valid_sampler", "def split_cv(length, num_folds):\n splits = [SplitIndices([], []) for _ in range(num_folds)]\n indices = list(range(length))\n random.shuffle(indices)\n fold_length = (int)(length / num_folds)\n\n for y in range(1, num_folds + 1):\n fold_n = 1\n counter = 0\n for x in indices:\n if fold_n == y:\n splits[y - 1].test.append(x)\n else:\n splits[y - 1].train.append(x)\n counter = counter + 1\n if counter % fold_length == 0:\n fold_n = fold_n + 1\n return splits", "def some_simple_data(length=1000000):\n data = list(range(length))\n random.shuffle(data)\n return data", "def test_split_data(self):\n Xlists = tuple([[np.zeros((200,9)) for b in range(14)] for c in range(9)])\n ybinarylists = [np.zeros((14,12)) for c in range(9)]\n indices = slice(7, 9)\n x_test, y_test = tutorial_pamap2.split_data(Xlists, ybinarylists, \\\n indices)\n test = y_test[0].shape == (12,) and x_test[0].shape == (200, 9)\n assert test", "def split_dataset(samples, ratio=0.8):\n nsamples = len(samples)\n num_train = int(ratio*nsamples)\n\n # shuffle samples\n shuffle(samples)\n\n trainset = samples[:num_train]\n testset = samples[num_train:]\n\n return trainset, testset", "def split_dataset(dataset: torch.utils.data.Dataset, split_perc: float = 0.20):\n assert (split_perc >= 0.0) and (split_perc <= 1.0), (\n f\"FATAL ERROR: invalid split_perc value {split_perc}.\" f\"Expecting float >= 0.0 and <= 1.0\"\n )\n\n if split_perc > 0.0:\n num_recs = len(dataset)\n train_count = int((1.0 - split_perc) * num_recs)\n test_count = num_recs - train_count\n train_dataset, test_dataset = torch.utils.data.random_split(dataset, [train_count, test_count])\n return train_dataset, test_dataset\n else:\n return dataset, None", "def train_test_split_drifters():\n df = process_raw_df()\n ids = np.unique(df.index.get_level_values(level=0))\n rng = np.random.default_rng(seed=1)\n train_ids = np.sort(rng.choice(ids, 
size=len(ids)//2, replace=False))\n test_ids = np.sort(np.setdiff1d(ids, train_ids))\n train_df = df[df.index.get_level_values(level=0).isin(train_ids)].copy()\n test_df = df[df.index.get_level_values(level=0).isin(test_ids)].copy()\n return train_df, test_df", "def create_fixed_length_data(data, length, label, verbose=False):\n result = []\n labels = []\n\n for (idx, sample) in enumerate(data):\n if sample.shape[0] >= length:\n result.append(np.delete(sample[len(sample)-length:], 0, 1))\n labels.append(label)\n else:\n if verbose: print(\"Throw %d/%d (size: %d) dumped\" % (idx, len(data)-1, sample.shape[0]))\n return result, labels", "def test_n_group_split(self):\n # Test 2 groups like HalfSplitter first\n hs = NGroupPartitioner(2)\n\n for isreversed, splitter in enumerate((hs, hs)):\n if isreversed:\n spl = Splitter(attr='partitions', reverse=True)\n else:\n spl = Splitter(attr='partitions')\n splits = [ list(spl.generate(p)) for p in hs.generate(self.data) ]\n self.assertTrue(len(splits) == 2)\n\n for i, p in enumerate(splits):\n self.assertTrue( len(p) == 2 )\n self.assertTrue( p[0].nsamples == 50 )\n self.assertTrue( p[1].nsamples == 50 )\n\n assert_array_equal(splits[0][1-isreversed].sa['chunks'].unique,\n [0, 1, 2, 3, 4])\n assert_array_equal(splits[0][isreversed].sa['chunks'].unique,\n [5, 6, 7, 8, 9])\n assert_array_equal(splits[1][1-isreversed].sa['chunks'].unique,\n [5, 6, 7, 8, 9])\n assert_array_equal(splits[1][isreversed].sa['chunks'].unique,\n [0, 1, 2, 3, 4])\n\n # check if it works on pure odd and even chunk ids\n moresplits = [ list(spl.generate(p)) for p in hs.generate(splits[0][0])]\n\n for split in moresplits:\n self.assertTrue(split[0] != None)\n self.assertTrue(split[1] != None)\n\n # now test more groups\n s5 = NGroupPartitioner(5)\n\n # get the splits\n for isreversed, s5splitter in enumerate((s5, s5)):\n if isreversed:\n spl = Splitter(attr='partitions', reverse=True)\n else:\n spl = Splitter(attr='partitions')\n splits = [ list(spl.generate(p)) for p in s5splitter.generate(self.data) ]\n\n # must have 10 splits\n self.assertTrue(len(splits) == 5)\n\n # check split content\n assert_array_equal(splits[0][1-isreversed].sa['chunks'].unique,\n [0, 1])\n assert_array_equal(splits[0][isreversed].sa['chunks'].unique,\n [2, 3, 4, 5, 6, 7, 8, 9])\n assert_array_equal(splits[1][1-isreversed].sa['chunks'].unique,\n [2, 3])\n assert_array_equal(splits[1][isreversed].sa['chunks'].unique,\n [0, 1, 4, 5, 6, 7, 8, 9])\n # ...\n assert_array_equal(splits[4][1-isreversed].sa['chunks'].unique,\n [8, 9])\n assert_array_equal(splits[4][isreversed].sa['chunks'].unique,\n [0, 1, 2, 3, 4, 5, 6, 7])\n\n\n # Test for too many groups\n def splitcall(spl, dat):\n return list(spl.generate(dat))\n s20 = NGroupPartitioner(20)\n self.assertRaises(ValueError,splitcall,s20,self.data)", "def split_data(self, data, start, interval, shuffle=True, seed=0):\n if shuffle:\n random.seed(seed) # fix to default seed 0\n random.shuffle(data)\n\n data_1 = np.append(data[:start],data[start+interval:])\n data_2 = data[start:start+interval]\n\n return data_1, data_2", "def prepareDataBatches(self, traindata, trainlabel):\n index = np.random.permutation(len(traindata))\n traindata = traindata[index]\n trainlabel = trainlabel[index]\n split_no = int(len(traindata) / self.batchSize)\n return zip(np.split(traindata[:split_no*self.batchSize], split_no), np.split(trainlabel[:split_no*self.batchSize], split_no))", "def _shuffle(df, indices, chunksize):\n i = 0\n partition = []\n while len(indices) > chunksize:\n oids 
= df.reindex(indices[:chunksize])\n partition.append(oids)\n indices = indices[chunksize:]\n i += 1\n else:\n oids = df.reindex(indices)\n partition.append(oids)\n return partition", "def split_data(data, prob):\n results = [], []\n for row in data:\n results[0 if random.random() < prob else 1].append(row)\n return results", "def splitData(groupList, trainSize):\r\n from sklearn.model_selection import StratifiedShuffleSplit\r\n\r\n groupList[0]['text'] = cleanRealTexts(list(groupList[0]['text']))\r\n\r\n classLabels = np.array([])\r\n for i, group in enumerate(groupList):\r\n classLabels = np.append(classLabels, np.repeat(i, len(group)))\r\n\r\n classData = pd.concat(groupList).reset_index(drop=True)\r\n\r\n splits = list(StratifiedShuffleSplit(n_splits=i,\r\n test_size=1-trainSize,\r\n train_size=trainSize,\r\n random_state=0).split(X=classData, y=classLabels))[0]\r\n trainIdx, testIdx = splits\r\n\r\n trainData = classData.iloc[trainIdx]\r\n testData = classData.iloc[testIdx]\r\n trainLabels = classLabels[trainIdx]\r\n testLabels = classLabels[testIdx]\r\n\r\n return [[trainData, trainLabels], [testData, testLabels]]", "def build_splits(dataset, train_size, valid_size, by=['context_id'], seed=17):\n if isinstance(seed, RandomState):\n rng = seed\n else:\n rng = RandomState(seed)\n\n groups = dataset.groupby(by).groups\n context_ids = groups.keys()\n\n train_ids, other_ids = sklearn.cross_validation.train_test_split(\n context_ids, train_size=train_size, random_state=rng)\n valid_ids, test_ids = sklearn.cross_validation.train_test_split(\n other_ids, train_size=valid_size, random_state=rng)\n\n train_idx = context_id_to_idx(train_ids, groups)\n valid_idx = context_id_to_idx(valid_ids, groups)\n test_idx = context_id_to_idx(test_ids, groups)\n\n return dataset.ix[train_idx, :], dataset.ix[valid_idx, :], dataset.ix[test_idx, :]", "def split_data(dataset, ratio = 0.9):\n cutoff_row = int(dataset.shape[0] * ratio)\n return (dataset[:cutoff_row], dataset[cutoff_row:])", "def split_data(tweets, validation_split=0.1):\n index = int((1 - validation_split) * len(tweets))\n random.shuffle(tweets)\n return tweets[:index], tweets[index:]", "def partition_dataset(\n data: Sequence,\n ratios: Sequence[float] | None = None,\n num_partitions: int | None = None,\n shuffle: bool = False,\n seed: int = 0,\n drop_last: bool = False,\n even_divisible: bool = False,\n):\n data_len = len(data)\n datasets = []\n\n indices = list(range(data_len))\n if shuffle:\n # deterministically shuffle based on fixed seed for every process\n rs = np.random.RandomState(seed)\n rs.shuffle(indices)\n\n if ratios:\n next_idx = 0\n rsum = sum(ratios)\n for r in ratios:\n start_idx = next_idx\n next_idx = min(start_idx + int(r / rsum * data_len + 0.5), data_len)\n datasets.append([data[i] for i in indices[start_idx:next_idx]])\n return datasets\n\n if not num_partitions:\n raise ValueError(\"must specify number of partitions or ratios.\")\n # evenly split the data without ratios\n if not even_divisible and drop_last:\n raise RuntimeError(\"drop_last only works when even_divisible is True.\")\n if data_len < num_partitions:\n raise RuntimeError(f\"there is no enough data to be split into {num_partitions} partitions.\")\n\n if drop_last and data_len % num_partitions != 0:\n # split to nearest available length that is evenly divisible\n num_samples = math.ceil((data_len - num_partitions) / num_partitions)\n else:\n num_samples = math.ceil(data_len / num_partitions)\n # use original data length if not even divisible\n total_size = 
num_samples * num_partitions if even_divisible else data_len\n\n if not drop_last and total_size - data_len > 0:\n # add extra samples to make it evenly divisible\n indices += indices[: (total_size - data_len)]\n else:\n # remove tail of data to make it evenly divisible\n indices = indices[:total_size]\n\n for i in range(num_partitions):\n _indices = indices[i:total_size:num_partitions]\n datasets.append([data[j] for j in _indices])\n\n return datasets", "def split_data(data, prob):\n\tresults = [], []\n\tfor row in data:\n\t\tresults[0 if random.random() < prob else 1].append(row)\n\treturn results", "def split_data(data, prob):\n\tresults = [], []\n\tfor row in data:\n\t\tresults[0 if random.random() < prob else 1].append(row)\n\treturn results", "def split_simple_json_language_model_dataset(dataset: SimpleJsonLanguageModelDataset, split_list):\n import numpy as np\n # create a list of lengths [0.1, 0.4, 0.5] -> [100, 500, 1000(=len_data)]\n split_list = np.multiply(np.cumsum(split_list), len(dataset)).astype(\"int\").tolist()\n # list of shuffled indices to sample randomly\n shuffled_idx = list(range(len(dataset)))\n shuffle(shuffled_idx)\n # split the data itself\n new_data = [[] for _ in range(len(split_list))]\n for sub_data_idx, (start, end) in enumerate(zip([0] + split_list[:-1], split_list)):\n for i in range(start, end):\n new_data[sub_data_idx].append(dataset.__getitem__(shuffled_idx[i]))\n # create sub sets\n sub_datasets = []\n for i in range(len(new_data)):\n ready_dict = {\n \"_labels\": dataset._labels,\n \"_label_to_idx\": dataset._label_to_idx,\n \"_chr_embed\": dataset._chr_embed,\n \"_idx_to_chr\": dataset._idx_to_chr,\n \"data\": new_data[i],\n }\n sub_datasets.append(SimpleJsonLanguageModelDataset(dataset._size, ready=ready_dict))\n return sub_datasets", "def split_dataset(dataset, Ntotal, val_frac,\n batch_size, num_workers,\n random_seed=0, shuffle=True, balance=False):\n \n Nval = math.floor(Ntotal*val_frac)\n train_ds, val_ds = ch.utils.data.random_split(dataset, \n [Ntotal - Nval, Nval], \n generator=ch.Generator().manual_seed(random_seed))\n if balance: \n val_ds = balance_dataset(val_ds)\n split_datasets = [train_ds, val_ds]\n \n split_loaders = []\n for ds in split_datasets:\n split_loaders.append(ch.utils.data.DataLoader(ds, \n num_workers=num_workers, \n batch_size=batch_size, \n shuffle=shuffle))\n return split_datasets, split_loaders", "def split_samples(data):\n\n training_samples = data[0:9497]\n test_samples = data[9497:11300]\n\n return training_samples, test_samples", "def chunks(data, n):\n newn = int(len(data) / n) # chunk size \n \n for i in range(0, n-1):\n test_chunk = data[i*newn:i*newn+newn]\n train_chunk = [el for el in data if el not in test_chunk]\n yield train_chunk, test_chunk\n \n test_chunk = data[n*newn-newn:]\n train_chunk = [el for el in data if el not in test_chunk]\n \n yield train_chunk, test_chunk", "def split_dataset(x, y, seed=0):\n # split the data into label and unlabel\n x_unlabel, x_label, _, y_label = \\\n train_test_split(\n x,\n y,\n test_size=0.1,\n random_state=seed,\n )\n\n # split data into train and test data\n x_train, x_test, y_train, y_test = \\\n train_test_split(\n x_label,\n y_label,\n test_size=0.2,\n random_state=seed,\n )\n\n return Dataset(\n x_unlabel,\n Data(x_train, None, y_train),\n Data(x_test, None, y_test)\n )", "def separate(self):\n print(\"start dataset separating\")\n sum = 0\n for i in tqdm(range(len(self.itemlen))):\n il = self.itemlen[i]\n if il < 3:\n sum += il\n continue\n rarr = 
list(range(sum, sum+il))\n random.shuffle(rarr)\n self.train.append({\n 'input': self.input[rarr[0]],\n 'label': self.label[i]\n })\n self.val.append({\n 'input': self.input[rarr[1]],\n 'label': self.label[i]\n })\n for j in range(2, len(rarr)):\n self.test.append({\n 'input': self.input[rarr[j]],\n 'label': self.label[i]\n })\n sum += il", "def getSplits(df, train_size, val_size, test_size, seed=None):\n size = len(df)\n\n # size is considered a percentage if less than 1:\n train_size = int(train_size * size) if train_size < 1 else train_size\n val_size = int(val_size * size) if val_size < 1 else val_size\n test_size = int(test_size * size) if test_size < 1 else test_size\n\n if not seed is None:\n np.random.seed(seed)\n\n train_val_idx = np.random.choice(\n a=range(size),\n size=train_size + val_size,\n replace=False\n )\n train_idx = train_val_idx[:train_size]\n val_idx = train_val_idx[train_size:]\n\n train = df.iloc[train_idx]\n val = df.iloc[val_idx]\n test = df.drop(train.index).drop(val.index) # test is equal to the leftover\n\n assert len(train) + len(val) + len(test) == len(df)\n\n return train, val, test", "def split_data(y, tx, train_ratio):\n N = len(y)\n indices = np.random.permutation(N)\n \n tx = tx[indices, :]\n y = y[indices]\n \n bound = int(N * train_ratio)\n return y[:bound], tx[:bound, :], y[bound:], tx[bound:, :]", "def random_data_sample(data, seq_len, batch_size):\n\n assert isinstance(batch_size, int)\n assert isinstance(data, list)\n assert seq_len > 0\n assert batch_size > 0\n\n n = len(data)\n\n x, y = [], []\n for b in range(batch_size):\n random_idx = random.randint(0, n - seq_len - 1)\n x.append(data[random_idx:random_idx + seq_len])\n y.append(data[random_idx + 1:random_idx + seq_len + 1])\n return x, y", "def load_length_split_dataset(self, data_dir, logger=None):\n # TODO datadir required in signature?\n current_dir = Path()\n dir_path = current_dir / \"data\" / \"break_data\" / \"preprocessed\"\n file_name = \"dataset_preprocessed_length_split.pkl\"\n\n if not (dir_path / file_name).is_file():\n if logger:\n logger.info('Creating length split dataset...')\n threshold_amount_ops = 4\n\n train_filtererd = pd.DataFrame()\n validation_filtererd = pd.DataFrame()\n test_filtererd = pd.DataFrame()\n\n for i, example in enumerate(self.dataset_logical['train']):\n if example['operators'].count(',') < threshold_amount_ops:\n train_filtererd = train_filtererd.append(example, ignore_index=True)\n for i, example in enumerate(self.dataset_logical['validation']):\n if example['operators'].count(',') >= threshold_amount_ops:\n validation_filtererd = validation_filtererd.append(example, ignore_index=True)\n for i, example in enumerate(self.dataset_logical['test']):\n if example['operators'].count(',') >= threshold_amount_ops:\n test_filtererd = test_filtererd.append(example, ignore_index=True)\n\n to_save = {'train': Dataset.from_pandas(train_filtererd),\n 'validation': Dataset.from_pandas(validation_filtererd),\n 'test': Dataset.from_pandas(test_filtererd)}\n save_obj(dir_path, to_save, file_name)\n\n dataset = load_obj(dir_path, file_name)\n return dataset", "def gen_splits(n_splits, test_size, X, Y, groups=None, random_state=0):\n from sklearn.model_selection import GroupShuffleSplit\n\n gss = GroupShuffleSplit(\n n_splits=n_splits, test_size=test_size, random_state=random_state\n )\n train_test_splits = list(gss.split(X, Y, groups=groups))\n split_indices = list(range(n_splits))\n return train_test_splits, split_indices", "def split_data(num_samples, num_splits):\n\n 
kf = sklearn.model_selection.KFold(n_splits=num_splits, random_state=0);\n return kf.split(range(num_samples))", "def random_partition(n, n_data):\n all_idxs = np.arange(n_data)\n np.random.shuffle(all_idxs)\n idxs1 = all_idxs[:n]\n idxs2 = all_idxs[n:]\n return idxs1, idxs2", "def split_data_crossvalid(data):\n X_trainfolder = []\n X_testfolder = []\n y_trainfolder = []\n y_testfolder = []\n data = data[data[:, 0].argsort()]\n number_one = np.count_nonzero(data[:, :1])\n data_one = data[np.where(data[:, 0] == 1)]\n data_zero = data[np.where(data[:, 0] == 0)]\n one_ratio = round(number_one / len(data), 1)\n one_zero_ratio = 1 - one_ratio\n batch_one = int(70 * one_ratio)\n batch_zero = int(70 * one_zero_ratio)\n batchs = len(data) // 70\n for i in range(batchs):\n test_one = data_one[i * batch_one:(i + 1) * batch_one, :]\n train_one = np.delete(data_one, test_one, axis = 0)\n test_zero = data_zero[i * batch_zero:(i + 1) * batch_zero, :]\n train_zero = np.delete(data_zero, test_zero, axis = 0)\n train_sets = np.concatenate((train_one, train_zero), axis=0)\n test_sets = np.concatenate((test_one, test_zero), axis=0)\n np.random.shuffle(train_sets)\n np.random.shuffle(test_sets)\n X_trainfolder.append(train_sets[:, 1:])\n y_trainfolder.append(train_sets[:, 0])\n X_testfolder.append(test_sets[:, 1:])\n y_testfolder.append(test_sets[:, 0])\n return X_trainfolder, y_trainfolder, X_testfolder, y_testfolder", "def split_data(x, y, ratio, seed=1):\n np.random.seed(seed)\n\n N = len(y)\n rat = int(np.floor(ratio*N))\n idx = np.random.choice(np.arange(len(x)), N, replace=False)\n \n x_ = x[idx]\n y_ = y[idx]\n \n train_x = x_[:rat]\n test_x = x_[rat:]\n \n train_y = y_[:rat]\n test_y = y_[rat:]\n \n return train_x, train_y, test_x, test_y", "def split(self, num_or_size_splits, shuffle=False):\n raise NotImplementedError", "def partition_data(total_sample_size, num_groups):\n individual_sample_size = total_sample_size/num_groups\n indices = np.arange(0, total_sample_size, 1)\n groups = np.zeros(len(indices))\n for i in range(1, num_groups):\n groups[int(individual_sample_size*i):int(individual_sample_size*(i+1))] = i\n if total_sample_size % num_groups != 0:\n groups[individual_sample_size*num_groups:] = -1\n randoms = np.random.random_sample(len(groups))\n ordered_randoms_and_reordered_groups = np.array(sorted(zip(randoms, groups)))\n groups = ordered_randoms_and_reordered_groups[:, 1]\n return groups", "def __split_dataset(self):\n self.train, self.valid, _, _ = train_test_split(self.data, self.data, test_size=0.2)\n self.valid, self.test, _, _ = train_test_split(self.valid, self.valid, test_size=0.5)", "def split_list_by_num_samples(data, num_samples):\n\n new = []\n index = 0\n while index + num_samples < len(data):\n new.append(data[index : index + num_samples])\n index += num_samples\n\n return new", "def genChunkTestSets(data, nSets, ws, gapSize, dirName=\"test_data/\", ofCut=0.9):\n # Start times for windows with at least ofCut of data observed\n tOFCut = np.where(windowObsFrac(data, ws) > ofCut)[0]\n\n # Choose times for test intervals\n np.random.seed(np.random.randint(0, 100))\n sampleTs = np.random.choice(tOFCut, size=nSets, replace=False)\n\n for ti in sampleTs:\n # Randomly select a sensor\n sensor = np.random.randint(0, data.shape[1])\n # Remove some data to use for testing\n _, removedTimes = removeChunk(data, ti, ws, sensor, gapSize)\n\n # Save data in csvs\n np.savetxt(dirName + \"/ti=%i_tf=%i_sensor=%i.csv\"%(ti, ti+ws, sensor), removedTimes, \\\n delimiter=\" \", fmt=\"%i\")", "def 
shuffle_and_split_data_full_cv(X_genesets, y, train_validate_size):\n permutation = np.random.permutation(y.size)\n y_permuted = y[permutation]\n X_genesets_permuted = [Xg[permutation, :] for Xg in X_genesets]\n X_groups_train_validate = [Xg[0:train_validate_size, :] for Xg in X_genesets_permuted]\n X_groups_test = [Xg[train_validate_size:, :] for Xg in X_genesets_permuted]\n y_train_validate = y_permuted[0:train_validate_size]\n y_test = y_permuted[train_validate_size:]\n return X_groups_train_validate, y_train_validate, X_groups_test, y_test", "def split_group(nsubj, ngroups):\n groupsize = int(np.floor(nsubj / ngroups))\n rperm = np.argsort(np.random.rand(nsubj))\n samples = [rperm[i * groupsize: (i + 1) * groupsize]\n for i in range(ngroups)]\n return samples", "def split_dataset(df, predict_window):\n\n #split dataset into train and test datasets\n #train 80 percent of rows\n dataset_train = np.array(df[:int(df.shape[0]*0.8)])\n\n #test dataset is 20 percent of rows\n #50 - that's where historical data and prediction overlap\n dataset_test = np.array(df[int(df.shape[0]*0.8)- predict_window:])\n\n return dataset_train, dataset_test", "def shuffle_slice(a, start, stop):\n i = start\n while (i < stop-1):\n idx = random.randrange(i, stop)\n a[i], a[idx] = a[idx], a[i]\n i += 1", "def split_into_subarrays_of_max_len(arr, max_len=44100):\n return np.split(arr, np.arange(max_len, len(arr), max_len))", "def split(self, fractions=[0.8, 0.2]):\n\n if sum(fractions) > 1.0 or sum(fractions) <= 0:\n raise ValueError(\"the sum of fractions argument should be between 0 and 1\")\n\n # random indices\n idx = np.arange(self.n_samples)\n np.random.shuffle(idx)\n\n # insert zero\n fractions.insert(0, 0)\n\n # gte limits of the subsets\n limits = (np.cumsum(fractions) * self.n_samples).astype(np.int32)\n\n subsets = []\n # create output dataset\n for i in range(len(fractions) - 1):\n subsets.append(\n Dataset(self.inputs[idx[limits[i]:limits[i + 1]]], self.targets[idx[limits[i]:limits[i + 1]]]))\n\n return subsets", "def generate_equal_slices(list_to_slice, batch_size):\n\n assert len(list_to_slice) > 1\n\n list_slices = []\n\n sample_size = len(list_to_slice)\n\n for start_i in range(0, sample_size, batch_size):\n end_i = start_i + batch_size\n aslice = list_to_slice[start_i:end_i]\n if len(aslice) < batch_size:\n aslice_rep = aslice * math.ceil(batch_size/len(aslice))\n aslice = aslice_rep[:batch_size]\n\n assert len(aslice) == batch_size\n\n list_slices.append(aslice)\n\n return list_slices", "def split_dataset(dset, batch_size=128, thread_count=4):\n sampler_dset_train = data.sampler.SubsetRandomSampler(list(range(int(0.7*len(dset)))))\n sampler_dset_test = data.sampler.SubsetRandomSampler(list(range(int(0.7*len(dset)),\n int(0.85*len(dset)))))\n sampler_dset_validation = data.sampler.SubsetRandomSampler(list(range(int(0.85*len(dset)),\n len(dset))))\n\n loader_dset_train = data.DataLoader(\n dset, batch_size=batch_size, num_workers=thread_count,\n pin_memory=True, sampler=sampler_dset_train)\n loader_dset_test = data.DataLoader(\n dset, batch_size=batch_size, num_workers=thread_count,\n pin_memory=True, sampler=sampler_dset_test)\n loader_dset_validation = data.DataLoader(\n dset, batch_size=batch_size, num_workers=thread_count,\n pin_memory=True, sampler=sampler_dset_validation)\n\n return loader_dset_train, loader_dset_test, loader_dset_validation", "def scaffold_split(data: MoleculeDataset,\n sizes: Tuple[float, float, float] = (0.8, 0.1, 0.1),\n balanced: bool = False,\n seed: int = 0,\n logger: 
logging.Logger = None) -> Tuple[MoleculeDataset,\n MoleculeDataset,\n MoleculeDataset]:\n assert sum(sizes) == 1\n\n # Split\n train_size, val_size, test_size = sizes[0] * len(data), sizes[1] * len(data), sizes[2] * len(data)\n train, val, test = [], [], []\n train_scaffold_count, val_scaffold_count, test_scaffold_count = 0, 0, 0\n\n # Map from scaffold to index in the data\n scaffold_to_indices = scaffold_to_smiles(data.mols(), use_indices=True)\n\n if balanced: # Put stuff that's bigger than half the val/test size into train, rest just order randomly\n index_sets = list(scaffold_to_indices.values())\n big_index_sets = []\n small_index_sets = []\n for index_set in index_sets:\n if len(index_set) > val_size / 2 or len(index_set) > test_size / 2:\n big_index_sets.append(index_set)\n else:\n small_index_sets.append(index_set)\n random.seed(seed)\n random.shuffle(big_index_sets)\n random.shuffle(small_index_sets)\n index_sets = big_index_sets + small_index_sets\n else: # Sort from largest to smallest scaffold sets\n index_sets = sorted(list(scaffold_to_indices.values()),\n key=lambda index_set: len(index_set),\n reverse=True)\n\n for index_set in index_sets:\n if len(train) + len(index_set) <= train_size:\n train += index_set\n train_scaffold_count += 1\n elif len(val) + len(index_set) <= val_size:\n val += index_set\n val_scaffold_count += 1\n else:\n test += index_set\n test_scaffold_count += 1\n\n if logger is not None:\n logger.debug(f'Total scaffolds = {len(scaffold_to_indices):,} | '\n f'train scaffolds = {train_scaffold_count:,} | '\n f'val scaffolds = {val_scaffold_count:,} | '\n f'test scaffolds = {test_scaffold_count:,}')\n \n log_scaffold_stats(data, index_sets, logger=logger)\n\n # Map from indices to data\n train = [data[i] for i in train]\n val = [data[i] for i in val]\n test = [data[i] for i in test]\n\n return MoleculeDataset(train), MoleculeDataset(val), MoleculeDataset(test)", "def bootstrap_group(nsubj, ngroups):\n groupsize = nsubj\n samples = [(groupsize * np.random.rand(groupsize)).astype(np.int_)\n for i in range(ngroups)]\n return samples", "def split(\n items: typing.List[typing.Any],\n sizes: typing.List[float],\n random_state: int = 42,\n stratify: typing.Sequence[typing.Hashable] = None,\n group: typing.Sequence[typing.Hashable] = None,\n preserve: typing.Sequence[typing.Optional[int]] = None,\n) -> typing.Sequence[typing.Any]:\n splits: typing.List[typing.List[typing.Any]] = [[] for _ in range(len(sizes))]\n if group is None:\n group = list(range(len(items)))\n if stratify is None:\n stratify = [0] * len(items)\n if preserve is not None:\n assert len(items) == len(\n preserve\n ), \"When preserve is provided, it must be the same length as items.\"\n for item, preserveIdx in zip(items, preserve):\n if preserveIdx is not None:\n splits[preserveIdx].append(item)\n ideal_counts = [s * len(items) for s in sizes]\n items, stratify, group = [\n [\n entry\n for entry, preserveIdx in zip(current_list, preserve)\n if preserveIdx is None\n ]\n for current_list in [items, stratify, group]\n ]\n if len(items) == 0:\n # There's nothing left to split.\n return splits\n # Rebalance sizes so that we shuffle the remaining\n # items into the splits to try and match the originally\n # desired sizes.\n offsets = [\n max(target - len(split), 0) for split, target in zip(splits, ideal_counts)\n ]\n sizes = [offset / sum(offsets) for offset in offsets]\n assert (\n 0.99 < sum(sizes) < 1.01\n ), f\"The sizes must add up to 1.0 (they added up to {sum(sizes)}).\"\n assert len(group) == 
len(items), \"group must be the same length as the collection.\"\n assert len(stratify) == len(\n items\n ), \"stratify must be the same length as the collection.\"\n rng = np.random.default_rng(seed=random_state)\n grouped = [\n {**dict(zip([\"idxs\", \"stratifiers\"], zip(*grouper))), \"group\": g}\n for g, grouper in groupby_unsorted(\n list(zip(range(len(stratify)), stratify)),\n key=lambda v: typing.cast(typing.Sequence[typing.Hashable], group)[v[0]],\n )\n ]\n hashes = {\n h: list(g)\n for h, g in groupby_unsorted(\n grouped, key=lambda g: hash(tuple(set(g[\"stratifiers\"])))\n )\n }\n for subgroups in hashes.values():\n for a, u in zip(\n rng.choice(len(sizes), size=len(subgroups), p=sizes),\n subgroups,\n ):\n splits[a].extend(items[idx] for idx in u[\"idxs\"])\n return splits", "def rand_aligned_slices(maxdim=5, maxshape=16):\n ndim = randrange(1, maxdim+1)\n minshape = 2\n n = randrange(100)\n if n >= 95:\n minshape = 0\n elif n >= 90:\n minshape = 1\n all_random = True if randrange(100) >= 80 else False\n lshape = [0]*ndim; rshape = [0]*ndim\n lslices = [0]*ndim; rslices = [0]*ndim\n\n for n in range(ndim):\n small = randrange(minshape, maxshape+1)\n big = randrange(minshape, maxshape+1)\n if big < small:\n big, small = small, big\n\n # Create a slice that fits the smaller value.\n if all_random:\n start = randrange(-small, small+1)\n stop = randrange(-small, small+1)\n step = (1,-1)[randrange(2)] * randrange(1, small+2)\n s_small = slice(start, stop, step)\n _, _, _, slicelen = slice_indices(s_small, small)\n else:\n slicelen = randrange(1, small+1) if small > 0 else 0\n s_small = randslice_from_slicelen(slicelen, small)\n\n # Create a slice of the same length for the bigger value.\n s_big = randslice_from_slicelen(slicelen, big)\n if randrange(2) == 0:\n rshape[n], lshape[n] = big, small\n rslices[n], lslices[n] = s_big, s_small\n else:\n rshape[n], lshape[n] = small, big\n rslices[n], lslices[n] = s_small, s_big\n\n return lshape, rshape, tuple(lslices), tuple(rslices)", "def train_valid_split(X, y):\n random_indexes = np.random.permutation(len(y))\n train_inds = random_indexes[:(0.75*len(y))]\n valid_inds = random_indexes[(0.75*len(y)):]\n return X[train_inds], y[train_inds], X[valid_inds], y[valid_inds]", "def split_data(x, y, ratio, seed=1):\n # number of value\n num_points = len(y)\n # compute the index that split the datas\n split = int(np.floor(num_points * ratio))\n\n # set the seed to the given value\n np.random.seed(seed)\n # compute random indexes for training and testing\n rand_indexes = np.random.permutation(num_points)\n index_training = rand_indexes[:split]\n index_testing = rand_indexes[split:]\n\n return x[index_training], y[index_training], x[index_testing], y[index_testing]", "def split_dataset(dataset, test_size):\r\n random.shuffle(dataset)\r\n \r\n rating_negativ = []\r\n rating_positiv = []\r\n \r\n for row in dataset:\r\n if int(row[1]) == 0:\r\n rating_negativ.append(row)\r\n elif int(row[1]) == 1:\r\n rating_positiv.append(row)\r\n\r\n random.shuffle(rating_positiv)\r\n random.shuffle(rating_negativ) \r\n \r\n neg_train_data, neg_val_data = train_test_split(rating_negativ, test_size=test_size)\r\n pos_train_data, pos_val_data = train_test_split(rating_positiv, test_size=test_size)\r\n \r\n train_data = neg_train_data + pos_train_data\r\n val_data = neg_val_data + pos_val_data\r\n \r\n random.shuffle(train_data)\r\n random.shuffle(val_data)\r\n \r\n return train_data, val_data", "def splitList(itms, numGr):\n\ttcount = len(itms)\n\tcItems = 
list(itms)\n\tsz = int(len(cItems) / numGr)\n\tgroups = list()\n\tcount = 0\n\tfor i in range(numGr):\n\t\tif (i == numGr - 1):\n\t\t\tcsz = tcount - count\n\t\telse:\n\t\t\tcsz = sz + randint(-2, 2)\n\t\t\tcount += csz\n\t\tgr = list()\n\t\tfor j in range(csz):\n\t\t\tit = selectRandomFromList(cItems)\n\t\t\tgr.append(it)\t\n\t\t\tcItems.remove(it)\t\n\t\tgroups.append(gr)\n\treturn groups", "def create_batches(data_size, batch_size, shuffle=True):\r\n batches = []\r\n ids = list(range(data_size))\r\n if shuffle:\r\n random.shuffle(ids)\r\n for i in range(int(data_size / batch_size)):\r\n start = i * batch_size\r\n end = (i + 1) * batch_size\r\n batches.append(ids[start:end])\r\n # the batch of which the length is less than batch_size\r\n rest = data_size % batch_size\r\n if rest > 0:\r\n batches.append(list(ids[-rest:]) + [-1] * (batch_size - rest)) # -1 as padding\r\n return batches", "def test_slice_zero_length_dimension(setup_teardown_file):\n f = setup_teardown_file[3]\n\n for i, shape in enumerate([(0,), (0, 3), (0, 2, 1)]):\n dset = f.create_dataset('x%d'%i, shape, dtype=np.int32)\n assert dset.shape == shape\n out = dset[...]\n assert isinstance(out, np.ndarray)\n assert out.shape == shape\n out = dset[:]\n assert isinstance(out, np.ndarray)\n assert out.shape == shape\n if len(shape) > 1:\n out = dset[:, :1]\n assert isinstance(out, np.ndarray)\n assert out.shape[:2] == (0, 1)", "def split_data(x, y, ratio, seed=1):\n # set seed\n np.random.seed(seed)\n # ***************************************************\n # INSERT YOUR CODE HERE\n # split the data based on the given ratio: TODO\n # ***************************************************\n \n def split_data(x, y, ratio, seed=1):\n \"\"\"split the dataset based on the split ratio.\"\"\"\n # set seed\n np.random.seed(seed)\n # ***************************************************\n # INSERT YOUR CODE HERE\n # split the data based on the given ratio: TODO\n # ***************************************************\n trainDataLen = round(len(y)*ratio)\n \n trainDataID = random.sample(range(len(y)), trainDataLen)\n \n # USing bool value to obtaint he remainling data for validation data set\n validDataID = np.array(range(len(y))) + 1\n validDataID[trainDataID] = 0\n validDataID = validDataID >0\n \n \n # obtain the trainning data\n trainDataX = x[trainDataID]\n trainDataY = y[trainDataID]\n \n # obtain the validation data\n validDataX = x[validDataID]\n validDataY = y[validDataID] \n \n return trainDataX,trainDataY, validDataX, validDataY\n \n #raise NotImplementedError", "def get_disc_data_sets(sample_size):\n log.info(\"MAKING DATA SETS\")\n\n fakes = generate_fakes(int(sample_size / 2))\n reals = get_reals(int(sample_size / 2))\n total = numpy.concatenate((reals, fakes))\n real_labels = numpy.ones([len(reals)])\n fake_labels = numpy.zeros([len(fakes)])\n total_labels = numpy.concatenate((real_labels, fake_labels))\n return get_discriminator_splits(total, total_labels)", "def split_data(basedir, data_split=0.80):\n manip = data_manipulator(basedir)\n manip.train_test_split(data_split=data_split)", "def split_data(y, num_folds=10):\r\n print(f\"Creating splits...\", end=\"\")\r\n\r\n fold_dict = dict()\r\n start_index = 0\r\n # if the number of proteins is not evenly divisible by the number of folds, the last samples are distributed\r\n # evenly across folds\r\n fold_size = math.floor(len(y) / num_folds)\r\n for fold in range(num_folds):\r\n fold_dict[fold] = list(range(start_index, start_index + fold_size))\r\n start_index += 
fold_size\r\n\r\n # distributing samples which are left over (due to the number of samples not being divisible by the number of folds)\r\n # evenly across folds\r\n fold = 0\r\n while start_index < len(y):\r\n fold_dict[fold] += [start_index]\r\n start_index += 1\r\n fold += 1\r\n\r\n # sanity check that we did not loose any samples while splitting\r\n assert sum([len(fold) for fold in fold_dict.values()]) == len(y), \"Number of samples after splitting does not \" \\\r\n \"match number of samples before splitting.\"\r\n\r\n additional_text = \"\" if len(y) % num_folds == 0 else f\" with {len(y) % num_folds} left over samples \" \\\r\n f\"being distributed evenly among folds\"\r\n print(f\"done! Created {num_folds} splits of size {fold_size}{additional_text}.\")\r\n\r\n # TODO: use the results of this to determine if we should proceed with the current folds\r\n test_stratification(fold_dict, y)\r\n\r\n return fold_dict", "def user_games_split(list_len: int, k: int) -> Tuple[List[List[int]], List[List[int]]]:\n logging.getLogger(__name__).debug('user_games spliting...')\n data_train, data_test = [], []\n rand_idx = [j for j in range(list_len)]\n random.shuffle(rand_idx)\n for i in range(k):\n start = int(i * list_len / k)\n end = int((i + 1) * list_len / k)\n data_train.append(rand_idx[0:start] + rand_idx[end:list_len])\n data_test.append(rand_idx[start: end])\n return data_train, data_test", "def test_slice_of_length_zero(setup_teardown_file):\n f = setup_teardown_file[3]\n\n for i, shape in enumerate([(3, ), (2, 2, ), (2, 1, 5)]):\n dset = f.create_dataset('x%d'%i, data=np.zeros(shape, np.int32))\n assert dset.shape == shape\n out = dset[1:1]\n assert isinstance(out, np.ndarray)\n assert out.shape == (0,)+shape[1:]", "def build_toy_dataset(N):\n y_data = np.random.uniform(-10.5, 10.5, N)\n r_data = np.random.normal(size=N) # random noise\n x_data = np.sin(0.75 * y_data) * 7.0 + y_data * 0.5 + r_data * 1.0\n x_data = x_data.reshape((N, 1))\n return train_test_split(x_data, y_data, random_state=42)", "def unbalanced_split(dataset, test_size):\n\tprint(\"\\tSplitting data into *unbalanced* training and test sets\")\n\n\tdataset = dataset.drop(\"Date\", axis=1)\n\toutput = train_test_split(dataset.drop(\"Trend\", axis=1).values, dataset[\"Trend\"].values, test_size=test_size, random_state=RANDOM_STATE)\n\n\treturn output", "def split_data(self, data, ratio=0.7, shuffle=True, seed=0):\n if shuffle:\n random.seed(seed) # fix to default seed 0\n random.shuffle(data)\n\n size = int(len(data) * ratio)\n data_1 = data[:size]\n data_2 = data[size:]\n\n return data_1, data_2", "def _create_train_val_split(\n self, data_size, shuffle = False, seed = None\n ):\n val_size = int(np.round(data_size * self._val_fraction))\n val_size = max(1, val_size) if self._val_fraction > 0 else 0\n train_size = data_size - val_size\n train_split = np.concatenate(\n [np.ones([train_size], dtype=np.int32),\n np.zeros([val_size], dtype=np.int32)])\n if shuffle:\n np.random.RandomState(seed).shuffle(train_split)\n return train_split", "def test_train_split(X, y, test_size=0.2):\n idx = 0\n length_of_X = len(X)\n y_test = []\n X_test = []\n \n while idx < length_of_X*test_size:\n random_number_gen = np.random.randint(low=0, high=len(X))\n y_test.append(y[random_number_gen])\n X_test.append(X[random_number_gen])\n X = np.delete(X, random_number_gen, axis=0)\n y = np.delete(y, random_number_gen, axis=0)\n idx += 1\n return X, np.array(X_test), y, np.array(y_test)", "def _split_data(self, x, y):\n\tindices = 
range(self.N)\n\tnp.random.shuffle(indices)\n\ttrain_idx, test_idx = indices[:self.TRAIN_SIZE], indices[self.TRAIN_SIZE:]\n\treturn (x[train_idx,:], y[train_idx,:], x[test_idx,:], y[test_idx,:])" ]
[ "0.84061474", "0.6924902", "0.6857288", "0.6845405", "0.6843688", "0.659823", "0.65702295", "0.65012205", "0.648653", "0.6431003", "0.63632774", "0.6349559", "0.6339707", "0.63243866", "0.63061655", "0.62308013", "0.61856407", "0.6180755", "0.61523664", "0.6138766", "0.6098193", "0.6082283", "0.6082275", "0.6077038", "0.60729694", "0.6063966", "0.60194695", "0.59999216", "0.5969396", "0.5960141", "0.5958811", "0.59531695", "0.5950204", "0.5927988", "0.59150124", "0.59061813", "0.5896715", "0.5895139", "0.5888476", "0.5884981", "0.58822894", "0.5881444", "0.5877909", "0.58715606", "0.58699983", "0.5864039", "0.5859686", "0.5850704", "0.5848327", "0.5848327", "0.58407784", "0.58374745", "0.58351696", "0.582275", "0.58195084", "0.5812159", "0.58077174", "0.58035284", "0.5801303", "0.579982", "0.5794177", "0.5792476", "0.57918364", "0.5791092", "0.57883364", "0.5761589", "0.5755937", "0.57514393", "0.574995", "0.5745669", "0.57400423", "0.5734848", "0.57319176", "0.57258266", "0.5722425", "0.5713885", "0.57041526", "0.57014334", "0.56995624", "0.5693304", "0.56862575", "0.5685494", "0.56749177", "0.5671639", "0.5669368", "0.5668234", "0.5664895", "0.5657907", "0.5653588", "0.565131", "0.56501544", "0.56498265", "0.56496507", "0.5646336", "0.56457865", "0.56285954", "0.56264114", "0.5626307", "0.5624061", "0.56228596" ]
0.6593007
6
Calculates the vibrational partition function, assuming harmonic motion around the vibrational angular frequency omega.
def qvib(v):
    T = s.Symbol("T")
    return 1.0 / (1.0 - s.exp(-1.0 * (h * v) / (k * T)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def relative_partition_function(self):\n return self.overall_norm**2", "def partition_function(array, temp):\r\n\r\n # Constants imported from scipy.constants\r\n h = scipy.constants.h # Planck's constant\r\n # speed of light must be in cm/s as wavenumber is in cm-1\r\n c = scipy.constants.c * 100\r\n k = scipy.constants.k # Boltzmann constant\r\n T = temp # extracted from log file using extract_temp()\r\n\r\n # check if inputs are numpy arrays and convert if not.\r\n if not isinstance(array, np.ndarray):\r\n np.asarray(array)\r\n\r\n # conversion to exponent\r\n u = (h * array * c) / (k * T)\r\n\r\n # calculates natural log of an individual frequency contribution to the partition function\r\n Q_ = np.log(np.exp(-(u / 2)) / (1 - np.exp(-u)))\r\n # sums all the contributions together, giving the final result.\r\n Q = np.sum(Q_)\r\n return Q", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.harmonicoscillator_partitionfunction(Tlist, self.frequency) ** self.degeneracy", "def reduced_partition_function_ratio(light_freq, heavy_freq, temp):\r\n # check lengths of arrays are the same.\r\n # The error would only occur if the 2 molecules are different.\r\n assert len(light_freq) == len(\r\n heavy_freq\r\n ), \"Array lengths do not match - please ensure both your chosen log files optimise the same molecule!\"\r\n\r\n # check if numpy array and convert if not.\r\n if not isinstance(light_freq, np.ndarray):\r\n np.asarray(light_freq)\r\n if not isinstance(heavy_freq, np.ndarray):\r\n np.asarray(heavy_freq)\r\n\r\n # calculate ln of ratio of heavy / light frequencies\r\n ratio = np.log(np.divide(heavy_freq, light_freq))\r\n # check if length of ratio array is the same as the frequency arrays\r\n assert len(ratio) == len(light_freq) == len(heavy_freq)\r\n\r\n # add the ratios together\r\n ratio = np.sum(ratio)\r\n\r\n # calculate vibrational partition functions\r\n Q_light = partition_function(light_freq, temp)\r\n Q_heavy = partition_function(heavy_freq, temp)\r\n\r\n # print variables used to calculate RPFR\r\n print(\"Primed variables (v', Q') refer to the light isotope.\")\r\n print(\"ln(v/v'): \", ratio)\r\n print(\"lnQ: \", Q_heavy)\r\n print(\"lnQ': \", Q_light)\r\n\r\n # calculate RPFR, defined as 1000*ln(beta).\r\n beta = 1000 * (ratio + Q_heavy - Q_light)\r\n print(\"1000*lnB: \", beta)\r\n return beta, ratio, Q_heavy, Q_light", "def vol(x):\r\n return pi*(topdia(x)/2000.)**2 * length (x)", "def partition(v,m,I,V,sym):\n T = s.Symbol(\"T\")\n return qvib(v) + qtrans(m,V) + qrot(I,sym)", "def rk_adaptive(accel,m,r,h,v,recur,emin=10**-12,emax=10**-8,hmax=.1,hmin=.01,recurmax=100):\n k1v = accel(m,r)\n k1r = v\n k2v = accel(m,r + 0.25*k1r*h)\n k2r = v + (0.25*k1v)*h\n k3v = accel(m,r + (3/32.*k1r + 9/32.*k2r)*h)\n k3r = v + (3/32.*k1v + 9/32.*k2v)*h\n k4v = accel(m,r + (1932/2197.*k1r - 7200/2197.*k2r + 7296/2197.*k3r)*h)\n k4r = v + (1932/2197.*k1v - 7200/2197.*k2v + 7296/2197.*k3v)*h\n k5v = accel(m,r + (439/216.*k1r - 8*k2r + 3680/513.*k3r - 845/4104.*k4r)*h)\n k5r = v + (439/216.*k1v - 8*k2v + 3680/513.*k3v - 845/4104.*k4v)*h\n k6v = accel(m,r - (8/27.*k1r + 2*k2r - 3544/2565.*k3r + 1859/4104.*k4r - 11/40.*k5r)*h)\n k6r = v - (8/27.*k1v + 2*k2v - 3544/2565.*k3v + 1859/4104.*k4v - 11/40.*k5v)*h\n\n # 4th order calculation\n new_v4 = v + h*(25/216.*k1v + 1408/2565.*k3v + 2197/4104.*k4v - 1/5.*k5v)\n new_r4 = r + h*(25/216.*k1r + 1408/2565.*k3r + 2197/4104.*k4r - 1/5.*k5r)\n \n # 5th order calculation\n new_v5 = v + h*(16/135.*k1v + 6656/12825.*k3v+28561/56430.*k4v - 9/50.*k5v + 
2/55.*k6v) \n new_r5 = r + h*(16/135.*k1r + 6656/12825.*k3r+28561/56430.*k4r - 9/50.*k5r + 2/55.*k6r) \n\n # Calculate truncation error between 5th and 4th order\n eps = np.abs( (np.max(np.abs(new_r5)) - np.max(np.abs(new_r4))) / np.max(np.abs(new_r4)))\n \n # Compare eps to emin and emax and update h accordingly\n if np.max(eps) < emin:\n if h*2.0 < hmax:\n h *= 2.0\n new_v = new_v5\n new_r = new_r5 \n \n if np.max(eps) > emax:\n if h/2.0 > hmin:\n h /= 2.0\n print h\n # Error too large, call rk_adaptive again with smaller h\n if recur < recurmax:\n recur += 1\n rk_adaptive(accel,m,r,h,v,recur)\n new_v = new_v5\n new_r = new_r5\n \n else:\n new_v = new_v5\n new_r = new_r5\n \n return new_v, new_r, h", "def omega(x):\n # if ramp_start_time - ramp_constant_time <= x <= end_ramp_end_time + ramp_constant_time:\n # f_t0 = smooth_factor * (x - ramp_start_time)\n # f_t1 = smooth_factor * (x - i_ramp_end_time)\n # if ramp_mode == 'with_end_acc':\n # f_t2 = smooth_factor * (x - steady_end_time)\n # f_t3 = smooth_factor * (x - end_ramp_end_time)\n # elif ramp_mode == 'no_end_acc':\n # f_t2 = smooth_factor * ramp_start_time\n # f_t3 = smooth_factor * i_ramp_end_time\n\n # omegax = (ramp_stage_acceleration / 2) / smooth_factor * (\n # logcosh(f_t0) - logcosh(f_t1) + logcosh(f_t3) - logcosh(f_t2))\n # else:\n # if bstroke == 'yes' and x <= 2 * (end_ramp_end_time +\n # ramp_constant_time):\n # x -= end_ramp_end_time + ramp_constant_time\n # f_t0 = smooth_factor * (x - ramp_start_time)\n # f_t1 = smooth_factor * (x - i_ramp_end_time)\n # if ramp_mode == 'with_end_acc':\n # f_t2 = smooth_factor * (x - steady_end_time)\n # f_t3 = smooth_factor * (x - end_ramp_end_time)\n # elif ramp_mode == 'no_end_acc':\n # f_t2 = smooth_factor * ramp_start_time\n # f_t3 = smooth_factor * i_ramp_end_time\n\n # omegax = -(ramp_stage_acceleration / 2) / smooth_factor * (\n # logcosh(f_t0) - logcosh(f_t1) + logcosh(f_t3) -\n # logcosh(f_t2))\n # else:\n # omegax = 0\n\n if bstroke == 'no':\n f_t0 = smooth_factor * (x - ramp_start_time)\n f_t1 = smooth_factor * (x - i_ramp_end_time)\n if ramp_mode == 'with_end_acc':\n f_t2 = smooth_factor * (x - steady_end_time)\n f_t3 = smooth_factor * (x - end_ramp_end_time)\n elif ramp_mode == 'no_end_acc':\n f_t2 = smooth_factor * ramp_start_time\n f_t3 = smooth_factor * i_ramp_end_time\n\n omegax = (ramp_stage_acceleration / 2) / smooth_factor * (\n logcosh(f_t0) - logcosh(f_t1) + logcosh(f_t3) - logcosh(f_t2))\n\n else:\n if x <= end_ramp_end_time + ramp_constant_time:\n f_t0 = smooth_factor * (x - ramp_start_time)\n f_t1 = smooth_factor * (x - i_ramp_end_time)\n if ramp_mode == 'with_end_acc':\n f_t2 = smooth_factor * (x - steady_end_time)\n f_t3 = smooth_factor * (x - end_ramp_end_time)\n elif ramp_mode == 'no_end_acc':\n f_t2 = smooth_factor * ramp_start_time\n f_t3 = smooth_factor * i_ramp_end_time\n\n omegax = (ramp_stage_acceleration /\n 2) / smooth_factor * (logcosh(f_t0) - logcosh(f_t1) +\n logcosh(f_t3) - logcosh(f_t2))\n\n else:\n x -= end_ramp_end_time + ramp_constant_time\n f_t0 = smooth_factor * (x - ramp_start_time)\n f_t1 = smooth_factor * (x - i_ramp_end_time)\n if ramp_mode == 'with_end_acc':\n f_t2 = smooth_factor * (x - steady_end_time)\n f_t3 = smooth_factor * (x - end_ramp_end_time)\n elif ramp_mode == 'no_end_acc':\n f_t2 = smooth_factor * ramp_start_time\n f_t3 = smooth_factor * i_ramp_end_time\n\n omegax = -(ramp_stage_acceleration / 2) / smooth_factor * (\n logcosh(f_t0) - logcosh(f_t1) + logcosh(f_t3) -\n logcosh(f_t2))\n\n return omegax", "def 
scaling_factor_epanechnikov( h ):\n h = np.array( h, copy=False).ravel()\n n = len(h)\n s = ( np.pi**(n/2.0) ) / sp.special.gamma( n/2.0 + 1 )\n s = (n/2.0 + 1)/s\n s /= np.product(h)\n return s", "def getPartitionFunction(self, Tlist, V=1.0):\n\t\treturn _modes.translation_partitionfunction(Tlist, self.mass, self.dimension, V)", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.freerotor_partitionfunction(Tlist, self.frequencies, 1 if self.linear else 0)", "def integrateMomentum(p, dt, fluid_v, fvolpp, mu_g, rfluid):\n\n #integration constants\n beta = 2.0/3.0 \n alpha1 = -4.0/3.0 \n alpha2 = 1.0/3.0 \n dtbeta = dt * beta \n\n vel1 = p.vel[0] \n pos1 = dtbeta * vel1 - alpha1 * p.pos[0] - alpha2 * p.pos[1] \n rp = p.get_density() \n D = p.get_diameter() \n mdot = (p.mass[0] - p.mass[1]) / dt \n \n mfluid = rfluid * fvolpp + 1e-30 # mass of fluid around particle\n fixedsrc = -alpha1 * p.vel[0] - alpha2 * p.vel[1] \n volp = math.pi * D * D * D / 6.0 \n volpp = fvolpp \n # enhance drag function for large volume fraction\n alphav = min(2.0, volp / max(volpp, 1e-30)) \n \n fp_vf = max((8.0 * alphav) ** 6.0 - 0.001, 0.0) \n\n #Integration loop\n max_iterations = 20\n for i in range(max_iterations): \n #Update fluid velocity based on delta particle momentum\n if i > 0: #Past first iteration\n fluid_v = fluid_v - ((vel1 - p.vel[0]) * p.mass[0] / mfluid ) \n\n dv = abs(fluid_v - vel1) \n Re = rfluid * D * dv / mu_g \n # blowing Reynolds number\n Re_b = abs(mdot / (D * mu_g * math.pi)) \n a = 0.09 + 0.077 * math.exp(-0.4 * Re) \n b = 0.4 + 0.77 * math.exp(-0.04 * Re) \n denom = 1.0 + a * Re_b **b \n\n fpblow = (1. + 0.0545 * Re + 0.1 * math.sqrt(Re) * (1.0 - 0.03 * Re)) / denom + fp_vf \n # Clift-Gauvin drag function (Crowe, 1998)\n fpcg = 1.0 + 0.15 * Re ** 0.687 + 0.0175 * Re / (1.0 + 4.25e4 * (Re+1e-20) **-1.16) + fp_vf \n # Choose drag function based on reynolds number. 
For high reynolds\n # number use Clift Gauvin, otherwise use blowing reynolds number \n if Re < 100:\n fp = fpblow\n else:\n fp = fpcg\n taup = rp * D ** 2 / (18.0 * mu_g * fp) \n vcoef = dtbeta / taup \n\n # vel1 = (vcoef*fluid_v + fixedsrc)/(1.+vcoef) \n f = (vcoef * fluid_v + fixedsrc) / (1.0 + vcoef) - vel1 \n df = -vcoef * p.mass[0] / (mfluid * (1.0 + vcoef)) - 1.0 \n vel1 -= -f/df \n pos1 = dtbeta * vel1 - alpha1 * p.pos[0] - alpha2 * p.pos[1] \n\n # If iterated at least 2 times, check for convergence\n if i > 1 and abs(f) / (abs(df) * (0.1 + abs(vel1))) < 1.0e-5 : \n break \n \n # Now advance the particle momentum in time\n p.vel[2] = p.vel[1] \n p.vel[1] = p.vel[0] \n p.vel[0] = vel1 \n p.pos[1] = p.pos[0] \n p.pos[0] = pos1", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.hinderedrotor_partitionfunction(Tlist, self.frequency, self.barrier) ** self.degeneracy", "def exp_V_half(self, x):\n return np.exp(-1j * self.V_fct_theta(x) / (2.0 * self.hbar))", "def acceleration( x, u, m, rho, P, b, h):\n\t\n n = x.size\n a = np.zeros((n,1))\n\n for i in range(0, n):\n \n # damping & harmonic potential (0.5 x^2)\n a[i] = a[i] - u[i]*b - x[i]\n\n # quantum pressure (pairwise calculation)\n x_js = np.delete(x,i)\n P_js = np.delete(P,i)\n rho_js = np.delete(rho,i)\n # first, calculate vector between two particles\n uij = x[i] - x_js\n # calculate acceleration due to pressure\n fac = -m * (P[i]/rho[i]**2 + P_js/rho_js**2)\n pressure_a = fac * kernel( uij, h, '1' )\n # accumulate contributions to the acceleration\n a[i] = a[i] + np.sum(pressure_a)\n\n return a", "def calculate_angular_velocity(headings, fs):\r\n heading_vectors = np.array([np.cos(headings), np.sin(headings)]).T\r\n sin_angular_change = np.cross(heading_vectors[:-1], heading_vectors[1:])\r\n angular_velocity = np.arcsin(sin_angular_change) * float(fs)\r\n return angular_velocity", "def Harmonic_potential(x):\n k=1\n return 0.5*k*(x**2)", "def velocity_field(xt,yt,x0,y0,Vinf,dia,rot,chord,B,param=None,veltype='all',integration='simp',m=220,n=200):\n rad = dia/2.\n tsr = rad*fabs(rot)/Vinf\n solidity = (chord*B)/rad\n\n # Translating the turbine position\n x0t = x0 - xt\n y0t = y0 - yt\n\n coef0,coef1,coef2,coef3,coef4,coef5,coef6,coef7,coef8,coef9 = coef_val()\n\n # Calculating EMG distribution parameters (based on polynomial surface fitting)\n if param is None:\n loc1 = _parameterval(tsr,solidity,coef0)\n loc2 = _parameterval(tsr,solidity,coef1)\n loc3 = _parameterval(tsr,solidity,coef2)\n spr1 = _parameterval(tsr,solidity,coef3)\n spr2 = _parameterval(tsr,solidity,coef4)\n skw1 = _parameterval(tsr,solidity,coef5)\n skw2 = _parameterval(tsr,solidity,coef6)\n scl1 = _parameterval(tsr,solidity,coef7)\n scl2 = _parameterval(tsr,solidity,coef8)\n scl3 = _parameterval(tsr,solidity,coef9)\n\n else:\n # Reading in EMG distribution parameters\n loc1 = param[0]\n loc2 = param[1]\n loc3 = param[2]\n spr1 = param[3]\n spr2 = param[4]\n skw1 = param[5]\n skw2 = param[6]\n scl1 = param[7]\n scl2 = param[8]\n scl3 = param[9]\n\n ###################################\n if veltype == 'vort':\n # VORTICITY CALCULATION (NO INTEGRATION)\n if x0t < 0.:\n vel = 0.\n else:\n vel = _vawtwake.vorticitystrength(x0t,y0t,dia,loc1,loc2,loc3,spr1,spr2,skw1,skw2,scl1,scl2,scl3)/rot\n ###################################\n else:\n # Integration of the vorticity profile to calculate velocity\n if integration == 'simp':\n # SIMPSON'S RULE INTEGRATION (must use polynomial surface coefficients from VAWTPolySurfaceCoef.csv)\n inte = 1 # Simpson's Rule\n # 
inte = 2 # Trapezoidal Rule (optional ability of the code -- faster but less accurate)\n\n if param is not None:\n print("**** Using polynomial surface coefficients from VAWTPolySurfaceCoef.csv for Simpson's rule integration ****")\n\n vel_xs,vel_ys = _vawtwake.vel_field(xt,yt,x0,y0,dia,rot,chord,B,Vinf,coef0,coef1,coef2,coef3,coef4,coef5,coef6,coef7,coef8,coef9,m,n,inte)\n\n if veltype == 'all':\n vel = sqrt((vel_xs*Vinf + Vinf)**2 + (vel_ys*Vinf)**2)/Vinf\n elif veltype == 'x':\n vel = (vel_xs*Vinf + Vinf)/Vinf\n elif veltype == 'y':\n vel = vel_ys\n elif veltype == 'ind':\n vel = np.array([vel_xs,vel_ys])\n ###################################\n elif integration == 'gskr':\n # 21-POINT GAUSS-KRONROD RULE QUADRATURE INTEGRATION\n xbound = (scl3+5.)*dia\n argval = (x0t,y0t,dia,loc1,loc2,loc3,spr1,spr2,skw1,skw2,scl1,scl2,scl3)\n if veltype == 'all' or veltype == 'x' or veltype == 'ind':\n vel_x = _dblquad(_vawtwake.integrandx,0.,xbound,lambda x: -1.*dia,lambda x: 1.*dia,args=argval)\n vel_xs = (vel_x[0]*fabs(rot))/(2.*pi)\n if veltype == 'all' or veltype == 'y' or veltype == 'ind':\n vel_y = _dblquad(_vawtwake.integrandy,0.,xbound,lambda x: -1.*dia,lambda x: 1.*dia,args=argval)\n vel_ys = (vel_y[0]*fabs(rot))/(2.*pi)\n\n if veltype == 'all':\n vel = sqrt((vel_xs + Vinf)**2 + (vel_ys)**2)/Vinf\n elif veltype == 'x':\n vel = (vel_xs + Vinf)/Vinf\n elif veltype == 'y':\n vel = vel_ys/Vinf\n elif veltype == 'ind':\n vel = np.array([vel_xs,vel_ys])/Vinf\n ###################################\n\n return vel", "def h_angular_momentum(r, v):\n m = Symbol(\"m\")\n return integrate(r.cross(v), m)", "def build_rhs():\n\n def div(\n coeff_rho,\n momentum_x,\n momentum_y,\n momentum_z,\n ):\n \"\"\"Computes the divergence of the velocity field.\"\"\"\n # Compute the fourth order derivative of the pressure for the face\n # velocity correction.\n p_corr = (\n states['p']\n if self._params.enable_rhie_chow_correction else states['dp'])\n d4p_dx4 = self._kernel_op.apply_kernel_op_x(p_corr, 'k4d2x')\n d4p_dy4 = self._kernel_op.apply_kernel_op_y(p_corr, 'k4d2y')\n d4p_dz4 = self._kernel_op.apply_kernel_op_z(p_corr, 'k4d2z',\n 'k4d2zsh')\n\n # Compute velocity gradient based on interpolated values on cell faces.\n coeff_x = dt / (4. * coeff_rho * dx**2)\n du = self._kernel_op.apply_kernel_op_x(momentum_x, 'kDx')\n du_dx = [\n du_i / (2. * dx) + coeff_x * d4p_dx4_i\n for du_i, d4p_dx4_i in zip(du, d4p_dx4)\n ]\n\n coeff_y = dt / (4. * coeff_rho * dy**2)\n dv = self._kernel_op.apply_kernel_op_y(momentum_y, 'kDy')\n dv_dy = [\n dv_i / (2. * dy) + coeff_y * d4p_dy4_i\n for dv_i, d4p_dy4_i in zip(dv, d4p_dy4)\n ]\n\n coeff_z = dt / (4. * coeff_rho * dz**2)\n dw = self._kernel_op.apply_kernel_op_z(momentum_z, 'kDz', 'kDzsh')\n dw_dz = [\n dw_i / (2. 
* dz) + coeff_z * d4p_dz4_i\n for dw_i, d4p_dz4_i in zip(dw, d4p_dz4)\n ]\n\n return [\n du_dx_i + dv_dy_i + dw_dz_i\n for du_dx_i, dv_dy_i, dw_dz_i in zip(du_dx, dv_dy, dw_dz)\n ]\n\n def add_factor(\n v,\n factor,\n ):\n return [factor * v_i for v_i in v]\n\n b_terms = {\n _B_TERM_SOURCE_RHO: add_factor(src_rho, inv_dt),\n }\n if isinstance(rho_info, ConstantDensityInfo):\n b_terms.update({\n _B_TERM_DIV:\n add_factor(\n div(rho_info.rho, states['u'], states['v'], states['w']),\n inv_dt * rho_info.rho),\n _B_TERM_DRHO_DT: [\n tf.zeros_like(src_rho_i) for src_rho_i in src_rho\n ],\n })\n\n elif isinstance(rho_info, VariableDensityInfo):\n b_terms.update({\n _B_TERM_DIV:\n add_factor(\n div(1.0, states['rho_u'], states['rho_v'], states['rho_w']),\n inv_dt),\n _B_TERM_DRHO_DT:\n add_factor(rho_info.drho_dt, inv_dt),\n })\n\n else:\n raise ValueError('`rho_info` has to be either `ConstantDensityInfo` or '\n '`VariableDensityInfo`.')\n\n # pylint: disable=g-complex-comprehension\n return [(div_i + drho_dt_i - src_rho_i)\n for div_i, drho_dt_i, src_rho_i in zip(\n b_terms[_B_TERM_DIV],\n b_terms[_B_TERM_DRHO_DT],\n b_terms[_B_TERM_SOURCE_RHO],\n )], b_terms\n # pylint: enable=g-complex-comprehension", "def fpart(x):\n return x - np.floor(x)", "def w_dispersion(q,v=1):\r\n # parameters for two-fluid hydrodynamic model from [1]\r\n Vol = np.sqrt(3)/2 * 4.63**2; # unit cell volume in graphene\r\n wr1= 4.08 / HARTREE; # Pi-electrons [eV]\r\n n1 = 2/Vol;\r\n wr2= 13.06 / HARTREE; # Sigma-electrons [eV]\r\n n2 = 6/Vol;\r\n \r\n # resonance frequencies\r\n w12 = wr1**2; # we neglect the acoustic velocity s=0\r\n w22 = wr2**2;\r\n\r\n # generalized plasma frequencies\r\n Q12 = 2*np.pi*n1*q * v ; # effective Omega_nu^2\r\n Q22 = 2*np.pi*n2*q * v ;\r\n\r\n # dispersion formula (17) in [1]\r\n A = 0.5*(w12 + Q12 + w22 + Q22);\r\n B = np.sqrt( 0.25*( w12 + Q12 - w22 - Q22 )**2 + Q12 * Q22 );\r\n\r\n return np.asarray([np.sqrt(A-B), np.sqrt(A+B)]);", "def normal_vol(k, f, t, alpha, beta, rho, volvol):\r\n # We break down the complex formula into simpler sub-components\r\n f_av = np.sqrt(f * k)\r\n A = - beta * (2 - beta) * alpha**2 / (24 * f_av**(2 - 2 * beta))\r\n B = rho * alpha * volvol * beta / (4 * f_av**(1 - beta))\r\n C = (2 - 3 * rho**2) * volvol**2 / 24\r\n FMKR = _f_minus_k_ratio(f, k, beta)\r\n ZXZ = _zeta_over_x_of_zeta(k, f, t, alpha, beta, rho, volvol)\r\n # Aggregate all components into actual formula (B.67a)\r\n v_n = alpha * FMKR * ZXZ * (1 + (A + B + C) * t)\r\n return v_n", "def _compute_f(self, p, dh, dv):\n return dh / (self.beta * p * dv)", "def v_from_omega_r(w, r):\n return w.cross(r)", "def sphvol(r):\n return (4./3.)*np.pi*(r**3.)", "def calc_V(A):\n return 1. 
/ calc_rV(A)", "def sinu_continuous_kinematic_function(t, kinematic_parameters):\n flapping_wing_frequency = kinematic_parameters[0]\n flapping_angular_velocity_amplitude = kinematic_parameters[1]\n pitching_angular_velocity_amplitude = kinematic_parameters[2]\n flapping_acceleration_time_fraction = kinematic_parameters[3]\n pitching_time_fraction = kinematic_parameters[4]\n flapping_delay_time_fraction = kinematic_parameters[5]\n pitching_delay_time_fraction = kinematic_parameters[6]\n\n def dphi(x):\n \"\"\"flapping motion angular velocity function\"\"\"\n return -kf(\n flapping_wing_frequency, flapping_angular_velocity_amplitude,\n flapping_acceleration_time_fraction, flapping_delay_time_fraction,\n x)\n\n dphi_data = []\n dphi_data_abs = []\n for ti in t:\n dphi_data.append(dphi(ti))\n dphi_data_abs.append(np.abs(dphi(ti)))\n dphi_spl = UnivariateSpline(t, dphi_data, s=0)\n dphi_spl_abs = UnivariateSpline(t, dphi_data_abs, s=0)\n\n flapping_amplitude = dphi_spl_abs.integral(0, 1 / flapping_wing_frequency)\n\n print('flapping amplitude = %s' % (flapping_amplitude / 2))\n\n def ddphi(x):\n \"\"\"flapping angular acceleration function\"\"\"\n return dphi_spl.derivatives(x)[1]\n\n initial_phi = dphi_spl.integral(\n 0,\n np.abs(flapping_delay_time_fraction) / flapping_wing_frequency)\n initial_phi = -np.sign(flapping_delay_time_fraction) * initial_phi\n\n def phi(x):\n \"\"\"flapping motion function\"\"\"\n return flapping_amplitude / 4 + initial_phi + dphi_spl.integral(0, x)\n\n def dalf(x):\n \"\"\"pitching angular velocity function\"\"\"\n return kf_continuous(flapping_wing_frequency,\n pitching_angular_velocity_amplitude,\n pitching_time_fraction,\n pitching_delay_time_fraction, x)\n\n dalf_data = []\n dalf_data_abs = []\n for ti in t:\n dalf_data.append(dalf(ti))\n dalf_data_abs.append(np.abs(dalf(ti)))\n dalf_spl = UnivariateSpline(t, dalf_data, s=0)\n dalf_spl_abs = UnivariateSpline(t, dalf_data_abs, s=0)\n\n pitching_amplitude = dalf_spl_abs.integral(0, 1 / flapping_wing_frequency)\n print('pitching amplitude = %s' % (pitching_amplitude / 2))\n\n def ddalf(x):\n \"\"\"pitching angular acceleration function\"\"\"\n return dalf_spl.derivatives(x)[1]\n\n initial_alf = dalf_spl.integral(\n 0,\n np.abs(pitching_delay_time_fraction) / flapping_wing_frequency)\n initial_alf = -np.sign(pitching_delay_time_fraction) * initial_alf\n\n def alf(x):\n \"\"\"pitching motion function\"\"\"\n return initial_alf + dalf_spl.integral(0, x)\n\n kinematic_angles = []\n t_1st_cycle = [t1 for t1 in t if t1 <= 1 / flapping_wing_frequency]\n for ti in t_1st_cycle:\n kinematic_anglesi = [\n phi(ti),\n alf(ti),\n dphi(ti),\n dalf(ti),\n ddphi(ti),\n ddalf(ti)\n ]\n kinematic_angles.append(kinematic_anglesi)\n\n return kinematic_angles", "def velocity(self,level='cell'):\r\n\r\n # Each section always stores the volume at time t+1 and the flow from t to t+1, i.e. the flow over one simulation step and the vehicle count in each cell at the end of the step\r\n # But computing speed needs the cell density at the start of the step, so the corresponding vehicle count is vol_t = Vol_t+1 + outflow_t - inflow_t \r\n vels = []\r\n vols = self.last_sim_step_volume()\r\n \r\n if level=='cell':\r\n # Compute the first cell\r\n vol = vols[0]\r\n outflow = self.flows[0]\r\n if vol == 0 :\r\n vels.append(0)\r\n else :\r\n vel = outflow*3600/(vol/self.cell_length)\r\n vels.append(round(vel,2))\r\n \r\n # Compute the middle cells\r\n for i in range(1,self.cells_number-1):\r\n vol = vols[i]\r\n outflow = self.flows[i]\r\n if vol == 0 :\r\n vels.append(0)\r\n else:\r\n vel = outflow*3600/(vol/self.cell_length)\r\n vels.append(round(vel,2))\r\n\r\n # Compute the last cell\r\n vol = vols[-1]\r\n outflow = self.outflow\r\n if vol==0:\r\n
vels.append(0)\r\n else:\r\n vel = outflow*3600/(vol/self.cell_length)\r\n vels.append(round(vel,2))\r\n \r\n return vels\r\n \r\n elif level=='section': \r\n # First compute the speed of every cell, then take the volume-weighted average\r\n \r\n # Compute the first cell\r\n vol = vols[0]\r\n outflow = self.flows[0]\r\n if vol == 0 :\r\n vels.append(0)\r\n else :\r\n vel = outflow*3600/(vol/self.cell_length)\r\n vels.append(round(vel,2))\r\n \r\n # Compute the middle cells\r\n for i in range(1,self.cells_number-1):\r\n vol = vols[i]\r\n outflow = self.flows[i]\r\n if vol == 0 :\r\n vels.append(0)\r\n else:\r\n vel = outflow*3600/(vol/self.cell_length)\r\n vels.append(round(vel,2))\r\n\r\n # Compute the last cell\r\n vol = vols[-1]\r\n outflow = self.outflow\r\n if vol==0:\r\n vels.append(0)\r\n else:\r\n vel = outflow*3600/(vol/self.cell_length)\r\n vels.append(round(vel,2)) \r\n\r\n \r\n # Weight the speeds by volume and average\r\n weighted_vels = [vel*vol for vel, vol in zip(vels,vols)]\r\n sum_vol = sum(vols)\r\n if sum_vol == 0:\r\n avg_vel = 0\r\n else:\r\n avg_vel = round(sum(weighted_vels)/sum_vol,2)\r\n \r\n return avg_vel\r\n\r\n\r\n else :\r\n raise ValueError('no such level for collecting data')", "def F_cont(self):\n x0 = self.edp_par['x0'].value\n A = self.edp_par['A'].value\n f1 = self.edp_par['f1'].value\n f2 = self.edp_par['f2'].value\n lr = self.latt_par['lambda_r'].value\n w = 0.5 * (self.qx*x0 + self.qz*A)\n arg1 = 0.5*self.qx*lr + w\n arg2 = 0.5*self.qx*lr - w\n fir = x0 * np.sin(w) / lr / w\n sec = (lr-x0) * np.cos(0.5*arg1) * np.sin(arg2) / lr / np.cos(0.5*arg2) / arg2 \n #sec = (-1)**self.k * (lr-x0) * sin(self.k*pi-w)/(self.k*pi-w)/lr\n return (fir + f1*sec + 2*f2*np.cos(w)/lr)", "def volterra_BM_path_chol(grid_points, M, H, T,rho):\n\n assert 0<H<1.0\n\n ## Step1: create partition\n\n X=np.linspace(0, T, num=grid_points)\n\n # get rid of starting point\n X=X[1:grid_points]\n\n ## Step 2: compute covariance matrix\n size=2*(grid_points-1)\n Sigma=np.zeros([size,size])\n #Sigma(1,1)\n for j in range(grid_points-1):\n for i in range(grid_points-1):\n if i==j:\n Sigma[i,j]=np.power(X[i],2*H)/2/H\n else:\n s=np.minimum(X[i],X[j])\n t=np.maximum(X[i],X[j])\n Sigma[i,j]=np.power(t-s,H-0.5)/(H+0.5)*np.power(s,0.5+H)*special.hyp2f1(0.5-H, 0.5+H, 1.5+H, -s/(t-s))\n #Sigma(1,2) and Sigma (2,1)\n for j in range(grid_points-1):\n for i in range(grid_points-1):\n Sigma[i,j+((grid_points-1))]=rho/(H+0.5)*(np.power(X[i],H+0.5)-np.power(X[i]-np.minimum(X[i],X[j]),H+0.5))\n Sigma[i+(grid_points-1),j]=rho/(H+0.5)*(np.power(X[j],H+0.5)-np.power(X[j]-np.minimum(X[i],X[j]),H+0.5))\n #Sigma(2,2)\n for j in range(grid_points-1):\n for i in range(grid_points-1):\n Sigma[i+(grid_points-1),j+(grid_points-1)]=np.minimum(X[i],X[j])\n\n ## Step 3: compute Cholesky decomposition\n P=np.linalg.cholesky(Sigma)\n\n ## Step 4: draw Gaussian rv\n\n Z=np.random.normal(loc=0.0, scale=1.0, size=[M,2*(grid_points-1)])\n\n ## Step 5: get (V,W) and add 0's in the beginning\n\n V=np.zeros((M,grid_points))\n W=np.zeros((M,grid_points))\n for i in range(M):\n aux=np.dot(P,Z[i,:])\n V[i,1:grid_points]=aux[0:(grid_points-1)]\n W[i,1:grid_points]=aux[(grid_points-1):2*(grid_points-1)]\n\n return V, W", "def func_omega_p_318(Ip):\n return (d12_318/hbar)*np.sqrt((2*Ip)/(c*epsilon_0))", "def velocity(n_core, q, beta_invariant, material_dispersion=None):\n c = scipy.constants.speed_of_light\n if material_dispersion is None:\n A = 2 / c / (2 + q)\n B = q * n_core**2 / c / (2 + q)\n else:\n N1 = n_core + material_dispersion\n y = 2 * n_core / N1\n A = 2 * N1 / n_core * (1 + 0.25 * y) / c / (q + 2)\n B = q * n_core**2 * A - 1 / 4 / c * 
N1 * n_core * y\n\n return A * beta_invariant + B / beta_invariant", "def calc_vol_vfrac(self, r_cool, PD, c):\n # core and reflector volume required for depletion calculation\n self.core_vol = self.r**2 * math.pi * self.z\n self.refl_vol = ((self.r + self.refl_t)**2 - self.r**2)*math.pi * self.z\n \n pitch = 2*r_cool*PD\n # calculate 'volumes' for fixed length\n v_cool = (r_cool ** 2 * math.pi)\n # clad volume fraction\n v_clad = ((r_cool + c)**2 - r_cool**2)*math.pi\n # fuel volume fraction\n v_cermet = (math.sqrt(3)*pitch**2 / 2.0) - (r_cool + c) ** 2 * math.pi \n\n self.cell_vol = v_cool + v_clad + v_cermet\n # calculate normalized vfracs from total cell volume\n self.vfrac_cool = v_cool / self.cell_vol\n self.vfrac_clad = v_clad / self.cell_vol\n self.vfrac_cermet = v_cermet / self.cell_vol", "def angular1(brdf_settings):\n # const\n scaleconst = 2*np.pi/366\n\n locals().update(brdf_settings)\n\n def scale(x, a=5, b=10, xmin=-1, xmax=1):\n \"\"\"\n rescale the sin\n a new min\n b = new max\n xmin = min of x\n xmax = max of x\n \"\"\"\n return (b - a)*(x - xmin)/(xmax - xmin) + a\n\n t = np.linspace(0, 2*np.pi, 366)\n\n\n noise = np.random.normal(0, 2*np.pi/100.0, size=366)\n\n szaMAX = 60\n szaMIN = 10\n sza_off = 0.5*np.pi # in pi\n\n sza_t = np.sin(noise + t + sza_off)\n SZA = scale(sza_t, a=szaMIN, b=szaMAX)\n\n\n # noisy it a bit?\n\n \"\"\"\n vza cycle\n \"\"\"\n vzaMAX = 45\n vzaMIN = 0\n vza_cycle = 6 # in days\n\n vza_t = np.sin(noise + t/(vza_cycle/366.0))\n VZA = scale(vza_t, a=vzaMIN, b=vzaMAX)\n\n \"\"\"\n raa cycle\n \"\"\"\n raaMAX = 360\n raaMIN = 0\n raa_cycle = 32 # in days\n\n raa_t = np.sin(t/(raa_cycle/366.0))\n RAA = scale(noise + vza_t, a=raaMAX, b=raaMIN)\n\n\n \"\"\"\n only need to return kernels really\n \"\"\"\n kerns = Kernels(VZA, SZA, RAA,\n LiType='Sparse', doIntegrals=False,\n normalise=True, RecipFlag=True, RossHS=False, MODISSPARSE=True,\n RossType='Thick',nbar=0.0)\n return kerns, VZA, SZA, RAA", "def __rho2v(self, vm, beta, rhoc, w, rho):\n if rho < 0:\n return float(vm)\n elif rho <= rhoc:\n return float(vm - vm * rho / beta)\n else:\n rhom = rhoc - (vm * rhoc - vm * (rhoc ** 2) / beta) / w\n # print('rho {0}; rhoc {1}'.format(rho, rhoc))\n return float(w * (rho - rhom) / rho)", "def main():\n \n # Particle in SHO - c.f. Mocz & Succi (2015) Fig. 2\n # parameters\n n = 100 # number of particles\n dt = 0.02 # timestep\n nt = 100 # number of timesteps\n nt_setup = 400 # number of timesteps to set up simulation\n n_out = 25 # plot solution every nout steps\n b = 4 # velocity damping for acquiring initial condition\n m = 1/n # mass of SPH particle ( m * n = 1 normalizes |wavefunction|^2 to 1)\n h = 40/n # smoothing length\n t = 0. 
# time\n\n # plot potential\n xx = np.linspace(-4.0, 4.0, num=400)\n xx = np.reshape(xx,(xx.size,1))\n fig = plt.plot(xx, 0.5*xx**2, linewidth=5, color=[0.7, 0.7, 0.9])\n \n # initialize\n x = np.linspace(-3.0, 3.0, num=n)\n x = np.reshape(x,(n,1))\n u = np.zeros((n,1))\n \n rho = density( x, m, h )\n P = pressure( x, rho, m, h )\n a = acceleration( x, u, m, rho, P, b, h )\n\n # get v at t=-0.5*dt for the leap frog integrator using Euler's method\n u_mhalf = u - 0.5 * dt * a\n\n # main loop (time evolution)\n for i in np.arange(-nt_setup, nt): # negative time (t<0, i<0) is used to set up initial conditions\n\n # leap frog\n u_phalf = u_mhalf + a*dt\n x = x + u_phalf*dt\n u = 0.5*(u_mhalf+u_phalf)\n u_mhalf = u_phalf\n if (i >= 0):\n t = t + dt\n print(\"%.2f\" % t)\n \n if (i == -1 ): # switch off damping before t=0\n u = np.zeros((n,1)) + 1.0\n u_mhalf = u\n b = 0 # switch off damping at time t=0\n \n # update densities, pressures, accelerations\n rho = density( x, m, h )\n P = pressure( x, rho, m, h )\n a = acceleration( x, u, m, rho, P, b, h)\n \n # plot solution every n_out steps\n if( (i >= 0) and (i % n_out) == 0 ):\n xx = np.linspace(-4.0, 4.0, num=400)\n xx = np.reshape(xx,(xx.size,1))\n rr = probeDensity(x, m, h, xx)\n rr_exact = 1./np.sqrt(np.pi) * np.exp(-(xx-np.sin(t))**2/2.)**2\n fig = plt.plot(xx, rr_exact, linewidth=2, color=[.6, .6, .6])\n fig = plt.plot(xx, rr, linewidth=2, color=[1.*i/nt, 0, 1.-1.*i/nt], label='$t='+\"%.2f\" % t +'$')\n # plot the t<0 damping process for fun\n if( i==-nt_setup or i==-nt_setup*3/4 or i==-nt_setup/2 ):\n xx = np.linspace(-4.0, 4.0, num=400)\n xx = np.reshape(xx,(xx.size,1))\n rr = probeDensity(x, m, h, xx)\n fig = plt.plot(xx, rr, linewidth=1, color=[0.9, 0.9, 0.9])\n \n plt.legend()\n plt.xlabel('$x$')\n plt.ylabel('$|\\psi|^2$')\n plt.axis([-2, 4, 0, 0.8])\n plt.savefig('solution.pdf', aspect = 'normal', bbox_inches='tight', pad_inches = 0)\n plt.close()", "def _vmomentsurfaceIntegrand(vR,vT,R,df,logSigmaR,logsigmaR2,sigmaR1,gamma,\n n,m):\n E,L= _vRpvTpRToEL(vR,vT,R,df._beta,sigmaR1,gamma,df._dftype)\n return vR**n*vT**m*df.eval(E,L,logSigmaR,logsigmaR2)*2.*nu.pi/df._gamma #correct", "def particle_velocityV(V,F,dt,Rv,sigma,epsilon,D,N): \n V += dt/2*(particle_forceV(Rv[-1], N, sigma, epsilon, D) + particle_forceV(Rv[-2], N, sigma, epsilon, D))\n return V", "def get_joint_space_volume(self) -> float:\n vol = 1.0\n for i in range(self.d):\n vol = vol * (self.lim_up[i] - self.lim_lo[i])\n return vol", "def acc(x: float, v: float, t: float) -> float:\n return -k*v - np.sin(x) + c*np.cos(omega*t)", "def getPartitionFunction(self, Tlist):\n\t\tQ = np.ones((len(Tlist)), np.float64) / self.symmetry\n\t\t# Active K-rotor\n\t\trotors = [mode for mode in self.modes if isinstance(mode, RigidRotor)]\n\t\tif len(rotors) == 0:\n\t\t\tTrot = constants.h * constants.c * 100.0 * 1.0 / constants.kB\n\t\t\tQ0 = [math.sqrt(T / Trot) for T in Tlist]\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\t# Other modes\n\t\tfor mode in self.modes:\n\t\t\tQ0 = mode.getPartitionFunction(Tlist)\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\treturn Q", "def rk8(accel,m,r,h,v): \n k1v = accel(m,r)\n k1r = v\n k2v = accel(m,r + 0.25*k1r*h)\n k2r = v + (0.25*k1v)*h\n k3v = accel(m,r + (5/72.*k1r + 1/72.*k2r)*h)\n k3r = v + (5/72.*k1v + 1/72.*k2v)*h\n k4v = accel(m,r + (1/32.*k1r +3/32.*k3r)*h)\n k4r = v + (1/32.*k1v +3/32.*k3v)*h\n k5v = accel(m,r + (106/125.*k1r- 408/125.*k3r + 352/125.*k4r)*h)\n k5r = v + (106/125.*k1v- 408/125.*k3v + 
352/125.*k4v)*h\n k6v = accel(m,r + (1/48.*k1r+ 8/33.*k4r - 125/528.*k5r)*h)\n k6r = v + (1/48.*k1v+ 8/33.*k4v - 125/528.*k5v)*h\n k7v = accel(m,r + (-13893*k1r+ 39936*k4r -64125*k5r+ 60720*k6r)*h/26411.)\n k7r = v +(-13893*k1v+ 39936*k4v -64125*k5v+ 60720*k6v)*h/26411.\n k8v = accel(m,r + (37/392.*k1r+ 1625/9408.*k5r -2/15.*k6r+ 61/6720*k7r)*h)\n k8r = v + (37/392.*k1v+ 1625/9408.*k5v -2/15.*k6v+ 61/6720*k7v)*h\n k9v = accel(m,r +(17176/25515.*k1r - 47104/25515.*k4r + 1325/504.*k5r - 41792/25515.*k6r + 20237/145800.*k7r + 4312/6075.*k8r)*h)\n k9r = v + (17176/25515.*k1v - 47104/25515.*k4v + 1325/504.*k5v - 41792/25515.*k6v + 20237/145800.*k7v + 4312/6075.*k8v)*h\n k10v = accel(m,r + ( -23834/180075.*k1r - 77824/1980825.*k4r- 636635/633864.*k5r + 254048/300125.*k6r - 183/7000.*k7r + 8/11.*k8r - 324/3773.*k9r)*h)\n k10r = v + ( -23834/180075.*k1v - 77824/1980825.*k4v- 636635/633864.*k5v + 254048/300125.*k6v - 183/7000.*k7v + 8/11.*k8v - 324/3773.*k9v)*h\n k11v= accel(m,r + (12733/7600.*k1r - 20032/5225.*k4r + 456485/80256.*k5r - 42599/7125.*k6r + 339227/912000.*k7r - 1029/4108.*k8r + 1701/1408.*k9r + 5145/2432.*k10r)*h)\n k11r = v + (12733/7600.*k1v - 20032/5225.*k4v + 456485/80256.*k5v - 42599/7125.*k6v + 339227/912000.*k7v - 1029/4108.*k8v + 1701/1408.*k9v + 5145/2432.*k10v)*h\n k12v = accel(m,r + h*(-27061/204120.*k1r + 40448/280665.*k4r -1353775/1197504.*k5r + 17662/25515.*k6r - 71687/1166400.*k7r + 98/225.*k8r + 1/16.*k9r + 3773/11664.*k10r))\n k12r = v + h*(-27061/204120.*k1v + 40448/280665.*k4v -1353775/1197504.*k5v + 17662/25515.*k6v - 71687/1166400.*k7v + 98/225.*k8v + 1/16.*k9v + 3773/11664.*k10v)\n k13v = accel(m,r + h*(11203/8680.*k1r - 38144/11935.*k4r + 2354425/458304.*k5r - 84046/16275.*k6r + 673309/1636800.*k7r + 4704/8525.*k8r + 9477/10912.*k9r - 1029/992.*k10r + 19/341.*k12r))\n k13r = v + h*(11203/8680.*k1v - 38144/11935.*k4v + 2354425/458304.*k5v - 84046/16275.*k6v + 673309/1636800.*k7v + 4704/8525.*k8v + 9477/10912.*k9v - 1029/992.*k10v + 19/341.*k12v)\n\n\n new_v8 = v + h*(13/288.*k1v +32/125.*k6v + 31213/144000.*k7v + 2401/12375.*k8v + 1701/14080.*k9v + 2401/19200.*k10v + 19/450.*k11v) \n new_r8 = r + h*(13/288.*k1r +32/125.*k6r + 31213/144000.*k7r + 2401/12375.*k8r + 1701/14080.*k9r + 2401/19200.*k10r + 19/450.*k11r) \n \n return new_v8,new_r8", "def KS_Periodic(x, tmax, ntime, u):\n N = x.size\n v = np.fft.fft(u)\n\n # Precompute various ETDRK4 scalar quantities:\n h = 0.025\n k = (2 * np.pi / (x[-1] - 2 * x[0] + x[1])) * np.fft.fftfreq(N, d=1 / N)\n L = k**2 - k**4\n E = np.exp(h * L)\n E2 = np.exp(h * L / 2)\n M = 64\n r = np.exp(1j * np.pi * (np.arange(1, M + 1) - .5) / M)\n LR = h * np.tile(np.reshape(L, (N, 1)), (1, M)) + np.tile(r, (N, 1))\n Q = h * np.real(np.mean((np.exp(LR / 2) - 1) / LR, axis=1))\n f1 = h * np.real(np.mean(\n (-4 - LR + np.exp(LR) * (4 - 3 * LR + LR**2)) / LR**3, axis=1))\n f2 = h * np.real(np.mean(\n (2 + LR + np.exp(LR) * (-2 + LR)) / LR**3, axis=1))\n f3 = h * np.real(np.mean(\n (-4 - 3 * LR - LR**2 + np.exp(LR) * (4 - LR)) / LR**3, axis=1))\n\n # Main time-stepping loop:\n uu = u\n nmax = int(np.round(tmax / h))\n nplt = int(np.floor((tmax / (ntime - 1)) / h))\n g = -0.5j * k\n for n in range(nmax):\n Nv = g * np.fft.fft(np.real(np.fft.ifft(v))**2)\n a = E2 * v + Q * Nv\n Na = g * np.fft.fft(np.real(np.fft.ifft(a))**2)\n b = E2 * v + Q * Na\n Nb = g * np.fft.fft(np.real(np.fft.ifft(b))**2)\n c = E2 * a + Q * (2 * Nb - Nv)\n Nc = g * np.fft.fft(np.real(np.fft.ifft(c))**2)\n v = E * v + Nv * f1 + 2 * (Na + Nb) * f2 + Nc * f3\n if 
np.mod(n + 1, nplt) == 0:\n u = np.real(np.fft.ifft(v))\n uu = np.vstack((uu, u))\n\n return uu", "def rk4_perp(xy, v, NL, KL, KGdivKL, Mm, NP, nn, h):\n dx1 = h * v\n dv1 = h * fspring_perp(xy, NL, KL, KGdivKL, Mm, NP, nn)\n dx2 = h * (v + dv1 / 2.)\n dv2 = h * fspring_perp(xy + dx1 / 2., NL, KL, KGdivKL, Mm, NP, nn)\n dx3 = h * (v + dv2 / 2.)\n dv3 = h * fspring_perp(xy + dx2 / 2., NL, KL, KGdivKL, Mm, NP, nn)\n dx4 = h * (v + dv3)\n dv4 = h * fspring_perp(xy + dx3, NL, KL, KGdivKL, Mm, NP, nn)\n xout = xy + (dx1 + 2. * dx2 + 2. * dx3 + dx4) / 6.\n vout = v + (dv1 + 2. * dv2 + 2. * dv3 + dv4) / 6.\n\n return dx1, dv1, dx2, dv2, dx3, dv3, dx4, dv4, xout, vout", "def Mach(h,Vc,p):\n return np.sqrt(2/(gamma-1)*((1+p0/p*((1+(gamma-1)/(2*gamma)*rho0/p0*Vc**2)**(gamma/(gamma-1))-1))**((gamma-1)/gamma)-1))", "def get_partition_rate(self):\n\t\treturn float(self.data_split)", "def fAVM(RHOB,Dw,Ds,Df,Dc1,PHIc1,Ck,Dk,PHIk,RSK):\n#\n# 5.1.1 Initialise Outputs & Check for missing values in inputs:\n# --------------------------------------------------------------\n\tPHIt=MissingValue\n\tPHIe=MissingValue\n\tCBW=MissingValue\n\tBVW=MissingValue\n\tHCPV=MissingValue\n\tVf=MissingValue\n\tVs=MissingValue\n\tSwt=MissingValue\n\tSwe=MissingValue\n\tVc1=MissingValue\n\tVc2=MissingValue\n\tVc3=MissingValue\n\tVk=MissingValue\n\tToc=MissingValue\n\tQc=MissingValue\n\tGDen=MissingValue\n\tif MissingValue in (RHOB,Dw,Ds,Df,Dc1,PHIc1,Ck,Dk,PHIk,RSK):\n\t\treturn PHIt,PHIe,CBW,BVW,HCPV,Vf,Vs,Swt,Swe,Vc1,Vc2,Vc3,Vk,Toc,Qc,GDen\n#\n# 5.1.2 Initialise parameters:\n# ----------------------------\n\tNIter=0\n\tNIterMax=100\n\tErrIter=10000\n\tTolErrIter=0.0001\n\tIterEnd=0\n\tVk=0.000 # Initially assumme no kerogen\n\tDh=Df\n#\n#\t5.1.3 Start interative loop:\n#\t-----------------------------\n\twhile IterEnd==0:\n#\n# 5.5.3.1 Organic and Inorganic Component Density Values:\n# -------------------------------------------------------\n\t\tDBI=(1-PHIc1)*Dc1+(PHIc1*Dw) # Bulk Density of Inorganic Component\n\t\tDBO=(1-PHIk)*Dk+(PHIk*Dh)# Bulk Density of Organic Component\n#\n# 5.1.3.2 Compute Volume of Organic and Inorganic Component:\n# ----------------------------------------------------------\n\t\tVOR=(DBI-RHOB)/(DBI-DBO)\n\t\tVOR=ImposeLimits(VOR,0,1)\n\t\tVIN=(1-VOR)\n#\n# 5.1.3.3 Compute Volumetrics, Total & Effective Porosity and Total & Effective Water Saturation:\n# ---------------------------------------\t-------------------------------------------------------\n\t\tVc1=VIN*(1-PHIc1)\n\t\tVc2=0.000\n\t\tVc3=0.000\n\t\tVk=VOR*(1-PHIk)\n\t\tPHIt=VIN*PHIc1+VOR*PHIk\n\t\tPHIe=VOR*PHIk\n\t\tSwt=1-((VOR*PHIk)/PHIt)\n\t\tSwt=ImposeLimits(Swt,0,1)\n\t\tSwe=0.000\n\t\tSxot=Swt\n\t\tSxoe=Swe\n#\n# 5.1.3.4 Compute Bulk Volume of Water, Hydrocarbon Pore Volume and Pore Space Fluid Properties:\n# ---------------------------------------\t------------------------------------------------------\n\t\tBVW=PHIe*Swe\n\t\tHCPV=PHIe*(1-Swe)\n\t\tVs=RSK*Vk # Estimate volume of adsorbed (sorbed) hydrocarbon\n\t\tVs=ImposeLimits(Vs,0,HCPV)\n\t\tVf=(HCPV-Vs)\n\t\tVf=ImposeLimits(Vf,0,(HCPV-Vs))\n#\n# 5.1.3.5 Recompute hydrocarbon properties in the pore space:\n# -----------------------------------------------------------\n\t\tSum=Vs+Vf\n\t\tif(Sum<=0.000):\n\t\t\tDh=Df\n\t\telse:\n\t\t\tDh=(Ds*Vs+Df*Vf)/(Vs+Vf)\n#\n# 5.1.4 Test for interative computations:\n# ---------------------------------------\n\t\tNIter=NIter+1\n\t\tif(NIter>=NIterMax):\n\t\t\tIterEnd=1\n\t\telse:\t\t\t\n\t\t\tif(NIter<=2):\n\t\t\t\tResultOld=[1,1,1,1,1,1,1,1,1] # 
Initial Setting\n\t\t\t\tResultNew=[Vc1,Vc2,Vc3,Vk,Vs,Vf,PHIe,Swt,Swe] # Current Results\n\t\t\t\tErrIter=ComputeMatrixDifference(ResultOld,ResultNew)\n\t\t\t\tResultOld=ResultNew\n\t\t\telse:\n\t\t\t\tResultNew=[Vc1,Vc2,Vc3,Vk,Vs,Vf,PHIe,Swt,Swe] # Current Results\n\t\t\t\tErrIter=ComputeMatrixDifference(ResultOld,ResultNew)\n\t\t\t\tResultOld=ResultNew\n\t\t\t\tif(ErrIter<=TolErrIter):\n\t\t\t\t\tIterEnd=1\n#\n# 5.1.6 Preoutput computations:\n# ------------------------------\n\tQc=MissingValue\n\tDc2=0.00\n\tDc3=0.00\n\tCBW=PHIt-PHIe # The assumption is that all microporosity can be considered to be clay bound water.\n\tToc=fToc_Wtf(Vc1,Vc2,Vc3,Vk,0,Ck,Dc1,Dc2,Dc3,Dk,Dw) # TOC-wt fraction. Note: Vrw=0 in fToc_Wtf(Vc1,Vc2,Vc3,Vk,Vrw,Ck,Dc1,Dc2,Dc3,Dk,Dw)\n\tGDen=fOrmGDen(Vc1,Vc2,Vc3,Vk,0,Dc1,Dc2,Dc3,Dk,Dw) # Grain Density. Note: Vrw=0 in fOrmGDen(Vc1,Vc2,Vc3,Vk,Vrw,Dc1,Dc2,Dc3,Dk,Dw)\n#\n# 5.5.7 Output Results:\n# \t-------------------\n\treturn PHIt,PHIe,CBW,BVW,HCPV,Vf,Vs,Swt,Swe,Vc1,Vc2,Vc3,Vk,Toc,Qc,GDen", "def Partitioner(q,InvV,Posterior,m_points):\n \n m = InvV.n #get the number of maps being used \n Q = np.zeros([m,m_points.num]) #initialise the partition functions\n \n for j in range(m):\n #backmap the points from the posterior to the intermediate\n backmap = m_points.map(InvV,j)\n #determine the current mixture using a change of variables\n det = InvV.L[j,:,:].diagonal().prod()**2\n Q[j,:] = q[j] * multivariate_normal.pdf(backmap.all,mean=np.zeros(m_points.d),cov=np.eye(m_points.d)) * det\n \n #now we have the total mixture\n g_est = np.sum(Q,axis=0)\n\n for j in range(m):\n #the partitioner can be found from these\n Q[j,:] /= g_est\n #apply the partitioner to the posterior evaluations to get the partitioned components\n \n return Q", "def Partitioning(self, *args):\n return _hypre.HypreParVector_Partitioning(self, *args)", "def rfpart(x):\n return 1 - fpart(x)", "def Rfun(U,V,Q,Phi,Phibar, taudrag):\n \n Qclone=Q.copy()\n Qclone[Q<0]=0\n\n Ru=np.divide(np.multiply(-U,Qclone),Phi+Phibar)\n Rv=np.divide(np.multiply(-V,Qclone),Phi+Phibar)\n \n #reset to 0 if losing mass\n Ru[Q<0]=0\n Rv[Q<0]=0\n \n #if taudrag is infinity, only have the R componen \n if taudrag!=-1:\n F=Ru-(U/taudrag)\n G=Rv-(V/taudrag)\n \n else:\n F=Ru\n G=Rv\n \n return F, G", "def q_div(self, PFC, MHD, q):\n psi = PFC.psimin\n xyz = PFC.centers\n\n R_div,Z_div,phi_div = tools.xyz2cyl(xyz[:,0],xyz[:,1],xyz[:,2])\n\n R_omp = self.map_R_psi(psi,PFC)\n Z_omp = np.zeros(R_omp.shape)\n # Dot product between surface normal and B field\n #self.HFincidentAngle(PFC, MHD)\n # Calculate Magnitude of B at Divertor\n Bp_div = PFC.ep.BpFunc.ev(R_div,Z_div)\n Bt_div = PFC.ep.BtFunc.ev(R_div,Z_div)\n B_div = np.sqrt(Bp_div**2 + Bt_div**2)\n # Evaluate B at outboard midplane\n Bp_omp = PFC.ep.BpFunc.ev(R_omp,Z_omp)\n Bt_omp = PFC.ep.BtFunc.ev(R_omp,Z_omp)\n B_omp = np.sqrt(Bp_omp**2 + Bt_omp**2)\n\n# Bt_omp = MHD.ep.BtFunc.ev(R_omp,Z_omp)\n# BR_omp = MHD.ep.BRFunc.ev(R_omp,Z_omp)\n# BZ_omp = MHD.ep.BZFunc.ev(R_omp,Z_omp)\n# B_omp = np.sqrt(Bt_omp**2 + BR_omp**2 + BZ_omp**2)\n#\n# Bt_div = MHD.ep.BtFunc.ev(R_div,Z_div)\n# BR_div = MHD.ep.BRFunc.ev(R_div,Z_div)\n# BZ_div = MHD.ep.BZFunc.ev(R_div,Z_div)\n# B_div = np.sqrt(Bt_div**2 + BR_div**2 + BZ_div**2)\n\n\n #For Debugging, plot Bfield Ratio\n #import matplotlib.pyplot as plt\n #testB_div = B_div.reshape(self.grid['Nphi'],self.grid['Nswall']).T\n #testB_omp = B_omp.reshape(self.grid['Nphi'],self.grid['Nswall']).T\n #B_ratio = testB_div/testB_omp\n #CS = 
plt.contourf(self.grid['phi'], self.grid['Swall'],B_ratio,levels=30,cmap=plt.cm.cool)\n #plt.colorbar(CS, label=r'$B Ratio$')\n #plt.show()\n #Divertor heat flux\n q_div = np.zeros((len(xyz)))\n use = np.where(PFC.shadowed_mask == 0)[0]\n\n #Matt's method\n# q_div[use] = q[use] * B_div[use]/B_omp * PFC.bdotn[use]\n #Menard's Method\n q_div[use] = q[use] * B_div[use] * PFC.bdotn[use]\n\n #for i in range(len(q_div)):\n #\tif q_div[i] > 8.0: q_div[i] = 0.0\n #Plot q|| and qdiv\n #import matplotlib.pyplot as plt\n #plt.scatter(self.grid['Swall'][:,0], q_div[0:self.grid['Nswall']], label='qdiv')\n #plt.scatter(self.grid['Swall'][:,0], q[0:self.grid['Nswall']], label='q||')\n #plt.legend()\n #plt.show()\n return np.abs(q_div)", "def get_each_supply_air_volume(\n hc_period: np.ndarray,\n vav_system: bool, l_d_h: np.ndarray, l_d_cs: np.ndarray,\n theta_hs_out_h: np.ndarray, theta_hs_out_c: np.ndarray, theta_sur: np.ndarray,\n psi: float, l_duct: np.ndarray, theta_ac: np.ndarray,\n v_vent: np.ndarray, v_d_supply: np.ndarray, operation: np.ndarray) -> np.ndarray:\n\n c = get_specific_heat()\n rho = get_air_density()\n\n l_duct = np.array(l_duct).reshape(1, 5).T\n\n v_vent = v_vent.reshape(1, 5).T\n\n if vav_system:\n\n # np.where evaluates both branches of the condition, so a division-by-zero warning would occur.\n # To avoid this, set dummy values beforehand in the cases where division by zero would happen.\n theta_hs_out_h = np.where(theta_hs_out_h > theta_ac, theta_hs_out_h, theta_hs_out_h + 1)\n theta_hs_out_c = np.where(theta_ac > theta_hs_out_c, theta_hs_out_c, theta_hs_out_c - 1)\n\n v_h = np.where(theta_hs_out_h > theta_ac,\n np.clip(\n (l_d_h * 10 ** 6 + (theta_hs_out_h - theta_sur) * psi * l_duct * 3600)\n / (c * rho * (theta_hs_out_h - theta_ac)), v_vent, v_d_supply),\n v_vent)\n\n v_c = np.where(theta_ac > theta_hs_out_c,\n np.clip(\n (l_d_cs * 10 ** 6 + (theta_sur - theta_hs_out_c) * psi * l_duct * 3600)\n / (c * rho * (theta_ac - theta_hs_out_c)), v_vent, v_d_supply),\n v_vent)\n\n else:\n\n v_h = v_d_supply\n v_c = v_d_supply\n\n v_supply_h = np.where(operation == 'h', v_h, v_vent)\n v_supply_c = np.where(operation == 'c', v_c, v_vent)\n\n return v_supply_h * (hc_period == 'h') + v_supply_c * (hc_period == 'c') + v_vent * (hc_period == 'm')", "def __call__(self, pv, sp, dt, freeze_ff=False): \n error = sp - pv\n if not freeze_ff:\n #If freeze is True, use the value from last time. 
This enables\n #callers to decide that the PID controller can not increase\n #above a certain value\n self.integral += error * dt\n if self._i_max is not None:\n self.integral = max(-self._i_max, min(self.integral, self._i_max))\n derivative = (error - self.previous_error) / dt\n self.previous_error = error\n return self.kp * error + self.ki * self.integral + self.kd * derivative", "def calculate_ft(self):\n \n # Create a function which is able to evaluate B**2\n ffunc = scipy.interpolate.interp1d(self.psigrid, self.e.getF()[self.tind])\n def b2_func(R, Z, psi):\n bt = ffunc(psi)/R\n br = -self.psifunc.ev(R, Z, dy=1)/R\n bz = self.psifunc.ev(R, Z, dx=1)/R\n \n return bt**2 + br**2 + bz**2\n \n\n def b_bmax2(R,Z,psi):\n b2 = b2_func(R,Z,psi)\n return b2 / np.max(b2)\n \n def b_bmax(R,Z,psi):\n return np.sqrt(b_bmax2(R,Z,psi))\n \n # Evaluate the flux-surface averaged h^2 and h, as required\n fsa_h2 = self.fs_average(b_bmax2)\n fsa_h = self.fs_average(b_bmax)\n \n # This is the function which gets flux-surface averaged in equation (7)\n def ftl_func(R,Z,psi):\n h = b_bmax(R,Z,psi)\n h2 = b_bmax2(R,Z,psi)\n \n return (1 - (np.sqrt(1 - h) * (1 + 0.5 * h)))/h2\n \n \n # Equation 6, 7 in Lin-Liu\n fs_ftu = 1 - fsa_h2 / fsa_h**2 * (1 - np.sqrt(1 - fsa_h) * (1 + 0.5 * fsa_h))\n fs_ftl = 1 - fsa_h2 * self.fs_average(ftl_func)\n # Equation 18, 19 \n om = 0.75\n self.fs_ft = om*fs_ftu + (1-om)*fs_ftl", "def velocity_field(xt,yt,x0,y0,velf,dia,tsr,solidity):\n rad = dia/2.\n rot = tsr*velf/rad\n\n # Calculating EMG distribution parameters\n loc,spr,skw,scl = vorticity(tsr,solidity)\n \n # Translating the turbine position\n x0t = x0 - xt\n y0t = y0 - yt\n \n # Integration of the vorticity profile using Fortran code (vorticity.f90; _vortrun.so)\n vel_vs = dblquad(_vortmodel.integrand,0.,35.*dia,lambda x: -4.*dia,lambda x: 4.*dia, args=(x0t,y0t,dia,loc[0],loc[1],loc[2],spr[0],spr[1],skw[0],skw[1],scl[0],scl[1],scl[2]))\n \n # Calculating velocity deficit\n vel = (vel_vs[0]*(rot))/(2.*pi)\n vel = (vel + velf)/velf # normalization of velocity\n \n return vel", "def vert_integrate(self, u, d='up', Q='self'):\n s = \"::: vertically integrating function :::\"\n print_text(s, cls=self)\n\n if type(Q) != FunctionSpace:\n Q = self.Q\n ff = self.ff\n phi = TestFunction(Q)\n v = TrialFunction(Q)\n bcs = []\n # integral is zero on bed (ff = 3,5) \n if d == 'up':\n bcs.append(DirichletBC(Q, 0.0, ff, self.GAMMA_B_GND)) # grounded\n bcs.append(DirichletBC(Q, 0.0, ff, self.GAMMA_B_FLT)) # shelves\n a = v.dx(2) * phi * dx\n # integral is zero on surface (ff = 2,6) \n elif d == 'down':\n bcs.append(DirichletBC(Q, 0.0, ff, self.GAMMA_S_GND)) # grounded\n bcs.append(DirichletBC(Q, 0.0, ff, self.GAMMA_S_FLT)) # shelves\n bcs.append(DirichletBC(Q, 0.0, ff, self.GAMMA_U_GND)) # grounded\n bcs.append(DirichletBC(Q, 0.0, ff, self.GAMMA_U_FLT)) # shelves\n a = -v.dx(2) * phi * dx\n L = u * phi * dx\n name = 'value integrated %s' % d \n v = Function(Q, name=name)\n solve(a == L, v, bcs, annotate=False)\n print_min_max(u, 'vertically integrated function', cls=self)\n return v", "def f_van_der_pol(x, y, rpar):\n eps = rpar[0]\n return [y[1], ((1-y[0]**2)*y[1]-y[0])/eps]", "def _compute_psi(x, y, ll):\n if -1 <= x < 1:\n # Elliptic motion\n # Use arc cosine to avoid numerical errors\n return np.arccos(x * y + ll * (1 - x ** 2))\n elif x > 1:\n # Hyperbolic motion\n # The hyperbolic sine is bijective\n return np.arcsinh((y - x * ll) * np.sqrt(x ** 2 - 1))\n else:\n # Parabolic motion\n return 0.0", "def algorithm_2_21(p, t, x):\n\n t 
= np.array(t, dtype=np.float64)\n b = 1\n mu = index(x, t)\n\n for k in range(1, p + 1):\n t1 = t[mu - k + 1:mu + 1]\n t2 = t[mu + 1:mu + k + 1]\n omega = np.divide(\n (x - t1), (t2 - t1), out=np.zeros_like(t1), where=((t2 - t1) != 0))\n b = np.append((1 - omega) * b, 0) + np.insert((omega * b), 0, 0)\n\n return b", "def v_atm_n(f, t, alpha, beta, rho, volvol):\r\n f_av = f\r\n A = - beta * (2 - beta) * alpha**2 / (24 * f_av**(2 - 2 * beta))\r\n B = rho * alpha * volvol * beta / (4 * f_av**(1 - beta))\r\n C = (2 - 3 * rho**2) * volvol**2 / 24\r\n v_atm_n = alpha * f**beta * (1 + (A + B + C) * t)\r\n return v_atm_n", "def angular_function(j, theta, phi):\n l = sh_degree(j)\n m = sh_order(j)\n # We follow here reverse convention about theta and phi w.r.t scipy.\n sh = sph_harm(np.abs(m), l, phi, theta)\n if m < 0:\n return np.sqrt(2) * sh.real\n if m == 0:\n return sh.real\n if m > 0:\n return np.sqrt(2) * sh.imag", "def mt(P_1,V0_1,meanF_1,rho): \n psi = np.arctan2(V0_1[2],-V0_1[0])\n \n # Find swept areas\n idx_zmax = np.argmax(P_1[:,-1,2])\n idx_ymax = np.argmax(P_1[:,-1,1])\n idx_zmin = np.argmin(P_1[:,-1,2])\n \n Ad = np.linalg.norm(P_1[idx_zmax,-1,2]-P_1[idx_zmin,-1,2])*P_1[idx_ymax,-1,1]\n print(P_1[idx_zmax,-1,2])\n V0 = np.linalg.norm(V0_1)\n \n Vi_1new = np.zeros_like(V0_1,dtype=float)\n\n while True:\n Vi_1 = Vi_1new\n \n Vi_1new[0] = meanF_1[0] / (2 * rho * Ad * np.sqrt( (V0*np.cos(psi)+Vi_1[0])**2 + (-V0*np.sin(psi)+Vi_1[2])**2 )) \n Vi_1new[2] = meanF_1[2] / (2 * rho * Ad * np.sqrt( (V0*np.cos(psi)+Vi_1[0])**2 + (-V0*np.sin(psi)+Vi_1[2])**2 )) \n \n if np.linalg.norm(Vi_1-Vi_1new) < 0.001:\n break\n\n return -Vi_1", "def phase_Venus_2(alpha):\n phase = 10.**(-0.4*( - 2.81914e-00*alpha + 8.39034e-03*alpha**2.))\n #1 Scale Properly\n h1 = phase_Venus_1(163.7) - 0. #Total height desired over range\n h2 = 10.**(-0.4*( - 2.81914e-00*163.7 + 8.39034e-03*163.7**2.)) - 10.**(-0.4*( - 2.81914e-00*179. 
+ 8.39034e-03*179.**2.))\n phase = phase * h1/h2 #Scale so height is proper\n #2 Lateral movement to make two functions line up\n difference = phase_Venus_1(163.7) - h1/h2*(10.**(-0.4*( - 2.81914e-00*163.7 + 8.39034e-03*163.7**2.)))\n phase = phase + difference\n\n # + \n #-(- 2.81914e-00*163.7 + 8.39034e-03*163.7**2.)\n # - 1.\n return phase", "def get_effective_mass():\n\n H_BAR = 6.582119514e-16 # eV*s\n M_0 = 9.10938356e-31 # kg\n N_KPTS = 6 # Number of k-points included in the parabola.\n\n spin_up = Spin(1)\n\n band_structure = Vasprun('vasprun.xml').get_band_structure()\n\n # Locations of CBM and VBM in band_structure.bands\n cbm_band_index = band_structure.get_cbm()['band_index'][spin_up][0]\n cbm_kpoint_index = band_structure.get_cbm()['kpoint_index'][0]\n\n vbm_band_index = band_structure.get_vbm()['band_index'][spin_up][0]\n vbm_kpoint_index = band_structure.get_vbm()['kpoint_index'][0]\n\n k = {'electron': {'left': [], 'right': []},\n 'hole': {'left': [], 'right': []}}\n E = {'electron': {'left': [], 'right': []},\n 'hole': {'left': [], 'right': []}}\n\n e_ref_coords = band_structure.kpoints[cbm_kpoint_index]._ccoords\n h_ref_coords = band_structure.kpoints[vbm_kpoint_index]._ccoords\n\n for n in range(-N_KPTS, 1):\n e_coords = band_structure.kpoints[cbm_kpoint_index + n]._ccoords\n h_coords = band_structure.kpoints[vbm_kpoint_index + n]._ccoords\n\n k['electron']['left'].append(\n ((e_coords[0] - e_ref_coords[0])**2 +\n (e_coords[1] - e_ref_coords[1])**2 +\n (e_coords[2] - e_ref_coords[2])**2)**0.5\n )\n k['hole']['left'].append(\n ((h_coords[0] - h_ref_coords[0])**2 +\n (h_coords[1] - h_ref_coords[1])**2 +\n (h_coords[2] - h_ref_coords[2])**2)**0.5\n )\n\n e_energy = band_structure.bands[\n spin_up][cbm_band_index][cbm_kpoint_index + n]\n h_energy = band_structure.bands[\n spin_up][vbm_band_index][vbm_kpoint_index + n]\n\n E['electron']['left'].append(e_energy)\n E['hole']['left'].append(h_energy)\n\n for n in range(1, 1 + N_KPTS):\n e_coords = band_structure.kpoints[cbm_kpoint_index + n]._ccoords\n h_coords = band_structure.kpoints[vbm_kpoint_index + n]._ccoords\n\n k['electron']['right'].append(\n ((e_coords[0] - e_ref_coords[0])**2 +\n (e_coords[1] - e_ref_coords[1])**2 +\n (e_coords[2] - e_ref_coords[2])**2)**0.5\n )\n k['hole']['right'].append(\n ((h_coords[0] - h_ref_coords[0])**2 +\n (h_coords[1] - h_ref_coords[1])**2 +\n (h_coords[2] - h_ref_coords[2])**2)**0.5\n )\n\n e_energy = band_structure.bands[\n spin_up][cbm_band_index][cbm_kpoint_index + n]\n h_energy = band_structure.bands[\n spin_up][vbm_band_index][vbm_kpoint_index + n]\n\n E['electron']['right'].append(e_energy)\n E['hole']['right'].append(h_energy)\n\n # 2nd order fits\n e_l_fit = np.poly1d(\n np.polyfit(k['electron']['left'], E['electron']['left'], 2))\n e_r_fit = np.poly1d(\n np.polyfit(k['electron']['right'], E['electron']['right'], 2))\n h_l_fit = np.poly1d(\n np.polyfit(k['hole']['left'], E['hole']['left'], 2))\n h_r_fit = np.poly1d(\n np.polyfit(k['hole']['right'], E['hole']['right'], 2))\n\n # Curvatures\n e_l_curvature = e_l_fit.deriv().deriv()[0]\n e_r_curvature = e_r_fit.deriv().deriv()[0]\n h_l_curvature = h_l_fit.deriv().deriv()[0]\n h_r_curvature = h_r_fit.deriv().deriv()[0]\n\n # Unit conversion\n e_m_eff_l = 10 * ((H_BAR ** 2) / e_l_curvature) / M_0\n e_m_eff_r = 10 * ((H_BAR ** 2) / e_r_curvature) / M_0\n h_m_eff_l = -10 * ((H_BAR ** 2) / h_l_curvature) / M_0\n h_m_eff_r = -10 * ((H_BAR ** 2) / h_r_curvature) / M_0\n\n return {'electron': {'left': e_m_eff_l, 'right': e_m_eff_r},\n 'hole': 
{'left': h_m_eff_l, 'right': h_m_eff_r}}", "def calc_hypersphere_volume(r: float, n: int) -> float:\n return (math.pi ** (n / 2) * r ** n) / gamma((n / 2) + 1)", "def volume_oscillator(period1,period2):\n return period1/period2", "def get_angular_velocity(r, T):\n # http://www.hep.fsu.edu/~berg/teach/phy2048/0918.pdf\n # velocity = 2(pi)r/T\n return (2*math.pi*r)/T", "def rfpart(x):\n return 1 - Util.fpart(x)", "def fn_Calc_SearchVolume(az,el):\r\n return az*el/(57.296**2) # steradians\r", "def compute_volume(self) -> float:\n return (\n (1 if self.clockwise else -1)\n * np.sum(\n np.linalg.det(\n np.dstack(\n (\n self.vertices[self._faces[:, 0]],\n self.vertices[self._faces[:, 1]],\n self.vertices[self._faces[:, 2]],\n )\n )\n )\n )\n / 6\n )", "def detect_velocity(image):\n nonlocal prev, v_last\n curr_bgr = cv.warpPerspective(image, M, (160, 120))\n curr = cv.cvtColor(curr_bgr, cv.COLOR_BGR2GRAY)\n\n if prev is None:\n prev = curr\n v_last = 0.0\n return v_last, curr_bgr, np.zeros_like(image)\n\n flow = cv.calcOpticalFlowFarneback(\n prev, # Previous image\n curr, # Current image\n None, # Computed flow image that has the same size oas prev and type CV_32FC2.\n 0.5, # Specifies the image scale (<1) to build pyramids for each image.\n 3, # Number of pyramid layers including the initial image.\n 15, # winsize, averaging windows size.\n 3, # iterations, number of iterations the algorithm does at each pyramid level.\n 5, # standard deviation of the Gaussian that is used to smooth derivative\n 1.5,\n 0)\n\n mag, ang = cv.cartToPolar(flow[..., 0], flow[..., 1])\n\n v = mag * np.sin(ang)\n\n ######################\n ## Histogram for mag\n ar = np.arange(-20.0, 20.0, 0.50, dtype=np.float)\n his = np.histogram(v, bins=ar)\n\n for i, n in enumerate(his[0]):\n bgr = (255, 255, 0)\n if his[1][i] < 0:\n bgr = (0, 255, 255)\n\n #print('[{}] {} - {}'.format(i, n, his[1][i]))\n cv.rectangle( image, #curr_bgr,\n (i*2, HEIGHT),\n (i*2, HEIGHT - int(n / 10)),\n bgr, #(0, 255, 255),\n cv.FILLED)\n\n hsv = np.zeros_like(image)\n hsv[..., 0] = ang * 180 / np.pi / 2\n hsv[..., 1] = 255\n hsv[..., 2] = cv.normalize(np.abs(v), None, 0, 255, cv.NORM_MINMAX)\n hsv_bgr = cv.cvtColor(hsv, cv.COLOR_HSV2BGR)\n ##\n ######################\n\n v_abs = np.absolute(v)\n v = v[v_abs >= np.percentile(v_abs, VELOCITY_CUTOFF_PCT)]\n\n v_max = v_last + MAX_ACC\n v_min = v_last - MAX_ACC\n v = np.clip(v, v_min, v_max)\n if v.size > 0:\n v_avg = v.mean()\n else:\n if v_last > 0:\n v_avg = max(v_last - MAX_ACC, 0)\n elif v_last < 0:\n v_avg = min(v_last + MAX_ACC, 0)\n else:\n v_avg = 0\n\n prev = curr\n v_last = v_avg\n return v_last, curr_bgr, hsv_bgr", "def phase_velocity(self):\n return 1/np.sqrt(self.mu*self.epsilon)", "def get_vcond(lambdam, taum):\n return 2 * lambdam / taum", "def calP(self):\n N = len(self.listOfParticles)\n m = self.listOfParticles[0].m\n vsum = 0\n for particle in self.listOfParticles:\n vsum += particle.V.len()\n A = np.pi*self.R**2\n F = 0.5 * A * (2*self.R) * m * N * vsum**2\n return F", "def rk4(accel,m,r,h,v): \n k1v = accel(m,r) \n k1r = v \n k2v = accel(m,r + h*0.5*k1r) \n k2r = v+k1v*h*0.5 \n k3v = accel(m,r + h*0.5*k2r) \n k3r = v+k2v*h*0.5\n k4v = accel(m,r + h*k3r) \n k4r = v+k3v*h\n new_v = v + h*(k1v + 2*k2v + 2*k3v + k4v)/float(6)\n new_r = r + h*(k1r + 2*k2r + 2*k3r + k4r)/float(6)\n return new_v,new_r", "def frac_free_volume(v_sp, v_void):\n return (-0.3 * v_sp + 1.3 * v_void)/v_sp", "def volume(self):\n vol = ((self.I0 * self.V.omega *\n self._mu_0 / (self._mu_0 + self._mu_ex))\n * 
(1. - np.exp(-(self.V.tau / self._mu_0) -\n (self.V.tau / self._mu_ex)))\n * self.V.p(self.t_0, self.t_ex, self.p_0, self.p_ex,\n param_dict=self.param_dict))\n\n return (1. - self.bsf) * vol", "def derivative_phi_star(f: jnp.ndarray, rho: float) -> jnp.ndarray:\n return jnp.exp(f / rho)", "def u(self, f, rho=None, acceleration=None):\n if rho is None:\n rho = self.rho(f)\n v = self.j(f) / rho\n # apply correction due to forcing, which effectively averages the pre- and post-collision velocity\n correction = 0.0\n if acceleration is not None:\n if len(acceleration.shape) == 1:\n index = [Ellipsis] + [None]*self.D\n acceleration = acceleration[index]\n correction = acceleration / (2 * rho)\n return v + correction", "def v_from_p_function(self):\r\n\r\n track_c = [] # p classical function,\r\n for i in range(len(self.dt.momentum_t)):\r\n track_c.append(self.dt.momentum_t[i] / self.dt.mass)\r\n\r\n (fig, ax) = plt.subplots()\r\n\r\n ax.plot(\r\n self.dt.momentum_t,\r\n track_c,\r\n linestyle=':',\r\n linewidth=1,\r\n color='b',\r\n label='classic',\r\n )\r\n\r\n # marker=\"+\", markersize = 13,\r\n # ax.plot(self.dt.momentum_t, self.observer.velT, linestyle=\" \",\r\n # color=\"k\",marker=\"+\", markersize = 13, label=\"measurement\")\r\n\r\n ax.plot(\r\n self.dt.momentum_t,\r\n self.dt.vel_t,\r\n linestyle=' ',\r\n color='k',\r\n marker='o',\r\n label='result of measurements',\r\n )\r\n ax.plot(\r\n self.dt.momentum_t,\r\n self.dt.vel_anl,\r\n linestyle='-',\r\n color='red',\r\n linewidth=1,\r\n label='continuum',\r\n )\r\n\r\n # Euler's method == analitical function. We not plot it.\r\n\r\n ax.plot(\r\n self.dt.momentum_t,\r\n self.dt.vn,\r\n linestyle='--',\r\n color='blue',\r\n marker='x',\r\n linewidth=1,\r\n label=\"Euler's method\",\r\n )\r\n\r\n # error\r\n\r\n ax.errorbar(self.dt.momentum_t, self.dt.vel_t, fmt='k ',\r\n yerr=self.dt.vel_t_err)\r\n\r\n xm = -1.0\r\n for i in range(len(self.dt.momentum_t)):\r\n if self.dt.momentum_t[i] > xm:\r\n xm = self.dt.momentum_t[i]\r\n stepx = round(xm / float(len(self.dt.momentum_t)), 1)\r\n xm = round(xm + stepx, 1)\r\n ax.set_xlim([0, xm]) # xm = 0.85\r\n\r\n # signature on the horizontal x-axis\r\n\r\n ax.set_xlabel('p')\r\n\r\n # Create an instance of the class that will be responsible for the location of the labels (base is step on x)\r\n\r\n locatorx = matplotlib.ticker.MultipleLocator(base=stepx) # step on x is base=0.1\r\n\r\n # Set the locator for the main labels\r\n\r\n ax.xaxis.set_major_locator(locatorx)\r\n\r\n # line draw\r\n\r\n line = matplotlib.lines.Line2D([0.0, 9.0], [1.0, 1.0], color='b'\r\n )\r\n ax.add_line(line)\r\n plt.text(0.7, 1.01, u'light speed', horizontalalignment='center'\r\n )\r\n ax.set_ylim([0, 1.1])\r\n\r\n # signature on vertical y axis\r\n\r\n ax.set_ylabel('v')\r\n\r\n # Create an instance of the class that will be responsible for the location of the labels (base is step on y)\r\n\r\n locatory = matplotlib.ticker.MultipleLocator(base=0.1) # step on y is base=0.1\r\n\r\n # Set the locator for the main labels\r\n\r\n ax.yaxis.set_major_locator(locatory)\r\n\r\n ax.grid()\r\n\r\n # show legend\r\n\r\n ax.legend(loc='upper left')\r\n\r\n # show drawing\r\n # pylab.show()\r\n\r\n plt.show()", "def calc_ertelPV(n2, bx, rel_vorticity, g=9.8,f=-1e-4):\n\n # vertical component\n\n qvert = (f+rel_vorticity)*n2\n\n # baroclinic component\n qbc = -bx**2/f\n\n # Ertel PV\n\n ertelPV = qvert + qbc\n\n # If PV is unstable\n fq = ertelPV*f # fq > 0 stable\n\n return ertelPV, qvert, qbc, fq", "def RingPartition(ringsize, z, 
r, beta):\n # divide the ring into segments and initialise the coordinate of the segments\n location = list(range(ringsize))\n assert ringsize<=16 and ringsize>=5, \"Ring size greater than 16 or smaller than 5 is not supported\"\n if ringsize<=7 and ringsize>=5:\n segment1 = location[0:3]\n segment2 = location[2:-1]\n segment2.reverse()\n segment3 = location[-2:]+[location[0]]\n segment3.reverse()\n elif ringsize<=10 and ringsize>=8:\n segment1 = location[0:4]\n segment2 = location[3:-2]\n segment2.reverse()\n segment3 = location[-3:]+[location[0]]\n segment3.reverse()\n elif ringsize<=13 and ringsize>=11:\n segment1 = location[0:5]\n segment2 = location[4:-3]\n segment2.reverse()\n segment3 = location[-4:]+[location[0]]\n segment3.reverse()\n else: #ringsize<=16 and ringsize>=14:\n segment1 = location[0:6]\n segment2 = location[5:-4]\n segment2.reverse()\n segment3 = location[-5:] + [location[0]]\n segment3.reverse()\n segcoord_1_init = SegCoord(segment1, r, beta)\n segcoord_2_init = SegCoord(segment2, r, beta)\n segcoord_3_init = SegCoord(segment3, r, beta)\n Reflection = np.array((-1,1))\n OPsq = np.inner(segcoord_1_init[-1], segcoord_1_init[-1])\n PQsq = np.inner(segcoord_2_init[-1], segcoord_2_init[-1])\n OQsq = np.inner(segcoord_3_init[-1], segcoord_3_init[-1])\n segcoord_1 = [Reflection*item for item in segcoord_1_init]\n segcoord_2 = [x + np.sqrt((OQsq,0)) for x in segcoord_2_init]\n segcoord_3 = [np.array(x) for x in segcoord_3_init]\n # Link segment together\n xp = (OPsq+OQsq-PQsq)/(2*np.sqrt(OQsq))\n yp = np.sqrt(OPsq-np.square(xp))\n phi1, phi2, phi3 = np.arctan2(segcoord_1[-1][1],segcoord_1[-1][0]), np.arctan2(segcoord_2[-1][1], segcoord_2[-1][0]-np.sqrt(OQsq)), np.arctan2(segcoord_3[-1][1], segcoord_3[-1][0])\n phiseg1, phiseg2 = np.arctan2(yp,xp), np.arctan2(yp,xp-np.sqrt(OQsq))\n sigma1, sigma2, sigma3 = np.abs(phi1-phiseg1), np.abs(phiseg2-phi2), np.abs(phi3)\n Rsigma1, Rsigma2, Rsigma3 = RotationMatrix(-sigma1), RotationMatrix(sigma2), RotationMatrix(-sigma3)\n coordinate_1 = [np.array((0,0))]\n seg1_size = len(segcoord_1)\n for i in range(1,seg1_size-1):\n coordinate_1.append(np.matmul(Rsigma1,segcoord_1[i]))\n coordinate_1.append(np.array((xp,yp)))\n #### Check Here ####\n coordinate_2 = []\n seg2_size = len(segcoord_2)\n for i in range(seg2_size-2,0,-1):\n tmp = np.sqrt((OQsq,0))\n coordinate_2.append(tmp + np.matmul(Rsigma2, (segcoord_2[i]-tmp)))\n coordinate_3 = [np.sqrt((OQsq,0))]\n seg3_size = len(segcoord_3)\n for i in range(seg3_size-2,0,-1):\n coordinate_3.append(np.matmul(Rsigma3, segcoord_3[i]))\n coordinate = coordinate_1 + coordinate_2 + coordinate_3\n Rg = np.sum(coordinate,axis=0)\n phig = np.arctan2(Rg[1],Rg[0]) + np.pi/2\n Rphig = RotationMatrix(-phig) \n newcoord = [np.matmul(Rphig, coordinate[i]-Rg).tolist()+[z[i]] for i in range(ringsize)]\n origin = np.mean(newcoord,axis=0)\n finalcoord = np.array(newcoord)-origin\n return finalcoord", "def get_edfdv_sl(x, v):\n\n xm, vm = np.meshgrid(x, v, indexing=\"ij\")\n xm = xm.flatten()\n vm = vm.flatten()\n\n v_pad = _get_padded_grid_(v)\n f_pad = np.zeros((x.size, v.size + 2))\n\n def update_velocity_adv_sl(f, e, dt):\n \"\"\"\n evolution of df/dt = e df/dv according to the Backward Semi-Lagrangian technique popularized by [1]\n\n [1] - Cheng, C. ., & Knorr, G. (1976). The integration of the vlasov equation in configuration space.\n Journal of Computational Physics, 22(3), 330–351. https://doi.org/10.1016/0021-9991(76)90053-X\n\n :param f: distribution function. 
(numpy array of shape (nx, nv))\n :param e: electric field (numpy array of shape (nx,))\n :param dt: timestep (single float value)\n :return:\n \"\"\"\n\n f_pad[:, 1:-1] = f\n f_pad[:, 0] = f[:, -1]\n f_pad[:, -1] = f[:, 0]\n\n e_fit = interpolate.interp1d(x, e, kind=\"cubic\")\n\n em = e_fit(xm)\n\n f_interpolator = interpolate.RectBivariateSpline(x, v_pad, f_pad)\n f_out = f_interpolator(xm, vm - em * dt, grid=False).reshape((x.size, v.size))\n\n return f_out\n\n return update_velocity_adv_sl", "def CalcForce_aeroframe_DEP(V, CoefMatrix, x, rho, g):\r\n\r\n #Compute aero forces\r\n # here x must be of the form (alpha, beta, p, q, r, da, dr, de) (last one punctualy used)\r\n # set non dim for p,q,r\r\n nonDim=np.ones(7)\r\n nonDim[2]=g.b/(2*V)\r\n nonDim[3]=g.c/(2*V)\r\n nonDim[4]=g.b/(2*V)\r\n # F=np.dot(CoefMatrix,x[0:7]) # commented form, modification to account for symmetric drag increase of side slip\r\n F=np.zeros((3))\r\n M=np.zeros((3))\r\n xsym=np.copy(x[0:-1])\r\n xsym[1]=abs(xsym[1]) # make beta always positive since derivatives have already correct sign for drag and lift only\r\n xsym[-3]=abs(xsym[-3]) # make ailerons deflection always positive for drag increase and lift decrease\r\n xsym[-1]=abs(xsym[-1]) # make rudder deflection always positive for drag increase and lift decrease\r\n F[0]=np.dot(CoefMatrix[0],xsym)\r\n F[1]=np.dot(CoefMatrix[1],x[0:-1]) #side force\r\n F[2]=np.dot(CoefMatrix[2],xsym)\r\n M=np.dot(CoefMatrix[3:6,:],x[0:-1])\r\n# print(\"Printing moment coeff\")\r\n# print(M)\r\n\r\n \r\n #No need to project\r\n# alpha=x[0]\r\n# beta=x[1]\r\n# H=np.array([[math.cos(alpha)*math.sin(beta), -math.cos(alpha)*math.sin(beta), -math.sin(alpha)],[math.sin(beta), math.cos(beta), 0],[math.sin(alpha)*math.cos(beta), -math.sin(alpha)*math.sin(beta), math.cos(alpha)]])\r\n if V<=71 :\r\n Fbody=np.array([-F[0]-g.Cd0_fl,F[1],-F[2]-g.CL0_fl]) # add alpha=0 coefficients\r\n Moment=M+np.array([0,x[-1]*g.Cm_de+g.Cm0_fl,0])\r\n else:\r\n Fbody=np.array([-F[0]-g.Cd0,F[1],-F[2]-g.CL0]) # add alpha=0 coefficients\r\n Moment=M+np.array([0,x[-1]*g.Cm_de+g.Cm0,0])\r\n \r\n\r\n Fbody=0.5*V**2.0*rho*g.S*Fbody\r\n Moment=0.5*V**2.0*rho*g.S*g.b*Moment\r\n \r\n return np.append(Fbody, Moment)", "def vib_energy(frequency, temp):\n\n # vibrational temperature\n\n vtemp = plank_const * speed_ol / boltz_const * frequency\n\n vtemp = np.matrix(vtemp)\n\n temp = 1 / np.matrix(temp)\n\n # need to convert to array, because the element wise exponential\n # cannot work with matrix\n intermediate = np.array(vtemp.T * np.transpose(temp).T)\n\n # energy is a np.matrix, need to convert to array\n energy = R * vtemp * np.matrix((0.5 + (np.exp(intermediate) - 1) ** -1))\n energy = np.array(energy)[0]\n\n return energy", "def ivp_Gamma(r,k=2):\n r=np.asarray(r)\n Gamma=np.zeros(r.shape)\n # r<=1\n bLow=r<=1\n rl = r[bLow]\n Gamma[bLow] = np.pi/(k+1)* (1-(1-rl**2)**(1+k))\n # r>1\n Gamma[np.abs(r)>1] = np.pi/(k+1)\n return Gamma", "def width_v_pi_pi(\n model: SingleRhNeutrinoModel, form_factor: VectorFormFactorPiPi, **kwargs\n):\n mx = model.mx\n if mx < 2.0 * MPI:\n return 0.0\n\n u = 0.5 * np.tan(2 * model.theta)\n\n def integrand(s):\n z = s / mx**2\n ff = form_factor.form_factor(q=np.sqrt(s), couplings=(GVUU, GVDD))\n beta = (1.0 - 4 * MPI**2 / s) ** 1.5\n poly = (1 - z) ** 2 * (1 + 2 * z)\n return np.abs(ff) ** 2 * beta * poly\n\n pre = u**2 * GF**2 * mx**3 * (1 - 2 * SW**2) ** 2 / (768 * np.pi**3)\n quad_kwargs = _get_quad_kwargs(**kwargs)\n return pre * integrate.quad(integrand, 4 * MPI**2, mx**2, 
**quad_kwargs)[0]", "def get_heat_loss_coefficient_of_partition() -> float:\n return 1 / 0.46", "def VFI(method) :\n iteration=0 # Iteration Counter\n converged = 0 # Convergence Flag|\n \n#----- Initial Settings \n v_update = zeros(n_grid)\n v_func = empty(n_grid)\n k_next_vec = empty(n_grid)\n run_time = empty(2)\n \n def obj(k_next) :\n \"\"\"\n This function is used in value function iteration.\n It represents the objective function to be maximized for one node (state) of current capitals.\n Resulting value is maximized one corresponding to next period's capital as a maximizer. \n Next period's value is computed by interpolation.\n \n Input : k_next (next period's capital)\n \n Output : value_vec (maximized value resulting from choosing optimal capital in the next period)\n \"\"\" \n \n if method==1 :\n value_vec = -1 * (log(A*k_current**(alpha) - k_next) + beta*linear_interp(k_grid,v_update,k_next))\n elif method==2 :\n value_vec = -1 * (log(A*k_current**(alpha) - k_next) + beta*quad_interp(k_grid,v_update,k_next))\n elif method==3 :\n value_vec = -1 * (log(A*k_current**(alpha) - k_next) + beta*cubic_interp(k_grid,v_update,k_next))\n \n return value_vec\n\n#----- Value function iteration\n start = time.time() # start time\n while converged==0 :\n index = 0\n for k_current in k_grid :\n k_next = fminbound(obj,k_grid[0],k_grid[-1])\n v_func[index] = (-1) * obj(k_next)\n k_next_vec[index] = k_next\n index = index + 1\n dist = abs(max(v_func - v_update))\n if dist<tol :\n converged = 1\n v_k, g_k = v_func, k_next_vec\n v_update = v_func\n print \"Iteration : \",iteration,\"\",\"Distance : \",dist # convergence process\n iteration = iteration + 1\n v_func = empty(n_grid) \n k_next_vec = empty(n_grid)\n \n end = time.time() # end time\n run_time[0],run_time[1] = runtime_cal(start,end) # total running time\n \n return v_k, g_k, run_time, iteration", "def compute_zp_spectral_function(self):\n nomegase = self.nomegase\n nkpt = self.nkpt\n nband = self.nband\n\n self.spectral_function = np.zeros((nomegase, nkpt, nband), dtype=float)\n\n omega = np.einsum('kn,l->knl', np.ones((nkpt, nband)), self.omegase)\n\n self.spectral_function = (\n (1 / np.pi) * np.abs(self.self_energy.imag) /\n ((omega - self.self_energy.real) ** 2 + self.self_energy.imag ** 2)\n )", "def _vmomentderivsurfaceIntegrand(vR,vT,R,df,logSigmaR,logsigmaR2,sigmaR1,\n gamma,n,m,deriv):\n E,L= _vRpvTpRToEL(vR,vT,R,df._beta,sigmaR1,gamma,df._dftype)\n if deriv.lower() == 'r':\n return vR**n*vT**m*df.eval(E,L,logSigmaR,logsigmaR2)*2.*nu.pi/df._gamma*df._dlnfdR(R,vR*sigmaR1,vT*sigmaR1/gamma) #correct\n else:\n return 0.", "def cal_f_RK(yt, dyt, f, df, int_INV_D_pre, vw_div_vw0, fcn_D, cond_GT):\n phi_b = cond_GT['phi_bulk']\n ed = cond_GT['epsilon_d']\n\n y_new = yt + dyt\n f_new = f + df\n int_INV_D = int_INV_D_pre\n if df != 0.: # it is related with half-step for RK4 method\n int_INV_D += (dyt/2.)*(1./fcn_D(f, cond_GT) + 1./fcn_D(f_new, cond_GT))\n return (-1./ed)*(vw_div_vw0/fcn_D(f_new, cond_GT))*(f_new - phi_b*(1. 
- exp(-(vw_div_vw0/ed)*int_INV_D)))", "def div(\n coeff_rho,\n momentum_x,\n momentum_y,\n momentum_z,\n ):\n # Compute the fourth order derivative of the pressure for the face\n # velocity correction.\n p_corr = (\n states['p']\n if self._params.enable_rhie_chow_correction else states['dp'])\n d4p_dx4 = self._kernel_op.apply_kernel_op_x(p_corr, 'k4d2x')\n d4p_dy4 = self._kernel_op.apply_kernel_op_y(p_corr, 'k4d2y')\n d4p_dz4 = self._kernel_op.apply_kernel_op_z(p_corr, 'k4d2z',\n 'k4d2zsh')\n\n # Compute velocity gradient based on interpolated values on cell faces.\n coeff_x = dt / (4. * coeff_rho * dx**2)\n du = self._kernel_op.apply_kernel_op_x(momentum_x, 'kDx')\n du_dx = [\n du_i / (2. * dx) + coeff_x * d4p_dx4_i\n for du_i, d4p_dx4_i in zip(du, d4p_dx4)\n ]\n\n coeff_y = dt / (4. * coeff_rho * dy**2)\n dv = self._kernel_op.apply_kernel_op_y(momentum_y, 'kDy')\n dv_dy = [\n dv_i / (2. * dy) + coeff_y * d4p_dy4_i\n for dv_i, d4p_dy4_i in zip(dv, d4p_dy4)\n ]\n\n coeff_z = dt / (4. * coeff_rho * dz**2)\n dw = self._kernel_op.apply_kernel_op_z(momentum_z, 'kDz', 'kDzsh')\n dw_dz = [\n dw_i / (2. * dz) + coeff_z * d4p_dz4_i\n for dw_i, d4p_dz4_i in zip(dw, d4p_dz4)\n ]\n\n return [\n du_dx_i + dv_dy_i + dw_dz_i\n for du_dx_i, dv_dy_i, dw_dz_i in zip(du_dx, dv_dy, dw_dz)\n ]", "def calc_ked_WFI(self):\n\n #Initialize kinetic energy density\n self.ked_WFI = np.zeros( (self.grid.Nelem, 1))\n\n #Figure out the number of occupied orbitals\n if self.m == 0:\n if self.pol == 1:\n Nocc = np.floor(self.N/2)\n nu = self.N / 2 - Nocc\n else:\n Nocc = np.floor(self.N)\n nu = self.N - Nocc\n\n else:\n #m>0 orbitals hold twice as many electrons due to +-m symmetry\n if self.pol == 1:\n Nocc = np.floor(self.N / 4)\n nu = self.N / 4 - Nocc\n else:\n Nocc = np.floor(self.N/2)\n nu = self.N / 2 - Nocc\n\n #Construct density\n for i in range(int(Nocc)):\n # print(\"phi from pssolver\", self.phi)\n # print(\"phi subset\", self.phi[:,i])\n # print(\"integrate returns\", self.grid.integrate( self.phi[:,i]**2 )**0.5)\n\n #Normalized orbital\n phi_norm = self.phi[:,i] / self.grid.integrate( self.phi[:,i]**2 )**0.5\n phi_norm = phi_norm[:, None]\n self.ked_WFI += (phi_norm * (self.H0 @ phi_norm)) / self.grid.w[:, None]\n\n #If we are doing fractional robitals and are non-integer\n if self.FRACTIONAL is True and nu != 0:\n #Normalized orbital\n phi_norm = self.phi[:,i] / self.grid.integrate( self.phi[:, Nocc+1]**2)**0.5\n phi_norm = phi_norm[:, None]\n self.ked_WFI += nu * ( phi_norm * (self.H0 @ phi_norm) ) / self.grid.w[:, None]\n\n #Scale densities appropriately\n if self.m == 0:\n if self.pol == 1: #Unpolarized electrons\n self.ked_WFI = 2 * self.ked_WFI\n\n else: # m>0 orbitals hold twice as many electrons due to +-m symmetry\n if self.pol == 1:\n self.ked_WFI = 4 * self.ked_WFI\n else:\n self.ked_WFI = 2 * self.ked_WFI", "def apply_velocity(self, angles, velocity, phase, x):\r\n \r\n # VX\r\n v=velocity[0]*self.parameters[\"vx_amplitude\"]\r\n d=(x*2-1)*v\r\n if phase:\r\n angles[\"l_thigh_joint\"]+=d\r\n angles[\"l_ankle_joint\"]+=d\r\n angles[\"r_thigh_joint\"]+=d\r\n angles[\"r_ankle_joint\"]+=d\r\n else:\r\n angles[\"l_thigh_joint\"]-=d\r\n angles[\"l_ankle_joint\"]-=d\r\n angles[\"r_thigh_joint\"]-=d\r\n angles[\"r_ankle_joint\"]-=d\r\n\r\n # VY\r\n v=velocity[1]*self.parameters[\"vy_amplitude\"]\r\n d=(x)*v\r\n d2=(1-x)*v\r\n if v>=0:\r\n if phase:\r\n angles[\"l_hip_joint\"]-=d\r\n angles[\"l_foot_joint\"]-=d\r\n angles[\"r_hip_joint\"]+=d\r\n angles[\"r_foot_joint\"]+=d\r\n else:\r\n 
angles[\"l_hip_joint\"]-=d2\r\n angles[\"l_foot_joint\"]-=d2\r\n angles[\"r_hip_joint\"]+=d2\r\n angles[\"r_foot_joint\"]+=d2\r\n else:\r\n if phase:\r\n angles[\"l_hip_joint\"]+=d2\r\n angles[\"l_foot_joint\"]+=d2\r\n angles[\"r_hip_joint\"]-=d2\r\n angles[\"r_foot_joint\"]-=d2\r\n else:\r\n angles[\"l_hip_joint\"]+=d\r\n angles[\"l_foot_joint\"]+=d\r\n angles[\"r_hip_joint\"]-=d\r\n angles[\"r_foot_joint\"]-=d\r\n \r\n ## VT\r\n #v=velocity[2]*self.parameters[\"vt_amplitude\"]\r\n #d=(x)*v\r\n #d2=(1-x)*v\r\n #if v>=0:\r\n #if phase:\r\n #angles[\"j_pelvis_l\"]=-d\r\n #angles[\"j_pelvis_r\"]=d\r\n #else:\r\n #angles[\"j_pelvis_l\"]=-d2\r\n #angles[\"j_pelvis_r\"]=d2\r\n #else:\r\n #if phase:\r\n #angles[\"j_pelvis_l\"]=d2\r\n #angles[\"j_pelvis_r\"]=-d2\r\n #else:\r\n #angles[\"j_pelvis_l\"]=d\r\n #angles[\"j_pelvis_r\"]=-d\r", "def park91b_hf(xx):\n\n x1, x2, x3, x4 = xx.T\n\n term1 = (2 / 3) * np.exp(x1 + x2)\n term2 = -x4 * np.sin(x3)\n term3 = x3\n\n return term1 + term2 + term3", "def get_vdfdx_sl(x, v):\n\n xm, vm = np.meshgrid(x, v, indexing=\"ij\")\n xm = xm.flatten()\n vm = vm.flatten()\n\n x_pad = _get_padded_grid_(x)\n f_pad = np.zeros((x.size + 2, v.size))\n\n def update_spatial_adv_sl(f, dt):\n \"\"\"\n evolution of df/dt = v df/dx using the Backward Semi-Lagrangian method popularized by\n [1] and widely used since.\n\n [1] - Cheng, C. ., & Knorr, G. (1976). The integration of the vlasov equation in configuration space.\n Journal of Computational Physics, 22(3), 330–351. https://doi.org/10.1016/0021-9991(76)90053-X\n\n :param f: (float array (nx, nv)) distribution function\n :param dt: (float) timestep\n :return: (float array (nx, nv)) updated distribution function\n \"\"\"\n\n f_pad[1:-1, :] = f\n f_pad[0, :] = f[-1, :]\n f_pad[-1, :] = f[0, :]\n\n f_interpolator = interpolate.RectBivariateSpline(x_pad, v, f_pad)\n f_out = f_interpolator(xm - vm * dt, vm, grid=False).reshape((x.size, v.size))\n\n return f_out\n\n return update_spatial_adv_sl", "def freqdomain(self):\n \n\n #self.df = self.f[1] - self.f[0]\n #frequency vector\n #fv = fftshift(fftfreq(len(eta),1./fs))\n #fv = fv[len(fv)/2:]\n \n #spectral analysis\n self.sn1 = self.espec1(self.n1)\n self.sn2 = self.espec1(self.n2)\n self.sn3 = self.espec1(self.n3)\n self.sn12 = self.espec2(self.n1,self.n2)\n self.sn13 = self.espec2(self.n1,self.n3)\n self.sn23 = self.espec2(self.n2,self.n3)\n \n #delta freq\n self.df = self.f[3] - self.f[2]\n\n #calculo do numero de onda\n #self.wavenumber()\n #k = numeronda(h,f,len(f))\n #k = np.array(k)\n\n #calculo dos coeficientes de fourier - NDBC 96_01 e Steele (1992)\n c = self.sn2[:,1] + self.sn3[:,1]\n cc = np.sqrt(self.sn1[:,1] * (c))\n \n self.a1 = self.sn12[:,3] / cc\n self.b1 = self.sn13[:,3] / cc\n \n self.a2 = (self.sn2[:,1] - self.sn3[:,1]) / c\n self.b2 = 2 * self.sn12[:,2] / c\n \n #calcula direcao de onda\n #mean direction\n self.dire1 = np.array([np.angle(np.complex(self.b1[i],self.a1[i]),deg=True) for i in range(len(self.a1))])\n \n #principal direction\n self.dire2 = 0.5 * np.array([np.angle(np.complex(self.b2[i],self.a2[i]),deg=True) for i in range(len(self.a2))])\n \n #condicao para valores maiores que 360 e menores que 0\n self.dire1[np.where(self.dire1 < 0)] = self.dire1[np.where(self.dire1 < 0)] + 360\n self.dire1[np.where(self.dire1 > 360)] = self.dire1[np.where(self.dire1 > 360)] - 360\n self.dire2[np.where(self.dire2 < 0)] = self.dire2[np.where(self.dire2 < 0)] + 360\n self.dire2[np.where(self.dire2 > 360)] = self.dire2[np.where(self.dire2 > 360)] - 360\n \n 
#acha o indice da frequencia de pico\n ind = np.where(self.sn1[:,1] == np.max(self.sn1[:,1]))[0]\n \n #periodo de pico\n self.tp = (1. / self.f[ind])[0]\n \n #momento espectral de ordem zero total - m0\n self.m0 = np.sum(self.sn1[:,1]) * self.df\n \n #calculo da altura significativa\n self.hm0 = 4.01 * np.sqrt(self.m0)\n \n #direcao do periodo de pico\n self.dp = self.dire1[ind][0]\n \n #Espalhamento direcional\n #Formula do sigma1 do livro Tucker&Pitt(2001) \"Waves in Ocean Engineering\" pags 196-198\n c1 = np.sqrt(self.a1 ** 2 + self.b1 ** 2)\n c2 = np.sqrt(self.a2 ** 2 + self.b2 ** 2)\n \n s1 = c1 / (1-c1)\n s2 = (1 + 3 * c2 + np.sqrt(1 + 14 * c2 + c2 ** 2)) / (2 * (1 - c2))\n \n self.sigma1 = np.sqrt(2 - 2 * c1) * 180 / np.pi\n self.sigma2 = np.sqrt((1 - c2) / 2) * 180 / np.pi\n \n self.sigma1p = np.real(self.sigma1[ind])[0]\n self.sigma2p = np.real(self.sigma2[ind])[0]\n \n # pondaf = np.array([hm0, tp, dp, sigma1p, sigma2p])\n \n #hm0, tp, dp, sigma1, sigma2, sigma1p, sigma2p, f, df, k, sn, snx, sny, snn, snnx, snny, snxny, snxnx, snyny, a1, b1, a2, b2, dire1, dire2\n #return hm0, tp, dp, sigma1, sigma2, sigma1p, sigma2p, f, df, k, sn, snx, sny, snn, snnx, snny, snxny, snxnx, snyny, a1, b1, a2, b2, dire1, dire2", "def _get_F(self, omega, y): \n x = y[:-1]\n newt_lambda = y[-1]\n F = np.zeros([len(x)+1, 1])\n F[:-1] = omega @ x - newt_lambda/x\n F[-1] = x.sum()-1\n return F" ]
[ "0.5701903", "0.56463856", "0.563504", "0.5585956", "0.555302", "0.55438894", "0.54815304", "0.5467443", "0.5334224", "0.53278655", "0.53052276", "0.52949303", "0.52682424", "0.52625763", "0.5254825", "0.5235348", "0.5234946", "0.52277654", "0.52228475", "0.5213484", "0.5213479", "0.5209328", "0.5206368", "0.51751304", "0.51728237", "0.5165143", "0.5149565", "0.5144661", "0.51036495", "0.5065677", "0.5065451", "0.5053579", "0.5047318", "0.504438", "0.5033949", "0.50311613", "0.5027267", "0.5019895", "0.50121313", "0.49975523", "0.4992839", "0.49727505", "0.4972486", "0.4968485", "0.49678802", "0.49662724", "0.4956218", "0.4945585", "0.49435154", "0.49432445", "0.4942282", "0.49290913", "0.49211627", "0.49118778", "0.48872492", "0.48850873", "0.48816696", "0.48794737", "0.486327", "0.48567933", "0.4855022", "0.4854988", "0.48529592", "0.4851829", "0.48502266", "0.4847562", "0.48458838", "0.4845149", "0.48435354", "0.48298258", "0.48258632", "0.4823545", "0.48219386", "0.48163682", "0.4816031", "0.48144594", "0.48058045", "0.48055598", "0.48044127", "0.48034465", "0.48017523", "0.4799452", "0.47991648", "0.47927785", "0.47913352", "0.4790955", "0.47757024", "0.47739816", "0.4773637", "0.47729272", "0.4770059", "0.4770052", "0.47525442", "0.47514924", "0.47485664", "0.47465152", "0.47438058", "0.47408137", "0.47404012", "0.47403517", "0.47363713" ]
0.0
-1
Calculates the translational partition function, assuming free movement, i.e. no spatial potential.
def qtrans(m,V):
    T = s.Symbol("T")
    return (((2 * s.pi * m * k * T) / (h**2))**(3/2)) * V
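The document above encodes the translational partition function q_trans = (2*pi*m*k*T / h^2)^(3/2) * V. A minimal runnable sketch of that formula follows; it assumes SymPy is imported as `s` and supplies SI values for the otherwise-undefined globals `k` and `h`. The argon mass and 1 m^3 box in the usage line are illustrative assumptions, not part of the dataset record.

import sympy as s

# Assumed SI values for the module-level constants the document refers to (CODATA).
k = 1.380649e-23    # Boltzmann constant, J/K
h = 6.62607015e-34  # Planck constant, J*s

def qtrans(m, V):
    # Translational partition function: (2*pi*m*k*T / h^2)^(3/2) * V
    T = s.Symbol("T")
    return (((2 * s.pi * m * k * T) / (h**2))**s.Rational(3, 2)) * V

# Hypothetical check: one argon atom (m ~ 6.63e-26 kg) in a 1 m^3 box at 300 K.
q = qtrans(6.63e-26, 1.0)
print(q.subs(s.Symbol("T"), 300).evalf())  # ~ 2.5e32, dimensionless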
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def relative_partition_function(self):\n return self.overall_norm**2", "def getPartitionFunction(self, Tlist, V=1.0):\n\t\treturn _modes.translation_partitionfunction(Tlist, self.mass, self.dimension, V)", "def heuristic_3_partition(game, player) -> float:\n\n partition_possible_factor = get_partition_possible_factor(game, player)\n\n return float(partition_possible_factor)", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.hinderedrotor_partitionfunction(Tlist, self.frequency, self.barrier) ** self.degeneracy", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.harmonicoscillator_partitionfunction(Tlist, self.frequency) ** self.degeneracy", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.freerotor_partitionfunction(Tlist, self.frequencies, 1 if self.linear else 0)", "def partition(seq):\n\n return 0", "def partition(game, player):\n height = game.height\n width = game.width\n blanks = game.get_blank_spaces()\n has_partition = False\n partition_col = int(game.width/2)\n partition_row = int(game.height/2)\n moves = game.get_legal_moves(player)\n if moves:\n player_location = game.get_player_location(player)\n for i in range(2, width - 3): #search for vertical partitions\n if (0,i) not in blanks and (0,i+1) not in blanks:\n j = 1\n while j < height and (j, i) not in blanks and (j, i + 1) not in blanks:\n j += 1\n if j == height:\n has_partition = True\n pb = partition_blanks(game, (0,i))\n if pb[0] > pb[1]: #more blanks on the left of the partition\n for move in moves:\n if move[1] < i:\n return has_partition, True\n return has_partition, False\n else: #more blanks on right of partition\n for move in moves:\n if move[1] > i + 1:\n return has_partition, True\n return has_partition, False\n\n for i in range(2, height - 3): #seach for horizontal partitions\n if (i,0) not in blanks and (i+1,0) not in blanks:\n j = 1\n while j < width and (i,j) not in blanks and (i+1, j) not in blanks:\n j += 1\n if j == width:\n has_partition = True\n pb = partition_blanks(game, (i, 0))\n if pb[0] > pb[1]: #more blanks on top of partition\n for move in moves:\n if move[0] < i:\n return has_partition, True\n return has_partition, False\n else: #more blanks below partition\n for move in moves:\n if move[0] > i + 1:\n return has_partition, True\n return has_partition, False\n\n return has_partition, False", "def partition(v,m,I,V,sym):\n T = s.Symbol(\"T\")\n return qvib(v) + qtrans(m,V) + qrot(I,sym)", "def getPartitionFunction(self, Tlist):\n\t\tQ = np.ones((len(Tlist)), np.float64) / self.symmetry\n\t\t# Active K-rotor\n\t\trotors = [mode for mode in self.modes if isinstance(mode, RigidRotor)]\n\t\tif len(rotors) == 0:\n\t\t\tTrot = constants.h * constants.c * 100.0 * 1.0 / constants.kB\n\t\t\tQ0 = [math.sqrt(T / Trot) for T in Tlist]\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\t# Other modes\n\t\tfor mode in self.modes:\n\t\t\tQ0 = mode.getPartitionFunction(Tlist)\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\treturn Q", "def _translate(self, update_g0=True) -> \"_StepResult\":\n assert self._coords is not None\n x0 = self._coords.x0\n\n trns_iters = [c for c in self._history if c.did_translation]\n\n if len(trns_iters) < 2:\n step_size = float(self.init_alpha)\n logger.info(\n f\"Did not have two previous translation step, guessing\"\n f\" α = {step_size} Å\"\n )\n\n else:\n prev_trns_iter = trns_iters[-2]\n logger.info(\n f\"Did {len(trns_iters)} previous translations, can \"\n f\"calculate α using the Barzilai–Borwein method\"\n )\n\n 
step_size = (\n np.abs(\n np.dot(\n (x0 - prev_trns_iter.x0),\n (self._coords.f_t - prev_trns_iter.f_t),\n )\n )\n / np.linalg.norm(self._coords.f_t - prev_trns_iter.f_t) ** 2\n )\n\n delta_x = step_size * self._coords.f_t\n trns_rms = MWDistance(np.sqrt(np.mean(np.square(delta_x))))\n\n if trns_rms < self.trns_tol:\n logger.info(f\"Step length small than tolerance {self.trns_tol}\")\n return _StepResult.skipped_translation\n\n logger.info(f\"Translating by ~{trns_rms:.4f} per coordinate\")\n\n coords = self._coords.copy()\n coords += delta_x\n\n self._coords = coords\n self._coords.phi = Angle(0.0) # Did not rotation\n self._coords.dist = trns_rms # but did translate\n\n if update_g0:\n self._update_gradient_at(DimerPoint.midpoint)\n\n return _StepResult.did_translation", "def fpart(x):\n return x - np.floor(x)", "def calc_partition_function_one(states, T):\n B = beta(T)\n Z = sum(np.exp(-B * states))\n\n return Z", "def get_s0_xy_equipartition(N, angle):\n s0x= [1]\n s0y = [0]\n s0z = [0]\n for i in range(1,N):\n s_old = [s0x [-1],s0y [-1],s0z [-1] ]\n #final_angle = random.choice([-1,1]) * np.radians(angle - 3 + 6*random.random())\n #final_angle = random.choice([-1,1]) * np.radians(angle * 2*random.random())\n final_angle = random.choice([-1,1]) * np.radians(random.gauss(0, 1.2 * angle))\n sin_final = sin(final_angle)\n cos_final = cos(final_angle)\n s_new = [cos_final*s_old[0] +sin_final*s_old[1], -sin_final*s_old[0] + cos_final*s_old[1],0]\n s0x.append(s_new[0])\n s0y.append(s_new[1])\n s0z.append(s_new[2])\n norm = np.linalg.norm([s0x [-1],s0y [-1],s0z [-1] ])\n s0x [-1] = s0x[-1] / norm\n s0y [-1] = s0y[-1] / norm\n s0z [-1] = s0z[-1] / norm\n\n return np.concatenate((s0x,s0y,s0z),axis = 0)", "def __lineartrans(self):\n do = self.domain\n self.transpoints = copy(self.pts)\n def t(x):\n return (x - do[0])/(do[1]-do[0])\n for i in range(len(self.transpoints)):\n self.transpoints[i,0] = t(self.transpoints[i,0])", "def inertia_tensor_partial(self, part, masswt=True, zero=ZERO):\n tensor = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]\n\n for i in part:\n if masswt:\n # I(alpha, alpha)\n tensor[0][0] += self.mass(i) * (self.y(i) * self.y(i) + self.z(i) * self.z(i))\n tensor[1][1] += self.mass(i) * (self.x(i) * self.x(i) + self.z(i) * self.z(i))\n tensor[2][2] += self.mass(i) * (self.x(i) * self.x(i) + self.y(i) * self.y(i))\n\n # I(alpha, beta)\n tensor[0][1] -= self.mass(i) * self.x(i) * self.y(i)\n tensor[0][2] -= self.mass(i) * self.x(i) * self.z(i)\n tensor[1][2] -= self.mass(i) * self.y(i) * self.z(i)\n\n else:\n # I(alpha, alpha)\n tensor[0][0] += self.y(i) * self.y(i) + self.z(i) * self.z(i)\n tensor[1][1] += self.x(i) * self.x(i) + self.z(i) * self.z(i)\n tensor[2][2] += self.x(i) * self.x(i) + self.y(i) * self.y(i)\n\n # I(alpha, beta)\n tensor[0][1] -= self.x(i) * self.y(i)\n tensor[0][2] -= self.x(i) * self.z(i)\n tensor[1][2] -= self.y(i) * self.z(i)\n\n # mirror\n tensor[1][0] = tensor[0][1]\n tensor[2][0] = tensor[0][2]\n tensor[2][1] = tensor[1][2]\n\n # Check the elements for zero and make them a hard zero.\n for i in range(3):\n for j in range(3):\n if math.fabs(tensor[i][j]) < zero:\n tensor[i][j] = 0.0\n return tensor", "def _compute_ptdfs(self):\n z = self._compute_z()\n s = np.zeros([self.n_nodes, self.n_nodes, self.n_nodes])\n for k in range(self.n_nodes):\n for L in range(self.n_nodes):\n for i in range(self.n_nodes):\n if k == 0 and L != 0:\n s[k, L, i] = -1 * z[L-1, i-1]\n elif k != 0 and L == 0:\n s[k, L, i] = z[k-1, i-1]\n elif k != 0 and L != 0 and k != L:\n s[k, L, i] = 
z[k-1, i-1] - z[L-1, i-1]\n return s", "def globalToLocal(x,y,heading,global_compl):\n local_compl = global_compl-np.array([x+1j*y]) #subtract x and y\n ang_deg = (np.angle(local_compl, deg=True)-heading) #rotate\n return toComplex(np.abs(local_compl),ang_deg) #and calc0", "def translation_separation(t1: np.ndarray, t2: np.ndarray) -> float:\n return np.linalg.norm(t1 - t2)", "def move(self):\n \"\"\" Responsible for transformations \"\"\"\n pos, com, success = self.perception \n if self.destination is None:\n return array([0,0])\n\n if not self.awake:\n return array([0,0])\n\n\n if self.phase == 4 and self.proper_formation is not None:\n no_go = []\n for i in range(0,len(self.proper_formation)):\n if i != self.order and self.proper_formation[i][0] == self.proper_formation[self.order][0]:\n no_go.append(self.transform(self.proper_formation[i][1] - self.position))\n pos = merge_array_lists(pos, no_go)\n\n if self.phase == 2:\n point = self.destination.copy() - self.position\n elif self.phase > 2:\n point = self.transform(self.destination.copy() - self.position)\n else:\n point = self.destination.copy()\n\n if not array_equal(point, array([0,0])):\n reachable, path = findpathtoclosest(array([0,0]), point, pos)\n \n if len(path) == 0:\n move = array([0,0]) \n else:\n move = path[0]\n if not reachable and not array_equal(move,array([0,0])):\n if self.phase == 2:\n self.closest_i_could_get = path[-1] + self.position\n elif self.phase > 2:\n self.closest_i_could_get = self.transform2(path[-1]) + self.position\n else:\n self.closest_i_could_get = path[-1]\n elif not reachable:\n if self.phase > 1:\n self.closest_i_could_get = self.position\n else:\n self.closest_i_could_get = array([0,0])\n else:\n self.closest_i_could_get = None\n\n if reachable and self.phase == 4 and array_equal(move,array([0,0])):\n move = self.randomStep()\n self.closest_i_could_get = None\n\n else:\n move = array([0,0])\n self.closest_i_could_get = None\n\n return move", "def Partitioning(self, *args):\n return _hypre.HypreParVector_Partitioning(self, *args)", "def fspring_perp(xy, NL, KL, KGdivKL, Mm, NP, nn):\n vecx = np.array([[KL[i, j] * (xy[i, 0] - xy[NL[i, j], 0]) for j in range(nn)] for i in range(NP)])\n vecy = np.array([[KL[i, j] * (xy[i, 1] - xy[NL[i, j], 1]) for j in range(nn)] for i in range(NP)])\n mag = np.sqrt(vecx ** 2 + vecy ** 2)\n mag[mag == 0.] = 1. # avoid divide by zero error\n KLnoz = KL.copy() # no zeros\n KLnoz[KLnoz == 0] = 1. 
# same value as mag[mag==0], so that stretch=0 for those\n stretch = mag - KL\n # print(stretch)\n vechx = vecx / mag\n vechy = vecy / mag\n dxvec = np.sum(stretch * vechx, axis=-1) / Mm\n dyvec = np.sum(stretch * vechy, axis=-1) / Mm\n # Add perp force\n dxvecP = KGdivKL * np.sum(stretch * (-vechy), axis=-1) / Mm\n dyvecP = KGdivKL * np.sum(stretch * vechx, axis=-1)\n ftx = -np.hstack((dxvec.reshape(NP, 1) + dxvecP.reshape(NP, 1), dyvec.reshape(NP, 1) + dyvecP.reshape(NP, 1)))\n return ftx", "def translate(self, vec):\n self.substrates = shapely.affinity.translate(self.substrates, vec[0], vec[1])\n self.partitionLine = shapely.affinity.translate(self.partitionLine, vec[0], vec[1])\n for annotation in self.annotations:\n o = annotation.origin\n annotation.origin = (o[0] + vec[0], o[1] + vec[1])\n\n def newRevertTransformation(point, orig=self.revertTransformation, vec=vec):\n prevPoint = (point[0] - vec[0], point[1] - vec[1])\n if orig is not None:\n return orig(prevPoint)\n return prevPoint\n self.revertTransformation = newRevertTransformation", "def _step(self) -> None:\n self._update_gradient_at(DimerPoint.left)\n\n self._optimise_rotation()\n trns_result = self._translate()\n\n if trns_result == _StepResult.skipped_translation:\n self._converged_translation = True\n\n return None", "def F_trans(self):\n rho_H1 = self.edp_par['rho_H1'].value\n Z_H1 = self.edp_par['Z_H1'].value\n sigma_H1 = self.edp_par['sigma_H1'].value\n rho_M = self.edp_par['rho_M'].value\n sigma_M = self.edp_par['sigma_M'].value\n psi = self.edp_par['psi'].value \n common_scale = self.edp_par['common_scale'].value\n \n # Calculate the intermediate variables\n alpha = self.qz*cos(psi) - self.qx*sin(psi)\n Z_CH2 = Z_H1 - sigma_H1\n Z_W = Z_H1 + sigma_H1\n DeltaZ_H = Z_W - Z_CH2\n \n # Calculate the Gaussian part \n FG = -rho_M*sigma_M * exp(-0.5*(alpha*sigma_M)**2)\n FG += 2*rho_H1*sigma_H1 * cos(alpha*Z_H1) * exp(-0.5*(alpha*sigma_H1)**2)\n FG *= np.sqrt(2*pi)\n \n # Calculate the strip part\n FS = -2 * sin(alpha*Z_CH2) / alpha\n \n # Calculate the bridging part\n FB = 1 / (alpha + pi/DeltaZ_H)\n FB += 1 / (alpha - pi/DeltaZ_H)\n FB *= sin(alpha*Z_W) + sin(alpha*Z_CH2)\n FB *= 0.5\n FB -= (sin(alpha*Z_W)-sin(alpha*Z_CH2)) / alpha\n \n return common_scale * (FG + FS + FB)", "def _partition(self, l, r):\n\t\tpivot = median([\n\t\t\tself.order_dict[self.arrangement[l]], \n\t\t\tself.order_dict[self.arrangement[(l+r)//2]], \n\t\t\tself.order_dict[self.arrangement[r]]]\n\t\t)\n\t\twhile True:\n\t\t\twhile ((self.order_dict[self.arrangement[l]] < pivot) or (l == self.swap_idx)) and (l < r):\n\t\t\t\tl += 1\n\t\t\twhile ((self.order_dict[self.arrangement[r]] > pivot) or (r == self.swap_idx)) and (l < r):\n\t\t\t\tr -= 1\n\t\t\tif l == r:\n\t\t\t\tbreak\n\t\t\tself._swap(l, r, self.swap_idx)\n\t\treturn l", "def rpartition(self, x):\n pass", "def F_trans(self):\n rho_H1 = self.edp_par['rho_H1'].value\n Z_H1 = self.edp_par['Z_H1'].value\n sigma_H1 = self.edp_par['sigma_H1'].value\n rho_H2 = self.edp_par['rho_H2'].value\n Z_H2 = self.edp_par['Z_H2'].value\n sigma_H2 = self.edp_par['sigma_H2'].value\n rho_M = self.edp_par['rho_M'].value\n sigma_M = self.edp_par['sigma_M'].value\n psi = self.edp_par['psi'].value \n common_scale = self.edp_par['common_scale'].value\n \n \n # Make sure Z_H2 > Z_H1. 
If Z_H2 < Z_H1, swap them\n if Z_H1 > Z_H2:\n Z_H1, Z_H2 = Z_H2, Z_H1\n sigma_H1, sigma_H2 = sigma_H2, sigma_H1\n rho_H1, rho_H2 = rho_H2, rho_H1\n \n # Calculate the intermediate variables\n alpha = self.qz*cos(psi) - self.qx*sin(psi)\n Z_CH2 = Z_H1 - sigma_H1\n Z_W = Z_H2 + sigma_H2\n DeltaZ_H = Z_W - Z_CH2\n \n # Calculate the Gaussian part \n FG = -rho_M*sigma_M * exp(-0.5*(alpha*sigma_M)**2)\n FG += 2*rho_H1*sigma_H1 * cos(alpha*Z_H1) * exp(-0.5*(alpha*sigma_H1)**2)\n FG += 2*rho_H2*sigma_H2 * cos(alpha*Z_H2) * exp(-0.5*(alpha*sigma_H2)**2)\n FG *= np.sqrt(2*pi)\n \n # Calculate the strip part\n FS = -2 * sin(alpha*Z_CH2) / alpha\n \n # Calculate the bridging part\n FB = 1 / (alpha + pi/DeltaZ_H)\n FB += 1 / (alpha - pi/DeltaZ_H)\n FB *= sin(alpha*Z_W) + sin(alpha*Z_CH2)\n FB *= 0.5\n FB -= (sin(alpha*Z_W)-sin(alpha*Z_CH2)) / alpha\n \n return common_scale * (FG + FS + FB)", "def fpart(x):\n return x - math.floor(x)", "def compute_static_zp_renormalization_nosplit(self):\n self.distribute_workload()\n self.zero_point_renormalization = self.sum_qpt_function('get_zpr_static_sternheimer')\n self.renormalization_is_dynamical = False", "def compute_trajectory():\n pass", "def heuristic_combined_1_3(game, player) -> float:\n\n center_available_factor = get_center_available_factor(game, player)\n partition_possible_factor = get_partition_possible_factor(game, player)\n\n return float(center_available_factor + partition_possible_factor)", "def AndresMarzoDelta_calc(TP, FP, FN, TN):\n try:\n part1 = TP + TN - 2 * math.sqrt(FP * FN)\n n = TP + FP + FN + TN\n return part1 / n\n except Exception:\n return \"None\"", "def F_trans(self):\n common_scale = self.edp_par['common_scale'].value\n R_HM = self.edp_par['R_HM'].value\n X_h = self.edp_par['X_h'].value\n psi = self.edp_par['psi'].value \n arg = self.qz*X_h*np.cos(psi) - self.qx*X_h*np.sin(psi)\n return common_scale * (R_HM*np.cos(arg) - 1)", "def computeOffsets_direct(self, subObj, tarPara):\n ## only works when the registered subject is available\n \n smpl = self.hresSMPL.copy()\n \n ## compute the posed offsets\n offsets = np.zeros_like(smpl.r)\n smpl = smplFromParas(smpl, offsets, tarPara[0], tarPara[1], tarPara[2])\n offsets_p = subObj.v - smpl.r\n \n ## unpose the offsets\n # 1. put on the offsets to body and do inverse pose\n smpl = smplFromParas(smpl, offsets_p, -tarPara[0], tarPara[1], tarPara[2])\n invposed_body_off = smpl.r\n \n # 2. generate naked body in inverse pose\n offsets = np.zeros_like(smpl.r)\n smpl = smplFromParas(smpl, offsets, -tarPara[0], tarPara[1], tarPara[2])\n invposed_body = smpl.r\n \n # 3. 
get the unposed/t-posed offsets\n offsets_t = invposed_body_off - invposed_body\n \n return offsets_t", "def compute_transition(seg_actif, seg_next):\n\n\t#Recuperation des points A (debut) et B (fin) du premier segment\n\ta = seg_actif.start\n\tb = seg_actif.end\n\n\t#calcul du track change entre les deux segments\n\ttrack_change = np.arccos((seg_actif.scal(seg_next)) / (seg_actif.norm() * seg_next.norm())) * RAD2DEG # en degrés\n\t#print(\"track_change=\", track_change) #en degré\n\n\t#calcul bank_angle, turn_radius et lead_distance\n\n\tif ALTITUDE>195:\n\t\tmax_angle = (16 - 25) / (300 - 195) * (ALTITUDE - 195) + 25\n\t\tbank_angle = max(5, min(0.5 * track_change, max_angle)) #en DEG\n\t\tturn_radius = GS ** 2 / (G * np.tan(bank_angle / RAD2DEG)) / NM2M # NM\n\t\tlead_distance = turn_radius * np.tan(0.5 * track_change / RAD2DEG) # NM\n\t\tif lead_distance > 20: # NM\n\t\t\tlead_distance = 20 # NM\n\t\t\tturn_radius = lead_distance / np.tan(0.5 * track_change / RAD2DEG)\n\t\t\tbank_angle = max(5, min(np.arctan(GS ** 2) / (G * turn_radius), max_angle))\n\t\t#print(\"lead_distance\", lead_distance)\n\telse :\n\t\tmax_angle = 25 #DEG\n\t\tbank_angle = max(5, min(0.5*track_change,max_angle)) #DEG\n\t\tturn_radius = GS**2 / (G*np.tan(bank_angle / RAD2DEG)) / NM2M # NM\n\t\tlead_distance = turn_radius * np.tan(0.5 * track_change / RAD2DEG)\n\n\t#calcul de b_in et b_out : points de debut et fin de la transition en arc de cercle\n\tif track_change < EPSILON:\n\t\tb_in = b\n\t\tb_out = b\n\t\tb_center = b\n\n\telse:\n\t\tnorme_act = seg_actif.norm()\n\t\tactive_track = get_track(seg_actif) # en RAD\n\t\tnext_track = get_track(seg_next)\n\t\tb_in = calcul_point_de_transition(a, norme_act, lead_distance, active_track)\n\t\tb_out = calcul_point_de_transition(b, lead_distance, 0, next_track)\n\n\t\t#calcul de l'angle a_b_bcenter et du point b_center (centre de l'arc de transition)\n\n\t\td = (turn_radius ** 2 + lead_distance ** 2) ** 0.5\n\t\tif seg_actif.det(seg_next) > 0:\n\t\t\ta_b_bc_angle = ((180 + track_change) / 2) / RAD2DEG # en rad\n\t\t\tb_center = g.Point(b.x + d * np.sin((active_track - a_b_bc_angle)),\n\t\t\t\t\t\t b.y + d * np.cos((active_track - a_b_bc_angle)))\n\t\t\tbank_angle = - bank_angle\n\n\t\telse:\n\t\t\ta_b_bc_angle = ((180 - track_change) / 2) / RAD2DEG\n\t\t\tb_center = g.Point(b.x - d * np.sin((active_track - a_b_bc_angle)),\n\t\t\t\t\t\t b.y - d * np.cos((active_track - a_b_bc_angle)))\n\n\treturn(track_change, turn_radius, b_in, b_out, b_center, lead_distance, bank_angle)", "def perform(self, node, inputs, outputs):\r\n x = inputs[0]\r\n L = inputs[1]\r\n dz = inputs[2]\r\n dx = outputs[0]\r\n N = x.shape[0]\r\n if self.lower:\r\n F = numpy.tril(dz)\r\n for k in xrange(N - 1, -1, -1):\r\n for j in xrange(k + 1, N):\r\n for i in xrange(j, N):\r\n F[i, k] -= F[i, j] * L[j, k]\r\n F[j, k] -= F[i, j] * L[i, k]\r\n for j in xrange(k + 1, N):\r\n F[j, k] /= L[k, k]\r\n F[k, k] -= L[j, k] * F[j, k]\r\n F[k, k] /= (2 * L[k, k])\r\n else:\r\n F = numpy.triu(dz)\r\n M = N - 1\r\n for k in xrange(N - 1, -1, -1):\r\n for j in xrange(k + 1, N):\r\n for i in xrange(j, N):\r\n F[k, i] -= F[j, i] * L[k, j]\r\n F[k, j] -= F[j, i] * L[k, i]\r\n for j in xrange(k + 1, N):\r\n F[k, j] /= L[k, k]\r\n F[k, k] -= L[k, j] * F[k, j]\r\n F[k, k] /= (2 * L[k, k])\r\n dx[0] = F", "def inertial_system_partial(self, part, masswt=True, zero=ZERO):\n return diagonalize3x3symmat(self.inertia_tensor_partial(part, masswt, zero))", "def Partitioner(q,InvV,Posterior,m_points):\n \n m = InvV.n #get the 
number of maps being used \n Q = np.zeros([m,m_points.num]) #initialise the partition functions\n \n for j in range(m):\n #backmap the points from the posterior to the intermediate\n backmap = m_points.map(InvV,j)\n #determine the current mixture using a change of variables\n det = InvV.L[j,:,:].diagonal().prod()**2\n Q[j,:] = q[j] * multivariate_normal.pdf(backmap.all,mean=np.zeros(m_points.d),cov=np.eye(m_points.d)) * det\n \n #now we have the total mixture\n g_est = np.sum(Q,axis=0)\n\n for j in range(m):\n #the partitioner can be found from these\n Q[j,:] /= g_est\n #apply the partitioner to the posterior evaluations to get the partitioned components\n \n return Q", "def desplazamientox(tiempo,velocidad):\r\n #se realiza un operacion para encontrar el el desplzamiento horizaontal\r\n x=tiempo*velocidad\r\n #se regresa el valor de x\r\n return x", "def _do_localisation(self):\n # Initiate global localization, wherein all particles are dispersed randomly through the free space in the map.\n self.global_localisation()\n\n #wait for pointcloud being destributet over the map\n count = 0\n while count < 50:\n self.rate.sleep()\n count = count + 1\n\n move_straight_count = 0\n while self.area_ellips > self.epsilon:\n range_front = []\n range_front[:20] = self.lidar_data[-20:]\n range_front[20:] = self.lidar_data[:20]\n\n obstacle_in_front = self._is_obstacle_in_front()\n if obstacle_in_front:\n # rotate to the right\n self._move(0, -0.75)\n else:\n if move_straight_count % 100 == 0:\n self._rotate_x_degrees(60, 360, True) \n\n # move straight forward\n move_straight_count = move_straight_count + 1\n self._move(0.25, 0)\n\n self._move(0, 0)\n return True", "def sinu_continuous_kinematic_function(t, kinematic_parameters):\n flapping_wing_frequency = kinematic_parameters[0]\n flapping_angular_velocity_amplitude = kinematic_parameters[1]\n pitching_angular_velocity_amplitude = kinematic_parameters[2]\n flapping_acceleration_time_fraction = kinematic_parameters[3]\n pitching_time_fraction = kinematic_parameters[4]\n flapping_delay_time_fraction = kinematic_parameters[5]\n pitching_delay_time_fraction = kinematic_parameters[6]\n\n def dphi(x):\n \"\"\"flapping motion angular velocity function\"\"\"\n return -kf(\n flapping_wing_frequency, flapping_angular_velocity_amplitude,\n flapping_acceleration_time_fraction, flapping_delay_time_fraction,\n x)\n\n dphi_data = []\n dphi_data_abs = []\n for ti in t:\n dphi_data.append(dphi(ti))\n dphi_data_abs.append(np.abs(dphi(ti)))\n dphi_spl = UnivariateSpline(t, dphi_data, s=0)\n dphi_spl_abs = UnivariateSpline(t, dphi_data_abs, s=0)\n\n flapping_amplitude = dphi_spl_abs.integral(0, 1 / flapping_wing_frequency)\n\n print('flapping amplitude = %s' % (flapping_amplitude / 2))\n\n def ddphi(x):\n \"\"\"flapping angular acceleration function\"\"\"\n return dphi_spl.derivatives(x)[1]\n\n initial_phi = dphi_spl.integral(\n 0,\n np.abs(flapping_delay_time_fraction) / flapping_wing_frequency)\n initial_phi = -np.sign(flapping_delay_time_fraction) * initial_phi\n\n def phi(x):\n \"\"\"flapping motion function\"\"\"\n return flapping_amplitude / 4 + initial_phi + dphi_spl.integral(0, x)\n\n def dalf(x):\n \"\"\"flapping angular velocity function\"\"\"\n return kf_continuous(flapping_wing_frequency,\n pitching_angular_velocity_amplitude,\n pitching_time_fraction,\n pitching_delay_time_fraction, x)\n\n dalf_data = []\n dalf_data_abs = []\n for ti in t:\n dalf_data.append(dalf(ti))\n dalf_data_abs.append(np.abs(dalf(ti)))\n dalf_spl = UnivariateSpline(t, dalf_data, 
s=0)\n dalf_spl_abs = UnivariateSpline(t, dalf_data_abs, s=0)\n\n pitching_amplitude = dalf_spl_abs.integral(0, 1 / flapping_wing_frequency)\n print('pitching amplitude = %s' % (pitching_amplitude / 2))\n\n def ddalf(x):\n \"\"\"pitching angular acceleration function\"\"\"\n return dalf_spl.derivatives(x)[1]\n\n initial_alf = dalf_spl.integral(\n 0,\n np.abs(pitching_delay_time_fraction) / flapping_wing_frequency)\n initial_alf = -np.sign(pitching_delay_time_fraction) * initial_alf\n\n def alf(x):\n \"\"\"pitching motion function\"\"\"\n return initial_alf + dalf_spl.integral(0, x)\n\n kinematic_angles = []\n t_1st_cycle = [t1 for t1 in t if t1 <= 1 / flapping_wing_frequency]\n for ti in t_1st_cycle:\n kinematic_anglesi = [\n phi(ti),\n alf(ti),\n dphi(ti),\n dalf(ti),\n ddphi(ti),\n ddalf(ti)\n ]\n kinematic_angles.append(kinematic_anglesi)\n\n return kinematic_angles", "def get_heat_loss_coefficient_of_partition() -> float:\n return 1 / 0.46", "def center_of_gravity_evaluation(F_PERC, P_PERC, afg, awg, mw, ed, ui, bi):\n\n max_seg_n = np.max([np.amax(afg.fuse_seg_nb), np.amax(awg.wing_seg_nb)])\n t_nb = afg.fus_nb + awg.w_nb # Number of parts not counting symmetry\n tot_nb = afg.fuse_nb + awg.wing_nb # Number of parts counting symmetry\n segments_nb = []\n fuse_fuel_vol = 0\n pass_vol = 0\n\n for i in range(1, afg.fus_nb + 1):\n segments_nb.append(afg.fuse_seg_nb[i - 1])\n if ui.F_FUEL[i - 1]:\n fuse_fuel_vol += afg.fuse_fuel_vol[i - 1]\n if np.all(afg.cabin_seg[:, i - 1]) == 1:\n pass_vol += afg.fuse_vol[i - 1]\n else:\n pass_vol += afg.fuse_cabin_vol[i - 1]\n\n htw = 0\n x0 = 0\n s = 0\n for i in range(1, awg.w_nb + 1):\n segments_nb.append(awg.wing_seg_nb[i - 1])\n if awg.wing_sym[i - 1] != 0:\n segments_nb.append(awg.wing_seg_nb[i - 1])\n s += 1\n if awg.is_horiz[i - 1 + s]:\n if i != awg.main_wing_index:\n htw = i\n else:\n x = np.amax(awg.wing_center_seg_point[:, i + s - 1, 0])\n if x > x0:\n tw = i\n x0 = x\n\n mass_seg_i = np.zeros((max_seg_n, tot_nb))\n oem_vol = (awg.wing_tot_vol - awg.wing_fuel_vol) + (np.sum(afg.fuse_vol) - fuse_fuel_vol)\n\n # Evaluating oem density, fuel density, passenger density\n if bi.USER_EN_PLACEMENT:\n oem_par = (mw.operating_empty_mass - mw.mass_engines) / oem_vol\n en = mw.mass_engines\n else:\n oem_par = mw.operating_empty_mass / oem_vol\n en = 0\n\n mpass_par = (mw.mass_payload * (P_PERC / 100.0)) / pass_vol\n\n mfuel_par = (mw.mass_fuel_tot * (F_PERC / 100.0)) / (awg.wing_fuel_vol + fuse_fuel_vol)\n\n mtom = (\n mw.operating_empty_mass\n + mw.mass_payload * (P_PERC / 100)\n + mw.mass_fuel_tot * (F_PERC / 100)\n - en\n )\n\n # Definition of the mass of each segment\n ex = False\n wg = []\n for i in range(1, afg.fus_nb + 1):\n if ui.F_FUEL[i - 1]:\n for j in range(1, afg.fuse_seg_nb[i - 1] + 1):\n mass_seg_i[j - 1][i - 1] = (\n oem_par + (mfuel_par * ui.F_FUEL[i - 1] / 100)\n ) * afg.fuse_seg_vol[j - 1][i - 1]\n else:\n for j in range(1, afg.fuse_seg_nb[i - 1] + 1):\n if int(afg.cabin_seg[j - 1][i - 1]) == 1:\n mass_seg_i[j - 1][i - 1] = (oem_par + mpass_par) * afg.fuse_seg_vol[j - 1][\n i - 1\n ]\n else:\n mass_seg_i[j - 1][i - 1] = oem_par * afg.fuse_seg_vol[j - 1][i - 1]\n w = 0\n for i in range(afg.fus_nb + 1, t_nb + 1):\n for j in range(1, awg.wing_seg_nb[i - 1 - afg.fus_nb] + 1):\n if awg.is_horiz[i + w - 1 - afg.fus_nb]:\n mass_seg_i[j - 1][i - 1 + w] = oem_par * (\n awg.wing_seg_vol[j - 1][i - 1 - afg.fus_nb]\n - awg.wing_fuel_seg_vol[j - 1][i - 1 - afg.fus_nb]\n ) + mfuel_par * (awg.wing_fuel_seg_vol[j - 1][i - 1 - afg.fus_nb])\n else:\n 
mass_seg_i[j - 1][i - 1 + w] = (\n oem_par * awg.wing_seg_vol[j - 1][i - 1 - afg.fus_nb]\n )\n wg.append(i - afg.fus_nb)\n if awg.wing_sym[i - 1 - afg.fus_nb] != 0:\n w += 1\n mass_seg_i[:, i - 1 + w] = mass_seg_i[:, i - 2 + w]\n wg.append(i - afg.fus_nb)\n if i + w == tot_nb:\n break\n # Mass check\n while not ex:\n if abs(round(mtom, 3) - round(np.sum(mass_seg_i), 3)) < 0.0001:\n ex = True\n else:\n mass = (round(mtom, 3) - round(np.sum(mass_seg_i), 3)) / 2\n if not ed.WING_MOUNTED:\n if htw != 0:\n a = wg.index(htw)\n else:\n a = wg.index(tw)\n else:\n a = wg.index(awg.main_wing_index)\n mass_seg_i[0][afg.fuse_nb + a] = mass_seg_i[0][afg.fuse_nb + a] + mass\n if awg.is_horiz[a]:\n mass_seg_i[0][afg.fuse_nb + a + 1] = mass_seg_i[0][afg.fuse_nb + a + 1] + mass\n else:\n mass_seg_i[0][afg.fuse_nb + a] = mass_seg_i[0][afg.fuse_nb + a] + mass\n\n awg.wing_center_seg_point.resize(max_seg_n, awg.wing_nb, 3)\n afg.fuse_center_seg_point.resize(max_seg_n, afg.fuse_nb, 3)\n\n airplane_centers_segs = np.concatenate(\n (afg.fuse_center_seg_point, awg.wing_center_seg_point), 1\n )\n\n # CoG evalution\n if bi.USER_EN_PLACEMENT:\n cog_enx = np.sum(ed.EN_PLACEMENT[:, 0] * ed.en_mass)\n cog_eny = np.sum(ed.EN_PLACEMENT[:, 1] * ed.en_mass)\n cog_enz = np.sum(ed.EN_PLACEMENT[:, 2] * ed.en_mass)\n else:\n cog_enx = 0.0\n cog_eny = 0.0\n cog_enz = 0.0\n\n center_of_gravity = []\n center_of_gravity.append(\n round((np.sum(airplane_centers_segs[:, :, 0] * mass_seg_i) + cog_enx) / mtom, 3)\n )\n center_of_gravity.append(\n round((np.sum(airplane_centers_segs[:, :, 1] * mass_seg_i) + cog_eny) / mtom, 3)\n )\n center_of_gravity.append(\n round((np.sum(airplane_centers_segs[:, :, 2] * mass_seg_i) + cog_enz) / mtom, 3)\n )\n\n for i in range(1, 4):\n if abs(center_of_gravity[i - 1]) < 10 ** (-5):\n center_of_gravity[i - 1] = 0.0\n\n return (center_of_gravity, mass_seg_i, airplane_centers_segs)", "def getTranslation(fracs):\n \n \n \n # Determine whether the shift needs to be from inf to 0 \n # or from -inf to 0\n \n # Along all x fractionals\n if abs(max(fracs[0]))>=abs(min(fracs[0])):\n minX = min([x for x in fracs[0] if x>0])\n else:\n minX = min([x for x in fracs[0] if x<0])\n \n # Along all y fractionals\n if abs(max(fracs[1]))>=abs(min(fracs[1])):\n minY = min([x for x in fracs[1] if x>0])\n else:\n minY = min([x for x in fracs[1] if x<0])\n \n # Along all z fractionals\n # Need to consider all atoms lying in a single\n # plane (e.g. graphene), thus the final \"else\"\n # statement\n if abs(max(fracs[2]))>abs(min(fracs[2])):\n minZ = min([x for x in fracs[2] if x>0])\n elif abs(max(fracs[2]))<abs(min(fracs[2])):\n minZ = min([x for x in fracs[2] if x<0])\n else:\n minZ = max(fracs[2])\n\n shift_vector = np.array([minX,minY,minZ])\n \n return(shift_vector)", "def fn(i, k):\n if k < 0: return inf # impossible \n if i == 0: return 0 \n return min(ceil((fn(i-1, k) + dist[i-1])/speed) * speed, dist[i-1] + fn(i-1, k-1))", "def matrix_contribution(self):\r\n \r\n import numpy as np\r\n import copy\r\n \r\n # The functions F and G sometimes return NaN, errors we catch through\r\n # np.nan_to_num. 
Suppress these error messages.\r\n import warnings\r\n warnings.filterwarnings('ignore')\r\n \r\n # Define the segment influence functions\r\n def F(Z):\r\n return np.nan_to_num(-0.5*(Z-1)*np.log((Z-1)/(Z+1))) - 1\r\n def G(Z):\r\n return np.nan_to_num(+0.5*(Z+1)*np.log((Z-1)/(Z+1))) + 1\r\n \r\n # We evaluate this block at its own vertices\r\n z = self.polygon\r\n \r\n # Pre-allocate an empty matrix for the block\r\n block = np.zeros((self.segments,self.segments))\r\n \r\n self.angles = []\r\n self.temp = []\r\n \r\n # Go through all vertices in the polygon\r\n for seg in range(self.segments):\r\n \r\n # Set the previous, current, and next vertex of the polygon\r\n if seg == self.segments-1:\r\n seg_minus = seg-1\r\n seg_center = seg\r\n seg_plus = 0\r\n else:\r\n seg_minus = seg-1\r\n seg_center = seg\r\n seg_plus = seg+1\r\n \r\n self.temp.append([self.polygon[seg_plus]-self.polygon[seg_center],\r\n self.polygon[seg_center]-self.polygon[seg_minus]])\r\n \r\n # To evaluate the effect of a vertex on itself, it is computationally\r\n # cleanest to evaluate it in terms of angles; these angles are \r\n # calculated according to Strack 1989, Eq. 35.29 and 35.30\r\n newtemp = np.angle(self.polygon[seg_plus]-self.polygon[seg_center]) - \\\r\n np.angle(self.polygon[seg_center]-self.polygon[seg_minus])\r\n \r\n if newtemp < -np.pi: newtemp += 2*np.pi\r\n if newtemp > +np.pi: newtemp -= 2*np.pi\r\n \r\n # self.angles.append(newtemp)\r\n # Sometimes, numerical imprecision causes the angle to fall outside\r\n # the range 0 and 2 pi; in that case, flip it back inside\r\n # if newtemp < 0: newtemp += 2*np.pi\r\n # if newtemp > 2*np.pi: newtemp -= 2*np.pi\r\n newtemp -= np.pi\r\n \r\n\r\n \r\n self.angles.append(newtemp)\r\n \r\n # Write the diagonal entries of the matrix\r\n block[seg,seg] = 1/(2*np.pi)*newtemp\r\n \r\n # Here we would normally add the factor for the conductivity difference\r\n # to the diagonal entries; since we only prepare the matrix here, \r\n # we skip it\r\n \r\n # # Determine the A_star variable (Strack 1989 35.4, 35.38)\r\n # A_star = self.model.k/(self.k - self.model.k)\r\n # block[seg,seg] -= A_star\r\n\r\n # Then handle all off-diagonal contributions\r\n for seg2 in range(self.segments):\r\n \r\n # Skip the diagonal\r\n if seg2 != seg:\r\n \r\n # Get the indices of the past, current, and next vertex\r\n if seg2 == self.segments-1:\r\n seg_minus = seg2-1\r\n seg_center = seg2\r\n seg_plus = 0\r\n else:\r\n seg_minus = seg2-1\r\n seg_center = seg2\r\n seg_plus = seg2+1\r\n \r\n # Calculate the local coordinates\r\n Z_before = \\\r\n (2*z[seg] - (self.polygon[seg_minus] + self.polygon[seg_center]))/(self.polygon[seg_center] - self.polygon[seg_minus])\r\n Z_after = \\\r\n (2*z[seg] - (self.polygon[seg_center] + self.polygon[seg_plus]))/(self.polygon[seg_plus] - self.polygon[seg_center])\r\n \r\n # And write the result into the correct matrix entries\r\n block[seg,seg2] = copy.copy(np.real(1/(2*np.pi*1j)*(G(Z_before)+F(Z_after))))\r\n \r\n return block", "def project (u, v):\r\n\r\n # Construct linear system Ap = d\r\n A = sps.lil_matrix ((width*height, width*height))\r\n d = np.zeros ((width*height))\r\n\r\n for i in range (1, height-1):\r\n for j in range (1, width-1):\r\n A[index(i,j), index(i,j)] = 4\r\n A[index(i,j), index(i-1,j)] = -1\r\n A[index(i,j), index(i+1,j)] = -1\r\n A[index(i,j), index(i,j-1)] = -1\r\n A[index(i,j), index(i,j+1)] = -1\r\n \r\n d[index(i,j)] = -1/h * (u[i,j] - u[i,j-1] + v[i,j] - v[i-1,j])\r\n\r\n # Unhandled boundary cases, we assume solid walls 
that don't move\r\n A[index(0,0), index(0,0)] = 2\r\n A[index(0,0), index(1,0)] = -1\r\n A[index(0,0), index(0,1)] = -1\r\n d[index(0,0)] = -1/h * (u[0,0] + v[0,0])\r\n\r\n A[index(height-1,0), index(0,0)] = 2\r\n A[index(height-1,0), index(height-1,1)] = -1\r\n A[index(height-1,0), index(height-2,0)] = -1\r\n d[index(height-1,0)] = -1/h * (u[height-1,0] - v[height-2,0])\r\n\r\n A[index(0,width-1), index(0,width-1)] = 2\r\n A[index(0,width-1), index(1,width-1)] = -1\r\n A[index(0,width-1), index(0,width-2)] = -1\r\n d[index(0,width-1)] = -1/h * (-u[0,width-2] + v[0,width-1])\r\n\r\n A[index(height-1,width-1), index(height-1,width-1)] = 2\r\n A[index(height-1,width-1), index(height-2,width-1)] = -1\r\n A[index(height-1,width-1), index(height-1,width-2)] = -1\r\n d[index(height-1,width-1)] = -1/h * (-u[height-1,width-2] - v[height-2,width-1])\r\n\r\n\r\n for i in range (1, height-1):\r\n A[index(i,0), index(i,0)] = 3\r\n A[index(i,0), index(i-1,0)] = -1\r\n A[index(i,0), index(i+1,0)] = -1\r\n A[index(i,0), index(i,1)] = -1\r\n d[index(i,0)] = -1/h * (u[i,0] + v[i,0] - v[i-1,0])\r\n\r\n for i in range (1, height-1):\r\n A[index(i,width-1), index(i,width-1)] = 3\r\n A[index(i,width-1), index(i-1,width-1)] = -1\r\n A[index(i,width-1), index(i+1,width-1)] = -1\r\n A[index(i,width-1), index(i,width-2)] = -1\r\n d[index(i,width-1)] = -1/h * (- u[i,width-2] + v[i, width-1] - v[i-1,width-1])\r\n\r\n for j in range (1, width-1):\r\n A[index(0,j), index(0,j)] = 3\r\n A[index(0,j), index(1,j)] = -1\r\n A[index(0,j), index(0,j-1)] = -1\r\n A[index(0,j), index(0,j+1)] = -1\r\n d[index(0,j)] = -1/h * (u[0,j] - u[0,j-1] + v[0,j])\r\n \r\n for j in range (1, width-1):\r\n A[index(height-1,j), index(height-1,j)] = 3\r\n A[index(height-1,j), index(height-2,j)] = -1\r\n A[index(height-1,j), index(height-1,j-1)] = -1\r\n A[index(height-1,j), index(height-1,j+1)] = -1\r\n d[index(height-1,j)] = -1/h * (u[height-1,j] - u[height-1,j-1] - v[height-2,j])\r\n\r\n\r\n A = A * dt / (density * h**2)\r\n\r\n A = sps.csr_matrix (A)\r\n p = np.reshape(spsolve (A, d), (height, width))\r\n\r\n # Calculate new velocity field based on this pressure field\r\n for i in range (height):\r\n for j in range (width):\r\n if (i == height-1 and j == width-1) or (i == height-1 and j == 0) or (i == 0 and j == width-1) or (i == 0 and j == 0):\r\n # Set vertical velocity to movement of solid wall 0\r\n u[i,j] = 0\r\n v[i,j] = 0\r\n elif i == height-1 or i == 0:\r\n u[i,j] = u[i,j] - dt / (density * h) * (p[i,j+1] - p[i,j])\r\n v[i,j] = 0\r\n elif j == width-1 or j == 0:\r\n u[i,j] = 0\r\n v[i,j] = v[i,j] - dt / (density * h) * (p[i+1,j] - p[i,j])\r\n else:\r\n u[i,j] = u[i,j] - dt / (density * h) * (p[i,j+1] - p[i,j])\r\n v[i,j] = v[i,j] - dt / (density * h) * (p[i+1,j] - p[i,j])\r\n\r\n # let's get some inflow\r\n u[4:12, 0] = 1\r\n\r\n return u, v, p", "def shrink_piecwise_linear(r,rvar,theta):\n ab0 = theta[...,0]\n ab1 = theta[...,1]\n sl0 = theta[...,2]\n sl1 = theta[...,3]\n sl2 = theta[...,4]\n\n # scale each column by sqrt(rvar)\n scale_out = tf.sqrt(rvar)\n scale_in = 1/scale_out\n rs = tf.sign(r*scale_in)\n ra = tf.abs(r*scale_in)\n\n # split the piecewise linear function into regions\n rgn0 = tf.to_float( ra<ab0)\n rgn1 = tf.to_float( ra<ab1) - rgn0\n rgn2 = tf.to_float( ra>=ab1)\n xhat = scale_out * rs*(\n rgn0*sl0*ra +\n rgn1*(sl1*(ra - ab0) + sl0*ab0 ) +\n rgn2*(sl2*(ra - ab1) + sl0*ab0 + sl1*(ab1-ab0) )\n )\n dxdr = sl0*rgn0 + sl1*rgn1 + sl2*rgn2\n dxdr = tf.reduce_mean(dxdr,0)\n return (xhat,dxdr)", "def 
partition_function(array, temp):\r\n\r\n # Constants imported from scipy.constants\r\n h = scipy.constants.h # Planck's constant\r\n # speed of light must be in cm/s as wavenumber is in cm-1\r\n c = scipy.constants.c * 100\r\n k = scipy.constants.k # Boltzmann constant\r\n T = temp # extracted from log file using extract_temp()\r\n\r\n # check if inputs are numpy arrays and convert if not.\r\n if not isinstance(array, np.ndarray):\r\n np.asarray(array)\r\n\r\n # conversion to exponent\r\n u = (h * array * c) / (k * T)\r\n\r\n # calculates natural log of an individual frequency contribution to the partition function\r\n Q_ = np.log(np.exp(-(u / 2)) / (1 - np.exp(-u)))\r\n # sums all the contributions together, giving the final result.\r\n Q = np.sum(Q_)\r\n return Q", "def t0shft(t,P,t0):\n t = t.copy()\n dt = 0\n\n t -= t0 # Shifts the timeseries s.t. transits are at 0,P,2P ...\n dt -= t0\n\n # The first transit is at t = nFirstTransit * P\n nFirstTrans = np.ceil(t[0]/P) \n dt -= nFirstTrans*P \n\n return dt", "def change_partition(amount):\n\n def part_tree(n, m):\n if n == 0:\n return tree(True)\n if n < 0 or m == 0:\n return tree(False)\n else:\n left = part_tree(n - m, m)\n right = part_tree(n, m // 2)\n return tree(m, [left, right])\n\n\n k = floor(log(amount) / log(2))\n l = pow(2, k)\n return part_tree(amount, l)", "def spring_particle(name, num_trajectories, NUM_PARTS, T_max, dt, sub_sample_rate, noise_std, seed):\n num_particles = NUM_PARTS\n collater = {}\n\n def diffeq_hyper(t, q, k, m, nparts):\n num_particles = nparts\n vels = q[2 * num_particles:]\n xs = q[:2 * num_particles]\n xs = xs.reshape(-1, 2)\n forces = np.zeros(xs.shape)\n new_k = np.repeat(k, num_particles) * np.tile(k, num_particles)\n new_k = np.repeat(new_k, 2).reshape(-1, 2)\n dx = np.repeat(xs, num_particles, axis=0) - np.tile(xs, (num_particles, 1))\n resu = -new_k * dx\n forces = np.add.reduceat(resu, np.arange(0, nparts * nparts, nparts)).ravel()\n\n return np.concatenate([vels / np.repeat(m, 2), forces]).ravel()\n\n def hamiltonian(vec, m, k, num_particles):\n num_particles = num_particles\n x = vec[:num_particles * 2]\n p = vec[2 * num_particles:]\n xs = x.reshape(-1, 2)\n ps = p.reshape(-1, 2)\n U1 = 0\n K = 0\n for i in range(num_particles):\n for j in range(i + 1, num_particles):\n U1 += .5 * k[i] * k[j] * ((xs[i] - xs[j]) ** 2).sum()\n K += 0.5 * ((ps[i] ** 2).sum()) / m[i]\n return K, U1\n\n theta = []\n dtheta = []\n energy = []\n mass_arr = []\n ks_arr = []\n lagrangian = []\n np.random.seed(seed)\n\n for traj in range(num_trajectories):\n ks = np.ones(NUM_PARTS)#np.random.uniform(.5, 1, size=(NUM_PARTS))\n positions = np.random.uniform(-1, 1, size=(NUM_PARTS, 2))\n velocities = np.random.uniform(-3, 3, size=(NUM_PARTS, 2))\n masses = np.ones(NUM_PARTS)#np.random.uniform(0.1, 1, size=NUM_PARTS)\n momentum = np.multiply(velocities, np.repeat(masses, 2).reshape(-1, 2))\n q = np.concatenate([positions, momentum]).ravel()\n qnrk = rk(lambda t, y: diffeq_hyper(t, y, ks, masses, num_particles), (0, T_max), q,\n t_eval=np.arange(0, T_max, dt),\n rtol=1e-12, atol=1e-12, method='DOP853')\n accum = qnrk.y.T\n ssr = int(sub_sample_rate / dt)\n accum = accum[::ssr]\n daccum = np.array([diffeq_hyper(0, accum[i], ks, masses, num_particles) for i in range(accum.shape[0])])\n energies = []\n lags = []\n for i in range(accum.shape[0]):\n ktmp, utmp = hamiltonian(accum[i], masses, ks, NUM_PARTS)\n energies.append(ktmp + utmp)\n lags.append(ktmp - utmp)\n\n accum += np.random.randn(*accum.shape) * noise_std\n daccum 
+= np.random.randn(*daccum.shape) * noise_std\n\n theta.append(accum)\n dtheta.append(daccum)\n energy.append(energies)\n mass_arr.append(masses)\n ks_arr.append(ks)\n lagrangian.append(lags)\n\n collater['x'] = np.concatenate(theta)\n collater['dx'] = np.concatenate(dtheta)\n collater['energy'] = np.concatenate(energy)\n collater['lagrangian'] = np.concatenate(lagrangian)\n\n collater['mass'] = mass_arr\n collater['ks'] = ks_arr\n\n f = open(name + \".pkl\", \"wb\")\n pickle.dump(collater, f)\n f.close()\n\n return collater", "def jpar(self, phi, tl):\n\t return min(phi*self.KAPPA_2, self.jmax(tl))", "def compute_step(X):\n return MOVING_STEP", "def rfpart(x):\n return 1 - fpart(x)", "def OAVolterra_FrauenhoferZone_direct(p0,wD,dt,Nt):\n pFZ = np.zeros(Nt)\n for i in range(1,Nt-1):\n pFZ[i]=(p0[i+1]-p0[i-1])\n return pFZ/(2.*dt*wD)", "def cur_position_translation(self, translation):\n translation = np.array(translation)\n position = np.ravel(np.array(self.get_current_cartesian_position().position))\n return translation + position", "def translate(self, offset):\n return BSplineFunc(self.kvs, self.coeffs + offset)", "def protrudes((u,v)):\r\n return ((u,v,W), (u,v,S), (u,v-1,W), (u-1,v,S))", "def partition_pair_to_spart(part_pair):\n part_star = list(part_pair[0])\n part_circ_star = list(part_pair[1])\n add_zeros = len(part_circ_star) - len(part_star)\n if add_zeros != 0:\n new_star = part_star + [0]\n else:\n new_star = part_star\n diff_list = [a - b for a, b in zip(part_circ_star, new_star)]\n fermionic_parts = []\n bosonic_parts = []\n for k in range(len(diff_list)):\n if diff_list[k] == 0:\n bosonic_parts += [part_circ_star[k]]\n elif diff_list[k] == 1:\n fermionic_parts += [new_star[k]]\n else:\n raise Exception(\"This should not happen.\")\n # sparts = Superpartitions()\n return _Superpartitions([fermionic_parts, bosonic_parts])", "def _repartition(self, v, radius, contribute=False):\n # =============== PRE-ALLOCATION ===============\n # Compute on non-masked sources :\n xyz = self.xyz[~self.data.mask]\n # Get sign of the x coordinate :\n xsign = np.sign(xyz[:, 0]).reshape(1, -1)\n # Corticale repartition :\n repartition = np.ma.zeros((v.shape[0], v.shape[1]), dtype=np.int)\n\n # For each triangle :\n for k in range(3):\n # =============== EUCLIDIAN DISTANCE ===============\n eucl = cdist(v[:, k, :], xyz).astype(np.float32)\n mask = eucl <= radius\n # Contribute :\n if not contribute:\n # Get vertices signn :\n vsign = np.sign(v[:, k, 0]).reshape(-1, 1)\n # Find where vsign and xsign are equals :\n isign = np.logical_and(vsign != xsign, xsign != 0)\n mask[isign] = False\n\n # =============== REPARTITION ===============\n # Sum over sources dimension :\n sm = np.sum(mask, 1, dtype=np.int)\n smmask = np.invert(sm.astype(bool))\n repartition[:, k] = np.ma.masked_array(sm, mask=smmask)\n\n return repartition", "def partition(lst_part, p, r):\n cond_part = 0\n assign_part = 0\n pivot = lst_part[r] # Set pivot to last (right-most) value in list\n pivot_index = p\n for j in range(p,r):\n cond_part +=1\n if lst_part[j] <= pivot:\n lst_part[pivot_index],lst_part[j] = swap(lst_part[pivot_index],lst_part[j]) \n assign_part +=3\n pivot_index += 1 \n lst_part[pivot_index],lst_part[r] = swap(lst_part[pivot_index],lst_part[r])\n assign_part +=3\n return pivot_index,cond_part,assign_part", "def next_partition(Z, k, h):\n n = len(Z)\n for i in range(n-1, 0, -1):\n if(Z[i] <= k[i-1]):\n h[Z[i]] -= 1\n Z[i] += 1\n\n if Z[i] == len(h):\n h.append(1)\n else:\n h[Z[i]] += 1\n\n k[i] = Z[i] if (k[i] <= 
Z[i]) else k[i]\n\n for j in range(i+1, n):\n h[Z[j]] -= 1\n h[Z[0]] += 1\n\n Z[j] = Z[0]\n k[j] = k[i]\n\n while h[-1] == 0:\n del h[-1]\n\n return Z, k, h\n return None", "def pdfInTransformedSpace(self,x):\n if self.method == 'pca':\n coordinate = distribution1D.vectord_cxx(len(x))\n for i in range(len(x)):\n coordinate[i] = x[i]\n pdfInTransformedSpace = self._distribution.pdfInTransformedSpace(coordinate)\n else:\n self.raiseAnError(NotImplementedError,'ppfTransformedSpace not yet implemented for ' + self.method + ' method')\n return pdfInTransformedSpace", "def move():\n # step 1 of task analysis: get data\n data = get_data('MovementData/Walking_02.txt')\n # step 2: get the initial orientation of the sensor\n sensor_orientation = get_init_orientation_sensor(data.acc[0])\n # step 3: get the vector of the right horizontal semi-circular canal's on-direction\n rhscc_init_on_dir = get_init_on_dir_rh_scc(15)\n # preparation for step 4: align the angular velocity sensor data with the global coordinate system\n angular_velocities_aligned_globally = align_sensor_data_globally(data.omega, sensor_orientation)\n # step 4: calculate the stimulation of the cupula\n stimuli = get_scc_stimulation(angular_velocities_aligned_globally, rhscc_init_on_dir)\n # step 5: get the transfer function of the scc with the dynamics provided in the lecture\n scc_trans_fun = get_scc_transfer_fun(0.01, 5)\n # step 6: get the cupular deflection\n max_cupular_deflection = calculate_max_cupular_deflection(scc_trans_fun, stimuli, data.rate)\n # preparation for step 7: align the acceleration sensor data with the global coordinate system\n accelerations_aligned_globally = align_sensor_data_globally(data.acc, sensor_orientation)\n # step 8: calculate the maxmimum left- and rightwards stimulation of the otolithic organ\n max_left_right_stimuli = calculate_otolithic_max_stimuli(accelerations_aligned_globally, 1)\n # step 9: calculate the head orientation\n head_orientations = calculate_head_orientation(angular_velocities_aligned_globally, data.rate)\n\n return max_cupular_deflection, max_left_right_stimuli, head_orientations", "def reac_beam2DU_global(coord, I, Emod , U):\r\n vec = coord[1, :] - coord[0, :]\r\n L = np.linalg.norm(vec)\r\n nx = vec[0]/L\r\n ny = vec[1]/L\r\n \r\n Q = np.array([\r\n [-ny, nx, 0, 0, 0, 0],\r\n [0, 0, 1.0, 0, 0, 0],\r\n [0, 0, 0, -ny, nx, 0],\r\n [0, 0, 0, 0, 0, 1.0]])\r\n\r\n kl = (I*Emod/(L*L*L)) * np.array([\r\n [12.0, 6, -12.0, 6*L],\r\n [6, 4*L*L, -6*L, 2*L*L],\r\n [-12.0, -6*L, 12.0, -6*L],\r\n [6*L, 2*L*L, -6*L, 4*L*L]])\r\n kG = np.dot(np.dot(Q.T, kl), Q)\r\n fl = np.dot(kG , U)\r\n return fl", "def coord_space(\n a0: numpy.ndarray, a1: numpy.ndarray, a2: numpy.ndarray, rev: bool = False\n) -> Tuple[numpy.ndarray, Optional[numpy.ndarray]]:\n # dbg = False\n # if dbg:\n # print(a0.transpose())\n # print(a1.transpose())\n # print(a2.transpose())\n\n # a0 = acs[0]\n # a1 = acs[1]\n # a2 = acs[2]\n\n global gtm\n global gmry\n global gmrz, gmrz2\n\n tm = gtm\n mry = gmry\n mrz = gmrz\n mrz2 = gmrz2\n\n # tx acs[1] to origin\n # tm = homog_trans_mtx(-a1[0][0], -a1[1][0], -a1[2][0])\n set_homog_trans_mtx(-a1[0], -a1[1], -a1[2], tm)\n\n # directly translate a2 using a1\n p = a2 - a1\n sc = get_spherical_coordinates(p)\n\n # if dbg:\n # print(\"p\", p.transpose())\n # print(\"sc\", sc)\n\n # mrz = homog_rot_mtx(-sc[1], \"z\") # rotate translated a2 -azimuth about Z\n set_Z_homog_rot_mtx(-sc[1], mrz)\n # mry = homog_rot_mtx(-sc[2], \"y\") # rotate translated a2 -polar_angle about Y\n 
set_Y_homog_rot_mtx(-sc[2], mry)\n\n # mt completes a1-a2 on Z-axis, still need to align a0 with XZ plane\n # mt = mry @ mrz @ tm # python 3.5 and later\n mt = gmry.dot(gmrz.dot(gtm))\n\n # if dbg:\n # print(\"tm:\\n\", tm)\n # print(\"mrz:\\n\", mrz)\n # print(\"mry:\\n\", mry)\n # # print(\"mt \", mt)\n\n p = mt.dot(a0)\n\n # if dbg:\n # print(\"mt:\\n\", mt, \"\\na0:\\n\", a0, \"\\np:\\n\", p)\n\n # need azimuth of translated a0\n # sc2 = get_spherical_coordinates(p)\n # print(sc2)\n azimuth2 = _get_azimuth(p[0], p[1])\n\n # rotate a0 -azimuth2 about Z to align with X\n # mrz2 = homog_rot_mtx(-azimuth2, \"z\")\n set_Z_homog_rot_mtx(-azimuth2, mrz2)\n\n # mt = mrz2 @ mt\n mt = gmrz2.dot(mt)\n\n # if dbg:\n # print(\"mt:\", mt, \"\\na0:\", a0, \"\\np:\", p)\n # # print(p, \"\\n\", azimuth2, \"\\n\", mrz2, \"\\n\", mt)\n\n # if dbg:\n # print(\"mt:\\n\", mt)\n # print(\"<<<<<<==============================\")\n\n if not rev:\n return mt, None\n\n # rev=True, so generate the reverse transformation\n\n # rotate a0 theta about Z, reversing alignment with X\n # mrz2 = homog_rot_mtx(azimuth2, \"z\")\n set_Z_homog_rot_mtx(azimuth2, mrz2)\n # rotate a2 phi about Y\n # mry = homog_rot_mtx(sc[2], \"y\")\n set_Y_homog_rot_mtx(sc[2], mry)\n # rotate a2 theta about Z\n # mrz = homog_rot_mtx(sc[1], \"z\")\n set_Z_homog_rot_mtx(sc[1], mrz)\n # translation matrix origin to a1\n # tm = homog_trans_mtx(a1[0][0], a1[1][0], a1[2][0])\n set_homog_trans_mtx(a1[0], a1[1], a1[2], tm)\n\n # mr = tm @ mrz @ mry @ mrz2\n mr = gtm.dot(gmrz.dot(gmry.dot(gmrz2)))\n # mr = numpy.dot(tm, numpy.dot(mrz, numpy.dot(mry, mrz2)))\n\n return mt, mr", "def rk4_perp(xy, v, NL, KL, KGdivKL, Mm, NP, nn, h):\n dx1 = h * v\n dv1 = h * fspring_perp(xy, NL, KL, KGdivKL, Mm, NP, nn)\n dx2 = h * (v + dv1 / 2.)\n dv2 = h * fspring_perp(xy + dx1 / 2., NL, KL, KGdivKL, Mm, NP, nn)\n dx3 = h * (v + dv2 / 2.)\n dv3 = h * fspring_perp(xy + dx2 / 2., NL, KL, KGdivKL, Mm, NP, nn)\n dx4 = h * (v + dv3)\n dv4 = h * fspring_perp(xy + dx3, NL, KL, KGdivKL, Mm, NP, nn)\n xout = xy + (dx1 + 2. * dx2 + 2. * dx3 + dx4) / 6.\n vout = v + (dv1 + 2. * dv2 + 2. 
* dv3 + dv4) / 6.\n\n return dx1, dv1, dx2, dv2, dx3, dv3, dx4, dv4, xout, vout", "def _partition_D(model):\n\n D1_indices = [] # A list of the indices for the unknown nodal displacements\n D2_indices = [] # A list of the indices for the known nodal displacements\n D2 = [] # A list of the values of the known nodal displacements (D != None)\n\n # Create the auxiliary table\n for node in model.Nodes.values():\n \n # Unknown displacement DX\n if node.support_DX==False and node.EnforcedDX == None:\n D1_indices.append(node.ID*6 + 0)\n # Known displacement DX\n elif node.EnforcedDX != None:\n D2_indices.append(node.ID*6 + 0)\n D2.append(node.EnforcedDX)\n # Support at DX\n else:\n D2_indices.append(node.ID*6 + 0)\n D2.append(0.0)\n\n # Unknown displacement DY\n if node.support_DY == False and node.EnforcedDY == None:\n D1_indices.append(node.ID*6 + 1)\n # Known displacement DY\n elif node.EnforcedDY != None:\n D2_indices.append(node.ID*6 + 1)\n D2.append(node.EnforcedDY)\n # Support at DY\n else:\n D2_indices.append(node.ID*6 + 1)\n D2.append(0.0)\n\n # Unknown displacement DZ\n if node.support_DZ == False and node.EnforcedDZ == None:\n D1_indices.append(node.ID*6 + 2)\n # Known displacement DZ\n elif node.EnforcedDZ != None:\n D2_indices.append(node.ID*6 + 2)\n D2.append(node.EnforcedDZ)\n # Support at DZ\n else:\n D2_indices.append(node.ID*6 + 2)\n D2.append(0.0)\n\n # Unknown displacement RX\n if node.support_RX == False and node.EnforcedRX == None:\n D1_indices.append(node.ID*6 + 3)\n # Known displacement RX\n elif node.EnforcedRX != None:\n D2_indices.append(node.ID*6 + 3)\n D2.append(node.EnforcedRX)\n # Support at RX\n else:\n D2_indices.append(node.ID*6 + 3)\n D2.append(0.0)\n\n # Unknown displacement RY\n if node.support_RY == False and node.EnforcedRY == None:\n D1_indices.append(node.ID*6 + 4)\n # Known displacement RY\n elif node.EnforcedRY != None:\n D2_indices.append(node.ID*6 + 4)\n D2.append(node.EnforcedRY)\n # Support at RY\n else:\n D2_indices.append(node.ID*6 + 4)\n D2.append(0.0)\n\n # Unknown displacement RZ\n if node.support_RZ == False and node.EnforcedRZ == None:\n D1_indices.append(node.ID*6 + 5)\n # Known displacement RZ\n elif node.EnforcedRZ != None:\n D2_indices.append(node.ID*6 + 5)\n D2.append(node.EnforcedRZ)\n # Support at RZ\n else:\n D2_indices.append(node.ID*6 + 5)\n D2.append(0.0)\n \n # Legacy code on the next line. 
I will leave it here until the line that follows has been proven over time.\n # D2 = atleast_2d(D2)\n \n # Convert D2 from a list to a matrix\n D2 = array(D2, ndmin=2).T\n\n # Return the indices and the known displacements\n return D1_indices, D2_indices, D2", "def _pid_control(self, waypoint, vehicle_location, vehicle_rotation):\n ## Vehicle transform should be [x,y,z, roll, pitch, yaw]\n\n v_begin = np.array(vehicle_location)\n v_end = v_begin + np.array([math.cos(math.radians(vehicle_rotation)),\n math.sin(math.radians(vehicle_rotation)),0])\n # print(\"vbegin\"+str(v_begin)+\"vend\"+ str(v_end)+\"waypoint\"+str(waypoint))\n v_vec = np.array([v_end[0] - v_begin[0], v_end[1] - v_begin[1], 0.0])\n w_vec = np.array([waypoint[0] -\n v_begin[0], waypoint[1] -\n v_begin[1], 0.0])\n _dot = math.acos(np.clip(np.dot(w_vec, v_vec) /\n (np.linalg.norm(w_vec) * np.linalg.norm(v_vec)), -1.0, 1.0))\n\n _cross = np.cross(v_vec, w_vec)\n if _cross[2] < 0:\n _dot *= -1.0\n\n self._e_buffer.append(_dot)\n if len(self._e_buffer) >= 2:\n _de = (self._e_buffer[-1] - self._e_buffer[-2]) / self._dt\n _ie = sum(self._e_buffer) * self._dt\n else:\n _de = 0.0\n _ie = 0.0\n # print(\"dot\"+str(_dot)+\"de\"+str(_de)+\"ie\"+str(_ie))\n return np.clip((self._K_P * _dot) + (self._K_D * _de /\n self._dt) + (self._K_I * _ie * self._dt), -1.0, 1.0)", "def T(self):\n\n # Calculate the direction cosines for the local x-axis\n # The local x-axis will run from the i-node to the j-node\n xi = self.i_node.X\n xj = self.j_node.X\n yi = self.i_node.Y\n yj = self.j_node.Y\n zi = self.i_node.Z\n zj = self.j_node.Z\n x = [(xj - xi), (yj - yi), (zj - zi)]\n x = x/norm(x)\n \n # The local y-axis will be in the plane of the plate\n # Find a vector in the plate's local xy plane\n xn = self.n_node.X\n yn = self.n_node.Y\n zn = self.n_node.Z\n xy = [xn - xi, yn - yi, zn - zi]\n\n # Find a vector perpendicular to the plate surface to get the orientation of the local z-axis\n z = cross(x, xy)\n \n # Divide the vector by its magnitude to produce a unit z-vector of direction cosines\n z = z/norm(z)\n\n # Calculate the local y-axis as a vector perpendicular to the local z and x-axes\n y = cross(z, x)\n \n # Divide the z-vector by its magnitude to produce a unit vector of direction cosines\n y = y/norm(y)\n\n # Create the direction cosines matrix\n dirCos = array([x, y, z])\n \n # Build the transformation matrix\n transMatrix = zeros((24, 24))\n transMatrix[0:3, 0:3] = dirCos\n transMatrix[3:6, 3:6] = dirCos\n transMatrix[6:9, 6:9] = dirCos\n transMatrix[9:12, 9:12] = dirCos\n transMatrix[12:15, 12:15] = dirCos\n transMatrix[15:18, 15:18] = dirCos\n transMatrix[18:21, 18:21] = dirCos\n transMatrix[21:24, 21:24] = dirCos\n \n return transMatrix", "def hw_func(self):\n i, o = self.inl[0].to_flow(), self.outl[0].to_flow()\n\n if abs(i[0]) < 1e-4:\n return i[1] - o[1]\n\n v_i = v_mix_ph(i, T0=self.inl[0].T.val_SI)\n v_o = v_mix_ph(o, T0=self.outl[0].T.val_SI)\n flow_dir = np.sign(i[0])\n\n return ((i[1] - o[1]) * flow_dir -\n (10.67 * abs(i[0]) ** 1.852 * self.L.val /\n (self.ks.val ** 1.852 * self.D.val ** 4.871)) *\n (9.81 * ((v_i + v_o) / 2) ** 0.852))", "def _fp32_mte_process(axis_0_index, h_lp_index, sub_h_size):\n\n def _fp32_inner_mte(w_lp_index, sub_w_size):\n \"\"\"\n inner mte\n \"\"\"\n # move data in\n in_offset = (block_idx * per_core_col_size + axis_0_index * axis_1 * axis_2 +\n h_lp_index * max_no_core_axis_size * axis_2 +\n w_lp_index * max_core_axis_size)\n data_in_inf = (sub_h_size, sub_w_size, axis_1, axis_2, in_offset)\n 
_data_move_in_last_dim_be_one_block(tik_inst, ub_input, data_in, data_in_inf)\n\n # move data out\n out_offset = (block_idx * per_core_col_size + axis_0_index * axis_2 +\n h_lp_index * max_no_core_axis_size * axis_0 * axis_2 +\n w_lp_index * max_core_axis_size)\n data_out_inf = (sub_h_size, sub_w_size, axis_0, axis_1, axis_2, out_offset)\n _data_move_out_last_dim_be_one_block(tik_inst, data_out, ub_input, data_out_inf)\n\n with tik_inst.for_range(0, loop_cnt) as w_lp_idx:\n _fp32_inner_mte(w_lp_idx, max_core_axis_size)\n with tik_inst.if_scope(left_data > 0):\n _fp32_inner_mte(loop_cnt, left_data)", "def split_translation(t):\n from math import fmod\n\n def _split1(x):\n x_part = fmod(x, 1.0)\n x_whole = x - x_part\n if x_part > 0.5:\n x_part -= 1\n x_whole += 1\n elif x_part < -0.5:\n x_part += 1\n x_whole -= 1\n\n return (x_whole, x_part)\n\n _tt = [_split1(x) for x in t]\n\n return tuple(t[0] for t in _tt), tuple(t[1] for t in _tt)", "def f_v(_a, _vs, _Ps, _Ps0): # _aはスカラ, _vsはベクトル, _Ps, _Ps0は3行2列の行列\n center_pos = _Ps[0]\n center_pos_0 = _Ps0[0]\n idx_iter = Index_iterator(1, 8)\n #中心点から各点へのベクトル\n x = []\n x0 = []\n for p in (_Ps):\n x.append(p - center_pos)\n for p in _Ps(_Ps0):\n x0.append(p - center_pos_0)\n\n x01 = (_Ps[1]-center_pos) \n x02 = (_Ps[2]-center_pos) \n x03 = (_Ps[3]-center_pos) \n x04 = (_Ps[4]-center_pos) \n x05 = (_Ps[5]-center_pos) \n x06 = (_Ps[6]-center_pos) \n x07 = (_Ps[7]-center_pos) \n x08 = (_Ps[8]-center_pos)\n print('p_id', center_pos, end='\\t')\n print('x01:', x01, end=\"\\t\")\n print('x03:', x03, end=\"\\t\")\n print('x05:', x05, end=\"\\t\")\n print('x07:', x07)\n x001 = (_Ps0[1]-_Ps0[0]) \n x002 = (_Ps0[2]-_Ps0[0]) \n x003 = (_Ps0[3]-_Ps0[0]) \n x004 = (_Ps0[4]-_Ps0[0]) \n x005 = (_Ps0[5]-_Ps0[0]) \n x006 = (_Ps0[6]-_Ps0[0]) \n x007 = (_Ps0[7]-_Ps0[0]) \n x008 = (_Ps0[8]-_Ps0[0]) \n \n #中心点周りの面の面積\n def calc_area(j,k,l):\n s = LA.norm(np.cross(x[j],x[k]))/2 \\\n + LA.norm(np.cross(x[k],x[l]))/2\n return s\n\n s = []\n s0 = []\n hen = [1,3,5,7]\n for i in range(4):\n j,k,l = [n for n in idx_iter.get_indexes(start_idx=hen[i], 3)]\n s[i] = calc_area(j,k,l)\n s0[i] = calc_area(j,k,l)\n\n # s0123 = LA.norm(np.cross(x[1],x[2]))/2\\\n # +LA.norm(np.cross(x[2],x[3]))/2\n # s4367 = LA.norm(np.cross(x[3],x[4]))/2\\\n # +LA.norm(np.cross(x[4],x[5]))/2\n # s4785 = LA.norm(np.cross(x[5],x[6]))/2\\\n # +LA.norm(np.cross(x[6],x[7]))/2\n # s4521 = LA.norm(np.cross(x[7],x[8]))/2\\\n # +LA.norm(np.cross(x[8],x[1]))/2\n # s04103 = LA.norm(np.cross(x0[1],x0[2]))/2\\\n # +LA.norm(np.cross(x0[2],x0[3]))/2\n # s04367 = LA.norm(np.cross(x0[3],x0[4]))/2\\\n # +LA.norm(np.cross(x0[4],x0[7]))/2\n # s04785 = LA.norm(np.cross(x0[7],x0[8]))/2\\\n # +LA.norm(np.cross(x0[8],x0[5]))/2\n # s04521 = LA.norm(np.cross(x0[5],x0[2]))/2\\\n # +LA.norm(np.cross(x0[2],x0[1]))/2\n \n #各方向への平均面積(ここだけ反時計回り順で設定してる)\n S_iminus = (s[1] + s[2]) / 2 #43方向\n S_Jminus = (s[1] + s[4]) / 2 #41方向\n S_iplus = (s[3] + s[4]) / 2 #45方向\n S_Jplus = (s[3] + s[2]) / 2 #47方向\n S_iminus0 = (s0[1] + s0[2]) / 2 #43方向\n S_Jminus0 = (s0[1] + s0[4]) / 2 #41方向\n S_iplus0 = (s0[3] + s0[4]) / 2 #45方向\n S_Jplus0 = (s0[3] + s0[2]) / 2 #47方向\n # 各方向への厚み\n h_iminus = h_0 / ((poisson/(1-poisson) * (S_iminus - S_iminus0) / S_iminus0) + 1) #43方向\n h_Jminus = h_0 / ((poisson/(1-poisson) * (S_Jminus - S_Jminus0) / S_Jminus0) + 1) #41方向\n h_iplus = h_0 / ((poisson/(1-poisson) * (S_iplus - S_iplus0) / S_iplus0) + 1) #45方向\n h_Jplus = h_0 / ((poisson/(1-poisson) * (S_Jplus - S_Jplus0) / S_Jplus0) + 1) #47方向\n # 各断片の重心\n g = []\n 
kado = [2,4,6,8]\n hen = [1,3,5,7]\n for i in range(len(kado)):\n _kado = kado[i]\n _hen1, _ = [idx for idx in idx_iter.get_indexes_reverse(_kado, 2)]\n _hen2, _ = [idx for idx in idx_iter.get_indexes(_kado, 2)]\n _hen = [_hen1, _hen2]\n _g1 = (center_pos + _Ps[_kado] + _Ps[_hen1])/3\n _g2 = (center_pos + _Ps[_kado] + _Ps[_hen2])/3\n g.append([_g1, _g2])\n\n g401 = (center_pos + _Ps[0] + _Ps[1]) / 3\n g430 = (center_pos + _Ps[3] + _Ps[0]) / 3\n g436 = (center_pos + _Ps[3] + _Ps[6]) / 3\n g467 = (center_pos + _Ps[6] + _Ps[7]) / 3\n g478 = (center_pos + _Ps[7] + _Ps[8]) / 3\n g485 = (center_pos + _Ps[8] + _Ps[5]) / 3\n g452 = (center_pos + _Ps[5] + _Ps[2]) / 3\n g421 = (center_pos + _Ps[2] + _Ps[1]) / 3\n g0401 = (_Ps0[4] + _Ps0[0] + _Ps0[1]) / 3\n g0430 = (_Ps0[4] + _Ps0[3] + _Ps0[0]) / 3\n g0436 = (_Ps0[4] + _Ps0[3] + _Ps0[6]) / 3\n g0467 = (_Ps0[4] + _Ps0[6] + _Ps0[7]) / 3\n g0478 = (_Ps0[4] + _Ps0[7] + _Ps0[8]) / 3\n g0485 = (_Ps0[4] + _Ps0[8] + _Ps0[5]) / 3\n g0452 = (_Ps0[4] + _Ps0[5] + _Ps0[2]) / 3\n g0421 = (_Ps0[4] + _Ps0[2] + _Ps0[1]) / 3\n \n # 各断片面積\n triangle_area = []\n kado = [2,4,6,8]\n for i in range(len(kado)):\n j, k = [idx for idx in idx_iter.get_indexes_reverse(kado[i], 1)]\n _s1 = LA.norm(np.cross(x[j],x[k]))/2\n j, k = [idx for idx in idx_iter.get_indexes(kado[i], 1)]\n _s2 = LA.norm(np.cross(x[j],x[k]))/2\n triangle_area.append([_s1, _s2])\n\n s410 = LA.norm(np.cross(x[1],x[2]))/2\n s403 = LA.norm(np.cross(x[2],x[3]))/2\n s436 = LA.norm(np.cross(x[3],x[4]))/2\n s467 = LA.norm(np.cross(x[4],x[5]))/2\n s478 = LA.norm(np.cross(x[5],x[6]))/2\n s485 = LA.norm(np.cross(x[6],x[7]))/2\n s452 = LA.norm(np.cross(x[7],x[8]))/2\n s421 = LA.norm(np.cross(x[8],x[1]))/2\n s0410 = LA.norm(np.cross(x0[1],x0[2]))/2\n s0403 = LA.norm(np.cross(x0[2],x0[3]))/2\n s0436 = LA.norm(np.cross(x0[3],x0[4]))/2\n s0467 = LA.norm(np.cross(x0[4],x0[5]))/2\n s0478 = LA.norm(np.cross(x0[5],x0[6]))/2\n s0485 = LA.norm(np.cross(x0[6],x0[7]))/2\n s0452 = LA.norm(np.cross(x0[7],x0[8]))/2\n s0421 = LA.norm(np.cross(x0[8],x0[1]))/2\n # 四角の重心\n\n center_g_square = []\n for i in range(len(g)):\n _g = (triangle_area[i][0]*g[i][0] + triangle_area[i][1]*g[i][1])/(triangle_area[i][0] + triangle_area[i][1])\n center_g.append(_g)\n g4103 = (s410*g401 + s403*g430) / (s410 + s403)\n g4367 = (s436*g436 + s467*g467) / (s436 + s467)\n g4785 = (s478*g478 + s485*g485) / (s478 + s485)\n g4521 = (s452*g452 + s421*g421) / (s452 + s421)\n g04103 = (s0410*g0401 + s0403*g0430) / (s0410 + s0403)\n g04367 = (s0436*g0436 + s0467*g0467) / (s0436 + s0467)\n g04785 = (s0478*g0478 + s0485*g0485) / (s0478 + s0485)\n g04521 = (s0452*g0452 + s0421*g0421) / (s0452 + s0421)\n # 各重心間の距離\n Lj82 = LA.norm(g4521 - g4103)\n Lj24 = LA.norm(g4103 - g4367)\n Lj46 = LA.norm(g4367 - g4785)\n Lj68 = LA.norm(g4785 - g4521)\n \n # ひずみ\n eps_i41 = (LA.norm(x01) - LA.norm(x041)) / LA.norm(x041)\n eps_J41 = (LA.norm(g4521 - g4103) - LA.norm(g04521 - g04103)) / LA.norm(g04521 - g04103)\n eps_i43 = (LA.norm(x03) - LA.norm(x043)) / LA.norm(x043)\n eps_J43 = (LA.norm(g4103 - g4367) - LA.norm(g04103 - g04367)) / LA.norm(g04103 - g04367)\n eps_i47 = (LA.norm(x01) - LA.norm(x041)) / LA.norm(x041)\n eps_J47 = (LA.norm(g4367 - g4785) - LA.norm(g04367 - g04785)) / LA.norm(g04367 - g04785)\n eps_i45 = (LA.norm(x01) - LA.norm(x041)) / LA.norm(x041)\n eps_J45 = (LA.norm(g4785 - g4521) - LA.norm(g04785 - g04521)) / LA.norm(g04785 - g04521)\n # 張力\n F_T1 = (young_modulus * h_Jminus * Lj82 * (eps_i41 + poisson * eps_J41) / (1 - poisson**2))*x01/LA.norm(x01)\n F_T3 = 
(young_modulus * h_iminus * Lj24 * (eps_i43 + poisson * eps_J43) / (1 - poisson**2))*x03/LA.norm(x03)\n F_T5 = (young_modulus * h_Jplus * Lj46 * (eps_i47 + poisson * eps_J47) / (1 - poisson**2))*x05/LA.norm(x05)\n F_T7 = (young_modulus * h_iplus * Lj68 * (eps_i45 + poisson * eps_J45) / (1 - poisson**2))*x07/LA.norm(x07)\n # せん断ひずみ\n gamma513 = (math.acos((np.dot(x07,x01))/(LA.norm(x07)*LA.norm(x01))) - math.acos((np.dot(x045,x041))/(LA.norm(x045)*LA.norm(x041)))\\\n + math.acos((np.dot(x03,x01))/(LA.norm(x03)*LA.norm(x01))) - math.acos((np.dot(x043,x041))/(LA.norm(x043)*LA.norm(x041))))/2\n gamma137 = (math.acos((np.dot(x01,x03))/(LA.norm(x01)*LA.norm(x03))) - math.acos((np.dot(x041,x043))/(LA.norm(x041)*LA.norm(x043)))\\\n + math.acos((np.dot(x03,x05))/(LA.norm(x03)*LA.norm(x05))) - math.acos((np.dot(x043,x047))/(LA.norm(x043)*LA.norm(x047))))/2\n gamma375 = (math.acos((np.dot(x05,x03))/(LA.norm(x05)*LA.norm(x03))) - math.acos((np.dot(x047,x043))/(LA.norm(x047)*LA.norm(x043)))\\\n + math.acos((np.dot(x07,x05))/(LA.norm(x07)*LA.norm(x05))) - math.acos((np.dot(x045,x047))/(LA.norm(x045)*LA.norm(x047))))/2\n gamma751 = (math.acos((np.dot(x05,x07))/(LA.norm(x05)*LA.norm(x07))) - math.acos((np.dot(x047,x045))/(LA.norm(x047)*LA.norm(x045)))\\\n + math.acos((np.dot(x07,x01))/(LA.norm(x07)*LA.norm(x01))) - math.acos((np.dot(x045,x041))/(LA.norm(x045)*LA.norm(x041))))/2\n # せん断力\n F_S41 = ((young_modulus * h_Jminus * LA.norm(x01) * gamma513)/(2 * (1 + poisson)))*x01/LA.norm(x01)\n F_S43 = ((young_modulus * h_Jminus * LA.norm(x03) * gamma137)/(2 * (1 + poisson)))*x03/LA.norm(x03)\n F_S47 = ((young_modulus * h_Jminus * LA.norm(x05) * gamma375)/(2 * (1 + poisson)))*x05/LA.norm(x05)\n F_S45 = ((young_modulus * h_Jminus * LA.norm(x07) * gamma751)/(2 * (1 + poisson)))*x07/LA.norm(x07)\n \n # J方向の曲げ力\n n_j_cross = np.cross(x05, x01)\n if any(n_j_cross):\n n_J = n_j_cross/LA.norm(n_j_cross)\n else: \n\n l_Jalfa = LA.norm(_Ps[1] - _Ps[7])\n cos_Jalfa = (LA.norm(x01)**2 + LA.norm(x05)**2 - l_Jalfa**2) / (2 * LA.norm(x01) * LA.norm(x05))\n if cos_Jalfa > 1.0:\n cos_Jalfa = 1.0\n elif cos_Jalfa < -1.0:\n cos_Jalfa = -1.0\n sin_Jalfa = math.sqrt(1 - cos_Jalfa**2)\n CJa2 = math.sqrt((cos_Jalfa + 1)/2)\n SJa2 = math.sqrt((1 - cos_Jalfa)/2)\n zJC = (_Ps[7][2]-_Ps[1][2])/(_Ps[7][0]-_Ps[1][0]) * (center_pos[0]-_Ps[1][0]) + _Ps[1][2] #曲げ力の方向の場合わけに必要\n if center_pos[2] > zJC:\n e_j = np.dot(np.array([[CJa2 + (n_J[0]**2) * (1 - CJa2), n_J[0] * n_J[1] * (1 - CJa2) + n_J[2] * SJa2, n_J[0] * n_J[2] * (1 - CJa2) - n_J[1] * SJa2],\\\n [n_J[1] * n_J[0] * (1 - CJa2) - n_J[2] * SJa2, CJa2 + (n_J[1]**2) * (1 - CJa2), n_J[1] * n_J[2] * (1 - CJa2) + n_J[0] * SJa2],\\\n [n_J[2] * n_J[0] * (1 - CJa2) + n_J[1] * SJa2, n_J[2] * n_J[1] * (1 - CJa2) - n_J[0] * SJa2, CJa2 + (n_J[2]**2) * (1 - CJa2)]]), (_Ps[7] - center_pos)/LA.norm(_Ps[7] - center_pos))\n else:\n e_j = np.dot(np.array([[CJa2 + (n_J[0]**2) * (1 - CJa2), n_J[0] * n_J[1] * (1 - CJa2) - n_J[2] * SJa2, n_J[0] * n_J[2] * (1 - CJa2) + n_J[1] * SJa2],\\\n [n_J[1] * n_J[0] * (1 - CJa2) + n_J[2] * SJa2, CJa2 + (n_J[1]**2) * (1 - CJa2), n_J[1] * n_J[2] * (1 - CJa2) - n_J[0] * SJa2],\\\n [n_J[2] * n_J[0] * (1 - CJa2) - n_J[1] * SJa2, n_J[2] * n_J[1] * (1 - CJa2) + n_J[0] * SJa2, CJa2 + (n_J[2]**2) * (1 - CJa2)]]), (_Ps[7] - center_pos)/LA.norm(_Ps[7] - center_pos))\n d_etha_J = (2 * sin_Jalfa / l_Jalfa) - (2 * math.sqrt(1 - np.dot(x041,x047)**2/(LA.norm(x041)*LA.norm(x047))**2)/(LA.norm(x041 - x047)))\n\n n_i = np.cross(x07,x03)/LA.norm(np.cross(x03,x07)) \n cos_ialfa = 
np.dot(x03,x07) / (LA.norm(x03) * LA.norm(x07))\n sin_ialfa = math.sqrt(1 - cos_ialfa**2)\n Cia2 = math.sqrt((cos_ialfa + 1)/2)\n Sia2 = math.sqrt((1 - cos_ialfa)/2)\n ziC = (_Ps[5][2]-_Ps[3][2])/(_Ps[5][0]-_Ps[3][0]) * (center_pos[0]-_Ps[3][0]) + _Ps[3][2]\n if center_pos[2] > ziC:\n e_i = np.dot(np.array([[Cia2 + (n_i[0]**2) * (1 - Cia2), n_i[0] * n_i[1] * (1 - Cia2) + n_i[2] * Sia2, n_i[0] * n_i[2] * (1 - Cia2) - n_i[1] * Sia2],\\\n [n_i[1] * n_i[0] * (1 - Cia2) - n_i[2] * Sia2, Cia2 + (n_i[1]**2) * (1 - Cia2), n_i[1] * n_i[2] * (1 - Cia2) + n_i[0] * Sia2],\\\n [n_i[2] * n_i[0] * (1 - Cia2) + n_i[1] * Sia2, n_i[2] * n_i[1] * (1 - Cia2) - n_i[0] * Sia2, Cia2 + (n_i[2]**2) * (1 - Cia2)]]), (_Ps[7] - center_pos)/LA.norm(_Ps[7] - center_pos))\n else:\n e_i = np.dot(np.array([[Cia2 + (n_i[0]**2) * (1 - Cia2), n_i[0] * n_i[1] * (1 - Cia2) - n_i[2] * Sia2, n_i[0] * n_i[2] * (1 - Cia2) + n_i[1] * Sia2],\\\n [n_i[1] * n_i[0] * (1 - Cia2) + n_i[2] * Sia2, Cia2 + (n_i[1]**2) * (1 - Cia2), n_i[1] * n_i[2] * (1 - Cia2) - n_i[0] * Sia2],\\\n [n_i[2] * n_i[0] * (1 - Cia2) - n_i[1] * Sia2, n_i[2] * n_i[1] * (1 - Cia2) + n_i[0] * Sia2, Cia2 + (n_i[2]**2) * (1 - Cia2)]]), (_Ps[5] - center_pos)/LA.norm(_Ps[5] - center_pos))\n d_etha_i = (2 * sin_ialfa / LA.norm(x07 - x03)) - (2 * math.sqrt(1 - np.dot(x043,x045)**2/(LA.norm(x043)*LA.norm(x045))**2)/(LA.norm(x043 - x045)))\n\n\n l_J = (Lj20 + Lj06 + Lj68 + Lj82) / 4\n h = (h_iminus + h_iplus + h_Jminus + h_Jplus) / 4\n I = (l_J * h**3) / 12\n M_i = (young_modulus * I * (d_etha_i + poisson * d_etha_J)/(1 - poisson**2))\n M_J = (young_modulus * I * (d_etha_J + poisson * d_etha_i)/(1 - poisson**2))\n #曲げ力\n F_Bi = M_i / LA.norm(x03) + M_i / LA.norm(x07) * e_i\n F_BJ = M_J / LA.norm(x01) + M_J / LA.norm(x05) * e_j\n #空気力\n # S = (S_iminus + S_iplus + S_Jminus + S_Jplus) / 4\n # F_A = p * S\n F_A = np.array([0.0, 0.0, -0.1]) * _a\n\n # 運動方程式(支配方程式)\n S_0 = (S_iminus0 + S_iplus0 + S_Jminus0 + S_Jplus0) / 4\n F_T = F_T41 + F_T43 + F_T45 + F_T47\n F_S = F_S41 + F_S43 + F_S45 + F_S47\n F_B = F_Bi + F_BJ\n return (F_T + F_S + F_B + F_A) / (rho * h_0 * S_0) - c * _vs", "def part_recur(ckt, initial, w):\n partition_set = []\n# partition_mech = KLPart.KLPartition()\n# convert_Gate(ckt, partition_mech)\n print \"Diving into C++\"\n# (a, b) = partition_mech.partition_once(KLPart.StringVector(list(set(initial))))\n (a, b) = partition(ckt, list(set(initial)))\n print \"Coming back up\"\n if len(get_inputs(ckt, a)) > w and len(a) > 3:\n partition_set = partition_set + part_recur(ckt, a, w)\n else:\n partition_set.append(a)\n if len(get_inputs(ckt, b)) > w and len(b) > 3:\n partition_set = partition_set + part_recur(ckt, b, w)\n else:\n partition_set.append(b)\n return partition_set", "def part2():\r\n my_input = 368078\r\n coords = [(1, 0), (1, -1), (0, -1), (-1, -1), (-1, 0), (-1, 1), (0, 1), (1, 1)]\r\n x = y = dx = 0\r\n dy = -1\r\n grid = {}\r\n\r\n while True:\r\n total = 0\r\n for offset in coords:\r\n ox, oy = offset\r\n if (x+ox, y+oy) in grid:\r\n total += grid[(x+ox, y+oy)]\r\n if total > int(my_input):\r\n return total\r\n if (x, y) == (0, 0):\r\n grid[(0, 0)] = 1\r\n else:\r\n grid[(x, y)] = total\r\n if (x == y) or (x < 0 and x == -y) or (x > 0 and x == 1-y):\r\n dx, dy = -dy, dx\r\n x, y = x+dx, y+dy", "def _compute_gravity_torque(self):\n pass", "def pro_avfid_superoperator_compsubspace_phasecorrected_onlystaticqubit(U,L1,phases):\n\n Ucorrection = qtp.Qobj([[np.exp(-1j*np.deg2rad(phases[0])), 0, 0, 0, 0, 0, 0, 0, 0],\n [0, np.exp(-1j*np.deg2rad(phases[0])), 
0, 0, 0, 0, 0, 0, 0],\n [0, 0, np.exp(-1j*np.deg2rad(phases[0])), 0, 0, 0, 0, 0, 0],\n [0, 0, 0, np.exp(-1j*np.deg2rad(phases[2])), 0, 0, 0, 0, 0],\n [0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[2])), 0, 0, 0, 0],\n [0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[2])), 0, 0, 0],\n [0, 0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[0])), 0, 0],\n [0, 0, 0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[0])), 0],\n [0, 0, 0, 0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[0]))]],\n type='oper',\n dims=[[3, 3], [3, 3]])\n\n if U.type=='oper':\n U=Ucorrection*U\n inner = U.dag()*U_target\n part_idx = [0, 1, 3, 4] # only computational subspace\n ptrace = 0\n for i in part_idx:\n ptrace += inner[i, i]\n dim = 4 # 2 qubits comp subspace \n\n return np.real(((np.abs(ptrace))**2+dim*(1-L1))/(dim*(dim+1)))\n\n elif U.type=='super':\n U=qtp.to_super(Ucorrection)*U\n kraus_form = qtp.to_kraus(U)\n dim=4 # 2 qubits in the computational subspace\n part_idx = [0, 1, 3, 4] # only computational subspace\n psum=0\n for A_k in kraus_form:\n ptrace = 0\n inner = U_target_diffdims.dag()*A_k # otherwise dimension mismatch\n for i in part_idx:\n ptrace += inner[i, i]\n psum += (np.abs(ptrace))**2\n\n return np.real((dim*(1-L1) + psum) / (dim*(dim + 1)))", "def partition_ratio(self):\n if self._partition_ratio is None:\n partition_lineal = 0\n zones = self.idfobjects[\"ZONE\"]\n zone: EpBunch\n for zone in zones:\n for surface in [\n surf\n for surf in zone.zonesurfaces\n if surf.key.upper() not in [\"INTERNALMASS\", \"WINDOWSHADINGCONTROL\"]\n ]:\n if hasattr(surface, \"tilt\"):\n if (\n surface.tilt == 90.0\n and surface.Outside_Boundary_Condition != \"Outdoors\"\n ):\n multiplier = float(\n zone.Multiplier if zone.Multiplier != \"\" else 1\n )\n partition_lineal += surface.width * multiplier\n self._partition_ratio = (\n partition_lineal / self.net_conditioned_building_area\n )\n return self._partition_ratio", "def remap_partition(particles):\n remap_gid_partition_cython(particles, gl_to_loc_map_b.value)\n return particles", "def _compute_operating_point(self):\n xs = [self._x0]\n us = [[] for ii in range(self._num_players)]\n costs = [[] for ii in range(self._num_players)]\n\n for k in range(self._horizon):\n if self._current_operating_point is not None:\n current_x = self._current_operating_point[0][k]\n current_u = [self._current_operating_point[1][ii][k]\n for ii in range(self._num_players)]\n else:\n current_x = np.zeros((self._dynamics._x_dim, 1))\n current_u = [np.zeros((ui_dim, 1))\n for ui_dim in self._dynamics._u_dims]\n\n feedback = lambda x, u_ref, x_ref, P, alpha : \\\n u_ref - P @ (x - x_ref) - self._alpha_scaling * alpha\n u = [feedback(xs[k], current_u[ii], current_x,\n self._Ps[ii][k], self._alphas[ii][k])\n for ii in range(self._num_players)]\n\n # Clip u1 and u2.\n# for ii in range(self._num_players):\n# u[ii] = self._u_constraints[ii].clip(u[ii])\n\n for ii in range(self._num_players):\n us[ii].append(u[ii])\n costs[ii].append(self._player_costs[ii](\n torch.as_tensor(xs[k].copy()),\n [torch.as_tensor(ui) for ui in u],\n k))\n\n if k == self._horizon - 1:\n break\n\n xs.append(self._dynamics.integrate(xs[k], u))\n\n return xs, us, costs", "def getPlane(entry):\n\n \n \n a,b,c = getNewLattice(entry,2)\n a_vector = np.linalg.solve(np.array(entry[0].lattice.as_dict()['matrix']).T,a)\n b_vector = np.linalg.solve(np.array(entry[0].lattice.as_dict()['matrix']).T,b)\n fracs = np.cross(a_vector,b_vector)\n fracs /= min([x for x in fracs if abs(x)>1E-4])\n \n return(fracs)", "def tab(self, origin, direction, width, 
partitionLine=None,\n maxHeight=pcbnew.FromMM(50), fillet=0):\n self.orient()\n\n origin = np.array(origin)\n for geom in listGeometries(self.substrates):\n try:\n direction = np.around(normalize(direction), 4)\n sideOriginA = origin + makePerpendicular(direction) * width / 2\n sideOriginB = origin - makePerpendicular(direction) * width / 2\n boundary = geom.exterior\n splitPointA = closestIntersectionPoint(sideOriginA, direction,\n boundary, maxHeight)\n splitPointB = closestIntersectionPoint(sideOriginB, direction,\n boundary, maxHeight)\n tabFace = biteBoundary(boundary, splitPointB, splitPointA)\n if partitionLine is None:\n # There is nothing else to do, return the tab\n tab = Polygon(list(tabFace.coords) + [sideOriginA, sideOriginB])\n return self._makeTabFillet(tab, tabFace, fillet)\n # Span the tab towards the partition line\n # There might be multiple geometries in the partition line, so try them\n # individually.\n direction = -direction\n for p in listGeometries(partitionLine):\n try:\n partitionSplitPointA = closestIntersectionPoint(splitPointA.coords[0],\n direction, p, maxHeight)\n partitionSplitPointB = closestIntersectionPoint(splitPointB.coords[0],\n direction, p, maxHeight)\n except NoIntersectionError: # We cannot span towards the partition line\n continue\n if isLinestringCyclic(p):\n candidates = [(partitionSplitPointA, partitionSplitPointB)]\n else:\n candidates = [(partitionSplitPointA, partitionSplitPointB),\n (partitionSplitPointB, partitionSplitPointA)]\n for i, (spa, spb) in enumerate(candidates):\n partitionFace = biteBoundary(p, spa, spb)\n if partitionFace is None:\n continue\n partitionFaceCoord = list(partitionFace.coords)\n if i == 1:\n partitionFaceCoord = partitionFaceCoord[::-1]\n # We offset the tab face a little so we can be sure that we\n # penetrate the board substrate. Otherwise, there is a\n # numerical instability on small slopes that yields\n # artifacts on substrate union\n offsetTabFace = [(p[0] - SHP_EPSILON * direction[0], p[1] - SHP_EPSILON * direction[1]) for p in tabFace.coords]\n tab = Polygon(offsetTabFace + partitionFaceCoord)\n return self._makeTabFillet(tab, tabFace, fillet)\n return None, None\n except NoIntersectionError as e:\n continue\n except TabFilletError as e:\n message = f\"Cannot create fillet for tab: {e}\\n\"\n message += f\" Annotation position {self._strPosition(origin)}\\n\"\n message += \"This is a bug. 
Please open an issue and provide the board on which the fillet failed.\"\n raise RuntimeError(message) from None\n\n message = \"Cannot create tab:\\n\"\n message += f\" Annotation position {self._strPosition(origin)}\\n\"\n message += f\" Tab ray origin that failed: {self._strPosition(origin)}\\n\"\n message += \"Possible causes:\\n\"\n message += \"- too wide tab so it does not hit the board,\\n\"\n message += \"- annotation is placed inside the board,\\n\"\n message += \"- ray length is not sufficient,\\n\"\n raise RuntimeError(message) from None", "def _fp32_mte_process_1(axis_0_index, w_lp_index, sub_w_size):\n\n def _fp32_inner_mte_1(h_lp_index, sub_h_size):\n \"\"\"\n inner mte\n \"\"\"\n # move data in\n in_offset = ((block_idx * per_core_col_size + axis_0_index * axis_1 +\n h_lp_index * max_core_axis_size) * axis_2 +\n w_lp_index * max_no_core_axis_size)\n data_in_inf = (sub_h_size, sub_w_size, axis_1, axis_2, in_offset)\n _data_move_in_last_dim_be_one_block(tik_inst, ub_input, data_in, data_in_inf)\n\n # move data out\n out_offset = ((block_idx * per_core_col_size * axis_0 + axis_0_index +\n h_lp_index * max_core_axis_size * axis_0) * axis_2 +\n w_lp_index * max_no_core_axis_size)\n data_out_inf = (sub_h_size, sub_w_size, axis_0, axis_1, axis_2, out_offset)\n _data_move_out_last_dim_be_one_block(tik_inst, data_out, ub_input, data_out_inf)\n\n with tik_inst.for_range(0, loop_cnt) as h_lp_idx:\n _fp32_inner_mte_1(h_lp_idx, max_core_axis_size)\n with tik_inst.if_scope(left_data > 0):\n _fp32_inner_mte_1(loop_cnt, left_data)", "def get_partition(num_partitions, partition_type, log_beta_min=-10,\n device=None):\n if device is None:\n device = torch.device('cpu')\n if num_partitions == 1:\n partition = torch.tensor([0, 1], dtype=torch.float, device=device)\n else:\n if partition_type == 'linear':\n partition = torch.linspace(0, 1, steps=num_partitions + 1,\n device=device)\n elif partition_type == 'log':\n partition = torch.zeros(num_partitions + 1, device=device,\n dtype=torch.float)\n partition[1:] = torch.logspace(log_beta_min, 0, steps=num_partitions, device=device,\n dtype=torch.float)\n return partition", "def build_linear_system(u, dt, dx, D = 3, P = 3,time_diff = 'poly',space_diff = 'poly',lam_t = None,lam_x = None, width_x = None,width_t = None, deg_x = 5,deg_t = None,sigma = 2):\n\n n, m = u.shape\n\n if width_x == None: width_x = n/10\n if width_t == None: width_t = m/10\n if deg_t == None: deg_t = deg_x\n\n # If we're using polynomials to take derviatives, then we toss the data around the edges.\n if time_diff == 'poly': \n m2 = m-2*width_t\n offset_t = width_t\n else: \n m2 = m\n offset_t = 0\n if space_diff == 'poly': \n n2 = n-2*width_x\n offset_x = width_x\n else: \n n2 = n\n offset_x = 0\n\n if lam_t == None: lam_t = 1.0/m\n if lam_x == None: lam_x = 1.0/n\n\n ########################\n # First take the time derivaitve for the left hand side of the equation\n ########################\n ut = np.zeros((n2,m2), dtype=u.dtype)\n\n if time_diff == 'FDconv':\n Usmooth = np.zeros((n,m), dtype=u.dtype)\n # Smooth across x cross-sections\n for j in range(m):\n Usmooth[:,j] = ConvSmoother(u[:,j],width_t,sigma)\n # Now take finite differences\n for i in range(n2):\n ut[i,:] = FiniteDiff(Usmooth[i + offset_x,:],dt,1)\n\n elif time_diff == 'poly':\n T= np.linspace(0,(m-1)*dt,m)\n for i in range(n2):\n ut[i,:] = PolyDiff(u[i+offset_x,:],T,diff=1,width=width_t,deg=deg_t)[:,0]\n\n elif time_diff == 'Tik':\n for i in range(n2):\n ut[i,:] = TikhonovDiff(u[i + offset_x,:], dt, lam_t)\n\n 
else:\n for i in range(n2):\n ut[i,:] = FiniteDiff(u[i + offset_x,:],dt,1)\n \n ut = np.reshape(ut, (n2*m2,1), order='F')\n\n ########################\n # Now form the rhs one column at a time, and record what each one is\n ########################\n\n u2 = u[offset_x:n-offset_x,offset_t:m-offset_t]\n Theta = np.zeros((n2*m2, (D+1)*(P+1)), dtype=u.dtype)\n ux = np.zeros((n2,m2), dtype=u.dtype)\n rhs_description = ['' for i in range((D+1)*(P+1))]\n\n if space_diff == 'poly': \n Du = {}\n for i in range(m2):\n Du[i] = PolyDiff(u[:,i+offset_t],np.linspace(0,(n-1)*dx,n),diff=D,width=width_x,deg=deg_x)\n if space_diff == 'Fourier': ik = 2*np.pi*1j*np.fft.fftfreq(n, d = dx)\n \n for d in range(D+1):\n\n if d > 0:\n for i in range(m2):\n if space_diff == 'Tik': ux[:,i] = TikhonovDiff(u[:,i+offset_t], dx, lam_x, d=d)\n elif space_diff == 'FDconv':\n Usmooth = ConvSmoother(u[:,i+offset_t],width_x,sigma)\n ux[:,i] = FiniteDiff(Usmooth,dx,d)\n elif space_diff == 'FD': ux[:,i] = FiniteDiff(u[:,i+offset_t],dx,d)\n elif space_diff == 'poly': ux[:,i] = Du[i][:,d-1]\n elif space_diff == 'Fourier': ux[:,i] = np.fft.ifft(ik**d*np.fft.fft(u[:,i]))\n else: ux = np.ones((n2,m2), dtype=u.dtype) \n \n for p in range(P+1):\n Theta[:, d*(P+1)+p] = np.reshape(np.multiply(ux, np.power(u2,p)), (n2*m2), order='F')\n\n if p == 1: rhs_description[d*(P+1)+p] = rhs_description[d*(P+1)+p]+'u'\n elif p>1: rhs_description[d*(P+1)+p] = rhs_description[d*(P+1)+p]+'u^' + str(p)\n if d > 0: rhs_description[d*(P+1)+p] = rhs_description[d*(P+1)+p]+\\\n 'u_{' + ''.join(['x' for _ in range(d)]) + '}'\n\n return ut, Theta, rhs_description", "def get_inertia(self,p0,p1):\n # direction (index) of movement\n if p0.x!=p1.x:\n return 0\n elif p0.y!=p1.y:\n return 1\n else:\n # z direction\n return 2", "def linearize_pose_landmark_constraint(x, l, z):\n print(\"you shouldn't be here....\")\n e = np.zeros([2, 1])\n A = np.zeros([2, 3])\n B = np.zeros([2, 2])\n\n Ri = v2t(x)[0:2, 0:2]\n ti = x[0:2]\n\n fi = x[2]\n c = np.cos(fi)\n s = np.sin(fi)\n dR_dteta = np.array([[-s, c], [-c, -s]])\n\n e = Ri.transpose() @ (l - x[0:2]) - z\n\n B = Ri.transpose()\n\n A[0:2, 0:2] = -Ri.transpose()\n A[0:2, 2] = dR_dteta @ (l - ti)\n\n return e, A, B", "def rfpart(x):\n return 1 - Util.fpart(x)", "def compute_spline(self, initial_state, final_state):\r\n a, b, c, s = self._initialize_spline(initial_state, final_state)\r\n final_state_pred = self._motion_update_one_shot(initial_state, a, b, c, s)\r\n\r\n converge = self._check_converge(final_state, final_state_pred)\r\n total_iter = 0\r\n # pdb.set_trace()\r\n while (total_iter < self.max_iter) & (converge is not True): # (total_iter < self.max_iter) \r\n \r\n \r\n correction = self._compute_correction(initial_state, final_state, a, b, c, s)\r\n a = a - correction[0]\r\n b = b - correction[1]\r\n # c = c - correction[2]\r\n s = s - correction[2]\r\n \r\n final_state_pred = self._motion_update_one_shot(initial_state, a, b, c, s)\r\n\r\n converge = self._check_converge(final_state, final_state_pred)\r\n total_iter = total_iter +1\r\n\r\n # print(total_iter)\r\n # print(final_state_pred)\r\n # print(s)\r\n\r\n # sometimes it converge to negative s (travel distance) which \r\n # is invalid..., need to figure it out...\r\n if (converge == True) & (s > 0):\r\n final_state_pred, point_list = self._path_sampling_one_shot(initial_state, a, b, c, s)\r\n else:\r\n point_list = [[-1,-1]]\r\n\r\n return point_list", "def define_ufl_local_inertia_diff(self):\n\n if hasattr(self, 'ufl_local_inertia_dv'):\n return 
None\n\n if not self.config['formulation']['time']['unsteady']:\n self.ufl_local_inertia_dv = 0\n return None\n\n xi = self.test_vector\n dv = self.trial_vector\n rho = self.config['material']['density']\n\n self.ufl_local_inertia_dv = dlf.dot(xi, rho*dv)*dlf.dx\n\n return None", "def physical_to_comoving(dist_physical, redshift):\n return dist_physical / scale_factor(redshift)", "def swipeBase (self) :\n grid = self.grid\n\n #we start by putting every tile up\n for columnNbr in range(4) :\n nbrZeros = 4 - np.count_nonzero(grid[:,columnNbr])\n\n for lineNbr in range(4) :\n counter = 0\n while (grid[lineNbr, columnNbr] == 0) and (counter < 4):\n counter += 1\n if np.count_nonzero(grid[lineNbr:4, columnNbr]) != 0 :\n for remainingLine in range (lineNbr, 3) :\n grid[remainingLine, columnNbr] = grid[remainingLine+1, columnNbr]\n grid[3, columnNbr] = 0\n\n #now we do the additions\n for lineNbr in range(3) :\n if grid[lineNbr, columnNbr] == grid[lineNbr+1, columnNbr] :\n grid[lineNbr, columnNbr] *= 2\n for remainingLine in range (lineNbr+1, 3) :\n grid[remainingLine, columnNbr] = grid[remainingLine+1, columnNbr]\n grid[3, columnNbr] = 0\n\n return (grid)", "def dca_algorithm(machine_part_matrix):\r\n # Step 1: Order the rows and columns\r\n # Sum the 1s in each row and column\r\n row_sums = machine_part_matrix.sum(axis=1)\r\n col_sums = machine_part_matrix.sum(axis=0)\r\n \r\n # Order the rows and columns based on their sums\r\n row_indices = np.argsort(-row_sums) # descending order\r\n col_indices = np.argsort(col_sums) # ascending order\r\n \r\n # Step 2: Sort the columns\r\n for i in range(len(machine_part_matrix)):\r\n for j in range(len(col_indices)):\r\n # If a column contains 1 in the current row,\r\n # shift it to the left\r\n if machine_part_matrix[row_indices[i]][col_indices[j]] == 1:\r\n machine_part_matrix[:, [j, j-1]] = machine_part_matrix[:, [j-1, j]]\r\n col_indices[j], col_indices[j-1] = col_indices[j-1], col_indices[j]\r\n \r\n # Step 3: Sort the rows\r\n for j in range(len(col_indices)):\r\n for i in range(len(machine_part_matrix)-1):\r\n # If a block of 1s can be formed by shifting a row upward,\r\n # do it\r\n if machine_part_matrix[row_indices[i]][col_indices[j]] == 0 and \\\r\n machine_part_matrix[row_indices[i+1]][col_indices[j]] == 1:\r\n machine_part_matrix[[i, i+1], :] = machine_part_matrix[[i+1, i], :]\r\n row_indices[i], row_indices[i+1] = row_indices[i+1], row_indices[i]\r\n \r\n # Step 4: Form cells\r\n cells = []\r\n for i in range(len(row_indices)):\r\n # If the current row is not part of any cell yet\r\n if machine_part_matrix[row_indices[i], :].sum() > 0:\r\n # Form a new cell\r\n cell = []\r\n for j in range(len(col_indices)):\r\n if machine_part_matrix[row_indices[i], col_indices[j]] == 1:\r\n cell.append(col_indices[j])\r\n cells.append(cell)\r\n # Remove the processed rows from the matrix\r\n machine_part_matrix[row_indices[i], :] = 0\r\n \r\n return cells", "def distribute_force_constants_by_translations(fc, primitive, supercell):\n s2p = primitive.s2p_map\n p2s = primitive.p2s_map\n positions = supercell.scaled_positions\n lattice = supercell.cell.T\n diff = positions - positions[p2s[0]]\n trans = np.array(diff[np.where(s2p == p2s[0])[0]],\n dtype='double', order='C')\n rotations = np.array([np.eye(3, dtype='intc')] * len(trans),\n dtype='intc', order='C')\n permutations = primitive.get_atomic_permutations()\n distribute_force_constants(fc, p2s, lattice, rotations, permutations)", "def _F_to_m_on_basis(self, la):\n return 
self._weyl.from_reduced_word(Partition(la).from_kbounded_to_reduced_word(self.k)).stanley_symmetric_function()", "def get_latency_of_one_partition(\n partition: Partition,\n node_to_latency_mapping: Dict[Node, NodeLatency]\n) -> PartitionLatency:\n\n def get_top_nodes(partition: Partition) -> List[Node]:\n \"\"\"Given a partition, return a list of nodes on the top bfs level\"\"\"\n top_nodes: List[Node] = []\n for node in partition.nodes:\n # Skip placeholder and get_attr nodes\n if node.op in {'placeholder', 'get_attr'}:\n continue\n input_nodes: Dict[Node, None] = {}\n map_arg(node.args, lambda n: input_nodes.setdefault(n))\n map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))\n # If a node has no input nodes in this partition,\n # or its input nodes in this partition are placeholders and get_attrs\n # this node is on the top bfs level in this partition\n if not any([n in partition.nodes and n.op not in {'placeholder', 'get_attr'} for n in input_nodes]):\n top_nodes.append(node)\n return top_nodes\n\n def dfs_helper(node: Node, partition_latency) -> PartitionLatency:\n \"\"\"Given a top node of a partition, this function returns\n the latency of the critical path in the partition\n \"\"\"\n node_latency = node_to_latency_mapping[node]\n # Calculate the current overall latency of the partition\n overall_latency = partition_latency.overall_latency + max(node_latency.compute_latency, node_latency.mem_latency)\n # Update the mem latency of this path\n mem_latency = partition_latency.mem_latency + node_latency.mem_latency\n # Update the compute latency of this path\n compute_latency = partition_latency.compute_latency + node_latency.compute_latency\n # Get all users of this node that are in this partition\n users = set(node.users).intersection(partition.nodes)\n if users:\n max_latency = PartitionLatency(mem_latency=0., compute_latency=0., overall_latency=0.)\n for n in users:\n # Get new partition latency recursively\n new_partition_latency = dfs_helper(n, PartitionLatency(mem_latency, compute_latency, overall_latency))\n if new_partition_latency.overall_latency > max_latency.overall_latency:\n max_latency = new_partition_latency\n return max_latency\n # If there is no user, the node is at bottom of the partition\n return PartitionLatency(mem_latency, compute_latency, overall_latency)\n # Main part starts\n # Get all top level nodes of this partition\n top_nodes = get_top_nodes(partition)\n critical_path_latency = PartitionLatency(mem_latency=0., compute_latency=0., overall_latency=0.)\n # Go through all top nodes and find the largest latency (critical pass latency)\n for node in top_nodes:\n partition_latency = dfs_helper(node, PartitionLatency(mem_latency=0., compute_latency=0., overall_latency=0.))\n if partition_latency.overall_latency > critical_path_latency.overall_latency:\n critical_path_latency = partition_latency\n return critical_path_latency", "def move(self):\n x = y = z = 0.0\n for cell in self.cells:\n x += (cell.x)#*n\n y += (cell.y)#*n\n z += (cell.z)#*n\n np = float(len(self.cells))\n med = numpy.array([x/np,y/np,z/np])\n \n dists = []\n for cell in self.cells:\n d = (cell.x-self.x)**2+(cell.y-self.y)**2+(cell.z-self.z)**2\n d = numpy.sqrt(d)\n dists.append(d)\n #md = (cell.x-med[0])**2+(cell.y-med[1])**2+(cell.z-med[2])**2\n #dists[-1] = (dists[-1]+md)/2\n cell = self.cells[numpy.argmin(dists)]\n cc = numpy.array([cell.x, cell.y, cell.z])\n \n t = self.t\n if abs(self.dnp) * ( self.np-self.np_req) > 0:\n t = self.tr\n self.dcenter = (1-t)*(med-self.center + 
self.u*(cc-med))\n self.x,self.y,self.z = self.center = self.center + self.dcenter", "def pss(self):\n return (self.table[0, 0] * self.table[1, 1] - self.table[0, 1] * self.table[1, 0]) / \\\n ((self.table[0, 0] + self.table[1, 0]) * (self.table[0, 1] + self.table[1, 1]))" ]
[ "0.6461611", "0.63680524", "0.5586814", "0.55430216", "0.5531079", "0.55039924", "0.5489266", "0.546037", "0.53223366", "0.5185964", "0.5154166", "0.512814", "0.5126923", "0.5116043", "0.5107573", "0.5096535", "0.5077164", "0.5064965", "0.50587356", "0.505364", "0.50446224", "0.5016628", "0.50142735", "0.49990666", "0.49974972", "0.4975306", "0.49418455", "0.49132454", "0.49043941", "0.49028197", "0.4884775", "0.48805448", "0.48764023", "0.4867409", "0.48386604", "0.4827422", "0.4820443", "0.4815053", "0.48072875", "0.47973084", "0.47869632", "0.47731096", "0.47684437", "0.47669962", "0.4762961", "0.47591463", "0.47552863", "0.47549796", "0.47437796", "0.47371262", "0.47309038", "0.4717658", "0.47047192", "0.4689813", "0.46852902", "0.46812728", "0.46765456", "0.4664064", "0.46553674", "0.46515006", "0.46399897", "0.4639796", "0.462757", "0.46268502", "0.4625051", "0.46225545", "0.46153626", "0.46115568", "0.46085408", "0.45930517", "0.45928788", "0.45928243", "0.45917103", "0.45839512", "0.45816392", "0.45814344", "0.4576512", "0.45757186", "0.45711255", "0.45681125", "0.4567622", "0.45649892", "0.4552408", "0.45522928", "0.4549153", "0.4540097", "0.45395857", "0.45369753", "0.4533632", "0.45252976", "0.45228755", "0.45219022", "0.4518631", "0.45143774", "0.4512281", "0.45114622", "0.4507468", "0.45060506", "0.45050895", "0.44976994", "0.44962645" ]
0.0
-1
Calculates the rotational partition function, assuming a rigid rotor
def qrot(I,sym):
    T = s.Symbol("T")
    if type(I) == list:
        return (((s.pi * I[0] * I[1] * I[2])**(1/2))/sym) * ((8 * s.pi**2 * k * T) / (h**2))**(3/2)
    else:
        return (((s.pi * I)**(1/2))/sym) * ((8 * s.pi**2 * k * T) / (h**2))**(3/2)
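For reference, the list branch above implements the standard classical closed form for a nonlinear rigid rotor with symmetry number sigma, while the scalar branch applies the same 3/2-power form to a single moment of inertia. (Here k and h are assumed to be Boltzmann's and Planck's constants defined elsewhere in the module, and s is presumably a sympy import alias.)

$$ q_\mathrm{rot} = \frac{\sqrt{\pi\, I_A I_B I_C}}{\sigma} \left(\frac{8\pi^{2} k T}{h^{2}}\right)^{3/2} $$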
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rpartition(self, x):\n pass", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.hinderedrotor_partitionfunction(Tlist, self.frequency, self.barrier) ** self.degeneracy", "def relative_partition_function(self):\n return self.overall_norm**2", "def my_rodriguez_rotation(P, k, theta):\n\n P_rot = np.zeros((len(P), 3))\n\n for i in range(len(P)):\n P_rot[i] = P[i]*np.cos(theta) + np.cross(k, P[i])*np.sin(theta) + \\\n k*np.dot(k, P[i])*(1.0-np.cos(theta))\n\n return P_rot", "def getPartitionFunction(self, Tlist):\n\t\tQ = np.ones((len(Tlist)), np.float64) / self.symmetry\n\t\t# Active K-rotor\n\t\trotors = [mode for mode in self.modes if isinstance(mode, RigidRotor)]\n\t\tif len(rotors) == 0:\n\t\t\tTrot = constants.h * constants.c * 100.0 * 1.0 / constants.kB\n\t\t\tQ0 = [math.sqrt(T / Trot) for T in Tlist]\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\t# Other modes\n\t\tfor mode in self.modes:\n\t\t\tQ0 = mode.getPartitionFunction(Tlist)\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\treturn Q", "def rotate(self,r):\n return r.hprod( self.hprod( r.inv() ) )", "def _partition(self, l, r):\n\t\tpivot = median([\n\t\t\tself.order_dict[self.arrangement[l]], \n\t\t\tself.order_dict[self.arrangement[(l+r)//2]], \n\t\t\tself.order_dict[self.arrangement[r]]]\n\t\t)\n\t\twhile True:\n\t\t\twhile ((self.order_dict[self.arrangement[l]] < pivot) or (l == self.swap_idx)) and (l < r):\n\t\t\t\tl += 1\n\t\t\twhile ((self.order_dict[self.arrangement[r]] > pivot) or (r == self.swap_idx)) and (l < r):\n\t\t\t\tr -= 1\n\t\t\tif l == r:\n\t\t\t\tbreak\n\t\t\tself._swap(l, r, self.swap_idx)\n\t\treturn l", "def rotation(self):\n\t\treturn self.piv.a.rotate.v", "def partition(seq):\n\n return 0", "def partial_euler(angle = 90, Rmin = 3, Reff = None, p = 0.2, num_pts = 720):\n # Overhead calculations\n num_pts = abs(int(num_pts * angle/360))\n angle = np.radians(angle)\n sp = np.sqrt(p*angle) # Clothoid-to-normal transition point s value\n s0 = 2*sp + angle*(1-p)/(2*np.sqrt(p*angle/2))\n c = 1 / (2*sp*Rmin) # Scaling factor to enforce Rmin\n print(sp)\n\n # Constructing s and K arrays\n s = np.linspace(0, s0, num_pts)\n if p == 0: K = np.array([[1/Rmin] * len(s)])\n else:\n i1 = np.argmax(s > sp)\n i2 = np.argmax(s >= s0 - sp)\n K = c * np.concatenate([np.multiply(np.ones(i1), 2*s[:i1]),\n np.multiply(np.ones(i2-i1), 2*sp),\n np.multiply(np.ones(num_pts-i2), \n 2*(s0 - s[i2:num_pts]))])\n\n # Integrating to find x and y\n ds = s[1] - s[0]\n phi = cumtrapz(K*ds)\n x, y = np.concatenate([np.array([[0],[0]]), \n np.cumsum([ds*np.cos(phi), ds*np.sin(phi)], axis = 1)],\n axis = 1)\n\n return x, y", "def partition_data(d,r,n):\n\tdem_part = [] #democrat partition\n\trep_part = [] #republican partition\n\tdem_perm = range(d) \n\trandom.shuffle(dem_perm) #democrat permutation\n\trep_perm = range(r) \n\trandom.shuffle(rep_perm) #republican permutation\n\tdem_size = d/n #size per partition\n\trep_size = r/n #size per partition\n\n\ti = 0\n\tj = 0\n\tcount = 0\n\tfor x in range(0,n):\n\t\tif count < d%n:\n\t\t\tdem_part.append(dem_perm[i:i+dem_size+1])\n\t\t\ti = i+dem_size+1\n\t\telif x == n-1:\n\t\t\tdem_part.append(dem_perm[i:d])\n\t\telse:\n\t\t\tdem_part.append(dem_perm[i:i+dem_size])\n\t\t\ti = i+dem_size\n\t\tif count < r%n:\n\t\t\trep_part.append(rep_perm[i:i+rep_size+1])\n\t\t\tj = j+rep_size+1\n\t\telif x == n-1:\n\t\t\trep_part.append(rep_perm[j:r])\n\t\telse:\n\t\t\trep_part.append(rep_perm[j:j+rep_size])\n\t\t\tj = j+rep_size\n\t\tcount = count + 1\n\n\treturn 
dem_part,rep_part", "def rk4(x,t,h,f):\n\n k1=h*f(x,t)\n k2=h*f(x+k1/2,t+h/2)\n k3=h*f(x+k2/2,t+h/2)\n k4=h*f(x+k3,t+h)\n\n return x + (k1 + 2*k2 + 2*k3 + k4)/6", "def partition(v,m,I,V,sym):\n T = s.Symbol(\"T\")\n return qvib(v) + qtrans(m,V) + qrot(I,sym)", "def get_rotation_diversity(self, i, t):\n\n if self.rotation_type == 2:\n ith = self._get_check_i(self.diversity, i)\n rotations = self.pro.get_ordering(self.diversity, ith)\n t -= 1\n return rotations[t]\n else:\n return self.get_diversity(t)", "def rotation(self, p1, p2, p3):\n return (p2[0] - p1[0]) * (p3[1] - p1[1]) - (p2[1] - p1[1]) * (p3[0] - p1[0])", "def left_rotate_s4(arr, d):\n n = len(arr)\n g = gcd(d, n)\n for i in range(g):\n\n # move i-th values of blocks\n temp = arr[i]\n j = i\n while 1:\n k = j + d\n # print(\"K >= n : {} >= {}\".format(k, n), end=\"\\n\")\n if k >= n:\n k = k - n\n # print(\"K == i : {} == {}\".format(k, i), end=\"\\n\")\n if k == i:\n break\n # print(\"i: {}, j: {}, k: {}\".format(i, j, k), end=\"\\n\")\n arr[j] = arr[k]\n j = k\n\n arr[j] = temp", "def kuzmin_rotation(R,c,M,G=astronomicalG):\n return np.sqrt(2*G*np.power(10.,M)*R*R*np.power(c*c+R*R,-1.5))", "def compute_rotation(self):\n if self.predictions[self.iteration][0] == 90.0 or self.predictions[self.iteration][0] == 270.0:\n self.rotation = 20\n self.initial_adjust = True\n return\n\n if self.iteration == 0 or (self.iteration == 1 and self.initial_adjust):\n self.rotation = rotate.get_90_deg_rotation(self.predictions[self.iteration])\n elif self.iteration == 1 or (self.iteration == 2 and self.initial_adjust):\n self.rotation = rotate.get_45_deg_rotation(self.predictions, self.current_position)\n elif self.iteration >= 2 or (self.iteration > 2 and self.initial_adjust):\n self.rotation = rotate.get_fine_rotation(self.iteration)", "def fun_lorentzian(p,r):\n return p[1] / ((r/p[0])**2 + 1)", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.harmonicoscillator_partitionfunction(Tlist, self.frequency) ** self.degeneracy", "def rk4_perp(xy, v, NL, KL, KGdivKL, Mm, NP, nn, h):\n dx1 = h * v\n dv1 = h * fspring_perp(xy, NL, KL, KGdivKL, Mm, NP, nn)\n dx2 = h * (v + dv1 / 2.)\n dv2 = h * fspring_perp(xy + dx1 / 2., NL, KL, KGdivKL, Mm, NP, nn)\n dx3 = h * (v + dv2 / 2.)\n dv3 = h * fspring_perp(xy + dx2 / 2., NL, KL, KGdivKL, Mm, NP, nn)\n dx4 = h * (v + dv3)\n dv4 = h * fspring_perp(xy + dx3, NL, KL, KGdivKL, Mm, NP, nn)\n xout = xy + (dx1 + 2. * dx2 + 2. * dx3 + dx4) / 6.\n vout = v + (dv1 + 2. * dv2 + 2. 
* dv3 + dv4) / 6.\n\n return dx1, dv1, dx2, dv2, dx3, dv3, dx4, dv4, xout, vout", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.freerotor_partitionfunction(Tlist, self.frequencies, 1 if self.linear else 0)", "def partition(lst_part, p, r):\n cond_part = 0\n assign_part = 0\n pivot = lst_part[r] # Set pivot to last (right-most) value in list\n pivot_index = p\n for j in range(p,r):\n cond_part +=1\n if lst_part[j] <= pivot:\n lst_part[pivot_index],lst_part[j] = swap(lst_part[pivot_index],lst_part[j]) \n assign_part +=3\n pivot_index += 1 \n lst_part[pivot_index],lst_part[r] = swap(lst_part[pivot_index],lst_part[r])\n assign_part +=3\n return pivot_index,cond_part,assign_part", "def rfpart(x):\n return 1 - fpart(x)", "def intermediateJacPol2Rot(self,x):\n allS = np.sin(x[0,:])\n allC = np.cos(x[0,:])\n allR = x[1,:]\n \n Jac = Idn(x.shape[1],self._dim)\n Jac[:,0,0] = -allS*allR\n Jac[:,0,1] = allC\n Jac[:,1,0] = allC*allR\n Jac[:,1,1] = allS\n return Jac", "def rotLeft(a, d):\n for i in range(d):\n arr = left_rotate(arr)\n return arr", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def intermediateJacRot2Polar(self,x):\n r = cNorm(x[:2,:],kd=False)\n x0overr = x[0,:]/r\n x1overr = x[1,:]/r\n\n Jac = Idn(x.shape[1],x.shape[0])\n Jac[:,0,0] = -x1overr\n Jac[:,0,1] = x0overr\n Jac[:,1,0] = x0overr\n Jac[:,1,1] = x1overr\n \n return Jac", "def plummer_rotation(r,b,M,G=astronomicalG):\n return np.sqrt(2*G*np.power(10.,M)*r*r*np.power(b*b+r*r,-1.5))", "def rotmod(param, airmass, rotang, phase=None):\n # 2012-04-18 10:44 IJMC: Created\n a, b, offset = param[0:3]\n mod = a + b*airmass * np.cos(offset + rotang)\n if phase is not None:\n mod += param[3] * (phase - phase.mean())\n return mod", "def get_s0_xy_equipartition(N, angle):\n s0x= [1]\n s0y = [0]\n s0z = [0]\n for i in range(1,N):\n s_old = [s0x [-1],s0y [-1],s0z [-1] ]\n #final_angle = random.choice([-1,1]) * np.radians(angle - 3 + 6*random.random())\n #final_angle = random.choice([-1,1]) * np.radians(angle * 2*random.random())\n final_angle = random.choice([-1,1]) * np.radians(random.gauss(0, 1.2 * angle))\n sin_final = sin(final_angle)\n cos_final = cos(final_angle)\n s_new = [cos_final*s_old[0] +sin_final*s_old[1], -sin_final*s_old[0] + cos_final*s_old[1],0]\n s0x.append(s_new[0])\n s0y.append(s_new[1])\n s0z.append(s_new[2])\n norm = np.linalg.norm([s0x [-1],s0y [-1],s0z [-1] ])\n s0x [-1] = s0x[-1] / norm\n s0y [-1] = s0y[-1] / norm\n s0z [-1] = s0z[-1] / norm\n\n return np.concatenate((s0x,s0y,s0z),axis = 0)", "def reduced_partition_function_ratio(light_freq, heavy_freq, temp):\r\n # check lengths of arrays are the same.\r\n # The error would only occur if the 2 molecules are different.\r\n assert len(light_freq) == len(\r\n heavy_freq\r\n ), \"Array lengths do not match - please ensure both your chosen log files optimise the same molecule!\"\r\n\r\n # check if numpy array and convert if not.\r\n if not isinstance(light_freq, np.ndarray):\r\n np.asarray(light_freq)\r\n if not isinstance(heavy_freq, np.ndarray):\r\n np.asarray(heavy_freq)\r\n\r\n # calculate ln of ratio of heavy / light frequencies\r\n ratio = np.log(np.divide(heavy_freq, light_freq))\r\n # check if length of ratio array is the same as the frequency arrays\r\n assert len(ratio) == len(light_freq) == len(heavy_freq)\r\n\r\n # add the ratios together\r\n ratio = np.sum(ratio)\r\n\r\n # 
calculate vibrational partition functions\r\n Q_light = partition_function(light_freq, temp)\r\n Q_heavy = partition_function(heavy_freq, temp)\r\n\r\n # print variables used to calculate RPFR\r\n print(\"Primed variables (v', Q') refer to the light isotope.\")\r\n print(\"ln(v/v'): \", ratio)\r\n print(\"lnQ: \", Q_heavy)\r\n print(\"lnQ': \", Q_light)\r\n\r\n # calculate RPFR, defined as 1000*ln(beta).\r\n beta = 1000 * (ratio + Q_heavy - Q_light)\r\n print(\"1000*lnB: \", beta)\r\n return beta, ratio, Q_heavy, Q_light", "def Rfun(U,V,Q,Phi,Phibar, taudrag):\n \n Qclone=Q.copy()\n Qclone[Q<0]=0\n\n Ru=np.divide(np.multiply(-U,Qclone),Phi+Phibar)\n Rv=np.divide(np.multiply(-V,Qclone),Phi+Phibar)\n \n #reset to 0 if losing mass\n Ru[Q<0]=0\n Rv[Q<0]=0\n \n #if taudrag is infinity, only have the R componen \n if taudrag!=-1:\n F=Ru-(U/taudrag)\n G=Rv-(V/taudrag)\n \n else:\n F=Ru\n G=Rv\n \n return F, G", "def find_rotation_efficient(arr):\n # edge case: already sorted\n if arr[0] < arr[-1]:\n return 0\n\n low = 0\n high = len(arr)-1\n\n # when high is one greater than low, high will be rotation index\n while high - low > 1:\n\n # start guessing at middle\n guess_index = low + (high - low) / 2\n\n # rotation is left\n if arr[guess_index] < arr[low]:\n high = guess_index\n\n # rotation is right\n else:\n low = guess_index\n\n return high", "def rk4(accel,m,r,h,v): \n k1v = accel(m,r) \n k1r = v \n k2v = accel(m,r + h*0.5*k1r) \n k2r = v+k1v*h*0.5 \n k3v = accel(m,r + h*0.5*k2r) \n k3r = v+k2v*h*0.5\n k4v = accel(m,r + h*k3r) \n k4r = v+k3v*h\n new_v = v + h*(k1v + 2*k2v + 2*k3v + k4v)/float(6)\n new_r = r + h*(k1r + 2*k2r + 2*k3r + k4r)/float(6)\n return new_v,new_r", "def RingPartition(ringsize, z, r, beta):\n # divide the ring into segments and initialise the coordinate of the segments\n location = list(range(ringsize))\n assert ringsize<=16 and ringsize>=5, \"Ring size greater than 16 or smaller than 5 is not supported\"\n if ringsize<=7 and ringsize>=5:\n segment1 = location[0:3]\n segment2 = location[2:-1]\n segment2.reverse()\n segment3 = location[-2:]+[location[0]]\n segment3.reverse()\n elif ringsize<=10 and ringsize>=8:\n segment1 = location[0:4]\n segment2 = location[3:-2]\n segment2.reverse()\n segment3 = location[-3:]+[location[0]]\n segment3.reverse()\n elif ringsize<=13 and ringsize>=11:\n segment1 = location[0:5]\n segment2 = location[4:-3]\n segment2.reverse()\n segment3 = location[-4:]+[location[0]]\n segment3.reverse()\n else: #ringsize<=16 and ringsize>=14:\n segment1 = location[0:6]\n segment2 = location[5:-4]\n segment2.reverse()\n segment3 = location[-5:] + [location[0]]\n segment3.reverse()\n segcoord_1_init = SegCoord(segment1, r, beta)\n segcoord_2_init = SegCoord(segment2, r, beta)\n segcoord_3_init = SegCoord(segment3, r, beta)\n Reflection = np.array((-1,1))\n OPsq = np.inner(segcoord_1_init[-1], segcoord_1_init[-1])\n PQsq = np.inner(segcoord_2_init[-1], segcoord_2_init[-1])\n OQsq = np.inner(segcoord_3_init[-1], segcoord_3_init[-1])\n segcoord_1 = [Reflection*item for item in segcoord_1_init]\n segcoord_2 = [x + np.sqrt((OQsq,0)) for x in segcoord_2_init]\n segcoord_3 = [np.array(x) for x in segcoord_3_init]\n # Link segment together\n xp = (OPsq+OQsq-PQsq)/(2*np.sqrt(OQsq))\n yp = np.sqrt(OPsq-np.square(xp))\n phi1, phi2, phi3 = np.arctan2(segcoord_1[-1][1],segcoord_1[-1][0]), np.arctan2(segcoord_2[-1][1], segcoord_2[-1][0]-np.sqrt(OQsq)), np.arctan2(segcoord_3[-1][1], segcoord_3[-1][0])\n phiseg1, phiseg2 = np.arctan2(yp,xp), np.arctan2(yp,xp-np.sqrt(OQsq))\n 
sigma1, sigma2, sigma3 = np.abs(phi1-phiseg1), np.abs(phiseg2-phi2), np.abs(phi3)\n Rsigma1, Rsigma2, Rsigma3 = RotationMatrix(-sigma1), RotationMatrix(sigma2), RotationMatrix(-sigma3)\n coordinate_1 = [np.array((0,0))]\n seg1_size = len(segcoord_1)\n for i in range(1,seg1_size-1):\n coordinate_1.append(np.matmul(Rsigma1,segcoord_1[i]))\n coordinate_1.append(np.array((xp,yp)))\n #### Check Here ####\n coordinate_2 = []\n seg2_size = len(segcoord_2)\n for i in range(seg2_size-2,0,-1):\n tmp = np.sqrt((OQsq,0))\n coordinate_2.append(tmp + np.matmul(Rsigma2, (segcoord_2[i]-tmp)))\n coordinate_3 = [np.sqrt((OQsq,0))]\n seg3_size = len(segcoord_3)\n for i in range(seg3_size-2,0,-1):\n coordinate_3.append(np.matmul(Rsigma3, segcoord_3[i]))\n coordinate = coordinate_1 + coordinate_2 + coordinate_3\n Rg = np.sum(coordinate,axis=0)\n phig = np.arctan2(Rg[1],Rg[0]) + np.pi/2\n Rphig = RotationMatrix(-phig) \n newcoord = [np.matmul(Rphig, coordinate[i]-Rg).tolist()+[z[i]] for i in range(ringsize)]\n origin = np.mean(newcoord,axis=0)\n finalcoord = np.array(newcoord)-origin\n return finalcoord", "def get_Grotations(self, x):\n xsh = x.get_shape().as_list()\n angles = [0.,np.pi/2.,np.pi,3.*np.pi/2.]\n rx = []\n for i in range(4):\n # Z4 rotations about the z axis\n perm = [1,0,2,3]\n y = tf.transpose(x, perm=perm)\n y = tf.contrib.image.rotate(y, angles[i])\n y = tf.transpose(y, perm=perm)\n # Rotations in the quotient space (sphere S^2)\n # i) Z4 rotations about y axis\n for j in range(4):\n perm = [2,1,0,3]\n z = tf.transpose(y, perm=perm)\n z = tf.contrib.image.rotate(z, angles[-j])\n z = tf.transpose(z, perm=perm)\n \n rx.append(z)\n # ii) 2 rotations to the poles about the x axis\n perm = [0,2,1,3]\n z = tf.transpose(y, perm=perm)\n z = tf.contrib.image.rotate(z, angles[3])\n z = tf.transpose(z, perm=perm)\n rx.append(z)\n\n z = tf.transpose(y, perm=perm)\n z = tf.contrib.image.rotate(z, angles[1])\n z = tf.transpose(z, perm=perm)\n rx.append(z)\n\n return rx", "def getPartitionFunction(self, Tlist, V=1.0):\n\t\treturn _modes.translation_partitionfunction(Tlist, self.mass, self.dimension, V)", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def partition_function(array, temp):\r\n\r\n # Constants imported from scipy.constants\r\n h = scipy.constants.h # Planck's constant\r\n # speed of light must be in cm/s as wavenumber is in cm-1\r\n c = scipy.constants.c * 100\r\n k = scipy.constants.k # Boltzmann constant\r\n T = temp # extracted from log file using extract_temp()\r\n\r\n # check if inputs are numpy arrays and convert if not.\r\n if not isinstance(array, np.ndarray):\r\n np.asarray(array)\r\n\r\n # conversion to exponent\r\n u = (h * array * c) / (k * T)\r\n\r\n # calculates natural log of an individual frequency contribution to the partition function\r\n Q_ = np.log(np.exp(-(u / 2)) / (1 - np.exp(-u)))\r\n # sums all the contributions together, giving the final result.\r\n Q = np.sum(Q_)\r\n return Q", "def comp_rot_dir(self):\n\n MMF = self.comp_mmf_unit()\n p = self.get_pole_pair_number()\n\n # Compute rotation direction from unit mmf\n results = MMF.get_harmonics(1, \"freqs\", \"wavenumber\")\n H1 = 
results[MMF.symbol]\n\n return sign(H1[0])", "def rk3(x,t,h,f):\n \n k1=h*f(x,t)\n k2=h*f(x+k1/2,t+h/2)\n k3=h*f(x-k1+2*k2,t+h)\n\n return x + (k1 + 4*k2 + k3)/6", "def rotate_phasor(r, r1, r2):\n return (r - r2) / (r1 - r2)", "def rotate(prime_pos, theta, phi):\n light_dir = np.array([0, 0, 1])\n origin_prime = np.array(prime_pos)\n light_dir = phi_rot(light_dir, phi)\n light_dir = theta_rot(light_dir, theta)\n # origin = phi_rot(origin_prime, phi)\n origin = theta_rot(origin_prime, theta)\n return origin, light_dir", "def rk_adaptive(accel,m,r,h,v,recur,emin=10**-12,emax=10**-8,hmax=.1,hmin=.01,recurmax=100):\n k1v = accel(m,r)\n k1r = v\n k2v = accel(m,r + 0.25*k1r*h)\n k2r = v + (0.25*k1v)*h\n k3v = accel(m,r + (3/32.*k1r + 9/32.*k2r)*h)\n k3r = v + (3/32.*k1v + 9/32.*k2v)*h\n k4v = accel(m,r + (1932/2197.*k1r - 7200/2197.*k2r + 7296/2197.*k3r)*h)\n k4r = v + (1932/2197.*k1v - 7200/2197.*k2v + 7296/2197.*k3v)*h\n k5v = accel(m,r + (439/216.*k1r - 8*k2r + 3680/513.*k3r - 845/4104.*k4r)*h)\n k5r = v + (439/216.*k1v - 8*k2v + 3680/513.*k3v - 845/4104.*k4v)*h\n k6v = accel(m,r - (8/27.*k1r + 2*k2r - 3544/2565.*k3r + 1859/4104.*k4r - 11/40.*k5r)*h)\n k6r = v - (8/27.*k1v + 2*k2v - 3544/2565.*k3v + 1859/4104.*k4v - 11/40.*k5v)*h\n\n # 4th order calculation\n new_v4 = v + h*(25/216.*k1v + 1408/2565.*k3v + 2197/4104.*k4v - 1/5.*k5v)\n new_r4 = r + h*(25/216.*k1r + 1408/2565.*k3r + 2197/4104.*k4r - 1/5.*k5r)\n \n # 5th order calculation\n new_v5 = v + h*(16/135.*k1v + 6656/12825.*k3v+28561/56430.*k4v - 9/50.*k5v + 2/55.*k6v) \n new_r5 = r + h*(16/135.*k1r + 6656/12825.*k3r+28561/56430.*k4r - 9/50.*k5r + 2/55.*k6r) \n\n # Calculate truncation error between 5th and 4th order\n eps = np.abs( (np.max(np.abs(new_r5)) - np.max(np.abs(new_r4))) / np.max(np.abs(new_r4)))\n \n # Compare eps to emin and emax and update h accordingly\n if np.max(eps) < emin:\n if h*2.0 < hmax:\n h *= 2.0\n new_v = new_v5\n new_r = new_r5 \n \n if np.max(eps) > emax:\n if h/2.0 > hmin:\n h /= 2.0\n print h\n # Error too large, call rk_adaptive again with smaller h\n if recur < recurmax:\n recur += 1\n rk_adaptive(accel,m,r,h,v,recur)\n new_v = new_v5\n new_r = new_r5\n \n else:\n new_v = new_v5\n new_r = new_r5\n \n return new_v, new_r, h", "def rk4(f, x, t, dt, order=4): \n if order >=1: k1 = dt * f(t , x)\n if order >=2: k2 = dt * f(t+dt/2, x+k1/2)\n if order ==3: k3 = dt * f(t+dt , x+k2*2-k1)\n if order ==4:\n k3 = dt * f(t+dt/2, x+k2/2)\n k4 = dt * f(t+dt , x+k3)\n if order ==1: return x + k1\n elif order ==2: return x + k2\n elif order ==3: return x + (k1 + 4*k2 + k3)/6\n elif order ==4: return x + (k1 + 2*(k2 + k3) + k4)/6\n else: raise NotImplementedError", "def earth_rotation_effect(Nbase, slice_num, int_time, declination=30.):\n\n\tp = np.pi/180.\n\tdelta = p*declination\n\tk = slice_num\n\tHA =-15.0*p*(k-1)*int_time/(3600.0) - np.pi/180.0*90.0 + np.pi/180.0*360.0\n\t\n\tnew_Nbase = np.zeros(Nbase.shape)\n\tnew_Nbase[:,0] = np.sin(HA)*Nbase[:,0] + np.cos(HA)*Nbase[:,1]\n\tnew_Nbase[:,1] = -1.0*np.sin(delta)*np.cos(HA)*Nbase[:,0] + np.sin(delta)*np.sin(HA)*Nbase[:,1] + np.cos(delta)*Nbase[:,2]\n\tnew_Nbase[:,2] = np.cos(delta)*np.cos(HA)*Nbase[:,0] - np.cos(delta)*np.sin(HA)*Nbase[:,1] + np.sin(delta)*Nbase[:,2]\n\treturn new_Nbase", "def operator(self, params: Tensor) -> Tensor:\n theta, phi = params\n # calculate entries\n a: Tensor = exp(1j * phi) * cos(theta / 2)\n b: Tensor = sin(theta / 2)\n c: Tensor = -b\n d: Tensor = exp(-1j * phi) * cos(theta / 2)\n # construct the rows of the rotation matrix\n r1: Tensor 
= cat((a.view(1), b.view(1)))\n r2: Tensor = cat((c.view(1), d.view(1)))\n # build and return the rotation matrix\n rot: Tensor = cat((r1, r2)).view(2, 2)\n return rot", "def rotate(p,q,A,V): \n n = A.shape[0]\n App, Aqq, Apq = A[p,p], A[q,q], A[p,q] #Initial values\n phi = 0.5*math.atan2(2*Apq, Aqq-App) #Find the rotation value\n c, s = math.cos(phi), math.sin(phi) #Calculate sin and cos\n\n #Update the matrix diagonal elements\n A[p,p] = c*c*App + s*s*Aqq - 2*s*c*Apq \n A[q,q] = s*s*App + c*c*Aqq + 2*s*c*Apq\n A[p,q] = 0 #This is zero by construction\n \n \n #Iterate over and update remaining off-diagonal elements\n for i in range(p):\n Aip, Aiq = A[i,p], A[i,q]\n A[i,p] = c*Aip - s*Aiq\n A[i,q] = c*Aiq + s*Aip\n \n for i in range(p+1,q):\n Api, Aiq = A[p,i], A[i,q]\n A[p,i] = c*Api - s*Aiq\n A[i,q] = c*Aiq + s*Api\n \n for i in range(q+1,n):\n Api, Aqi = A[p,i], A[q,i]\n A[p,i] = c*Api - s*Aqi\n A[q,i] = c*Aqi + s*Api\n \n #Update eigenvectors in matrix V\n for i in range(n):\n Vip, Viq = V[i,p], V[i,q]\n V[i,p] = c*Vip - s*Viq\n V[i,q] = s*Vip + c*Viq\n \n return A, V", "def _rk4(t, dt, x, f, args=None):\n x = np.asarray(x)\n k1 = np.asarray(f(x, t, *args))\n k2 = np.asarray(f(x + 0.5*dt*k1, t + 0.5*dt, *args))\n k3 = np.asarray(f(x + 0.5*dt*k2, t + 0.5*dt, *args))\n k4 = np.asarray(f(x + dt*k3, t + dt, *args))\n return x + dt*(k1 + 2*k2 + 2*k3 + k4)/6.0", "def modulus_raknare(steps):\n i = 0\n\n def next_step():\n nonlocal i\n i = (i + 1) % steps\n return i\n return next_step", "def rodriguesRotation(self, v, k, object_pose):\n\n pose_array = geometry_msgs.msg.PoseArray()\n pose_array.header.frame_id = self.pose_reference_frame\n\n temp = np.zeros(shape=(100,3))\n for i in range(0, self.rodrigues_resolution):\n angle = 2 * math.pi * (float(i)/self.rodrigues_resolution)\n temp[i] = v * math.cos(angle) + np.cross(k,v) * math.sin(angle) + k * np.dot(k,v) * (1-math.cos(angle)) \n\n array = np.zeros([3, 3])\n array[0] = -temp[i]\n array[1] = np.cross(temp[i], k)\n array[2] = k\n\n array = np.rot90(np.fliplr(array))\n quad = Quaternion(matrix=array)\n pose_msg = self.makePoseMessage(\n [object_pose.position.x,\n object_pose.position.y,\n object_pose.position.z],\n quad)\n\n pose_array.poses.append(pose_msg)\n \n self.rodriguesPosePublisher.publish(pose_array)\n\n comp = np.zeros(shape=(3))\n\n # Finds vector with highest z component\n for r in range(1,100):\n if temp[r-1][2] > comp[2]:\n comp = temp[r-1]\n\n return comp", "def calc_rot(rot, POIn, ice_slope, max_rot):\n row, col = cuda.grid(2)\n\n if row < POIn.shape[0] and col < POIn.shape[1]:\n rot[ row, col ] = POIn[ row, col ] * ice_slope[ row, col ] * max_rot\n \n if rot[row, col] > max_rot:\n rot[ row, col ] = max_rot", "def rk4(s,t0,tf,h=30):\n\n t = t0\n\n if tf < t0:\n h = -h\n\n while(abs(tf-t) > 0.00001):\n if (abs(tf-t) < abs(h)):\n h = tf-t\n\n k1 = h*sdot(s)\n k2 = h*sdot(s+k1/2)\n k3 = h*sdot(s+k2/2)\n k4 = h*sdot(s+k3)\n\n s = s+(k1+2*k2+2*k3+k4)/6\n t = t+h\n\n # if (s[2]<0 and s[2]>-200 and s[5]>0):\n # dt = -s[2]/s[5]\n # print(t+dt)\n\n return s", "def _spin(self):\n center= self.rect.center\n self.dizzy= self.dizzy + 10 #12\n if self.dizzy >= 360:\n self.dizzy = 0\n self.image = self.original\n else:\n rotate= pygame.transform.rotate\n self.image= rotate(self.original, self.dizzy)\n self.rect= self.image.get_rect(center= center)", "def sinu_ramp_rev(t, kinematic_parameters):\n int_precision = 1e-12\n steady_rotation_frequency = kinematic_parameters[0]\n initial_ramp_time = kinematic_parameters[1]\n\n 
steady_rotation_omega = 360 * steady_rotation_frequency\n omega_print = steady_rotation_omega * np.pi / 180\n print('steady revolving omega = %s' % omega_print)\n\n def omega(x):\n \"\"\"rotation speed function\"\"\"\n if x <= initial_ramp_time:\n omega = steady_rotation_omega / 2 + steady_rotation_omega / 2 * np.sin(\n 2 * np.pi * x / (2 * initial_ramp_time) - np.pi / 2)\n else:\n omega = steady_rotation_omega\n\n return omega\n\n def ddphi(x):\n \"\"\"flapping angular acceleration function\"\"\"\n return derivative(omega, x, dx=1e-6)\n\n ramp_angle = integrate.quad(lambda x: np.abs(omega(x)), 0,\n initial_ramp_time)[0]\n print('initial sinu ramp angle = %s' % ramp_angle)\n\n omega_int = []\n for ti in t:\n omega_int.append(omega(ti))\n\n def phi(x):\n \"\"\"rotation angle function\"\"\"\n if x <= initial_ramp_time:\n return integrate.quad(omega, 0, x, epsabs=int_precision)[0]\n else:\n return ramp_angle + steady_rotation_omega * (x - initial_ramp_time)\n\n kinematic_angles = []\n for ti in t:\n kinematic_anglesi = [-phi(ti), 0, -omega(ti), 0, -ddphi(ti), 0]\n kinematic_angles.append(kinematic_anglesi)\n\n return kinematic_angles", "def disk_r(N=10,h=1.0,z0=1.0,p0=1.0,Mtot=1.0):\n r = np.linspace(0,100*h,N)\n z = np.array([random.uniform(0,z0) for _ in range(N)])\n #z = np.random.uniform(0,100*z0,N)\n #q_r = np.random.uniform(0,1,N)\n q_r = np.array([random.uniform(0,1) for _ in range(N)]) \n\n LHS = np.exp(-r/h)*(1+ r/h)\n RHS = 1 - ( (2*np.pi*q_r*p0*h**2)/(Mtot*np.tanh(z/z0)) )\n \n w = np.interp(r,LHS,RHS)\n u = np.array([random.uniform(0,1) for _ in range(N)])\n #u = np.random.uniform(0,1,N)\n theta = 2*np.pi*u\n\n x = w*np.cos(theta)\n y = w*np.sin(theta)\n\n p = np.stack((x,y,z),axis=-1)\n\n return p", "def find_plane_angles(self, roof_motor_position):\n\n # Calcolo il punto mediano tra i vertici 2 e 3\n pc_x = (self.roof_vertex_x[1] + self.roof_vertex_x[2]) / 2\n pc_y = (self.roof_vertex_y[1] + self.roof_vertex_y[2]) / 2\n pc_z = (self.roof_vertex_z[1] + self.roof_vertex_z[2]) / 2\n\n # Questa non so cosa sia\n base_r = [[self.roof_vertex_x[0] - pc_x, self.roof_vertex_y[0] - pc_y, self.roof_vertex_z[0] - pc_z],\n [self.roof_vertex_x[1] - pc_x, self.roof_vertex_y[1] - pc_y, self.roof_vertex_z[1] - pc_z],\n [0.0, 0.0, 0.0]]\n\n # Questa e' la costruzione di una matrice\n mat_rot = [[0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0]]\n\n # Non so quale operazione è implementata, ma a me servono solo tre elementi, j=2, i=0,1, j=1, i=0\n # Primo elemento, j=1, i=0\n mr = math.sqrt((base_r[0][0] ** 2) + (base_r[0][1] ** 2) + (base_r[0][2] ** 2))\n mat_rot[1][0] = base_r[0][1] / mr\n # Secondo elemento, j=2, i=0\n mat_rot[2][0] = base_r[0][2] / mr\n # Terzo elemento, j=2, i=1\n mr = math.sqrt((base_r[1][0] ** 2) + (base_r[1][1] ** 2) + (base_r[1][2] ** 2))\n mat_rot[2][1] = base_r[1][2] / mr\n\n # In alternativa posso calcolare tutti gli elementi della matrice\n # for i in range(2):\n # mr = math.sqrt((base_r[i][0] ** 2) + (base_r[i][1] ** 2) + (base_r[i][2] ** 2))\n # for j in range(3):\n # base_r[i][j] /= mr\n # mat_rot[j][i] = base_r[i][j]\n\n # Sono elementi della matrice non utilizzati\n # base_r[2][0] = +base_r[1][1] * base_r[0][2] - base_r[0][1] * base_r[1][2]\n # base_r[2][1] = -base_r[1][0] * base_r[0][2] + base_r[0][0] * base_r[1][2]\n # base_r[2][2] = +base_r[1][0] * base_r[0][1] - base_r[0][0] * base_r[1][1]\n # for i in range(3):\n # mat_rot[i][2] = base_r[2][i]\n\n # Qui estraggo la terna di Tait-Bryan angles usata internamente, la Z1Y2X3\n k17 = mat_rot[2][0]\n k16 = 
mat_rot[1][0]\n l17 = mat_rot[2][1]\n m20 = math.asin(k17)\n i23 = math.cos(m20)\n i24 = k16 / i23\n i25 = l17 / i23\n m19 = math.asin(i24)\n self.zyx1_r = m19 + roof_motor_position\n self.zyx2_r = math.asin(k17)\n self.zyx3_r = math.asin(i25)\n self.zyx3 = self.zyx3_r / Kinematic.M_TO_RAD\n self.zyx2 = self.zyx2_r / Kinematic.M_TO_RAD\n self.zyx1 = self.zyx1_r / Kinematic.M_TO_RAD\n angles = self.zyx_r_to_xyz(self.zyx3_r, self.zyx2_r, self.zyx1_r)\n self.xyz1 = angles[2]\n self.xyz2 = angles[0]\n self.xyz3 = angles[1]\n self.xyz1_r = angles[5]\n self.xyz2_r = angles[3]\n self.xyz3_r = angles[4]", "def rotate(matrix):\n n, = np.shape(matrix)\n x = np.zeros(n, ) # Column vector of unknown\n\n \"\"\"\n Reduction of the matrix to\n a triangular form\n \"\"\"\n for i in range(0, n):\n for j in range(i + 1, n):\n a = matrix[i, i]\n b = matrix[j, i]\n c = a / m.sqrt(a * a + b * b)\n s = b / m.sqrt(a * a + b * b)\n for k in range(i, n + 1):\n t = matrix[i, k]\n matrix[i, k] = (c * matrix[i, k]) + (s * matrix[j, k])\n matrix[j, k] = (-s * t) + (c * matrix[j, k])\n\n \"\"\"\n Back stroke from the Gauss method\n \"\"\"\n for i in range(n - 1, -1, -1):\n summ = 0\n for j in range(i + 1, n):\n summ += matrix[i, j] * x[j]\n summ = matrix[i, n] - summ\n if matrix[i, i] == 0:\n return False\n x[i] = summ / matrix[i, i]\n\n i = 0\n while i < len(x):\n x[i] = int((x[i] * 10000) + 0.5) / 10000\n i += 1\n\n \"\"\"\n Vector of discrepancy (Ax - B)\n \"\"\"\n a, b = create_matrix_txt(form='normal')\n discrep = np.dot(a, x)\n discrep = discrep - b\n\n print(\"Method of rotation:\\n\")\n print(\"Vector discrepancy: \", discrep)\n print(\"Vector x: \", x, \"\\n\")\n\n return x", "def rk45(fp, x0, y0, ts):\n k1 = fp(x0, y0)\n k2 = fp(x0 +ts/2, y0+ts/2*k1)\n k3 = fp(x0 +ts/2, y0+ts/2*k2)\n k4 = fp(x0 +ts, y0+ts*k3) \n return y0 + ts/6*(k1+2*k2+2*k3+k4)", "def determine_rotation(arm, d, tip_data, rot_data):\n n_t = np.zeros(3)\n for this_n_t in tip_data['pos_ntip_wrt_r']:\n n_t += this_n_t\n n_t /= len(tip_data['pos_ntip_wrt_r'])\n print(\"Our n_t to use in this stage: {}\".format(n_t))\n\n K = len(rot_data['pos_ntip_wrt_s'])\n errors_zyz = []\n errors_zyx = []\n\n for k in range(K):\n lhs = rot_data['pos_ntip_wrt_s'][k]\n t_st = rot_data['pos_tool_wrt_s_code'][k]\n ypr = rot_data['rot_tool_wrt_s_code'][k]\n yaw, pitch, roll = ypr[0], ypr[1], ypr[2]\n\n # R_zyz\n R_z1 = U.rotation_matrix_3x3_axis(angle=roll, axis='z')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z2 = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyz = R_z2.dot(R_y).dot(R_z1)\n\n # R_zyx\n R_x = U.rotation_matrix_3x3_axis(angle=roll, axis='x')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyx = R_z.dot(R_y).dot(R_x)\n\n # Evaluate!\n rhs_zyz = t_st + R_zyz.dot( n_t )\n rhs_zyx = t_st + R_zyx.dot( n_t )\n err_zyz = np.linalg.norm(lhs - rhs_zyz)\n err_zyx = np.linalg.norm(lhs - rhs_zyx)\n errors_zyz.append( err_zyz )\n errors_zyx.append( err_zyx )\n print(\"\\nerr_zyz: {:.3f} for {}-th sample\".format(err_zyz, k))\n print(\"err_zyx: {:.3f} for {}-th sample\".format(err_zyx, k))\n print(\"R_zyz:\\n{}\".format(R_zyz))\n print(\"R_zyx:\\n{}\".format(R_zyx))\n\n print(\"\\nDone with evaluation!\")\n print(\"zyz has avg error {:.5f}\".format(np.mean(errors_zyz)))\n print(\"zyx has avg error {:.5f}\".format(np.mean(errors_zyx)))", "def orientation(p, q, r):\n val = (q.y - p.y) * (r.x - q.x) - (q.x - p.x) * (r.y - q.y)\n if val == 0:\n return 0\n elif val > 0:\n 
return 1\n else:\n return 2", "def rfpart(x):\n return 1 - Util.fpart(x)", "def partition3(a, l, r):\n x = a[l]\n j = l\n for i in range(l + 1, r + 1):\n # print(a)\n if a[i] < x:\n j += 1\n a[i], a[j] = a[j], a[i]\n a[l], a[j] = a[j], a[l]\n\n k = j\n for i in range(j + 1, r + 1):\n # print(a)\n if a[i] == x:\n k += 1\n a[i], a[k] = a[k], a[i]\n # a[l], a[k] = a[k], a[l]\n # print(a)\n\n return j,k", "def _apply_spin_time_reversal(rotation_matrix_cartesian, spin, numeric):\n dim = rotation_matrix_cartesian.shape[0]\n if numeric:\n assert np.all(rotation_matrix_cartesian == np.eye(dim))\n else:\n assert rotation_matrix_cartesian == sp.eye(dim)\n if spin.total == 0:\n return {spin: 1}\n\n # time-reversal is represented by sigma_y * complex conjugation\n elif spin.total == Fraction(1, 2):\n if spin == SPIN_UP:\n if numeric:\n return {SPIN_DOWN: 1j}\n else:\n return {SPIN_DOWN: sp.I}\n else:\n assert spin == SPIN_DOWN\n if numeric:\n return {SPIN_UP: -1j}\n else:\n return {SPIN_UP: -sp.I}\n else:\n raise NotImplementedError('Spins larger than 1/2 are not implemented.')", "def toRot(q):\n R = SX.zeros(3, 3)\n qi = q[0]; qj = q[1]; qk = q[2]; qr = q[3]\n R[0, 0] = 1. - 2. * (qj * qj + qk * qk);\n R[0, 1] = 2. * (qi * qj - qk * qr);\n R[0, 2] = 2. * (qi * qk + qj * qr)\n R[1, 0] = 2. * (qi * qj + qk * qr);\n R[1, 1] = 1. - 2. * (qi * qi + qk * qk);\n R[1, 2] = 2. * (qj * qk - qi * qr)\n R[2, 0] = 2. * (qi * qk - qj * qr);\n R[2, 1] = 2. * (qj * qk + qi * qr);\n R[2, 2] = 1. - 2. * (qi * qi + qj * qj)\n\n return R", "def inertia_tensor_partial(self, part, masswt=True, zero=ZERO):\n tensor = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]\n\n for i in part:\n if masswt:\n # I(alpha, alpha)\n tensor[0][0] += self.mass(i) * (self.y(i) * self.y(i) + self.z(i) * self.z(i))\n tensor[1][1] += self.mass(i) * (self.x(i) * self.x(i) + self.z(i) * self.z(i))\n tensor[2][2] += self.mass(i) * (self.x(i) * self.x(i) + self.y(i) * self.y(i))\n\n # I(alpha, beta)\n tensor[0][1] -= self.mass(i) * self.x(i) * self.y(i)\n tensor[0][2] -= self.mass(i) * self.x(i) * self.z(i)\n tensor[1][2] -= self.mass(i) * self.y(i) * self.z(i)\n\n else:\n # I(alpha, alpha)\n tensor[0][0] += self.y(i) * self.y(i) + self.z(i) * self.z(i)\n tensor[1][1] += self.x(i) * self.x(i) + self.z(i) * self.z(i)\n tensor[2][2] += self.x(i) * self.x(i) + self.y(i) * self.y(i)\n\n # I(alpha, beta)\n tensor[0][1] -= self.x(i) * self.y(i)\n tensor[0][2] -= self.x(i) * self.z(i)\n tensor[1][2] -= self.y(i) * self.z(i)\n\n # mirror\n tensor[1][0] = tensor[0][1]\n tensor[2][0] = tensor[0][2]\n tensor[2][1] = tensor[1][2]\n\n # Check the elements for zero and make them a hard zero.\n for i in range(3):\n for j in range(3):\n if math.fabs(tensor[i][j]) < zero:\n tensor[i][j] = 0.0\n return tensor", "def CR(phi):\n return (np.kron(P0,s0) + np.kron(P1,R(phi)))", "def rot_crop(x):\n x = abs(x)\n deg45 = math.pi * 0.25\n deg135 = math.pi * 0.75\n x = x * math.pi / 180\n a = (math.sin(deg135 - x) - math.sin(deg45 - x))/(math.cos(deg135-x)-math.cos(deg45-x))\n return math.sqrt(2) * (math.sin(deg45-x) - a*math.cos(deg45-x)) / (1-a)", "def test_revolute(self):\n # Rotate around the z axis\n r = Joint.revolute(np.array([0, 0, 1]))\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat, np.array([1, 0, 0, 1]))[:3]\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))", "def corrected_rotation(x_arr, mu):\n if x_arr < (mu-180):\n x_arr += 360\n elif x_arr > mu+180:\n x_arr -= 360\n\n return x_arr", "def next_partition(Z, k, h):\n n = 
len(Z)\n for i in range(n-1, 0, -1):\n if(Z[i] <= k[i-1]):\n h[Z[i]] -= 1\n Z[i] += 1\n\n if Z[i] == len(h):\n h.append(1)\n else:\n h[Z[i]] += 1\n\n k[i] = Z[i] if (k[i] <= Z[i]) else k[i]\n\n for j in range(i+1, n):\n h[Z[j]] -= 1\n h[Z[0]] += 1\n\n Z[j] = Z[0]\n k[j] = k[i]\n\n while h[-1] == 0:\n del h[-1]\n\n return Z, k, h\n return None", "def rk4(fn, time, state, time_step, *args):\n k1 = time_step*fn(time, state, *args)\n k2 = time_step*fn(time + 0.5 * time_step, state + 0.5 * k1, *args)\n k3 = time_step*fn(time + 0.5 * time_step, state + 0.5 * k2, *args)\n k4 = time_step*fn(time + time_step, state + k3, *args)\n return state + (1/6)*(k1 + 2*k2 + 2*k3 + k4)", "def partition(game, player):\n height = game.height\n width = game.width\n blanks = game.get_blank_spaces()\n has_partition = False\n partition_col = int(game.width/2)\n partition_row = int(game.height/2)\n moves = game.get_legal_moves(player)\n if moves:\n player_location = game.get_player_location(player)\n for i in range(2, width - 3): #search for vertical partitions\n if (0,i) not in blanks and (0,i+1) not in blanks:\n j = 1\n while j < height and (j, i) not in blanks and (j, i + 1) not in blanks:\n j += 1\n if j == height:\n has_partition = True\n pb = partition_blanks(game, (0,i))\n if pb[0] > pb[1]: #more blanks on the left of the partition\n for move in moves:\n if move[1] < i:\n return has_partition, True\n return has_partition, False\n else: #more blanks on right of partition\n for move in moves:\n if move[1] > i + 1:\n return has_partition, True\n return has_partition, False\n\n for i in range(2, height - 3): #seach for horizontal partitions\n if (i,0) not in blanks and (i+1,0) not in blanks:\n j = 1\n while j < width and (i,j) not in blanks and (i+1, j) not in blanks:\n j += 1\n if j == width:\n has_partition = True\n pb = partition_blanks(game, (i, 0))\n if pb[0] > pb[1]: #more blanks on top of partition\n for move in moves:\n if move[0] < i:\n return has_partition, True\n return has_partition, False\n else: #more blanks below partition\n for move in moves:\n if move[0] > i + 1:\n return has_partition, True\n return has_partition, False\n\n return has_partition, False", "def retarder(phase,angle=0):\n r = np.exp(1j*phase/2)\n R = np.array([[r,0],[0,np.conj(r)]])\n if angle != 0:\n return Jones.rotate(R,angle)\n else:\n return R", "def I1(k, horn_width, hplane_effective_length, theta, phi):\n # Calculate the x-component of the wavenumber primed\n kx_p = k * sin(theta) * cos(phi) + pi / horn_width\n kx_m = k * sin(theta) * cos(phi) - pi / horn_width\n\n # Calculate the arguments of the Fresnel integrals\n t1_p = sqrt(1.0 / (pi * k * hplane_effective_length)) * (-k * horn_width / 2.0 - kx_p * hplane_effective_length)\n t2_p = sqrt(1.0 / (pi * k * hplane_effective_length)) * ( k * horn_width / 2.0 - kx_p * hplane_effective_length)\n\n t1_m = sqrt(1.0 / (pi * k * hplane_effective_length)) * (-k * horn_width / 2.0 - kx_m * hplane_effective_length)\n t2_m = sqrt(1.0 / (pi * k * hplane_effective_length)) * ( k * horn_width / 2.0 - kx_m * hplane_effective_length)\n\n # Calculate the Fresnel integrals\n s1p, c1p = fresnel(t1_p)\n s2p, c2p = fresnel(t2_p)\n\n s1m, c1m = fresnel(t1_m)\n s2m, c2m = fresnel(t2_m)\n\n # Build the terms from the Fresnel integrals\n fresnel_term1 = (c2p - c1p) + 1j * (s1p - s2p)\n fresnel_term2 = (c2m - c1m) + 1j * (s1m - s2m)\n\n # Calculate the phase terms\n phase_term1 = exp(1j * kx_p ** 2 * hplane_effective_length / (2.0 * k))\n phase_term2 = exp(1j * kx_m ** 2 * hplane_effective_length / 
(2.0 * k))\n\n return 0.5 * sqrt(pi * hplane_effective_length / k) * (phase_term1 * fresnel_term1 + phase_term2 * fresnel_term2)", "def rotate_components(phi, gamma = 1.0, q = 50, tol = 1e-6):\n p,k = phi.shape\n r = np.eye(k)\n d = 0\n cnt = 0\n for i in np.arange(q):\n cnt = cnt + 1\n d_old = d\n Lambda = np.dot(phi, r)\n u,s,vh = np.linalg.svd(np.dot(\n phi.T,np.asarray(Lambda)**3 - (gamma/p) * np.dot(\n Lambda, np.diag(np.diag(np.dot(Lambda.T,Lambda))))))\n print(\"Matrix u: \")\n print(u)\n print(\"Matrix s: \")\n print(s)\n print(\"Matrix vh: \")\n print(vh)\n r = np.dot(u, vh)\n d = np.sum(s)\n if d_old != 0 and d / d_old < 1 + tol:\n break\n print(\"Trace rotate_components_START\")\n print(\"Rotation matrix: \")\n print(r)\n print(\"Loop number: \" + str(cnt))\n print(\"Trace rotate_components_END\")\n return np.dot(phi, r)", "def RotatedPlaneArray(plane,tot_ang,divs,axis):\n out_planes=[]\n plane.Rotate(-tot_ang*0.5,axis)\n out_planes.append(Rhino.Geometry.Plane(plane))\n inc=tot_ang/(divs-1)\n for i in range(divs-1):\n plane.Rotate(inc,axis)\n out_planes.append(Rhino.Geometry.Plane(plane))\n return out_planes", "def partition(A:list, p:int, r:int) -> int:\n\n\tx = A[r]\n\ti = p - 1\n\tfor j in range(p, r):\n\t\tif A[j] <= x:\n\t\t\ti = i + 1\n\t\t\tA[i], A[j] = A[j], A[i]\n\tA[i+1], A[r] = A[r], A[i+1]\n\treturn i + 1", "def get_rotation_vector(R):\n v = np.array([R[1,2] - R[2,1],\n R[2,0] - R[0,1],\n R[0,1] - R[1,0]]) # eq. 3.12 in [1], pp.66\n return v", "def rotate(volume):\n\n def scipy_rotate(volume_):\n angles = [-20, -10, -5, 5, 10, 20]\n angle = random.choice(angles)\n volume_ = ndimage.rotate(input=volume_, angle=angle, reshape=False)\n volume_[volume_ < 0] = 0\n volume_[volume_ > 1] = 1\n\n return volume_\n\n augmented_volume = tf.numpy_function(scipy_rotate, [volume], tf.float32)\n\n return augmented_volume", "def get_key_from_rot(rotation):\n if rotation < -67.5: \n return -90\n elif rotation < -22.5: \n return -45\n elif rotation < 22.5: \n return 0\n elif rotation < 67.5: \n return 45\n else:\n return 90", "def partition(in_list, l_index: int, r_index: int) -> int:\n print(\"start partition sub-routine\")\n pivot = in_list[l_index] # the pivot\n print(f\"pivot value is: {pivot}\")\n\n i = l_index + 1 # the i-th index that separates values less than (<i) and greater (>i) of the pivot\n\n for j in range(i, r_index):\n print(f\"in_list[{j}] is: {in_list[j]}\")\n \n if in_list[j] < pivot: # if the j-th element is less than the pivot \n in_list = swap(in_list, i, j) # swap the i-th element with the j-th element since i is boundary index and the i-th element is greater than the pivot\n i += 1 # increment i\n print(f\"i is: {i}\")\n print(in_list)\n\n in_list = swap(in_list, l_index, i-1) #after the loop has sorted the array, swap with pivot into place\n print(\"finish partition sub-routine\")\n\n return i-1 # returns the index of the pivot for use in the quicksort recursion", "def rk4_second_order_method(f, y, z, dx, range):\n x = min(range)\n \n x_space = [x]\n y_space = [y]\n z_space = []\n \n while x<=max(range):\n k_1 = z*dx\n l_1 = f(x, y, z)*dx\n \n k_2 = (z+1/2*l_1)*dx\n l_2 = f(x+1/2*dx, y + 1/2*k_1, z + 1/2*l_1)*dx\n \n k_3 = (z + 1/2*l_2)*dx\n l_3 = f(x+1/2*dx, y + 1/2*k_2, z + 1/2*l_2)*dx\n \n k_4 = (z + l_3)*dx\n l_4 = f(x + dx, y + k_3, z + l_3)*dx\n \n y += 1/6*(k_1+2*k_2+2*k_3+k_4)\n z += 1/6*(l_1+2*l_2+2*l_3+l_4)\n \n x += dx\n x_space.append(x)\n y_space.append(y)\n z_space.append(z)\n return (x_space, y_space, z_space)", "def rotate(x: torch.Tensor, 
angle: int) -> torch.Tensor:\n # B C H W\n h_dim = 2\n w_dim = 3\n\n if angle == 0:\n return x\n elif angle == 90:\n return x.flip(w_dim).transpose(h_dim, w_dim)\n elif angle == 180:\n return x.flip(w_dim).flip(h_dim)\n elif angle == 270:\n return x.flip(h_dim).transpose(h_dim, w_dim)\n else:\n raise NotImplementedError(\"Must be rotation divisible by 90 degrees\")", "def test_revolute_from_dh(self):\n x_offset = 1\n z_offset = 2\n # Rotate around the z axis\n r = Joint.revolute_from_dh(0, 0, x_offset, z_offset)\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat[:3, :3], np.array([1, 0, 0]))\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))\n self.assertTrue(np.allclose(t_mat[2, 3], z_offset))\n # x was rotated 90 degrees, and is now y\n self.assertTrue(np.allclose(t_mat[1, 3], x_offset))", "def partition(array, p, r):\n # Choose pivot at end\n x = array[r]\n # Setup lower partition counter\n i = p - 1\n # For each element in array[p..r-1]\n for j in range(p, r):\n # If current item is leq than pivot\n if array[j] <= x:\n # Increment i anticipating a swap\n i += 1\n # Swap values at indices i and j in array\n array[i], array[j] = array[j], array[i]\n # Swap pivot with first element of upper sub-array, such that array[p..i] <= pivot <= array[i+2..r]\n array[i + 1], array[r] = array[r], array[i + 1]\n # Return index of pivot for further sorting\n return i + 1", "def rk4(f, y, t, dt):\n k1 = f(y, t)\n k2 = f(y + 0.5 * k1 * dt, t + 0.5 * dt)\n k3 = f(y + 0.5 * k2 * dt, t + 0.5 * dt)\n k4 = f(y + k3 * dt, t + dt)\n\n res = y + float(1) / 6 * dt * (k1 + 2 * k2 + 2 * k3 + k4)\n return res", "def rotator(angle):\n c = np.cos(angle)\n s = np.sin(angle)\n return np.array([[c,-s],[s,c]])", "def rk8(accel,m,r,h,v): \n k1v = accel(m,r)\n k1r = v\n k2v = accel(m,r + 0.25*k1r*h)\n k2r = v + (0.25*k1v)*h\n k3v = accel(m,r + (5/72.*k1r + 1/72.*k2r)*h)\n k3r = v + (5/72.*k1v + 1/72.*k2v)*h\n k4v = accel(m,r + (1/32.*k1r +3/32.*k3r)*h)\n k4r = v + (1/32.*k1v +3/32.*k3v)*h\n k5v = accel(m,r + (106/125.*k1r- 408/125.*k3r + 352/125.*k4r)*h)\n k5r = v + (106/125.*k1v- 408/125.*k3v + 352/125.*k4v)*h\n k6v = accel(m,r + (1/48.*k1r+ 8/33.*k4r - 125/528.*k5r)*h)\n k6r = v + (1/48.*k1v+ 8/33.*k4v - 125/528.*k5v)*h\n k7v = accel(m,r + (-13893*k1r+ 39936*k4r -64125*k5r+ 60720*k6r)*h/26411.)\n k7r = v +(-13893*k1v+ 39936*k4v -64125*k5v+ 60720*k6v)*h/26411.\n k8v = accel(m,r + (37/392.*k1r+ 1625/9408.*k5r -2/15.*k6r+ 61/6720*k7r)*h)\n k8r = v + (37/392.*k1v+ 1625/9408.*k5v -2/15.*k6v+ 61/6720*k7v)*h\n k9v = accel(m,r +(17176/25515.*k1r - 47104/25515.*k4r + 1325/504.*k5r - 41792/25515.*k6r + 20237/145800.*k7r + 4312/6075.*k8r)*h)\n k9r = v + (17176/25515.*k1v - 47104/25515.*k4v + 1325/504.*k5v - 41792/25515.*k6v + 20237/145800.*k7v + 4312/6075.*k8v)*h\n k10v = accel(m,r + ( -23834/180075.*k1r - 77824/1980825.*k4r- 636635/633864.*k5r + 254048/300125.*k6r - 183/7000.*k7r + 8/11.*k8r - 324/3773.*k9r)*h)\n k10r = v + ( -23834/180075.*k1v - 77824/1980825.*k4v- 636635/633864.*k5v + 254048/300125.*k6v - 183/7000.*k7v + 8/11.*k8v - 324/3773.*k9v)*h\n k11v= accel(m,r + (12733/7600.*k1r - 20032/5225.*k4r + 456485/80256.*k5r - 42599/7125.*k6r + 339227/912000.*k7r - 1029/4108.*k8r + 1701/1408.*k9r + 5145/2432.*k10r)*h)\n k11r = v + (12733/7600.*k1v - 20032/5225.*k4v + 456485/80256.*k5v - 42599/7125.*k6v + 339227/912000.*k7v - 1029/4108.*k8v + 1701/1408.*k9v + 5145/2432.*k10v)*h\n k12v = accel(m,r + h*(-27061/204120.*k1r + 40448/280665.*k4r -1353775/1197504.*k5r + 17662/25515.*k6r - 
71687/1166400.*k7r + 98/225.*k8r + 1/16.*k9r + 3773/11664.*k10r))\n k12r = v + h*(-27061/204120.*k1v + 40448/280665.*k4v -1353775/1197504.*k5v + 17662/25515.*k6v - 71687/1166400.*k7v + 98/225.*k8v + 1/16.*k9v + 3773/11664.*k10v)\n k13v = accel(m,r + h*(11203/8680.*k1r - 38144/11935.*k4r + 2354425/458304.*k5r - 84046/16275.*k6r + 673309/1636800.*k7r + 4704/8525.*k8r + 9477/10912.*k9r - 1029/992.*k10r + 19/341.*k12r))\n k13r = v + h*(11203/8680.*k1v - 38144/11935.*k4v + 2354425/458304.*k5v - 84046/16275.*k6v + 673309/1636800.*k7v + 4704/8525.*k8v + 9477/10912.*k9v - 1029/992.*k10v + 19/341.*k12v)\n\n\n new_v8 = v + h*(13/288.*k1v +32/125.*k6v + 31213/144000.*k7v + 2401/12375.*k8v + 1701/14080.*k9v + 2401/19200.*k10v + 19/450.*k11v) \n new_r8 = r + h*(13/288.*k1r +32/125.*k6r + 31213/144000.*k7r + 2401/12375.*k8r + 1701/14080.*k9r + 2401/19200.*k10r + 19/450.*k11r) \n \n return new_v8,new_r8", "def _rotate(polyreg, i=None, j=None, u=None, v=None, theta=None, R=None):\n # determine the rotation matrix based on inputs\n if R is not None:\n logger.debug(\"rotate: R=\\n{}\".format(R))\n if i is not None:\n raise ValueError(i)\n if j is not None:\n raise ValueError(j)\n if theta is not None:\n raise ValueError(theta)\n if u is not None:\n raise ValueError(u)\n if v is not None:\n raise ValueError(v)\n elif i is not None and j is not None and theta is not None:\n logger.info(\"rotate via indices and angle.\")\n if R is not None:\n raise ValueError(R)\n if u is not None:\n raise ValueError(u)\n if v is not None:\n raise ValueError(v)\n if i == j:\n raise ValueError(\"Must provide two unique basis vectors.\")\n R = givens_rotation_matrix(i, j, theta, polyreg.dim)\n elif u is not None and v is not None:\n logger.info(\"rotate via 2 vectors.\")\n if R is not None:\n raise ValueError(R)\n if i is not None:\n raise ValueError(i)\n if j is not None:\n raise ValueError(j)\n if theta is not None:\n raise ValueError(theta)\n R = solve_rotation_ap(u, v)\n else:\n raise ValueError(\"R or (i and j and theta) or (u and v) \"\n \"must be defined.\")\n if isinstance(polyreg, Polytope):\n # Ensure that half space is normalized before rotation\n n, p = _hessian_normal(polyreg.A, polyreg.b)\n # Rotate the hyperplane normals\n polyreg.A = np.inner(n, R)\n polyreg.b = p\n else:\n # Rotate subregions\n for poly in polyreg.list_poly:\n _rotate(poly, None, None, R=R)\n # transform bbox and cheby\n if polyreg.bbox is not None:\n polyreg.bbox = (np.inner(polyreg.bbox[0].T, R).T,\n np.inner(polyreg.bbox[1].T, R).T)\n if polyreg._chebXc is not None:\n polyreg._chebXc = np.inner(polyreg._chebXc, R)\n return R", "def heuristic_3_partition(game, player) -> float:\n\n partition_possible_factor = get_partition_possible_factor(game, player)\n\n return float(partition_possible_factor)", "def test_d_3():\n rs = 20\n d = 3\n np.random.seed(rs)\n number_rotations = 3\n\n theta_1 = np.random.uniform(0, 2 * math.pi)\n rotation_1 = np.identity(d)\n pos_1 = np.random.randint(0, d - 1)\n pos_2 = np.random.randint(pos_1 + 1, d)\n rotation_1[pos_1, pos_1] = math.cos(theta_1)\n rotation_1[pos_1, pos_2] = - math.sin(theta_1)\n rotation_1[pos_2, pos_1] = math.sin(theta_1)\n rotation_1[pos_2, pos_2] = math.cos(theta_1)\n\n theta_2 = np.random.uniform(0, 2 * math.pi)\n rotation_2 = np.identity(d)\n pos_3 = np.random.randint(0, d - 1)\n pos_4 = np.random.randint(pos_3 + 1, d)\n rotation_2[pos_3, pos_3] = math.cos(theta_2)\n rotation_2[pos_3, pos_4] = - math.sin(theta_2)\n rotation_2[pos_4, pos_3] = math.sin(theta_2)\n rotation_2[pos_4, pos_4] = 
math.cos(theta_2)\n\n theta_3 = np.random.uniform(0, 2 * math.pi)\n rotation_3 = np.identity(d)\n pos_5 = np.random.randint(0, d - 1)\n pos_6 = np.random.randint(pos_5 + 1, d)\n rotation_3[pos_5, pos_5] = math.cos(theta_3)\n rotation_3[pos_5, pos_6] = - math.sin(theta_3)\n rotation_3[pos_6, pos_5] = math.sin(theta_3)\n rotation_3[pos_6, pos_6] = math.cos(theta_3)\n\n final_rotation = rotation_1 @ rotation_2 @ rotation_3\n np.random.seed(rs)\n rotation_function = (mt_obj.calculate_rotation_matrix\n (d, number_rotations))\n assert(np.all(final_rotation == rotation_function))", "def read_rotor(self):\n self.sig_a = self.pin_a.read_digital()\n self.sig_b = self.pin_b.read_digital()\n # catch the rising edge of A\n if self.sig_a and not self.old_sig_a:\n # if b is also high, clockwise\n if self.sig_b:\n self.x += 1\n\n else:\n self.x -= 1\n if self.x > 9:\n self.x = 0\n elif self.x < 0:\n self.x = 9\n self.old_sig_a = self.sig_a\n return self.x", "def part_recur(ckt, initial, w):\n partition_set = []\n# partition_mech = KLPart.KLPartition()\n# convert_Gate(ckt, partition_mech)\n print \"Diving into C++\"\n# (a, b) = partition_mech.partition_once(KLPart.StringVector(list(set(initial))))\n (a, b) = partition(ckt, list(set(initial)))\n print \"Coming back up\"\n if len(get_inputs(ckt, a)) > w and len(a) > 3:\n partition_set = partition_set + part_recur(ckt, a, w)\n else:\n partition_set.append(a)\n if len(get_inputs(ckt, b)) > w and len(b) > 3:\n partition_set = partition_set + part_recur(ckt, b, w)\n else:\n partition_set.append(b)\n return partition_set", "def update_spin(self):\n if self.spin:\n self.angle += self.deltaAng\n self.stepsLeft -= 1\n if self.stepsLeft == 0:\n self.replication += self.deltaRep\n if self.revolution % 2 == 0:\n self.offset += self.deltaOff\n self.stepsLeft = self.stepsPer90\n\n if self.angle >= 360.0:\n self.revolution += 1\n self.angle = 0\n self.offset = 0\n self.deltaRep = -self.deltaRep\n glutPostRedisplay()", "def grid_rot(volume, batch_size, rot_matrix):\n \n #with torch.no_grad():\n volume_rotate = VolumeRotation(mode = 'bilinear')\n R = rot_matrix #rotgetRandomRotation(batch_size)\n volume = volume_rotate(volume, R.to(dtype = torch.float, device = 'cuda'))\n\n return volume", "def pos_rot_arm(arm, nparrays=False):\n return pos_rot_cpos(arm.get_current_cartesian_position(), nparrays)", "def calc_ro(f,vorticity):\n\n Ro = vorticity/f\n modRo = np.arctan(-1-Ro)\n\n return Ro, modRo", "def rk4(func, z0, time):\r\n\r\n z = np.zeros((np.size(time),np.size(z0)))\r\n z[0,:] = z0\r\n zp = np.zeros_like(z0)\r\n\r\n for i, t in enumerate(time[0:-1]):\r\n dt = time[i+1] - time[i]\r\n dt2 = dt/2.0\r\n k1 = np.asarray(func(z[i,:], t)) # predictor step 1\r\n k2 = np.asarray(func(z[i,:] + k1*dt2, t + dt2)) # predictor step 2\r\n k3 = np.asarray(func(z[i,:] + k2*dt2, t + dt2)) # predictor step 3\r\n k4 = np.asarray(func(z[i,:] + k3*dt, t + dt)) # predictor step 4\r\n z[i+1,:] = z[i,:] + dt/6.0*(k1 + 2.0*k2 + 2.0*k3 + k4) # Corrector step\r\n\r\n return z", "def partition(A, p, r):\n x = A[r]\n i = p\n for j in range(p, r):\n if A[j] <= x:\n A[i], A[j] = A[j], A[i]\n i += 1\n A[i], A[r] = A[r], A[i]\n return i" ]
[ "0.60148156", "0.5928004", "0.5905255", "0.58448195", "0.5765161", "0.56619036", "0.56407213", "0.56067413", "0.5598395", "0.5529949", "0.55244756", "0.55044246", "0.5503363", "0.54765314", "0.54362005", "0.5415683", "0.54050183", "0.5374608", "0.5368864", "0.5364739", "0.5363586", "0.5351869", "0.5350976", "0.53490597", "0.5346866", "0.533562", "0.53331256", "0.5328986", "0.53104174", "0.5290422", "0.52894986", "0.5278173", "0.5275239", "0.5266627", "0.5265668", "0.5254347", "0.5224346", "0.52216357", "0.52057594", "0.52039945", "0.51977396", "0.51927936", "0.5187236", "0.5185458", "0.51833653", "0.5181391", "0.51661944", "0.5153958", "0.5139301", "0.51294494", "0.51190174", "0.51109284", "0.5105123", "0.51043946", "0.50966424", "0.5093217", "0.5086183", "0.5084063", "0.5078381", "0.5051362", "0.50456876", "0.50428057", "0.50404716", "0.50340366", "0.50329775", "0.50324696", "0.503169", "0.502311", "0.5021883", "0.5017661", "0.5011744", "0.5010463", "0.50079024", "0.5005615", "0.49979603", "0.49965435", "0.4992385", "0.4990072", "0.49745795", "0.49742123", "0.4973404", "0.49666855", "0.49645832", "0.49644428", "0.49602395", "0.4951779", "0.49513033", "0.49488428", "0.4946697", "0.4946294", "0.49447885", "0.49428657", "0.49385917", "0.49268624", "0.49266154", "0.49260026", "0.49238792", "0.4914116", "0.49108517", "0.49063095", "0.49048233" ]
0.0
-1
Returns the partition sum without the electronic contribution. v = vibrational frequency (wavenumber) in m^-1, m = mass in kg, I = moment of inertia, either a single number or a list of three [kg m^2], V = volume in m^3, sym = symmetry number (count of equivalent rotation axes).
import sympy as s  # inferred from s.Symbol(); the snippet leaves the import implicit

def partition(v, m, I, V, sym):
    # total ln partition sum: vibrational + translational + rotational contributions
    T = s.Symbol("T")  # symbolic temperature, presumably referenced inside the helpers
    return qvib(v) + qtrans(m, V) + qrot(I, sym)
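A minimal runnable sketch of the three helpers the snippet above relies on (qvib, qtrans, qrot), useful for checking the units. The formulas are the standard harmonic-oscillator, ideal-gas translational, and rigid-rotor log partition functions; the signatures are inferred from the call sites, the numeric temperature argument is an assumption (the original keeps T symbolic via sympy), and none of this comes from the source module itself.

import numpy as np
from scipy import constants as const

def qvib(v, T=298.15):
    # ln q_vib for harmonic modes; v is a wavenumber in m^-1 (scalar or iterable),
    # zero of energy taken at the vibrational ground state (assumption)
    x = const.h * const.c * np.atleast_1d(v) / (const.k * T)
    return float(np.sum(-np.log(1.0 - np.exp(-x))))

def qtrans(m, V, T=298.15):
    # ln q_trans = ln(V / Lambda^3), Lambda the thermal de Broglie wavelength
    lam = const.h / np.sqrt(2.0 * np.pi * m * const.k * T)
    return float(np.log(V / lam**3))

def qrot(I, sym, T=298.15):
    # ln q_rot; a single I is treated as a linear rotor, a list of three as nonlinear
    theta = const.hbar**2 / (2.0 * const.k * T)  # hbar^2 / (2 kB T), units of kg m^2
    if np.ndim(I) == 0:
        return float(np.log((I / theta) / sym))  # linear: 2 I kB T / (sym hbar^2)
    Ia, Ib, Ic = I
    return float(np.log(np.sqrt(np.pi * Ia * Ib * Ic / theta**3) / sym))

With these helpers, partition(v, m, I, V, sym) returns the total ln of the molecular partition function, which is additive because the individual q factors multiply.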
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def vol_uc(x):\r\n return sum([vol(m) for m in metamer(x)])", "def get_effective_mass():\n\n H_BAR = 6.582119514e-16 # eV*s\n M_0 = 9.10938356e-31 # kg\n N_KPTS = 6 # Number of k-points included in the parabola.\n\n spin_up = Spin(1)\n\n band_structure = Vasprun('vasprun.xml').get_band_structure()\n\n # Locations of CBM and VBM in band_structure.bands\n cbm_band_index = band_structure.get_cbm()['band_index'][spin_up][0]\n cbm_kpoint_index = band_structure.get_cbm()['kpoint_index'][0]\n\n vbm_band_index = band_structure.get_vbm()['band_index'][spin_up][0]\n vbm_kpoint_index = band_structure.get_vbm()['kpoint_index'][0]\n\n k = {'electron': {'left': [], 'right': []},\n 'hole': {'left': [], 'right': []}}\n E = {'electron': {'left': [], 'right': []},\n 'hole': {'left': [], 'right': []}}\n\n e_ref_coords = band_structure.kpoints[cbm_kpoint_index]._ccoords\n h_ref_coords = band_structure.kpoints[vbm_kpoint_index]._ccoords\n\n for n in range(-N_KPTS, 1):\n e_coords = band_structure.kpoints[cbm_kpoint_index + n]._ccoords\n h_coords = band_structure.kpoints[vbm_kpoint_index + n]._ccoords\n\n k['electron']['left'].append(\n ((e_coords[0] - e_ref_coords[0])**2 +\n (e_coords[1] - e_ref_coords[1])**2 +\n (e_coords[2] - e_ref_coords[2])**2)**0.5\n )\n k['hole']['left'].append(\n ((h_coords[0] - h_ref_coords[0])**2 +\n (h_coords[1] - h_ref_coords[1])**2 +\n (h_coords[2] - h_ref_coords[2])**2)**0.5\n )\n\n e_energy = band_structure.bands[\n spin_up][cbm_band_index][cbm_kpoint_index + n]\n h_energy = band_structure.bands[\n spin_up][vbm_band_index][vbm_kpoint_index + n]\n\n E['electron']['left'].append(e_energy)\n E['hole']['left'].append(h_energy)\n\n for n in range(1, 1 + N_KPTS):\n e_coords = band_structure.kpoints[cbm_kpoint_index + n]._ccoords\n h_coords = band_structure.kpoints[vbm_kpoint_index + n]._ccoords\n\n k['electron']['right'].append(\n ((e_coords[0] - e_ref_coords[0])**2 +\n (e_coords[1] - e_ref_coords[1])**2 +\n (e_coords[2] - e_ref_coords[2])**2)**0.5\n )\n k['hole']['right'].append(\n ((h_coords[0] - h_ref_coords[0])**2 +\n (h_coords[1] - h_ref_coords[1])**2 +\n (h_coords[2] - h_ref_coords[2])**2)**0.5\n )\n\n e_energy = band_structure.bands[\n spin_up][cbm_band_index][cbm_kpoint_index + n]\n h_energy = band_structure.bands[\n spin_up][vbm_band_index][vbm_kpoint_index + n]\n\n E['electron']['right'].append(e_energy)\n E['hole']['right'].append(h_energy)\n\n # 2nd order fits\n e_l_fit = np.poly1d(\n np.polyfit(k['electron']['left'], E['electron']['left'], 2))\n e_r_fit = np.poly1d(\n np.polyfit(k['electron']['right'], E['electron']['right'], 2))\n h_l_fit = np.poly1d(\n np.polyfit(k['hole']['left'], E['hole']['left'], 2))\n h_r_fit = np.poly1d(\n np.polyfit(k['hole']['right'], E['hole']['right'], 2))\n\n # Curvatures\n e_l_curvature = e_l_fit.deriv().deriv()[0]\n e_r_curvature = e_r_fit.deriv().deriv()[0]\n h_l_curvature = h_l_fit.deriv().deriv()[0]\n h_r_curvature = h_r_fit.deriv().deriv()[0]\n\n # Unit conversion\n e_m_eff_l = 10 * ((H_BAR ** 2) / e_l_curvature) / M_0\n e_m_eff_r = 10 * ((H_BAR ** 2) / e_r_curvature) / M_0\n h_m_eff_l = -10 * ((H_BAR ** 2) / h_l_curvature) / M_0\n h_m_eff_r = -10 * ((H_BAR ** 2) / h_r_curvature) / M_0\n\n return {'electron': {'left': e_m_eff_l, 'right': e_m_eff_r},\n 'hole': {'left': h_m_eff_l, 'right': h_m_eff_r}}", "def get_k(M):\n k = np.arange(1,M+1)*np.pi/(M+1) # vector of all possible quasimomenta\n return k", "def decomposition_into_s_n_irreducibles(self, n):\r\n w5 = partitions_list(n)\r\n M5 = form_matrix_yt(w5)\r\n card = 
math.factorial(n)\r\n vec_dic = {}\r\n for k in range(self.dimension()+1):\r\n D = {}\r\n uu = []\r\n vv = []\r\n p = k \r\n A = self.matrix_simmetric_representate(p)\r\n if (p >0 and (p <= self.dimension())):\r\n null = nullspace(A)\r\n w3 = []\r\n for i in range(len(null[0])):\r\n w = []\r\n for j in range(len(null)):\r\n w.append(null[j][i])\r\n w3.append(w) \r\n null = w3\r\n M = np.matrix(w3, dtype= np.float64).transpose()\r\n Mi = np.linalg.pinv(M)\r\n else:\r\n if (p == 0):\r\n M = A\r\n null = []\r\n for i in range(A.shape[0]):\r\n aux = []\r\n for j in range(A.shape[1]):\r\n aux.append(M[i,j])\r\n null.append(aux)\r\n M = np.matrix(null, dtype=np.float64)\r\n Mi = M\r\n p = k + 1\r\n if (p>0 and (p <= self.dimension())):\r\n A1=self.matrix_simmetric_representate(p)\r\n col = columnspace(A1)\r\n w4 = []\r\n for i in range(len(col[0])):\r\n w = []\r\n for j in range(len(col)):\r\n w.append(col[j][i])\r\n w4.append(w)\r\n col = w4\r\n M1 = np.matrix(w4, dtype=np.float64).transpose()\r\n Mii = np.linalg.pinv(M1)\r\n for h in w5:\r\n p = k \r\n if (p >0 and (p <= self.dimension())):\r\n if (all(elem == 0 for elem in null[0])):\r\n l1 = 0\r\n else:\r\n he = self.basis_group_oriented_p_chains(p) \r\n on1 = np.ones(len(list(he.dic.keys())), dtype=np.float64) \r\n v = P_chains([],[])\r\n v = P_chains(list(he.dic.keys()),on1)\r\n v1 = permutation_in_simplex_test(v, make_permutation(h))\r\n D1={}\r\n c1 = 0\r\n for i in list(v1.dic.keys()):\r\n c2 = 1\r\n for j in list(he.dic.keys()):\r\n if (i == j):\r\n if (v1.dic[i] == he.dic[j]):\r\n D1[c1] = c2\r\n else:\r\n D1[c1] = -c2\r\n c2 = c2 + 1\r\n c1 = c1 + 1\r\n rr = M.shape[0]\r\n cc = M.shape[1]\r\n Ma = np.zeros([rr,cc],dtype=np.float64)\r\n for i in range(rr):\r\n Ma[i,:] = (M[(abs(D1[i])-1),:]*(np.sign(D1[i])))\r\n l1 = 0\r\n for j in range(cc):\r\n l1 = np.dot(Mi[j,:],Ma[:,j])[0,0] + l1\r\n else:\r\n if (p == 0):\r\n he = self.basis_group_oriented_p_chains(p) \r\n on1 = np.ones(len(list(he.dic.keys())), dtype=np.float64) \r\n v = P_chains([],[])\r\n v = P_chains(list(he.dic.keys()),on1)\r\n v1 = permutation_in_simplex_test(v, make_permutation(h))\r\n D1={}\r\n c1 = 0\r\n for i in list(v1.dic.keys()):\r\n c2 = 1\r\n for j in list(he.dic.keys()):\r\n if (i == j):\r\n if (v1.dic[i] == he.dic[j]):\r\n D1[c1] = c2\r\n else:\r\n D1[c1] = -c2\r\n c2 = c2 + 1\r\n c1 = c1 + 1\r\n rr = M.shape[0]\r\n cc = M.shape[1]\r\n Ma = np.zeros([rr,cc],dtype=np.float64)\r\n for i in range(rr):\r\n Ma[i,:] = (M[(abs(D1[i])-1),:]*(np.sign(D1[i])))\r\n l1 = 0\r\n for j in range(cc):\r\n l1 = np.dot(Mi[j,:],Ma[:,j])[0,0] + l1\r\n else:\r\n l1 = 0\r\n p = k + 1\r\n if (p>0 and (p <= self.dimension())):\r\n hi = self.basis_group_oriented_p_chains(p-1) \r\n on1i = np.ones(len(list(hi.dic.keys())), dtype=np.float64) \r\n vi = P_chains([],[])\r\n vi = P_chains(list(hi.dic.keys()),on1i)\r\n v1i = permutation_in_simplex_test(vi, make_permutation(h))\r\n D1i={}\r\n c1 = 0\r\n for i in list(v1i.dic.keys()):\r\n c2 = 1\r\n for j in list(hi.dic.keys()):\r\n if (i == j):\r\n if (v1i.dic[i] == hi.dic[j]):\r\n D1i[c1] = c2\r\n else:\r\n D1i[c1] = -c2\r\n c2 = c2 + 1\r\n c1 = c1 + 1\r\n rr = M1.shape[0]\r\n cc = M1.shape[1]\r\n Mai = np.zeros([rr,cc],dtype=np.float64)\r\n for i in range(rr):\r\n Mai[i,:] = (M1[(abs(D1i[i])-1),:]*(np.sign(D1i[i])))\r\n l2 = 0\r\n for j in range(cc):\r\n l2 = np.dot(Mii[j,:],Mai[:,j])[0,0] + l2\r\n else:\r\n l2 = 0\r\n uu.append(l1-l2) \r\n vv.append(size_conjugacy_class(h,n))\r\n for i in range(M5.shape[0]):\r\n Ip = 0\r\n for j in 
range(M5.shape[1]):\r\n Ip = Ip + M5[i,j]*uu[j]*vv[j]\r\n Ip = Ip/card\r\n D[tuple(w5[i])] = abs(round(Ip))\r\n '''Note that I am using round, only because the results obtained are \r\n not esthetics'''\r\n vec_dic[k] = D\r\n return vec_dic", "def partition_by_eigenvector(graph):\n ###TODO\n pass", "def nfw(self, k, m, z):\n RS, rhoS, c = self.rS_rhoS_c(m, z)\n #\n result = np.sin(k * RS) * ( Si((1+c) * k * RS) - Si(k * RS) )\n result += - np.sin(c * k * RS) / ((1+c) * k * RS)\n result += np.cos(k * RS) * ( Ci((1+c) * k * RS) - Ci(k * RS) )\n result /= (np.log(1+c) - c/(1+c))\n return result", "def nfw(self, k, m, z):\n RS, rhoS, c = self.rS_rhoS_c(m, z)\n #\n result = np.sin(k * RS) * ( Si((1+c) * k * RS) - Si(k * RS) )\n result += - np.sin(c * k * RS) / ((1+c) * k * RS)\n result += np.cos(k * RS) * ( Ci((1+c) * k * RS) - Ci(k * RS) )\n result /= (np.log(1+c) - c/(1+c))\n return result", "def part(n, show_progress=False):\n # Get partitions as list of tuples\n parts = partitions(n, show_progress=show_progress)\n\n #products = set(map(lambda x: np.prod(x), parts))\n # Only count unique products\n filtered_products = list(set(parts.values()))\n filtered_products.sort()\n\n return format('Range: %d Average: %.2f Median: %.2f' % \n (filtered_products[-1]-filtered_products[0], np.mean(filtered_products), np.median(filtered_products)))", "def inertia(mus):\n pos, negs, zeros = cluster_eignvalues(mus)\n\n return len(zeros) + min(len(pos), len(negs))", "def total_electronic_hamiltonian(self):\n return block_diag(*[self.electronic_manifold(n) for n in range(3)])", "def HarmonicOscillator(inv_mass_matrix, k=1.0, m=1.0):\n\n def potential_energy(q):\n return jnp.sum(0.5 * k * jnp.square(q[\"x\"]))\n\n def kinetic_energy(p):\n v = jnp.multiply(inv_mass_matrix, p[\"x\"])\n return jnp.sum(0.5 * jnp.dot(v, p[\"x\"]))\n\n return potential_energy, kinetic_energy", "def spectral_modularity_partition(G):\n try:\n import numpy as np\n except:\n raise ImportError(\"spectral_partition() \\\n requires NumPy: http://scipy.org/\")\n\n\n k = np.matrix(G.degree().values())\n m = G.number_of_edges()\n B = nx.adj_matrix(G) - (k.transpose() * k) / (2.0 * m)\n eigenvalues, eigenvectors = np.linalg.eig(B)\n # sort and keep smallest nonzero \n index = np.argsort(eigenvalues)[-1] # -1 index is largest eigenvalue\n v2 = zip(np.real(eigenvectors[:, index]), G)\n \n C = [set(), set()]\n \n for (u, n) in v2:\n if u < 0:\n C[0].add(n)\n else:\n C[1].add(n)\n return C", "def inv_sym(self, ):\n m = self.m\n n = self.n\n kQ = self.kQ\n iQ = self.iQ\n iA = self.iA\n kA = self.kA\n kAt = self.kAt\n iAt = self.iAt\n bndmark = self.bndmark\n rngmark = self.rngmark\n\n verbose = self.verbose\n pdf = self.pdf\n\n separable = True\n degree = np.empty(n+m, dtype=np.int)\n nbrs = np.empty(n+m, dtype=object)\n\n #/*-----------------------------------------------------+\n #| First check to see if the problem is separable. 
*/\n\n for j in range(n):\n for k in range(kQ[j], kQ[j+1]):\n if iQ[k] != j:\n separable = False\n break\n\n #/*----------------------------------------------------+\n #| Select ordering priority (primal or dual) */\n\n\n _dense, _fraction, pfillin, dfillin = 0.0, 0.0, 0.0, 0.0\n\n _fraction = 1.0e0\n for j in range(n):\n _dense = float(kA[j+1]-kA[j])/(m+1)\n _fraction = _fraction*(1.0e0 - _dense*_dense)\n\n pfillin = 0.5*m*m*(1.0e0-_fraction)\n if verbose>2:\n print(\"primal fillin estimate: {:10.0f}\".format(pfillin))\n\n _fraction = 1.0e0\n for i in range(m):\n _dense = float(kAt[i+1]-kAt[i])/(n+1)\n _fraction = _fraction*(1.0e0 - _dense*_dense)\n\n dfillin = 0.5*n*n*(1.0e0-_fraction)\n if verbose>2:\n print(\"dual fillin estimate: {:10.0f}\\n\".format(dfillin))\n\n if pdf == self._UNSET:\n if 3*pfillin <= dfillin and separable:\n pdf = self._PRIMAL\n if verbose>2:\n print(\"Ordering priority favors PRIMAL\")\n else:\n pdf = self._DUAL\n if verbose>2:\n print(\"Ordering priority favors DUAL\")\n\n\n #/*----------------------------------------------+\n #| Initialize nbrs so that nbrs[col][k] con- |\n #| tains the row index of the k_th nonzero in |\n #| column col. |\n #| Initialize degree so that degree[col] con- |\n #| tains the number of nonzeros in column col. |\n #| */\n\n for j in range(n):\n ne = kA[j+1] - kA[j] + kQ[j+1] - kQ[j]\n nbrs[j] = np.empty(ne, dtype=np.int)\n ne = 0\n for k in range(kA[j], kA[j+1]):\n nbrs[j][ne] = n+iA[k]\n ne+=1\n for k in range(kQ[j],kQ[j+1]):\n if iQ[k] != j:\n nbrs[j][ne] = iQ[k]\n ne+=1\n\n degree[j] = ne\n\n for i in range(m):\n ne = kAt[i+1] - kAt[i]\n nbrs[n+i] = np.empty(ne, dtype=np.int)\n degree[n+i] = ne\n ne = 0\n for k in range(kAt[i], kAt[i+1]):\n nbrs[n+i][ne] = iAt[k]\n ne+=1\n\n #/*----------------------------------------------+\n #| Initialize tier to contain the ordering |\n #| priority scheme. |\n #| */\n\n if self.tier is None:\n self.tier = np.empty(n+m, dtype=np.int)\n n1 = 0\n if pdf == self._PRIMAL:\n for j in range(n):\n if bndmark[j] != FREEVAR:\n self.tier[j] = 0 # 0\n else:\n self.tier[j] = 1 # 2\n\n for i in range(m):\n if rngmark[i] == UNCONST:\n self.tier[n+i] = 1 # 4\n n1+=1\n elif rngmark[i] == INFINITE:\n self.tier[n+i] = 1 # 1\n else:\n self.tier[n+i] = 1 # 3\n n1+=1\n\n else:\n for j in range(n):\n if bndmark[j] != FREEVAR:\n self.tier[j] = 1 # 1\n else:\n self.tier[j] = 1 # 3\n n1+=1\n\n for i in range(m):\n if rngmark[i] == UNCONST:\n self.tier[n+i] = 1 # 4\n elif rngmark[i] == INFINITE:\n self.tier[n+i] = 0 # 0\n else:\n self.tier[n+i] = 1 # 2\n\n\n #/*---------------------------------------------------------+\n #| compute maximum column degree of tier zero columns */\n\n if self.dense < 0:\n denfac = 3.0\n colhisto = np.zeros(n+m+1, dtype=np.int)\n\n for i in range(n+m):\n if self.tier[i] == 0:\n colhisto[ degree[i] ] += 1\n\n tot = 0\n _max = n1\n for i in range(n+m):\n tot += colhisto[i]\n if tot >= _max:\n break\n i+=1\n tot = 0\n cnt = 0\n for j in range(n+m):\n if self.tier[j] == 0:\n tot += degree[j]\n cnt+=1\n self.dense = dense = int(denfac*i)\n\n #dense = (int)(denfac*MAX(i,tot/cnt))\n \t\t#printf(\"i = %d, n = %d, m = %d, n1 = %d \\n\", i,n,m,n1)\n \t\t#printf(\"tot = %d, cnt = %d\\n\", tot, cnt)\n del(colhisto)\n\n\n if verbose>2:\n print(\"dense: {:5d}\".format(dense))\n\n #/*----------------------------------------------+\n #| Get memory for mark[]. 
*/\n\n self.mark = np.empty(n+m, dtype=np.int)\n\n self.lltsym(degree,nbrs)\n\n del(degree)\n del(nbrs)\n self.tier = None", "def _get_vp_totvolume(self, geom, n=None):\n if geom.vp is None:\n geom.voronoi(self.pbc, self.ratio)\n if hasattr(geom.vp, 'vp_volume'):\n return geom.vp.vp_volume\n f = geom.vp.vp_faces()\n v, _ = geom.vp.vp_volumes(f)\n if n is not None:\n v = [v[i] for i in n]\n return v", "def getPartitionFunction(self, Tlist):\n\t\treturn _modes.harmonicoscillator_partitionfunction(Tlist, self.frequency) ** self.degeneracy", "def sim_split_sym_mig_all_size(params, ns):\n #12 parameters\t\n nuA, nu1a, nu1b, nu2a, nu2b, nu3a, nu3b, m_1, m_2, m_3, T1, T2 = params\n sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1] + ns[2])\n fs = moments.Spectrum(sts)\n fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1] + ns[2])\n fs = moments.Manips.split_2D_to_3D_2(fs, ns[1], ns[2])\n ## Population function for T1\n nu_T1 = [nu1a, nu2a, nu3a]\n fs.integrate(nu_T1, T1)\n ## Population function for T2 \n nu_T2 = [nu1b, nu2b, nu3b]\n mig = numpy.array([[0, m_1, m_3],[m_1, 0, m_2], [m_3, m_2, 0]]) \n fs.integrate(nu_T2, T2, m=mig) \n return fs", "def mass_from_composition(composition):\n mass = 0.0\n for k, v in composition.items():\n if k == 0: # electron\n mass -= v * 5.489e-4\n else:\n mass += v * relative_atomic_masses[k - 1]\n return mass", "def find_partitions(V,k):\n k_subs = k_subset(V,k)\n k_subs = uniq_subsets(k_subs)\n\n return k_subs", "def mass(self):\n\t\treturn self.volume*self.density", "def inertia_tensor_partial(self, part, masswt=True, zero=ZERO):\n tensor = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]\n\n for i in part:\n if masswt:\n # I(alpha, alpha)\n tensor[0][0] += self.mass(i) * (self.y(i) * self.y(i) + self.z(i) * self.z(i))\n tensor[1][1] += self.mass(i) * (self.x(i) * self.x(i) + self.z(i) * self.z(i))\n tensor[2][2] += self.mass(i) * (self.x(i) * self.x(i) + self.y(i) * self.y(i))\n\n # I(alpha, beta)\n tensor[0][1] -= self.mass(i) * self.x(i) * self.y(i)\n tensor[0][2] -= self.mass(i) * self.x(i) * self.z(i)\n tensor[1][2] -= self.mass(i) * self.y(i) * self.z(i)\n\n else:\n # I(alpha, alpha)\n tensor[0][0] += self.y(i) * self.y(i) + self.z(i) * self.z(i)\n tensor[1][1] += self.x(i) * self.x(i) + self.z(i) * self.z(i)\n tensor[2][2] += self.x(i) * self.x(i) + self.y(i) * self.y(i)\n\n # I(alpha, beta)\n tensor[0][1] -= self.x(i) * self.y(i)\n tensor[0][2] -= self.x(i) * self.z(i)\n tensor[1][2] -= self.y(i) * self.z(i)\n\n # mirror\n tensor[1][0] = tensor[0][1]\n tensor[2][0] = tensor[0][2]\n tensor[2][1] = tensor[1][2]\n\n # Check the elements for zero and make them a hard zero.\n for i in range(3):\n for j in range(3):\n if math.fabs(tensor[i][j]) < zero:\n tensor[i][j] = 0.0\n return tensor", "def V_angles(atoms):\n \n Va = 0 # this is the variable we will store the sum of all the energies in\n N = len(atoms)\n for i in range(len(atoms)):\n j = (i+1) % N\n k = (i-1) % N\n x_ij = atoms.coords[j] - atoms.coords[i] # vector from atom i to j\n x_ik = atoms.coords[k] - atoms.coords[i] # vector from atom i to k\n theta = np.arccos(np.dot(x_ij, x_ik)/(norm(x_ij)*norm(x_ik))) # angle between the above two\n \n Va += (theta - TH0)**2\n \n return Va", "def get_element_density(mt):\r\n fraction_matrix = zeros(100)\r\n \r\n composition = Composition(mt['pretty_formula'])\r\n \r\n for element in composition:\r\n fraction = composition.get_atomic_fraction(element) # get the atomic fraction.\r\n fraction_matrix[element.Z] = fraction\r\n \r\n return fraction_matrix", "def 
vol(x):\r\n return pi*(topdia(x)/2000.)**2 * length (x)", "def I(material):\n # Lookup table for Z<=13. Key is the \"Z\" of the material. Values are in eV\n lookup_table = {\n 1: 18.9,\n 2: 42.0,\n 3: 38.0,\n 4: 60.0, \n 6: 78.0,\n 7: 85.0,\n 8: 89.0,\n 10: 131.0,\n 13: 163.0\n }\n \n I_list = []\n \n for mat, frac in material.mult_by_mass().items():\n Z = nucname.znum(mat)\n # Check to see if Z is in our table\n I = lookup_table.get(Z)\n\n # If I is not in the table, calculate it\n # Use Anderson Equation 2.33\n if I is None:\n I = 9.73 * Z + 58.8 * Z ** -0.19\n \n I_list.append(I * frac)\n \n I_a = sum(I_list)\n \n # Convert I from eV to MeV\n I_a = I_a * 10**-6.0\n \n return I_a", "def normal_vol(self, k):\r\n f, s, t = self.f, self.shift, self.t\r\n beta, rho, volvol = self.beta, self.rho, self.volvol\r\n alpha = self.alpha()\r\n v_n = normal_vol(k+s, f+s, t, alpha, beta, rho, volvol)\r\n return v_n", "def MDL_KLT(data):\n\n eigs = []\n p = 64\n N = len(data[0])//p\n for sig in data:\n splits = np.split(sig, N)\n cov_matrix = np.zeros((p, p), dtype=np.complex128)\n for split in splits:\n split /= np.mean(split)\n cov_matrix += np.outer(split, np.conj(split))\n\n eigv = np.real(scipy.linalg.eigvalsh(cov_matrix)[::-1])\n eigv = eigv/np.mean(eigv)\n \n best_k = 0\n best_MDL = float(\"inf\")\n for k in range(0,p):\n noise_eigs = eigv[k:]\n noise_dim = len(noise_eigs)\n ratio = gmean(noise_eigs)/np.mean(noise_eigs)\n cur_MDL = -np.log(ratio**(noise_dim*N)) + .5*k*(2*p-k)*np.log(N)\n if cur_MDL < best_MDL:\n best_k = k\n best_MDL = cur_MDL\n \n if best_k == 0:\n eigs.append(0)\n else:\n eigs.append(sum(eigv[:best_k]))\n \n return np.real(np.array(eigs))", "def total_kinetic_energy(V,M):\r\n N = V.shape[0] # number of bodies\r\n K = 0 # initialize kinetic energy\r\n V0 = np.zeros(3) # initialize center of mass velocity\r\n # find refernce velocity\r\n for n in range(N):\r\n V0 = V0 + V[n,:]*M[n]\r\n V0 = V0/np.sum(M)\r\n # find kinetic energy\r\n for n in range(N):\r\n K = K + ((util.enod(V[n,:],V0))**2)* 0.5 * M[n]\r\n \r\n return K", "def modularity(G, partition):\n m = G.size(weight=\"weight\")\n degrees = dict(G.degree(weight=\"weight\"))\n Q = 0\n for community in partition:\n for u, v in product(community, repeat=2):\n try:\n w = G[u][v].get(\"weight\", 1)\n except KeyError:\n w = 0\n if u == v:\n # Double count self-loop weight.\n w *= 2\n Q += w - degrees[u] * degrees[v] / (2 * m)\n return Q / (2 * m)", "def Partitioner(q,InvV,Posterior,m_points):\n \n m = InvV.n #get the number of maps being used \n Q = np.zeros([m,m_points.num]) #initialise the partition functions\n \n for j in range(m):\n #backmap the points from the posterior to the intermediate\n backmap = m_points.map(InvV,j)\n #determine the current mixture using a change of variables\n det = InvV.L[j,:,:].diagonal().prod()**2\n Q[j,:] = q[j] * multivariate_normal.pdf(backmap.all,mean=np.zeros(m_points.d),cov=np.eye(m_points.d)) * det\n \n #now we have the total mixture\n g_est = np.sum(Q,axis=0)\n\n for j in range(m):\n #the partitioner can be found from these\n Q[j,:] /= g_est\n #apply the partitioner to the posterior evaluations to get the partitioned components\n \n return Q", "def zernike_Vnm(rho,theta,n,m):\n\tRnm = 0\n\tfact = lambda x: np.math.factorial(x)\n\tam = abs(m)\n\tfor s in range(0,(n-am)/2):\n\t\tRnm+= (-1)**s*fact(n-s)*rho**(n-2*s)/(\n\t\t\tfact(s)*fact((n+am)/2-s)*fact((n-am)/2-s))\n\tVnm = Rnm*np.exp(1j*m*theta)", "def plot_measure(v, partition, error=None, basis='Ulam', norm='L1', **kwargs):\n\n\tif not 
'legend_label' in kwargs:\n\t\tkwargs['legend_label'] = 'Invariant measure'\n\tif not 'color' in kwargs:\n\t\tkwargs['color'] = 'red'\n\t\n\tif basis == 'Ulam' or basis == 'ulam':\n\t\tsteps = tuple(step_function(v, partition))\n\t\tp = plot_step_function(steps, **kwargs)\n\telif basis == 'hat':\n\t\tpoints = zip(partition, np.append(v, v[0]))\n\t\tp = line(points, **kwargs)\n\telse:\n\t\traise ValueError, \"invalid basis type\"\n\t\t\n\tif error is not None:\n\t\tif norm == 'L1':\n\t\t\tp += bar_chart([sqrt(error)], width=sqrt(error), color='green', legend_label='Area of the total error')\n\t\telif norm == 'Linf' or norm == 'C0':\n\t\t\tp += line([(x, y-error) for (x, y) in points], color='green', legend_label='error bounds')\n\t\t\tp += line([(x, y+error) for (x, y) in points], color='green')\n\t\telse:\n\t\t\traise ValueError, \"invalid norm type\"\n\t\n\treturn p", "def specvol(SA, CT, p):\n\n SA = np.maximum(SA, 0)\n\n xs = np.sqrt(sfac * SA + soffset)\n ys = CT * 0.025\n z = p * 1e-4\n\n specific_volume = (v000\n + xs * (v100 + xs * (v200 + xs * (v300 + xs * (v400 + xs * (v500\n + xs * v600)))))\n + ys * (v010\n + xs * (v110 + xs * (v210 + xs * (v310 + xs * (v410 + xs * v510))))\n + ys * (v020 + xs * (v120 + xs * (v220 + xs * (v320 + xs * v420)))\n + ys * (v030 + xs * (v130 + xs * (v230 + xs * v330))\n + ys * (v040 + xs * (v140 + xs * v240)\n + ys * (v050 + xs * v150 + ys * v060)))))\n + z * (v001\n + xs * (v101 + xs * (v201 + xs * (v301 + xs * (v401 + xs * v501))))\n + ys * (v011 + xs * (v111 + xs * (v211 + xs * (v311 + xs * v411)))\n + ys * (v021 + xs * (v121 + xs * (v221 + xs * v321))\n + ys * (v031 + xs * (v131 + xs * v231)\n + ys * (v041 + xs * v141 + ys * v051))))\n + z * (v002\n + xs * (v102 + xs * (v202 + xs * (v302 + xs * v402)))\n + ys * (v012 + xs * (v112 + xs * (v212 + xs * v312))\n + ys * (v022 + xs * (v122 + xs * v222)\n + ys * (v032 + xs * v132 + ys * v042)))\n + z * (v003\n + xs * (v103 + xs * v203)\n + ys * (v013 + xs * v113 + ys * v023)\n + z * (v004 + xs * v104 + ys * v014\n + z * (v005 + z * v006))))))\n\n return specific_volume", "def sumaPar(self,numSeg,w):\n total=0\n for i in range(2,numSeg-1,2):\n total+=2*self.F(i*w)\n return total", "def __init__(self, my_partition: List[int]):\n self.my_partition = my_partition\n self.my_partition.sort(reverse=True)\n if self.my_partition[-1]==0:\n first_zero = self.my_partition.index(0)\n self.my_partitition = self.my_partition[0:first_zero]\n self.my_n = sum(self.my_partition)", "def _G_to_km_on_basis_single_level(self, w, m):\n kB = self._sym.kBoundedSubspace(self.k,t=1)\n g = kB.K_kschur()\n mon = self.km()\n if m < w.length():\n return 0\n ans = self.zero()\n for la in Partitions(m, max_part = self.k):\n ans += g.homogeneous_basis_noncommutative_variables_zero_Hecke((la)).coefficient(w)*mon(la)\n return ans", "def hypergraph_volume(hypergraph, vertex_set, complement=False):\n vertex_set = set(vertex_set)\n if not complement:\n return sum(hypergraph.degrees[v] for v in vertex_set)\n else:\n return sum(hypergraph.degrees[v] for v in hypergraph.nodes if v not in vertex_set)", "def sigmai_dep(ptem, psal, pref):\n zr4 = 4.8313e-4\n zd =-2.042967e-2\n zrau0 = 1000.e0\n \n sigmai_dep_out = zeros(psal.shape)\n \n # ?? for whatever reason sqrt(abs(psal)) seems to kick up a fuss when arrays\n # exceed a certain size...??? 
otherwise this could be vectorised\n # TODO: if pref is a number, broadcast it into a 2d field\n \n for jj in range(psal.shape[0]): # python indexing\n for ji in range(psal.shape[1]):\n \n ztem = ptem[jj, ji]\n zsal = psal[jj, ji]\n zws = sqrt( abs(psal[jj, ji]) )\n \n # Compute the volumic mass of pure water at atmospheric pressure.\n zr1 = ( ( ( ( (6.536332e-9 * ztem - 1.120083e-6) * ztem + 1.001685e-4 )\n * ztem - 9.095290e-3 ) * ztem + 6.793952e-2 ) * ztem + 999.842594e0\n )\n\n # Compute the seawater volumic mass at atmospheric pressure.\n zr2 = ( ( ( ( 5.3875e-9 * ztem - 8.2467e-7) * ztem + 7.6438e-5)\n * ztem - 4.0899e-3) * ztem + 0.824493e0\n )\n\n zr3 = (-1.6546e-6 * ztem + 1.0227e-4) * ztem - 5.72466e-3\n\n # Compute the potential volumic mass (referenced to the surface).\n zrhop = (zr4 * zsal + zr3 * zws + zr2) * zsal + zr1\n\n # Compute the compression terms.\n ze = (-3.508914e-8 * ztem - 1.248266e-8) * ztem - 2.595994e-6\n\n zbw = (1.296821e-6 * ztem - 5.782165e-9) * ztem + 1.045941e-4\n\n zb = zbw + ze * zsal\n\n zc = (-7.267926e-5 * ztem + 2.598241e-3) * ztem + 0.1571896e0\n\n zaw = ( ( (5.939910e-6 * ztem + 2.512549e-3) * ztem - 0.1028859e0 ) \n * ztem - 4.721788e0\n )\n\n za = (zd * zws + zc) * zsal + zaw\n\n zb1 = (-0.1909078e0 * ztem + 7.390729e0) * ztem - 55.87545e0\n\n za1 = ( ( (2.326469e-3 * ztem + 1.553190e0) * ztem - 65.00517e0)\n * ztem + 1044.077e0\n )\n\n zkw = ( ( ( (-1.361629e-4 * ztem - 1.852732e-2) * ztem - 30.41638e0)\n * ztem + 2098.925e0) * ztem + 190925.60\n )\n\n zk0 = (zb1 * zws + za1) * zsal + zkw\n\n # Compute the potential density anomaly.\n sigmai_dep_out[jj, ji] = ( zrhop / (1.0e0 - pref / \n ( zk0 - pref * (za - pref * zb) ) )\n - zrau0\n )\n \n return sigmai_dep_out", "def _compute_sizes(self, k, expand=False, factor=False,\n simplify=False):\n if not self._has(\"omega\"):\n self.cosineSequences(expand=expand, factor=factor,\n simplify=simplify)\n if not self._has(\"theta\"):\n self.eigenvalues(expand=expand, factor=factor, simplify=simplify)\n if self.is_cyclic():\n m = tuple(Integer(1 if th in [2, -2] else 2)\n for th in self._.theta)\n else:\n try:\n m = tuple(integralize(_simplify(_factor(\n self._.n / sum(s * om**2\n for s, om in zip(k, omg)))))\n for omg in self._.omega)\n except TypeError:\n raise InfeasibleError(\"%s not integral\" % self.DUAL_SIZES)\n return m", "def total_mass_au(self):\n return np.sum(self.atomic_mass)", "def TR_algo3(h, vd=2):\n ve = 0\n vd = 2\n p = [0]*N\n for i in range(M-1, -1, -1):\n w = [bit_component(h, i*N+ii) for ii in range(N)]\n #print(i, w)\n w = sum( [wx*2**j for j, wx in enumerate(w)] )\n #print(i, w, gc(w))\n l = gc(w)\n l = T_inv(ve, vd, l)\n for j in range(N):\n p[j] += bit_component(l, j) << i\n ve = ve ^ rotate_left(e(w), vd+1)\n vd = (vd + d(w) + 1) % N\n return p", "def partitions(items: int) -> int:\n return bell_number(items)", "def sim_split_sym_mig_all(params, ns):\n #8 parameters\t\n nuA, nu1, nu2, nu3, m_1, m_2, m_3, T1 = params\n sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1] + ns[2])\n fs = moments.Spectrum(sts)\n fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1] + ns[2])\n fs = moments.Manips.split_2D_to_3D_2(fs, ns[1], ns[2])\n ## Population function for T1\n nu_T1 = [nu1, nu2, nu3]\n mig = numpy.array([[0, m_1, m_3],[m_1, 0, m_2], [m_3, m_2, 0]]) \n fs.integrate(nu_T1, T1, m=mig) \n return fs", "def get_tt72_disk(m=10.e11|units.MSun,\n r_min=25.|units.kpc,\n n_rings=[12,15,18,21,24,27,30,33,36,39,42,45],\n 
r_rings_rel=[0.2,0.25,0.3,0.35,0.4,0.45,0.5,0.55,0.6,0.65,0.7,0.75],\n disk_id='a',\n eps=0.|units.m):\n disk = Particles()\n \n for i,ri in enumerate(r_rings_rel):\n \n disk_rad_i = Particles(n_rings[i])\n \n a = ri*r_min\n phi_i = numpy.linspace(0., pipi, num=n_rings[i], endpoint=False)\n \n disk_rad_i.x = a * numpy.cos(phi_i)\n disk_rad_i.y = a * numpy.sin(phi_i)\n disk_rad_i.z = 0. * a\n \n x_r = disk_rad_i.x/a\n y_r = disk_rad_i.y/a\n \n #vc = (constants.G*m/a)**0.5\n vc = ( constants.G*m*a**2/(a**2 + eps**2)**1.5 )**0.5\n disk_rad_i.vx = -vc * y_r\n disk_rad_i.vy = vc * x_r\n disk_rad_i.vz = 0.0 * vc\n \n disk.add_particles(disk_rad_i)\n \n # test particles\n disk.mass = 0.|units.MSun\n \n # identification of the disk\n disk.id = disk_id\n \n return disk", "def FreeEnergy(self, v, partition=False):\n\n\t\tif partition:\n\t\t\tif self.logZ==None:\n\t\t\t\tself.estimateZ_AIS(steps=1000, M=500)\n\t\t\treturn -1*(numpy.log(1 + numpy.exp(numpy.dot(v.T, self.W) + v.sum()*self.hbias)).sum() + (numpy.dot(v.T, self.vbias))) - self.logZ\n\t\telse:\n\t\t\treturn -1*(numpy.log(1 + numpy.exp(numpy.dot(v.T, self.W) + v.sum()*self.hbias)).sum() + (numpy.dot(v.T, self.vbias)))", "def plotVolume(self, phys, forces, step):\r\n self.plotQuantity(step, phys.volume(), 'volume')", "def calc_menc(m,sortby=None):\n # Sort and sum if sortby array provided\n if sortby is not None:\n idxsort = np.argsort(sortby)\n idxsort_rev = np.argsort(idxsort)\n menc = np.cumsum(m[idxsort])[idxsort_rev]\n # Otherwise just sum\n else:\n menc = np.cumsum(m)\n # Return\n return menc", "def compute_volume(self) -> float:\n return (\n (1 if self.clockwise else -1)\n * np.sum(\n np.linalg.det(\n np.dstack(\n (\n self.vertices[self._faces[:, 0]],\n self.vertices[self._faces[:, 1]],\n self.vertices[self._faces[:, 2]],\n )\n )\n )\n )\n / 6\n )", "def disk_r(N=10,h=1.0,z0=1.0,p0=1.0,Mtot=1.0):\n r = np.linspace(0,100*h,N)\n z = np.array([random.uniform(0,z0) for _ in range(N)])\n #z = np.random.uniform(0,100*z0,N)\n #q_r = np.random.uniform(0,1,N)\n q_r = np.array([random.uniform(0,1) for _ in range(N)]) \n\n LHS = np.exp(-r/h)*(1+ r/h)\n RHS = 1 - ( (2*np.pi*q_r*p0*h**2)/(Mtot*np.tanh(z/z0)) )\n \n w = np.interp(r,LHS,RHS)\n u = np.array([random.uniform(0,1) for _ in range(N)])\n #u = np.random.uniform(0,1,N)\n theta = 2*np.pi*u\n\n x = w*np.cos(theta)\n y = w*np.sin(theta)\n\n p = np.stack((x,y,z),axis=-1)\n\n return p", "def fermionic_degree(self):\n return len(self[0])", "def mvar(mva, pf):\r\n x = mva*1000000\r\n y = math.acos(pf)\r\n #print (f' The arccos and angle is {y}')\r\n z=math.sin(y)\r\n #print(f' The sin and angle is {z}')\r\n w = round ((x*z)/1000000, 3)\r\n return w", "def getPartitionFunction(self, Tlist, V=1.0):\n\t\treturn _modes.translation_partitionfunction(Tlist, self.mass, self.dimension, V)", "def sumaImpar(self,numSeg,w):\n total=0\n for i in range(1,numSeg,2):\n total+=4*self.F(i*w)\n return total", "def total_volume(self):\n v = self.cell_edges\n v = np.abs(v[-1] - v[0])\n return v", "def total_volume(self):", "def markov_partition(markov_network):\n evidences = []\n for i in range(markov_network[\"n_variables\"]):\n evidences.append([k for k in range(markov_network[\"cardinalities\"][i])])\n\n evidences = itertools.product(*evidences)\n evidences = [e for e in evidences]\n\n result = 0\n for evidence in evidences:\n clique_potentials = []\n for c in range(markov_network[\"n_cliques\"]):\n ### Check if the clique is maximal\n ### If not, don't consider it\n ### For now, it only excludes singletons\n if 
len(markov_network[\"cliques\"][c][\"vars\"]) == 1:\n continue\n\n ### Consider only variables present at the considered clique\n reduced_evidence = itemgetter(*markov_network[\"cliques\"][c][\"vars\"])(evidence)\n\n ### When there's only one variable the itemgetter does not return a tuple\n if isinstance(reduced_evidence, int): reduced_evidence = (reduced_evidence, )\n\n ### Get the potential from all the cliques\n clique_potentials.append(retrieve_potential(reduced_evidence, markov_network[\"cliques\"][c][\"vars\"], markov_network))\n\n #print(clique_potentials)\n result += reduce(lambda x, y: x * y, clique_potentials)\n\n return result", "def mass_avionics(\n mass_uninstalled_avionics: float,\n):\n return (\n 1.73 *\n (mass_uninstalled_avionics / u.lbm) ** 0.983\n ) * u.lbm", "def rk_adaptive(accel,m,r,h,v,recur,emin=10**-12,emax=10**-8,hmax=.1,hmin=.01,recurmax=100):\n k1v = accel(m,r)\n k1r = v\n k2v = accel(m,r + 0.25*k1r*h)\n k2r = v + (0.25*k1v)*h\n k3v = accel(m,r + (3/32.*k1r + 9/32.*k2r)*h)\n k3r = v + (3/32.*k1v + 9/32.*k2v)*h\n k4v = accel(m,r + (1932/2197.*k1r - 7200/2197.*k2r + 7296/2197.*k3r)*h)\n k4r = v + (1932/2197.*k1v - 7200/2197.*k2v + 7296/2197.*k3v)*h\n k5v = accel(m,r + (439/216.*k1r - 8*k2r + 3680/513.*k3r - 845/4104.*k4r)*h)\n k5r = v + (439/216.*k1v - 8*k2v + 3680/513.*k3v - 845/4104.*k4v)*h\n k6v = accel(m,r - (8/27.*k1r + 2*k2r - 3544/2565.*k3r + 1859/4104.*k4r - 11/40.*k5r)*h)\n k6r = v - (8/27.*k1v + 2*k2v - 3544/2565.*k3v + 1859/4104.*k4v - 11/40.*k5v)*h\n\n # 4th order calculation\n new_v4 = v + h*(25/216.*k1v + 1408/2565.*k3v + 2197/4104.*k4v - 1/5.*k5v)\n new_r4 = r + h*(25/216.*k1r + 1408/2565.*k3r + 2197/4104.*k4r - 1/5.*k5r)\n \n # 5th order calculation\n new_v5 = v + h*(16/135.*k1v + 6656/12825.*k3v+28561/56430.*k4v - 9/50.*k5v + 2/55.*k6v) \n new_r5 = r + h*(16/135.*k1r + 6656/12825.*k3r+28561/56430.*k4r - 9/50.*k5r + 2/55.*k6r) \n\n # Calculate truncation error between 5th and 4th order\n eps = np.abs( (np.max(np.abs(new_r5)) - np.max(np.abs(new_r4))) / np.max(np.abs(new_r4)))\n \n # Compare eps to emin and emax and update h accordingly\n if np.max(eps) < emin:\n if h*2.0 < hmax:\n h *= 2.0\n new_v = new_v5\n new_r = new_r5 \n \n if np.max(eps) > emax:\n if h/2.0 > hmin:\n h /= 2.0\n print h\n # Error too large, call rk_adaptive again with smaller h\n if recur < recurmax:\n recur += 1\n rk_adaptive(accel,m,r,h,v,recur)\n new_v = new_v5\n new_r = new_r5\n \n else:\n new_v = new_v5\n new_r = new_r5\n \n return new_v, new_r, h", "def _harmonic_sum(self, rank: int) -> complex:\n return (self.flm * self.slepian.eigenvectors[rank].conj()).sum()", "def multinomial_coefficient(partition, n=None):\n tot = 0\n deg = 1\n for p in partition:\n tot += p\n deg *= factorial(p)\n if n is None:\n n = tot\n return factorial(n)//deg//factorial(n-tot)", "def sublinear_random_mon_VI(mdp, eps, delta, analyze=False):\n\n analysis = {}\n K = math.log(mdp.M / (eps * (1 - mdp.gamma)), 2)\n K = int(K) + 1\n T = 1. / (1 - mdp.gamma) * math.log(4. 
/ (1 - mdp.gamma))\n T = int(T) + 1\n\n v_k = np.zeros((mdp.nb_s, 1))\n pi_k = np.zeros((mdp.nb_a, 1))\n eps_k = mdp.M / (1 - mdp.gamma)\n\n if analyze:\n analysis['K'] = K\n analysis['T'] = T\n analysis['eps'] = eps\n analysis['delta'] = delta\n analysis['m_hist'] = []\n analysis['V_hist'] = []\n analysis['pi_hist'] = []\n\n for k in tqdm(range(K)):\n eps_k = 0.5 * eps_k\n v_k, pi_k, m_hist = sample_randomize_mon_VI(mdp, v_k, pi_k, T,\n (1 - mdp.gamma)*eps/(4*mdp.gamma), delta/K, analyze)\n\n if analyze:\n analysis['m_hist'].append(m_hist)\n analysis['V_hist'].append(v_k)\n analysis['pi_hist'].append(pi_k)\n\n return v_k, pi_k, analysis", "def gaussianElimKer(M, zero, one):\n # V satisfies the invariant\n # M = V M_0\n V = [Polynomial([zero] * i + [one]) for i in range(len(M))]\n pivots = [None] * (len(M) + 1)\n for l in range(len(M)):\n while M[l].deg >= 0:\n idp = M[l].deg\n if pivots[idp] is None:\n pivots[idp] = l\n break\n else:\n c = M[l][idp] / M[pivots[idp]][idp]\n M[l] -= c * M[pivots[idp]]\n V[l] -= c * V[pivots[idp]]\n else:\n # If a line is null, we found an element of the kernel\n return V[l]\n return None", "def fuel_from_mass(m):\n return (m/3).astype(int) - 2", "def relative_partition_function(self):\n return self.overall_norm**2", "def trans_specprof(m):\n m = asmatrix(m)\n row_sums = sum(m, axis=1)\n result = m / row_sums\n return result", "def _reduced_mass(structure) -> float:\n reduced_comp = structure.composition.reduced_composition\n num_elems = len(reduced_comp.elements)\n elem_dict = reduced_comp.get_el_amt_dict()\n\n denominator = (num_elems - 1) * reduced_comp.num_atoms\n\n all_pairs = combinations(elem_dict.items(), 2)\n mass_sum = 0\n\n for pair in all_pairs:\n m_i = Composition(pair[0][0]).weight\n m_j = Composition(pair[1][0]).weight\n alpha_i = pair[0][1]\n alpha_j = pair[1][1]\n\n mass_sum += (alpha_i + alpha_j) * (m_i * m_j) / (m_i + m_j) # type: ignore\n\n reduced_mass = (1 / denominator) * mass_sum\n\n return reduced_mass", "def generated_data_vol(self):\n return sum(b.data_vol for b in self.sent) if self.monitor else -1", "def UnitCellVolume(ID): \n ID=goodID(ID)\n lp=latticeParameters[ID]\n a=lp[0]/u['ang']\n b=lp[1]/u['ang']\n c=lp[2]/u['ang']\n alpha=lp[3]\n beta=lp[4]\n gamma=lp[5]\n L=latticeType[ID]\n ca=cosd(alpha)\n cb=cosd(beta)\n cg=cosd(gamma)\n V=a*b*c*np.sqrt(1-ca**2-cb**2-cg**2+2*ca*cb*cg)\n return V", "def mass_variance(self, logM, k = [], pk = [], var = 'cb', window = 'th', **kwargs):\n return self.mass_variance_multipoles(logM = logM, k = k, pk = pk, var = var, window = window, **kwargs)", "def mass_tot_rho(self):\n\n dm = np.zeros(self.nzon)\n dm[0] = 4. * np.pi / 3. * (self.r[0] ** 3 - self.r_cen ** 3) * self.rho[0]\n for i in range(1, self.nzon):\n dm[i] = 4. / 3. 
* np.pi * (self.r[i] ** 3 - self.r[i - 1] ** 3) * self.rho[i]\n # print(f' M_tot(Density) = {np.sum(dm)/phys.M_sun:.3f}')\n return np.sum(dm)", "def getBeamNaturalFrequencies(K,M,L,plot=False):\n\n n = int(M.shape[0]/2)\n\n A = np.dot(npl.inv(M[2:,2:]), K[2:,2:]) # fixed-free rod\n lmbda,v = npl.eig(A)\n sort_idx = lmbda.argsort()\n\n w = np.sqrt(lmbda[sort_idx[:min(3,n-1)]])\n print(w/(2*np.pi))\n\n if plot:\n plt.figure()\n for i in sort_idx[:min(3,n-1)]:\n plt.plot(np.linspace(0,L,n),np.append([0],v[::2,i]))\n plt.title(\"Mode Shapes\")\n plt.xlabel(\"Axial Distance [m]\")\n plt.ylabel(\"Normalized Axial Displacement\")\n plt.legend([\"Mode 1\", \"Mode 2\", \"Mode 3\"])\n plt.grid()\n plt.xlim([0,L])\n plt.show()", "def _vmomentsurfacemass(self,R,n,m,romberg=False,nsigma=None,\n relative=False,phi=0.,deriv=None):\n #odd moments of vR are zero\n if isinstance(n,int) and n%2 == 1:\n return 0.\n if nsigma == None:\n nsigma= _NSIGMA\n logSigmaR= self.targetSurfacemass(R,log=True,use_physical=False)\n sigmaR2= self.targetSigma2(R,use_physical=False)\n sigmaR1= sc.sqrt(sigmaR2)\n logsigmaR2= sc.log(sigmaR2)\n if relative:\n norm= 1.\n else:\n norm= sc.exp(logSigmaR+logsigmaR2*(n+m)/2.)/self._gamma**m\n #Use the asymmetric drift equation to estimate va\n va= sigmaR2/2./R**self._beta*(1./self._gamma**2.-1.\n -R*self._surfaceSigmaProfile.surfacemassDerivative(R,log=True)\n -R*self._surfaceSigmaProfile.sigma2Derivative(R,log=True))\n if math.fabs(va) > sigmaR1: va = 0. #To avoid craziness near the center\n if deriv is None:\n if romberg:\n return sc.real(bovy_dblquad(_vmomentsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n [R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m],\n tol=10.**-8)/sc.pi*norm/2.)\n else:\n return integrate.dblquad(_vmomentsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n (R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m),\n epsrel=_EPSREL)[0]/sc.pi*norm/2.\n else:\n if romberg:\n return sc.real(bovy_dblquad(_vmomentderivsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n [R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m,deriv],\n tol=10.**-8)/sc.pi*norm/2.)\n else:\n return integrate.dblquad(_vmomentderivsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n (R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m,deriv),\n epsrel=_EPSREL)[0]/sc.pi*norm/2.", "def GetPartitioningArray(self):\n return _hypre.HypreParVector_GetPartitioningArray(self)", "def get_cell_volumes(mesh):\n num_els = mesh.num_cells()\n coords = mesh.coordinates()\n cells = mesh.cells()\n dim = len(coords[0])\n\n cell_volume = np.zeros(num_els, dtype=float)\n div_fact = 1.0/float(fact(dim)) #division factor for n-dim tetrahderon\n \n for i in range(num_els):\n cell_volume[i] = abs(la.det(np.insert(coords[cells[i]], dim, 1, axis=1)))\n \n return div_fact*cell_volume", "def dos_integral(E,dos,m=0):\n somma = 0.0\n h = 0.5*(E[2]-E[0])\n for j in range(0,len(dos)-3,3):\n somma += 3.0*pow(E[j],m)*dos[j]+3.0*pow(E[j+1],m)*dos[j+1]+2.0*pow(E[j+2],m)*dos[j+2]\n \n return h*somma*3.0/8.0;", "def void_size_function_EST(self, R, z, k, pk, delta_v = None, a = 1., p = 0.):\n if 
delta_v == None: delta_v = -1.76\n # Set number of redshifts\n pk=np.atleast_2d(pk)\n # Check dimensions\n assert len(np.atleast_1d(z))==len(pk), \"Redshifts are not of the same length as power spectra\"\n nz = len(pk)\n nR = len(np.atleast_1d(R))\n\n # Set minimum/maximum radii and masses\n Rmin,Rmax = 0.01, 200. # Mpc/h\n Mmin,Mmax = self.mass_in_radius(Rmin),self.mass_in_radius(Rmax)\n\n # Temporary radii and masses\n Rtmp = self.radius_of_mass(self.M)\n Mtmp = self.M[np.where((Rtmp>Rmin) & (Rtmp<Rmax))]\n Rtmp = Rtmp [np.where((Rtmp>Rmin) & (Rtmp<Rmax))]\n logMtmp = np.log10(Mtmp)\n\n # sigma_j^2\n s0 = self.mass_variance_multipoles(logM=logMtmp,k=k,pk=pk,j=0,smooth=True ,window='th')\n s1 = self.mass_variance_multipoles(logM=logMtmp,k=k,pk=pk,j=1,smooth=True ,window='th')\n s2 = self.mass_variance_multipoles(logM=logMtmp,k=k,pk=pk,j=2,smooth=True ,window='th')\n\n # Useful quantities\n gamma_p = s1/np.sqrt(s0*s2) # gamma parameter\n R_star = np.sqrt(3.*s1/s2) # R_* parameter\n dv = np.abs(delta_v) # Use -1.76!!!!\n nu = dv/s0**.5 # Peak height\n RLtmp = np.outer(self.lagrange_to_euler(z = z, delta_v = delta_v),Rtmp)\n RL = np.outer(self.lagrange_to_euler(z = z, delta_v = delta_v),R)\n\n # Excursion Set Troughs\n G1 = np.array([self.G_n_BBKS(1, gamma_p[iz], nu[iz]) for iz in range(nz)])\n f_ST = self.ShethTormen_mass_function(s0**.5,delta_th=dv,a=a,p=p)/(2.*nu)\n f_nu = self.volume_of_radius(Rtmp, 'th')/(2.*np.pi*R_star**2.)**(3./2.)*(f_ST)*G1/(gamma_p*nu)\n\n # VSF\n dndR = np.zeros((nz,nR))\n loge = np.log10(np.e)\n for iz in range(nz):\n s0_int = si.interp1d(logMtmp, s0[iz],'cubic',bounds_error=False,fill_value='extrapolate')\n log_der = sm.derivative(s0_int, logMtmp, dx = 1e-3, n = 1, order = 3)\n dnu_dr = -3./2.*nu[iz]/Rtmp*loge/s0[iz]*log_der\n V = self.volume_of_radius(Rtmp, 'th')\n dndR_tmp = f_nu[iz]/V*dnu_dr\n dndR[iz] = si.interp1d(RLtmp[iz],dndR_tmp,'cubic')(RL[iz])\n return RL,dndR", "def CuN17vol(n,h,w,V):\n Vshell = 2*ZCE_GaAs(n,V)*(h-ZCE_GaAs(n,V)) + w*ZCE_GaAs(n,V)\n Vcore = (w-2*ZCE_GaAs(n,V))*(h-ZCE_GaAs(n,V))\n return Vshell*1e14,Vcore*1e14", "def count_partitions(n, m):\n if n == 0:\n return 1\n elif n < 0:\n return 0\n elif m == 0:\n return 0\n else:\n with_m = count_partitions(n - m, m)\n without_m = count_partitions(n, m - 1)\n return with_m + without_m", "def u(self, k, m, z):\n result = self.nfw(k, m, z) * m / self.U.rho_m(z)\n # FOG\n #sigma2 = self.U.sigma2DispFog(m, z)\n #result *= np.exp(- 0.5 * sigma2 * k**2 * mu**2)\n return result", "def nball_volume(R,k=3):\n return (np.pi**(k/2.0)/gamma(k/2.0+1.0))*R**k", "def inertial_system_partial(self, part, masswt=True, zero=ZERO):\n return diagonalize3x3symmat(self.inertia_tensor_partial(part, masswt, zero))", "def add_natural_isotope(self):\n\n for fragment in self.mdv:\n if not self.formula[fragment] == \"\":\n pattern = self.get_fragment_mdv(fragment)\n num = len(pattern)\n matrix = transition_matrix(self.formula[fragment])[0:num,0:num]\n transformed = numpy.dot(matrix, pattern)\n for i, ratio in enumerate(transformed):\n self.mdv[fragment][i][\"ratio\"] = ratio\n return", "def volume(self):\n return (\n (4 / 3 * np.pi)\n * self.semimajor_axis\n * self.semimedium_axis\n * self.semiminor_axis\n )", "def vol_pousse(x):\r\n if Class(x) ==\"I\" and Sons(x) != []:\r\n return vol_uc(x) + sum([vol_uc(y) for y in Sons(x)])\r\n else:\r\n return vol_uc(x)", "def _get_vp_ksph(self, geom):\n if geom.vp is None:\n geom.voronoi(self.pbc, self.ratio)\n if not hasattr(geom.vp, 'vp_volume'):\n f = geom.vp.vp_faces()\n v, a = 
geom.vp.vp_volumes(f, partial=False)\n else:\n v = geom.vp.vp_volume\n a = geom.vp.vp_area\n ksph = 36. * np.pi * v * v / (a * a * a)\n return ksph", "def normal_vol(k, f, t, alpha, beta, rho, volvol):\r\n # We break down the complex formula into simpler sub-components\r\n f_av = np.sqrt(f * k)\r\n A = - beta * (2 - beta) * alpha**2 / (24 * f_av**(2 - 2 * beta))\r\n B = rho * alpha * volvol * beta / (4 * f_av**(1 - beta))\r\n C = (2 - 3 * rho**2) * volvol**2 / 24\r\n FMKR = _f_minus_k_ratio(f, k, beta)\r\n ZXZ = _zeta_over_x_of_zeta(k, f, t, alpha, beta, rho, volvol)\r\n # Aggregate all components into actual formula (B.67a)\r\n v_n = alpha * FMKR * ZXZ * (1 + (A + B + C) * t)\r\n return v_n", "def u(self, k, m, z):\n result = self.ProfNFW.nfw(k, m, z) * self.Ngal(m) / self.nBarGal(1./(1.+z))\n return result", "def sim_split_no_mig_size(params, ns):\n #9 parameters\t\n nuA, nu1a, nu1b, nu2a, nu2b, nu3a, nu3b, T1, T2 = params\n sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1] + ns[2])\n fs = moments.Spectrum(sts)\n fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1] + ns[2])\n fs = moments.Manips.split_2D_to_3D_2(fs, ns[1], ns[2])\n ## Population function for T1\n nu_T1 = [nu1a, nu2a, nu3a]\n fs.integrate(nu_T1, T1)\n ## Population function for T2\n nu_T2 = [nu1b, nu2b, nu3b]\n fs.integrate(nu_T2, T2) \n return fs", "def Schechter_M_z(M, redshift, richness):\n\treturn 0.4 * n.log(10.) * 10**logPhi_evol(redshift, richness) * 10**(0.4 * (M_s_evol(redshift, richness) - M) * (alpha_evol(redshift, richness) + 1)) * n.e**( -10** ( 0.4 * (M_s_evol(redshift,richness) - M)))", "def mixture_vMF_density(x, mu_list, k_list):\n return_value = 0\n \n nr_mixtures = len(mu_list)\n \n for mu, k in zip(mu_list,k_list):\n \n Z = 2 * np.pi * ( np.exp(k) - np.exp(- k) ) / k\n \n return_value += 1 / Z * np.exp( k * np.dot(x, mu) )\n \n return return_value / nr_mixtures", "def getPartitionFunction(self, Tlist):\n\t\tQ = np.ones((len(Tlist)), np.float64) / self.symmetry\n\t\t# Active K-rotor\n\t\trotors = [mode for mode in self.modes if isinstance(mode, RigidRotor)]\n\t\tif len(rotors) == 0:\n\t\t\tTrot = constants.h * constants.c * 100.0 * 1.0 / constants.kB\n\t\t\tQ0 = [math.sqrt(T / Trot) for T in Tlist]\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\t# Other modes\n\t\tfor mode in self.modes:\n\t\t\tQ0 = mode.getPartitionFunction(Tlist)\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\treturn Q", "def spectrum(syst, params, k=20):\n ham = syst.hamiltonian_submatrix(params=params, sparse=True)\n (energies, wfs) = mumps_eigsh(ham, k=k, sigma=0)\n return energies, wfs", "def compute_density_ari(cur, N):\n ## arithmetic average cardinality\n avg_card = 0\n for n in range(N):\n card = get_parameter(cur, par=(\"card_B%d\" % n))\n avg_card += card / float(N)\n\n ## average mass\n if avg_card == 0:\n return (-1, 0)\n else:\n return (get_parameter(cur, par=\"B_mass\") / avg_card, avg_card)", "def get_mass(element):\n return pt.elements.isotope(element).mass", "def get_minimum_air_volume(v_vent: np.ndarray) -> float:\n\n return v_vent.sum()", "def volume(self):\n vol = ((self.I0 * self.V.omega *\n self._mu_0 / (self._mu_0 + self._mu_ex))\n * (1. - np.exp(-(self.V.tau / self._mu_0) -\n (self.V.tau / self._mu_ex)))\n * self.V.p(self.t_0, self.t_ex, self.p_0, self.p_ex,\n param_dict=self.param_dict))\n\n return (1. 
- self.bsf) * vol", "def v_atm_n(f, t, alpha, beta, rho, volvol):\r\n f_av = f\r\n A = - beta * (2 - beta) * alpha**2 / (24 * f_av**(2 - 2 * beta))\r\n B = rho * alpha * volvol * beta / (4 * f_av**(1 - beta))\r\n C = (2 - 3 * rho**2) * volvol**2 / 24\r\n v_atm_n = alpha * f**beta * (1 + (A + B + C) * t)\r\n return v_atm_n", "def networkx_volume_in(graph, vertex_set):\n degree = graph.in_degree\n return sum(d for v, d in degree(vertex_set, weight=\"weight\"))", "def count_partitions(n, m):\n # print(n, m)\n if n == 0:\n return 1\n elif n < 0:\n return 0\n elif m == 0:\n return 0\n else:\n return count_partitions(n-m, m) + count_partitions(n, m//2)", "def volterra_BM_path_chol(grid_points, M, H, T,rho):\n\n assert 0<H<1.0\n\n ## Step1: create partition\n\n X=np.linspace(0, T, num=grid_points)\n\n # get rid of starting point\n X=X[1:grid_points]\n\n ## Step 2: compute covariance matrix\n size=2*(grid_points-1)\n Sigma=np.zeros([size,size])\n #Sigma(1,1)\n for j in range(grid_points-1):\n for i in range(grid_points-1):\n if i==j:\n Sigma[i,j]=np.power(X[i],2*H)/2/H\n else:\n s=np.minimum(X[i],X[j])\n t=np.maximum(X[i],X[j])\n Sigma[i,j]=np.power(t-s,H-0.5)/(H+0.5)*np.power(s,0.5+H)*special.hyp2f1(0.5-H, 0.5+H, 1.5+H, -s/(t-s))\n #Sigma(1,2) and Sigma (2,1)\n for j in range(grid_points-1):\n for i in range(grid_points-1):\n Sigma[i,j+((grid_points-1))]=rho/(H+0.5)*(np.power(X[i],H+0.5)-np.power(X[i]-np.minimum(X[i],X[j]),H+0.5))\n Sigma[i+(grid_points-1),j]=rho/(H+0.5)*(np.power(X[j],H+0.5)-np.power(X[j]-np.minimum(X[i],X[j]),H+0.5))\n #Sigma(2,2)\n for j in range(grid_points-1):\n for i in range(grid_points-1):\n Sigma[i+(grid_points-1),j+(grid_points-1)]=np.minimum(X[i],X[j])\n\n ## Step 3: compute Cholesky decomposition\n P=np.linalg.cholesky(Sigma)\n\n ## Step 4: draw Gaussian rv\n\n Z=np.random.normal(loc=0.0, scale=1.0, size=[M,2*(grid_points-1)])\n\n ## Step 5: get (V,W) and add 0's in the beginning\n\n V=np.zeros((M,grid_points))\n W=np.zeros((M,grid_points))\n for i in range(M):\n aux=np.dot(P,Z[i,:])\n V[i,1:grid_points]=aux[0:(grid_points-1)]\n W[i,1:grid_points]=aux[(grid_points-1):2*(grid_points-1)]\n\n return V, W", "def peridym_compute_weighted_volume(cell_cent, cell_vol, nbr_lst, nbr_beta_lst, horizon, omega_fun):\n\n mw = np.zeros(len(cell_vol), dtype=float) #m is wighted volume\n\n for i in range(len(cell_cent)):\n curr_node_coord = cell_cent[i]\n \n #declare empty lists for current node neighbor\n #attributes like neighbor bond vector, bond len,\n #and influence field \n #refer ch5 algo1 of handbook of peridynamic modelling\n #by silling etal \n\n curr_nbr_lst = nbr_lst[i] \n curr_beta_lst = nbr_beta_lst[i]\n curr_nbr_bnd_vct = cell_cent[curr_nbr_lst] - curr_node_coord\n curr_nbr_bnd_len = la.norm(curr_nbr_bnd_vct, 2, axis=1)\n mw[i] = sum(omega_fun(curr_nbr_bnd_vct, horizon)*curr_nbr_bnd_len**2*cell_vol[curr_nbr_lst]*curr_beta_lst)\n\n return mw" ]
[ "0.5707858", "0.54226637", "0.5395542", "0.52172196", "0.5165793", "0.5102502", "0.5102502", "0.5101261", "0.50958854", "0.49969062", "0.49926218", "0.49795717", "0.4946353", "0.49354938", "0.49308503", "0.49158522", "0.4907569", "0.4894429", "0.4889348", "0.4864072", "0.48453435", "0.48379022", "0.48185363", "0.48128682", "0.48117468", "0.48037538", "0.48007002", "0.47995242", "0.47808802", "0.4769919", "0.4763718", "0.47626963", "0.47580236", "0.47579825", "0.47530454", "0.4743277", "0.47410852", "0.4736159", "0.47355956", "0.47340158", "0.47232944", "0.4721866", "0.47215503", "0.4720005", "0.47152564", "0.47142312", "0.47031024", "0.4701752", "0.470009", "0.46996942", "0.4698387", "0.46924263", "0.46915203", "0.4691506", "0.46889648", "0.4687594", "0.46849462", "0.4678572", "0.46763256", "0.46724927", "0.46713534", "0.4669989", "0.4650049", "0.46473417", "0.4646248", "0.46444783", "0.46429688", "0.46422926", "0.46419972", "0.4639133", "0.46383798", "0.46342725", "0.46332893", "0.46259987", "0.4619459", "0.46158478", "0.46065566", "0.46058288", "0.46036893", "0.45979935", "0.45967412", "0.45918807", "0.45799914", "0.457257", "0.45720747", "0.4571049", "0.4569372", "0.45661655", "0.45647573", "0.4560238", "0.45585522", "0.45557958", "0.45551074", "0.45550108", "0.45545325", "0.45536605", "0.45495155", "0.45473132", "0.4545433", "0.4531862" ]
0.7013808
0
create a database connection to the SQLite database specified by the db_file
def create_connection(db_file):
    try:
        conn = sqlite3.connect(db_file)
        #print("connection with the database is established")
        return conn
    except Error as e:
        print(e)
        #print("we have just failed to connect to the database")
        return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_db_connection(db_file):\n\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Exception as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n conn = sqlite3.connect(db_file)\n return conn", "def create_db(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(\"Database created, version = \", sqlite3.version)\n except Error as e:\n print(e)\n finally:\n conn.close()", "def create_connection(db_file):\n\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n conn.row_factory = sqlite3.Row\n except Error as e:\n print(e)\n return conn", "def create_connection(self, db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect('project1.db')\n return conn\n except Error as e:\n print(e)\n \n return conn", "def create_connection(self, db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n\ttry:\n\t\tconn = sqlite3.connect(db_file)\n\texcept Error as e:\n\t\tprint(e)\n\t\n\treturn conn", "def create_connection(db_file):\n\tconn = None\n\ttry:\n\t\tconn = sqlite3.connect(db_file)\n\texcept Error as e:\n\t\tprint(e)\n\treturn conn", "def create_connection(db_file):\n\tconn = None\n\ttry:\n\t\tconn = sqlite3.connect(db_file)\n\t\treturn conn\n\texcept Error as e:\n\t\tprint(e)\n \n\treturn conn", "def create_connection(self, db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n except Error as e:\r\n print(e)\r\n\r\n return conn", "def create_connection(db_file):\n\tconn = None\n\ttry:\n\t\tconn = sqlite3.connect(db_file)\n\t\treturn conn\n\texcept Error as e:\n\t\tprint(e)\n\treturn conn", "def create_connection(db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n except Error as e:\r\n print(e)\r\n \r\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(sqlite3.version)\n except Error as e:\n print(e)\n finally:\n if conn:\n conn.close()", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(sqlite3.version)\n except Error as e:\n print(e)\n finally:\n if conn:\n conn.close()", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(sqlite3.version)\n except Error as e:\n print(e)\n finally:\n if conn:\n conn.close()", "def create_connection(db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n except Error as e:\r\n print(e)\r\n\r\n return conn", "def create_connection(db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n return conn\r\n except Error as e:\r\n print(e)\r\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except ValueError as e:\n raise e\n\n return conn", "def create_connection(self, db_file):\n self.conn = None\n try:\n self.conn = sqlite3.connect(db_file)\n except Error as e:\n print('[Database] Error:')\n print(e)\n \n return self.conn", "def create_database(databasefile):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(databasefile, check_same_thread=False)\r\n except Error as e:\r\n print(e)\r\n\r\n return conn", "def create_connection(db_file):\n conn = None\n\n try:\n # create database file or connect to existing\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n 
print(e)\n\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Exception as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n print(sqlite3.version)\n except Error as e:\n print(e)\n finally:\n conn.close()", "def create_connection(db_file):\n\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except sqlite3.Error as e:\n print(e)\n\n return conn", "def create_connection(db_file_path):\n connection = None\n try:\n connection = sqlite3.connect(db_file_path)\n except Error as e:\n print(e)\n finally:\n if connection:\n connection.close()", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n\n return conn", "def create_connection(sqlite_db_file):\n try:\n connection_db = sqlite3.connect(sqlite_db_file)\n return connection_db\n except Exception:\n pass", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n \n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n \n return conn", "def create_connection(db_file):\n try:\n con = sqlite3.connect(db_file)\n print('SQLite Version is: ', sqlite3.version)\n except sqlErr as se:\n raise Exception('SQL Error in create_connection(): ' + se.__str__())\n except Exception as e:\n raise Exception('General Error in create_connection(): ' + e.__str__())\n return con", "def create_connection(db_file):\n try:\n con = sqlite3.connect(db_file)\n print('SQLite Version is: ', sqlite3.version)\n except sqlErr as se:\n raise Exception('SQL Error in create_connection(): ' + se.__str__())\n except Exception as e:\n raise Exception('General Error in create_connection(): ' + e.__str__())\n return con", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n conn.row_factory = sqlite3.Row\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except sqlite3.Error as e:\n print('coucou')\n print(e)\n return conn", "def create_connection(self, db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn;\n except Error as e:\n print(e)\n finally:\n\n pass\n return", "def create_connection(db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n print(sqlite3.version) \r\n except Error as e:\r\n print(e)\r\n return conn", "def create_connection(self,db_file):\n print(\"yes\")\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n 
print(sqlite3.version)\r\n except Error as e:\r\n print(e)\r\n return conn", "def create_connection(db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n print(sqlite3.version)\r\n except Error as e:\r\n print(e)\r\n\r\n return conn", "def create_connection(db_file):\n try:\n return sqlite3.connect(db_file)\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n\tconn = None\n\ttry:\n\t\tconn = sqlite3.connect(db_file)\n\texcept Error as e:\n\t\tprint(e)\n\tfinally:\n\t\tif conn:\n\t\t\treturn conn", "def _CreateConnection(db_file):\r\n conn = None\r\n try:\r\n conn = sqlite3.connect(db_file)\r\n except Error as e:\r\n print(e)\r\n\r\n return conn", "def get_db(file_path):\n db_new = not os.path.isfile(file_path)\n sqlite3_detect_types = sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES\n db = sqlite3.connect(file_path, detect_types=sqlite3_detect_types)\n if db_new:\n create_db(db)\n return db", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(sqlite3.version)\n except Error as e:\n print(e)\n return conn", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Exception as e:\n print(e)\n return None", "def create_connection(db_file):\n\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(\"Connection sucessfull. SQLite3 version \"+sqlite3.version)\n return conn\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(sqlite3.version)\n return conn\n except Error as e:\n print(e)", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except sqlite3.Error as e:\n print(e, file=sys.stderr)\n return None", "def create_connection(self, db_file: str):\r\n try:\r\n con = sqlite3.connect(db_file)\r\n except sqlErr as se:\r\n raise Exception('SQL Error in create_connection(): ' + se.__str__())\r\n except Exception as e:\r\n raise Exception('General Error in create_connection(): ' + e.__str__())\r\n return con", "def _create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n\n # Create a new SQLite table\n cur.execute(\"CREATE TABLE {tn} ({r1}, {r2}, {time} {ft})\"\n .format(tn=TABLE_NAME, r1=INPUT_COLUMN, r2=OUTPUT_COLUMN,\n time='time', ft='TEXT'))\n\n except Error as err:\n print(err)\n\n finally:\n conn.commit()\n conn.close()", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Exception as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Exception as error:\n print(error)\n\n return None", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(sqlite3.version)\n except Error as e:\n print(e)\n return None\n finally:\n return conn", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except :\n print(\"fail while connecting\")\n\n return None", "def create_connection(db_file):\r\n \r\n try:\r\n conn = sqlite3.connect(db_file)\r\n c = conn.cursor()\r\n c.execute('''CREATE TABLE IF NOT EXISTS contacts\r\n (id INTEGER PRIMARY KEY, name text NOT NULL, address text NOT NULL, email text NOT NULL, phone text NOT NULL)''')\r\n \r\n except Error as e:\r\n print(e)\r\n finally:\r\n if conn:\r\n conn.close()", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n 
except Error as e:\n print(e)\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n return None", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file, check_same_thread=False)\n return conn\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Exception as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n return None", "def create_connection(db_file):\n\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def createConnection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n\n return None", "def create_connection(db_file):\n\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as error:\n print error\n\n return None", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(\"connected\")\n except Error as e:\n print(e)\n \n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(\"connected\")\n except Error as e:\n print(e)\n \n return conn", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n \n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n \n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n \n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n \n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n \n return None", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as e:\n print(e)\n \n return None", "def __create_connection(db_file: Path) -> sqlite3.Connection:\n\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n except sqlite3.Error as e:\n logging.error(e)\n logging.info('Connection could be created. 
Return sqlite3.Connection object.')\n return conn", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file)\n return conn\n except Error as connection_error:\n print connection_error\n return None", "def db_connect(dbfile) :\n global DB\n if not os.path.isfile(dbfile) :\n raise TypeError(\"The database file must be created first.\")\n DB = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES)\n DB.executescript(\"\"\"\n pragma foreign_keys = ON;\n \"\"\")\n DB.row_factory = sqlite3.Row", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file, timeout=30000)\n except Error as e:\n print(e)\n\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(\"DB Connection setup\")\n except Error as e:\n print(e)\n finally:\n if conn:\n return conn\n else:\n print(\"DB Connection failed\")", "def connect_db(file_name):\n db = sqlite3.connect(file_name)\n c = db.cursor()\n return db, c", "def create_db(db_file):\n try:\n conn = sqlite3.connect(db_file)\n cursor = conn.cursor()\n cursor.execute(CREATE_VISITORS_SQL)\n cursor.execute(CREATE_SETTINGS_SQL)\n cursor.execute(ADD_PASS_SQL)\n conn.commit()\n conn.close()\n app.logger.info(\"Database commit successful\")\n except Error as e:\n print(e)\n raise", "def create_connection(db_file):\r\n try:\r\n #don't check for same thread (to use multithreading)\r\n conn = sqlite3.connect(db_file, check_same_thread=False) \r\n return conn\r\n except Error as e:\r\n print(e)\r\n \r\n return None", "def create_conn():\n return sqlite3.connect(DBFILE)", "def create_sqlite_connection(db_filename=SQLITE_DATABASE_FILE):\n return sqlite3.connect(db_filename)", "def create_database(file: Path) -> tuple[sqlite3.Connection, sqlite3.Cursor]:\n if file.exists():\n os.remove(file)\n\n conn = sqlite3.connect(file)\n cursor = conn.cursor()\n create_table(conn, cursor)\n\n return conn, cursor", "def create_connection(db_file):\n try:\n conn = sqlite3.connect(db_file, timeout=10)\n return conn\n except Error as e:\n print(e)\n \n return None", "def create_connections(db_file):\n\n\tpath.exists(db_file) == True\n\n\tif path.exists(db_file):\n\t\tprint ('database is already exist')\n\telse:\n\t\ttry:\n\t\t\tconn = sqlite3.connect(db_file)\n\t\t\tprint('Creating schema...')\n\t\t\twith open('dhcp_snooping_schema.sql', 'r') as f:\n\t\t\t\tschema = f.read()\n\t\t\t\tconn.executescript(schema)\n\t\t\tprint ('Done')\n\t\texcept sqlite3.OperationalError as e:\n\t\t\tprint(e)\n\t\tfinally:\n\t\t\tif conn:\n\t\t\t\tconn.close()", "def create_connection(db_file):\r\n conn = None\r\n try:\r\n #conn = sqlite3.connect(db_file)\r\n conn = psycopg2.connect(conn_string)\r\n cursor = conn.cursor()\r\n\r\n except Error as e:\r\n print(e)\r\n return conn", "def create_connection(db_file):\n conn = None\n try:\n conn = connect(db_file)\n except Error as e:\n print(e)\n\n return conn", "def create_sql_connection(db_file):\n trace_id = str(random.randint(1000000, 9999999))\n logger.info(\"[trace start \" + trace_id + \"]\")\n if os.path.exists(db_file):\n try:\n my_db = sqlite3.connect(db_file)\n logger.info(\"[trace end \" + trace_id + \"]\")\n return my_db\n except sqlite3.Error:\n logger.error(str(sqlite3.Error))\n raise Exception(str(sqlite3.Error))\n else:\n logger.info(db_file + \" does not seem to exist; creating it\")\n logger.info(\"[trace end \" + trace_id + \"]\")\n return sqlite3.connect(db_file)\n logger.info(\"[trace end \" + trace_id + \"]\")\n return None" ]
[ "0.846957", "0.8452252", "0.8396848", "0.838181", "0.8371498", "0.8364115", "0.83331347", "0.8318548", "0.83168167", "0.8303119", "0.8297461", "0.8294878", "0.82667893", "0.825977", "0.825977", "0.825977", "0.8247484", "0.8242083", "0.8225199", "0.8222829", "0.82215047", "0.82213676", "0.8219448", "0.82104135", "0.82059747", "0.8204832", "0.82037073", "0.82037073", "0.82037073", "0.82037073", "0.82037073", "0.8197147", "0.8186915", "0.8186268", "0.8186268", "0.81850725", "0.81850725", "0.8180934", "0.8170801", "0.81669146", "0.8166566", "0.8159075", "0.81486934", "0.8142175", "0.8134084", "0.8125555", "0.81224084", "0.8111487", "0.81112725", "0.80777246", "0.8077592", "0.80774695", "0.80717576", "0.8068723", "0.8057399", "0.8056656", "0.80554813", "0.80413043", "0.8038501", "0.80325586", "0.80324143", "0.80324143", "0.8031674", "0.80292237", "0.80155706", "0.80155706", "0.80155706", "0.80155706", "0.80155706", "0.80155706", "0.8015277", "0.79918754", "0.79918754", "0.79835474", "0.7974919", "0.7971683", "0.7970954", "0.7970954", "0.79646254", "0.79646254", "0.79646254", "0.79646254", "0.79646254", "0.79646254", "0.7961048", "0.7960201", "0.79469573", "0.790868", "0.78954655", "0.7856797", "0.7839892", "0.78348345", "0.78256077", "0.7825244", "0.77851224", "0.77591103", "0.77469015", "0.77453274", "0.77202475", "0.77003115" ]
0.7852928
90
This function resizes the image message received over the head image topic and republishes it to a new topic.
def headImageCallback(msg):
    global headCamImage
    # extract head cam image and update globally
    bridge = CvBridge()
    headCamImage = bridge.imgmsg_to_cv2(msg, "bgr8")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def FlyResize( image, log_mess, nimages, method = Image.ANTIALIAS ):\n oldw, oldh = image.size\n resl = [8, 10, 14, 16, 20, 22, 24, 32, 40, 48, 64, 96, 128, 256]\n \n if oldw > 256 or oldh > 256:\n newsiz = min(resl, key = lambda x:abs(x - max(oldw, oldh)))\n image.thumbnail((newsiz, newsiz), method)\n neww, newh = image.size\n log_mess += ' and new size scaled = %s x %s' %(neww, newh)\n elif nimages > 1:\n log_mess += ' and size = %s x %s' %(oldw, oldh)\n \n return oldw, oldh, image, log_mess", "def mqtt_on_message(client, userdata, msg):\n logging.debug('mqtt message received for topic %s', msg.topic)\n image_queue.put(msg.payload)", "def mqtt_publish(image):\n logging.debug('publishing image to mqtt broker topic %s', \n config['mqtt']['publish_topic'])\n mqtt_client.publish(config['mqtt']['publish_topic'], image)", "def image_bot_callback(msg):\n global img_bot, sub_sampling, img_bot_res\n arr = np.fromstring(msg.data, np.uint8)\n img_bot = cv.resize(cv.imdecode(arr, 1), (0, 0),\n fx=sub_sampling, fy=sub_sampling)\n himg, wimg = img_bot.shape[:2]\n img_bot = cv.resize(img_bot, (int(wimg/3), int(himg/3)))\n img_bot_res = img_bot.copy()", "def _telegram_photo_callback(self, update: Update, _: CallbackContext):\n rospy.logdebug(\"Received image, downloading highest resolution image ...\")\n byte_array = update.message.photo[-1].get_file().download_as_bytearray()\n rospy.logdebug(\"Download complete, publishing ...\")\n\n img = cv2.imdecode(np.asarray(byte_array, dtype=np.uint8), cv2.IMREAD_COLOR)\n msg = self._cv_bridge.cv2_to_imgmsg(img, encoding=\"bgr8\")\n msg.header.stamp = rospy.Time.now()\n\n if self._caption_as_frame_id:\n msg.header.frame_id = update.message.caption\n self._from_telegram_image_publisher.publish(msg)\n\n if update.message.caption:\n self._from_telegram_string_publisher.publish(String(data=update.message.caption))", "def resizeAndRepubThread():\n\n # reference globals\n global primaryCamString\n global secondaryCamString\n global armCamImage\n global headCamImage\n\n # initialize image publishers\n primaryPub = rospy.Publisher(primaryCamRepub, Image, queue_size=1)\n secondaryPub = rospy.Publisher(secondaryCamRepub, Image, queue_size=1)\n\n # create CvBridge object for converting CV2 images to sensor_msgs/Image messages\n backBridge = CvBridge()\n\n while(True):\n primaryImage = np.zeros(shape=[512, 512, 3])\n secondaryImage = np.zeros(shape=[512, 512, 3])\n\n # just keep looping until we get images\n\n if(np.sum(headCamImage) == 0 or np.sum(armCamImage) == 0):\n rospy.loginfo(\"still waiting on camera images...\")\n continue\n\n # get primary image\n if(primaryCamString == \"head\"):\n primaryImage = resizeImage(headCamImage, primarySize)\n elif(primaryCamString == \"leftArm\"):\n primaryImage = resizeImage(armCamImage, primarySize)\n elif(primaryCamString == \"\"):\n pass\n else:\n rospy.logerr(\"Invalid Option for primaryCamString recieved!\")\n\n # get secondary image\n if(secondaryCamString == \"head\"):\n secondaryImage = resizeImage(headCamImage, secondarySize)\n elif(secondaryCamString == \"leftArm\"):\n secondaryImage = resizeImage(armCamImage, secondarySize)\n elif(secondaryCamString == \"\"):\n pass\n else:\n rospy.logerr(\"Invalid Option for secondaryCamString recieved!\")\n\n # publish both new images\n if(np.sum(primaryImage) != 0 and np.sum(secondaryImage) != 0):\n primaryImageMessage = backBridge.cv2_to_imgmsg(primaryImage, \"bgr8\")\n primaryPub.publish(primaryImageMessage)\n\n secondaryImageMessage = backBridge.cv2_to_imgmsg(secondaryImage, 
\"bgr8\")\n secondaryPub.publish(secondaryImageMessage)", "def _image_callback(self, image_msg):\n # type: (Image) -> None\n # Drops old images and cleans up the queue.\n # Still accepts very old images, that are most likely from ROS bags.\n image_age = rospy.get_rostime() - image_msg.header.stamp\n if 1.0 < image_age.to_sec() < 1000.0:\n rospy.logwarn(f\"Vision: Dropped incoming Image-message, because its too old! ({image_age.to_sec()} sec)\",\n logger_name=\"vision\")\n return\n\n if self._transfer_image_msg_mutex.locked():\n return\n\n with self._transfer_image_msg_mutex:\n # Transfer the image to the main thread\n self._transfer_image_msg = image_msg", "def __update_image(self, image_msg: Image):\n self.image = self.bridge.imgmsg_to_cv2(image_msg, desired_encoding='rgb8')\n\n if self.__listener != None:\n self.__listener(self.image,image_msg.header.stamp)", "def image_top_callback(msg):\n global img_top, sub_sampling, img_top_res\n arr = np.fromstring(msg.data, np.uint8)\n img_top = cv.resize(cv.imdecode(arr, 1), (0, 0),\n fx=sub_sampling, fy=sub_sampling)\n himg, wimg = img_top.shape[:2]\n img_top = cv.resize(img_top, (int(wimg/3), int(himg/3)))\n img_top_res = img_top.copy()", "def _ros_image_callback(self, msg: Image):\n cv2_img = self._cv_bridge.imgmsg_to_cv2(msg, \"bgr8\")\n self._telegram_updater.bot.send_photo(\n self._telegram_chat_id,\n photo=BytesIO(cv2.imencode(\".jpg\", cv2_img)[1].tobytes()),\n caption=msg.header.frame_id,\n )", "def process_image(image):\n image = resize(image)\n return image", "def process_image_attachment(image_id):\n image_attachment_model = import_image_attachment()\n image = image_attachment_model.objects.get(pk=image_id)\n image.create_display_size()\n image.create_thumbnail()", "def resize_image(self, nNewSize, nFlags):\n\t\treturn Job(SDK.PrlVmDev_ResizeImage(self.handle, nNewSize, nFlags)[0])", "def resize(img):\n size = (500, 500)\n img.thumbnail(size)\n return img", "def _publish_image(self):\n # only publish if we have a subscriber\n if self._image_pub.get_num_connections() == 0:\n return\n\n # get latest image from cozmo's camera\n camera_image = self._cozmo.world.latest_image\n if camera_image is not None:\n # convert image to gray scale as it is gray although\n img = camera_image.raw_image.convert('L')\n ros_img = Image()\n ros_img.encoding = 'mono8'\n ros_img.width = img.size[0]\n ros_img.height = img.size[1]\n ros_img.step = ros_img.width\n ros_img.data = img.tobytes()\n ros_img.header.frame_id = 'cozmo_camera'\n cozmo_time = camera_image.image_recv_time\n ros_img.header.stamp = rospy.Time.from_sec(cozmo_time)\n # publish images and camera info\n self._image_pub.publish(ros_img)\n camera_info = self._camera_info_manager.getCameraInfo()\n camera_info.header = ros_img.header\n self._camera_info_pub.publish(camera_info)", "def pub_images(self, cv2_img, image_format=\"passthrough\"):\n if self.pub.get_num_connections() > 0:\n try:\n image_msg = self.bridge.cv2_to_imgmsg(cv2_img, image_format)\n self.pub.publish(image_msg)\n except CvBridgeError as e:\n rospy.logerr(\"Error on converting image for publishing: \" +\n str(e) + \" (Is your image_format correct?)\")\n\n if self.pub_compressed.get_num_connections() > 0:\n msg = CompressedImage()\n msg.header.stamp = rospy.Time.now()\n msg.format = \"jpeg\"\n msg.data = np.array(cv2.imencode('.jpg', cv2_img)[1]).tostring()\n self.pub_compressed.publish(msg)", "def raw_image_callback(self, msg):\n if self.pictures_to_take and not self.detection_to_receive:\n self.pictures_to_take -= 1\n # so let's 
analyse it here and then delete the subscription\n rows = msg.height\n step = msg.step\n cols = msg.width\n dim = int(step / cols)\n pixels = msg.data # of size (steps, nrows)\n # save the image (later we will need to analyse it)\n vision_utils.save_picture(pixels, rows, cols, dim, self.name, FOLDER)", "def notifyResized(self, function, **kwargs):\n self._sig_resized.subscribe(function, **kwargs)", "def resizeImage(IMG,IMAGE_SIZE):\n\n RESCALED_IMAGE = skimage.transform.resize(IMG,[IMG.shape[0],IMAGE_SIZE,IMAGE_SIZE])\n return RESCALED_IMAGE", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image_msg = msg", "def _image_callback(self, msg):\n\n try:\n cv_image = self.bridge.imgmsg_to_cv2(msg, \"bgr8\")\n dil_size = self._sliderDil.value()\n eros_size = self._sliderEros.value()\n\t self.cv_image = self._image_widget.calc_bbox(cv_image, dil_size, eros_size)\n self.image = self._image_widget.set_image(cv_image)\n\n if self.save:\n\t\tif self.counter == 5:\n \t self.numImg += 1\n \t self._imgNum_label.setText(str(self.numImg))\n \t self.store_image(self._image_widget.get_image(), self._image_widget.get_bbox(), self.cls_id, self._image_widget.get_mask())\n\t\t self.counter = 0\n\t \telse:\n\t\t self.counter += 1\n except CvBridgeError as e:\n rospy.logerr(e)", "def resize_and_process_image(data: dict, context):\n file_name = data[\"name\"]\n bucket_name = data[\"bucket\"]\n _, temp_local_filename = tempfile.mkstemp(suffix=file_name)\n blob = storage_client.bucket(bucket_name).get_blob(file_name)\n blob_bytes = blob.download_as_bytes()\n output = io.BytesIO(blob_bytes)\n output.seek(0)\n image = Image.open(output)\n print(\"trying to resize image\")\n # resizes image\n resized_image = resize_image(image)\n resized_image.save(fp=temp_local_filename)\n print(\"Image resized\")\n\n # Upload result to second bucket\n print(\"Trying to upload resized image to second bucket\")\n second_bucket_name = os.getenv(\"SECOND_BUCKET\")\n second_bucket = storage_client.bucket(second_bucket_name)\n print(\"second bucket found\")\n new_blob = second_bucket.blob(file_name)\n new_blob.metadata = blob.metadata\n print(\"created new blob\")\n new_blob.upload_from_filename(temp_local_filename)\n print(\"uploaded resized image from file\")\n os.remove(temp_local_filename)", "def send_image(self, path):\n img = cv2.imread(path)\n msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding=\"bgr8\")\n pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)\n pub.publish(msg)\n # Sleep to allow for image to be published.\n # removed by alice\n #rospy.sleep(1)", "def __resize_image(self, img):\n return cv2.resize(img, self.g.img_size, \n interpolation = cv2.INTER_CUBIC)", "async def processing(message: types.Message):\n\n if message.text == 'Bad quality, low time':\n image_size = 128\n elif message.text == 'Medium quality, medium time':\n image_size = 256\n else:\n image_size = 300\n\n await message.answer(text='Style transfering starts. 
'\n 'Wait a bit.',\n reply_markup=types.ReplyKeyboardRemove())\n transform('content.jpg', 'style.jpg', image_size)\n with open('result.jpg', 'rb') as file:\n await message.answer_photo(file, caption='Work is done!')", "def convert_topic_dir(full_topic_dir, full_target_dir, photo_topic):\n warnings.simplefilter('error', Image.DecompressionBombWarning)\n topicfiles = [f for f in listdir(full_topic_dir) if isfile(join(full_topic_dir, f))\n and (f.lower().endswith(\".jpg\") or f.lower().endswith(\".jpeg\"))]\n for topicfile in topicfiles:\n fulltopicfile = join(full_topic_dir, topicfile)\n fulltargetfile = join(full_target_dir, topicfile)\n if not exists_and_newer(fulltargetfile, fulltopicfile):\n print(\" Converting\", topicfile, \": \", end='')\n try:\n im = Image.open(fulltopicfile)\n\n if im._getexif() is not None:\n exif = {\n TAGS[k]: v\n for k, v in im._getexif().items()\n if k in TAGS\n }\n else:\n exif = dict()\n\n if 'Orientation' in exif:\n im = apply_image_rotation_by_exif(im, exif['Orientation'])\n\n if 'ImageDescription' in exif:\n photo_description = exif['ImageDescription']\n photo_description = bytes(photo_description, encoding=\"ansi\", errors=\"ignore\").decode(\"utf-8\", errors=\"ignore\") # PIL reads exif data as ansi not utf-8 strings\n if photo_description.rstrip() == '':\n photo_caption = photo_topic\n else:\n if photo_description.endswith('#'):\n photo_caption = photo_topic + \" - \" + photo_description.rstrip('#')\n else:\n photo_caption = photo_description\n else:\n photo_caption = photo_topic\n\n print(photo_caption)\n im.thumbnail(targetSize, Image.ANTIALIAS)\n im = add_caption_to_image(im, photo_caption)\n im.save(fulltargetfile, \"JPEG\")\n except IOError:\n print(\"cannot create target for '%s'\" % fulltopicfile)\n except AttributeError:\n print(\"Attribute error for '%s'\" % fulltopicfile)\n else:\n print(\" Skipping\", topicfile)", "def new_image_callback(self, new_image_msg):\n self.process_new_frame(\n self.cv_bridge.imgmsg_to_cv2(\n new_image_msg,\n desired_encoding=\"bgr8\"\n )\n )", "def adjust(self, image):\n ...", "def image_cb(self, msg): # incoming image\n self.has_image = True\n self.camera_image = msg", "def make_reply(self,request,nreplies):\n #print(\"DummyPyWorker. Sending client message back\")\n self._log.debug(\"received message with {} parts\".format(len(request)))\n\n if not self.is_model_loaded():\n self._log.debug(\"model not loaded for some reason. 
loading.\")\n\n try:\n import torch\n except:\n raise RuntimeError(\"could not load pytorch!\")\n\n # message pattern: [image_bson,image_bson,...]\n\n nmsgs = len(request)\n nbatches = nmsgs/self.batch_size\n\n if not self._still_processing_msg:\n self._next_msg_id = 0\n\n # turn message pieces into numpy arrays\n img2d_v = []\n sizes = []\n frames_used = []\n rseid_v = []\n for imsg in xrange(self._next_msg_id,nmsgs):\n try:\n compressed_data = str(request[imsg])\n data = zlib.decompress(compressed_data)\n c_run = c_int()\n c_subrun = c_int()\n c_event = c_int()\n c_id = c_int()\n img2d = larcv.json.image2d_from_pystring(data,\n c_run, c_subrun, c_event, c_id )\n except:\n self._log.error(\"Image Data in message part {}\\\n could not be converted\".format(imsg))\n continue\n self._log.debug(\"Image[{}] converted: {}\"\\\n .format(imsg,img2d.meta().dump()))\n\n # check if correct plane!\n if img2d.meta().plane()!=self.plane:\n self._log.debug(\"Image[{}] is the wrong plane!\".format(imsg))\n continue\n\n # check that same size as previous images\n imgsize = (int(img2d.meta().cols()),int(img2d.meta().rows()))\n if len(sizes)==0:\n sizes.append(imgsize)\n elif len(sizes)>0 and imgsize not in sizes:\n self._log.debug(\"Next image a different size. \\\n we do not continue batch.\")\n self._next_msg_id = imsg\n break\n img2d_v.append(img2d)\n frames_used.append(imsg)\n rseid_v.append((c_run.value,c_subrun.value,c_event.value,c_id.value))\n if len(img2d_v)>=self.batch_size:\n self._next_msg_id = imsg+1\n break\n\n\n # convert the images into numpy arrays\n nimgs = len(img2d_v)\n self._log.debug(\"converted msgs into batch of {} images. frames={}\"\n .format(nimgs,frames_used))\n np_dtype = np.float32\n img_batch_np = np.zeros( (nimgs,1,sizes[0][1],sizes[0][0]),\n dtype=np_dtype )\n\n for iimg,img2d in enumerate(img2d_v):\n meta = img2d.meta()\n img2d_np = larcv.as_ndarray( img2d )\\\n .reshape( (1,1,meta.cols(),meta.rows()))\n\n img2d_np=np.transpose(img2d_np,(0,1,3,2))\n img_batch_np[iimg,:] = img2d_np\n\n # print(\"shape of image: \",img2d_np.shape)\n\n\n # now make into torch tensor\n img2d_batch_t = torch.from_numpy( img_batch_np ).to(self.device)\n # out_batch_np = img2d_batch_t.detach().cpu().numpy()\n # out_batch_np=np.transpose(out_batch_np,(0,1,3,2))\n\n print(\"shape of image: \",img2d_batch_t.shape)\n with torch.set_grad_enabled(False):\n out_batch_np = self.model.forward(img2d_batch_t).detach().cpu().numpy()\n out_batch_np=np.transpose(out_batch_np,(0,1,3,2))\n\n\n\n # compression techniques\n ## 1) threshold values to zero\n ## 2) suppress output for non-adc values\n ## 3) use half\n\n # suppress small values\n out_batch_np[ out_batch_np<1.0e-3 ] = 0.0\n\n # threshold\n # for ich in xrange(out_batch_np.shape[1]):\n # out_batch_np[:,ich,:,:][ img_batch_np[:,0,:,:]<10.0 ] = 0.0\n\n # convert back to full precision, if we used half-precision in the net\n\n self._log.debug(\"passed images through net. 
output batch shape={}\"\n .format(out_batch_np.shape))\n # convert from numpy array batch back to image2d and messages\n reply = []\n for iimg in xrange(out_batch_np.shape[0]):\n img2d = img2d_v[iimg]\n rseid = rseid_v[iimg]\n meta = img2d.meta()\n\n out_np = out_batch_np[iimg,0,:,:]\n # print(\"out_np\",type(out_np))\n # print(\"meta\",type(meta))\n out_img2d = larcv.as_image2d_meta( out_np, meta )\n bson = larcv.json.as_pystring( out_img2d,\n rseid[0], rseid[1], rseid[2], rseid[3] )\n compressed = zlib.compress(bson)\n reply.append(compressed)\n\n if self._next_msg_id>=nmsgs:\n isfinal = True\n self._still_processing_msg = False\n else:\n isfinal = False\n self._still_processing_msg = True\n\n self._log.debug(\"formed reply with {} frames. isfinal={}\"\n .format(len(reply),isfinal))\n return reply,isfinal", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image = msg", "def resize_image(first_image, width, height):\n resizing_image = Image()\n temp_image = ((Pil_image.open(first_image.picture)).convert(\"RGB\")).resize((width, height), Pil_image.ANTIALIAS)\n filestream = BytesIO()\n temp_image.save(filestream, 'JPEG', quality=90)\n filestream.seek(0)\n name = f\"{str(first_image.picture).split('.')[0]}_{width}_{height}.{str(first_image.picture).split('.')[1]}\"\n image = InMemoryUploadedFile(filestream, 'picture', name, 'jpeg/image', sys.getsizeof(filestream), None)\n resizing_image.url = first_image.url\n resizing_image.width = width\n resizing_image.height = height\n resizing_image.parent = first_image.id\n resizing_image.picture = image\n resizing_image.save()\n return resizing_image", "def _resize_image_tuple(self, image_tup):\n if self.api_info is None:\n self.get_info() # sets the image size and other such info from server.\n try:\n MIN_SIZE = self.api_info['min_image_size']\n MAX_SIZE = self.api_info['max_image_size']\n img = Image.open(image_tup[0])\n min_dimension = min(img.size)\n max_dimension = max(img.size)\n min_ratio = float(MIN_SIZE) / min_dimension\n max_ratio = float(MAX_SIZE) / max_dimension\n def get_newsize(img, ratio, SIZE):\n if img.size[0] == min_dimension:\n newsize = (SIZE, int(round(ratio * img.size[1])))\n else:\n newsize = (int(round(ratio * img.size[0])), SIZE)\n return newsize\n im_changed = False\n # Only resample if min size is > 512 or < 256\n if max_ratio < 1.0: # downsample to MAX_SIZE\n newsize = get_newsize(img, max_ratio, MAX_SIZE)\n img = img.resize(newsize, Image.BILINEAR)\n im_changed = True\n elif min_ratio > 1.0: # upsample to MIN_SIZE\n newsize = get_newsize(img, min_ratio, MIN_SIZE)\n img = img.resize(newsize, Image.BICUBIC)\n im_changed = True\n else: # no changes needed so rewind file-object.\n image_tup[0].seek(0)\n # Finally make sure we have RGB images.\n if img.mode != \"RGB\":\n img = img.convert(\"RGB\")\n im_changed = True\n if im_changed:\n io = StringIO()\n img.save(io, 'jpeg', quality=IM_QUALITY)\n io.seek(0) # rewind file-object to read() below is good to go.\n image_tup = (io, image_tup[1])\n except IOError, e:\n logger.warning('Could not open image file: %s, still sending to server.', image_tup[1])\n return image_tup", "def __init__(self,\n camera_topic_name=\"/resize_img/image\",\n ): # sub class args\n\n self.camera_topic_name = camera_topic_name\n\n self._run_rate = 1\n self.image_check_interval_s = 5\n self.latest_im_check_time_s = rospy.get_time()\n\n msg_received = False\n # get initial image to generate our pixel sampling location bbased on the image height/width\n while not msg_received and not 
rospy.is_shutdown():\n try:\n image_msg = rospy.wait_for_message(self.camera_topic_name, Image, timeout=20)\n im = bridge.imgmsg_to_cv2(image_msg)\n # get the characteristics of our image\n self.im_height = image_msg.height\n self.im_width = image_msg.width\n # initialise the pixel checking arrays\n self.pixels2check_ver = np.ceil(np.linspace(1, self.im_height-1, 10)).astype(np.uint8)\n self.pixels2check_hor = np.ceil(np.linspace(1, self.im_width-1, 10)).astype(np.uint8)\n self.pixel_vals_this = im[self.pixels2check_ver, self.pixels2check_hor]\n self.pixel_vals_previous = np.clip(self.pixel_vals_this + 10, 0, 255)\n msg_received = True\n except rospy.ROSException as e:\n rospy.logwarn_throttle(5, 'camera watchdog node timed out waiting for image message \\\n - traceback was {}'.format(e))\n # except e:\n # rospy.logwarn(('{} happened'.format(e)))\n\n self.downcam_sub = rospy.Subscriber(self.camera_topic_name, Image, self.downcam_callback, queue_size=5)\n\n ## todo - implement bottom clearance sensor checker - perhaps create another node for this?\n # add flag to set / unset altitude sensor check\n self.altitude_bottom_clearance = Float32()\n self.alt_sub = rospy.Subscriber('mavros/altitude', Altitude, self.altitude_callback, queue_size=5)", "def send_image(self, device_id, image):\n self.logger.debug(f\"{device_id}: sending processed image!\")\n base64_img = base64.b64encode(\n cv2.imencode('.jpg', image)[1].tostring())\n self.socketio.emit(\n \"image\", {\"message\": base64_img}, room=f\"device-{device_id}\")", "def image_resize_shortest_edge(\n img, size: int, channels_last: bool = False\n) -> torch.Tensor:\n img = _to_tensor(img)\n no_batch_dim = len(img.shape) == 3\n if len(img.shape) < 3 or len(img.shape) > 5:\n raise NotImplementedError()\n if no_batch_dim:\n img = img.unsqueeze(0) # Adds a batch dimension\n if channels_last:\n h, w = img.shape[-3:-1]\n if len(img.shape) == 4:\n # NHWC -> NCHW\n img = img.permute(0, 3, 1, 2)\n else:\n # NDHWC -> NDCHW\n img = img.permute(0, 1, 4, 2, 3)\n else:\n # ..HW\n h, w = img.shape[-2:]\n\n # Percentage resize\n scale = size / min(h, w)\n h = int(h * scale)\n w = int(w * scale)\n img = torch.nn.functional.interpolate(\n img.float(), size=(h, w), mode=\"area\"\n ).to(dtype=img.dtype)\n if channels_last:\n if len(img.shape) == 4:\n # NCHW -> NHWC\n img = img.permute(0, 2, 3, 1)\n else:\n # NDCHW -> NDHWC\n img = img.permute(0, 1, 3, 4, 2)\n if no_batch_dim:\n img = img.squeeze(dim=0) # Removes the batch dimension\n return img", "def startNode():\n\n # init node\n rospy.init_node(\"resize_and_repub\")\n rospy.loginfo(\"resize_and_repub node started\")\n\n # setup subcribers\n rospy.Subscriber(leftArmCamTopic, Image, leftArmImageCallback)\n rospy.Subscriber(headCamTopic, Image, headImageCallback)\n rospy.Subscriber(primaryCamTopic, String, primaryCamCallback)\n rospy.Subscriber(secondaryCamTopic, String, secondayCamCallback)\n rospy.loginfo(\"all subscribers initialized, entering publishing loop...\")\n\n # start repub thread\n thread = threading.Thread(target=resizeAndRepubThread)\n thread.start()\n rospy.spin()", "def receive_images(p_image_queue,p_new_im_id_queue, host = \"127.0.0.1\", port = 6200, timeout = 20, VERBOSE = True,worker_num = 0):\n \n # open ZMQ socket\n context = zmq.Context()\n sock = context.socket(zmq.SUB)\n sock.connect(\"tcp://{}:{}\".format(host, port))\n sock.subscribe(b'') # subscribe to all topics on this port (only images)\n \n print (\"w{}: Image receiver thread connected to socket.\".format(worker_num))\n \n # main 
receiving loop\n prev_time = time.time()\n while time.time() - prev_time < timeout:\n try:\n temp = sock.recv_pyobj(zmq.NOBLOCK)\n (name,im) = pickle.loads(temp)\n p_image_queue.put((name,im,time.time())) \n p_new_im_id_queue.put(name)\n prev_time = time.time()\n if VERBOSE: print(\"w{}: Image receiver thread received image {} at {}\".format(worker_num,name,time.ctime(prev_time)))\n except zmq.ZMQError:\n time.sleep(0.1)\n pass\n \n sock.close()\n print (\"w{}: Image receiver thread closed socket.\".format(worker_num))", "def on_process_image(self, img, prefix):\n\t\traise NotImplementedError(\"You need to implement this to tweet to timeline (or pass if you don't want to)!\")", "def resize(self, new_size):\n resized_img = opencv.resize(self.img, new_size)\n return Image(resized_img)", "def image_resize(image):\n print(\"image-resizing2\")\n\n i=0\n height,width = image.shape[:2]\n shape = [height,width]\n if len(image_shape) == 0:\n #print(\"Intial\")\n image_shape.append(shape)\n resized = cv2.resize(image,(int(width*0.2),int(height*0.2)),interpolation=cv2.INTER_CUBIC)\n else:\n for old_shape in image_shape:\n #print(\"second\")\n if old_shape == shape:\n i=0\n break\n else:\n i+=1\n if(i > 0):\n #print(\"third\")\n image_shape.append(shape)\n resized = cv2.resize(image, (int(width * 0.2), int(height * 0.2)), interpolation=cv2.INTER_CUBIC)\n return resized,shape", "def update_image(self, img):\r\n qt_img = self.convert_cv_qt(img)\r\n self.main.caption_feed.setPixmap(qt_img)", "def notifyResize(self, function, **kwargs):\n self._sig_resize.subscribe(function, **kwargs)", "def expect_resize(self, resize):\n resize_xform = images_service_pb.Transform()\n resize_xform.set_width(resize)\n resize_xform.set_height(resize)\n self._images_stub._Resize(mox.IsA(Image.Image),\n resize_xform).AndReturn(self._image)", "def resizeImage(self):\n ratio = float(self.qIma.width()) / float(self.qIma.height())\n if self.qIma.width() > self.qIma.height():\n maxWidth = 300\n maxHeight = int(300 / ratio)\n else:\n maxWidth = int(300 / ratio)\n maxHeight = 300\n img = self.qIma.toImage().scaled(maxWidth, maxHeight, QtCore.Qt.KeepAspectRatio)\n return img", "def showResized(name, image, scale):\n image = resizeImage(image, scale)\n cv.ShowImage(name, image)", "def send_image(path):\n img = cv2.imread(path)\n msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding=\"bgr8\")\n pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)\n pub.publish(msg)\n # Sleep to allow for image to be published.\n rospy.sleep(1)", "def send_image(path):\n img = cv2.imread(path)\n msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding=\"bgr8\")\n pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)\n pub.publish(msg)\n # Sleep to allow for image to be published.\n rospy.sleep(1)", "def image_resize(job_object):\n try:\n job = json.loads(job_object.arg)\n base64_file = job['image']\n args = job['args'] if 'args' in job else {}\n del job['image']\n logging.info(job)\n \n def write_file(local_path,filename,file_b64):\n logging.debug(\"about to save to \" + \"%s/%s\" % (local_path,filename))\n if not os.path.exists(local_path): os.makedirs(local_path)\n image_file = base64.b64decode(file_b64)\n local_file = open(\"%s/%s\" % (local_path,filename), \"w\")\n local_file.write(image_file)\n local_file.close()\n \n def download_file(url,local_path,filename):\n print \"downloading \" + url\n f = urllib2.urlopen(urllib2.Request(url))\n print \"about to save to \" + \"%s/%s\" % (local_path,filename)\n if not 
os.path.exists(local_path): os.makedirs(local_path)\n # Open our local file for writing\n local_file = open(\"%s/%s\" % (local_path,filename), \"w\")\n local_file.write(f.read())\n local_file.close()\n \n local_path = '%s/upload/%s' % (options.asset_root,job['path'])\n local_path_wfile = '%s/%s%s' % (local_path,job['file'],job['extension'])\n filename = '%s%s' % (job['file'],job['extension'])\n #download_file(job['url'],local_path,filename)\n write_file(local_path,filename,base64_file)\n \n def resize_and_save(local_file,new_file,maxsize=None,maxh=None,maxw=None,crop=None):\n \"\"\"Resize the image and save\"\"\"\n logging.debug(\"maxw = %s, maxsize=%s, crop=%s\" % (maxw,maxsize,crop))\n img = Image.open(local_file)\n width,height = img.size\n width,height = float(width), float(height)\n ratio = float(1)\n if crop is not None:\n size = float(maxsize)\n if width <= height and width > size:\n ratio = size/width\n elif height < width and height > size:\n ratio = size/height\n else: \n ratio = 1 # too small\n elif maxsize:\n size = float(maxsize)\n if width >= height and width > size:\n ratio = size/width\n elif height > width and height > size:\n ratio = size/height\n else: \n ratio = 1 # too small\n elif maxh:\n size = maxh\n if height > size:\n ratio = size/height\n else:\n # too small\n ratio = 1\n elif maxw:\n size = maxw\n if width > size:\n ratio = size/width\n else:\n # too small\n ratio = 1\n else:\n raise Exception(\"must specify max width, OR max size\")\n \n print(\"old: ratio = %s: size(x,y) = %s,%s\" % (ratio,width,height))\n height = int(height*ratio)\n width = int(width*ratio)\n print(\"new ratio = %s: size(x,y) = %s,%s\" % (ratio,width,height))\n img = img.resize((width, height),Image.ANTIALIAS)\n if crop is not None:\n log.debug(\"in crop %s\" % crop)\n crop = int(crop)\n if width > crop:\n amt = int((int(width) - crop)/2)\n img = img.crop((amt,0,amt + crop, crop))\n elif height > crop:\n amt = int((int(height) - crop)/2)\n img = img.crop((0,amt,crop,amt+crop))\n \n log.debug(\"saving new file %s\" % new_file)\n if img.mode != \"RGB\":\n img = img.convert(\"RGB\")\n img.save(new_file)\n \n \n if os.path.exists(local_path_wfile):\n if args != {}:\n ext = args['extension'] if 'extension' in args else \"_t\"\n resize_and_save(local_path_wfile,\n '%s/%s%s.jpg' % (local_path,job['file'],ext),\n maxsize=args['maxsize'],\n crop=args['crop'])\n else:\n resize_and_save(local_path_wfile,'%s/%s_t.jpg' % (local_path,job['file']),maxsize=100)\n resize_and_save(local_path_wfile,'%s/%s_m.jpg' % (local_path,job['file']),maxw=317)\n resize_and_save(local_path_wfile,'%s/%s_l.jpg' % (local_path,job['file']),maxsize=800)\n keeptrying = False\n else:\n logging.error(\"haven't found file? 
%s\" % local_path_wfile)\n \n # delete original\n logging.debug(\"About to delete original %s\" % local_path_wfile)\n os.remove(local_path_wfile)\n \n except:\n traceback.print_exc()", "def on_mention_with_image(self, tweet, prefix, image):\n\n\t\t# process image, resulting in a new image and a comment\n\t\timage, status = self.process_image(image, prefix)\n\n\t\t# filename and format for uplaoding\n\t\tfilename, format = \"result.jpg\", \"JPEG\"\n\n\t\t# write image to a StringIO file\n\t\tfile = StringIO.StringIO()\n\t\timage.save(file, format=format)\n\n\t\t# post tweet\n\t\ttry:\n\t\t\tself.post_tweet(status[:140], reply_to=tweet, media=filename, file=file)\n\t\texcept Exception as e:\n\t\t\tprint(e)", "def imgProcessing(self):\n if (self.image_width > 320):\n self.cv_image = imutils.resize(self.cv_image, width = 320)\n else:\n pass\n\n \"\"\" optional -- image-mirrored \"\"\"\n # self.cv_image = cv2.flip(self.cv_image, 1)", "def resize_img(img, STANDARD_SIZE, verbose=False):\n if verbose:\n print \"changing size from %s to %s\" % (str(img.size), str(STANDARD_SIZE))\n img = img.resize(STANDARD_SIZE)\n return img", "def test_transform_image_resize_and_crop_portrait_png(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200), mime_type='PNG')\n self.expect_crop(left_x=0.125, right_x=0.875)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c',\n images_service_pb.OutputSettings.PNG)\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/png'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def handleResize(self):\n pass", "def send_processd_for_prediction(self):\n resized_image = cv2.resize(self.processed_image, (28, 28))\n self.send_proccesd_image_to_ML.emit(resized_image)", "def rescale_image(self, img_file, new_width, new_height, model_path, file_description):\n cwd = os.getcwd()\n self.new_width = new_width\n self.new_height = new_height\n self.extract_file_name(img_file)\n shutil.copy(img_file, os.path.join('utils_dfn/temp', self.file_name_with_ext))\n self.run_padding()\n self.run_dfn(model_path)\n self.restore_to_correct_size(file_description)\n clean()", "def resize(self, old, new):", "def resize_image():\r\n print(\"--- resizing the image ---\")\r\n if args.image:\r\n img = cv2.imread(args.image)\r\n img = cv2.resize(img, (32, 32))\r\n img = np.array(img, dtype=np.uint8)\r\n img = img.reshape(32, 32, 3) / 255\r\n img = np.expand_dims(img, axis=0)\r\n print(\"--- done resizing the image ---\")\r\n return img\r\n else:\r\n print(\"Please enter a path of an image to check for it's label\")", "def update_size(self):\n self.size = self.image.size\n self.width, self.height = self.size", "def resize(self):\n pass", "def resize_image(image, size=(926, 617)):\n\n im = Image.open(image)\n im.convert('RGB')\n im.thumbnail(size)\n thumb_io = BytesIO()\n im.save(thumb_io, 'JPEG', quality=85)\n thumbnail = File(thumb_io, name=image.name)\n return thumbnail", "def resize_image(img: torch.Tensor, new_size: Union[int, Tuple[int, int]], resize_method: str, crop_or_pad_constant: str=CROP_OR_PAD, interpolate_constant: str=INTERPOLATE) ->torch.Tensor:\n new_size = to_tuple(new_size)\n if list(img.shape[-2:]) != list(new_size):\n if resize_method == crop_or_pad_constant:\n return crop_or_pad(img, new_size)\n elif resize_method == interpolate_constant:\n return F.resize(img, new_size)\n raise ValueError(f'Invalid image resize method: {resize_method}')\n return img", "def compress_image_dialogue(update: Update, _: CallbackContext) 
-> int:\n\n update.message.reply_text(\n 'Send image to compress',\n reply_markup=ReplyKeyboardRemove(),\n )\n return COMPRESSION_IMAGE", "def callback(self, data):\n\n # Convert sensor_msgs.msg.Image into OpenDR Image\n image = self.bridge.from_ros_image(data)\n self.ID = self.ID + 1\n # Get an OpenCV image back\n image = np.float32(image.numpy())\n name = str(f\"{self.ID:02d}\"+\"_single.jpg\")\n cv2.imwrite(os.path.join(self.args.path_in, name), image)\n\n if (self.ID == 5):\n # Run SyntheticDataGeneration\n self.synthetic.eval()\n self.ID = 0\n # Annotate image and publish results\n current_directory_path = os.path.join(self.args.save_path, str(\"/Documents_orig/\"))\n for file in os.listdir(current_directory_path):\n name, ext = os.path.splitext(file)\n if ext == \".jpg\":\n image_file_savepath = os.path.join(current_directory_path, file)\n cv_image = cv2.imread(image_file_savepath)\n cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)\n if self.image_publisher is not None:\n image = Image(np.array(cv_image, dtype=np.uint8))\n message = self.bridge.to_ros_image(image, encoding=\"bgr8\")\n self.image_publisher.publish(message)\n for f in os.listdir(self.args.path_in):\n os.remove(os.path.join(self.args.path_in, f))", "def _resize_img(self, results):\n for key in results.get('img_fields', ['img']):\n if self.keep_ratio:\n img, scale_factor = mmcv.imrescale(\n results[key],\n results['scale'],\n return_scale=True,\n interpolation=self.interpolation,\n backend=self.backend)\n # the w_scale and h_scale has minor difference\n # a real fix should be done in the mmcv.imrescale in the future\n new_h, new_w = img.shape[:2]\n h, w = results[key].shape[:2]\n w_scale = new_w / w\n h_scale = new_h / h\n else:\n img, w_scale, h_scale = mmcv.imresize(\n results[key],\n results['scale'],\n return_scale=True,\n interpolation=self.interpolation,\n backend=self.backend)\n results[key] = img\n\n scale_factor = np.array([w_scale, h_scale, w_scale, h_scale],\n dtype=np.float32)\n results['img_shape'] = img.shape\n # in case that there is no padding\n results['pad_shape'] = img.shape\n results['scale_factor'] = scale_factor\n results['keep_ratio'] = self.keep_ratio", "def _resize_image(self, event):\n self.window_width = event.width\n self.window_height = event.height", "def test_resize(dummy_input):\n # Test the 2D image: H, W, C\n image, label = dummy_input(image_size=(512, 512, 3),\n label_size=(512, 512, 1))\n transform = Resize(size=(64, 64))\n _image, _label = transform(image, label, resize_orders=[3, 0])\n assert _image.shape == (64, 64, 3)\n assert _image.dtype == image.dtype\n assert _label.shape == (64, 64, 1)\n assert _label.dtype == label.dtype\n\n # Test the 3D image: H, W, D, C\n image, label = dummy_input(image_size=(512, 512, 20, 3),\n label_size=(512, 512, 20, 1))\n transform = Resize(size=(64, 64, 10))\n _image, _label = transform(image, label, resize_orders=[3, 0])\n assert _image.shape == (64, 64, 10, 3)\n assert _image.dtype == image.dtype\n assert _label.shape == (64, 64, 10, 1)\n assert _label.dtype == label.dtype", "def resize_for_interface(cv_img):\n\n height, width, channels = cv_img.shape\n width_ratio = width / Images.IMAGES_WIDTH\n height_ratio = height / Images.IMAGES_HEIGHT\n\n ratio = max(width_ratio, height_ratio)\n\n if ratio > 1:\n new_size = (int(width / ratio), int(height / ratio))\n return cv2.resize(cv_img, new_size)\n else:\n # No need to resize\n return cv_img", "def resize(self, image_shape_output):\n\n # calculate transformation between original and resized 
images\n corners_orig = Box.get_corners_rect(self.image_shape).astype(np.float32)[:3] # corner coordinates of original image\n corners_resized = Box.get_corners_rect(image_shape_output).astype(np.float32)[:3]# corner coordinates of resized image\n M = cv2.getAffineTransform(corners_orig, corners_resized)\n\n # warp bounding box\n bbox = self.bbox.reshape((-1, 2))\n bbox_resized = Box.get_warped_points(bbox, M)\n # bbox_resized = np.around(bbox_resized).astype(np.int) # cast to int\n bbox_resized = bbox_resized.reshape((-1, 4))\n\n self.bbox = bbox_resized\n self.image_shape = image_shape_output\n\n return", "def resize_profile_pic(sender, instance, **kwargs):\n profile_pic = instance.profile_picture\n if profile_pic.name != \"default.png\":\n img = Image.open(profile_pic.path)\n if img.height > 300 or img.width > 300:\n output_size = (300, 300)\n img.thumbnail(output_size)\n img.save(profile_pic.path)", "def resize_image(data, sz=(256, 256)):\n from PIL import Image as PIL_Image\n\n im = PIL_Image.open(BytesIO(data))\n if im.mode != \"RGB\":\n im = im.convert('RGB')\n imr = im.resize(sz, resample=PIL_Image.BILINEAR)\n fh_im = BytesIO()\n imr.save(fh_im, format='JPEG')\n fh_im.seek(0)\n return bytearray(fh_im.read())", "def resizeImage(self, obj, w, h, conserve_aspect_ration=True):\n image = self.getPILFromObject(obj)\n return self.performResize(image, w, h, conserve_aspect_ration)", "def update_message(self):\n self.msg_image = self.font.render(self.message, True, self.text_color)", "def resize_tensor(tensor, new_shape):\n channels = tensor.shape[0]\n new_tensor = np.zeros(shape=(channels,) + new_shape)\n for i in range(0, channels):\n new_tensor[i] = cv2.resize(tensor[i], dsize=new_shape[::-1])\n\n return new_tensor", "def state_message_received(msg):\n self._last_image = msg.payload", "def body_resize(self):", "def resizeImage(image, newDimension):\r\n image = validateImage(image)\r\n if image is None:\r\n print(\"ERROR - resizeImage: Image is missing.\")\r\n return None\r\n\r\n if not isinstance(newDimension, tuple) or len(newDimension) != image.ndim:\r\n print(\"ERROR - resizeImage: Specified dimension is illegal. 
Dimension=\", len(newDimension), \", ImageDimension=\",\r\n image.ndim)\r\n return None\r\n\r\n return cv2.resize(image, newDimension)", "def _resize_img(self, results):\n for key in results.get('img_fields', ['img']):\n if self.keep_ratio:\n img, scale_factor = general_ocr.imrescale(\n results[key],\n results['scale'],\n return_scale=True,\n backend=self.backend)\n # the w_scale and h_scale has minor difference\n # a real fix should be done in the general_ocr.imrescale in the future\n new_h, new_w = img.shape[:2]\n h, w = results[key].shape[:2]\n w_scale = new_w / w\n h_scale = new_h / h\n else:\n img, w_scale, h_scale = general_ocr.imresize(\n results[key],\n results['scale'],\n return_scale=True,\n backend=self.backend)\n results[key] = img\n\n scale_factor = np.array([w_scale, h_scale, w_scale, h_scale],\n dtype=np.float32)\n results['img_shape'] = img.shape\n # in case that there is no padding\n results['pad_shape'] = img.shape\n results['scale_factor'] = scale_factor\n results['keep_ratio'] = self.keep_ratio", "def cs4243_resize(image, new_width, new_height):\n new_image = np.zeros((new_height, new_width, 3), dtype='uint8')\n if len(image.shape)==2:\n new_image = np.zeros((new_height, new_width), dtype='uint8')\n ###Your code here###\n \n # if new_width < 0 or new_height < 0, np.zeros() will throw a ValueError.\n \n # if new_width == 0 or new_height == 0, we won't need to do any calculation.\n if new_width == 0 or new_height == 0:\n return new_image\n \n # resizing algorithm taken from\n # https://tech-algorithm.com/articles/nearest-neighbor-image-scaling/\n height, width = image.shape[0], image.shape[1]\n w_ratio = int(((width << 16) / new_width) + 1)\n h_ratio = int(((height << 16) / new_height) + 1)\n \n for h in range(new_height):\n for w in range(new_width):\n pw = int((w*w_ratio) >> 16)\n ph = int((h*h_ratio) >> 16)\n new_image[h,w] = image[ph, pw]\n ###\n return new_image", "def _prep_msg(self, msg):\r\n self.msg_image = self.font.render(\r\n msg, True, self.text_color, self.button_color)\r\n self.msg_image_rect = self.msg_image.get_rect()\r\n self.msg_image_rect.center = self.rect.center", "def _resize_image(self, filename, resize_source):\n from PIL import Image, ImageOps\n img = Image.open(filename)\n img = ImageOps.fit(img, resize_source['size'], Image.ANTIALIAS)\n try:\n img.save(filename, optimize=1)\n except IOError:\n img.save(filename)", "def whole_inference(self, img, img_meta, rescale):\n if not isinstance(img_meta, list):\n img_meta = img_meta.data\n seg_logit, gen_out = self.encode_decode(img, img_meta)\n if rescale:\n seg_logit = resize(\n seg_logit,\n size=img_meta[0]['ori_shape'][:2],\n mode='bilinear',\n align_corners=self.align_corners,\n warning=False)\n\n gen_out = resize(\n gen_out,\n size=img_meta[0]['ori_shape'][:2],\n mode='bilinear',\n align_corners=self.align_corners,\n warning=False)\n\n return seg_logit, gen_out", "def camera_cb(self, msg):\n #rospy.loginfo(\"Received new image\")\n\n try:\n image = self.bridge.imgmsg_to_cv2(msg, \"bgr8\")\n except CvBridgeError as e:\n rospy.logerr(e)\n return\n\n self.image = cv2.flip(image, -1)", "def send_jpg_frame_REP_watcher(self, text, image):\n\n ret_code, jpg_buffer = cv2.imencode(\n \".jpg\", image, [int(cv2.IMWRITE_JPEG_QUALITY),\n self.jpeg_quality])\n self.REQ_sent_time.append(datetime.utcnow()) # utcnow 2x faster than now\n try:\n hub_reply = self.sender.send_jpg(text, jpg_buffer)\n except: # add more specific exception, e.g. 
ZMQError, after testing\n print(\"Exception at sender.send_jpg in REP_watcher function.\")\n self. fix_comm_link()\n self.REP_recd_time.append(datetime.utcnow())\n return hub_reply", "def test_im_file_resize(self):\n self._test_img_resize(IMBackend())", "def image_callback(img_msg):\n bridge = CvBridge()\n try:\n # Convert from sensor_msgs::Image to cv::Mat\n \tcv_image = bridge.imgmsg_to_cv2(img_msg, desired_encoding=\"passthrough\")\n \t# Access global variable and store image as numpy.array\n \tglobal _last_image\n \t_last_image = np.asarray(cv_image)\n except CvBridgeError as ex:\n\tprint ex", "def read_image_and_resize(image_path: str,\n new_WH: Tuple[int, int]=(512, 512),\n save_dir: str=\"resize\") -> str:\n assert os.path.exists(save_dir) is True\n new_path = os.path.join(save_dir, os.path.basename(image_path))\n image = cv2.imread(image_path)\n image = cv2.resize(image, new_WH, interpolation=cv2.INTER_AREA)\n cv2.imwrite(new_path, image)\n\n return image_path", "def resize(\n self,\n image: np.ndarray,\n size: Dict[str, int],\n resample: PILImageResampling = PILImageResampling.BICUBIC,\n data_format: Optional[Union[str, ChannelDimension]] = None,\n input_data_format: Optional[Union[str, ChannelDimension]] = None,\n **kwargs,\n ) -> np.ndarray:\n size = get_size_dict(size)\n if \"longest_edge\" not in size:\n raise ValueError(f\"The `size` dictionary must contain the key `longest_edge`. Got {size.keys()}\")\n input_size = get_image_size(image, channel_dim=input_data_format)\n output_height, output_width = self._get_preprocess_shape(input_size, size[\"longest_edge\"])\n return resize(\n image,\n size=(output_height, output_width),\n resample=resample,\n data_format=data_format,\n input_data_format=input_data_format,\n **kwargs,\n )", "def compressed_image_response(update: Update, _: CallbackContext) -> int:\n IMAGE_TO_COMPRESS['image'].save(IMAGE_TO_COMPRESS['compressed'], optimize=True, quality=int(update.message.text))\n IMAGE_TO_COMPRESS['compressed'].seek(0)\n update.message.reply_photo(\n photo=IMAGE_TO_COMPRESS['compressed']\n )\n return SELECT_TYPE", "def decode_and_resize(image_str_tensor, size):\n \n # Output a grayscale (channels=1) image\n image = tf.image.decode_jpeg(image_str_tensor, channels=3)\n \n # Note resize expects a batch_size, but tf_map supresses that index,\n # thus we have to expand then squeeze. 
Resize returns float32 in the\n # range [0, uint8_max]\n image = tf.expand_dims(image, 0)\n # image = tf.image.resize_bilinear(\n # image, [size, size], align_corners=False)\n image = tf.squeeze(image, squeeze_dims=[0])\n image = tf.cast(image, dtype=tf.uint8)\n return image", "def process(self, image, annotation_meta=None):\n # image dasta stored inside DataRepresentation in data field\n data = image.data\n # internally we work with numpy arrays, so we need to convert it to pillow image object for making resize\n resized_data = Image.fromarray(data).resize((self.size, self.size), Image.ANTIALIAS)\n # return back data to numpy array\n data = np.array(resized_data)\n # expand dims for gray scale image\n if len(data.shape) == 2:\n data = np.expand_dims(data, axis=-1)\n image.data = data\n # return updated DataRepresentation\n return image", "def process(self):\n self.output_image = cv.resize(\n self.input_image,\n (self.WIDTH, self.HEIHGT),\n )\n return self.output_image", "def fileResizeObscure(new_filepath):\n # Resize\n img1 = Image.open(new_filepath)\n img2=image_reduce(img1)\n *** Stopped working here\n newpath=\"toupload\\\\%s\" % new_filepath\n # Block ID\n width=img2.size[0]\n height=img2.size[1]\n # Obscuring params were decided by trial and error using fraction of width and height\n x1=int(0.16*width)\n x2=int(0.28*width)\n y1=int(0.94*height)\n y2=int(0.98*height) \n # Faster but easier to snoop? should not be since it changes the pixels\n draw = ImageDraw.Draw(img2)\n draw.rectangle([(x1,y1),(x2,y2)],fill=\"white\")\n del draw\n \n img2.save(newpath,optimize=True,quality=95)", "def _test_img_resize(self, backend):\n # Check quality setting unaffected by new parameter\n im_95_qual = backend.resize(\n 225,\n self.IMG_225x225,\n quality=95,\n max_filesize=0,\n )\n # check valid path returned - max_filesize hasn't broken resize command\n self.assertExists(im_95_qual)\n\n # Attempt a lower filesize with same quality\n im_a = backend.resize(\n 225,\n self.IMG_225x225,\n quality=95,\n max_filesize=0.9 * os.stat(syspath(im_95_qual)).st_size,\n )\n self.assertExists(im_a)\n # target size was achieved\n self.assertLess(os.stat(syspath(im_a)).st_size,\n os.stat(syspath(im_95_qual)).st_size)\n\n # Attempt with lower initial quality\n im_75_qual = backend.resize(\n 225,\n self.IMG_225x225,\n quality=75,\n max_filesize=0,\n )\n self.assertExists(im_75_qual)\n\n im_b = backend.resize(\n 225,\n self.IMG_225x225,\n quality=95,\n max_filesize=0.9 * os.stat(syspath(im_75_qual)).st_size,\n )\n self.assertExists(im_b)\n # Check high (initial) quality still gives a smaller filesize\n self.assertLess(os.stat(syspath(im_b)).st_size,\n os.stat(syspath(im_75_qual)).st_size)", "def callback(self,data):\n self.cvtImage(data)\n\n \"\"\" Do some image processing; flip, resize, and etc\"\"\"\n self.imgProcessing()\n\n \"\"\" displaying an OpenCV image \"\"\"\n cv2.imshow(self.cv_window_name, self.cv_image)\n cv2.waitKey(1)\n# ------------------------------------------------------------------------------\n\n try:\n \"\"\" coverting the uint8 OpenCV image to ROS image data \"\"\"\n \"\"\" Publisher.publish() -- explicit way \"\"\"\n self.image_pub.publish(self.bridge.cv2_to_imgmsg(self.cv_image, \"bgr8\"))\n except CvBridgeError as e:\n print(e)", "def test_run_resize(self):\n self.expect_datatore_lookup('SomeBlobKey', True)\n self.expect_open_image('SomeBlobKey', (1600, 1200))\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32')\n self.mox.ReplayAll()\n self._environ['PATH_INFO'] += '=s32'\n 
self.run_request('image/jpeg', 'SomeImageSize32')", "def image_cb(self, msg):\n rospy.logdebug(\"TLDetector.image_cb\")\n self.__has_image = True\n self.__camera_image = msg\n\n cv_image = self.__bridge.imgmsg_to_cv2(msg, \"bgr8\")\n light_wp, state = self.__process_traffic_lights()\n if self.__mode == LABEL_MODE and not self.__classification_done and state != 4:\n self.__classification_done = self.__light_classifier.save_image(\n cv_image, state\n )\n if self.__classification_done:\n rospy.loginfo(\"TLDetector.image_cb: Done generating labels.\")\n\n \"\"\"\n Publish upcoming red lights at camera frequency.\n Each predicted state has to occur `STATE_COUNT_THRESHOLD` number\n of times till we start using it. Otherwise the previous stable state is\n used.\n \"\"\"\n self.__publish_traffic_light_state(light_wp, state)", "def to_ros(img):\n\n # Everything ok, convert PIL.Image to ROS and return it\n if img.mode == 'P':\n img = img.convert('RGB')\n\n rosimage = ImageMsg()\n rosimage.encoding = ImageConverter._ENCODINGMAP_PY_TO_ROS[img.mode]\n (rosimage.width, rosimage.height) = img.size\n rosimage.step = (ImageConverter._PIL_MODE_CHANNELS[img.mode]\n * rosimage.width)\n rosimage.data = img.tobytes()\n return rosimage", "def prep_msg(self,msg):\n self.msg_image = self.font.render(msg,True,self.text_color,self.button_color)\n self.msg_image_rect = self.msg_image.get_rect()\n self.msg_image_rect.center = self.rect.center", "def resize(im: Image) -> Image:\n size_factor = random.uniform(*ImageOperations.config.get('resize_interval'))\n return im.resize((\n int(round(im.height * size_factor, 0)),\n int(round(im.width * size_factor, 0))\n ))", "def test_transform_image_resize_and_crop_portrait(self):\n self.expect_open_image('SomeBlobKey', (148, 215))\n self.expect_crop(top_y=0.0, bottom_y=0.68837209302325575)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()" ]
[ "0.6290891", "0.6252121", "0.62168384", "0.60811555", "0.60775405", "0.6049448", "0.5981075", "0.58995944", "0.5890728", "0.572378", "0.56443006", "0.5638707", "0.56351453", "0.5631245", "0.5589404", "0.55732054", "0.5532038", "0.54880667", "0.54583186", "0.54128087", "0.53943235", "0.53802806", "0.53721184", "0.5360166", "0.5342123", "0.5341706", "0.5339002", "0.5326002", "0.5324073", "0.5316872", "0.5305081", "0.5299874", "0.529895", "0.5273231", "0.5257569", "0.52458227", "0.52360445", "0.52009845", "0.5196035", "0.51854056", "0.51716965", "0.51668644", "0.51626676", "0.5151049", "0.5142402", "0.5134827", "0.5131902", "0.5131902", "0.51257986", "0.51090753", "0.5100978", "0.5099391", "0.5093058", "0.5080492", "0.50778365", "0.5066332", "0.5063441", "0.50629103", "0.50617915", "0.50617695", "0.50600195", "0.50451213", "0.50440115", "0.5043652", "0.5043061", "0.504298", "0.50228286", "0.50165", "0.50159675", "0.50139534", "0.5003769", "0.49993798", "0.4996602", "0.49892104", "0.49836114", "0.4979096", "0.49788246", "0.49781305", "0.49704292", "0.49671263", "0.4966457", "0.49660608", "0.4965338", "0.4952658", "0.4952082", "0.49444357", "0.49442613", "0.49438304", "0.49312097", "0.4925971", "0.49216574", "0.49159217", "0.49111646", "0.49110425", "0.49105534", "0.49087363", "0.490857", "0.49066934", "0.49029985", "0.49027872", "0.49015117" ]
0.0
-1
This function resizes the image message received over the left arm image topic and republishes it to a new topic.
def leftArmImageCallback(msg):
    global armCamImage
    # extract arm cam image and update globally
    bridge = CvBridge()
    armCamImage = bridge.imgmsg_to_cv2(msg, "bgr8")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def FlyResize( image, log_mess, nimages, method = Image.ANTIALIAS ):\n oldw, oldh = image.size\n resl = [8, 10, 14, 16, 20, 22, 24, 32, 40, 48, 64, 96, 128, 256]\n \n if oldw > 256 or oldh > 256:\n newsiz = min(resl, key = lambda x:abs(x - max(oldw, oldh)))\n image.thumbnail((newsiz, newsiz), method)\n neww, newh = image.size\n log_mess += ' and new size scaled = %s x %s' %(neww, newh)\n elif nimages > 1:\n log_mess += ' and size = %s x %s' %(oldw, oldh)\n \n return oldw, oldh, image, log_mess", "def resizeAndRepubThread():\n\n # reference globals\n global primaryCamString\n global secondaryCamString\n global armCamImage\n global headCamImage\n\n # initialize image publishers\n primaryPub = rospy.Publisher(primaryCamRepub, Image, queue_size=1)\n secondaryPub = rospy.Publisher(secondaryCamRepub, Image, queue_size=1)\n\n # create CvBridge object for converting CV2 images to sensor_msgs/Image messages\n backBridge = CvBridge()\n\n while(True):\n primaryImage = np.zeros(shape=[512, 512, 3])\n secondaryImage = np.zeros(shape=[512, 512, 3])\n\n # just keep looping until we get images\n\n if(np.sum(headCamImage) == 0 or np.sum(armCamImage) == 0):\n rospy.loginfo(\"still waiting on camera images...\")\n continue\n\n # get primary image\n if(primaryCamString == \"head\"):\n primaryImage = resizeImage(headCamImage, primarySize)\n elif(primaryCamString == \"leftArm\"):\n primaryImage = resizeImage(armCamImage, primarySize)\n elif(primaryCamString == \"\"):\n pass\n else:\n rospy.logerr(\"Invalid Option for primaryCamString recieved!\")\n\n # get secondary image\n if(secondaryCamString == \"head\"):\n secondaryImage = resizeImage(headCamImage, secondarySize)\n elif(secondaryCamString == \"leftArm\"):\n secondaryImage = resizeImage(armCamImage, secondarySize)\n elif(secondaryCamString == \"\"):\n pass\n else:\n rospy.logerr(\"Invalid Option for secondaryCamString recieved!\")\n\n # publish both new images\n if(np.sum(primaryImage) != 0 and np.sum(secondaryImage) != 0):\n primaryImageMessage = backBridge.cv2_to_imgmsg(primaryImage, \"bgr8\")\n primaryPub.publish(primaryImageMessage)\n\n secondaryImageMessage = backBridge.cv2_to_imgmsg(secondaryImage, \"bgr8\")\n secondaryPub.publish(secondaryImageMessage)", "def _image_callback(self, image_msg):\n # type: (Image) -> None\n # Drops old images and cleans up the queue.\n # Still accepts very old images, that are most likely from ROS bags.\n image_age = rospy.get_rostime() - image_msg.header.stamp\n if 1.0 < image_age.to_sec() < 1000.0:\n rospy.logwarn(f\"Vision: Dropped incoming Image-message, because its too old! 
({image_age.to_sec()} sec)\",\n logger_name=\"vision\")\n return\n\n if self._transfer_image_msg_mutex.locked():\n return\n\n with self._transfer_image_msg_mutex:\n # Transfer the image to the main thread\n self._transfer_image_msg = image_msg", "def mqtt_on_message(client, userdata, msg):\n logging.debug('mqtt message received for topic %s', msg.topic)\n image_queue.put(msg.payload)", "def image_bot_callback(msg):\n global img_bot, sub_sampling, img_bot_res\n arr = np.fromstring(msg.data, np.uint8)\n img_bot = cv.resize(cv.imdecode(arr, 1), (0, 0),\n fx=sub_sampling, fy=sub_sampling)\n himg, wimg = img_bot.shape[:2]\n img_bot = cv.resize(img_bot, (int(wimg/3), int(himg/3)))\n img_bot_res = img_bot.copy()", "def mqtt_publish(image):\n logging.debug('publishing image to mqtt broker topic %s', \n config['mqtt']['publish_topic'])\n mqtt_client.publish(config['mqtt']['publish_topic'], image)", "def __update_image(self, image_msg: Image):\n self.image = self.bridge.imgmsg_to_cv2(image_msg, desired_encoding='rgb8')\n\n if self.__listener != None:\n self.__listener(self.image,image_msg.header.stamp)", "def image_top_callback(msg):\n global img_top, sub_sampling, img_top_res\n arr = np.fromstring(msg.data, np.uint8)\n img_top = cv.resize(cv.imdecode(arr, 1), (0, 0),\n fx=sub_sampling, fy=sub_sampling)\n himg, wimg = img_top.shape[:2]\n img_top = cv.resize(img_top, (int(wimg/3), int(himg/3)))\n img_top_res = img_top.copy()", "def _ros_image_callback(self, msg: Image):\n cv2_img = self._cv_bridge.imgmsg_to_cv2(msg, \"bgr8\")\n self._telegram_updater.bot.send_photo(\n self._telegram_chat_id,\n photo=BytesIO(cv2.imencode(\".jpg\", cv2_img)[1].tobytes()),\n caption=msg.header.frame_id,\n )", "def resize_image(self, nNewSize, nFlags):\n\t\treturn Job(SDK.PrlVmDev_ResizeImage(self.handle, nNewSize, nFlags)[0])", "def raw_image_callback(self, msg):\n if self.pictures_to_take and not self.detection_to_receive:\n self.pictures_to_take -= 1\n # so let's analyse it here and then delete the subscription\n rows = msg.height\n step = msg.step\n cols = msg.width\n dim = int(step / cols)\n pixels = msg.data # of size (steps, nrows)\n # save the image (later we will need to analyse it)\n vision_utils.save_picture(pixels, rows, cols, dim, self.name, FOLDER)", "def receive_images(p_image_queue,p_new_im_id_queue, host = \"127.0.0.1\", port = 6200, timeout = 20, VERBOSE = True,worker_num = 0):\n \n # open ZMQ socket\n context = zmq.Context()\n sock = context.socket(zmq.SUB)\n sock.connect(\"tcp://{}:{}\".format(host, port))\n sock.subscribe(b'') # subscribe to all topics on this port (only images)\n \n print (\"w{}: Image receiver thread connected to socket.\".format(worker_num))\n \n # main receiving loop\n prev_time = time.time()\n while time.time() - prev_time < timeout:\n try:\n temp = sock.recv_pyobj(zmq.NOBLOCK)\n (name,im) = pickle.loads(temp)\n p_image_queue.put((name,im,time.time())) \n p_new_im_id_queue.put(name)\n prev_time = time.time()\n if VERBOSE: print(\"w{}: Image receiver thread received image {} at {}\".format(worker_num,name,time.ctime(prev_time)))\n except zmq.ZMQError:\n time.sleep(0.1)\n pass\n \n sock.close()\n print (\"w{}: Image receiver thread closed socket.\".format(worker_num))", "def _image_callback(self, msg):\n\n try:\n cv_image = self.bridge.imgmsg_to_cv2(msg, \"bgr8\")\n dil_size = self._sliderDil.value()\n eros_size = self._sliderEros.value()\n\t self.cv_image = self._image_widget.calc_bbox(cv_image, dil_size, eros_size)\n self.image = self._image_widget.set_image(cv_image)\n\n if 
self.save:\n\t\tif self.counter == 5:\n \t self.numImg += 1\n \t self._imgNum_label.setText(str(self.numImg))\n \t self.store_image(self._image_widget.get_image(), self._image_widget.get_bbox(), self.cls_id, self._image_widget.get_mask())\n\t\t self.counter = 0\n\t \telse:\n\t\t self.counter += 1\n except CvBridgeError as e:\n rospy.logerr(e)", "def process_image(image):\n image = resize(image)\n return image", "def resize(self, image_shape_output):\n\n # calculate transformation between original and resized images\n corners_orig = Box.get_corners_rect(self.image_shape).astype(np.float32)[:3] # corner coordinates of original image\n corners_resized = Box.get_corners_rect(image_shape_output).astype(np.float32)[:3]# corner coordinates of resized image\n M = cv2.getAffineTransform(corners_orig, corners_resized)\n\n # warp bounding box\n bbox = self.bbox.reshape((-1, 2))\n bbox_resized = Box.get_warped_points(bbox, M)\n # bbox_resized = np.around(bbox_resized).astype(np.int) # cast to int\n bbox_resized = bbox_resized.reshape((-1, 4))\n\n self.bbox = bbox_resized\n self.image_shape = image_shape_output\n\n return", "def adjust(self, image):\n ...", "def _telegram_photo_callback(self, update: Update, _: CallbackContext):\n rospy.logdebug(\"Received image, downloading highest resolution image ...\")\n byte_array = update.message.photo[-1].get_file().download_as_bytearray()\n rospy.logdebug(\"Download complete, publishing ...\")\n\n img = cv2.imdecode(np.asarray(byte_array, dtype=np.uint8), cv2.IMREAD_COLOR)\n msg = self._cv_bridge.cv2_to_imgmsg(img, encoding=\"bgr8\")\n msg.header.stamp = rospy.Time.now()\n\n if self._caption_as_frame_id:\n msg.header.frame_id = update.message.caption\n self._from_telegram_image_publisher.publish(msg)\n\n if update.message.caption:\n self._from_telegram_string_publisher.publish(String(data=update.message.caption))", "def make_reply(self,request,nreplies):\n #print(\"DummyPyWorker. Sending client message back\")\n self._log.debug(\"received message with {} parts\".format(len(request)))\n\n if not self.is_model_loaded():\n self._log.debug(\"model not loaded for some reason. loading.\")\n\n try:\n import torch\n except:\n raise RuntimeError(\"could not load pytorch!\")\n\n # message pattern: [image_bson,image_bson,...]\n\n nmsgs = len(request)\n nbatches = nmsgs/self.batch_size\n\n if not self._still_processing_msg:\n self._next_msg_id = 0\n\n # turn message pieces into numpy arrays\n img2d_v = []\n sizes = []\n frames_used = []\n rseid_v = []\n for imsg in xrange(self._next_msg_id,nmsgs):\n try:\n compressed_data = str(request[imsg])\n data = zlib.decompress(compressed_data)\n c_run = c_int()\n c_subrun = c_int()\n c_event = c_int()\n c_id = c_int()\n img2d = larcv.json.image2d_from_pystring(data,\n c_run, c_subrun, c_event, c_id )\n except:\n self._log.error(\"Image Data in message part {}\\\n could not be converted\".format(imsg))\n continue\n self._log.debug(\"Image[{}] converted: {}\"\\\n .format(imsg,img2d.meta().dump()))\n\n # check if correct plane!\n if img2d.meta().plane()!=self.plane:\n self._log.debug(\"Image[{}] is the wrong plane!\".format(imsg))\n continue\n\n # check that same size as previous images\n imgsize = (int(img2d.meta().cols()),int(img2d.meta().rows()))\n if len(sizes)==0:\n sizes.append(imgsize)\n elif len(sizes)>0 and imgsize not in sizes:\n self._log.debug(\"Next image a different size. 
\\\n we do not continue batch.\")\n self._next_msg_id = imsg\n break\n img2d_v.append(img2d)\n frames_used.append(imsg)\n rseid_v.append((c_run.value,c_subrun.value,c_event.value,c_id.value))\n if len(img2d_v)>=self.batch_size:\n self._next_msg_id = imsg+1\n break\n\n\n # convert the images into numpy arrays\n nimgs = len(img2d_v)\n self._log.debug(\"converted msgs into batch of {} images. frames={}\"\n .format(nimgs,frames_used))\n np_dtype = np.float32\n img_batch_np = np.zeros( (nimgs,1,sizes[0][1],sizes[0][0]),\n dtype=np_dtype )\n\n for iimg,img2d in enumerate(img2d_v):\n meta = img2d.meta()\n img2d_np = larcv.as_ndarray( img2d )\\\n .reshape( (1,1,meta.cols(),meta.rows()))\n\n img2d_np=np.transpose(img2d_np,(0,1,3,2))\n img_batch_np[iimg,:] = img2d_np\n\n # print(\"shape of image: \",img2d_np.shape)\n\n\n # now make into torch tensor\n img2d_batch_t = torch.from_numpy( img_batch_np ).to(self.device)\n # out_batch_np = img2d_batch_t.detach().cpu().numpy()\n # out_batch_np=np.transpose(out_batch_np,(0,1,3,2))\n\n print(\"shape of image: \",img2d_batch_t.shape)\n with torch.set_grad_enabled(False):\n out_batch_np = self.model.forward(img2d_batch_t).detach().cpu().numpy()\n out_batch_np=np.transpose(out_batch_np,(0,1,3,2))\n\n\n\n # compression techniques\n ## 1) threshold values to zero\n ## 2) suppress output for non-adc values\n ## 3) use half\n\n # suppress small values\n out_batch_np[ out_batch_np<1.0e-3 ] = 0.0\n\n # threshold\n # for ich in xrange(out_batch_np.shape[1]):\n # out_batch_np[:,ich,:,:][ img_batch_np[:,0,:,:]<10.0 ] = 0.0\n\n # convert back to full precision, if we used half-precision in the net\n\n self._log.debug(\"passed images through net. output batch shape={}\"\n .format(out_batch_np.shape))\n # convert from numpy array batch back to image2d and messages\n reply = []\n for iimg in xrange(out_batch_np.shape[0]):\n img2d = img2d_v[iimg]\n rseid = rseid_v[iimg]\n meta = img2d.meta()\n\n out_np = out_batch_np[iimg,0,:,:]\n # print(\"out_np\",type(out_np))\n # print(\"meta\",type(meta))\n out_img2d = larcv.as_image2d_meta( out_np, meta )\n bson = larcv.json.as_pystring( out_img2d,\n rseid[0], rseid[1], rseid[2], rseid[3] )\n compressed = zlib.compress(bson)\n reply.append(compressed)\n\n if self._next_msg_id>=nmsgs:\n isfinal = True\n self._still_processing_msg = False\n else:\n isfinal = False\n self._still_processing_msg = True\n\n self._log.debug(\"formed reply with {} frames. 
isfinal={}\"\n .format(len(reply),isfinal))\n return reply,isfinal", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image_msg = msg", "def new_image_callback(self, new_image_msg):\n self.process_new_frame(\n self.cv_bridge.imgmsg_to_cv2(\n new_image_msg,\n desired_encoding=\"bgr8\"\n )\n )", "def startNode():\n\n # init node\n rospy.init_node(\"resize_and_repub\")\n rospy.loginfo(\"resize_and_repub node started\")\n\n # setup subcribers\n rospy.Subscriber(leftArmCamTopic, Image, leftArmImageCallback)\n rospy.Subscriber(headCamTopic, Image, headImageCallback)\n rospy.Subscriber(primaryCamTopic, String, primaryCamCallback)\n rospy.Subscriber(secondaryCamTopic, String, secondayCamCallback)\n rospy.loginfo(\"all subscribers initialized, entering publishing loop...\")\n\n # start repub thread\n thread = threading.Thread(target=resizeAndRepubThread)\n thread.start()\n rospy.spin()", "def __init__(self,\n camera_topic_name=\"/resize_img/image\",\n ): # sub class args\n\n self.camera_topic_name = camera_topic_name\n\n self._run_rate = 1\n self.image_check_interval_s = 5\n self.latest_im_check_time_s = rospy.get_time()\n\n msg_received = False\n # get initial image to generate our pixel sampling location bbased on the image height/width\n while not msg_received and not rospy.is_shutdown():\n try:\n image_msg = rospy.wait_for_message(self.camera_topic_name, Image, timeout=20)\n im = bridge.imgmsg_to_cv2(image_msg)\n # get the characteristics of our image\n self.im_height = image_msg.height\n self.im_width = image_msg.width\n # initialise the pixel checking arrays\n self.pixels2check_ver = np.ceil(np.linspace(1, self.im_height-1, 10)).astype(np.uint8)\n self.pixels2check_hor = np.ceil(np.linspace(1, self.im_width-1, 10)).astype(np.uint8)\n self.pixel_vals_this = im[self.pixels2check_ver, self.pixels2check_hor]\n self.pixel_vals_previous = np.clip(self.pixel_vals_this + 10, 0, 255)\n msg_received = True\n except rospy.ROSException as e:\n rospy.logwarn_throttle(5, 'camera watchdog node timed out waiting for image message \\\n - traceback was {}'.format(e))\n # except e:\n # rospy.logwarn(('{} happened'.format(e)))\n\n self.downcam_sub = rospy.Subscriber(self.camera_topic_name, Image, self.downcam_callback, queue_size=5)\n\n ## todo - implement bottom clearance sensor checker - perhaps create another node for this?\n # add flag to set / unset altitude sensor check\n self.altitude_bottom_clearance = Float32()\n self.alt_sub = rospy.Subscriber('mavros/altitude', Altitude, self.altitude_callback, queue_size=5)", "def resize_and_process_image(data: dict, context):\n file_name = data[\"name\"]\n bucket_name = data[\"bucket\"]\n _, temp_local_filename = tempfile.mkstemp(suffix=file_name)\n blob = storage_client.bucket(bucket_name).get_blob(file_name)\n blob_bytes = blob.download_as_bytes()\n output = io.BytesIO(blob_bytes)\n output.seek(0)\n image = Image.open(output)\n print(\"trying to resize image\")\n # resizes image\n resized_image = resize_image(image)\n resized_image.save(fp=temp_local_filename)\n print(\"Image resized\")\n\n # Upload result to second bucket\n print(\"Trying to upload resized image to second bucket\")\n second_bucket_name = os.getenv(\"SECOND_BUCKET\")\n second_bucket = storage_client.bucket(second_bucket_name)\n print(\"second bucket found\")\n new_blob = second_bucket.blob(file_name)\n new_blob.metadata = blob.metadata\n print(\"created new blob\")\n new_blob.upload_from_filename(temp_local_filename)\n print(\"uploaded resized image from file\")\n 
os.remove(temp_local_filename)", "def image_resize(image):\n print(\"image-resizing2\")\n\n i=0\n height,width = image.shape[:2]\n shape = [height,width]\n if len(image_shape) == 0:\n #print(\"Intial\")\n image_shape.append(shape)\n resized = cv2.resize(image,(int(width*0.2),int(height*0.2)),interpolation=cv2.INTER_CUBIC)\n else:\n for old_shape in image_shape:\n #print(\"second\")\n if old_shape == shape:\n i=0\n break\n else:\n i+=1\n if(i > 0):\n #print(\"third\")\n image_shape.append(shape)\n resized = cv2.resize(image, (int(width * 0.2), int(height * 0.2)), interpolation=cv2.INTER_CUBIC)\n return resized,shape", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image = msg", "def send_processd_for_prediction(self):\n resized_image = cv2.resize(self.processed_image, (28, 28))\n self.send_proccesd_image_to_ML.emit(resized_image)", "def camera_cb(self, msg):\n #rospy.loginfo(\"Received new image\")\n\n try:\n image = self.bridge.imgmsg_to_cv2(msg, \"bgr8\")\n except CvBridgeError as e:\n rospy.logerr(e)\n return\n\n self.image = cv2.flip(image, -1)", "def notifyResized(self, function, **kwargs):\n self._sig_resized.subscribe(function, **kwargs)", "def process_image_attachment(image_id):\n image_attachment_model = import_image_attachment()\n image = image_attachment_model.objects.get(pk=image_id)\n image.create_display_size()\n image.create_thumbnail()", "def image_cb(self, msg): # incoming image\n self.has_image = True\n self.camera_image = msg", "def _publish_image(self):\n # only publish if we have a subscriber\n if self._image_pub.get_num_connections() == 0:\n return\n\n # get latest image from cozmo's camera\n camera_image = self._cozmo.world.latest_image\n if camera_image is not None:\n # convert image to gray scale as it is gray although\n img = camera_image.raw_image.convert('L')\n ros_img = Image()\n ros_img.encoding = 'mono8'\n ros_img.width = img.size[0]\n ros_img.height = img.size[1]\n ros_img.step = ros_img.width\n ros_img.data = img.tobytes()\n ros_img.header.frame_id = 'cozmo_camera'\n cozmo_time = camera_image.image_recv_time\n ros_img.header.stamp = rospy.Time.from_sec(cozmo_time)\n # publish images and camera info\n self._image_pub.publish(ros_img)\n camera_info = self._camera_info_manager.getCameraInfo()\n camera_info.header = ros_img.header\n self._camera_info_pub.publish(camera_info)", "def state_message_received(msg):\n self._last_image = msg.payload", "def resize(self, old, new):", "def resizeImage(IMG,IMAGE_SIZE):\n\n RESCALED_IMAGE = skimage.transform.resize(IMG,[IMG.shape[0],IMAGE_SIZE,IMAGE_SIZE])\n return RESCALED_IMAGE", "def resizeLXCContainer(self,node,vmid,post_data):\n data = self.connect('put',\"nodes/%s/lxc/%s/resize\" % (node,vmid), post_data)\n return data", "def pub_images(self, cv2_img, image_format=\"passthrough\"):\n if self.pub.get_num_connections() > 0:\n try:\n image_msg = self.bridge.cv2_to_imgmsg(cv2_img, image_format)\n self.pub.publish(image_msg)\n except CvBridgeError as e:\n rospy.logerr(\"Error on converting image for publishing: \" +\n str(e) + \" (Is your image_format correct?)\")\n\n if self.pub_compressed.get_num_connections() > 0:\n msg = CompressedImage()\n msg.header.stamp = rospy.Time.now()\n msg.format = \"jpeg\"\n msg.data = np.array(cv2.imencode('.jpg', cv2_img)[1]).tostring()\n self.pub_compressed.publish(msg)", "async def processing(message: types.Message):\n\n if message.text == 'Bad quality, low time':\n image_size = 128\n elif message.text == 'Medium quality, medium time':\n image_size = 256\n else:\n 
image_size = 300\n\n await message.answer(text='Style transfering starts. '\n 'Wait a bit.',\n reply_markup=types.ReplyKeyboardRemove())\n transform('content.jpg', 'style.jpg', image_size)\n with open('result.jpg', 'rb') as file:\n await message.answer_photo(file, caption='Work is done!')", "def __resize_image(self, img):\n return cv2.resize(img, self.g.img_size, \n interpolation = cv2.INTER_CUBIC)", "def imgProcessing(self):\n if (self.image_width > 320):\n self.cv_image = imutils.resize(self.cv_image, width = 320)\n else:\n pass\n\n \"\"\" optional -- image-mirrored \"\"\"\n # self.cv_image = cv2.flip(self.cv_image, 1)", "def onImageReceived(self, msg):\n\n self.BGR = self.bridge.imgmsg_to_cv2(msg)\n self.processImage(self.BGR)", "def rescale_image(self, img_file, new_width, new_height, model_path, file_description):\n cwd = os.getcwd()\n self.new_width = new_width\n self.new_height = new_height\n self.extract_file_name(img_file)\n shutil.copy(img_file, os.path.join('utils_dfn/temp', self.file_name_with_ext))\n self.run_padding()\n self.run_dfn(model_path)\n self.restore_to_correct_size(file_description)\n clean()", "def send_image(self, path):\n img = cv2.imread(path)\n msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding=\"bgr8\")\n pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)\n pub.publish(msg)\n # Sleep to allow for image to be published.\n # removed by alice\n #rospy.sleep(1)", "def resize(img):\n size = (500, 500)\n img.thumbnail(size)\n return img", "def callback(self, data):\n\n # Convert sensor_msgs.msg.Image into OpenDR Image\n image = self.bridge.from_ros_image(data)\n self.ID = self.ID + 1\n # Get an OpenCV image back\n image = np.float32(image.numpy())\n name = str(f\"{self.ID:02d}\"+\"_single.jpg\")\n cv2.imwrite(os.path.join(self.args.path_in, name), image)\n\n if (self.ID == 5):\n # Run SyntheticDataGeneration\n self.synthetic.eval()\n self.ID = 0\n # Annotate image and publish results\n current_directory_path = os.path.join(self.args.save_path, str(\"/Documents_orig/\"))\n for file in os.listdir(current_directory_path):\n name, ext = os.path.splitext(file)\n if ext == \".jpg\":\n image_file_savepath = os.path.join(current_directory_path, file)\n cv_image = cv2.imread(image_file_savepath)\n cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)\n if self.image_publisher is not None:\n image = Image(np.array(cv_image, dtype=np.uint8))\n message = self.bridge.to_ros_image(image, encoding=\"bgr8\")\n self.image_publisher.publish(message)\n for f in os.listdir(self.args.path_in):\n os.remove(os.path.join(self.args.path_in, f))", "def _prep_msg(self, msg):\r\n self.msg_image = self.font.render(\r\n msg, True, self.text_color, self.button_color)\r\n self.msg_image_rect = self.msg_image.get_rect()\r\n self.msg_image_rect.center = self.rect.center", "def resize(self, new_size):\n resized_img = opencv.resize(self.img, new_size)\n return Image(resized_img)", "def publish_receive(message):\n topic, content = message # \"femag_log\" + text\n # topics: femag_log, progress, file_modified,\n # model_image, calc_image, field_image, babs_image, demag_image, color_scale\n if topic == 'femag_log' or topic == 'progress':\n logger.info(\"%s: %s\", topic, content.strip())\n else:\n logger.info('%s: len %d', topic, len(content.strip()))", "def resize_image(img: torch.Tensor, new_size: Union[int, Tuple[int, int]], resize_method: str, crop_or_pad_constant: str=CROP_OR_PAD, interpolate_constant: str=INTERPOLATE) ->torch.Tensor:\n new_size = to_tuple(new_size)\n if 
list(img.shape[-2:]) != list(new_size):\n if resize_method == crop_or_pad_constant:\n return crop_or_pad(img, new_size)\n elif resize_method == interpolate_constant:\n return F.resize(img, new_size)\n raise ValueError(f'Invalid image resize method: {resize_method}')\n return img", "def resizeImage(self):\n ratio = float(self.qIma.width()) / float(self.qIma.height())\n if self.qIma.width() > self.qIma.height():\n maxWidth = 300\n maxHeight = int(300 / ratio)\n else:\n maxWidth = int(300 / ratio)\n maxHeight = 300\n img = self.qIma.toImage().scaled(maxWidth, maxHeight, QtCore.Qt.KeepAspectRatio)\n return img", "def resizeImage(image, newDimension):\r\n image = validateImage(image)\r\n if image is None:\r\n print(\"ERROR - resizeImage: Image is missing.\")\r\n return None\r\n\r\n if not isinstance(newDimension, tuple) or len(newDimension) != image.ndim:\r\n print(\"ERROR - resizeImage: Specified dimension is illegal. Dimension=\", len(newDimension), \", ImageDimension=\",\r\n image.ndim)\r\n return None\r\n\r\n return cv2.resize(image, newDimension)", "def process_image(self, image):\n min_ = np.min(image.size)\n max_ = np.max(image.size)\n\n # Find dimension of the longest side if the shortest side is resized to\n # 256 pixels.\n dimension = 256 * max_ / min_\n\n # Resize to dimension x dimension max resolution and preserve the\n # aspect ratio.\n image.thumbnail((dimension, dimension))\n\n crop_dim = 224 # crop dimension 224 x 224 pixels\n # Find center crop coordinates\n width, height = image.size\n left = (width - crop_dim) / 2\n top = (height - crop_dim) / 2\n right = (width + crop_dim) / 2\n bottom = (height + crop_dim) / 2\n\n np_image = np.array(image.crop((left, top, right, bottom))) / 255\n\n means = np.array([0.485, 0.456, 0.406])\n std_devs = np.array([0.229, 0.224, 0.225])\n\n np_image = (np_image - means) / std_devs\n np_image = np_image.transpose((2, 0, 1))\n\n return np_image", "def image_resize(job_object):\n try:\n job = json.loads(job_object.arg)\n base64_file = job['image']\n args = job['args'] if 'args' in job else {}\n del job['image']\n logging.info(job)\n \n def write_file(local_path,filename,file_b64):\n logging.debug(\"about to save to \" + \"%s/%s\" % (local_path,filename))\n if not os.path.exists(local_path): os.makedirs(local_path)\n image_file = base64.b64decode(file_b64)\n local_file = open(\"%s/%s\" % (local_path,filename), \"w\")\n local_file.write(image_file)\n local_file.close()\n \n def download_file(url,local_path,filename):\n print \"downloading \" + url\n f = urllib2.urlopen(urllib2.Request(url))\n print \"about to save to \" + \"%s/%s\" % (local_path,filename)\n if not os.path.exists(local_path): os.makedirs(local_path)\n # Open our local file for writing\n local_file = open(\"%s/%s\" % (local_path,filename), \"w\")\n local_file.write(f.read())\n local_file.close()\n \n local_path = '%s/upload/%s' % (options.asset_root,job['path'])\n local_path_wfile = '%s/%s%s' % (local_path,job['file'],job['extension'])\n filename = '%s%s' % (job['file'],job['extension'])\n #download_file(job['url'],local_path,filename)\n write_file(local_path,filename,base64_file)\n \n def resize_and_save(local_file,new_file,maxsize=None,maxh=None,maxw=None,crop=None):\n \"\"\"Resize the image and save\"\"\"\n logging.debug(\"maxw = %s, maxsize=%s, crop=%s\" % (maxw,maxsize,crop))\n img = Image.open(local_file)\n width,height = img.size\n width,height = float(width), float(height)\n ratio = float(1)\n if crop is not None:\n size = float(maxsize)\n if width <= height and width > 
size:\n ratio = size/width\n elif height < width and height > size:\n ratio = size/height\n else: \n ratio = 1 # too small\n elif maxsize:\n size = float(maxsize)\n if width >= height and width > size:\n ratio = size/width\n elif height > width and height > size:\n ratio = size/height\n else: \n ratio = 1 # too small\n elif maxh:\n size = maxh\n if height > size:\n ratio = size/height\n else:\n # too small\n ratio = 1\n elif maxw:\n size = maxw\n if width > size:\n ratio = size/width\n else:\n # too small\n ratio = 1\n else:\n raise Exception(\"must specify max width, OR max size\")\n \n print(\"old: ratio = %s: size(x,y) = %s,%s\" % (ratio,width,height))\n height = int(height*ratio)\n width = int(width*ratio)\n print(\"new ratio = %s: size(x,y) = %s,%s\" % (ratio,width,height))\n img = img.resize((width, height),Image.ANTIALIAS)\n if crop is not None:\n log.debug(\"in crop %s\" % crop)\n crop = int(crop)\n if width > crop:\n amt = int((int(width) - crop)/2)\n img = img.crop((amt,0,amt + crop, crop))\n elif height > crop:\n amt = int((int(height) - crop)/2)\n img = img.crop((0,amt,crop,amt+crop))\n \n log.debug(\"saving new file %s\" % new_file)\n if img.mode != \"RGB\":\n img = img.convert(\"RGB\")\n img.save(new_file)\n \n \n if os.path.exists(local_path_wfile):\n if args != {}:\n ext = args['extension'] if 'extension' in args else \"_t\"\n resize_and_save(local_path_wfile,\n '%s/%s%s.jpg' % (local_path,job['file'],ext),\n maxsize=args['maxsize'],\n crop=args['crop'])\n else:\n resize_and_save(local_path_wfile,'%s/%s_t.jpg' % (local_path,job['file']),maxsize=100)\n resize_and_save(local_path_wfile,'%s/%s_m.jpg' % (local_path,job['file']),maxw=317)\n resize_and_save(local_path_wfile,'%s/%s_l.jpg' % (local_path,job['file']),maxsize=800)\n keeptrying = False\n else:\n logging.error(\"haven't found file? %s\" % local_path_wfile)\n \n # delete original\n logging.debug(\"About to delete original %s\" % local_path_wfile)\n os.remove(local_path_wfile)\n \n except:\n traceback.print_exc()", "def restore_to_correct_size(self, file_description):\n file_name = os.listdir('utils_dfn/output/result')[0]\n file_name_with_ext, file_name = extract_file_name(file_name)\n new_file = os.path.join('static/rescaled_images', self.file_name + file_description + '.png')\n img = cv2.imread(os.path.join('utils_dfn/output/result', file_name_with_ext))\n if self.new_width > self.new_height:\n new_size = self.new_width\n else:\n new_size = self.new_height\n\n img = resize_image(img, new_size, new_size)\n img = crop_image(img, bottom=self.pad_to_bottom, right=self.pad_to_right)\n cv2.imwrite(new_file, img)\n self.rescaled_file_name_with_ext, _ = extract_file_name(new_file)", "def resize(self):\n pass", "def crop_and_resize(input):\r\n\r\n # Locate the last Relu node of the first backbone (pre 1st NMS). 
Relu node contains feature maps\r\n # necessary for CropAndResize plugin.\r\n relu_name = \"StatefulPartitionedCall/model/\"\r\n relu_node = [node for node in self.graph.nodes if node.op == \"Relu\" and relu_name in node.name][-1]\r\n\r\n # Before passing 1st NMS's detection boxes (rois) to CropAndResize, we need to clip and normalize them.\r\n # Clipping happens for coordinates that are less than 0 and more than self.height.\r\n # Normalization is just divison of every coordinate by self.height.\r\n clip_min = np.asarray([0], dtype=np.float32)\r\n clip_max = np.asarray([self.height], dtype=np.float32)\r\n clip_out = self.graph.elt_const_clip(\"Clip\", \"FirstNMS/detection_boxes_clipper\", input, clip_min, clip_max)\r\n div_const = np.expand_dims(np.asarray([self.height, self.height, self.height, self.height], dtype=np.float32), axis=(0, 1))\r\n div_out = self.graph.elt_const(\"Div\", \"FirstNMS/detection_boxes_normalizer\", clip_out[0], div_const)\r\n\r\n # Linear transformation to convert box coordinates from (TopLeft, BottomRight) Corner encoding\r\n # to CenterSize encoding.\r\n matmul_const = np.matrix('0.5 0 -1 0; 0 0.5 0 -1; 0.5 0 1 0; 0 0.5 0 1', dtype=np.float32)\r\n matmul_out = self.graph.elt_const(\"MatMul\", \"FirstNMS/detection_boxes_conversion\", div_out[0], matmul_const)\r\n\r\n # Additionally CropAndResizePlugin requires 4th dimension of 1: [N, B, 4, 1], so\r\n # we need to add unsqeeze node to make tensor 4 dimensional. \r\n unsqueeze_node = self.graph.unsqueeze( \"FirstNMS/detection_boxes_unsqueeze\", div_out)\r\n\r\n # CropAndResizePlugin's inputs \r\n feature_maps = relu_node.outputs[0]\r\n rois = unsqueeze_node[0]\r\n\r\n # CropAndResize TensorRT Plugin.\r\n # Two inputs are given to the CropAndResize TensorRT node:\r\n # - The feature_maps (from the Relu node found above): [batch_size, channel_num, height, width]\r\n # - The rois (in other words clipped and normalized detection boxes resulting fromm 1st NMS): [batch_size, featuremap, 4, 1]\r\n cnr_inputs = [feature_maps, rois]\r\n cnr_op = \"CropAndResize\"\r\n cnr_attrs = {\r\n 'crop_width': self.initial_crop_size,\r\n 'crop_height': self.initial_crop_size,\r\n }\r\n\r\n # CropAndResize Outputs.\r\n cnr_pfmap = gs.Variable(name=\"pfmap\", dtype=np.float32,\r\n shape=[self.batch_size, self.first_stage_max_proposals, feature_maps.shape[1], self.initial_crop_size, self.initial_crop_size])\r\n cnr_outputs = [cnr_pfmap]\r\n\r\n # Create the CropandResize Plugin node with the selected inputs. 
\r\n self.graph.plugin(\r\n op=cnr_op,\r\n name=\"cnr/crop_and_resize\",\r\n inputs=cnr_inputs,\r\n outputs=cnr_outputs,\r\n attrs=cnr_attrs)\r\n log.info(\"Created CropAndResize plugin '{}' with attributes: {}\".format(cnr_op, cnr_attrs))\r\n\r\n # Reshape node that is preparing CropAndResize's pfmap output shape for MaxPool node that comes next,\r\n # after that is 2nd backbone that leads us to final 2nd NMS.\r\n reshape_shape = np.asarray([self.first_stage_max_proposals*self.batch_size, feature_maps.shape[1], self.initial_crop_size, self.initial_crop_size], dtype=np.int64)\r\n reshape_node = self.graph.elt_const(\"Reshape\", \"StatefulPartitionedCall/CropandResize/reshape\", cnr_outputs[0], reshape_shape)\r\n maxpl_name = \"StatefulPartitionedCall/MaxPool2D/MaxPool\"\r\n maxpool_node = [node for node in self.graph.nodes if node.op == \"MaxPool\" and maxpl_name == node.name][0]\r\n maxpool_node.inputs[0] = reshape_node[0]\r\n\r\n # Return linear transformation node, it will be located between 1st and 2nd NMS, \r\n # so we need to pass and connect it to 2nd NMS.\r\n return matmul_out[0]", "def image_callback(img_msg):\n bridge = CvBridge()\n try:\n # Convert from sensor_msgs::Image to cv::Mat\n \tcv_image = bridge.imgmsg_to_cv2(img_msg, desired_encoding=\"passthrough\")\n \t# Access global variable and store image as numpy.array\n \tglobal _last_image\n \t_last_image = np.asarray(cv_image)\n except CvBridgeError as ex:\n\tprint ex", "def resize_image(image, side):\n cv2.imshow(\"Original\", image)\n small = cv2.resize(image, (side,side), interpolation = cv2.INTER_AREA)\n return small", "def _resize_img(self, results):\n for key in results.get('img_fields', ['img']):\n if self.keep_ratio:\n img, scale_factor = mmcv.imrescale(\n results[key],\n results['scale'],\n return_scale=True,\n interpolation=self.interpolation,\n backend=self.backend)\n # the w_scale and h_scale has minor difference\n # a real fix should be done in the mmcv.imrescale in the future\n new_h, new_w = img.shape[:2]\n h, w = results[key].shape[:2]\n w_scale = new_w / w\n h_scale = new_h / h\n else:\n img, w_scale, h_scale = mmcv.imresize(\n results[key],\n results['scale'],\n return_scale=True,\n interpolation=self.interpolation,\n backend=self.backend)\n results[key] = img\n\n scale_factor = np.array([w_scale, h_scale, w_scale, h_scale],\n dtype=np.float32)\n results['img_shape'] = img.shape\n # in case that there is no padding\n results['pad_shape'] = img.shape\n results['scale_factor'] = scale_factor\n results['keep_ratio'] = self.keep_ratio", "def test_transform_image_resize_and_crop_portrait_png(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200), mime_type='PNG')\n self.expect_crop(left_x=0.125, right_x=0.875)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c',\n images_service_pb.OutputSettings.PNG)\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/png'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def convert_topic_dir(full_topic_dir, full_target_dir, photo_topic):\n warnings.simplefilter('error', Image.DecompressionBombWarning)\n topicfiles = [f for f in listdir(full_topic_dir) if isfile(join(full_topic_dir, f))\n and (f.lower().endswith(\".jpg\") or f.lower().endswith(\".jpeg\"))]\n for topicfile in topicfiles:\n fulltopicfile = join(full_topic_dir, topicfile)\n fulltargetfile = join(full_target_dir, topicfile)\n if not exists_and_newer(fulltargetfile, fulltopicfile):\n print(\" Converting\", topicfile, \": \", end='')\n try:\n 
im = Image.open(fulltopicfile)\n\n if im._getexif() is not None:\n exif = {\n TAGS[k]: v\n for k, v in im._getexif().items()\n if k in TAGS\n }\n else:\n exif = dict()\n\n if 'Orientation' in exif:\n im = apply_image_rotation_by_exif(im, exif['Orientation'])\n\n if 'ImageDescription' in exif:\n photo_description = exif['ImageDescription']\n photo_description = bytes(photo_description, encoding=\"ansi\", errors=\"ignore\").decode(\"utf-8\", errors=\"ignore\") # PIL reads exif data as ansi not utf-8 strings\n if photo_description.rstrip() == '':\n photo_caption = photo_topic\n else:\n if photo_description.endswith('#'):\n photo_caption = photo_topic + \" - \" + photo_description.rstrip('#')\n else:\n photo_caption = photo_description\n else:\n photo_caption = photo_topic\n\n print(photo_caption)\n im.thumbnail(targetSize, Image.ANTIALIAS)\n im = add_caption_to_image(im, photo_caption)\n im.save(fulltargetfile, \"JPEG\")\n except IOError:\n print(\"cannot create target for '%s'\" % fulltopicfile)\n except AttributeError:\n print(\"Attribute error for '%s'\" % fulltopicfile)\n else:\n print(\" Skipping\", topicfile)", "def to_ros(img):\n\n # Everything ok, convert PIL.Image to ROS and return it\n if img.mode == 'P':\n img = img.convert('RGB')\n\n rosimage = ImageMsg()\n rosimage.encoding = ImageConverter._ENCODINGMAP_PY_TO_ROS[img.mode]\n (rosimage.width, rosimage.height) = img.size\n rosimage.step = (ImageConverter._PIL_MODE_CHANNELS[img.mode]\n * rosimage.width)\n rosimage.data = img.tobytes()\n return rosimage", "def fileResizeObscure(new_filepath):\n # Resize\n img1 = Image.open(new_filepath)\n img2=image_reduce(img1)\n *** Stopped working here\n newpath=\"toupload\\\\%s\" % new_filepath\n # Block ID\n width=img2.size[0]\n height=img2.size[1]\n # Obscuring params were decided by trial and error using fraction of width and height\n x1=int(0.16*width)\n x2=int(0.28*width)\n y1=int(0.94*height)\n y2=int(0.98*height) \n # Faster but easier to snoop? 
should not be since it changes the pixels\n draw = ImageDraw.Draw(img2)\n draw.rectangle([(x1,y1),(x2,y2)],fill=\"white\")\n del draw\n \n img2.save(newpath,optimize=True,quality=95)", "def cs4243_resize(image, new_width, new_height):\n new_image = np.zeros((new_height, new_width, 3), dtype='uint8')\n if len(image.shape)==2:\n new_image = np.zeros((new_height, new_width), dtype='uint8')\n ###Your code here###\n \n # if new_width < 0 or new_height < 0, np.zeros() will throw a ValueError.\n \n # if new_width == 0 or new_height == 0, we won't need to do any calculation.\n if new_width == 0 or new_height == 0:\n return new_image\n \n # resizing algorithm taken from\n # https://tech-algorithm.com/articles/nearest-neighbor-image-scaling/\n height, width = image.shape[0], image.shape[1]\n w_ratio = int(((width << 16) / new_width) + 1)\n h_ratio = int(((height << 16) / new_height) + 1)\n \n for h in range(new_height):\n for w in range(new_width):\n pw = int((w*w_ratio) >> 16)\n ph = int((h*h_ratio) >> 16)\n new_image[h,w] = image[ph, pw]\n ###\n return new_image", "def prep_msg(self,msg):\n self.msg_image = self.font.render(msg,True,self.text_color,self.button_color)\n self.msg_image_rect = self.msg_image.get_rect()\n self.msg_image_rect.center = self.rect.center", "def image_resize_shortest_edge(\n img, size: int, channels_last: bool = False\n) -> torch.Tensor:\n img = _to_tensor(img)\n no_batch_dim = len(img.shape) == 3\n if len(img.shape) < 3 or len(img.shape) > 5:\n raise NotImplementedError()\n if no_batch_dim:\n img = img.unsqueeze(0) # Adds a batch dimension\n if channels_last:\n h, w = img.shape[-3:-1]\n if len(img.shape) == 4:\n # NHWC -> NCHW\n img = img.permute(0, 3, 1, 2)\n else:\n # NDHWC -> NDCHW\n img = img.permute(0, 1, 4, 2, 3)\n else:\n # ..HW\n h, w = img.shape[-2:]\n\n # Percentage resize\n scale = size / min(h, w)\n h = int(h * scale)\n w = int(w * scale)\n img = torch.nn.functional.interpolate(\n img.float(), size=(h, w), mode=\"area\"\n ).to(dtype=img.dtype)\n if channels_last:\n if len(img.shape) == 4:\n # NCHW -> NHWC\n img = img.permute(0, 2, 3, 1)\n else:\n # NDCHW -> NDHWC\n img = img.permute(0, 1, 3, 4, 2)\n if no_batch_dim:\n img = img.squeeze(dim=0) # Removes the batch dimension\n return img", "def showResized(name, image, scale):\n image = resizeImage(image, scale)\n cv.ShowImage(name, image)", "def resizing(image, image_2, desire_width):\n\n height, width = image.shape[0:2]\n\n aspect_ratio = (width / height)\n\n new_width = desire_width\n\n new_height = int(round(new_width / aspect_ratio))\n\n standard_src = cv2.resize(image, (new_width, new_height))\n\n image_tocut = cv2.resize(image_2,(new_width, new_height))\n\n return standard_src, image_tocut", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image = msg\n light_wp, state = self.process_traffic_lights()\n\n # rospy.logerr(\"light_wp\")\n # rospy.logerr(light_wp)\n\n # self.upcoming_red_light_pub.publish(Int32(light_wp))\n\n '''\n Publish upcoming red lights at camera frequency.\n Each predicted state has to occur `STATE_COUNT_THRESHOLD` number\n of times till we start using it. 
Otherwise the previous stable state is\n used.\n '''\n if self.state != state:\n self.state_count = 0\n self.state = state\n elif self.state_count >= STATE_COUNT_THRESHOLD:\n self.last_state = self.state\n light_wp = light_wp if state == TrafficLight.RED else -1\n self.last_wp = light_wp\n self.upcoming_red_light_pub.publish(Int32(light_wp))\n else:\n self.upcoming_red_light_pub.publish(Int32(self.last_wp))\n self.state_count += 1", "def expect_resize(self, resize):\n resize_xform = images_service_pb.Transform()\n resize_xform.set_width(resize)\n resize_xform.set_height(resize)\n self._images_stub._Resize(mox.IsA(Image.Image),\n resize_xform).AndReturn(self._image)", "def process_image(image_path):\n\n # open the image\n image = Image.open(image_path)\n \n # print(\"Original Image size: \", image.size)\n \n # first resize the images where the shortest side is 256 px\n width, height = image.size\n size = 256, 256\n \n newwidth, newheight = None, None\n \n # if the height is the shorter side\n if height < width:\n # find ratio between larger and smaller side\n ratio = float(width) / float(height)\n # resize smaller side to 256\n newheight = 256\n # resize larger side to 256 * ratio\n newwidth = int(floor(ratio * size[0])) \n # else, the width is the shorter side\n else:\n # find ratio between larger and smaller side\n ratio = float(height)/float(width)\n # resize smaller side to 256\n newwidth = 256\n # resize larger side to 256 * ratio\n newheight = int(floor(ratio * size[1]))\n \n \n # print(\"W: {}, H: {}\".format(newwidth, newheight))\n \n # resize the image\n image = image.resize((newwidth, newheight), Image.ANTIALIAS)\n\n # print(\"Resized Image (keep aspect ratio): \", image.size)\n \n # perform center crop\n # https://stackoverflow.com/questions/16646183/crop-an-image-in-the-centre-using-pil\n width, height = image.size # Get dimensions\n new_height, new_width = 224, 224\n \n left = (width - new_width)/2\n top = (height - new_height)/2\n right = (width + new_width)/2\n bottom = (height + new_height)/2\n\n image = image.crop((left, top, right, bottom))\n # print(\"cropped image size: \", image.size)\n \n # convert encoded color channels and convert to floats (divide by 255)\n np_image = np.array(image) / 255\n # print(np_image)\n \n # normalize\n mean = [0.485, 0.456, 0.406]\n std = [0.229, 0.224, 0.225]\n np_image = (np_image - mean) / std\n \n # finally, transpose\n # print(\"shape 1: \", np_image.shape)\n np_image = np_image.transpose((2, 0, 1))\n # print(\"transposed shape: \", np_image.shape)\n \n # Originally, I was returning a numpy array, as I thought these were the instructions, but\n # when trying to test, it would not work. 
\n # Found solution at: https://knowledge.udacity.com/questions/29173\n # We have to convert to a tensor before we return it\n return torch.Tensor(np_image)", "def client_handler(inbound_socket, addr, job_queue, result_queue):\n global last_pic\n print(inbound_socket)\n\n def draw_boxes(boxes):\n mask = Image.new('RGBA', picSize, (255, 255, 255, 0))\n d = ImageDraw.Draw(mask)\n fnt = ImageFont.truetype(p.FONT_PATH, 12)\n txt_offset_x = 0\n txt_offset_y = 20\n for box in boxes:\n p_coords = [box.coords[0]*picSize[0],\n box.coords[1]*picSize[1],\n box.coords[2]*picSize[0],\n box.coords[3]*picSize[1]]\n d.rectangle(p_coords, outline='red')\n print('drawing box at ', end='')\n # print([x for x in box.coords])\n textpos = (p_coords[0] - txt_offset_x, p_coords[1] - txt_offset_y)\n d.text(textpos, 'Class %s at %s confidence' %\n (box.classification, box.confidence), font=fnt, fill='red')\n\n return mask\n try:\n camera_socket = socket.socket()\n camera_socket.connect(('dronepi.local', 8000))\n camera_connection = camera_socket.makefile('rwb')\n\n client_connection = inbound_socket.makefile('rwb')\n image_stream = io.BytesIO()\n char_len = struct.calcsize('<c')\n long_len = struct.calcsize('<L')\n while True:\n t = time.time()\n command = struct.unpack('<c', client_connection.read(char_len))[0]\n t = time_op(t, 'recv command')\n if command != b'':\n if command == b'p':\n last_pic.save(image_stream,\n format='jpeg',\n quality=85,\n thumbnail=None)\n t = time_op(t, 'save pic')\n header = struct.pack('<L', image_stream.tell())\n client_connection.write(header)\n t = time_op(t, 'send header')\n # Rewind the stream and send the image data over the wire\n image_stream.seek(0)\n client_connection.write(image_stream.read())\n client_connection.flush()\n t = time_op(t, 'send pic')\n # reset stream\n image_stream.seek(0)\n image_stream.truncate()\n\n elif command == b'c':\n camera_connection.write(b'p')\n camera_connection.flush()\n t = time_op(t, 'send cam request')\n image_len_raw = camera_connection.read(long_len)\n image_len = struct.unpack('<L', image_len_raw)[0]\n t = time_op(t, 'recv header')\n if not image_len:\n print('Received image length of 0, quitting!')\n break\n # Construct a stream to hold the image data and\n # read the image data from the connection\n image_stream.write(camera_connection.read(image_len))\n t = time_op(t, 'recv pic')\n # Rewind the stream, open it as an image with PIL and\n # do some processing on it\n image_stream.seek(0)\n image = Image.open(image_stream)\n\n t = time_op(t, 'open pic & process')\n job_queue.put(image)\n job_queue.join()\n t = time_op(t, 'NN')\n\n image_stream.seek(0)\n image_stream.truncate()\n\n bboxes = result_queue.get(False)\n box_pickle = pickle.dumps(bboxes, protocol=3)\n pickle_size = len(box_pickle)\n t = time_op(t, 'pickle')\n client_connection.write(struct.pack('<L', pickle_size))\n client_connection.write(box_pickle)\n client_connection.flush()\n t = time_op(t, 'send pickle')\n\n last_pic = image\n\n elif command == b'd':\n camera_connection.write(b'p')\n camera_connection.flush()\n t = time_op(t, 'send cam request')\n image_len_raw = camera_connection.read(long_len)\n image_len = struct.unpack('<L', image_len_raw)[0]\n t = time_op(t, 'recv header')\n if not image_len:\n print('Received image length of 0, quitting!')\n break\n # Construct a stream to hold the image data and read\n # the image data from the connection\n\n image_stream.write(camera_connection.read(image_len))\n t = time_op(t, 'recv pic')\n # Rewind the stream, open it as an image 
with PIL and\n # do some processing on it\n image_stream.seek(0)\n image = Image.open(image_stream)\n\n t = time_op(t, 'open pic & process')\n job_queue.put(image)\n job_queue.join()\n t = time_op(t, 'NN')\n\n image_stream.seek(0)\n image_stream.truncate()\n\n bboxes = result_queue.get(False)\n\n box_count = len(bboxes)\n client_connection.write(struct.pack('<L', box_count))\n for box in bboxes:\n data = [box.coords[0],\n box.coords[1],\n box.coords[2],\n box.coords[3],\n box.confidence,\n box.classification]\n #print(data)\n client_connection.write(struct.pack('<ffffff',\n data[0],\n data[1],\n data[2],\n data[3],\n data[4],\n data[5]))\n client_connection.flush()\n t = time_op(t, 'send tuples')\n\n last_pic = image\n except:\n print('Error: %s' % sys.exc_info()[0], flush=True)\n print('Error: %s' % sys.exc_info()[1], flush=True)\n print('Error: %s' % sys.exc_info()[2], flush=True)\n client_connection.close()\n camera_connection.close()\n inbound_socket.close()\n camera_socket.close()\n return 0", "def rescale_image(image, rescale_width):\r\n\r\n # image size\r\n image_height = pdb.gimp_image_height(image)\r\n image_width = pdb.gimp_image_width(image)\r\n\r\n # new image height\r\n rescale_height = round(image_height * (rescale_width * 1.0 / image_width))\r\n\r\n pdb.gimp_image_scale(image, rescale_width, rescale_height)\r\n gimp.message('Rescaled image')", "def resize_image():\r\n print(\"--- resizing the image ---\")\r\n if args.image:\r\n img = cv2.imread(args.image)\r\n img = cv2.resize(img, (32, 32))\r\n img = np.array(img, dtype=np.uint8)\r\n img = img.reshape(32, 32, 3) / 255\r\n img = np.expand_dims(img, axis=0)\r\n print(\"--- done resizing the image ---\")\r\n return img\r\n else:\r\n print(\"Please enter a path of an image to check for it's label\")", "def on_parent_resize(self, event):\n #self.resize()\n #self.resize_scaled(drag_rootx=self.resize_frame.winfo_rootx())\n self.resize_scaled(current=MathStat.lerp(0,\n self.prop_frame.winfo_width(), self.last_right_bias))", "async def resend_post(self):\n if not self.post_queue.empty():\n post = self.post_queue.get()\n image = None\n if post['photo']:\n image_str = str(post['photo']).encode('ascii')\n image_byte = base64.b64decode(image_str)\n with open('anuncio.png', 'wb+') as image:\n image.write(image_byte)\n\n for channel in self.aviso_channels:\n await channel.send(content=(post['caption'] if image else post['text']), file=(discord.File('anuncio.png') if image else None))", "def resize_tensor(tensor, new_shape):\n channels = tensor.shape[0]\n new_tensor = np.zeros(shape=(channels,) + new_shape)\n for i in range(0, channels):\n new_tensor[i] = cv2.resize(tensor[i], dsize=new_shape[::-1])\n\n return new_tensor", "def handleResize(self):\n pass", "def scale_image(image=np.zeros((100,100)), \n new_width=100,\n ascii_block_size=(2,3)):\n \n original_width, original_height = image.shape\n aspect_ratio = original_height / float(original_width)\n w,h = ascii_block_size\n new_height = int(h/w * aspect_ratio * new_width)\n\n return skimage.transform.resize(image, (new_width, new_height))", "def update_message(self):\n self.msg_image = self.font.render(self.message, True, self.text_color)", "def send_image(self, device_id, image):\n self.logger.debug(f\"{device_id}: sending processed image!\")\n base64_img = base64.b64encode(\n cv2.imencode('.jpg', image)[1].tostring())\n self.socketio.emit(\n \"image\", {\"message\": base64_img}, room=f\"device-{device_id}\")", "def prep_msg(self, msg):\r\n\t\tself.msg_image = self.font.render(msg, 
True, self.text_color,\r\n\t\t\tself.button_color)\r\n\t\tself.msg_image_rect = self.msg_image.get_rect()\r\n\t\tself.msg_image_rect.center = self.rect.center", "def prep_msg(self, msg):\r\n\t\tself.msg_image = self.font.render(msg, True, self.text_color,\r\n\t\t\tself.button_color)\r\n\t\tself.msg_image_rect = self.msg_image.get_rect()\r\n\t\tself.msg_image_rect.center = self.rect.center", "def test_resize(dummy_input):\n # Test the 2D image: H, W, C\n image, label = dummy_input(image_size=(512, 512, 3),\n label_size=(512, 512, 1))\n transform = Resize(size=(64, 64))\n _image, _label = transform(image, label, resize_orders=[3, 0])\n assert _image.shape == (64, 64, 3)\n assert _image.dtype == image.dtype\n assert _label.shape == (64, 64, 1)\n assert _label.dtype == label.dtype\n\n # Test the 3D image: H, W, D, C\n image, label = dummy_input(image_size=(512, 512, 20, 3),\n label_size=(512, 512, 20, 1))\n transform = Resize(size=(64, 64, 10))\n _image, _label = transform(image, label, resize_orders=[3, 0])\n assert _image.shape == (64, 64, 10, 3)\n assert _image.dtype == image.dtype\n assert _label.shape == (64, 64, 10, 1)\n assert _label.dtype == label.dtype", "def _resize_img(self, results):\n for key in results.get('img_fields', ['img']):\n if self.keep_ratio:\n img, scale_factor = general_ocr.imrescale(\n results[key],\n results['scale'],\n return_scale=True,\n backend=self.backend)\n # the w_scale and h_scale has minor difference\n # a real fix should be done in the general_ocr.imrescale in the future\n new_h, new_w = img.shape[:2]\n h, w = results[key].shape[:2]\n w_scale = new_w / w\n h_scale = new_h / h\n else:\n img, w_scale, h_scale = general_ocr.imresize(\n results[key],\n results['scale'],\n return_scale=True,\n backend=self.backend)\n results[key] = img\n\n scale_factor = np.array([w_scale, h_scale, w_scale, h_scale],\n dtype=np.float32)\n results['img_shape'] = img.shape\n # in case that there is no padding\n results['pad_shape'] = img.shape\n results['scale_factor'] = scale_factor\n results['keep_ratio'] = self.keep_ratio", "def image_cb(self, msg):\n rospy.logdebug(\"TLDetector.image_cb\")\n self.__has_image = True\n self.__camera_image = msg\n\n cv_image = self.__bridge.imgmsg_to_cv2(msg, \"bgr8\")\n light_wp, state = self.__process_traffic_lights()\n if self.__mode == LABEL_MODE and not self.__classification_done and state != 4:\n self.__classification_done = self.__light_classifier.save_image(\n cv_image, state\n )\n if self.__classification_done:\n rospy.loginfo(\"TLDetector.image_cb: Done generating labels.\")\n\n \"\"\"\n Publish upcoming red lights at camera frequency.\n Each predicted state has to occur `STATE_COUNT_THRESHOLD` number\n of times till we start using it. 
Otherwise the previous stable state is\n used.\n \"\"\"\n self.__publish_traffic_light_state(light_wp, state)", "def _resize_image(self, event):\n self.window_width = event.width\n self.window_height = event.height", "def resize_image(self, im, max_side_len=512):\n h, w, _ = im.shape\n\n resize_w = w\n resize_h = h\n\n # limit the max side\n if max(resize_h, resize_w) > max_side_len:\n ratio = float(max_side_len) / resize_h if resize_h > resize_w else float(max_side_len) / resize_w\n else:\n ratio = 1.\n resize_h = int(resize_h * ratio)\n resize_w = int(resize_w * ratio)\n\n resize_h = resize_h if resize_h % 32 == 0 else (resize_h // 32 - 1) * 32\n resize_w = resize_w if resize_w % 32 == 0 else (resize_w // 32 - 1) * 32\n im = cv2.resize(im, (int(resize_w), int(resize_h)))\n\n ratio_h = resize_h / float(h)\n ratio_w = resize_w / float(w)\n\n return im, (ratio_h, ratio_w)", "def read_image_and_resize(image_path: str,\n new_WH: Tuple[int, int]=(512, 512),\n save_dir: str=\"resize\") -> str:\n assert os.path.exists(save_dir) is True\n new_path = os.path.join(save_dir, os.path.basename(image_path))\n image = cv2.imread(image_path)\n image = cv2.resize(image, new_WH, interpolation=cv2.INTER_AREA)\n cv2.imwrite(new_path, image)\n\n return image_path", "def process_next_image(self):\n if self.queue:\n next_queue_item = self.queue.popleft()\n if type(next_queue_item) == str:\n if next_queue_item == 'clear':\n self.signal_status_message.emit('Clearing ROI data (from request in image queue)')\n self.clear()\n return\n [image,file_id,image_num] = next_queue_item\n # print('image_num',image_num)\n # print('next image',self.next_image)\n self.signal_status_message.emit('Started processing ID {} Im {}'.format(file_id,image_num))\n image = image - self.emccd_bias # don't edit in place because this seemed to cause an issue with images not showing in GUI. Maybe not thread safe?\n # print('image min',np.min(image))\n # print('image max',np.max(image))\n image_num_too_big = False\n for group in self.roi_groups:\n for roi in group.rois:\n try:\n roi.counts[image_num][file_id] = image[roi.x:roi.x+roi.w,roi.y:roi.y+roi.h].sum()\n except IndexError: # image_num was not valid for the number of images that MAIA is expecting\n image_num_too_big = True\n if image_num_too_big:\n self.signal_status_message.emit('Image number {} is greater than max expected images, so this image has been ignored (most likely cause is rearrangement toggle).')\n self.signal_status_message.emit('Finished processing ID {} Im {}'.format(file_id,image_num))\n self.calculate_thresholds()", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image = msg\n light_wp, state = self.process_traffic_lights()\n\n '''\n Publish upcoming red lights at camera frequency.\n Each predicted state has to occur `STATE_COUNT_THRESHOLD` number\n of times till we start using it. 
Otherwise the previous stable state is\n used.\n '''\n if self.state != state:\n self.state_count = 0\n self.state = state\n elif self.state_count >= STATE_COUNT_THRESHOLD:\n self.last_state = self.state\n light_wp = light_wp if state == TrafficLight.RED else -1\n self.last_wp = light_wp\n self.upcoming_red_light_pub.publish(Int32(light_wp))\n else:\n self.upcoming_red_light_pub.publish(Int32(self.last_wp))\n self.state_count += 1", "def OnResizeEnd(self, event):\n self._resizing = False\n self.Refresh()", "def callback(self,data):\n self.cvtImage(data)\n\n \"\"\" Do some image processing; flip, resize, and etc\"\"\"\n self.imgProcessing()\n\n \"\"\" displaying an OpenCV image \"\"\"\n cv2.imshow(self.cv_window_name, self.cv_image)\n cv2.waitKey(1)\n# ------------------------------------------------------------------------------\n\n try:\n \"\"\" coverting the uint8 OpenCV image to ROS image data \"\"\"\n \"\"\" Publisher.publish() -- explicit way \"\"\"\n self.image_pub.publish(self.bridge.cv2_to_imgmsg(self.cv_image, \"bgr8\"))\n except CvBridgeError as e:\n print(e)", "def rescale(self, img):\n\n if self.scale != 1:\n return imutils.resize(img, width=int(img.shape[1] * self.scale))\n else:\n return img", "def send_image(path):\n img = cv2.imread(path)\n msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding=\"bgr8\")\n pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)\n pub.publish(msg)\n # Sleep to allow for image to be published.\n rospy.sleep(1)", "def send_image(path):\n img = cv2.imread(path)\n msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding=\"bgr8\")\n pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)\n pub.publish(msg)\n # Sleep to allow for image to be published.\n rospy.sleep(1)", "def _image_decode_resize_normalized(image_tensor,height, weight, is_normalized=True):\n image_tensor = tf.image.convert_image_dtype(image_tensor, dtype=tf.float32)\n image_tensor = tf.image.resize_images(image_tensor, [height, weight])\n if is_normalized:\n image_tensor /= 255\n return image_tensor", "def callback(self, data):\n\n # Convert sensor_msgs.msg.Image into OpenDR Image\n image = self.bridge.from_ros_image(data)\n rospy.loginfo(\"image info: {}\".format(image.numpy().shape))\n\n # Run pose estimation\n boxes = self.object_detector.infer(image, threshold=0.1, keep_size=False)\n\n # Get an OpenCV image back\n image = np.float32(image.numpy())\n\n # Convert detected boxes to ROS type and publish\n ros_boxes = self.bridge.to_ros_boxes(boxes)\n if self.bbox_publisher is not None:\n self.bbox_publisher.publish(ros_boxes)\n rospy.loginfo(\"Published face boxes\")\n\n # Annotate image and publish result\n # NOTE: converting back to OpenDR BoundingBoxList is unnecessary here,\n # only used to test the corresponding bridge methods\n odr_boxes = self.bridge.from_ros_boxes(ros_boxes)\n image = draw_bounding_boxes(image, odr_boxes, class_names=self.class_names)\n if self.image_publisher is not None:\n message = self.bridge.to_ros_image(np.uint8(image))\n self.image_publisher.publish(message)\n rospy.loginfo(\"Published annotated image\")", "def body_resize(self):", "def resize_image(image, newN, newM):\n resize = np.zeros((newM, newN))\n row, col = resize.shape\n try:\n for j in range(col):\n for i in range(row):\n resize[i, j] = image[i*2, j*2]\n except:\n pass\n\n return resize", "def initDevMsgImage(self):\n return" ]
[ "0.64465207", "0.6418566", "0.6362998", "0.62206006", "0.59658504", "0.59462744", "0.58035374", "0.5765646", "0.5717802", "0.56819785", "0.5623336", "0.55827916", "0.5574665", "0.5500553", "0.54983515", "0.54708004", "0.54649293", "0.53893703", "0.53882927", "0.538468", "0.5383726", "0.53825295", "0.5361664", "0.5300716", "0.5273187", "0.52588874", "0.52583784", "0.5238543", "0.5232245", "0.5225853", "0.52188545", "0.51918197", "0.5191751", "0.51914316", "0.5183047", "0.51724756", "0.51507497", "0.5150172", "0.5149598", "0.5148789", "0.5141985", "0.5137608", "0.51351714", "0.51106536", "0.51072633", "0.51048875", "0.5098135", "0.5096546", "0.50925225", "0.5087318", "0.50479794", "0.5041129", "0.5038636", "0.50358343", "0.5035505", "0.502429", "0.5017244", "0.5000672", "0.50005907", "0.49998713", "0.4993399", "0.49855664", "0.49780127", "0.49747008", "0.49655035", "0.4961538", "0.4958402", "0.4958133", "0.49496442", "0.49482518", "0.49441236", "0.4941714", "0.49371776", "0.49335185", "0.49299636", "0.49263027", "0.4923049", "0.4918328", "0.49179226", "0.49166998", "0.49160945", "0.49160945", "0.49039942", "0.48960453", "0.48935783", "0.4891641", "0.48777273", "0.4874425", "0.4868815", "0.48655248", "0.48636875", "0.48502353", "0.48472425", "0.48442507", "0.48442507", "0.48412988", "0.4839468", "0.483896", "0.4834165", "0.4833304" ]
0.51675516
36
This function subscribes to the primaryCam topic and updates its state in the global scope.
def primaryCamCallback(msg):
    # ROS subscriber callback: cache the latest primaryCam message in a module-global.
    global primaryCamString
    primaryCamString = msg.data
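For context, the startNode() snippet among this record's negatives registers this callback with rospy.Subscriber(primaryCamTopic, String, primaryCamCallback). A minimal, self-contained sketch of that wiring follows; the topic name, node name, and initial value are assumptions for illustration, not part of the original source:

import rospy
from std_msgs.msg import String

primaryCamTopic = "/primary_cam"  # assumed topic name; defined elsewhere in the real node
primaryCamString = ""             # module-global state written by the callback

def primaryCamCallback(msg):
    # Store the latest primaryCam selection so other functions can read it.
    global primaryCamString
    primaryCamString = msg.data

if __name__ == "__main__":
    rospy.init_node("primary_cam_listener")  # assumed node name
    rospy.Subscriber(primaryCamTopic, String, primaryCamCallback)
    rospy.spin()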
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _on_connect(self, client, userdata, flags, rc):\n self.subscribe(self.topic)", "def on_connect():\n # There is now a connection\n subscribe_to_topic(\"pir\",\"Trigger\")", "def on_connect(client, userdata, flags, rc):\n client.subscribe(mqtt_Light_topic)", "def subscribe(self, topic):\n self.topic = topic\n self.client.on_connect = self.on_connect\n self.client.on_message = self.on_message\n\n self.client.loop_start()", "def on_subscribe(self, client, userdata, mid, granted_qos):\n\t\tprint (\"[{}] Client subscribed to {}\".format(\n\t\t\tint(time.time()),\n\t\t\tself.topic\n\t\t))\n\t\t#the following lines are here and not in on_connect() only for printing purpose\n\t\tif not self.printed_sub:\n\t\t\tself.printed_sub = True\n\t\t\tself.subscribe(\"measure/people\")", "def cbMqtt_on_subscribe(client, userdata, mid, granted_qos):\n # logger.debug('Subscribed to MQTT topic with message id %d', mid)\n pass", "def on_subscribe( client, userdata, mid, granted_qos ):\n logging.info( \"Topic successfully subcribed with QoS: %s\" %granted_qos )", "def on_connect(self, client, userdata, flags, rc):\n\n logger.info(f'Connected to {self.topic} with result code {rc}')\n # self.client.publish('Augmented/A.V.A.', str(rc)) # For return the connection situation to the subscriber device.\n if rc == 0:\n self.is_connected = True\n self.client.subscribe(self.topic)", "def subscribe(self, topic):\n\t\tself.topic=topic\n\t\tself.client.subscribe(self.topic)", "def on_connect(client, userdata, flags, rc):\n print(f\"Re/Suscribing to TOPIC: {TOPIC}\")\n client.subscribe(TOPIC)\n if rc == 0:\n print(f'Connected OK Returned code={rc}')\n else:\n print('Bad connection Returned code={rc}')", "def _subscribe_update_callback(self, client, userdata, message):\n logger.info('Message recieved from {} topic'.format(message.topic))\n payload = message.payload\n try:\n payload_dict = json.loads(payload)\n light_data = payload_dict['current']['state']['desired']\n if self.light.needs_updating(light_data):\n self.light.update_lights(light_data)\n reported_payload = {\n 'state': {\n 'reported': self.light.current_settings()\n }\n }\n JSON_payload = json.dumps(reported_payload)\n self.shadowClient.publish(update_topic, JSON_payload, 0)\n except ValueError:\n logger.error('Value error')\n logger.info(payload)\n except Exception as e:\n logger.error(e.message)", "def on_subscribe(self, mqtt_client, userdata, mid, granted_qos):\n logging.debug(\"DEBUG - subscribe ack received\")", "def mqtt_on_connect(client, userdata, flags, rc):\n logging.debug('successfully connected to mqtt broker')\n client.subscribe(config['mqtt']['subscribe_topic'])", "def starup(self, sender, **kwargs):\n self._initialize_devices()\n for device_topic in device_topic_dict:\n _log.debug('Subscribing to ' + device_topic)\n self.vip.pubsub.subscribe(peer='pubsub',\n prefix=device_topic,\n callback=self.on_analysis_message)", "def on_connect(self, client, userdata, flags, rc):\n# client.subscribe(\"power_meter/status/#\")\n client.subscribe(self.mqtt_topic_status)\n client.subscribe(self.mqtt_topic_electricity + '/#')\n client.subscribe(self.mqtt_topic_gas + '/#')\n client.subscribe(self.mqtt_topic_water + '/#')\n self.mqtt_client.publish(self.mqtt_topic_last_will, \"online, \" + str(self.dconn), qos=0, retain=True)\n self.connected = True\n self.log.warning(\"Connected with result code: \" + str(rc))\n self.log.info(\"Connected to: \" + MQTT_SERVER)", "def on_connect(client, userdata, flags, rc):\n if rc == 0:\n client.subscribe(topic_subscribe)\n 
print(\"connected OK with returned code=\", rc)\n else:\n print(\"Bad connection with returned code=\", rc)", "def on_connect(client, userdata, flags, rc):\n print(\"Connected with with mqtt server: \" + str(rc))\n client.subscribe(\"clients/#\")", "def on_connect(client, userdata, flags, rc):\n print('Connected with result code ' + str(rc))\n client.subscribe(MQTT_TOPIC)", "def on_connect(client, userdata, flags, rc):\n print('Connected with result code ' + str(rc))\n client.subscribe(MQTT_TOPIC)", "def on_connect(self, client, userdata, flags, rc):\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n sleep(5) # quick delay\n self.client.subscribe(self.subControls)\n self.client.subscribe(self.subSettings)", "def camera_listener(self):\n camera_sub_cb_grp = ReentrantCallbackGroup()\n self.create_subscription(CameraMsg,\n constants.CAMERA_MSG_TOPIC,\n self.camera_callback,\n 10,\n callback_group=camera_sub_cb_grp)\n display_img_sub_cb_grp = ReentrantCallbackGroup()\n self.create_subscription(Image,\n constants.DISPLAY_MSG_TOPIC,\n self.display_callback,\n 10,\n callback_group=display_img_sub_cb_grp)", "def on_connect(client, interface, flags, rc):\n logger.info(\"Connected with result code \" + str(rc))\n for i in Const.sub_topics:\n client.subscribe(i)\n logger.info(\"Successfully subscribed to \" + i)", "def on_connect(client, userdata, flags, rc):\n print('Connected with result code ' + str(rc))\n client.subscribe(MQTT_TOPIC, qos=1)", "def subscribe(self):\n if not self._subscribed and self._connected:\n if ATTR_STREAM_ID not in self.data:\n msg = self._create_message(strings.SUB_MSG)\n self.write(msg)\n else:\n msg = self._create_message(strings.RESUB_MSG)\n self.write(msg)\n self._subscribed = True", "def subscribe( self, topic ):\n logging.info( \"Subscribing to topic %s\" %topic )\n try:\n self.client.subscribe( topic )\n except Exception as error:\n print( error )", "def on_subscribe(\n client: mqtt.Client,\n userdata: Any,\n mid: int,\n granted_qos: int,\n properties: Properties = None,\n ) -> None:\n logging.info(\n f\"Successfully subscribed to topic: mid={mid}, granted qos={granted_qos}, properties={properties}\"\n )", "def cameraCallback(self, data):\n if not self.isReady:\n cvImage, self.imageInfo['shape'] = u.getCVImage(data)\n if self.measuring is not None:\n self.list, cvImage, self.isReady = self.measuring.getListObjects(cvImage)\n # preview topic /see_main\n msg_image = u.getMsgImage(cvImage)\n self.pub_view_main.publish(msg_image)\n else:\n if self.imageInfo['shape'] is not None:\n self.init()\n else:\n rospy.logerr(\"no video stream. 
check camera's topic!\")", "def on_connect(client, userdata, flags, rc):\n if rc == 0:\n logging.info(\"Connected\")\n else:\n logging.warning(\"Connection issue - result code \"+str(rc))\n\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n client.subscribe(config.topic_set)\n publish_status(client) # update the status on connect", "def subscribe(receiver):", "def subscribe(receiver):", "def subscribe(receiver):", "def subscribe(client, mqtt_topic):\n\n print()\n print(\"Subscribe\")\n print(\"================================================\")\n print()\n\n # Subscribe to the config topic.\n print(\"Subscribing\")\n print(mqtt_topic)\n print()\n client.subscribe(mqtt_topic, qos=1)", "def on_connect(self):\n log.info(\"Stream connected\")", "def subscribe(self, broker):\n if self.subscribed == False:\n for attr in self.parm_list:\n if attr.direction == attr.SUB:\n self.logging.debug(\"Subscribing: \"+attr.label)\n self.mqtt_client.subscribe(attr.topic)\n self.subscribed = True\n else:\n self.logging.debug(\"Already subscribed ... ignoring\")", "def subscribe_callback(self, scan: LaserScan): \n \n pub_msg = Range()\n pub_msg.range = self.calc_distance_to_wall(scan)\n print(\"min_distance front: \", pub_msg.range)\n\n # Here I am publishing min_distance_front\n self.control_publisher.publish(pub_msg)", "def on_connect(client, userdata, flags, rc):\n print(\"MQTT Connected with result code \" + str(rc))\n if rc == 0:\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n topic = \"building/dgm/command\"\n client.subscribe(topic, qos=2)\n print(\"MQTT Subscribed to \" + topic)", "def StartSubscriptions(self):\n rospy.Subscriber('/drivers/dvl', Dvl, self.dvl_callback)\n rospy.Subscriber('/drivers/imu', Imu, self.imu_callback)\n rospy.Subscriber('/reference/depth', Position, self.refDepth_callback)\n rospy.Subscriber('/reference/speed', Speed, self.refSpeed_callback)\n rospy.Subscriber('/reference/rpy', Euler, self.refRpy_callback)\n rospy.Subscriber('/reference/ll', Position, self.refLL_callback)\n rospy.Subscriber('/control/trackers_enabled', Trackers, self.trackersControl_callback)", "def mqtt_on_message(client, userdata, msg):\n logging.debug('mqtt message received for topic %s', msg.topic)\n image_queue.put(msg.payload)", "def on_connect(client, userdata, flags, rc):\n\t# subscribe to the LEDs topic when connected\n\tclient.subscribe(\"SNHU/IT697/leds\")", "def subscribeConsumer(consumer):", "def __initSubscribers(self):\n\t\t\n\t\t# Drone estimated pose (from FCU)\n\t\tself.__subs['pose'] = MySubscriber('mavros/local_position/pose',\n\t\t\t\t\t\t\t\t\t\t\tPoseStamped)\n\t\t\n\t\t# Drone state (connected, armed, mode)\n\t\tself.__subs['state'] = MySubscriber('mavros/state',\n\t\t\t\t\t\t\t\t\t\t\tState)", "def on_connect(client, userdata, flags, rc):\n print(\"Connected with result code \"+str(rc))\n\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n client.subscribe(MQTT_PATH)", "def on_publish(unused_client, unused_userdata, unused_mid):\n print('on_publish')\n status_light.on()", "def _on_received(self):\n self._call_subscribers(on_level=0xFF)\n publish_topic(self._on_subscriber_topic, on_level=0xFF)", "def _on_received(self):\n self._call_subscribers(on_level=0xFF)\n publish_topic(self._on_subscriber_topic, on_level=0xFF)", "def secondayCamCallback(msg):\n\n global 
secondaryCamString\n secondaryCamString = msg.data", "def on_publish(client, userdata, mid):\n print(\"Message Published.\")", "def callback(self, msg):\n if not self.has_cloud:\n self.active_cloud_msg = msg\n self.has_cloud = True", "def subscribe(self, subject):\n pass", "def subscribe(self):\n fd = libplasma.subscribe(self.conn)\n self.notification_sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)\n # Make the socket non-blocking.\n self.notification_sock.setblocking(0)", "def pub_callback(self, pub):\n self.publish_callback_value = pub", "def subscribe_topic(self):\n req = {\n \"op\": \"subscribe\",\n \"args\": [\n \"instrument\",\n \"trade\",\n \"orderBook10\",\n \"execution\",\n \"order\",\n \"position\",\n \"margin\",\n ],\n }\n self.send_packet(req)", "def subscribe_on(self, callback: callable):\n subscribe_topic(callback, self._on_subscriber_topic)", "def mqtt_sub_callback(self, client, userdata, message):\n\t#def mqtt_sub_callback(self, message):\n\n\t\t# Decode the message using UTF-8 and convert it\n\t\t# to 'string' datatype\n\t\tpayload = str(message.payload.decode(\"utf-8\"))\n\n\t\trospy.loginfo(\"[BRIDGE] Message Received from MQTT\")\n\n\t\t# Give the appropiate values to the contents of the message\n\t\t# that will be published to '/ros_iot_bridge/mqtt/sub'\n\t\tmsg_mqtt_sub = msgMqttSub()\n\t\tmsg_mqtt_sub.timestamp = rospy.Time.now()\n\t\tmsg_mqtt_sub.topic = message.topic\n\t\tmsg_mqtt_sub.message = payload\n\n\t\t# Publish the message\n\t\tself._handle_ros_pub.publish(msg_mqtt_sub)\n\n\t\t# Upload to Google Sheet\n\t\tret = self.update_gsheet(\"None\", True, payload)", "def subscribe_on(self, callback: callable):\n topic = f\"{self._subscriber_topic}_on\"\n subscribe_topic(callback, topic)", "def on_message(self, client, userdata, msg):\n\n data = json.loads(msg.payload.decode(\"utf-8\"))\n if debug: print(\"topic: \", msg.topic, \" payload:\", data)\n #print \"Received: \", data\n if msg.topic == self.subControls:\n self.controller.setpoint = int(data['temp'])\n status_old = self.controller.status\n if data['mode'] == \"auto\" or data['mode'] == \"cool1\" or data['mode'] == \"cool2\" or data['mode'] == \"cool3\":\n self.controller.status = 1\n elif data['mode'] == \"off\":\n self.controller.status = 0\n if status_old and self.controller.status: onoff = False\n elif status_old and not self.controller.status: onoff = True\n elif not status_old and self.controller.status: onoff = True\n else: onoff = False\n self.controller.updateControls(onoff = onoff, radio=False)\n\n elif msg.topic == self.subSettings :\n self.controller.temp_interval = int(data['temp-res'])\n self.controller.energy_interval = int(data['energy-res'])\n self.controller.updateIntervals()\n else:\n pass", "def on_connect(client, userdata, flags, return_code):\n\n if return_code != 0:\n print(\"Connected with result code: \", str(return_code))\n else:\n client.connected_flag=True\n client.publish(status_topic, \"Online\", retain=True)", "def on_connect(self, mqtt_client, userdata, flags, rc):\n global connack\n logging.debug(\"DEBUG - Connected to broker\")\n connack = True", "def subscribe(receiver, updateInterval=10):", "def cbMqtt_on_connect(client, userdata, flags, rc):\n if rc == 0:\n logger.debug('Connected to %s: %s', str(mqtt), userdata)\n setup_mqtt_filters()\n mqtt_publish_fan_state()\n else:\n logger.error('Connection to MQTT broker failed: %s (rc = %d)',\n userdata, rc)", "def on_message(self, client, userdata, msg):\n\n # that was the big fault. 
calling client.publish under the on_message() create a conflict!!\n # I found the solution via using different ports. And so I don,'t need use this following line here.\n # self.publish(self.topic, msg.payload) # for directly return the incoming message\n msg.payload = msg.payload.decode(\"utf-8\")\n\n self.incoming_message['message'] = json.loads(msg.payload) # json converting cause of mqtt's data transfer limit.\n self.incoming_message['is_used'] = False", "def _global_pose_cb(self, msg):\n self.global_pose = msg", "def subscribe_mqtt(self, topic):\n if topic not in self.subscriptions:\n self.subscriptions.append(topic)\n self.mqtt.subscribe(topic)", "def enable_subscription():\n client = KConsumer(config=subscriber_config)\n counter = 0\n while 1:\n data = client.consume()\n if data:\n print(\"Received Data\", counter)\n class_label = inference_on_data(data.value)\n publish_response(class_label)", "def on_connect(self, client, userdata, flags, rc):\n logging.info('Connection to MQTT Broker established with status {}'.format(rc))", "def output_topic_callback(self, msg):\n with self.callback_lock:\n if self._time_received_input != 0:\n # Get actual time from ROS\n time_now = self.node.get_clock().now().nanoseconds\n\n # Compute the amount of time elapsed from receiving the last\n # message in the input topic\n measure = time_now - self._time_received_input\n\n # Transform from nanoseconds to milliseconds\n measure = measure / (1000 * 1000)\n\n publish_msg = Int64()\n publish_msg.data = int(measure)\n\n # Publish the measurement\n self._publisher.publish(publish_msg)\n\n self._time_received_input = 0", "def on_publish( client, userdata, mid ):\n logging.info( \"Data published successfully.\" )", "def on_connect_local(local_client, userdata, flags, rc):\n if rc == 0:\n local_client.connected_flag = True\n print(\"Connected local OK returned code = \", rc)\n local_client.subscribe(mqtt_topic)\n else:\n print(\"Bad connection local Returned code = \", rc)", "def on_publish(client: mqtt.Client, userdata: Any, mid: int) -> None:\n logging.info(f\"Successfully published a message: mid={mid}\")", "def on_connect(unused_client, unused_userdata, unused_flags, rc):\n print('on_connect', mqtt.connack_string(rc))\n\n # This is the topic that the device will receive configuration updates on.\n mqtt_config_topic = '/devices/{}/config'.format(device_id)\n # Subscribe to the config topic.\n client.subscribe(mqtt_config_topic, qos=1)\n\n status_light.on()", "def on_message(client, userdata, message):\n #global dc_flag\n\n try:\n print(\"Receiving messages\")\n msg = message.payload\n\n #if dc_flag:\n #remote_client.connect(remote_mqtt_host, mqtt_port, 60)\n remote_client.publish(mqtt_topic, payload=msg, qos=0, retain=False)\n except:\n print(\"Error in receiving messages\")", "def __run_client(self):\n\n self._client = CoapClient(server_hostname=self._hostname, server_port=self._port, src_port=self._src_port)\n self._client_running = True\n\n if self.use_polling:\n super(CoapSensor, self).on_start()\n else:\n self.observe_topic()", "def laser_cb(self, msg):\n #rospy.loginfo(\"Received new scan\")\n self.laser = msg", "def on_next(self, msg):\n # publish the message to the topics\n retain = msg.retain if hasattr(msg, 'retain') else False\n for (topic, qos) in self.topics:\n self.client.publish(topic, msg, qos, retain)", "def _subscribe(self):\n self.subscribed = True\n self.subscribe_date = now()\n self.unsubscribed = False", "def subscribe(self, topic: str, call_back):\n self.callback_dict[topic] = 
call_back\n self.client.subscribe(topic)", "def on_connect(self, client, userdata, flags, rc):\n\n\t\tself.subscribe(\"system\")\n\t\tprint (\"[{}] Client connected\".format(\n\t\t\tint(time.time())\n\t\t))", "def subscribe(receiver, updateInterval=None):", "def on_publish(self, mqtt_client, userdata, mid):\n logging.debug(\"DEBUG - publish ack received\")", "def on_connected(self):\n logger.info('connection to redis resumed')\n for chid in self.clients.iterkeys():\n self.subscriber.psubscribe(chid)", "def _subscribe(topic:str) -> None:\n\t\t\tif topic in self.subscribedTopics:\n\t\t\t\tself.messageHandler and self.messageHandler.logging(self.mqttClient, logging.WARNING, f'MQTT: topic already subscribed: {topic}')\n\t\t\t\treturn\n\t\t\tif (r := self.mqttClient.subscribe(topic))[0] == 0:\n\t\t\t\tt = MQTTTopic(topic = topic, mid=r[1], callback=callback, callbackArgs=kwargs)\n\t\t\t\tself.subscribedTopics[topic] = t\n\t\t\telse:\n\t\t\t\tself.messageHandler and self.messageHandler.logging(self.mqttClient, logging.ERROR, f'MQTT: cannot subscribe: {r[0]}')", "def onSubscribed(self, connection:MQTTConnection, topic:str) -> bool:\n\t\tconnection.subscribedCount += 1\n\t\treturn True", "def _on_mqtt_message(\n self, client: mqtt.Client, userdata: str, message: mqtt.MQTTMessage\n ) -> None:\n self.log.debug(f\"Received message on topic: {message.topic}\")\n self.inbound_message_listener(Message(message.topic, message.payload))", "def poseCallback(self, msg):\n\n state = self.rcs.getState()\n state[0] = msg.pose.position.x\n state[1] = msg.pose.position.y\n quat = np.array((msg.pose.orientation.x,\n msg.pose.orientation.y,\n msg.pose.orientation.z,\n msg.pose.orientation.w))\n # yaw\n (_,_,yaw) = euler_from_quaternion(quat)\n state[2] = yaw\n\n self.rcs.setState(state)\n self.rcs.runScan()", "def on_connect(mqttc, mosq, obj, rc):\n print(\"Connected with result code:\"+str(rc))\n # subscribe for all devices of user\n mqttc.subscribe('+/devices/+/up')\n mqttc.subscribe('+/devices/+/events/down/sent')\n if rc != 0:\n sys.exit('Could not connect to server. 
\\n Result code: ' + str(rc))", "def on_connect(client, userdata, flags, return_code):\n # pylint: disable=unused-argument\n if return_code == 0:\n _LOGGER.debug(\"Connected with result code: %s\", return_code)\n client.subscribe(userdata.status_topic)\n\n userdata.connection_callback(True)\n else:\n _LOGGER.error(\"Connection error: %s\",\n MQTT_RETURN_CODES[return_code])\n userdata.connection_callback(False)", "def cam_callback(msg):\n #cam_window_name = \"Baxter Video Feed\"\n bridge = CvBridge() #instantiate CvBridge\n img_bgr = bridge.imgmsg_to_cv2(msg, \"bgr8\") #ROS Image msg to OpenCV2\n self.img = img_bgr", "async def subscribe(self, topic: str, callback: aiowamp.SubscriptionHandler, *,\n match_policy: aiowamp.MatchPolicy = None,\n node_key: str = None,\n options: aiowamp.WAMPDict = None) -> int:\n ...", "def set_pub_sub(self):\n\n # Set trap check service client\n self.trap_cheq_srv = rospy.ServiceProxy(\"check_for_trap\", TrapCheck)\n\n # Set mix initiave controller output\n self.mix_cmd_pub = rospy.Publisher(\"mix_cmd\", Bool, queue_size=50)\n\n # Set agent TS state subscriber\n rospy.Subscriber(\"ts_state\", TransitionSystemStateStamped, self.ts_state_callback, queue_size=50)\n\n # Set human input planner\n rospy.Subscriber(\"key_cmd\", Bool, self.teleop_cmd_callback, queue_size=50)\n\n # Set planner input subscriber\n rospy.Subscriber(\"planner_cmd\", Bool, self.planner_cmd_callback, queue_size=50)", "def on_open(self) -> None:\n\n channel = [{\"name\": \"level2\", \"product_ids\": list(self.products.keys())}]\n msg_subscribe = {\"type\": \"subscribe\", \"channels\": channel}\n\n subscribe_payload = json.dumps(msg_subscribe)\n self.ws.send(subscribe_payload)", "def on_connect(self, client, userdata, flags, rc):\n if rc == 0:\n self.log.info(u\"==> Connection to MQTT broquer successful, (Result code 0)\")\n try: \n if self.mqttsubtopics:\n self.log.debug(u\"==> Subscribe to Topic list: %s\" % format(self.mqttsubtopics))\n self.MQTTClient.subscribe(self.mqttsubtopics) # Subscribing in on_connect() means that if we lose the connection and econnect then subscriptions will be renewed.\n else:\n self.log.info(u\"==> No MQTT sensor topic to subscribe\")\n except ValueError: # Raises a ValueError if qos is not 0, 1 or 2, or if topic is None or has zero string length, or if topic is not a string, tuple or list.\n raise MQTTException(u\"### Subscribing, invalid Qos or Topic in list: %s\" % format(self.mqttsubtopics))\n else:\n raise MQTTException(u\"### Connection to MQTT broquer failed, (Result code \" + str(rc) + \": \" + CONNECT_RESULT_CODES[rc] + \")\")", "def on_topic_change(bot, trigger):\n\tif len(trigger.args) == 1:\n\t\treturn # Empty TOPIC gets the current topic.\n\tchannel = trigger.args[0]\n\tlog(bot, channel, '*** {} changes topic to \"{}\"', trigger.nick, trigger.args[1]);", "def subscribe(self, callback):\n self.channel.basic_consume(callback, queue=self.queue_name)\n self.channel.start_consuming()", "def on_connect(mqtt_client, obj, flags, rc):\n print(\"Connected\")", "def on_message(self, client, userdata, msg):\n st = datetime.datetime.fromtimestamp(msg.timestamp).strftime('%Y-%m-%d %H:%M:%S.%f')\n# print st[:-3], \":\", msg.topic, \":\", msg.payload\n\n # Note: Update_display from this function does not work\n if msg.topic == self.mqtt_topic_electricity:\n self.my_gui.update_electricity(float(msg.payload)) # kWh\n\n elif self.mqtt_topic_electricity in msg.topic: # covers /1 /2 ... 
etc.\n index = int(msg.topic.split('/')[-1])\n self.my_gui.update_electricity_hour(index, float(msg.payload))\n\n # -----------------------------------------------------------------\n elif msg.topic == self.mqtt_topic_water:\n self.my_gui.update_water(int(msg.payload)) # Litter\n\n elif self.mqtt_topic_water in msg.topic: \n index = int(msg.topic.split('/')[-1])\n self.my_gui.update_water_hour(index, int(msg.payload))\n\n # -----------------------------------------------------------------\n elif msg.topic == self.mqtt_topic_gas:\n self.my_gui.update_gas(float(msg.payload)) # m3, 10 Litters/msg\n\n elif self.mqtt_topic_gas in msg.topic:\n index = int(msg.topic.split('/')[-1])\n self.my_gui.update_gas_hour(index, float(msg.payload))\n\n# elif self.mqtt_topic_status == msg.topic:\n# # TODO\n# if \"online\" in msg.payload:\n# print \"A is online\"\n# elif \"offline\" in msg.payload:\n# print \"A is offline\"\n# print st[:-3], \":\", msg.topic, \":\", msg.payload\n\n self.my_gui.update_eur_total()", "def topic(self, msg):\n self.make_topic(msg, new_topic=msg.args[0])\n self.bot.log.info(\"Topic changed by \" + msg.user)", "def deviceClientOn():\n\n # Make the client\n client = makeClient()\n camera = PiCamera()\n camera.start_preview()\n sleep(2)\n\n # Continous looping for user images. If there is a face\n # The loop will pause and the user key will be evaluated.\n while True:\n\n # Take a piture and save it to a specific path\n takePicture(camera, IMGPATH)\n\n # If face detected it will send to AWS IoT Core\n if faceDetection(IMGPATH):\n\n print('face detected... sending to cloud')\n\n # Open the newly aquired face\n with open(IMGPATH, 'rb') as file:\n img = file.read()\n\n # open users for admin messages.\n # Ideally this would be in an MySQL server, but \n # that is a paid service in AWS and time consuming\n with open(PATH_TO_USERS) as users:\n readData = json.load(users)\n \n # We need to encode the image to be in a byte format for MQTT\n data = base64.b64encode(img)\n\n # Create our dictionary to send the data in\n message = {\n \"image\" : data.decode('utf-8'),\n \"faceCollection\" : FACE_COLLECTION_ID,\n \"admin_list\" : readData['admin_list'],\n \"device_id\" : DEVICENAME\n }\n\n # Send to cloud\n client.publish(topic, json.dumps(message), 1)\n\n # Buffer for processing time\n # Sleep for 30 seconds as camera cooldown / as to not spam\n # the AWS service ($$$)\n print('Waiting...')\n sleep(30)\n else: \n # No face was detected, we sleep only for 10 seconds\n print(\"No face found... Looking again in 10 seconds\")\n sleep(10)", "def on_message(client, userdata, message): \n print(\"Topic: \" + message.topic + \" Message: \" + message.payload.decode('utf-8'))", "def pose_cb(self, msg):\n self.current_pose = msg.pose", "def on_message(self, client, userdata, message):\n\t\tself.message = message\n\t\tmqtt_msg = mqttJsonLoad(self.message.payload)\n\t\t\n\t\tprint (\"[{}] Message arrived:\\n\\t\\tTopic: {}\\n\\t\\tMessage: {}\".format(\n\t\t\tint(time.time()), \n\t\t\tmessage.topic, \n\t\t\tmessage.payload\n\t\t))\n\t\t\n\t\tif self.message.topic == \"measure/people\":\n\t\t\trpi.updatePendingJson(\"people_inside\", mqtt_msg, \"data\")\n\t\telif self.message.topic == \"system\":\n\t\t\tstart()" ]
[ "0.69074756", "0.6543661", "0.64770025", "0.6473724", "0.64547884", "0.6375277", "0.63147134", "0.6292447", "0.6290028", "0.61770564", "0.6158316", "0.61119586", "0.60491896", "0.60434437", "0.60176116", "0.60137093", "0.59834874", "0.5942834", "0.5942834", "0.59380347", "0.593585", "0.59330964", "0.59272164", "0.59143656", "0.59026986", "0.58767396", "0.5862388", "0.5853787", "0.5848923", "0.5848923", "0.5848923", "0.58204335", "0.5816063", "0.5810985", "0.5803085", "0.57841986", "0.5763605", "0.5736896", "0.56761825", "0.5674986", "0.5667357", "0.5658864", "0.5642329", "0.56377906", "0.56377906", "0.5633568", "0.56288576", "0.56110424", "0.5610265", "0.56014764", "0.55942893", "0.55942404", "0.55853677", "0.5577577", "0.55710465", "0.55459565", "0.5524189", "0.55203944", "0.55197525", "0.5489137", "0.5487585", "0.5477764", "0.54769325", "0.54688984", "0.54659534", "0.5461834", "0.5442961", "0.5438181", "0.5419392", "0.5416838", "0.54154146", "0.54107976", "0.5410697", "0.5409108", "0.5397538", "0.5392014", "0.5381859", "0.5378307", "0.5376391", "0.53685975", "0.53581434", "0.5357315", "0.53514814", "0.5343496", "0.5340968", "0.533956", "0.53209525", "0.5319543", "0.53132254", "0.52993876", "0.5286288", "0.5286138", "0.5279573", "0.52787197", "0.5270378", "0.5267984", "0.5261278", "0.52605194", "0.5252274", "0.52449447" ]
0.64189833
5
This function subscribes to the secondaryCam topic and updates its state in the global scope.
def secondayCamCallback(msg):
    global secondaryCamString
    secondaryCamString = msg.data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _on_connect(self, client, userdata, flags, rc):\n self.subscribe(self.topic)", "def _subscribe_update_callback(self, client, userdata, message):\n logger.info('Message recieved from {} topic'.format(message.topic))\n payload = message.payload\n try:\n payload_dict = json.loads(payload)\n light_data = payload_dict['current']['state']['desired']\n if self.light.needs_updating(light_data):\n self.light.update_lights(light_data)\n reported_payload = {\n 'state': {\n 'reported': self.light.current_settings()\n }\n }\n JSON_payload = json.dumps(reported_payload)\n self.shadowClient.publish(update_topic, JSON_payload, 0)\n except ValueError:\n logger.error('Value error')\n logger.info(payload)\n except Exception as e:\n logger.error(e.message)", "def on_connect(client, userdata, flags, rc):\n client.subscribe(mqtt_Light_topic)", "def subscribeConsumer(consumer):", "def on_connect():\n # There is now a connection\n subscribe_to_topic(\"pir\",\"Trigger\")", "def subscribe(receiver):", "def subscribe(receiver):", "def subscribe(receiver):", "def on_subscribe(self, client, userdata, mid, granted_qos):\n\t\tprint (\"[{}] Client subscribed to {}\".format(\n\t\t\tint(time.time()),\n\t\t\tself.topic\n\t\t))\n\t\t#the following lines are here and not in on_connect() only for printing purpose\n\t\tif not self.printed_sub:\n\t\t\tself.printed_sub = True\n\t\t\tself.subscribe(\"measure/people\")", "def cbMqtt_on_subscribe(client, userdata, mid, granted_qos):\n # logger.debug('Subscribed to MQTT topic with message id %d', mid)\n pass", "def on_subscribe( client, userdata, mid, granted_qos ):\n logging.info( \"Topic successfully subcribed with QoS: %s\" %granted_qos )", "def subscribe(receiver, updateInterval=10):", "def subscribe(self, topic):\n self.topic = topic\n self.client.on_connect = self.on_connect\n self.client.on_message = self.on_message\n\n self.client.loop_start()", "def subscribe(receiver, updateInterval=None):", "def subscribe(self, topic):\n\t\tself.topic=topic\n\t\tself.client.subscribe(self.topic)", "def on_connect(self, client, userdata, flags, rc):\n\n logger.info(f'Connected to {self.topic} with result code {rc}')\n # self.client.publish('Augmented/A.V.A.', str(rc)) # For return the connection situation to the subscriber device.\n if rc == 0:\n self.is_connected = True\n self.client.subscribe(self.topic)", "def subscribe(self):\n if not self._subscribed and self._connected:\n if ATTR_STREAM_ID not in self.data:\n msg = self._create_message(strings.SUB_MSG)\n self.write(msg)\n else:\n msg = self._create_message(strings.RESUB_MSG)\n self.write(msg)\n self._subscribed = True", "def on_connect(client, userdata, flags, rc):\n print(f\"Re/Suscribing to TOPIC: {TOPIC}\")\n client.subscribe(TOPIC)\n if rc == 0:\n print(f'Connected OK Returned code={rc}')\n else:\n print('Bad connection Returned code={rc}')", "def output_topic_callback(self, msg):\n with self.callback_lock:\n if self._time_received_input != 0:\n # Get actual time from ROS\n time_now = self.node.get_clock().now().nanoseconds\n\n # Compute the amount of time elapsed from receiving the last\n # message in the input topic\n measure = time_now - self._time_received_input\n\n # Transform from nanoseconds to milliseconds\n measure = measure / (1000 * 1000)\n\n publish_msg = Int64()\n publish_msg.data = int(measure)\n\n # Publish the measurement\n self._publisher.publish(publish_msg)\n\n self._time_received_input = 0", "def _on_received(self):\n self._call_subscribers(on_level=0xFF)\n publish_topic(self._on_subscriber_topic, 
on_level=0xFF)", "def _on_received(self):\n self._call_subscribers(on_level=0xFF)\n publish_topic(self._on_subscriber_topic, on_level=0xFF)", "def on_connect(self, client, userdata, flags, rc):\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n sleep(5) # quick delay\n self.client.subscribe(self.subControls)\n self.client.subscribe(self.subSettings)", "def camera_listener(self):\n camera_sub_cb_grp = ReentrantCallbackGroup()\n self.create_subscription(CameraMsg,\n constants.CAMERA_MSG_TOPIC,\n self.camera_callback,\n 10,\n callback_group=camera_sub_cb_grp)\n display_img_sub_cb_grp = ReentrantCallbackGroup()\n self.create_subscription(Image,\n constants.DISPLAY_MSG_TOPIC,\n self.display_callback,\n 10,\n callback_group=display_img_sub_cb_grp)", "def on_connect(client, userdata, flags, rc):\n\t# subscribe to the LEDs topic when connected\n\tclient.subscribe(\"SNHU/IT697/leds\")", "def on_connect(self, client, userdata, flags, rc):\n# client.subscribe(\"power_meter/status/#\")\n client.subscribe(self.mqtt_topic_status)\n client.subscribe(self.mqtt_topic_electricity + '/#')\n client.subscribe(self.mqtt_topic_gas + '/#')\n client.subscribe(self.mqtt_topic_water + '/#')\n self.mqtt_client.publish(self.mqtt_topic_last_will, \"online, \" + str(self.dconn), qos=0, retain=True)\n self.connected = True\n self.log.warning(\"Connected with result code: \" + str(rc))\n self.log.info(\"Connected to: \" + MQTT_SERVER)", "def primaryCamCallback(msg):\n\n global primaryCamString\n primaryCamString = msg.data", "def on_connect(client, userdata, flags, rc):\n if rc == 0:\n client.subscribe(topic_subscribe)\n print(\"connected OK with returned code=\", rc)\n else:\n print(\"Bad connection with returned code=\", rc)", "def on_subscribe(self, mqtt_client, userdata, mid, granted_qos):\n logging.debug(\"DEBUG - subscribe ack received\")", "def starup(self, sender, **kwargs):\n self._initialize_devices()\n for device_topic in device_topic_dict:\n _log.debug('Subscribing to ' + device_topic)\n self.vip.pubsub.subscribe(peer='pubsub',\n prefix=device_topic,\n callback=self.on_analysis_message)", "def mqtt_sub_callback(self, client, userdata, message):\n\t#def mqtt_sub_callback(self, message):\n\n\t\t# Decode the message using UTF-8 and convert it\n\t\t# to 'string' datatype\n\t\tpayload = str(message.payload.decode(\"utf-8\"))\n\n\t\trospy.loginfo(\"[BRIDGE] Message Received from MQTT\")\n\n\t\t# Give the appropiate values to the contents of the message\n\t\t# that will be published to '/ros_iot_bridge/mqtt/sub'\n\t\tmsg_mqtt_sub = msgMqttSub()\n\t\tmsg_mqtt_sub.timestamp = rospy.Time.now()\n\t\tmsg_mqtt_sub.topic = message.topic\n\t\tmsg_mqtt_sub.message = payload\n\n\t\t# Publish the message\n\t\tself._handle_ros_pub.publish(msg_mqtt_sub)\n\n\t\t# Upload to Google Sheet\n\t\tret = self.update_gsheet(\"None\", True, payload)", "def laser_cb(self, msg):\n #rospy.loginfo(\"Received new scan\")\n self.laser = msg", "def mqtt_on_connect(client, userdata, flags, rc):\n logging.debug('successfully connected to mqtt broker')\n client.subscribe(config['mqtt']['subscribe_topic'])", "def StartSubscriptions(self):\n rospy.Subscriber('/drivers/dvl', Dvl, self.dvl_callback)\n rospy.Subscriber('/drivers/imu', Imu, self.imu_callback)\n rospy.Subscriber('/reference/depth', Position, self.refDepth_callback)\n rospy.Subscriber('/reference/speed', Speed, self.refSpeed_callback)\n rospy.Subscriber('/reference/rpy', Euler, self.refRpy_callback)\n 
rospy.Subscriber('/reference/ll', Position, self.refLL_callback)\n rospy.Subscriber('/control/trackers_enabled', Trackers, self.trackersControl_callback)", "def subscribe(self, broker):\n if self.subscribed == False:\n for attr in self.parm_list:\n if attr.direction == attr.SUB:\n self.logging.debug(\"Subscribing: \"+attr.label)\n self.mqtt_client.subscribe(attr.topic)\n self.subscribed = True\n else:\n self.logging.debug(\"Already subscribed ... ignoring\")", "def on_connect(client, interface, flags, rc):\n logger.info(\"Connected with result code \" + str(rc))\n for i in Const.sub_topics:\n client.subscribe(i)\n logger.info(\"Successfully subscribed to \" + i)", "def callback(self, msg):\n if not self.has_cloud:\n self.active_cloud_msg = msg\n self.has_cloud = True", "def on_connect(self):\n log.info(\"Stream connected\")", "def subscribe( self, topic ):\n logging.info( \"Subscribing to topic %s\" %topic )\n try:\n self.client.subscribe( topic )\n except Exception as error:\n print( error )", "def subscribe(self, subject):\n pass", "def on_connect(client, userdata, flags, rc):\n print(\"Connected with with mqtt server: \" + str(rc))\n client.subscribe(\"clients/#\")", "def subscribe_on(self, callback: callable):\n subscribe_topic(callback, self._on_subscriber_topic)", "def _create_subscriber(self, topic_name):\n if self._sub:\n self._sub.unregister()\n self._sub = rospy.Subscriber(topic_name, Image, self._image_callback)\n rospy.loginfo(\"Listening to %s -- spinning ..\" % self._sub.name)\n self._widget.setWindowTitle(\"Label plugin, listening to (%s)\" % self._sub.name)", "def on_connect(client, userdata, flags, rc):\n print('Connected with result code ' + str(rc))\n client.subscribe(MQTT_TOPIC)", "def on_connect(client, userdata, flags, rc):\n print('Connected with result code ' + str(rc))\n client.subscribe(MQTT_TOPIC)", "def on_connect(client, userdata, flags, rc):\n print('Connected with result code ' + str(rc))\n client.subscribe(MQTT_TOPIC, qos=1)", "def on_connect(client, userdata, flags, rc):\n if rc == 0:\n logging.info(\"Connected\")\n else:\n logging.warning(\"Connection issue - result code \"+str(rc))\n\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n client.subscribe(config.topic_set)\n publish_status(client) # update the status on connect", "def cameraCallback(self, data):\n if not self.isReady:\n cvImage, self.imageInfo['shape'] = u.getCVImage(data)\n if self.measuring is not None:\n self.list, cvImage, self.isReady = self.measuring.getListObjects(cvImage)\n # preview topic /see_main\n msg_image = u.getMsgImage(cvImage)\n self.pub_view_main.publish(msg_image)\n else:\n if self.imageInfo['shape'] is not None:\n self.init()\n else:\n rospy.logerr(\"no video stream. 
check camera's topic!\")", "def on_subscribe(\n client: mqtt.Client,\n userdata: Any,\n mid: int,\n granted_qos: int,\n properties: Properties = None,\n ) -> None:\n logging.info(\n f\"Successfully subscribed to topic: mid={mid}, granted qos={granted_qos}, properties={properties}\"\n )", "def enable_subscription():\n client = KConsumer(config=subscriber_config)\n counter = 0\n while 1:\n data = client.consume()\n if data:\n print(\"Received Data\", counter)\n class_label = inference_on_data(data.value)\n publish_response(class_label)", "def subscribe(receiver, catchup):", "def subscribe(client, mqtt_topic):\n\n print()\n print(\"Subscribe\")\n print(\"================================================\")\n print()\n\n # Subscribe to the config topic.\n print(\"Subscribing\")\n print(mqtt_topic)\n print()\n client.subscribe(mqtt_topic, qos=1)", "def on_open(self) -> None:\n\n channel = [{\"name\": \"level2\", \"product_ids\": list(self.products.keys())}]\n msg_subscribe = {\"type\": \"subscribe\", \"channels\": channel}\n\n subscribe_payload = json.dumps(msg_subscribe)\n self.ws.send(subscribe_payload)", "def cam_callback(msg):\n #cam_window_name = \"Baxter Video Feed\"\n bridge = CvBridge() #instantiate CvBridge\n img_bgr = bridge.imgmsg_to_cv2(msg, \"bgr8\") #ROS Image msg to OpenCV2\n self.img = img_bgr", "def subscribe_on(self, callback: callable):\n topic = f\"{self._subscriber_topic}_on\"\n subscribe_topic(callback, topic)", "def subscribe_callback(self, scan: LaserScan): \n \n pub_msg = Range()\n pub_msg.range = self.calc_distance_to_wall(scan)\n print(\"min_distance front: \", pub_msg.range)\n\n # Here I am publishing min_distance_front\n self.control_publisher.publish(pub_msg)", "def subscribe(self):\n fd = libplasma.subscribe(self.conn)\n self.notification_sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)\n # Make the socket non-blocking.\n self.notification_sock.setblocking(0)", "def subscribe_topic(self):\n req = {\n \"op\": \"subscribe\",\n \"args\": [\n \"instrument\",\n \"trade\",\n \"orderBook10\",\n \"execution\",\n \"order\",\n \"position\",\n \"margin\",\n ],\n }\n self.send_packet(req)", "def on_message(self, client, userdata, msg):\n\n data = json.loads(msg.payload.decode(\"utf-8\"))\n if debug: print(\"topic: \", msg.topic, \" payload:\", data)\n #print \"Received: \", data\n if msg.topic == self.subControls:\n self.controller.setpoint = int(data['temp'])\n status_old = self.controller.status\n if data['mode'] == \"auto\" or data['mode'] == \"cool1\" or data['mode'] == \"cool2\" or data['mode'] == \"cool3\":\n self.controller.status = 1\n elif data['mode'] == \"off\":\n self.controller.status = 0\n if status_old and self.controller.status: onoff = False\n elif status_old and not self.controller.status: onoff = True\n elif not status_old and self.controller.status: onoff = True\n else: onoff = False\n self.controller.updateControls(onoff = onoff, radio=False)\n\n elif msg.topic == self.subSettings :\n self.controller.temp_interval = int(data['temp-res'])\n self.controller.energy_interval = int(data['energy-res'])\n self.controller.updateIntervals()\n else:\n pass", "def __initSubscribers(self):\n\t\t\n\t\t# Drone estimated pose (from FCU)\n\t\tself.__subs['pose'] = MySubscriber('mavros/local_position/pose',\n\t\t\t\t\t\t\t\t\t\t\tPoseStamped)\n\t\t\n\t\t# Drone state (connected, armed, mode)\n\t\tself.__subs['state'] = MySubscriber('mavros/state',\n\t\t\t\t\t\t\t\t\t\t\tState)", "def mqtt_on_message(client, userdata, msg):\n logging.debug('mqtt message received 
for topic %s', msg.topic)\n image_queue.put(msg.payload)", "def on_connect(client, userdata, flags, rc):\n print(\"MQTT Connected with result code \" + str(rc))\n if rc == 0:\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n topic = \"building/dgm/command\"\n client.subscribe(topic, qos=2)\n print(\"MQTT Subscribed to \" + topic)", "def subscribe(self, topic: str, call_back):\n self.callback_dict[topic] = call_back\n self.client.subscribe(topic)", "def __init__(self,sub_topic=\"\",pub_topic=\"\",data_type=None,tag=\"\",alt_type=None):\n self.sub_topic=sub_topic;\n self.pub_topic=pub_topic;\n self.data_type=data_type;\n self.alt_type=alt_type;\n self.tag=tag;\n self.subscriber=rospy.Subscriber(self.sub_topic+self.tag,self.data_type, self.callback_function,queue_size=20);\n self.message_publisher=None;", "def on_connect(client, userdata, flags, rc):\n print(\"Connected with result code \"+str(rc))\n\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n client.subscribe(MQTT_PATH)", "def input_topic_callback(self, msg):\n with self.callback_lock:\n if self._time_received_input != 0:\n warn = \"[TimeEstimatorTopic] Input time overwritten by another\"\\\n + \" input message, consider slowing down the rate of \" \\\n + \"the published data to let the receiving node handle it\"\n warning(self.node, warn)\n self._time_received_input = self.node.get_clock().now().nanoseconds", "def onSubscribed(self, connection:MQTTConnection, topic:str) -> bool:\n\t\tconnection.subscribedCount += 1\n\t\treturn True", "def on_next(self, msg):\n # publish the message to the topics\n retain = msg.retain if hasattr(msg, 'retain') else False\n for (topic, qos) in self.topics:\n self.client.publish(topic, msg, qos, retain)", "def subscribe_mqtt(self, topic):\n if topic not in self.subscriptions:\n self.subscriptions.append(topic)\n self.mqtt.subscribe(topic)", "def _init_subscribers(self, synergy_input_topic, num_synergies=0,\n queue_size=1):\n\n # Main callback for arrays.\n self._subscriber_main = rospy.Subscriber(\n synergy_input_topic, Float32MultiArray, self._callback_main,\n queue_size=queue_size)\n rospy.loginfo('Created main subscriber {}'.format(synergy_input_topic))\n\n # Component subscriber, using individual (nested) topic names and a\n # simple Float.\n for idx in range(num_synergies):\n topic = '{}/syn_{}'.format(synergy_input_topic, idx)\n\n subscriber = rospy.Subscriber(\n topic, Float32, self._callback_component, idx,\n queue_size=queue_size)\n self._subscriber_components.append(subscriber)\n rospy.loginfo(' Created component subscriber {}'.format(topic))\n pass", "def subscribe(self, callback):\n self.channel.basic_consume(callback, queue=self.queue_name)\n self.channel.start_consuming()", "async def subscribe(self, topic: str, callback: aiowamp.SubscriptionHandler, *,\n match_policy: aiowamp.MatchPolicy = None,\n node_key: str = None,\n options: aiowamp.WAMPDict = None) -> int:\n ...", "def subscribe(self, sub, chan, auth=\"\", cipher=\"\", use_ssl=False):\r\n self.sub = sub\r\n self.chan = chan\r\n self.auth = auth\r\n self.cipher = cipher\r\n self.use_ssl = use_ssl\r\n\r\n # force disconnect of currently active longpoll.\r\n self.hup()", "def subscribe2API():\n\tconnection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))\n\tchannel = connection.channel()\n\n\tchannel.queue_declare(queue='ToAPIgatewayQueue')\n\n\tdef callback(ch, 
method, properties, body):\n\t\tif(body != ''):\t\n\t\t\tconnection.close()\n \t\tpublish2apiHandler(body)\n\t\t\t\n\t\t\t\n\t\t\t\n\tchannel.basic_consume(callback, queue='ToAPIgatewayQueue', no_ack=True)\n\n\tprint(' [*] Waiting for messages. To exit press CTRL+C')\n\tchannel.start_consuming()\n\t\n\treturn", "async def subscribe(self, callback: Callable=None):\n LOGGER.info('Subscription added')\n await self._ros.send(self._subscribe_msg)", "def subscribeToEvent(eventName,subscriber,msgInterface):", "def stereo_callback(self, stereo_msg):\r\n start = time.time()\r\n self.cam0_curr_img_msg = stereo_msg.cam0_msg\r\n self.cam1_curr_img_msg = stereo_msg.cam1_msg\r\n\r\n # Build the image pyramids once since they're used at multiple places.\r\n self.create_image_pyramids()\r\n\r\n # Detect features in the first frame.\r\n if self.is_first_img:\r\n if not self.config.load_features_flag:\r\n self.initialize_first_frame()\r\n self.is_first_img = False\r\n # Draw results.\r\n # self.draw_features_stereo()\r\n else:\r\n if not self.config.load_features_flag:\r\n # Track the feature in the previous image.\r\n t = time.time()\r\n self.track_features()\r\n print('___track_features:', time.time() - t)\r\n t = time.time()\r\n\r\n # Add new features into the current image.\r\n self.add_new_features()\r\n print('___add_new_features:', time.time() - t)\r\n t = time.time()\r\n self.prune_features()\r\n print('___prune_features:', time.time() - t)\r\n t = time.time()\r\n # Draw results.\r\n # self.draw_features_stereo()\r\n print('___draw_features_stereo:', time.time() - t)\r\n t = time.time()\r\n\r\n print('===image process elapsed:', time.time() - start, f'({stereo_msg.timestamp})')\r\n\r\n if not self.config.load_features_flag:\r\n try:\r\n self.save_features() \r\n return self.publish()\r\n finally:\r\n self.cam0_prev_img_msg = self.cam0_curr_img_msg\r\n self.prev_features = self.curr_features\r\n self.prev_cam0_pyramid = self.curr_cam0_pyramid\r\n\r\n # Initialize the current features to empty vectors.\r\n self.curr_features = [[] for _ in range(self.config.grid_num)]\r\n else:\r\n self.load_features()\r\n return self.publish()", "def subscribe(self):\n if hasattr(self.bus, \"signal_handler\"):\n self.bus.signal_handler.subscribe()\n if hasattr(self.bus, \"console_control_handler\"):\n self.bus.console_control_handler.subscribe()", "def on_message(self, client, userdata, msg):\n st = datetime.datetime.fromtimestamp(msg.timestamp).strftime('%Y-%m-%d %H:%M:%S.%f')\n# print st[:-3], \":\", msg.topic, \":\", msg.payload\n\n # Note: Update_display from this function does not work\n if msg.topic == self.mqtt_topic_electricity:\n self.my_gui.update_electricity(float(msg.payload)) # kWh\n\n elif self.mqtt_topic_electricity in msg.topic: # covers /1 /2 ... 
etc.\n index = int(msg.topic.split('/')[-1])\n self.my_gui.update_electricity_hour(index, float(msg.payload))\n\n # -----------------------------------------------------------------\n elif msg.topic == self.mqtt_topic_water:\n self.my_gui.update_water(int(msg.payload)) # Litter\n\n elif self.mqtt_topic_water in msg.topic: \n index = int(msg.topic.split('/')[-1])\n self.my_gui.update_water_hour(index, int(msg.payload))\n\n # -----------------------------------------------------------------\n elif msg.topic == self.mqtt_topic_gas:\n self.my_gui.update_gas(float(msg.payload)) # m3, 10 Litters/msg\n\n elif self.mqtt_topic_gas in msg.topic:\n index = int(msg.topic.split('/')[-1])\n self.my_gui.update_gas_hour(index, float(msg.payload))\n\n# elif self.mqtt_topic_status == msg.topic:\n# # TODO\n# if \"online\" in msg.payload:\n# print \"A is online\"\n# elif \"offline\" in msg.payload:\n# print \"A is offline\"\n# print st[:-3], \":\", msg.topic, \":\", msg.payload\n\n self.my_gui.update_eur_total()", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image = msg\n light_wp, state = self.process_traffic_lights()\n #print(light_wp)\n #print(state)\n #print()\n\n '''\n Publish upcoming red lights at camera frequency.\n Each predicted state has to occur `STATE_COUNT_THRESHOLD` number\n of times till we start using it. Otherwise the previous stable state is\n used.\n '''\n our_msg = TL_State()\n our_light = TrafficLight()\n our_light.header = Header()\n our_light.header.stamp = rospy.Time.now()\n our_light.header.frame_id = '/world'\n if self.state != state:\n self.state_count = 0\n self.state = state\n elif self.state_count >= STATE_COUNT_THRESHOLD:\n self.last_state = self.state\n light_wp = light_wp if (state == TrafficLight.RED or state == TrafficLight.YELLOW) else -1\n self.last_wp = light_wp\n self.upcoming_red_light_pub.publish(Int32(light_wp))\n\n our_msg.waypoint = light_wp\n our_light.state = state\n our_msg.light = our_light\n self.custom_state_pub.publish(our_msg)\n else:\n self.upcoming_red_light_pub.publish(Int32(self.last_wp))\n\n our_msg.waypoint = self.last_wp\n our_light.state = self.last_state\n our_msg.light = our_light\n self.custom_state_pub.publish(our_msg)\n self.state_count += 1", "def on_message_device_cb(client, msg):\n device_cb_msg = str(msg.payload)\n if g.BOKEH_DEV:\n print(device_cb_msg)\n\n new_conn_status = None\n\n if device_cb_msg == 'successfully_connected':\n new_conn_status = True\n device_frequency(data_freq_sld.value)\n\n # ''' FOR TESTING/DEBUG\n if g.BOKEH_DEV:\n if os.path.isfile(FILE_PATH): os.remove(FILE_PATH)\n # '''\n\n elif device_cb_msg == 'already_connected':\n new_conn_status = True\n\n elif device_cb_msg == 'ok_to_unsubscribe':\n new_conn_status = False\n client.unsubscribe([g.t_measure, g.t_device_ctrl_cb])\n util.doc_next_tick(doc, util.add_subscription, client, 'measure')\n\n elif device_cb_msg in ['successfully_disconnected', 'no_control_allowed',\n 'esp_init', 'esp_interrupted']:\n new_conn_status = False\n\n if not stream_data_btn.active:\n util.doc_next_tick(doc, disable_stream)\n\n if new_conn_status is not None:\n util.doc_next_tick(doc, util.conn_status_update, conn_status, new_conn_status)", "def start_ispy_transition_listener(self,on_ispy_state_info_received):\n time.sleep(.2)\n self.ispy_to_ros_trans_subs = rospy.Subscriber(ISPY_GAME_TO_ROS_TRANSITION_TOPIC, String, on_ispy_state_info_received) \n print(\"transition listener\")\n rospy.spin()\n #rospy.spin()", "def on_connect(unused_client, unused_userdata, unused_flags, 
rc):\n print('on_connect', mqtt.connack_string(rc))\n\n # This is the topic that the device will receive configuration updates on.\n mqtt_config_topic = '/devices/{}/config'.format(device_id)\n # Subscribe to the config topic.\n client.subscribe(mqtt_config_topic, qos=1)\n\n status_light.on()", "def on_publish(unused_client, unused_userdata, unused_mid):\n print('on_publish')\n status_light.on()", "def on_connect(mqttc, mosq, obj, rc):\n print(\"Connected with result code:\"+str(rc))\n # subscribe for all devices of user\n mqttc.subscribe('+/devices/+/up')\n mqttc.subscribe('+/devices/+/events/down/sent')\n if rc != 0:\n sys.exit('Could not connect to server. \\n Result code: ' + str(rc))", "def sub_callbackmsg(self, msg):\n\n print (msg.message)\n self.received_msg = self.received_msg + [msg.message]\n print (self.received_msg)", "def subscribe_broker(self, subscriber):\n # Register given feed callback\n self._broker_subscribers.add(subscriber)", "def _registerSubscriber(self, callerId, topic, topicType, callerApi):\n if topic not in self.FilterSubscribedTopics:\n self.__docWriter.addSub(callerId, topic, topicType)", "def topic(self, msg):\n self.make_topic(msg, new_topic=msg.args[0])\n self.bot.log.info(\"Topic changed by \" + msg.user)", "def on_message(client, userdata, message):\n #global dc_flag\n\n try:\n print(\"Receiving messages\")\n msg = message.payload\n\n #if dc_flag:\n #remote_client.connect(remote_mqtt_host, mqtt_port, 60)\n remote_client.publish(mqtt_topic, payload=msg, qos=0, retain=False)\n except:\n print(\"Error in receiving messages\")", "def _subscribe(topic:str) -> None:\n\t\t\tif topic in self.subscribedTopics:\n\t\t\t\tself.messageHandler and self.messageHandler.logging(self.mqttClient, logging.WARNING, f'MQTT: topic already subscribed: {topic}')\n\t\t\t\treturn\n\t\t\tif (r := self.mqttClient.subscribe(topic))[0] == 0:\n\t\t\t\tt = MQTTTopic(topic = topic, mid=r[1], callback=callback, callbackArgs=kwargs)\n\t\t\t\tself.subscribedTopics[topic] = t\n\t\t\telse:\n\t\t\t\tself.messageHandler and self.messageHandler.logging(self.mqttClient, logging.ERROR, f'MQTT: cannot subscribe: {r[0]}')", "def _on_mqtt_message(\n self, client: mqtt.Client, userdata: str, message: mqtt.MQTTMessage\n ) -> None:\n self.log.debug(f\"Received message on topic: {message.topic}\")\n self.inbound_message_listener(Message(message.topic, message.payload))", "def pub_callback(self, pub):\n self.publish_callback_value = pub", "def connected(self):\n manager = self.manager()\n self.log().debug(\"Register [%s] callbacks\", self.name())\n\n manager.subscribeServerCallbacks(self, self.cfg().chatimg.servers or manager.SERVERS_ALL)", "def traffic_cb(self, msg):\n prev_red_light_waypoint = self.red_light_waypoint\n self.red_light_waypoint = msg.data if msg.data >= 0 else None\n if prev_red_light_waypoint != self.red_light_waypoint:\n if debugging:\n rospy.loginfo(\"TrafficLight changed: %s\", str(self.red_light_waypoint))\n if publish_on_light_change:\n self.update_and_publish() # Refresh if next traffic light has changed", "def on_publish(client, userdata, mid):\n print(\"Message Published.\")", "def on_connect(self, client, userdata, flags, rc):\n\n\t\tself.subscribe(\"system\")\n\t\tprint (\"[{}] Client connected\".format(\n\t\t\tint(time.time())\n\t\t))", "def subscribe(observer):", "def subscribe(observer):", "def cbMqtt_on_connect(client, userdata, flags, rc):\n if rc == 0:\n logger.debug('Connected to %s: %s', str(mqtt), userdata)\n setup_mqtt_filters()\n mqtt_publish_fan_state()\n else:\n 
logger.error('Connection to MQTT broker failed: %s (rc = %d)',\n userdata, rc)", "def update_mqtt_subscription(self, datastreams: Dict[str, OGCDatastream]):\n # Get the listening topics and run the subscriptions\n for ds in datastreams.values():\n top = ds.get_mqtt_topic()\n logging.debug(\"Subscribing to MQTT topic: \" + top)\n self._mqtt_subscriber.subscribe(top, DEFAULT_MQTT_QOS)" ]
[ "0.6671746", "0.63125056", "0.6236678", "0.6203599", "0.6199879", "0.61778575", "0.61778575", "0.61778575", "0.6139608", "0.6105276", "0.60968137", "0.60713357", "0.60546625", "0.6009579", "0.59927255", "0.596567", "0.59032744", "0.588785", "0.5863523", "0.5775574", "0.5775574", "0.57657975", "0.5757503", "0.5757264", "0.5737805", "0.5737493", "0.5737411", "0.5707207", "0.56977177", "0.56582916", "0.5639683", "0.5637592", "0.56325155", "0.5619618", "0.55697775", "0.5568762", "0.5567078", "0.55612826", "0.5553537", "0.55376035", "0.55144733", "0.55020297", "0.54930335", "0.54930335", "0.54910934", "0.54806674", "0.5448774", "0.5448047", "0.54434496", "0.5443282", "0.5431986", "0.5418197", "0.5387748", "0.5365207", "0.53597367", "0.5352456", "0.5345208", "0.5334904", "0.5319546", "0.5311079", "0.53045785", "0.52923465", "0.5273809", "0.52664185", "0.52508557", "0.5243046", "0.52405363", "0.52390957", "0.52277046", "0.522387", "0.5222001", "0.52167416", "0.52149665", "0.52120143", "0.5203845", "0.5193014", "0.5185159", "0.51840585", "0.517924", "0.5179129", "0.5167835", "0.51674765", "0.51655877", "0.51631516", "0.51631296", "0.5157168", "0.51564705", "0.51540935", "0.5145225", "0.5144412", "0.51375157", "0.5135345", "0.5130013", "0.5129857", "0.5127296", "0.5126229", "0.51125914", "0.51125914", "0.51066935", "0.51066726" ]
0.6567832
1
This function uniformly scales an image to newSize
def resizeImage(image, newSize):
    # scale image
    scaledImage = cv2.resize(image, newSize)
    return scaledImage
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rescale_image(image, scale=0.50):\r\n \r\n wi = int(image.shape[1]*scale)\r\n hei = int(image.shape[0]*scale)\r\n dimension = (wi, hei)\r\n return cv.resize(image, dimension, interpolation = cv.INTER_AREA)", "def scale(img, scale):\n return resize(img, x_scale=scale, y_scale=scale)", "def rescale_image(image: np.ndarray, scale: float) -> np.ndarray:\n (height, width) = image.shape[:2]\n new_dims = (int(width * scale), int(height * scale))\n return cv2.resize(image, new_dims, interpolation=cv2.INTER_CUBIC)", "def scale_down(image:np.array)->np.array:\n src = image\n scale_percent = 25\n width = int(src.shape[1] * scale_percent / 100)\n height = int(src.shape[0] * scale_percent / 100)\n dsize = (width, height)\n output = cv2.resize(src, dsize)\n return output", "def scale_image(img, factor=1):\n return cv2.resize(img, (int(img.shape[1] * factor), int(img.shape[0] * factor)))", "def _scale(self, image):\n\n if image.GetWidth() != self._width or image.GetHeight()!= self._height:\n image.Rescale(self._width, self._height)\n \n return image", "def scale_image(img, factor=1):\n\treturn cv2.resize(img,(int(img.shape[1]*factor), int(img.shape[0]*factor)))", "def rescale(self, img):\n\n if self.scale != 1:\n return imutils.resize(img, width=int(img.shape[1] * self.scale))\n else:\n return img", "def scale(input_img, size):\n width, height = size\n old_height, old_width = input_img.shape\n x_scale = float(height) / old_height\n y_scale = float(width) / old_width\n\n output_img = np.zeros((height, width), dtype=np.uint8)\n for xidx in xrange(height):\n old_x = float(xidx) / x_scale\n for yidx in xrange(width):\n old_y = float(yidx) / y_scale\n if old_x.is_integer() or old_y.is_integer():\n output_img[xidx, yidx] = input_img[int(old_x), int(old_y)]\n else: # use bilinear interpolation\n x1 = int(np.floor(old_x))\n x2 = int(np.ceil(old_x)) if int(np.ceil(old_x)) < old_height else old_height - 1\n y1 = int(np.floor(old_y))\n y2 = int(np.ceil(old_y)) if int(np.ceil(old_y)) < old_width else old_width - 1\n\n q11 = input_img[x1, y1]\n q12 = input_img[x1, y2]\n q21 = input_img[x2, y1]\n q22 = input_img[x2, y2]\n\n output_img[xidx, yidx] = (q11 * (x2 - old_x) * (y2 - old_y)\n + q21 * (old_x - x1) * (y2 - old_y)\n + q12 * (x2 - old_x) * (old_y - y1)\n + q22 * (old_x - x1) * (old_y - y1)) \\\n / ((x2 - x1) * (y2 - y1) + 1e-10)\n\n return output_img", "def scaleImage(img, min):\n scales = []\n img_ = img.copy()\n h, w, c = img.shape\n factor = 0.8\n while h >= min and w >= min:\n img_ = cv2.resize(img_, (w, h))\n scales.append(img_)\n h = int(h * factor)\n w = int(w * factor)\n\n return scales", "def scale_image(image=np.zeros((100,100)), \n new_width=100,\n ascii_block_size=(2,3)):\n \n original_width, original_height = image.shape\n aspect_ratio = original_height / float(original_width)\n w,h = ascii_block_size\n new_height = int(h/w * aspect_ratio * new_width)\n\n return skimage.transform.resize(image, (new_width, new_height))", "def scale(image, maxval=1024):\n image += maxval # minimum value is now 0\n image /= maxval*2\n\n return(image)", "def scale_image(self, pixels, size):\n x_min, x_max = np.amin(pixels[:,0]), np.amax(pixels[:,0])\n y_min, y_max = np.amin(pixels[:,1]), np.amax(pixels[:,1])\n z_min, z_max = np.amin(pixels[:,2]), np.amax(pixels[:,2])\n \n pixels[:,0] -= x_min \n pixels[:,1] -= y_min\n pixels[:,2] -= z_min\n \n x_max -= x_min\n y_max -= y_min\n z_max -= z_min\n \n scale_factor = size / max(x_max, y_max, z_max) \n # All points are now between [0..max]\n\n pixels *= scale_factor\n 
return pixels", "def Rescale(self):\r\n picWidth,picHeight = self.oldSize = self.GetSizeTuple()\r\n bitmap = self.scaled = self.bitmap\r\n if not bitmap: return\r\n imgWidth,imgHeight = bitmap.GetWidth(),bitmap.GetHeight()\r\n if self.scaling == 2 or (self.scaling == 1 and (imgWidth > picWidth or imgHeight > picHeight)):\r\n image = bitmap.ConvertToImage()\r\n factor = min(1.0*picWidth/imgWidth,1.0*picHeight/imgHeight)\r\n newWidth,newHeight = int(factor*imgWidth),int(factor*imgHeight)\r\n self.scaled = image.Scale(newWidth,newHeight).ConvertToBitmap()\r\n #self.scaled = image.Scale(newWidth,newHeight,wx.IMAGE_QUALITY_HIGH ).ConvertToBitmap()\r", "def rescale_image(image, rescale_width):\r\n\r\n # image size\r\n image_height = pdb.gimp_image_height(image)\r\n image_width = pdb.gimp_image_width(image)\r\n\r\n # new image height\r\n rescale_height = round(image_height * (rescale_width * 1.0 / image_width))\r\n\r\n pdb.gimp_image_scale(image, rescale_width, rescale_height)\r\n gimp.message('Rescaled image')", "def _resize(img, max_dim=128):\n if max(img.shape[:3]) <= max_dim:\n return img\n else:\n new_size = [max_dim / s if s >= max_dim else 1.0 for s in img.shape[:3]]\n new_size.append(1.0) # for channel\n return scipy.ndimage.zoom(img, new_size, order=2)", "def rescale(self, factor):\n scaled_size = (int(self.width * factor), int(self.height * factor))\n return self.resize(scaled_size)", "def resize(im, new_size, preserve_aspect_ratio=True, prefilter=True):\n factors = [new_size[i] / im.shape[i] for i in range(2)]\n\n #assert factors[0] == factors[1], \"Must have same factor for now\"\n f = factors[0] \n \n if f < 1:\n im2 = pyramid_reduce(im, downscale=1/f)\n elif f > 1:\n im2 = pyramid_expand(im, upscale=f)\n else:\n im2 = im\n\n assert im2.shape[:2] == tuple(new_size), \"{0} != {1} (original size: {2})\".format(im2.shape, new_size, im.shape)\n \n return im2", "def image_resize(img, min_size=600, max_size=1000):\n C, H, W = img.shape\n scale1 = min_size / min(H, W)\n scale2 = max_size / max(H, W)\n scale = min(scale1, scale2)\n img = img / 255\n img = transform.resize(img, (C, H * scale, W * scale),\n mode='reflect', anti_aliasing=False)\n # img = pytorch_normalize(img)\n # img = caffe_normalize(img)\n return img", "def scale_image(image, new_range):\n min_val = np.min(image).astype(np.float32)\n max_val = np.max(image).astype(np.float32)\n min_val_new = np.array(min(new_range), dtype=np.float32)\n max_val_new = np.array(max(new_range), dtype=np.float32)\n scaled_image = (image - min_val) / (max_val - min_val) * (max_val_new - min_val_new) + min_val_new\n return scaled_image.astype(np.uint8)", "def scale_img(img, scale=1.0):\n dim = (int(img.shape[1]*scale), int(img.shape[0]*scale))\n return cv2.resize(img.copy(), dim, interpolation=cv2.INTER_AREA)", "def scale_image(self, image):\n err_m1 = \"image must be a numpy.ndarray with shape (h, w, 3)\"\n if not isinstance(image, np.ndarray):\n raise TypeError(err_m1)\n if len(image.shape) != 3 or image.shape[2] != 3:\n raise TypeError(err_m1)\n shape0 = image.shape[0]\n shape1 = image.shape[1]\n\n if shape0 > shape1:\n new_w = int(image.shape[1] * 512 / image.shape[0])\n new_h = 512\n elif shape0 < shape1:\n new_h = int(image.shape[0] * 512 / image.shape[1])\n new_w = 512\n else:\n new_h = 512\n new_w = 512\n tf.image.ResizeMethod.BICUBIC\n image = tf.expand_dims(image, 0)\n image = tf.image.resize_bicubic(image,\n (new_h, new_w),\n align_corners=False)\n image = image / 255\n image = tf.clip_by_value(image, clip_value_min=0, 
clip_value_max=1)\n\n return image", "def rescale_by_width(image, target_width, method=cv2.INTER_LANCZOS4):\r\n h = int(round(target_width * image.shape[0] / image.shape[1]))\r\n return cv2.resize(image, (target_width, h), interpolation=method)", "def scale_img(img):\r\n # Scale values of img between 0 and 255.\r\n img -= np.amin(img)\r\n img /= np.amax(img)\r\n img *= 255\r\n return img", "def downScaleResolution(kv, factor=10):\n sub_img_name = kv[0]\n sub_image = kv[1]\n img_dimension = len(sub_image)\n big_image = sub_image\n Nbig = img_dimension\n Nsmall = Nbig//factor\n small_image = big_image.reshape([Nsmall, Nbig // Nsmall, Nsmall, Nbig // Nsmall]).mean(3).mean(1)\n return (sub_img_name,small_image)", "def scale_image(image, scale=1, interpolation='linear'):\n if interpolation == 'linear':\n return cv2.resize(image, None, fx=scale, fy=scale, interpolation=cv2.INTER_LINEAR)\n if interpolation == 'nearest':\n return cv2.resize(image, None, fx=scale, fy=scale, interpolation=cv2.INTER_NEAREST)", "def uniform_size(img_path):\n \n img_name = os.path.splitext(img_path)[0]\n \n # Read image\n im = cv2.imread(img_path)\n rows, cols, channels = im.shape \n ar = rows/cols\n \n # Define best ar for MLO (need to fix cc normals)\n target_ar = 720/400\n target_width = 400\n target_height = 720#int(round(target_width*target_ar))\n\n # If too many rows, crop rows\n if ar >= target_ar:\n\n target_rows = int(cols*target_ar)\n delta = rows - target_rows\n new_im = im[delta//2:rows-delta//2, :,:]\n rows, cols, channels = new_im.shape\n\n # if too many columns, crop columns\n if ar < target_ar:\n\n target_cols = int(rows/target_ar) \n delta = cols - target_cols\n new_im = im[:,delta//2:cols-delta//2,:]\n rows, cols, channels = new_im.shape \n\n # Resize to match minimum dimension. 
\n resize = target_width/new_im.shape[1] \n resize_im = cv2.resize(new_im, (target_width, target_height))\n \n # Renormalize to make sure all have similar brightness scale\n resize_im = cv2.normalize(resize_im, None, 0, 255, cv2.NORM_MINMAX)\n cv2.imwrite(f'{img_name}.png', resize_im)\n \n return 0", "def __scale_image(image, scale: float):\r\n height, width, _ = image.shape\r\n\r\n width_scaled = int(np.ceil(width * scale))\r\n height_scaled = int(np.ceil(height * scale))\r\n\r\n im_data = cv2.resize(image, (width_scaled, height_scaled), interpolation=cv2.INTER_AREA)\r\n\r\n # Normalize the image's pixels\r\n im_data_normalized = (im_data - 127.5) * 0.0078125\r\n\r\n return im_data_normalized", "def transform_images(x_image: tf.Tensor, size: int) -> tf.Tensor:\n x_image = tf.image.resize(x_image, (size, size))\n x_image = x_image / 255\n return x_image", "def resize(image, size):\n return np.array(Image.fromarray(image).resize(size))", "def scaleImageToEqualSize(sourceImg, targetImg):\n return cv2.resize(sourceImg,\n None, # no specific out dim\n fx=targetImg.shape[1] / sourceImg.shape[1], # scale factor Y\n fy=targetImg.shape[0] / sourceImg.shape[0], # scale factor X\n interpolation=cv2.INTER_AREA)", "def resize(img, size, interpolation=Image.BILINEAR):\n\n if isinstance(size, int):\n w, h = img.size\n if (w <= h and w == size) or (h <= w and h == size):\n return img\n if w < h:\n ow = size\n oh = int(size * h / w)\n return img.resize((ow, oh), interpolation)\n else:\n oh = size\n ow = int(size * w / h)\n return img.resize((ow, oh), interpolation)\n else:\n return img.resize(size[::-1], interpolation)", "def scale(image, min_dim=256):\n # no scaling, keep images full size\n if min_dim == -1:\n return image\n\n # aspect-ratio preserving scale so that the smallest dimension is `min_dim`\n width, height = image.size\n scale_dimension = width if width < height else height\n scale_ratio = float(min_dim) / scale_dimension\n\n if scale_ratio == 1:\n return image\n\n return image.resize(\n (int(width * scale_ratio), int(height * scale_ratio)),\n Image.ANTIALIAS,\n )", "def downscale_image(im, max_dim=2048):\n a, b = im.size\n if max(a, b) <= max_dim:\n return 1.0, im\n\n scale = 1.0 * max_dim / max(a, b)\n new_im = im.resize((int(a * scale), int(b * scale)), Image.ANTIALIAS)\n return scale, new_im", "def coral_image_resize(im, scaling_method, scaling_factor, height_cm):\n\n if scaling_method == 'scale':\n scale = float(scaling_factor) # here scaling_factor is the desired image scaling.\n elif scaling_method == 'ratio':\n scale = float(scaling_factor) * height_cm / im.shape[0] # here scaling_factor is the desited px_cm_ratio.\n im = scipy.misc.imresize(im, scale)\n return (im, scale)", "def transform_images(img, size):\n return tf.image.resize(img, (size, size)) / 255", "def rescale_and_resize_image(img_name, img_size, save_img):\n \n try:\n \n # Open image\n img = Image.open(img_name)\n \n # Resize image\n img = img.resize((int(img_size), int(img_size)), Image.ANTIALIAS) \n \n # Get data from image\n img_list = list(img.getdata())\n \n # Find minimum and maximum value pixels in the image\n img_max = max(img_list)\n img_min = min(img_list)\n \n # Determine factor to scale to a 8-bit image\n scale_factor = 255.0/(img_max - img_min)\n \n img_list_new = [0] * img_size * img_size\n \n # Rescale all pixels to the range 0 to 255 (in line with unit8 values)\n for i in range(0,img_size):\n for j in range(0,img_size):\n img_list_new[i*img_size + j] = int((img_list[i*img_size + j]-img_min)*scale_factor)\n 
if (img_list_new[i*img_size + j] > 255) or (img_list_new[i*img_size + j] < 0) or (img_list_new[i*img_size + j]-int(img_list_new[i*img_size + j]) != 0):\n print(\"img_list_new[%d][%d] = %r\" % (i,j,img_list_new[i*img_size + j]))\n \n img.putdata(img_list_new)\n \n # Convert to uint8 greyscale image\n img = img.convert('L')\n \n # Save image\n if save_img:\n img.save(img_name)\n else:\n ph = ImageTk.PhotoImage(img)\n return ph\n \n finally:\n \n # Close image\n img.close()", "def resizeImage(IMG,IMAGE_SIZE):\n\n RESCALED_IMAGE = skimage.transform.resize(IMG,[IMG.shape[0],IMAGE_SIZE,IMAGE_SIZE])\n return RESCALED_IMAGE", "def imrescale(img,scale,return_scale=False,interpolation='bilinear'):\n h,w = img.shape[:2]\n if isinstance(scale,(float,int)):\n if scale <= 0:\n raise ValueError('Invalid scale:{},must be positive.'.format(scale))\n scale_factor = scale\n \n elif isinstance(scale,tuple):\n max_long_edge = max(scale)\n max_short_edge = min(scale)\n scale_factor = min(max_long_edge/max(h,w),\n max_short_edge/min(h,w))\n else:\n raise ValueError('Scale must be a number or tuple of int,but got {}'.format(type(scale)))\n \n new_size = _scale_size((w,h),scale_factor)\n rescaled_img = imresize(img,new_size,interpolation = interpolation)\n if return_scale:\n return rescaled_img,scale_factor\n else:\n return rescaled_img", "def scale(self):", "def resize_image(image, size):\n return skimage.transform.resize(image, size, mode='reflect', preserve_range=True).astype(image.dtype)", "def scale(image):\n image = tf.cast(image, tf.float32)\n image /= 255\n return image", "def upscale(x, n):\n if n == 1:\n return x\n x_shape = tf.shape(x)\n height, width = x_shape[1], x_shape[2]\n return tf.image.resize_nearest_neighbor(x, [n * height, n * width])\n # return tf.batch_to_space(tf.tile(x, [n**2, 1, 1, 1]), [[0, 0], [0, 0]], n)", "def _resize_short_within(self, img, short, max_size, mult_base=1, interp=Image.BILINEAR):\n w, h = img.size\n im_size_min, im_size_max = (h, w) if w > h else (w, h)\n scale = float(short) / float(im_size_min)\n if np.round(scale * im_size_max / mult_base) * mult_base > max_size:\n # fit in max_size\n scale = float(np.floor(max_size / mult_base) * mult_base) / float(im_size_max)\n new_w, new_h = (int(np.round(w * scale / mult_base) * mult_base),\n int(np.round(h * scale / mult_base) * mult_base))\n img = img.resize((new_w, new_h), interp)\n return img", "def resizeImg(img, new_width, new_height):\n w, h = img.size\n width = new_width\n pwidth = new_width / float(w)\n height = int((float(h) * float(pwidth)))\n if height > new_height:\n height = new_height\n pheight = height / float(h)\n width = int((float(w) * float(pheight)))\n img = img.resize((width, height), Image.ANTIALIAS)\n return img", "def resize_image(image: Image) -> Image:\n expected_x: int = 1024\n expected_y: int = 768\n x, y = image.size\n if x > expected_x or y > expected_y:\n scale = min(expected_x / x, expected_y / y)\n return image.resize((int(x * scale), int(y * scale)))\n else:\n return image.resize((int(0.9*x), int(0.9*y)))", "def FlyResize( image, log_mess, nimages, method = Image.ANTIALIAS ):\n oldw, oldh = image.size\n resl = [8, 10, 14, 16, 20, 22, 24, 32, 40, 48, 64, 96, 128, 256]\n \n if oldw > 256 or oldh > 256:\n newsiz = min(resl, key = lambda x:abs(x - max(oldw, oldh)))\n image.thumbnail((newsiz, newsiz), method)\n neww, newh = image.size\n log_mess += ' and new size scaled = %s x %s' %(neww, newh)\n elif nimages > 1:\n log_mess += ' and size = %s x %s' %(oldw, oldh)\n \n return oldw, oldh, image, 
log_mess", "def adjust(self, image):\n ...", "def resize(img, size):\n img = cv2.resize(img, tuple(size[::-1]))\n return img", "def scale_image(img_path, ratio):\n img = cv2.imread(img_path)\n (h, w) = img.shape[:2]\n h = int(h * ratio)\n w = int(w * ratio)\n img = cv2.resize(img, (w, h))\n cv2.imwrite(img_path, img)", "def draw_scale(img,scale,width=2,head_enlarge_rate=1,fix_head_size=0):\n scale_img = copy.deepcopy(img)\n for i in range(scale.shape[0]):\n if fix_head_size > 0:\n head_size = fix_head_size\n else:\n head_size = scale[i][2] * head_enlarge_rate\n p1 = (int(round(scale[i][0] - head_size / 2)),\n int(round(scale[i][1] - head_size / 2)))\n p2 = (int(round(scale[i][0] + head_size / 2)),\n int(round(scale[i][1] + head_size / 2)))\n cv2.rectangle(scale_img, p1, p2, (0, 0, 255), width)\n\n return scale_img", "def resize_img_norescale(img, size):\n w,h = size\n shape = (h,w)\n out = np.zeros(shape, dtype=img.dtype)\n i = min(img.shape[0], out.shape[0])\n j = min(img.shape[1], out.shape[1])\n out[0:i,0:j] = img[0:i, 0:j]\n return out", "def process_image(image):\n image = resize(image)\n return image", "def resize(img):\n size = (500, 500)\n img.thumbnail(size)\n return img", "def scale_images(images, new_shape):\n\n images_list = list()\n\n for image in images:\n new_image = resize(image, new_shape)\n images_list.append(new_image)\n \n return np.asarray(images_list)", "def preprocess(img, min_size, max_size):\n if(min_size > max_size):\n raise Exception('min_size should not exceed max_size')\n \n width, height = img.size\n minDim = min(width,height)\n maxDim = max(width,height)\n scale_shorter_side = min_size/minDim\n scale_longer_side = maxDim * scale_shorter_side\n if(scale_longer_side > max_size):\n scale = max_size/maxDim\n else:\n scale = scale_shorter_side\n transform = transforms.Compose([\n transforms.Resize((round(img.height*scale),round(img.width * scale))),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n ])\n img = transform(img)\n return scale,img", "def resize(self, new_size):\n resized_img = opencv.resize(self.img, new_size)\n return Image(resized_img)", "def resize(im: Image) -> Image:\n size_factor = random.uniform(*ImageOperations.config.get('resize_interval'))\n return im.resize((\n int(round(im.height * size_factor, 0)),\n int(round(im.width * size_factor, 0))\n ))", "def scale(img, scales):\n outputs = []\n for sc in scales:\n outputs.append(\n cv2.resize(img, None, fx=sc, fy=sc, interpolation=cv2.INTER_CUBIC)\n )\n return outputs", "def set_scale_factors_to_output_size(self):\n # Compute the scale_factor using rounded scaled image size.\n height = tf.shape(self._image)[0]\n width = tf.shape(self._image)[1]\n max_image_size = tf.to_float(tf.maximum(height, width))\n image_scale = tf.to_float(self._output_size) / max_image_size\n scaled_height = tf.to_int32(tf.to_float(height) * image_scale)\n scaled_width = tf.to_int32(tf.to_float(width) * image_scale)\n self._image_scale = image_scale\n self._scaled_height = scaled_height\n self._scaled_width = scaled_width", "def imageScale(scale):\n\t\treturn max(1, int(scale * (InterfaceTools.getCanvasSize()[0] / height)))", "def resample(img, newSize):\n # use float as datatype to preserve float precision\n # will convert to np.uint8 when display result\n # get the new dimensions\n nH, nW = newSize\n if np.ndim(img) == 2:\n H, W = img.shape\n res = np.zeros((nH, nW), dtype=np.float32)\n elif np.ndim(img) == 3:\n H, W, _ = img.shape\n res = np.zeros((nH, nW, _), 
dtype=np.float32)\n else:\n raise ValueError(\"input image has invalid dimension %s\" % (img.shape))\n\n # interpolate the value for the result\n for idx in range(nH * nW):\n i = idx // nW\n j = idx % nW\n orig_i = int((i * H) // nH)\n orig_j = int((j * W) // nW)\n res[i, j] = img[orig_i, orig_j]\n return res", "def resample(medical_image, image, new_spacing=[1, 1, 1]):\n image_shape = np.array(list(medical_image.header.get_data_shape()))\n spacing = np.array(list(medical_image.header.get_zooms()))\n resize_factor = spacing / new_spacing\n\n new_shape = image_shape * resize_factor\n new_shape = np.round(new_shape)\n\n real_resize_factor = new_shape / image_shape\n\n new_spacing = spacing / real_resize_factor\n resampled_image = scipy.ndimage.interpolation.zoom(image, real_resize_factor)\n\n return resampled_image, new_spacing", "def resize_function(input):\n\n from keras.backend import tf as ktf\n return ktf.image.resize_images(input, (64, 64))", "def rescale_by_height(image, target_height, method=cv2.INTER_LANCZOS4):\r\n w = int(round(target_height * image.shape[1] / image.shape[0]))\r\n return cv2.resize(image, (w, target_height), interpolation=method)", "def autoscale(self, img: Image, max_height: int, max_width: int):\n height = img.bgr.shape[0]\n width = img.bgr.shape[1]\n\n diff_height = max_height / height\n diff_width = max_width / width\n\n diff = min(diff_height, diff_width)\n\n height = int(height * diff)\n width = int(width * diff)\n\n return img.rescale_image(height, width)", "def cs4243_resize(image, new_width, new_height):\n new_image = np.zeros((new_height, new_width, 3), dtype='uint8')\n if len(image.shape)==2:\n new_image = np.zeros((new_height, new_width), dtype='uint8')\n ###Your code here###\n \n # if new_width < 0 or new_height < 0, np.zeros() will throw a ValueError.\n \n # if new_width == 0 or new_height == 0, we won't need to do any calculation.\n if new_width == 0 or new_height == 0:\n return new_image\n \n # resizing algorithm taken from\n # https://tech-algorithm.com/articles/nearest-neighbor-image-scaling/\n height, width = image.shape[0], image.shape[1]\n w_ratio = int(((width << 16) / new_width) + 1)\n h_ratio = int(((height << 16) / new_height) + 1)\n \n for h in range(new_height):\n for w in range(new_width):\n pw = int((w*w_ratio) >> 16)\n ph = int((h*h_ratio) >> 16)\n new_image[h,w] = image[ph, pw]\n ###\n return new_image", "def scale_image(image: Image, scale: float) -> Image:\n width = round(image.width * scale)\n height = round(image.height * scale)\n image.thumbnail((width, height))\n return image", "def imresize(img, size):\n if hasattr(size, \"__len__\"):\n num_rows, num_cols = size\n assert (num_rows > 0) or (num_cols > 0)\n if num_rows < 0:\n num_rows = num_cols * img.shape[0] / img.shape[1]\n if num_cols < 0:\n num_cols = num_rows * img.shape[1] / img.shape[0]\n else:\n num_rows = int(round(img.shape[0] * size))\n num_cols = int(round(img.shape[1] * size))\n return skimage.transform.resize(img, (num_rows, num_cols))", "def change_resolution(img):\n scale_factor = np.random.choice(list(range(0, 6, 2)))\n if scale_factor == 0:\n return img\n downsample = nn.AvgPool2d(scale_factor)\n upsample = nn.UpsamplingNearest2d(scale_factor=scale_factor)\n new_res_img = upsample(downsample(img.unsqueeze(dim=1))).squeeze()\n return new_res_img", "def test_scale_image_rgb(self):\n larger = scale_image(self.rgb_image, 1.5)\n self.assertEqual((150, 300, 3), larger.shape)\n\n smaller = scale_image(self.rgb_image, 0.2)\n self.assertEqual((20, 40, 3), 
smaller.shape)", "def upsampleImage( arr, kernelSize ):\n return scipy.ndimage.zoom( arr, kernelSize )", "def resizeImage(image, maxW, maxH):\n\timageW, imageH = image.size\n\tif imageW == maxW and imageH == maxH:\n\t\treturn image\n\t# find which axis requires the biggest zoom (smallest relative max dimension)\n\tzoomW = float(imageW) / float(maxW)\n\tzoomH = float(imageH) / float(maxH)\n\tzoom = max(zoomW, zoomH)\n\tif zoomW >= zoomH:\t# size is defined by width\n\t\tmaxH = int(imageH//zoom)\t# calculate the new height\n\telse:\n\t\tmaxW = int(imageW//zoom)\n\treturn image.resize((maxW, maxH))", "def resize_image(self, width=200):\n self.new_width = width\n aspect_ratio = self.original_height/float(self.original_width)\n self.new_height = int(aspect_ratio * self.new_width)\n\n resized_image = self.image.resize((self.new_width, self.new_height), Image.BILINEAR)\n return resized_image", "def resize(img,width):\n \n wpercent = float(width / float(img.size[0]))\n hsize = int((float(img.size[1])*float(wpercent)))\n img = img.resize((width ,hsize), Image.ANTIALIAS)\n\n return img", "def resize(img,width):\n \n wpercent = float(width / float(img.size[0]))\n hsize = int((float(img.size[1])*float(wpercent)))\n img = img.resize((width ,hsize), Image.ANTIALIAS)\n\n return img", "def __resize_image(self, img):\n return cv2.resize(img, self.g.img_size, \n interpolation = cv2.INTER_CUBIC)", "def resize_image(img: npt.ArrayLike, target_size: tuple(int, int)) -> npt.ArrayLike:\n\n ht, wt = target_size\n h, w = img.shape\n # scaling coefficient\n sc = min(wt / w, ht / h)\n tx = (wt - w * sc) / 2\n ty = (ht - h * sc) / 2\n \n # M = [[x_scale, x_shear, X_up_left], \n # [y_shear, y_scale, Y_up_left]]\n M = np.float32([[sc, 0, tx], [0, sc, ty]])\n img = cv2.warpAffine(img, M, dsize=(wt, ht), borderValue=0)\n\n return img / 255.", "def resize_volume(img):\n desired_depth = 64\n desired_width = 128\n desired_height = 128\n\n current_depth = img.shape[-1]\n current_width = img.shape[0]\n current_height = img.shape[1]\n\n depth = current_depth / desired_depth\n width = current_width / desired_width\n height = current_height / desired_height\n\n depth_factor = 1 / depth\n width_factor = 1 / width\n height_factor = 1 / height\n\n img = ndimage.rotate(input=img, angle=90, reshape=False)\n img = ndimage.zoom(input=img, zoom=(width_factor, height_factor, depth_factor), order=1)\n\n return img", "def _do_adaptive_shrinking(self, im):\n im_sz = list(im.shape)\n dim = len(im_sz)\n dim_to_pad = [dim_sz%self.adaptive_padding!=0 and dim_sz>3 for dim_sz in im_sz]\n dim_rem = [dim_sz//self.adaptive_padding for dim_sz in im_sz]\n new_dim_sz = [(dim_rem[i])*self.adaptive_padding if dim_to_pad[i] else im_sz[i] for i in range(dim)]\n before_id = [(-new_dim_sz[i] +im_sz[i]+1)//2 for i in range(dim)]\n after_id = [new_dim_sz[i] + before_id[i] for i in range(dim)]\n new_img = im[before_id[0]:after_id[0],before_id[1]:after_id[1],before_id[2]:after_id[2]].copy()\n return new_img", "def retinanet_resize_image(img, min_side=800, max_side=1333):\n # compute scale to resize the image\n scale = retinanet_compute_resize_scale(img.shape, min_side=min_side, max_side=max_side)\n # scale = round(scale, 4)\n # print(\"scale: \", scale)\n\n # resize the image with the computed scale\n img = cv2.resize(img, None, fx=scale, fy=scale)\n\n return img, scale", "def rescale_img(image: Matrix, scales: List[float]) -> List[Matrix]:\n rescaled_imgs = []\n for scale in scales:\n if scale == 1:\n rescaled_imgs.append(np.copy(image))\n else:\n 
rescaled_imgs.append(rescale(image, scale))\n return rescaled_imgs", "def resize_to_box(im, size):\n #mx = np.max(im.shape[:2])\n\n factors = [size[i]/im.shape[i] for i in range(2)]\n\n f = np.min(factors)\n if f < 1.0:\n return resize_with_factor_new(im, f)\n else:\n return im", "def rescale_box(box, img_size_orig, img_size_new):\n orig_w, orig_h = img_size_orig\n new_w, new_h = img_size_new\n scale_x = new_w / orig_w\n scale_y = new_h / orig_h\n sx, sy, ex, ey = box\n return [sx * scale_x, sy * scale_y, ex * scale_x, ey * scale_y]", "def resize(img, size):\n img = tf.expand_dims(img, 0)\n return tf.image.resize_bilinear(img, size)[0,:,:,:]", "def resize(img,scale):\n sdims=img.shape\n datatype=c_double\n if img.dtype!=datatype:\n print \"Error the image must be of doubles!\"\n raise RuntimeError\n\n if scale>1.0:\n print \"Invalid scaling factor!\"\n raise RuntimeError \n \n img = asfortranarray(img,c_double) # make array continguous\n \n try:\n mresize = ctypeslib.load_library(\"libresize.so\",\".\") \n except:\n print \"Unable to load resize library\"\n raise RuntimeError\n \n #use two times the 1d resize to get a 2d resize\n fresize = mresize.resize1dtran\n fresize.restype = None\n fresize.argtypes = [ ctypeslib.ndpointer(dtype=datatype, ndim=3), c_int,ctypeslib.ndpointer(dtype=datatype, ndim=3), c_int, c_int , c_int ]\n ddims = [int(round(sdims[0]*scale)),int(round(sdims[1]*scale)),sdims[2]];\n mxdst = zeros((ddims), dtype=datatype)\n tmp = zeros((ddims[0],sdims[1],sdims[2]), dtype=datatype)\n img1=img\n t1=time()\n fresize(img1, sdims[0], tmp, ddims[0], sdims[1], sdims[2]);\n fresize(tmp, sdims[1], mxdst, ddims[1], ddims[0], sdims[2]);\n t2=time()\n return mxdst.reshape(ddims[2],ddims[1],ddims[0]).T", "def scale(volume, voxel_dim, expansion, objective_factor,\n pixel_size, focal_plane_depth, **kwargs):\n xy_step = float(pixel_size) / (voxel_dim[1] * expansion * objective_factor)\n #This removes rounding artifacts, by binning with an integer number of pixels\n if xy_step < 1:\n xy_scale = 1.0 / xy_step\n xy_scale = np.round(xy_scale)\n print \"Warning: the ground truth resolution is too low to resolve the volume with the desired expansion. Attempting a work around.\"\n else:\n xy_step = np.round(xy_step)\n xy_scale = 1.0 / xy_step\n z_scale = voxel_dim[0] * expansion / float(focal_plane_depth)\n z_step = np.round(1.0 / z_scale).astype(np.int)\n out = []\n for i in range(0, volume.shape[0], z_step):\n X, Y = np.nonzero(volume[i, :, :])\n values = volume[i, X, Y]\n #Rescale and round\n X = np.floor(xy_scale * X).astype(np.int64)\n Y = np.floor(xy_scale * Y).astype(np.int64)\n #Create new image\n d, w, h = np.ceil(np.array(volume.shape) * xy_scale)\n im = np.zeros((int(w), int(h)), np.uint32)\n #Adding poisson if the volume is expanded, to avoid grid-like images\n if xy_scale > 1:\n X = np.clip(X + np.random.poisson(int(xy_scale), size = len(X)), 0, w - 1)\n Y = np.clip(Y + np.random.poisson(int(xy_scale), size = len(Y)), 0, h - 1)\n #This allows to add to repetition of the same index\n np.add.at(im, (X.astype(np.uint64), Y.astype(np.uint64)), values)\n out.append(im)\n return np.array(out)", "def resize_image(img):\n ratio = float(img.size[1]) / float(img.size[0])\n\n if ratio > 1.73:\n h, w = 320, 160 # 512, 256\n elif ratio < 1.0 / 1.73:\n h, w = 160, 320 # 256, 512\n elif ratio > 1.41:\n h, w = 384, 256\n elif ratio < 1. / 1.41:\n h, w = 256, 384\n elif ratio > 1.15:\n h, w = 320, 240 # 512, 384\n elif ratio < 1. 
/ 1.15:\n h, w = 240, 320 # 384, 512\n else:\n h, w = 320, 320 # 384, 384\n\n h = int(h)\n w = int(w)\n img = TF.resize(img, size=[h, w], interpolation=Image.BILINEAR)\n return img", "def resize_image(image, new_size):\n height, width = image.shape[:2]\n quad_size = min(height, width)\n\n if len(image.shape) == 3:\n cropped_image = image[:quad_size, :quad_size, :]\n else:\n cropped_image = image[:quad_size, :quad_size]\n\n resized_image = cv2.resize(cropped_image, (new_size, new_size))\n\n return resized_image", "def zoom_augmentation():\n # Get the width and the height of the zoomed version\n x_len, y_len = np.random.randint(250, 350, size=2)\n # Get left upper ,right and lower bound of the pixels in the original image\n left = np.random.randint(x_size-x_len)\n upper = np.random.randint(y_size-y_len)\n right, lower = left + x_len, upper+y_len\n # Crops the box and resizes it to the original image size\n box = (left, upper, right, lower)\n return lambda image: image.transform(image.size, Image.EXTENT, box)", "def rescale_image_0255(image):\n # scale image to from [0.0, 1.0] to [0, 255]\n image *= 255\n return image.astype(np.uint8)", "def resized_map(self, new_size):\n\n new_map = cv2.resize(self.map.copy(), new_size)\n cur_count = np.sum(new_map)\n\n # Avoid dividing by zero\n if cur_count == 0:\n return new_map\n\n scale = self.count / cur_count\n new_map *= scale\n return new_map", "def resize_img(self,scale=1):\n reduced = self.image.reduce((scale,scale))\n reduced.save(\"../edited/{}\".format(self.image.filename))\n\n reduced = Image.open(\"../edited/{}\".format(self.image.filename))\n return reduced", "def scale(self, size):\n self._surf = pygame.transform.smoothscale(self._surf, size).convert_alpha()", "def resize3D(img, target_size, bspline_order=3, mode='constant'): \n # compute zoom values\n target_size = np.array(target_size, dtype=float)\n image_shape = np.array(img.shape, dtype=float)\n zoom_factors = np.divide(target_size,image_shape)\n print \"Target Size\"\n print target_size\n \n\n print \"Zoom Factors\"\n print zoom_factors\n\n \n # zoom image\n img = zoom(img, zoom_factors, order=bspline_order, mode=mode)\n\n print \"image_shape\"\n print img.shape\n\n return img", "def resize(image):\r\n return cv2.resize(image, (200, 66), interpolation=cv2.INTER_AREA)", "def reScaleLandsat(self,img):\n \n\t\tthermalBand = ee.List(['thermal'])\n\t\tthermal = ee.Image(img).select(thermalBand).multiply(10)\n \n\t\totherBands = ee.Image(img).bandNames().removeAll(thermalBand)\n\t\tscaled = ee.Image(img).select(otherBands).divide(0.0001)\n \n\t\timage = ee.Image(scaled.addBands(thermal)).int16()\n \n\t\treturn image.copyProperties(img)", "def resize(img, min_size=512):\n width, height, _ = tf.unstack(tf.shape(img), num=3)\n if height < width:\n new_height = min_size\n new_width = int(width * new_height / height)\n else:\n new_width = min_size\n new_height = int(height * new_width / width)\n\n img = tf.image.resize(img, size=(new_width, new_height))\n return img", "def resize(img, pct):\n size = (img.width * pct // 100, img.height * pct // 100)\n img = img.resize(size)\n return img", "def transform_image(img, new_shape):\n img = img[0:720, 0:720]\n img = cv2.resize(img, (new_shape, new_shape)) / 255\n img = np.expand_dims(img, axis=0) # 3D to 4D\n return img" ]
[ "0.72932327", "0.72652304", "0.7210784", "0.7190373", "0.7156874", "0.714985", "0.7103231", "0.70871055", "0.7044541", "0.70186824", "0.69733804", "0.694513", "0.6941695", "0.69353527", "0.6923214", "0.6918948", "0.68329215", "0.68258023", "0.6771118", "0.6759777", "0.6754045", "0.67492074", "0.6689703", "0.6679433", "0.66789687", "0.66776246", "0.66681284", "0.6665673", "0.6652147", "0.66462976", "0.66315997", "0.6592838", "0.65844965", "0.6576571", "0.65746516", "0.65663373", "0.6559239", "0.65581465", "0.65335333", "0.6512869", "0.6507444", "0.649528", "0.64894605", "0.6471278", "0.64704496", "0.64679986", "0.6438607", "0.6438092", "0.6434015", "0.64201415", "0.6405829", "0.6401998", "0.6396889", "0.63933283", "0.63846606", "0.6379367", "0.63740534", "0.6367263", "0.6363278", "0.63548213", "0.6348894", "0.63465005", "0.63446176", "0.6341144", "0.63287425", "0.6323702", "0.6315295", "0.6312845", "0.6312711", "0.6310788", "0.6309014", "0.6307518", "0.62914675", "0.629131", "0.62700194", "0.62700194", "0.62668365", "0.6265904", "0.6260741", "0.62572795", "0.62566966", "0.62510055", "0.6246354", "0.62378466", "0.6231537", "0.62308204", "0.6220137", "0.6217059", "0.62117726", "0.6207241", "0.61916363", "0.61819756", "0.61793166", "0.6175319", "0.6169362", "0.61685604", "0.61543053", "0.61502355", "0.61471504", "0.6146304" ]
0.7020678
9
This function first determines which camera is the primary and which is secondary. The image streams from the respective primary and secondary cameras are resized and republished.
def resizeAndRepubThread():

    # reference globals
    global primaryCamString
    global secondaryCamString
    global armCamImage
    global headCamImage

    # initialize image publishers
    primaryPub = rospy.Publisher(primaryCamRepub, Image, queue_size=1)
    secondaryPub = rospy.Publisher(secondaryCamRepub, Image, queue_size=1)

    # create CvBridge object for converting CV2 images to sensor_msgs/Image messages
    backBridge = CvBridge()

    while(True):

        primaryImage = np.zeros(shape=[512, 512, 3])
        secondaryImage = np.zeros(shape=[512, 512, 3])

        # just keep looping until we get images from both cameras
        if(np.sum(headCamImage) == 0 or np.sum(armCamImage) == 0):
            rospy.loginfo("still waiting on camera images...")
            rospy.sleep(0.1)  # brief pause to avoid busy-waiting and log spam
            continue

        # get primary image
        if(primaryCamString == "head"):
            primaryImage = resizeImage(headCamImage, primarySize)
        elif(primaryCamString == "leftArm"):
            primaryImage = resizeImage(armCamImage, primarySize)
        elif(primaryCamString == ""):
            pass
        else:
            rospy.logerr("Invalid option for primaryCamString received!")

        # get secondary image
        if(secondaryCamString == "head"):
            secondaryImage = resizeImage(headCamImage, secondarySize)
        elif(secondaryCamString == "leftArm"):
            secondaryImage = resizeImage(armCamImage, secondarySize)
        elif(secondaryCamString == ""):
            pass
        else:
            rospy.logerr("Invalid option for secondaryCamString received!")

        # publish both new images, but only once both have been populated
        if(np.sum(primaryImage) != 0 and np.sum(secondaryImage) != 0):
            primaryImageMessage = backBridge.cv2_to_imgmsg(primaryImage, "bgr8")
            primaryPub.publish(primaryImageMessage)
            secondaryImageMessage = backBridge.cv2_to_imgmsg(secondaryImage, "bgr8")
            secondaryPub.publish(secondaryImageMessage)
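For readers reconstructing the context: below is a minimal sketch of the node wiring that could feed the function above. The topic names, image sizes, and the resizeImage helper are assumptions for illustration only; they do not appear in the original source, which defines these globals elsewhere.

    # Illustrative sketch only -- all names below are assumed, not from the original node.
    import threading

    import cv2
    import numpy as np
    import rospy
    from cv_bridge import CvBridge
    from sensor_msgs.msg import Image

    # globals read by resizeAndRepubThread (values are placeholder assumptions)
    primaryCamString = "head"
    secondaryCamString = "leftArm"
    primaryCamRepub = "/primary_cam/image"        # assumed republish topic
    secondaryCamRepub = "/secondary_cam/image"    # assumed republish topic
    primarySize = (640, 480)                      # assumed (width, height)
    secondarySize = (320, 240)                    # assumed (width, height)
    headCamImage = np.zeros((512, 512, 3), dtype=np.uint8)
    armCamImage = np.zeros((512, 512, 3), dtype=np.uint8)

    def resizeImage(image, size):
        # resize to the requested (width, height) tuple
        return cv2.resize(image, size, interpolation=cv2.INTER_AREA)

    def headCamCallback(msg):
        # store the latest head camera frame in the shared global
        global headCamImage
        headCamImage = CvBridge().imgmsg_to_cv2(msg, "bgr8")

    def armCamCallback(msg):
        # store the latest arm camera frame in the shared global
        global armCamImage
        armCamImage = CvBridge().imgmsg_to_cv2(msg, "bgr8")

    if __name__ == "__main__":
        rospy.init_node("cam_resize_repub")
        rospy.Subscriber("/head_camera/image_raw", Image, headCamCallback)      # assumed topic
        rospy.Subscriber("/left_arm_camera/image_raw", Image, armCamCallback)   # assumed topic
        # run the resize-and-republish loop (the function above) in a background thread
        threading.Thread(target=resizeAndRepubThread, daemon=True).start()
        rospy.spin()

The design choice mirrors the function's assumptions: subscriber callbacks update the shared image globals in their own threads, while resizeAndRepubThread re-reads those globals on every loop iteration.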
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, camera, cameras, settings):\n\n self.cam = None\n self.jpeg_quality = 95 # 0 to 100, higher is better quality, 95 is cv2 default\n # check picamera version\n try:\n picamversion = require('picamera')[0].version\n except:\n picamversion = '0'\n\n if 'threaded_read' in cameras[camera]: # threaded on non-threaded camera reading\n self.threaded_read = cameras[camera]['threaded_read']\n else:\n self.threaded_read = True\n if 'resolution' in cameras[camera]:\n self.resolution = literal_eval(cameras[camera]['resolution'])\n else:\n self.resolution = (320, 240)\n if 'framerate' in cameras[camera]:\n self.framerate = cameras[camera]['framerate']\n else:\n self.framerate = 32\n if 'vflip' in cameras[camera]:\n self.vflip = cameras[camera]['vflip']\n else:\n self.vflip = False\n if 'resize_width' in cameras[camera]:\n # resize_width is a percentage value\n # width in pixels will be computed later after reading a test image\n self.resize_width = cameras[camera]['resize_width']\n else:\n self.resize_width = None\n if 'viewname' in cameras[camera]:\n self.viewname = cameras[camera]['viewname']\n else:\n self.viewname = ' '\n if 'src' in cameras[camera]:\n self.src = cameras[camera]['src']\n else:\n self.src = 0\n if 'exposure_mode' in cameras[camera]:\n self.exposure_mode = cameras[camera]['exposure_mode']\n else:\n self.exposure_mode = None\n if 'iso' in cameras[camera]:\n self.iso = cameras[camera]['iso']\n else:\n self.iso = 0 # default value\n if 'shutter_speed' in cameras[camera]:\n self.shutter_speed = cameras[camera]['shutter_speed']\n else:\n self.shutter_speed = 0 # default value\n if 'sharpness' in cameras[camera]:\n self.sharpness = cameras[camera]['sharpness']\n else:\n self.sharpness = 0 # default value\n if 'contrast' in cameras[camera]:\n self.contrast = cameras[camera]['contrast']\n else:\n self.contrast = 0 # default value\n if 'brightness' in cameras[camera]:\n self.brightness = cameras[camera]['brightness']\n else:\n self.brightness = 50 # default value\n if 'exposure_compensation' in cameras[camera]:\n self.exposure_compensation = cameras[camera]['exposure_compensation']\n else:\n self.exposure_compensation = 0 # 0 default value, integer value between -25 and 25\n if 'awb_mode' in cameras[camera]:\n self.awb_mode = cameras[camera]['awb_mode']\n else:\n self.awb_mode = 'auto' # default value\n\n self.detectors = []\n if 'detectors' in cameras[camera]: # is there at least one detector\n self.setup_detectors(cameras[camera]['detectors'],\n settings.nodename,\n self.viewname)\n if camera[0].lower() == 'p': # this is a picam\n # start PiCamera and warm up; inherits methods from\n # imutils.VideoStream unless threaded_read is False; then uses class\n # PiCameraUnthreadedStream to read the PiCamera in an unthreaded way\n if self.threaded_read:\n self.cam = VideoStream(usePiCamera=True,\n resolution=self.resolution,\n framerate=self.framerate).start()\n else:\n self.cam = PiCameraUnthreadedStream(resolution=self.resolution,\n framerate=self.framerate)\n\n # if an exposure mode has been set in yaml, set it\n if self.exposure_mode:\n self.cam.camera.exposure_mode = self.exposure_mode\n # if an iso has been set in yaml, set it\n if self.iso:\n self.cam.camera.iso = self.iso\n # if an iso has been set in yaml, set it\n if self.shutter_speed:\n self.cam.camera.shutter_speed = self.shutter_speed\n # if an sharpness has been set in yaml, set it\n if self.sharpness:\n self.cam.camera.sharpness = self.sharpness\n # if an contrast has been set in yaml, set it\n if 
self.contrast:\n self.cam.camera.contrast = self.contrast\n # if an brightness has been set in yaml, set it\n if self.brightness:\n self.cam.camera.brightness = self.brightness\n # if an exposure_compensation has been set in yaml, set it\n if self.exposure_compensation:\n self.cam.camera.exposure_compensation = self.exposure_compensation\n # if an awb_mode has been set in yaml, set it\n if self.awb_mode:\n self.cam.camera.awb_mode = self.awb_mode\n self.cam_type = 'PiCamera'\n else: # this is a webcam (not a picam)\n self.cam = VideoStream(src=0).start()\n self.cam_type = 'webcam'\n sleep(3.0) # allow camera sensor to warm up\n\n # self.text is the text label for images from this camera.\n # Each image that is sent is sent with a text label so the hub can\n # file them by nodename, viewname, and send_type\n # example: JeffOffice Window|jpg\n # Nodename and View name are in one field, separated by a space.\n # send_type is in the next field\n # The 2 field names are separaged by the | character\n node_and_view = ' '.join([settings.nodename, self.viewname]).strip()\n self.text = '|'.join([node_and_view, settings.send_type])\n\n # set up camera image queue\n self.cam_q = deque(maxlen=settings.queuemax)", "def query_camera(self):\n ok, orig_pic = self.vs.read() # Read video stream\n if ok: # If no errors\n orig_pic = imutils.rotate(orig_pic, angle=self.camera_rot)\n curr_pic = imutils.resize(orig_pic, width=self.image_width)\n return curr_pic, orig_pic\n else:\n return None, None", "def secondary_cam_setup(cam):\n\ttry:\n\t\tresult = True\n\t\tnodemap = cam.GetNodeMap()\n\n\t\t# Configure the camera to allow for chunk data\n\t\tresult &= configure_chunk_data(nodemap)\n\n\t\t# Set up the pixel format\n\t\tresult &= pixel_format(2, cam, 'BGR8')\n\n\t\t# Set up the secondary camera hardware trigger\n\t\tresult &= configure_trigger(2, cam, 'hardware')\n\n\t\tprint(\"\\n\\t*** CONFIGURING CAMERA ***\")\n\t\tresult &= acquisition_mode(2, cam)\t\t\t\t # Continuous\n\t\tresult &= auto_exposure_mode(2, cam, 'Continuous') # Autoexposure = On\n\t\tresult &= auto_gain_mode(2, cam, 'Continuous') # Autogain = On\n\t\tprint('\\n')\n\n\texcept PySpin.SpinnakerException as ex:\n\t\tprint('Error: %s' % ex)\n\t\tresult = False\n\n\treturn result", "def read_cameras(self):\n for camera in self.camlist:\n image = camera.cam.read()\n if camera.vflip:\n image = cv2.flip(image, -1)\n if camera.resize_width:\n image = imutils.resize(image, width=camera.width_pixels)\n camera.cam_q.append(image)\n for detector in camera.detectors:\n self.run_detector(camera, image, detector)", "def main(argv):\n # Get default camera id based on current platform.\n if sys.platform == 'linux' or sys.platform == 'linux2':\n default_cam_ids = ['/dev/video0', '/dev/video1', '/dev/video2']\n else: # darwin win32 win64\n default_cam_ids = [0, 1, 2]\n\n # Parse CLI arguments\n ap = argparse.ArgumentParser()\n ap.add_argument('-i', '--cam_ids', default=default_cam_ids,\n help=\"camera ids list (ex: ='[/dev/video0, /dev/video1]'\")\n # TODO: implement dict argument parsing settings\n ap.add_argument('-s', '--settings',\n help=\"camera settings list \"\n \"(ex:[[(3, 640), (4, 480)], [(3, 640), (4, 480)]]\")\n args = vars(ap.parse_args())\n\n # Default camera settings\n if args[\"settings\"]:\n settings = args[\"settings\"]\n else:\n settings = [[(cv2.CAP_PROP_FRAME_WIDTH, 1280),\n (cv2.CAP_PROP_FRAME_HEIGHT, 720),\n (cv2.CAP_PROP_FPS, 30),\n (cv2.CAP_PROP_FOURCC, cv2.VideoWriter_fourcc(*'MJPG')),\n (cv2.CAP_PROP_AUTOFOCUS, 1)],\n 
[(cv2.CAP_PROP_FRAME_WIDTH, 1280),\n (cv2.CAP_PROP_FRAME_HEIGHT, 720),\n (cv2.CAP_PROP_FPS, 30),\n (cv2.CAP_PROP_FOURCC, cv2.VideoWriter_fourcc(*'MJPG')),\n (cv2.CAP_PROP_AUTOFOCUS, 1)],\n [(cv2.CAP_PROP_FRAME_WIDTH, 1280),\n (cv2.CAP_PROP_FRAME_HEIGHT, 720),\n (cv2.CAP_PROP_FPS, 30),\n (cv2.CAP_PROP_FOURCC, cv2.VideoWriter_fourcc(*'MJPG')),\n (cv2.CAP_PROP_AUTOFOCUS, 1)]]\n\n aruco_dict_num = cv2.aruco.DICT_6X6_1000\n # also available: DICT_5X5_1000, DICT_4X4_50, DICT_ARUCO_ORIGINAL\n\n # Initialize Cameras objects with calibration and lens correction\n cam_ids = args['cam_ids']\n if sys.platform != 'linux' and sys.platform != 'linux2':\n cam_ids = [int(cam_id) for cam_id in cam_ids]\n cameras = []\n for cam_id, setting in zip(cam_ids, settings):\n print('Setting up camera %s.' % cam_id)\n cam = CameraCorrected(\n cam_id=cam_id, aruco_dict_num=aruco_dict_num, settings=setting)\n cam.initialize()\n cameras.append(cam)\n\n cameras_fusion = CamerasFusion(cameras)\n cameras_fusion.initialize()\n\n # Open basic live view\n print('Live view running...')\n print(' k to calibrate correction')\n print(' m to save frame')\n print(' v loop between gray2rgb and blue2rgb fusion')\n print(' ESC or q to exit.')\n\n selected_fused = cameras_fusion.read_blue2rgb_fused\n while True:\n if cameras_fusion.fusion_calibration_is_done:\n frame = selected_fused()\n frame = camera[0].draw_fps(frame)\n else:\n for camera in cameras_fusion.cameras:\n frame = camera.read_undistort()\n frame = camera.draw_text(\n frame, 'Please manually adjust Cameras overlapping, then c'\n 'alibrate.', y=camera.height - (camera.height/20),\n thickness=2)\n k = cv2.waitKey(50) % 256\n if k == 27 or k == ord('q'):\n break\n cv2.imshow(\"Live camera\", frame)\n k = cv2.waitKey(40) % 256\n if k == 27 or k == ord('q'):\n break\n elif k == ord('k'):\n if cameras_fusion.calibrate_fusion():\n print('Calibration done!')\n elif k == ord('m'):\n cv2.imwrite('frame_fused_%s.png' % cam.cam_id, frame)\n elif k == ord('v'):\n if selected_fused == cameras_fusion.read_blue2rgb_fused:\n selected_fused = cameras_fusion.read_gray2rgb_fused\n else:\n selected_fused = cameras_fusion.read_blue2rgb_fused\n\n cameras_fusion.release() # DO NOT FORGET TO RELEASE!\n cv2.destroyAllWindows()", "def update_display(self):\n \n # check availability of display queue of the wide camera\n# if not hasattr(self,'wide_disp_queue'):\n# pass\n# elif self.wide_disp_queue.empty():\n# pass\n# else:\n# try:\n# wide_disp_image = self.wide_disp_queue.get()\n# \n# self.wide_disp_counter += 1\n# self.wide_disp_counter %= 2\n# if self.wide_disp_counter == 0:\n# if type(wide_disp_image) == np.ndarray:\n# if wide_disp_image.shape == (self.wide_cam.settings.height.value(),self.wide_cam.settings.width.value()):\n# try:\n# self.wide_cam_image.setImage(wide_disp_image)\n# except Exception as ex:\n# print('Error: %s' % ex)\n# except Exception as ex:\n# print(\"Error: %s\" % ex)\n \n # check availability of display queue of the track camera \n if not hasattr(self,'track_disp_queue'):\n pass\n elif self.track_disp_queue.empty():\n pass\n else:\n try:\n track_disp_image = self.track_disp_queue.get()\n self.track_disp_counter += 1\n self.track_disp_counter %= 4\n if self.track_disp_counter == 0:\n if type(track_disp_image) == np.ndarray:\n if track_disp_image.shape == (self.track_cam.settings.height.value(),self.track_cam.settings.width.value()):\n try:\n self.track_cam_image.setImage(track_disp_image)\n except Exception as ex:\n print('Error: %s' % ex)\n \n x = int(self.settings.x.value())\n 
y = int(self.settings.y.value())\n self.tracker_data[:] = 0\n self.tracker_data[x,y] = 1\n self.tracker_image.setImage(np.copy(self.tracker_data))\n except Exception as ex:\n print(\"Error: %s\" % ex)", "def update(self):\r\n\r\n # Update the vision frames in the system\r\n self._system.update()\r\n\r\n # Create blank PIL images to hold the video streams\r\n layered = PIL.Image.new('RGBA', (400, 400))\r\n stacked = PIL.Image.new('RGBA', (200, 800))\r\n control = PIL.Image.new('RGBA', (600, 800))\r\n\r\n focalpoint = self._system[self._appString[\"device\"].get()].focalpoint()\r\n # print(focalpoint)\r\n\r\n # Get each vision key and vision for the selected device\r\n visionList = [(visionKey, vision) for visionKey, vision in self._system[self._appString[\"device\"].get()]]\r\n\r\n # Loop through each vision in the vision list\r\n for i, (visionKey, vision) in enumerate(visionList):\r\n\r\n # Grab the frames from the vision when it is \"curr\"\r\n frameList = [frame for frameKey, frame in vision if frameKey==self._appString[\"frame\"].get()]\r\n\r\n # Loop through each frame in the frame list\r\n for frame in frameList:\r\n\r\n # Get the properties and turn the image into RGBA\r\n ratio, size = vision.properties()\r\n rgbFrame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\r\n\r\n # print(rgbFrame.shape)\r\n width, height, channels = rgbFrame.shape\r\n\r\n # Paste the images together in layered\r\n\r\n imgFrame = PIL.Image.fromarray(cv2.resize(rgbFrame, (int(400 * ratio), int(400 * ratio))))\r\n layered.paste(imgFrame, (int(200 * (1 - ratio)), int(200 * (1 - ratio))))\r\n\r\n # layered.paste(imgFrame, (int(200 * (1 - ratio) + focalpoint[0] * (200 / width)), int(200 * (1 - ratio) - focalpoint[1] * (200 / height))))\r\n # layered.paste(imgFrame, (int(200 * (1 - ratio) + focalpoint[0] * (200 // width)), int(200 * (1 - ratio) - focalpoint[1] * (200 // height))))\r\n # layered.paste(imgFrame, (int(200 * (1 - ratio) + focalpoint[0] * (ratio ** -1)), int(200 * (1 - ratio) - focalpoint[1] * (ratio ** -1))))\r\n # layered.paste(imgFrame, (int(200 * (1 - ratio) + focalpoint[0] * (200/width) / ratio), int(200 * (1 - ratio) - focalpoint[1] * (200/height) / ratio)))\r\n # layered.paste(imgFrame, (int(200 * (1 - ratio) + focalpoint[0] * (200 / width)), int(200 * (1 - ratio) - focalpoint[1] * (200 / height))))\r\n # layered.paste(imgFrame, (int(200 * (1 - ratio) + focalpoint[0] * (ratio ** -1) / 200), int(200 * (1 - ratio) - focalpoint[1] * (ratio ** -1) / 200)))\r\n # layered.paste(imgFrame, (int(200 * (1 - ratio) + focalpoint[0] * (400//width * (1- ratio))), int(200 * (1 - ratio) - focalpoint[1] * (400//height * (1 - ratio)))))\r\n\r\n # Paste the images together in stacked\r\n imgFrame = PIL.Image.fromarray(cv2.resize(rgbFrame, (200, 200)))\r\n stacked.paste(imgFrame, (0, 200 * i))\r\n\r\n # Add the stacked image to the canvas\r\n self._pilFrames[\"stacked\"] = PIL.ImageTk.PhotoImage(image=stacked)\r\n self._appCanvas[\"stacked\"].create_image(100, 0, image=self._pilFrames[\"stacked\"], anchor=tkinter.NW)\r\n\r\n # Add the layered image to the canvas\r\n self._pilFrames[\"layered\"] = PIL.ImageTk.PhotoImage(image=layered)\r\n self._appCanvas[\"layered\"].create_image(0, 0, image=self._pilFrames[\"layered\"], anchor=tkinter.NW)\r\n\r\n # Add the control image to the canvas\r\n imgFrame = cv2.cvtColor(self._system[self._appString[\"device\"].get()][self._appString[\"vision\"].get()][self._appString[\"frame\"].get()], cv2.COLOR_BGR2RGBA)\r\n control = PIL.Image.fromarray(cv2.resize(imgFrame, (600, 600)))\r\n 
self._pilFrames[\"control\"] = PIL.ImageTk.PhotoImage(image=control)\r\n self._appCanvas[\"control\"].create_image(100, 90, image=self._pilFrames[\"control\"], anchor=tkinter.NW)\r\n\r\n # Continue to update with a delay of 15\r\n self.after(15, self.update)", "def primary_cam_setup(cam):\n\ttry:\n\t\tresult = True\n\t\tnodemap = cam.GetNodeMap()\n\n\t\t# Configure the camera to allow for chunk data\n\t\tresult &= configure_chunk_data(nodemap)\n\n\t\t# Setup the pixel format\n\t\tresult &= pixel_format(1, cam, 'BGR8')\n\n\t\t# Set up the primary camera output GPIO signal\n\t\tprint('\\n\\t*** CONFIGURING HARDWARE OUTPUT ***')\n\t\tcam.LineSelector.SetValue(PySpin.LineSelector_Line2)\n\t\tcam.V3_3Enable.SetValue(True)\n\t\tprint('\\t\\tCamera 1 Hardware output set to Line 2...')\n\n\t\tresult &= trigger_selector(1, cam, 'FrameStart')\n\t\tresult &= trigger_overlap(1, cam, 'ReadOut')\n\t\tresult &= configure_trigger(1, cam, 'software')\n\n\t\tprint(\"\\n\\t*** CONFIGURING CAMERA ***\")\n\t\tresult &= acquisition_mode(1, cam)\t\t\t# Continuous acquisition\n\t\tresult &= framerate(1, cam)\t\t\t\t\t# Set the framerate\n\t\tresult &= auto_exposure_mode(1, cam, 'Off') # Autoexposure = Off\n\t\tresult &= exposure_change(cam, first_exp) # Set first exposure\n\t\tresult &= auto_gain_mode(1, cam, 'Off')\t\t# Autogain = Off\n\t\tresult &= gain_change(cam, first_gain)\t # Set first gain\n\t\tprint('\\n')\n\n\texcept PySpin.SpinnakerException as ex:\n\t\tprint('Error: %s' % ex)\n\t\tresult = False\n\n\treturn result", "def compare(image_a, image_b, is_camera_image):\n\n # Generate a unique filename\n filename = uuid.uuid4().hex[:3]\n\n if is_camera_image:\n image_a = imutils.rotate_bound(image_a, 90)\n image_b = imutils.rotate_bound(image_b, 90)\n\n # Store original to show in future\n original = image_a\n\n # Convert to greyscale\n image_a = cv2.cvtColor(image_a, cv2.COLOR_BGR2GRAY)\n image_b = cv2.cvtColor(image_b, cv2.COLOR_BGR2GRAY)\n\n # Reduce size and blur to account for shaky handheld camera based images\n if is_camera_image:\n scale_multiplier = 0.03125\n image_a = cv2.resize(image_a, (0, 0), fx=scale_multiplier, fy=scale_multiplier)\n image_b = cv2.resize(image_b, (0, 0), fx=scale_multiplier, fy=scale_multiplier)\n image_a = cv2.GaussianBlur(image_a, (1001, 1001), cv2.BORDER_DEFAULT)\n image_b = cv2.GaussianBlur(image_b, (1001, 1001), cv2.BORDER_DEFAULT)\n\n # Obtain SSIM and determine differences\n try:\n _, differences = structural_similarity(image_a, image_b, full=True, gaussian_weights=True)\n except ValueError:\n print('Images are not the same size')\n return None\n\n # Convert to cv2 array\n differences = (differences * 255).astype('uint8')\n\n # Threshold and find contours (differences)\n thresh = cv2.threshold(differences, 0, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)[1]\n contours = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n contours = imutils.grab_contours(contours)\n\n # Draw contours (differences)\n for cont in contours:\n (x, y, w, h) = cv2.boundingRect(cont)\n if is_camera_image:\n multiplier = int(1 / scale_multiplier)\n y *= multiplier\n x *= multiplier\n h *= multiplier\n w *= multiplier\n cv2.rectangle(original, (x, y), (x + w, y + h), (255, 0, 0), 4)\n\n # TODO: Create GIF highlighting differences (instead of statuic image)\n cv2.imwrite('static/images/differences/' + filename + '.jpg', original)\n\n return filename", "def run_single_camera(cam):\n\n try:\n # Retrieve TL device nodemap and print device information\n #nodemap_tldevice = 
cam.GetTLDeviceNodeMap()\n\n #result &= print_device_info(nodemap_tldevice)\n\n # Initialize camera\n cam.Init()\n\n # Retrieve GenICam nodemap\n nodemap = cam.GetNodeMap()\n exposures=[2000,4000,8000,16000]\n index=0\n if cam.ExposureAuto.GetAccessMode() != PySpin.RW:\n print(\"Unable to disable automatic exposure. Aborting...\")\n return False\n node_acquisition_mode = PySpin.CEnumerationPtr(nodemap.GetNode(\"AcquisitionMode\"))\n if not PySpin.IsAvailable(node_acquisition_mode) or not PySpin.IsWritable(node_acquisition_mode):\n print(\"Unable to set acquisition mode to continuous (enum retrieval). Aborting...\")\n return False\n\n # Retrieve entry node from enumeration node\n node_acquisition_mode_continuous = node_acquisition_mode.GetEntryByName(\"Continuous\")\n if not PySpin.IsAvailable(node_acquisition_mode_continuous) or not PySpin.IsReadable(node_acquisition_mode_continuous):\n print(\"Unable to set acquisition mode to continuous (entry retrieval). Aborting...\")\n return False\n\n acquisition_mode_continuous = node_acquisition_mode_continuous.GetValue()\n\n node_acquisition_mode.SetIntValue(acquisition_mode_continuous)\n\n print(\"Acquisition mode set to continuous...\")\n\n cam.ExposureAuto.SetValue(PySpin.ExposureAuto_Off)\n '''\n # Set maximum width\n #\n # *** NOTES ***\n # Other nodes, such as those corresponding to image width and height,\n # might have an increment other than 1. In these cases, it can be\n # important to check that the desired value is a multiple of the\n # increment.\n #\n # This is often the case for width and height nodes. However, because\n # these nodes are being set to their maximums, there is no real reason\n # to check against the increment.\n if cam.Width.GetAccessMode() == PySpin.RW and cam.Width.GetInc() != 0 and cam.Width.GetMax != 0:\n cam.Width.SetValue(FRAME_WIDTH)\n print(\"Width set to %i...\" % cam.Width.GetValue())\n\n else:\n print(\"Width not available...\")\n result = False\n\n # Set maximum height\n #\n # *** NOTES ***\n # A maximum is retrieved with the method GetMax(). A node's minimum and\n # maximum should always be a multiple of its increment.\n if cam.Height.GetAccessMode() == PySpin.RW and cam.Height.GetInc() != 0 and cam.Height.GetMax != 0:\n cam.Height.SetValue(FRAME_HEIGHT)\n print(\"Height set to %i...\" % cam.Height.GetValue())\n\n else:\n print(\"Height not available...\")\n result = False\n '''\n print(\"Automatic exposure disabled...\")\n #node_acquisition_framerate = PySpin.CFloatPtr(nodemap.GetNode(\"AcquisitionFrameRate\"))\n\n # if not PySpin.IsAvailable(node_acquisition_framerate) and not PySpin.IsReadable(node_acquisition_framerate):\n # print(\"Unable to retrieve frame rate. 
Aborting...\")\n # return False\n\n # framerate_to_set = node_acquisition_framerate.GetValue()\n\n # print(\"Frame rate to be set to %d...\" % framerate_to_set)\n canvas=np.zeros((FRAME_HEIGHT*2,FRAME_WIDTH*2,3), np.uint8)\n while True:\n exposure=exposures[index]\n \n configure_exposure(cam, exposure)\n # Acquire images\n err, img,width,height = acquire_images(cam, nodemap)\n if err < 0:\n return err\n\n \n img = img.GetData().reshape(height,width,3)\n\n half_height = int(height/2)\n half_width = int(width/2)\n half_frame_height = int(FRAME_HEIGHT/2)\n half_frame_width = int(FRAME_WIDTH/2)\n \n img = img[half_height-half_frame_height:half_height+half_frame_height,half_width-half_frame_width:half_width+half_frame_width]\n #smallimg=cv2.resize(img,(int(FRAME_WIDTH/2),int(FRAME_HEIGHT/2)))\n if index==0:\n #top left\n canvas[0:FRAME_HEIGHT,0:FRAME_WIDTH]=img\n elif index==1:\n #top right\n canvas[0:FRAME_HEIGHT,FRAME_WIDTH:FRAME_WIDTH*2]=img\n elif index==2:\n #bot left\n canvas[FRAME_HEIGHT:FRAME_HEIGHT*2,0:FRAME_WIDTH]=img\n else:\n #bot right\n canvas[FRAME_HEIGHT:FRAME_HEIGHT*2,FRAME_WIDTH:FRAME_WIDTH*2]=img\n index+=1\n if index>=len(exposures):\n index=0\n\n cv2.imshow(\"frame\",canvas)\n if cv2.waitKey(1) &0xff ==ord('q'):\n #stop the feed the 'q'\n break\n cv2.destroyAllWindows()\n # Deinitialize camera\n cam.DeInit()\n\n except PySpin.SpinnakerException as ex:\n print(\"Error: %s\" % ex)\n result = False", "def process_camera():\n\n pic_array = take_picture()\n detections, shapes, descriptors = detect_faces(person_database,pic_array)\n\n names = []\n\n for desc in descriptors:\n name = find_match(person_database, desc)\n names.append(name)\n\n return pic_array, names, detections, shapes, descriptors", "def set_video_source(self):\n if self.config['camera_device_id'] == 'pi':\n # Raspberry Pi camera as video source\n # only import if needed because it requires specific packages!\n from raspicamera import RasPiCamera\n self.video_stream = RasPiCamera()\n elif self.config['camera_device_id'] == 'network':\n # External camera through network stream as video source\n # only import if needed because it requires specific packages!\n from networkcamera import NetworkCamera\n NetworkCamera.set_url(self.config['camera_stream_url'])\n self.video_stream = NetworkCamera()\n else:\n # Local webcam as video source\n # only import if needed because it requires specific packages!\n from opencvcamera import OpencvCamera\n OpencvCamera.set_video_source(self.config['camera_device_id'])\n self.video_stream = OpencvCamera()", "def test_generate_camera_info(self):\n data = ET.parse('data/cam_data_0.xml')\n data_str = ET.tostring(data.getroot())\n\n dict = tesse_ros_bridge.utils.parse_cam_data(data_str)\n\n (left, right) = tesse_ros_bridge.utils.generate_camera_info(dict, dict)\n self.assertEqual(left.header.frame_id, \"left_cam\")\n self.assertEqual(right.header.frame_id, \"right_cam\")\n self.assertEqual(left.width, dict['parameters']['width'])\n self.assertEqual(left.height, dict['parameters']['height'])\n self.assertEqual(right.width, dict['parameters']['width'])\n self.assertEqual(right.height, dict['parameters']['height'])\n\n # TODO(marcus): add more checks", "def _open_capture(self):\n\n plat = platform.system()\n if plat == \"Windows\":\n gst = 'rtspsrc location=' + self._rtsp + ' latency=10 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! 
appsink sync=false'\n self.capture = cv2.VideoCapture(gst, apiPreference=cv2.CAP_GSTREAMER)\n # self.capture = cv2.VideoCapture(self._rtsp, apiPreference=cv2.CAP_FFMPEG)\n elif plat == \"Linux\":\n if platform.machine() == 'aarch64': # Jetson Nano\n gst ='rtspsrc location=' + self._rtsp + ' latency=10 ! rtph264depay ! h264parse ! omxh264dec ! nvvidconv ! appsink sync=false'\n self.capture = cv2.VideoCapture(gst, apiPreference=cv2.CAP_GSTREAMER)\n elif platform.machine() == 'armv6l' or platform.machine() == 'armv7l': # Raspberry Pi\n gst = 'rtspsrc location=' + self._rtsp + ' latency=10 ! queue ! rtph264depay ! h264parse ! v4l2h264dec capture-io-mode=4 ! v4l2convert output-io-mode=5 capture-io-mode=4 ! appsink sync=false'\n # might not need the two queue statements above\n self.capture = cv2.VideoCapture(gst, apiPreference=cv2.CAP_GSTREAMER)\n elif plat == \"MacOS\":\n gst = 'rtspsrc location=' + self._rtsp + ' latency=10 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! appsink'\n self.capture = cv2.VideoCapture(gst, apiPreference=cv2.CAP_GSTREAMER)\n else:\n gst = 'rtspsrc location=' + self._rtsp + ' latency=10 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! appsink'\n self.capture = cv2.VideoCapture(gst, apiPreference=cv2.CAP_GSTREAMER)\n\n self.capture_open = self.capture.isOpened() \n if not self.capture_open:\n self.logger.log(logging.CRITICAL, \"Status:Failed to open camera!\")", "def checkCamera(self):\n cameraFound = False\n print(\"[INFO]: Searching for camera...\")\n try:\n for camera in glob.glob(\"/dev/video?\"):\n if camera == \"/dev/video2\":\n cameraIndex = 2\n cameraFound = True\n print(\"[INFO]: Using index 2 for the camera.\")\n return cameraIndex, cameraFound\n elif camera == \"/dev/video1\":\n cameraIndex = 1\n cameraFound = True\n print(\"[INFO]: Using index 1 for the camera.\")\n return cameraIndex, cameraFound\n elif camera == \"/dev/video0\":\n cameraIndex = 0\n cameraFound = True\n print(\"[INFO]: Using index 0 for the camera\")\n return cameraIndex, cameraFound\n else:\n print(\"[ERROR]: No camera found.\")\n cameraFound = False\n cameraIndex = 0\n return cameraIndex, cameraFound\n except(TypeError):\n print(\"[ERROR]: Camera is probably not connected.\")", "def cameraCallback(self, data):\n if not self.isReady:\n cvImage, self.imageInfo['shape'] = u.getCVImage(data)\n if self.measuring is not None:\n self.list, cvImage, self.isReady = self.measuring.getListObjects(cvImage)\n # preview topic /see_main\n msg_image = u.getMsgImage(cvImage)\n self.pub_view_main.publish(msg_image)\n else:\n if self.imageInfo['shape'] is not None:\n self.init()\n else:\n rospy.logerr(\"no video stream. check camera's topic!\")", "def let_camera_update_parameters(path_to_images, name_image, video_source=\"/dev/video0\"):\n subprocess_cmd(\"ffmpeg -f video4linux2 -s 1280x720 -i {} -ss 00:00:02 -frames 1 ./{}/{} -loglevel error -nostats\".format(video_source, path_to_images, name_image))", "def _start_vidmemwriter(self, camType, ip=None, inputres=\"640x480\", outputres=\"640x480\"):\n if not self.__vidmemwriter and not self.__server_mode:\n self.__vidmemwriter = vidmemwriter.VidMemWriter([], [])\n\n if camType in self.__video_sources:\n return True\n\n self.__logger.info(\"I'm starting %s\" % camType)\n\n if ros_pattern.match(camType):\n #The first 4 characters \"ros_\" identify that is a specific ros image\n #The second part *** in \"ros_***/topic\" is the encoding:\n topic = camType[4:]\n encoding = \"passthrough\"\n self.__logger.info(\"camType !!!!!! 
%s\" % camType)\n if not camType[4] == '/':\n str_list = camType.split(\"_\")\n topic = '_'.join(str_list[2:])\n encoding = str_list[1]\n ros_image_source = rosimage.RosImage(topic, encoding)\n\n if self.__server_mode:\n self.__register_video_source(camType, ros_image_source)\n else:\n self.__vidmemwriter.add_video_source(ros_image_source, camType)\n self.__video_sources.append(camType)\n self.__logger.info(\"rosimage started for topic: %s, with encoding: %s\" % (topic, encoding))\n return True\n elif camType == \"webcam\":\n self.__logger.debug(\"I'm starting webcam\")\n webcamsource = takeimages.TakeImages(self.__camera)\n img = webcamsource.get_image()\n if type(img) is type(\"\"):\n self.__logger.error(\"No camera found. Please check connection!\")\n return False\n\n if webcamsource.Nocamera:\n if self.__camera == -1:\n self.__logger.error(\"No camera found. Please check connection!\")\n else:\n self.__logger.error(\"Camera %d not found. Please check connection!\" % self.__camera)\n return False\n if self.__server_mode:\n self.__register_video_source('webcam', webcamsource)\n else:\n self.__vidmemwriter.add_video_source(webcamsource, \"webcam\")\n self.__video_sources.append(\"webcam\")\n self.__logger.info(\"Webcam started\")\n return True\n elif camType == 'kinect_openni':\n self.__logger.debug(\"I'm starting kinect using openni\")\n import util.openni_kinectvideo as kv\n depth_source = kv.OpenNIKinect(\"depth\")\n rgb_source = kv.OpenNIKinect(\"rgb\")\n\n try:\n depth_source.get_image()\n except:\n self.__logger.error(\"Kinect not found. Please check connection!\")\n return False\n\n if self.__server_mode:\n self.__register_video_source('kinect_depth', depth_source)\n self.__register_video_source('kinect_rgb', rgb_source)\n else:\n self.__vidmemwriter.add_video_source(depth_source, \"kinect_depth\")\n self.__vidmemwriter.add_video_source(rgb_source, \"kinect_rgb\")\n\n self.__video_sources.append(\"kinect_depth\")\n self.__video_sources.append(\"kinect_rgb\")\n self.__video_sources.append(\"kinect\")\n self.__video_sources.append(\"kinect_openni\")\n \n self.__logger.info(\"Kinect started\")\n return True\n elif camType == 'kinect' or camType == 'kinect_rgb' or camType == 'kinect_depth':\n if self.__use_openni:\n self.__logger.info(\"I'm starting kinect using openni\")\n import util.openni_kinectvideo as kv\n depth_source = kv.OpenNIKinect(\"depth\")\n rgb_source = kv.OpenNIKinect(\"rgb\")\n\n try:\n depth_source.get_image()\n except:\n self.__logger.error(\"Kinect not found. Please check connection!\")\n return False\n else:\n self.__logger.info(\"I'm starting kinect using freenect\")\n try:\n import util.kinectmemwriter\n except:\n self.__logger.error(\"Could not load kinectmemwriter module. Check modules.\")\n return False\n\n depth_source = util.kinectmemwriter.KinectDepthSource()\n rgb_source = util.kinectmemwriter.KinectRGBSource()\n\n try:\n depth_source.get_image()\n except:\n self.__logger.error(\"Kinect not found. 
Please check connection!\")\n return False\n\n if self.__server_mode:\n self.__register_video_source('kinect_depth', depth_source)\n self.__register_video_source('kinect_rgb', rgb_source)\n else:\n self.__vidmemwriter.add_video_source(depth_source, \"kinect_depth\")\n self.__vidmemwriter.add_video_source(rgb_source, \"kinect_rgb\")\n\n self.__video_sources.append(\"kinect_depth\")\n self.__video_sources.append(\"kinect_rgb\")\n self.__video_sources.append(\"kinect\")\n \n self.__logger.info(\"Kinect started\")\n return True\n elif camType == \"naovideo\":\n self.__logger.debug(\"I'm starting naovideo\")\n try:\n import util.naovideo as naovideo\n except:\n self.__logger.error(\"Could not load naovideo module. Check modules\")\n return False\n #get ip of nao:\n #TODO: fix this dirty hack (it should be read from the config file)\n naoip = \"129.125.178.232\"\n if ip:\n naoip = ip\n \n self.__logger.warn(\"Using input resolution %s and output resolution %s\" % (inputres, outputres))\n #use the naovideo module:\n if self.__camera != 0 and self.__camera != 1:\n self.__camera = 0\n try:\n naocamsource = naovideo.VideoModule(naoip, inputres, outputres, camera=self.__camera)\n naocamsource.get_image()\n except:\n self.__logger.error(\"Something went wrong using the camera of the nao (check connection!)\")\n traceback.print_exc()\n return False\n\n if self.__server_mode:\n self.__register_video_source('naovideo', naocamsource)\n else:\n self.__vidmemwriter.add_video_source(naocamsource, \"naovideo\")\n self.__video_sources.append(\"naovideo\")\n self.__nao_camera = naocamsource\n self.__logger.info(\"Naovideo started\")\n return True\n else:\n self.__logger.warning(\"Invalid video source specified: %s\" % camType)\n return False", "def camera(*args, aspectRatio: Union[float, bool]=0.0, cameraScale: Union[float, bool]=0.0,\n centerOfInterest: Union[float, bool]=0.0, clippingPlanes: bool=True, depthOfField:\n bool=True, displayFieldChart: bool=True, displayFilmGate: bool=True,\n displayFilmOrigin: bool=True, displayFilmPivot: bool=True, displayGateMask:\n bool=True, displayResolution: bool=True, displaySafeAction: bool=True,\n displaySafeTitle: bool=True, fStop: Union[float, bool]=0.0, farClipPlane:\n Union[float, bool]=0.0, farFocusDistance: Union[float, bool]=0.0, filmFit:\n Union[AnyStr, bool]=\"\", filmFitOffset: Union[float, bool]=0.0, filmRollOrder:\n Union[AnyStr, bool]=\"\", filmRollValue: Union[float, bool]=0.0, filmTranslateH:\n Union[float, bool]=0.0, filmTranslateV: Union[float, bool]=0.0, focalLength:\n Union[float, bool]=0.0, focusDistance: Union[float, bool]=0.0, homeCommand:\n Union[AnyStr, bool]=\"\", horizontalFieldOfView: Union[float, bool]=0.0,\n horizontalFilmAperture: Union[float, bool]=0.0, horizontalFilmOffset: Union[float,\n bool]=0.0, horizontalPan: Union[float, bool]=0.0, horizontalRollPivot: Union[float,\n bool]=0.0, horizontalShake: Union[float, bool]=0.0, journalCommand: bool=True,\n lensSqueezeRatio: Union[float, bool]=0.0, lockTransform: bool=True, motionBlur:\n bool=True, name: Union[AnyStr, bool]=\"\", nearClipPlane: Union[float, bool]=0.0,\n nearFocusDistance: Union[float, bool]=0.0, orthographic: bool=True,\n orthographicWidth: Union[float, bool]=0.0, overscan: Union[float, bool]=0.0,\n panZoomEnabled: bool=True, position: Union[List[float, float, float], bool]=None,\n postScale: Union[float, bool]=0.0, preScale: Union[float, bool]=0.0, renderPanZoom:\n bool=True, rotation: Union[List[float, float, float], bool]=None, shakeEnabled:\n bool=True, shakeOverscan: 
Union[float, bool]=0.0, shakeOverscanEnabled: bool=True,\n shutterAngle: Union[float, bool]=0.0, startupCamera: bool=True,\n stereoHorizontalImageTranslate: Union[float, bool]=0.0,\n stereoHorizontalImageTranslateEnabled: bool=True, verticalFieldOfView: Union[float,\n bool]=0.0, verticalFilmAperture: Union[float, bool]=0.0, verticalFilmOffset:\n Union[float, bool]=0.0, verticalLock: bool=True, verticalPan: Union[float, bool]=0.0,\n verticalRollPivot: Union[float, bool]=0.0, verticalShake: Union[float, bool]=0.0,\n worldCenterOfInterest: Union[List[float, float, float], bool]=None, worldUp:\n Union[List[float, float, float], bool]=None, zoom: Union[float, bool]=0.0, q=True,\n query=True, e=True, edit=True, **kwargs)->Union[List[AnyStr], Any]:\n pass", "def run(self, live_camera, stream_path):\n\n has_element_err = False\n\n number_sources = 1\n # Standard GStreamer initialization\n GObject.threads_init()\n Gst.init(None)\n # Create gstreamer elements\n # Create Pipeline element that will form a connection of other elements\n print(\"Creating Pipeline \\n \")\n pipeline = Gst.Pipeline()\n\n if not pipeline:\n sys.stderr.write(\" Unable to create Pipeline \\n\")\n has_element_err = True\n if live_camera:\n if constants.RPI_MODE == constants.CAM_MODE:\n print(\"Creating Source \\n \")\n source = Gst.ElementFactory.make(\"nvarguscamerasrc\", \"src-elem\")\n if not source:\n sys.stderr.write(\" Unable to create Source \\n\")\n has_element_err = True\n else:\n print(\"Creating Source \\n \")\n source = Gst.ElementFactory.make(\"v4l2src\", \"usb-cam-source\")\n if not source:\n sys.stderr.write(\" Unable to create Source \\n\")\n has_element_err = True\n\n caps_v4l2src = Gst.ElementFactory.make(\"capsfilter\", \"v4l2src_caps\")\n if not caps_v4l2src:\n sys.stderr.write(\" Unable to create v4l2src capsfilter \\n\")\n has_element_err = True\n print(\"Creating Video Converter \\n\")\n # videoconvert to make sure a superset of raw formats are supported\n vidconvsrc = Gst.ElementFactory.make(\"videoconvert\", \"convertor_src1\")\n if not vidconvsrc:\n sys.stderr.write(\" Unable to create videoconvert \\n\")\n has_element_err = True\n # nvvideoconvert to convert incoming raw buffers to NVMM Mem (NvBufSurface API)\n nvvidconvsrc = Gst.ElementFactory.make(\"nvvideoconvert\", \"convertor_src2\")\n if not nvvidconvsrc:\n sys.stderr.write(\" Unable to create Nvvideoconvert \\n\")\n has_element_err = True\n caps_vidconvsrc = Gst.ElementFactory.make(\"capsfilter\", \"nvmm_caps\")\n if not caps_vidconvsrc:\n sys.stderr.write(\" Unable to create capsfilter \\n\")\n has_element_err = True\n else:\n # Source element for reading from the file\n print(\"Creating Source \\n \")\n source = Gst.ElementFactory.make(\"filesrc\", \"file-source\")\n if not source:\n sys.stderr.write(\" Unable to create Source \\n\")\n has_element_err = True\n # Since the data format in the input file is elementary h264 stream,\n # we need a h264parser\n print(\"Creating H264Parser \\n\")\n h264parser = Gst.ElementFactory.make(\"h264parse\", \"h264-parser\")\n if not h264parser:\n sys.stderr.write(\" Unable to create h264 parser \\n\")\n has_element_err = True\n # Use nvdec_h264 for hardware accelerated decode on GPU\n print(\"Creating Decoder \\n\")\n decoder = Gst.ElementFactory.make(\"nvv4l2decoder\", \"nvv4l2-decoder\")\n if not decoder:\n sys.stderr.write(\" Unable to create Nvv4l2 Decoder \\n\")\n has_element_err = True\n # Create nvstreammux instance to form batches from one or more sources.\n streammux = 
Gst.ElementFactory.make(\"nvstreammux\", \"Stream-muxer\")\n if not streammux:\n sys.stderr.write(\" Unable to create NvStreamMux \\n\")\n has_element_err = True\n # Use nvinfer to run inferencing on decoder's output,\n # behaviour of inferencing is set through config file\n pgie = Gst.ElementFactory.make(\"nvinfer\", \"primary-inference\")\n if not pgie:\n sys.stderr.write(\" Unable to create pgie \\n\")\n has_element_err = True\n\n # Use nv-tracker to keep track of the detected objects\n tracker = Gst.ElementFactory.make(\"nvtracker\", \"NV-Tracker\")\n if not tracker:\n sys.stderr.write(\" Unable to create tracker \\n\")\n has_element_err = True\n\n # Add nvvidconv1 and filter1 to convert the frames to RGBA\n # which is easier to work with in Python.\n print(\"Creating nvvidconv1 \\n \")\n nvvidconv1 = Gst.ElementFactory.make(\"nvvideoconvert\", \"convertor1\")\n if not nvvidconv1:\n sys.stderr.write(\" Unable to create nvvidconv1 \\n\")\n has_element_err = True\n print(\"Creating filter1 \\n \")\n caps1 = Gst.Caps.from_string(\"video/x-raw(memory:NVMM), format=RGBA\")\n filter1 = Gst.ElementFactory.make(\"capsfilter\", \"filter1\")\n if not filter1:\n sys.stderr.write(\" Unable to get the caps filter1 \\n\")\n has_element_err = True\n #filter1.set_property(\"caps\", caps1)\n print(\"Creating tiler \\n \")\n tiler = Gst.ElementFactory.make(\"nvmultistreamtiler\", \"nvtiler\")\n if not tiler:\n sys.stderr.write(\" Unable to create tiler \\n\")\n has_element_err = True\n print(\"Creating nvvidconv \\n \")\n nvvidconv = Gst.ElementFactory.make(\"nvvideoconvert\", \"convertor\")\n if not nvvidconv:\n sys.stderr.write(\" Unable to create nvvidconv \\n\")\n has_element_err = True\n print(\"Creating nvosd \\n \")\n nvosd = Gst.ElementFactory.make(\"nvdsosd\", \"onscreendisplay\")\n if not nvosd:\n sys.stderr.write(\" Unable to create nvosd \\n\")\n has_element_err = True\n print(\"Creating Fake sink \\n\")\n # sink = Gst.ElementFactory.make(\"nveglglessink\", \"nvvideo-renderer\")\n sink = Gst.ElementFactory.make(\"fakesink\", \"fakesink\")\n if not sink:\n sys.stderr.write(\" Unable to create fake sink \\n\")\n has_element_err = True\n print(\"Playing file %s \" %stream_path)\n\n\n if has_element_err:\n\n process_result = False\n\n else:\n\n if live_camera:\n if constants.RPI_MODE == constants.CAM_MODE:\n source.set_property('bufapi-version', True)\n else:\n source.set_property('device', stream_path)\n caps_v4l2src.set_property('caps', \\\n Gst.Caps.from_string(\"video/x-raw, framerate=30/1\"))\n caps_vidconvsrc.set_property('caps', \\\n Gst.Caps.from_string(\"video/x-raw(memory:NVMM)\"))\n else:\n source.set_property('location', stream_path)\n\n streammux.set_property('width', 1920)\n streammux.set_property('height', 1080)\n streammux.set_property('batch-size', 1)\n streammux.set_property('batched-push-timeout', 4000000)\n\n tiler_rows = int(math.sqrt(number_sources))\n tiler_columns = int(math.ceil((1.0*number_sources)/tiler_rows))\n tiler.set_property(\"rows\", tiler_rows)\n tiler.set_property(\"columns\", tiler_columns)\n tiler.set_property(\"width\", constants.FRAME_WIDTH)\n tiler.set_property(\"height\", constants.FRAME_HEIGHT)\n\n if is_aarch64():\n sink.set_property(\"sync\", 0)\n else:\n sink.set_property(\"sync\", 1)\n\n # Use CUDA unified memory in the pipeline so frames\n # can be easily accessed on CPU in Python.\n mem_type = int(pyds.NVBUF_MEM_CUDA_UNIFIED)\n streammux.set_property(\"nvbuf-memory-type\", mem_type)\n nvvidconv.set_property(\"nvbuf-memory-type\", mem_type)\n 
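# A hedged aside on the unified-memory choice above: NVBUF_MEM_CUDA_UNIFIED is what\n # lets a later pad probe map GPU frames into numpy on the CPU; a minimal sketch,\n # assuming a buffer probe where gst_buffer and frame_meta are already in scope:\n # n_frame = pyds.get_nvds_buf_surface(hash(gst_buffer), frame_meta.batch_id)\n # frame = np.array(n_frame, copy=True, order='C')\n # Device-only memory pools cannot be mapped this way on discrete-GPU platforms.\n 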
nvvidconv1.set_property(\"nvbuf-memory-type\", mem_type)\n tiler.set_property(\"nvbuf-memory-type\", mem_type)\n\n filter1.set_property(\"caps\", caps1)\n\n #Set properties of pgie\n pgie.set_property('config-file-path', \"dstest1_pgie_config.txt\")\n\n #Set nv-tracker properties\n tracker.set_property('ll-lib-file', \\\n '/opt/nvidia/deepstream/deepstream-6.0/lib/libnvds_nvdcf.so')\n tracker.set_property('tracker-width', 20*32)\n tracker.set_property('tracker-height', 20*32)\n tracker.set_property('enable-past-frame', 1)\n tracker.set_property('enable-batch-process', 1)\n tracker.set_property('ll-config-file', 'config/tracker_config.yml')\n\n print(\"Adding elements to Pipeline \\n\")\n pipeline.add(source)\n if live_camera:\n if constants.RPI_MODE != constants.CAM_MODE:\n pipeline.add(caps_v4l2src)\n pipeline.add(vidconvsrc)\n pipeline.add(nvvidconvsrc)\n pipeline.add(caps_vidconvsrc)\n else:\n pipeline.add(h264parser)\n pipeline.add(decoder)\n pipeline.add(streammux)\n pipeline.add(pgie)\n pipeline.add(tracker)\n pipeline.add(tiler)\n pipeline.add(nvvidconv)\n pipeline.add(filter1)\n pipeline.add(nvvidconv1)\n pipeline.add(nvosd)\n pipeline.add(sink)\n\n # we link the elements together\n # file-source -> h264-parser -> nvh264-decoder ->\n # nvinfer -> nvvidconv -> nvosd -> video-renderer\n print(\"Linking elements in the Pipeline \\n\")\n if live_camera:\n if constants.RPI_MODE == constants.CAM_MODE:\n source.link(nvvidconvsrc)\n else:\n source.link(caps_v4l2src)\n caps_v4l2src.link(vidconvsrc)\n vidconvsrc.link(nvvidconvsrc)\n nvvidconvsrc.link(caps_vidconvsrc)\n else:\n source.link(h264parser)\n h264parser.link(decoder)\n\n sinkpad = streammux.get_request_pad(\"sink_0\")\n if not sinkpad:\n sys.stderr.write(\" Unable to get the sink pad of streammux \\n\")\n if live_camera:\n srcpad = caps_vidconvsrc.get_static_pad(\"src\")\n else:\n srcpad = decoder.get_static_pad(\"src\")\n if not srcpad:\n sys.stderr.write(\" Unable to get source pad of decoder \\n\")\n srcpad.link(sinkpad)\n streammux.link(pgie)\n pgie.link(tracker)\n tracker.link(nvvidconv1)\n nvvidconv1.link(filter1)\n filter1.link(tiler)\n tiler.link(nvvidconv)\n nvvidconv.link(nvosd)\n nvosd.link(sink)\n\n # create and event loop and feed gstreamer bus mesages to it\n loop = GObject.MainLoop()\n\n bus = pipeline.get_bus()\n bus.add_signal_watch()\n bus.connect(\"message\", bus_call, loop)\n\n # Lets add probe to get informed of the meta data generated, we add probe to\n # the sink pad of the osd element, since by that time, the buffer would have\n # had got all the metadata.\n tiler_sink_pad = nvvidconv.get_static_pad(\"sink\")\n if not tiler_sink_pad:\n sys.stderr.write(\" Unable to get src pad \\n\")\n else:\n tiler_sink_pad.add_probe(Gst.PadProbeType.BUFFER, self.__metadata_process, 0)\n\n print(\"Starting pipeline \\n\")\n # start play back and listed to events\n pipeline.set_state(Gst.State.PLAYING)\n\n calib_result = Common.get_instance().check_calibration_file()\n\n if calib_result != constants.V_CALIB_OK:\n\n self.__calibration_mode = constants.ON\n ScreenCalibration.get_instance().run()\n self.__calibration_mode = constants.OFF\n\n # start play back and listed to events\n try:\n loop.run()\n except KeyboardInterrupt:\n pass\n\n # cleanup\n pipeline.set_state(Gst.State.NULL)\n\n process_result = True\n\n return process_result", "def cameraType(self):\r\n cls = mxs.classof(self._nativePointer)\r\n if cls in (mxs.FreeCamera, mxs.TargetCamera):\r\n return CameraType.Standard\r\n\r\n elif cls == mxs.Physical:\r\n return 
CameraType.Physical\r\n\r\n elif cls == mxs.VRayPhysicalCamera:\r\n return CameraType.Physical\r\n return 0", "def _process_cameras(dataset_info, example, is_raw):\n raw_cameras = example['cameras']\n raw_cameras = tf.reshape(raw_cameras, (-1, dataset_info.sequence_size, _NUM_POSE_PARAMS))\n\n if not is_raw:\n position = raw_cameras[:, :, 0:3]\n yaw = raw_cameras[:, :, 3:4]\n pitch = raw_cameras[:, :, 4:5]\n cameras = tf.concat([position, tf.sin(yaw), tf.cos(yaw), tf.sin(pitch), tf.cos(pitch)], axis=2)\n return cameras\n \n else:\n return raw_cameras", "def concatenate_images(img_1, img_2):\n res_4 = None;\n if not (img_1 is None):\n # Resize Camera and Satellite Image:\n res_1 = cv2.resize(img_2, None,fx=0.7, fy=0.7, interpolation = cv2.INTER_CUBIC)\n res_2 = cv2.resize(img_1, None,fx=0.7, fy=0.7, interpolation = cv2.INTER_CUBIC)\n\n #Concatenate Camera and Satellite view on single image\n h_1 = res_1.shape[0];\n w_1 = res_1.shape[1];\n h_2 = res_2.shape[0];\n w_2 = res_2.shape[1];\n scale = float(h_1)/float(h_2);\n\n h_2 = h_1;\n w_2 = int(w_2*scale)\n dim = (w_2, h_2);\n res_3 = cv2.resize(res_2, dim, interpolation = cv2.INTER_CUBIC)\n\n res_4 = np.concatenate((res_1, res_3), axis=1)\n\n return res_4;", "def getAsSource(self, cameraType):\n res = {}\n for cam in self.__data[cameraType]:\n if cam[\"work\"] == 0:continue\n if cameraType == \"DroneCamera\":\n droneModel = cam[\"droneController\"]\n res[cam[\"id\"]] = {\"source\": initDrone(droneModel), \"altName\": cam['altName'], \"work\":cam[\"work\"]}\n elif cameraType == \"FixedCamera\":\n sourceDetected = False\n if \"ip\" in cam.keys():\n source = cam[\"ip\"]\n sourceDetected = True\n if sourceDetected == False:\n raise KeyError(\"Unable to detect source keys\")\n alertZones = []\n if \"alerts\" in cam.keys():\n alertZones = cam[\"alerts\"]\n res[cam[\"id\"]] = {\"source\": source, \"altName\": cam['altName'], \"alerts\": alertZones, \"work\":cam[\"work\"]}\n elif cameraType == \"Video\":\n res[cam[\"id\"]] = {\"path\" : cam['path'], \n \"altName\":cam['altName'], \n \"timestamp\":cam['timestamp'], \n \"work\":cam[\"work\"]}\n else:\n print(cam)\n print(cameraType)\n raise ValueError('unable to parse cameraType')\n return res", "def get_camera_streaming(cam_id, w, h, fps):\n capture = cv2.VideoCapture(cam_id)\n capture.set(cv2.CAP_PROP_FRAME_WIDTH, w)\n capture.set(cv2.CAP_PROP_FRAME_HEIGHT, h)\n capture.set(cv2.CAP_PROP_FPS, fps)\n if not capture:\n print(\"Failed to initialize camera\")\n sys.exit(1)\n return capture", "def main():\n\n # Retrieve singleton reference to system object\n system = PySpin.System.GetInstance()\n\n # Retrieve list of cameras from the system\n cam_list = system.GetCameras()\n\n num_cameras = cam_list.GetSize()\n\n print(\"Number of cameras detected:\", num_cameras)\n # Finish if there are no cameras\n if num_cameras == 0:\n # Clear camera list before releasing system\n cam_list.Clear()\n\n # Release system\n system.ReleaseInstance()\n\n print(\"Not enough cameras!\")\n \n return False\n\n cam = cam_list.GetByIndex(0)\n run_single_camera(cam)\n\n\n # Release reference to camera\n del cam\n\n # Clear camera list before releasing system\n cam_list.Clear()\n\n # Release instance\n system.ReleaseInstance()", "def generate_test_cameras(self):\n def generate_cameras_for_block(names, block_name, data):\n item_dict = {}\n for name in names:\n item_dict['{}_{}'.format(name, block_name)] = {\n 'block': block_name,\n **data,\n }\n return item_dict\n\n camera_data = {\n 'ip_addr': 'rtsp://192.168.1.1',\n 'coords': [0, 
0],\n 'point_coords_in_frame': [0, 1, 2, 3, 4, 5, 6, 7],\n 'point_coords_in_image': [0, 1, 2, 3, 4, 5, 6, 7],\n }\n\n self.cs_b1_f0_l1_o1_dict =\\\n generate_cameras_for_block(\n ['c0', 'c1', 'c2_del', 'c3_del', 'c4_del', 'c5_del'],\n 'b1_f0_l1_o1',\n camera_data)\n self.cs_b1_f0_l1_o2_dict =\\\n generate_cameras_for_block(\n ['c0', 'c1', 'c2_del', 'c3_del', 'c4_del', 'c5_del'],\n 'b1_f0_l1_o2',\n camera_data)\n self.cs_b1_f0_l1_sub1_o1_dict =\\\n generate_cameras_for_block(\n ['c0', 'c1', 'c2_del', 'c3_del', 'c4_del', 'c5_del'],\n 'b1_f0_l1_sub1_o1',\n camera_data)\n self.cs_b1_f0_l1_sub1_o2_dict =\\\n generate_cameras_for_block(\n ['c0', 'c1', 'c2_del', 'c3_del'],\n 'b1_f0_l1_sub1_o2',\n camera_data)\n\n self.cs_dict = {\n **self.cs_b1_f0_l1_o1_dict,\n **self.cs_b1_f0_l1_o2_dict,\n **self.cs_b1_f0_l1_sub1_o1_dict,\n **self.cs_b1_f0_l1_sub1_o2_dict,\n }\n\n # generate blocks in database\n self.cameras = \\\n self.create_cameras_from_data(self.cs_dict, self.blocks)", "def spinupcvstreams():\n global _riverprocess\n global _cityprocess\n if __name__ == \"__main__\":\n _riverprocess = CVStream(OPENCV_STREAM_RIVER)\n CVPROCESSES.append(_riverprocess)\n _cityprocess = CVStream(OPENCV_STREAM_CITY)\n CVPROCESSES.append(_cityprocess)\n _riverprocess.start()\n _cityprocess.start()", "def main():\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\"-hgt\", \"--imgHeight\", help=\"The height of the images, default=720.\",\n type=int, default=720)\n\n parser.add_argument(\"-wd\", \"--imgWidth\", help=\"The width of the images, default=1280.\",\n type=int, default=1280)\n\n parser.add_argument(\"-r\", \"--chessboardRows\", help=\"The rows of the chessboard calibration images, default=6.\",\n type=int, default=6)\n\n parser.add_argument(\"-c\", \"--chessboardCols\", help=\"The cols of the chessboard calibration images, default=9.\",\n type=int, default=9)\n\n parser.add_argument(\"-cp\", \"--calibrationPath\", help=\"The height of the images, default=720.\",\n type=str, default='')\n\n parser.add_argument(\"-in\", \"--inputVideoPath\", help=\"The path to the input video to be processed.\",\n type=str, default='')\n\n parser.add_argument(\"-out\", \"--outputVideoPath\", help=\"The path to the where to store output video.\",\n type=str, default='')\n\n args = parser.parse_args()\n\n print(args)\n\n assert args.calibrationPath != '', \"The path to calibration images can't be empty\"\n assert args.inputVideoPath != '', \"The path to input video can't be empty\"\n assert args.outputVideoPath != '', \"The path to output video can't be empty\"\n\n camera_mtx, dist_coeff = CameraCalibration((args.imgHeight, args.imgWidth),\n (args.chessboardRows, args.chessboardCols),\n args.calibrationPath).calibrate()\n print(\"Camera Mtx\", camera_mtx)\n print(\"Distortion Coefficient\", dist_coeff)\n # img = cv2.imread('test_images/test5.jpg')\n # img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n\n AdvancedLaneDetection(args.inputVideoPath, camera_mtx, dist_coeff).process_video(args.outputVideoPath)\n\n # cv2.imwrite(\"output.jpg\", result)", "def initialize_camera(self):\n if Rescue_PI.input_video_file_path is None:\n print(\"[INFO] starting threaded video stream...\")\n self.vs = VideoStream(src=VID_CAM_INDEX).start()\n else:\n self.vs = cv2.VideoCapture(Rescue_PI.input_video_file_path)", "def imaging(input_model, reference_files):\n detector = cf.Frame2D(name='detector', axes_order=(0, 1), unit=(u.pix, u.pix))\n v2v3 = cf.Frame2D(name='v2v3', axes_order=(0, 1), unit=(u.deg, u.deg))\n world = 
cf.CelestialFrame(reference_frame=coord.ICRS(), name='world')\n\n subarray2full = subarray_transform(input_model)\n imdistortion = imaging_distortion(input_model, reference_files)\n distortion = subarray2full | imdistortion\n distortion.bounding_box = imdistortion.bounding_box\n del imdistortion.bounding_box\n tel2sky = pointing.v23tosky(input_model)\n pipeline = [(detector, distortion),\n (v2v3, tel2sky),\n (world, None)]\n return pipeline", "def resize_secondary_image(primary_image, secondary_image):\n im_primary = Image.open(primary_image)\n im_secondary = Image.open(secondary_image)\n\n # get width and height of primary image\n width_primary, height_primary = im_primary.size\n\n # resize the second image to the size of the primary image\n # WARNING this does not take into account proportions of secondary image\n resized_secondary_image = im_secondary.resize((width_primary,\n height_primary), resample=0)\n\n return resized_secondary_image", "def __init__(self, pn_output=\"./\"):\n # Initialize the video stream, then allow the camera sensor to warm up\n print(\"[INFO] starting video stream...\")\n self.vs = cv2.VideoCapture(0) # Capture video frames, 0 is default video camera\n time.sleep(2.0)\n\n # Load config\n config = configparser.ConfigParser()\n config.read(fn_config)\n self.pn_guest_images = config['DEFAULT']['pn_guest_images_archive']\n self.guest_archive = p7zip(self.pn_guest_images)\n self.camera_rot = int(config['DEFAULT']['camera_rot'])\n self.image_width = int(config['DEFAULT']['image_width'])\n self.max_capture_interval = float(config['DEFAULT']['capture_interval'])\n self.max_capture_length = int(config['DEFAULT']['max_capture_length'])\n self.max_images = int(config['DEFAULT']['max_images'])\n\n # Capture Vars\n self.curr_pic = None # Current image from the camera\n self.gst_capture = None\n self.start_time = time.time()\n self.save_time = time.time()\n self.pic_num = None\n self.pn_gstcap_out = None\n\n # Face Detection Model\n self.min_detec_conf = float(config['DEFAULT']['min_detec_conf'])\n self.min_face_px = make_tuple(config['DEFAULT']['min_face_px'])\n pn_detector_model = config['DEFAULT']['pn_detector_model']\n self.trainRBGavg = make_tuple(config['DEFAULT']['detector_trainrgbavg'])\n print(\"[INFO] loading face detector and embedding model...\")\n protoPath = os.path.sep.join([pn_detector_model, \"deploy.prototxt\"])\n modelPath = os.path.sep.join([pn_detector_model,\n \"res10_300x300_ssd_iter_140000.caffemodel\"])\n self.detector = cv2.dnn.readNetFromCaffe(protoPath, modelPath)\n self.detector.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)\n\n # Face Recognition (extract/recognize embeddings) Model\n self.min_recog_prob = float(config['DEFAULT']['min_recog_prob'])\n fn_embedding_model = config['DEFAULT']['fn_embedding_model']\n self.embedder = cv2.dnn.readNetFromTorch(fn_embedding_model)\n self.embedder.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)\n self.gst_identify = False\n self.guest_ids = {}\n\n # Guest Info (update outside of function)\n self.known_guest_meta = None", "def camera_listener(self):\n camera_sub_cb_grp = ReentrantCallbackGroup()\n self.create_subscription(CameraMsg,\n constants.CAMERA_MSG_TOPIC,\n self.camera_callback,\n 10,\n callback_group=camera_sub_cb_grp)\n display_img_sub_cb_grp = ReentrantCallbackGroup()\n self.create_subscription(Image,\n constants.DISPLAY_MSG_TOPIC,\n self.display_callback,\n 10,\n callback_group=display_img_sub_cb_grp)", "def run_single_camera(cam):\r\n try:\r\n result = True\r\n err = False\r\n\r\n # Retrieve TL device 
nodemap and print device information\r\n nodemap_tldevice = cam.GetTLDeviceNodeMap()\r\n\r\n #result &= print_device_info(nodemap_tldevice)\r\n #dont need to print all of that\r\n\r\n # Initialize camera\r\n cam.Init()\r\n\r\n # Retrieve GenICam nodemap\r\n nodemap = cam.GetNodeMap()\r\n\r\n #configure settings\r\n if configure_exp_gain(cam,exp,gain) is False:\r\n return False\r\n\r\n # Configure trigger\r\n if configure_trigger(cam) is False:\r\n return False\r\n\r\n # Acquire images\r\n snr = acquire_images(cam, nodemap, nodemap_tldevice)\r\n #print(\"SNR after acquire_images: \", snr)\r\n\r\n # Reset trigger\r\n result &= reset_trigger(nodemap)\r\n\r\n # Deinitialize camera\r\n cam.DeInit()\r\n\r\n except PySpin.SpinnakerException as ex:\r\n print('Error: %s' % ex)\r\n result = False\r\n\r\n return snr", "def generate_images(video_path, index_first, index_second):\n cap = cv2.VideoCapture(video_path)\n cap.set(cv2.CAP_PROP_POS_FRAMES, index_first)\n success, img = cap.read()\n cv2.imwrite(os.path.join(data_folder, 'demo_single_first.png'), img)\n cap.set(cv2.CAP_PROP_POS_FRAMES, index_second)\n success, img = cap.read()\n cv2.imwrite(os.path.join(data_folder, 'demo_single_second.png'), img)", "def __init__(self,\n camera_topic_name=\"/resize_img/image\",\n ): # sub class args\n\n self.camera_topic_name = camera_topic_name\n\n self._run_rate = 1\n self.image_check_interval_s = 5\n self.latest_im_check_time_s = rospy.get_time()\n\n msg_received = False\n # get initial image to generate our pixel sampling location bbased on the image height/width\n while not msg_received and not rospy.is_shutdown():\n try:\n image_msg = rospy.wait_for_message(self.camera_topic_name, Image, timeout=20)\n im = bridge.imgmsg_to_cv2(image_msg)\n # get the characteristics of our image\n self.im_height = image_msg.height\n self.im_width = image_msg.width\n # initialise the pixel checking arrays\n self.pixels2check_ver = np.ceil(np.linspace(1, self.im_height-1, 10)).astype(np.uint8)\n self.pixels2check_hor = np.ceil(np.linspace(1, self.im_width-1, 10)).astype(np.uint8)\n self.pixel_vals_this = im[self.pixels2check_ver, self.pixels2check_hor]\n self.pixel_vals_previous = np.clip(self.pixel_vals_this + 10, 0, 255)\n msg_received = True\n except rospy.ROSException as e:\n rospy.logwarn_throttle(5, 'camera watchdog node timed out waiting for image message \\\n - traceback was {}'.format(e))\n # except e:\n # rospy.logwarn(('{} happened'.format(e)))\n\n self.downcam_sub = rospy.Subscriber(self.camera_topic_name, Image, self.downcam_callback, queue_size=5)\n\n ## todo - implement bottom clearance sensor checker - perhaps create another node for this?\n # add flag to set / unset altitude sensor check\n self.altitude_bottom_clearance = Float32()\n self.alt_sub = rospy.Subscriber('mavros/altitude', Altitude, self.altitude_callback, queue_size=5)", "def main():\n camera = picamera.PiCamera()\n camera.resolution = (RESOLUTIONX, RESOLUTIONY)\n camera.iso = 800\n time.sleep(2)\n while True:\n camera.capture('current-image.jpg')\n adapt_steering(navigation.get_xposition('current-image.jpg'))\n time.sleep(0.4)", "def _syncCameras(self, camera, ev):\n\t\tself.rendererOverlay.GetActiveCamera().ShallowCopy(camera)", "def initialize(acquisition_mode, image_mode, save_path, save_nickname=True):\r\n\r\n # get the system and cameras using library\r\n system = PySpin.System.GetInstance()\r\n cameras = system.GetCameras()\r\n num_cams = cameras.GetSize()\r\n print(\"Cameras detected: \" + str(num_cams))\r\n \r\n serial_numbers = []\r\n 
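# A hedged aside: serial numbers come from the transport-layer (TLDevice) nodemap,\n # which is readable even before cam.Init(); an equivalent raw nodemap read (sn_node\n # is an assumed local name) would be:\n # sn_node = PySpin.CStringPtr(cam.GetTLDeviceNodeMap().GetNode('DeviceSerialNumber'))\n # serial = sn_node.GetValue() if PySpin.IsReadable(sn_node) else ''\n # The loop below uses the QuickSpin shortcut cam.TLDevice.DeviceSerialNumber instead.\n 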
# initialize the cameras and get their serial numbers\r\n for i, cam in enumerate(cameras):\r\n cam.Init()\r\n serial_numbers.append(cam.TLDevice.DeviceSerialNumber.GetValue())\r\n print(\"Initialized cameras.\")\r\n del cam\r\n \r\n # set acquisition and format mode \r\n # manual and timed modes acquire images in single frame mode\r\n # note that to use continuous mode in a six camera setup, the packet delay must be set to 10,000 in FlyCap2\r\n for i, cam in enumerate(cameras):\r\n if acquisition_mode == CONTINUOUS_MODE:\r\n cam.AcquisitionMode.SetValue(PySpin.AcquisitionMode_Continuous)\r\n else:\r\n cam.AcquisitionMode.SetValue(PySpin.AcquisitionMode_SingleFrame)\r\n cam.PixelFormat.SetValue(image_mode)\r\n del cam\r\n \r\n # create the folders required for saving\r\n # create a new folder for the date/time\r\n timestamp = datetime.datetime.now().strftime('%Y-%m-%d %Hhr %Mmin %Ss')\r\n time_path = save_path + \"\\\\Camera Run \" + timestamp\r\n os.mkdir(time_path)\r\n file_path = time_path\r\n # create the camera folders\r\n # save nicknames is enabled - use the camera numbers\r\n if save_nickname:\r\n camera_numbers = range(1, num_cams+1)\r\n # use the camera serial numbers for naming instead\r\n else:\r\n camera_numbers = serial_numbers\r\n for number in camera_numbers:\r\n cam_path = time_path + \"\\\\Camera_\" + str(number)\r\n os.mkdir(cam_path)\r\n\r\n return (system, cameras, file_path)", "def get_image():\n\n # Access the global variable and activate the saving for the last camera's\n # frame\n global _save_image\n _save_image = True", "async def main():\n\tresult = True\n\n\t# Set up system object\n\tsystem = PySpin.System.GetInstance()\n\tversion = system.GetLibraryVersion()\n\tprint('\\n***** Setting up Camera *****')\n\tprint('\\nSpinnaker library version: {}.{}.{}.{}'.format(version.major, version.minor, version.type, version.build))\n\n\t# Retrieve list of cameras\n\tcam_list = system.GetCameras()\n\tnum_cameras = cam_list.GetSize()\n\tqueue = asyncio.Queue()\n\n\tprint('Number of cameras detected: {}'.format(num_cameras))\n\n\t# Create save directories\n\tfor DIR in SAVE_DIRS:\n\t\tos.makedirs(DIR, exist_ok=True)\n\n\t# Exit if there are no cameras:\n\tif num_cameras == 0:\n\t\tcam_list.Clear()\n\t\tsystem.ReleaseInstance()\n\t\tprint('No cameras detected!')\n\t\tinput('Done! 
Press Enter to exit...')\n\t\treturn False\n\n\t# Match the serial numbers to save locations\n\tassert num_cameras <= len(SAVE_DIRS), 'More cameras than save directories'\n\tcamera_sns = [cam.GetUniqueID() for cam in cam_list]\n\tsave_dir_per_cam = dict(zip(camera_sns, SAVE_DIRS))\n\n\t# Configure cameras\n\tprint('Configuring all cameras...\\n')\n\tcam_1 = cam_list.GetBySerial(camera_sns[0])\n\tcam_2 = cam_list.GetBySerial(camera_sns[1])\n\n\t# Print device information for the camera\n\tprint('*** DEVICE INFORMATION ***\\n')\n\tnodemap_tldevice_1 = cam_1.GetTLDeviceNodeMap()\n\tnodemap_tldevice_2 = cam_2.GetTLDeviceNodeMap()\n\tprint_device_info(nodemap_tldevice_1, 1)\n\tprint_device_info(nodemap_tldevice_2, 2)\n\n\t# Initialize the cameras\n\tcam_1.Init()\n\tcam_2.Init()\n\n\t# Setup the hardware triggers\n\t# Primary\n\tprint('*** CONFIGURING CAMERA 1 ***')\n\tresult &= primary_cam_setup(cam_1)\n\n\t# Secondary\n\tprint('*** CONFIGURING CAMERA 2 ***')\n\tresult &= secondary_cam_setup(cam_2)\n\n\tcam_2.BeginAcquisition()\n\tcam_1.BeginAcquisition()\n\n\t# Start the acquisition and save coroutines\n\tacquisition = [asyncio.gather(acquire_images(queue, cam)) for cam in cam_list]\n\tsavers = [asyncio.gather(save_images(queue, save_dir_per_cam)) for _ in range(NUM_SAVERS)]\n\n\tawait asyncio.gather(*acquisition)\n\t\n\tprint('\\nAcquisition Complete!\\n')\n\n\tresult &= reset_camera(1, cam_1)\n\tresult &= reset_camera(2, cam_2)\n\n\tcam_1.DeInit()\n\tcam_2.DeInit()\n\n\tdel cam_1\n\tdel cam_2\n\n\t# cancel the now idle savers\n\tfor c in savers:\n\t\tc.cancel()\n\n\t# Clean up environment and shut down cameras properly\n\tcam_list.Clear()\n\tsystem.ReleaseInstance()\n\n\tinput('Done! Press Enter to exit...')\n\treturn result", "def snapshot(self):\n\n if self.pj[OBSERVATIONS][self.observationId][TYPE] in [MEDIA]:\n\n if self.playerType == VLC:\n\n if self.playMode == FFMPEG:\n\n for idx, media in enumerate(self.pj[OBSERVATIONS][self.observationId][FILE][PLAYER1]):\n if self.FFmpegGlobalFrame < sum(self.duration[0:idx + 1]):\n\n p = pathlib.Path(media)\n snapshotFilePath = str(p.parent / \"{}_{}.png\".format(p.stem, self.FFmpegGlobalFrame))\n\n if self.detachFrameViewer or self.second_player():\n self.frame_viewer1.lbFrame.pixmap().save(snapshotFilePath)\n elif not self.detachFrameViewer:\n self.lbFFmpeg.pixmap().save(snapshotFilePath)\n self.statusbar.showMessage(\"Snapshot player #1 saved in {}\".format(snapshotFilePath), 0)\n break\n\n if self.second_player():\n for idx, media in enumerate(self.pj[OBSERVATIONS][self.observationId][FILE][PLAYER2]):\n if self.FFmpegGlobalFrame2 < sum(self.duration2[0:idx + 1]):\n p = pathlib.Path(media)\n snapshotFilePath = str(p.parent / \"{}_{}.png\".format(p.stem, self.FFmpegGlobalFrame2))\n\n self.frame_viewer2.lbFrame.pixmap().save(snapshotFilePath)\n self.statusbar.showMessage(\"Snapshot player #2 saved in {}\".format(snapshotFilePath), 0)\n break\n\n else: # VLC\n\n current_media_path = url2path(self.mediaplayer.get_media().get_mrl())\n # TODO: replace with pathlib\n dirName, fileName = os.path.split(current_media_path)\n self.mediaplayer.video_take_snapshot(0, \"{dirName}{sep}{fileNameWOExt}_{time}.png\".format(\n dirName=dirName,\n sep=os.sep,\n fileNameWOExt=os.path.splitext(fileName)[0],\n time=self.mediaplayer.get_time()),\n 0, 0)\n\n # check if multi mode\n # second video together\n if self.simultaneousMedia:\n current_media_path = url2path(self.mediaplayer2.get_media().get_mrl())\n\n dirName, fileName = os.path.split(current_media_path)\n 
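# Note on the libvlc call below: video_take_snapshot(num, psz_filepath, i_width,\n # i_height) grabs a frame from video output num (0 = first); passing 0 for both\n # width and height keeps the native frame size, and the file is saved in VLC's\n # snapshot format (PNG by default).\n 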
self.mediaplayer2.video_take_snapshot(0, \"{dirName}{sep}{fileNameWOExt}_{time}.png\".format(\n dirName=dirName,\n sep=os.sep,\n fileNameWOExt=os.path.splitext(fileName)[0],\n time=self.mediaplayer2.get_time()),\n 0, 0)", "def startCamera(self):\n if self.video == \"camera\":\n self.cap = cv2.VideoCapture(gstreamer_pipeline(\n capture_width=416, capture_height=416, flip_method=0), cv2.CAP_GSTREAMER)\n else:\n video_path = Path(self.video)\n if not video_path.exists():\n raise Exception(\"Video file not found\")\n self.cap = cv2.VideoCapture(str(video_path))", "def initialize_cameras(self):\n self.logger.info('Initializing cameras')\n config_mic = self.config['camera_microscope']\n self.camera_microscope = Camera(config_mic['init'], initial_config=config_mic['config'])\n\n config_fiber = self.config['camera_fiber']\n self.camera_fiber = Camera(config_fiber['init'], initial_config=config_fiber['config'])\n\n for cam in (self.camera_fiber, self.camera_microscope):\n self.logger.info(f'Initializing {cam}')\n cam.initialize()\n self.logger.debug(f'Configuring {cam}')", "def main_func_video_camera(param_list: list = None) -> bool:\r\n # index of param\r\n # noinspection PyPep8Naming\r\n PORT_RAW_PICT = 0\r\n\r\n # check if param OK\r\n if len(param_list) != 1:\r\n log_error_to_console(\"GET FRAME VIDEO CAPTURE MAIN FUNCTION PARAM NOK\", str(len(param_list)))\r\n return False\r\n else:\r\n port_image = get_port_from_wave(name=param_list[PORT_RAW_PICT])\r\n\r\n try:\r\n # noinspection PyUnresolvedReferences\r\n success, port_image.arr[:] = global_var_handler.VIDEO.read()\r\n if success is True:\r\n port_image.set_valid()\r\n except BaseException as error:\r\n is_error()\r\n # noinspection PyUnresolvedReferences\r\n log_error_to_console('RAW PICTURE NOK TO READ: ' + str(global_var_handler.VIDEO.__str__()), str(error))\r\n port_image.set_invalid()\r\n pass\r\n\r\n # noinspection PyUnresolvedReferences\r\n log_to_file(str(global_var_handler.FRAME))\r\n # noinspection PyUnresolvedReferences\r\n log_to_file(global_var_handler.STR_L0_SIZE)\r\n\r\n return True", "def main():\n doc = c4d.documents.GetActiveDocument() # Get active Cinema 4D document\n keymod = GetKeyMod() # Get keymodifier\n selected = doc.GetActiveObjects(0) # Get selected objects\n cameras = [] # Collect cameras to an array\n doc.StartUndo() # Start recording undos\n # Collect cameras and do preparation operations\n for s in selected: # Iterate through objects\n if (s.GetType() == 5103) or (s.GetType() == 1057516): # If object is a camera object (standard C4D camera or Redshift camera)\n if s.GetType() == 5103: # If standard C4D camera\n dummyCam = DummyStandardCamera(s, doc) # Dummy camera\n dataVault = GetDataVault(5103) # Get corresponding data vault\n elif s.GetType() == 1057516: # If RS camera\n dummyCam = DummyRedshiftCamera(s, doc) # Dummy camera\n dataVault = GetDataVault(1057516) # Get corresponding data vault\n bakeCam = dummyCam.GetClone() # Bake camera\n name = s.GetName() # Get camera's name\n bakeCam.SetName(name+suffix) # Set baked camera's name\n doc.InsertObject(bakeCam) # Insert camera to document\n doc.AddUndo(c4d.UNDOTYPE_NEW, bakeCam) # Add undo command for creating a new object\n MoveToLast(bakeCam, doc) # Move object to last\n RemoveTags(bakeCam) # Remove tags of the object\n cameras.append([s, dummyCam, bakeCam, dataVault]) # Original camera, dummy camera, camera to bake\n\n doc.ExecutePasses(None, True, True, True, 0) # Animate the current frame of the document\n Bake(cameras) # Bake the camera (standard C4D 
camera)\n CleanKeys(cameras) # Clean keyframes\n\n # Remove dummy cameras\n for i in range(0, len(cameras)):\n cameras[i][1].Remove() # Delete Dummy camera(s)\n\n # Sort baked cameras\n for i in reversed(range(0, len(cameras))):\n MoveToFirst(cameras[i][2], doc) # Move camera to top of the hierarchy list\n\n #if keymod == \"Shift\":\n # CopyRendererTags(s, bakeCam) # Copies renderer tags from source camera to bake camera\n\n doc.EndUndo() # Stop recording undos\n c4d.EventAdd() # Refresh Cinema 4D\n c4d.StatusClear() # Clear status", "def find_scene(orig_scene, match):\n \n image_to_compare = orig_scene.copy()\n \n r,c,_ = match.shape\n ir, ic, _ = image_to_compare.shape\n min_ssd = None\n\n\n for x in range(r):\n for y in range(c):\n # compare to sample image to start off with...\n # mse(imageA, imageB, mask=0) \n\n# if x % 25 == 0 and y == 50:\n# print x\n\n # assume x,y is top left corner, \n imageA = match[x:x+ir, y:y+ic, :]\n\n if imageA.shape[0] != ir or imageA.shape[1] != ic:\n continue\n\n # add the mask \n\n current_ssd = ssd(imageA, image_to_compare)\n if current_ssd == None:\n pass\n elif min_ssd == None:\n min_ssd = current_ssd\n best_sample = imageA\n best_x = x\n best_y = y\n elif min_ssd > current_ssd:\n min_ssd = current_ssd\n best_sample = imageA\n best_x = x\n best_y = y\n return best_x, best_y, best_sample", "def _pool_and_resize(self):\n # Pool if there are enough screens to do so.\n if self.frame_skip > 1:\n np.maximum(\n self.screen_buffer[0],\n self.screen_buffer[1],\n out=self.screen_buffer[0])\n\n transformed_image = cv2.resize(\n self.screen_buffer[0], (self.screen_size, self.screen_size),\n interpolation=cv2.INTER_AREA)\n int_image = np.asarray(transformed_image, dtype=np.uint8)\n return np.expand_dims(int_image, axis=2)", "def parse_cameras(number_of_cameras: int,\n nvm_content: List[str],\n offset: int,\n camera_id_offset: int,\n filter_list: Optional[Set[str]],\n nvm_images_path: str,\n cameras: kapture.Sensors,\n images: kapture.RecordsCamera,\n trajectories: Optional[kapture.Trajectories]) -> List[str]:\n image_idx_to_image_name = []\n # parse all cameras\n for i in range(0, number_of_cameras):\n line = nvm_content[i + offset].split()\n timestamp = i + camera_id_offset\n camera_id = f'sensor{timestamp}'\n image_file_name = line[0]\n image_idx_to_image_name.append(image_file_name)\n if filter_list is not None and image_file_name not in filter_list:\n # file_name is not in the list, do not add it\n continue\n\n focal_length = float(line[1])\n quaternion_wxyz = quaternion.from_float_array([float(v) for v in line[2:6]])\n camera_center = np.array([float(v) for v in line[6:9]])\n # https://github.com/colmap/colmap/blob/67e96894d4beed7cc93f1c0755a98d3664f85e63/src/base/reconstruction.cc#L891\n radial_distortion = -float(line[9]) # SIGN !\n\n try:\n # lazy open\n with Image.open(path.join(nvm_images_path, image_file_name)) as im:\n width, height = im.size\n except (OSError, PIL.UnidentifiedImageError):\n # It is not a valid image: skip it\n logger.info(f'Skipping invalid image file {image_file_name}')\n continue\n\n translation = - np.matmul(quaternion.as_rotation_matrix(quaternion_wxyz), camera_center)\n pose = kapture.PoseTransform(quaternion_wxyz, translation)\n\n camera = kapture.Camera(MODEL, [width, height, focal_length, width / 2, height / 2, radial_distortion])\n cameras[camera_id] = camera\n\n images[(timestamp, camera_id)] = image_file_name\n if trajectories is not None:\n trajectories[(timestamp, camera_id)] = pose\n return image_idx_to_image_name", "def 
get_sim_images(self, urdf_file, camera_pose_path):\n self.load_urdf(urdf_file, random_pose=False)\n # self.get_plane()\n # self.change_texture(self.plane_id)\n # self.change_texture(self.object_id)\n\n self.create_camera()\n self.from_camera_pose(camera_pose_path)\n self.step(1)\n\n self.get_bgr()\n self.get_seg()\n\n if self.get_object_mask(self.object_id) is None:\n return False\n\n self.get_object_depth()\n self.crop(padding=10, random=False)\n\n print('sim img')\n\n return self.bgr, self.depth

def getCamera2():\n for msg in camera2:\n yield (b'--frame\r\n'\n b'Content-Type: image/jpg\r\n\r\n' + base64.b64decode(msg.value['image_bytes']) + b'\r\n\r\n')

def _create_single_camera(self):\n\n # obtain K matrix from cfg #####################################################################################\n for ii in range(0,3):\n for jj in range(0,3):\n self._k_mat[ii,jj] = self._cfg[\"KMatrix\"][ii*3+jj]\n ############################################################################## end of obtain K matrix from cfg #\n\n # create camera\n bpy.ops.object.camera_add()\n\n # get camera object\n # TODO get without object name:\n self._sensor = bpy.data.objects['Camera']\n\n # change name of camera\n self._sensor.name = self._cfg[\"outputBaseName\"] + '_Camera_RGBD'\n\n # use depth of field if requested ##############################################################################\n if \"depthOfField\" in self._cfg:\n\n # activate depth of field\n self._sensor.data.dof.use_dof = True\n\n # set up params ############################################################################################\n if \"distance\" in self._cfg[\"depthOfField\"]:\n self._sensor.data.dof.focus_distance = self._cfg[\"depthOfField\"][\"distance\"]\n else:\n self._sensor.data.dof.focus_distance = 10.0\n\n if \"fStop\" in self._cfg[\"depthOfField\"]:\n self._sensor.data.dof.aperture_fstop = self._cfg[\"depthOfField\"][\"fStop\"]\n else:\n self._sensor.data.dof.aperture_fstop = 1.5\n\n if \"blades\" in self._cfg[\"depthOfField\"]:\n self._sensor.data.dof.aperture_blades = self._cfg[\"depthOfField\"][\"blades\"]\n else:\n self._sensor.data.dof.aperture_blades = 0\n\n if \"rotationDeg\" in self._cfg[\"depthOfField\"]:\n self._sensor.data.dof.aperture_rotation = self._cfg[\"depthOfField\"][\"rotationDeg\"]*(math.pi/180.0)\n else:\n self._sensor.data.dof.aperture_rotation = 0\n\n if \"ratio\" in self._cfg[\"depthOfField\"]:\n self._sensor.data.dof.aperture_ratio = self._cfg[\"depthOfField\"][\"ratio\"]\n else:\n self._sensor.data.dof.aperture_ratio = 1.0\n #################################################################################### end of set up params #\n else:\n # deactivate depth of field\n self._sensor.data.dof.use_dof = False\n ####################################################################### end of use depth of field if requested #\n\n # set camera params ############################################################################################\n # based on https://blender.stackexchange.com/a/120063\n\n # get focal length and principal point from K matrix\n _f_x = self._k_mat[0,0]\n _f_y = self._k_mat[1,1]\n _c_x = self._k_mat[0,2]\n _c_y = self._k_mat[1,2]\n\n # get image resolution\n _w = self._cfg[\"imageResolution\"][0]\n _h = self._cfg[\"imageResolution\"][1]\n\n # calc field of view\n _fov = 2.0*math.atan(_w/(2*_f_x))\n _fov_deg = _fov*(180./math.pi)\n\n # aspect ratio\n _a_x = 1\n _a_y = 1\n if _f_x > _f_y:\n _a_y = _f_x / _f_y\n elif _f_x < _f_y:\n _a_y = _f_y / _f_x\n\n # calc focal length ratio\n _f_ratio = _f_x / _f_y\n\n # sensor fitting mode according to issue\n pixel_aspect_ratio = 1.0 # assume square pixels\n if 'AUTO' == self._sensor.data.sensor_fit:\n if _f_x*_w >= _f_y*_h:\n _v = _w\n else:\n _v = pixel_aspect_ratio * _h\n else:\n if 'HORIZONTAL' == self._sensor.data.sensor_fit:\n _v = _w\n else:\n _v = pixel_aspect_ratio * _h\n\n # Set shift\n self._sensor.data.shift_x = ((_w/2.)-_c_x)/ _v\n self._sensor.data.shift_y = ((_h/2.)-_c_y)/ _v * _f_ratio\n\n # set field of view for camera\n self._sensor.data.lens_unit = 'FOV'\n self._sensor.data.angle = _fov\n\n # set transformation for camera\n self._sensor.rotation_mode = 'QUATERNION' \n self._base_to_sensor = self._cfg[\"transformation\"]\n self._sensor.location = (self._base_to_sensor[0],self._base_to_sensor[1],self._base_to_sensor[2])\n self._sensor.rotation_quaternion = (self._base_to_sensor[3],\n self._base_to_sensor[4],\n self._base_to_sensor[5],\n self._base_to_sensor[6])\n ##################################################################################### end of set camera params #\n\n # set render pass dict #########################################################################################\n self._general_render_pass_dict = {}\n self._general_render_pass_dict[\"name\"] = self._cfg[\"outputBaseName\"]\n self._general_render_pass_dict[\"imageResolution\"] = [int(self._cfg[\"imageResolution\"][0]),\\\n int(self._cfg[\"imageResolution\"][1])]\n ################################################################################## end of set render pass dict #\n\n # config RGBDPass ##############################################################################################\n if 'RGBDPass' in self._cfg[\"renderPasses\"]:\n rgbd_info = {}\n rgbd_info[\"name\"] = self._cfg[\"outputBaseName\"]\n rgbd_info[\"imageResolution\"] = [int(self._cfg[\"imageResolution\"][0]),int(self._cfg[\"imageResolution\"][1])]\n rgbd_info[\"DepthEnabled\"] = self._cfg[\"renderPasses\"][\"RGBDPass\"][\"DepthEnabled\"]\n self._render_pass_dict[\"RGBDPass\"] = rgbd_info\n ####################################################################################### end of config RGBDPass #

def camera(ctx, cam_id, analytic_addr, width, height):\n if not analytic_addr:\n analytic_addr = [\"localhost:50051\"]\n db = ctx.obj.db\n client = aceclient.AnalyticMultiClient()\n cap = cv2.VideoCapture(int(cam_id))\n cap.set(cv2.CAP_PROP_FRAME_WIDTH, int(width))\n cap.set(cv2.CAP_PROP_FRAME_HEIGHT, int(height))\n classes = {}\n window_names = []\n f_req = analytic_pb2.FrameRequest()\n for a in analytic_addr:\n analytic = analytic_pb2.AnalyticData()\n analytic.addr = a\n f_req.analytics.append(analytic)\n try:\n while cap.isOpened():\n ret, frame = cap.read()\n if not ret:\n print(\"Stream unavailable. 
Exiting.\")\n break\n resp = analytic_pb2.CompositeResults()\n resp = client.process_frame(frame, f_req, resp)\n print(len(window_names))\n render(resp, window_names, classes, frame, db)\n finally:\n cv2.destroyAllWindows()\n print(\"Shutting down\")", "def capture_camera(mirror=True, size=None):\n # カメラをキャプチャする\n cap = cv2.VideoCapture(0) # 0はカメラのデバイス番号\n #HAAR分類器の顔検出用の特徴量\n cascade_path = \"haarcascade_frontalface_alt.xml\"\n color = (255, 255, 255) #白\n #カスケード分類器の特徴量を取得する\n cascade = cv2.CascadeClassifier(cascade_path)\n\n while True:\n count = 0 #参照フレームのカウント\n # retは画像を取得成功フラグ\n ret, frame = cap.read()\n\n # 鏡のように映るか否か\n if mirror is True:\n frame = frame[:,::-1]\n\n # フレームをリサイズ\n # sizeは例えば(800, 600)\n if size is not None and len(size) == 2:\n frame = cv2.resize(frame, size)\n\n k = cv2.waitKey(1) # 1msec待つ\n\n if k == 13: # Enterキーで保存\n cv2.imwrite(\"test.png\", frame)\n\n if k == 27: # ESCキーで終了\n break\n\n\n if count == 10 or count == 0: # 参照フレーム軽減\n #グレースケール変換\n image_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n #物体認識(顔認識)の実行\n facerect = cascade.detectMultiScale(image_gray, scaleFactor=1.1, minNeighbors=1, minSize=(1, 1))\n count = 1\n else:\n count = count + 1\n #rect = (50,50,50,50)\n image = cv2.imread('lena.jpeg')\n #cv2.rectangle(image), tuple([50,50]), tuple([50,50]), color, thickness=2)\n\n if len(facerect) > 0:\n #if True:\n #検出した顔を囲む矩形の作成\n print (\"face rectangle\")\n print (facerect)\n for rect in facerect:\n cv2.rectangle(image, tuple(rect[0:2]),tuple(rect[0:2]+rect[2:4]), color, thickness=2)\n print('check')\n\n # フレームを表示する\n cv2.imshow('camera capture', frame)\n\n # キャプチャを解放する\n cap.release()\n cv2.destroyAllWindows()", "def main():\n\n\t# Run setup\n\ts = Setup()\n\tcontrolRoom, studio, newscaster = s.beginSetup()\n\n\t# Start cameras\n\tcontrolRoom.startCameras()\n\tprint 'Cameras started'\n\tcontrolRoom.setCameraSize()\n\n\tif len(controlRoom.studio.cameras) > 1:\n\t\tprint 'Everything up and running...'\n\n\t\t# Fetch a camera that best matches the headpose angle\n\t\tcamera = controlRoom.getClosestCamera()\n\t\twhile True:\n\t\t\t# If advance camera selection algo indicates true, fetch camera closest to headpose\n\t\t\tif controlRoom.cameraSelectionADV():\n\t\t\t\tcamera = controlRoom.getClosestCamera()\n\t\t\tprint 'Active camera: ' + str(camera.cameraID)\n\t\t\t\n\t\t\t# Capture frame or in simulation mode, light up led\n\t\t\tcamera.capture()\n\n\telif len(controlRoom.studio.cameras) == 1:\n\t\twhile True:\n\t\t\tcontrolRoom.studio.cameras[0].capture()\n\t\t\ttime.sleep(2)\n\telse:\n\t\tprint 'No cameras found! 
Something seems to be wrong...'\n\n\t# Shutdown all cameras and kill all windows\n\tcontrolRoom.shutdownCameras()", "def camera(self):\n self.spectrum = self.spectrum", "def adjust_image_resolution(data):\n\n output_large = cStringIO.StringIO()\n output_default = cStringIO.StringIO()\n output_tiny = cStringIO.StringIO()\n \n try:\n im0 = Image.open(cStringIO.StringIO(data))\n im0.thumbnail((1280, 1280), Image.ANTIALIAS)\n im0.save(output_large, 'JPEG')\n\n im1 = Image.open(cStringIO.StringIO(data))\n im1.thumbnail((1024, 1024), Image.ANTIALIAS)\n # could run entropy check to see if GIF makes more sense given an item.\n im1.save(output_default, 'JPEG')\n \n im2 = Image.open(cStringIO.StringIO(data))\n im2.thumbnail((120, 120), Image.ANTIALIAS)\n im2.save(output_tiny, 'JPEG')\n except IOError:\n return None\n \n return {\"large\" : output_large.getvalue(),\n \"default\" : output_default.getvalue(),\n \"tiny\" : output_tiny.getvalue()}", "def __publish_mayacamera(self, item, output, work_template, primary_publish_path,\n sg_task, comment, thumbnail_path, progress_cb):\n # determine the publish info to use\n #\n progress_cb(10, \"Determining publish details\")\n\n # get the current scene path and extract fields from it\n # using the work template:\n scene_path = os.path.abspath(cmds.file(query=True, sn=True))\n fields = work_template.get_fields(scene_path)\n publish_version = fields[\"version\"]\n tank_type = output[\"tank_type\"]\n\n # extract entity from camera node name\n # handle full paths, trim off everything after the _\n # e.g. |pivot_GRP|master_CAM -> master\n fields[\"name\"] = item[\"name\"].split(\"|\")[-1].split(\"_\")[0]\n\n # create the publish path by applying the fields\n # with the publish template:\n fields[\"Step\"] = \"cam\" # first force step to be camera\n publish_template = output[\"publish_template\"]\n publish_path = publish_template.apply_fields(fields)\n\n # ensure the publish folder exists:\n publish_folder = os.path.dirname(publish_path)\n self.parent.ensure_folder_exists(publish_folder)\n\n # determine the publish name:\n publish_name = fields.get(\"name\")\n if not publish_name:\n publish_name = os.path.basename(publish_path)\n\n\n progress_cb(50.0, \"Exporting from scene\")\n try:\n publish_folder = os.path.dirname(publish_path)\n self.parent.ensure_folder_exists(publish_folder)\n self.parent.log_debug(\"Exporting to %s...\" % (publish_path))\n\n # stash the selection\n sel = cmds.ls(sl=True)\n # clear it\n cmds.select(clear=True)\n # select just the specific camera we are processing\n cmds.select(item[\"name\"],add=True)\n\n # do export selection once camera selected\n cmds.file( publish_path,\n type='mayaBinary',\n exportSelected=True,\n force=True,\n )\n\n # reset the selection to what it was prior\n cmds.select(clear=True)\n for obj in sel:\n cmds.select(obj,add=True)\n\n except Exception, e:\n raise TankError(\"Failed to export to %s - %s\" % (publish_path, e))\n\n # register the publish:\n progress_cb(75, \"Registering the publish\")\n args = {\n \"tk\": self.parent.tank,\n \"context\": self.parent.context,\n \"comment\": comment,\n \"path\": publish_path,\n \"name\": publish_name,\n \"version_number\": publish_version,\n \"thumbnail_path\": thumbnail_path,\n \"task\": sg_task,\n \"dependency_paths\": [primary_publish_path],\n \"published_file_type\":tank_type\n }\n tank.util.register_publish(**args)", "def multi_video_feed(device):\n client_ip = request.environ['REMOTE_ADDR'][:3]\n if str(client_ip[:3]) == \"192\" or str(client_ip) == \"127.0.0.1\":\n 
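# A minimal sketch of the gen() helper used below (it is defined elsewhere in this\n # module; this reconstruction assumes a BaseCamera-style get_frame() returning\n # JPEG bytes):\n # def gen(camera):\n # while True:\n # frame = camera.get_frame()\n # yield (b'--frame\r\n'\n # b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')\n 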
camera_stream = import_module('camera_multicv').BaseCamera\n camera_stream.set_video_source(int(device))\n return Response(gen(camera_stream(int(device))),\n mimetype='multipart/x-mixed-replace; boundary=frame')\n else:\n return render_template('404.html')", "def create_image_pyramids(self):\r\n curr_cam0_img = self.cam0_curr_img_msg.image\r\n # self.curr_cam0_pyramid = cv2.buildOpticalFlowPyramid(\r\n # curr_cam0_img, self.config.win_size, self.config.pyramid_levels, \r\n # None, cv2.BORDER_REFLECT_101, cv2.BORDER_CONSTANT, False)[1]\r\n self.curr_cam0_pyramid = curr_cam0_img\r\n\r\n curr_cam1_img = self.cam1_curr_img_msg.image\r\n # self.curr_cam1_pyramid = cv2.buildOpticalFlowPyramid(\r\n # curr_cam1_img, self.config.win_size, self.config.pyramid_levels, \r\n # None, cv2.BORDER_REFLECT_101, cv2.BORDER_CONSTANT, False)[1]\r\n self.curr_cam1_pyramid = curr_cam1_img", "def guess_camera(device_dims, coating_dims=[0,0,0], \n camera_style=\"perspective\", camera_rotate =0, center=[0, 0], \n isosurface=False):\n from math import sin, cos, pi\n\n camera_position = [0, 0, 0]\n light_position = [0, 0, 0]\n\n deg_to_rads = pi / 180.0\n camera_rotate *= deg_to_rads \n\n if camera_style == \"perspective\":\n x_offset = 1.2\n z_scale = 1.0\n elif camera_style == \"orthographic\":\n x_offset = 1.2\n z_scale = 1.0\n else:\n x_offset = 1.2\n z_scale = 1.0\n print(\"WARNING: Camera parameters are not optimized for this style!\")\n\n # Offset for x,y-dimensions\n camera_offset = x_offset * (max(device_dims) + 0.8 * max(coating_dims))\n light_offset = camera_offset * 1.25 \n\n # Need to scale z-axis settings differently with isosurfaces\n # Related to default origin position:\n # - Pure device render: top at z = 0, centered at x=y=0 by default\n # - Isosurface (and opt. unit cell) bottom at z=0, origin at corner\n if isosurface == False:\n z_lookat = -0.66\n else:\n z_lookat = 0.25\n device_dims[2] *= 1.75\n center = [0.5 * device_dims[0], 0.5 * device_dims[1]]\n camera_offset *= 0.75\n\n # Guess things\n camera_position[0] = ((camera_offset+device_dims[0]+center[0])\n * cos(camera_rotate))\n camera_position[1] = ((camera_offset+device_dims[0]+center[1])\n * sin(camera_rotate))\n camera_position[2] = z_scale * (device_dims[2] + 0.5*coating_dims[2])\n\n camera_look_at = [center[0], center[1], \n (z_lookat*device_dims[2]+0.50*coating_dims[2])]\n\n light_position[0] = ((device_dims[0]+light_offset)\n * cos(camera_rotate-12*deg_to_rads))\n light_position[1] = ((device_dims[1]+light_offset)\n * sin(camera_rotate-12*deg_to_rads))\n light_position[2] = camera_position[2] + light_offset/3.0\n\n if isosurface == True:\n light_position[0] = max(light_position[0], light_position[1])\n light_position[1] = light_position[0]\n\n #print(\"Write_POV estimated camera parameters:\")\n #print(\"camera_position : \" , camera_position)\n #print(\"camera_look_at : \", camera_look_at)\n\n return camera_position, camera_look_at, light_position", "def setup_camera(is_fullscreen = True):\r\n \r\n # ensure that camera is correctly installed and set it up to output to a\r\n # window and turn off AWB and exposure modes. 
If camera does not exist\r\n # print error message and quit program.\r\n camera = picamera.PiCamera()\r\n camera.resolution = s.PICTURE_RESOLUTION\r\n camera.preview_fullscreen = is_fullscreen\r\n camera.awb_mode = \"off\"\r\n #camera.exposure_mode = \"off\"\r\n if not is_fullscreen: camera.preview_window = s.CAMERA_WINDOW_SIZE\r\n time.sleep(s.WAKEUP_DELAY) # camera wake-up time: 2 s\r\n \r\n return camera", "def compute_relations(self):\n\n visible_nodes = {}\n\n self.cameras = self.get_all_cameras()\n rospy.logdebug(self.cameras)\n\n if self.cameras.items():\n try:\n if self.visibility_monitor is None:\n self.visibility_monitor = VisibilityMonitor(self.ctx, self.source)\n rospy.loginfo("[perspective_filter] Visibility monitor now running, please activate the Pygame windows.")\n visible_nodes = self.visibility_monitor.compute_all()\n rospy.logdebug("[perspective_filter] %d perspectives computed " % len(visible_nodes))\n #rospy.logdebug(visible_nodes)\n except Exception as e:\n rospy.logwarn("[perspective_filter] Exception occurred while computing relations: %s" % str(e))\n if self.visibility_monitor:\n self.visible_nodes = {} #visible_nodes\n for camera_name, visibles_obj in visible_nodes.items():\n camera_id = self.source.scene.nodebyname(camera_name)[0].id\n self.visible_nodes[camera_id] = visibles_obj\n for node in visibles_obj:\n if node.parent in self.cameras.keys():\n if self.source.scene.nodes[node.parent] not in visibles_obj:\n visibles_obj.append(self.source.scene.nodes[node.parent])\n\n for agent_id, nodes_seen in self.visible_nodes.items():\n agent = self.source.scene.nodes[agent_id]\n for node in nodes_seen:\n if agent_id in self.previously_visible_nodes:\n if node not in self.previously_visible_nodes[agent_id]:\n self.start_predicate(self.source.timeline, "isVisibleBy", node.name, object_name=agent.name)\n else:\n self.start_predicate(self.source.timeline, "isVisibleBy", node.name, object_name=agent.name)\n\n for agent_id, nodes_previously_seen in self.previously_visible_nodes.items():\n agent = self.source.scene.nodes[agent_id]\n for node in nodes_previously_seen:\n if agent_id in self.visible_nodes:\n if node not in self.visible_nodes[agent_id]:\n self.end_predicate(self.source.timeline, "isVisibleBy", node.name, object_name=agent.name)\n else:\n self.end_predicate(self.source.timeline, "isVisibleBy", node.name, object_name=agent.name)\n\n self.publish_perspectives()\n self.previously_visible_nodes = self.visible_nodes", "def numberOfCamera():\n return numCams", "def run(self):\n\n # Try connecting to the camera unless a connection is refused\n try:\n # Connect to camera at channel\n cap = cv2.VideoCapture(self.channel)\n # Read first frame\n success, img = cap.read()\n\n # Set publisher rate (framerate) to custom framerate if specified, otherwise, set to default\n loop_rate = None\n if self.framerate is None:\n loop_rate = rospy.Rate(cap.get(cv2.CAP_PROP_FPS))\n else:\n loop_rate = rospy.Rate(self.framerate)\n\n # Including 'not rospy.is_shutdown()' in the loop condition here to ensure if this script is exited\n # while this loop is running, the script quits without escalating to SIGTERM or SIGKILL\n while not rospy.is_shutdown() and success:\n # Convert image read from cv2.videoCapture to image message to be published\n image_msg = self.image_tools.convert_to_ros_compressed_msg(img) # Compress image\n # Publish the image\n self.publisher.publish(image_msg)\n\n # Read next image\n success, img = cap.read()\n # Sleep loop to maintain frame rate\n 
loop_rate.sleep()\n except Exception:\n rospy.logerr(f\"Camera not found at channel {self.channel}\")", "def matchsize(A, B):\n wA, hA = cv.GetSize(A)\n wB, hB = cv.GetSize(B)\n if wA == wB and hA == hB:\n return A\n SetImageROI = cv.SetImageROI\n out = cv.CreateImage((wB, hB), A.depth, A.channels)\n wOut, hOut = cv.GetSize(out)\n if wA < wOut and hA < hOut:\n SetImageROI(out, (0, 0, wA, hA))\n elif wA >= wOut and hA < hOut:\n SetImageROI(out, (0, 0, wOut, hA))\n SetImageROI(A, (0, 0, wOut, hA))\n elif wA < wOut and hA >= hOut:\n SetImageROI(out, (0, 0, wA, hOut))\n SetImageROI(A, (0, 0, wA, hOut))\n else: # wA >= wOut and hA >= hOut:\n SetImageROI(A, (0, 0, wOut, hOut))\n cv.Copy(A, out)\n cv.ResetImageROI(out)\n cv.ResetImageROI(A)\n return out", "def initialize_first_frame(self):\r\n img = self.cam0_curr_img_msg.image\r\n grid_height, grid_width = self.get_grid_size(img)\r\n\r\n # Detect new features on the first image.\r\n new_features = self.detector.detect(img)\r\n\r\n # Find the stereo matched points for the newly detected features.\r\n cam0_points = [kp.pt for kp in new_features]\r\n cam1_points, inlier_markers = self.stereo_match(cam0_points)\r\n\r\n cam0_inliers, cam1_inliers = [], []\r\n response_inliers = []\r\n for i, inlier in enumerate(inlier_markers):\r\n if not inlier:\r\n continue\r\n cam0_inliers.append(cam0_points[i])\r\n cam1_inliers.append(cam1_points[i])\r\n response_inliers.append(new_features[i].response)\r\n # len(cam0_inliers) < max(5, 0.1 * len(new_features))\r\n\r\n # Group the features into grids\r\n grid_new_features = [[] for _ in range(self.config.grid_num)]\r\n\r\n for i in range(len(cam0_inliers)):\r\n cam0_point = cam0_inliers[i]\r\n cam1_point = cam1_inliers[i]\r\n response = response_inliers[i]\r\n\r\n row = int(cam0_point[1] / grid_height)\r\n col = int(cam0_point[0] / grid_width)\r\n code = row*self.config.grid_col + col\r\n\r\n new_feature = FeatureMetaData()\r\n new_feature.response = response\r\n new_feature.cam0_point = cam0_point\r\n new_feature.cam1_point = cam1_point\r\n grid_new_features[code].append(new_feature)\r\n\r\n # Sort the new features in each grid based on their response.\r\n # And collect new features within each grid with high response.\r\n for i, new_features in enumerate(grid_new_features):\r\n for feature in sorted(new_features, key=lambda x:x.response, \r\n reverse=True)[:self.config.grid_min_feature_num]:\r\n self.curr_features[i].append(feature)\r\n self.curr_features[i][-1].id = self.next_feature_id\r\n self.curr_features[i][-1].lifetime = 1\r\n self.next_feature_id += 1", "def create_cameras_from_config(config=None, **kwargs):\n cameras = OrderedDict()\n\n config = config or get_config()\n camera_config = config[\"cameras\"]\n\n if camera_config.get(\"devices\", None) is None:\n logger.info('No camera devices found in config.')\n return cameras\n\n # Get a config specific to the local cameras\n config_local = camera_config.copy()\n n_local = 0\n with suppress(KeyError):\n del config_local[\"devices\"]\n config_local[\"devices\"] = [c for c in camera_config[\"devices\"] if not c.get(\n \"is_distributed\", False)]\n n_local = len(config_local['devices'])\n logger.debug(f\"Found {n_local} local cameras in config.\")\n\n # Get a config specific to the distributed cameras\n config_distributed = camera_config.copy()\n n_dist = 0\n with suppress(KeyError):\n del config_distributed[\"devices\"]\n config_distributed[\"devices\"] = [c for c in camera_config[\"devices\"] if c.get(\n \"is_distributed\", False)]\n n_dist = 
len(config_distributed['devices'])\n logger.debug(f\"Found {n_dist} distributed cameras in config.\")\n\n # Create local cameras\n if n_local > 0:\n try:\n cameras_local = create_local_cameras(config=config_local, **kwargs)\n cameras.update(cameras_local)\n except Exception as err:\n logger.error(f\"Error encountered while creating local cameras: {err}\")\n\n # Create distributed cameras\n if n_dist > 0:\n try:\n cameras_dist = create_distributed_cameras(camera_config=config_distributed)\n cameras.update(cameras_dist)\n except Exception as err:\n logger.error(f\"Error encountered while creating distributed cameras: {err}\")\n\n if len(cameras) == 0:\n raise error.CameraNotFound(msg=\"Failed to create any cameras!\")\n\n # Find primary camera\n primary_camera = None\n for camera in cameras.values():\n if camera.is_primary:\n primary_camera = camera\n\n # If no camera was specified as primary use the first\n if primary_camera is None:\n camera_names = sorted(cameras.keys())\n primary_camera = cameras[camera_names[0]]\n primary_camera.is_primary = True\n\n logger.debug(f\"Primary camera: {primary_camera}.\")\n logger.debug(f\"{len(cameras)} cameras created.\")\n\n return cameras", "def camera_image_size():\n camera = GigE_camera(parameter(\"camera.IP_addr\"))\n width,height = camera.width,camera.height\n orientation = parameter('Orientation',90) # in degrees counter-clockwise\n if 
orientation is None: orientation = 0\n orientation %= 360\n if orientation == 90 or orientation == 270: width,height = height,width\n return width,height", "def choose_pair():\n print('This program creates hybrid images from a given pair of images.\\n'\\\n 'Choose a pair of images you would like to make a hybrid image from:\\n'\\\n '1. bicycle + motorcycle\\n'\\\n '2. dog + cat\\n'\\\n '3. Marilyn Monroe + Albert Einstein\\n'\\\n '4. bird + plane\\n'\\\n '5. fish + submarine\\n'\\\n '6. eye + snail\\n'\\\n '7. kitten + moon')\n path = '/Users/frank/PycharmProjects/hybrid-images'\n path = path + '/' + 'pictures'\n choice = int(input('Enter a number from 1 to 7: \\n'))\n while choice not in range(1,8):\n choice = int(input('The value you entered is invalid, try again: \\n')) \n if choice == 1: \n img1 = cv2.imread(path + '/' + 'motorcycle.bmp')\n img2 = cv2.imread(path + '/' + 'bicycle.bmp')\n h1 = w1 = h2 = w2 = 11\n s1 = s2 = 2\n elif choice == 2: \n img1 = cv2.imread(path + '/' + 'dog.bmp')\n img2 = cv2.imread(path + '/' + 'cat.bmp')\n h1 = w1 = h2 = w2 = 21\n s1 = s2 = 7\n elif choice == 3:\n img1 = cv2.imread(path + '/' + 'marilyn.bmp')\n img2 = cv2.imread(path + '/' + 'einstein.bmp')\n h1 = w1 = 23\n h2 = w2 = 11\n s1 = 4\n s2 = 2\n elif choice == 4:\n img1 = cv2.imread(path + '/' + 'bird.bmp')\n img2 = cv2.imread(path + '/' + 'plane.bmp')\n h1 = w1 = 6\n h2 = w2 = 21\n s1 = 2\n s2 = 8\n elif choice == 5:\n img1 = cv2.imread(path + '/' + 'submarine.bmp')\n img2 = cv2.imread(path + '/' + 'fish.bmp')\n h1 = w1 = 29\n h2 = w2 = 17\n s1 = 5\n s2 = 3\n elif choice == 6:\n img1 = cv2.imread(path + '/' + 'snail.png')\n img2 = cv2.imread(path + '/' + 'eye.png')\n h1 = w1 = 16\n h2 = w2 = 25\n s1 = 4\n s2 = 10\n elif choice == 7:\n img1 = cv2.imread(path + '/' + 'moon.png')\n img2 = cv2.imread(path + '/' + 'kitten.png')\n h1 = w1 = 5\n h2 = w2 = 21\n s1 = 1\n s2 = 5\n #convert the images into floating point data type \n img1 = img1.astype('float64')\n img2 = img2.astype('float64')\n param = [h1,w1,s1,h2,w2,s2]\n #override parameters with user-chosen values if declared in the parameters module \n for i in range(len(param)):\n if parameters.values[i] != 0:\n param[i] = parameters.values[i]\n #create Gaussian filters for each image\n kernel1 = create_gaussian_filter((param[0], param[1]), param[2])\n kernel2 = create_gaussian_filter((param[3], param[4]), param[5]) \n return img1, img2, kernel1, kernel2", "def check_sizes(self, show=True):\n # find pixels with common RA \n comRApix = np.where((self.coords1[0]<=np.max(self.coords2[0]))&\n (self.coords1[0]>=np.min(self.coords2[0]))\n )[0]\n \n # find pixels with common DEC \n comDECpix = np.where((self.coords1[1]<=np.max(self.coords2[1]))&\n (self.coords1[1]>=np.min(self.coords2[1]))\n )[0]\n \n print('Image 1 common pixels size: ({:}, {:})'.format(comRApix.size,\n comDECpix.size))\n \n # Corner coordinates \n minRA = np.min(self.coords1[0][comRApix])\n maxRA = np.max(self.coords1[0][comRApix])\n minDEC = np.min(self.coords1[1][comDECpix])\n maxDEC = np.max(self.coords1[1][comDECpix])\n if show:\n comFrame = plt.Rectangle(xy=(minRA, minDEC), width=maxRA-minRA,\n height=maxDEC-minDEC, hatch='\\\\', fill=True,\n color='g', alpha=.3)\n fig = plt.figure(figsize=(10,10))\n ax = fig.add_subplot(111)\n ax.add_patch(comFrame)\n ax.add_patch(self.image1.plotframe(color='r'))\n ax.add_patch(self.image2.plotframe(color='b'))\n ax.annotate('Image 1', xy=(minRA,maxDEC), color='r')\n ax.plot() \n plt.show()\n \n self.boundRA = np.array([minRA, maxRA])\n self.boundDEC 
= np.array([minDEC, maxDEC]) \n self.bounds1 = np.array([[comRApix[0], comRApix[-1]], \n [comDECpix[0], comDECpix[-1]]])\n \n if self.image1.get_pix_area() < self.image2.get_pix_area():\n print('Image 1 has smaller pixels than 2. \\n')\n self.pix_1_smaller = True \n else:\n print('Image 2 has smaller pixels than 1. \\n')\n self.pix_1_smaller = False", "def get_things1(kp_3d, kp_2d, des, comp_list, H, map_3d, map_2d, map_des, map_cam, map_view, my_max):\n # Initializing the arrays\n points_3d = []\n points_2d = []\n camera_ind = []\n points_ind = []\n cam_params = []\n\n dst_3d = kp_3d\n dst_2d = kp_2d\n src_3d = map_3d\n src_2d = map_2d\n src_cam = map_cam\n low_bound = []\n up_bound = []\n my_min = 0\n\n # Updating the Camera parameters in map and setting the bounds for the update \n for i in range(my_min,my_max+1):\n cam_param = [map_view[i,0], map_view[i,1], map_view[i,2], map_view[i,3], map_view[i,4], map_view[i,5], f,0,0]\n cam_params.append(cam_param)\n\n low_bound.append(-np.pi)\n low_bound.append(-np.pi)\n low_bound.append(-np.pi)\n low_bound.append(-20)\n low_bound.append(-np.inf)\n low_bound.append(-20)\n low_bound.append(f-1)\n low_bound.append(-1)\n low_bound.append(-1)\n up_bound.append(np.pi)\n up_bound.append(np.pi)\n up_bound.append(np.pi)\n up_bound.append(20)\n up_bound.append(np.inf)\n up_bound.append(20)\n up_bound.append(f)\n up_bound.append(0)\n up_bound.append(0)\n \n # Updating the Camera parameters for frame and setting the bounds for the update\n r = (R.from_matrix((H[0:3, 0:3]))).as_rotvec()\n t = H[:,3]\n cam_param = [r[0], r[1], r[2], t[0], t[1], t[2], f, 0, 0]\n cam_params.append(cam_param)\n \n low_bound.append(-np.pi)\n low_bound.append(-np.pi)\n low_bound.append(-np.pi)\n low_bound.append(-20)\n low_bound.append(-np.inf)\n low_bound.append(-20)\n low_bound.append(f-1)\n low_bound.append(-1)\n low_bound.append(-1)\n up_bound.append(np.pi)\n up_bound.append(np.pi)\n up_bound.append(np.pi)\n up_bound.append(20)\n up_bound.append(np.inf)\n up_bound.append(20)\n up_bound.append(f)\n up_bound.append(0)\n up_bound.append(0)\n\n new_cam = len(cam_params)-1\n cam_params = np.array(cam_params).reshape(-1,9)\n count = 0\n \n # listing variables to iterate \n l1 = []\n l2 = []\n count = 0\n \n for m in comp_list:\n count+=1\n l1.append(m.queryIdx)\n l2.append(m.trainIdx)\n\n l1 = np.array(l1).reshape(1,-1)\n l2 = np.array(l2).reshape(1,-1)\n l = np.vstack((l1,l2))\n l_fin = l[:,l[1, :].argsort()]\n j = 0\n count = len(points_3d)\n prev = -1\n final_l1 = []\n final_l2 = []\n final_des = []\n\n # Iterating through the list made and making sure there are no duplicates\n while(j<(len(l_fin[0]))):\n i1 = l_fin[0,j]\n i2 = l_fin[1,j]\n if(i2!=prev):\n # Map points insertion\n \n check = 0\n for ii in range(len(src_2d[i1])):\n m_2d = src_2d[i1][ii]\n check = 1\n ind = int(src_cam[i1][ii])\n points_2d.append([int((m_2d[0]%(2*cx))-cx), int((m_2d[1]%(2*cy))-cy),0])\n\n points_ind.append(count)\n camera_ind.append(ind)\n final_l1.append(i1)\n final_l2.append(0)\n \n # Taking mean descriptor if needed; uncomment the 2 lines below\n # x = ((map_des[i1]*len(src_2d[i1]))+des[i2])/(len(src_2d[i1])+1)\n # map_des[i1] = x\n \n if(check==1):\n # Frame points insertion\n points_2d.append([int((dst_2d[i2,0])-cx), int((dst_2d[i2,1])-cy), 0])\n points_ind.append(count)\n camera_ind.append(new_cam)\n final_l1.append(i2)\n final_l2.append(1)\n wld_pt = src_3d[i1]\n points_3d.append([wld_pt[0], wld_pt[1], wld_pt[2]])\n prev = i2\n count = len(points_3d)\n low_bound.append(-20)\n low_bound.append(-np.inf)\n 
low_bound.append(-20)\n up_bound.append(20)\n up_bound.append(np.inf)\n up_bound.append(20)\n src_2d[i1].append([int((dst_2d[i2,0])), int((dst_2d[i2,1]))])\n j+=1\n \n # Final Output\n cam_params = np.array(cam_params).reshape(-1,9)\n points_3d = np.array(points_3d)\n points_2d = np.array(points_2d)\n camera_ind = np.array(camera_ind).reshape(len(camera_ind))\n points_ind = np.array(points_ind).reshape(len(points_ind))\n final_l1 = np.array(final_l1)\n final_l2 = np.array(final_l2)\n return cam_params, points_3d, points_2d, camera_ind, points_ind, final_l1, final_l2, low_bound, up_bound, map_des, src_2d", "def main():\n NAME = os.path.basename(__file__).split(\".\")[0]\n\n with its.device.ItsSession() as cam:\n\n props = cam.get_camera_properties()\n\n white_level = float(props['android.sensor.info.whiteLevel'])\n black_levels = props['android.sensor.blackLevelPattern']\n idxs = its.image.get_canonical_cfa_order(props)\n black_levels = [black_levels[i] for i in idxs]\n\n # Expose for the scene with min sensitivity\n sens_min, sens_max = props['android.sensor.info.sensitivityRange']\n s_ae,e_ae,awb_gains,awb_ccm,_ = cam.do_3a(get_results=True)\n s_e_prod = s_ae * e_ae\n\n # Make the image brighter since the script looks at linear Bayer\n # raw patches rather than gamma-encoded YUV patches (and the AE\n # probably under-exposes a little for this use-case).\n s_e_prod *= 2\n\n # Capture raw frames across the full sensitivity range.\n NUM_SENS_STEPS = 15\n sens_step = int((sens_max - sens_min - 1) / float(NUM_SENS_STEPS))\n reqs = []\n sens = []\n for s in range(sens_min, sens_max, sens_step):\n e = int(s_e_prod / float(s))\n req = its.objects.manual_capture_request(s, e)\n req[\"android.colorCorrection.transform\"] = \\\n its.objects.float_to_rational(awb_ccm)\n req[\"android.colorCorrection.gains\"] = awb_gains\n reqs.append(req)\n sens.append(s)\n\n caps = cam.do_capture(reqs, cam.CAP_RAW)\n\n # A list of the (x,y) coords of the center pixel of a collection of\n # patches of a color checker chart. Each patch should be uniform,\n # however the actual color doesn't matter. 
Note that the coords are\n # relative to the *converted* RGB image, which is 1/2 x 1/2 of the\n # full size; convert back to full.\n img = its.image.convert_capture_to_rgb_image(caps[0], props=props)\n patches = its.image.get_color_checker_chart_patches(img, NAME+\"_debug\")\n patches = [(2*x,2*y) for (x,y) in sum(patches,[])]\n\n lines = []\n for (s,cap) in zip(sens,caps):\n # For each capture, compute the mean value in each patch, for each\n # Bayer plane; discard patches where pixels are close to clamped.\n # Also compute the variance.\n CLAMP_THRESH = 0.2\n planes = its.image.convert_capture_to_planes(cap, props)\n points = []\n for i,plane in enumerate(planes):\n plane = (plane * white_level - black_levels[i]) / (\n white_level - black_levels[i])\n for j,(x,y) in enumerate(patches):\n tile = plane[y/2-16:y/2+16:,x/2-16:x/2+16:,::]\n mean = its.image.compute_image_means(tile)[0]\n var = its.image.compute_image_variances(tile)[0]\n if (mean > CLAMP_THRESH and mean < 1.0-CLAMP_THRESH):\n # Each point is a (mean,variance) tuple for a patch;\n # for a given ISO, there should be a linear\n # relationship between these values.\n points.append((mean,var))\n\n # Fit a line to the points, with a line equation: y = mx + b.\n # This line is the relationship between mean and variance (i.e.)\n # between signal level and noise, for this particular sensor.\n # In the DNG noise model, the gradient (m) is \"S\", and the offset\n # (b) is \"O\".\n points.sort()\n xs = [x for (x,y) in points]\n ys = [y for (x,y) in points]\n m,b = numpy.polyfit(xs, ys, 1)\n lines.append((s,m,b))\n print s, \"->\", m, b\n\n # TODO: Clean up these checks (which currently fail in some cases).\n # Some sanity checks:\n # * Noise levels should increase with brightness.\n # * Extrapolating to a black image, the noise should be positive.\n # Basically, the \"b\" value should correspond to the read noise,\n # which is the noise level if the sensor was operating in zero\n # light.\n #assert(m > 0)\n #assert(b >= 0)\n\n # Draw a plot.\n pylab.plot(xs, ys, 'r')\n pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b')\n matplotlib.pyplot.savefig(\"%s_plot_mean_vs_variance.png\" % (NAME))\n\n # Now fit a line across the (m,b) line parameters for each sensitivity.\n # The gradient (m) params are fit to the \"S\" line, and the offset (b)\n # params are fit to the \"O\" line, both as a function of sensitivity.\n gains = [d[0] for d in lines]\n Ss = [d[1] for d in lines]\n Os = [d[2] for d in lines]\n mS,bS = numpy.polyfit(gains, Ss, 1)\n mO,bO = numpy.polyfit(gains, Os, 1)\n\n # Plot curve \"O\" as 10x, so it fits in the same scale as curve \"S\".\n pylab.plot(gains, [10*o for o in Os], 'r')\n pylab.plot([gains[0],gains[-1]],\n [10*mO*gains[0]+10*bO, 10*mO*gains[-1]+10*bO], 'b')\n pylab.plot(gains, Ss, 'r')\n pylab.plot([gains[0],gains[-1]], [mS*gains[0]+bS, mS*gains[-1]+bS], 'b')\n matplotlib.pyplot.savefig(\"%s_plot_S_O.png\" % (NAME))\n\n print \"\"\"\n /* Generated test code to dump a table of data for external validation\n * of the noise model parameters.\n */\n #include <stdio.h>\n #include <assert.h>\n double compute_noise_model_entry_S(int sens);\n double compute_noise_model_entry_O(int sens);\n int main(void) {\n int sens;\n for (sens = %d; sens <= %d; sens += 100) {\n double o = compute_noise_model_entry_O(sens);\n double s = compute_noise_model_entry_S(sens);\n printf(\"%%d,%%lf,%%lf\\\\n\", sens, o, s);\n }\n return 0;\n }\n\n /* Generated functions to map a given sensitivity to the O and S noise\n * model parameters in the DNG noise 
model.\n */\n double compute_noise_model_entry_S(int sens) {\n double s = %e * sens + %e;\n return s < 0.0 ? 0.0 : s;\n }\n double compute_noise_model_entry_O(int sens) {\n double o = %e * sens + %e;\n return o < 0.0 ? 0.0 : o;\n }\n \"\"\"%(sens_min,sens_max,mS,bS,mO,bO)", "def set_resolution(self, res, video_file = \"output.avi\"):\n\t\tprint(\"Setting resolution to %s.\"%res)\n\t\thigh_res = (1088,720)\n\t\tmed_res = (640,480)\n\t\tlow_res = (320,240)\t\n\t\tif self.vs is not None:\t\n\t\t\tself.teardown()\n\t\t\tsleep(1)\n\n\t\tif res == \"high\": #high\n\t\t\tself.camera_matrix = np.array([[8.4360221747968785e+02, 0., 544.], [0., 8.4385823040303683e+02, 360.],[0., 0.,\n \t\t1.]])\n\t\t\tself.dist_coeff = np.array([1.7626446405747770e-01, -3.4120481004692560e-01,\n\t\t \t-2.1890672094602151e-03, -3.6706857342688248e-05,\n\t\t \t8.1488779271148601e-02])\n\t\t\tres = high_res\n\t\t\tself.cop.horizontal_fov = 76.884\n\t\t\tself.cop.vertical_fov = 51.714\n\n\t\telif res == \"med\": #med\n\t\t\tself.camera_matrix = np.array([[4.9855533317091482e+02, 0., 320.], [0., 4.9967286973785622e+02, 240.],[0., 0.,\n\t\t1.]])\n\t\t\tself.dist_coeff = np.array([1.9695524980263868e-01, -4.7266256496392656e-01,\n \t\t\t-2.8509501186610737e-03, -6.6742476969470338e-04,\n \t\t\t2.9734384543609033e-01])\n\t\t\tres = med_res\n\t\t\tself.cop.horizontal_fov = 85.521\n\t\t\tself.cop.vertical_fov = 69.626\n\n\t\telif res == \"low\": #low\n\t\t\tself.camera_matrix = np.array([[2.4848460687057266e+02, 0., 160.], [0., 2.4930955561049109e+02, 120.], [0., 0.,\n \t\t1.]])\n\t\t\tself.dist_coeff = np.array([2.1646548043084851e-01, -6.2149098910402545e-01,\n \t\t\t-1.9510859152085493e-03, -1.6281010642558004e-03,\n \t\t\t\t5.5614584686671453e-01])\n\t\t\tres = low_res\n\t\t\tself.cop.horizontal_fov = 83.237\n\t\t\tself.cop.vertical_fov = 68.536\n\t\t\n\t\tself.vs = VideoStream(usePiCamera=True, resolution=res).start()\n\t\tsleep(.5)\n\n\t\t#set copter properties\n\t\tself.cop.horizontal_resolution = res[0]\n\t\tself.cop.vertical_resolution = res[1]\n\t\t#full camera fov\n\t\tself.cop.horizontal_fov_rad = self.cop.horizontal_fov * math.pi / 180\n\t\tself.cop.vertical_fov_rad = self.cop.vertical_fov * math.pi / 180\n\n\t\tframe = self.vs.read()\n\t\tself.horizontal_resolution = frame.shape[1]\n\t\tself.vertical_resolution = frame.shape[0]\n\t\tself.c_x_image = self.horizontal_resolution / 2\n\t\tself.c_y_image = self.vertical_resolution / 2\n\t\tself.frame = frame\n\n\t\t#for color target stuff\n\t\tf_x = self.camera_matrix[0][0]\n\t\tf_y = self.camera_matrix[1][1]\n\t\tself.m = (f_x + f_y) / (2 * self.foc)\n\n\t\t#file writing\n\t\tfpsL = 60\n\t\tfpsM = 125\n\t\tfpsH = 40\n\t\t\n\t\tfourcc = cv2.VideoWriter_fourcc(*'MJPG')\n\t\tif res == high_res:\n\t\t\tself.out = cv2.VideoWriter(video_file, fourcc,fpsH,(self.horizontal_resolution,self.vertical_resolution),True)\n\t\telif res == med_res:\n\t\t\tself.out = cv2.VideoWriter(video_file, fourcc,fpsM,(self.horizontal_resolution,self.vertical_resolution),True)\n\t\telif res == low_res:\n\t\t\tself.out = cv2.VideoWriter(video_file, fourcc,fpsL,(self.horizontal_resolution,self.vertical_resolution),True)\n\n\t\tself.fps = FPS().start()", "def main():\n cam = Realsense()\n # cam.access_intr_and_extr()\n profile = cam.pipeline.start(cam.config)\n depth_sensor = profile.get_device().first_depth_sensor()\n depth_scale = depth_sensor.get_depth_scale()\n align_to = rs.stream.color\n align = rs.align(align_to)\n\n objp = np.zeros((3*4,3), np.float32)\n objp[:,:2] = 
np.mgrid[0:4,0:3].T.reshape(-1,2)\n axis = np.float32([[1,0,0], [0,1,0], [0,0,-1]]).reshape(-1,3)\n # print(objp)\n\n try:\n while (True):\n # detect ArUco markers in RGB images\n frames = cam.pipeline.wait_for_frames()\n aligned_frames = align.process(frames)\n color_frame = aligned_frames.get_color_frame()\n color_image = np.asanyarray(color_frame.get_data()) \n frame = color_image\n font = cv2.FONT_HERSHEY_SIMPLEX\n corners, ids, rvecs, tvecs = cam.detect_markers_realsense(frame)\n \n if np.all(ids != None): # if markers are detected\n for i in range(0, ids.size):\n aruco.drawAxis(frame, cam.newcameramtx, cam.dist, rvecs[i],\n tvecs[i], 0.1) # Draw axis\n aruco.drawDetectedMarkers(frame, corners) # draw square around markers\n\n ###### DRAW ID #####\n strg = ''\n for i in range(0, ids.size):\n strg += str(ids[i][0])+', '\n\n cv2.putText(frame, \"Id: \" + strg, (0,25), font, 1, (0,255,0), 2,\n cv2.LINE_AA)\n\n\t ###### Output marker positions in camera frame ######\n \t # output tvec\n y0 = 60\n dy = 40\n for i in range(0, ids.size):\n y = y0 + i*dy\n cv2.putText(frame, str(tvecs[i][0]), (0, y), font, 1, (0,255,0),\n 2, cv2.LINE_AA)\n\n else:\n ##### DRAW \"NO IDS\" #####\n cv2.putText(frame, \"No Ids\", (0,64), font, 1, (0,255,0), 2,\n cv2.LINE_AA)\n\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n ret, corners = cv2.findChessboardCorners(gray, (4,3), None)\n if ret == True:\n corners2 = cv2.cornerSubPix(gray, corners,(11,11), (-1,-1),\n cam.criteria)\n corners2 = corners2[::-1]\n # print(corners2)\n # print(objp)\n frame = cv2.drawChessboardCorners(frame, (4,3), corners2, ret)\n # Find the rotation and translation vectors.\n _, rvecs, tvecs = cv2.solvePnP(objp, corners2, cam.newcameramtx,\n cam.dist)\n rot, _ = cv2.Rodrigues(rvecs)\n # print(rot)\n # project 3D points to image plane\n imgpts, jac = cv2.projectPoints(axis, rvecs, tvecs,\n cam.newcameramtx, cam.dist)\n frame = draw(frame, corners2, imgpts)\n\n # Display the resulting frame\n cv2.imshow('frame',frame)\n cv2.waitKey(5)\n\n # When everything done, release the capture\n cv2.destroyAllWindows()\n\n finally:\n cam.pipeline.stop()", "def secondayCamCallback(msg):\n\n global secondaryCamString\n secondaryCamString = msg.data", "def get_cameras_number():\n lib.initlib()\n return lib.is_GetNumberOfCameras()", "def match_based_on_spatial_temperal_prior_test_2(tracker_record_1, tracker_record_2, pt_obj_1, pt_obj_2, associate_dict, t_interval=30):\n print(\"===== Get in the match_based_on_spatial_temperal_prior_test_2! 
===== \")\n \n # file path\n device_id_1 = 0\n device_id_2 = 1\n img_root_1 = data_path[device_id_1]\n img_root_2 = data_path[device_id_2]\n # save_root =\n \n obj_single_camera_stp_cam_1 = SingleCameraSTP(tracker_record_1, pt_obj_1)\n obj_single_camera_stp_cam_2 = SingleCameraSTP(tracker_record_2, pt_obj_2)\n \n print(obj_single_camera_stp_cam_1.perspective_trace)\n print(obj_single_camera_stp_cam_1.motion_params_4_each)\n obj_multi_cameras_stp_c1c2 = MultiCamerasSTP(\n obj_single_camera_stp_cam_1,\n obj_single_camera_stp_cam_2,\n associate_dict)\n\n # # ===== TEST:coord_transformer_test =====\n # coord_transformer_test(obj_multi_cameras_stp_c1c2)\n # obj_multi_cameras_stp_c1c2.get_start_point_transform()\n \n pt_box_info_1 = obj_multi_cameras_stp_c1c2.obj_single_camera_stp_cam_1.perspective_trace\n pt_box_info_2 = obj_multi_cameras_stp_c1c2.obj_single_camera_stp_cam_2.perspective_trace\n \n # Test on object id '1'\n object_id = '0'\n \n for i in range(np.min([len(pt_box_info_1[object_id]), len(pt_box_info_2[object_id])])):\n f1 = i\n f2 = i\n fname_1 = str(pt_box_info_1[object_id][f1][1])+'.jpg'\n fname_2 = str(pt_box_info_2[object_id][f2][1])+'.jpg'\n \n img_1 = cv2.imread(os.path.join(img_root_1, fname_1))\n img_2 = cv2.imread(os.path.join(img_root_2, fname_2))\n \n cam_1_x = pt_box_info_1[object_id][f1][0][0]\n cam_1_y = pt_box_info_1[object_id][f1][0][1]\n \n cam_2_x = pt_box_info_2[object_id][f2][0][0]\n cam_2_y = pt_box_info_2[object_id][f2][0][1]\n \n t_interval = pt_box_info_2[object_id][f2][1]-pt_box_info_1[object_id][f1][1]\n \n print(cam_1_x, cam_1_y)\n print(cam_2_x, cam_2_y)\n print(t_interval)\n # print(obj_multi_cameras_stp_c1c2.starting_point)\n \n p_map = obj_multi_cameras_stp_c1c2.get_probability_map(cam_1_x, cam_1_y, t_interval, height=210, width=80)\n p_map = cv2.applyColorMap(p_map, cv2.COLORMAP_JET)\n p = obj_multi_cameras_stp_c1c2.get_probability(cam_2_x, cam_2_y, cam_1_x, cam_1_y, t_interval)\n print(p)\n # dist = obj_multi_cameras_stp_c1c2.get_distance(cam_2_x,cam_2_y,cam_1_x,cam_1_y,t_interval)\n p_map = cv2.resize(p_map, (int(pt_obj_2.transformed_width_for_disp), int(pt_obj_2.transformed_height_for_disp)))\n p_map = cv2.flip(p_map, 0) # 0:vertical flip\n pt_color_p_map = pt_obj_2.get_inverse_disp_transform(p_map)\n \n alpha = 0.5\n img_3 = cv2.addWeighted(img_2, alpha, pt_color_p_map, 1-alpha, 0)\n \n img_4 = np.zeros((int(img_2.shape[0]), int(img_2.shape[1]*2), 3), np.uint8)\n img_4[:, :img_1.shape[1], :] = img_1\n img_4[:, img_1.shape[1]:, :] = img_3\n\n # cv2.namedWindow('img_1',cv2.WINDOW_NORMAL)\n # cv2.namedWindow('img_2',cv2.WINDOW_NORMAL)\n cv2.namedWindow('img_4', cv2.WINDOW_NORMAL)\n \n # cv2.imshow('img_1',img_1)\n # cv2.imshow('img_2',img_2)\n cv2.imshow('img_4', img_4)\n \n cv2.imwrite(os.path.join(save_root, fname_1), img_4)\n \n cv2.waitKey()\n return", "def __init__(self):\n self.index = 'r11_07_06c'\n self.parameters = {'run_index': 'r11_07_06c',\n 'h_1': 0.25,\n 'rho_0': 1.150,\n 'rho_1': 1.100,\n 'rho_2': 1.000,\n 'alpha': 0.5,\n 'D': 0.4,\n 'H': 0.25,\n 'sample': 1.0,\n 'perspective': 'old'}\n self.run_data = {'run_index': 'r11_07_06c',\n 'l0x': 2796,\n 'l0y': 1151,\n 'lsx': 2793,\n 'lsy': 716,\n 'j10x': 210,\n 'j10y': 1165,\n 'j1sx': 208,\n 'j1sy': 727,\n 'leakage': -76,\n 'odd_1': 'n',\n 'j20x': 2728,\n 'j20y': 1086,\n 'j2sx': 2730,\n 'j2sy': 670,\n 'r0x': 1097,\n 'r0y': 1095,\n 'rsx': 1093,\n 'rsy': 683,\n 'odd_2': 'n'}\n self.raw_image = 'tests/data/synced/r11_07_06c/cam1/img_0001.jpg'\n self.bc_image = 
'tests/data/bc/r11_07_06c/cam1/img_0001.jpg'\n self.processed_path = 'tests/data/processed_ref/r11_07_06c/cam1/img_0001.jpg'", "def setup_cameras(self, settings):\n for camera in settings.cameras: # for each camera listed in yaml file\n cam = Camera(camera, settings.cameras, settings) # create a Camera instance\n self.camlist.append(cam) # add it to the list of cameras", "def infer():\n\n # Create StreamManagerApi object\n stream_manager_api = StreamManagerApi()\n # Use InitManager method init StreamManagerApi\n ret = stream_manager_api.InitManager()\n if ret != 0:\n print(\"Failed to init Stream manager, ret=%s\" % str(ret))\n exit()\n\n # create streams by pipeline config file\n with open(args.pipeline_path, \"rb\") as f:\n pipeline_str = f.read()\n\n # Configuring a stream\n ret = stream_manager_api.CreateMultipleStreams(pipeline_str)\n if ret != 0:\n print(\"Failed to create Stream, ret=%s\" % str(ret))\n exit()\n\n # Construct the input of the stream\n data_input = MxDataInput()\n # Stream_name encoded in UTF-8\n stream_name = args.stream_name.encode()\n print(stream_name)\n predictions = []\n with open(args.label_path, 'rt') as f:\n val_cls = f.read().rstrip(\"\\n\").split(\"\\n\")\n val_cls_dict = {}\n for i, cls in enumerate(val_cls):\n val_cls_dict[i] = cls\n coco_gt = COCO(args.instances_path)\n classs_dict = {}\n cat_ids = coco_gt.loadCats(coco_gt.getCatIds())\n for cat in cat_ids:\n classs_dict[cat[\"name\"]] = cat[\"id\"]\n\n for file_name in os.listdir(args.img_path):\n pred_data = []\n # Gets the Address of each image\n img_id = int(file_name.split('.')[0])\n file_path = args.img_path + file_name\n size = (cv2.imread(file_path)).shape\n\n # Read each photo in turn\n with open(file_path, \"rb\") as f:\n img_data = f.read()\n if not img_data:\n print(f\"read empty data from img:{file_name}\")\n continue\n # The element value img_data\n data_input.data = img_data\n boxes_output, scores_output = send_data_get_output(stream_name, data_input, stream_manager_api)\n pred_data.append({\"boxes\": boxes_output,\n \"box_scores\": scores_output,\n \"img_id\": img_id,\n \"image_shape\": size})\n\n parse_img_infer_result(pred_data[0], predictions, val_cls_dict, classs_dict)\n print(f\"Inferred image:{file_name} success!\")\n\n # Save the result in JSON format\n if not os.path.exists(args.res_path):\n os.makedirs(args.res_path)\n with open(args.res_path + 'predictions_test.json', 'w') as f:\n json.dump(predictions, f)\n stream_manager_api.DestroyAllStreams()", "def main():\n\n with its.device.ItsSession() as cam:\n\n props = cam.get_camera_properties()\n its.caps.skip_unless(its.caps.raw16(props) and\n its.caps.manual_sensor(props) and\n its.caps.read_3a(props) and\n its.caps.per_frame_control(props) and\n not its.caps.mono_camera(props))\n debug = its.caps.debug_mode()\n\n # Expose for the scene with min sensitivity\n exp_min, exp_max = props[\"android.sensor.info.exposureTimeRange\"]\n sens_min, _ = props[\"android.sensor.info.sensitivityRange\"]\n # Digital gains might not be visible on RAW data\n sens_max = props[\"android.sensor.maxAnalogSensitivity\"]\n sens_step = (sens_max - sens_min) / NUM_ISO_STEPS\n white_level = float(props[\"android.sensor.info.whiteLevel\"])\n black_levels = [its.image.get_black_level(i,props) for i in range(4)]\n # Get the active array width and height.\n aax = props[\"android.sensor.info.activeArraySize\"][\"left\"]\n aay = props[\"android.sensor.info.activeArraySize\"][\"top\"]\n aaw = props[\"android.sensor.info.activeArraySize\"][\"right\"]-aax\n aah = 
props[\"android.sensor.info.activeArraySize\"][\"bottom\"]-aay\n raw_stat_fmt = {\"format\": \"rawStats\",\n \"gridWidth\": aaw/IMG_STATS_GRID,\n \"gridHeight\": aah/IMG_STATS_GRID}\n\n e_test = []\n mult = 1.0\n while exp_min*mult < exp_max:\n e_test.append(int(exp_min*mult))\n mult *= EXP_MULT\n if e_test[-1] < exp_max * INCREASING_THR:\n e_test.append(int(exp_max))\n e_test_ms = [e / 1000000.0 for e in e_test]\n\n for s in range(sens_min, sens_max, sens_step):\n means = []\n means.append(black_levels)\n reqs = [its.objects.manual_capture_request(s, e, 0) for e in e_test]\n # Capture raw in debug mode, rawStats otherwise\n caps = []\n for i in range(len(reqs) / SLICE_LEN):\n if debug:\n caps += cam.do_capture(reqs[i*SLICE_LEN:(i+1)*SLICE_LEN], cam.CAP_RAW)\n else:\n caps += cam.do_capture(reqs[i*SLICE_LEN:(i+1)*SLICE_LEN], raw_stat_fmt)\n last_n = len(reqs) % SLICE_LEN\n if last_n == 1:\n if debug:\n caps += [cam.do_capture(reqs[-last_n:], cam.CAP_RAW)]\n else:\n caps += [cam.do_capture(reqs[-last_n:], raw_stat_fmt)]\n elif last_n > 0:\n if debug:\n caps += cam.do_capture(reqs[-last_n:], cam.CAP_RAW)\n else:\n caps += cam.do_capture(reqs[-last_n:], raw_stat_fmt)\n\n # Measure the mean of each channel.\n # Each shot should be brighter (except underexposed/overexposed scene)\n for i,cap in enumerate(caps):\n if debug:\n planes = its.image.convert_capture_to_planes(cap, props)\n tiles = [its.image.get_image_patch(p, 0.445, 0.445, 0.11, 0.11) for p in planes]\n mean = [m * white_level for tile in tiles\n for m in its.image.compute_image_means(tile)]\n img = its.image.convert_capture_to_rgb_image(cap, props=props)\n its.image.write_image(img, \"%s_s=%d_e=%05d.jpg\" % (NAME, s, e_test))\n else:\n mean_image, _ = its.image.unpack_rawstats_capture(cap)\n mean = mean_image[IMG_STATS_GRID/2, IMG_STATS_GRID/2]\n\n print \"ISO=%d, exposure time=%.3fms, mean=%s\" % (\n s, e_test[i] / 1000000.0, str(mean))\n means.append(mean)\n\n\n # means[0] is black level value\n r = [m[0] for m in means[1:]]\n gr = [m[1] for m in means[1:]]\n gb = [m[2] for m in means[1:]]\n b = [m[3] for m in means[1:]]\n\n pylab.plot(e_test_ms, r, \"r.-\")\n pylab.plot(e_test_ms, b, \"b.-\")\n pylab.plot(e_test_ms, gr, \"g.-\")\n pylab.plot(e_test_ms, gb, \"k.-\")\n pylab.xscale('log')\n pylab.yscale('log')\n pylab.title(\"%s ISO=%d\" % (NAME, s))\n pylab.xlabel(\"Exposure time (ms)\")\n pylab.ylabel(\"Center patch pixel mean\")\n matplotlib.pyplot.savefig(\"%s_s=%d.png\" % (NAME, s))\n pylab.clf()\n\n allow_under_saturated = True\n for i in xrange(1, len(means)):\n prev_mean = means[i-1]\n mean = means[i]\n\n if np.isclose(max(mean), white_level, rtol=SATURATION_TOL):\n print \"Saturated: white_level %f, max_mean %f\"% (white_level, max(mean))\n break;\n\n if allow_under_saturated and np.allclose(mean, black_levels, rtol=BLK_LVL_TOL):\n # All channel means are close to black level\n continue\n\n allow_under_saturated = False\n # Check pixel means are increasing (with small tolerance)\n channels = [\"Red\", \"Gr\", \"Gb\", \"Blue\"]\n for chan in range(4):\n err_msg = \"ISO=%d, %s, exptime %3fms mean: %.2f, %s mean: %.2f, TOL=%.f%%\" % (\n s, channels[chan],\n e_test_ms[i-1], mean[chan],\n \"black level\" if i == 1 else \"exptime %3fms\"%e_test_ms[i-2],\n prev_mean[chan],\n INCREASING_THR*100)\n assert mean[chan] > prev_mean[chan] * INCREASING_THR, err_msg", "def run(self):\n\n info(\"creating camera\")\n self.camera_controller = CameraController()\n self.camera_controller.camera.resolution = self.photo_resolution\n\n 
self.screen_resolution = ui.get_screen_resolution()\n self.normalized_screen_resolution = ui.normalize_dimension(self.screen_resolution)\n info(\"screen_resolution: %s\", self.screen_resolution)\n info(\"normalized_screen_resolution: %s\", self.normalized_screen_resolution)\n\n info(\"creating buffer image and canvas\")\n self.buffer_image = Image.new('RGB', self.normalized_screen_resolution)\n self.canvas = ImageDraw.Draw(self.buffer_image)\n debug(\"buffer_image resolution: %s\", self.buffer_image.size)\n\n info(\"creating preview renderer\")\n self.preview_renderer = self.camera_controller.start_preview(\n fullscreen=False,\n window=ui.normalize_dimension((\n 0, 0,\n self.normalized_screen_resolution[0] * 0.75,\n self.normalized_screen_resolution[1]\n )))\n debug(\"preview location: %s\", self.preview_renderer.window)\n\n info(\"creating window renderer\")\n self.window_renderer = self.camera_controller.add_overlay(\n self.buffer_image.tobytes(),\n size=self.buffer_image.size,\n fullscreen=False,\n layer=1,\n window=(\n 0, 0,\n self.normalized_screen_resolution[0],\n self.normalized_screen_resolution[1]\n ))\n debug(\"window location: %s\", self.window_renderer.window)\n\n info(\"setting up UI\")\n self._setup_ui()\n\n info(\"setting up input\")\n self.yes_button = GPIOButton(self.yes_pin)\n self.no_button = GPIOButton(self.no_pin)\n\n info(\"starting app\")\n self._enter_state(STATE_DEFAULT)\n self.render_timer.start()\n ui_context = ui.UIContext(self.canvas, self.window, update_function=self._logic)\n ui_context.main_loop()\n\n info(\"exiting\")", "def readImages(self):\r\n\r\n #Read the file camera.csv for the image file name\r\n lines = [line.strip() for line in open(self.cameraFile)]\r\n i = 0;\r\n\tself.centers = []\r\n\tself.lefts = []\r\n\tself.rights = []\r\n\r\n for line in lines:\r\n info = line.split(',')\r\n \r\n\r\n if info[0] == 'seq':\r\n i += 1\r\n continue\r\n \r\n if info[4] == 'left_camera':\r\n self.lefts.append(info)\r\n if info[4] == 'center_camera':\r\n self.centers.append(info)\r\n if info[4] == 'right_camera':\r\n self.rights.append(info)\r\n i += 1\r\n\r\n print \"Total Frames: %d \" % (len(self.centers))", "def r2n2_normal_cam_images(self):\n if not hasattr(self, '_r2n2_normal_cam_images'):\n nrm_world = self.r2n2_normal_world_images\n cam2world = self.r2n2_cam2world.copy()\n cam_images = []\n for i in range(24):\n # Use the inverse-transpose of the needed matrix:\n im_i = geom_util_np.apply_4x4(\n nrm_world[i, ...], cam2world[i, :, :].T, are_points=False)\n nrm = np.linalg.norm(im_i, axis=-1, keepdims=True) + 1e-10\n im_i /= nrm\n mask = np_util.make_mask(self.r2n2_depth_images[i, ...])\n cam_images.append(np_util.zero_by_mask(mask, im_i).astype(np.float32))\n self._r2n2_normal_cam_images = np.stack(cam_images)\n return self._r2n2_normal_cam_images", "def run(self):\n #calculate platescale of first input image\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.cd)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.pc)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n pscale = self.datain[0].header['PIXSCAL']\n #filtering out images which are too far away from the others\n #passing images added to a list of (image, WCS) tuples\n '''\n image_centers = []\n for f in self.datain:\n image_centers.append((f.header['CRVAL1'], f.header['CRVAL2']))\n filtered_datain = []\n dist_list = [[[0]*(len(image_centers)-1)]*len(image_centers)]\n for i in range(len(image_centers)):\n for j in 
range(len(image_centers)-1):\n dist_list[i][j+1] = np.sqrt((image_)**2+()**2)\n '''\n #calculations necessary for updating wcs information\n px = []\n py = []\n \n #in order to avoid NaN interactions, creating weight map\n weights=[]\n for f in self.datain:\n weights.append((np.where(np.isnan(f.image) == True, 0, 1)))\n \n for f in self.datain:\n px.extend(wcs.WCS(f.header).calc_footprint()[:,0])\n py.extend(wcs.WCS(f.header).calc_footprint()[:,1])\n x0 = (max(px)+min(px))/2.\n y0 = (max(py)+min(py))/2.\n sx = (max(px)-min(px))*np.cos(y0/180*np.pi) # arcsec\n sy = (max(py)-min(py)) # arcsec\n size = (sx*3600+self.getarg('pad')*2, sy*3600+self.getarg('pad')*2)\n xpix = size[0]//pscale\n ypix = size[1]//pscale\n cdelt = [pscale/3600.]*2\n \n #create self.dataout and give it a copy of an input's header\n self.dataout = DataFits(config = self.config)\n self.dataout.header = self.datain[0].header.copy()\n \n #update header wcs information\n self.log.info('Creating new WCS header')\n \n self.dataout.header['CRPIX1'] = xpix/2\n self.dataout.header['CRPIX2'] = ypix/2\n self.dataout.header['CRVAL1'] = x0\n self.dataout.header['CRVAL2'] = y0\n self.dataout.header['CD1_1'] = -cdelt[0]\n self.dataout.header['CD1_2'] = self.dataout.header['CD2_1'] = 0.\n self.dataout.header['CD2_2'] = cdelt[1]\n self.dataout.header['NAXIS1'] = int(xpix)\n self.dataout.header['NAXIS2'] = int(ypix)\n self.dataout.header['CTYPE1'] = 'RA---TAN-SIP'\n self.dataout.header['CTYPE2'] = 'DEC--TAN-SIP'\n self.dataout.header['RADESYS'] = 'ICRS'\n self.dataout.header['EQUINOX'] = 2000\n self.dataout.header['LATPOLE'] = self.datain[0].header['CRVAL2']\n self.dataout.header['LONPOLE'] = 180\n self.dataout.header['PIXASEC'] = pscale\n \n theta_rad = np.deg2rad(self.getarg('outangle'))\n rot_matrix = np.array([[np.cos(theta_rad), -np.sin(theta_rad)], \n [np.sin(theta_rad), np.cos(theta_rad)]])\n rot_cd = np.dot(rot_matrix, np.array([[self.dataout.header['CD1_1'], 0.],[0., self.dataout.header['CD2_2']]]))\n for i in [0,1]:\n for j in [0,1]:\n self.dataout.header['CD{0:d}_{1:d}'.format(i+1, j+1)] = rot_cd[i,j]\n \n #check drizzle arguments\n if self.getarg('kernel') == 'smoothing':\n kernel = 'lanczos3'\n elif self.getarg('kernel') in ['square', 'point', 'gaussian', 'tophat']:\n kernel = self.getarg('kernel')\n else:\n self.log.error('Kernel name not recognized, using default')\n kernel = 'square'\n if self.getarg('drizzleweights') == 'uniform':\n driz_wt = ''\n elif self.getarg('drizzleweights') in ['exptime', 'expsq']:\n driz_wt = self.getarg('drizzleweights')\n else:\n self.log.error('Drizzle weighting not recognized, using default')\n driz_wt = ''\n \n #create drizzle object and add input images\n fullwcs = wcs.WCS(self.dataout.header)\n self.log.info('Starting drizzle')\n driz = drz.Drizzle(outwcs = fullwcs, pixfrac=self.getarg('pixfrac'), \\\n kernel=kernel, fillval='10000', wt_scl=driz_wt)\n for i,f in enumerate(self.datain):\n self.log.info('Adding %s to drizzle stack' % f.filename)\n driz.add_image(f.imgdata[0], wcs.WCS(f.header), inwht=weights[i])\n \n try:\n fillval=float(self.getarg('fillval'))\n except:\n fillval=np.nan\n self.log.error('Fillvalue not recognized or missing, using default')\n \n #creates output fits file from drizzle output\n self.dataout.imageset(np.where(driz.outsci == 10000, fillval, driz.outsci))\n self.dataout.imageset(driz.outwht,'OutWeight', self.dataout.header)\n self.dataout.filename = self.datain[0].filename\n\n #add history\n self.dataout.setheadval('HISTORY','Coadd: %d files combined with %s 
kernel, pixfrac %f at %f times resolution' \\\n % (len(self.datain), kernel, self.getarg('pixfrac'), self.getarg('resolution')))", "def test_cameras(self):\n curiosity_cameras = ['FHAZ', 'NAVCAM', 'MAST', 'CHEMCAM', 'MAHLI', 'MARDI', 'RHAZ']\n\n photos_by_camera = {camera: len(self._get_all_photos(1000, camera)) for camera in curiosity_cameras}\n\n self.assertTrue(all(\n [a / b > 10 or b / a > 10\n for a, b in itertools.combinations(photos_by_camera.values(), 2) # get unique pairs of numbers of photos\n if a > 0 and b > 0]\n ), msg=photos_by_camera)", "def scale_mvs_input(images, cams, depth_image=None, scale=1):\n for view in range(FLAGS.view_num):\n images[view] = scale_image(images[view], scale=scale)\n cams[view] = scale_camera(cams[view], scale=scale)\n\n if depth_image is None:\n return images, cams\n else:\n depth_image = scale_image(depth_image, scale=scale, interpolation='nearest')\n return images, cams, depth_image", "def resizing(image, image_2, desire_width):\n\n height, width = image.shape[0:2]\n\n aspect_ratio = (width / height)\n\n new_width = desire_width\n\n new_height = int(round(new_width / aspect_ratio))\n\n standard_src = cv2.resize(image, (new_width, new_height))\n\n image_tocut = cv2.resize(image_2,(new_width, new_height))\n\n return standard_src, image_tocut", "def discover(self, **kwargs):\n manifest = self.parent.config.get('manifest', {})\n if self.foscam_mjpeg > 0:\n self._discover_foscam(manifest)\n else:\n self.parent.logger.info(\"CameraServer: Not Polling for Foscam MJPEG cameras %s\" % (self.foscam_mjpeg))\n self.set_driver('GV2', self.num_cams, uom=56, report=True)\n self.parent.logger.info(\"CameraServer: Done adding cameras\")\n self.parent.update_config()\n return True", "def stereo_callback(self, stereo_msg):\r\n start = time.time()\r\n self.cam0_curr_img_msg = stereo_msg.cam0_msg\r\n self.cam1_curr_img_msg = stereo_msg.cam1_msg\r\n\r\n # Build the image pyramids once since they're used at multiple places.\r\n self.create_image_pyramids()\r\n\r\n # Detect features in the first frame.\r\n if self.is_first_img:\r\n if not self.config.load_features_flag:\r\n self.initialize_first_frame()\r\n self.is_first_img = False\r\n # Draw results.\r\n # self.draw_features_stereo()\r\n else:\r\n if not self.config.load_features_flag:\r\n # Track the feature in the previous image.\r\n t = time.time()\r\n self.track_features()\r\n print('___track_features:', time.time() - t)\r\n t = time.time()\r\n\r\n # Add new features into the current image.\r\n self.add_new_features()\r\n print('___add_new_features:', time.time() - t)\r\n t = time.time()\r\n self.prune_features()\r\n print('___prune_features:', time.time() - t)\r\n t = time.time()\r\n # Draw results.\r\n # self.draw_features_stereo()\r\n print('___draw_features_stereo:', time.time() - t)\r\n t = time.time()\r\n\r\n print('===image process elapsed:', time.time() - start, f'({stereo_msg.timestamp})')\r\n\r\n if not self.config.load_features_flag:\r\n try:\r\n self.save_features() \r\n return self.publish()\r\n finally:\r\n self.cam0_prev_img_msg = self.cam0_curr_img_msg\r\n self.prev_features = self.curr_features\r\n self.prev_cam0_pyramid = self.curr_cam0_pyramid\r\n\r\n # Initialize the current features to empty vectors.\r\n self.curr_features = [[] for _ in range(self.config.grid_num)]\r\n else:\r\n self.load_features()\r\n return self.publish()", "def stereo_match(self, cam0_points):\r\n cam0_points = np.array(cam0_points)\r\n if len(cam0_points) == 0:\r\n return []\r\n\r\n R_cam0_cam1 = self.R_cam1_imu.T @ 
self.R_cam0_imu\r\n cam0_points_undistorted = self.undistort_points(\r\n cam0_points, self.cam0_intrinsics,\r\n self.cam0_distortion_model, self.cam0_distortion_coeffs, R_cam0_cam1)\r\n cam1_points = self.distort_points(\r\n cam0_points_undistorted, self.cam1_intrinsics,\r\n self.cam1_distortion_model, self.cam1_distortion_coeffs)\r\n cam1_points_copy = cam1_points.copy()\r\n\r\n # Track features using LK optical flow method.\r\n cam0_points = cam0_points.astype(np.float32)\r\n cam1_points = cam1_points.astype(np.float32)\r\n cam1_points, inlier_markers, _ = cv2.calcOpticalFlowPyrLK(\r\n self.curr_cam0_pyramid, self.curr_cam1_pyramid,\r\n cam0_points, cam1_points, **self.config.lk_params)\r\n\r\n cam0_points_, _, _ = cv2.calcOpticalFlowPyrLK(\r\n self.curr_cam1_pyramid, self.curr_cam0_pyramid, \r\n cam1_points, cam0_points.copy(), **self.config.lk_params)\r\n err = np.linalg.norm(cam0_points - cam0_points_, axis=1)\r\n\r\n # cam1_points_undistorted = self.undistort_points(\r\n # cam1_points, self.cam1_intrinsics,\r\n # self.cam1_distortion_model, self.cam1_distortion_coeffs, R_cam0_cam1)\r\n disparity = np.abs(cam1_points_copy[:, 1] - cam1_points[:, 1])\r\n \r\n\r\n \r\n inlier_markers = np.logical_and.reduce(\r\n [inlier_markers.reshape(-1), err < 3, disparity < 20])\r\n\r\n # Mark those tracked points out of the image region as untracked.\r\n img = self.cam1_curr_img_msg.image\r\n for i, point in enumerate(cam1_points):\r\n if not inlier_markers[i]:\r\n continue\r\n if (point[0] < 0 or point[0] > img.shape[1]-1 or \r\n point[1] < 0 or point[1] > img.shape[0]-1):\r\n inlier_markers[i] = 0\r\n\r\n # Compute the relative rotation between the cam0 frame and cam1 frame.\r\n t_cam0_cam1 = self.R_cam1_imu.T @ (self.t_cam0_imu - self.t_cam1_imu)\r\n # Compute the essential matrix.\r\n E = skew(t_cam0_cam1) @ R_cam0_cam1\r\n\r\n # Further remove outliers based on the known essential matrix.\r\n cam0_points_undistorted = self.undistort_points(\r\n cam0_points, self.cam0_intrinsics,\r\n self.cam0_distortion_model, self.cam0_distortion_coeffs)\r\n cam1_points_undistorted = self.undistort_points(\r\n cam1_points, self.cam1_intrinsics,\r\n self.cam1_distortion_model, self.cam1_distortion_coeffs)\r\n\r\n norm_pixel_unit = 4.0 / (\r\n self.cam0_intrinsics[0] + self.cam0_intrinsics[1] +\r\n self.cam1_intrinsics[0] + self.cam1_intrinsics[1])\r\n\r\n for i in range(len(cam0_points_undistorted)):\r\n if not inlier_markers[i]:\r\n continue\r\n pt0 = np.array([*cam0_points_undistorted[i], 1.0])\r\n pt1 = np.array([*cam1_points_undistorted[i], 1.0])\r\n epipolar_line = E @ pt0\r\n error = np.abs((pt1 * epipolar_line)[0]) / np.linalg.norm(\r\n epipolar_line[:2])\r\n\r\n if error > self.config.stereo_threshold * norm_pixel_unit:\r\n inlier_markers[i] = 0\r\n\r\n return cam1_points, inlier_markers", "def camera_image_callback(self, ros_data):\n self.last_call_back_time = rospy.get_time()\n\n # self.logger.info(\"Got image\")\n if self.lastCameraInfo is not None:\n # Collect latest ros_data\n self.image_queue.put((ros_data, self.lastCameraInfo, self.seq_stamper), block=True)\n self.seq_stamper += 1\n\n # self.logger.warning(str(len(multiprocessing.active_children())))\n else:\n self.logger.warning(\"No camera info\")", "def acquire_images(cam, nodemap, nodemap_tldevice):\r\n\r\n #print('*** IMAGE ACQUISITION ***\\n')\r\n try:\r\n result = True\r\n\r\n # Set acquisition mode to continuous\r\n # In order to access the node entries, they have to be casted to a pointer type (CEnumerationPtr here)\r\n 
node_acquisition_mode = PySpin.CEnumerationPtr(nodemap.GetNode('AcquisitionMode'))\r\n if not PySpin.IsAvailable(node_acquisition_mode) or not PySpin.IsWritable(node_acquisition_mode):\r\n print('Unable to set acquisition mode to continuous (enum retrieval). Aborting...')\r\n return False\r\n\r\n # Retrieve entry node from enumeration node\r\n node_acquisition_mode_continuous = node_acquisition_mode.GetEntryByName('Continuous')\r\n if not PySpin.IsAvailable(node_acquisition_mode_continuous) or not PySpin.IsReadable(\r\n node_acquisition_mode_continuous):\r\n print('Unable to set acquisition mode to continuous (entry retrieval). Aborting...')\r\n return False\r\n\r\n # Retrieve integer value from entry node\r\n acquisition_mode_continuous = node_acquisition_mode_continuous.GetValue()\r\n\r\n # Set integer value from entry node as new value of enumeration node\r\n node_acquisition_mode.SetIntValue(acquisition_mode_continuous)\r\n\r\n #print('Acquisition mode set to continuous...')\r\n\r\n # Begin acquiring images\r\n cam.BeginAcquisition()\r\n\r\n #print('Acquiring images...')\r\n\r\n\r\n # Retrieve device serial number for filename\r\n #\r\n # *** NOTES ***\r\n # The device serial number is retrieved in order to keep cameras from\r\n # overwriting one another. Grabbing image IDs could also accomplish\r\n # this.\r\n device_serial_number = ''\r\n node_device_serial_number = PySpin.CStringPtr(nodemap_tldevice.GetNode('DeviceSerialNumber'))\r\n if PySpin.IsAvailable(node_device_serial_number) and PySpin.IsReadable(node_device_serial_number):\r\n device_serial_number = node_device_serial_number.GetValue()\r\n #print('Device serial number retrieved as %s...' % device_serial_number)\r\n\r\n\r\n print('')\r\n #picList = []\r\n\r\n # Retrieve, convert, and save images\r\n for i in range(NUM_IMAGES):\r\n try:\r\n\r\n # Retrieve the next image from the trigger\r\n result &= grab_next_image_by_trigger(nodemap, cam)\r\n\r\n # Retrieve next received image\r\n image_result = cam.GetNextImage()\r\n\r\n # Ensure image completion\r\n if image_result.IsIncomplete():\r\n print('Image incomplete with image status %d ...' % image_result.GetImageStatus())\r\n\r\n else:\r\n\r\n # Print image information; height and width recorded in pixels\r\n #\r\n # *** NOTES ***\r\n # Images have quite a bit of available metadata including\r\n # things such as CRC, image status, and offset values, to\r\n # name a few.\r\n #\r\n #Don't need this right now (CP)\r\n ##width = image_result.GetWidth()\r\n ##height = image_result.GetHeight()\r\n ##print('Grabbed Image %d, width = %d, height = %d' % (i, width, height))\r\n\r\n # Convert image to mono 8\r\n #\r\n # *** NOTES ***\r\n # Images can be converted between pixel formats by using\r\n # the appropriate enumeration value. 
Unlike the original\r\n                    # image, the converted one does not need to be released as\r\n                    # it does not affect the camera buffer.\r\n                    #\r\n                    # When converting images, color processing algorithm is an\r\n                    # optional parameter.\r\n\r\n                    #change to 12 bit\r\n                    #(self,format,algorithm)\r\n                    #Image_Result = image_result.ResetImage(720,540,0,0,PySpin.PixelFormat_Mono12p)\r\n                    #print(image_result.GetPixelFormatName())\r\n                    image_converted = image_result.Convert(PySpin.PixelFormat_Mono16, PySpin.HQ_LINEAR)\r\n\r\n                    image_result.Release()\r\n\r\n                    # add to piclist; 'images' is assumed to be a module-level list\r\n                    imgarray = image_converted.GetNDArray()\r\n                    images.append(imgarray)\r\n\r\n            except PySpin.SpinnakerException as ex:\r\n                print('Error: %s' % ex)\r\n                return False\r\n\r\n        # End acquisition\r\n        #\r\n        # *** NOTES ***\r\n        # Ending acquisition appropriately helps ensure that devices clean up\r\n        # properly and do not need to be power-cycled to maintain integrity.\r\n        cam.EndAcquisition()\r\n        snr = AvgSNR2(images)  #try with picList\r\n        #print(\"this is SNR in acquire images\", SNR)  #this is not the value being returned\r\n\r\n    except PySpin.SpinnakerException as ex:\r\n        print('Error: %s' % ex)\r\n        return False\r\n\r\n    return snr", "def check_camera(self):\n        # -- 2. Read the video stream\n        if not self.cap.isOpened:\n            rospy.logerr(\"[FACE] Error opening video capture\")\n        if not self.face_cascade.load(self.face_cascade_name):\n            rospy.logerr(\"[FACE] Error loading face cascade\")\n        if not self.eyes_cascade.load(self.eyes_cascade_name):\n            rospy.logerr(\"[FACE] Error loading eye cascade\")", "def enumerateDevices():\r\n\r\n    return tuple((dev, dev) for dev in pygame.camera.list_cameras())", "def __publish_alembiccamera(self, item, output, work_template, primary_publish_path,\n                                sg_task, comment, thumbnail_path, progress_cb):\n        # determine the publish info to use\n        #\n        progress_cb(10, \"Determining publish details\")\n\n        # get the current scene path and extract fields from it\n        # using the work template:\n        scene_path = os.path.abspath(cmds.file(query=True, sn=True))\n        fields = work_template.get_fields(scene_path)\n        publish_version = fields[\"version\"]\n        tank_type = output[\"tank_type\"]\n\n        # extract entity from camera node name\n        # handle full paths, trim off everything after the _\n        # e.g.
|pivot_GRP|master_CAM -> master\n        fields[\"name\"] = item[\"name\"].split(\"|\")[-1].split(\"_\")[0]\n        fields[\"cache_name\"] = fields[\"name\"]\n\n        # create the publish path by applying the fields\n        # with the publish template:\n        fields[\"Step\"] = \"cam\"  # first force step to be camera\n        publish_template = output[\"publish_template\"]\n        publish_path = publish_template.apply_fields(fields)\n\n        # ensure the publish folder exists:\n        publish_folder = os.path.dirname(publish_path)\n        self.parent.ensure_folder_exists(publish_folder)\n\n        # determine the publish name:\n        publish_name = fields.get(\"name\")\n        if not publish_name:\n            publish_name = os.path.basename(publish_path)\n\n        # set up args to export current camera item\n        alembic_args = [\n            \"objects=\" + item[\"name\"],\n            \"ogawa=1\",\n        ]\n\n        # find the animated frame range to use:\n        # Don't use self._find_scene_animation_range() because with effects\n        # scenes we don't have an anim curve to determine the frame range from\n        start_frame = int(cmds.playbackOptions(q=True, min=True))\n        end_frame = int(cmds.playbackOptions(q=True, max=True))\n        alembic_args.append(\"in=%d;out=%d\" % (start_frame, end_frame))\n\n        # Set the output path:\n        # Note: The AbcExport command expects forward slashes!\n        alembic_args.append(\"filename=%s\" % publish_path.replace(\"\\\\\", \"/\"))\n\n        job_string = \";\".join(alembic_args)\n\n        # ...and execute it:\n        progress_cb(30, \"Preparing publish task for the farm\")\n\n        thumb_name = os.path.basename(thumbnail_path)\n        new_thumbnail_path = os.path.join(\"C:\\\\mnt\\\\workspace\\\\tmp\\\\thumbnails\", item[\"name\"].replace(\"|\", \"_\") + \"_\" + thumb_name)\n        shutil.copy2(thumbnail_path, new_thumbnail_path)\n        thumbnail_path = new_thumbnail_path\n\n        user = tank.util.get_current_user(self.parent.tank)\n        args = aaSubmit.submitApi.create_sgpublish_args(\n            publish_folder,\n            publish_path,\n            publish_name,\n            publish_version,\n            comment or \"No comment\",\n            user[\"type\"],\n            user[\"id\"],\n            thumbnail_path,\n            tank_type,\n            sg_task[\"id\"],\n            dependencyPaths=[primary_publish_path],\n            deleteThumbnail=True,\n        )\n        pub_task = aaSubmit.utils.create_task_with_command(str(\"Publish \" + os.path.basename(publish_path)), args)\n\n        return (job_string, pub_task)", "def start_camera(config):\n    print(\"Starting {} on {}\".format(config.name, config.path))\n    cs = CameraServer.getInstance()\n    camera = cs.startAutomaticCapture(name=config.name, path=config.path)\n\n    camera.setConfigJson(json.dumps(config.config))\n\n    return cs, camera" ]
[ "0.6055959", "0.594556", "0.5910366", "0.58318543", "0.5761805", "0.5758764", "0.57067573", "0.56581026", "0.5576743", "0.5523943", "0.5506011", "0.53973216", "0.5318885", "0.5317909", "0.5289555", "0.5289161", "0.52827555", "0.527724", "0.52749354", "0.5268248", "0.52553976", "0.52499425", "0.52488345", "0.5244013", "0.52201676", "0.52106977", "0.5205049", "0.5200981", "0.51871383", "0.5183824", "0.5182357", "0.51713043", "0.5168774", "0.5156446", "0.51535946", "0.5151481", "0.51440865", "0.51314", "0.51307666", "0.51284975", "0.5125766", "0.51197344", "0.5109132", "0.5108704", "0.5107174", "0.51058406", "0.50963926", "0.50726116", "0.5071062", "0.5065785", "0.50473696", "0.50447965", "0.5035559", "0.5033374", "0.5013562", "0.5008894", "0.49922976", "0.49866006", "0.4983521", "0.49752662", "0.49587664", "0.49456307", "0.49387297", "0.4930763", "0.4926655", "0.49241483", "0.49158442", "0.4913395", "0.49121833", "0.49121833", "0.49017444", "0.48994786", "0.48876208", "0.48862025", "0.48847735", "0.48840743", "0.48827797", "0.48801735", "0.48730662", "0.4871573", "0.486884", "0.48688358", "0.48647347", "0.4863416", "0.48629704", "0.4861828", "0.4853047", "0.48525915", "0.48517874", "0.48503208", "0.48500395", "0.48465878", "0.48432395", "0.48423725", "0.48415083", "0.48355246", "0.4833808", "0.4833793", "0.4833687", "0.48277715" ]
0.6135356
0
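
The stereo-matching snippet in the negatives above keeps a candidate match only when the cam1 point lies close to the epipolar line of its cam0 point: it forms the essential matrix E = skew(t) @ R and applies a point-to-line distance test. The following is a minimal standalone sketch of that check, assuming the input points are already in normalized (undistorted) coordinates; the function names and the 0.01 threshold are illustrative assumptions, not part of the dataset record:

import numpy as np

def skew(v):
    # Cross-product (skew-symmetric) matrix of a 3-vector.
    return np.array([[0.0, -v[2], v[1]],
                     [v[2], 0.0, -v[0]],
                     [-v[1], v[0], 0.0]])

def epipolar_inliers(pts0, pts1, R, t, threshold=0.01):
    # pts0, pts1: (N, 2) arrays of normalized image coordinates.
    # R, t: rotation and translation taking cam0 coordinates into the cam1 frame.
    E = skew(t) @ R                                       # essential matrix
    pts0_h = np.hstack([pts0, np.ones((len(pts0), 1))])   # homogeneous coordinates
    pts1_h = np.hstack([pts1, np.ones((len(pts1), 1))])
    lines = pts0_h @ E.T                                  # epipolar line E @ p0 per point
    num = np.abs(np.sum(pts1_h * lines, axis=1))          # |p1 . line|
    den = np.linalg.norm(lines[:, :2], axis=1)            # sqrt(a^2 + b^2)
    return num / den < threshold                          # point-to-line distance test

The snippet itself expresses its threshold in pixels and converts it to normalized units by multiplying with norm_pixel_unit, which is approximately the inverse of the mean focal length of the two cameras; with already-normalized inputs, a fixed threshold as sketched here plays the same role.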