_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q30100
_parse_snapshot_share
train
def _parse_snapshot_share(response, name):
    '''
    Read the snapshot identifier from the response headers and build the
    corresponding share object.

    :param response: HTTP response whose 'x-ms-snapshot' header carries the
        snapshot version string.
    :param str name: Name of the share.
    :return: The parsed share, including its snapshot identifier.
    '''
    snapshot_id = response.headers.get('x-ms-snapshot')
    return _parse_share(response, name, snapshot_id)
python
{ "resource": "" }
q30101
purge_blob_containers
train
def purge_blob_containers(account, account_key):
    """
    Delete all blob containers in the given storage account.
    USE AT OWN RISK. NOT SUPPORTED BY STORAGE TEAM.
    """
    bs = BlockBlobService(account, account_key)
    with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
        # Map each submitted future back to its container so results can be
        # reported by name as they complete.
        future_to_container_map = {
            executor.submit(delete_container, bs, container): container
            for container in bs.list_containers()
        }
        for future in concurrent.futures.as_completed(future_to_container_map):
            container_name = future_to_container_map[future].name
            try:
                if future.result():
                    print("Deleted container {} on first try".format(container_name))
                else:
                    print("Skipped container {} as it no longer exists".format(container_name))
            except AzureException as e:
                # A failure mentioning a lease is most likely a left-over
                # active lease from previous tests: break it and retry once.
                if 'lease' not in str(e):
                    raise e
                bs.break_container_lease(container_name)
                if bs.delete_container(container_name):
                    print("Deleted container {} after having broken lease".format(container_name))
                else:
                    print("Skipped container {} as it stopped existing after having broken lease".format(container_name))
            except Exception as e:
                print("Skipped container " + container_name + " due to error " + str(e))
python
{ "resource": "" }
q30102
_generate_encryption_data_dict
train
def _generate_encryption_data_dict(kek, cek, iv):
    '''
    Generates and returns the encryption metadata as a dict.

    :param object kek:
        The key encryption key. See calling functions for more information.
    :param bytes cek:
        The content encryption key.
    :param bytes iv:
        The initialization vector.
    :return: A dict containing all the encryption metadata.
    :rtype: dict
    '''
    # Wrap the content encryption key with the key encryption key.
    wrapped_cek = kek.wrap_key(cek)

    # OrderedDicts are used throughout to satisfy Java's ordering requirement.
    wrapped_content_key = OrderedDict(
        [('KeyId', kek.get_kid()),
         ('EncryptedKey', _encode_base64(wrapped_cek)),
         ('Algorithm', kek.get_key_wrap_algorithm())])

    encryption_agent = OrderedDict(
        [('Protocol', _ENCRYPTION_PROTOCOL_V1),
         ('EncryptionAlgorithm', _EncryptionAlgorithm.AES_CBC_256)])

    encryption_data_dict = OrderedDict(
        [('WrappedContentKey', wrapped_content_key),
         ('EncryptionAgent', encryption_agent),
         ('ContentEncryptionIV', _encode_base64(iv)),
         ('KeyWrappingMetadata', {'EncryptionLibrary': 'Python ' + __version__})])

    return encryption_data_dict
python
{ "resource": "" }
q30103
_generate_AES_CBC_cipher
train
def _generate_AES_CBC_cipher(cek, iv):
    '''
    Builds an encryption cipher for AES-256 in CBC mode from the given
    content encryption key and initialization vector.

    :param bytes cek: The content encryption key for the cipher.
    :param bytes iv: The initialization vector for the cipher.
    :return: A cipher for encrypting in AES256 CBC.
    :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher
    '''
    return Cipher(AES(cek), CBC(iv), default_backend())
python
{ "resource": "" }
q30104
CloudStorageAccount.create_file_service
train
def create_file_service(self):
    '''
    Creates a FileService object with the settings specified in the
    CloudStorageAccount.

    :return: A service object.
    :rtype: :class:`~azure.storage.file.fileservice.FileService`
    :raises Exception:
        If the optional azure-storage-file package is not installed.
    '''
    # Keep the try body limited to the import: a failure inside the
    # FileService constructor must not be misreported as a missing package.
    try:
        from azure.storage.file.fileservice import FileService
    except ImportError:
        raise Exception('The package azure-storage-file is required. '
                        + 'Please install it using "pip install azure-storage-file"')
    return FileService(self.account_name, self.account_key,
                       sas_token=self.sas_token,
                       endpoint_suffix=self.endpoint_suffix)
python
{ "resource": "" }
q30105
_get_request_body
train
def _get_request_body(request_body):
    '''Converts an object into a request body. If it's None we'll return
    an empty string, if it's one of our objects it'll convert it to XML and
    return it. Otherwise we just use the object directly'''
    if request_body is None:
        return b''

    # Bytes and stream objects pass through unchanged.
    if isinstance(request_body, (bytes, IOBase)):
        return request_body

    if isinstance(request_body, _unicode_type):
        return request_body.encode('utf-8')

    # Fall back to the object's string representation; on Python 2 str()
    # may still yield unicode, hence the second encode check.
    converted = str(request_body)
    if isinstance(converted, _unicode_type):
        return converted.encode('utf-8')
    return converted
python
{ "resource": "" }
q30106
AutoUpdatedTokenCredential.stop_refreshing_token
train
def stop_refreshing_token(self):
    """
    Cancel the token-refresh timer.

    Must be called when the application is terminating; otherwise the
    timer would keep the refresh loop alive.
    """
    with self.lock:
        # Flag first so a concurrent refresh sees the stop request,
        # then cancel the pending timer.
        self.timer_stopped = True
        self.timer.cancel()
python
{ "resource": "" }
q30107
QueueService.delete_message
train
def delete_message(self, queue_name, message_id, pop_receipt, timeout=None):
    '''
    Deletes the specified message.

    After a client retrieves a message with get_messages, it is expected to
    process and then delete it. Deletion requires both the message id
    (returned by the earlier get_messages call) and the pop_receipt
    (returned by the most recent :func:`~get_messages` or
    :func:`~update_message`). The pop_receipt sent here must match the one
    the service last issued, or the delete fails.

    :param str queue_name:
        The name of the queue from which to delete the message.
    :param str message_id:
        The message id identifying the message to delete.
    :param str pop_receipt:
        A valid pop receipt value returned from an earlier call to
        the :func:`~get_messages` or :func:`~update_message`.
    :param int timeout:
        The server timeout, expressed in seconds.
    '''
    _validate_not_none('queue_name', queue_name)
    _validate_not_none('message_id', message_id)
    _validate_not_none('pop_receipt', pop_receipt)

    request = HTTPRequest()
    request.method = 'DELETE'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(queue_name, True, message_id)
    request.query = {'popreceipt': _to_str(pop_receipt),
                     'timeout': _int_to_str(timeout)}
    self._perform_request(request)
python
{ "resource": "" }
q30108
FileService.make_file_url
train
def make_file_url(self, share_name, directory_name, file_name, protocol=None, sas_token=None):
    '''
    Creates the url to access a file.

    :param str share_name:
        Name of share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of file.
    :param str protocol:
        Protocol to use: 'http' or 'https'. If not specified, uses the
        protocol specified when FileService was initialized.
    :param str sas_token:
        Shared access signature token created with
        generate_shared_access_signature.
    :return: file access URL.
    :rtype: str
    '''
    scheme = protocol or self.protocol
    # The directory segment is optional; omit it entirely when None.
    if directory_name is None:
        segments = [share_name, file_name]
    else:
        segments = [share_name, directory_name, file_name]
    url = '{}://{}/{}'.format(scheme, self.primary_endpoint, '/'.join(segments))
    if sas_token:
        url = url + '?' + sas_token
    return url
python
{ "resource": "" }
q30109
FileService.generate_share_shared_access_signature
train
def generate_share_shared_access_signature(self, share_name, permission=None, expiry=None, start=None, id=None, ip=None, protocol=None, cache_control=None, content_disposition=None, content_encoding=None, content_language=None, content_type=None):
    '''
    Generates a shared access signature for the share.
    Use the returned signature with the sas_token parameter of FileService.

    :param str share_name:
        Name of share.
    :param SharePermissions permission:
        The permissions associated with the shared access signature. The
        user is restricted to operations allowed by the permissions.
        Permissions must be ordered read, create, write, delete, list.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has been
        specified in an associated stored access policy.
    :param expiry:
        The time at which the shared access signature becomes invalid.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has
        been specified in an associated stored access policy. Azure will always
        convert values to UTC. If a date is passed in without timezone info, it
        is assumed to be UTC.
    :type expiry: datetime or str
    :param start:
        The time at which the shared access signature becomes valid. If
        omitted, start time for this call is assumed to be the time when the
        storage service receives the request. Azure will always convert values
        to UTC. If a date is passed in without timezone info, it is assumed to
        be UTC.
    :type start: datetime or str
    :param str id:
        A unique value up to 64 characters in length that correlates to a
        stored access policy. To create a stored access policy, use
        :func:`~set_share_acl`.
    :param str ip:
        Specifies an IP address or a range of IP addresses from which to
        accept requests. If the IP address from which the request originates
        does not match the IP address or address range specified on the SAS
        token, the request is not authenticated. For example, specifying
        sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS restricts the
        request to those IP addresses.
    :param str protocol:
        Specifies the protocol permitted for a request made. Possible values
        are both HTTPS and HTTP (https,http) or HTTPS only (https). The
        default value is https,http. Note that HTTP only is not a permitted
        value.
    :param str cache_control:
        Response header value for Cache-Control when resource is accessed
        using this shared access signature.
    :param str content_disposition:
        Response header value for Content-Disposition when resource is
        accessed using this shared access signature.
    :param str content_encoding:
        Response header value for Content-Encoding when resource is accessed
        using this shared access signature.
    :param str content_language:
        Response header value for Content-Language when resource is accessed
        using this shared access signature.
    :param str content_type:
        Response header value for Content-Type when resource is accessed
        using this shared access signature.
    :return: A Shared Access Signature (sas) token.
    :rtype: str
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('self.account_name', self.account_name)
    _validate_not_none('self.account_key', self.account_key)

    # Delegate the actual token construction to the shared-signature helper.
    signer = FileSharedAccessSignature(self.account_name, self.account_key)
    return signer.generate_share(
        share_name,
        permission,
        expiry,
        start=start,
        id=id,
        ip=ip,
        protocol=protocol,
        cache_control=cache_control,
        content_disposition=content_disposition,
        content_encoding=content_encoding,
        content_language=content_language,
        content_type=content_type,
    )
python
{ "resource": "" }
q30110
FileService.get_file_service_properties
train
def get_file_service_properties(self, timeout=None):
    '''
    Gets the properties of a storage account's File service, including
    Azure Storage Analytics.

    :param int timeout:
        The timeout parameter is expressed in seconds.
    :return: The file service properties.
    :rtype:
        :class:`~azure.storage.common.models.ServiceProperties`
    '''
    request = HTTPRequest()
    request.method = 'GET'
    request.host_locations = self._get_host_locations()
    request.path = _get_path()
    request.query = {'restype': 'service',
                     'comp': 'properties',
                     'timeout': _int_to_str(timeout)}
    return self._perform_request(request, _convert_xml_to_service_properties)
python
{ "resource": "" }
q30111
FileService._list_shares
train
def _list_shares(self, prefix=None, marker=None, max_results=None, include=None, timeout=None, _context=None):
    '''
    Returns a list of the shares under the specified account.

    :param str prefix:
        Filters the results to return only shares whose names
        begin with the specified prefix.
    :param str marker:
        A string value that identifies the portion of the list
        to be returned with the next list operation. The operation returns
        a next_marker value within the response body if the list returned was
        not complete. The marker value may then be used in a subsequent
        call to request the next set of list items. The marker value is
        opaque to the client.
    :param int max_results:
        Specifies the maximum number of shares to return. A single list
        request may return up to 1000 shares and potentially a continuation
        token which should be followed to get additional results.
    :param string include:
        Include this parameter to specify that either the share's
        metadata, snapshots or both be returned as part of the response body.
        Set this parameter to string 'metadata' to get share's metadata,
        to 'snapshots' to get all the share snapshots, or to
        'snapshots,metadata' for both.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    '''
    request = HTTPRequest()
    request.method = 'GET'
    request.host_locations = self._get_host_locations()
    request.path = _get_path()
    request.query = {
        'comp': 'list',
        'prefix': _to_str(prefix),
        'marker': _to_str(marker),
        'maxresults': _int_to_str(max_results),
        'include': _to_str(include),
        'timeout': _int_to_str(timeout),
    }
    return self._perform_request(request, _convert_xml_to_shares,
                                 operation_context=_context)
python
{ "resource": "" }
q30112
FileService.snapshot_share
train
def snapshot_share(self, share_name, metadata=None, quota=None, timeout=None):
    '''
    Creates a snapshot of an existing share under the specified account.

    :param str share_name:
        The name of the share to create a snapshot of.
    :param metadata:
        A dict with name_value pairs to associate with the
        share as metadata. Example:{'Category':'test'}
    :type metadata: a dict of str to str:
    :param int quota:
        Specifies the maximum size of the share, in gigabytes. Must be
        greater than 0, and less than or equal to 5TB (5120).
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :return: snapshot properties
    :rtype: azure.storage.file.models.Share
    '''
    _validate_not_none('share_name', share_name)

    request = HTTPRequest()
    request.method = 'PUT'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name)
    request.query = {'restype': 'share',
                     'comp': 'snapshot',
                     'timeout': _int_to_str(timeout)}
    request.headers = {'x-ms-share-quota': _int_to_str(quota)}
    _add_metadata_headers(metadata, request)

    # The parser receives the share name so it can label the snapshot result.
    return self._perform_request(request, _parse_snapshot_share, [share_name])
python
{ "resource": "" }
q30113
FileService.get_share_acl
train
def get_share_acl(self, share_name, timeout=None):
    '''
    Gets the permissions for the specified share.

    :param str share_name:
        Name of existing share.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :return: A dictionary of access policies associated with the share.
    :rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`)
    '''
    _validate_not_none('share_name', share_name)

    request = HTTPRequest()
    request.method = 'GET'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name)
    request.query = {'restype': 'share',
                     'comp': 'acl',
                     'timeout': _int_to_str(timeout)}
    return self._perform_request(request, _convert_xml_to_signed_identifiers)
python
{ "resource": "" }
q30114
FileService.delete_share
train
def delete_share(self, share_name, fail_not_exist=False, timeout=None, snapshot=None, delete_snapshots=None):
    '''
    Marks the specified share for deletion. If the share
    does not exist, the operation fails on the service. By
    default, the exception is swallowed by the client.
    To expose the exception, specify True for fail_not_exist.

    :param str share_name:
        Name of share to delete.
    :param bool fail_not_exist:
        Specify whether to throw an exception when the share doesn't
        exist. False by default.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :param str snapshot:
        A string that represents the snapshot version, if applicable.
        Specify this argument to delete a specific snapshot only.
        delete_snapshots must be None if this is specified.
    :param ~azure.storage.file.models.DeleteSnapshot delete_snapshots:
        To delete a share that has snapshots, this must be specified as
        DeleteSnapshot.Include.
    :return: True if share is deleted, False share doesn't exist.
    :rtype: bool
    '''
    _validate_not_none('share_name', share_name)

    request = HTTPRequest()
    request.method = 'DELETE'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name)
    request.headers = {'x-ms-delete-snapshots': _to_str(delete_snapshots)}
    request.query = {
        'restype': 'share',
        'timeout': _int_to_str(timeout),
        'sharesnapshot': _to_str(snapshot),
    }

    if fail_not_exist:
        # Caller wants the not-found error surfaced.
        self._perform_request(request)
        return True
    try:
        self._perform_request(request, expected_errors=[_SHARE_NOT_FOUND_ERROR_CODE])
        return True
    except AzureHttpError as ex:
        # Swallow only the "share not found" error; re-raise anything else.
        _dont_fail_not_exist(ex)
        return False
python
{ "resource": "" }
q30115
FileService.create_directory
train
def create_directory(self, share_name, directory_name, metadata=None, fail_on_exist=False, timeout=None):
    '''
    Creates a new directory under the specified share or parent directory.
    If the directory with the same name already exists, the operation fails
    on the service. By default, the exception is swallowed by the client.
    To expose the exception, specify True for fail_on_exists.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        Name of directory to create, including the path to the parent
        directory.
    :param metadata:
        A dict with name_value pairs to associate with the
        share as metadata. Example:{'Category':'test'}
    :type metadata: dict(str, str):
    :param bool fail_on_exist:
        specify whether to throw an exception when the directory exists.
        False by default.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :return: True if directory is created, False if directory already exists.
    :rtype: bool
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('directory_name', directory_name)

    request = HTTPRequest()
    request.method = 'PUT'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name, directory_name)
    request.query = {
        'restype': 'directory',
        'timeout': _int_to_str(timeout),
    }
    _add_metadata_headers(metadata, request)

    if not fail_on_exist:
        try:
            # expected_errors takes a list of error codes; pass the code in a
            # list, consistent with delete_share's use of expected_errors.
            self._perform_request(request, expected_errors=[_RESOURCE_ALREADY_EXISTS_ERROR_CODE])
            return True
        except AzureHttpError as ex:
            _dont_fail_on_exist(ex)
            return False
    else:
        self._perform_request(request)
        return True
python
{ "resource": "" }
q30116
FileService.get_directory_properties
train
def get_directory_properties(self, share_name, directory_name, timeout=None, snapshot=None):
    '''
    Returns all user-defined metadata and system properties for the
    specified directory. The data returned does not include the directory's
    list of files.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
       The path to an existing directory.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :param str snapshot:
        A string that represents the snapshot version, if applicable.
    :return: properties for the specified directory within a directory object.
    :rtype: :class:`~azure.storage.file.models.Directory`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('directory_name', directory_name)

    request = HTTPRequest()
    request.method = 'GET'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name, directory_name)
    request.query = {'restype': 'directory',
                     'timeout': _int_to_str(timeout),
                     'sharesnapshot': _to_str(snapshot)}
    return self._perform_request(request, _parse_directory, [directory_name])
python
{ "resource": "" }
q30117
FileService.copy_file
train
def copy_file(self, share_name, directory_name, file_name, copy_source, metadata=None, timeout=None):
    '''
    Copies a file asynchronously. This operation returns a copy operation
    properties object, including a copy ID you can use to check or abort the
    copy operation. The File service copies files on a best-effort basis.

    If the destination file exists, it will be overwritten. The destination
    file cannot be modified while the copy operation is in progress.

    :param str share_name:
        Name of the destination share. The share must exist.
    :param str directory_name:
        Name of the destination directory. The directory must exist.
    :param str file_name:
        Name of the destination file. If the destination file exists, it will
        be overwritten. Otherwise, it will be created.
    :param str copy_source:
        A URL of up to 2 KB in length that specifies an Azure file or blob.
        The value should be URL-encoded as it would appear in a request URI.
        If the source is in another account, the source must either be public
        or must be authenticated via a shared access signature. If the source
        is public, no authentication is required.
        Examples:
        https://myaccount.file.core.windows.net/myshare/mydir/myfile
        https://otheraccount.file.core.windows.net/myshare/mydir/myfile?sastoken
    :param metadata:
        Name-value pairs associated with the file as metadata. If no
        name-value pairs are specified, the operation will copy the metadata
        from the source blob or file to the destination file. If one or more
        name-value pairs are specified, the destination file is created with
        the specified metadata, and the metadata is not copied from the
        source blob or file.
    :type metadata: dict(str, str).
    :param int timeout:
        The timeout parameter is expressed in seconds.
    :return: Copy operation properties such as status, source, and ID.
    :rtype: :class:`~azure.storage.file.models.CopyProperties`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('copy_source', copy_source)

    request = HTTPRequest()
    request.method = 'PUT'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = {'timeout': _int_to_str(timeout)}
    request.headers = {'x-ms-copy-source': _to_str(copy_source)}
    _add_metadata_headers(metadata, request)

    # Parse full file properties, then surface only the copy sub-object.
    parsed = self._perform_request(request, _parse_properties, [FileProperties])
    return parsed.copy
python
{ "resource": "" }
q30118
FileService.abort_copy_file
train
def abort_copy_file(self, share_name, directory_name, file_name, copy_id, timeout=None):
    '''
    Aborts a pending copy_file operation, and leaves a destination file
    with zero length and full metadata.

    :param str share_name:
         Name of destination share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of destination file.
    :param str copy_id:
        Copy identifier provided in the copy.id of the original
        copy_file operation.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('copy_id', copy_id)

    request = HTTPRequest()
    request.method = 'PUT'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = {
        'comp': 'copy',
        'copyid': _to_str(copy_id),
        'timeout': _int_to_str(timeout),
    }
    request.headers = {'x-ms-copy-action': 'abort'}
    self._perform_request(request)
python
{ "resource": "" }
q30119
FileService.create_file_from_path
train
def create_file_from_path(self, share_name, directory_name, file_name, local_file_path, content_settings=None, metadata=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None):
    '''
    Creates a new azure file from a local file path, or updates the content
    of an existing file, with automatic chunking and progress notifications.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of file to create or update.
    :param str local_file_path:
        Path of the local file to upload as the file content.
    :param ~azure.storage.file.models.ContentSettings content_settings:
        ContentSettings object used for setting file properties.
    :param metadata:
        Name-value pairs associated with the file as metadata.
    :type metadata: dict(str, str)
    :param bool validate_content:
        If true, calculates an MD5 hash for each range of the file. The
        storage service checks the hash of the content that has arrived with
        the hash that was sent. This is primarily valuable for detecting
        bitflips on the wire if using http instead of https as https (the
        default) will already validate. Note that this MD5 hash is not stored
        with the file.
    :param progress_callback:
        Callback for progress with signature function(current, total) where
        current is the number of bytes transfered so far and total is the
        size of the file, or None if the total size is unknown.
    :type progress_callback: func(current, total)
    :param int max_connections:
        Maximum number of parallel connections to use.
    :param int timeout:
        The timeout parameter is expressed in seconds. This method may make
        multiple calls to the Azure service and the timeout will apply to
        each call individually.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('local_file_path', local_file_path)

    # Determine the size up front so the stream upload can chunk correctly.
    total_bytes = path.getsize(local_file_path)
    with open(local_file_path, 'rb') as stream:
        self.create_file_from_stream(
            share_name, directory_name, file_name, stream, total_bytes,
            content_settings, metadata, validate_content, progress_callback,
            max_connections, timeout)
python
{ "resource": "" }
q30120
_HTTPClient.set_proxy
train
def set_proxy(self, host, port, user, password):
    '''
    Sets the proxy server host and port for the HTTP CONNECT Tunnelling.

    Note that we set the proxies directly on the request later on rather than
    using the session object as requests has a bug where session proxy is
    ignored in favor of environment proxy. So, auth will not work unless it
    is passed directly when making the request as this overrides both.

    :param str host:
        Address of the proxy. Ex: '192.168.0.100'
    :param int port:
        Port of the proxy. Ex: 6000
    :param str user:
        User for proxy authorization.
    :param str password:
        Password for proxy authorization.
    '''
    # Embed credentials only when both user and password are supplied.
    credentials = '{}:{}@'.format(user, password) if user and password else ''
    endpoint = '{}{}:{}'.format(credentials, host, port)
    self.proxies = {'http': 'http://' + endpoint,
                    'https': 'https://' + endpoint}
python
{ "resource": "" }
q30121
_Retry._should_retry
train
def _should_retry(self, context): ''' A function which determines whether or not to retry. :param ~azure.storage.models.RetryContext context: The retry context. This contains the request, response, and other data which can be used to determine whether or not to retry. :return: A boolean indicating whether or not to retry the request. :rtype: bool ''' # If max attempts are reached, do not retry. if context.count >= self.max_attempts: return False status = None if context.response and context.response.status: status = context.response.status if status is None: ''' If status is None, retry as this request triggered an exception. For example, network issues would trigger this. ''' return True elif 200 <= status < 300: ''' This method is called after a successful response, meaning we failed during the response body download or parsing. So, success codes should be retried. ''' return True elif 300 <= status < 500: ''' An exception occured, but in most cases it was expected. Examples could include a 309 Conflict or 412 Precondition Failed. ''' if status == 404 and context.location_mode == LocationMode.SECONDARY: # Response code 404 should be retried if secondary was used. return True if status == 408: # Response code 408 is a timeout and should be retried. return True return False elif status >= 500: ''' Response codes above 500 with the exception of 501 Not Implemented and 505 Version Not Supported indicate a server issue and should be retried. ''' if status == 501 or status == 505: return False return True else: # If something else happened, it's unexpected. Retry. return True
python
{ "resource": "" }
q30122
_get_path
train
def _get_path(queue_name=None, include_messages=None, message_id=None):
    '''
    Creates the path to access a queue resource.

    queue_name: Name of queue.
    include_messages: Whether or not to include messages.
    message_id: Message id.
    '''
    # Without a queue name there is nothing more specific than the root.
    if not queue_name:
        return '/'
    if include_messages and message_id:
        return '/{0}/messages/{1}'.format(_str(queue_name), message_id)
    if include_messages:
        return '/{0}/messages'.format(_str(queue_name))
    return '/{0}'.format(_str(queue_name))
python
{ "resource": "" }
q30123
_parse_metadata
train
def _parse_metadata(response):
    '''
    Extracts out resource metadata information from the 'x-ms-meta-*'
    response headers, keyed by the name with the prefix stripped.
    '''
    if response is None or response.headers is None:
        return None

    prefix = 'x-ms-meta-'
    metadata = _dict()
    for header, value in response.headers.items():
        if header.lower().startswith(prefix):
            metadata[header[len(prefix):]] = _to_str(value)
    return metadata
python
{ "resource": "" }
q30124
render_icon
train
def render_icon(icon, **kwargs):
    """
    Render a Bootstrap glyphicon icon
    """
    css = add_css_class(
        "glyphicon glyphicon-{icon}".format(icon=icon),
        kwargs.get("extra_classes", ""),
    )
    attrs = {"class": css}
    # Only emit a title attribute when one was actually provided.
    title = kwargs.get("title")
    if title:
        attrs["title"] = title
    return render_tag("span", attrs=attrs)
python
{ "resource": "" }
q30125
render_alert
train
def render_alert(content, alert_type=None, dismissable=True):
    """
    Render a Bootstrap alert
    """
    if not alert_type:
        alert_type = "info"
    css_classes = ["alert", "alert-" + text_value(alert_type)]

    button_html = ""
    if dismissable:
        css_classes.append("alert-dismissable")
        button_html = (
            '<button type="button" class="close" '
            'data-dismiss="alert" aria-hidden="true">&times;</button>'
        )

    # Render with a placeholder, then substitute the raw button markup so the
    # button HTML is not escaped by the tag renderer.
    placeholder = "__BUTTON__"
    rendered = render_tag(
        "div",
        attrs={"class": " ".join(css_classes)},
        content=placeholder + text_value(content),
    )
    return mark_safe(rendered.replace(placeholder, button_html))
python
{ "resource": "" }
q30126
text_concat
train
def text_concat(*args, **kwargs):
    """
    Concatenate several values as a text string with an optional separator
    """
    sep = text_value(kwargs.get("separator", ""))
    # Drop empty values before joining.
    parts = [text_value(arg) for arg in args]
    return sep.join(part for part in parts if part)
python
{ "resource": "" }
q30127
bootstrap_message_classes
train
def bootstrap_message_classes(message):
    """
    Return the message classes for a message
    """
    # Messages may lack extra_tags entirely; treat missing/falsy as empty.
    extra_tags = getattr(message, "extra_tags", None)
    if not extra_tags:
        extra_tags = ""
    classes = [extra_tags]

    # Map the message level to its alert class; unknown levels fall back to
    # the danger alert. A missing level attribute adds nothing.
    _missing = object()
    level = getattr(message, "level", _missing)
    if level is not _missing:
        classes.append(MESSAGE_LEVEL_CLASSES.get(level, "alert alert-danger"))

    return " ".join(classes).strip()
python
{ "resource": "" }
q30128
bootstrap_css
train
def bootstrap_css():
    """
    Return HTML for Bootstrap CSS. Adjust url in settings.
    If no url is returned, we don't want this statement to return any HTML.
    This is intended behavior.

    Default value: ``None``

    This value is configurable, see Settings section

    **Tag name**::

        bootstrap_css

    **Usage**::

        {% bootstrap_css %}

    **Example**::

        {% bootstrap_css %}
    """
    rendered_urls = [render_link_tag(bootstrap_css_url())]
    # Call bootstrap_theme_url() once instead of twice.
    theme_url = bootstrap_theme_url()
    if theme_url:
        rendered_urls.append(render_link_tag(theme_url))
    # Join the list directly; the identity comprehension was redundant.
    return mark_safe("".join(rendered_urls))
python
{ "resource": "" }
q30129
bootstrap_javascript
train
def bootstrap_javascript(jquery=None):
    """
    Return HTML ``<script>`` tags for Bootstrap JavaScript, optionally
    preceded by jQuery.

    :param jquery: truthy to include jQuery; defaults to the
        ``include_jquery`` setting.

    **Usage**::

        {% bootstrap_javascript jquery=1 %}
    """
    if jquery is None:
        jquery = get_bootstrap_setting("include_jquery", False)
    # NOTE: No async on scripts, not mature enough. See issue #52 and #56.
    tags = []
    if jquery:
        jquery_url = bootstrap_jquery_url()
        if jquery_url:
            tags.append(render_script_tag(jquery_url))
    bootstrap_url = bootstrap_javascript_url()
    if bootstrap_url:
        tags.append(render_script_tag(bootstrap_url))
    return mark_safe("".join(tags))
python
{ "resource": "" }
q30130
bootstrap_buttons
train
def bootstrap_buttons(parser, token):
    """
    Template tag: render submit/reset buttons for a form.

    **Usage**::

        {% buttons submit='OK' reset="Cancel" %}{% endbuttons %}
    """
    node_kwargs = parse_token_contents(parser, token)
    # Everything up to {% endbuttons %} becomes the node's child content.
    node_kwargs["nodelist"] = parser.parse(("endbuttons",))
    parser.delete_first_token()
    return ButtonsNode(**node_kwargs)
python
{ "resource": "" }
q30131
bootstrap_messages
train
def bootstrap_messages(context, *args, **kwargs):
    """
    Render django.contrib.messages messages in Bootstrap alert containers
    using the ``bootstrap3/messages.html`` template.

    **Usage**::

        {% bootstrap_messages %}
    """
    # Force Django 1.8+ style: plain dicts instead of Context objects.
    # TODO: This may be due to a bug in Django 1.8/1.9+
    if Context and isinstance(context, Context):
        context = context.flatten()
    context.update({"message_constants": message_constants})
    return render_template_file("bootstrap3/messages.html", context=context)
python
{ "resource": "" }
q30132
get_pagination_context
train
def get_pagination_context(
    page, pages_to_show=11, url=None, size=None, extra=None, parameter_name="page"
):
    """
    Generate Bootstrap pagination context from a page object.

    :param page: Django paginator ``Page`` object.
    :param pages_to_show: number of page links to display; must be >= 1.
    :param url: base URL; any existing ``parameter_name`` GET parameter is
        stripped and a proper separator ("?" or "&") is appended.
    :param size: "small" or "large" for Bootstrap pagination sizing.
    :param extra: extra query-string fragment appended to the URL.
    :param parameter_name: name of the page GET parameter.
    :raises ValueError: if ``pages_to_show`` is smaller than 1.
    :return: dict of context values for the pagination template.
    """
    pages_to_show = int(pages_to_show)
    if pages_to_show < 1:
        raise ValueError(
            "Pagination pages_to_show should be a positive integer, you specified {pages}".format(
                pages=pages_to_show
            )
        )
    num_pages = page.paginator.num_pages
    current_page = page.number
    # Half the window on each side of the current page.
    half_page_num = int(floor(pages_to_show / 2))
    if half_page_num < 0:
        half_page_num = 0
    first_page = current_page - half_page_num
    if first_page <= 1:
        first_page = 1
    if first_page > 1:
        # Target page for the "jump back" link.
        pages_back = first_page - half_page_num
        if pages_back < 1:
            pages_back = 1
    else:
        pages_back = None
    last_page = first_page + pages_to_show - 1
    if pages_back is None:
        # No back link: show one extra page at the end of the window.
        last_page += 1
    if last_page > num_pages:
        last_page = num_pages
    if last_page < num_pages:
        # Target page for the "jump forward" link.
        pages_forward = last_page + half_page_num
        if pages_forward > num_pages:
            pages_forward = num_pages
    else:
        pages_forward = None
    if first_page > 1:
        first_page -= 1
    if pages_back is not None and pages_back > 1:
        pages_back -= 1
    else:
        pages_back = None
    pages_shown = []
    for i in range(first_page, last_page + 1):
        pages_shown.append(i)
    # Append proper character to url
    if url:
        # Remove existing page GET parameters
        url = force_text(url)
        url = re.sub(r"\?{0}\=[^\&]+".format(parameter_name), "?", url)
        url = re.sub(r"\&{0}\=[^\&]+".format(parameter_name), "", url)
        # Append proper separator
        if "?" in url:
            url += "&"
        else:
            url += "?"
    # Append extra string to url
    if extra:
        if not url:
            url = "?"
        url += force_text(extra) + "&"
    if url:
        url = url.replace("?&", "?")
    # Set CSS classes, see http://getbootstrap.com/components/#pagination
    pagination_css_classes = ["pagination"]
    if size == "small":
        pagination_css_classes.append("pagination-sm")
    elif size == "large":
        pagination_css_classes.append("pagination-lg")
    # Build context object
    return {
        "bootstrap_pagination_url": url,
        "num_pages": num_pages,
        "current_page": current_page,
        "first_page": first_page,
        "last_page": last_page,
        "pages_shown": pages_shown,
        "pages_back": pages_back,
        "pages_forward": pages_forward,
        "pagination_css_classes": " ".join(pagination_css_classes),
        "parameter_name": parameter_name,
    }
python
{ "resource": "" }
q30133
handle_var
train
def handle_var(value, context):
    """Resolve a template-tag variable ``value`` against ``context``."""
    # FilterExpression and Variable instances resolve immediately.
    if isinstance(value, (FilterExpression, Variable)):
        return value.resolve(context)
    # Return quoted strings unquoted
    # http://djangosnippets.org/snippets/886
    match = QUOTED_STRING.search(value)
    if match:
        return match.group("noquotes")
    # Otherwise resolve as a variable name, falling back to the raw string.
    try:
        return Variable(value).resolve(context)
    except VariableDoesNotExist:
        return value
python
{ "resource": "" }
q30134
parse_token_contents
train
def parse_token_contents(parser, token):
    """Split a template-tag ``token`` into tag name, args, kwargs and 'as' var."""
    bits = token.split_contents()
    tag = bits.pop(0)
    asvar = None
    # Trailing "... as varname" names the output variable.
    if len(bits) >= 2 and bits[-2] == "as":
        asvar = bits[-1]
        bits = bits[:-2]
    args, kwargs = [], {}
    for bit in bits:
        match = kwarg_re.match(bit)
        if not match:
            raise TemplateSyntaxError('Malformed arguments to tag "{}"'.format(tag))
        name, value = match.groups()
        if name:
            kwargs[name] = parser.compile_filter(value)
        else:
            args.append(parser.compile_filter(value))
    return {"tag": tag, "args": args, "kwargs": kwargs, "asvar": asvar}
python
{ "resource": "" }
q30135
split_css_classes
train
def split_css_classes(css_classes):
    """Split a string of CSS classes into a list, dropping empty entries."""
    return [cls for cls in text_value(css_classes).split(" ") if cls]
python
{ "resource": "" }
q30136
add_css_class
train
def add_css_class(css_classes, css_class, prepend=False):
    """Add ``css_class`` to a string of CSS classes, avoiding duplicates."""
    existing = split_css_classes(css_classes)
    new_classes = [c for c in split_css_classes(css_class) if c not in existing]
    combined = new_classes + existing if prepend else existing + new_classes
    return " ".join(combined)
python
{ "resource": "" }
q30137
remove_css_class
train
def remove_css_class(css_classes, css_class):
    """Remove every class in ``css_class`` from a string of CSS classes."""
    to_remove = set(split_css_classes(css_class))
    kept = [c for c in split_css_classes(css_classes) if c not in to_remove]
    return " ".join(kept)
python
{ "resource": "" }
q30138
render_link_tag
train
def render_link_tag(url, rel="stylesheet", media=None):
    """Build an HTML ``<link>`` tag for ``url``."""
    attrs = url_to_attrs_dict(url, url_attr="href")
    attrs.setdefault("href", attrs.pop("url", None))
    attrs["rel"] = rel
    if media:
        attrs["media"] = media
    return render_tag("link", attrs=attrs, close=False)
python
{ "resource": "" }
q30139
render_template_file
train
def render_template_file(template, context=None):
    """Render the named template file to a unicode string."""
    assert isinstance(context, Mapping)
    return get_template(template).render(context)
python
{ "resource": "" }
q30140
url_replace_param
train
def url_replace_param(url, name, value):
    """Return ``url`` with GET parameter ``name`` set to ``value``."""
    components = urlparse(force_str(url))
    params = parse_qs(components.query)
    params[name] = value
    new_query = urlencode(params, doseq=True)
    rebuilt = urlunparse(
        [
            components.scheme,
            components.netloc,
            components.path,
            components.params,
            new_query,
            components.fragment,
        ]
    )
    return force_text(rebuilt)
python
{ "resource": "" }
q30141
url_to_attrs_dict
train
def url_to_attrs_dict(url, url_attr):
    """
    Sanitize url dict as used in django-bootstrap3 settings.

    :param url: either a plain URL string, or a dict with key "url" and
        optional "crossorigin"/"integrity" keys (subresource integrity).
    :param url_attr: attribute name the URL is stored under, e.g. "href".
    :return: dict of HTML attributes.
    :raises BootstrapError: if ``url`` is neither a string nor a dict
        with key "url".
    """
    result = dict()
    # If url is not a string, it should be a dict
    if isinstance(url, six.string_types):
        url_value = url
    else:
        try:
            url_value = url["url"]
        except TypeError:
            raise BootstrapError(
                'Function "url_to_attrs_dict" expects a string or a dict with key "url".'
            )
        # SRI attributes are only meaningful for dict input.
        crossorigin = url.get("crossorigin", None)
        integrity = url.get("integrity", None)
        if crossorigin:
            result["crossorigin"] = crossorigin
        if integrity:
            result["integrity"] = integrity
    result[url_attr] = url_value
    return result
python
{ "resource": "" }
q30142
render_formset
train
def render_formset(formset, **kwargs):
    """Render a formset to a Bootstrap layout."""
    return get_formset_renderer(**kwargs)(formset, **kwargs).render()
python
{ "resource": "" }
q30143
render_formset_errors
train
def render_formset_errors(formset, **kwargs):
    """Render formset errors to a Bootstrap layout."""
    return get_formset_renderer(**kwargs)(formset, **kwargs).render_errors()
python
{ "resource": "" }
q30144
render_form
train
def render_form(form, **kwargs):
    """Render a form to a Bootstrap layout."""
    return get_form_renderer(**kwargs)(form, **kwargs).render()
python
{ "resource": "" }
q30145
render_form_errors
train
def render_form_errors(form, error_types="non_field_errors", **kwargs):
    """Render form errors of the given type(s) to a Bootstrap layout."""
    return get_form_renderer(**kwargs)(form, **kwargs).render_errors(error_types)
python
{ "resource": "" }
q30146
render_field
train
def render_field(field, **kwargs):
    """Render a form field to a Bootstrap layout."""
    return get_field_renderer(**kwargs)(field, **kwargs).render()
python
{ "resource": "" }
q30147
render_label
train
def render_label(content, label_for=None, label_class=None, label_title=""):
    """Render an HTML ``<label>`` element around ``content``."""
    # Keep only the attributes that were actually provided.
    attrs = {
        attr: value
        for attr, value in (
            ("for", label_for),
            ("class", label_class),
            ("title", label_title),
        )
        if value
    }
    return render_tag("label", attrs=attrs, content=content)
python
{ "resource": "" }
q30148
render_button
train
def render_button(
    content,
    button_type=None,
    icon=None,
    button_class="btn-default",
    size="",
    href="",
    name=None,
    value=None,
    title=None,
    extra_classes="",
    id="",
):
    """
    Render a Bootstrap button with content.

    :param content: button label (may contain HTML; the result is marked safe)
    :param button_type: "submit", "reset", "button", "link" or None
    :param icon: optional icon name rendered before the content
    :param button_class: contextual Bootstrap class, e.g. "btn-default"
    :param size: "xs", "sm"/"small", "md"/"medium", "lg"/"large" or ""
    :param href: if set, an ``<a>`` tag is rendered instead of ``<button>``
    :param name: HTML ``name`` attribute
    :param value: HTML ``value`` attribute
    :param title: HTML ``title`` attribute
    :param extra_classes: additional CSS classes
    :param id: HTML ``id`` attribute
    :raises BootstrapError: on an unknown ``size`` or ``button_type``
    """
    attrs = {}
    classes = add_css_class("btn", button_class)
    size = text_value(size).lower().strip()
    if size == "xs":
        classes = add_css_class(classes, "btn-xs")
    elif size == "sm" or size == "small":
        classes = add_css_class(classes, "btn-sm")
    elif size == "lg" or size == "large":
        classes = add_css_class(classes, "btn-lg")
    elif size == "md" or size == "medium":
        # Medium is the Bootstrap default size; no extra class needed.
        pass
    elif size:
        raise BootstrapError(
            'Parameter "size" should be "xs", "sm", "lg" or '
            + 'empty ("{}" given).'.format(size)
        )
    if button_type:
        if button_type not in ("submit", "reset", "button", "link"):
            raise BootstrapError(
                'Parameter "button_type" should be "submit", "reset", '
                + '"button", "link" or empty ("{}" given).'.format(button_type)
            )
        attrs["type"] = button_type
    classes = add_css_class(classes, extra_classes)
    attrs["class"] = classes
    icon_content = render_icon(icon) if icon else ""
    if href:
        # Links render as anchors styled like buttons.
        attrs["href"] = href
        tag = "a"
    else:
        tag = "button"
    if id:
        attrs["id"] = id
    if name:
        attrs["name"] = name
    if value:
        attrs["value"] = value
    if title:
        attrs["title"] = title
    return render_tag(
        tag,
        attrs=attrs,
        content=mark_safe(text_concat(icon_content, content, separator=" ")),
    )
python
{ "resource": "" }
q30149
render_field_and_label
train
def render_field_and_label(
    field, label, field_class="", label_for=None, label_class="", layout="", **kwargs
):
    """Render ``field`` together with its ``label`` in a Bootstrap layout."""
    if layout == "horizontal":
        # Horizontal layouts get configurable grid classes and always a label.
        label_class = label_class or get_bootstrap_setting("horizontal_label_class")
        field_class = field_class or get_bootstrap_setting("horizontal_field_class")
        if not label:
            label = mark_safe("&#160;")
        label_class = add_css_class(label_class, "control-label")
    html = field
    if field_class:
        html = '<div class="{klass}">{html}</div>'.format(klass=field_class, html=html)
    if label:
        html = render_label(label, label_for=label_for, label_class=label_class) + html
    return html
python
{ "resource": "" }
q30150
is_widget_required_attribute
train
def is_widget_required_attribute(widget):
    """Return whether ``widget`` should get an HTML ``required`` attribute."""
    return bool(widget.is_required) and not isinstance(widget, WIDGETS_NO_REQUIRED)
python
{ "resource": "" }
q30151
is_widget_with_placeholder
train
def is_widget_with_placeholder(widget):
    """
    Return whether ``widget`` is a text-like input that accepts a placeholder.

    Only text, search, url, tel, e-mail, password and number inputs do.
    """
    placeholder_widgets = (
        TextInput,
        Textarea,
        NumberInput,
        EmailInput,
        URLInput,
        PasswordInput,
    )
    return isinstance(widget, placeholder_widgets)
python
{ "resource": "" }
q30152
ArtifactStatistics._PrintDictAsTable
train
def _PrintDictAsTable(self, src_dict):
  """Prints a Markdown table of the values in src_dict, sorted by key.

  Args:
    src_dict (dict[str, ArtifactDefinition]): artifact definitions by name.
  """
  sorted_keys = sorted(src_dict)
  # Header row, alignment row, value row, then a blank line.
  print('|' + ''.join(' {0:s} |'.format(key) for key in sorted_keys))
  print('|' + ' :---: |' * len(sorted_keys))
  print('|' + ''.join(' {0!s} |'.format(src_dict[key]) for key in sorted_keys))
  print('')
python
{ "resource": "" }
q30153
ArtifactStatistics.BuildStats
train
def BuildStats(self):
  """Builds statistics over the artifact definitions in the 'data' directory.

  Populates label_counts, os_counts, path_count, reg_key_count,
  source_type_counts and total_count on this instance.
  """
  artifact_reader = reader.YamlArtifactsReader()
  self.label_counts = {}
  self.os_counts = {}
  self.path_count = 0
  self.reg_key_count = 0
  self.source_type_counts = {}
  self.total_count = 0

  for artifact_definition in artifact_reader.ReadDirectory('data'):
    if hasattr(artifact_definition, 'labels'):
      for label in artifact_definition.labels:
        self.label_counts[label] = self.label_counts.get(label, 0) + 1

    for source in artifact_definition.sources:
      self.total_count += 1
      source_type = source.type_indicator
      self.source_type_counts[source_type] = self.source_type_counts.get(
          source_type, 0) + 1

      # Count keys/paths depending on the source type.
      if source_type == definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY:
        self.reg_key_count += len(source.keys)
      elif source_type == definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE:
        self.reg_key_count += len(source.key_value_pairs)
      elif source_type in (definitions.TYPE_INDICATOR_FILE,
                           definitions.TYPE_INDICATOR_DIRECTORY):
        self.path_count += len(source.paths)

      # Count per supported operating system (per source).
      os_list = source.supported_os
      for os_str in os_list:
        self.os_counts[os_str] = self.os_counts.get(os_str, 0) + 1
python
{ "resource": "" }
q30154
ArtifactStatistics.PrintStats
train
def PrintStats(self):
  """Builds the statistics and prints them in MarkDown format."""
  self.BuildStats()
  # Print the tables in a fixed order.
  for print_table in (
      self.PrintSummaryTable, self.PrintSourceTypeTable,
      self.PrintOSTable, self.PrintLabelTable):
    print_table()
python
{ "resource": "" }
q30155
WindowsRegistryKeySourceType.ValidateKey
train
def ValidateKey(cls, key_path):
  """Validates a Windows Registry key path against the supported prefixes.

  Args:
    key_path (str): path of a Windows Registry key.

  Raises:
    FormatError: when the key path is not supported.
  """
  for prefix in cls.VALID_PREFIXES:
    if key_path.startswith(prefix):
      return

  # TODO: move check to validator.
  if key_path.startswith('HKEY_CURRENT_USER\\'):
    raise errors.FormatError(
        'HKEY_CURRENT_USER\\ is not supported instead use: '
        'HKEY_USERS\\%%users.sid%%\\')

  # Fixed typo in the error message: "Unupported" -> "Unsupported".
  raise errors.FormatError(
      'Unsupported Registry key path: {0:s}'.format(key_path))
python
{ "resource": "" }
q30156
SourceTypeFactory.CreateSourceType
train
def CreateSourceType(cls, type_indicator, attributes):
  """Creates a source type for the given type indicator.

  Args:
    type_indicator (str): source type indicator.
    attributes (dict[str, object]): source type attributes.

  Returns:
    SourceType: a source type.

  Raises:
    FormatError: if the type indicator is not set or unsupported, or
        if required attributes are missing.
  """
  source_type_class = cls._source_type_classes.get(type_indicator, None)
  if source_type_class is None:
    raise errors.FormatError(
        'Unsupported type indicator: {0:s}.'.format(type_indicator))

  return source_type_class(**attributes)
python
{ "resource": "" }
q30157
SourceTypeFactory.DeregisterSourceType
train
def DeregisterSourceType(cls, source_type_class):
  """Deregisters a source type, identified by its type indicator.

  Args:
    source_type_class (type): source type.

  Raises:
    KeyError: if a source type is not set for the corresponding type
        indicator.
  """
  type_indicator = source_type_class.TYPE_INDICATOR
  if type_indicator not in cls._source_type_classes:
    raise KeyError(
        'Source type not set for type: {0:s}.'.format(type_indicator))

  del cls._source_type_classes[type_indicator]
python
{ "resource": "" }
q30158
SourceTypeFactory.RegisterSourceType
train
def RegisterSourceType(cls, source_type_class):
  """Registers a source type, identified by its type indicator.

  Args:
    source_type_class (type): source type.

  Raises:
    KeyError: if source types is already set for the corresponding
        type indicator.
  """
  type_indicator = source_type_class.TYPE_INDICATOR
  if type_indicator in cls._source_type_classes:
    raise KeyError(
        'Source type already set for type: {0:s}.'.format(type_indicator))

  cls._source_type_classes[type_indicator] = source_type_class
python
{ "resource": "" }
q30159
ArtifactDefinitionsValidator._CheckMacOSPaths
train
def _CheckMacOSPaths(self, filename, artifact_definition, source, paths):
  """Checks if the paths are valid MacOS paths.

  Paths under the private sub paths (e.g. /etc, /tmp, /var) must be defined
  both via /private (e.g. /private/etc) and via the symbolic link (/etc).

  Args:
    filename (str): name of the artifacts definition file.
    artifact_definition (ArtifactDefinition): artifact definition.
    source (SourceType): source definition.
    paths (list[str]): paths to validate.

  Returns:
    bool: True if the MacOS paths are valid.
  """
  result = True

  paths_with_private = []
  paths_with_symbolic_link_to_private = []
  for path in paths:
    path_lower = path.lower()
    path_segments = path_lower.split(source.separator)

    if not path_segments:
      logging.warning((
          'Empty path defined by artifact definition: {0:s} in file: '
          '{1:s}').format(artifact_definition.name, filename))
      result = False

    elif len(path_segments) == 1:
      continue

    elif path_segments[1] in self._MACOS_PRIVATE_SUB_PATHS:
      paths_with_symbolic_link_to_private.append(path)

    # BUGFIX: the guard must require at least 3 segments since
    # path_segments[2] is read below; the previous ">= 2" check raised
    # IndexError for the path "/private".
    elif path_segments[1] == 'private' and len(path_segments) >= 3:
      if path_segments[2] in self._MACOS_PRIVATE_SUB_PATHS:
        paths_with_private.append(path)
      else:
        logging.warning((
            'Unsupported private path: {0:s} defined by artifact definition: '
            '{1:s} in file: {2:s}').format(
                path, artifact_definition.name, filename))
        result = False

  # Every /private/<sub> path needs a matching /<sub> symbolic-link path.
  for private_path in paths_with_private:
    if private_path[8:] not in paths_with_symbolic_link_to_private:
      logging.warning((
          'Missing symbolic link: {0:s} for path: {1:s} defined by artifact '
          'definition: {2:s} in file: {3:s}').format(
              private_path[8:], private_path, artifact_definition.name,
              filename))
      result = False

  # And every /<sub> path needs a matching /private/<sub> path.
  for path in paths_with_symbolic_link_to_private:
    private_path = '/private{0:s}'.format(path)
    if private_path not in paths_with_private:
      logging.warning((
          'Missing path: {0:s} for symbolic link: {1:s} defined by artifact '
          'definition: {2:s} in file: {3:s}').format(
              private_path, path, artifact_definition.name, filename))
      result = False

  return result
python
{ "resource": "" }
q30160
ArtifactDefinitionsValidator._CheckWindowsRegistryKeyPath
train
def _CheckWindowsRegistryKeyPath(
    self, filename, artifact_definition, key_path):
  """Checks if a path is a valid Windows Registry key path.

  Rejects %%CURRENT_CONTROL_SET%% prefixes and discourages environment and
  users variables in key paths (except HKEY_USERS\\%%users.sid%%).

  Args:
    filename (str): name of the artifacts definition file.
    artifact_definition (ArtifactDefinition): artifact definition.
    key_path (str): Windows Registry key path to validate.

  Returns:
    bool: True if the Windows Registry key path is valid.
  """
  result = True
  key_path_segments = key_path.lower().split('\\')

  if key_path_segments[0] == '%%current_control_set%%':
    result = False
    logging.warning((
        'Artifact definition: {0:s} in file: {1:s} contains Windows '
        'Registry key path that starts with '
        '%%CURRENT_CONTROL_SET%%. Replace %%CURRENT_CONTROL_SET%% with '
        'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet').format(
            artifact_definition.name, filename))

  for segment_index, key_path_segment in enumerate(key_path_segments):
    if key_path_segment.startswith('%%') and key_path_segment.endswith('%%'):
      # "HKEY_USERS\%%users.sid%%" is the only allowed variable usage.
      if (segment_index == 1 and key_path_segment == '%%users.sid%%' and
          key_path_segments[0] == 'hkey_users'):
        continue

      if key_path_segment.startswith('%%environ_'):
        result = False
        logging.warning((
            'Artifact definition: {0:s} in file: {1:s} contains Windows '
            'Registry key path that contains an environment variable: '
            '"{2:s}". Usage of environment variables in key paths is not '
            'encouraged at this time.').format(
                artifact_definition.name, filename, key_path_segment))
      elif key_path_segment.startswith('%%users.'):
        result = False
        logging.warning((
            'Artifact definition: {0:s} in file: {1:s} contains Windows '
            'Registry key path that contains a users variable: "{2:s}". '
            'Usage of users variables in key paths, except for '
            '"HKEY_USERS\\%%users.sid%%", is not encouraged at this '
            'time.').format(
                artifact_definition.name, filename, key_path_segment))

  return result
python
{ "resource": "" }
q30161
ArtifactDefinitionsValidator._HasDuplicateRegistryKeyPaths
train
def _HasDuplicateRegistryKeyPaths(
    self, filename, artifact_definition, source):
  """Checks if Registry key paths are not already defined by other artifacts.

  Note that at the moment this function will only find exact duplicate
  Registry key paths.

  Args:
    filename (str): name of the artifacts definition file.
    artifact_definition (ArtifactDefinition): artifact definition.
    source (SourceType): source definition.

  Returns:
    bool: True if the Registry key paths defined by the source type
        are used in other artifacts.
  """
  duplicates = self._artifact_registry_key_paths.intersection(set(source.keys))
  if duplicates:
    logging.warning((
        'Artifact definition: {0:s} in file: {1:s} has duplicate '
        'Registry key paths:\n{2:s}').format(
            artifact_definition.name, filename, '\n'.join(duplicates)))

  # Remember these key paths for subsequent duplicate checks.
  self._artifact_registry_key_paths.update(source.keys)
  return bool(duplicates)
python
{ "resource": "" }
q30162
ArtifactDefinitionsValidator.CheckFile
train
def CheckFile(self, filename):
  """Validates the artifacts definition in a specific file.

  Registers each definition (flagging duplicates) and checks the paths and
  Registry keys of each source against platform-specific rules.

  Args:
    filename (str): name of the artifacts definition file.

  Returns:
    bool: True if the file contains valid artifacts definitions.
  """
  result = True
  artifact_reader = reader.YamlArtifactsReader()

  try:
    for artifact_definition in artifact_reader.ReadFile(filename):
      try:
        self._artifact_registry.RegisterDefinition(artifact_definition)
      except KeyError:
        logging.warning(
            'Duplicate artifact definition: {0:s} in file: {1:s}'.format(
                artifact_definition.name, filename))
        result = False

      artifact_definition_supports_macos = (
          definitions.SUPPORTED_OS_DARWIN in (
              artifact_definition.supported_os))
      artifact_definition_supports_windows = (
          definitions.SUPPORTED_OS_WINDOWS in (
              artifact_definition.supported_os))

      for source in artifact_definition.sources:
        if source.type_indicator in (
            definitions.TYPE_INDICATOR_FILE, definitions.TYPE_INDICATOR_PATH):

          # A source without its own supported_os inherits the artifact's.
          if (definitions.SUPPORTED_OS_DARWIN in source.supported_os or (
              artifact_definition_supports_macos and
              not source.supported_os)):
            if not self._CheckMacOSPaths(
                filename, artifact_definition, source, source.paths):
              result = False

          elif (artifact_definition_supports_windows or
                definitions.SUPPORTED_OS_WINDOWS in source.supported_os):
            for path in source.paths:
              if not self._CheckWindowsPath(
                  filename, artifact_definition, source, path):
                result = False

        elif source.type_indicator == (
            definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY):
          # Exempt the legacy file from duplicate checking because it has
          # duplicates intentionally.
          if (filename != self.LEGACY_PATH and
              self._HasDuplicateRegistryKeyPaths(
                  filename, artifact_definition, source)):
            result = False

          for key_path in source.keys:
            if not self._CheckWindowsRegistryKeyPath(
                filename, artifact_definition, key_path):
              result = False

        elif source.type_indicator == (
            definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE):
          for key_value_pair in source.key_value_pairs:
            if not self._CheckWindowsRegistryKeyPath(
                filename, artifact_definition, key_value_pair['key']):
              result = False

  except errors.FormatError as exception:
    logging.warning(
        'Unable to validate file: {0:s} with error: {1!s}'.format(
            filename, exception))
    result = False

  return result
python
{ "resource": "" }
q30163
ArtifactDefinitionsRegistry.DeregisterDefinition
train
def DeregisterDefinition(self, artifact_definition):
  """Deregisters an artifact definition.

  Artifact definitions are identified based on their lower case name.

  Args:
    artifact_definition (ArtifactDefinition): an artifact definition.

  Raises:
    KeyError: if an artifact definition is not set for the corresponding
        name.
  """
  lookup_key = artifact_definition.name.lower()
  if lookup_key not in self._artifact_definitions:
    raise KeyError(
        'Artifact definition not set for name: {0:s}.'.format(
            artifact_definition.name))

  del self._artifact_definitions[lookup_key]
python
{ "resource": "" }
q30164
ArtifactDefinitionsRegistry.RegisterDefinition
train
def RegisterDefinition(self, artifact_definition):
  """Registers an artifact definition.

  Artifact definitions are identified based on their lower case name.

  Args:
    artifact_definition (ArtifactDefinition): an artifact definition.

  Raises:
    KeyError: if artifact definition is already set for the corresponding
        name.
  """
  lookup_key = artifact_definition.name.lower()
  if lookup_key in self._artifact_definitions:
    raise KeyError(
        'Artifact definition already set for name: {0:s}.'.format(
            artifact_definition.name))

  self._artifact_definitions[lookup_key] = artifact_definition
  self._defined_artifact_names.add(artifact_definition.name)

  # Track artifact names referenced by artifact group sources.
  for source in artifact_definition.sources:
    if source.type_indicator == definitions.TYPE_INDICATOR_ARTIFACT_GROUP:
      self._artifact_name_references.update(source.names)
python
{ "resource": "" }
q30165
ArtifactDefinitionsRegistry.ReadFromDirectory
train
def ReadFromDirectory(self, artifacts_reader, path, extension='yaml'):
  """Reads artifact definitions into the registry from files in a directory.

  This function does not recurse sub directories.

  Args:
    artifacts_reader (ArtifactsReader): an artifacts reader.
    path (str): path of the directory to read from.
    extension (Optional[str]): extension of the filenames to read.

  Raises:
    KeyError: if a duplicate artifact definition is encountered.
  """
  definitions_generator = artifacts_reader.ReadDirectory(
      path, extension=extension)
  for artifact_definition in definitions_generator:
    self.RegisterDefinition(artifact_definition)
python
{ "resource": "" }
q30166
ArtifactDefinitionsRegistry.ReadFromFile
train
def ReadFromFile(self, artifacts_reader, filename):
  """Reads artifact definitions into the registry from a file.

  Args:
    artifacts_reader (ArtifactsReader): an artifacts reader.
    filename (str): name of the file to read from.
  """
  for artifact_definition in artifacts_reader.ReadFile(filename):
    self.RegisterDefinition(artifact_definition)
python
{ "resource": "" }
q30167
ArtifactDefinitionsRegistry.ReadFileObject
train
def ReadFileObject(self, artifacts_reader, file_object):
  """Reads artifact definitions into the registry from a file-like object.

  Args:
    artifacts_reader (ArtifactsReader): an artifacts reader.
    file_object (file): file-like object to read from.
  """
  for artifact_definition in artifacts_reader.ReadFileObject(file_object):
    self.RegisterDefinition(artifact_definition)
python
{ "resource": "" }
q30168
DependencyDefinitionReader._GetConfigValue
train
def _GetConfigValue(self, config_parser, section_name, value_name):
  """Retrieves a value from the config parser.

  Args:
    config_parser (ConfigParser): configuration parser.
    section_name (str): name of the section that contains the value.
    value_name (str): name of the value.

  Returns:
    object: configuration value or None if the value does not exist.
  """
  # A missing section still raises NoSectionError, matching get().
  if not config_parser.has_option(section_name, value_name):
    return None
  return config_parser.get(section_name, value_name)
python
{ "resource": "" }
q30169
DependencyDefinitionReader.Read
train
def Read(self, file_object):
  """Reads dependency definitions.

  Args:
    file_object (file): file-like object to read from.

  Yields:
    DependencyDefinition: dependency definition.
  """
  config_parser = configparser.RawConfigParser()

  # readfp() was deprecated and removed in Python 3.12; prefer read_file()
  # and keep readfp() as a fallback for Python 2.
  read_file = getattr(config_parser, 'read_file', None)
  if read_file is None:
    read_file = config_parser.readfp  # pylint: disable=deprecated-method
  read_file(file_object)

  for section_name in config_parser.sections():
    dependency_definition = DependencyDefinition(section_name)
    for value_name in self._VALUE_NAMES:
      value = self._GetConfigValue(config_parser, section_name, value_name)
      setattr(dependency_definition, value_name, value)

    yield dependency_definition
python
{ "resource": "" }
q30170
DependencyHelper._CheckPythonModuleVersion
train
def _CheckPythonModuleVersion(
    self, module_name, module_object, version_property, minimum_version,
    maximum_version):
  """Checks the version of a Python module.

  Args:
    module_name (str): name of the Python module.
    module_object (module): Python module.
    version_property (str): version attribute name, or a function name
        ending in "()" that returns the version when called.
    minimum_version (str): minimum version.
    maximum_version (str): maximum version.

  Returns:
    tuple: consists:

      bool: True if the Python module is available and conforms to
          the minimum required version, False otherwise.
      str: status message.
  """
  module_version = None
  if not version_property.endswith('()'):
    module_version = getattr(module_object, version_property, None)
  else:
    version_method = getattr(
        module_object, version_property[:-2], None)
    if version_method:
      module_version = version_method()

  if not module_version:
    status_message = (
        'unable to determine version information for: {0:s}').format(
            module_name)
    return False, status_message

  # Make sure the module version is a string.
  module_version = '{0!s}'.format(module_version)

  # Split the version string and convert every digit into an integer.
  # A string compare of both version strings will yield an incorrect result.

  # Strip any semantic suffixes such as a1, b1, pre, post, rc, dev.
  module_version = self._VERSION_NUMBERS_REGEX.findall(module_version)[0]

  if module_version[-1] == '.':
    module_version = module_version[:-1]

  try:
    module_version_map = list(
        map(int, self._VERSION_SPLIT_REGEX.split(module_version)))
  except ValueError:
    status_message = 'unable to parse module version: {0:s} {1:s}'.format(
        module_name, module_version)
    return False, status_message

  if minimum_version:
    try:
      minimum_version_map = list(
          map(int, self._VERSION_SPLIT_REGEX.split(minimum_version)))
    except ValueError:
      status_message = 'unable to parse minimum version: {0:s} {1:s}'.format(
          module_name, minimum_version)
      return False, status_message

    # Tuple/list comparison is element-wise, so this is a semantic
    # version compare rather than a string compare.
    if module_version_map < minimum_version_map:
      status_message = (
          '{0:s} version: {1!s} is too old, {2!s} or later required').format(
              module_name, module_version, minimum_version)
      return False, status_message

  if maximum_version:
    try:
      maximum_version_map = list(
          map(int, self._VERSION_SPLIT_REGEX.split(maximum_version)))
    except ValueError:
      status_message = 'unable to parse maximum version: {0:s} {1:s}'.format(
          module_name, maximum_version)
      return False, status_message

    if module_version_map > maximum_version_map:
      status_message = (
          '{0:s} version: {1!s} is too recent, {2!s} or earlier '
          'required').format(module_name, module_version, maximum_version)
      return False, status_message

  status_message = '{0:s} version: {1!s}'.format(module_name, module_version)
  return True, status_message
python
{ "resource": "" }
q30171
DependencyHelper._PrintCheckDependencyStatus
train
def _PrintCheckDependencyStatus( self, dependency, result, status_message, verbose_output=True): """Prints the check dependency status. Args: dependency (DependencyDefinition): dependency definition. result (bool): True if the Python module is available and conforms to the minimum required version, False otherwise. status_message (str): status message. verbose_output (Optional[bool]): True if output should be verbose. """ if not result or dependency.is_optional: if dependency.is_optional: status_indicator = '[OPTIONAL]' else: status_indicator = '[FAILURE]' print('{0:s}\t{1:s}'.format(status_indicator, status_message)) elif verbose_output: print('[OK]\t\t{0:s}'.format(status_message))
python
{ "resource": "" }
q30172
ArtifactsReader._ReadLabels
train
def _ReadLabels(self, artifact_definition_values, artifact_definition, name): """Reads the optional artifact definition labels. Args: artifact_definition_values (dict[str, object]): artifact definition values. artifact_definition (ArtifactDefinition): an artifact definition. name (str): name of the artifact definition. Raises: FormatError: if there are undefined labels. """ labels = artifact_definition_values.get('labels', []) undefined_labels = set(labels).difference(self.labels) if undefined_labels: raise errors.FormatError( 'Artifact definition: {0:s} found undefined labels: {1:s}.'.format( name, ', '.join(undefined_labels))) artifact_definition.labels = labels
python
{ "resource": "" }
q30173
ArtifactsReader._ReadSupportedOS
train
def _ReadSupportedOS(self, definition_values, definition_object, name): """Reads the optional artifact or source type supported OS. Args: definition_values (dict[str, object]): artifact definition values. definition_object (ArtifactDefinition|SourceType): the definition object. name (str): name of the artifact definition. Raises: FormatError: if there are undefined supported operating systems. """ supported_os = definition_values.get('supported_os', []) if not isinstance(supported_os, list): raise errors.FormatError( 'Invalid supported_os type: {0!s}'.format(type(supported_os))) undefined_supported_os = set(supported_os).difference(self.supported_os) if undefined_supported_os: error_string = ( 'Artifact definition: {0:s} undefined supported operating system: ' '{1:s}.').format(name, ', '.join(undefined_supported_os)) raise errors.FormatError(error_string) definition_object.supported_os = supported_os
python
{ "resource": "" }
q30174
ArtifactsReader._ReadSources
train
def _ReadSources(self, artifact_definition_values, artifact_definition, name): """Reads the artifact definition sources. Args: artifact_definition_values (dict[str, object]): artifact definition values. artifact_definition (ArtifactDefinition): an artifact definition. name (str): name of the artifact definition. Raises: FormatError: if the type indicator is not set or unsupported, or if required attributes are missing. """ sources = artifact_definition_values.get('sources') if not sources: raise errors.FormatError( 'Invalid artifact definition: {0:s} missing sources.'.format(name)) for source in sources: type_indicator = source.get('type', None) if not type_indicator: raise errors.FormatError( 'Invalid artifact definition: {0:s} source type.'.format(name)) attributes = source.get('attributes', None) try: source_type = artifact_definition.AppendSource( type_indicator, attributes) except errors.FormatError as exception: raise errors.FormatError( 'Invalid artifact definition: {0:s}, with error: {1!s}'.format( name, exception)) # TODO: deprecate these left overs from the collector definition. if source_type: if source.get('returned_types', None): raise errors.FormatError(( 'Invalid artifact definition: {0:s} returned_types no longer ' 'supported.').format(name)) source_type.conditions = source.get('conditions', []) self._ReadSupportedOS(source, source_type, name) if set(source_type.supported_os) - set( artifact_definition.supported_os): raise errors.FormatError(( 'Invalid artifact definition: {0:s} missing ' 'supported_os.').format(name))
python
{ "resource": "" }
q30175
ArtifactsReader.ReadArtifactDefinitionValues
train
def ReadArtifactDefinitionValues(self, artifact_definition_values): """Reads an artifact definition from a dictionary. Args: artifact_definition_values (dict[str, object]): artifact definition values. Returns: ArtifactDefinition: an artifact definition. Raises: FormatError: if the format of the artifact definition is not set or incorrect. """ if not artifact_definition_values: raise errors.FormatError('Missing artifact definition values.') different_keys = ( set(artifact_definition_values) - definitions.TOP_LEVEL_KEYS) if different_keys: different_keys = ', '.join(different_keys) raise errors.FormatError('Undefined keys: {0:s}'.format(different_keys)) name = artifact_definition_values.get('name', None) if not name: raise errors.FormatError('Invalid artifact definition missing name.') # The description is assumed to be mandatory. description = artifact_definition_values.get('doc', None) if not description: raise errors.FormatError( 'Invalid artifact definition: {0:s} missing description.'.format( name)) artifact_definition = artifact.ArtifactDefinition( name, description=description) if artifact_definition_values.get('collectors', []): raise errors.FormatError( 'Invalid artifact definition: {0:s} still uses collectors.'.format( name)) urls = artifact_definition_values.get('urls', []) if not isinstance(urls, list): raise errors.FormatError( 'Invalid artifact definition: {0:s} urls is not a list.'.format( name)) # TODO: check conditions. artifact_definition.conditions = artifact_definition_values.get( 'conditions', []) artifact_definition.provides = artifact_definition_values.get( 'provides', []) self._ReadLabels(artifact_definition_values, artifact_definition, name) self._ReadSupportedOS(artifact_definition_values, artifact_definition, name) artifact_definition.urls = urls self._ReadSources(artifact_definition_values, artifact_definition, name) return artifact_definition
python
{ "resource": "" }
q30176
ArtifactsReader.ReadDirectory
train
def ReadDirectory(self, path, extension='yaml'): """Reads artifact definitions from a directory. This function does not recurse sub directories. Args: path (str): path of the directory to read from. extension (Optional[str]): extension of the filenames to read. Yields: ArtifactDefinition: an artifact definition. """ if extension: glob_spec = os.path.join(path, '*.{0:s}'.format(extension)) else: glob_spec = os.path.join(path, '*') for artifact_file in glob.glob(glob_spec): for artifact_definition in self.ReadFile(artifact_file): yield artifact_definition
python
{ "resource": "" }
q30177
ArtifactsReader.ReadFile
train
def ReadFile(self, filename): """Reads artifact definitions from a file. Args: filename (str): name of the file to read from. Yields: ArtifactDefinition: an artifact definition. """ with io.open(filename, 'r', encoding='utf-8') as file_object: for artifact_definition in self.ReadFileObject(file_object): yield artifact_definition
python
{ "resource": "" }
q30178
ArtifactWriter.WriteArtifactsFile
train
def WriteArtifactsFile(self, artifacts, filename): """Writes artifact definitions to a file. Args: artifacts (list[ArtifactDefinition]): artifact definitions to be written. filename (str): name of the file to write artifacts to. """ with open(filename, 'w') as file_object: file_object.write(self.FormatArtifacts(artifacts))
python
{ "resource": "" }
q30179
ArtifactDefinition.AppendSource
train
def AppendSource(self, type_indicator, attributes): """Appends a source. If you want to implement your own source type you should create a subclass in source_type.py and change the AppendSource method to handle the new subclass. This function raises FormatError if an unsupported source type indicator is encountered. Args: type_indicator (str): source type indicator. attributes (dict[str, object]): source attributes. Returns: SourceType: a source type. Raises: FormatError: if the type indicator is not set or unsupported, or if required attributes are missing. """ if not type_indicator: raise errors.FormatError('Missing type indicator.') try: source_object = registry.ArtifactDefinitionsRegistry.CreateSourceType( type_indicator, attributes) except (AttributeError, TypeError) as exception: raise errors.FormatError(( 'Unable to create source type: {0:s} for artifact definition: {1:s} ' 'with error: {2!s}').format(type_indicator, self.name, exception)) self.sources.append(source_object) return source_object
python
{ "resource": "" }
q30180
ArtifactDefinition.AsDict
train
def AsDict(self): """Represents an artifact as a dictionary. Returns: dict[str, object]: artifact attributes. """ sources = [] for source in self.sources: source_definition = { 'type': source.type_indicator, 'attributes': source.AsDict() } if source.supported_os: source_definition['supported_os'] = source.supported_os if source.conditions: source_definition['conditions'] = source.conditions sources.append(source_definition) artifact_definition = { 'name': self.name, 'doc': self.description, 'sources': sources, } if self.labels: artifact_definition['labels'] = self.labels if self.supported_os: artifact_definition['supported_os'] = self.supported_os if self.provides: artifact_definition['provides'] = self.provides if self.conditions: artifact_definition['conditions'] = self.conditions if self.urls: artifact_definition['urls'] = self.urls return artifact_definition
python
{ "resource": "" }
q30181
Cognito.renew_access_token
train
def renew_access_token(self): """ Sets a new access token on the User using the refresh token. """ auth_params = {'REFRESH_TOKEN': self.refresh_token} self._add_secret_hash(auth_params, 'SECRET_HASH') refresh_response = self.client.initiate_auth( ClientId=self.client_id, AuthFlow='REFRESH_TOKEN', AuthParameters=auth_params, ) self._set_attributes( refresh_response, { 'access_token': refresh_response['AuthenticationResult']['AccessToken'], 'id_token': refresh_response['AuthenticationResult']['IdToken'], 'token_type': refresh_response['AuthenticationResult']['TokenType'] } )
python
{ "resource": "" }
q30182
Cognito.initiate_forgot_password
train
def initiate_forgot_password(self): """ Sends a verification code to the user to use to change their password. """ params = { 'ClientId': self.client_id, 'Username': self.username } self._add_secret_hash(params, 'SecretHash') self.client.forgot_password(**params)
python
{ "resource": "" }
q30183
Cognito.change_password
train
def change_password(self, previous_password, proposed_password): """ Change the User password """ self.check_token() response = self.client.change_password( PreviousPassword=previous_password, ProposedPassword=proposed_password, AccessToken=self.access_token ) self._set_attributes(response, {'password': proposed_password})
python
{ "resource": "" }
q30184
Cognito._add_secret_hash
train
def _add_secret_hash(self, parameters, key): """ Helper function that computes SecretHash and adds it to a parameters dictionary at a specified key """ if self.client_secret is not None: secret_hash = AWSSRP.get_secret_hash(self.username, self.client_id, self.client_secret) parameters[key] = secret_hash
python
{ "resource": "" }
q30185
hash_sha256
train
def hash_sha256(buf): """AuthenticationHelper.hash""" a = hashlib.sha256(buf).hexdigest() return (64 - len(a)) * '0' + a
python
{ "resource": "" }
q30186
Wallet.create_new_address_for_user
train
def create_new_address_for_user(self, user_id): """Create a new bitcoin address to accept payments for a User. This is a convenience wrapper around `get_child` that helps you do the right thing. This method always creates a public, non-prime address that can be generated from a BIP32 public key on an insecure server.""" max_id = 0x80000000 if user_id < 0 or user_id > max_id: raise ValueError( "Invalid UserID. Must be between 0 and %s" % max_id) return self.get_child(user_id, is_prime=False, as_private=False)
python
{ "resource": "" }
q30187
Wallet.get_child_for_path
train
def get_child_for_path(self, path): """Get a child for a given path. Rather than repeated calls to get_child, children can be found by a derivation path. Paths look like: m/0/1'/10 Which is the same as self.get_child(0).get_child(-1).get_child(10) Or, in other words, the 10th publicly derived child of the 1st privately derived child of the 0th publicly derived child of master. You can use either ' or p to denote a prime (that is, privately derived) child. A child that has had its private key stripped can be requested by either passing a capital M or appending '.pub' to the end of the path. These three paths all give the same child that has had its private key scrubbed: M/0/1 m/0/1.pub M/0/1.pub """ path = ensure_str(path) if not path: raise InvalidPathError("%s is not a valid path" % path) # Figure out public/private derivation as_private = True if path.startswith("M"): as_private = False if path.endswith(".pub"): as_private = False path = path[:-4] parts = path.split("/") if len(parts) == 0: raise InvalidPathError() child = self for part in parts: if part.lower() == "m": continue is_prime = None # Let primeness be figured out by the child number if part[-1] in "'p": is_prime = True part = part.replace("'", "").replace("p", "") try: child_number = long_or_int(part) except ValueError: raise InvalidPathError("%s is not a valid path" % path) child = child.get_child(child_number, is_prime) if not as_private: return child.public_copy() return child
python
{ "resource": "" }
q30188
Wallet.public_copy
train
def public_copy(self): """Clone this wallet and strip it of its private information.""" return self.__class__( chain_code=self.chain_code, depth=self.depth, parent_fingerprint=self.parent_fingerprint, child_number=self.child_number, public_pair=self.public_key.to_public_pair(), network=self.network)
python
{ "resource": "" }
q30189
Wallet.serialize
train
def serialize(self, private=True): """Serialize this key. :param private: Whether or not the serialized key should contain private information. Set to False for a public-only representation that cannot spend funds but can create children. You want private=False if you are, for example, running an e-commerce website and want to accept bitcoin payments. See the README for more information. :type private: bool, defaults to True See the spec in `deserialize` for more details. """ if private and not self.private_key: raise ValueError("Cannot serialize a public key as private") if private: network_version = long_to_hex( self.network.EXT_SECRET_KEY, 8) else: network_version = long_to_hex( self.network.EXT_PUBLIC_KEY, 8) depth = long_to_hex(self.depth, 2) parent_fingerprint = self.parent_fingerprint[2:] # strip leading 0x child_number = long_to_hex(self.child_number, 8) chain_code = self.chain_code ret = (network_version + depth + parent_fingerprint + child_number + chain_code) # Private and public serializations are slightly different if private: ret += b'00' + self.private_key.get_key() else: ret += self.get_public_key_hex(compressed=True) return ensure_bytes(ret.lower())
python
{ "resource": "" }
q30190
Wallet.serialize_b58
train
def serialize_b58(self, private=True): """Encode the serialized node in base58.""" return ensure_str( base58.b58encode_check(unhexlify(self.serialize(private))))
python
{ "resource": "" }
q30191
Wallet.to_address
train
def to_address(self): """Create a public address from this Wallet. Public addresses can accept payments. https://en.bitcoin.it/wiki/Technical_background_of_Bitcoin_addresses """ key = unhexlify(self.get_public_key_hex()) # First get the hash160 of the key hash160_bytes = hash160(key) # Prepend the network address byte network_hash160_bytes = \ chr_py2(self.network.PUBKEY_ADDRESS) + hash160_bytes # Return a base58 encoded address with a checksum return ensure_str(base58.b58encode_check(network_hash160_bytes))
python
{ "resource": "" }
q30192
Wallet.deserialize
train
def deserialize(cls, key, network="bitcoin_testnet"):
    """Load the ExtendedBip32Key from a hex key.

    The key consists of

        * 4 byte version bytes (network key)
        * 1 byte depth:
            - 0x00 for master nodes,
            - 0x01 for level-1 descendants, ....
        * 4 byte fingerprint of the parent's key (0x00000000 if master key)
        * 4 byte child number. This is the number i in x_i = x_{par}/i,
          with x_i the key being serialized. This is encoded in MSB order.
          (0x00000000 if master key)
        * 32 bytes: the chain code
        * 33 bytes: the public key or private key data
          (0x02 + X or 0x03 + X for public keys, 0x00 + k for private keys)
          (Note that this also supports 0x04 + X + Y uncompressed points,
          but this is totally non-standard and this library won't even
          generate such data.)
    """
    network = Wallet.get_network(network)

    # Accept raw bytes, hex text or Base58Check, normalizing to raw bytes.
    if len(key) in [78, (78 + 32)]:
        # we have a byte array, so pass
        pass
    else:
        key = ensure_bytes(key)
        if len(key) in [78 * 2, (78 + 32) * 2]:
            # we have a hexlified non-base58 key, continue!
            key = unhexlify(key)
        elif len(key) == 111:
            # We have a base58 encoded string
            key = base58.b58decode_check(key)

    # Now that we double checked the values, convert back to bytes because
    # they're easier to slice
    version, depth, parent_fingerprint, child, chain_code, key_data = (
        key[:4], key[4], key[5:9], key[9:13], key[13:45], key[45:])

    version_long = long_or_int(hexlify(version), 16)
    exponent = None
    pubkey = None
    # On Python 2 indexing bytes yields a 1-char str; normalize to an int.
    point_type = key_data[0]
    if not isinstance(point_type, six.integer_types):
        point_type = ord(point_type)
    if point_type == 0:
        # Private key
        if version_long != network.EXT_SECRET_KEY:
            raise incompatible_network_exception_factory(
                network.NAME, network.EXT_SECRET_KEY,
                version)
        exponent = key_data[1:]
    elif point_type in [2, 3, 4]:
        # Compressed public coordinates
        if version_long != network.EXT_PUBLIC_KEY:
            raise incompatible_network_exception_factory(
                network.NAME, network.EXT_PUBLIC_KEY,
                version)
        pubkey = PublicKey.from_hex_key(key_data, network=network)
        # Even though this was generated from a compressed pubkey, we
        # want to store it as an uncompressed pubkey
        pubkey.compressed = False
    else:
        raise ValueError("Invalid key_data prefix, got %s" % point_type)

    # Convert a byte sequence to an int; pass ints and None through.
    def l(byte_seq):
        if byte_seq is None:
            return byte_seq
        elif isinstance(byte_seq, six.integer_types):
            return byte_seq
        return long_or_int(hexlify(byte_seq), 16)

    return cls(depth=l(depth),
               parent_fingerprint=l(parent_fingerprint),
               child_number=l(child),
               chain_code=l(chain_code),
               private_exponent=l(exponent),
               public_key=pubkey,
               network=network)
python
{ "resource": "" }
q30193
Wallet.from_master_secret
train
def from_master_secret(cls, seed, network="bitcoin_testnet"): """Generate a new PrivateKey from a secret key. :param seed: The key to use to generate this wallet. It may be a long string. Do not use a phrase from a book or song, as that will be guessed and is not secure. My advice is to not supply this argument and let me generate a new random key for you. See https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#Serialization_format # nopep8 """ network = Wallet.get_network(network) seed = ensure_bytes(seed) # Given a seed S of at least 128 bits, but 256 is advised # Calculate I = HMAC-SHA512(key="Bitcoin seed", msg=S) I = hmac.new(b"Bitcoin seed", msg=seed, digestmod=sha512).digest() # Split I into two 32-byte sequences, IL and IR. I_L, I_R = I[:32], I[32:] # Use IL as master secret key, and IR as master chain code. return cls(private_exponent=long_or_int(hexlify(I_L), 16), chain_code=long_or_int(hexlify(I_R), 16), network=network)
python
{ "resource": "" }
q30194
Wallet.from_master_secret_slow
train
def from_master_secret_slow(cls, password, network=BitcoinMainNet): """ Generate a new key from a password using 50,000 rounds of HMAC-SHA256. This should generate the same result as bip32.org. WARNING: The security of this method has not been evaluated. """ # Make sure the password string is bytes key = ensure_bytes(password) data = unhexlify(b"0" * 64) # 256-bit 0 for i in range(50000): data = hmac.new(key, msg=data, digestmod=sha256).digest() return cls.from_master_secret(data, network)
python
{ "resource": "" }
q30195
Wallet.new_random_wallet
train
def new_random_wallet(cls, user_entropy=None, network=BitcoinMainNet): """ Generate a new wallet using a randomly generated 512 bit seed. Args: user_entropy: Optional user-supplied entropy which is combined combined with the random seed, to help counteract compromised PRNGs. You are encouraged to add an optional `user_entropy` string to protect against a compromised CSPRNG. This will be combined with the output from the CSPRNG. Note that if you do supply this value it only adds additional entropy and will not be sufficient to recover the random wallet. If you're even saving `user_entropy` at all, you're doing it wrong. """ seed = str(urandom(64)) # 512/8 # weak extra protection inspired by pybitcointools implementation: seed += str(int(time.time()*10**6)) if user_entropy: user_entropy = str(user_entropy) # allow for int/long seed += user_entropy return cls.from_master_secret(seed, network=network)
python
{ "resource": "" }
q30196
get_bytes
train
def get_bytes(s): """Returns the byte representation of a hex- or byte-string.""" if isinstance(s, bytes): b = s elif isinstance(s, str): b = bytes.fromhex(s) else: raise TypeError("s must be either 'bytes' or 'str'!") return b
python
{ "resource": "" }
q30197
PrivateKey.from_b58check
train
def from_b58check(private_key): """ Decodes a Base58Check encoded private-key. Args: private_key (str): A Base58Check encoded private key. Returns: PrivateKey: A PrivateKey object """ b58dec = base58.b58decode_check(private_key) version = b58dec[0] assert version in [PrivateKey.TESTNET_VERSION, PrivateKey.MAINNET_VERSION] return PrivateKey(int.from_bytes(b58dec[1:], 'big'))
python
{ "resource": "" }
q30198
PrivateKey.to_b58check
train
def to_b58check(self, testnet=False): """ Generates a Base58Check encoding of this private key. Returns: str: A Base58Check encoded string representing the key. """ version = self.TESTNET_VERSION if testnet else self.MAINNET_VERSION return base58.b58encode_check(bytes([version]) + bytes(self))
python
{ "resource": "" }
q30199
PublicKey.from_int
train
def from_int(i): """ Generates a public key object from an integer. Note: This assumes that the upper 32 bytes of the integer are the x component of the public key point and the lower 32 bytes are the y component. Args: i (Bignum): A 512-bit integer representing the public key point on the secp256k1 curve. Returns: PublicKey: A PublicKey object. """ point = ECPointAffine.from_int(bitcoin_curve, i) return PublicKey.from_point(point)
python
{ "resource": "" }