Columns: desc (string, lengths 3–26.7k) · decl (string, lengths 11–7.89k) · bodies (string, lengths 8–553k)
'Generate a URL to access this key. :type expires_in: int :param expires_in: How long the URL is valid for, in seconds. :type method: string :param method: The method to use for retrieving the file (default is GET). :type headers: dict :param headers: Any headers to pass along in the request. :type query_auth: bool :pa...
def generate_url(self, expires_in, method='GET', headers=None, query_auth=True, force_http=False, response_headers=None, expires_in_absolute=False, version_id=None, policy=None, reduced_redundancy=False, encrypt_key=False):
provider = self.bucket.connection.provider
version_id = version_id or self.version_id
if headers is None:
    headers = {}
else:
    headers = headers.copy()
if policy:
    headers[provider.acl_header] = policy
if reduced_redundancy:
    self.storage_class = 'REDUCED_REDUNDAN...
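A minimal usage sketch for generate_url, assuming credentials are already configured for boto and using illustrative bucket/key names:

import boto

# Connect using credentials from the environment or boto config.
conn = boto.connect_s3()
key = conn.get_bucket('mybucket').get_key('report.csv')   # hypothetical names

# Signed URL valid for one hour; query_auth=True embeds the signature
# in the query string so the link works without AWS credentials.
url = key.generate_url(expires_in=3600, method='GET', query_auth=True)
print(url)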
'Upload a file to a key into a bucket on S3. :type fp: file :param fp: The file pointer to upload. The file pointer must point at the offset from which you wish to upload. i.e. if uploading the full file, it should point at the start of the file. Normally when a file is opened for reading, the fp will point at the first...
def send_file(self, fp, headers=None, cb=None, num_cb=10, query_args=None, chunked_transfer=False, size=None):
self._send_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb, query_args=query_args, chunked_transfer=chunked_transfer, size=size)
':type fp: file :param fp: File pointer to the file to MD5 hash. The file pointer will be reset to the same position before the method returns. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where the file i...
def compute_md5(self, fp, size=None):
hex_digest, b64_digest, data_size = compute_md5(fp, size=size)
self.size = data_size
return (hex_digest, b64_digest)
'Store an object using the name of the Key object as the key in cloud and the contents of the data stream pointed to by \'fp\' as the contents. The stream object is not seekable and total size is not known. This has the implication that we can\'t specify the Content-Length and Content-MD5 in the header. So for huge uploa...
def set_contents_from_stream(self, fp, headers=None, replace=True, cb=None, num_cb=10, policy=None, reduced_redundancy=False, query_args=None, size=None):
provider = self.bucket.connection.provider
if not provider.supports_chunked_transfer():
    raise BotoClientError('%s does not support chunked transfer' % provider.get_provider_name())
if not self.name or self.name == '':
    raise BotoClientError('Cannot determine the...
'Store an object in S3 using the name of the Key object as the key in S3 and the contents of the file pointed to by \'fp\' as the contents. The data is read from \'fp\' from its current position until \'size\' bytes have been read or EOF. :type fp: file :param fp: the file whose contents to upload :type headers: dict :...
def set_contents_from_file(self, fp, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, query_args=None, encrypt_key=False, size=None, rewind=False):
provider = self.bucket.connection.provider
headers = headers or {}
if policy:
    headers[provider.acl_header] = policy
if encrypt_key:
    headers[provider.server_side_encryption_header] = 'AES256'
if rewind:
    fp.seek(0, os.SEEK_SET)
elif not isinstance(fp, KeyFile):
    ...
'Store an object in S3 using the name of the Key object as the key in S3 and the contents of the file named by \'filename\'. See set_contents_from_file method for details about the parameters. :type filename: string :param filename: The name of the file that you want to put onto S3 :type headers: dict :param headers: A...
def set_contents_from_filename(self, filename, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, encrypt_key=False):
with open(filename, 'rb') as fp:
    return self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5, reduced_redundancy, encrypt_key=encrypt_key)
'Store an object in S3 using the name of the Key object as the key in S3 and the string \'s\' as the contents. See set_contents_from_file method for details about the parameters. :type headers: dict :param headers: Additional headers to pass along with the request to AWS. :type replace: bool :param replace: If True, re...
def set_contents_from_string(self, string_data, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, encrypt_key=False):
if not isinstance(string_data, bytes):
    string_data = string_data.encode('utf-8')
fp = BytesIO(string_data)
r = self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5, reduced_redundancy, encrypt_key=encrypt_key)
fp.close()
return r
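The set_contents_from_* methods above are all thin wrappers around set_contents_from_file. A sketch of the common cases, with illustrative names:

import boto

bucket = boto.connect_s3().get_bucket('mybucket')   # hypothetical bucket
key = bucket.new_key('notes/hello.txt')

# From a string; wrapped in a BytesIO internally, as shown above.
key.set_contents_from_string('hello world')

# From a local file; reduced_redundancy and encrypt_key map to the
# storage-class and server-side-encryption request headers.
key.set_contents_from_filename('/tmp/hello.txt', reduced_redundancy=True, encrypt_key=True)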
'Retrieves a file from an S3 Key :type fp: file :param fp: File pointer to put the data into :type headers: dict :param headers: headers to send when retrieving the file :type cb: function :param cb: a callback function that will be called to report progress on the download. The callback should accept two integer parameters,...
def get_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, override_num_retries=None, response_headers=None):
self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb, torrent=torrent, version_id=version_id, override_num_retries=override_num_retries, response_headers=response_headers, hash_algs=None, query_args=None)
'Get a torrent file (see get_file) :type fp: file :param fp: The file pointer of where to put the torrent :type headers: dict :param headers: Headers to be passed :type cb: function :param cb: a callback function that will be called to report progress on the download. The callback should accept two integer parameters...
def get_torrent_file(self, fp, headers=None, cb=None, num_cb=10):
return self.get_file(fp, headers, cb, num_cb, torrent=True)
'Retrieve an object from S3 using the name of the Key object as the key in S3. Write the contents of the object to the file pointed to by \'fp\'. :type fp: file-like object :param fp: :type headers: dict :param headers: additional HTTP headers that will be sent with the GET request. :type cb: function :param cb: a ca...
def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, res_download_handler=None, response_headers=None):
if self.bucket is not None:
    if res_download_handler:
        res_download_handler.get_file(self, fp, headers, cb, num_cb, torrent=torrent, version_id=version_id)
    else:
        self.get_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, response_headers=response_headers)
'Retrieve an object from S3 using the name of the Key object as the key in S3. Store contents of the object to a file named by \'filename\'. See get_contents_to_file method for details about the parameters. :type filename: string :param filename: The filename of where to put the file contents :type headers: dict :para...
def get_contents_to_filename(self, filename, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, res_download_handler=None, response_headers=None):
try:
    with open(filename, 'wb') as fp:
        self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, res_download_handler=res_download_handler, response_headers=response_headers)
except Exception:
    os.remove(filename)
    raise
if self.last_modifie...
'Retrieve an object from S3 using the name of the Key object as the key in S3. Return the contents of the object as a string. See get_contents_to_file method for details about the parameters. :type headers: dict :param headers: Any additional headers to send in the request :type cb: function :param cb: a callback func...
def get_contents_as_string(self, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, response_headers=None, encoding=None):
fp = BytesIO()
self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, response_headers=response_headers)
value = fp.getvalue()
if encoding is not None:
    value = value.decode(encoding)
return value
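The get_contents_* methods mirror the upload side. A sketch of downloading into memory and to disk, with illustrative names:

import boto

bucket = boto.connect_s3().get_bucket('mybucket')   # hypothetical bucket
key = bucket.get_key('notes/hello.txt')

# Into memory; pass encoding to get text instead of bytes.
text = key.get_contents_as_string(encoding='utf-8')

# Into a local file, with a simple progress callback.
def progress(done, total):
    print('%d/%d bytes transferred' % (done, total))

key.get_contents_to_filename('/tmp/hello-copy.txt', cb=progress, num_cb=10)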
'Convenience method that provides a quick way to add an email grant to a key. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUTs the new ACL back to S3. :type permission: string :param permission: The permission being granted. Should ...
def add_email_grant(self, permission, email_address, headers=None):
policy = self.get_acl(headers=headers)
policy.acl.add_email_grant(permission, email_address)
self.set_acl(policy, headers=headers)
'Convenience method that provides a quick way to add a canonical user grant to a key. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUTs the new ACL back to S3. :type permission: string :param permission: The permission being granted...
def add_user_grant(self, permission, user_id, headers=None, display_name=None):
policy = self.get_acl(headers=headers)
policy.acl.add_user_grant(permission, user_id, display_name=display_name)
self.set_acl(policy, headers=headers)
'Extracts metadata from existing URI into a dict, so we can overwrite/delete from it to form the new set of metadata to apply to a key.'
def _get_remote_metadata(self, headers=None):
metadata = {}
for underscore_name in self._underscore_base_user_settable_fields:
    if hasattr(self, underscore_name):
        value = getattr(self, underscore_name)
        if value:
            field_name = underscore_name.replace('_', '-')
            metadata[field_name.lower()] = value...
'Restore an object from an archive. :type days: int :param days: The lifetime of the restored object (must be at least 1 day). If the object is already restored then this parameter can be used to readjust the lifetime of the restored object. In this case, the days param is with respect to the initial time of the requ...
def restore(self, days, headers=None):
response = self.bucket.connection.make_request('POST', self.bucket.name, self.name, data=(self.RestoreBody % days), headers=headers, query_args='restore')
if response.status not in (200, 202):
    provider = self.bucket.connection.provider
    raise provider.storage_response_error(response.status, res...
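A short sketch of restoring an archived (e.g. Glacier-transitioned) object via the method above; bucket and key names are illustrative:

import boto

bucket = boto.connect_s3().get_bucket('mybucket')   # hypothetical bucket
# Issues POST ?restore with the RestoreBody payload; the restored copy
# stays available for the requested number of days.
bucket.get_key('archive/old.log').restore(days=7)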
'Returns a string containing the XML version of the CORS configuration as defined by S3.'
def to_xml(self):
s = '<CORSConfiguration>'
for rule in self:
    s += rule.to_xml()
s += '</CORSConfiguration>'
return s
'Add a rule to this CORS configuration. This only adds the rule to the local copy. To install the new rule(s) on the bucket, you need to pass this CORS config object to the set_cors method of the Bucket object. :type allowed_method: list of str :param allowed_method: An HTTP method that you want to allow the origin...
def add_rule(self, allowed_method, allowed_origin, id=None, allowed_header=None, max_age_seconds=None, expose_header=None):
if not isinstance(allowed_method, (list, tuple)):
    allowed_method = [allowed_method]
if not isinstance(allowed_origin, (list, tuple)):
    allowed_origin = [allowed_origin]
if not isinstance(allowed_origin, (list, tuple)):
    if allowed_origin is None:
        allowed_origin = []...
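A sketch of building and installing a CORS configuration, assuming these methods belong to boto's CORSConfiguration container; bucket name is illustrative:

import boto
from boto.s3.cors import CORSConfiguration

cors = CORSConfiguration()
# Allow browser GETs from any origin; cache preflight results for an hour.
cors.add_rule('GET', '*', allowed_header='*', max_age_seconds=3600)

bucket = boto.connect_s3().get_bucket('mybucket')   # hypothetical bucket
bucket.set_cors(cors)                               # PUTs cors.to_xml()
print(bucket.get_cors().to_xml())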
'Return the uploaded parts of this MultiPart Upload. This is a lower-level method that requires you to manually page through results. To simplify this process, you can just use the object itself as an iterator and it will automatically handle all of the paging with S3.'
def get_all_parts(self, max_parts=None, part_number_marker=None, encoding_type=None):
self._parts = []
query_args = 'uploadId=%s' % self.id
if max_parts:
    query_args += '&max-parts=%d' % max_parts
if part_number_marker:
    query_args += '&part-number-marker=%s' % part_number_marker
if encoding_type:
    query_args += '&encoding-type=%s' % encoding_type
res...
'Upload another part of this MultiPart Upload. .. note:: After you initiate multipart upload and upload one or more parts, you must either complete or abort multipart upload in order to stop getting charged for storage of the uploaded parts. Only after you either complete or abort multipart upload, Amazon S3 frees up t...
def upload_part_from_file(self, fp, part_num, headers=None, replace=True, cb=None, num_cb=10, md5=None, size=None):
if part_num < 1:
    raise ValueError('Part numbers must be greater than zero')
query_args = 'uploadId=%s&partNumber=%d' % (self.id, part_num)
key = self.bucket.new_key(self.key_name)
key.set_contents_from_file(fp, headers=headers, replace=replace, cb=cb, num_cb=num_cb, md5=md5...
'Copy another part of this MultiPart Upload. :type src_bucket_name: string :param src_bucket_name: Name of the bucket containing the source key :type src_key_name: string :param src_key_name: Name of the source key :type part_num: int :param part_num: The number of this part. :type start: int :param start: Zero-based b...
def copy_part_from_key(self, src_bucket_name, src_key_name, part_num, start=None, end=None, src_version_id=None, headers=None):
if part_num < 1:
    raise ValueError('Part numbers must be greater than zero')
query_args = 'uploadId=%s&partNumber=%d' % (self.id, part_num)
if (start is not None) and (end is not None):
    rng = 'bytes=%s-%s' % (start, end)
    provider = self.bucket.connection.prov...
'Complete the MultiPart Upload operation. This method should be called when all parts of the file have been successfully uploaded to S3. :rtype: :class:`boto.s3.multipart.CompletedMultiPartUpload` :returns: An object representing the completed upload.'
def complete_upload(self):
xml = self.to_xml()
return self.bucket.complete_multipart_upload(self.key_name, self.id, xml)
'Cancels a MultiPart Upload operation. The storage consumed by any previously uploaded parts will be freed. However, if any part uploads are currently in progress, those part uploads might or might not succeed. As a result, it might be necessary to abort a given multipart upload multiple times in order to completely f...
def cancel_upload(self):
self.bucket.cancel_multipart_upload(self.key_name, self.id)
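An end-to-end sketch tying the multipart methods together; names are illustrative, and on S3 every part except the last must be at least 5 MB:

import math
import os
import boto

CHUNK = 8 * 1024 * 1024   # 8 MB parts

bucket = boto.connect_s3().get_bucket('mybucket')        # hypothetical bucket
mp = bucket.initiate_multipart_upload('big/backup.tar')
try:
    with open('/tmp/backup.tar', 'rb') as fp:
        size = os.fstat(fp.fileno()).st_size
        num_parts = int(math.ceil(size / float(CHUNK)))
        for part_num in range(1, num_parts + 1):
            # Reads CHUNK bytes (or the remainder) from fp's current offset.
            mp.upload_part_from_file(fp, part_num, size=min(CHUNK, size - fp.tell()))
    mp.complete_upload()
except Exception:
    mp.cancel_upload()   # frees storage held by already-uploaded parts
    raise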
'Identifies an Amazon Kinesis stream as the event source for an AWS Lambda function. AWS Lambda invokes the specified function when records are posted to the stream. This is the pull model, where AWS Lambda invokes the function. For more information, go to `AWS Lambda: How it Works`_ in the AWS Lambda Developer Guide. ...
def add_event_source(self, event_source, function_name, role, batch_size=None, parameters=None):
uri = '/2014-11-13/event-source-mappings/'
params = {'EventSource': event_source, 'FunctionName': function_name, 'Role': role}
headers = {}
query_params = {}
if batch_size is not None:
    params['BatchSize'] = batch_size
if parameters is not None:
    params['Parameters'] = paramete...
'Deletes the specified Lambda function code and configuration. This operation requires permission for the `lambda:DeleteFunction` action. :type function_name: string :param function_name: The Lambda function to delete.'
def delete_function(self, function_name):
uri = '/2014-11-13/functions/{0}'.format(function_name)
return self.make_request('DELETE', uri, expected_status=204)
'Returns configuration information for the specified event source mapping (see AddEventSource). This operation requires permission for the `lambda:GetEventSource` action. :type uuid: string :param uuid: The AWS Lambda assigned ID of the event source mapping.'
def get_event_source(self, uuid):
uri = '/2014-11-13/event-source-mappings/{0}'.format(uuid)
return self.make_request('GET', uri, expected_status=200)
'Returns the configuration information of the Lambda function and a presigned URL link to the .zip file you uploaded with UploadFunction so you can download the .zip file. Note that the URL is valid for up to 10 minutes. The configuration information is the same information you provided as parameters when uploading the...
def get_function(self, function_name):
uri = '/2014-11-13/functions/{0}'.format(function_name)
return self.make_request('GET', uri, expected_status=200)
'Returns the configuration information of the Lambda function. This is the same information you provided as parameters when uploading the function by using UploadFunction. This operation requires permission for the `lambda:GetFunctionConfiguration` operation. :type function_name: string :param function_name: The name of t...
def get_function_configuration(self, function_name):
uri = '/2014-11-13/functions/{0}/configuration'.format(function_name)
return self.make_request('GET', uri, expected_status=200)
'Submits an invocation request to AWS Lambda. Upon receiving the request, Lambda executes the specified function asynchronously. To see the logs generated by the Lambda function execution, see the CloudWatch logs console. This operation requires permission for the `lambda:InvokeAsync` action. :type function_name: strin...
def invoke_async(self, function_name, invoke_args):
uri = '/2014-11-13/functions/{0}/invoke-async/'.format(function_name)
headers = {}
query_params = {}
try:
    content_length = str(len(invoke_args))
except (TypeError, AttributeError):
    try:
        invoke_args.tell()
    except (AttributeError, OSError, IOError):
        rais...
'Returns a list of event source mappings. For each mapping, the API returns configuration information (see AddEventSource). You can optionally specify filters to retrieve specific event source mappings. This operation requires permission for the `lambda:ListEventSources` action. :type event_source_arn: string :param ev...
def list_event_sources(self, event_source_arn=None, function_name=None, marker=None, max_items=None):
uri = '/2014-11-13/event-source-mappings/'
params = {}
headers = {}
query_params = {}
if event_source_arn is not None:
    query_params['EventSource'] = event_source_arn
if function_name is not None:
    query_params['FunctionName'] = function_name
if marker is not None:
    ...
'Returns a list of your Lambda functions. For each function, the response includes the function configuration information. You must use GetFunction to retrieve the code for your function. This operation requires permission for the `lambda:ListFunctions` action. :type marker: string :param marker: Optional string. An op...
def list_functions(self, marker=None, max_items=None):
uri = '/2014-11-13/functions/'
params = {}
headers = {}
query_params = {}
if marker is not None:
    query_params['Marker'] = marker
if max_items is not None:
    query_params['MaxItems'] = max_items
return self.make_request('GET', uri, expected_status=200, data=json.dumps(params...
'Removes an event source mapping. This means AWS Lambda will no longer invoke the function for events in the associated source. This operation requires permission for the `lambda:RemoveEventSource` action. :type uuid: string :param uuid: The event source mapping ID.'
def remove_event_source(self, uuid):
uri = '/2014-11-13/event-source-mappings/{0}'.format(uuid)
return self.make_request('DELETE', uri, expected_status=204)
'Updates the configuration parameters for the specified Lambda function by using the values provided in the request. You provide only the parameters you want to change. This operation must only be used on an existing Lambda function and cannot be used to update the function\'s code. This operation requires permission f...
def update_function_configuration(self, function_name, role=None, handler=None, description=None, timeout=None, memory_size=None):
uri = '/2014-11-13/functions/{0}/configuration'.format(function_name)
params = {}
headers = {}
query_params = {}
if role is not None:
    query_params['Role'] = role
if handler is not None:
    query_params['Handler'] = handler
if description is not None:
    query_params['...
'Creates a new Lambda function or updates an existing function. The function metadata is created from the request parameters, and the code for the function is provided by a .zip file in the request body. If the function name already exists, the existing Lambda function is updated with the new code and metadata. This op...
def upload_function(self, function_name, function_zip, runtime, role, handler, mode, description=None, timeout=None, memory_size=None):
uri = '/2014-11-13/functions/{0}'.format(function_name)
headers = {}
query_params = {}
if runtime is not None:
    query_params['Runtime'] = runtime
if role is not None:
    query_params['Role'] = role
if handler is not None:
    query_params['Handler'] = handler
if mode i...
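A sketch against this 2014-11-13 Lambda API, assuming boto.awslambda.connect_to_region is available; the role ARN and other values are illustrative:

import boto.awslambda

lam = boto.awslambda.connect_to_region('us-east-1')

with open('/tmp/function.zip', 'rb') as zf:
    lam.upload_function(function_name='hello',
                        function_zip=zf.read(),
                        runtime='nodejs',
                        role='arn:aws:iam::123456789012:role/lambda-exec',  # hypothetical role
                        handler='index.handler',
                        mode='event')

print(lam.list_functions())
lam.invoke_async('hello', b'{"key": "value"}')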
'Load a credential file that is set up like the one used by the Java utilities.'
def load_credential_file(self, path):
c_data = StringIO()
c_data.write('[Credentials]\n')
with open(path, 'r') as fp:
    for line in fp.readlines():
        c_data.write(line.replace('AWSAccessKeyId', 'aws_access_key_id').replace('AWSSecretKey', 'aws_secret_access_key'))
c_data.seek(0)
self.readfp(c_data)
'Write the specified Section.Option to the config file specified by path. Replace any previous value. If the path doesn\'t exist, create it. Also add the option to the in-memory config.'
def save_option(self, path, section, option, value):
config = ConfigParser()
config.read(path)
if not config.has_section(section):
    config.add_section(section)
config.set(section, option, value)
fp = open(path, 'w')
config.write(fp)
fp.close()
if not self.has_section(section):
    self.add_section(section)
self.set(secti...
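A sketch using boto.pyami.config.Config, which these helpers appear to belong to; paths are illustrative:

from boto.pyami.config import Config

cfg = Config()

# Converts Java-tool-style lines (AWSAccessKeyId=..., AWSSecretKey=...)
# into a [Credentials] section, as load_credential_file above shows.
cfg.load_credential_file('/home/user/.aws-credentials')   # hypothetical path

# Persists the value and mirrors it into the in-memory parser.
cfg.save_option('/etc/boto.cfg', 'Boto', 'debug', '2')
print(cfg.get('Boto', 'debug'))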
'Add an entry to the system crontab.'
def add_cron(self, name, minute, hour, mday, month, wday, who, command, env=None):
raise NotImplementedError
'Add an environment variable'
def add_env(self, key, value):
raise NotImplementedError
'Stop a service.'
def stop(self, service_name):
raise NotImplementedError
'Start a service.'
def start(self, service_name):
raise NotImplementedError
'Do whatever is necessary to "install" the package.'
def install(self):
raise NotImplementedError
'Write a file to /etc/cron.d to schedule a command. \'env\' is a dict containing environment variables you want to set in the file; \'name\' will be used as the name of the file.'
def add_cron(self, name, command, minute='*', hour='*', mday='*', month='*', wday='*', who='root', env=None):
if minute == 'random':
    minute = str(random.randrange(60))
if hour == 'random':
    hour = str(random.randrange(24))
fp = open('/etc/cron.d/%s' % name, 'w')
if env:
    for (key, value) in env.items():
        fp.write('%s=%s\n' % (key, value))
fp.write('%s %s %s ...
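A sketch of a call and the file it would produce, assuming 'installer' is an instance of the Installer subclass above:

# Writes /etc/cron.d/backup with env lines followed by the schedule line.
installer.add_cron('backup', '/usr/local/bin/backup.sh',
                   minute='0', hour='3', who='root',
                   env={'PATH': '/usr/bin:/bin'})
# Resulting /etc/cron.d/backup (sketch):
#   PATH=/usr/bin:/bin
#   0 3 * * * root /usr/local/bin/backup.sh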
'Add this file to the init.d directory'
def add_init_script(self, file, name):
f_path = os.path.join('/etc/init.d', name)
f = open(f_path, 'w')
f.write(file)
f.close()
os.chmod(f_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
self.run('/usr/sbin/update-rc.d %s defaults' % name)
'Add an environment variable. For Ubuntu, the best place is /etc/environment. Values placed here do not need to be exported.'
def add_env(self, key, value):
boto.log.info('Adding env variable: %s=%s' % (key, value))
if not os.path.exists('/etc/environment.orig'):
    self.run('cp /etc/environment /etc/environment.orig', notify=False, exit_on_error=False)
fp = open('/etc/environment', 'a')
fp.write('\n%s="%s"' % (key, value))
fp....
'Create a user on the local system'
def create_user(self, user):
self.run('useradd -m %s' % user)
usr = getpwnam(user)
return usr
'This is the only method you need to override'
def install(self):
raise NotImplementedError
'Verify parent of the start tag.'
def validateStartTag(self, tag, parent):
if self.current_tag != parent:
    raise InvalidLifecycleConfigError('Invalid tag %s found inside %s tag' % (tag, self.current_tag))
'Verify end tag against the start tag.'
def validateEndTag(self, tag):
if tag != self.current_tag:
    raise InvalidLifecycleConfigError('Mismatched start and end tags (%s/%s)' % (self.current_tag, tag))
'Validate the rule.'
def validate(self):
if not self.action:
    raise InvalidLifecycleConfigError('No action was specified in the rule')
if not self.conditions:
    raise InvalidLifecycleConfigError('No condition was specified for action %s' % self.action)
'Convert the rule into XML string representation.'
def to_xml(self):
s = ['<' + RULE + '>']
s.append('<' + ACTION + '>')
if self.action_text:
    s.extend(['<' + self.action + '>', self.action_text, '</' + self.action + '>'])
else:
    s.append('<' + self.action + '/>')
s.append('</' + ACTION + '>')
s.append('<' + CONDITION + '>...
'Convert LifecycleConfig object into XML string representation.'
def to_xml(self):
s = ['<?xml version="1.0" encoding="UTF-8"?>']
s.append('<' + LIFECYCLE_CONFIG + '>')
for rule in self:
    s.append(rule.to_xml())
s.append('</' + LIFECYCLE_CONFIG + '>')
return ''.join(s)
'Add a rule to this Lifecycle configuration. This only adds the rule to the local copy. To install the new rule(s) on the bucket, you need to pass this Lifecycle config object to the configure_lifecycle method of the Bucket object. :type action: str :param action: Action to be taken. :type action_text: str :param act...
def add_rule(self, action, action_text, conditions):
rule = Rule(action, action_text, conditions)
self.append(rule)
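A sketch for these GCS-style lifecycle classes, assuming the 'Age' condition name from the GCS lifecycle schema; the bucket name is illustrative:

import boto
from boto.gs.lifecycle import LifecycleConfig

config = LifecycleConfig()
# 'Delete' takes no action text, hence action_text=None; conditions is a
# dict of condition name to value (assumption: values render via str()).
config.add_rule('Delete', None, {'Age': 365})

bucket = boto.connect_gs().get_bucket('mybucket')   # hypothetical bucket
bucket.configure_lifecycle(config)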
'Returns a Key instance for an object in this bucket. Note that this method uses a HEAD request to check for the existence of the key. :type key_name: string :param key_name: The name of the key to retrieve :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will overr...
def get_key(self, key_name, headers=None, version_id=None, response_headers=None, generation=None):
query_args_l = []
if generation:
    query_args_l.append('generation=%s' % generation)
if response_headers:
    for (rk, rv) in six.iteritems(response_headers):
        query_args_l.append('%s=%s' % (rk, urllib.quote(rv)))
try:
    (key, resp) = self._get_key_internal(key_name, heade...
'Create a new key in the bucket by copying an existing key. :type new_key_name: string :param new_key_name: The name of the new key :type src_bucket_name: string :param src_bucket_name: The name of the source bucket :type src_key_name: string :param src_key_name: The name of the source key :type src_generation: int :pa...
def copy_key(self, new_key_name, src_bucket_name, src_key_name, metadata=None, src_version_id=None, storage_class='STANDARD', preserve_acl=False, encrypt_key=False, headers=None, query_args=None, src_generation=None):
if src_generation:
    headers = headers or {}
    headers['x-goog-copy-source-generation'] = str(src_generation)
return super(Bucket, self).copy_key(new_key_name, src_bucket_name, src_key_name, metadata=metadata, storage_class=storage_class, preserve_acl=preserve_acl, encrypt_key=encrypt_key, headers...
'List versioned objects within a bucket. This returns an instance of a VersionedBucketListResultSet that automatically handles all of the result paging, etc. from GCS. You just need to keep iterating until there are no more results. Called with no arguments, this will return an iterator object across all keys withi...
def list_versions(self, prefix='', delimiter='', marker='', generation_marker='', headers=None):
return VersionedBucketListResultSet(self, prefix, delimiter, marker, generation_marker, headers)
'See documentation in boto/s3/bucket.py.'
def validate_get_all_versions_params(self, params):
self.validate_kwarg_names(params, ['version_id_marker', 'delimiter', 'marker', 'generation_marker', 'prefix', 'max_keys'])
'Deletes a key from the bucket. :type key_name: string :param key_name: The key name to delete :type headers: dict :param headers: A dictionary of header name/value pairs. :type version_id: string :param version_id: Unused in this subclass. :type mfa_token: tuple or list of strings :param mfa_token: Unused in this subc...
def delete_key(self, key_name, headers=None, version_id=None, mfa_token=None, generation=None):
query_args_l = []
if generation:
    query_args_l.append('generation=%s' % generation)
self._delete_key_internal(key_name, headers=headers, version_id=version_id, mfa_token=mfa_token, query_args_l=query_args_l)
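A sketch of generation-aware reads and deletes against a versioned GCS bucket; names and the generation number are illustrative:

import boto

bucket = boto.connect_gs().get_bucket('mybucket')   # hypothetical bucket

# Each listed result carries its generation number.
for key in bucket.list_versions(prefix='logs/'):
    print('%s %s' % (key.name, key.generation))

# Fetch, then delete, one specific generation.
key = bucket.get_key('logs/app.log', generation=1360887697105000)
bucket.delete_key('logs/app.log', generation=key.generation)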
'Sets or changes a bucket\'s or key\'s ACL. :type acl_or_str: string or :class:`boto.gs.acl.ACL` :param acl_or_str: A canned ACL string (see :data:`~.gs.acl.CannedACLStrings`) or an ACL object. :type key_name: string :param key_name: A key name within the bucket to set the ACL for. If not specified, the ACL for the buc...
def set_acl(self, acl_or_str, key_name='', headers=None, version_id=None, generation=None, if_generation=None, if_metageneration=None):
if isinstance(acl_or_str, Policy):
    raise InvalidAclError('Attempt to set S3 Policy on GS ACL')
elif isinstance(acl_or_str, ACL):
    self.set_xml_acl(acl_or_str.to_xml(), key_name, headers=headers, generation=generation, if_generation=if_generation, if_metagene...
'Sets or changes a bucket\'s default ACL. :type acl_or_str: string or :class:`boto.gs.acl.ACL` :param acl_or_str: A canned ACL string (see :data:`~.gs.acl.CannedACLStrings`) or an ACL object. :type headers: dict :param headers: Additional headers to set during the request.'
def set_def_acl(self, acl_or_str, headers=None):
if isinstance(acl_or_str, Policy):
    raise InvalidAclError('Attempt to set S3 Policy on GS ACL')
elif isinstance(acl_or_str, ACL):
    self.set_def_xml_acl(acl_or_str.to_xml(), headers=headers)
else:
    self.set_def_canned_acl(acl_or_str, headers=headers)
'Provides common functionality for get_xml_acl and _get_acl_helper.'
def _get_xml_acl_helper(self, key_name, headers, query_args):
response = self.connection.make_request('GET', self.name, key_name, query_args=query_args, headers=headers)
body = response.read()
if response.status != 200:
    if response.status == 403:
        match = ERROR_DETAILS_REGEX.search(body)
        details = match.group('details') if match els...
'Provides common functionality for get_acl and get_def_acl.'
def _get_acl_helper(self, key_name, headers, query_args):
body = self._get_xml_acl_helper(key_name, headers, query_args)
acl = ACL(self)
h = handler.XmlHandler(acl, self)
xml.sax.parseString(body, h)
return acl
'Returns the ACL of the bucket or an object in the bucket. :param str key_name: The name of the object to get the ACL for. If not specified, the ACL for the bucket will be returned. :param dict headers: Additional headers to set during the request. :type version_id: string :param version_id: Unused in this subclass. :p...
def get_acl(self, key_name='', headers=None, version_id=None, generation=None):
query_args = STANDARD_ACL
if generation:
    query_args += '&generation=%s' % generation
return self._get_acl_helper(key_name, headers, query_args)
'Returns the ACL string of the bucket or an object in the bucket. :param str key_name: The name of the object to get the ACL for. If not specified, the ACL for the bucket will be returned. :param dict headers: Additional headers to set during the request. :type version_id: string :param version_id: Unused in this subcl...
def get_xml_acl(self, key_name='', headers=None, version_id=None, generation=None):
query_args = STANDARD_ACL
if generation:
    query_args += '&generation=%s' % generation
return self._get_xml_acl_helper(key_name, headers, query_args)
'Returns the bucket\'s default ACL. :param dict headers: Additional headers to set during the request. :rtype: :class:`.gs.acl.ACL`'
def get_def_acl(self, headers=None):
return self._get_acl_helper('', headers, DEF_OBJ_ACL)
'Provides common functionality for set_acl, set_xml_acl, set_canned_acl, set_def_acl, set_def_xml_acl, and set_def_canned_acl().'
def _set_acl_helper(self, acl_or_str, key_name, headers, query_args, generation, if_generation, if_metageneration, canned=False):
headers = headers or {}
data = ''
if canned:
    headers[self.connection.provider.acl_header] = acl_or_str
else:
    data = acl_or_str
if generation:
    query_args += '&generation=%s' % generation
if (if_metageneration is not None) and (if_generation is None):
    raise Va...
'Sets a bucket\'s or object\'s ACL to an XML string. :type acl_str: string :param acl_str: A string containing the ACL XML. :type key_name: string :param key_name: A key name within the bucket to set the ACL for. If not specified, the ACL for the bucket will be set. :type headers: dict :param headers: Additional heade...
def set_xml_acl(self, acl_str, key_name='', headers=None, version_id=None, query_args='acl', generation=None, if_generation=None, if_metageneration=None):
return self._set_acl_helper(acl_str, key_name=key_name, headers=headers, query_args=query_args, generation=generation, if_generation=if_generation, if_metageneration=if_metageneration)
'Sets a bucket\'s or object\'s ACL using a predefined (canned) value. :type acl_str: string :param acl_str: A canned ACL string. See :data:`~.gs.acl.CannedACLStrings`. :type key_name: string :param key_name: A key name within the bucket to set the ACL for. If not specified, the ACL for the bucket will be set. :type he...
def set_canned_acl(self, acl_str, key_name='', headers=None, version_id=None, generation=None, if_generation=None, if_metageneration=None):
if acl_str not in CannedACLStrings:
    raise ValueError('Provided canned ACL string (%s) is not valid.' % acl_str)
query_args = STANDARD_ACL
return self._set_acl_helper(acl_str, key_name, headers, query_args, generation, if_generation, if_metageneration, canned=True)
'Sets a bucket\'s default ACL using a predefined (canned) value. :type acl_str: string :param acl_str: A canned ACL string. See :data:`~.gs.acl.CannedACLStrings`. :type headers: dict :param headers: Additional headers to set during the request.'
def set_def_canned_acl(self, acl_str, headers=None):
if acl_str not in CannedACLStrings:
    raise ValueError('Provided canned ACL string (%s) is not valid.' % acl_str)
query_args = DEF_OBJ_ACL
return self._set_acl_helper(acl_str, '', headers, query_args, generation=None, if_generation=None, if_metageneration=None, canned=True)
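A sketch of the canned-ACL paths above; the canned strings must appear in CannedACLStrings (e.g. 'public-read', 'private' for GCS), and names are illustrative:

import boto

bucket = boto.connect_gs().get_bucket('mybucket')   # hypothetical bucket

# Canned object ACL; the string goes out as the provider's ACL header
# (x-goog-acl), as _set_acl_helper shows above.
bucket.set_canned_acl('public-read', key_name='www/index.html')

# Default object ACL applied to future objects in this bucket.
bucket.set_def_canned_acl('private')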
'Sets a bucket\'s default ACL to an XML string. :type acl_str: string :param acl_str: A string containing the ACL XML. :type headers: dict :param headers: Additional headers to set during the request.'
def set_def_xml_acl(self, acl_str, headers=None):
return self.set_xml_acl(acl_str, '', headers, query_args=DEF_OBJ_ACL)
'Returns a bucket\'s CORS XML document. :param dict headers: Additional headers to send with the request. :rtype: :class:`~.cors.Cors`'
def get_cors(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args=CORS_ARG, headers=headers)
body = response.read()
if response.status == 200:
    cors = Cors()
    h = handler.XmlHandler(cors, self)
    xml.sax.parseString(body, h)
    return cors
else:
    raise self.connectio...
'Sets a bucket\'s CORS XML document. :param str cors: A string containing the CORS XML. :param dict headers: Additional headers to send with the request.'
def set_cors(self, cors, headers=None):
response = self.connection.make_request('PUT', get_utf8_value(self.name), data=get_utf8_value(cors), query_args=CORS_ARG, headers=headers)
body = response.read()
if response.status != 200:
    raise self.connection.provider.storage_response_error(response.status, response.reason, body)
'Returns the StorageClass for the bucket. :rtype: str :return: The StorageClass for the bucket.'
def get_storage_class(self):
response = self.connection.make_request('GET', self.name, query_args=STORAGE_CLASS_ARG)
body = response.read()
if response.status == 200:
    rs = ResultSet(self)
    h = handler.XmlHandler(rs, self)
    xml.sax.parseString(body, h)
    return rs.StorageClass
else:
    raise self.c...
'Sets a bucket\'s storage class. :param str storage_class: A string containing the storage class. :param dict headers: Additional headers to send with the request.'
def set_storage_class(self, storage_class, headers=None):
req_body = self.StorageClassBody % get_utf8_value(storage_class)
self.set_subresource(STORAGE_CLASS_ARG, req_body, headers=headers)
'Convenience method that provides a quick way to add an email grant to a bucket. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUTs the new ACL back to GCS. :type permission: string :param permission: The permission being granted. Sho...
def add_email_grant(self, permission, email_address, recursive=False, headers=None):
if permission not in GSPermissions:
    raise self.connection.provider.storage_permissions_error('Unknown Permission: %s' % permission)
acl = self.get_acl(headers=headers)
acl.add_email_grant(permission, email_address)
self.set_acl(acl, headers=headers)
if recursive:
    for key in...
'Convenience method that provides a quick way to add a canonical user grant to a bucket. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUTs the new ACL back to GCS. :type permission: string :param permission: The permission being grant...
def add_user_grant(self, permission, user_id, recursive=False, headers=None):
if permission not in GSPermissions:
    raise self.connection.provider.storage_permissions_error('Unknown Permission: %s' % permission)
acl = self.get_acl(headers=headers)
acl.add_user_grant(permission, user_id)
self.set_acl(acl, headers=headers)
if recursive:
    for key in self:
        ...
'Convenience method that provides a quick way to add an email group grant to a bucket. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUTs the new ACL back to GCS. :type permission: string :param permission: The permission being grante...
def add_group_email_grant(self, permission, email_address, recursive=False, headers=None):
if permission not in GSPermissions:
    raise self.connection.provider.storage_permissions_error('Unknown Permission: %s' % permission)
acl = self.get_acl(headers=headers)
acl.add_group_email_grant(permission, email_address)
self.set_acl(acl, headers=headers)
if recursive:
    for ...
'Returns the ACL entries applied to this bucket. :param dict headers: Additional headers to send with the request. :rtype: list containing :class:`~.gs.acl.Entry` objects.'
def list_grants(self, headers=None):
acl = self.get_acl(headers=headers)
return acl.entries
'Disable logging on this bucket. :param dict headers: Additional headers to send with the request.'
def disable_logging(self, headers=None):
xml_str = '<?xml version="1.0" encoding="UTF-8"?><Logging/>'
self.set_subresource('logging', xml_str, headers=headers)
'Enable logging on a bucket. :type target_bucket: bucket or string :param target_bucket: The bucket to log to. :type target_prefix: string :param target_prefix: The prefix which should be prepended to the generated log files written to the target_bucket. :param dict headers: Additional headers to send with the request....
def enable_logging(self, target_bucket, target_prefix=None, headers=None):
if isinstance(target_bucket, Bucket):
    target_bucket = target_bucket.name
xml_str = '<?xml version="1.0" encoding="UTF-8"?><Logging>'
xml_str = xml_str + ('<LogBucket>%s</LogBucket>' % target_bucket)
if target_prefix:
    xml_str = xml_str + ('<LogObjectPrefix>%s</LogObjectPrefix>' %...
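A sketch of turning access logging on and off for a GCS bucket; bucket names are illustrative:

import boto

conn = boto.connect_gs()
bucket = conn.get_bucket('mybucket')          # hypothetical names
log_bucket = conn.get_bucket('mybucket-logs')

bucket.enable_logging(log_bucket, target_prefix='access-')
print(bucket.get_logging_config())
bucket.disable_logging()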
'Returns the current status of logging configuration on the bucket as unparsed XML. :param dict headers: Additional headers to send with the request. :rtype: 2-Tuple :returns: 2-tuple containing: 1) A dictionary containing the parsed XML response from GCS. The overall structure is: * Logging * LogObjectPrefix: Prefix t...
def get_logging_config_with_xml(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args='logging', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status != 200:
    raise self.connection.provider.storage_response_error(response.status, response.reason, body)
e = boto.jsonresponse.Elemen...
'Returns the current status of logging configuration on the bucket. :param dict headers: Additional headers to send with the request. :rtype: dict :returns: A dictionary containing the parsed XML response from GCS. The overall structure is: * Logging * LogObjectPrefix: Prefix that is prepended to log objects. * LogBuck...
def get_logging_config(self, headers=None):
return self.get_logging_config_with_xml(headers)[0]
'Configure this bucket to act as a website :type main_page_suffix: str :param main_page_suffix: Suffix that is appended to a request that is for a "directory" on the website endpoint (e.g. if the suffix is index.html and you make a request to samplebucket/images/ the data that is returned will be for the object with th...
def configure_website(self, main_page_suffix=None, error_key=None, headers=None):
if main_page_suffix:
    main_page_frag = self.WebsiteMainPageFragment % main_page_suffix
else:
    main_page_frag = ''
if error_key:
    error_frag = self.WebsiteErrorFragment % error_key
else:
    error_frag = ''
body = self.WebsiteBody % (main_page_frag, error_frag)
resp...
'Returns the current status of website configuration on the bucket. :param dict headers: Additional headers to send with the request. :rtype: dict :returns: A dictionary containing the parsed XML response from GCS. The overall structure is: * WebsiteConfiguration * MainPageSuffix: suffix that is appended to request tha...
def get_website_configuration(self, headers=None):
return self.get_website_configuration_with_xml(headers)[0]
'Returns the current status of website configuration on the bucket as unparsed XML. :param dict headers: Additional headers to send with the request. :rtype: 2-Tuple :returns: 2-tuple containing: 1) A dictionary containing the parsed XML response from GCS. The overall structure is: * WebsiteConfiguration * MainPageSuff...
def get_website_configuration_with_xml(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args='websiteConfig', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status != 200:
    raise self.connection.provider.storage_response_error(response.status, response.reason, body)
e = boto.jsonresponse....
'Remove the website configuration from this bucket. :param dict headers: Additional headers to send with the request.'
def delete_website_configuration(self, headers=None):
self.configure_website(headers=headers)
'Returns the current status of versioning configuration on the bucket. :rtype: bool'
def get_versioning_status(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args='versioning', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status != 200:
    raise self.connection.provider.storage_response_error(response.status, response.reason, body)
resp_json = boto.jsonresp...
'Configure versioning for this bucket. :param bool enabled: If set to True, enables versioning on this bucket. If set to False, disables versioning. :param dict headers: Additional headers to send with the request.'
def configure_versioning(self, enabled, headers=None):
if enabled == True:
    req_body = self.VersioningBody % 'Enabled'
else:
    req_body = self.VersioningBody % 'Suspended'
self.set_subresource('versioning', req_body, headers=headers)
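A short sketch of toggling versioning and reading it back; the bucket name is illustrative:

import boto

bucket = boto.connect_gs().get_bucket('mybucket')   # hypothetical bucket
bucket.configure_versioning(True)
print(bucket.get_versioning_status())   # True while versioning is enabled
bucket.configure_versioning(False)       # writes 'Suspended'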
'Returns the current lifecycle configuration on the bucket. :rtype: :class:`boto.gs.lifecycle.LifecycleConfig` :returns: A LifecycleConfig object that describes all current lifecycle rules in effect for the bucket.'
def get_lifecycle_config(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args=LIFECYCLE_ARG, headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 200:
    lifecycle_config = LifecycleConfig()
    h = handler.XmlHandler(lifecycle_config, self)
    xml.sax.parseString(bo...
'Configure lifecycle for this bucket. :type lifecycle_config: :class:`boto.gs.lifecycle.LifecycleConfig` :param lifecycle_config: The lifecycle configuration you want to configure for this bucket.'
def configure_lifecycle(self, lifecycle_config, headers=None):
xml = lifecycle_config.to_xml()
response = self.connection.make_request('PUT', get_utf8_value(self.name), data=get_utf8_value(xml), query_args=LIFECYCLE_ARG, headers=headers)
body = response.read()
if response.status == 200:
    return True
else:
    raise self.connection.provider.storage_...
'Constructor. Instantiate once for each uploaded file. :type tracker_file_name: string :param tracker_file_name: optional file name to save tracker URI. If supplied and the current process fails the upload, it can be retried in a new process. If called with an existing file containing a valid tracker URI, we\'ll resume...
def __init__(self, tracker_file_name=None, num_retries=None):
self.tracker_file_name = tracker_file_name
self.num_retries = num_retries
self.server_has_bytes = 0
self.tracker_uri = None
if tracker_file_name:
    self._load_tracker_uri_from_file()
self.upload_start_point = None
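A sketch of driving this handler through a GCS key upload, assuming boto.gs's set_contents_from_file accepts a res_upload_handler; paths and names are illustrative:

import boto
from boto.gs.resumable_upload_handler import ResumableUploadHandler

key = boto.connect_gs().get_bucket('mybucket').new_key('big.iso')  # hypothetical names
handler = ResumableUploadHandler(tracker_file_name='/tmp/big.iso.tracker', num_retries=6)

with open('/tmp/big.iso', 'rb') as fp:
    # If the process dies mid-transfer, rerunning this resumes from the
    # tracker URI persisted in the tracker file.
    key.set_contents_from_file(fp, res_upload_handler=handler)

print(handler.get_upload_id())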
'Saves URI to tracker file if one was passed to constructor.'
def _save_tracker_uri_to_file(self):
if not self.tracker_file_name:
    return
f = None
try:
    with os.fdopen(os.open(self.tracker_file_name, os.O_WRONLY | os.O_CREAT, 0o600), 'w') as f:
        f.write(self.tracker_uri)
except IOError as e:
    raise ResumableUploadException(("Couldn't write URI tracker fil...
'Called when we start a new resumable upload or get a new tracker URI for the upload. Saves URI and resets upload state. Raises InvalidUriError if URI is syntactically invalid.'
def _set_tracker_uri(self, uri):
parse_result = urlparse.urlparse(uri)
if (parse_result.scheme.lower() not in ['http', 'https']) or (not parse_result.netloc):
    raise InvalidUriError('Invalid tracker URI (%s)' % uri)
self.tracker_uri = uri
self.tracker_uri_host = parse_result.netloc
self.tracker_uri_path = '%s?%...
'Returns upload tracker URI, or None if the upload has not yet started.'
def get_tracker_uri(self):
return self.tracker_uri
'Returns the upload ID for the resumable upload, or None if the upload has not yet started.'
def get_upload_id(self):
delim = '?upload_id='
if self.tracker_uri and (delim in self.tracker_uri):
    return self.tracker_uri[self.tracker_uri.index(delim) + len(delim):]
else:
    return None
'Queries server to find out state of given upload. Note that this method really just makes special case use of the fact that the upload server always returns the current start/end state whenever a PUT doesn\'t complete. Returns HTTP response from sending request. Raises ResumableUploadException if problem querying serv...
def _query_server_state(self, conn, file_length):
put_headers = {}
put_headers['Content-Range'] = self._build_content_range_header('*', file_length)
put_headers['Content-Length'] = '0'
return AWSAuthConnection.make_request(conn, 'PUT', path=self.tracker_uri_path, auth_path=self.tracker_uri_path, headers=put_headers, host=self.tracker_uri_host)
'Queries server to find out what bytes it currently has. Returns (server_start, server_end), where the values are inclusive. For example, (0, 2) would mean that the server has bytes 0, 1, *and* 2. Raises ResumableUploadException if problem querying server.'
def _query_server_pos(self, conn, file_length):
resp = self._query_server_state(conn, file_length)
if resp.status == 200:
    return (0, file_length - 1)
if resp.status != 308:
    raise ResumableUploadException('Got non-308 response (%s) from server state query' % resp.status, ResumableTransferDisposition.START_OVER)...
'Starts a new resumable upload. Raises ResumableUploadException if any errors occur.'
def _start_new_resumable_upload(self, key, headers=None):
conn = key.bucket.connection
if conn.debug >= 1:
    print('Starting new resumable upload.')
self.server_has_bytes = 0
post_headers = {}
for k in headers:
    if k.lower() == 'content-length':
        raise ResumableUploadException('Attempt to specify Content-Length ...
'Makes one attempt to upload file bytes, using an existing resumable upload connection. Returns (etag, generation, metageneration) from server upon success. Raises ResumableUploadException if any problems occur.'
def _upload_file_bytes(self, conn, http_conn, fp, file_length, total_bytes_uploaded, cb, num_cb, headers):
buf = fp.read(self.BUFFER_SIZE)
if cb:
    if num_cb > 2:
        cb_count = (file_length / self.BUFFER_SIZE) / (num_cb - 2)
    elif num_cb < 0:
        cb_count = -1
    else:
        cb_count = 0
    i = 0
    cb(total_bytes_uploaded, file_length)
if not headers:...
'Attempts a resumable upload. Returns (etag, generation, metageneration) from server upon success. Raises ResumableUploadException if any problems occur.'
def _attempt_resumable_upload(self, key, fp, file_length, headers, cb, num_cb):
(server_start, server_end) = self.SERVER_HAS_NOTHING
conn = key.bucket.connection
if self.tracker_uri:
    try:
        (server_start, server_end) = self._query_server_pos(conn, file_length)
        self.server_has_bytes = server_start
        if server_end:
            print('Catching ...
'Checks that etag from server agrees with md5 computed before upload. This is important, since the upload could have spanned a number of hours and multiple processes (e.g., gsutil runs), and the user could change some of the file and not realize they have inconsistent data.'
def _check_final_md5(self, key, etag):
if key.bucket.connection.debug >= 1:
    print('Checking md5 against etag.')
if key.md5 != etag.strip('"\''):
    key.open_read()
    key.close()
    key.delete()
    raise ResumableUploadException("File changed during upload: md5 signature doesn't match e...