desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def put(self, url, data=None, **kwargs):
    r"""Send a PUT request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send
        in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    return self.request('PUT', url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs):
    r"""Send a PATCH request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send
        in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    return self.request('PATCH', url, data=data, **kwargs)
def delete(self, url, **kwargs):
    r"""Send a DELETE request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    return self.request('DELETE', url, **kwargs)
'Send a given PreparedRequest.'
def send(self, request, **kwargs):
kwargs.setdefault('stream', self.stream) kwargs.setdefault('verify', self.verify) kwargs.setdefault('cert', self.cert) kwargs.setdefault('proxies', self.proxies) if (not isinstance(request, PreparedRequest)): raise ValueError('You can only send PreparedRequests.') checked_url...
'Check the environment and merge it with some settings.'
def merge_environment_settings(self, url, proxies, stream, verify, cert):
if self.trust_env: env_proxies = (get_environ_proxies(url) or {}) for (k, v) in env_proxies.items(): proxies.setdefault(k, v) if ((verify is True) or (verify is None)): verify = (os.environ.get('REQUESTS_CA_BUNDLE') or os.environ.get('CURL_CA_BUNDLE')) proxies = m...
def get_adapter(self, url):
    """Return the appropriate connection adapter for the given URL.

    Adapter prefixes are matched case-insensitively against the start of
    ``url``; the first matching adapter (in registration/sort order) wins.

    :param url: The URL whose prefix selects the adapter.
    :raises InvalidSchema: If no registered adapter prefix matches.
    """
    # Hoist the loop-invariant lowercasing out of the loop; the original
    # docstring also misspelled "connection".
    lowered = url.lower()
    for prefix, adapter in self.adapters.items():
        if lowered.startswith(prefix):
            return adapter
    raise InvalidSchema(("No connection adapters were found for '%s'" % url))
def close(self):
    """Close every mounted adapter, and with them the session itself."""
    for adapter in self.adapters.values():
        adapter.close()
def mount(self, prefix, adapter):
    """Register a connection adapter for a URL prefix.

    Adapters are kept sorted in descending order by prefix length, so the
    most specific prefix is always found first.
    """
    self.adapters[prefix] = adapter
    # Re-insert every shorter prefix after the new one so that the mapping
    # stays ordered longest-prefix-first.
    for key in [k for k in self.adapters if len(k) < len(prefix)]:
        self.adapters[key] = self.adapters.pop(key)
def lower_items(self):
    """Like iteritems(), but with all lowercase keys.

    ``_store`` maps lowercased key -> (original_key, value); yield the
    lowercased key paired with the stored value.
    """
    return ((key, pair[1]) for key, pair in self._store.items())
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
    """Initialize a urllib3 PoolManager.

    This should not be called from user code; it is exposed for use when
    subclassing :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param connections: Number of urllib3 connection pools to cache.
    :param maxsize: Maximum number of connections to save per pool.
    :param block: Whether to block when no free connections are available.
    :param pool_kwargs: Extra keyword arguments for the pool manager.
    """
    # Remember the settings so matching proxy managers can be built later.
    self._pool_connections = connections
    self._pool_maxsize = maxsize
    self._pool_block = block
    self.poolmanager = PoolManager(
        num_pools=connections,
        maxsize=maxsize,
        block=block,
        strict=True,
        **pool_kwargs,
    )
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Return the urllib3 ProxyManager for the given proxy, creating and
    caching one on first use.

    This should not be called from user code; it is exposed for use when
    subclassing :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param proxy: The proxy URL to return a urllib3 ProxyManager for.
    :param proxy_kwargs: Extra keyword arguments for the proxy manager.
    """
    if proxy not in self.proxy_manager:
        headers = self.proxy_headers(proxy)
        self.proxy_manager[proxy] = proxy_from_url(
            proxy,
            proxy_headers=headers,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs,
        )
    return self.proxy_manager[proxy]
'Verify a SSL certificate. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param conn: The urllib3 connection object associated with the cert. :param url: The requested URL. :param verify: Whether we should actually...
def cert_verify(self, conn, url, verify, cert):
if (url.lower().startswith('https') and verify): cert_loc = None if (verify is not True): cert_loc = verify if (not cert_loc): cert_loc = DEFAULT_CA_BUNDLE_PATH if (not cert_loc): raise Exception('Could not find a suitable SSL CA ...
'Builds a :class:`Response <requests.Response>` object from a urllib3 response. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. :param...
def build_response(self, req, resp):
response = Response() response.status_code = getattr(resp, 'status', None) response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) response.encoding = get_encoding_from_headers(response.headers) response.raw = resp response.reason = response.raw.reason if isinstance(req.url, byt...
'Returns a urllib3 connection for the given URL. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param url: The URL to connect to. :param proxies: (optional) A Requests-style dictionary of proxies used on this request.'
def get_connection(self, url, proxies=None):
proxies = (proxies or {}) proxy = proxies.get(urlparse(url.lower()).scheme) if proxy: proxy = prepend_scheme_if_needed(proxy, 'http') proxy_manager = self.proxy_manager_for(proxy) conn = proxy_manager.connection_from_url(url) else: parsed = urlparse(url) url = par...
def close(self):
    """Dispose of internal state by clearing the PoolManager, which closes
    any pooled connections."""
    self.poolmanager.clear()
def request_url(self, request, proxies):
    """Decide which URL goes on the request line.

    Through an HTTP (non-https) proxy the full URL must be sent; otherwise
    only the path portion is used. This should not be called from user
    code; it is exposed for subclassing :class:`HTTPAdapter`.
    """
    scheme = urlparse(request.url).scheme
    proxy = (proxies or {}).get(scheme)
    if proxy and scheme != 'https':
        return urldefragauth(request.url)
    return request.path_url
def add_headers(self, request, **kwargs):
    """Hook for adding connection-specific headers.

    Does nothing by default (as of v2.0); left here so subclasses of
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` can override it.
    Should not be called from user code.
    """
    pass
def proxy_headers(self, proxy):
    """Build the headers to add to requests sent through a proxy.

    urllib3 sends these to the proxy itself rather than inside a tunnelled
    request when CONNECT is used. Should not be called from user code;
    exposed for subclassing :class:`HTTPAdapter`.

    :param proxy: The proxy URL (may embed ``user:pass@`` credentials).
    """
    headers = {}
    username, password = get_auth_from_url(proxy)
    if username and password:
        headers['Proxy-Authorization'] = _basic_auth_str(username, password)
    return headers
'Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (`connect timeout...
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
conn = self.get_connection(request.url, proxies) self.cert_verify(conn, request.url, verify, cert) url = self.request_url(request, proxies) self.add_headers(request) chunked = (not ((request.body is None) or ('Content-Length' in request.headers))) if isinstance(timeout, tuple): try: ...
def is_package(self, fullname):
    """Return True if the named module is a package.

    Needed to build correct spec objects with Python 3.4 (see PEP 451):
    packages are distinguished by the presence of ``__path__``.
    """
    module = self.__get_module(fullname)
    return hasattr(module, '__path__')
def get_code(self, fullname):
    """Return None; required once is_package is implemented.

    The module is still resolved for its side effects, but this loader
    exposes no code object.
    """
    self.__get_module(fullname)
    return None
def add_rule(self, rule):
    """Append a routing rule and return ``self`` for call chaining.

    :type rule: :class:`boto.s3.website.RoutingRule`
    :param rule: A routing rule.
    :return: This ``RoutingRules`` object, so calls can be chained.
    """
    self.append(rule)
    return self
def to_xml(self):
    """Return the concatenated XML of every transition, as defined by S3."""
    return ''.join(transition.to_xml() for transition in self)
def add_transition(self, days=None, date=None, storage_class=None):
    """Add a transition to this Lifecycle configuration (local copy only).

    To install the new rule(s) on the bucket, pass this Lifecycle config
    object to the Bucket's ``configure_lifecycle`` method.

    :param days: Number of days until the object should be moved.
    :param date: Date on which the object should be moved.
    :param storage_class: Target storage class for the transition.
    """
    self.append(Transition(days, date, storage_class))
def to_xml(self):
    """Return the full Lifecycle configuration as an S3 XML document."""
    rules_xml = ''.join(rule.to_xml() for rule in self)
    return ('<?xml version="1.0" encoding="UTF-8"?>'
            + '<LifecycleConfiguration>'
            + rules_xml
            + '</LifecycleConfiguration>')
def add_rule(self, id=None, prefix='', status='Enabled', expiration=None, transition=None):
    """Add a rule to this Lifecycle configuration (local copy only).

    To install the new rule(s) on the bucket, pass this Lifecycle config
    object to the Bucket's ``configure_lifecycle`` method.

    :type id: str
    :param id: Unique identifier for the rule.
    :param prefix: Key prefix the rule applies to.
    :param status: 'Enabled' or 'Disabled'.
    :param expiration: Expiration settings for the rule.
    :param transition: Transition settings for the rule.
    """
    self.append(Rule(id, prefix, status, expiration, transition))
def connect(self, **kw_params):
    """Connect to this Region's endpoint.

    Any keyword arguments accepted by the connection class's constructor
    are passed through. Returns None when no connection class is set.

    :rtype: Connection object
    :return: The connection to this region's endpoint.
    """
    if not self.connection_cls:
        return None
    return self.connection_cls(host=self.endpoint, **kw_params)
def set_key_class(self, key_class):
    """Override the Key class associated with this bucket.

    By default this is ``boto.s3.key.Key``; supply a subclass here and
    ``new_key()`` and bucket listings will produce instances of it.

    :param key_class: The new Key class to use with this bucket.
    """
    self.key_class = key_class
def lookup(self, key_name, headers=None):
    """Deprecated: please use the ``get_key`` method instead.

    :type key_name: string
    :param key_name: The name of the key to retrieve.
    :rtype: :class:`boto.s3.key.Key`
    :returns: A Key object from this bucket.
    """
    return self.get_key(key_name, headers=headers)
'Check to see if a particular key exists within the bucket. This method uses a HEAD request to check for the existence of the key. Returns: An instance of a Key object or None :param key_name: The name of the key to retrieve :type key_name: string :param headers: The headers to send when retrieving the key :type heade...
def get_key(self, key_name, headers=None, version_id=None, response_headers=None, validate=True):
if (validate is False): if (headers or version_id or response_headers): raise BotoClientError(("When providing 'validate=False', no other params " + 'are allowed.')) return self.new_key(key_name) query_args_l = [] if version_id: query_args_l.append(('...
def list(self, prefix='', delimiter='', marker='', headers=None, encoding_type=None):
    """Iterate over the key objects within this bucket.

    Returns a BucketListResultSet that transparently handles all of the
    S3 result paging — just keep iterating until it is exhausted. With no
    arguments it iterates across every key in the bucket.
    """
    return BucketListResultSet(self, prefix, delimiter, marker, headers,
                               encoding_type=encoding_type)
def list_versions(self, prefix='', delimiter='', key_marker='', version_id_marker='', headers=None, encoding_type=None):
    """Iterate over the version objects within this bucket.

    Returns a VersionedBucketListResultSet that transparently handles all
    of the S3 result paging — just keep iterating until it is exhausted.
    """
    return VersionedBucketListResultSet(self, prefix, delimiter, key_marker,
                                        version_id_marker, headers,
                                        encoding_type=encoding_type)
def list_multipart_uploads(self, key_marker='', upload_id_marker='', headers=None, encoding_type=None):
    """Iterate over the active multipart uploads within this bucket.

    Returns a MultiPartUploadListResultSet that transparently handles all
    of the S3 result paging — just keep iterating until it is exhausted.
    """
    return MultiPartUploadListResultSet(self, key_marker, upload_id_marker,
                                        headers, encoding_type=encoding_type)
def validate_kwarg_names(self, kwargs, names):
    """Check that every named argument appears in ``names``.

    :type kwargs: dict
    :param kwargs: Dictionary of kwargs to validate.
    :type names: list
    :param names: List of permitted argument names.
    :raises TypeError: If any kwarg is not in ``names``.
    """
    for name in kwargs:
        if name not in names:
            raise TypeError(('Invalid argument "%s"!' % name))
def get_all_keys(self, headers=None, **params):
    """Low-level listing of bucket contents.

    Mirrors the actual S3 API: paging of results must be handled manually.
    For automatic paging use the higher-level ``list`` method instead.
    Accepted params: maxkeys/max_keys, prefix, marker, delimiter,
    encoding_type.
    """
    allowed = ['maxkeys', 'max_keys', 'prefix', 'marker', 'delimiter',
               'encoding_type']
    self.validate_kwarg_names(params, allowed)
    element_map = [('Contents', self.key_class), ('CommonPrefixes', Prefix)]
    return self._get_all(element_map, '', headers, **params)
def get_all_versions(self, headers=None, **params):
    """Low-level, version-aware listing of bucket contents.

    Mirrors the actual S3 API: paging of results must be handled manually.
    For automatic paging use the higher-level ``list`` method instead.
    """
    self.validate_get_all_versions_params(params)
    element_map = [('Version', self.key_class),
                   ('CommonPrefixes', Prefix),
                   ('DeleteMarker', DeleteMarker)]
    return self._get_all(element_map, 'versions', headers, **params)
def validate_get_all_versions_params(self, params):
    """Validate the parameters passed to ``get_all_versions``.

    Subclasses that accept a different parameter set override this.

    :type params: dict
    :param params: Parameters to validate.
    """
    allowed = ['maxkeys', 'max_keys', 'prefix', 'key_marker',
               'version_id_marker', 'delimiter', 'encoding_type']
    self.validate_kwarg_names(params, allowed)
def get_all_multipart_uploads(self, headers=None, **params):
    """Low-level listing of active MultiPart uploads for this bucket.

    Mirrors the actual S3 API: paging of results must be handled manually.
    For automatic paging use the higher-level ``list`` method instead.
    """
    allowed = ['max_uploads', 'key_marker', 'upload_id_marker',
               'encoding_type', 'delimiter', 'prefix']
    self.validate_kwarg_names(params, allowed)
    element_map = [('Upload', MultiPartUpload), ('CommonPrefixes', Prefix)]
    return self._get_all(element_map, 'uploads', headers, **params)
def new_key(self, key_name=None):
    """Create a new key object in this bucket.

    :type key_name: string
    :param key_name: The name of the key to create.
    :rtype: :class:`boto.s3.key.Key` or subclass
    :returns: An instance of the newly created key object.
    :raises ValueError: If ``key_name`` is empty or None.
    """
    if not key_name:
        raise ValueError('Empty key names are not allowed')
    return self.key_class(self, key_name)
'Deletes a set of keys using S3\'s Multi-object delete API. If a VersionID is specified for that key then that version is removed. Returns a MultiDeleteResult Object, which contains Deleted and Error elements for each key you ask to delete. :type keys: list :param keys: A list of either key_names or (key_name, versioni...
def delete_keys(self, keys, quiet=False, mfa_token=None, headers=None):
ikeys = iter(keys) result = MultiDeleteResult(self) provider = self.connection.provider query_args = 'delete' def delete_keys2(hdrs): hdrs = (hdrs or {}) data = u'<?xml version="1.0" encoding="UTF-8"?>' data += u'<Delete>' if quiet: data += u'<Quiet>...
def delete_key(self, key_name, headers=None, version_id=None, mfa_token=None):
    """Delete a key (or one version of it) from the bucket.

    :type key_name: string
    :param key_name: The key name to delete.
    :type version_id: string
    :param version_id: (optional) Version ID; only that version is removed.
    :param mfa_token: (optional) Tuple/list of the MFA device serial number
        and current token, required when the bucket enables MFADelete.
    :raises ValueError: If ``key_name`` is empty.
    """
    if not key_name:
        raise ValueError('Empty key names are not allowed')
    return self._delete_key_internal(key_name, headers=headers,
                                     version_id=version_id,
                                     mfa_token=mfa_token,
                                     query_args_l=None)
'Create a new key in the bucket by copying another existing key. :type new_key_name: string :param new_key_name: The name of the new key :type src_bucket_name: string :param src_bucket_name: The name of the source bucket :type src_key_name: string :param src_key_name: The name of the source key :type src_version_id: st...
def copy_key(self, new_key_name, src_bucket_name, src_key_name, metadata=None, src_version_id=None, storage_class='STANDARD', preserve_acl=False, encrypt_key=False, headers=None, query_args=None):
headers = (headers or {}) provider = self.connection.provider src_key_name = boto.utils.get_utf8_value(src_key_name) if preserve_acl: if (self.name == src_bucket_name): src_bucket = self else: src_bucket = self.connection.get_bucket(src_bucket_name, validate=False...
'Set a subresource for a bucket or key. :type subresource: string :param subresource: The subresource to set. :type value: string :param value: The value of the subresource. :type key_name: string :param key_name: The key to operate on, or None to operate on the bucket. :type headers: dict :param headers: Additional HT...
def set_subresource(self, subresource, value, key_name='', headers=None, version_id=None):
if (not subresource): raise TypeError('set_subresource called with subresource=None') query_args = subresource if version_id: query_args += ('&versionId=%s' % version_id) if (not isinstance(value, bytes)): value = value.encode('utf-8') response = self.connection.make...
'Get a subresource for a bucket or key. :type subresource: string :param subresource: The subresource to get. :type key_name: string :param key_name: The key to operate on, or None to operate on the bucket. :type headers: dict :param headers: Additional HTTP headers to include in the request. :type src_version_id: stri...
def get_subresource(self, subresource, key_name='', headers=None, version_id=None):
if (not subresource): raise TypeError('get_subresource called with subresource=None') query_args = subresource if version_id: query_args += ('&versionId=%s' % version_id) response = self.connection.make_request('GET', self.name, key_name, query_args=query_args, headers=headers) ...
'Convenience method that provides a quick way to add an email grant to a bucket. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUT\'s the new ACL back to S3. :type permission: string :param permission: The permission being granted. Shou...
def add_email_grant(self, permission, email_address, recursive=False, headers=None):
if (permission not in S3Permissions): raise self.connection.provider.storage_permissions_error(('Unknown Permission: %s' % permission)) policy = self.get_acl(headers=headers) policy.acl.add_email_grant(permission, email_address) self.set_acl(policy, headers=headers) if recursive: ...
'Convenience method that provides a quick way to add a canonical user grant to a bucket. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUT\'s the new ACL back to S3. :type permission: string :param permission: The permission being gran...
def add_user_grant(self, permission, user_id, recursive=False, headers=None, display_name=None):
if (permission not in S3Permissions): raise self.connection.provider.storage_permissions_error(('Unknown Permission: %s' % permission)) policy = self.get_acl(headers=headers) policy.acl.add_user_grant(permission, user_id, display_name=display_name) self.set_acl(policy, headers=headers) ...
'Returns the LocationConstraint for the bucket. :rtype: str :return: The LocationConstraint for the bucket or the empty string if no constraint was specified when bucket was created.'
def get_location(self):
response = self.connection.make_request('GET', self.name, query_args='location') body = response.read() if (response.status == 200): rs = ResultSet(self) h = handler.XmlHandler(rs, self) if (not isinstance(body, bytes)): body = body.encode('utf-8') xml.sax.parseSt...
'Set logging on a bucket directly to the given xml string. :type logging_str: unicode string :param logging_str: The XML for the bucketloggingstatus which will be set. The string will be converted to utf-8 before it is sent. Usually, you will obtain this XML from the BucketLogging object. :rtype: bool :return: True i...
def set_xml_logging(self, logging_str, headers=None):
body = logging_str if (not isinstance(body, bytes)): body = body.encode('utf-8') response = self.connection.make_request('PUT', self.name, data=body, query_args='logging', headers=headers) body = response.read() if (response.status == 200): return True else: raise self.co...
def enable_logging(self, target_bucket, target_prefix='', grants=None, headers=None):
    """Enable logging on this bucket.

    :type target_bucket: bucket or string
    :param target_bucket: The bucket the log files are written to.
    :param target_prefix: Prefix prepended to generated log file names.
    :param grants: (optional) List of extra Grant objects for the logs.
    """
    if isinstance(target_bucket, Bucket):
        target_bucket = target_bucket.name
    status = BucketLogging(target=target_bucket, prefix=target_prefix,
                           grants=grants)
    return self.set_xml_logging(status.to_xml(), headers=headers)
def disable_logging(self, headers=None):
    """Disable logging on this bucket by uploading an empty logging status.

    :rtype: bool
    :return: True on success; otherwise an exception is raised.
    """
    empty_status = BucketLogging()
    return self.set_xml_logging(empty_status.to_xml(), headers=headers)
'Get the logging status for this bucket. :rtype: :class:`boto.s3.bucketlogging.BucketLogging` :return: A BucketLogging object for this bucket.'
def get_logging_status(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args='logging', headers=headers) body = response.read() if (response.status == 200): blogging = BucketLogging() h = handler.XmlHandler(blogging, self) if (not isinstance(body, bytes)): body = body.encode('utf...
def set_as_logging_target(self, headers=None):
    """Make this bucket usable as a logging target.

    Grants the LogDelivery group the WRITE and READ_ACP permissions it
    needs to deposit log files here, then saves the updated ACL.
    """
    policy = self.get_acl(headers=headers)
    for permission in ('WRITE', 'READ_ACP'):
        grant = Grant(permission=permission, type='Group',
                      uri=self.LoggingGroup)
        policy.acl.add_grant(grant)
    self.set_acl(policy, headers=headers)
'Configure versioning for this bucket. ..note:: This feature is currently in beta. :type versioning: bool :param versioning: A boolean indicating whether version is enabled (True) or disabled (False). :type mfa_delete: bool :param mfa_delete: A boolean indicating whether the Multi-Factor Authentication Delete feature i...
def configure_versioning(self, versioning, mfa_delete=False, mfa_token=None, headers=None):
if versioning: ver = 'Enabled' else: ver = 'Suspended' if mfa_delete: mfa = 'Enabled' else: mfa = 'Disabled' body = (self.VersioningBody % (ver, mfa)) if mfa_token: if (not headers): headers = {} provider = self.connection.provider ...
'Returns the current status of versioning on the bucket. :rtype: dict :returns: A dictionary containing a key named \'Versioning\' that can have a value of either Enabled, Disabled, or Suspended. Also, if MFADelete has ever been enabled on the bucket, the dictionary will contain a key named \'MFADelete\' which will hav...
def get_versioning_status(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args='versioning', headers=headers) body = response.read() if (not isinstance(body, six.string_types)): body = body.decode('utf-8') boto.log.debug(body) if (response.status == 200): d = {} ver = re.search(self.Ve...
'Configure lifecycle for this bucket. :type lifecycle_config: :class:`boto.s3.lifecycle.Lifecycle` :param lifecycle_config: The lifecycle configuration you want to configure for this bucket.'
def configure_lifecycle(self, lifecycle_config, headers=None):
xml = lifecycle_config.to_xml() fp = StringIO(xml) md5 = boto.utils.compute_md5(fp) if (headers is None): headers = {} headers['Content-MD5'] = md5[1] headers['Content-Type'] = 'text/xml' response = self.connection.make_request('PUT', self.name, data=fp.getvalue(), query_args='lifecy...
'Returns the current lifecycle configuration on the bucket. :rtype: :class:`boto.s3.lifecycle.Lifecycle` :returns: A LifecycleConfig object that describes all current lifecycle rules in effect for the bucket.'
def get_lifecycle_config(self, headers=None):
response = self.connection.make_request('GET', self.name, query_args='lifecycle', headers=headers) body = response.read() boto.log.debug(body) if (response.status == 200): lifecycle = Lifecycle() h = handler.XmlHandler(lifecycle, self) if (not isinstance(body, bytes)): ...
'Removes all lifecycle configuration from the bucket.'
def delete_lifecycle_configuration(self, headers=None):
response = self.connection.make_request('DELETE', self.name, query_args='lifecycle', headers=headers) body = response.read() boto.log.debug(body) if (response.status == 204): return True else: raise self.connection.provider.storage_response_error(response.status, response.reason, bod...
def configure_website(self, suffix=None, error_key=None, redirect_all_requests_to=None, routing_rules=None, headers=None):
    """Configure this bucket to act as a website.

    :param suffix: Suffix appended to "directory" requests on the website
        endpoint (e.g. ``index.html`` turns ``images/`` into
        ``images/index.html``). Must not be empty or contain ``/``.
    :param error_key: Key of the object returned on 4XX errors.
    :param redirect_all_requests_to: RedirectLocation describing a global
        redirect for every request to this endpoint.
    :param routing_rules: RoutingRules for conditional redirects.
    """
    config = website.WebsiteConfiguration(suffix, error_key,
                                          redirect_all_requests_to,
                                          routing_rules)
    return self.set_website_configuration(config, headers=headers)
def set_website_configuration(self, config, headers=None):
    """Apply a website configuration to this bucket.

    :type config: boto.s3.website.WebsiteConfiguration
    :param config: Configuration data to upload (serialized to XML).
    """
    return self.set_website_configuration_xml(config.to_xml(),
                                              headers=headers)
def set_website_configuration_xml(self, xml, headers=None):
    """Upload raw XML website configuration to this bucket.

    :rtype: bool
    :return: True on HTTP 200; otherwise raises a storage response error.
    """
    response = self.connection.make_request('PUT', self.name, data=xml,
                                            query_args='website',
                                            headers=headers)
    body = response.read()
    if response.status != 200:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
    return True
def get_website_configuration(self, headers=None):
    """Return the bucket's website configuration as a parsed structure.

    :rtype: dict
    :returns: A Python representation of the S3 XML response (the parsed
        element only; use ``get_website_configuration_with_xml`` to also
        obtain the raw XML).
    """
    parsed, _raw = self.get_website_configuration_with_xml(headers)
    return parsed
def get_website_configuration_obj(self, headers=None):
    """Fetch the website configuration as a
    :class:`boto.s3.website.WebsiteConfiguration` object."""
    raw_xml = self.get_website_configuration_xml(headers=headers)
    config = website.WebsiteConfiguration()
    xml.sax.parseString(raw_xml, handler.XmlHandler(config, self))
    return config
def get_website_configuration_with_xml(self, headers=None):
    """Return the website configuration as (parsed, raw_xml).

    :rtype: 2-tuple
    :returns: A jsonresponse Element holding the parsed configuration,
        and the unparsed XML document it was built from.
    """
    body = self.get_website_configuration_xml(headers=headers)
    element = boto.jsonresponse.Element()
    parser = boto.jsonresponse.XmlHandler(element, None)
    parser.parse(body)
    return (element, body)
def get_website_configuration_xml(self, headers=None):
    """Fetch the raw website configuration XML for this bucket."""
    response = self.connection.make_request('GET', self.name,
                                            query_args='website',
                                            headers=headers)
    body = response.read().decode('utf-8')
    boto.log.debug(body)
    if response.status != 200:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
    return body
def delete_website_configuration(self, headers=None):
    """Remove all website configuration from the bucket.

    :rtype: bool
    :return: True on HTTP 204; otherwise raises a storage response error.
    """
    response = self.connection.make_request('DELETE', self.name,
                                            query_args='website',
                                            headers=headers)
    body = response.read()
    boto.log.debug(body)
    if response.status != 204:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
    return True
def get_website_endpoint(self):
    """Return the fully qualified hostname for this bucket's website.

    Does not validate whether the bucket is actually configured as a
    website. Built as ``<bucket>.<region-endpoint>.<root-domain>``.
    """
    region = S3WebsiteEndpointTranslate.translate_region(self.get_location())
    root_domain = '.'.join(self.connection.host.split('.')[-2:])
    return '.'.join([self.name, region, root_domain])
def get_policy(self, headers=None):
    """Return the JSON policy associated with the bucket.

    The policy is returned as an uninterpreted JSON string.
    """
    response = self.connection.make_request('GET', self.name,
                                            query_args='policy',
                                            headers=headers)
    body = response.read()
    if response.status != 200:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
    return body
'Add or replace the JSON policy associated with the bucket. :type policy: str :param policy: The JSON policy as a string.'
def set_policy(self, policy, headers=None):
response = self.connection.make_request('PUT', self.name, data=policy, query_args='policy', headers=headers) body = response.read() if ((response.status >= 200) and (response.status <= 204)): return True else: raise self.connection.provider.storage_response_error(response.status, respons...
'Set the CORS (Cross-Origin Resource Sharing) for a bucket. :type cors_xml: str :param cors_xml: The XML document describing your desired CORS configuration. See the S3 documentation for details of the exact syntax required.'
def set_cors_xml(self, cors_xml, headers=None):
fp = StringIO(cors_xml) md5 = boto.utils.compute_md5(fp) if (headers is None): headers = {} headers['Content-MD5'] = md5[1] headers['Content-Type'] = 'text/xml' response = self.connection.make_request('PUT', self.name, data=fp.getvalue(), query_args='cors', headers=headers) body = re...
def set_cors(self, cors_config, headers=None):
    """Set the CORS for this bucket from a CORSConfiguration object.

    :type cors_config: :class:`boto.s3.cors.CORSConfiguration`
    :param cors_config: The CORS configuration to apply to this bucket.
    :param headers: (optional) Additional HTTP headers to send with the
        request.
    """
    # Bug fix: `headers` was previously accepted but silently dropped —
    # it is now forwarded to set_cors_xml like every sibling method does.
    return self.set_cors_xml(cors_config.to_xml(), headers=headers)
def get_cors_xml(self, headers=None):
    """Return the bucket's current CORS configuration as an XML
    document.

    :raises: the provider's storage_response_error on any non-200
        status.
    """
    response = self.connection.make_request('GET', self.name,
                                            query_args='cors',
                                            headers=headers)
    body = response.read()
    boto.log.debug(body)
    if response.status != 200:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
    return body
def get_cors(self, headers=None):
    """Return the bucket's CORS configuration.

    :rtype: :class:`boto.s3.cors.CORSConfiguration`
    :returns: An object describing all CORS rules currently in effect
        for the bucket.
    """
    config = CORSConfiguration()
    # Parse the XML returned by the service into the config object.
    sax_handler = handler.XmlHandler(config, self)
    xml.sax.parseString(self.get_cors_xml(headers), sax_handler)
    return config
def delete_cors(self, headers=None):
    """Remove every CORS rule from the bucket.

    :returns: True on success (HTTP 204); otherwise raises the
        provider's storage error.
    """
    response = self.connection.make_request('DELETE', self.name,
                                            query_args='cors',
                                            headers=headers)
    body = response.read()
    boto.log.debug(body)
    if response.status != 204:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
    return True
'Start a multipart upload operation. .. note:: Note: After you initiate multipart upload and upload one or more parts, you must either complete or abort multipart upload in order to stop getting charged for storage of the uploaded parts. Only after you either complete or abort multipart upload, Amazon S3 frees up the p...
def initiate_multipart_upload(self, key_name, headers=None, reduced_redundancy=False, metadata=None, encrypt_key=False, policy=None):
# Starts a multipart upload: merges the ACL policy header (when given)
# and the provider's storage-class header (when reduced_redundancy and
# the provider defines one) into the request headers before issuing the
# ?uploads POST.
# NOTE(review): the source line below is truncated in this view ("...") —
# metadata/encrypt_key handling and the request/response parsing are not
# visible here.
    query_args = 'uploads'     provider = self.connection.provider     headers = (headers or {})     if policy:         headers[provider.acl_header] = policy     if reduced_redundancy:         storage_class_header = provider.storage_class_header         if storage_class_header:             headers[storage_class_header]...
'Complete a multipart upload operation.'
def complete_multipart_upload(self, key_name, upload_id, xml_body, headers=None):
# POSTs the part-manifest XML to the key's ?uploadId=<id> subresource
# with Content-Type text/xml, then decodes and inspects the response
# body for an embedded error document (S3 can return 200 with an error
# in the body for this operation).
# NOTE(review): the source line below is truncated in this view ("...") —
# the error detection and result parsing are not visible here.
    query_args = ('uploadId=%s' % upload_id)     if (headers is None):         headers = {}     headers['Content-Type'] = 'text/xml'     response = self.connection.make_request('POST', self.name, key_name, query_args=query_args, headers=headers, data=xml_body)     contains_error = False     body = response.read().decod...
'To verify that all parts have been removed, so you don\'t get charged for the part storage, you should call the List Parts operation and ensure the parts list is empty.'
def cancel_multipart_upload(self, key_name, upload_id, headers=None):
# Aborts a multipart upload via DELETE on the key's ?uploadId=<id>
# subresource; anything other than a 204 raises the provider's storage
# error.
# NOTE(review): the source line below is truncated in this view ("...") —
# the tail of the error-raising call is not visible here.
    query_args = ('uploadId=%s' % upload_id)     response = self.connection.make_request('DELETE', self.name, key_name, query_args=query_args, headers=headers)     body = response.read()     boto.log.debug(body)     if (response.status != 204):         raise self.connection.provider.storage_response_error(response.stat...
def __init__(self, tracker_file_name=None, num_retries=None):
    """Create a resumable-download handler; instantiate one per
    downloaded file.

    :type tracker_file_name: string
    :param tracker_file_name: Optional path used to persist tracking
        state about this download so that a new process can resume it
        if the current one fails.

    :type num_retries: int
    :param num_retries: Number of times the download will be retried.
    """
    self.tracker_file_name = tracker_file_name
    self.num_retries = num_retries
    self.etag_value_for_current_download = None
    # When a tracker file is supplied, recover the etag recorded by any
    # previous (partial) download attempt.
    if tracker_file_name:
        self._load_tracker_file_etag()
    self.download_start_point = None
'Attempts a resumable download. Raises ResumableDownloadException if any problems occur.'
def _attempt_resumable_download(self, key, fp, headers, cb, num_cb, torrent, version_id, hash_algs):
# Seeks fp to EOF to measure how much has already been downloaded; when
# the tracked etag matches the key's (quote-stripped) etag, the partial
# file is validated — a local file larger than the key raises
# ResumableDownloadException.
# NOTE(review): the source line below is truncated in this view ("...") —
# the resume/range-request logic that follows is not visible here.
    cur_file_size = get_cur_file_size(fp, position_to_eof=True)     if (cur_file_size and self.etag_value_for_current_download and (self.etag_value_for_current_download == key.etag.strip('"\''))):         if (cur_file_size > key.size):             raise ResumableDownloadException(('%s is larger (%d) than ...
'Retrieves a file from a Key :type key: :class:`boto.s3.key.Key` or subclass :param key: The Key object from which upload is to be downloaded :type fp: file :param fp: File pointer into which data should be downloaded :type headers: string :param: headers to send when retrieving the files :type cb: function :param cb: ...
def get_file(self, key, fp, headers, cb=None, num_cb=10, torrent=False, version_id=None, hash_algs=None):
# Retry loop around resumable download attempts: num_retries defaults
# from the Boto config section ('num_retries', default 6); the file size
# is snapshotted before each attempt so progress-less iterations can be
# counted and bounded.
# NOTE(review): the source line below is truncated in this view ("...") —
# the attempt/except/backoff body of the while loop is not visible here.
    debug = key.bucket.connection.debug     if (not headers):         headers = {}     if (self.num_retries is None):         self.num_retries = config.getint('Boto', 'num_retries', 6)     progress_less_iterations = 0     while True:         had_file_bytes_before_attempt = get_cur_file_size(fp)         try:     ...
def set_bucket_class(self, bucket_class):
    """Override the class used to represent buckets on this connection.

    By default this is boto.s3.bucket.Bucket, but a subclass can be
    supplied so that bucket-returning calls build instances of it.

    :type bucket_class: class
    :param bucket_class: A subclass of Bucket that can be more specific.
    """
    # Just record it; bucket-returning methods consult this attribute.
    self.bucket_class = bucket_class
def build_post_policy(self, expiration_time, conditions):
    """Render the JSON policy document for a browser-based POST upload.

    Adapted from the AWS book's Python examples for use with boto.
    """
    assert isinstance(expiration_time, time.struct_time), \
        'Policy document must include a valid expiration Time object'
    # Expiration is rendered as ISO8601; conditions are pre-rendered
    # JSON fragments joined verbatim.
    expiry = time.strftime(boto.utils.ISO8601, expiration_time)
    return ('{"expiration": "%s",\n"conditions": [%s]}'
            % (expiry, ','.join(conditions)))
'Taken from the AWS book Python examples and modified for use with boto This only returns the arguments required for the post form, not the actual form. This does not return the file input field which also needs to be added :type bucket_name: string :param bucket_name: Bucket to submit to :type key: string :param key:...
def build_post_form_args(self, bucket_name, key, expires_in=6000, acl=None, success_action_redirect=None, max_content_length=None, http_method='http', fields=None, conditions=None, storage_class='STANDARD', server_side_encryption=None):
# Computes the policy expiration from expires_in seconds and starts
# accumulating the policy's condition fragments (bucket, key prefix).
# NOTE(review): the literal '$(unknown)' looks like a mangled
# '${filename}' placeholder — verify against upstream before relying on
# the wildcard-key branch.
# NOTE(review): the source line below is truncated in this view ("...") —
# signing and form-field assembly are not visible here.
    if (fields is None):         fields = []     if (conditions is None):         conditions = []     expiration = time.gmtime(int((time.time() + expires_in)))     conditions.append(('{"bucket": "%s"}' % bucket_name))     if key.endswith('$(unknown)'):         conditions.append(('["starts-with", "$key", "%s"]...
def get_canonical_user_id(self, headers=None):
    """Return the CanonicalUserID of the account whose credentials back
    this connection.

    S3 only exposes this value in the GET Service (all-buckets)
    response, so this convenience method issues that request and pulls
    the owner id out of the result set.
    """
    all_buckets = self.get_all_buckets(headers=headers)
    return all_buckets.owner.id
def get_bucket(self, bucket_name, validate=True, headers=None):
    """Retrieve a bucket by name.

    With ``validate=True`` (the default) a request is made to the
    service and an error is raised if the bucket does not exist; use
    ``S3Connection.lookup`` if you would rather get ``None`` back.
    With ``validate=False`` no request is made at all and a bucket
    object is returned unconditionally (cheaper, but missing buckets
    surface as errors later).
    """
    if not validate:
        # Skip the round trip entirely.
        return self.bucket_class(self, bucket_name)
    return self.head_bucket(bucket_name, headers=headers)
'Determines if a bucket exists by name. If the bucket does not exist, an ``S3ResponseError`` will be raised. :type bucket_name: string :param bucket_name: The name of the bucket :type headers: dict :param headers: Additional headers to pass along with the request to AWS. :returns: A <Bucket> object'
def head_bucket(self, bucket_name, headers=None):
# Issues a HEAD request: 200 builds and returns a bucket_class instance;
# 403 builds a storage error (presumably with an access-denied message).
# NOTE(review): the source line below is truncated in this view ("...") —
# the 403 message and the remaining status branches (e.g. 404) are not
# visible here.
    response = self.make_request('HEAD', bucket_name, headers=headers)     body = response.read()     if (response.status == 200):         return self.bucket_class(self, bucket_name)     elif (response.status == 403):         err = self.provider.storage_response_error(response.status, response.reason, body)         err...
def lookup(self, bucket_name, validate=True, headers=None):
    """Like ``get_bucket``, but return ``None`` instead of raising when
    the bucket cannot be retrieved.

    :type bucket_name: string
    :param bucket_name: The name of the bucket.

    :type validate: bool
    :param validate: If True (default), a round trip is made to verify
        the bucket exists.

    :type headers: dict
    :param headers: Additional headers to pass along with the request.

    :returns: A bucket object, or None.
    """
    try:
        return self.get_bucket(bucket_name, validate, headers=headers)
    except Exception:
        # Fix: was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception.
        return None
'Creates a new located bucket. By default it\'s in the USA. You can pass Location.EU to create a European bucket (S3) or European Union bucket (GCS). :type bucket_name: string :param bucket_name: The name of the new bucket :type headers: dict :param headers: Additional headers to pass along with the request to AWS. :ty...
def create_bucket(self, bucket_name, headers=None, location=Location.DEFAULT, policy=None):
# Validates the bucket name is lowercase, merges the canned-ACL policy
# into the request headers, and for non-default locations builds a
# <CreateBucketConfiguration> XML body naming the region.
# NOTE(review): the source line below is truncated in this view ("...") —
# the request issue and response handling are not visible here.
    check_lowercase_bucketname(bucket_name)     if policy:         if headers:             headers[self.provider.acl_header] = policy         else:             headers = {self.provider.acl_header: policy}     if (location == Location.DEFAULT):         data = ''     else:         data = (('<CreateBucketConfiguration><Lo...
def delete_bucket(self, bucket, headers=None):
    """Delete an S3 bucket, which must already be empty.

    :type bucket: string
    :param bucket: The name of the bucket to remove.

    :type headers: dict
    :param headers: Additional headers to pass along with the request.

    :raises: the provider's storage error when the service returns
        anything other than 204 (e.g. bucket not empty / not found).
    """
    response = self.make_request('DELETE', bucket, headers=headers)
    body = response.read()
    if response.status != 204:
        raise self.provider.storage_response_error(
            response.status, response.reason, body)
def get_md5_from_hexdigest(self, md5_hexdigest):
    """Build the ``(md5_hexdigest, base64md5)`` 2-tuple from a
    precalculated MD5 hex digest.

    Fix: ``encodebytes`` returns ``bytes``, so ``base64md5[-1]`` is an
    ``int`` and the old comparison against the one-character newline
    string could never be true, leaving a trailing newline on the
    base64 value.  Strip it with a bytes-aware operation instead (a
    16-byte digest encodes to a single 24-char line, so at most one
    trailing newline exists).
    """
    digest = binascii.unhexlify(md5_hexdigest)
    base64md5 = encodebytes(digest).rstrip(b'\n')
    return (md5_hexdigest, base64md5)
def handle_addl_headers(self, headers):
    """Hook for Key subclasses to do provider-specific post-processing
    of response headers.  The base implementation does nothing.
    """
    return None
'Open this key for reading :type headers: dict :param headers: Headers to pass in the web request :type query_args: string :param query_args: Arguments to pass in the query string (ie, \'torrent\') :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter fo...
def open_read(self, headers=None, query_args='', override_num_retries=None, response_headers=None):
# Lazily issues the GET request for this key the first time it is read:
# only acts when no response is already open (self.resp is None), sets
# read mode, and checks the response status is within the success range.
# NOTE(review): the source line below is truncated in this view ("...") —
# the error branch and header processing are not visible here.
    if (self.resp is None):         self.mode = 'r'         provider = self.bucket.connection.provider         self.resp = self.bucket.connection.make_request('GET', self.bucket.name, self.name, headers, query_args=query_args, override_num_retries=override_num_retries)         if ((self.resp.status < 199) or (self.resp...
'Open this key for writing. Not yet implemented :type headers: dict :param headers: Headers to pass in the write request :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter for underlying PUT.'
def open_write(self, headers=None, override_num_retries=None):
# Write-mode streaming is not supported by this class; this always
# raises, regardless of arguments.
    raise BotoClientError('Not Implemented')
def close(self, fast=False):
    """Close this key.

    :type fast: bool
    :param fast: When True, skip draining the rest of the response body
        before closing.  Only use when subsequent calls will not need
        the content of the open HTTP connection.
    """
    if self.resp and not fast:
        # Drain the remaining body so the connection can be reused.
        self.resp.read()
    self.resp = None
    self.mode = None
    self.closed = True
def next(self):
    """Iterator-protocol support: yield successive chunks of the key's
    contents, e.g. ``for chunk in key: ...``, with the underlying HTTP
    connection handled transparently.
    """
    self.open_read()
    chunk = self.resp.read(self.BufferSize)
    if chunk:
        return chunk
    # Empty read means the body is exhausted: close and stop iterating.
    self.close()
    raise StopIteration
'Change the storage class of an existing key. Depending on whether a different destination bucket is supplied or not, this will either move the item within the bucket, preserving all metadata and ACL info bucket changing the storage class or it will copy the item to the provided destination bucket, also preserving meta...
def change_storage_class(self, new_storage_class, dst_bucket=None, validate_dst_bucket=True):
# Dispatches on the requested class: 'STANDARD' copies (in place or to
# dst_bucket) with reduced_redundancy=False, 'REDUCED_REDUNDANCY' copies
# with it enabled; both preserve the ACL.
# NOTE(review): the source line below is truncated in this view ("...") —
# the REDUCED_REDUNDANCY call tail and any unknown-class handling are
# not visible here.
    bucket_name = (dst_bucket or self.bucket.name)     if (new_storage_class == 'STANDARD'):         return self.copy(bucket_name, self.name, reduced_redundancy=False, preserve_acl=True, validate_dst_bucket=validate_dst_bucket)     elif (new_storage_class == 'REDUCED_REDUNDANCY'):         return self.copy(bucket_name, ...
'Copy this Key to another bucket. :type dst_bucket: string :param dst_bucket: The name of the destination bucket :type dst_key: string :param dst_key: The name of the destination key :type metadata: dict :param metadata: Metadata to be associated with new key. If metadata is supplied, it will replace the metadata of t...
def copy(self, dst_bucket, dst_key, metadata=None, reduced_redundancy=False, preserve_acl=False, encrypt_key=False, validate_dst_bucket=True):
# Resolves the destination bucket through connection.lookup (optionally
# validating it), chooses REDUCED_REDUNDANCY or this key's current
# storage class, then delegates to dst_bucket.copy_key.
# NOTE(review): the source line below is truncated in this view ("...") —
# the tail of the copy_key call (preserve_acl/encrypt_key forwarding)
# is not visible here.
    dst_bucket = self.bucket.connection.lookup(dst_bucket, validate_dst_bucket)     if reduced_redundancy:         storage_class = 'REDUCED_REDUNDANCY'     else:         storage_class = self.storage_class     return dst_bucket.copy_key(dst_key, self.bucket.name, self.name, metadata, storage_class=storage_class, preserv...
def exists(self, headers=None):
    """Return True if this key exists in S3.

    :rtype: bool
    :return: Whether the key exists on S3.
    """
    found = self.bucket.lookup(self.name, headers=headers)
    return bool(found)
def delete(self, headers=None):
    """Delete this key from S3 (honoring this key's version_id)."""
    return self.bucket.delete_key(self.name,
                                  version_id=self.version_id,
                                  headers=headers)
def get_redirect(self):
    """Return the website redirect location configured for this key,
    or None if no redirect has been set via ``set_redirect``.
    """
    response = self.bucket.connection.make_request(
        'HEAD', self.bucket.name, self.name)
    if response.status != 200:
        raise self.provider.storage_response_error(
            response.status, response.reason, response.read())
    # The service reports a configured redirect in this header; the
    # header is absent (None) when no redirect is set.
    return response.getheader('x-amz-website-redirect-location')
'Configure this key to redirect to another location. When the bucket associated with this key is accessed from the website endpoint, a 301 redirect will be issued to the specified `redirect_location`. :type redirect_location: string :param redirect_location: The location to redirect.'
def set_redirect(self, redirect_location, headers=None):
# Copies the caller's headers (to avoid mutating the argument), adds
# x-amz-website-redirect-location, and PUTs the key; a 200 status
# returns True.
# NOTE(review): the source line below is truncated in this view ("...") —
# the non-200 error branch is not visible here.
    if (headers is None):         headers = {}     else:         headers = headers.copy()     headers['x-amz-website-redirect-location'] = redirect_location     response = self.bucket.connection.make_request('PUT', self.bucket.name, self.name, headers)     if (response.status == 200):         return True     else:     ...