desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def keys(self):
    """od.keys() -> list of keys in od"""
    return [key for key in self]
def values(self):
    """od.values() -> list of values in od"""
    return list(map(self.__getitem__, self))
def items(self):
    """od.items() -> list of (key, value) pairs in od"""
    pairs = []
    for key in self:
        pairs.append((key, self[key]))
    return pairs
def iterkeys(self):
    """od.iterkeys() -> an iterator over the keys in od"""
    return self.__iter__()
def itervalues(self):
    """od.itervalues -> an iterator over the values in od"""
    return (self[key] for key in self)
def iteritems(self):
    """od.iteritems -> an iterator over the (key, value) items in od"""
    return ((key, self[key]) for key in self)
'od.update(E, **F) -> None. Update od from dict/iterable E and F. If E is a dict instance, does: for k in E: od[k] = E[k] If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] Or if E is an iterable of items, does: for k, v in E: od[k] = v In either case, this is followed by: for k, ...
def update(*args, **kwds):
if (len(args) > 2): raise TypeError(('update() takes at most 2 positional arguments (%d given)' % (len(args),))) elif (not args): raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] other = () if (len(args) == 2)...
def pop(self, key, default=__marker):
    """od.pop(k[,d]) -> v, remove specified key and return the corresponding
    value.  If key is not found, d is returned if given, otherwise KeyError
    is raised.
    """
    if key not in self:
        # No such key: honour the sentinel default.
        if default is self.__marker:
            raise KeyError(key)
        return default
    value = self[key]
    del self[key]
    return value
def setdefault(self, key, default=None):
    """od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od"""
    if key in self:
        return self[key]
    # Missing key: store the default and report it back.
    self[key] = default
    return default
'od.__repr__() <==> repr(od)'
def __repr__(self, _repr_running={}):
call_key = (id(self), _get_ident()) if (call_key in _repr_running): return '...' _repr_running[call_key] = 1 try: if (not self): return ('%s()' % (self.__class__.__name__,)) return ('%s(%r)' % (self.__class__.__name__, self.items())) finally: del _repr_run...
def __reduce__(self):
    """Return state information for pickling"""
    pairs = [[key, self[key]] for key in self]
    # Drop the bookkeeping attributes a plain OrderedDict carries, so only
    # subclass-specific instance state is pickled alongside the items.
    state = vars(self).copy()
    for attr in vars(OrderedDict()):
        state.pop(attr, None)
    if state:
        return (self.__class__, (pairs,), state)
    return (self.__class__, (pairs,))
def copy(self):
    """od.copy() -> a shallow copy of od"""
    klass = self.__class__
    return klass(self)
@classmethod
def fromkeys(cls, iterable, value=None):
    """OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
    Values are all set to v (which defaults to None).
    """
    new_mapping = cls()
    for key in iterable:
        new_mapping[key] = value
    return new_mapping
def __eq__(self, other):
    """od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
    while comparison to a regular mapping is order-insensitive.
    """
    if not isinstance(other, OrderedDict):
        # Plain mapping: fall back to unordered dict equality.
        return dict.__eq__(self, other)
    return len(self) == len(other) and self.items() == other.items()
def viewkeys(self):
    """od.viewkeys() -> a set-like object providing a view on od's keys"""
    view = KeysView(self)
    return view
def viewvalues(self):
    """od.viewvalues() -> an object providing a view on od's values"""
    view = ValuesView(self)
    return view
def viewitems(self):
    """od.viewitems() -> a set-like object providing a view on od's items"""
    view = ItemsView(self)
    return view
'A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: \'foo\': \'bar\', \'fakef...
@classmethod def from_tuples(cls, fieldname, value):
if isinstance(value, tuple): if (len(value) == 3): (filename, data, content_type) = value else: (filename, data) = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_para...
def _render_part(self, name, value):
    """Overridable helper function to format a single header parameter.

    :param name: The name of the parameter, a string expected to be ASCII only.
    :param value: The value of the parameter, provided as a unicode string.
    """
    rendered = format_header_param(name, value)
    return rendered
def _render_parts(self, header_parts):
    """Helper function to format and quote a single header.

    Useful for single headers that are composed of multiple items, e.g.
    'Content-Disposition' fields.

    :param header_parts: A sequence of (k, v) tuples or a :class:`dict` of
        (k, v) to format as `k1="v1"; k2="v2"; ...`.
    """
    if isinstance(header_parts, dict):
        header_parts = header_parts.items()
    # Pairs with a falsy value are skipped entirely.
    rendered = [self._render_part(name, value)
                for name, value in header_parts
                if value]
    return '; '.join(rendered)
'Renders the headers for this request field.'
def render_headers(self):
lines = [] sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] for sort_key in sort_keys: if self.headers.get(sort_key, False): lines.append(('%s: %s' % (sort_key, self.headers[sort_key]))) for (header_name, header_value) in self.headers.items(): if (he...
def make_multipart(self, content_disposition=None, content_type=None, content_location=None):
    """Makes this request field into a multipart request field.

    This method overrides "Content-Disposition", "Content-Type" and
    "Content-Location" headers of the request parameter.

    :param content_type: The 'Content-Type' of the request body.
    :param content_location: The 'Content-Location' of the request body.
    """
    disposition = content_disposition or 'form-data'
    rendered = self._render_parts(
        (('name', self._name), ('filename', self._filename)))
    # '; '.join(['', rendered]) prefixes the rendered parts with '; '.
    self.headers['Content-Disposition'] = disposition + '; '.join(['', rendered])
    self.headers['Content-Type'] = content_type
    self.headers['Content-Location'] = content_location
def pop(self, key, default=__marker):
    """D.pop(k[,d]) -> v, remove specified key and return the corresponding
    value.  If key is not found, d is returned if given, otherwise KeyError
    is raised.
    """
    try:
        value = self[key]
    except KeyError:
        if default is self.__marker:
            # No default supplied: re-raise the original KeyError.
            raise
        return default
    del self[key]
    return value
def add(self, key, val):
    """Adds a (name, value) pair, doesn't overwrite the value if it already
    exists.  Repeated additions under the same (case-insensitive) key
    accumulate into a list of values.
    """
    lowered = key.lower()
    candidate = (key, val)
    stored = self._container.setdefault(lowered, candidate)
    if stored is candidate:
        # First value for this key: the tuple we just inserted is enough.
        return
    if isinstance(stored, list):
        stored.append(val)
    else:
        # Second value: promote the (key, val) tuple to a list form
        # [original_key, first_val, new_val].
        self._container[lowered] = [stored[0], stored[1], val]
'Generic import function for any type of header-like object. Adapted version of MutableMapping.update in order to insert items with self.add instead of self.__setitem__'
def extend(self, *args, **kwargs):
if (len(args) > 1): raise TypeError('extend() takes at most 1 positional arguments ({0} given)'.format(len(args))) other = (args[0] if (len(args) >= 1) else ()) if isinstance(other, HTTPHeaderDict): for (key, val) in other.iteritems(): self.add(key, val) ...
def getlist(self, key):
    """Returns a list of all the values for the named field.  Returns an
    empty list if the key doesn't exist.
    """
    try:
        stored = self._container[key.lower()]
    except KeyError:
        return []
    if isinstance(stored, tuple):
        # Single value stored as (original_key, value).
        return [stored[1]]
    # Multi-value entry stored as [original_key, v1, v2, ...].
    return stored[1:]
def iteritems(self):
    """Iterate over all header lines, including duplicate ones."""
    for key in self:
        stored = self._container[key.lower()]
        original_key = stored[0]
        for value in stored[1:]:
            yield (original_key, value)
def itermerged(self):
    """Iterate over all headers, merging duplicate ones together."""
    for key in self:
        stored = self._container[key.lower()]
        merged = ', '.join(stored[1:])
        yield (stored[0], merged)
'Read headers from a Python 2 httplib message object.'
@classmethod def from_httplib(cls, message):
headers = [] for line in message.headers: if line.startswith((' ', ' DCTB ')): (key, value) = headers[(-1)] headers[(-1)] = (key, ((value + '\r\n') + line.rstrip())) continue (key, value) = line.split(':', 1) headers.append((key, value.strip())) ...
'Establish a socket connection and set nodelay settings on it. :return: New socket connection.'
def _new_conn(self):
extra_kw = {} if self.source_address: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: conn = connection.create_connection((self.host, self.port), self.timeout, **extra_kw) except SocketTimeout as e...
'Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used to specify in error messages. :return: The validated and casted version of the given value. :raises ValueError: If the type is not an integer or a float, or if i...
@classmethod def _validate_timeout(cls, value, name):
if (value is _Default): return cls.DEFAULT_TIMEOUT if ((value is None) or (value is cls.DEFAULT_TIMEOUT)): return value try: float(value) except (TypeError, ValueError): raise ValueError(('Timeout value %s was %s, but it must be an int or ...
@classmethod
def from_float(cls, timeout):
    """Create a new Timeout from a legacy timeout value.

    The timeout value used by httplib.py sets the same timeout on the
    connect() and recv() socket requests; mirror that by applying ``timeout``
    to both the connect and read timeouts.

    :param timeout: The legacy timeout value.
    """
    return Timeout(connect=timeout, read=timeout)
def clone(self):
    """Create a copy of the timeout object.

    Timeout properties are stored per-pool but each request needs a fresh
    Timeout object to ensure each one has its own start/stop configured.

    :return: a copy of the timeout object
    :rtype: :class:`Timeout`
    """
    return Timeout(total=self.total, connect=self._connect, read=self._read)
def start_connect(self):
    """Start the timeout clock, used during a connect() attempt.

    :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a
        timer that has been started already.
    """
    if self._start_connect is not None:
        raise TimeoutStateError('Timeout timer has already been started.')
    self._start_connect = current_time()
    return self._start_connect
def get_connect_duration(self):
    """Gets the time elapsed since the call to :meth:`start_connect`.

    :return: Elapsed time.
    :rtype: float
    :raises urllib3.exceptions.TimeoutStateError: if you attempt to get
        duration for a timer that hasn't been started.
    """
    if self._start_connect is None:
        raise TimeoutStateError(
            "Can't get connect duration for timer that has not started.")
    return current_time() - self._start_connect
@property
def connect_timeout(self):
    """Get the value to use when setting a connection timeout.

    This will be a positive float or integer, the value None (never timeout),
    or the default system timeout.

    :return: Connect timeout.
    :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
    """
    if self.total is None:
        return self._connect
    if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
        # No explicit connect timeout: the total budget applies.
        return self.total
    return min(self._connect, self.total)
'Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout. If the connection time has not been established, a :exc:`~urllib3.excepti...
@property def read_timeout(self):
if ((self.total is not None) and (self.total is not self.DEFAULT_TIMEOUT) and (self._read is not None) and (self._read is not self.DEFAULT_TIMEOUT)): if (self._start_connect is None): return self._read return max(0, min((self.total - self.get_connect_duration()), self._read)) elif ((...
'Backwards-compatibility for the old retries format.'
@classmethod def from_int(cls, retries, redirect=True, default=None):
if (retries is None): retries = (default if (default is not None) else cls.DEFAULT) if isinstance(retries, Retry): return retries redirect = (bool(redirect) and None) new_retries = cls(retries, redirect=redirect) log.debug(('Converted retries value: %r -> %r' % (retrie...
def get_backoff_time(self):
    """Formula for computing the current backoff.

    :rtype: float
    """
    errors = self._observed_errors
    if errors <= 1:
        # First attempt (or none): no backoff.
        return 0
    candidate = self.backoff_factor * (2 ** (errors - 1))
    return min(self.BACKOFF_MAX, candidate)
def sleep(self):
    """Sleep between retry attempts using an exponential backoff.

    By default, the backoff factor is 0 and this method will return
    immediately.
    """
    backoff = self.get_backoff_time()
    if backoff > 0:
        time.sleep(backoff)
def _is_connection_error(self, err):
    """Errors when we're fairly sure that the server did not receive the
    request, so it should be safe to retry.
    """
    return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
    """Errors that occur after the request has been started, so we should
    assume that the server began processing it.
    """
    return isinstance(err, (ReadTimeoutError, ProtocolError))
def is_forced_retry(self, method, status_code):
    """Is this method/status code retryable? (Based on method/codes whitelists)"""
    whitelist = self.method_whitelist
    if whitelist and method.upper() not in whitelist:
        return False
    return self.status_forcelist and status_code in self.status_forcelist
def is_exhausted(self):
    """Are we out of retries?"""
    # Only truthy counters participate (None and 0 are dropped, matching
    # filter(None, ...) semantics).
    counters = [c for c in (self.total, self.connect,
                            self.read, self.redirect) if c]
    if not counters:
        return False
    return min(counters) < 0
'Return a new Retry object with incremented retry counters. :param response: A response object, or None, if the server did not return a response. :type response: :class:`~urllib3.response.HTTPResponse` :param Exception error: An error encountered during the request, or None if the response was received successfully. :r...
def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None):
if ((self.total is False) and error): raise six.reraise(type(error), error, _stacktrace) total = self.total if (total is not None): total -= 1 _observed_errors = self._observed_errors connect = self.connect read = self.read redirect = self.redirect cause = 'unknown' i...
@property
def hostname(self):
    """For backwards-compatibility with urlparse. We're nice like that."""
    return self.host
@property
def request_uri(self):
    """Absolute path including the query string."""
    uri = self.path or '/'
    if self.query is not None:
        uri = '%s?%s' % (uri, self.query)
    return uri
@property
def netloc(self):
    """Network location including host and port"""
    if not self.port:
        return self.host
    return '%s:%d' % (self.host, self.port)
'Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url(\'http://google....
@property def url(self):
(scheme, auth, host, port, path, query, fragment) = self url = '' if (scheme is not None): url += (scheme + '://') if (auth is not None): url += (auth + '@') if (host is not None): url += host if (port is not None): url += (':' + str(port)) if (path is not Non...
def close():
    """Close all pooled connections and disable the pool.

    NOTE(review): no-op stub; as written it takes no ``self`` parameter —
    confirm against callers before changing the signature.
    """
def _new_conn(self):
    """Return a fresh :class:`HTTPConnection`."""
    self.num_connections += 1
    log.info('Starting new HTTP connection (%d): %s' %
             (self.num_connections, self.host))
    return self.ConnectionCls(host=self.host, port=self.port,
                              timeout=self.timeout.connect_timeout,
                              strict=self.strict, **self.conn_kw)
'Get a connection. Will return a pooled connection if one is available. If no connections are available and :prop:`.block` is ``False``, then a fresh connection is returned. :param timeout: Seconds to wait before giving up and raising :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and :prop:`.block` is...
def _get_conn(self, timeout=None):
conn = None try: conn = self.pool.get(block=self.block, timeout=timeout) except AttributeError: raise ClosedPoolError(self, 'Pool is closed.') except Empty: if self.block: raise EmptyPoolError(self, 'Pool reached maximum size and no more con...
def _put_conn(self, conn):
    """Put a connection back into the pool.

    :param conn: Connection object for the current host and port as returned
        by :meth:`._new_conn` or :meth:`._get_conn`.

    If the pool is already full, the connection is closed and discarded
    because we exceeded maxsize.  If connections are discarded frequently,
    then maxsize should be increased.
    """
    try:
        self.pool.put(conn, block=False)
        return
    except AttributeError:
        # self.pool is None: the pool was closed; discard the connection.
        pass
    except Full:
        log.warning('Connection pool is full, discarding connection: %s' %
                    self.host)
    if conn:
        conn.close()
def _validate_conn(self, conn):
    """Called right before a request is made, after the socket is created."""
    # Hook for subclasses; the base implementation does nothing.
def _get_timeout(self, timeout):
    """Helper that always returns a :class:`urllib3.util.Timeout`"""
    if timeout is _Default:
        return self.timeout.clone()
    if isinstance(timeout, Timeout):
        return timeout.clone()
    # Legacy int/float timeout value.
    return Timeout.from_float(timeout)
'Is the error actually a timeout? Will raise a ReadTimeout or pass'
def _raise_timeout(self, err, url, timeout_value):
if isinstance(err, SocketTimeout): raise ReadTimeoutError(self, url, ('Read timed out. (read timeout=%s)' % timeout_value)) if (hasattr(err, 'errno') and (err.errno in _blocking_errnos)): raise ReadTimeoutError(self, url, ('Read timed out. (read timeout=%s)' % timeout_val...
'Perform a request on a given urllib connection object taken from our pool. :param conn: a connection from one of our connection pools :param timeout: Socket timeout in seconds for the request. This can be a float or integer, which will set the same timeout value for the socket connect and the socket read, or an instan...
def _make_request(self, conn, method, url, timeout=_Default, **httplib_request_kw):
self.num_requests += 1 timeout_obj = self._get_timeout(timeout) timeout_obj.start_connect() conn.timeout = timeout_obj.connect_timeout try: self._validate_conn(conn) except (SocketTimeout, BaseSSLError) as e: self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) ...
def close(self):
    """Close all pooled connections and disable the pool."""
    # Detach the pool first so concurrent users see it as closed.
    old_pool, self.pool = self.pool, None
    try:
        while True:
            conn = old_pool.get(block=False)
            if conn:
                conn.close()
    except Empty:
        # Pool fully drained.
        pass
'Check if the given ``url`` is a member of the same host as this connection pool.'
def is_same_host(self, url):
if url.startswith('/'): return True (scheme, host, port) = get_host(url) if (self.port and (not port)): port = port_by_scheme.get(scheme) elif ((not self.port) and (port == port_by_scheme.get(scheme))): port = None return ((scheme, host, port) == (self.scheme, self.host, self...
'Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you\'ll need to specify all the raw details. .. note:: More commonly, it\'s appropriate to use a convenience method provided by :class:`.RequestMethods`, such as :meth:`request`. .. note:: `release_conn` ...
def urlopen(self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, **response_kw):
if (headers is None): headers = self.headers if (not isinstance(retries, Retry)): retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if (release_conn is None): release_conn = response_kw.get('preload_content', True) if (assert_same_host and (not self.is_sa...
'Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` and establish the tunnel if proxy is used.'
def _prepare_conn(self, conn):
if isinstance(conn, VerifiedHTTPSConnection): conn.set_cert(key_file=self.key_file, cert_file=self.cert_file, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, assert_hostname=self.assert_hostname, assert_fingerprint=self.assert_fingerprint) conn.ssl_version = self.ssl_...
def _prepare_proxy(self, conn):
    """Establish tunnel connection early, because otherwise httplib would
    improperly set Host: header to proxy's IP:port.
    """
    try:
        set_tunnel = conn.set_tunnel
    except AttributeError:
        # Very old Pythons exposed the tunnel setter under a private name.
        set_tunnel = conn._set_tunnel
    if sys.version_info <= (2, 6, 4) and not self.proxy_headers:
        # Python 2.6.4 and earlier can't accept a headers argument.
        set_tunnel(self.host, self.port)
    else:
        set_tunnel(self.host, self.port, self.proxy_headers)
    conn.connect()
'Return a fresh :class:`httplib.HTTPSConnection`.'
def _new_conn(self):
self.num_connections += 1 log.info(('Starting new HTTPS connection (%d): %s' % (self.num_connections, self.host))) if ((not self.ConnectionCls) or (self.ConnectionCls is DummyConnection)): raise SSLError("Can't connect to HTTPS URL because the SSL module is ...
'Called right before a request is made, after the socket is created.'
def _validate_conn(self, conn):
super(HTTPSConnectionPool, self)._validate_conn(conn) if (not getattr(conn, 'sock', None)): conn.connect() if (not conn.is_verified): warnings.warn('Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https:...
def __init__(self, user, pw, authurl, *args, **kwargs):
    """authurl is a random URL on the server that is protected by NTLM.
    user is the Windows user, probably in the DOMAIN\\username format.
    pw is the password for the user.
    """
    super(NTLMConnectionPool, self).__init__(*args, **kwargs)
    self.authurl = authurl
    self.rawuser = user
    # Split 'DOMAIN\\username' once; domain is normalised to upper case.
    parts = user.split('\\', 1)
    self.domain = parts[0].upper()
    self.user = parts[1]
    self.pw = pw
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
    """Make a request using :meth:`urlopen` with the appropriate encoding of
    ``fields`` based on the ``method`` used.

    This is a convenience method that requires the least amount of manual
    effort.  It can be used in most situations, while still having the option
    to drop down to more specific methods when necessary.
    """
    verb = method.upper()
    if verb in self._encode_url_methods:
        handler = self.request_encode_url
    else:
        handler = self.request_encode_body
    return handler(verb, url, fields=fields, headers=headers, **urlopen_kw)
def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
    """Make a request using :meth:`urlopen` with the ``fields`` encoded in
    the url.  This is useful for request methods like GET, HEAD, DELETE, etc.
    """
    if headers is None:
        headers = self.headers
    extra_kw = {'headers': headers}
    extra_kw.update(urlopen_kw)
    if fields:
        url = '%s?%s' % (url, urlencode(fields))
    return self.urlopen(method, url, **extra_kw)
'Make a request using :meth:`urlopen` with the ``fields`` encoded in the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the payload with the appropriate content type. Otherwise :meth...
def request_encode_body(self, method, url, fields=None, headers=None, encode_multipart=True, multipart_boundary=None, **urlopen_kw):
if (headers is None): headers = self.headers extra_kw = {'headers': {}} if fields: if ('body' in urlopen_kw): raise TypeError("request got values for both 'fields' and 'body', can only specify one.") if encode_multipart: (body,...
def __init__(self, *args, **kwargs):
    """Initialize RequestException with `request` and `response` objects."""
    self.response = kwargs.pop('response', None)
    self.request = kwargs.pop('request', None)
    # Fall back to the request carried by the response, if any.
    if (self.response is not None and not self.request
            and hasattr(self.response, 'request')):
        self.request = self.response.request
    super(RequestException, self).__init__(*args, **kwargs)
@property
def path_url(self):
    """Build the path URL to use."""
    parts = urlsplit(self.url)
    path = parts.path or '/'
    if parts.query:
        return '%s?%s' % (path, parts.query)
    return path
'Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict.'
@staticmethod def _encode_params(data):
if isinstance(data, (str, bytes)): return data elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): result = [] for (k, vs) in to_key_val_list(data): if (isinstance(vs, basestring) or (not hasattr(vs, '__iter__'))): vs = [vs] ...
'Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict.'
@staticmethod def _encode_files(files, data):
if (not files): raise ValueError('Files must be provided.') elif isinstance(data, basestring): raise ValueError('Data must not be a string.') new_fields = [] fields = to_key_val_list((data or {})) files = to_key_val_list((files or {})) for (field, val) in ...
'Properly register a hook.'
def register_hook(self, event, hook):
if (event not in self.hooks): raise ValueError(('Unsupported event specified, with event name "%s"' % event)) if isinstance(hook, collections.Callable): self.hooks[event].append(hook) elif hasattr(hook, '__iter__'): self.hooks[event].extend((h for h in hook if isins...
def deregister_hook(self, event, hook):
    """Deregister a previously registered hook.  Returns True if the hook
    existed, False if not.
    """
    try:
        self.hooks[event].remove(hook)
    except ValueError:
        # Hook was never registered for this event.
        return False
    return True
def prepare(self):
    """Constructs a :class:`PreparedRequest <PreparedRequest>` for
    transmission and returns it.
    """
    prepared = PreparedRequest()
    prepared.prepare(
        method=self.method,
        url=self.url,
        headers=self.headers,
        files=self.files,
        data=self.data,
        json=self.json,
        params=self.params,
        auth=self.auth,
        cookies=self.cookies,
        hooks=self.hooks,
    )
    return prepared
def prepare(self, method=None, url=None, headers=None, files=None, data=None,
            params=None, auth=None, cookies=None, hooks=None, json=None):
    """Prepares the entire request with the given parameters."""
    # Step order preserved deliberately: later steps read state set by
    # earlier ones (e.g. the body and auth steps work on self.url/headers).
    self.prepare_method(method)
    self.prepare_url(url, params)
    self.prepare_headers(headers)
    self.prepare_cookies(cookies)
    self.prepare_body(data, files, json)
    self.prepare_auth(auth, url)
    self.prepare_hooks(hooks)
def prepare_method(self, method):
    """Prepares the given HTTP method."""
    if method is None:
        self.method = None
    else:
        self.method = to_native_string(method.upper())
'Prepares the given HTTP URL.'
def prepare_url(self, url, params):
if isinstance(url, bytes): url = url.decode('utf8') else: url = (unicode(url) if is_py2 else str(url)) if ((':' in url) and (not url.lower().startswith('http'))): self.url = url return try: (scheme, auth, host, port, path, query, fragment) = parse_url(url) exc...
def prepare_headers(self, headers):
    """Prepares the given HTTP headers."""
    if not headers:
        self.headers = CaseInsensitiveDict()
        return
    # Header names are normalised to native strings.
    self.headers = CaseInsensitiveDict(
        (to_native_string(name), value) for name, value in headers.items())
'Prepares the given HTTP body data.'
def prepare_body(self, data, files, json=None):
body = None content_type = None length = None if ((not data) and (json is not None)): content_type = 'application/json' body = complexjson.dumps(json) is_stream = all([hasattr(data, '__iter__'), (not isinstance(data, (basestring, list, tuple, dict)))]) try: length = super...
'Prepares the given HTTP auth data.'
def prepare_auth(self, auth, url=''):
if (auth is None): url_auth = get_auth_from_url(self.url) auth = (url_auth if any(url_auth) else None) if auth: if (isinstance(auth, tuple) and (len(auth) == 2)): auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_...
def prepare_cookies(self, cookies):
    """Prepares the given HTTP cookie data.

    This function eventually generates a ``Cookie`` header from the given
    cookies using cookielib.  Due to cookielib's design, the header will not
    be regenerated if it already exists, meaning this function can only be
    called once for the life of the :class:`PreparedRequest <PreparedRequest>`.
    """
    if isinstance(cookies, cookielib.CookieJar):
        self._cookies = cookies
    else:
        # Accept plain dicts (or None) and wrap them in a jar.
        self._cookies = cookiejar_from_dict(cookies)
    header = get_cookie_header(self._cookies, self)
    if header is not None:
        self.headers['Cookie'] = header
def prepare_hooks(self, hooks):
    """Prepares the given hooks."""
    # None/empty means no hooks; otherwise iterate the mapping's event keys.
    for event in (hooks or []):
        self.register_hook(event, hooks[event])
def __bool__(self):
    """Returns true if :attr:`status_code` is 'OK'."""
    return self.ok
def __nonzero__(self):
    """Returns true if :attr:`status_code` is 'OK'.

    Python 2 spelling of :meth:`__bool__`.
    """
    return self.ok
def __iter__(self):
    """Allows you to use a response as an iterator."""
    return self.iter_content(128)
@property
def is_redirect(self):
    """True if this Response is a well-formed HTTP redirect that could have
    been processed automatically (by :meth:`Session.resolve_redirects`).
    """
    return 'location' in self.headers and self.status_code in REDIRECT_STATI
@property
def is_permanent_redirect(self):
    """True if this Response one of the permanent versions of redirect"""
    permanent_codes = (codes.moved_permanently, codes.permanent_redirect)
    return 'location' in self.headers and self.status_code in permanent_codes
@property
def apparent_encoding(self):
    """The apparent encoding, provided by the chardet library"""
    detection = chardet.detect(self.content)
    return detection['encoding']
'Iterates over the response data. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. The chunk size is the number of bytes it should read into memory. This is not necessarily the length of each item returned as decoding can take place. If decode_unicode i...
def iter_content(self, chunk_size=1, decode_unicode=False):
def generate(): if hasattr(self.raw, 'stream'): try: for chunk in self.raw.stream(chunk_size, decode_content=True): (yield chunk) except ProtocolError as e: raise ChunkedEncodingError(e) except DecodeError as e: ...
'Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. .. note:: This method is not reentrant safe.'
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
pending = None for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): if (pending is not None): chunk = (pending + chunk) if delimiter: lines = chunk.split(delimiter) else: lines = chunk.splitlines() if (lines and...
'Content of the response, in bytes.'
@property def content(self):
if (self._content is False): try: if self._content_consumed: raise RuntimeError('The content for this response was already consumed') if (self.status_code == 0): self._content = None else: self._content ...
'Content of the response, in unicode. If Response.encoding is None, encoding will be guessed using ``chardet``. The encoding of the response content is determined based solely on HTTP headers, following RFC 2616 to the letter. If you can take advantage of non-HTTP knowledge to make a better guess at the encoding, you s...
@property def text(self):
content = None encoding = self.encoding if (not self.content): return str('') if (self.encoding is None): encoding = self.apparent_encoding try: content = str(self.content, encoding, errors='replace') except (LookupError, TypeError): content = str(self.content, er...
'Returns the json-encoded content of a response, if any. :param \*\*kwargs: Optional arguments that ``json.loads`` takes.'
def json(self, **kwargs):
if ((not self.encoding) and (len(self.content) > 3)): encoding = guess_json_utf(self.content) if (encoding is not None): try: return complexjson.loads(self.content.decode(encoding), **kwargs) except UnicodeDecodeError: pass return complexjs...
@property
def links(self):
    """Returns the parsed header links of the response, if any."""
    header = self.headers.get('link')
    resolved = {}
    if header:
        for link in parse_header_links(header):
            # Prefer 'rel' as the key; fall back to the URL itself.
            key = link.get('rel') or link.get('url')
            resolved[key] = link
    return resolved
'Raises stored :class:`HTTPError`, if one occurred.'
def raise_for_status(self):
http_error_msg = '' if (400 <= self.status_code < 500): http_error_msg = ('%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)) elif (500 <= self.status_code < 600): http_error_msg = ('%s Server Error: %s for url: %s' % (self.statu...
def close(self):
    """Releases the connection back to the pool.  Once this method has been
    called the underlying ``raw`` object must not be accessed again.

    *Note: Should not normally need to be called explicitly.*
    """
    if self._content_consumed:
        return self.raw.release_conn()
    # Content not fully read: the connection can't be reused safely.
    return self.raw.close()
def handle_redirect(self, r, **kwargs):
    """Reset num_401_calls counter on redirects."""
    if not r.is_redirect:
        return
    self._thread_local.num_401_calls = 1
'Takes the given response and tries digest-auth, if needed.'
def handle_401(self, r, **kwargs):
if (self._thread_local.pos is not None): r.request.body.seek(self._thread_local.pos) s_auth = r.headers.get('www-authenticate', '') if (('digest' in s_auth.lower()) and (self._thread_local.num_401_calls < 2)): self._thread_local.num_401_calls += 1 pat = re.compile('digest ', flags...
'Receives a Response. Returns a generator of Responses.'
def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, **adapter_kwargs):
i = 0 hist = [] while resp.is_redirect: prepared_request = req.copy() if (i > 0): hist.append(resp) new_hist = list(hist) resp.history = new_hist try: resp.content except (ChunkedEncodingError, ContentDecodingError, RuntimeError...