id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
26,900
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar._is_domain_match
def _is_domain_match(domain: str, hostname: str) -> bool: """Implements domain matching adhering to RFC 6265.""" if hostname == domain: return True if not hostname.endswith(domain): return False non_matching = hostname[:-len(domain)] if not non_matching.endswith("."): return False return not is_ip_address(hostname)
python
def _is_domain_match(domain: str, hostname: str) -> bool:
    """Check whether *hostname* domain-matches *domain* (RFC 6265, 5.1.3)."""
    # An exact match always succeeds.
    if hostname == domain:
        return True
    # Otherwise hostname must be a sub-domain: it has to end with the
    # cookie domain, and the remaining prefix must end with a dot.
    if hostname.endswith(domain):
        prefix = hostname[:len(hostname) - len(domain)]
        if prefix.endswith("."):
            # IP addresses never domain-match anything but themselves.
            return not is_ip_address(hostname)
    return False
[ "def", "_is_domain_match", "(", "domain", ":", "str", ",", "hostname", ":", "str", ")", "->", "bool", ":", "if", "hostname", "==", "domain", ":", "return", "True", "if", "not", "hostname", ".", "endswith", "(", "domain", ")", ":", "return", "False", "non_matching", "=", "hostname", "[", ":", "-", "len", "(", "domain", ")", "]", "if", "not", "non_matching", ".", "endswith", "(", "\".\"", ")", ":", "return", "False", "return", "not", "is_ip_address", "(", "hostname", ")" ]
Implements domain matching adhering to RFC 6265.
[ "Implements", "domain", "matching", "adhering", "to", "RFC", "6265", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L229-L242
26,901
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar._is_path_match
def _is_path_match(req_path: str, cookie_path: str) -> bool: """Implements path matching adhering to RFC 6265.""" if not req_path.startswith("/"): req_path = "/" if req_path == cookie_path: return True if not req_path.startswith(cookie_path): return False if cookie_path.endswith("/"): return True non_matching = req_path[len(cookie_path):] return non_matching.startswith("/")
python
def _is_path_match(req_path: str, cookie_path: str) -> bool: """Implements path matching adhering to RFC 6265.""" if not req_path.startswith("/"): req_path = "/" if req_path == cookie_path: return True if not req_path.startswith(cookie_path): return False if cookie_path.endswith("/"): return True non_matching = req_path[len(cookie_path):] return non_matching.startswith("/")
[ "def", "_is_path_match", "(", "req_path", ":", "str", ",", "cookie_path", ":", "str", ")", "->", "bool", ":", "if", "not", "req_path", ".", "startswith", "(", "\"/\"", ")", ":", "req_path", "=", "\"/\"", "if", "req_path", "==", "cookie_path", ":", "return", "True", "if", "not", "req_path", ".", "startswith", "(", "cookie_path", ")", ":", "return", "False", "if", "cookie_path", ".", "endswith", "(", "\"/\"", ")", ":", "return", "True", "non_matching", "=", "req_path", "[", "len", "(", "cookie_path", ")", ":", "]", "return", "non_matching", ".", "startswith", "(", "\"/\"", ")" ]
Implements path matching adhering to RFC 6265.
[ "Implements", "path", "matching", "adhering", "to", "RFC", "6265", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L245-L261
26,902
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar._parse_date
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]: """Implements date string parsing adhering to RFC 6265.""" if not date_str: return None found_time = False found_day = False found_month = False found_year = False hour = minute = second = 0 day = 0 month = 0 year = 0 for token_match in cls.DATE_TOKENS_RE.finditer(date_str): token = token_match.group("token") if not found_time: time_match = cls.DATE_HMS_TIME_RE.match(token) if time_match: found_time = True hour, minute, second = [ int(s) for s in time_match.groups()] continue if not found_day: day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) if day_match: found_day = True day = int(day_match.group()) continue if not found_month: month_match = cls.DATE_MONTH_RE.match(token) if month_match: found_month = True month = month_match.lastindex continue if not found_year: year_match = cls.DATE_YEAR_RE.match(token) if year_match: found_year = True year = int(year_match.group()) if 70 <= year <= 99: year += 1900 elif 0 <= year <= 69: year += 2000 if False in (found_day, found_month, found_year, found_time): return None if not 1 <= day <= 31: return None if year < 1601 or hour > 23 or minute > 59 or second > 59: return None return datetime.datetime(year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc)
python
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
    """Implements date string parsing adhering to RFC 6265.

    Returns an aware UTC ``datetime`` on success, or ``None`` when the
    string is empty, incomplete, or outside the valid ranges.
    """
    if not date_str:
        return None

    # One flag per date component; the first token matching a component's
    # grammar wins and later candidates for it are ignored.
    found_time = False
    found_day = False
    found_month = False
    found_year = False

    hour = minute = second = 0
    day = 0
    month = 0
    year = 0

    # Scan the delimiter-separated tokens in order (RFC 6265, 5.1.1).
    for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

        token = token_match.group("token")

        if not found_time:
            time_match = cls.DATE_HMS_TIME_RE.match(token)
            if time_match:
                found_time = True
                hour, minute, second = [
                    int(s) for s in time_match.groups()]
                continue

        if not found_day:
            day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
            if day_match:
                found_day = True
                day = int(day_match.group())
                continue

        if not found_month:
            month_match = cls.DATE_MONTH_RE.match(token)
            if month_match:
                found_month = True
                # DATE_MONTH_RE presumably has one alternative group per
                # month, so the index of the matched group is the month
                # number — TODO confirm against the regex definition.
                month = month_match.lastindex
                continue

        if not found_year:
            year_match = cls.DATE_YEAR_RE.match(token)
            if year_match:
                found_year = True
                year = int(year_match.group())
                # Two-digit years: 70-99 -> 1900s, 00-69 -> 2000s.
                if 70 <= year <= 99:
                    year += 1900
                elif 0 <= year <= 69:
                    year += 2000

    # All four components are required for a valid date.
    if False in (found_day, found_month, found_year, found_time):
        return None

    if not 1 <= day <= 31:
        return None

    # Range checks mandated by RFC 6265 (years before 1601 are invalid).
    if year < 1601 or hour > 23 or minute > 59 or second > 59:
        return None

    return datetime.datetime(year, month, day,
                             hour, minute, second,
                             tzinfo=datetime.timezone.utc)
[ "def", "_parse_date", "(", "cls", ",", "date_str", ":", "str", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "if", "not", "date_str", ":", "return", "None", "found_time", "=", "False", "found_day", "=", "False", "found_month", "=", "False", "found_year", "=", "False", "hour", "=", "minute", "=", "second", "=", "0", "day", "=", "0", "month", "=", "0", "year", "=", "0", "for", "token_match", "in", "cls", ".", "DATE_TOKENS_RE", ".", "finditer", "(", "date_str", ")", ":", "token", "=", "token_match", ".", "group", "(", "\"token\"", ")", "if", "not", "found_time", ":", "time_match", "=", "cls", ".", "DATE_HMS_TIME_RE", ".", "match", "(", "token", ")", "if", "time_match", ":", "found_time", "=", "True", "hour", ",", "minute", ",", "second", "=", "[", "int", "(", "s", ")", "for", "s", "in", "time_match", ".", "groups", "(", ")", "]", "continue", "if", "not", "found_day", ":", "day_match", "=", "cls", ".", "DATE_DAY_OF_MONTH_RE", ".", "match", "(", "token", ")", "if", "day_match", ":", "found_day", "=", "True", "day", "=", "int", "(", "day_match", ".", "group", "(", ")", ")", "continue", "if", "not", "found_month", ":", "month_match", "=", "cls", ".", "DATE_MONTH_RE", ".", "match", "(", "token", ")", "if", "month_match", ":", "found_month", "=", "True", "month", "=", "month_match", ".", "lastindex", "continue", "if", "not", "found_year", ":", "year_match", "=", "cls", ".", "DATE_YEAR_RE", ".", "match", "(", "token", ")", "if", "year_match", ":", "found_year", "=", "True", "year", "=", "int", "(", "year_match", ".", "group", "(", ")", ")", "if", "70", "<=", "year", "<=", "99", ":", "year", "+=", "1900", "elif", "0", "<=", "year", "<=", "69", ":", "year", "+=", "2000", "if", "False", "in", "(", "found_day", ",", "found_month", ",", "found_year", ",", "found_time", ")", ":", "return", "None", "if", "not", "1", "<=", "day", "<=", "31", ":", "return", "None", "if", "year", "<", "1601", "or", "hour", ">", "23", "or", "minute", ">", "59", "or", "second", 
">", "59", ":", "return", "None", "return", "datetime", ".", "datetime", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ",", "tzinfo", "=", "datetime", ".", "timezone", ".", "utc", ")" ]
Implements date string parsing adhering to RFC 6265.
[ "Implements", "date", "string", "parsing", "adhering", "to", "RFC", "6265", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L264-L327
26,903
aio-libs/aiohttp
examples/legacy/tcp_protocol_parser.py
my_protocol_parser
def my_protocol_parser(out, buf): """Parser is used with StreamParser for incremental protocol parsing. Parser is a generator function, but it is not a coroutine. Usually parsers are implemented as a state machine. more details in asyncio/parsers.py existing parsers: * HTTP protocol parsers asyncio/http/protocol.py * websocket parser asyncio/http/websocket.py """ while True: tp = yield from buf.read(5) if tp in (MSG_PING, MSG_PONG): # skip line yield from buf.skipuntil(b'\r\n') out.feed_data(Message(tp, None)) elif tp == MSG_STOP: out.feed_data(Message(tp, None)) elif tp == MSG_TEXT: # read text text = yield from buf.readuntil(b'\r\n') out.feed_data(Message(tp, text.strip().decode('utf-8'))) else: raise ValueError('Unknown protocol prefix.')
python
def my_protocol_parser(out, buf):
    """Parser is used with StreamParser for incremental protocol parsing.
    Parser is a generator function, but it is not a coroutine. Usually
    parsers are implemented as a state machine.

    more details in asyncio/parsers.py
    existing parsers:
       * HTTP protocol parsers asyncio/http/protocol.py
       * websocket parser asyncio/http/websocket.py
    """
    while True:
        # Every message starts with a fixed 5-byte type prefix.
        tp = yield from buf.read(5)
        if tp in (MSG_PING, MSG_PONG):
            # skip line: ping/pong carry no payload, discard to EOL
            yield from buf.skipuntil(b'\r\n')
            out.feed_data(Message(tp, None))
        elif tp == MSG_STOP:
            out.feed_data(Message(tp, None))
        elif tp == MSG_TEXT:
            # read text payload up to the line terminator
            text = yield from buf.readuntil(b'\r\n')
            out.feed_data(Message(tp, text.strip().decode('utf-8')))
        else:
            raise ValueError('Unknown protocol prefix.')
[ "def", "my_protocol_parser", "(", "out", ",", "buf", ")", ":", "while", "True", ":", "tp", "=", "yield", "from", "buf", ".", "read", "(", "5", ")", "if", "tp", "in", "(", "MSG_PING", ",", "MSG_PONG", ")", ":", "# skip line", "yield", "from", "buf", ".", "skipuntil", "(", "b'\\r\\n'", ")", "out", ".", "feed_data", "(", "Message", "(", "tp", ",", "None", ")", ")", "elif", "tp", "==", "MSG_STOP", ":", "out", ".", "feed_data", "(", "Message", "(", "tp", ",", "None", ")", ")", "elif", "tp", "==", "MSG_TEXT", ":", "# read text", "text", "=", "yield", "from", "buf", ".", "readuntil", "(", "b'\\r\\n'", ")", "out", ".", "feed_data", "(", "Message", "(", "tp", ",", "text", ".", "strip", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "'Unknown protocol prefix.'", ")" ]
Parser is used with StreamParser for incremental protocol parsing. Parser is a generator function, but it is not a coroutine. Usually parsers are implemented as a state machine. more details in asyncio/parsers.py existing parsers: * HTTP protocol parsers asyncio/http/protocol.py * websocket parser asyncio/http/websocket.py
[ "Parser", "is", "used", "with", "StreamParser", "for", "incremental", "protocol", "parsing", ".", "Parser", "is", "a", "generator", "function", "but", "it", "is", "not", "a", "coroutine", ".", "Usually", "parsers", "are", "implemented", "as", "a", "state", "machine", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/examples/legacy/tcp_protocol_parser.py#L23-L46
26,904
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.clone
def clone(self, *, method: str=sentinel, rel_url: StrOrURL=sentinel, headers: LooseHeaders=sentinel, scheme: str=sentinel, host: str=sentinel, remote: str=sentinel) -> 'BaseRequest': """Clone itself with replacement some attributes. Creates and returns a new instance of Request object. If no parameters are given, an exact copy is returned. If a parameter is not passed, it will reuse the one from the current request object. """ if self._read_bytes: raise RuntimeError("Cannot clone request " "after reading its content") dct = {} # type: Dict[str, Any] if method is not sentinel: dct['method'] = method if rel_url is not sentinel: new_url = URL(rel_url) dct['url'] = new_url dct['path'] = str(new_url) if headers is not sentinel: # a copy semantic dct['headers'] = CIMultiDictProxy(CIMultiDict(headers)) dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items()) message = self._message._replace(**dct) kwargs = {} if scheme is not sentinel: kwargs['scheme'] = scheme if host is not sentinel: kwargs['host'] = host if remote is not sentinel: kwargs['remote'] = remote return self.__class__( message, self._payload, self._protocol, self._payload_writer, self._task, self._loop, client_max_size=self._client_max_size, state=self._state.copy(), **kwargs)
python
def clone(self, *, method: str=sentinel, rel_url: StrOrURL=sentinel,
          headers: LooseHeaders=sentinel, scheme: str=sentinel,
          host: str=sentinel, remote: str=sentinel) -> 'BaseRequest':
    """Clone itself with replacement some attributes.

    Creates and returns a new instance of Request object. If no parameters
    are given, an exact copy is returned. If a parameter is not passed, it
    will reuse the one from the current request object.

    Raises RuntimeError when the payload has already been read.
    """
    if self._read_bytes:
        # The payload has been consumed; a clone could not re-read it,
        # so refuse instead of silently producing a body-less request.
        raise RuntimeError("Cannot clone request "
                           "after reading its content")

    # Collect message-level overrides; `sentinel` marks "not passed".
    dct = {}  # type: Dict[str, Any]
    if method is not sentinel:
        dct['method'] = method
    if rel_url is not sentinel:
        new_url = URL(rel_url)
        dct['url'] = new_url
        dct['path'] = str(new_url)
    if headers is not sentinel:
        # a copy semantic: freeze the caller's headers into a proxy and
        # keep raw_headers consistent with the new header set
        dct['headers'] = CIMultiDictProxy(CIMultiDict(headers))
        dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8'))
                                   for k, v in headers.items())

    # _message is a namedtuple-like object; _replace returns a copy with
    # the collected overrides applied.
    message = self._message._replace(**dct)

    # Constructor-level overrides are passed through as keyword args.
    kwargs = {}
    if scheme is not sentinel:
        kwargs['scheme'] = scheme
    if host is not sentinel:
        kwargs['host'] = host
    if remote is not sentinel:
        kwargs['remote'] = remote

    return self.__class__(
        message,
        self._payload,
        self._protocol,
        self._payload_writer,
        self._task,
        self._loop,
        client_max_size=self._client_max_size,
        state=self._state.copy(),
        **kwargs)
[ "def", "clone", "(", "self", ",", "*", ",", "method", ":", "str", "=", "sentinel", ",", "rel_url", ":", "StrOrURL", "=", "sentinel", ",", "headers", ":", "LooseHeaders", "=", "sentinel", ",", "scheme", ":", "str", "=", "sentinel", ",", "host", ":", "str", "=", "sentinel", ",", "remote", ":", "str", "=", "sentinel", ")", "->", "'BaseRequest'", ":", "if", "self", ".", "_read_bytes", ":", "raise", "RuntimeError", "(", "\"Cannot clone request \"", "\"after reading its content\"", ")", "dct", "=", "{", "}", "# type: Dict[str, Any]", "if", "method", "is", "not", "sentinel", ":", "dct", "[", "'method'", "]", "=", "method", "if", "rel_url", "is", "not", "sentinel", ":", "new_url", "=", "URL", "(", "rel_url", ")", "dct", "[", "'url'", "]", "=", "new_url", "dct", "[", "'path'", "]", "=", "str", "(", "new_url", ")", "if", "headers", "is", "not", "sentinel", ":", "# a copy semantic", "dct", "[", "'headers'", "]", "=", "CIMultiDictProxy", "(", "CIMultiDict", "(", "headers", ")", ")", "dct", "[", "'raw_headers'", "]", "=", "tuple", "(", "(", "k", ".", "encode", "(", "'utf-8'", ")", ",", "v", ".", "encode", "(", "'utf-8'", ")", ")", "for", "k", ",", "v", "in", "headers", ".", "items", "(", ")", ")", "message", "=", "self", ".", "_message", ".", "_replace", "(", "*", "*", "dct", ")", "kwargs", "=", "{", "}", "if", "scheme", "is", "not", "sentinel", ":", "kwargs", "[", "'scheme'", "]", "=", "scheme", "if", "host", "is", "not", "sentinel", ":", "kwargs", "[", "'host'", "]", "=", "host", "if", "remote", "is", "not", "sentinel", ":", "kwargs", "[", "'remote'", "]", "=", "remote", "return", "self", ".", "__class__", "(", "message", ",", "self", ".", "_payload", ",", "self", ".", "_protocol", ",", "self", ".", "_payload_writer", ",", "self", ".", "_task", ",", "self", ".", "_loop", ",", "client_max_size", "=", "self", ".", "_client_max_size", ",", "state", "=", "self", ".", "_state", ".", "copy", "(", ")", ",", "*", "*", "kwargs", ")" ]
Clone itself with replacement some attributes. Creates and returns a new instance of Request object. If no parameters are given, an exact copy is returned. If a parameter is not passed, it will reuse the one from the current request object.
[ "Clone", "itself", "with", "replacement", "some", "attributes", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L148-L196
26,905
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.host
def host(self) -> str: """Hostname of the request. Hostname is resolved in this order: - overridden value by .clone(host=new_host) call. - HOST HTTP header - socket.getfqdn() value """ host = self._message.headers.get(hdrs.HOST) if host is not None: return host else: return socket.getfqdn()
python
def host(self) -> str:
    """Hostname of the request.

    Hostname is resolved in this order:

    - overridden value by .clone(host=new_host) call.
    - HOST HTTP header
    - socket.getfqdn() value
    """
    header_value = self._message.headers.get(hdrs.HOST)
    if header_value is None:
        # No Host header: fall back to this machine's FQDN.
        return socket.getfqdn()
    return header_value
[ "def", "host", "(", "self", ")", "->", "str", ":", "host", "=", "self", ".", "_message", ".", "headers", ".", "get", "(", "hdrs", ".", "HOST", ")", "if", "host", "is", "not", "None", ":", "return", "host", "else", ":", "return", "socket", ".", "getfqdn", "(", ")" ]
Hostname of the request. Hostname is resolved in this order: - overridden value by .clone(host=new_host) call. - HOST HTTP header - socket.getfqdn() value
[ "Hostname", "of", "the", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L353-L366
26,906
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.remote
def remote(self) -> Optional[str]: """Remote IP of client initiated HTTP request. The IP is resolved in this order: - overridden value by .clone(remote=new_remote) call. - peername of opened socket """ if isinstance(self._transport_peername, (list, tuple)): return self._transport_peername[0] else: return self._transport_peername
python
def remote(self) -> Optional[str]:
    """Remote IP of client initiated HTTP request.

    The IP is resolved in this order:

    - overridden value by .clone(remote=new_remote) call.
    - peername of opened socket
    """
    peername = self._transport_peername
    # INET/INET6 peernames are (host, port[, ...]) sequences; other
    # transports (e.g. UNIX sockets) yield a plain value, passed through.
    return peername[0] if isinstance(peername, (list, tuple)) else peername
[ "def", "remote", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "if", "isinstance", "(", "self", ".", "_transport_peername", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "self", ".", "_transport_peername", "[", "0", "]", "else", ":", "return", "self", ".", "_transport_peername" ]
Remote IP of client initiated HTTP request. The IP is resolved in this order: - overridden value by .clone(remote=new_remote) call. - peername of opened socket
[ "Remote", "IP", "of", "client", "initiated", "HTTP", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L369-L380
26,907
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest._http_date
def _http_date(_date_str: str) -> Optional[datetime.datetime]: """Process a date string, return a datetime object """ if _date_str is not None: timetuple = parsedate(_date_str) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
python
def _http_date(_date_str: str) -> Optional[datetime.datetime]: """Process a date string, return a datetime object """ if _date_str is not None: timetuple = parsedate(_date_str) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
[ "def", "_http_date", "(", "_date_str", ":", "str", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "if", "_date_str", "is", "not", "None", ":", "timetuple", "=", "parsedate", "(", "_date_str", ")", "if", "timetuple", "is", "not", "None", ":", "return", "datetime", ".", "datetime", "(", "*", "timetuple", "[", ":", "6", "]", ",", "tzinfo", "=", "datetime", ".", "timezone", ".", "utc", ")", "return", "None" ]
Process a date string, return a datetime object
[ "Process", "a", "date", "string", "return", "a", "datetime", "object" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L436-L444
26,908
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.if_modified_since
def if_modified_since(self) -> Optional[datetime.datetime]: """The value of If-Modified-Since HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
python
def if_modified_since(self) -> Optional[datetime.datetime]:
    """The value of If-Modified-Since HTTP header, or None.

    This header is represented as a `datetime` object.
    """
    raw_header = self.headers.get(hdrs.IF_MODIFIED_SINCE)
    return self._http_date(raw_header)
[ "def", "if_modified_since", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "return", "self", ".", "_http_date", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "IF_MODIFIED_SINCE", ")", ")" ]
The value of If-Modified-Since HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "If", "-", "Modified", "-", "Since", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L447-L452
26,909
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.if_unmodified_since
def if_unmodified_since(self) -> Optional[datetime.datetime]: """The value of If-Unmodified-Since HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
python
def if_unmodified_since(self) -> Optional[datetime.datetime]:
    """The value of If-Unmodified-Since HTTP header, or None.

    This header is represented as a `datetime` object.
    """
    raw_header = self.headers.get(hdrs.IF_UNMODIFIED_SINCE)
    return self._http_date(raw_header)
[ "def", "if_unmodified_since", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "return", "self", ".", "_http_date", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "IF_UNMODIFIED_SINCE", ")", ")" ]
The value of If-Unmodified-Since HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "If", "-", "Unmodified", "-", "Since", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L455-L460
26,910
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.if_range
def if_range(self) -> Optional[datetime.datetime]: """The value of If-Range HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_RANGE))
python
def if_range(self) -> Optional[datetime.datetime]:
    """The value of If-Range HTTP header, or None.

    This header is represented as a `datetime` object.
    """
    raw_header = self.headers.get(hdrs.IF_RANGE)
    return self._http_date(raw_header)
[ "def", "if_range", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "return", "self", ".", "_http_date", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "IF_RANGE", ")", ")" ]
The value of If-Range HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "If", "-", "Range", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L463-L468
26,911
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.cookies
def cookies(self) -> Mapping[str, str]: """Return request cookies. A read-only dictionary-like object. """ raw = self.headers.get(hdrs.COOKIE, '') parsed = SimpleCookie(raw) return MappingProxyType( {key: val.value for key, val in parsed.items()})
python
def cookies(self) -> Mapping[str, str]:
    """Return request cookies.

    A read-only dictionary-like object.
    """
    header = self.headers.get(hdrs.COOKIE, '')
    jar = SimpleCookie(header)
    # Expose only the cookie values, frozen behind a read-only proxy.
    values = {name: morsel.value for name, morsel in jar.items()}
    return MappingProxyType(values)
[ "def", "cookies", "(", "self", ")", "->", "Mapping", "[", "str", ",", "str", "]", ":", "raw", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "COOKIE", ",", "''", ")", "parsed", "=", "SimpleCookie", "(", "raw", ")", "return", "MappingProxyType", "(", "{", "key", ":", "val", ".", "value", "for", "key", ",", "val", "in", "parsed", ".", "items", "(", ")", "}", ")" ]
Return request cookies. A read-only dictionary-like object.
[ "Return", "request", "cookies", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L476-L484
26,912
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.http_range
def http_range(self) -> slice: """The content of Range HTTP header. Return a slice instance. """ rng = self._headers.get(hdrs.RANGE) start, end = None, None if rng is not None: try: pattern = r'^bytes=(\d*)-(\d*)$' start, end = re.findall(pattern, rng)[0] except IndexError: # pattern was not found in header raise ValueError("range not in acceptable format") end = int(end) if end else None start = int(start) if start else None if start is None and end is not None: # end with no start is to return tail of content start = -end end = None if start is not None and end is not None: # end is inclusive in range header, exclusive for slice end += 1 if start >= end: raise ValueError('start cannot be after end') if start is end is None: # No valid range supplied raise ValueError('No start or end of range specified') return slice(start, end, 1)
python
def http_range(self) -> slice:
    """The content of Range HTTP header.

    Return a slice instance.
    """
    header = self._headers.get(hdrs.RANGE)
    if header is None:
        # No Range header: an unbounded slice covers the whole content.
        return slice(None, None, 1)

    match = re.match(r'^bytes=(\d*)-(\d*)$', header)
    if match is None:
        # pattern was not found in header
        raise ValueError("range not in acceptable format")
    raw_start, raw_end = match.groups()

    start = int(raw_start) if raw_start else None
    end = int(raw_end) if raw_end else None

    if start is None:
        if end is None:
            # No valid range supplied
            raise ValueError('No start or end of range specified')
        # end with no start is to return tail of content
        start, end = -end, None
    elif end is not None:
        # end is inclusive in range header, exclusive for slice
        end += 1
        if start >= end:
            raise ValueError('start cannot be after end')

    return slice(start, end, 1)
[ "def", "http_range", "(", "self", ")", "->", "slice", ":", "rng", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "RANGE", ")", "start", ",", "end", "=", "None", ",", "None", "if", "rng", "is", "not", "None", ":", "try", ":", "pattern", "=", "r'^bytes=(\\d*)-(\\d*)$'", "start", ",", "end", "=", "re", ".", "findall", "(", "pattern", ",", "rng", ")", "[", "0", "]", "except", "IndexError", ":", "# pattern was not found in header", "raise", "ValueError", "(", "\"range not in acceptable format\"", ")", "end", "=", "int", "(", "end", ")", "if", "end", "else", "None", "start", "=", "int", "(", "start", ")", "if", "start", "else", "None", "if", "start", "is", "None", "and", "end", "is", "not", "None", ":", "# end with no start is to return tail of content", "start", "=", "-", "end", "end", "=", "None", "if", "start", "is", "not", "None", "and", "end", "is", "not", "None", ":", "# end is inclusive in range header, exclusive for slice", "end", "+=", "1", "if", "start", ">=", "end", ":", "raise", "ValueError", "(", "'start cannot be after end'", ")", "if", "start", "is", "end", "is", "None", ":", "# No valid range supplied", "raise", "ValueError", "(", "'No start or end of range specified'", ")", "return", "slice", "(", "start", ",", "end", ",", "1", ")" ]
The content of Range HTTP header. Return a slice instance.
[ "The", "content", "of", "Range", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L487-L520
26,913
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.has_body
def has_body(self) -> bool: """Return True if request's HTTP BODY can be read, False otherwise.""" warnings.warn( "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2) return not self._payload.at_eof()
python
def has_body(self) -> bool:
    """Return True if request's HTTP BODY can be read, False otherwise."""
    # Kept for backwards compatibility; .can_read_body is the replacement.
    warnings.warn("Deprecated, use .can_read_body #2005",
                  DeprecationWarning, stacklevel=2)
    payload_exhausted = self._payload.at_eof()
    return not payload_exhausted
[ "def", "has_body", "(", "self", ")", "->", "bool", ":", "warnings", ".", "warn", "(", "\"Deprecated, use .can_read_body #2005\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "return", "not", "self", ".", "_payload", ".", "at_eof", "(", ")" ]
Return True if request's HTTP BODY can be read, False otherwise.
[ "Return", "True", "if", "request", "s", "HTTP", "BODY", "can", "be", "read", "False", "otherwise", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L528-L533
26,914
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.read
async def read(self) -> bytes: """Read request body if present. Returns bytes object with full request content. """ if self._read_bytes is None: body = bytearray() while True: chunk = await self._payload.readany() body.extend(chunk) if self._client_max_size: body_size = len(body) if body_size >= self._client_max_size: raise HTTPRequestEntityTooLarge( max_size=self._client_max_size, actual_size=body_size ) if not chunk: break self._read_bytes = bytes(body) return self._read_bytes
python
async def read(self) -> bytes:
    """Read request body if present.

    Returns bytes object with full request content.
    """
    if self._read_bytes is not None:
        # Body already consumed once; serve the cached copy.
        return self._read_bytes

    buffer = bytearray()
    while True:
        chunk = await self._payload.readany()
        buffer.extend(chunk)
        # Enforce the configured size limit (falsy value disables it).
        if self._client_max_size:
            current_size = len(buffer)
            if current_size >= self._client_max_size:
                raise HTTPRequestEntityTooLarge(
                    max_size=self._client_max_size,
                    actual_size=current_size)
        if not chunk:
            # readany() returns an empty chunk at end of payload.
            break

    self._read_bytes = bytes(buffer)
    return self._read_bytes
[ "async", "def", "read", "(", "self", ")", "->", "bytes", ":", "if", "self", ".", "_read_bytes", "is", "None", ":", "body", "=", "bytearray", "(", ")", "while", "True", ":", "chunk", "=", "await", "self", ".", "_payload", ".", "readany", "(", ")", "body", ".", "extend", "(", "chunk", ")", "if", "self", ".", "_client_max_size", ":", "body_size", "=", "len", "(", "body", ")", "if", "body_size", ">=", "self", ".", "_client_max_size", ":", "raise", "HTTPRequestEntityTooLarge", "(", "max_size", "=", "self", ".", "_client_max_size", ",", "actual_size", "=", "body_size", ")", "if", "not", "chunk", ":", "break", "self", ".", "_read_bytes", "=", "bytes", "(", "body", ")", "return", "self", ".", "_read_bytes" ]
Read request body if present. Returns bytes object with full request content.
[ "Read", "request", "body", "if", "present", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L553-L573
26,915
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.text
async def text(self) -> str: """Return BODY as text using encoding from .charset.""" bytes_body = await self.read() encoding = self.charset or 'utf-8' return bytes_body.decode(encoding)
python
async def text(self) -> str: """Return BODY as text using encoding from .charset.""" bytes_body = await self.read() encoding = self.charset or 'utf-8' return bytes_body.decode(encoding)
[ "async", "def", "text", "(", "self", ")", "->", "str", ":", "bytes_body", "=", "await", "self", ".", "read", "(", ")", "encoding", "=", "self", ".", "charset", "or", "'utf-8'", "return", "bytes_body", ".", "decode", "(", "encoding", ")" ]
Return BODY as text using encoding from .charset.
[ "Return", "BODY", "as", "text", "using", "encoding", "from", ".", "charset", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L575-L579
26,916
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.json
async def json(self, *, loads: JSONDecoder=DEFAULT_JSON_DECODER) -> Any: """Return BODY as JSON.""" body = await self.text() return loads(body)
python
async def json(self, *, loads: JSONDecoder=DEFAULT_JSON_DECODER) -> Any: """Return BODY as JSON.""" body = await self.text() return loads(body)
[ "async", "def", "json", "(", "self", ",", "*", ",", "loads", ":", "JSONDecoder", "=", "DEFAULT_JSON_DECODER", ")", "->", "Any", ":", "body", "=", "await", "self", ".", "text", "(", ")", "return", "loads", "(", "body", ")" ]
Return BODY as JSON.
[ "Return", "BODY", "as", "JSON", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L581-L584
26,917
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.post
async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]': """Return POST parameters.""" if self._post is not None: return self._post if self._method not in self.POST_METHODS: self._post = MultiDictProxy(MultiDict()) return self._post content_type = self.content_type if (content_type not in ('', 'application/x-www-form-urlencoded', 'multipart/form-data')): self._post = MultiDictProxy(MultiDict()) return self._post out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]] if content_type == 'multipart/form-data': multipart = await self.multipart() max_size = self._client_max_size field = await multipart.next() while field is not None: size = 0 content_type = field.headers.get(hdrs.CONTENT_TYPE) if field.filename: # store file in temp file tmp = tempfile.TemporaryFile() chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) tmp.write(chunk) size += len(chunk) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) tmp.seek(0) ff = FileField(field.name, field.filename, cast(io.BufferedReader, tmp), content_type, field.headers) out.add(field.name, ff) else: value = await field.read(decode=True) if content_type is None or \ content_type.startswith('text/'): charset = field.get_charset(default='utf-8') value = value.decode(charset) out.add(field.name, value) size += len(value) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) field = await multipart.next() else: data = await self.read() if data: charset = self.charset or 'utf-8' out.extend( parse_qsl( data.rstrip().decode(charset), keep_blank_values=True, encoding=charset)) self._post = MultiDictProxy(out) return self._post
python
async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]': """Return POST parameters.""" if self._post is not None: return self._post if self._method not in self.POST_METHODS: self._post = MultiDictProxy(MultiDict()) return self._post content_type = self.content_type if (content_type not in ('', 'application/x-www-form-urlencoded', 'multipart/form-data')): self._post = MultiDictProxy(MultiDict()) return self._post out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]] if content_type == 'multipart/form-data': multipart = await self.multipart() max_size = self._client_max_size field = await multipart.next() while field is not None: size = 0 content_type = field.headers.get(hdrs.CONTENT_TYPE) if field.filename: # store file in temp file tmp = tempfile.TemporaryFile() chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) tmp.write(chunk) size += len(chunk) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) tmp.seek(0) ff = FileField(field.name, field.filename, cast(io.BufferedReader, tmp), content_type, field.headers) out.add(field.name, ff) else: value = await field.read(decode=True) if content_type is None or \ content_type.startswith('text/'): charset = field.get_charset(default='utf-8') value = value.decode(charset) out.add(field.name, value) size += len(value) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) field = await multipart.next() else: data = await self.read() if data: charset = self.charset or 'utf-8' out.extend( parse_qsl( data.rstrip().decode(charset), keep_blank_values=True, encoding=charset)) self._post = MultiDictProxy(out) return self._post
[ "async", "def", "post", "(", "self", ")", "->", "'MultiDictProxy[Union[str, bytes, FileField]]'", ":", "if", "self", ".", "_post", "is", "not", "None", ":", "return", "self", ".", "_post", "if", "self", ".", "_method", "not", "in", "self", ".", "POST_METHODS", ":", "self", ".", "_post", "=", "MultiDictProxy", "(", "MultiDict", "(", ")", ")", "return", "self", ".", "_post", "content_type", "=", "self", ".", "content_type", "if", "(", "content_type", "not", "in", "(", "''", ",", "'application/x-www-form-urlencoded'", ",", "'multipart/form-data'", ")", ")", ":", "self", ".", "_post", "=", "MultiDictProxy", "(", "MultiDict", "(", ")", ")", "return", "self", ".", "_post", "out", "=", "MultiDict", "(", ")", "# type: MultiDict[Union[str, bytes, FileField]]", "if", "content_type", "==", "'multipart/form-data'", ":", "multipart", "=", "await", "self", ".", "multipart", "(", ")", "max_size", "=", "self", ".", "_client_max_size", "field", "=", "await", "multipart", ".", "next", "(", ")", "while", "field", "is", "not", "None", ":", "size", "=", "0", "content_type", "=", "field", ".", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ")", "if", "field", ".", "filename", ":", "# store file in temp file", "tmp", "=", "tempfile", ".", "TemporaryFile", "(", ")", "chunk", "=", "await", "field", ".", "read_chunk", "(", "size", "=", "2", "**", "16", ")", "while", "chunk", ":", "chunk", "=", "field", ".", "decode", "(", "chunk", ")", "tmp", ".", "write", "(", "chunk", ")", "size", "+=", "len", "(", "chunk", ")", "if", "0", "<", "max_size", "<", "size", ":", "raise", "HTTPRequestEntityTooLarge", "(", "max_size", "=", "max_size", ",", "actual_size", "=", "size", ")", "chunk", "=", "await", "field", ".", "read_chunk", "(", "size", "=", "2", "**", "16", ")", "tmp", ".", "seek", "(", "0", ")", "ff", "=", "FileField", "(", "field", ".", "name", ",", "field", ".", "filename", ",", "cast", "(", "io", ".", "BufferedReader", ",", "tmp", ")", ",", "content_type", ",", "field", ".", 
"headers", ")", "out", ".", "add", "(", "field", ".", "name", ",", "ff", ")", "else", ":", "value", "=", "await", "field", ".", "read", "(", "decode", "=", "True", ")", "if", "content_type", "is", "None", "or", "content_type", ".", "startswith", "(", "'text/'", ")", ":", "charset", "=", "field", ".", "get_charset", "(", "default", "=", "'utf-8'", ")", "value", "=", "value", ".", "decode", "(", "charset", ")", "out", ".", "add", "(", "field", ".", "name", ",", "value", ")", "size", "+=", "len", "(", "value", ")", "if", "0", "<", "max_size", "<", "size", ":", "raise", "HTTPRequestEntityTooLarge", "(", "max_size", "=", "max_size", ",", "actual_size", "=", "size", ")", "field", "=", "await", "multipart", ".", "next", "(", ")", "else", ":", "data", "=", "await", "self", ".", "read", "(", ")", "if", "data", ":", "charset", "=", "self", ".", "charset", "or", "'utf-8'", "out", ".", "extend", "(", "parse_qsl", "(", "data", ".", "rstrip", "(", ")", ".", "decode", "(", "charset", ")", ",", "keep_blank_values", "=", "True", ",", "encoding", "=", "charset", ")", ")", "self", ".", "_post", "=", "MultiDictProxy", "(", "out", ")", "return", "self", ".", "_post" ]
Return POST parameters.
[ "Return", "POST", "parameters", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L590-L662
26,918
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.shutdown
async def shutdown(self, timeout: Optional[float]=15.0) -> None: """Worker process is about to exit, we need cleanup everything and stop accepting requests. It is especially important for keep-alive connections.""" self._force_close = True if self._keepalive_handle is not None: self._keepalive_handle.cancel() if self._waiter: self._waiter.cancel() # wait for handlers with suppress(asyncio.CancelledError, asyncio.TimeoutError): with CeilTimeout(timeout, loop=self._loop): if (self._error_handler is not None and not self._error_handler.done()): await self._error_handler if (self._task_handler is not None and not self._task_handler.done()): await self._task_handler # force-close non-idle handler if self._task_handler is not None: self._task_handler.cancel() if self.transport is not None: self.transport.close() self.transport = None
python
async def shutdown(self, timeout: Optional[float]=15.0) -> None: """Worker process is about to exit, we need cleanup everything and stop accepting requests. It is especially important for keep-alive connections.""" self._force_close = True if self._keepalive_handle is not None: self._keepalive_handle.cancel() if self._waiter: self._waiter.cancel() # wait for handlers with suppress(asyncio.CancelledError, asyncio.TimeoutError): with CeilTimeout(timeout, loop=self._loop): if (self._error_handler is not None and not self._error_handler.done()): await self._error_handler if (self._task_handler is not None and not self._task_handler.done()): await self._task_handler # force-close non-idle handler if self._task_handler is not None: self._task_handler.cancel() if self.transport is not None: self.transport.close() self.transport = None
[ "async", "def", "shutdown", "(", "self", ",", "timeout", ":", "Optional", "[", "float", "]", "=", "15.0", ")", "->", "None", ":", "self", ".", "_force_close", "=", "True", "if", "self", ".", "_keepalive_handle", "is", "not", "None", ":", "self", ".", "_keepalive_handle", ".", "cancel", "(", ")", "if", "self", ".", "_waiter", ":", "self", ".", "_waiter", ".", "cancel", "(", ")", "# wait for handlers", "with", "suppress", "(", "asyncio", ".", "CancelledError", ",", "asyncio", ".", "TimeoutError", ")", ":", "with", "CeilTimeout", "(", "timeout", ",", "loop", "=", "self", ".", "_loop", ")", ":", "if", "(", "self", ".", "_error_handler", "is", "not", "None", "and", "not", "self", ".", "_error_handler", ".", "done", "(", ")", ")", ":", "await", "self", ".", "_error_handler", "if", "(", "self", ".", "_task_handler", "is", "not", "None", "and", "not", "self", ".", "_task_handler", ".", "done", "(", ")", ")", ":", "await", "self", ".", "_task_handler", "# force-close non-idle handler", "if", "self", ".", "_task_handler", "is", "not", "None", ":", "self", ".", "_task_handler", ".", "cancel", "(", ")", "if", "self", ".", "transport", "is", "not", "None", ":", "self", ".", "transport", ".", "close", "(", ")", "self", ".", "transport", "=", "None" ]
Worker process is about to exit, we need cleanup everything and stop accepting requests. It is especially important for keep-alive connections.
[ "Worker", "process", "is", "about", "to", "exit", "we", "need", "cleanup", "everything", "and", "stop", "accepting", "requests", ".", "It", "is", "especially", "important", "for", "keep", "-", "alive", "connections", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L184-L213
26,919
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.keep_alive
def keep_alive(self, val: bool) -> None: """Set keep-alive connection mode. :param bool val: new state. """ self._keepalive = val if self._keepalive_handle: self._keepalive_handle.cancel() self._keepalive_handle = None
python
def keep_alive(self, val: bool) -> None: """Set keep-alive connection mode. :param bool val: new state. """ self._keepalive = val if self._keepalive_handle: self._keepalive_handle.cancel() self._keepalive_handle = None
[ "def", "keep_alive", "(", "self", ",", "val", ":", "bool", ")", "->", "None", ":", "self", ".", "_keepalive", "=", "val", "if", "self", ".", "_keepalive_handle", ":", "self", ".", "_keepalive_handle", ".", "cancel", "(", ")", "self", ".", "_keepalive_handle", "=", "None" ]
Set keep-alive connection mode. :param bool val: new state.
[ "Set", "keep", "-", "alive", "connection", "mode", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L316-L324
26,920
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.close
def close(self) -> None: """Stop accepting new pipelinig messages and close connection when handlers done processing messages""" self._close = True if self._waiter: self._waiter.cancel()
python
def close(self) -> None: """Stop accepting new pipelinig messages and close connection when handlers done processing messages""" self._close = True if self._waiter: self._waiter.cancel()
[ "def", "close", "(", "self", ")", "->", "None", ":", "self", ".", "_close", "=", "True", "if", "self", ".", "_waiter", ":", "self", ".", "_waiter", ".", "cancel", "(", ")" ]
Stop accepting new pipelinig messages and close connection when handlers done processing messages
[ "Stop", "accepting", "new", "pipelinig", "messages", "and", "close", "connection", "when", "handlers", "done", "processing", "messages" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L326-L331
26,921
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.force_close
def force_close(self) -> None: """Force close connection""" self._force_close = True if self._waiter: self._waiter.cancel() if self.transport is not None: self.transport.close() self.transport = None
python
def force_close(self) -> None: """Force close connection""" self._force_close = True if self._waiter: self._waiter.cancel() if self.transport is not None: self.transport.close() self.transport = None
[ "def", "force_close", "(", "self", ")", "->", "None", ":", "self", ".", "_force_close", "=", "True", "if", "self", ".", "_waiter", ":", "self", ".", "_waiter", ".", "cancel", "(", ")", "if", "self", ".", "transport", "is", "not", "None", ":", "self", ".", "transport", ".", "close", "(", ")", "self", ".", "transport", "=", "None" ]
Force close connection
[ "Force", "close", "connection" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L333-L340
26,922
aio-libs/aiohttp
aiohttp/web.py
run_app
def run_app(app: Union[Application, Awaitable[Application]], *, host: Optional[str]=None, port: Optional[int]=None, path: Optional[str]=None, sock: Optional[socket.socket]=None, shutdown_timeout: float=60.0, ssl_context: Optional[SSLContext]=None, print: Optional[Callable[..., None]]=print, backlog: int=128, access_log_class: Type[AbstractAccessLogger]=AccessLogger, access_log_format: str=AccessLogger.LOG_FORMAT, access_log: Optional[logging.Logger]=access_logger, handle_signals: bool=True, reuse_address: Optional[bool]=None, reuse_port: Optional[bool]=None) -> None: """Run an app locally""" loop = asyncio.get_event_loop() # Configure if and only if in debugging mode and using the default logger if loop.get_debug() and access_log and access_log.name == 'aiohttp.access': if access_log.level == logging.NOTSET: access_log.setLevel(logging.DEBUG) if not access_log.hasHandlers(): access_log.addHandler(logging.StreamHandler()) try: loop.run_until_complete(_run_app(app, host=host, port=port, path=path, sock=sock, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, print=print, backlog=backlog, access_log_class=access_log_class, access_log_format=access_log_format, access_log=access_log, handle_signals=handle_signals, reuse_address=reuse_address, reuse_port=reuse_port)) except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: _cancel_all_tasks(loop) if sys.version_info >= (3, 6): # don't use PY_36 to pass mypy loop.run_until_complete(loop.shutdown_asyncgens()) loop.close()
python
def run_app(app: Union[Application, Awaitable[Application]], *, host: Optional[str]=None, port: Optional[int]=None, path: Optional[str]=None, sock: Optional[socket.socket]=None, shutdown_timeout: float=60.0, ssl_context: Optional[SSLContext]=None, print: Optional[Callable[..., None]]=print, backlog: int=128, access_log_class: Type[AbstractAccessLogger]=AccessLogger, access_log_format: str=AccessLogger.LOG_FORMAT, access_log: Optional[logging.Logger]=access_logger, handle_signals: bool=True, reuse_address: Optional[bool]=None, reuse_port: Optional[bool]=None) -> None: """Run an app locally""" loop = asyncio.get_event_loop() # Configure if and only if in debugging mode and using the default logger if loop.get_debug() and access_log and access_log.name == 'aiohttp.access': if access_log.level == logging.NOTSET: access_log.setLevel(logging.DEBUG) if not access_log.hasHandlers(): access_log.addHandler(logging.StreamHandler()) try: loop.run_until_complete(_run_app(app, host=host, port=port, path=path, sock=sock, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, print=print, backlog=backlog, access_log_class=access_log_class, access_log_format=access_log_format, access_log=access_log, handle_signals=handle_signals, reuse_address=reuse_address, reuse_port=reuse_port)) except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: _cancel_all_tasks(loop) if sys.version_info >= (3, 6): # don't use PY_36 to pass mypy loop.run_until_complete(loop.shutdown_asyncgens()) loop.close()
[ "def", "run_app", "(", "app", ":", "Union", "[", "Application", ",", "Awaitable", "[", "Application", "]", "]", ",", "*", ",", "host", ":", "Optional", "[", "str", "]", "=", "None", ",", "port", ":", "Optional", "[", "int", "]", "=", "None", ",", "path", ":", "Optional", "[", "str", "]", "=", "None", ",", "sock", ":", "Optional", "[", "socket", ".", "socket", "]", "=", "None", ",", "shutdown_timeout", ":", "float", "=", "60.0", ",", "ssl_context", ":", "Optional", "[", "SSLContext", "]", "=", "None", ",", "print", ":", "Optional", "[", "Callable", "[", "...", ",", "None", "]", "]", "=", "print", ",", "backlog", ":", "int", "=", "128", ",", "access_log_class", ":", "Type", "[", "AbstractAccessLogger", "]", "=", "AccessLogger", ",", "access_log_format", ":", "str", "=", "AccessLogger", ".", "LOG_FORMAT", ",", "access_log", ":", "Optional", "[", "logging", ".", "Logger", "]", "=", "access_logger", ",", "handle_signals", ":", "bool", "=", "True", ",", "reuse_address", ":", "Optional", "[", "bool", "]", "=", "None", ",", "reuse_port", ":", "Optional", "[", "bool", "]", "=", "None", ")", "->", "None", ":", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "# Configure if and only if in debugging mode and using the default logger", "if", "loop", ".", "get_debug", "(", ")", "and", "access_log", "and", "access_log", ".", "name", "==", "'aiohttp.access'", ":", "if", "access_log", ".", "level", "==", "logging", ".", "NOTSET", ":", "access_log", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "if", "not", "access_log", ".", "hasHandlers", "(", ")", ":", "access_log", ".", "addHandler", "(", "logging", ".", "StreamHandler", "(", ")", ")", "try", ":", "loop", ".", "run_until_complete", "(", "_run_app", "(", "app", ",", "host", "=", "host", ",", "port", "=", "port", ",", "path", "=", "path", ",", "sock", "=", "sock", ",", "shutdown_timeout", "=", "shutdown_timeout", ",", "ssl_context", "=", "ssl_context", ",", "print", "=", "print", ",", "backlog", "=", "backlog", 
",", "access_log_class", "=", "access_log_class", ",", "access_log_format", "=", "access_log_format", ",", "access_log", "=", "access_log", ",", "handle_signals", "=", "handle_signals", ",", "reuse_address", "=", "reuse_address", ",", "reuse_port", "=", "reuse_port", ")", ")", "except", "(", "GracefulExit", ",", "KeyboardInterrupt", ")", ":", "# pragma: no cover", "pass", "finally", ":", "_cancel_all_tasks", "(", "loop", ")", "if", "sys", ".", "version_info", ">=", "(", "3", ",", "6", ")", ":", "# don't use PY_36 to pass mypy", "loop", ".", "run_until_complete", "(", "loop", ".", "shutdown_asyncgens", "(", ")", ")", "loop", ".", "close", "(", ")" ]
Run an app locally
[ "Run", "an", "app", "locally" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web.py#L375-L422
26,923
aio-libs/aiohttp
aiohttp/streams.py
AsyncStreamReaderMixin.iter_chunked
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: """Returns an asynchronous iterator that yields chunks of size n. Python-3.5 available for Python 3.5+ only """ return AsyncStreamIterator(lambda: self.read(n))
python
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: """Returns an asynchronous iterator that yields chunks of size n. Python-3.5 available for Python 3.5+ only """ return AsyncStreamIterator(lambda: self.read(n))
[ "def", "iter_chunked", "(", "self", ",", "n", ":", "int", ")", "->", "AsyncStreamIterator", "[", "bytes", "]", ":", "return", "AsyncStreamIterator", "(", "lambda", ":", "self", ".", "read", "(", "n", ")", ")" ]
Returns an asynchronous iterator that yields chunks of size n. Python-3.5 available for Python 3.5+ only
[ "Returns", "an", "asynchronous", "iterator", "that", "yields", "chunks", "of", "size", "n", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/streams.py#L68-L73
26,924
aio-libs/aiohttp
aiohttp/streams.py
StreamReader.unread_data
def unread_data(self, data: bytes) -> None: """ rollback reading some data from stream, inserting it to buffer head. """ warnings.warn("unread_data() is deprecated " "and will be removed in future releases (#3260)", DeprecationWarning, stacklevel=2) if not data: return if self._buffer_offset: self._buffer[0] = self._buffer[0][self._buffer_offset:] self._buffer_offset = 0 self._size += len(data) self._cursor -= len(data) self._buffer.appendleft(data) self._eof_counter = 0
python
def unread_data(self, data: bytes) -> None: """ rollback reading some data from stream, inserting it to buffer head. """ warnings.warn("unread_data() is deprecated " "and will be removed in future releases (#3260)", DeprecationWarning, stacklevel=2) if not data: return if self._buffer_offset: self._buffer[0] = self._buffer[0][self._buffer_offset:] self._buffer_offset = 0 self._size += len(data) self._cursor -= len(data) self._buffer.appendleft(data) self._eof_counter = 0
[ "def", "unread_data", "(", "self", ",", "data", ":", "bytes", ")", "->", "None", ":", "warnings", ".", "warn", "(", "\"unread_data() is deprecated \"", "\"and will be removed in future releases (#3260)\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "if", "not", "data", ":", "return", "if", "self", ".", "_buffer_offset", ":", "self", ".", "_buffer", "[", "0", "]", "=", "self", ".", "_buffer", "[", "0", "]", "[", "self", ".", "_buffer_offset", ":", "]", "self", ".", "_buffer_offset", "=", "0", "self", ".", "_size", "+=", "len", "(", "data", ")", "self", ".", "_cursor", "-=", "len", "(", "data", ")", "self", ".", "_buffer", ".", "appendleft", "(", "data", ")", "self", ".", "_eof_counter", "=", "0" ]
rollback reading some data from stream, inserting it to buffer head.
[ "rollback", "reading", "some", "data", "from", "stream", "inserting", "it", "to", "buffer", "head", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/streams.py#L211-L227
26,925
aio-libs/aiohttp
aiohttp/streams.py
StreamReader._read_nowait
def _read_nowait(self, n: int) -> bytes: """ Read not more than n bytes, or whole buffer is n == -1 """ chunks = [] while self._buffer: chunk = self._read_nowait_chunk(n) chunks.append(chunk) if n != -1: n -= len(chunk) if n == 0: break return b''.join(chunks) if chunks else b''
python
def _read_nowait(self, n: int) -> bytes: """ Read not more than n bytes, or whole buffer is n == -1 """ chunks = [] while self._buffer: chunk = self._read_nowait_chunk(n) chunks.append(chunk) if n != -1: n -= len(chunk) if n == 0: break return b''.join(chunks) if chunks else b''
[ "def", "_read_nowait", "(", "self", ",", "n", ":", "int", ")", "->", "bytes", ":", "chunks", "=", "[", "]", "while", "self", ".", "_buffer", ":", "chunk", "=", "self", ".", "_read_nowait_chunk", "(", "n", ")", "chunks", ".", "append", "(", "chunk", ")", "if", "n", "!=", "-", "1", ":", "n", "-=", "len", "(", "chunk", ")", "if", "n", "==", "0", ":", "break", "return", "b''", ".", "join", "(", "chunks", ")", "if", "chunks", "else", "b''" ]
Read not more than n bytes, or whole buffer is n == -1
[ "Read", "not", "more", "than", "n", "bytes", "or", "whole", "buffer", "is", "n", "==", "-", "1" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/streams.py#L472-L484
26,926
aio-libs/aiohttp
aiohttp/signals.py
Signal.send
async def send(self, *args, **kwargs): """ Sends data to all registered receivers. """ if not self.frozen: raise RuntimeError("Cannot send non-frozen signal.") for receiver in self: await receiver(*args, **kwargs)
python
async def send(self, *args, **kwargs): """ Sends data to all registered receivers. """ if not self.frozen: raise RuntimeError("Cannot send non-frozen signal.") for receiver in self: await receiver(*args, **kwargs)
[ "async", "def", "send", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "frozen", ":", "raise", "RuntimeError", "(", "\"Cannot send non-frozen signal.\"", ")", "for", "receiver", "in", "self", ":", "await", "receiver", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Sends data to all registered receivers.
[ "Sends", "data", "to", "all", "registered", "receivers", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/signals.py#L26-L34
26,927
aio-libs/aiohttp
aiohttp/web_log.py
AccessLogger.compile_format
def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: """Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s Also methods for formatting of those atoms will be added to _methods in appropriate order For example we have log_format = "%a %t" This format will be translated to "%s %s" Also contents of _methods will be [self._format_a, self._format_t] These method will be called and results will be passed to translated string format. Each _format_* method receive 'args' which is list of arguments given to self.log Exceptions are _format_e, _format_i and _format_o methods which also receive key name (by functools.partial) """ # list of (key, method) tuples, we don't use an OrderedDict as users # can repeat the same key more than once methods = list() for atom in self.FORMAT_RE.findall(log_format): if atom[1] == '': format_key1 = self.LOG_FORMAT_MAP[atom[0]] m = getattr(AccessLogger, '_format_%s' % atom[0]) key_method = KeyMethod(format_key1, m) else: format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) m = getattr(AccessLogger, '_format_%s' % atom[2]) key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) methods.append(key_method) log_format = self.FORMAT_RE.sub(r'%s', log_format) log_format = self.CLEANUP_RE.sub(r'%\1', log_format) return log_format, methods
python
def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: """Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s Also methods for formatting of those atoms will be added to _methods in appropriate order For example we have log_format = "%a %t" This format will be translated to "%s %s" Also contents of _methods will be [self._format_a, self._format_t] These method will be called and results will be passed to translated string format. Each _format_* method receive 'args' which is list of arguments given to self.log Exceptions are _format_e, _format_i and _format_o methods which also receive key name (by functools.partial) """ # list of (key, method) tuples, we don't use an OrderedDict as users # can repeat the same key more than once methods = list() for atom in self.FORMAT_RE.findall(log_format): if atom[1] == '': format_key1 = self.LOG_FORMAT_MAP[atom[0]] m = getattr(AccessLogger, '_format_%s' % atom[0]) key_method = KeyMethod(format_key1, m) else: format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) m = getattr(AccessLogger, '_format_%s' % atom[2]) key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) methods.append(key_method) log_format = self.FORMAT_RE.sub(r'%s', log_format) log_format = self.CLEANUP_RE.sub(r'%\1', log_format) return log_format, methods
[ "def", "compile_format", "(", "self", ",", "log_format", ":", "str", ")", "->", "Tuple", "[", "str", ",", "List", "[", "KeyMethod", "]", "]", ":", "# list of (key, method) tuples, we don't use an OrderedDict as users", "# can repeat the same key more than once", "methods", "=", "list", "(", ")", "for", "atom", "in", "self", ".", "FORMAT_RE", ".", "findall", "(", "log_format", ")", ":", "if", "atom", "[", "1", "]", "==", "''", ":", "format_key1", "=", "self", ".", "LOG_FORMAT_MAP", "[", "atom", "[", "0", "]", "]", "m", "=", "getattr", "(", "AccessLogger", ",", "'_format_%s'", "%", "atom", "[", "0", "]", ")", "key_method", "=", "KeyMethod", "(", "format_key1", ",", "m", ")", "else", ":", "format_key2", "=", "(", "self", ".", "LOG_FORMAT_MAP", "[", "atom", "[", "2", "]", "]", ",", "atom", "[", "1", "]", ")", "m", "=", "getattr", "(", "AccessLogger", ",", "'_format_%s'", "%", "atom", "[", "2", "]", ")", "key_method", "=", "KeyMethod", "(", "format_key2", ",", "functools", ".", "partial", "(", "m", ",", "atom", "[", "1", "]", ")", ")", "methods", ".", "append", "(", "key_method", ")", "log_format", "=", "self", ".", "FORMAT_RE", ".", "sub", "(", "r'%s'", ",", "log_format", ")", "log_format", "=", "self", ".", "CLEANUP_RE", ".", "sub", "(", "r'%\\1'", ",", "log_format", ")", "return", "log_format", ",", "methods" ]
Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s Also methods for formatting of those atoms will be added to _methods in appropriate order For example we have log_format = "%a %t" This format will be translated to "%s %s" Also contents of _methods will be [self._format_a, self._format_t] These method will be called and results will be passed to translated string format. Each _format_* method receive 'args' which is list of arguments given to self.log Exceptions are _format_e, _format_i and _format_o methods which also receive key name (by functools.partial)
[ "Translate", "log_format", "into", "form", "usable", "by", "modulo", "formatting" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_log.py#L78-L118
26,928
aio-libs/aiohttp
aiohttp/http_writer.py
StreamWriter.write
async def write(self, chunk: bytes, *, drain: bool=True, LIMIT: int=0x10000) -> None: """Writes chunk of data to a stream. write_eof() indicates end of stream. writer can't be used after write_eof() method being called. write() return drain future. """ if self._on_chunk_sent is not None: await self._on_chunk_sent(chunk) if self._compress is not None: chunk = self._compress.compress(chunk) if not chunk: return if self.length is not None: chunk_len = len(chunk) if self.length >= chunk_len: self.length = self.length - chunk_len else: chunk = chunk[:self.length] self.length = 0 if not chunk: return if chunk: if self.chunked: chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii') chunk = chunk_len_pre + chunk + b'\r\n' self._write(chunk) if self.buffer_size > LIMIT and drain: self.buffer_size = 0 await self.drain()
python
async def write(self, chunk: bytes, *, drain: bool=True, LIMIT: int=0x10000) -> None: """Writes chunk of data to a stream. write_eof() indicates end of stream. writer can't be used after write_eof() method being called. write() return drain future. """ if self._on_chunk_sent is not None: await self._on_chunk_sent(chunk) if self._compress is not None: chunk = self._compress.compress(chunk) if not chunk: return if self.length is not None: chunk_len = len(chunk) if self.length >= chunk_len: self.length = self.length - chunk_len else: chunk = chunk[:self.length] self.length = 0 if not chunk: return if chunk: if self.chunked: chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii') chunk = chunk_len_pre + chunk + b'\r\n' self._write(chunk) if self.buffer_size > LIMIT and drain: self.buffer_size = 0 await self.drain()
[ "async", "def", "write", "(", "self", ",", "chunk", ":", "bytes", ",", "*", ",", "drain", ":", "bool", "=", "True", ",", "LIMIT", ":", "int", "=", "0x10000", ")", "->", "None", ":", "if", "self", ".", "_on_chunk_sent", "is", "not", "None", ":", "await", "self", ".", "_on_chunk_sent", "(", "chunk", ")", "if", "self", ".", "_compress", "is", "not", "None", ":", "chunk", "=", "self", ".", "_compress", ".", "compress", "(", "chunk", ")", "if", "not", "chunk", ":", "return", "if", "self", ".", "length", "is", "not", "None", ":", "chunk_len", "=", "len", "(", "chunk", ")", "if", "self", ".", "length", ">=", "chunk_len", ":", "self", ".", "length", "=", "self", ".", "length", "-", "chunk_len", "else", ":", "chunk", "=", "chunk", "[", ":", "self", ".", "length", "]", "self", ".", "length", "=", "0", "if", "not", "chunk", ":", "return", "if", "chunk", ":", "if", "self", ".", "chunked", ":", "chunk_len_pre", "=", "(", "'%x\\r\\n'", "%", "len", "(", "chunk", ")", ")", ".", "encode", "(", "'ascii'", ")", "chunk", "=", "chunk_len_pre", "+", "chunk", "+", "b'\\r\\n'", "self", ".", "_write", "(", "chunk", ")", "if", "self", ".", "buffer_size", ">", "LIMIT", "and", "drain", ":", "self", ".", "buffer_size", "=", "0", "await", "self", ".", "drain", "(", ")" ]
Writes chunk of data to a stream. write_eof() indicates end of stream. writer can't be used after write_eof() method being called. write() return drain future.
[ "Writes", "chunk", "of", "data", "to", "a", "stream", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_writer.py#L70-L105
26,929
aio-libs/aiohttp
aiohttp/helpers.py
netrc_from_env
def netrc_from_env() -> Optional[netrc.netrc]: """Attempt to load the netrc file from the path specified by the env-var NETRC or in the default location in the user's home directory. Returns None if it couldn't be found or fails to parse. """ netrc_env = os.environ.get('NETRC') if netrc_env is not None: netrc_path = Path(netrc_env) else: try: home_dir = Path.home() except RuntimeError as e: # pragma: no cover # if pathlib can't resolve home, it may raise a RuntimeError client_logger.debug('Could not resolve home directory when ' 'trying to look for .netrc file: %s', e) return None netrc_path = home_dir / ( '_netrc' if platform.system() == 'Windows' else '.netrc') try: return netrc.netrc(str(netrc_path)) except netrc.NetrcParseError as e: client_logger.warning('Could not parse .netrc file: %s', e) except OSError as e: # we couldn't read the file (doesn't exist, permissions, etc.) if netrc_env or netrc_path.is_file(): # only warn if the environment wanted us to load it, # or it appears like the default file does actually exist client_logger.warning('Could not read .netrc file: %s', e) return None
python
def netrc_from_env() -> Optional[netrc.netrc]: """Attempt to load the netrc file from the path specified by the env-var NETRC or in the default location in the user's home directory. Returns None if it couldn't be found or fails to parse. """ netrc_env = os.environ.get('NETRC') if netrc_env is not None: netrc_path = Path(netrc_env) else: try: home_dir = Path.home() except RuntimeError as e: # pragma: no cover # if pathlib can't resolve home, it may raise a RuntimeError client_logger.debug('Could not resolve home directory when ' 'trying to look for .netrc file: %s', e) return None netrc_path = home_dir / ( '_netrc' if platform.system() == 'Windows' else '.netrc') try: return netrc.netrc(str(netrc_path)) except netrc.NetrcParseError as e: client_logger.warning('Could not parse .netrc file: %s', e) except OSError as e: # we couldn't read the file (doesn't exist, permissions, etc.) if netrc_env or netrc_path.is_file(): # only warn if the environment wanted us to load it, # or it appears like the default file does actually exist client_logger.warning('Could not read .netrc file: %s', e) return None
[ "def", "netrc_from_env", "(", ")", "->", "Optional", "[", "netrc", ".", "netrc", "]", ":", "netrc_env", "=", "os", ".", "environ", ".", "get", "(", "'NETRC'", ")", "if", "netrc_env", "is", "not", "None", ":", "netrc_path", "=", "Path", "(", "netrc_env", ")", "else", ":", "try", ":", "home_dir", "=", "Path", ".", "home", "(", ")", "except", "RuntimeError", "as", "e", ":", "# pragma: no cover", "# if pathlib can't resolve home, it may raise a RuntimeError", "client_logger", ".", "debug", "(", "'Could not resolve home directory when '", "'trying to look for .netrc file: %s'", ",", "e", ")", "return", "None", "netrc_path", "=", "home_dir", "/", "(", "'_netrc'", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", "else", "'.netrc'", ")", "try", ":", "return", "netrc", ".", "netrc", "(", "str", "(", "netrc_path", ")", ")", "except", "netrc", ".", "NetrcParseError", "as", "e", ":", "client_logger", ".", "warning", "(", "'Could not parse .netrc file: %s'", ",", "e", ")", "except", "OSError", "as", "e", ":", "# we couldn't read the file (doesn't exist, permissions, etc.)", "if", "netrc_env", "or", "netrc_path", ".", "is_file", "(", ")", ":", "# only warn if the environment wanted us to load it,", "# or it appears like the default file does actually exist", "client_logger", ".", "warning", "(", "'Could not read .netrc file: %s'", ",", "e", ")", "return", "None" ]
Attempt to load the netrc file from the path specified by the env-var NETRC or in the default location in the user's home directory. Returns None if it couldn't be found or fails to parse.
[ "Attempt", "to", "load", "the", "netrc", "file", "from", "the", "path", "specified", "by", "the", "env", "-", "var", "NETRC", "or", "in", "the", "default", "location", "in", "the", "user", "s", "home", "directory", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L186-L219
26,930
aio-libs/aiohttp
aiohttp/helpers.py
parse_mimetype
def parse_mimetype(mimetype: str) -> MimeType: """Parses a MIME type into its components. mimetype is a MIME type string. Returns a MimeType object. Example: >>> parse_mimetype('text/html; charset=utf-8') MimeType(type='text', subtype='html', suffix='', parameters={'charset': 'utf-8'}) """ if not mimetype: return MimeType(type='', subtype='', suffix='', parameters=MultiDictProxy(MultiDict())) parts = mimetype.split(';') params = MultiDict() # type: MultiDict[str] for item in parts[1:]: if not item: continue key, value = cast(Tuple[str, str], item.split('=', 1) if '=' in item else (item, '')) params.add(key.lower().strip(), value.strip(' "')) fulltype = parts[0].strip().lower() if fulltype == '*': fulltype = '*/*' mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1)) if '/' in fulltype else (fulltype, '')) stype, suffix = (cast(Tuple[str, str], stype.split('+', 1)) if '+' in stype else (stype, '')) return MimeType(type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params))
python
def parse_mimetype(mimetype: str) -> MimeType: """Parses a MIME type into its components. mimetype is a MIME type string. Returns a MimeType object. Example: >>> parse_mimetype('text/html; charset=utf-8') MimeType(type='text', subtype='html', suffix='', parameters={'charset': 'utf-8'}) """ if not mimetype: return MimeType(type='', subtype='', suffix='', parameters=MultiDictProxy(MultiDict())) parts = mimetype.split(';') params = MultiDict() # type: MultiDict[str] for item in parts[1:]: if not item: continue key, value = cast(Tuple[str, str], item.split('=', 1) if '=' in item else (item, '')) params.add(key.lower().strip(), value.strip(' "')) fulltype = parts[0].strip().lower() if fulltype == '*': fulltype = '*/*' mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1)) if '/' in fulltype else (fulltype, '')) stype, suffix = (cast(Tuple[str, str], stype.split('+', 1)) if '+' in stype else (stype, '')) return MimeType(type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params))
[ "def", "parse_mimetype", "(", "mimetype", ":", "str", ")", "->", "MimeType", ":", "if", "not", "mimetype", ":", "return", "MimeType", "(", "type", "=", "''", ",", "subtype", "=", "''", ",", "suffix", "=", "''", ",", "parameters", "=", "MultiDictProxy", "(", "MultiDict", "(", ")", ")", ")", "parts", "=", "mimetype", ".", "split", "(", "';'", ")", "params", "=", "MultiDict", "(", ")", "# type: MultiDict[str]", "for", "item", "in", "parts", "[", "1", ":", "]", ":", "if", "not", "item", ":", "continue", "key", ",", "value", "=", "cast", "(", "Tuple", "[", "str", ",", "str", "]", ",", "item", ".", "split", "(", "'='", ",", "1", ")", "if", "'='", "in", "item", "else", "(", "item", ",", "''", ")", ")", "params", ".", "add", "(", "key", ".", "lower", "(", ")", ".", "strip", "(", ")", ",", "value", ".", "strip", "(", "' \"'", ")", ")", "fulltype", "=", "parts", "[", "0", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "fulltype", "==", "'*'", ":", "fulltype", "=", "'*/*'", "mtype", ",", "stype", "=", "(", "cast", "(", "Tuple", "[", "str", ",", "str", "]", ",", "fulltype", ".", "split", "(", "'/'", ",", "1", ")", ")", "if", "'/'", "in", "fulltype", "else", "(", "fulltype", ",", "''", ")", ")", "stype", ",", "suffix", "=", "(", "cast", "(", "Tuple", "[", "str", ",", "str", "]", ",", "stype", ".", "split", "(", "'+'", ",", "1", ")", ")", "if", "'+'", "in", "stype", "else", "(", "stype", ",", "''", ")", ")", "return", "MimeType", "(", "type", "=", "mtype", ",", "subtype", "=", "stype", ",", "suffix", "=", "suffix", ",", "parameters", "=", "MultiDictProxy", "(", "params", ")", ")" ]
Parses a MIME type into its components. mimetype is a MIME type string. Returns a MimeType object. Example: >>> parse_mimetype('text/html; charset=utf-8') MimeType(type='text', subtype='html', suffix='', parameters={'charset': 'utf-8'})
[ "Parses", "a", "MIME", "type", "into", "its", "components", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L291-L328
26,931
aio-libs/aiohttp
aiohttp/helpers.py
BasicAuth.decode
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth': """Create a BasicAuth object from an Authorization HTTP header.""" try: auth_type, encoded_credentials = auth_header.split(' ', 1) except ValueError: raise ValueError('Could not parse authorization header.') if auth_type.lower() != 'basic': raise ValueError('Unknown authorization method %s' % auth_type) try: decoded = base64.b64decode( encoded_credentials.encode('ascii'), validate=True ).decode(encoding) except binascii.Error: raise ValueError('Invalid base64 encoding.') try: # RFC 2617 HTTP Authentication # https://www.ietf.org/rfc/rfc2617.txt # the colon must be present, but the username and password may be # otherwise blank. username, password = decoded.split(':', 1) except ValueError: raise ValueError('Invalid credentials.') return cls(username, password, encoding=encoding)
python
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth': """Create a BasicAuth object from an Authorization HTTP header.""" try: auth_type, encoded_credentials = auth_header.split(' ', 1) except ValueError: raise ValueError('Could not parse authorization header.') if auth_type.lower() != 'basic': raise ValueError('Unknown authorization method %s' % auth_type) try: decoded = base64.b64decode( encoded_credentials.encode('ascii'), validate=True ).decode(encoding) except binascii.Error: raise ValueError('Invalid base64 encoding.') try: # RFC 2617 HTTP Authentication # https://www.ietf.org/rfc/rfc2617.txt # the colon must be present, but the username and password may be # otherwise blank. username, password = decoded.split(':', 1) except ValueError: raise ValueError('Invalid credentials.') return cls(username, password, encoding=encoding)
[ "def", "decode", "(", "cls", ",", "auth_header", ":", "str", ",", "encoding", ":", "str", "=", "'latin1'", ")", "->", "'BasicAuth'", ":", "try", ":", "auth_type", ",", "encoded_credentials", "=", "auth_header", ".", "split", "(", "' '", ",", "1", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Could not parse authorization header.'", ")", "if", "auth_type", ".", "lower", "(", ")", "!=", "'basic'", ":", "raise", "ValueError", "(", "'Unknown authorization method %s'", "%", "auth_type", ")", "try", ":", "decoded", "=", "base64", ".", "b64decode", "(", "encoded_credentials", ".", "encode", "(", "'ascii'", ")", ",", "validate", "=", "True", ")", ".", "decode", "(", "encoding", ")", "except", "binascii", ".", "Error", ":", "raise", "ValueError", "(", "'Invalid base64 encoding.'", ")", "try", ":", "# RFC 2617 HTTP Authentication", "# https://www.ietf.org/rfc/rfc2617.txt", "# the colon must be present, but the username and password may be", "# otherwise blank.", "username", ",", "password", "=", "decoded", ".", "split", "(", "':'", ",", "1", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Invalid credentials.'", ")", "return", "cls", "(", "username", ",", "password", ",", "encoding", "=", "encoding", ")" ]
Create a BasicAuth object from an Authorization HTTP header.
[ "Create", "a", "BasicAuth", "object", "from", "an", "Authorization", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L134-L160
26,932
aio-libs/aiohttp
aiohttp/helpers.py
BasicAuth.from_url
def from_url(cls, url: URL, *, encoding: str='latin1') -> Optional['BasicAuth']: """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") if url.user is None: return None return cls(url.user, url.password or '', encoding=encoding)
python
def from_url(cls, url: URL, *, encoding: str='latin1') -> Optional['BasicAuth']: """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") if url.user is None: return None return cls(url.user, url.password or '', encoding=encoding)
[ "def", "from_url", "(", "cls", ",", "url", ":", "URL", ",", "*", ",", "encoding", ":", "str", "=", "'latin1'", ")", "->", "Optional", "[", "'BasicAuth'", "]", ":", "if", "not", "isinstance", "(", "url", ",", "URL", ")", ":", "raise", "TypeError", "(", "\"url should be yarl.URL instance\"", ")", "if", "url", ".", "user", "is", "None", ":", "return", "None", "return", "cls", "(", "url", ".", "user", ",", "url", ".", "password", "or", "''", ",", "encoding", "=", "encoding", ")" ]
Create BasicAuth from url.
[ "Create", "BasicAuth", "from", "url", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L163-L170
26,933
aio-libs/aiohttp
aiohttp/helpers.py
BasicAuth.encode
def encode(self) -> str: """Encode credentials.""" creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding) return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
python
def encode(self) -> str: """Encode credentials.""" creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding) return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
[ "def", "encode", "(", "self", ")", "->", "str", ":", "creds", "=", "(", "'%s:%s'", "%", "(", "self", ".", "login", ",", "self", ".", "password", ")", ")", ".", "encode", "(", "self", ".", "encoding", ")", "return", "'Basic %s'", "%", "base64", ".", "b64encode", "(", "creds", ")", ".", "decode", "(", "self", ".", "encoding", ")" ]
Encode credentials.
[ "Encode", "credentials", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L172-L175
26,934
aio-libs/aiohttp
aiohttp/helpers.py
HeadersMixin.content_type
def content_type(self) -> str: """The value of content part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_type
python
def content_type(self) -> str: """The value of content part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_type
[ "def", "content_type", "(", "self", ")", "->", "str", ":", "raw", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ")", "# type: ignore", "if", "self", ".", "_stored_content_type", "!=", "raw", ":", "self", ".", "_parse_content_type", "(", "raw", ")", "return", "self", ".", "_content_type" ]
The value of content part for Content-Type HTTP header.
[ "The", "value", "of", "content", "part", "for", "Content", "-", "Type", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L624-L629
26,935
aio-libs/aiohttp
aiohttp/helpers.py
HeadersMixin.charset
def charset(self) -> Optional[str]: """The value of charset part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_dict.get('charset')
python
def charset(self) -> Optional[str]: """The value of charset part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_dict.get('charset')
[ "def", "charset", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "raw", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ")", "# type: ignore", "if", "self", ".", "_stored_content_type", "!=", "raw", ":", "self", ".", "_parse_content_type", "(", "raw", ")", "return", "self", ".", "_content_dict", ".", "get", "(", "'charset'", ")" ]
The value of charset part for Content-Type HTTP header.
[ "The", "value", "of", "charset", "part", "for", "Content", "-", "Type", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L632-L637
26,936
aio-libs/aiohttp
aiohttp/helpers.py
HeadersMixin.content_length
def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore if content_length is not None: return int(content_length) else: return None
python
def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore if content_length is not None: return int(content_length) else: return None
[ "def", "content_length", "(", "self", ")", "->", "Optional", "[", "int", "]", ":", "content_length", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "CONTENT_LENGTH", ")", "# type: ignore", "if", "content_length", "is", "not", "None", ":", "return", "int", "(", "content_length", ")", "else", ":", "return", "None" ]
The value of Content-Length HTTP header.
[ "The", "value", "of", "Content", "-", "Length", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L640-L647
26,937
aio-libs/aiohttp
aiohttp/client.py
ClientSession.request
def request(self, method: str, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP request.""" return _RequestContextManager(self._request(method, url, **kwargs))
python
def request(self, method: str, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP request.""" return _RequestContextManager(self._request(method, url, **kwargs))
[ "def", "request", "(", "self", ",", "method", ":", "str", ",", "url", ":", "StrOrURL", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP request.
[ "Perform", "HTTP", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L297-L302
26,938
aio-libs/aiohttp
aiohttp/client.py
ClientSession.ws_connect
def ws_connect( self, url: StrOrURL, *, method: str=hdrs.METH_GET, protocols: Iterable[str]=(), timeout: float=10.0, receive_timeout: Optional[float]=None, autoclose: bool=True, autoping: bool=True, heartbeat: Optional[float]=None, auth: Optional[BasicAuth]=None, origin: Optional[str]=None, headers: Optional[LooseHeaders]=None, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, ssl: Union[SSLContext, bool, None, Fingerprint]=None, proxy_headers: Optional[LooseHeaders]=None, compress: int=0, max_msg_size: int=4*1024*1024) -> '_WSRequestContextManager': """Initiate websocket connection.""" return _WSRequestContextManager( self._ws_connect(url, method=method, protocols=protocols, timeout=timeout, receive_timeout=receive_timeout, autoclose=autoclose, autoping=autoping, heartbeat=heartbeat, auth=auth, origin=origin, headers=headers, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, proxy_headers=proxy_headers, compress=compress, max_msg_size=max_msg_size))
python
def ws_connect( self, url: StrOrURL, *, method: str=hdrs.METH_GET, protocols: Iterable[str]=(), timeout: float=10.0, receive_timeout: Optional[float]=None, autoclose: bool=True, autoping: bool=True, heartbeat: Optional[float]=None, auth: Optional[BasicAuth]=None, origin: Optional[str]=None, headers: Optional[LooseHeaders]=None, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, ssl: Union[SSLContext, bool, None, Fingerprint]=None, proxy_headers: Optional[LooseHeaders]=None, compress: int=0, max_msg_size: int=4*1024*1024) -> '_WSRequestContextManager': """Initiate websocket connection.""" return _WSRequestContextManager( self._ws_connect(url, method=method, protocols=protocols, timeout=timeout, receive_timeout=receive_timeout, autoclose=autoclose, autoping=autoping, heartbeat=heartbeat, auth=auth, origin=origin, headers=headers, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, proxy_headers=proxy_headers, compress=compress, max_msg_size=max_msg_size))
[ "def", "ws_connect", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "method", ":", "str", "=", "hdrs", ".", "METH_GET", ",", "protocols", ":", "Iterable", "[", "str", "]", "=", "(", ")", ",", "timeout", ":", "float", "=", "10.0", ",", "receive_timeout", ":", "Optional", "[", "float", "]", "=", "None", ",", "autoclose", ":", "bool", "=", "True", ",", "autoping", ":", "bool", "=", "True", ",", "heartbeat", ":", "Optional", "[", "float", "]", "=", "None", ",", "auth", ":", "Optional", "[", "BasicAuth", "]", "=", "None", ",", "origin", ":", "Optional", "[", "str", "]", "=", "None", ",", "headers", ":", "Optional", "[", "LooseHeaders", "]", "=", "None", ",", "proxy", ":", "Optional", "[", "StrOrURL", "]", "=", "None", ",", "proxy_auth", ":", "Optional", "[", "BasicAuth", "]", "=", "None", ",", "ssl", ":", "Union", "[", "SSLContext", ",", "bool", ",", "None", ",", "Fingerprint", "]", "=", "None", ",", "proxy_headers", ":", "Optional", "[", "LooseHeaders", "]", "=", "None", ",", "compress", ":", "int", "=", "0", ",", "max_msg_size", ":", "int", "=", "4", "*", "1024", "*", "1024", ")", "->", "'_WSRequestContextManager'", ":", "return", "_WSRequestContextManager", "(", "self", ".", "_ws_connect", "(", "url", ",", "method", "=", "method", ",", "protocols", "=", "protocols", ",", "timeout", "=", "timeout", ",", "receive_timeout", "=", "receive_timeout", ",", "autoclose", "=", "autoclose", ",", "autoping", "=", "autoping", ",", "heartbeat", "=", "heartbeat", ",", "auth", "=", "auth", ",", "origin", "=", "origin", ",", "headers", "=", "headers", ",", "proxy", "=", "proxy", ",", "proxy_auth", "=", "proxy_auth", ",", "ssl", "=", "ssl", ",", "proxy_headers", "=", "proxy_headers", ",", "compress", "=", "compress", ",", "max_msg_size", "=", "max_msg_size", ")", ")" ]
Initiate websocket connection.
[ "Initiate", "websocket", "connection", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L604-L641
26,939
aio-libs/aiohttp
aiohttp/client.py
ClientSession._prepare_headers
def _prepare_headers( self, headers: Optional[LooseHeaders]) -> 'CIMultiDict[str]': """ Add default headers and transform it to CIMultiDict """ # Convert headers to MultiDict result = CIMultiDict(self._default_headers) if headers: if not isinstance(headers, (MultiDictProxy, MultiDict)): headers = CIMultiDict(headers) added_names = set() # type: Set[str] for key, value in headers.items(): if key in added_names: result.add(key, value) else: result[key] = value added_names.add(key) return result
python
def _prepare_headers( self, headers: Optional[LooseHeaders]) -> 'CIMultiDict[str]': """ Add default headers and transform it to CIMultiDict """ # Convert headers to MultiDict result = CIMultiDict(self._default_headers) if headers: if not isinstance(headers, (MultiDictProxy, MultiDict)): headers = CIMultiDict(headers) added_names = set() # type: Set[str] for key, value in headers.items(): if key in added_names: result.add(key, value) else: result[key] = value added_names.add(key) return result
[ "def", "_prepare_headers", "(", "self", ",", "headers", ":", "Optional", "[", "LooseHeaders", "]", ")", "->", "'CIMultiDict[str]'", ":", "# Convert headers to MultiDict", "result", "=", "CIMultiDict", "(", "self", ".", "_default_headers", ")", "if", "headers", ":", "if", "not", "isinstance", "(", "headers", ",", "(", "MultiDictProxy", ",", "MultiDict", ")", ")", ":", "headers", "=", "CIMultiDict", "(", "headers", ")", "added_names", "=", "set", "(", ")", "# type: Set[str]", "for", "key", ",", "value", "in", "headers", ".", "items", "(", ")", ":", "if", "key", "in", "added_names", ":", "result", ".", "add", "(", "key", ",", "value", ")", "else", ":", "result", "[", "key", "]", "=", "value", "added_names", ".", "add", "(", "key", ")", "return", "result" ]
Add default headers and transform it to CIMultiDict
[ "Add", "default", "headers", "and", "transform", "it", "to", "CIMultiDict" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L800-L817
26,940
aio-libs/aiohttp
aiohttp/client.py
ClientSession.get
def get(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP GET request.""" return _RequestContextManager( self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs))
python
def get(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP GET request.""" return _RequestContextManager( self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs))
[ "def", "get", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "allow_redirects", ":", "bool", "=", "True", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_GET", ",", "url", ",", "allow_redirects", "=", "allow_redirects", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP GET request.
[ "Perform", "HTTP", "GET", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L819-L825
26,941
aio-libs/aiohttp
aiohttp/client.py
ClientSession.options
def options(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP OPTIONS request.""" return _RequestContextManager( self._request(hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs))
python
def options(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP OPTIONS request.""" return _RequestContextManager( self._request(hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs))
[ "def", "options", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "allow_redirects", ":", "bool", "=", "True", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_OPTIONS", ",", "url", ",", "allow_redirects", "=", "allow_redirects", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP OPTIONS request.
[ "Perform", "HTTP", "OPTIONS", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L827-L833
26,942
aio-libs/aiohttp
aiohttp/client.py
ClientSession.head
def head(self, url: StrOrURL, *, allow_redirects: bool=False, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP HEAD request.""" return _RequestContextManager( self._request(hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs))
python
def head(self, url: StrOrURL, *, allow_redirects: bool=False, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP HEAD request.""" return _RequestContextManager( self._request(hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs))
[ "def", "head", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "allow_redirects", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_HEAD", ",", "url", ",", "allow_redirects", "=", "allow_redirects", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP HEAD request.
[ "Perform", "HTTP", "HEAD", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L835-L841
26,943
aio-libs/aiohttp
aiohttp/client.py
ClientSession.post
def post(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP POST request.""" return _RequestContextManager( self._request(hdrs.METH_POST, url, data=data, **kwargs))
python
def post(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP POST request.""" return _RequestContextManager( self._request(hdrs.METH_POST, url, data=data, **kwargs))
[ "def", "post", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "data", ":", "Any", "=", "None", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_POST", ",", "url", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP POST request.
[ "Perform", "HTTP", "POST", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L843-L849
26,944
aio-libs/aiohttp
aiohttp/client.py
ClientSession.put
def put(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PUT request.""" return _RequestContextManager( self._request(hdrs.METH_PUT, url, data=data, **kwargs))
python
def put(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PUT request.""" return _RequestContextManager( self._request(hdrs.METH_PUT, url, data=data, **kwargs))
[ "def", "put", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "data", ":", "Any", "=", "None", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_PUT", ",", "url", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP PUT request.
[ "Perform", "HTTP", "PUT", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L851-L857
26,945
aio-libs/aiohttp
aiohttp/client.py
ClientSession.patch
def patch(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PATCH request.""" return _RequestContextManager( self._request(hdrs.METH_PATCH, url, data=data, **kwargs))
python
def patch(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PATCH request.""" return _RequestContextManager( self._request(hdrs.METH_PATCH, url, data=data, **kwargs))
[ "def", "patch", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "data", ":", "Any", "=", "None", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_PATCH", ",", "url", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP PATCH request.
[ "Perform", "HTTP", "PATCH", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L859-L865
26,946
aio-libs/aiohttp
aiohttp/client.py
ClientSession.delete
def delete(self, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP DELETE request.""" return _RequestContextManager( self._request(hdrs.METH_DELETE, url, **kwargs))
python
def delete(self, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP DELETE request.""" return _RequestContextManager( self._request(hdrs.METH_DELETE, url, **kwargs))
[ "def", "delete", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_DELETE", ",", "url", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP DELETE request.
[ "Perform", "HTTP", "DELETE", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L867-L871
26,947
aio-libs/aiohttp
aiohttp/client.py
ClientSession.close
async def close(self) -> None: """Close underlying connector. Release all acquired resources. """ if not self.closed: if self._connector is not None and self._connector_owner: await self._connector.close() self._connector = None
python
async def close(self) -> None: """Close underlying connector. Release all acquired resources. """ if not self.closed: if self._connector is not None and self._connector_owner: await self._connector.close() self._connector = None
[ "async", "def", "close", "(", "self", ")", "->", "None", ":", "if", "not", "self", ".", "closed", ":", "if", "self", ".", "_connector", "is", "not", "None", "and", "self", ".", "_connector_owner", ":", "await", "self", ".", "_connector", ".", "close", "(", ")", "self", ".", "_connector", "=", "None" ]
Close underlying connector. Release all acquired resources.
[ "Close", "underlying", "connector", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L873-L881
26,948
aio-libs/aiohttp
aiohttp/client.py
ClientSession.requote_redirect_url
def requote_redirect_url(self, val: bool) -> None: """Do URL requoting on redirection handling.""" warnings.warn("session.requote_redirect_url modification " "is deprecated #2778", DeprecationWarning, stacklevel=2) self._requote_redirect_url = val
python
def requote_redirect_url(self, val: bool) -> None: """Do URL requoting on redirection handling.""" warnings.warn("session.requote_redirect_url modification " "is deprecated #2778", DeprecationWarning, stacklevel=2) self._requote_redirect_url = val
[ "def", "requote_redirect_url", "(", "self", ",", "val", ":", "bool", ")", "->", "None", ":", "warnings", ".", "warn", "(", "\"session.requote_redirect_url modification \"", "\"is deprecated #2778\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "self", ".", "_requote_redirect_url", "=", "val" ]
Do URL requoting on redirection handling.
[ "Do", "URL", "requoting", "on", "redirection", "handling", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L912-L918
26,949
aio-libs/aiohttp
aiohttp/multipart.py
MultipartResponseWrapper.next
async def next(self) -> Any: """Emits next multipart reader object.""" item = await self.stream.next() if self.stream.at_eof(): await self.release() return item
python
async def next(self) -> Any: """Emits next multipart reader object.""" item = await self.stream.next() if self.stream.at_eof(): await self.release() return item
[ "async", "def", "next", "(", "self", ")", "->", "Any", ":", "item", "=", "await", "self", ".", "stream", ".", "next", "(", ")", "if", "self", ".", "stream", ".", "at_eof", "(", ")", ":", "await", "self", ".", "release", "(", ")", "return", "item" ]
Emits next multipart reader object.
[ "Emits", "next", "multipart", "reader", "object", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L222-L227
26,950
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.read
async def read(self, *, decode: bool=False) -> Any: """Reads body part data. decode: Decodes data following by encoding method from Content-Encoding header. If it missed data remains untouched """ if self._at_eof: return b'' data = bytearray() while not self._at_eof: data.extend((await self.read_chunk(self.chunk_size))) if decode: return self.decode(data) return data
python
async def read(self, *, decode: bool=False) -> Any: """Reads body part data. decode: Decodes data following by encoding method from Content-Encoding header. If it missed data remains untouched """ if self._at_eof: return b'' data = bytearray() while not self._at_eof: data.extend((await self.read_chunk(self.chunk_size))) if decode: return self.decode(data) return data
[ "async", "def", "read", "(", "self", ",", "*", ",", "decode", ":", "bool", "=", "False", ")", "->", "Any", ":", "if", "self", ".", "_at_eof", ":", "return", "b''", "data", "=", "bytearray", "(", ")", "while", "not", "self", ".", "_at_eof", ":", "data", ".", "extend", "(", "(", "await", "self", ".", "read_chunk", "(", "self", ".", "chunk_size", ")", ")", ")", "if", "decode", ":", "return", "self", ".", "decode", "(", "data", ")", "return", "data" ]
Reads body part data. decode: Decodes data following by encoding method from Content-Encoding header. If it missed data remains untouched
[ "Reads", "body", "part", "data", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L277-L291
26,951
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.read_chunk
async def read_chunk(self, size: int=chunk_size) -> bytes: """Reads body part content chunk of the specified size. size: chunk size """ if self._at_eof: return b'' if self._length: chunk = await self._read_chunk_from_length(size) else: chunk = await self._read_chunk_from_stream(size) self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True if self._at_eof: newline = await self._content.readline() assert newline == self._newline, \ 'reader did not read all the data or it is malformed' return chunk
python
async def read_chunk(self, size: int=chunk_size) -> bytes: """Reads body part content chunk of the specified size. size: chunk size """ if self._at_eof: return b'' if self._length: chunk = await self._read_chunk_from_length(size) else: chunk = await self._read_chunk_from_stream(size) self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True if self._at_eof: newline = await self._content.readline() assert newline == self._newline, \ 'reader did not read all the data or it is malformed' return chunk
[ "async", "def", "read_chunk", "(", "self", ",", "size", ":", "int", "=", "chunk_size", ")", "->", "bytes", ":", "if", "self", ".", "_at_eof", ":", "return", "b''", "if", "self", ".", "_length", ":", "chunk", "=", "await", "self", ".", "_read_chunk_from_length", "(", "size", ")", "else", ":", "chunk", "=", "await", "self", ".", "_read_chunk_from_stream", "(", "size", ")", "self", ".", "_read_bytes", "+=", "len", "(", "chunk", ")", "if", "self", ".", "_read_bytes", "==", "self", ".", "_length", ":", "self", ".", "_at_eof", "=", "True", "if", "self", ".", "_at_eof", ":", "newline", "=", "await", "self", ".", "_content", ".", "readline", "(", ")", "assert", "newline", "==", "self", ".", "_newline", ",", "'reader did not read all the data or it is malformed'", "return", "chunk" ]
Reads body part content chunk of the specified size. size: chunk size
[ "Reads", "body", "part", "content", "chunk", "of", "the", "specified", "size", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L293-L312
26,952
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.readline
async def readline(self) -> bytes: """Reads body part by line by line.""" if self._at_eof: return b'' if self._unread: line = self._unread.popleft() else: line = await self._content.readline() if line.startswith(self._boundary): # the very last boundary may not come with \r\n, # so set single rules for everyone sline = line.rstrip(b'\r\n') boundary = self._boundary last_boundary = self._boundary + b'--' # ensure that we read exactly the boundary, not something alike if sline == boundary or sline == last_boundary: self._at_eof = True self._unread.append(line) return b'' else: next_line = await self._content.readline() if next_line.startswith(self._boundary): # strip newline but only once line = line[:-len(self._newline)] self._unread.append(next_line) return line
python
async def readline(self) -> bytes: """Reads body part by line by line.""" if self._at_eof: return b'' if self._unread: line = self._unread.popleft() else: line = await self._content.readline() if line.startswith(self._boundary): # the very last boundary may not come with \r\n, # so set single rules for everyone sline = line.rstrip(b'\r\n') boundary = self._boundary last_boundary = self._boundary + b'--' # ensure that we read exactly the boundary, not something alike if sline == boundary or sline == last_boundary: self._at_eof = True self._unread.append(line) return b'' else: next_line = await self._content.readline() if next_line.startswith(self._boundary): # strip newline but only once line = line[:-len(self._newline)] self._unread.append(next_line) return line
[ "async", "def", "readline", "(", "self", ")", "->", "bytes", ":", "if", "self", ".", "_at_eof", ":", "return", "b''", "if", "self", ".", "_unread", ":", "line", "=", "self", ".", "_unread", ".", "popleft", "(", ")", "else", ":", "line", "=", "await", "self", ".", "_content", ".", "readline", "(", ")", "if", "line", ".", "startswith", "(", "self", ".", "_boundary", ")", ":", "# the very last boundary may not come with \\r\\n,", "# so set single rules for everyone", "sline", "=", "line", ".", "rstrip", "(", "b'\\r\\n'", ")", "boundary", "=", "self", ".", "_boundary", "last_boundary", "=", "self", ".", "_boundary", "+", "b'--'", "# ensure that we read exactly the boundary, not something alike", "if", "sline", "==", "boundary", "or", "sline", "==", "last_boundary", ":", "self", ".", "_at_eof", "=", "True", "self", ".", "_unread", ".", "append", "(", "line", ")", "return", "b''", "else", ":", "next_line", "=", "await", "self", ".", "_content", ".", "readline", "(", ")", "if", "next_line", ".", "startswith", "(", "self", ".", "_boundary", ")", ":", "# strip newline but only once", "line", "=", "line", "[", ":", "-", "len", "(", "self", ".", "_newline", ")", "]", "self", ".", "_unread", ".", "append", "(", "next_line", ")", "return", "line" ]
Reads body part by line by line.
[ "Reads", "body", "part", "by", "line", "by", "line", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L362-L390
26,953
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.decode
def decode(self, data: bytes) -> bytes: """Decodes data according the specified Content-Encoding or Content-Transfer-Encoding headers value. """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) if CONTENT_ENCODING in self.headers: return self._decode_content(data) return data
python
def decode(self, data: bytes) -> bytes: """Decodes data according the specified Content-Encoding or Content-Transfer-Encoding headers value. """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) if CONTENT_ENCODING in self.headers: return self._decode_content(data) return data
[ "def", "decode", "(", "self", ",", "data", ":", "bytes", ")", "->", "bytes", ":", "if", "CONTENT_TRANSFER_ENCODING", "in", "self", ".", "headers", ":", "data", "=", "self", ".", "_decode_content_transfer", "(", "data", ")", "if", "CONTENT_ENCODING", "in", "self", ".", "headers", ":", "return", "self", ".", "_decode_content", "(", "data", ")", "return", "data" ]
Decodes data according the specified Content-Encoding or Content-Transfer-Encoding headers value.
[ "Decodes", "data", "according", "the", "specified", "Content", "-", "Encoding", "or", "Content", "-", "Transfer", "-", "Encoding", "headers", "value", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L435-L443
26,954
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.get_charset
def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" ctype = self.headers.get(CONTENT_TYPE, '') mimetype = parse_mimetype(ctype) return mimetype.parameters.get('charset', default)
python
def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" ctype = self.headers.get(CONTENT_TYPE, '') mimetype = parse_mimetype(ctype) return mimetype.parameters.get('charset', default)
[ "def", "get_charset", "(", "self", ",", "default", ":", "str", ")", "->", "str", ":", "ctype", "=", "self", ".", "headers", ".", "get", "(", "CONTENT_TYPE", ",", "''", ")", "mimetype", "=", "parse_mimetype", "(", "ctype", ")", "return", "mimetype", ".", "parameters", ".", "get", "(", "'charset'", ",", "default", ")" ]
Returns charset parameter from Content-Type header or default.
[ "Returns", "charset", "parameter", "from", "Content", "-", "Type", "header", "or", "default", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L470-L474
26,955
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.name
def name(self) -> Optional[str]: """Returns name specified in Content-Disposition header or None if missed or header is malformed. """ _, params = parse_content_disposition( self.headers.get(CONTENT_DISPOSITION)) return content_disposition_filename(params, 'name')
python
def name(self) -> Optional[str]: """Returns name specified in Content-Disposition header or None if missed or header is malformed. """ _, params = parse_content_disposition( self.headers.get(CONTENT_DISPOSITION)) return content_disposition_filename(params, 'name')
[ "def", "name", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "_", ",", "params", "=", "parse_content_disposition", "(", "self", ".", "headers", ".", "get", "(", "CONTENT_DISPOSITION", ")", ")", "return", "content_disposition_filename", "(", "params", ",", "'name'", ")" ]
Returns name specified in Content-Disposition header or None if missed or header is malformed.
[ "Returns", "name", "specified", "in", "Content", "-", "Disposition", "header", "or", "None", "if", "missed", "or", "header", "is", "malformed", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L477-L484
26,956
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader.from_response
def from_response(cls, response: 'ClientResponse') -> Any: """Constructs reader instance from HTTP response. :param response: :class:`~aiohttp.client.ClientResponse` instance """ obj = cls.response_wrapper_cls(response, cls(response.headers, response.content)) return obj
python
def from_response(cls, response: 'ClientResponse') -> Any: """Constructs reader instance from HTTP response. :param response: :class:`~aiohttp.client.ClientResponse` instance """ obj = cls.response_wrapper_cls(response, cls(response.headers, response.content)) return obj
[ "def", "from_response", "(", "cls", ",", "response", ":", "'ClientResponse'", ")", "->", "Any", ":", "obj", "=", "cls", ".", "response_wrapper_cls", "(", "response", ",", "cls", "(", "response", ".", "headers", ",", "response", ".", "content", ")", ")", "return", "obj" ]
Constructs reader instance from HTTP response. :param response: :class:`~aiohttp.client.ClientResponse` instance
[ "Constructs", "reader", "instance", "from", "HTTP", "response", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L557-L564
26,957
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader.next
async def next(self) -> Any: """Emits the next multipart body part.""" # So, if we're at BOF, we need to skip till the boundary. if self._at_eof: return await self._maybe_release_last_part() if self._at_bof: await self._read_until_first_boundary() self._at_bof = False else: await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return self._last_part = await self.fetch_next_part() return self._last_part
python
async def next(self) -> Any: """Emits the next multipart body part.""" # So, if we're at BOF, we need to skip till the boundary. if self._at_eof: return await self._maybe_release_last_part() if self._at_bof: await self._read_until_first_boundary() self._at_bof = False else: await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return self._last_part = await self.fetch_next_part() return self._last_part
[ "async", "def", "next", "(", "self", ")", "->", "Any", ":", "# So, if we're at BOF, we need to skip till the boundary.", "if", "self", ".", "_at_eof", ":", "return", "await", "self", ".", "_maybe_release_last_part", "(", ")", "if", "self", ".", "_at_bof", ":", "await", "self", ".", "_read_until_first_boundary", "(", ")", "self", ".", "_at_bof", "=", "False", "else", ":", "await", "self", ".", "_read_boundary", "(", ")", "if", "self", ".", "_at_eof", ":", "# we just read the last boundary, nothing to do there", "return", "self", ".", "_last_part", "=", "await", "self", ".", "fetch_next_part", "(", ")", "return", "self", ".", "_last_part" ]
Emits the next multipart body part.
[ "Emits", "the", "next", "multipart", "body", "part", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L572-L586
26,958
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader.release
async def release(self) -> None: """Reads all the body parts to the void till the final boundary.""" while not self._at_eof: item = await self.next() if item is None: break await item.release()
python
async def release(self) -> None: """Reads all the body parts to the void till the final boundary.""" while not self._at_eof: item = await self.next() if item is None: break await item.release()
[ "async", "def", "release", "(", "self", ")", "->", "None", ":", "while", "not", "self", ".", "_at_eof", ":", "item", "=", "await", "self", ".", "next", "(", ")", "if", "item", "is", "None", ":", "break", "await", "item", ".", "release", "(", ")" ]
Reads all the body parts to the void till the final boundary.
[ "Reads", "all", "the", "body", "parts", "to", "the", "void", "till", "the", "final", "boundary", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L588-L594
26,959
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader._get_part_reader
def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any: """Dispatches the response by the `Content-Type` header, returning suitable reader instance. :param dict headers: Response headers """ ctype = headers.get(CONTENT_TYPE, '') mimetype = parse_mimetype(ctype) if mimetype.type == 'multipart': if self.multipart_reader_cls is None: return type(self)(headers, self._content) return self.multipart_reader_cls( headers, self._content, _newline=self._newline ) else: return self.part_reader_cls( self._boundary, headers, self._content, _newline=self._newline )
python
def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any: """Dispatches the response by the `Content-Type` header, returning suitable reader instance. :param dict headers: Response headers """ ctype = headers.get(CONTENT_TYPE, '') mimetype = parse_mimetype(ctype) if mimetype.type == 'multipart': if self.multipart_reader_cls is None: return type(self)(headers, self._content) return self.multipart_reader_cls( headers, self._content, _newline=self._newline ) else: return self.part_reader_cls( self._boundary, headers, self._content, _newline=self._newline )
[ "def", "_get_part_reader", "(", "self", ",", "headers", ":", "'CIMultiDictProxy[str]'", ")", "->", "Any", ":", "ctype", "=", "headers", ".", "get", "(", "CONTENT_TYPE", ",", "''", ")", "mimetype", "=", "parse_mimetype", "(", "ctype", ")", "if", "mimetype", ".", "type", "==", "'multipart'", ":", "if", "self", ".", "multipart_reader_cls", "is", "None", ":", "return", "type", "(", "self", ")", "(", "headers", ",", "self", ".", "_content", ")", "return", "self", ".", "multipart_reader_cls", "(", "headers", ",", "self", ".", "_content", ",", "_newline", "=", "self", ".", "_newline", ")", "else", ":", "return", "self", ".", "part_reader_cls", "(", "self", ".", "_boundary", ",", "headers", ",", "self", ".", "_content", ",", "_newline", "=", "self", ".", "_newline", ")" ]
Dispatches the response by the `Content-Type` header, returning suitable reader instance. :param dict headers: Response headers
[ "Dispatches", "the", "response", "by", "the", "Content", "-", "Type", "header", "returning", "suitable", "reader", "instance", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L601-L619
26,960
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader._maybe_release_last_part
async def _maybe_release_last_part(self) -> None: """Ensures that the last read body part is read completely.""" if self._last_part is not None: if not self._last_part.at_eof(): await self._last_part.release() self._unread.extend(self._last_part._unread) self._last_part = None
python
async def _maybe_release_last_part(self) -> None: """Ensures that the last read body part is read completely.""" if self._last_part is not None: if not self._last_part.at_eof(): await self._last_part.release() self._unread.extend(self._last_part._unread) self._last_part = None
[ "async", "def", "_maybe_release_last_part", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_last_part", "is", "not", "None", ":", "if", "not", "self", ".", "_last_part", ".", "at_eof", "(", ")", ":", "await", "self", ".", "_last_part", ".", "release", "(", ")", "self", ".", "_unread", ".", "extend", "(", "self", ".", "_last_part", ".", "_unread", ")", "self", ".", "_last_part", "=", "None" ]
Ensures that the last read body part is read completely.
[ "Ensures", "that", "the", "last", "read", "body", "part", "is", "read", "completely", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L698-L704
26,961
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter._boundary_value
def _boundary_value(self) -> str: """Wrap boundary parameter value in quotes, if necessary. Reads self.boundary and returns a unicode sting. """ # Refer to RFCs 7231, 7230, 5234. # # parameter = token "=" ( token / quoted-string ) # token = 1*tchar # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text # obs-text = %x80-FF # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" # / DIGIT / ALPHA # ; any VCHAR, except delimiters # VCHAR = %x21-7E value = self._boundary if re.match(self._valid_tchar_regex, value): return value.decode('ascii') # cannot fail if re.search(self._invalid_qdtext_char_regex, value): raise ValueError("boundary value contains invalid characters") # escape %x5C and %x22 quoted_value_content = value.replace(b'\\', b'\\\\') quoted_value_content = quoted_value_content.replace(b'"', b'\\"') return '"' + quoted_value_content.decode('ascii') + '"'
python
def _boundary_value(self) -> str: """Wrap boundary parameter value in quotes, if necessary. Reads self.boundary and returns a unicode sting. """ # Refer to RFCs 7231, 7230, 5234. # # parameter = token "=" ( token / quoted-string ) # token = 1*tchar # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text # obs-text = %x80-FF # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" # / DIGIT / ALPHA # ; any VCHAR, except delimiters # VCHAR = %x21-7E value = self._boundary if re.match(self._valid_tchar_regex, value): return value.decode('ascii') # cannot fail if re.search(self._invalid_qdtext_char_regex, value): raise ValueError("boundary value contains invalid characters") # escape %x5C and %x22 quoted_value_content = value.replace(b'\\', b'\\\\') quoted_value_content = quoted_value_content.replace(b'"', b'\\"') return '"' + quoted_value_content.decode('ascii') + '"'
[ "def", "_boundary_value", "(", "self", ")", "->", "str", ":", "# Refer to RFCs 7231, 7230, 5234.", "#", "# parameter = token \"=\" ( token / quoted-string )", "# token = 1*tchar", "# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE", "# qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text", "# obs-text = %x80-FF", "# quoted-pair = \"\\\" ( HTAB / SP / VCHAR / obs-text )", "# tchar = \"!\" / \"#\" / \"$\" / \"%\" / \"&\" / \"'\" / \"*\"", "# / \"+\" / \"-\" / \".\" / \"^\" / \"_\" / \"`\" / \"|\" / \"~\"", "# / DIGIT / ALPHA", "# ; any VCHAR, except delimiters", "# VCHAR = %x21-7E", "value", "=", "self", ".", "_boundary", "if", "re", ".", "match", "(", "self", ".", "_valid_tchar_regex", ",", "value", ")", ":", "return", "value", ".", "decode", "(", "'ascii'", ")", "# cannot fail", "if", "re", ".", "search", "(", "self", ".", "_invalid_qdtext_char_regex", ",", "value", ")", ":", "raise", "ValueError", "(", "\"boundary value contains invalid characters\"", ")", "# escape %x5C and %x22", "quoted_value_content", "=", "value", ".", "replace", "(", "b'\\\\'", ",", "b'\\\\\\\\'", ")", "quoted_value_content", "=", "quoted_value_content", ".", "replace", "(", "b'\"'", ",", "b'\\\\\"'", ")", "return", "'\"'", "+", "quoted_value_content", ".", "decode", "(", "'ascii'", ")", "+", "'\"'" ]
Wrap boundary parameter value in quotes, if necessary. Reads self.boundary and returns a unicode sting.
[ "Wrap", "boundary", "parameter", "value", "in", "quotes", "if", "necessary", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L752-L781
26,962
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.append_payload
def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" # compression encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) if encoding == 'identity': encoding = None # te encoding te_encoding = payload.headers.get( CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): raise RuntimeError('unknown content transfer encoding: {}' ''.format(te_encoding)) if te_encoding == 'binary': te_encoding = None # size size = payload.size if size is not None and not (encoding or te_encoding): payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore return payload
python
def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" # compression encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) if encoding == 'identity': encoding = None # te encoding te_encoding = payload.headers.get( CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): raise RuntimeError('unknown content transfer encoding: {}' ''.format(te_encoding)) if te_encoding == 'binary': te_encoding = None # size size = payload.size if size is not None and not (encoding or te_encoding): payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore return payload
[ "def", "append_payload", "(", "self", ",", "payload", ":", "Payload", ")", "->", "Payload", ":", "# compression", "encoding", "=", "payload", ".", "headers", ".", "get", "(", "CONTENT_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "# type: Optional[str] # noqa", "if", "encoding", "and", "encoding", "not", "in", "(", "'deflate'", ",", "'gzip'", ",", "'identity'", ")", ":", "raise", "RuntimeError", "(", "'unknown content encoding: {}'", ".", "format", "(", "encoding", ")", ")", "if", "encoding", "==", "'identity'", ":", "encoding", "=", "None", "# te encoding", "te_encoding", "=", "payload", ".", "headers", ".", "get", "(", "CONTENT_TRANSFER_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "# type: Optional[str] # noqa", "if", "te_encoding", "not", "in", "(", "''", ",", "'base64'", ",", "'quoted-printable'", ",", "'binary'", ")", ":", "raise", "RuntimeError", "(", "'unknown content transfer encoding: {}'", "''", ".", "format", "(", "te_encoding", ")", ")", "if", "te_encoding", "==", "'binary'", ":", "te_encoding", "=", "None", "# size", "size", "=", "payload", ".", "size", "if", "size", "is", "not", "None", "and", "not", "(", "encoding", "or", "te_encoding", ")", ":", "payload", ".", "headers", "[", "CONTENT_LENGTH", "]", "=", "str", "(", "size", ")", "self", ".", "_parts", ".", "append", "(", "(", "payload", ",", "encoding", ",", "te_encoding", ")", ")", "# type: ignore", "return", "payload" ]
Adds a new body part to multipart writer.
[ "Adds", "a", "new", "body", "part", "to", "multipart", "writer", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L806-L830
26,963
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.append_json
def append_json( self, obj: Any, headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append JSON part.""" if headers is None: headers = CIMultiDict() return self.append_payload(JsonPayload(obj, headers=headers))
python
def append_json( self, obj: Any, headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append JSON part.""" if headers is None: headers = CIMultiDict() return self.append_payload(JsonPayload(obj, headers=headers))
[ "def", "append_json", "(", "self", ",", "obj", ":", "Any", ",", "headers", ":", "Optional", "[", "'MultiMapping[str]'", "]", "=", "None", ")", "->", "Payload", ":", "if", "headers", "is", "None", ":", "headers", "=", "CIMultiDict", "(", ")", "return", "self", ".", "append_payload", "(", "JsonPayload", "(", "obj", ",", "headers", "=", "headers", ")", ")" ]
Helper to append JSON part.
[ "Helper", "to", "append", "JSON", "part", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L832-L841
26,964
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.append_form
def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) if headers is None: headers = CIMultiDict() if isinstance(obj, Mapping): obj = list(obj.items()) data = urlencode(obj, doseq=True) return self.append_payload( StringPayload(data, headers=headers, content_type='application/x-www-form-urlencoded'))
python
def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) if headers is None: headers = CIMultiDict() if isinstance(obj, Mapping): obj = list(obj.items()) data = urlencode(obj, doseq=True) return self.append_payload( StringPayload(data, headers=headers, content_type='application/x-www-form-urlencoded'))
[ "def", "append_form", "(", "self", ",", "obj", ":", "Union", "[", "Sequence", "[", "Tuple", "[", "str", ",", "str", "]", "]", ",", "Mapping", "[", "str", ",", "str", "]", "]", ",", "headers", ":", "Optional", "[", "'MultiMapping[str]'", "]", "=", "None", ")", "->", "Payload", ":", "assert", "isinstance", "(", "obj", ",", "(", "Sequence", ",", "Mapping", ")", ")", "if", "headers", "is", "None", ":", "headers", "=", "CIMultiDict", "(", ")", "if", "isinstance", "(", "obj", ",", "Mapping", ")", ":", "obj", "=", "list", "(", "obj", ".", "items", "(", ")", ")", "data", "=", "urlencode", "(", "obj", ",", "doseq", "=", "True", ")", "return", "self", ".", "append_payload", "(", "StringPayload", "(", "data", ",", "headers", "=", "headers", ",", "content_type", "=", "'application/x-www-form-urlencoded'", ")", ")" ]
Helper to append form urlencoded part.
[ "Helper", "to", "append", "form", "urlencoded", "part", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L843-L861
26,965
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.size
def size(self) -> Optional[int]: """Size of the payload.""" if not self._parts: return 0 total = 0 for part, encoding, te_encoding in self._parts: if encoding or te_encoding or part.size is None: return None total += int( 2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n' part.size + len(part._binary_headers) + 2 # b'\r\n' ) total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total
python
def size(self) -> Optional[int]: """Size of the payload.""" if not self._parts: return 0 total = 0 for part, encoding, te_encoding in self._parts: if encoding or te_encoding or part.size is None: return None total += int( 2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n' part.size + len(part._binary_headers) + 2 # b'\r\n' ) total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total
[ "def", "size", "(", "self", ")", "->", "Optional", "[", "int", "]", ":", "if", "not", "self", ".", "_parts", ":", "return", "0", "total", "=", "0", "for", "part", ",", "encoding", ",", "te_encoding", "in", "self", ".", "_parts", ":", "if", "encoding", "or", "te_encoding", "or", "part", ".", "size", "is", "None", ":", "return", "None", "total", "+=", "int", "(", "2", "+", "len", "(", "self", ".", "_boundary", ")", "+", "2", "+", "# b'--'+self._boundary+b'\\r\\n'", "part", ".", "size", "+", "len", "(", "part", ".", "_binary_headers", ")", "+", "2", "# b'\\r\\n'", ")", "total", "+=", "2", "+", "len", "(", "self", ".", "_boundary", ")", "+", "4", "# b'--'+self._boundary+b'--\\r\\n'", "return", "total" ]
Size of the payload.
[ "Size", "of", "the", "payload", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L864-L881
26,966
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.write
async def write(self, writer: Any, close_boundary: bool=True) -> None: """Write body.""" if not self._parts: return for part, encoding, te_encoding in self._parts: await writer.write(b'--' + self._boundary + b'\r\n') await writer.write(part._binary_headers) if encoding or te_encoding: w = MultipartPayloadWriter(writer) if encoding: w.enable_compression(encoding) if te_encoding: w.enable_encoding(te_encoding) await part.write(w) # type: ignore await w.write_eof() else: await part.write(writer) await writer.write(b'\r\n') if close_boundary: await writer.write(b'--' + self._boundary + b'--\r\n')
python
async def write(self, writer: Any, close_boundary: bool=True) -> None: """Write body.""" if not self._parts: return for part, encoding, te_encoding in self._parts: await writer.write(b'--' + self._boundary + b'\r\n') await writer.write(part._binary_headers) if encoding or te_encoding: w = MultipartPayloadWriter(writer) if encoding: w.enable_compression(encoding) if te_encoding: w.enable_encoding(te_encoding) await part.write(w) # type: ignore await w.write_eof() else: await part.write(writer) await writer.write(b'\r\n') if close_boundary: await writer.write(b'--' + self._boundary + b'--\r\n')
[ "async", "def", "write", "(", "self", ",", "writer", ":", "Any", ",", "close_boundary", ":", "bool", "=", "True", ")", "->", "None", ":", "if", "not", "self", ".", "_parts", ":", "return", "for", "part", ",", "encoding", ",", "te_encoding", "in", "self", ".", "_parts", ":", "await", "writer", ".", "write", "(", "b'--'", "+", "self", ".", "_boundary", "+", "b'\\r\\n'", ")", "await", "writer", ".", "write", "(", "part", ".", "_binary_headers", ")", "if", "encoding", "or", "te_encoding", ":", "w", "=", "MultipartPayloadWriter", "(", "writer", ")", "if", "encoding", ":", "w", ".", "enable_compression", "(", "encoding", ")", "if", "te_encoding", ":", "w", ".", "enable_encoding", "(", "te_encoding", ")", "await", "part", ".", "write", "(", "w", ")", "# type: ignore", "await", "w", ".", "write_eof", "(", ")", "else", ":", "await", "part", ".", "write", "(", "writer", ")", "await", "writer", ".", "write", "(", "b'\\r\\n'", ")", "if", "close_boundary", ":", "await", "writer", ".", "write", "(", "b'--'", "+", "self", ".", "_boundary", "+", "b'--\\r\\n'", ")" ]
Write body.
[ "Write", "body", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L883-L907
26,967
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_version
def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. parser HTTP version '1.1' => (1, 1) """ if isinstance(version, str): v = [l.strip() for l in version.split('.', 1)] try: version = http.HttpVersion(int(v[0]), int(v[1])) except ValueError: raise ValueError( 'Can not parse http version number: {}' .format(version)) from None self.version = version
python
def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. parser HTTP version '1.1' => (1, 1) """ if isinstance(version, str): v = [l.strip() for l in version.split('.', 1)] try: version = http.HttpVersion(int(v[0]), int(v[1])) except ValueError: raise ValueError( 'Can not parse http version number: {}' .format(version)) from None self.version = version
[ "def", "update_version", "(", "self", ",", "version", ":", "Union", "[", "http", ".", "HttpVersion", ",", "str", "]", ")", "->", "None", ":", "if", "isinstance", "(", "version", ",", "str", ")", ":", "v", "=", "[", "l", ".", "strip", "(", ")", "for", "l", "in", "version", ".", "split", "(", "'.'", ",", "1", ")", "]", "try", ":", "version", "=", "http", ".", "HttpVersion", "(", "int", "(", "v", "[", "0", "]", ")", ",", "int", "(", "v", "[", "1", "]", ")", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Can not parse http version number: {}'", ".", "format", "(", "version", ")", ")", "from", "None", "self", ".", "version", "=", "version" ]
Convert request version to two elements tuple. parser HTTP version '1.1' => (1, 1)
[ "Convert", "request", "version", "to", "two", "elements", "tuple", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L307-L320
26,968
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_headers
def update_headers(self, headers: Optional[LooseHeaders]) -> None: """Update request headers.""" self.headers = CIMultiDict() # type: CIMultiDict[str] # add host netloc = cast(str, self.url.raw_host) if helpers.is_ipv6_address(netloc): netloc = '[{}]'.format(netloc) if not self.url.is_default_port(): netloc += ':' + str(self.url.port) self.headers[hdrs.HOST] = netloc if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): headers = headers.items() # type: ignore for key, value in headers: # A special case for Host header if key.lower() == 'host': self.headers[key] = value else: self.headers.add(key, value)
python
def update_headers(self, headers: Optional[LooseHeaders]) -> None: """Update request headers.""" self.headers = CIMultiDict() # type: CIMultiDict[str] # add host netloc = cast(str, self.url.raw_host) if helpers.is_ipv6_address(netloc): netloc = '[{}]'.format(netloc) if not self.url.is_default_port(): netloc += ':' + str(self.url.port) self.headers[hdrs.HOST] = netloc if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): headers = headers.items() # type: ignore for key, value in headers: # A special case for Host header if key.lower() == 'host': self.headers[key] = value else: self.headers.add(key, value)
[ "def", "update_headers", "(", "self", ",", "headers", ":", "Optional", "[", "LooseHeaders", "]", ")", "->", "None", ":", "self", ".", "headers", "=", "CIMultiDict", "(", ")", "# type: CIMultiDict[str]", "# add host", "netloc", "=", "cast", "(", "str", ",", "self", ".", "url", ".", "raw_host", ")", "if", "helpers", ".", "is_ipv6_address", "(", "netloc", ")", ":", "netloc", "=", "'[{}]'", ".", "format", "(", "netloc", ")", "if", "not", "self", ".", "url", ".", "is_default_port", "(", ")", ":", "netloc", "+=", "':'", "+", "str", "(", "self", ".", "url", ".", "port", ")", "self", ".", "headers", "[", "hdrs", ".", "HOST", "]", "=", "netloc", "if", "headers", ":", "if", "isinstance", "(", "headers", ",", "(", "dict", ",", "MultiDictProxy", ",", "MultiDict", ")", ")", ":", "headers", "=", "headers", ".", "items", "(", ")", "# type: ignore", "for", "key", ",", "value", "in", "headers", ":", "# A special case for Host header", "if", "key", ".", "lower", "(", ")", "==", "'host'", ":", "self", ".", "headers", "[", "key", "]", "=", "value", "else", ":", "self", ".", "headers", ".", "add", "(", "key", ",", "value", ")" ]
Update request headers.
[ "Update", "request", "headers", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L322-L343
26,969
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_cookies
def update_cookies(self, cookies: Optional[LooseCookies]) -> None: """Update request cookies header.""" if not cookies: return c = SimpleCookie() if hdrs.COOKIE in self.headers: c.load(self.headers.get(hdrs.COOKIE, '')) del self.headers[hdrs.COOKIE] if isinstance(cookies, Mapping): iter_cookies = cookies.items() else: iter_cookies = cookies # type: ignore for name, value in iter_cookies: if isinstance(value, Morsel): # Preserve coded_value mrsl_val = value.get(value.key, Morsel()) mrsl_val.set(value.key, value.value, value.coded_value) # type: ignore # noqa c[name] = mrsl_val else: c[name] = value # type: ignore self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
python
def update_cookies(self, cookies: Optional[LooseCookies]) -> None: """Update request cookies header.""" if not cookies: return c = SimpleCookie() if hdrs.COOKIE in self.headers: c.load(self.headers.get(hdrs.COOKIE, '')) del self.headers[hdrs.COOKIE] if isinstance(cookies, Mapping): iter_cookies = cookies.items() else: iter_cookies = cookies # type: ignore for name, value in iter_cookies: if isinstance(value, Morsel): # Preserve coded_value mrsl_val = value.get(value.key, Morsel()) mrsl_val.set(value.key, value.value, value.coded_value) # type: ignore # noqa c[name] = mrsl_val else: c[name] = value # type: ignore self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
[ "def", "update_cookies", "(", "self", ",", "cookies", ":", "Optional", "[", "LooseCookies", "]", ")", "->", "None", ":", "if", "not", "cookies", ":", "return", "c", "=", "SimpleCookie", "(", ")", "if", "hdrs", ".", "COOKIE", "in", "self", ".", "headers", ":", "c", ".", "load", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "COOKIE", ",", "''", ")", ")", "del", "self", ".", "headers", "[", "hdrs", ".", "COOKIE", "]", "if", "isinstance", "(", "cookies", ",", "Mapping", ")", ":", "iter_cookies", "=", "cookies", ".", "items", "(", ")", "else", ":", "iter_cookies", "=", "cookies", "# type: ignore", "for", "name", ",", "value", "in", "iter_cookies", ":", "if", "isinstance", "(", "value", ",", "Morsel", ")", ":", "# Preserve coded_value", "mrsl_val", "=", "value", ".", "get", "(", "value", ".", "key", ",", "Morsel", "(", ")", ")", "mrsl_val", ".", "set", "(", "value", ".", "key", ",", "value", ".", "value", ",", "value", ".", "coded_value", ")", "# type: ignore # noqa", "c", "[", "name", "]", "=", "mrsl_val", "else", ":", "c", "[", "name", "]", "=", "value", "# type: ignore", "self", ".", "headers", "[", "hdrs", ".", "COOKIE", "]", "=", "c", ".", "output", "(", "header", "=", "''", ",", "sep", "=", "';'", ")", ".", "strip", "(", ")" ]
Update request cookies header.
[ "Update", "request", "cookies", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L358-L381
26,970
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_content_encoding
def update_content_encoding(self, data: Any) -> None: """Set request content encoding.""" if not data: return enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() if enc: if self.compress: raise ValueError( 'compress can not be set ' 'if Content-Encoding header is set') elif self.compress: if not isinstance(self.compress, str): self.compress = 'deflate' self.headers[hdrs.CONTENT_ENCODING] = self.compress self.chunked = True
python
def update_content_encoding(self, data: Any) -> None: """Set request content encoding.""" if not data: return enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() if enc: if self.compress: raise ValueError( 'compress can not be set ' 'if Content-Encoding header is set') elif self.compress: if not isinstance(self.compress, str): self.compress = 'deflate' self.headers[hdrs.CONTENT_ENCODING] = self.compress self.chunked = True
[ "def", "update_content_encoding", "(", "self", ",", "data", ":", "Any", ")", "->", "None", ":", "if", "not", "data", ":", "return", "enc", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "if", "enc", ":", "if", "self", ".", "compress", ":", "raise", "ValueError", "(", "'compress can not be set '", "'if Content-Encoding header is set'", ")", "elif", "self", ".", "compress", ":", "if", "not", "isinstance", "(", "self", ".", "compress", ",", "str", ")", ":", "self", ".", "compress", "=", "'deflate'", "self", ".", "headers", "[", "hdrs", ".", "CONTENT_ENCODING", "]", "=", "self", ".", "compress", "self", ".", "chunked", "=", "True" ]
Set request content encoding.
[ "Set", "request", "content", "encoding", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L383-L398
26,971
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_transfer_encoding
def update_transfer_encoding(self) -> None: """Analyze transfer-encoding header.""" te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower() if 'chunked' in te: if self.chunked: raise ValueError( 'chunked can not be set ' 'if "Transfer-Encoding: chunked" header is set') elif self.chunked: if hdrs.CONTENT_LENGTH in self.headers: raise ValueError( 'chunked can not be set ' 'if Content-Length header is set') self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' else: if hdrs.CONTENT_LENGTH not in self.headers: self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
python
def update_transfer_encoding(self) -> None: """Analyze transfer-encoding header.""" te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower() if 'chunked' in te: if self.chunked: raise ValueError( 'chunked can not be set ' 'if "Transfer-Encoding: chunked" header is set') elif self.chunked: if hdrs.CONTENT_LENGTH in self.headers: raise ValueError( 'chunked can not be set ' 'if Content-Length header is set') self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' else: if hdrs.CONTENT_LENGTH not in self.headers: self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
[ "def", "update_transfer_encoding", "(", "self", ")", "->", "None", ":", "te", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "TRANSFER_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "if", "'chunked'", "in", "te", ":", "if", "self", ".", "chunked", ":", "raise", "ValueError", "(", "'chunked can not be set '", "'if \"Transfer-Encoding: chunked\" header is set'", ")", "elif", "self", ".", "chunked", ":", "if", "hdrs", ".", "CONTENT_LENGTH", "in", "self", ".", "headers", ":", "raise", "ValueError", "(", "'chunked can not be set '", "'if Content-Length header is set'", ")", "self", ".", "headers", "[", "hdrs", ".", "TRANSFER_ENCODING", "]", "=", "'chunked'", "else", ":", "if", "hdrs", ".", "CONTENT_LENGTH", "not", "in", "self", ".", "headers", ":", "self", ".", "headers", "[", "hdrs", ".", "CONTENT_LENGTH", "]", "=", "str", "(", "len", "(", "self", ".", "body", ")", ")" ]
Analyze transfer-encoding header.
[ "Analyze", "transfer", "-", "encoding", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L400-L419
26,972
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_auth
def update_auth(self, auth: Optional[BasicAuth]) -> None: """Set basic auth.""" if auth is None: auth = self.auth if auth is None: return if not isinstance(auth, helpers.BasicAuth): raise TypeError('BasicAuth() tuple is required instead') self.headers[hdrs.AUTHORIZATION] = auth.encode()
python
def update_auth(self, auth: Optional[BasicAuth]) -> None: """Set basic auth.""" if auth is None: auth = self.auth if auth is None: return if not isinstance(auth, helpers.BasicAuth): raise TypeError('BasicAuth() tuple is required instead') self.headers[hdrs.AUTHORIZATION] = auth.encode()
[ "def", "update_auth", "(", "self", ",", "auth", ":", "Optional", "[", "BasicAuth", "]", ")", "->", "None", ":", "if", "auth", "is", "None", ":", "auth", "=", "self", ".", "auth", "if", "auth", "is", "None", ":", "return", "if", "not", "isinstance", "(", "auth", ",", "helpers", ".", "BasicAuth", ")", ":", "raise", "TypeError", "(", "'BasicAuth() tuple is required instead'", ")", "self", ".", "headers", "[", "hdrs", ".", "AUTHORIZATION", "]", "=", "auth", ".", "encode", "(", ")" ]
Set basic auth.
[ "Set", "basic", "auth", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L421-L431
26,973
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.write_bytes
async def write_bytes(self, writer: AbstractStreamWriter, conn: 'Connection') -> None: """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: await writer.drain() await self._continue protocol = conn.protocol assert protocol is not None try: if isinstance(self.body, payload.Payload): await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): self.body = (self.body,) # type: ignore for chunk in self.body: await writer.write(chunk) # type: ignore await writer.write_eof() except OSError as exc: new_exc = ClientOSError( exc.errno, 'Can not write request body for %s' % self.url) new_exc.__context__ = exc new_exc.__cause__ = exc protocol.set_exception(new_exc) except asyncio.CancelledError as exc: if not conn.closed: protocol.set_exception(exc) except Exception as exc: protocol.set_exception(exc) finally: self._writer = None
python
async def write_bytes(self, writer: AbstractStreamWriter, conn: 'Connection') -> None: """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: await writer.drain() await self._continue protocol = conn.protocol assert protocol is not None try: if isinstance(self.body, payload.Payload): await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): self.body = (self.body,) # type: ignore for chunk in self.body: await writer.write(chunk) # type: ignore await writer.write_eof() except OSError as exc: new_exc = ClientOSError( exc.errno, 'Can not write request body for %s' % self.url) new_exc.__context__ = exc new_exc.__cause__ = exc protocol.set_exception(new_exc) except asyncio.CancelledError as exc: if not conn.closed: protocol.set_exception(exc) except Exception as exc: protocol.set_exception(exc) finally: self._writer = None
[ "async", "def", "write_bytes", "(", "self", ",", "writer", ":", "AbstractStreamWriter", ",", "conn", ":", "'Connection'", ")", "->", "None", ":", "# 100 response", "if", "self", ".", "_continue", "is", "not", "None", ":", "await", "writer", ".", "drain", "(", ")", "await", "self", ".", "_continue", "protocol", "=", "conn", ".", "protocol", "assert", "protocol", "is", "not", "None", "try", ":", "if", "isinstance", "(", "self", ".", "body", ",", "payload", ".", "Payload", ")", ":", "await", "self", ".", "body", ".", "write", "(", "writer", ")", "else", ":", "if", "isinstance", "(", "self", ".", "body", ",", "(", "bytes", ",", "bytearray", ")", ")", ":", "self", ".", "body", "=", "(", "self", ".", "body", ",", ")", "# type: ignore", "for", "chunk", "in", "self", ".", "body", ":", "await", "writer", ".", "write", "(", "chunk", ")", "# type: ignore", "await", "writer", ".", "write_eof", "(", ")", "except", "OSError", "as", "exc", ":", "new_exc", "=", "ClientOSError", "(", "exc", ".", "errno", ",", "'Can not write request body for %s'", "%", "self", ".", "url", ")", "new_exc", ".", "__context__", "=", "exc", "new_exc", ".", "__cause__", "=", "exc", "protocol", ".", "set_exception", "(", "new_exc", ")", "except", "asyncio", ".", "CancelledError", "as", "exc", ":", "if", "not", "conn", ".", "closed", ":", "protocol", ".", "set_exception", "(", "exc", ")", "except", "Exception", "as", "exc", ":", "protocol", ".", "set_exception", "(", "exc", ")", "finally", ":", "self", ".", "_writer", "=", "None" ]
Support coroutines that yields bytes objects.
[ "Support", "coroutines", "that", "yields", "bytes", "objects", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L501-L535
26,974
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.start
async def start(self, connection: 'Connection') -> 'ClientResponse': """Start response processing.""" self._closed = False self._protocol = connection.protocol self._connection = connection with self._timer: while True: # read response try: message, payload = await self._protocol.read() # type: ignore # noqa except http.HttpProcessingError as exc: raise ClientResponseError( self.request_info, self.history, status=exc.code, message=exc.message, headers=exc.headers) from exc if (message.code < 100 or message.code > 199 or message.code == 101): break if self._continue is not None: set_result(self._continue, True) self._continue = None # payload eof handler payload.on_eof(self._response_eof) # response status self.version = message.version self.status = message.code self.reason = message.reason # headers self._headers = message.headers # type is CIMultiDictProxy self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] # payload self.content = payload # cookies for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): try: self.cookies.load(hdr) except CookieError as exc: client_logger.warning( 'Can not load response cookies: %s', exc) return self
python
async def start(self, connection: 'Connection') -> 'ClientResponse': """Start response processing.""" self._closed = False self._protocol = connection.protocol self._connection = connection with self._timer: while True: # read response try: message, payload = await self._protocol.read() # type: ignore # noqa except http.HttpProcessingError as exc: raise ClientResponseError( self.request_info, self.history, status=exc.code, message=exc.message, headers=exc.headers) from exc if (message.code < 100 or message.code > 199 or message.code == 101): break if self._continue is not None: set_result(self._continue, True) self._continue = None # payload eof handler payload.on_eof(self._response_eof) # response status self.version = message.version self.status = message.code self.reason = message.reason # headers self._headers = message.headers # type is CIMultiDictProxy self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] # payload self.content = payload # cookies for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): try: self.cookies.load(hdr) except CookieError as exc: client_logger.warning( 'Can not load response cookies: %s', exc) return self
[ "async", "def", "start", "(", "self", ",", "connection", ":", "'Connection'", ")", "->", "'ClientResponse'", ":", "self", ".", "_closed", "=", "False", "self", ".", "_protocol", "=", "connection", ".", "protocol", "self", ".", "_connection", "=", "connection", "with", "self", ".", "_timer", ":", "while", "True", ":", "# read response", "try", ":", "message", ",", "payload", "=", "await", "self", ".", "_protocol", ".", "read", "(", ")", "# type: ignore # noqa", "except", "http", ".", "HttpProcessingError", "as", "exc", ":", "raise", "ClientResponseError", "(", "self", ".", "request_info", ",", "self", ".", "history", ",", "status", "=", "exc", ".", "code", ",", "message", "=", "exc", ".", "message", ",", "headers", "=", "exc", ".", "headers", ")", "from", "exc", "if", "(", "message", ".", "code", "<", "100", "or", "message", ".", "code", ">", "199", "or", "message", ".", "code", "==", "101", ")", ":", "break", "if", "self", ".", "_continue", "is", "not", "None", ":", "set_result", "(", "self", ".", "_continue", ",", "True", ")", "self", ".", "_continue", "=", "None", "# payload eof handler", "payload", ".", "on_eof", "(", "self", ".", "_response_eof", ")", "# response status", "self", ".", "version", "=", "message", ".", "version", "self", ".", "status", "=", "message", ".", "code", "self", ".", "reason", "=", "message", ".", "reason", "# headers", "self", ".", "_headers", "=", "message", ".", "headers", "# type is CIMultiDictProxy", "self", ".", "_raw_headers", "=", "message", ".", "raw_headers", "# type is Tuple[bytes, bytes]", "# payload", "self", ".", "content", "=", "payload", "# cookies", "for", "hdr", "in", "self", ".", "headers", ".", "getall", "(", "hdrs", ".", "SET_COOKIE", ",", "(", ")", ")", ":", "try", ":", "self", ".", "cookies", ".", "load", "(", "hdr", ")", "except", "CookieError", "as", "exc", ":", "client_logger", ".", "warning", "(", "'Can not load response cookies: %s'", ",", "exc", ")", "return", "self" ]
Start response processing.
[ "Start", "response", "processing", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L788-L835
26,975
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.read
async def read(self) -> bytes: """Read response payload.""" if self._body is None: try: self._body = await self.content.read() for trace in self._traces: await trace.send_response_chunk_received(self._body) except BaseException: self.close() raise elif self._released: raise ClientConnectionError('Connection closed') return self._body
python
async def read(self) -> bytes: """Read response payload.""" if self._body is None: try: self._body = await self.content.read() for trace in self._traces: await trace.send_response_chunk_received(self._body) except BaseException: self.close() raise elif self._released: raise ClientConnectionError('Connection closed') return self._body
[ "async", "def", "read", "(", "self", ")", "->", "bytes", ":", "if", "self", ".", "_body", "is", "None", ":", "try", ":", "self", ".", "_body", "=", "await", "self", ".", "content", ".", "read", "(", ")", "for", "trace", "in", "self", ".", "_traces", ":", "await", "trace", ".", "send_response_chunk_received", "(", "self", ".", "_body", ")", "except", "BaseException", ":", "self", ".", "close", "(", ")", "raise", "elif", "self", ".", "_released", ":", "raise", "ClientConnectionError", "(", "'Connection closed'", ")", "return", "self", ".", "_body" ]
Read response payload.
[ "Read", "response", "payload", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L920-L933
26,976
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.text
async def text(self, encoding: Optional[str]=None, errors: str='strict') -> str: """Read response payload and decode.""" if self._body is None: await self.read() if encoding is None: encoding = self.get_encoding() return self._body.decode(encoding, errors=errors)
python
async def text(self, encoding: Optional[str]=None, errors: str='strict') -> str: """Read response payload and decode.""" if self._body is None: await self.read() if encoding is None: encoding = self.get_encoding() return self._body.decode(encoding, errors=errors)
[ "async", "def", "text", "(", "self", ",", "encoding", ":", "Optional", "[", "str", "]", "=", "None", ",", "errors", ":", "str", "=", "'strict'", ")", "->", "str", ":", "if", "self", ".", "_body", "is", "None", ":", "await", "self", ".", "read", "(", ")", "if", "encoding", "is", "None", ":", "encoding", "=", "self", ".", "get_encoding", "(", ")", "return", "self", ".", "_body", ".", "decode", "(", "encoding", ",", "errors", "=", "errors", ")" ]
Read response payload and decode.
[ "Read", "response", "payload", "and", "decode", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L956-L965
26,977
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.json
async def json(self, *, encoding: str=None, loads: JSONDecoder=DEFAULT_JSON_DECODER, content_type: Optional[str]='application/json') -> Any: """Read and decodes JSON response.""" if self._body is None: await self.read() if content_type: ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() if not _is_expected_content_type(ctype, content_type): raise ContentTypeError( self.request_info, self.history, message=('Attempt to decode JSON with ' 'unexpected mimetype: %s' % ctype), headers=self.headers) if encoding is None: encoding = self.get_encoding() return loads(self._body.decode(encoding))
python
async def json(self, *, encoding: str=None, loads: JSONDecoder=DEFAULT_JSON_DECODER, content_type: Optional[str]='application/json') -> Any: """Read and decodes JSON response.""" if self._body is None: await self.read() if content_type: ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() if not _is_expected_content_type(ctype, content_type): raise ContentTypeError( self.request_info, self.history, message=('Attempt to decode JSON with ' 'unexpected mimetype: %s' % ctype), headers=self.headers) if encoding is None: encoding = self.get_encoding() return loads(self._body.decode(encoding))
[ "async", "def", "json", "(", "self", ",", "*", ",", "encoding", ":", "str", "=", "None", ",", "loads", ":", "JSONDecoder", "=", "DEFAULT_JSON_DECODER", ",", "content_type", ":", "Optional", "[", "str", "]", "=", "'application/json'", ")", "->", "Any", ":", "if", "self", ".", "_body", "is", "None", ":", "await", "self", ".", "read", "(", ")", "if", "content_type", ":", "ctype", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ",", "''", ")", ".", "lower", "(", ")", "if", "not", "_is_expected_content_type", "(", "ctype", ",", "content_type", ")", ":", "raise", "ContentTypeError", "(", "self", ".", "request_info", ",", "self", ".", "history", ",", "message", "=", "(", "'Attempt to decode JSON with '", "'unexpected mimetype: %s'", "%", "ctype", ")", ",", "headers", "=", "self", ".", "headers", ")", "if", "encoding", "is", "None", ":", "encoding", "=", "self", ".", "get_encoding", "(", ")", "return", "loads", "(", "self", ".", "_body", ".", "decode", "(", "encoding", ")", ")" ]
Read and decodes JSON response.
[ "Read", "and", "decodes", "JSON", "response", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L967-L987
26,978
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.enable_chunked_encoding
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None: """Enables automatic chunked transfer encoding.""" self._chunked = True if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError("You can't enable chunked encoding when " "a content length is set") if chunk_size is not None: warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
python
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None: """Enables automatic chunked transfer encoding.""" self._chunked = True if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError("You can't enable chunked encoding when " "a content length is set") if chunk_size is not None: warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
[ "def", "enable_chunked_encoding", "(", "self", ",", "chunk_size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "None", ":", "self", ".", "_chunked", "=", "True", "if", "hdrs", ".", "CONTENT_LENGTH", "in", "self", ".", "_headers", ":", "raise", "RuntimeError", "(", "\"You can't enable chunked encoding when \"", "\"a content length is set\"", ")", "if", "chunk_size", "is", "not", "None", ":", "warnings", ".", "warn", "(", "'Chunk size is deprecated #1615'", ",", "DeprecationWarning", ")" ]
Enables automatic chunked transfer encoding.
[ "Enables", "automatic", "chunked", "transfer", "encoding", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L144-L152
26,979
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.enable_compression
def enable_compression(self, force: Optional[Union[bool, ContentCoding]]=None ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. if type(force) == bool: force = ContentCoding.deflate if force else ContentCoding.identity warnings.warn("Using boolean for force is deprecated #3318", DeprecationWarning) elif force is not None: assert isinstance(force, ContentCoding), ("force should one of " "None, bool or " "ContentEncoding") self._compression = True self._compression_force = force
python
def enable_compression(self, force: Optional[Union[bool, ContentCoding]]=None ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. if type(force) == bool: force = ContentCoding.deflate if force else ContentCoding.identity warnings.warn("Using boolean for force is deprecated #3318", DeprecationWarning) elif force is not None: assert isinstance(force, ContentCoding), ("force should one of " "None, bool or " "ContentEncoding") self._compression = True self._compression_force = force
[ "def", "enable_compression", "(", "self", ",", "force", ":", "Optional", "[", "Union", "[", "bool", ",", "ContentCoding", "]", "]", "=", "None", ")", "->", "None", ":", "# Backwards compatibility for when force was a bool <0.17.", "if", "type", "(", "force", ")", "==", "bool", ":", "force", "=", "ContentCoding", ".", "deflate", "if", "force", "else", "ContentCoding", ".", "identity", "warnings", ".", "warn", "(", "\"Using boolean for force is deprecated #3318\"", ",", "DeprecationWarning", ")", "elif", "force", "is", "not", "None", ":", "assert", "isinstance", "(", "force", ",", "ContentCoding", ")", ",", "(", "\"force should one of \"", "\"None, bool or \"", "\"ContentEncoding\"", ")", "self", ".", "_compression", "=", "True", "self", ".", "_compression_force", "=", "force" ]
Enables response compression encoding.
[ "Enables", "response", "compression", "encoding", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L154-L169
26,980
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.set_cookie
def set_cookie(self, name: str, value: str, *, expires: Optional[str]=None, domain: Optional[str]=None, max_age: Optional[Union[int, str]]=None, path: str='/', secure: Optional[str]=None, httponly: Optional[str]=None, version: Optional[str]=None) -> None: """Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None. """ old = self._cookies.get(name) if old is not None and old.coded_value == '': # deleted cookie self._cookies.pop(name, None) self._cookies[name] = value c = self._cookies[name] if expires is not None: c['expires'] = expires elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': del c['expires'] if domain is not None: c['domain'] = domain if max_age is not None: c['max-age'] = str(max_age) elif 'max-age' in c: del c['max-age'] c['path'] = path if secure is not None: c['secure'] = secure if httponly is not None: c['httponly'] = httponly if version is not None: c['version'] = version
python
def set_cookie(self, name: str, value: str, *, expires: Optional[str]=None, domain: Optional[str]=None, max_age: Optional[Union[int, str]]=None, path: str='/', secure: Optional[str]=None, httponly: Optional[str]=None, version: Optional[str]=None) -> None: """Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None. """ old = self._cookies.get(name) if old is not None and old.coded_value == '': # deleted cookie self._cookies.pop(name, None) self._cookies[name] = value c = self._cookies[name] if expires is not None: c['expires'] = expires elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': del c['expires'] if domain is not None: c['domain'] = domain if max_age is not None: c['max-age'] = str(max_age) elif 'max-age' in c: del c['max-age'] c['path'] = path if secure is not None: c['secure'] = secure if httponly is not None: c['httponly'] = httponly if version is not None: c['version'] = version
[ "def", "set_cookie", "(", "self", ",", "name", ":", "str", ",", "value", ":", "str", ",", "*", ",", "expires", ":", "Optional", "[", "str", "]", "=", "None", ",", "domain", ":", "Optional", "[", "str", "]", "=", "None", ",", "max_age", ":", "Optional", "[", "Union", "[", "int", ",", "str", "]", "]", "=", "None", ",", "path", ":", "str", "=", "'/'", ",", "secure", ":", "Optional", "[", "str", "]", "=", "None", ",", "httponly", ":", "Optional", "[", "str", "]", "=", "None", ",", "version", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "None", ":", "old", "=", "self", ".", "_cookies", ".", "get", "(", "name", ")", "if", "old", "is", "not", "None", "and", "old", ".", "coded_value", "==", "''", ":", "# deleted cookie", "self", ".", "_cookies", ".", "pop", "(", "name", ",", "None", ")", "self", ".", "_cookies", "[", "name", "]", "=", "value", "c", "=", "self", ".", "_cookies", "[", "name", "]", "if", "expires", "is", "not", "None", ":", "c", "[", "'expires'", "]", "=", "expires", "elif", "c", ".", "get", "(", "'expires'", ")", "==", "'Thu, 01 Jan 1970 00:00:00 GMT'", ":", "del", "c", "[", "'expires'", "]", "if", "domain", "is", "not", "None", ":", "c", "[", "'domain'", "]", "=", "domain", "if", "max_age", "is", "not", "None", ":", "c", "[", "'max-age'", "]", "=", "str", "(", "max_age", ")", "elif", "'max-age'", "in", "c", ":", "del", "c", "[", "'max-age'", "]", "c", "[", "'path'", "]", "=", "path", "if", "secure", "is", "not", "None", ":", "c", "[", "'secure'", "]", "=", "secure", "if", "httponly", "is", "not", "None", ":", "c", "[", "'httponly'", "]", "=", "httponly", "if", "version", "is", "not", "None", ":", "c", "[", "'version'", "]", "=", "version" ]
Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None.
[ "Set", "or", "update", "response", "cookie", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L179-L221
26,981
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.del_cookie
def del_cookie(self, name: str, *, domain: Optional[str]=None, path: str='/') -> None: """Delete cookie. Creates new empty expired cookie. """ # TODO: do we need domain/path here? self._cookies.pop(name, None) self.set_cookie(name, '', max_age=0, expires="Thu, 01 Jan 1970 00:00:00 GMT", domain=domain, path=path)
python
def del_cookie(self, name: str, *, domain: Optional[str]=None, path: str='/') -> None: """Delete cookie. Creates new empty expired cookie. """ # TODO: do we need domain/path here? self._cookies.pop(name, None) self.set_cookie(name, '', max_age=0, expires="Thu, 01 Jan 1970 00:00:00 GMT", domain=domain, path=path)
[ "def", "del_cookie", "(", "self", ",", "name", ":", "str", ",", "*", ",", "domain", ":", "Optional", "[", "str", "]", "=", "None", ",", "path", ":", "str", "=", "'/'", ")", "->", "None", ":", "# TODO: do we need domain/path here?", "self", ".", "_cookies", ".", "pop", "(", "name", ",", "None", ")", "self", ".", "set_cookie", "(", "name", ",", "''", ",", "max_age", "=", "0", ",", "expires", "=", "\"Thu, 01 Jan 1970 00:00:00 GMT\"", ",", "domain", "=", "domain", ",", "path", "=", "path", ")" ]
Delete cookie. Creates new empty expired cookie.
[ "Delete", "cookie", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L223-L234
26,982
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.last_modified
def last_modified(self) -> Optional[datetime.datetime]: """The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object. """ httpdate = self._headers.get(hdrs.LAST_MODIFIED) if httpdate is not None: timetuple = parsedate(httpdate) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
python
def last_modified(self) -> Optional[datetime.datetime]: """The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object. """ httpdate = self._headers.get(hdrs.LAST_MODIFIED) if httpdate is not None: timetuple = parsedate(httpdate) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
[ "def", "last_modified", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "httpdate", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "LAST_MODIFIED", ")", "if", "httpdate", "is", "not", "None", ":", "timetuple", "=", "parsedate", "(", "httpdate", ")", "if", "timetuple", "is", "not", "None", ":", "return", "datetime", ".", "datetime", "(", "*", "timetuple", "[", ":", "6", "]", ",", "tzinfo", "=", "datetime", ".", "timezone", ".", "utc", ")", "return", "None" ]
The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "Last", "-", "Modified", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L282-L293
26,983
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_static
def add_static(self, prefix: str, path: PathLike, *, name: Optional[str]=None, expect_handler: Optional[_ExpectHandler]=None, chunk_size: int=256 * 1024, show_index: bool=False, follow_symlinks: bool=False, append_version: bool=False) -> AbstractResource: """Add static files view. prefix - url prefix path - folder with files """ assert prefix.startswith('/') if prefix.endswith('/'): prefix = prefix[:-1] resource = StaticResource(prefix, path, name=name, expect_handler=expect_handler, chunk_size=chunk_size, show_index=show_index, follow_symlinks=follow_symlinks, append_version=append_version) self.register_resource(resource) return resource
python
def add_static(self, prefix: str, path: PathLike, *, name: Optional[str]=None, expect_handler: Optional[_ExpectHandler]=None, chunk_size: int=256 * 1024, show_index: bool=False, follow_symlinks: bool=False, append_version: bool=False) -> AbstractResource: """Add static files view. prefix - url prefix path - folder with files """ assert prefix.startswith('/') if prefix.endswith('/'): prefix = prefix[:-1] resource = StaticResource(prefix, path, name=name, expect_handler=expect_handler, chunk_size=chunk_size, show_index=show_index, follow_symlinks=follow_symlinks, append_version=append_version) self.register_resource(resource) return resource
[ "def", "add_static", "(", "self", ",", "prefix", ":", "str", ",", "path", ":", "PathLike", ",", "*", ",", "name", ":", "Optional", "[", "str", "]", "=", "None", ",", "expect_handler", ":", "Optional", "[", "_ExpectHandler", "]", "=", "None", ",", "chunk_size", ":", "int", "=", "256", "*", "1024", ",", "show_index", ":", "bool", "=", "False", ",", "follow_symlinks", ":", "bool", "=", "False", ",", "append_version", ":", "bool", "=", "False", ")", "->", "AbstractResource", ":", "assert", "prefix", ".", "startswith", "(", "'/'", ")", "if", "prefix", ".", "endswith", "(", "'/'", ")", ":", "prefix", "=", "prefix", "[", ":", "-", "1", "]", "resource", "=", "StaticResource", "(", "prefix", ",", "path", ",", "name", "=", "name", ",", "expect_handler", "=", "expect_handler", ",", "chunk_size", "=", "chunk_size", ",", "show_index", "=", "show_index", ",", "follow_symlinks", "=", "follow_symlinks", ",", "append_version", "=", "append_version", ")", "self", ".", "register_resource", "(", "resource", ")", "return", "resource" ]
Add static files view. prefix - url prefix path - folder with files
[ "Add", "static", "files", "view", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1036-L1059
26,984
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_options
def add_options(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method OPTIONS """ return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
python
def add_options(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method OPTIONS """ return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
[ "def", "add_options", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_OPTIONS", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method OPTIONS
[ "Shortcut", "for", "add_route", "with", "method", "OPTIONS" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1068-L1073
26,985
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_get
def add_get(self, path: str, handler: _WebHandler, *, name: Optional[str]=None, allow_head: bool=True, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint """ resource = self.add_resource(path, name=name) if allow_head: resource.add_route(hdrs.METH_HEAD, handler, **kwargs) return resource.add_route(hdrs.METH_GET, handler, **kwargs)
python
def add_get(self, path: str, handler: _WebHandler, *, name: Optional[str]=None, allow_head: bool=True, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint """ resource = self.add_resource(path, name=name) if allow_head: resource.add_route(hdrs.METH_HEAD, handler, **kwargs) return resource.add_route(hdrs.METH_GET, handler, **kwargs)
[ "def", "add_get", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", ",", "name", ":", "Optional", "[", "str", "]", "=", "None", ",", "allow_head", ":", "bool", "=", "True", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "resource", "=", "self", ".", "add_resource", "(", "path", ",", "name", "=", "name", ")", "if", "allow_head", ":", "resource", ".", "add_route", "(", "hdrs", ".", "METH_HEAD", ",", "handler", ",", "*", "*", "kwargs", ")", "return", "resource", ".", "add_route", "(", "hdrs", ".", "METH_GET", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint
[ "Shortcut", "for", "add_route", "with", "method", "GET", "if", "allow_head", "is", "true", "another", "route", "is", "added", "allowing", "head", "requests", "to", "the", "same", "endpoint" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1075-L1085
26,986
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_view
def add_view(self, path: str, handler: AbstractView, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with ANY methods for a class-based view """ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
python
def add_view(self, path: str, handler: AbstractView, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with ANY methods for a class-based view """ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
[ "def", "add_view", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "AbstractView", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_ANY", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with ANY methods for a class-based view
[ "Shortcut", "for", "add_route", "with", "ANY", "methods", "for", "a", "class", "-", "based", "view" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1115-L1120
26,987
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_routes
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None: """Append routes to route table. Parameter should be a sequence of RouteDef objects. """ for route_def in routes: route_def.register(self)
python
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None: """Append routes to route table. Parameter should be a sequence of RouteDef objects. """ for route_def in routes: route_def.register(self)
[ "def", "add_routes", "(", "self", ",", "routes", ":", "Iterable", "[", "AbstractRouteDef", "]", ")", "->", "None", ":", "for", "route_def", "in", "routes", ":", "route_def", ".", "register", "(", "self", ")" ]
Append routes to route table. Parameter should be a sequence of RouteDef objects.
[ "Append", "routes", "to", "route", "table", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1127-L1133
26,988
aio-libs/aiohttp
aiohttp/http_parser.py
HttpParser.parse_headers
def parse_headers( self, lines: List[bytes] ) -> Tuple['CIMultiDictProxy[str]', RawHeaders, Optional[bool], Optional[str], bool, bool]: """Parses RFC 5322 headers from a stream. Line continuations are supported. Returns list of header name and value pairs. Header name is in upper case. """ headers, raw_headers = self._headers_parser.parse_headers(lines) close_conn = None encoding = None upgrade = False chunked = False # keep-alive conn = headers.get(hdrs.CONNECTION) if conn: v = conn.lower() if v == 'close': close_conn = True elif v == 'keep-alive': close_conn = False elif v == 'upgrade': upgrade = True # encoding enc = headers.get(hdrs.CONTENT_ENCODING) if enc: enc = enc.lower() if enc in ('gzip', 'deflate', 'br'): encoding = enc # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te and 'chunked' in te.lower(): chunked = True return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
python
def parse_headers( self, lines: List[bytes] ) -> Tuple['CIMultiDictProxy[str]', RawHeaders, Optional[bool], Optional[str], bool, bool]: """Parses RFC 5322 headers from a stream. Line continuations are supported. Returns list of header name and value pairs. Header name is in upper case. """ headers, raw_headers = self._headers_parser.parse_headers(lines) close_conn = None encoding = None upgrade = False chunked = False # keep-alive conn = headers.get(hdrs.CONNECTION) if conn: v = conn.lower() if v == 'close': close_conn = True elif v == 'keep-alive': close_conn = False elif v == 'upgrade': upgrade = True # encoding enc = headers.get(hdrs.CONTENT_ENCODING) if enc: enc = enc.lower() if enc in ('gzip', 'deflate', 'br'): encoding = enc # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te and 'chunked' in te.lower(): chunked = True return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
[ "def", "parse_headers", "(", "self", ",", "lines", ":", "List", "[", "bytes", "]", ")", "->", "Tuple", "[", "'CIMultiDictProxy[str]'", ",", "RawHeaders", ",", "Optional", "[", "bool", "]", ",", "Optional", "[", "str", "]", ",", "bool", ",", "bool", "]", ":", "headers", ",", "raw_headers", "=", "self", ".", "_headers_parser", ".", "parse_headers", "(", "lines", ")", "close_conn", "=", "None", "encoding", "=", "None", "upgrade", "=", "False", "chunked", "=", "False", "# keep-alive", "conn", "=", "headers", ".", "get", "(", "hdrs", ".", "CONNECTION", ")", "if", "conn", ":", "v", "=", "conn", ".", "lower", "(", ")", "if", "v", "==", "'close'", ":", "close_conn", "=", "True", "elif", "v", "==", "'keep-alive'", ":", "close_conn", "=", "False", "elif", "v", "==", "'upgrade'", ":", "upgrade", "=", "True", "# encoding", "enc", "=", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_ENCODING", ")", "if", "enc", ":", "enc", "=", "enc", ".", "lower", "(", ")", "if", "enc", "in", "(", "'gzip'", ",", "'deflate'", ",", "'br'", ")", ":", "encoding", "=", "enc", "# chunking", "te", "=", "headers", ".", "get", "(", "hdrs", ".", "TRANSFER_ENCODING", ")", "if", "te", "and", "'chunked'", "in", "te", ".", "lower", "(", ")", ":", "chunked", "=", "True", "return", "(", "headers", ",", "raw_headers", ",", "close_conn", ",", "encoding", ",", "upgrade", ",", "chunked", ")" ]
Parses RFC 5322 headers from a stream. Line continuations are supported. Returns list of header name and value pairs. Header name is in upper case.
[ "Parses", "RFC", "5322", "headers", "from", "a", "stream", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_parser.py#L369-L412
26,989
aio-libs/aiohttp
aiohttp/client_ws.py
ClientWebSocketResponse.get_extra_info
def get_extra_info(self, name: str, default: Any=None) -> Any: """extra info from connection transport""" conn = self._response.connection if conn is None: return default transport = conn.transport if transport is None: return default return transport.get_extra_info(name, default)
python
def get_extra_info(self, name: str, default: Any=None) -> Any: """extra info from connection transport""" conn = self._response.connection if conn is None: return default transport = conn.transport if transport is None: return default return transport.get_extra_info(name, default)
[ "def", "get_extra_info", "(", "self", ",", "name", ":", "str", ",", "default", ":", "Any", "=", "None", ")", "->", "Any", ":", "conn", "=", "self", ".", "_response", ".", "connection", "if", "conn", "is", "None", ":", "return", "default", "transport", "=", "conn", ".", "transport", "if", "transport", "is", "None", ":", "return", "default", "return", "transport", ".", "get_extra_info", "(", "name", ",", "default", ")" ]
extra info from connection transport
[ "extra", "info", "from", "connection", "transport" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_ws.py#L125-L133
26,990
kennethreitz/requests-html
requests_html.py
user_agent
def user_agent(style=None) -> _UserAgent: """Returns an apparently legit user-agent, if not requested one of a specific style. Defaults to a Chrome-style User-Agent. """ global useragent if (not useragent) and style: useragent = UserAgent() return useragent[style] if style else DEFAULT_USER_AGENT
python
def user_agent(style=None) -> _UserAgent: """Returns an apparently legit user-agent, if not requested one of a specific style. Defaults to a Chrome-style User-Agent. """ global useragent if (not useragent) and style: useragent = UserAgent() return useragent[style] if style else DEFAULT_USER_AGENT
[ "def", "user_agent", "(", "style", "=", "None", ")", "->", "_UserAgent", ":", "global", "useragent", "if", "(", "not", "useragent", ")", "and", "style", ":", "useragent", "=", "UserAgent", "(", ")", "return", "useragent", "[", "style", "]", "if", "style", "else", "DEFAULT_USER_AGENT" ]
Returns an apparently legit user-agent, if not requested one of a specific style. Defaults to a Chrome-style User-Agent.
[ "Returns", "an", "apparently", "legit", "user", "-", "agent", "if", "not", "requested", "one", "of", "a", "specific", "style", ".", "Defaults", "to", "a", "Chrome", "-", "style", "User", "-", "Agent", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L665-L673
26,991
kennethreitz/requests-html
requests_html.py
BaseParser._make_absolute
def _make_absolute(self, link): """Makes a given link absolute.""" # Parse the link with stdlib. parsed = urlparse(link)._asdict() # If link is relative, then join it with base_url. if not parsed['netloc']: return urljoin(self.base_url, link) # Link is absolute; if it lacks a scheme, add one from base_url. if not parsed['scheme']: parsed['scheme'] = urlparse(self.base_url).scheme # Reconstruct the URL to incorporate the new scheme. parsed = (v for v in parsed.values()) return urlunparse(parsed) # Link is absolute and complete with scheme; nothing to be done here. return link
python
def _make_absolute(self, link): """Makes a given link absolute.""" # Parse the link with stdlib. parsed = urlparse(link)._asdict() # If link is relative, then join it with base_url. if not parsed['netloc']: return urljoin(self.base_url, link) # Link is absolute; if it lacks a scheme, add one from base_url. if not parsed['scheme']: parsed['scheme'] = urlparse(self.base_url).scheme # Reconstruct the URL to incorporate the new scheme. parsed = (v for v in parsed.values()) return urlunparse(parsed) # Link is absolute and complete with scheme; nothing to be done here. return link
[ "def", "_make_absolute", "(", "self", ",", "link", ")", ":", "# Parse the link with stdlib.", "parsed", "=", "urlparse", "(", "link", ")", ".", "_asdict", "(", ")", "# If link is relative, then join it with base_url.", "if", "not", "parsed", "[", "'netloc'", "]", ":", "return", "urljoin", "(", "self", ".", "base_url", ",", "link", ")", "# Link is absolute; if it lacks a scheme, add one from base_url.", "if", "not", "parsed", "[", "'scheme'", "]", ":", "parsed", "[", "'scheme'", "]", "=", "urlparse", "(", "self", ".", "base_url", ")", ".", "scheme", "# Reconstruct the URL to incorporate the new scheme.", "parsed", "=", "(", "v", "for", "v", "in", "parsed", ".", "values", "(", ")", ")", "return", "urlunparse", "(", "parsed", ")", "# Link is absolute and complete with scheme; nothing to be done here.", "return", "link" ]
Makes a given link absolute.
[ "Makes", "a", "given", "link", "absolute", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L306-L325
26,992
kennethreitz/requests-html
requests_html.py
HTML.render
def render(self, retries: int = 8, script: str = None, wait: float = 0.2, scrolldown=False, sleep: int = 0, reload: bool = True, timeout: Union[float, int] = 8.0, keep_page: bool = False): """Reloads the response in Chromium, and replaces HTML content with an updated version, with JavaScript executed. :param retries: The number of times to retry loading the page in Chromium. :param script: JavaScript to execute upon page load (optional). :param wait: The number of seconds to wait before loading the page, preventing timeouts (optional). :param scrolldown: Integer, if provided, of how many times to page down. :param sleep: Integer, if provided, of how many long to sleep after initial render. :param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory. :param keep_page: If ``True`` will allow you to interact with the browser page through ``r.html.page``. If ``scrolldown`` is specified, the page will scrolldown the specified number of times, after sleeping the specified amount of time (e.g. ``scrolldown=10, sleep=1``). If just ``sleep`` is provided, the rendering will wait *n* seconds, before returning. If ``script`` is specified, it will execute the provided JavaScript at runtime. Example: .. code-block:: python script = \"\"\" () => { return { width: document.documentElement.clientWidth, height: document.documentElement.clientHeight, deviceScaleFactor: window.devicePixelRatio, } } \"\"\" Returns the return value of the executed ``script``, if any is provided: .. code-block:: python >>> r.html.render(script=script) {'width': 800, 'height': 600, 'deviceScaleFactor': 1} Warning: the first time you run this method, it will download Chromium into your home directory (``~/.pyppeteer``). """ self.browser = self.session.browser # Automatically create a event loop and browser content = None # Automatically set Reload to False, if example URL is being used. 
if self.url == DEFAULT_URL: reload = False for i in range(retries): if not content: try: content, result, page = self.session.loop.run_until_complete(self._async_render(url=self.url, script=script, sleep=sleep, wait=wait, content=self.html, reload=reload, scrolldown=scrolldown, timeout=timeout, keep_page=keep_page)) except TypeError: pass else: break if not content: raise MaxRetries("Unable to render the page. Try increasing timeout") html = HTML(url=self.url, html=content.encode(DEFAULT_ENCODING), default_encoding=DEFAULT_ENCODING) self.__dict__.update(html.__dict__) self.page = page return result
python
def render(self, retries: int = 8, script: str = None, wait: float = 0.2, scrolldown=False, sleep: int = 0, reload: bool = True, timeout: Union[float, int] = 8.0, keep_page: bool = False): """Reloads the response in Chromium, and replaces HTML content with an updated version, with JavaScript executed. :param retries: The number of times to retry loading the page in Chromium. :param script: JavaScript to execute upon page load (optional). :param wait: The number of seconds to wait before loading the page, preventing timeouts (optional). :param scrolldown: Integer, if provided, of how many times to page down. :param sleep: Integer, if provided, of how many long to sleep after initial render. :param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory. :param keep_page: If ``True`` will allow you to interact with the browser page through ``r.html.page``. If ``scrolldown`` is specified, the page will scrolldown the specified number of times, after sleeping the specified amount of time (e.g. ``scrolldown=10, sleep=1``). If just ``sleep`` is provided, the rendering will wait *n* seconds, before returning. If ``script`` is specified, it will execute the provided JavaScript at runtime. Example: .. code-block:: python script = \"\"\" () => { return { width: document.documentElement.clientWidth, height: document.documentElement.clientHeight, deviceScaleFactor: window.devicePixelRatio, } } \"\"\" Returns the return value of the executed ``script``, if any is provided: .. code-block:: python >>> r.html.render(script=script) {'width': 800, 'height': 600, 'deviceScaleFactor': 1} Warning: the first time you run this method, it will download Chromium into your home directory (``~/.pyppeteer``). """ self.browser = self.session.browser # Automatically create a event loop and browser content = None # Automatically set Reload to False, if example URL is being used. 
if self.url == DEFAULT_URL: reload = False for i in range(retries): if not content: try: content, result, page = self.session.loop.run_until_complete(self._async_render(url=self.url, script=script, sleep=sleep, wait=wait, content=self.html, reload=reload, scrolldown=scrolldown, timeout=timeout, keep_page=keep_page)) except TypeError: pass else: break if not content: raise MaxRetries("Unable to render the page. Try increasing timeout") html = HTML(url=self.url, html=content.encode(DEFAULT_ENCODING), default_encoding=DEFAULT_ENCODING) self.__dict__.update(html.__dict__) self.page = page return result
[ "def", "render", "(", "self", ",", "retries", ":", "int", "=", "8", ",", "script", ":", "str", "=", "None", ",", "wait", ":", "float", "=", "0.2", ",", "scrolldown", "=", "False", ",", "sleep", ":", "int", "=", "0", ",", "reload", ":", "bool", "=", "True", ",", "timeout", ":", "Union", "[", "float", ",", "int", "]", "=", "8.0", ",", "keep_page", ":", "bool", "=", "False", ")", ":", "self", ".", "browser", "=", "self", ".", "session", ".", "browser", "# Automatically create a event loop and browser", "content", "=", "None", "# Automatically set Reload to False, if example URL is being used.", "if", "self", ".", "url", "==", "DEFAULT_URL", ":", "reload", "=", "False", "for", "i", "in", "range", "(", "retries", ")", ":", "if", "not", "content", ":", "try", ":", "content", ",", "result", ",", "page", "=", "self", ".", "session", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_async_render", "(", "url", "=", "self", ".", "url", ",", "script", "=", "script", ",", "sleep", "=", "sleep", ",", "wait", "=", "wait", ",", "content", "=", "self", ".", "html", ",", "reload", "=", "reload", ",", "scrolldown", "=", "scrolldown", ",", "timeout", "=", "timeout", ",", "keep_page", "=", "keep_page", ")", ")", "except", "TypeError", ":", "pass", "else", ":", "break", "if", "not", "content", ":", "raise", "MaxRetries", "(", "\"Unable to render the page. Try increasing timeout\"", ")", "html", "=", "HTML", "(", "url", "=", "self", ".", "url", ",", "html", "=", "content", ".", "encode", "(", "DEFAULT_ENCODING", ")", ",", "default_encoding", "=", "DEFAULT_ENCODING", ")", "self", ".", "__dict__", ".", "update", "(", "html", ".", "__dict__", ")", "self", ".", "page", "=", "page", "return", "result" ]
Reloads the response in Chromium, and replaces HTML content with an updated version, with JavaScript executed. :param retries: The number of times to retry loading the page in Chromium. :param script: JavaScript to execute upon page load (optional). :param wait: The number of seconds to wait before loading the page, preventing timeouts (optional). :param scrolldown: Integer, if provided, of how many times to page down. :param sleep: Integer, if provided, of how many long to sleep after initial render. :param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory. :param keep_page: If ``True`` will allow you to interact with the browser page through ``r.html.page``. If ``scrolldown`` is specified, the page will scrolldown the specified number of times, after sleeping the specified amount of time (e.g. ``scrolldown=10, sleep=1``). If just ``sleep`` is provided, the rendering will wait *n* seconds, before returning. If ``script`` is specified, it will execute the provided JavaScript at runtime. Example: .. code-block:: python script = \"\"\" () => { return { width: document.documentElement.clientWidth, height: document.documentElement.clientHeight, deviceScaleFactor: window.devicePixelRatio, } } \"\"\" Returns the return value of the executed ``script``, if any is provided: .. code-block:: python >>> r.html.render(script=script) {'width': 800, 'height': 600, 'deviceScaleFactor': 1} Warning: the first time you run this method, it will download Chromium into your home directory (``~/.pyppeteer``).
[ "Reloads", "the", "response", "in", "Chromium", "and", "replaces", "HTML", "content", "with", "an", "updated", "version", "with", "JavaScript", "executed", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L541-L610
26,993
kennethreitz/requests-html
requests_html.py
HTMLSession.close
def close(self): """ If a browser was created close it first. """ if hasattr(self, "_browser"): self.loop.run_until_complete(self._browser.close()) super().close()
python
def close(self): """ If a browser was created close it first. """ if hasattr(self, "_browser"): self.loop.run_until_complete(self._browser.close()) super().close()
[ "def", "close", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "\"_browser\"", ")", ":", "self", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_browser", ".", "close", "(", ")", ")", "super", "(", ")", ".", "close", "(", ")" ]
If a browser was created close it first.
[ "If", "a", "browser", "was", "created", "close", "it", "first", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L733-L737
26,994
kennethreitz/requests-html
requests_html.py
AsyncHTMLSession.request
def request(self, *args, **kwargs): """ Partial original request func and run it in a thread. """ func = partial(super().request, *args, **kwargs) return self.loop.run_in_executor(self.thread_pool, func)
python
def request(self, *args, **kwargs): """ Partial original request func and run it in a thread. """ func = partial(super().request, *args, **kwargs) return self.loop.run_in_executor(self.thread_pool, func)
[ "def", "request", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "func", "=", "partial", "(", "super", "(", ")", ".", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self", ".", "loop", ".", "run_in_executor", "(", "self", ".", "thread_pool", ",", "func", ")" ]
Partial original request func and run it in a thread.
[ "Partial", "original", "request", "func", "and", "run", "it", "in", "a", "thread", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L756-L759
26,995
kennethreitz/requests-html
requests_html.py
AsyncHTMLSession.run
def run(self, *coros): """ Pass in all the coroutines you want to run, it will wrap each one in a task, run it and wait for the result. Return a list with all results, this is returned in the same order coros are passed in. """ tasks = [ asyncio.ensure_future(coro()) for coro in coros ] done, _ = self.loop.run_until_complete(asyncio.wait(tasks)) return [t.result() for t in done]
python
def run(self, *coros): """ Pass in all the coroutines you want to run, it will wrap each one in a task, run it and wait for the result. Return a list with all results, this is returned in the same order coros are passed in. """ tasks = [ asyncio.ensure_future(coro()) for coro in coros ] done, _ = self.loop.run_until_complete(asyncio.wait(tasks)) return [t.result() for t in done]
[ "def", "run", "(", "self", ",", "*", "coros", ")", ":", "tasks", "=", "[", "asyncio", ".", "ensure_future", "(", "coro", "(", ")", ")", "for", "coro", "in", "coros", "]", "done", ",", "_", "=", "self", ".", "loop", ".", "run_until_complete", "(", "asyncio", ".", "wait", "(", "tasks", ")", ")", "return", "[", "t", ".", "result", "(", ")", "for", "t", "in", "done", "]" ]
Pass in all the coroutines you want to run, it will wrap each one in a task, run it and wait for the result. Return a list with all results, this is returned in the same order coros are passed in.
[ "Pass", "in", "all", "the", "coroutines", "you", "want", "to", "run", "it", "will", "wrap", "each", "one", "in", "a", "task", "run", "it", "and", "wait", "for", "the", "result", ".", "Return", "a", "list", "with", "all", "results", "this", "is", "returned", "in", "the", "same", "order", "coros", "are", "passed", "in", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L767-L775
26,996
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
shape
def shape(tensor): ''' Get shape of variable. Return type is tuple. ''' temp_s = tensor.get_shape() return tuple([temp_s[i].value for i in range(0, len(temp_s))])
python
def shape(tensor): ''' Get shape of variable. Return type is tuple. ''' temp_s = tensor.get_shape() return tuple([temp_s[i].value for i in range(0, len(temp_s))])
[ "def", "shape", "(", "tensor", ")", ":", "temp_s", "=", "tensor", ".", "get_shape", "(", ")", "return", "tuple", "(", "[", "temp_s", "[", "i", "]", ".", "value", "for", "i", "in", "range", "(", "0", ",", "len", "(", "temp_s", ")", ")", "]", ")" ]
Get shape of variable. Return type is tuple.
[ "Get", "shape", "of", "variable", ".", "Return", "type", "is", "tuple", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L30-L36
26,997
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
get_variable
def get_variable(name, temp_s): ''' Get variable by name. ''' return tf.Variable(tf.zeros(temp_s), name=name)
python
def get_variable(name, temp_s): ''' Get variable by name. ''' return tf.Variable(tf.zeros(temp_s), name=name)
[ "def", "get_variable", "(", "name", ",", "temp_s", ")", ":", "return", "tf", ".", "Variable", "(", "tf", ".", "zeros", "(", "temp_s", ")", ",", "name", "=", "name", ")" ]
Get variable by name.
[ "Get", "variable", "by", "name", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L39-L43
26,998
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
dropout
def dropout(tensor, drop_prob, is_training): ''' Dropout except test. ''' if not is_training: return tensor return tf.nn.dropout(tensor, 1.0 - drop_prob)
python
def dropout(tensor, drop_prob, is_training): ''' Dropout except test. ''' if not is_training: return tensor return tf.nn.dropout(tensor, 1.0 - drop_prob)
[ "def", "dropout", "(", "tensor", ",", "drop_prob", ",", "is_training", ")", ":", "if", "not", "is_training", ":", "return", "tensor", "return", "tf", ".", "nn", ".", "dropout", "(", "tensor", ",", "1.0", "-", "drop_prob", ")" ]
Dropout except test.
[ "Dropout", "except", "test", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L46-L52
26,999
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
Timer.get_elapsed
def get_elapsed(self, restart=True): ''' Calculate time span. ''' end = time.time() span = end - self.__start if restart: self.__start = end return span
python
def get_elapsed(self, restart=True): ''' Calculate time span. ''' end = time.time() span = end - self.__start if restart: self.__start = end return span
[ "def", "get_elapsed", "(", "self", ",", "restart", "=", "True", ")", ":", "end", "=", "time", ".", "time", "(", ")", "span", "=", "end", "-", "self", ".", "__start", "if", "restart", ":", "self", ".", "__start", "=", "end", "return", "span" ]
Calculate time span.
[ "Calculate", "time", "span", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L68-L76