partition | func_name | docstring | path | repo | language | url | sha (code follows each row as a fenced block)
|---|---|---|---|---|---|---|---|
train | ClientRequest.update_headers | Update request headers. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L322-L343 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def update_headers(self, headers: Optional[LooseHeaders]) -> None:
    """Update request headers."""
    self.headers = CIMultiDict()  # type: CIMultiDict[str]

    # add host
    netloc = cast(str, self.url.raw_host)
    if helpers.is_ipv6_address(netloc):
        netloc = '[{}]'.format(netloc)
    if not self.url.is_default_port():
        netloc += ':' + str(self.url.port)
    self.headers[hdrs.HOST] = netloc

    if headers:
        if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
            headers = headers.items()  # type: ignore

        for key, value in headers:
            # A special case for Host header
            if key.lower() == 'host':
                self.headers[key] = value
            else:
                self.headers.add(key, value)
```
train | ClientRequest.update_cookies | Update request cookies header. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L358-L381 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
    """Update request cookies header."""
    if not cookies:
        return

    c = SimpleCookie()
    if hdrs.COOKIE in self.headers:
        c.load(self.headers.get(hdrs.COOKIE, ''))
        del self.headers[hdrs.COOKIE]

    if isinstance(cookies, Mapping):
        iter_cookies = cookies.items()
    else:
        iter_cookies = cookies  # type: ignore
    for name, value in iter_cookies:
        if isinstance(value, Morsel):
            # Preserve coded_value
            mrsl_val = value.get(value.key, Morsel())
            mrsl_val.set(value.key, value.value, value.coded_value)  # type: ignore  # noqa
            c[name] = mrsl_val
        else:
            c[name] = value  # type: ignore

    self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
```
train | ClientRequest.update_content_encoding | Set request content encoding. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L383-L398 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def update_content_encoding(self, data: Any) -> None:
    """Set request content encoding."""
    if not data:
        return

    enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
    if enc:
        if self.compress:
            raise ValueError(
                'compress can not be set '
                'if Content-Encoding header is set')
    elif self.compress:
        if not isinstance(self.compress, str):
            self.compress = 'deflate'
        self.headers[hdrs.CONTENT_ENCODING] = self.compress
        self.chunked = True
```
train | ClientRequest.update_transfer_encoding | Analyze transfer-encoding header. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L400-L419 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def update_transfer_encoding(self) -> None:
    """Analyze transfer-encoding header."""
    te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    if 'chunked' in te:
        if self.chunked:
            raise ValueError(
                'chunked can not be set '
                'if "Transfer-Encoding: chunked" header is set')
    elif self.chunked:
        if hdrs.CONTENT_LENGTH in self.headers:
            raise ValueError(
                'chunked can not be set '
                'if Content-Length header is set')
        self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
    else:
        if hdrs.CONTENT_LENGTH not in self.headers:
            self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
```
train | ClientRequest.update_auth | Set basic auth. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L421-L431 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def update_auth(self, auth: Optional[BasicAuth]) -> None:
    """Set basic auth."""
    if auth is None:
        auth = self.auth
    if auth is None:
        return

    if not isinstance(auth, helpers.BasicAuth):
        raise TypeError('BasicAuth() tuple is required instead')

    self.headers[hdrs.AUTHORIZATION] = auth.encode()
```
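The update_headers, update_cookies, and update_auth hooks above are driven by per-request keyword arguments on the client session. A minimal sketch of that public entry point; the URL, header name, and cookie value are illustrative assumptions, not part of the dataset row:

```python
import asyncio
import aiohttp

async def main() -> None:
    auth = aiohttp.BasicAuth('user', 'secret')
    async with aiohttp.ClientSession() as session:
        # headers/cookies/auth are processed by the ClientRequest.update_* helpers.
        async with session.get(
            'https://httpbin.org/get',          # placeholder URL
            headers={'X-Trace-Id': 'abc123'},   # arbitrary example header
            cookies={'session_id': '42'},
            auth=auth,
        ) as resp:
            print(resp.status)

asyncio.run(main())
```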
train | ClientRequest.write_bytes | Support coroutines that yields bytes objects. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L501-L535 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
async def write_bytes(self, writer: AbstractStreamWriter,
                      conn: 'Connection') -> None:
    """Support coroutines that yields bytes objects."""
    # 100 response
    if self._continue is not None:
        await writer.drain()
        await self._continue

    protocol = conn.protocol
    assert protocol is not None
    try:
        if isinstance(self.body, payload.Payload):
            await self.body.write(writer)
        else:
            if isinstance(self.body, (bytes, bytearray)):
                self.body = (self.body,)  # type: ignore

            for chunk in self.body:
                await writer.write(chunk)  # type: ignore

        await writer.write_eof()
    except OSError as exc:
        new_exc = ClientOSError(
            exc.errno,
            'Can not write request body for %s' % self.url)
        new_exc.__context__ = exc
        new_exc.__cause__ = exc
        protocol.set_exception(new_exc)
    except asyncio.CancelledError as exc:
        if not conn.closed:
            protocol.set_exception(exc)
    except Exception as exc:
        protocol.set_exception(exc)
    finally:
        self._writer = None
```
train | ClientResponse.start | Start response processing. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L788-L835 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
async def start(self, connection: 'Connection') -> 'ClientResponse':
    """Start response processing."""
    self._closed = False
    self._protocol = connection.protocol
    self._connection = connection

    with self._timer:
        while True:
            # read response
            try:
                message, payload = await self._protocol.read()  # type: ignore  # noqa
            except http.HttpProcessingError as exc:
                raise ClientResponseError(
                    self.request_info, self.history,
                    status=exc.code,
                    message=exc.message, headers=exc.headers) from exc

            if (message.code < 100 or
                    message.code > 199 or message.code == 101):
                break

            if self._continue is not None:
                set_result(self._continue, True)
                self._continue = None

    # payload eof handler
    payload.on_eof(self._response_eof)

    # response status
    self.version = message.version
    self.status = message.code
    self.reason = message.reason

    # headers
    self._headers = message.headers  # type is CIMultiDictProxy
    self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

    # payload
    self.content = payload

    # cookies
    for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
        try:
            self.cookies.load(hdr)
        except CookieError as exc:
            client_logger.warning(
                'Can not load response cookies: %s', exc)

    return self
```
train | ClientResponse.read | Read response payload. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L920-L933 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
async def read(self) -> bytes:
    """Read response payload."""
    if self._body is None:
        try:
            self._body = await self.content.read()
            for trace in self._traces:
                await trace.send_response_chunk_received(self._body)
        except BaseException:
            self.close()
            raise
    elif self._released:
        raise ClientConnectionError('Connection closed')

    return self._body
```
train | ClientResponse.text | Read response payload and decode. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L956-L965 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
async def text(self,
               encoding: Optional[str]=None, errors: str='strict') -> str:
    """Read response payload and decode."""
    if self._body is None:
        await self.read()

    if encoding is None:
        encoding = self.get_encoding()

    return self._body.decode(encoding, errors=errors)
```
train | ClientResponse.json | Read and decodes JSON response. | aiohttp/client_reqrep.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L967-L987 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
async def json(self, *, encoding: str=None,
               loads: JSONDecoder=DEFAULT_JSON_DECODER,
               content_type: Optional[str]='application/json') -> Any:
    """Read and decodes JSON response."""
    if self._body is None:
        await self.read()

    if content_type:
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        if not _is_expected_content_type(ctype, content_type):
            raise ContentTypeError(
                self.request_info,
                self.history,
                message=('Attempt to decode JSON with '
                         'unexpected mimetype: %s' % ctype),
                headers=self.headers)

    if encoding is None:
        encoding = self.get_encoding()

    return loads(self._body.decode(encoding))
```
train | StreamResponse.enable_chunked_encoding | Enables automatic chunked transfer encoding. | aiohttp/web_response.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L144-L152 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None:
    """Enables automatic chunked transfer encoding."""
    self._chunked = True

    if hdrs.CONTENT_LENGTH in self._headers:
        raise RuntimeError("You can't enable chunked encoding when "
                           "a content length is set")
    if chunk_size is not None:
        warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
```
train | StreamResponse.enable_compression | Enables response compression encoding. | aiohttp/web_response.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L154-L169 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def enable_compression(self,
                       force: Optional[Union[bool, ContentCoding]]=None
                       ) -> None:
    """Enables response compression encoding."""
    # Backwards compatibility for when force was a bool <0.17.
    if type(force) == bool:
        force = ContentCoding.deflate if force else ContentCoding.identity
        warnings.warn("Using boolean for force is deprecated #3318",
                      DeprecationWarning)
    elif force is not None:
        assert isinstance(force, ContentCoding), ("force should one of "
                                                  "None, bool or "
                                                  "ContentEncoding")

    self._compression = True
    self._compression_force = force
```
train | StreamResponse.set_cookie | Set or update response cookie. | aiohttp/web_response.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L179-L221 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def set_cookie(self, name: str, value: str, *,
               expires: Optional[str]=None,
               domain: Optional[str]=None,
               max_age: Optional[Union[int, str]]=None,
               path: str='/',
               secure: Optional[str]=None,
               httponly: Optional[str]=None,
               version: Optional[str]=None) -> None:
    """Set or update response cookie.

    Sets new cookie or updates existent with new value.
    Also updates only those params which are not None.
    """
    old = self._cookies.get(name)
    if old is not None and old.coded_value == '':
        # deleted cookie
        self._cookies.pop(name, None)

    self._cookies[name] = value
    c = self._cookies[name]

    if expires is not None:
        c['expires'] = expires
    elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
        del c['expires']

    if domain is not None:
        c['domain'] = domain

    if max_age is not None:
        c['max-age'] = str(max_age)
    elif 'max-age' in c:
        del c['max-age']

    c['path'] = path

    if secure is not None:
        c['secure'] = secure
    if httponly is not None:
        c['httponly'] = httponly
    if version is not None:
        c['version'] = version
```
train | StreamResponse.del_cookie | Delete cookie. | aiohttp/web_response.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L223-L234 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def del_cookie(self, name: str, *,
               domain: Optional[str]=None,
               path: str='/') -> None:
    """Delete cookie.

    Creates new empty expired cookie.
    """
    # TODO: do we need domain/path here?
    self._cookies.pop(name, None)
    self.set_cookie(name, '', max_age=0,
                    expires="Thu, 01 Jan 1970 00:00:00 GMT",
                    domain=domain, path=path)
```
train | StreamResponse.last_modified | The value of Last-Modified HTTP header, or None. | aiohttp/web_response.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L282-L293 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def last_modified(self) -> Optional[datetime.datetime]:
    """The value of Last-Modified HTTP header, or None.

    This header is represented as a `datetime` object.
    """
    httpdate = self._headers.get(hdrs.LAST_MODIFIED)
    if httpdate is not None:
        timetuple = parsedate(httpdate)
        if timetuple is not None:
            return datetime.datetime(*timetuple[:6],
                                     tzinfo=datetime.timezone.utc)
    return None
```
train | _default_expect_handler | Default handler for Expect header. | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L278-L289 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
async def _default_expect_handler(request: Request) -> None:
    """Default handler for Expect header.

    Just send "100 Continue" to client.
    raise HTTPExpectationFailed if value of header is not "100-continue"
    """
    expect = request.headers.get(hdrs.EXPECT)
    if request.version == HttpVersion11:
        if expect.lower() == "100-continue":
            await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
        else:
            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
```
train | ResourceRoute.url_for | Construct url for route with additional params. | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L843-L845 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def url_for(self, *args: str, **kwargs: str) -> URL:
    """Construct url for route with additional params."""
    return self._resource.url_for(*args, **kwargs)
```
train | UrlDispatcher.add_static | Add static files view. | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1036-L1059 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_static(self, prefix: str, path: PathLike, *,
               name: Optional[str]=None,
               expect_handler: Optional[_ExpectHandler]=None,
               chunk_size: int=256 * 1024,
               show_index: bool=False, follow_symlinks: bool=False,
               append_version: bool=False) -> AbstractResource:
    """Add static files view.

    prefix - url prefix
    path - folder with files
    """
    assert prefix.startswith('/')
    if prefix.endswith('/'):
        prefix = prefix[:-1]
    resource = StaticResource(prefix, path,
                              name=name,
                              expect_handler=expect_handler,
                              chunk_size=chunk_size,
                              show_index=show_index,
                              follow_symlinks=follow_symlinks,
                              append_version=append_version)
    self.register_resource(resource)
    return resource
```
train | UrlDispatcher.add_head | Shortcut for add_route with method HEAD | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1061-L1066 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_head(self, path: str, handler: _WebHandler,
             **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with method HEAD
    """
    return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
```
train | UrlDispatcher.add_options | Shortcut for add_route with method OPTIONS | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1068-L1073 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_options(self, path: str, handler: _WebHandler,
                **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with method OPTIONS
    """
    return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
```
train | UrlDispatcher.add_get | Shortcut for add_route with method GET | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1075-L1085 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_get(self, path: str, handler: _WebHandler, *,
            name: Optional[str]=None, allow_head: bool=True,
            **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with method GET, if allow_head is true another
    route is added allowing head requests to the same endpoint
    """
    resource = self.add_resource(path, name=name)
    if allow_head:
        resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
    return resource.add_route(hdrs.METH_GET, handler, **kwargs)
```
train | UrlDispatcher.add_post | Shortcut for add_route with method POST | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1087-L1092 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_post(self, path: str, handler: _WebHandler,
             **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with method POST
    """
    return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
```
train | UrlDispatcher.add_put | Shortcut for add_route with method PUT | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1094-L1099 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_put(self, path: str, handler: _WebHandler,
            **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with method PUT
    """
    return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
```
train | UrlDispatcher.add_patch | Shortcut for add_route with method PATCH | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1101-L1106 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_patch(self, path: str, handler: _WebHandler,
              **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with method PATCH
    """
    return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
```
train | UrlDispatcher.add_delete | Shortcut for add_route with method DELETE | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1108-L1113 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_delete(self, path: str, handler: _WebHandler,
               **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with method DELETE
    """
    return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
```
train | UrlDispatcher.add_view | Shortcut for add_route with ANY methods for a class-based view | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1115-L1120 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_view(self, path: str, handler: AbstractView,
             **kwargs: Any) -> AbstractRoute:
    """
    Shortcut for add_route with ANY methods for a class-based view
    """
    return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
```
train | UrlDispatcher.add_routes | Append routes to route table. | aiohttp/web_urldispatcher.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1127-L1133 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None:
    """Append routes to route table.

    Parameter should be a sequence of RouteDef objects.
    """
    for route_def in routes:
        route_def.register(self)
```
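add_routes most often consumes a RouteTableDef built from decorators; a minimal sketch:

```python
from aiohttp import web

routes = web.RouteTableDef()

@routes.get('/ping')
async def ping(request: web.Request) -> web.Response:
    return web.Response(text='pong')

app = web.Application()
app.router.add_routes(routes)  # each RouteDef registers itself on the router
```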
train | HttpParser.parse_headers | Parses RFC 5322 headers from a stream. | aiohttp/http_parser.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_parser.py#L369-L412 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def parse_headers(
        self,
        lines: List[bytes]
) -> Tuple['CIMultiDictProxy[str]',
           RawHeaders,
           Optional[bool],
           Optional[str],
           bool,
           bool]:
    """Parses RFC 5322 headers from a stream.

    Line continuations are supported. Returns list of header name
    and value pairs. Header name is in upper case.
    """
    headers, raw_headers = self._headers_parser.parse_headers(lines)
    close_conn = None
    encoding = None
    upgrade = False
    chunked = False

    # keep-alive
    conn = headers.get(hdrs.CONNECTION)
    if conn:
        v = conn.lower()
        if v == 'close':
            close_conn = True
        elif v == 'keep-alive':
            close_conn = False
        elif v == 'upgrade':
            upgrade = True

    # encoding
    enc = headers.get(hdrs.CONTENT_ENCODING)
    if enc:
        enc = enc.lower()
        if enc in ('gzip', 'deflate', 'br'):
            encoding = enc

    # chunking
    te = headers.get(hdrs.TRANSFER_ENCODING)
    if te and 'chunked' in te.lower():
        chunked = True

    return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
```
train | ClientWebSocketResponse.get_extra_info | extra info from connection transport | aiohttp/client_ws.py | aio-libs/aiohttp | python | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_ws.py#L125-L133 | 9504fe2affaaff673fa4f3754c1c44221f8ba47d

```python
def get_extra_info(self, name: str, default: Any=None) -> Any:
    """extra info from connection transport"""
    conn = self._response.connection
    if conn is None:
        return default
    transport = conn.transport
    if transport is None:
        return default
    return transport.get_extra_info(name, default)
```
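A hedged sketch of get_extra_info from a client websocket; the endpoint URL is a placeholder:

```python
import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect('wss://echo.example.com/ws') as ws:  # placeholder
            print(ws.get_extra_info('peername'))  # (host, port) of the peer, or None
            await ws.send_str('hello')
            print(await ws.receive_str())

asyncio.run(main())
```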
train | user_agent | Returns an apparently legit user-agent, if not requested one of a specific style. | requests_html.py | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L665-L673 | b59a9f2fb9333d7d467154a0fd82978efdb9d23b

```python
def user_agent(style=None) -> _UserAgent:
    """Returns an apparently legit user-agent, if not requested one of a specific
    style. Defaults to a Chrome-style User-Agent.
    """
    global useragent
    if (not useragent) and style:
        useragent = UserAgent()

    return useragent[style] if style else DEFAULT_USER_AGENT
```
train | BaseParser.raw_html | Bytes representation of the HTML content. | requests_html.py | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L90-L97 | b59a9f2fb9333d7d467154a0fd82978efdb9d23b

```python
def raw_html(self) -> _RawHTML:
    """Bytes representation of the HTML content.
    (`learn more <http://www.diveintopython3.net/strings.html>`_).
    """
    if self._html:
        return self._html
    else:
        return etree.tostring(self.element, encoding='unicode').strip().encode(self.encoding)
```
train | BaseParser.html | Unicode representation of the HTML content. | requests_html.py | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L100-L107 | b59a9f2fb9333d7d467154a0fd82978efdb9d23b

```python
def html(self) -> _BaseHTML:
    """Unicode representation of the HTML content
    (`learn more <http://www.diveintopython3.net/strings.html>`_).
    """
    if self._html:
        return self.raw_html.decode(self.encoding, errors='replace')
    else:
        return etree.tostring(self.element, encoding='unicode').strip()
```
train | BaseParser.encoding | The encoding string to be used, extracted from the HTML and HTMLResponse headers. | requests_html.py | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L119-L136 | b59a9f2fb9333d7d467154a0fd82978efdb9d23b

```python
def encoding(self) -> _Encoding:
    """The encoding string to be used, extracted from the HTML and
    :class:`HTMLResponse <HTMLResponse>` headers.
    """
    if self._encoding:
        return self._encoding

    # Scan meta tags for charset.
    if self._html:
        self._encoding = html_to_unicode(self.default_encoding, self._html)[0]

        # Fall back to requests' detected encoding if decode fails.
        try:
            self.raw_html.decode(self.encoding, errors='replace')
        except UnicodeDecodeError:
            self._encoding = self.default_encoding

    return self._encoding if self._encoding else self.default_encoding
```
train | BaseParser.pq | PyQuery representation of the Element or HTML. | requests_html.py | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L144-L151 | b59a9f2fb9333d7d467154a0fd82978efdb9d23b

```python
def pq(self) -> PyQuery:
    """`PyQuery <https://pythonhosted.org/pyquery/>`_ representation
    of the :class:`Element <Element>` or :class:`HTML <HTML>`.
    """
    if self._pq is None:
        self._pq = PyQuery(self.lxml)
    return self._pq
```
train | BaseParser.lxml | lxml representation of the Element or HTML. | requests_html.py | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L154-L164 | b59a9f2fb9333d7d467154a0fd82978efdb9d23b

```python
def lxml(self) -> HtmlElement:
    """`lxml <http://lxml.de>`_ representation of the
    :class:`Element <Element>` or :class:`HTML <HTML>`.
    """
    if self._lxml is None:
        try:
            self._lxml = soup_parse(self.html, features='html.parser')
        except ValueError:
            self._lxml = lxml.html.fromstring(self.raw_html)
    return self._lxml
```
train | BaseParser.find | Given a CSS Selector, returns a list of Element objects or a single one. | requests_html.py | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L180-L234 | b59a9f2fb9333d7d467154a0fd82978efdb9d23b

```python
def find(self, selector: str = "*", *, containing: _Containing = None, clean: bool = False, first: bool = False, _encoding: str = None) -> _Find:
    """Given a CSS Selector, returns a list of
    :class:`Element <Element>` objects or a single one.

    :param selector: CSS Selector to use.
    :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags.
    :param containing: If specified, only return elements that contain the provided text.
    :param first: Whether or not to return just the first result.
    :param _encoding: The encoding format.

    Example CSS Selectors:

    - ``a``
    - ``a.someClass``
    - ``a#someID``
    - ``a[target=_blank]``

    See W3School's `CSS Selectors Reference
    <https://www.w3schools.com/cssref/css_selectors.asp>`_
    for more details.

    If ``first`` is ``True``, only returns the first
    :class:`Element <Element>` found.
    """

    # Convert a single containing into a list.
    if isinstance(containing, str):
        containing = [containing]

    encoding = _encoding or self.encoding
    elements = [
        Element(element=found, url=self.url, default_encoding=encoding)
        for found in self.pq(selector)
    ]

    if containing:
        elements_copy = elements.copy()
        elements = []

        for element in elements_copy:
            if any([c.lower() in element.full_text.lower() for c in containing]):
                elements.append(element)

        elements.reverse()

    # Sanitize the found HTML.
    if clean:
        elements_copy = elements.copy()
        elements = []

        for element in elements_copy:
            element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml))
            elements.append(element)

    return _get_first_or_list(elements, first)
```
train | BaseParser.xpath | Given an XPath selector, returns a list of
:class:`Element <Element>` objects or a single one.
:param selector: XPath Selector to use.
:param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags.
:param first: Whether or not to return just the first result.
:param _encoding: The encoding format.
If a sub-selector is specified (e.g. ``//a/@href``), a simple
list of results is returned.
See W3School's `XPath Examples
<https://www.w3schools.com/xml/xpath_examples.asp>`_
for more details.
If ``first`` is ``True``, only returns the first
:class:`Element <Element>` found. | requests_html.py | def xpath(self, selector: str, *, clean: bool = False, first: bool = False, _encoding: str = None) -> _XPath:
"""Given an XPath selector, returns a list of
:class:`Element <Element>` objects or a single one.
:param selector: XPath Selector to use.
:param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags.
:param first: Whether or not to return just the first result.
:param _encoding: The encoding format.
If a sub-selector is specified (e.g. ``//a/@href``), a simple
list of results is returned.
See W3School's `XPath Examples
<https://www.w3schools.com/xml/xpath_examples.asp>`_
for more details.
If ``first`` is ``True``, only returns the first
:class:`Element <Element>` found.
"""
selected = self.lxml.xpath(selector)
elements = [
Element(element=selection, url=self.url, default_encoding=_encoding or self.encoding)
if not isinstance(selection, etree._ElementUnicodeResult) else str(selection)
for selection in selected
]
# Sanitize the found HTML.
if clean:
elements_copy = elements.copy()
elements = []
for element in elements_copy:
element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml))
elements.append(element)
return _get_first_or_list(elements, first) | def xpath(self, selector: str, *, clean: bool = False, first: bool = False, _encoding: str = None) -> _XPath:
"""Given an XPath selector, returns a list of
:class:`Element <Element>` objects or a single one.
:param selector: XPath Selector to use.
:param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags.
:param first: Whether or not to return just the first result.
:param _encoding: The encoding format.
If a sub-selector is specified (e.g. ``//a/@href``), a simple
list of results is returned.
See W3School's `XPath Examples
<https://www.w3schools.com/xml/xpath_examples.asp>`_
for more details.
If ``first`` is ``True``, only returns the first
:class:`Element <Element>` found.
"""
selected = self.lxml.xpath(selector)
elements = [
Element(element=selection, url=self.url, default_encoding=_encoding or self.encoding)
if not isinstance(selection, etree._ElementUnicodeResult) else str(selection)
for selection in selected
]
# Sanitize the found HTML.
if clean:
elements_copy = elements.copy()
elements = []
for element in elements_copy:
element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml))
elements.append(element)
return _get_first_or_list(elements, first) | [
"Given",
"an",
"XPath",
"selector",
"returns",
"a",
"list",
"of",
":",
"class",
":",
"Element",
"<Element",
">",
"objects",
"or",
"a",
"single",
"one",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L236-L272 | [
"def",
"xpath",
"(",
"self",
",",
"selector",
":",
"str",
",",
"*",
",",
"clean",
":",
"bool",
"=",
"False",
",",
"first",
":",
"bool",
"=",
"False",
",",
"_encoding",
":",
"str",
"=",
"None",
")",
"->",
"_XPath",
":",
"selected",
"=",
"self",
".... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | BaseParser.search_all | Search the :class:`Element <Element>` (multiple times) for the given parse
template.
:param template: The Parse template to use. | requests_html.py | def search_all(self, template: str) -> _Result:
"""Search the :class:`Element <Element>` (multiple times) for the given parse
template.
:param template: The Parse template to use.
"""
return [r for r in findall(template, self.html)] | def search_all(self, template: str) -> _Result:
"""Search the :class:`Element <Element>` (multiple times) for the given parse
template.
:param template: The Parse template to use.
"""
return [r for r in findall(template, self.html)] | [
"Search",
"the",
":",
"class",
":",
"Element",
"<Element",
">",
"(",
"multiple",
"times",
")",
"for",
"the",
"given",
"parse",
"template",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L282-L288 | [
"def",
"search_all",
"(",
"self",
",",
"template",
":",
"str",
")",
"->",
"_Result",
":",
"return",
"[",
"r",
"for",
"r",
"in",
"findall",
"(",
"template",
",",
"self",
".",
"html",
")",
"]"
] | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
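``search_all`` delegates to ``parse.findall``; a minimal sketch over a hypothetical document:

from requests_html import HTML

doc = HTML(html='<span>1 of 5</span><span>2 of 5</span>')
for result in doc.search_all('<span>{} of {}</span>'):
    print(result[0], result[1])   # prints '1 5', then '2 5'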
train | BaseParser.links | All found links on page, in as–is form. | requests_html.py | def links(self) -> _Links:
"""All found links on page, in as–is form."""
def gen():
for link in self.find('a'):
try:
href = link.attrs['href'].strip()
if href and not (href.startswith('#') and self.skip_anchors) and not href.startswith(('javascript:', 'mailto:')):
yield href
except KeyError:
pass
return set(gen()) | def links(self) -> _Links:
"""All found links on page, in as–is form."""
def gen():
for link in self.find('a'):
try:
href = link.attrs['href'].strip()
if href and not (href.startswith('#') and self.skip_anchors) and not href.startswith(('javascript:', 'mailto:')):
yield href
except KeyError:
pass
return set(gen()) | [
"All",
"found",
"links",
"on",
"page",
"in",
"as–is",
"form",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L291-L304 | [
"def",
"links",
"(",
"self",
")",
"->",
"_Links",
":",
"def",
"gen",
"(",
")",
":",
"for",
"link",
"in",
"self",
".",
"find",
"(",
"'a'",
")",
":",
"try",
":",
"href",
"=",
"link",
".",
"attrs",
"[",
"'href'",
"]",
".",
"strip",
"(",
")",
"if... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | BaseParser._make_absolute | Makes a given link absolute. | requests_html.py | def _make_absolute(self, link):
"""Makes a given link absolute."""
# Parse the link with stdlib.
parsed = urlparse(link)._asdict()
# If link is relative, then join it with base_url.
if not parsed['netloc']:
    return urljoin(self.base_url, link)
# Link is absolute; if it lacks a scheme, add one from base_url.
if not parsed['scheme']:
    parsed['scheme'] = urlparse(self.base_url).scheme
    # Reconstruct the URL to incorporate the new scheme.
    parsed = (v for v in parsed.values())
    return urlunparse(parsed)
# Link is absolute and complete with scheme; nothing to be done here.
return link | def _make_absolute(self, link):
"""Makes a given link absolute."""
# Parse the link with stdlib.
parsed = urlparse(link)._asdict()
# If link is relative, then join it with base_url.
if not parsed['netloc']:
    return urljoin(self.base_url, link)
# Link is absolute; if it lacks a scheme, add one from base_url.
if not parsed['scheme']:
    parsed['scheme'] = urlparse(self.base_url).scheme
    # Reconstruct the URL to incorporate the new scheme.
    parsed = (v for v in parsed.values())
    return urlunparse(parsed)
# Link is absolute and complete with scheme; nothing to be done here.
return link | [
"Makes",
"a",
"given",
"link",
"absolute",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L306-L325 | [
"def",
"_make_absolute",
"(",
"self",
",",
"link",
")",
":",
"# Parse the link with stdlib.",
"parsed",
"=",
"urlparse",
"(",
"link",
")",
".",
"_asdict",
"(",
")",
"# If link is relative, then join it with base_url.",
"if",
"not",
"parsed",
"[",
"'netloc'",
"]",
... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
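The relative and scheme-less branches above mirror what the stdlib does on its own; a small illustration with hypothetical URLs:

from urllib.parse import urljoin

base = 'https://example.org/docs/index.html'
urljoin(base, 'page2.html')              # 'https://example.org/docs/page2.html' (relative)
urljoin(base, '//cdn.example.org/x.js')  # 'https://cdn.example.org/x.js' (scheme inherited)
urljoin(base, 'https://other.org/')      # unchanged, already absolute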
train | BaseParser.absolute_links | All found links on page, in absolute form
(`learn more <https://www.navegabem.com/absolute-or-relative-links.html>`_). | requests_html.py | def absolute_links(self) -> _Links:
"""All found links on page, in absolute form
(`learn more <https://www.navegabem.com/absolute-or-relative-links.html>`_).
"""
def gen():
for link in self.links:
yield self._make_absolute(link)
return set(gen()) | def absolute_links(self) -> _Links:
"""All found links on page, in absolute form
(`learn more <https://www.navegabem.com/absolute-or-relative-links.html>`_).
"""
def gen():
for link in self.links:
yield self._make_absolute(link)
return set(gen()) | [
"All",
"found",
"links",
"on",
"page",
"in",
"absolute",
"form",
"(",
"learn",
"more",
"<https",
":",
"//",
"www",
".",
"navegabem",
".",
"com",
"/",
"absolute",
"-",
"or",
"-",
"relative",
"-",
"links",
".",
"html",
">",
"_",
")",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L329-L338 | [
"def",
"absolute_links",
"(",
"self",
")",
"->",
"_Links",
":",
"def",
"gen",
"(",
")",
":",
"for",
"link",
"in",
"self",
".",
"links",
":",
"yield",
"self",
".",
"_make_absolute",
"(",
"link",
")",
"return",
"set",
"(",
"gen",
"(",
")",
")"
] | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
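Contrasting the two link properties, assuming a hypothetical document:

from requests_html import HTML

doc = HTML(url='https://example.org/docs/', html='<a href="a.html">a</a>')
doc.links             # {'a.html'}, the as-is form
doc.absolute_links    # {'https://example.org/docs/a.html'}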
train | BaseParser.base_url | The base URL for the page. Supports the ``<base>`` tag
(`learn more <https://www.w3schools.com/tags/tag_base.asp>`_). | requests_html.py | def base_url(self) -> _URL:
"""The base URL for the page. Supports the ``<base>`` tag
(`learn more <https://www.w3schools.com/tags/tag_base.asp>`_)."""
# Support for <base> tag.
base = self.find('base', first=True)
if base:
result = base.attrs.get('href', '').strip()
if result:
return result
# Parse the url to separate out the path
parsed = urlparse(self.url)._asdict()
# Remove any part of the path after the last '/'
parsed['path'] = '/'.join(parsed['path'].split('/')[:-1]) + '/'
# Reconstruct the url with the modified path
parsed = (v for v in parsed.values())
url = urlunparse(parsed)
return url | def base_url(self) -> _URL:
"""The base URL for the page. Supports the ``<base>`` tag
(`learn more <https://www.w3schools.com/tags/tag_base.asp>`_)."""
# Support for <base> tag.
base = self.find('base', first=True)
if base:
result = base.attrs.get('href', '').strip()
if result:
return result
# Parse the url to separate out the path
parsed = urlparse(self.url)._asdict()
# Remove any part of the path after the last '/'
parsed['path'] = '/'.join(parsed['path'].split('/')[:-1]) + '/'
# Reconstruct the url with the modified path
parsed = (v for v in parsed.values())
url = urlunparse(parsed)
return url | [
"The",
"base",
"URL",
"for",
"the",
"page",
".",
"Supports",
"the",
"<base",
">",
"tag",
"(",
"learn",
"more",
"<https",
":",
"//",
"www",
".",
"w3schools",
".",
"com",
"/",
"tags",
"/",
"tag_base",
".",
"asp",
">",
"_",
")",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L341-L362 | [
"def",
"base_url",
"(",
"self",
")",
"->",
"_URL",
":",
"# Support for <base> tag.",
"base",
"=",
"self",
".",
"find",
"(",
"'base'",
",",
"first",
"=",
"True",
")",
"if",
"base",
":",
"result",
"=",
"base",
".",
"attrs",
".",
"get",
"(",
"'href'",
"... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | Element.attrs | Returns a dictionary of the attributes of the :class:`Element <Element>`
(`learn more <https://www.w3schools.com/tags/ref_attributes.asp>`_). | requests_html.py | def attrs(self) -> _Attrs:
"""Returns a dictionary of the attributes of the :class:`Element <Element>`
(`learn more <https://www.w3schools.com/tags/ref_attributes.asp>`_).
"""
if self._attrs is None:
self._attrs = {k: v for k, v in self.element.items()}
# Split class and rel up, as there are usually many of them:
for attr in ['class', 'rel']:
if attr in self._attrs:
self._attrs[attr] = tuple(self._attrs[attr].split())
return self._attrs | def attrs(self) -> _Attrs:
"""Returns a dictionary of the attributes of the :class:`Element <Element>`
(`learn more <https://www.w3schools.com/tags/ref_attributes.asp>`_).
"""
if self._attrs is None:
self._attrs = {k: v for k, v in self.element.items()}
# Split class and rel up, as there are usually many of them:
for attr in ['class', 'rel']:
if attr in self._attrs:
self._attrs[attr] = tuple(self._attrs[attr].split())
return self._attrs | [
"Returns",
"a",
"dictionary",
"of",
"the",
"attributes",
"of",
"the",
":",
"class",
":",
"Element",
"<Element",
">",
"(",
"learn",
"more",
"<https",
":",
"//",
"www",
".",
"w3schools",
".",
"com",
"/",
"tags",
"/",
"ref_attributes",
".",
"asp",
">",
"_... | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L390-L402 | [
"def",
"attrs",
"(",
"self",
")",
"->",
"_Attrs",
":",
"if",
"self",
".",
"_attrs",
"is",
"None",
":",
"self",
".",
"_attrs",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"self",
".",
"element",
".",
"items",
"(",
")",
"}",
"# Split cla... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | HTML.next | Attempts to find the next page, if there is one. If ``fetch``
is ``True``, returns :class:`HTML <HTML>` object of
next page. If ``fetch`` is ``False`` (the default), simply returns the next URL. | requests_html.py | def next(self, fetch: bool = False, next_symbol: _NextSymbol = DEFAULT_NEXT_SYMBOL) -> _Next:
"""Attempts to find the next page, if there is one. If ``fetch``
is ``True``, returns :class:`HTML <HTML>` object of
next page. If ``fetch`` is ``False`` (the default), simply returns the next URL.
"""
def get_next():
candidates = self.find('a', containing=next_symbol)
for candidate in candidates:
if candidate.attrs.get('href'):
# Support 'next' rel (e.g. reddit).
if 'next' in candidate.attrs.get('rel', []):
return candidate.attrs['href']
# Support 'next' in classnames.
for _class in candidate.attrs.get('class', []):
if 'next' in _class:
return candidate.attrs['href']
if 'page' in candidate.attrs['href']:
return candidate.attrs['href']
try:
# Resort to the last candidate.
return candidates[-1].attrs['href']
except IndexError:
return None
__next = get_next()
if __next:
url = self._make_absolute(__next)
else:
return None
if fetch:
return self.session.get(url)
else:
return url | def next(self, fetch: bool = False, next_symbol: _NextSymbol = DEFAULT_NEXT_SYMBOL) -> _Next:
"""Attempts to find the next page, if there is one. If ``fetch``
is ``True``, returns :class:`HTML <HTML>` object of
next page. If ``fetch`` is ``False`` (the default), simply returns the next URL.
"""
def get_next():
candidates = self.find('a', containing=next_symbol)
for candidate in candidates:
if candidate.attrs.get('href'):
# Support 'next' rel (e.g. reddit).
if 'next' in candidate.attrs.get('rel', []):
return candidate.attrs['href']
# Support 'next' in classnames.
for _class in candidate.attrs.get('class', []):
if 'next' in _class:
return candidate.attrs['href']
if 'page' in candidate.attrs['href']:
return candidate.attrs['href']
try:
# Resort to the last candidate.
return candidates[-1].attrs['href']
except IndexError:
return None
__next = get_next()
if __next:
url = self._make_absolute(__next)
else:
return None
if fetch:
return self.session.get(url)
else:
return url | [
"Attempts",
"to",
"find",
"the",
"next",
"page",
"if",
"there",
"is",
"one",
".",
"If",
"fetch",
"is",
"True",
"(",
"default",
")",
"returns",
":",
"class",
":",
"HTML",
"<HTML",
">",
"object",
"of",
"next",
"page",
".",
"If",
"fetch",
"is",
"False",... | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L433-L472 | [
"def",
"next",
"(",
"self",
",",
"fetch",
":",
"bool",
"=",
"False",
",",
"next_symbol",
":",
"_NextSymbol",
"=",
"DEFAULT_NEXT_SYMBOL",
")",
"->",
"_Next",
":",
"def",
"get_next",
"(",
")",
":",
"candidates",
"=",
"self",
".",
"find",
"(",
"'a'",
",",... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | HTML._async_render | Handle page creation and js rendering. Internal use for render/arender methods. | requests_html.py | async def _async_render(self, *, url: str, script: str = None, scrolldown, sleep: int, wait: float, reload, content: Optional[str], timeout: Union[float, int], keep_page: bool):
""" Handle page creation and js rendering. Internal use for render/arender methods. """
try:
page = await self.browser.newPage()
# Wait before rendering the page, to prevent timeouts.
await asyncio.sleep(wait)
# Load the given page (GET request, obviously.)
if reload:
await page.goto(url, options={'timeout': int(timeout * 1000)})
else:
await page.goto(f'data:text/html,{self.html}', options={'timeout': int(timeout * 1000)})
result = None
if script:
result = await page.evaluate(script)
if scrolldown:
for _ in range(scrolldown):
await page._keyboard.down('PageDown')
await asyncio.sleep(sleep)
else:
await asyncio.sleep(sleep)
if scrolldown:
await page._keyboard.up('PageDown')
# Return the content of the page, JavaScript evaluated.
content = await page.content()
if not keep_page:
await page.close()
page = None
return content, result, page
except TimeoutError:
await page.close()
page = None
return None | async def _async_render(self, *, url: str, script: str = None, scrolldown, sleep: int, wait: float, reload, content: Optional[str], timeout: Union[float, int], keep_page: bool):
""" Handle page creation and js rendering. Internal use for render/arender methods. """
try:
page = await self.browser.newPage()
# Wait before rendering the page, to prevent timeouts.
await asyncio.sleep(wait)
# Load the given page (GET request, obviously.)
if reload:
await page.goto(url, options={'timeout': int(timeout * 1000)})
else:
await page.goto(f'data:text/html,{self.html}', options={'timeout': int(timeout * 1000)})
result = None
if script:
result = await page.evaluate(script)
if scrolldown:
for _ in range(scrolldown):
await page._keyboard.down('PageDown')
await asyncio.sleep(sleep)
else:
await asyncio.sleep(sleep)
if scrolldown:
await page._keyboard.up('PageDown')
# Return the content of the page, JavaScript evaluated.
content = await page.content()
if not keep_page:
await page.close()
page = None
return content, result, page
except TimeoutError:
await page.close()
page = None
return None | [
"Handle",
"page",
"creation",
"and",
"js",
"rendering",
".",
"Internal",
"use",
"for",
"render",
"/",
"arender",
"methods",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L502-L539 | [
"async",
"def",
"_async_render",
"(",
"self",
",",
"*",
",",
"url",
":",
"str",
",",
"script",
":",
"str",
"=",
"None",
",",
"scrolldown",
",",
"sleep",
":",
"int",
",",
"wait",
":",
"float",
",",
"reload",
",",
"content",
":",
"Optional",
"[",
"st... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | HTML.render | Reloads the response in Chromium, and replaces HTML content
with an updated version, with JavaScript executed.
:param retries: The number of times to retry loading the page in Chromium.
:param script: JavaScript to execute upon page load (optional).
:param wait: The number of seconds to wait before loading the page, preventing timeouts (optional).
:param scrolldown: Integer, if provided, of how many times to page down.
:param sleep: Integer, if provided, of how long to sleep after the initial render.
:param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory.
:param keep_page: If ``True``, will allow you to interact with the browser page through ``r.html.page``.
If ``scrolldown`` is specified, the page will scrolldown the specified
number of times, after sleeping the specified amount of time
(e.g. ``scrolldown=10, sleep=1``).
If just ``sleep`` is provided, the rendering will wait *n* seconds before
returning.
If ``script`` is specified, it will execute the provided JavaScript at
runtime. Example:
.. code-block:: python
script = \"\"\"
() => {
return {
width: document.documentElement.clientWidth,
height: document.documentElement.clientHeight,
deviceScaleFactor: window.devicePixelRatio,
}
}
\"\"\"
Returns the return value of the executed ``script``, if any is provided:
.. code-block:: python
>>> r.html.render(script=script)
{'width': 800, 'height': 600, 'deviceScaleFactor': 1}
Warning: the first time you run this method, it will download
Chromium into your home directory (``~/.pyppeteer``). | requests_html.py | def render(self, retries: int = 8, script: str = None, wait: float = 0.2, scrolldown=False, sleep: int = 0, reload: bool = True, timeout: Union[float, int] = 8.0, keep_page: bool = False):
"""Reloads the response in Chromium, and replaces HTML content
with an updated version, with JavaScript executed.
:param retries: The number of times to retry loading the page in Chromium.
:param script: JavaScript to execute upon page load (optional).
:param wait: The number of seconds to wait before loading the page, preventing timeouts (optional).
:param scrolldown: Integer, if provided, of how many times to page down.
:param sleep: Integer, if provided, of how long to sleep after the initial render.
:param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory.
:param keep_page: If ``True``, will allow you to interact with the browser page through ``r.html.page``.
If ``scrolldown`` is specified, the page will scrolldown the specified
number of times, after sleeping the specified amount of time
(e.g. ``scrolldown=10, sleep=1``).
If just ``sleep`` is provided, the rendering will wait *n* seconds before
returning.
If ``script`` is specified, it will execute the provided JavaScript at
runtime. Example:
.. code-block:: python
script = \"\"\"
() => {
return {
width: document.documentElement.clientWidth,
height: document.documentElement.clientHeight,
deviceScaleFactor: window.devicePixelRatio,
}
}
\"\"\"
Returns the return value of the executed ``script``, if any is provided:
.. code-block:: python
>>> r.html.render(script=script)
{'width': 800, 'height': 600, 'deviceScaleFactor': 1}
Warning: the first time you run this method, it will download
Chromium into your home directory (``~/.pyppeteer``).
"""
self.browser = self.session.browser # Automatically create an event loop and browser
content = None
# Automatically set Reload to False, if example URL is being used.
if self.url == DEFAULT_URL:
reload = False
for i in range(retries):
if not content:
try:
content, result, page = self.session.loop.run_until_complete(self._async_render(url=self.url, script=script, sleep=sleep, wait=wait, content=self.html, reload=reload, scrolldown=scrolldown, timeout=timeout, keep_page=keep_page))
except TypeError:
pass
else:
break
if not content:
raise MaxRetries("Unable to render the page. Try increasing timeout")
html = HTML(url=self.url, html=content.encode(DEFAULT_ENCODING), default_encoding=DEFAULT_ENCODING)
self.__dict__.update(html.__dict__)
self.page = page
return result | def render(self, retries: int = 8, script: str = None, wait: float = 0.2, scrolldown=False, sleep: int = 0, reload: bool = True, timeout: Union[float, int] = 8.0, keep_page: bool = False):
"""Reloads the response in Chromium, and replaces HTML content
with an updated version, with JavaScript executed.
:param retries: The number of times to retry loading the page in Chromium.
:param script: JavaScript to execute upon page load (optional).
:param wait: The number of seconds to wait before loading the page, preventing timeouts (optional).
:param scrolldown: Integer, if provided, of how many times to page down.
:param sleep: Integer, if provided, of how long to sleep after the initial render.
:param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory.
:param keep_page: If ``True``, will allow you to interact with the browser page through ``r.html.page``.
If ``scrolldown`` is specified, the page will scrolldown the specified
number of times, after sleeping the specified amount of time
(e.g. ``scrolldown=10, sleep=1``).
If just ``sleep`` is provided, the rendering will wait *n* seconds before
returning.
If ``script`` is specified, it will execute the provided JavaScript at
runtime. Example:
.. code-block:: python
script = \"\"\"
() => {
return {
width: document.documentElement.clientWidth,
height: document.documentElement.clientHeight,
deviceScaleFactor: window.devicePixelRatio,
}
}
\"\"\"
Returns the return value of the executed ``script``, if any is provided:
.. code-block:: python
>>> r.html.render(script=script)
{'width': 800, 'height': 600, 'deviceScaleFactor': 1}
Warning: the first time you run this method, it will download
Chromium into your home directory (``~/.pyppeteer``).
"""
self.browser = self.session.browser # Automatically create an event loop and browser
content = None
# Automatically set Reload to False, if example URL is being used.
if self.url == DEFAULT_URL:
reload = False
for i in range(retries):
if not content:
try:
content, result, page = self.session.loop.run_until_complete(self._async_render(url=self.url, script=script, sleep=sleep, wait=wait, content=self.html, reload=reload, scrolldown=scrolldown, timeout=timeout, keep_page=keep_page))
except TypeError:
pass
else:
break
if not content:
raise MaxRetries("Unable to render the page. Try increasing timeout")
html = HTML(url=self.url, html=content.encode(DEFAULT_ENCODING), default_encoding=DEFAULT_ENCODING)
self.__dict__.update(html.__dict__)
self.page = page
return result | [
"Reloads",
"the",
"response",
"in",
"Chromium",
"and",
"replaces",
"HTML",
"content",
"with",
"an",
"updated",
"version",
"with",
"JavaScript",
"executed",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L541-L610 | [
"def",
"render",
"(",
"self",
",",
"retries",
":",
"int",
"=",
"8",
",",
"script",
":",
"str",
"=",
"None",
",",
"wait",
":",
"float",
"=",
"0.2",
",",
"scrolldown",
"=",
"False",
",",
"sleep",
":",
"int",
"=",
"0",
",",
"reload",
":",
"bool",
... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
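A minimal ``render`` sketch mirroring the docstring example; the first call downloads Chromium into ``~/.pyppeteer``, and the URL is a placeholder:

from requests_html import HTMLSession

session = HTMLSession()
r = session.get('https://example.org/')   # hypothetical JS-heavy page
script = """
() => {
    return {
        width: document.documentElement.clientWidth,
        height: document.documentElement.clientHeight,
    }
}
"""
dimensions = r.html.render(script=script, sleep=1)   # returns the script's value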
train | BaseSession.response_hook | Change response encoding and replace it with an HTMLResponse. | requests_html.py | def response_hook(self, response, **kwargs) -> HTMLResponse:
""" Change response enconding and replace it by a HTMLResponse. """
if not response.encoding:
response.encoding = DEFAULT_ENCODING
return HTMLResponse._from_response(response, self) | def response_hook(self, response, **kwargs) -> HTMLResponse:
""" Change response enconding and replace it by a HTMLResponse. """
if not response.encoding:
response.encoding = DEFAULT_ENCODING
return HTMLResponse._from_response(response, self) | [
"Change",
"response",
"enconding",
"and",
"replace",
"it",
"by",
"a",
"HTMLResponse",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L705-L709 | [
"def",
"response_hook",
"(",
"self",
",",
"response",
",",
"*",
"*",
"kwargs",
")",
"->",
"HTMLResponse",
":",
"if",
"not",
"response",
".",
"encoding",
":",
"response",
".",
"encoding",
"=",
"DEFAULT_ENCODING",
"return",
"HTMLResponse",
".",
"_from_response",... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | HTMLSession.close | If a browser was created close it first. | requests_html.py | def close(self):
""" If a browser was created close it first. """
if hasattr(self, "_browser"):
self.loop.run_until_complete(self._browser.close())
super().close() | def close(self):
""" If a browser was created close it first. """
if hasattr(self, "_browser"):
self.loop.run_until_complete(self._browser.close())
super().close() | [
"If",
"a",
"browser",
"was",
"created",
"close",
"it",
"first",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L733-L737 | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"\"_browser\"",
")",
":",
"self",
".",
"loop",
".",
"run_until_complete",
"(",
"self",
".",
"_browser",
".",
"close",
"(",
")",
")",
"super",
"(",
")",
".",
"close",
"(",
")"... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | AsyncHTMLSession.request | Partial original request func and run it in a thread. | requests_html.py | def request(self, *args, **kwargs):
""" Partial original request func and run it in a thread. """
func = partial(super().request, *args, **kwargs)
return self.loop.run_in_executor(self.thread_pool, func) | def request(self, *args, **kwargs):
""" Partial original request func and run it in a thread. """
func = partial(super().request, *args, **kwargs)
return self.loop.run_in_executor(self.thread_pool, func) | [
"Partial",
"original",
"request",
"func",
"and",
"run",
"it",
"in",
"a",
"thread",
"."
] | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L756-L759 | [
"def",
"request",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"func",
"=",
"partial",
"(",
"super",
"(",
")",
".",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"loop",
".",
"run_in_execut... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
train | AsyncHTMLSession.run | Pass in all the coroutines you want to run; it will wrap each one
in a task, run it and wait for the result. Return a list with all
results (note: ``asyncio.wait`` returns a set, so input order is not preserved). | requests_html.py | def run(self, *coros):
""" Pass in all the coroutines you want to run, it will wrap each one
in a task, run it and wait for the result. Return a list with all
results, this is returned in the same order coros are passed in. """
tasks = [
asyncio.ensure_future(coro()) for coro in coros
]
done, _ = self.loop.run_until_complete(asyncio.wait(tasks))
return [t.result() for t in done] | def run(self, *coros):
""" Pass in all the coroutines you want to run, it will wrap each one
in a task, run it and wait for the result. Return a list with all
results, this is returned in the same order coros are passed in. """
tasks = [
asyncio.ensure_future(coro()) for coro in coros
]
done, _ = self.loop.run_until_complete(asyncio.wait(tasks))
return [t.result() for t in done] | [
"Pass",
"in",
"all",
"the",
"coroutines",
"you",
"want",
"to",
"run",
"it",
"will",
"wrap",
"each",
"one",
"in",
"a",
"task",
"run",
"it",
"and",
"wait",
"for",
"the",
"result",
".",
"Return",
"a",
"list",
"with",
"all",
"results",
"this",
"is",
"ret... | kennethreitz/requests-html | python | https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L767-L775 | [
"def",
"run",
"(",
"self",
",",
"*",
"coros",
")",
":",
"tasks",
"=",
"[",
"asyncio",
".",
"ensure_future",
"(",
"coro",
"(",
")",
")",
"for",
"coro",
"in",
"coros",
"]",
"done",
",",
"_",
"=",
"self",
".",
"loop",
".",
"run_until_complete",
"(",
... | b59a9f2fb9333d7d467154a0fd82978efdb9d23b |
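README-style usage of ``run``; since ``asyncio.wait`` hands back a set of completed tasks, the result order reflects completion rather than submission:

from requests_html import AsyncHTMLSession

asession = AsyncHTMLSession()

async def get_python():
    return await asession.get('https://python.org/')

async def get_reddit():
    return await asession.get('https://reddit.com/')

results = asession.run(get_python, get_reddit)   # two HTMLResponse objects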
train | add_depth_channel | img_tensor: N, C, H, W | examples/trials/kaggle-tgs-salt/loader.py | def add_depth_channel(img_tensor, pad_mode):
'''
img_tensor: N, C, H, W
'''
img_tensor[:, 1] = get_depth_tensor(pad_mode)
img_tensor[:, 2] = img_tensor[:, 0] * get_depth_tensor(pad_mode) | def add_depth_channel(img_tensor, pad_mode):
'''
img_tensor: N, C, H, W
'''
img_tensor[:, 1] = get_depth_tensor(pad_mode)
img_tensor[:, 2] = img_tensor[:, 0] * get_depth_tensor(pad_mode) | [
"img_tensor",
":",
"N",
"C",
"H",
"W"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/loader.py#L256-L261 | [
"def",
"add_depth_channel",
"(",
"img_tensor",
",",
"pad_mode",
")",
":",
"img_tensor",
"[",
":",
",",
"1",
"]",
"=",
"get_depth_tensor",
"(",
"pad_mode",
")",
"img_tensor",
"[",
":",
",",
"2",
"]",
"=",
"img_tensor",
"[",
":",
",",
"0",
"]",
"*",
"g... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | DotAttention.get_pre_compute | :param s: [src_sequence, batch_size, src_dim]
:return: [src_sequence, batch_size, hidden_dim] | examples/trials/weight_sharing/ga_squad/attention.py | def get_pre_compute(self, s):
'''
:param s: [src_sequence, batch_size, src_dim]
:return: [src_sequence, batch_size, hidden_dim]
'''
hidden_dim = self.hidden_dim
src_dim = s.get_shape().as_list()[-1]
assert src_dim is not None, 'src dim must be defined'
W = self._get_var('W', shape=[src_dim, hidden_dim])
b = self._get_var('b', shape=[1, hidden_dim])
return tf.tensordot(s, W, [[2], [0]]) + b | def get_pre_compute(self, s):
'''
:param s: [src_sequence, batch_size, src_dim]
:return: [src_sequence, batch_size, hidden_dim]
'''
hidden_dim = self.hidden_dim
src_dim = s.get_shape().as_list()[-1]
assert src_dim is not None, 'src dim must be defined'
W = self._get_var('W', shape=[src_dim, hidden_dim])
b = self._get_var('b', shape=[1, hidden_dim])
return tf.tensordot(s, W, [[2], [0]]) + b | [
":",
"param",
"s",
":",
"[",
"src_sequence",
"batch_size",
"src_dim",
"]",
":",
"return",
":",
"[",
"src_sequence",
"batch_size",
".",
"hidden_dim",
"]"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/attention.py#L94-L104 | [
"def",
"get_pre_compute",
"(",
"self",
",",
"s",
")",
":",
"hidden_dim",
"=",
"self",
".",
"hidden_dim",
"src_dim",
"=",
"s",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"[",
"-",
"1",
"]",
"assert",
"src_dim",
"is",
"not",
"None",
",",
"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | DotAttention.get_prob | :param s: [src_sequence_length, batch_size, src_dim]
:param h: [batch_size, tgt_dim] or [tgt_sequence_length, batch_size, tgt_dim]
:param mask: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
:param pre_compute: [src_sequence_length, batch_size, hidden_dim]
:return: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size] | examples/trials/weight_sharing/ga_squad/attention.py | def get_prob(self, src, tgt, mask, pre_compute, return_logits=False):
'''
:param s: [src_sequence_length, batch_size, src_dim]
:param h: [batch_size, tgt_dim] or [tgt_sequence_length, batch_size, tgt_dim]
:param mask: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
:param pre_compute: [src_sequence_length, batch_size, hidden_dim]
:return: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
'''
s_shape = src.get_shape().as_list()
h_shape = tgt.get_shape().as_list()
src_dim = s_shape[-1]
tgt_dim = h_shape[-1]
assert src_dim is not None, 'src dimension must be defined'
assert tgt_dim is not None, 'tgt dimension must be defined'
self._define_params(src_dim, tgt_dim)
if len(h_shape) == 2:
tgt = tf.expand_dims(tgt, 0)
if pre_compute is None:
pre_compute = self.get_pre_compute(src)
buf0 = pre_compute
buf1 = tf.tensordot(tgt, self.var['U'], axes=[[2], [0]])
buf2 = tf.tanh(tf.expand_dims(buf0, 0) + tf.expand_dims(buf1, 1))
if not self.is_vanilla:
xh1 = tgt
xh2 = tgt
s1 = src
if self.need_padding:
xh1 = tf.tensordot(xh1, self.var['V_t'], 1)
xh2 = tf.tensordot(xh2, self.var['S_t'], 1)
s1 = tf.tensordot(s1, self.var['V_s'], 1)
if not self.is_identity_transform:
xh1 = tf.tensordot(xh1, self.var['T'], 1)
xh2 = tf.tensordot(xh2, self.var['T'], 1)
buf3 = tf.expand_dims(s1, 0) * tf.expand_dims(xh1, 1)
buf3 = tf.tanh(tf.tensordot(buf3, self.var['V'], axes=[[3], [0]]))
buf = tf.reshape(tf.tanh(buf2 + buf3), shape=tf.shape(buf3))
else:
buf = buf2
v = self.var['v']
e = tf.tensordot(buf, v, [[3], [0]])
e = tf.squeeze(e, axis=[3])
tmp = tf.reshape(e + (mask - 1) * 10000.0, shape=tf.shape(e))
prob = tf.nn.softmax(tmp, 1)
if len(h_shape) == 2:
prob = tf.squeeze(prob, axis=[0])
tmp = tf.squeeze(tmp, axis=[0])
if return_logits:
return prob, tmp
return prob | def get_prob(self, src, tgt, mask, pre_compute, return_logits=False):
'''
:param s: [src_sequence_length, batch_size, src_dim]
:param h: [batch_size, tgt_dim] or [tgt_sequence_length, batch_size, tgt_dim]
:param mask: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
:param pre_compute: [src_sequence_length, batch_size, hidden_dim]
:return: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
'''
s_shape = src.get_shape().as_list()
h_shape = tgt.get_shape().as_list()
src_dim = s_shape[-1]
tgt_dim = h_shape[-1]
assert src_dim is not None, 'src dimension must be defined'
assert tgt_dim is not None, 'tgt dimension must be defined'
self._define_params(src_dim, tgt_dim)
if len(h_shape) == 2:
tgt = tf.expand_dims(tgt, 0)
if pre_compute is None:
pre_compute = self.get_pre_compute(src)
buf0 = pre_compute
buf1 = tf.tensordot(tgt, self.var['U'], axes=[[2], [0]])
buf2 = tf.tanh(tf.expand_dims(buf0, 0) + tf.expand_dims(buf1, 1))
if not self.is_vanilla:
xh1 = tgt
xh2 = tgt
s1 = src
if self.need_padding:
xh1 = tf.tensordot(xh1, self.var['V_t'], 1)
xh2 = tf.tensordot(xh2, self.var['S_t'], 1)
s1 = tf.tensordot(s1, self.var['V_s'], 1)
if not self.is_identity_transform:
xh1 = tf.tensordot(xh1, self.var['T'], 1)
xh2 = tf.tensordot(xh2, self.var['T'], 1)
buf3 = tf.expand_dims(s1, 0) * tf.expand_dims(xh1, 1)
buf3 = tf.tanh(tf.tensordot(buf3, self.var['V'], axes=[[3], [0]]))
buf = tf.reshape(tf.tanh(buf2 + buf3), shape=tf.shape(buf3))
else:
buf = buf2
v = self.var['v']
e = tf.tensordot(buf, v, [[3], [0]])
e = tf.squeeze(e, axis=[3])
tmp = tf.reshape(e + (mask - 1) * 10000.0, shape=tf.shape(e))
prob = tf.nn.softmax(tmp, 1)
if len(h_shape) == 2:
prob = tf.squeeze(prob, axis=[0])
tmp = tf.squeeze(tmp, axis=[0])
if return_logits:
return prob, tmp
return prob | [
":",
"param",
"s",
":",
"[",
"src_sequence_length",
"batch_size",
"src_dim",
"]",
":",
"param",
"h",
":",
"[",
"batch_size",
"tgt_dim",
"]",
"or",
"[",
"tgt_sequence_length",
"batch_size",
"tgt_dim",
"]",
":",
"param",
"mask",
":",
"[",
"src_sequence_length",
... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/attention.py#L106-L160 | [
"def",
"get_prob",
"(",
"self",
",",
"src",
",",
"tgt",
",",
"mask",
",",
"pre_compute",
",",
"return_logits",
"=",
"False",
")",
":",
"s_shape",
"=",
"src",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"h_shape",
"=",
"tgt",
".",
"get_shape... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | DotAttention.get_att | :param s: [src_sequence_length, batch_size, src_dim]
:param prob: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
:return: [batch_size, src_dim] or [tgt_sequence_length, batch_size, src_dim] | examples/trials/weight_sharing/ga_squad/attention.py | def get_att(self, s, prob):
'''
:param s: [src_sequence_length, batch_size, src_dim]
:param prob: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
:return: [batch_size, src_dim] or [tgt_sequence_length, batch_size, src_dim]
'''
buf = s * tf.expand_dims(prob, axis=-1)
att = tf.reduce_sum(buf, axis=-3)
return att | def get_att(self, s, prob):
'''
:param s: [src_sequence_length, batch_size, src_dim]
:param prob: [src_sequence_length, batch_size]\
or [tgt_sequence_length, src_sequence_length, batch_size]
:return: [batch_size, src_dim] or [tgt_sequence_length, batch_size, src_dim]
'''
buf = s * tf.expand_dims(prob, axis=-1)
att = tf.reduce_sum(buf, axis=-3)
return att | [
":",
"param",
"s",
":",
"[",
"src_sequence_length",
"batch_size",
"src_dim",
"]",
":",
"param",
"prob",
":",
"[",
"src_sequence_length",
"batch_size",
"]",
"\\",
"or",
"[",
"tgt_sequence_length",
"src_sequence_length",
"batch_size",
"]",
":",
"return",
":",
"[",... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/attention.py#L162-L171 | [
"def",
"get_att",
"(",
"self",
",",
"s",
",",
"prob",
")",
":",
"buf",
"=",
"s",
"*",
"tf",
".",
"expand_dims",
"(",
"prob",
",",
"axis",
"=",
"-",
"1",
")",
"att",
"=",
"tf",
".",
"reduce_sum",
"(",
"buf",
",",
"axis",
"=",
"-",
"3",
")",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
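``get_att`` is a probability-weighted sum over source positions; the same computation in NumPy:

import numpy as np

s = np.random.rand(5, 2, 8)                # [src_len, batch, src_dim]
prob = np.full((5, 2), 0.2)                # uniform attention weights
att = (s * prob[..., None]).sum(axis=0)    # [batch, src_dim], matching get_att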
train | shape | Get shape of variable.
Return type is tuple. | examples/trials/weight_sharing/ga_squad/util.py | def shape(tensor):
'''
Get shape of variable.
Return type is tuple.
'''
temp_s = tensor.get_shape()
return tuple([temp_s[i].value for i in range(0, len(temp_s))]) | def shape(tensor):
'''
Get shape of variable.
Return type is tuple.
'''
temp_s = tensor.get_shape()
return tuple([temp_s[i].value for i in range(0, len(temp_s))]) | [
"Get",
"shape",
"of",
"variable",
".",
"Return",
"type",
"is",
"tuple",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L30-L36 | [
"def",
"shape",
"(",
"tensor",
")",
":",
"temp_s",
"=",
"tensor",
".",
"get_shape",
"(",
")",
"return",
"tuple",
"(",
"[",
"temp_s",
"[",
"i",
"]",
".",
"value",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"temp_s",
")",
")",
"]",
")"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | get_variable | Get variable by name. | examples/trials/weight_sharing/ga_squad/util.py | def get_variable(name, temp_s):
'''
Get variable by name.
'''
return tf.Variable(tf.zeros(temp_s), name=name) | def get_variable(name, temp_s):
'''
Get variable by name.
'''
return tf.Variable(tf.zeros(temp_s), name=name) | [
"Get",
"variable",
"by",
"name",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L39-L43 | [
"def",
"get_variable",
"(",
"name",
",",
"temp_s",
")",
":",
"return",
"tf",
".",
"Variable",
"(",
"tf",
".",
"zeros",
"(",
"temp_s",
")",
",",
"name",
"=",
"name",
")"
] | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | dropout | Dropout except test. | examples/trials/weight_sharing/ga_squad/util.py | def dropout(tensor, drop_prob, is_training):
'''
Dropout except test.
'''
if not is_training:
return tensor
return tf.nn.dropout(tensor, 1.0 - drop_prob) | def dropout(tensor, drop_prob, is_training):
'''
Dropout except test.
'''
if not is_training:
return tensor
return tf.nn.dropout(tensor, 1.0 - drop_prob) | [
"Dropout",
"except",
"test",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L46-L52 | [
"def",
"dropout",
"(",
"tensor",
",",
"drop_prob",
",",
"is_training",
")",
":",
"if",
"not",
"is_training",
":",
"return",
"tensor",
"return",
"tf",
".",
"nn",
".",
"dropout",
"(",
"tensor",
",",
"1.0",
"-",
"drop_prob",
")"
] | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
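A small TF1-style sketch of the helper; this code base predates TF2, and ``tf.nn.dropout`` here takes a keep probability:

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[None, 128])
h_train = dropout(x, drop_prob=0.2, is_training=True)    # keep probability 0.8
h_test = dropout(x, drop_prob=0.2, is_training=False)    # identity at test time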
train | Timer.get_elapsed | Calculate time span. | examples/trials/weight_sharing/ga_squad/util.py | def get_elapsed(self, restart=True):
'''
Calculate time span.
'''
end = time.time()
span = end - self.__start
if restart:
self.__start = end
return span | def get_elapsed(self, restart=True):
'''
Calculate time span.
'''
end = time.time()
span = end - self.__start
if restart:
self.__start = end
return span | [
"Calculate",
"time",
"span",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L68-L76 | [
"def",
"get_elapsed",
"(",
"self",
",",
"restart",
"=",
"True",
")",
":",
"end",
"=",
"time",
".",
"time",
"(",
")",
"span",
"=",
"end",
"-",
"self",
".",
"__start",
"if",
"restart",
":",
"self",
".",
"__start",
"=",
"end",
"return",
"span"
] | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | do_tta_predict | return 18000x128x128 np array | examples/trials/kaggle-tgs-salt/predict.py | def do_tta_predict(args, model, ckp_path, tta_num=4):
'''
return 18000x128x128 np array
'''
model.eval()
preds = []
meta = None
# flip_index is the tta index, 0: no change, 1: horizontal flip, 2: vertical flip, 3: do both
for flip_index in range(tta_num):
print('flip_index:', flip_index)
test_loader = get_test_loader(args.batch_size, index=flip_index, dev_mode=False, pad_mode=args.pad_mode)
meta = test_loader.meta
outputs = None
with torch.no_grad():
for i, img in enumerate(test_loader):
add_depth_channel(img, args.pad_mode)
img = img.cuda()
output, _ = model(img)
output = torch.sigmoid(output)
if outputs is None:
outputs = output.squeeze()
else:
outputs = torch.cat([outputs, output.squeeze()], 0)
print('{} / {}'.format(args.batch_size*(i+1), test_loader.num), end='\r')
outputs = outputs.cpu().numpy()
# flip back masks
if flip_index == 1:
outputs = np.flip(outputs, 2)
elif flip_index == 2:
outputs = np.flip(outputs, 1)
elif flip_index == 3:
outputs = np.flip(outputs, 2)
outputs = np.flip(outputs, 1)
#print(outputs.shape)
preds.append(outputs)
parent_dir = ckp_path+'_out'
if not os.path.exists(parent_dir):
os.makedirs(parent_dir)
np_file = os.path.join(parent_dir, 'pred.npy')
model_pred_result = np.mean(preds, 0)
np.save(np_file, model_pred_result)
return model_pred_result, meta | def do_tta_predict(args, model, ckp_path, tta_num=4):
'''
return 18000x128x128 np array
'''
model.eval()
preds = []
meta = None
# flip_index is the tta index, 0: no change, 1: horizontal flip, 2: vertical flip, 3: do both
for flip_index in range(tta_num):
print('flip_index:', flip_index)
test_loader = get_test_loader(args.batch_size, index=flip_index, dev_mode=False, pad_mode=args.pad_mode)
meta = test_loader.meta
outputs = None
with torch.no_grad():
for i, img in enumerate(test_loader):
add_depth_channel(img, args.pad_mode)
img = img.cuda()
output, _ = model(img)
output = torch.sigmoid(output)
if outputs is None:
outputs = output.squeeze()
else:
outputs = torch.cat([outputs, output.squeeze()], 0)
print('{} / {}'.format(args.batch_size*(i+1), test_loader.num), end='\r')
outputs = outputs.cpu().numpy()
# flip back masks
if flip_index == 1:
outputs = np.flip(outputs, 2)
elif flip_index == 2:
outputs = np.flip(outputs, 1)
elif flip_index == 3:
outputs = np.flip(outputs, 2)
outputs = np.flip(outputs, 1)
#print(outputs.shape)
preds.append(outputs)
parent_dir = ckp_path+'_out'
if not os.path.exists(parent_dir):
os.makedirs(parent_dir)
np_file = os.path.join(parent_dir, 'pred.npy')
model_pred_result = np.mean(preds, 0)
np.save(np_file, model_pred_result)
return model_pred_result, meta | [
"return",
"18000x128x128",
"np",
"array"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/predict.py#L37-L83 | [
"def",
"do_tta_predict",
"(",
"args",
",",
"model",
",",
"ckp_path",
",",
"tta_num",
"=",
"4",
")",
":",
"model",
".",
"eval",
"(",
")",
"preds",
"=",
"[",
"]",
"meta",
"=",
"None",
"# i is tta index, 0: no change, 1: horizon flip, 2: vertical flip, 3: do both",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | partition_dataset | Partitioning MNIST | examples/trials/mnist-distributed-pytorch/dist_mnist.py | def partition_dataset():
""" Partitioning MNIST """
dataset = datasets.MNIST(
'./data',
train=True,
download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307, ), (0.3081, ))
]))
size = dist.get_world_size()
bsz = 128 / float(size)
partition_sizes = [1.0 / size for _ in range(size)]
partition = DataPartitioner(dataset, partition_sizes)
partition = partition.use(dist.get_rank())
train_set = torch.utils.data.DataLoader(
partition, batch_size=int(bsz), shuffle=True)
return train_set, bsz | def partition_dataset():
""" Partitioning MNIST """
dataset = datasets.MNIST(
'./data',
train=True,
download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307, ), (0.3081, ))
]))
size = dist.get_world_size()
bsz = 128 / float(size)
partition_sizes = [1.0 / size for _ in range(size)]
partition = DataPartitioner(dataset, partition_sizes)
partition = partition.use(dist.get_rank())
train_set = torch.utils.data.DataLoader(
partition, batch_size=int(bsz), shuffle=True)
return train_set, bsz | [
"Partitioning",
"MNIST"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/mnist-distributed-pytorch/dist_mnist.py#L93-L110 | [
"def",
"partition_dataset",
"(",
")",
":",
"dataset",
"=",
"datasets",
".",
"MNIST",
"(",
"'./data'",
",",
"train",
"=",
"True",
",",
"download",
"=",
"True",
",",
"transform",
"=",
"transforms",
".",
"Compose",
"(",
"[",
"transforms",
".",
"ToTensor",
"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | average_gradients | Gradient averaging. | examples/trials/mnist-distributed-pytorch/dist_mnist.py | def average_gradients(model):
""" Gradient averaging. """
size = float(dist.get_world_size())
for param in model.parameters():
dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM, group=0)
param.grad.data /= size | def average_gradients(model):
""" Gradient averaging. """
size = float(dist.get_world_size())
for param in model.parameters():
dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM, group=0)
param.grad.data /= size | [
"Gradient",
"averaging",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/mnist-distributed-pytorch/dist_mnist.py#L113-L118 | [
"def",
"average_gradients",
"(",
"model",
")",
":",
"size",
"=",
"float",
"(",
"dist",
".",
"get_world_size",
"(",
")",
")",
"for",
"param",
"in",
"model",
".",
"parameters",
"(",
")",
":",
"dist",
".",
"all_reduce",
"(",
"param",
".",
"grad",
".",
"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | run | Distributed Synchronous SGD Example | examples/trials/mnist-distributed-pytorch/dist_mnist.py | def run(params):
""" Distributed Synchronous SGD Example """
rank = dist.get_rank()
torch.manual_seed(1234)
train_set, bsz = partition_dataset()
model = Net()
model = model
optimizer = optim.SGD(model.parameters(), lr=params['learning_rate'], momentum=params['momentum'])
num_batches = ceil(len(train_set.dataset) / float(bsz))
total_loss = 0.0
for epoch in range(3):
epoch_loss = 0.0
for data, target in train_set:
data, target = Variable(data), Variable(target)
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
epoch_loss += loss.item()
loss.backward()
average_gradients(model)
optimizer.step()
#logger.debug('Rank: ', rank, ', epoch: ', epoch, ': ', epoch_loss / num_batches)
if rank == 0:
nni.report_intermediate_result(epoch_loss / num_batches)
total_loss += (epoch_loss / num_batches)
total_loss /= 3
logger.debug('Final loss: {}'.format(total_loss))
if rank == 0:
nni.report_final_result(total_loss) | def run(params):
""" Distributed Synchronous SGD Example """
rank = dist.get_rank()
torch.manual_seed(1234)
train_set, bsz = partition_dataset()
model = Net()
model = model
optimizer = optim.SGD(model.parameters(), lr=params['learning_rate'], momentum=params['momentum'])
num_batches = ceil(len(train_set.dataset) / float(bsz))
total_loss = 0.0
for epoch in range(3):
epoch_loss = 0.0
for data, target in train_set:
data, target = Variable(data), Variable(target)
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
epoch_loss += loss.item()
loss.backward()
average_gradients(model)
optimizer.step()
#logger.debug('Rank: ', rank, ', epoch: ', epoch, ': ', epoch_loss / num_batches)
if rank == 0:
nni.report_intermediate_result(epoch_loss / num_batches)
total_loss += (epoch_loss / num_batches)
total_loss /= 3
logger.debug('Final loss: {}'.format(total_loss))
if rank == 0:
nni.report_final_result(total_loss) | [
"Distributed",
"Synchronous",
"SGD",
"Example"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/mnist-distributed-pytorch/dist_mnist.py#L121-L150 | [
"def",
"run",
"(",
"params",
")",
":",
"rank",
"=",
"dist",
".",
"get_rank",
"(",
")",
"torch",
".",
"manual_seed",
"(",
"1234",
")",
"train_set",
",",
"bsz",
"=",
"partition_dataset",
"(",
")",
"model",
"=",
"Net",
"(",
")",
"model",
"=",
"model",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | graph_loads | Load graph | examples/trials/ga_squad/graph.py | def graph_loads(graph_json):
'''
Load graph
'''
layers = []
for layer in graph_json['layers']:
layer_info = Layer(layer['type'], layer['input'], layer['output'], layer['size'])
layer_info.is_delete = layer['is_delete']
layers.append(layer_info)
graph = Graph(graph_json['max_layer_num'], [], [], [])
graph.layers = layers
return graph | def graph_loads(graph_json):
'''
Load graph
'''
layers = []
for layer in graph_json['layers']:
layer_info = Layer(layer['type'], layer['input'], layer['output'], layer['size'])
layer_info.is_delete = layer['is_delete']
layers.append(layer_info)
graph = Graph(graph_json['max_layer_num'], [], [], [])
graph.layers = layers
return graph | [
"Load",
"graph"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L103-L114 | [
"def",
"graph_loads",
"(",
"graph_json",
")",
":",
"layers",
"=",
"[",
"]",
"for",
"layer",
"in",
"graph_json",
"[",
"'layers'",
"]",
":",
"layer_info",
"=",
"Layer",
"(",
"layer",
"[",
"'type'",
"]",
",",
"layer",
"[",
"'input'",
"]",
",",
"layer",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
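A hypothetical minimal payload with the fields ``graph_loads`` reads; the type codes and sizes below are illustrative only, not taken from the dataset:

graph_json = {
    'max_layer_num': 10,
    'layers': [
        {'type': 0, 'input': [], 'output': [1], 'size': 'x', 'is_delete': False},
        {'type': 3, 'input': [0], 'output': [], 'size': 'x', 'is_delete': False},
    ],
}
graph = graph_loads(graph_json)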
train | Layer.set_size | Set size. | examples/trials/ga_squad/graph.py | def set_size(self, graph_id, size):
'''
Set size.
'''
if self.graph_type == LayerType.attention.value:
if self.input[0] == graph_id:
self.size = size
if self.graph_type == LayerType.rnn.value:
self.size = size
if self.graph_type == LayerType.self_attention.value:
self.size = size
if self.graph_type == LayerType.output.value:
if self.size != size:
return False
return True | def set_size(self, graph_id, size):
'''
Set size.
'''
if self.graph_type == LayerType.attention.value:
if self.input[0] == graph_id:
self.size = size
if self.graph_type == LayerType.rnn.value:
self.size = size
if self.graph_type == LayerType.self_attention.value:
self.size = size
if self.graph_type == LayerType.output.value:
if self.size != size:
return False
return True | [
"Set",
"size",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L69-L83 | [
"def",
"set_size",
"(",
"self",
",",
"graph_id",
",",
"size",
")",
":",
"if",
"self",
".",
"graph_type",
"==",
"LayerType",
".",
"attention",
".",
"value",
":",
"if",
"self",
".",
"input",
"[",
"0",
"]",
"==",
"graph_id",
":",
"self",
".",
"size",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | Layer.clear_size | Clear size | examples/trials/ga_squad/graph.py | def clear_size(self):
'''
Clear size
'''
if self.graph_type == LayerType.attention.value or \
self.graph_type == LayerType.rnn.value or self.graph_type == LayerType.self_attention.value:
    self.size = None
'''
Clear size
'''
if self.graph_type == LayerType.attention.value or \
self.graph_type == LayerType.rnn.value or self.graph_type == LayerType.self_attention.value:
    self.size = None | [
"Clear",
"size"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L85-L91 | [
"def",
"clear_size",
"(",
"self",
")",
":",
"if",
"self",
".",
"graph_type",
"==",
"LayerType",
".",
"attention",
".",
"value",
"or",
"LayerType",
".",
"rnn",
".",
"value",
"or",
"LayerType",
".",
"self_attention",
".",
"value",
":",
"self",
".",
"size",... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | Graph.is_topology | Validate the topology | examples/trials/ga_squad/graph.py | def is_topology(self, layers=None):
'''
        Validate the topology
'''
if layers is None:
layers = self.layers
layers_nodle = []
result = []
for i, layer in enumerate(layers):
if layer.is_delete is False:
layers_nodle.append(i)
while True:
flag_break = True
layers_toremove = []
for layer1 in layers_nodle:
flag_arrive = True
for layer2 in layers[layer1].input:
if layer2 in layers_nodle:
flag_arrive = False
if flag_arrive is True:
for layer2 in layers[layer1].output:
                        # size mismatch between connected layers
if layers[layer2].set_size(layer1, layers[layer1].size) is False:
return False
layers_toremove.append(layer1)
result.append(layer1)
flag_break = False
for layer in layers_toremove:
layers_nodle.remove(layer)
result.append('|')
if flag_break:
break
        # There is a loop in the graph, or some layers are unreachable
if layers_nodle:
return False
return result | def is_topology(self, layers=None):
'''
        Validate the topology
'''
if layers is None:
layers = self.layers
layers_nodle = []
result = []
for i, layer in enumerate(layers):
if layer.is_delete is False:
layers_nodle.append(i)
while True:
flag_break = True
layers_toremove = []
for layer1 in layers_nodle:
flag_arrive = True
for layer2 in layers[layer1].input:
if layer2 in layers_nodle:
flag_arrive = False
if flag_arrive is True:
for layer2 in layers[layer1].output:
                        # size mismatch between connected layers
if layers[layer2].set_size(layer1, layers[layer1].size) is False:
return False
layers_toremove.append(layer1)
result.append(layer1)
flag_break = False
for layer in layers_toremove:
layers_nodle.remove(layer)
result.append('|')
if flag_break:
break
        # There is a loop in the graph, or some layers are unreachable
if layers_nodle:
return False
return result | [
"valid",
"the",
"topology"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L133-L168 | [
"def",
"is_topology",
"(",
"self",
",",
"layers",
"=",
"None",
")",
":",
"if",
"layers",
"is",
"None",
":",
"layers",
"=",
"self",
".",
"layers",
"layers_nodle",
"=",
"[",
"]",
"result",
"=",
"[",
"]",
"for",
"i",
",",
"layer",
"in",
"enumerate",
"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | Graph.is_legal | Judge whether the layers are legal | examples/trials/ga_squad/graph.py | def is_legal(self, layers=None):
'''
        Judge whether the layers are legal
'''
if layers is None:
layers = self.layers
for layer in layers:
if layer.is_delete is False:
if len(layer.input) != layer.input_size:
return False
if len(layer.output) < layer.output_size:
return False
# layer_num <= max_layer_num
if self.layer_num(layers) > self.max_layer_num:
return False
        # There is a loop in the graph, or some layers are unreachable
if self.is_topology(layers) is False:
return False
return True | def is_legal(self, layers=None):
'''
        Judge whether the layers are legal
'''
if layers is None:
layers = self.layers
for layer in layers:
if layer.is_delete is False:
if len(layer.input) != layer.input_size:
return False
if len(layer.output) < layer.output_size:
return False
# layer_num <= max_layer_num
if self.layer_num(layers) > self.max_layer_num:
return False
        # There is a loop in the graph, or some layers are unreachable
if self.is_topology(layers) is False:
return False
return True | [
"Judge",
"whether",
"is",
"legal",
"for",
"layers"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L183-L205 | [
"def",
"is_legal",
"(",
"self",
",",
"layers",
"=",
"None",
")",
":",
"if",
"layers",
"is",
"None",
":",
"layers",
"=",
"self",
".",
"layers",
"for",
"layer",
"in",
"layers",
":",
"if",
"layer",
".",
"is_delete",
"is",
"False",
":",
"if",
"len",
"(... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | Graph.mutation | Mutation for a graph | examples/trials/ga_squad/graph.py | def mutation(self, only_add=False):
'''
Mutation for a graph
'''
types = []
if self.layer_num() < self.max_layer_num:
types.append(0)
types.append(1)
if self.layer_num() > 5 and only_add is False:
types.append(2)
types.append(3)
# 0 : add a layer , delete a edge
# 1 : add a layer , change a edge
# 2 : delete a layer, delete a edge
# 3 : delete a layer, change a edge
graph_type = random.choice(types)
layer_type = random.choice([LayerType.attention.value,\
LayerType.self_attention.value, LayerType.rnn.value])
layers = copy.deepcopy(self.layers)
cnt_try = 0
while True:
layers_in = []
layers_out = []
layers_del = []
for i, layer in enumerate(layers):
if layer.is_delete is False:
if layer.graph_type != LayerType.output.value:
layers_in.append(i)
if layer.graph_type != LayerType.input.value:
layers_out.append(i)
if layer.graph_type != LayerType.output.value\
and layer.graph_type != LayerType.input.value:
layers_del.append(i)
if graph_type <= 1:
new_id = len(layers)
out = random.choice(layers_out)
inputs = []
output = [out]
pos = random.randint(0, len(layers[out].input) - 1)
last_in = layers[out].input[pos]
layers[out].input[pos] = new_id
if graph_type == 0:
layers[last_in].output.remove(out)
if graph_type == 1:
layers[last_in].output.remove(out)
layers[last_in].output.append(new_id)
inputs = [last_in]
lay = Layer(graph_type=layer_type, inputs=inputs, output=output)
while len(inputs) < lay.input_size:
layer1 = random.choice(layers_in)
inputs.append(layer1)
layers[layer1].output.append(new_id)
lay.input = inputs
layers.append(lay)
else:
layer1 = random.choice(layers_del)
for layer2 in layers[layer1].output:
layers[layer2].input.remove(layer1)
if graph_type == 2:
random_in = random.choice(layers_in)
else:
random_in = random.choice(layers[layer1].input)
layers[layer2].input.append(random_in)
layers[random_in].output.append(layer2)
for layer2 in layers[layer1].input:
layers[layer2].output.remove(layer1)
layers[layer1].is_delete = True
if self.is_legal(layers):
self.layers = layers
break
else:
layers = copy.deepcopy(self.layers)
cnt_try += 1 | def mutation(self, only_add=False):
'''
Mutation for a graph
'''
types = []
if self.layer_num() < self.max_layer_num:
types.append(0)
types.append(1)
if self.layer_num() > 5 and only_add is False:
types.append(2)
types.append(3)
# 0 : add a layer , delete a edge
# 1 : add a layer , change a edge
# 2 : delete a layer, delete a edge
# 3 : delete a layer, change a edge
graph_type = random.choice(types)
layer_type = random.choice([LayerType.attention.value,\
LayerType.self_attention.value, LayerType.rnn.value])
layers = copy.deepcopy(self.layers)
cnt_try = 0
while True:
layers_in = []
layers_out = []
layers_del = []
for i, layer in enumerate(layers):
if layer.is_delete is False:
if layer.graph_type != LayerType.output.value:
layers_in.append(i)
if layer.graph_type != LayerType.input.value:
layers_out.append(i)
if layer.graph_type != LayerType.output.value\
and layer.graph_type != LayerType.input.value:
layers_del.append(i)
if graph_type <= 1:
new_id = len(layers)
out = random.choice(layers_out)
inputs = []
output = [out]
pos = random.randint(0, len(layers[out].input) - 1)
last_in = layers[out].input[pos]
layers[out].input[pos] = new_id
if graph_type == 0:
layers[last_in].output.remove(out)
if graph_type == 1:
layers[last_in].output.remove(out)
layers[last_in].output.append(new_id)
inputs = [last_in]
lay = Layer(graph_type=layer_type, inputs=inputs, output=output)
while len(inputs) < lay.input_size:
layer1 = random.choice(layers_in)
inputs.append(layer1)
layers[layer1].output.append(new_id)
lay.input = inputs
layers.append(lay)
else:
layer1 = random.choice(layers_del)
for layer2 in layers[layer1].output:
layers[layer2].input.remove(layer1)
if graph_type == 2:
random_in = random.choice(layers_in)
else:
random_in = random.choice(layers[layer1].input)
layers[layer2].input.append(random_in)
layers[random_in].output.append(layer2)
for layer2 in layers[layer1].input:
layers[layer2].output.remove(layer1)
layers[layer1].is_delete = True
if self.is_legal(layers):
self.layers = layers
break
else:
layers = copy.deepcopy(self.layers)
cnt_try += 1 | [
"Mutation",
"for",
"a",
"graph"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L207-L280 | [
"def",
"mutation",
"(",
"self",
",",
"only_add",
"=",
"False",
")",
":",
"types",
"=",
"[",
"]",
"if",
"self",
".",
"layer_num",
"(",
")",
"<",
"self",
".",
"max_layer_num",
":",
"types",
".",
"append",
"(",
"0",
")",
"types",
".",
"append",
"(",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | SMACTuner._main_cli | Main function of SMAC for CLI interface
Returns
-------
instance
optimizer | src/sdk/pynni/nni/smac_tuner/smac_tuner.py | def _main_cli(self):
"""Main function of SMAC for CLI interface
Returns
-------
instance
optimizer
"""
self.logger.info("SMAC call: %s" % (" ".join(sys.argv)))
cmd_reader = CMDReader()
args, _ = cmd_reader.read_cmd()
root_logger = logging.getLogger()
root_logger.setLevel(args.verbose_level)
logger_handler = logging.StreamHandler(
stream=sys.stdout)
if root_logger.level >= logging.INFO:
formatter = logging.Formatter(
"%(levelname)s:\t%(message)s")
else:
formatter = logging.Formatter(
"%(asctime)s:%(levelname)s:%(name)s:%(message)s",
"%Y-%m-%d %H:%M:%S")
logger_handler.setFormatter(formatter)
root_logger.addHandler(logger_handler)
# remove default handler
root_logger.removeHandler(root_logger.handlers[0])
# Create defaults
rh = None
initial_configs = None
stats = None
incumbent = None
# Create scenario-object
scen = Scenario(args.scenario_file, [])
if args.mode == "SMAC":
optimizer = SMAC(
scenario=scen,
rng=np.random.RandomState(args.seed),
runhistory=rh,
initial_configurations=initial_configs,
stats=stats,
restore_incumbent=incumbent,
run_id=args.seed)
elif args.mode == "ROAR":
optimizer = ROAR(
scenario=scen,
rng=np.random.RandomState(args.seed),
runhistory=rh,
initial_configurations=initial_configs,
run_id=args.seed)
elif args.mode == "EPILS":
optimizer = EPILS(
scenario=scen,
rng=np.random.RandomState(args.seed),
runhistory=rh,
initial_configurations=initial_configs,
run_id=args.seed)
else:
optimizer = None
return optimizer | def _main_cli(self):
"""Main function of SMAC for CLI interface
Returns
-------
instance
optimizer
"""
self.logger.info("SMAC call: %s" % (" ".join(sys.argv)))
cmd_reader = CMDReader()
args, _ = cmd_reader.read_cmd()
root_logger = logging.getLogger()
root_logger.setLevel(args.verbose_level)
logger_handler = logging.StreamHandler(
stream=sys.stdout)
if root_logger.level >= logging.INFO:
formatter = logging.Formatter(
"%(levelname)s:\t%(message)s")
else:
formatter = logging.Formatter(
"%(asctime)s:%(levelname)s:%(name)s:%(message)s",
"%Y-%m-%d %H:%M:%S")
logger_handler.setFormatter(formatter)
root_logger.addHandler(logger_handler)
# remove default handler
root_logger.removeHandler(root_logger.handlers[0])
# Create defaults
rh = None
initial_configs = None
stats = None
incumbent = None
# Create scenario-object
scen = Scenario(args.scenario_file, [])
if args.mode == "SMAC":
optimizer = SMAC(
scenario=scen,
rng=np.random.RandomState(args.seed),
runhistory=rh,
initial_configurations=initial_configs,
stats=stats,
restore_incumbent=incumbent,
run_id=args.seed)
elif args.mode == "ROAR":
optimizer = ROAR(
scenario=scen,
rng=np.random.RandomState(args.seed),
runhistory=rh,
initial_configurations=initial_configs,
run_id=args.seed)
elif args.mode == "EPILS":
optimizer = EPILS(
scenario=scen,
rng=np.random.RandomState(args.seed),
runhistory=rh,
initial_configurations=initial_configs,
run_id=args.seed)
else:
optimizer = None
return optimizer | [
"Main",
"function",
"of",
"SMAC",
"for",
"CLI",
"interface",
"Returns",
"-------",
"instance",
"optimizer"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L66-L130 | [
"def",
"_main_cli",
"(",
"self",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"SMAC call: %s\"",
"%",
"(",
"\" \"",
".",
"join",
"(",
"sys",
".",
"argv",
")",
")",
")",
"cmd_reader",
"=",
"CMDReader",
"(",
")",
"args",
",",
"_",
"=",
"cmd_r... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | SMACTuner.update_search_space | TODO: this is ugly, we put all the initialization work in this method, because initialization relies
on search space, also because update_search_space is called at the beginning.
NOTE: updating search space is not supported.
Parameters
----------
search_space:
search space | src/sdk/pynni/nni/smac_tuner/smac_tuner.py | def update_search_space(self, search_space):
"""TODO: this is urgly, we put all the initialization work in this method, because initialization relies
on search space, also because update_search_space is called at the beginning.
NOTE: updating search space is not supported.
Parameters
----------
search_space:
search space
"""
if not self.update_ss_done:
self.categorical_dict = generate_scenario(search_space)
if self.categorical_dict is None:
raise RuntimeError('categorical dict is not correctly returned after parsing search space.')
self.optimizer = self._main_cli()
self.smbo_solver = self.optimizer.solver
self.loguniform_key = {key for key in search_space.keys() if search_space[key]['_type'] == 'loguniform'}
self.update_ss_done = True
else:
self.logger.warning('update search space is not supported.') | def update_search_space(self, search_space):
"""TODO: this is urgly, we put all the initialization work in this method, because initialization relies
on search space, also because update_search_space is called at the beginning.
NOTE: updating search space is not supported.
Parameters
----------
search_space:
search space
"""
if not self.update_ss_done:
self.categorical_dict = generate_scenario(search_space)
if self.categorical_dict is None:
raise RuntimeError('categorical dict is not correctly returned after parsing search space.')
self.optimizer = self._main_cli()
self.smbo_solver = self.optimizer.solver
self.loguniform_key = {key for key in search_space.keys() if search_space[key]['_type'] == 'loguniform'}
self.update_ss_done = True
else:
self.logger.warning('update search space is not supported.') | [
"TODO",
":",
"this",
"is",
"urgly",
"we",
"put",
"all",
"the",
"initialization",
"work",
"in",
"this",
"method",
"because",
"initialization",
"relies",
"on",
"search",
"space",
"also",
"because",
"update_search_space",
"is",
"called",
"at",
"the",
"beginning",
... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L132-L151 | [
"def",
"update_search_space",
"(",
"self",
",",
"search_space",
")",
":",
"if",
"not",
"self",
".",
"update_ss_done",
":",
"self",
".",
"categorical_dict",
"=",
"generate_scenario",
"(",
"search_space",
")",
"if",
"self",
".",
"categorical_dict",
"is",
"None",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | SMACTuner.receive_trial_result | receive_trial_result
Parameters
----------
parameter_id: int
parameter id
parameters:
parameters
value:
value
Raises
------
RuntimeError
Received parameter id not in total_data | src/sdk/pynni/nni/smac_tuner/smac_tuner.py | def receive_trial_result(self, parameter_id, parameters, value):
"""receive_trial_result
Parameters
----------
parameter_id: int
parameter id
parameters:
parameters
value:
value
Raises
------
RuntimeError
Received parameter id not in total_data
"""
reward = extract_scalar_reward(value)
if self.optimize_mode is OptimizeMode.Maximize:
reward = -reward
if parameter_id not in self.total_data:
raise RuntimeError('Received parameter_id not in total_data.')
if self.first_one:
self.smbo_solver.nni_smac_receive_first_run(self.total_data[parameter_id], reward)
self.first_one = False
else:
self.smbo_solver.nni_smac_receive_runs(self.total_data[parameter_id], reward) | def receive_trial_result(self, parameter_id, parameters, value):
"""receive_trial_result
Parameters
----------
parameter_id: int
parameter id
parameters:
parameters
value:
value
Raises
------
RuntimeError
Received parameter id not in total_data
"""
reward = extract_scalar_reward(value)
if self.optimize_mode is OptimizeMode.Maximize:
reward = -reward
if parameter_id not in self.total_data:
raise RuntimeError('Received parameter_id not in total_data.')
if self.first_one:
self.smbo_solver.nni_smac_receive_first_run(self.total_data[parameter_id], reward)
self.first_one = False
else:
self.smbo_solver.nni_smac_receive_runs(self.total_data[parameter_id], reward) | [
"receive_trial_result",
"Parameters",
"----------",
"parameter_id",
":",
"int",
"parameter",
"id",
"parameters",
":",
"parameters",
"value",
":",
"value",
"Raises",
"------",
"RuntimeError",
"Received",
"parameter",
"id",
"not",
"in",
"total_data"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L153-L180 | [
"def",
"receive_trial_result",
"(",
"self",
",",
"parameter_id",
",",
"parameters",
",",
"value",
")",
":",
"reward",
"=",
"extract_scalar_reward",
"(",
"value",
")",
"if",
"self",
".",
"optimize_mode",
"is",
"OptimizeMode",
".",
"Maximize",
":",
"reward",
"="... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | SMACTuner.convert_loguniform_categorical | Convert the values of type `loguniform` back to their initial range
Also, we convert categorical:
categorical values in search space are changed to list of numbers before,
those original values will be changed back in this function
Parameters
----------
challenger_dict: dict
challenger dict
Returns
-------
dict
dict which stores copy of challengers | src/sdk/pynni/nni/smac_tuner/smac_tuner.py | def convert_loguniform_categorical(self, challenger_dict):
"""Convert the values of type `loguniform` back to their initial range
Also, we convert categorical:
categorical values in search space are changed to list of numbers before,
those original values will be changed back in this function
Parameters
----------
challenger_dict: dict
challenger dict
Returns
-------
dict
dict which stores copy of challengers
"""
converted_dict = {}
for key, value in challenger_dict.items():
# convert to loguniform
if key in self.loguniform_key:
converted_dict[key] = np.exp(challenger_dict[key])
# convert categorical back to original value
elif key in self.categorical_dict:
idx = challenger_dict[key]
converted_dict[key] = self.categorical_dict[key][idx]
else:
converted_dict[key] = value
return converted_dict | def convert_loguniform_categorical(self, challenger_dict):
"""Convert the values of type `loguniform` back to their initial range
Also, we convert categorical:
categorical values in search space are changed to list of numbers before,
those original values will be changed back in this function
Parameters
----------
challenger_dict: dict
challenger dict
Returns
-------
dict
dict which stores copy of challengers
"""
converted_dict = {}
for key, value in challenger_dict.items():
# convert to loguniform
if key in self.loguniform_key:
converted_dict[key] = np.exp(challenger_dict[key])
# convert categorical back to original value
elif key in self.categorical_dict:
idx = challenger_dict[key]
converted_dict[key] = self.categorical_dict[key][idx]
else:
converted_dict[key] = value
return converted_dict | [
"Convert",
"the",
"values",
"of",
"type",
"loguniform",
"back",
"to",
"their",
"initial",
"range",
"Also",
"we",
"convert",
"categorical",
":",
"categorical",
"values",
"in",
"search",
"space",
"are",
"changed",
"to",
"list",
"of",
"numbers",
"before",
"those"... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L182-L209 | [
"def",
"convert_loguniform_categorical",
"(",
"self",
",",
"challenger_dict",
")",
":",
"converted_dict",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"challenger_dict",
".",
"items",
"(",
")",
":",
"# convert to loguniform",
"if",
"key",
"in",
"self",
"."... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | SMACTuner.generate_parameters | generate one instance of hyperparameters
Parameters
----------
parameter_id: int
parameter id
Returns
-------
list
new generated parameters | src/sdk/pynni/nni/smac_tuner/smac_tuner.py | def generate_parameters(self, parameter_id):
"""generate one instance of hyperparameters
Parameters
----------
parameter_id: int
parameter id
Returns
-------
list
new generated parameters
"""
if self.first_one:
init_challenger = self.smbo_solver.nni_smac_start()
self.total_data[parameter_id] = init_challenger
return self.convert_loguniform_categorical(init_challenger.get_dictionary())
else:
challengers = self.smbo_solver.nni_smac_request_challengers()
for challenger in challengers:
self.total_data[parameter_id] = challenger
return self.convert_loguniform_categorical(challenger.get_dictionary()) | def generate_parameters(self, parameter_id):
"""generate one instance of hyperparameters
Parameters
----------
parameter_id: int
parameter id
Returns
-------
list
new generated parameters
"""
if self.first_one:
init_challenger = self.smbo_solver.nni_smac_start()
self.total_data[parameter_id] = init_challenger
return self.convert_loguniform_categorical(init_challenger.get_dictionary())
else:
challengers = self.smbo_solver.nni_smac_request_challengers()
for challenger in challengers:
self.total_data[parameter_id] = challenger
return self.convert_loguniform_categorical(challenger.get_dictionary()) | [
"generate",
"one",
"instance",
"of",
"hyperparameters",
"Parameters",
"----------",
"parameter_id",
":",
"int",
"parameter",
"id",
"Returns",
"-------",
"list",
"new",
"generated",
"parameters"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L211-L232 | [
"def",
"generate_parameters",
"(",
"self",
",",
"parameter_id",
")",
":",
"if",
"self",
".",
"first_one",
":",
"init_challenger",
"=",
"self",
".",
"smbo_solver",
".",
"nni_smac_start",
"(",
")",
"self",
".",
"total_data",
"[",
"parameter_id",
"]",
"=",
"ini... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | SMACTuner.generate_multiple_parameters | generate multiple instances of hyperparameters
Parameters
----------
parameter_id_list: list
list of parameter id
Returns
-------
list
list of new generated parameters | src/sdk/pynni/nni/smac_tuner/smac_tuner.py | def generate_multiple_parameters(self, parameter_id_list):
"""generate mutiple instances of hyperparameters
Parameters
----------
parameter_id_list: list
list of parameter id
Returns
-------
list
list of new generated parameters
"""
if self.first_one:
params = []
for one_id in parameter_id_list:
init_challenger = self.smbo_solver.nni_smac_start()
self.total_data[one_id] = init_challenger
params.append(self.convert_loguniform_categorical(init_challenger.get_dictionary()))
else:
challengers = self.smbo_solver.nni_smac_request_challengers()
cnt = 0
params = []
for challenger in challengers:
if cnt >= len(parameter_id_list):
break
self.total_data[parameter_id_list[cnt]] = challenger
params.append(self.convert_loguniform_categorical(challenger.get_dictionary()))
cnt += 1
return params | def generate_multiple_parameters(self, parameter_id_list):
"""generate mutiple instances of hyperparameters
Parameters
----------
parameter_id_list: list
list of parameter id
Returns
-------
list
list of new generated parameters
"""
if self.first_one:
params = []
for one_id in parameter_id_list:
init_challenger = self.smbo_solver.nni_smac_start()
self.total_data[one_id] = init_challenger
params.append(self.convert_loguniform_categorical(init_challenger.get_dictionary()))
else:
challengers = self.smbo_solver.nni_smac_request_challengers()
cnt = 0
params = []
for challenger in challengers:
if cnt >= len(parameter_id_list):
break
self.total_data[parameter_id_list[cnt]] = challenger
params.append(self.convert_loguniform_categorical(challenger.get_dictionary()))
cnt += 1
return params | [
"generate",
"mutiple",
"instances",
"of",
"hyperparameters",
"Parameters",
"----------",
"parameter_id_list",
":",
"list",
"list",
"of",
"parameter",
"id",
"Returns",
"-------",
"list",
"list",
"of",
"new",
"generated",
"parameters"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L234-L263 | [
"def",
"generate_multiple_parameters",
"(",
"self",
",",
"parameter_id_list",
")",
":",
"if",
"self",
".",
"first_one",
":",
"params",
"=",
"[",
"]",
"for",
"one_id",
"in",
"parameter_id_list",
":",
"init_challenger",
"=",
"self",
".",
"smbo_solver",
".",
"nni... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | lovasz_grad | Computes gradient of the Lovasz extension w.r.t sorted errors
See Alg. 1 in paper | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def lovasz_grad(gt_sorted):
"""
Computes gradient of the Lovasz extension w.r.t sorted errors
See Alg. 1 in paper
"""
p = len(gt_sorted)
gts = gt_sorted.sum()
intersection = gts - gt_sorted.float().cumsum(0)
union = gts + (1 - gt_sorted).float().cumsum(0)
jaccard = 1. - intersection / union
if p > 1: # cover 1-pixel case
jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
return jaccard | def lovasz_grad(gt_sorted):
"""
Computes gradient of the Lovasz extension w.r.t sorted errors
See Alg. 1 in paper
"""
p = len(gt_sorted)
gts = gt_sorted.sum()
intersection = gts - gt_sorted.float().cumsum(0)
union = gts + (1 - gt_sorted).float().cumsum(0)
jaccard = 1. - intersection / union
if p > 1: # cover 1-pixel case
jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
return jaccard | [
"Computes",
"gradient",
"of",
"the",
"Lovasz",
"extension",
"w",
".",
"r",
".",
"t",
"sorted",
"errors",
"See",
"Alg",
".",
"1",
"in",
"paper"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L36-L48 | [
"def",
"lovasz_grad",
"(",
"gt_sorted",
")",
":",
"p",
"=",
"len",
"(",
"gt_sorted",
")",
"gts",
"=",
"gt_sorted",
".",
"sum",
"(",
")",
"intersection",
"=",
"gts",
"-",
"gt_sorted",
".",
"float",
"(",
")",
".",
"cumsum",
"(",
"0",
")",
"union",
"=... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | iou_binary | IoU for foreground class
binary: 1 foreground, 0 background | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def iou_binary(preds, labels, EMPTY=1., ignore=None, per_image=True):
"""
IoU for foreground class
binary: 1 foreground, 0 background
"""
if not per_image:
preds, labels = (preds,), (labels,)
ious = []
for pred, label in zip(preds, labels):
intersection = ((label == 1) & (pred == 1)).sum()
union = ((label == 1) | ((pred == 1) & (label != ignore))).sum()
if not union:
iou = EMPTY
else:
iou = float(intersection) / union
ious.append(iou)
    iou = mean(ious)    # mean across images if per_image
return 100 * iou | def iou_binary(preds, labels, EMPTY=1., ignore=None, per_image=True):
"""
IoU for foreground class
binary: 1 foreground, 0 background
"""
if not per_image:
preds, labels = (preds,), (labels,)
ious = []
for pred, label in zip(preds, labels):
intersection = ((label == 1) & (pred == 1)).sum()
union = ((label == 1) | ((pred == 1) & (label != ignore))).sum()
if not union:
iou = EMPTY
else:
iou = float(intersection) / union
ious.append(iou)
    iou = mean(ious)    # mean across images if per_image
return 100 * iou | [
"IoU",
"for",
"foreground",
"class",
"binary",
":",
"1",
"foreground",
"0",
"background"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L51-L68 | [
"def",
"iou_binary",
"(",
"preds",
",",
"labels",
",",
"EMPTY",
"=",
"1.",
",",
"ignore",
"=",
"None",
",",
"per_image",
"=",
"True",
")",
":",
"if",
"not",
"per_image",
":",
"preds",
",",
"labels",
"=",
"(",
"preds",
",",
")",
",",
"(",
"labels",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
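The per-image computation in plain numpy terms (no ignore label in this toy case):

import numpy as np

pred = np.array([1, 1, 0, 0, 1])
label = np.array([1, 0, 0, 1, 1])
inter = ((label == 1) & (pred == 1)).sum()   # 2 pixels agree on foreground
union = ((label == 1) | (pred == 1)).sum()   # 4 pixels are foreground in either
print(100.0 * inter / union)                 # 50.0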
train | iou | Array of IoU for each (non ignored) class | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def iou(preds, labels, C, EMPTY=1., ignore=None, per_image=False):
"""
Array of IoU for each (non ignored) class
"""
if not per_image:
preds, labels = (preds,), (labels,)
ious = []
for pred, label in zip(preds, labels):
iou = []
for i in range(C):
if i != ignore: # The ignored label is sometimes among predicted classes (ENet - CityScapes)
intersection = ((label == i) & (pred == i)).sum()
union = ((label == i) | ((pred == i) & (label != ignore))).sum()
if not union:
iou.append(EMPTY)
else:
iou.append(float(intersection) / union)
ious.append(iou)
    ious = list(map(mean, zip(*ious)))    # mean across images if per_image; list() keeps it valid on Python 3
return 100 * np.array(ious) | def iou(preds, labels, C, EMPTY=1., ignore=None, per_image=False):
"""
Array of IoU for each (non ignored) class
"""
if not per_image:
preds, labels = (preds,), (labels,)
ious = []
for pred, label in zip(preds, labels):
iou = []
for i in range(C):
if i != ignore: # The ignored label is sometimes among predicted classes (ENet - CityScapes)
intersection = ((label == i) & (pred == i)).sum()
union = ((label == i) | ((pred == i) & (label != ignore))).sum()
if not union:
iou.append(EMPTY)
else:
iou.append(float(intersection) / union)
ious.append(iou)
    ious = list(map(mean, zip(*ious)))    # mean across images if per_image; list() keeps it valid on Python 3
return 100 * np.array(ious) | [
"Array",
"of",
"IoU",
"for",
"each",
"(",
"non",
"ignored",
")",
"class"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L71-L90 | [
"def",
"iou",
"(",
"preds",
",",
"labels",
",",
"C",
",",
"EMPTY",
"=",
"1.",
",",
"ignore",
"=",
"None",
",",
"per_image",
"=",
"False",
")",
":",
"if",
"not",
"per_image",
":",
"preds",
",",
"labels",
"=",
"(",
"preds",
",",
")",
",",
"(",
"l... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | lovasz_hinge | Binary Lovasz hinge loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
per_image: compute the loss per image instead of per batch
ignore: void class id | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def lovasz_hinge(logits, labels, per_image=True, ignore=None):
"""
Binary Lovasz hinge loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
per_image: compute the loss per image instead of per batch
ignore: void class id
"""
if per_image:
loss = mean(lovasz_hinge_flat(*flatten_binary_scores(log.unsqueeze(0), lab.unsqueeze(0), ignore))
for log, lab in zip(logits, labels))
else:
loss = lovasz_hinge_flat(*flatten_binary_scores(logits, labels, ignore))
return loss | def lovasz_hinge(logits, labels, per_image=True, ignore=None):
"""
Binary Lovasz hinge loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
per_image: compute the loss per image instead of per batch
ignore: void class id
"""
if per_image:
loss = mean(lovasz_hinge_flat(*flatten_binary_scores(log.unsqueeze(0), lab.unsqueeze(0), ignore))
for log, lab in zip(logits, labels))
else:
loss = lovasz_hinge_flat(*flatten_binary_scores(logits, labels, ignore))
return loss | [
"Binary",
"Lovasz",
"hinge",
"loss",
"logits",
":",
"[",
"B",
"H",
"W",
"]",
"Variable",
"logits",
"at",
"each",
"pixel",
"(",
"between",
"-",
"\\",
"infty",
"and",
"+",
"\\",
"infty",
")",
"labels",
":",
"[",
"B",
"H",
"W",
"]",
"Tensor",
"binary"... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L96-L109 | [
"def",
"lovasz_hinge",
"(",
"logits",
",",
"labels",
",",
"per_image",
"=",
"True",
",",
"ignore",
"=",
"None",
")",
":",
"if",
"per_image",
":",
"loss",
"=",
"mean",
"(",
"lovasz_hinge_flat",
"(",
"*",
"flatten_binary_scores",
"(",
"log",
".",
"unsqueeze"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | lovasz_hinge_flat | Binary Lovasz hinge loss
logits: [P] Variable, logits at each prediction (between -\infty and +\infty)
labels: [P] Tensor, binary ground truth labels (0 or 1) | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def lovasz_hinge_flat(logits, labels):
"""
Binary Lovasz hinge loss
logits: [P] Variable, logits at each prediction (between -\infty and +\infty)
labels: [P] Tensor, binary ground truth labels (0 or 1)
"""
if len(labels) == 0:
# only void pixels, the gradients should be 0
return logits.sum() * 0.
signs = 2. * labels.float() - 1.
errors = (1. - logits * Variable(signs))
errors_sorted, perm = torch.sort(errors, dim=0, descending=True)
perm = perm.data
gt_sorted = labels[perm]
grad = lovasz_grad(gt_sorted)
loss = torch.dot(F.elu(errors_sorted)+1, Variable(grad))
#loss = torch.dot(F.relu(errors_sorted), Variable(grad))
return loss | def lovasz_hinge_flat(logits, labels):
"""
Binary Lovasz hinge loss
logits: [P] Variable, logits at each prediction (between -\infty and +\infty)
labels: [P] Tensor, binary ground truth labels (0 or 1)
"""
if len(labels) == 0:
# only void pixels, the gradients should be 0
return logits.sum() * 0.
signs = 2. * labels.float() - 1.
errors = (1. - logits * Variable(signs))
errors_sorted, perm = torch.sort(errors, dim=0, descending=True)
perm = perm.data
gt_sorted = labels[perm]
grad = lovasz_grad(gt_sorted)
loss = torch.dot(F.elu(errors_sorted)+1, Variable(grad))
#loss = torch.dot(F.relu(errors_sorted), Variable(grad))
return loss | [
"Binary",
"Lovasz",
"hinge",
"loss",
"logits",
":",
"[",
"P",
"]",
"Variable",
"logits",
"at",
"each",
"prediction",
"(",
"between",
"-",
"\\",
"infty",
"and",
"+",
"\\",
"infty",
")",
"labels",
":",
"[",
"P",
"]",
"Tensor",
"binary",
"ground",
"truth"... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L112-L131 | [
"def",
"lovasz_hinge_flat",
"(",
"logits",
",",
"labels",
")",
":",
"if",
"len",
"(",
"labels",
")",
"==",
"0",
":",
"# only void pixels, the gradients should be 0",
"return",
"logits",
".",
"sum",
"(",
")",
"*",
"0.",
"signs",
"=",
"2.",
"*",
"labels",
".... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | flatten_binary_scores | Flattens predictions in the batch (binary case)
Remove labels equal to 'ignore' | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def flatten_binary_scores(scores, labels, ignore=None):
"""
Flattens predictions in the batch (binary case)
Remove labels equal to 'ignore'
"""
scores = scores.view(-1)
labels = labels.view(-1)
if ignore is None:
return scores, labels
valid = (labels != ignore)
vscores = scores[valid]
vlabels = labels[valid]
return vscores, vlabels | def flatten_binary_scores(scores, labels, ignore=None):
"""
Flattens predictions in the batch (binary case)
Remove labels equal to 'ignore'
"""
scores = scores.view(-1)
labels = labels.view(-1)
if ignore is None:
return scores, labels
valid = (labels != ignore)
vscores = scores[valid]
vlabels = labels[valid]
return vscores, vlabels | [
"Flattens",
"predictions",
"in",
"the",
"batch",
"(",
"binary",
"case",
")",
"Remove",
"labels",
"equal",
"to",
"ignore"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L134-L146 | [
"def",
"flatten_binary_scores",
"(",
"scores",
",",
"labels",
",",
"ignore",
"=",
"None",
")",
":",
"scores",
"=",
"scores",
".",
"view",
"(",
"-",
"1",
")",
"labels",
"=",
"labels",
".",
"view",
"(",
"-",
"1",
")",
"if",
"ignore",
"is",
"None",
":... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | binary_xloss | Binary Cross entropy loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
ignore: void class id | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def binary_xloss(logits, labels, ignore=None):
"""
Binary Cross entropy loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
ignore: void class id
"""
logits, labels = flatten_binary_scores(logits, labels, ignore)
loss = StableBCELoss()(logits, Variable(labels.float()))
return loss | def binary_xloss(logits, labels, ignore=None):
"""
Binary Cross entropy loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
ignore: void class id
"""
logits, labels = flatten_binary_scores(logits, labels, ignore)
loss = StableBCELoss()(logits, Variable(labels.float()))
return loss | [
"Binary",
"Cross",
"entropy",
"loss",
"logits",
":",
"[",
"B",
"H",
"W",
"]",
"Variable",
"logits",
"at",
"each",
"pixel",
"(",
"between",
"-",
"\\",
"infty",
"and",
"+",
"\\",
"infty",
")",
"labels",
":",
"[",
"B",
"H",
"W",
"]",
"Tensor",
"binary... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L158-L167 | [
"def",
"binary_xloss",
"(",
"logits",
",",
"labels",
",",
"ignore",
"=",
"None",
")",
":",
"logits",
",",
"labels",
"=",
"flatten_binary_scores",
"(",
"logits",
",",
"labels",
",",
"ignore",
")",
"loss",
"=",
"StableBCELoss",
"(",
")",
"(",
"logits",
","... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | lovasz_softmax | Multi-class Lovasz-Softmax loss
probas: [B, C, H, W] Variable, class probabilities at each prediction (between 0 and 1)
labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth
per_image: compute the loss per image instead of per batch
ignore: void class labels | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def lovasz_softmax(probas, labels, only_present=False, per_image=False, ignore=None):
"""
Multi-class Lovasz-Softmax loss
probas: [B, C, H, W] Variable, class probabilities at each prediction (between 0 and 1)
labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth
per_image: compute the loss per image instead of per batch
ignore: void class labels
"""
if per_image:
loss = mean(lovasz_softmax_flat(*flatten_probas(prob.unsqueeze(0), lab.unsqueeze(0), ignore), only_present=only_present)
for prob, lab in zip(probas, labels))
else:
loss = lovasz_softmax_flat(*flatten_probas(probas, labels, ignore), only_present=only_present)
return loss | def lovasz_softmax(probas, labels, only_present=False, per_image=False, ignore=None):
"""
Multi-class Lovasz-Softmax loss
probas: [B, C, H, W] Variable, class probabilities at each prediction (between 0 and 1)
labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth
per_image: compute the loss per image instead of per batch
ignore: void class labels
"""
if per_image:
loss = mean(lovasz_softmax_flat(*flatten_probas(prob.unsqueeze(0), lab.unsqueeze(0), ignore), only_present=only_present)
for prob, lab in zip(probas, labels))
else:
loss = lovasz_softmax_flat(*flatten_probas(probas, labels, ignore), only_present=only_present)
return loss | [
"Multi",
"-",
"class",
"Lovasz",
"-",
"Softmax",
"loss",
"probas",
":",
"[",
"B",
"C",
"H",
"W",
"]",
"Variable",
"class",
"probabilities",
"at",
"each",
"prediction",
"(",
"between",
"0",
"and",
"1",
")",
"labels",
":",
"[",
"B",
"H",
"W",
"]",
"T... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L173-L187 | [
"def",
"lovasz_softmax",
"(",
"probas",
",",
"labels",
",",
"only_present",
"=",
"False",
",",
"per_image",
"=",
"False",
",",
"ignore",
"=",
"None",
")",
":",
"if",
"per_image",
":",
"loss",
"=",
"mean",
"(",
"lovasz_softmax_flat",
"(",
"*",
"flatten_prob... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | lovasz_softmax_flat | Multi-class Lovasz-Softmax loss
probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1)
labels: [P] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def lovasz_softmax_flat(probas, labels, only_present=False):
"""
Multi-class Lovasz-Softmax loss
probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1)
labels: [P] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth
"""
C = probas.size(1)
losses = []
for c in range(C):
fg = (labels == c).float() # foreground for class c
if only_present and fg.sum() == 0:
continue
errors = (Variable(fg) - probas[:, c]).abs()
errors_sorted, perm = torch.sort(errors, 0, descending=True)
perm = perm.data
fg_sorted = fg[perm]
losses.append(torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted))))
return mean(losses) | def lovasz_softmax_flat(probas, labels, only_present=False):
"""
Multi-class Lovasz-Softmax loss
probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1)
labels: [P] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth
"""
C = probas.size(1)
losses = []
for c in range(C):
fg = (labels == c).float() # foreground for class c
if only_present and fg.sum() == 0:
continue
errors = (Variable(fg) - probas[:, c]).abs()
errors_sorted, perm = torch.sort(errors, 0, descending=True)
perm = perm.data
fg_sorted = fg[perm]
losses.append(torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted))))
return mean(losses) | [
"Multi",
"-",
"class",
"Lovasz",
"-",
"Softmax",
"loss",
"probas",
":",
"[",
"P",
"C",
"]",
"Variable",
"class",
"probabilities",
"at",
"each",
"prediction",
"(",
"between",
"0",
"and",
"1",
")",
"labels",
":",
"[",
"P",
"]",
"Tensor",
"ground",
"truth... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L190-L208 | [
"def",
"lovasz_softmax_flat",
"(",
"probas",
",",
"labels",
",",
"only_present",
"=",
"False",
")",
":",
"C",
"=",
"probas",
".",
"size",
"(",
"1",
")",
"losses",
"=",
"[",
"]",
"for",
"c",
"in",
"range",
"(",
"C",
")",
":",
"fg",
"=",
"(",
"labe... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | flatten_probas | Flattens predictions in the batch | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def flatten_probas(probas, labels, ignore=None):
"""
Flattens predictions in the batch
"""
B, C, H, W = probas.size()
probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C
labels = labels.view(-1)
if ignore is None:
return probas, labels
valid = (labels != ignore)
vprobas = probas[valid.nonzero().squeeze()]
vlabels = labels[valid]
return vprobas, vlabels | def flatten_probas(probas, labels, ignore=None):
"""
Flattens predictions in the batch
"""
B, C, H, W = probas.size()
probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C
labels = labels.view(-1)
if ignore is None:
return probas, labels
valid = (labels != ignore)
vprobas = probas[valid.nonzero().squeeze()]
vlabels = labels[valid]
return vprobas, vlabels | [
"Flattens",
"predictions",
"in",
"the",
"batch"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L211-L223 | [
"def",
"flatten_probas",
"(",
"probas",
",",
"labels",
",",
"ignore",
"=",
"None",
")",
":",
"B",
",",
"C",
",",
"H",
",",
"W",
"=",
"probas",
".",
"size",
"(",
")",
"probas",
"=",
"probas",
".",
"permute",
"(",
"0",
",",
"2",
",",
"3",
",",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
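Shape-wise, the permute/view moves the class axis last and flattens everything else:

import torch

probas = torch.rand(2, 3, 4, 4)              # B=2, C=3 classes, 4x4 pixels
labels = torch.randint(0, 3, (2, 4, 4))
vprobas, vlabels = flatten_probas(probas, labels)
print(vprobas.shape, vlabels.shape)          # torch.Size([32, 3]) torch.Size([32])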
train | xloss | Cross entropy loss | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def xloss(logits, labels, ignore=None):
"""
Cross entropy loss
"""
return F.cross_entropy(logits, Variable(labels), ignore_index=255) | def xloss(logits, labels, ignore=None):
"""
Cross entropy loss
"""
return F.cross_entropy(logits, Variable(labels), ignore_index=255) | [
"Cross",
"entropy",
"loss"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L225-L229 | [
"def",
"xloss",
"(",
"logits",
",",
"labels",
",",
"ignore",
"=",
"None",
")",
":",
"return",
"F",
".",
"cross_entropy",
"(",
"logits",
",",
"Variable",
"(",
"labels",
")",
",",
"ignore_index",
"=",
"255",
")"
] | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | mean | nanmean compatible with generators. | examples/trials/kaggle-tgs-salt/lovasz_losses.py | def mean(l, ignore_nan=False, empty=0):
"""
nanmean compatible with generators.
"""
l = iter(l)
if ignore_nan:
l = ifilterfalse(np.isnan, l)
try:
n = 1
acc = next(l)
except StopIteration:
if empty == 'raise':
raise ValueError('Empty mean')
return empty
for n, v in enumerate(l, 2):
acc += v
if n == 1:
return acc
return acc / n | def mean(l, ignore_nan=False, empty=0):
"""
nanmean compatible with generators.
"""
l = iter(l)
if ignore_nan:
l = ifilterfalse(np.isnan, l)
try:
n = 1
acc = next(l)
except StopIteration:
if empty == 'raise':
raise ValueError('Empty mean')
return empty
for n, v in enumerate(l, 2):
acc += v
if n == 1:
return acc
return acc / n | [
"nanmean",
"compatible",
"with",
"generators",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L234-L252 | [
"def",
"mean",
"(",
"l",
",",
"ignore_nan",
"=",
"False",
",",
"empty",
"=",
"0",
")",
":",
"l",
"=",
"iter",
"(",
"l",
")",
"if",
"ignore_nan",
":",
"l",
"=",
"ifilterfalse",
"(",
"np",
".",
"isnan",
",",
"l",
")",
"try",
":",
"n",
"=",
"1",... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | main_loop | main loop logic for trial keeper | tools/nni_trial_tool/trial_keeper.py | def main_loop(args):
'''main loop logic for trial keeper'''
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
stdout_file = open(STDOUT_FULL_PATH, 'a+')
stderr_file = open(STDERR_FULL_PATH, 'a+')
trial_keeper_syslogger = RemoteLogger(args.nnimanager_ip, args.nnimanager_port, 'trial_keeper', StdOutputType.Stdout, args.log_collection)
# redirect trial keeper's stdout and stderr to syslog
trial_syslogger_stdout = RemoteLogger(args.nnimanager_ip, args.nnimanager_port, 'trial', StdOutputType.Stdout, args.log_collection)
sys.stdout = sys.stderr = trial_keeper_syslogger
# backward compatibility
hdfs_host = None
hdfs_output_dir = None
if args.hdfs_host:
hdfs_host = args.hdfs_host
elif args.pai_hdfs_host:
hdfs_host = args.pai_hdfs_host
if args.hdfs_output_dir:
hdfs_output_dir = args.hdfs_output_dir
elif args.pai_hdfs_output_dir:
hdfs_output_dir = args.pai_hdfs_output_dir
if hdfs_host is not None and args.nni_hdfs_exp_dir is not None:
try:
if args.webhdfs_path:
hdfs_client = HdfsClient(hosts='{0}:80'.format(hdfs_host), user_name=args.pai_user_name, webhdfs_path=args.webhdfs_path, timeout=5)
else:
# backward compatibility
hdfs_client = HdfsClient(hosts='{0}:{1}'.format(hdfs_host, '50070'), user_name=args.pai_user_name, timeout=5)
except Exception as e:
nni_log(LogType.Error, 'Create HDFS client error: ' + str(e))
raise e
copyHdfsDirectoryToLocal(args.nni_hdfs_exp_dir, os.getcwd(), hdfs_client)
    # Notice: We don't specify env, which means the subprocess will inherit the current environment, and that is the expected behavior
log_pipe_stdout = trial_syslogger_stdout.get_pipelog_reader()
process = Popen(args.trial_command, shell = True, stdout = log_pipe_stdout, stderr = log_pipe_stdout)
nni_log(LogType.Info, 'Trial keeper spawns a subprocess (pid {0}) to run command: {1}'.format(process.pid, shlex.split(args.trial_command)))
while True:
retCode = process.poll()
# child worker process exits and all stdout data is read
if retCode is not None and log_pipe_stdout.set_process_exit() and log_pipe_stdout.is_read_completed == True:
nni_log(LogType.Info, 'subprocess terminated. Exit code is {}. Quit'.format(retCode))
if hdfs_output_dir is not None:
# Copy local directory to hdfs for OpenPAI
nni_local_output_dir = os.environ['NNI_OUTPUT_DIR']
try:
if copyDirectoryToHdfs(nni_local_output_dir, hdfs_output_dir, hdfs_client):
nni_log(LogType.Info, 'copy directory from {0} to {1} success!'.format(nni_local_output_dir, hdfs_output_dir))
else:
nni_log(LogType.Info, 'copy directory from {0} to {1} failed!'.format(nni_local_output_dir, hdfs_output_dir))
except Exception as e:
nni_log(LogType.Error, 'HDFS copy directory got exception: ' + str(e))
raise e
            ## Exit with the retCode of the subprocess (trial)
exit(retCode)
break
time.sleep(2) | def main_loop(args):
'''main loop logic for trial keeper'''
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
stdout_file = open(STDOUT_FULL_PATH, 'a+')
stderr_file = open(STDERR_FULL_PATH, 'a+')
trial_keeper_syslogger = RemoteLogger(args.nnimanager_ip, args.nnimanager_port, 'trial_keeper', StdOutputType.Stdout, args.log_collection)
# redirect trial keeper's stdout and stderr to syslog
trial_syslogger_stdout = RemoteLogger(args.nnimanager_ip, args.nnimanager_port, 'trial', StdOutputType.Stdout, args.log_collection)
sys.stdout = sys.stderr = trial_keeper_syslogger
# backward compatibility
hdfs_host = None
hdfs_output_dir = None
if args.hdfs_host:
hdfs_host = args.hdfs_host
elif args.pai_hdfs_host:
hdfs_host = args.pai_hdfs_host
if args.hdfs_output_dir:
hdfs_output_dir = args.hdfs_output_dir
elif args.pai_hdfs_output_dir:
hdfs_output_dir = args.pai_hdfs_output_dir
if hdfs_host is not None and args.nni_hdfs_exp_dir is not None:
try:
if args.webhdfs_path:
hdfs_client = HdfsClient(hosts='{0}:80'.format(hdfs_host), user_name=args.pai_user_name, webhdfs_path=args.webhdfs_path, timeout=5)
else:
# backward compatibility
hdfs_client = HdfsClient(hosts='{0}:{1}'.format(hdfs_host, '50070'), user_name=args.pai_user_name, timeout=5)
except Exception as e:
nni_log(LogType.Error, 'Create HDFS client error: ' + str(e))
raise e
copyHdfsDirectoryToLocal(args.nni_hdfs_exp_dir, os.getcwd(), hdfs_client)
    # Notice: We don't specify env, which means the subprocess will inherit the current environment, and that is the expected behavior
log_pipe_stdout = trial_syslogger_stdout.get_pipelog_reader()
process = Popen(args.trial_command, shell = True, stdout = log_pipe_stdout, stderr = log_pipe_stdout)
nni_log(LogType.Info, 'Trial keeper spawns a subprocess (pid {0}) to run command: {1}'.format(process.pid, shlex.split(args.trial_command)))
while True:
retCode = process.poll()
# child worker process exits and all stdout data is read
if retCode is not None and log_pipe_stdout.set_process_exit() and log_pipe_stdout.is_read_completed == True:
nni_log(LogType.Info, 'subprocess terminated. Exit code is {}. Quit'.format(retCode))
if hdfs_output_dir is not None:
# Copy local directory to hdfs for OpenPAI
nni_local_output_dir = os.environ['NNI_OUTPUT_DIR']
try:
if copyDirectoryToHdfs(nni_local_output_dir, hdfs_output_dir, hdfs_client):
nni_log(LogType.Info, 'copy directory from {0} to {1} success!'.format(nni_local_output_dir, hdfs_output_dir))
else:
nni_log(LogType.Info, 'copy directory from {0} to {1} failed!'.format(nni_local_output_dir, hdfs_output_dir))
except Exception as e:
nni_log(LogType.Error, 'HDFS copy directory got exception: ' + str(e))
raise e
            ## Exit with the retCode of the subprocess (trial)
exit(retCode)
break
time.sleep(2) | [
"main",
"loop",
"logic",
"for",
"trial",
"keeper"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/tools/nni_trial_tool/trial_keeper.py#L43-L105 | [
"def",
"main_loop",
"(",
"args",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"LOG_DIR",
")",
":",
"os",
".",
"makedirs",
"(",
"LOG_DIR",
")",
"stdout_file",
"=",
"open",
"(",
"STDOUT_FULL_PATH",
",",
"'a+'",
")",
"stderr_file",
"=",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
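The keeper's wait loop reduces to a standard Popen poll pattern; a minimal sketch without the NNI log piping or the HDFS copy:

import shlex
import time
from subprocess import Popen

process = Popen(shlex.split('python -c "print(42)"'))
while True:
    ret = process.poll()          # None while the child is still running
    if ret is not None:
        print('subprocess terminated. Exit code is', ret)
        break
    time.sleep(2)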
train | ShuffleBlock.forward | Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,w] -> [N,C,H,W] | examples/trials/cifar10_pytorch/models/shufflenet.py | def forward(self, x):
        '''Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,W] -> [N,C,H,W]'''
N,C,H,W = x.size()
g = self.groups
        return x.view(N,g,C//g,H,W).permute(0,2,1,3,4).contiguous().view(N,C,H,W) | def forward(self, x):
        '''Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,W] -> [N,C,H,W]'''
N,C,H,W = x.size()
g = self.groups
        return x.view(N,g,C//g,H,W).permute(0,2,1,3,4).contiguous().view(N,C,H,W) | [
"Channel",
"shuffle",
":",
"[",
"N",
"C",
"H",
"W",
"]",
"-",
">",
"[",
"N",
"g",
"C",
"/",
"g",
"H",
"W",
"]",
"-",
">",
"[",
"N",
"C",
"/",
"g",
"g",
"H",
"w",
"]",
"-",
">",
"[",
"N",
"C",
"H",
"W",
"]"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/cifar10_pytorch/models/shufflenet.py#L15-L19 | [
"def",
"forward",
"(",
"self",
",",
"x",
")",
":",
"N",
",",
"C",
",",
"H",
",",
"W",
"=",
"x",
".",
"size",
"(",
")",
"g",
"=",
"self",
".",
"groups",
"return",
"x",
".",
"view",
"(",
"N",
",",
"g",
",",
"C",
"/",
"g",
",",
"H",
",",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
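A concrete shuffle with six channels and two groups shows the interleaving; note the integer division C // g, required under Python 3:

import torch

x = torch.arange(6).view(1, 6, 1, 1)     # N=1, C=6, one pixel per channel
N, C, H, W = x.size()
g = 2                                     # two groups of three channels
y = x.view(N, g, C // g, H, W).permute(0, 2, 1, 3, 4).contiguous().view(N, C, H, W)
print(y.flatten().tolist())               # [0, 3, 1, 4, 2, 5]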
train | load_embedding | return embedding for a specific file by given file path. | examples/trials/ga_squad/trial.py | def load_embedding(path):
'''
return embedding for a specific file by given file path.
'''
EMBEDDING_DIM = 300
embedding_dict = {}
with open(path, 'r', encoding='utf-8') as file:
pairs = [line.strip('\r\n').split() for line in file.readlines()]
for pair in pairs:
if len(pair) == EMBEDDING_DIM + 1:
embedding_dict[pair[0]] = [float(x) for x in pair[1:]]
logger.debug('embedding_dict size: %d', len(embedding_dict))
return embedding_dict | def load_embedding(path):
'''
return embedding for a specific file by given file path.
'''
EMBEDDING_DIM = 300
embedding_dict = {}
with open(path, 'r', encoding='utf-8') as file:
pairs = [line.strip('\r\n').split() for line in file.readlines()]
for pair in pairs:
if len(pair) == EMBEDDING_DIM + 1:
embedding_dict[pair[0]] = [float(x) for x in pair[1:]]
logger.debug('embedding_dict size: %d', len(embedding_dict))
return embedding_dict | [
"return",
"embedding",
"for",
"a",
"specific",
"file",
"by",
"given",
"file",
"path",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/trial.py#L87-L99 | [
"def",
"load_embedding",
"(",
"path",
")",
":",
"EMBEDDING_DIM",
"=",
"300",
"embedding_dict",
"=",
"{",
"}",
"with",
"open",
"(",
"path",
",",
"'r'",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"file",
":",
"pairs",
"=",
"[",
"line",
".",
"strip",
"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | generate_predict_json | Generate json by prediction. | examples/trials/ga_squad/trial.py | def generate_predict_json(position1_result, position2_result, ids, passage_tokens):
'''
Generate json by prediction.
'''
predict_len = len(position1_result)
logger.debug('total prediction num is %s', str(predict_len))
answers = {}
for i in range(predict_len):
sample_id = ids[i]
passage, tokens = passage_tokens[i]
kbest = find_best_answer_span(
position1_result[i], position2_result[i], len(tokens), 23)
_, start, end = kbest[0]
answer = passage[tokens[start]['char_begin']:tokens[end]['char_end']]
answers[sample_id] = answer
logger.debug('generate predict done.')
return answers | def generate_predict_json(position1_result, position2_result, ids, passage_tokens):
'''
Generate json by prediction.
'''
predict_len = len(position1_result)
logger.debug('total prediction num is %s', str(predict_len))
answers = {}
for i in range(predict_len):
sample_id = ids[i]
passage, tokens = passage_tokens[i]
kbest = find_best_answer_span(
position1_result[i], position2_result[i], len(tokens), 23)
_, start, end = kbest[0]
answer = passage[tokens[start]['char_begin']:tokens[end]['char_end']]
answers[sample_id] = answer
logger.debug('generate predict done.')
return answers | [
"Generate",
"json",
"by",
"prediction",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/trial.py#L252-L269 | [
"def",
"generate_predict_json",
"(",
"position1_result",
",",
"position2_result",
",",
"ids",
",",
"passage_tokens",
")",
":",
"predict_len",
"=",
"len",
"(",
"position1_result",
")",
"logger",
".",
"debug",
"(",
"'total prediction num is %s'",
",",
"str",
"(",
"p... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | generate_data | Generate data | examples/trials/ga_squad/trial.py | def generate_data(path, tokenizer, char_vcb, word_vcb, is_training=False):
'''
Generate data
'''
global root_path
qp_pairs = data.load_from_file(path=path, is_training=is_training)
tokenized_sent = 0
    # qp_pairs = qp_pairs[:1000]
for qp_pair in qp_pairs:
tokenized_sent += 1
data.tokenize(qp_pair, tokenizer, is_training)
for word in qp_pair['question_tokens']:
word_vcb.add(word['word'])
for char in word['word']:
char_vcb.add(char)
for word in qp_pair['passage_tokens']:
word_vcb.add(word['word'])
for char in word['word']:
char_vcb.add(char)
max_query_length = max(len(x['question_tokens']) for x in qp_pairs)
max_passage_length = max(len(x['passage_tokens']) for x in qp_pairs)
#min_passage_length = min(len(x['passage_tokens']) for x in qp_pairs)
cfg.max_query_length = max_query_length
cfg.max_passage_length = max_passage_length
return qp_pairs | def generate_data(path, tokenizer, char_vcb, word_vcb, is_training=False):
'''
Generate data
'''
global root_path
qp_pairs = data.load_from_file(path=path, is_training=is_training)
tokenized_sent = 0
    # qp_pairs = qp_pairs[:1000]
for qp_pair in qp_pairs:
tokenized_sent += 1
data.tokenize(qp_pair, tokenizer, is_training)
for word in qp_pair['question_tokens']:
word_vcb.add(word['word'])
for char in word['word']:
char_vcb.add(char)
for word in qp_pair['passage_tokens']:
word_vcb.add(word['word'])
for char in word['word']:
char_vcb.add(char)
max_query_length = max(len(x['question_tokens']) for x in qp_pairs)
max_passage_length = max(len(x['passage_tokens']) for x in qp_pairs)
#min_passage_length = min(len(x['passage_tokens']) for x in qp_pairs)
cfg.max_query_length = max_query_length
cfg.max_passage_length = max_passage_length
return qp_pairs | [
"Generate",
"data"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/trial.py#L272-L299 | [
"def",
"generate_data",
"(",
"path",
",",
"tokenizer",
",",
"char_vcb",
",",
"word_vcb",
",",
"is_training",
"=",
"False",
")",
":",
"global",
"root_path",
"qp_pairs",
"=",
"data",
".",
"load_from_file",
"(",
"path",
"=",
"path",
",",
"is_training",
"=",
"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | f1_score | Calculate the f1 score. | examples/trials/ga_squad/evaluate.py | def f1_score(prediction, ground_truth):
'''
Calculate the f1 score.
'''
prediction_tokens = normalize_answer(prediction).split()
ground_truth_tokens = normalize_answer(ground_truth).split()
common = Counter(prediction_tokens) & Counter(ground_truth_tokens)
num_same = sum(common.values())
if num_same == 0:
return 0
precision = 1.0 * num_same / len(prediction_tokens)
recall = 1.0 * num_same / len(ground_truth_tokens)
f1_result = (2 * precision * recall) / (precision + recall)
return f1_result | def f1_score(prediction, ground_truth):
'''
Calculate the f1 score.
'''
prediction_tokens = normalize_answer(prediction).split()
ground_truth_tokens = normalize_answer(ground_truth).split()
common = Counter(prediction_tokens) & Counter(ground_truth_tokens)
num_same = sum(common.values())
if num_same == 0:
return 0
precision = 1.0 * num_same / len(prediction_tokens)
recall = 1.0 * num_same / len(ground_truth_tokens)
f1_result = (2 * precision * recall) / (precision + recall)
return f1_result | [
"Calculate",
"the",
"f1",
"score",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/evaluate.py#L63-L76 | [
"def",
"f1_score",
"(",
"prediction",
",",
"ground_truth",
")",
":",
"prediction_tokens",
"=",
"normalize_answer",
"(",
"prediction",
")",
".",
"split",
"(",
")",
"ground_truth_tokens",
"=",
"normalize_answer",
"(",
"ground_truth",
")",
".",
"split",
"(",
")",
... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
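A worked example of the token-overlap F1 that `f1_score` computes (the token lists are chosen for illustration):

```python
from collections import Counter

# As if normalize_answer() had already lowercased and stripped both strings.
prediction_tokens = "the cat sat".split()       # 3 tokens
ground_truth_tokens = "a cat sat down".split()  # 4 tokens

common = Counter(prediction_tokens) & Counter(ground_truth_tokens)
num_same = sum(common.values())                 # 'cat' + 'sat' -> 2

precision = num_same / len(prediction_tokens)   # 2/3
recall = num_same / len(ground_truth_tokens)    # 2/4
f1 = 2 * precision * recall / (precision + recall)
print(round(f1, 4))  # 0.5714
```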
train | _evaluate | Evaluate function. | examples/trials/ga_squad/evaluate.py | def _evaluate(dataset, predictions):
'''
Evaluate function.
'''
f1_result = exact_match = total = 0
count = 0
for article in dataset:
for paragraph in article['paragraphs']:
for qa_pair in paragraph['qas']:
total += 1
if qa_pair['id'] not in predictions:
count += 1
continue
ground_truths = list(map(lambda x: x['text'], qa_pair['answers']))
prediction = predictions[qa_pair['id']]
exact_match += metric_max_over_ground_truths(
exact_match_score, prediction, ground_truths)
f1_result += metric_max_over_ground_truths(
f1_score, prediction, ground_truths)
    print('total', total, 'exact_match', exact_match, 'unanswered_questions', count)
exact_match = 100.0 * exact_match / total
f1_result = 100.0 * f1_result / total
return {'exact_match': exact_match, 'f1': f1_result} | def _evaluate(dataset, predictions):
'''
Evaluate function.
'''
f1_result = exact_match = total = 0
count = 0
for article in dataset:
for paragraph in article['paragraphs']:
for qa_pair in paragraph['qas']:
total += 1
if qa_pair['id'] not in predictions:
count += 1
continue
ground_truths = list(map(lambda x: x['text'], qa_pair['answers']))
prediction = predictions[qa_pair['id']]
exact_match += metric_max_over_ground_truths(
exact_match_score, prediction, ground_truths)
f1_result += metric_max_over_ground_truths(
f1_score, prediction, ground_truths)
    print('total', total, 'exact_match', exact_match, 'unanswered_questions', count)
exact_match = 100.0 * exact_match / total
f1_result = 100.0 * f1_result / total
return {'exact_match': exact_match, 'f1': f1_result} | [
"Evaluate",
"function",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/evaluate.py#L94-L116 | [
"def",
"_evaluate",
"(",
"dataset",
",",
"predictions",
")",
":",
"f1_result",
"=",
"exact_match",
"=",
"total",
"=",
"0",
"count",
"=",
"0",
"for",
"article",
"in",
"dataset",
":",
"for",
"paragraph",
"in",
"article",
"[",
"'paragraphs'",
"]",
":",
"for... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | evaluate | Evaluate. | examples/trials/ga_squad/evaluate.py | def evaluate(data_file, pred_file):
'''
Evaluate.
'''
expected_version = '1.1'
with open(data_file) as dataset_file:
dataset_json = json.load(dataset_file)
if dataset_json['version'] != expected_version:
print('Evaluation expects v-' + expected_version +
', but got dataset with v-' + dataset_json['version'],
file=sys.stderr)
dataset = dataset_json['data']
with open(pred_file) as prediction_file:
predictions = json.load(prediction_file)
# print(json.dumps(evaluate(dataset, predictions)))
result = _evaluate(dataset, predictions)
# print('em:', result['exact_match'], 'f1:', result['f1'])
return result['exact_match'] | def evaluate(data_file, pred_file):
'''
Evaluate.
'''
expected_version = '1.1'
with open(data_file) as dataset_file:
dataset_json = json.load(dataset_file)
if dataset_json['version'] != expected_version:
print('Evaluation expects v-' + expected_version +
', but got dataset with v-' + dataset_json['version'],
file=sys.stderr)
dataset = dataset_json['data']
with open(pred_file) as prediction_file:
predictions = json.load(prediction_file)
# print(json.dumps(evaluate(dataset, predictions)))
result = _evaluate(dataset, predictions)
# print('em:', result['exact_match'], 'f1:', result['f1'])
return result['exact_match'] | [
"Evaluate",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/evaluate.py#L118-L135 | [
"def",
"evaluate",
"(",
"data_file",
",",
"pred_file",
")",
":",
"expected_version",
"=",
"'1.1'",
"with",
"open",
"(",
"data_file",
")",
"as",
"dataset_file",
":",
"dataset_json",
"=",
"json",
".",
"load",
"(",
"dataset_file",
")",
"if",
"dataset_json",
"["... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | evaluate_with_predictions | Evaluate with predictions. | examples/trials/ga_squad/evaluate.py | def evaluate_with_predictions(data_file, predictions):
'''
    Evaluate with predictions.
'''
expected_version = '1.1'
with open(data_file) as dataset_file:
dataset_json = json.load(dataset_file)
if dataset_json['version'] != expected_version:
print('Evaluation expects v-' + expected_version +
', but got dataset with v-' + dataset_json['version'],
file=sys.stderr)
dataset = dataset_json['data']
result = _evaluate(dataset, predictions)
return result['exact_match'] | def evaluate_with_predictions(data_file, predictions):
'''
    Evaluate with predictions.
'''
expected_version = '1.1'
with open(data_file) as dataset_file:
dataset_json = json.load(dataset_file)
if dataset_json['version'] != expected_version:
print('Evaluation expects v-' + expected_version +
', but got dataset with v-' + dataset_json['version'],
file=sys.stderr)
dataset = dataset_json['data']
result = _evaluate(dataset, predictions)
return result['exact_match'] | [
"Evalutate",
"with",
"predictions",
"/"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/evaluate.py#L137-L150 | [
"def",
"evaluate_with_predictions",
"(",
"data_file",
",",
"predictions",
")",
":",
"expected_version",
"=",
"'1.1'",
"with",
"open",
"(",
"data_file",
")",
"as",
"dataset_file",
":",
"dataset_json",
"=",
"json",
".",
"load",
"(",
"dataset_file",
")",
"if",
"d... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | send | Send command to Training Service.
command: CommandType object.
data: string payload. | src/sdk/pynni/nni/protocol.py | def send(command, data):
"""Send command to Training Service.
command: CommandType object.
data: string payload.
"""
global _lock
try:
_lock.acquire()
data = data.encode('utf8')
assert len(data) < 1000000, 'Command too long'
msg = b'%b%06d%b' % (command.value, len(data), data)
logging.getLogger(__name__).debug('Sending command, data: [%s]' % msg)
_out_file.write(msg)
_out_file.flush()
finally:
_lock.release() | def send(command, data):
"""Send command to Training Service.
command: CommandType object.
data: string payload.
"""
global _lock
try:
_lock.acquire()
data = data.encode('utf8')
assert len(data) < 1000000, 'Command too long'
msg = b'%b%06d%b' % (command.value, len(data), data)
logging.getLogger(__name__).debug('Sending command, data: [%s]' % msg)
_out_file.write(msg)
_out_file.flush()
finally:
_lock.release() | [
"Send",
"command",
"to",
"Training",
"Service",
".",
"command",
":",
"CommandType",
"object",
".",
"data",
":",
"string",
"payload",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/protocol.py#L56-L71 | [
"def",
"send",
"(",
"command",
",",
"data",
")",
":",
"global",
"_lock",
"try",
":",
"_lock",
".",
"acquire",
"(",
")",
"data",
"=",
"data",
".",
"encode",
"(",
"'utf8'",
")",
"assert",
"len",
"(",
"data",
")",
"<",
"1000000",
",",
"'Command too long... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | receive | Receive a command from Training Service.
Returns a tuple of command (CommandType) and payload (str) | src/sdk/pynni/nni/protocol.py | def receive():
"""Receive a command from Training Service.
Returns a tuple of command (CommandType) and payload (str)
"""
header = _in_file.read(8)
logging.getLogger(__name__).debug('Received command, header: [%s]' % header)
if header is None or len(header) < 8:
# Pipe EOF encountered
logging.getLogger(__name__).debug('Pipe EOF encountered')
return None, None
length = int(header[2:])
data = _in_file.read(length)
command = CommandType(header[:2])
data = data.decode('utf8')
logging.getLogger(__name__).debug('Received command, data: [%s]' % data)
return command, data | def receive():
"""Receive a command from Training Service.
Returns a tuple of command (CommandType) and payload (str)
"""
header = _in_file.read(8)
logging.getLogger(__name__).debug('Received command, header: [%s]' % header)
if header is None or len(header) < 8:
# Pipe EOF encountered
logging.getLogger(__name__).debug('Pipe EOF encountered')
return None, None
length = int(header[2:])
data = _in_file.read(length)
command = CommandType(header[:2])
data = data.decode('utf8')
logging.getLogger(__name__).debug('Received command, data: [%s]' % data)
return command, data | [
"Receive",
"a",
"command",
"from",
"Training",
"Service",
".",
"Returns",
"a",
"tuple",
"of",
"command",
"(",
"CommandType",
")",
"and",
"payload",
"(",
"str",
")"
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/protocol.py#L74-L89 | [
"def",
"receive",
"(",
")",
":",
"header",
"=",
"_in_file",
".",
"read",
"(",
"8",
")",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
".",
"debug",
"(",
"'Received command, header: [%s]'",
"%",
"header",
")",
"if",
"header",
"is",
"None",
"or",
"len... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | json2space | Change json to search space in hyperopt.
Parameters
----------
in_x : dict/list/str/int/float
The part of json.
name : str
name could be ROOT, TYPE, VALUE or INDEX. | src/sdk/pynni/nni/hyperopt_tuner/hyperopt_tuner.py | def json2space(in_x, name=ROOT):
"""
Change json to search space in hyperopt.
Parameters
----------
in_x : dict/list/str/int/float
The part of json.
name : str
name could be ROOT, TYPE, VALUE or INDEX.
"""
out_y = copy.deepcopy(in_x)
if isinstance(in_x, dict):
if TYPE in in_x.keys():
_type = in_x[TYPE]
name = name + '-' + _type
_value = json2space(in_x[VALUE], name=name)
if _type == 'choice':
out_y = eval('hp.hp.'+_type)(name, _value)
else:
if _type in ['loguniform', 'qloguniform']:
_value[:2] = np.log(_value[:2])
out_y = eval('hp.hp.' + _type)(name, *_value)
else:
out_y = dict()
for key in in_x.keys():
out_y[key] = json2space(in_x[key], name+'[%s]' % str(key))
elif isinstance(in_x, list):
out_y = list()
for i, x_i in enumerate(in_x):
out_y.append(json2space(x_i, name+'[%d]' % i))
else:
        logger.info('in_x is not a dict or a list in json2space function %s', str(in_x))
return out_y | def json2space(in_x, name=ROOT):
"""
Change json to search space in hyperopt.
Parameters
----------
in_x : dict/list/str/int/float
The part of json.
name : str
name could be ROOT, TYPE, VALUE or INDEX.
"""
out_y = copy.deepcopy(in_x)
if isinstance(in_x, dict):
if TYPE in in_x.keys():
_type = in_x[TYPE]
name = name + '-' + _type
_value = json2space(in_x[VALUE], name=name)
if _type == 'choice':
out_y = eval('hp.hp.'+_type)(name, _value)
else:
if _type in ['loguniform', 'qloguniform']:
_value[:2] = np.log(_value[:2])
out_y = eval('hp.hp.' + _type)(name, *_value)
else:
out_y = dict()
for key in in_x.keys():
out_y[key] = json2space(in_x[key], name+'[%s]' % str(key))
elif isinstance(in_x, list):
out_y = list()
for i, x_i in enumerate(in_x):
out_y.append(json2space(x_i, name+'[%d]' % i))
else:
        logger.info('in_x is not a dict or a list in json2space function %s', str(in_x))
return out_y | [
"Change",
"json",
"to",
"search",
"space",
"in",
"hyperopt",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/hyperopt_tuner/hyperopt_tuner.py#L52-L85 | [
"def",
"json2space",
"(",
"in_x",
",",
"name",
"=",
"ROOT",
")",
":",
"out_y",
"=",
"copy",
".",
"deepcopy",
"(",
"in_x",
")",
"if",
"isinstance",
"(",
"in_x",
",",
"dict",
")",
":",
"if",
"TYPE",
"in",
"in_x",
".",
"keys",
"(",
")",
":",
"_type"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | json2parameter | Change json to parameters. | src/sdk/pynni/nni/hyperopt_tuner/hyperopt_tuner.py | def json2parameter(in_x, parameter, name=ROOT):
"""
Change json to parameters.
"""
out_y = copy.deepcopy(in_x)
if isinstance(in_x, dict):
if TYPE in in_x.keys():
_type = in_x[TYPE]
name = name + '-' + _type
if _type == 'choice':
_index = parameter[name]
out_y = {
INDEX: _index,
VALUE: json2parameter(in_x[VALUE][_index], parameter, name=name+'[%d]' % _index)
}
else:
out_y = parameter[name]
else:
out_y = dict()
for key in in_x.keys():
out_y[key] = json2parameter(
in_x[key], parameter, name + '[%s]' % str(key))
elif isinstance(in_x, list):
out_y = list()
for i, x_i in enumerate(in_x):
out_y.append(json2parameter(x_i, parameter, name + '[%d]' % i))
else:
        logger.info('in_x is not a dict or a list in json2parameter function %s', str(in_x))
return out_y | def json2parameter(in_x, parameter, name=ROOT):
"""
Change json to parameters.
"""
out_y = copy.deepcopy(in_x)
if isinstance(in_x, dict):
if TYPE in in_x.keys():
_type = in_x[TYPE]
name = name + '-' + _type
if _type == 'choice':
_index = parameter[name]
out_y = {
INDEX: _index,
VALUE: json2parameter(in_x[VALUE][_index], parameter, name=name+'[%d]' % _index)
}
else:
out_y = parameter[name]
else:
out_y = dict()
for key in in_x.keys():
out_y[key] = json2parameter(
in_x[key], parameter, name + '[%s]' % str(key))
elif isinstance(in_x, list):
out_y = list()
for i, x_i in enumerate(in_x):
out_y.append(json2parameter(x_i, parameter, name + '[%d]' % i))
else:
        logger.info('in_x is not a dict or a list in json2parameter function %s', str(in_x))
return out_y | [
"Change",
"json",
"to",
"parameters",
"."
] | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/hyperopt_tuner/hyperopt_tuner.py#L88-L116 | [
"def",
"json2parameter",
"(",
"in_x",
",",
"parameter",
",",
"name",
"=",
"ROOT",
")",
":",
"out_y",
"=",
"copy",
".",
"deepcopy",
"(",
"in_x",
")",
"if",
"isinstance",
"(",
"in_x",
",",
"dict",
")",
":",
"if",
"TYPE",
"in",
"in_x",
".",
"keys",
"(... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |
train | _add_index | change parameters in NNI format to parameters in hyperopt format (This function also supports nested dicts.).
For example, receive parameters like:
{'dropout_rate': 0.8, 'conv_size': 3, 'hidden_size': 512}
Will change to format in hyperopt, like:
{'dropout_rate': 0.8, 'conv_size': {'_index': 1, '_value': 3}, 'hidden_size': {'_index': 1, '_value': 512}} | src/sdk/pynni/nni/hyperopt_tuner/hyperopt_tuner.py | def _add_index(in_x, parameter):
"""
    change parameters in NNI format to parameters in hyperopt format (This function also supports nested dicts.).
For example, receive parameters like:
{'dropout_rate': 0.8, 'conv_size': 3, 'hidden_size': 512}
Will change to format in hyperopt, like:
{'dropout_rate': 0.8, 'conv_size': {'_index': 1, '_value': 3}, 'hidden_size': {'_index': 1, '_value': 512}}
"""
if TYPE not in in_x: # if at the top level
out_y = dict()
for key, value in parameter.items():
out_y[key] = _add_index(in_x[key], value)
return out_y
elif isinstance(in_x, dict):
value_type = in_x[TYPE]
value_format = in_x[VALUE]
if value_type == "choice":
choice_name = parameter[0] if isinstance(parameter, list) else parameter
for pos, item in enumerate(value_format): # here value_format is a list
if isinstance(item, list): # this format is ["choice_key", format_dict]
choice_key = item[0]
choice_value_format = item[1]
if choice_key == choice_name:
return {INDEX: pos, VALUE: [choice_name, _add_index(choice_value_format, parameter[1])]}
elif choice_name == item:
return {INDEX: pos, VALUE: item}
else:
return parameter | def _add_index(in_x, parameter):
"""
    change parameters in NNI format to parameters in hyperopt format (This function also supports nested dicts.).
For example, receive parameters like:
{'dropout_rate': 0.8, 'conv_size': 3, 'hidden_size': 512}
Will change to format in hyperopt, like:
{'dropout_rate': 0.8, 'conv_size': {'_index': 1, '_value': 3}, 'hidden_size': {'_index': 1, '_value': 512}}
"""
if TYPE not in in_x: # if at the top level
out_y = dict()
for key, value in parameter.items():
out_y[key] = _add_index(in_x[key], value)
return out_y
elif isinstance(in_x, dict):
value_type = in_x[TYPE]
value_format = in_x[VALUE]
if value_type == "choice":
choice_name = parameter[0] if isinstance(parameter, list) else parameter
for pos, item in enumerate(value_format): # here value_format is a list
if isinstance(item, list): # this format is ["choice_key", format_dict]
choice_key = item[0]
choice_value_format = item[1]
if choice_key == choice_name:
return {INDEX: pos, VALUE: [choice_name, _add_index(choice_value_format, parameter[1])]}
elif choice_name == item:
return {INDEX: pos, VALUE: item}
else:
return parameter | [
"change",
"parameters",
"in",
"NNI",
"format",
"to",
"parameters",
"in",
"hyperopt",
"format",
"(",
"This",
"function",
"also",
"support",
"nested",
"dict",
".",
")",
".",
"For",
"example",
"receive",
"parameters",
"like",
":",
"{",
"dropout_rate",
":",
"0",... | Microsoft/nni | python | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/hyperopt_tuner/hyperopt_tuner.py#L142-L169 | [
"def",
"_add_index",
"(",
"in_x",
",",
"parameter",
")",
":",
"if",
"TYPE",
"not",
"in",
"in_x",
":",
"# if at the top level",
"out_y",
"=",
"dict",
"(",
")",
"for",
"key",
",",
"value",
"in",
"parameter",
".",
"items",
"(",
")",
":",
"out_y",
"[",
"... | c7cc8db32da8d2ec77a382a55089f4e17247ce41 |