hexsha
stringlengths 40
40
| size
int64 3
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
972
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
972
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
972
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 3
1.03M
| avg_line_length
float64 1.13
941k
| max_line_length
int64 2
941k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b592d59715e6a8a565150fd2e350af229c86b8f9
| 636,289
|
py
|
Python
|
playwright/async_api/_generated.py
|
elkd/playwright-python
|
417a5b04628c769446665010f9232ec0bfbc7581
|
[
"Apache-2.0"
] | 2
|
2022-02-18T02:16:31.000Z
|
2022-02-26T05:44:25.000Z
|
playwright/async_api/_generated.py
|
jiyulongxu/playwright-python
|
417a5b04628c769446665010f9232ec0bfbc7581
|
[
"Apache-2.0"
] | null | null | null |
playwright/async_api/_generated.py
|
jiyulongxu/playwright-python
|
417a5b04628c769446665010f9232ec0bfbc7581
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
import sys
import typing
if sys.version_info >= (3, 8): # pragma: no cover
from typing import Literal
else: # pragma: no cover
from typing_extensions import Literal
from playwright._impl._accessibility import Accessibility as AccessibilityImpl
from playwright._impl._api_structures import (
Cookie,
FilePayload,
FloatRect,
Geolocation,
HttpCredentials,
NameValue,
PdfMargins,
Position,
ProxySettings,
RemoteAddr,
RequestSizes,
ResourceTiming,
SecurityDetails,
SetCookieParam,
SourceLocation,
StorageState,
ViewportSize,
)
from playwright._impl._api_types import Error
from playwright._impl._assertions import (
APIResponseAssertions as APIResponseAssertionsImpl,
)
from playwright._impl._assertions import LocatorAssertions as LocatorAssertionsImpl
from playwright._impl._assertions import PageAssertions as PageAssertionsImpl
from playwright._impl._async_base import (
AsyncBase,
AsyncContextManager,
AsyncEventContextManager,
mapping,
)
from playwright._impl._browser import Browser as BrowserImpl
from playwright._impl._browser_context import BrowserContext as BrowserContextImpl
from playwright._impl._browser_type import BrowserType as BrowserTypeImpl
from playwright._impl._cdp_session import CDPSession as CDPSessionImpl
from playwright._impl._console_message import ConsoleMessage as ConsoleMessageImpl
from playwright._impl._dialog import Dialog as DialogImpl
from playwright._impl._download import Download as DownloadImpl
from playwright._impl._element_handle import ElementHandle as ElementHandleImpl
from playwright._impl._fetch import APIRequest as APIRequestImpl
from playwright._impl._fetch import APIRequestContext as APIRequestContextImpl
from playwright._impl._fetch import APIResponse as APIResponseImpl
from playwright._impl._file_chooser import FileChooser as FileChooserImpl
from playwright._impl._frame import Frame as FrameImpl
from playwright._impl._input import Keyboard as KeyboardImpl
from playwright._impl._input import Mouse as MouseImpl
from playwright._impl._input import Touchscreen as TouchscreenImpl
from playwright._impl._js_handle import JSHandle as JSHandleImpl
from playwright._impl._locator import FrameLocator as FrameLocatorImpl
from playwright._impl._locator import Locator as LocatorImpl
from playwright._impl._network import Request as RequestImpl
from playwright._impl._network import Response as ResponseImpl
from playwright._impl._network import Route as RouteImpl
from playwright._impl._network import WebSocket as WebSocketImpl
from playwright._impl._page import Page as PageImpl
from playwright._impl._page import Worker as WorkerImpl
from playwright._impl._playwright import Playwright as PlaywrightImpl
from playwright._impl._selectors import Selectors as SelectorsImpl
from playwright._impl._tracing import Tracing as TracingImpl
from playwright._impl._video import Video as VideoImpl
# Alias used throughout the generated API to annotate methods returning None.
NoneType = None.__class__
class Request(AsyncBase):
    """Async public wrapper for a network request.

    Generated facade class: every property and method delegates to the
    underlying implementation object (``self._impl_obj``) and converts the
    result to public API types via the ``mapping`` helpers. Async calls are
    routed through ``self._async`` with an API name used for tracing.
    """

    @property
    def url(self) -> str:
        """Request.url

        URL of the request.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.url)

    @property
    def resource_type(self) -> str:
        """Request.resource_type

        Contains the request's resource type as it was perceived by the rendering engine. ResourceType will be one of the
        following: `document`, `stylesheet`, `image`, `media`, `font`, `script`, `texttrack`, `xhr`, `fetch`, `eventsource`,
        `websocket`, `manifest`, `other`.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.resource_type)

    @property
    def method(self) -> str:
        """Request.method

        Request's method (GET, POST, etc.)

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.method)

    @property
    def post_data(self) -> typing.Optional[str]:
        """Request.post_data

        Request's post body, if any.

        Returns
        -------
        Union[str, NoneType]
        """
        return mapping.from_maybe_impl(self._impl_obj.post_data)

    @property
    def post_data_json(self) -> typing.Optional[typing.Any]:
        """Request.post_data_json

        Returns parsed request's body for `form-urlencoded` and JSON as a fallback if any.

        When the response is `application/x-www-form-urlencoded` then a key/value object of the values will be returned.
        Otherwise it will be parsed as JSON.

        Returns
        -------
        Union[Any, NoneType]
        """
        return mapping.from_maybe_impl(self._impl_obj.post_data_json)

    @property
    def post_data_buffer(self) -> typing.Optional[bytes]:
        """Request.post_data_buffer

        Request's post body in a binary form, if any.

        Returns
        -------
        Union[bytes, NoneType]
        """
        return mapping.from_maybe_impl(self._impl_obj.post_data_buffer)

    @property
    def frame(self) -> "Frame":
        """Request.frame

        Returns the `Frame` that initiated this request.

        Returns
        -------
        Frame
        """
        return mapping.from_impl(self._impl_obj.frame)

    @property
    def redirected_from(self) -> typing.Optional["Request"]:
        """Request.redirected_from

        Request that was redirected by the server to this one, if any.

        When the server responds with a redirect, Playwright creates a new `Request` object. The two requests are connected by
        `redirectedFrom()` and `redirectedTo()` methods. When multiple server redirects has happened, it is possible to
        construct the whole redirect chain by repeatedly calling `redirectedFrom()`.

        For example, if the website `http://example.com` redirects to `https://example.com`:

        ```py
        response = await page.goto(\"http://example.com\")
        print(response.request.redirected_from.url) # \"http://example.com\"
        ```

        If the website `https://google.com` has no redirects:

        ```py
        response = await page.goto(\"https://google.com\")
        print(response.request.redirected_from) # None
        ```

        Returns
        -------
        Union[Request, NoneType]
        """
        return mapping.from_impl_nullable(self._impl_obj.redirected_from)

    @property
    def redirected_to(self) -> typing.Optional["Request"]:
        """Request.redirected_to

        New request issued by the browser if the server responded with redirect.

        This method is the opposite of `request.redirected_from()`:

        ```py
        assert request.redirected_from.redirected_to == request
        ```

        Returns
        -------
        Union[Request, NoneType]
        """
        return mapping.from_impl_nullable(self._impl_obj.redirected_to)

    @property
    def failure(self) -> typing.Optional[str]:
        """Request.failure

        The method returns `null` unless this request has failed, as reported by `requestfailed` event.

        Example of logging of all the failed requests:

        ```py
        page.on(\"requestfailed\", lambda request: print(request.url + \" \" + request.failure))
        ```

        Returns
        -------
        Union[str, NoneType]
        """
        return mapping.from_maybe_impl(self._impl_obj.failure)

    @property
    def timing(self) -> ResourceTiming:
        """Request.timing

        Returns resource timing information for given request. Most of the timing values become available upon the response,
        `responseEnd` becomes available when request finishes. Find more information at
        [Resource Timing API](https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming).

        ```py
        async with page.expect_event(\"requestfinished\") as request_info:
            await page.goto(\"http://example.com\")
        request = await request_info.value
        print(request.timing)
        ```

        Returns
        -------
        {startTime: float, domainLookupStart: float, domainLookupEnd: float, connectStart: float, secureConnectionStart: float, connectEnd: float, requestStart: float, responseStart: float, responseEnd: float}
        """
        return mapping.from_impl(self._impl_obj.timing)

    @property
    def headers(self) -> typing.Dict[str, str]:
        """Request.headers

        **DEPRECATED** Incomplete list of headers as seen by the rendering engine. Use `request.all_headers()` instead.

        Returns
        -------
        Dict[str, str]
        """
        return mapping.from_maybe_impl(self._impl_obj.headers)

    async def sizes(self) -> RequestSizes:
        """Request.sizes

        Returns resource size information for given request.

        Returns
        -------
        {requestBodySize: int, requestHeadersSize: int, responseBodySize: int, responseHeadersSize: int}
        """
        return mapping.from_impl(
            await self._async("request.sizes", self._impl_obj.sizes())
        )

    async def response(self) -> typing.Optional["Response"]:
        """Request.response

        Returns the matching `Response` object, or `null` if the response was not received due to error.

        Returns
        -------
        Union[Response, NoneType]
        """
        return mapping.from_impl_nullable(
            await self._async("request.response", self._impl_obj.response())
        )

    def is_navigation_request(self) -> bool:
        """Request.is_navigation_request

        Whether this request is driving frame's navigation.

        Returns
        -------
        bool
        """
        return mapping.from_maybe_impl(self._impl_obj.is_navigation_request())

    async def all_headers(self) -> typing.Dict[str, str]:
        """Request.all_headers

        An object with all the request HTTP headers associated with this request. The header names are lower-cased.

        Returns
        -------
        Dict[str, str]
        """
        return mapping.from_maybe_impl(
            await self._async("request.all_headers", self._impl_obj.all_headers())
        )

    async def headers_array(self) -> typing.List[NameValue]:
        """Request.headers_array

        An array with all the request HTTP headers associated with this request. Unlike `request.all_headers()`, header
        names are NOT lower-cased. Headers with multiple entries, such as `Set-Cookie`, appear in the array multiple times.

        Returns
        -------
        List[{name: str, value: str}]
        """
        return mapping.from_impl_list(
            await self._async("request.headers_array", self._impl_obj.headers_array())
        )

    async def header_value(self, name: str) -> typing.Optional[str]:
        """Request.header_value

        Returns the value of the header matching the name. The name is case insensitive.

        Parameters
        ----------
        name : str
            Name of the header.

        Returns
        -------
        Union[str, NoneType]
        """
        return mapping.from_maybe_impl(
            await self._async(
                "request.header_value", self._impl_obj.header_value(name=name)
            )
        )


# Register the impl<->wrapper pair so `mapping` can convert between them.
mapping.register(RequestImpl, Request)
class Response(AsyncBase):
    """Async public wrapper for a network response.

    Generated facade class: every property and method delegates to the
    underlying implementation object (``self._impl_obj``) and converts the
    result to public API types via the ``mapping`` helpers.
    """

    @property
    def url(self) -> str:
        """Response.url

        Contains the URL of the response.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.url)

    @property
    def ok(self) -> bool:
        """Response.ok

        Contains a boolean stating whether the response was successful (status in the range 200-299) or not.

        Returns
        -------
        bool
        """
        return mapping.from_maybe_impl(self._impl_obj.ok)

    @property
    def status(self) -> int:
        """Response.status

        Contains the status code of the response (e.g., 200 for a success).

        Returns
        -------
        int
        """
        return mapping.from_maybe_impl(self._impl_obj.status)

    @property
    def status_text(self) -> str:
        """Response.status_text

        Contains the status text of the response (e.g. usually an \"OK\" for a success).

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.status_text)

    @property
    def headers(self) -> typing.Dict[str, str]:
        """Response.headers

        **DEPRECATED** Incomplete list of headers as seen by the rendering engine. Use `response.all_headers()` instead.

        Returns
        -------
        Dict[str, str]
        """
        return mapping.from_maybe_impl(self._impl_obj.headers)

    @property
    def request(self) -> "Request":
        """Response.request

        Returns the matching `Request` object.

        Returns
        -------
        Request
        """
        return mapping.from_impl(self._impl_obj.request)

    @property
    def frame(self) -> "Frame":
        """Response.frame

        Returns the `Frame` that initiated this response.

        Returns
        -------
        Frame
        """
        return mapping.from_impl(self._impl_obj.frame)

    async def all_headers(self) -> typing.Dict[str, str]:
        """Response.all_headers

        An object with all the response HTTP headers associated with this response.

        Returns
        -------
        Dict[str, str]
        """
        return mapping.from_maybe_impl(
            await self._async("response.all_headers", self._impl_obj.all_headers())
        )

    async def headers_array(self) -> typing.List[NameValue]:
        """Response.headers_array

        An array with all the request HTTP headers associated with this response. Unlike `response.all_headers()`, header
        names are NOT lower-cased. Headers with multiple entries, such as `Set-Cookie`, appear in the array multiple times.

        Returns
        -------
        List[{name: str, value: str}]
        """
        return mapping.from_impl_list(
            await self._async("response.headers_array", self._impl_obj.headers_array())
        )

    async def header_value(self, name: str) -> typing.Optional[str]:
        """Response.header_value

        Returns the value of the header matching the name. The name is case insensitive. If multiple headers have the same name
        (except `set-cookie`), they are returned as a list separated by `, `. For `set-cookie`, the `\\n` separator is used. If
        no headers are found, `null` is returned.

        Parameters
        ----------
        name : str
            Name of the header.

        Returns
        -------
        Union[str, NoneType]
        """
        return mapping.from_maybe_impl(
            await self._async(
                "response.header_value", self._impl_obj.header_value(name=name)
            )
        )

    async def header_values(self, name: str) -> typing.List[str]:
        """Response.header_values

        Returns all values of the headers matching the name, for example `set-cookie`. The name is case insensitive.

        Parameters
        ----------
        name : str
            Name of the header.

        Returns
        -------
        List[str]
        """
        return mapping.from_maybe_impl(
            await self._async(
                "response.header_values", self._impl_obj.header_values(name=name)
            )
        )

    async def server_addr(self) -> typing.Optional[RemoteAddr]:
        """Response.server_addr

        Returns the IP address and port of the server.

        Returns
        -------
        Union[{ipAddress: str, port: int}, NoneType]
        """
        return mapping.from_impl_nullable(
            await self._async("response.server_addr", self._impl_obj.server_addr())
        )

    async def security_details(self) -> typing.Optional[SecurityDetails]:
        """Response.security_details

        Returns SSL and other security information.

        Returns
        -------
        Union[{issuer: Union[str, NoneType], protocol: Union[str, NoneType], subjectName: Union[str, NoneType], validFrom: Union[float, NoneType], validTo: Union[float, NoneType]}, NoneType]
        """
        return mapping.from_impl_nullable(
            await self._async(
                "response.security_details", self._impl_obj.security_details()
            )
        )

    async def finished(self) -> NoneType:
        """Response.finished

        Waits for this response to finish, returns always `null`.
        """
        return mapping.from_maybe_impl(
            await self._async("response.finished", self._impl_obj.finished())
        )

    async def body(self) -> bytes:
        """Response.body

        Returns the buffer with response body.

        Returns
        -------
        bytes
        """
        return mapping.from_maybe_impl(
            await self._async("response.body", self._impl_obj.body())
        )

    async def text(self) -> str:
        """Response.text

        Returns the text representation of response body.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(
            await self._async("response.text", self._impl_obj.text())
        )

    async def json(self) -> typing.Any:
        """Response.json

        Returns the JSON representation of response body.

        This method will throw if the response body is not parsable via `JSON.parse`.

        Returns
        -------
        Any
        """
        return mapping.from_maybe_impl(
            await self._async("response.json", self._impl_obj.json())
        )


# Register the impl<->wrapper pair so `mapping` can convert between them.
mapping.register(ResponseImpl, Response)
class Route(AsyncBase):
    """Async public wrapper for an intercepted network route.

    Generated facade class: members delegate to ``self._impl_obj`` and convert
    snake_case keyword arguments to the impl layer's camelCase names (e.g.
    ``error_code`` -> ``errorCode``).
    """

    @property
    def request(self) -> "Request":
        """Route.request

        A request to be routed.

        Returns
        -------
        Request
        """
        return mapping.from_impl(self._impl_obj.request)

    async def abort(self, error_code: str = None) -> NoneType:
        """Route.abort

        Aborts the route's request.

        Parameters
        ----------
        error_code : Union[str, NoneType]
            Optional error code. Defaults to `failed`, could be one of the following:
            - `'aborted'` - An operation was aborted (due to user action)
            - `'accessdenied'` - Permission to access a resource, other than the network, was denied
            - `'addressunreachable'` - The IP address is unreachable. This usually means that there is no route to the specified
              host or network.
            - `'blockedbyclient'` - The client chose to block the request.
            - `'blockedbyresponse'` - The request failed because the response was delivered along with requirements which are not
              met ('X-Frame-Options' and 'Content-Security-Policy' ancestor checks, for instance).
            - `'connectionaborted'` - A connection timed out as a result of not receiving an ACK for data sent.
            - `'connectionclosed'` - A connection was closed (corresponding to a TCP FIN).
            - `'connectionfailed'` - A connection attempt failed.
            - `'connectionrefused'` - A connection attempt was refused.
            - `'connectionreset'` - A connection was reset (corresponding to a TCP RST).
            - `'internetdisconnected'` - The Internet connection has been lost.
            - `'namenotresolved'` - The host name could not be resolved.
            - `'timedout'` - An operation timed out.
            - `'failed'` - A generic failure occurred.
        """
        return mapping.from_maybe_impl(
            await self._async("route.abort", self._impl_obj.abort(errorCode=error_code))
        )

    async def fulfill(
        self,
        *,
        status: int = None,
        headers: typing.Optional[typing.Dict[str, str]] = None,
        body: typing.Union[str, bytes] = None,
        path: typing.Union[str, pathlib.Path] = None,
        content_type: str = None,
        response: "APIResponse" = None
    ) -> NoneType:
        """Route.fulfill

        Fulfills route's request with given response.

        An example of fulfilling all requests with 404 responses:

        ```py
        await page.route(\"**/*\", lambda route: route.fulfill(
            status=404,
            content_type=\"text/plain\",
            body=\"not found!\"))
        ```

        An example of serving static file:

        ```py
        await page.route(\"**/xhr_endpoint\", lambda route: route.fulfill(path=\"mock_data.json\"))
        ```

        Parameters
        ----------
        status : Union[int, NoneType]
            Response status code, defaults to `200`.
        headers : Union[Dict[str, str], NoneType]
            Response headers. Header values will be converted to a string.
        body : Union[bytes, str, NoneType]
            Response body.
        path : Union[pathlib.Path, str, NoneType]
            File path to respond with. The content type will be inferred from file extension. If `path` is a relative path, then it
            is resolved relative to the current working directory.
        content_type : Union[str, NoneType]
            If set, equals to setting `Content-Type` response header.
        response : Union[APIResponse, NoneType]
            `APIResponse` to fulfill route's request with. Individual fields of the response (such as headers) can be overridden
            using fulfill options.
        """
        return mapping.from_maybe_impl(
            await self._async(
                "route.fulfill",
                self._impl_obj.fulfill(
                    status=status,
                    headers=mapping.to_impl(headers),
                    body=body,
                    path=path,
                    contentType=content_type,
                    # Unwrap the public APIResponse to its impl object for the impl layer.
                    response=response._impl_obj if response else None,
                ),
            )
        )

    async def continue_(
        self,
        *,
        url: str = None,
        method: str = None,
        headers: typing.Optional[typing.Dict[str, str]] = None,
        post_data: typing.Union[str, bytes] = None
    ) -> NoneType:
        """Route.continue_

        Continues route's request with optional overrides.

        ```py
        async def handle(route, request):
            # override headers
            headers = {
                **request.headers,
                \"foo\": \"bar\" # set \"foo\" header
                \"origin\": None # remove \"origin\" header
            }
            await route.continue_(headers=headers)
        }
        await page.route(\"**/*\", handle)
        ```

        Parameters
        ----------
        url : Union[str, NoneType]
            If set changes the request URL. New URL must have same protocol as original one.
        method : Union[str, NoneType]
            If set changes the request method (e.g. GET or POST)
        headers : Union[Dict[str, str], NoneType]
            If set changes the request HTTP headers. Header values will be converted to a string.
        post_data : Union[bytes, str, NoneType]
            If set changes the post data of request
        """
        return mapping.from_maybe_impl(
            await self._async(
                "route.continue_",
                self._impl_obj.continue_(
                    url=url,
                    method=method,
                    headers=mapping.to_impl(headers),
                    postData=post_data,
                ),
            )
        )


# Register the impl<->wrapper pair so `mapping` can convert between them.
mapping.register(RouteImpl, Route)
class WebSocket(AsyncBase):
    """Async public wrapper for a page WebSocket connection.

    Generated facade class. The ``@typing.overload`` stubs for ``on``/``once``
    exist only to give type checkers per-event handler signatures; the final
    untyped definitions are the ones executed and simply forward to the
    ``AsyncBase`` event emitter.
    """

    @typing.overload
    def on(
        self,
        event: Literal["close"],
        f: typing.Callable[["WebSocket"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Fired when the websocket closes."""

    @typing.overload
    def on(
        self,
        event: Literal["framereceived"],
        f: typing.Callable[
            ["typing.Dict"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Fired when the websocket receives a frame."""

    @typing.overload
    def on(
        self,
        event: Literal["framesent"],
        f: typing.Callable[
            ["typing.Dict"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Fired when the websocket sends a frame."""

    @typing.overload
    def on(
        self,
        event: Literal["socketerror"],
        f: typing.Callable[["str"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Fired when the websocket has an error."""

    def on(
        self,
        event: str,
        f: typing.Callable[..., typing.Union[typing.Awaitable[None], None]],
    ) -> None:
        # Runtime implementation: delegate to the base event emitter.
        return super().on(event=event, f=f)

    @typing.overload
    def once(
        self,
        event: Literal["close"],
        f: typing.Callable[["WebSocket"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Fired when the websocket closes."""

    @typing.overload
    def once(
        self,
        event: Literal["framereceived"],
        f: typing.Callable[
            ["typing.Dict"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Fired when the websocket receives a frame."""

    @typing.overload
    def once(
        self,
        event: Literal["framesent"],
        f: typing.Callable[
            ["typing.Dict"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Fired when the websocket sends a frame."""

    @typing.overload
    def once(
        self,
        event: Literal["socketerror"],
        f: typing.Callable[["str"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Fired when the websocket has an error."""

    def once(
        self,
        event: str,
        f: typing.Callable[..., typing.Union[typing.Awaitable[None], None]],
    ) -> None:
        # Runtime implementation: delegate to the base event emitter.
        return super().once(event=event, f=f)

    @property
    def url(self) -> str:
        """WebSocket.url

        Contains the URL of the WebSocket.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.url)

    def expect_event(
        self, event: str, predicate: typing.Callable = None, *, timeout: float = None
    ) -> AsyncEventContextManager:
        """WebSocket.expect_event

        Waits for event to fire and passes its value into the predicate function. Returns when the predicate returns truthy
        value. Will throw an error if the webSocket is closed before the event is fired. Returns the event data value.

        Parameters
        ----------
        event : str
            Event name, same one would pass into `webSocket.on(event)`.
        predicate : Union[Callable, NoneType]
            Receives the event data and resolves to truthy value when the waiting should resolve.
        timeout : Union[float, NoneType]
            Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
            value can be changed by using the `browser_context.set_default_timeout()`.

        Returns
        -------
        EventContextManager
        """
        return AsyncEventContextManager(
            self._impl_obj.expect_event(
                event=event, predicate=self._wrap_handler(predicate), timeout=timeout
            ).future
        )

    async def wait_for_event(
        self, event: str, predicate: typing.Callable = None, *, timeout: float = None
    ) -> typing.Any:
        """WebSocket.wait_for_event

        > NOTE: In most cases, you should use `web_socket.expect_event()`.

        Waits for given `event` to fire. If predicate is provided, it passes event's value into the `predicate` function and
        waits for `predicate(event)` to return a truthy value. Will throw an error if the socket is closed before the `event` is
        fired.

        Parameters
        ----------
        event : str
            Event name, same one typically passed into `*.on(event)`.
        predicate : Union[Callable, NoneType]
            Receives the event data and resolves to truthy value when the waiting should resolve.
        timeout : Union[float, NoneType]
            Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
            value can be changed by using the `browser_context.set_default_timeout()`.

        Returns
        -------
        Any
        """
        return mapping.from_maybe_impl(
            await self._async(
                "web_socket.wait_for_event",
                self._impl_obj.wait_for_event(
                    event=event,
                    predicate=self._wrap_handler(predicate),
                    timeout=timeout,
                ),
            )
        )

    def is_closed(self) -> bool:
        """WebSocket.is_closed

        Indicates that the web socket has been closed.

        Returns
        -------
        bool
        """
        return mapping.from_maybe_impl(self._impl_obj.is_closed())


# Register the impl<->wrapper pair so `mapping` can convert between them.
mapping.register(WebSocketImpl, WebSocket)
class Keyboard(AsyncBase):
    """Async public wrapper for virtual-keyboard input.

    Generated facade class: each method awaits the corresponding coroutine on
    ``self._impl_obj`` through ``self._async`` and maps the result back.
    """

    async def down(self, key: str) -> NoneType:
        """Keyboard.down

        Dispatches a `keydown` event.

        `key` can specify the intended [keyboardEvent.key](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key)
        value or a single character to generate the text for. A superset of the `key` values can be found
        [here](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values). Examples of the keys are:

        `F1` - `F12`, `Digit0`- `Digit9`, `KeyA`- `KeyZ`, `Backquote`, `Minus`, `Equal`, `Backslash`, `Backspace`, `Tab`,
        `Delete`, `Escape`, `ArrowDown`, `End`, `Enter`, `Home`, `Insert`, `PageDown`, `PageUp`, `ArrowRight`, `ArrowUp`, etc.

        Following modification shortcuts are also supported: `Shift`, `Control`, `Alt`, `Meta`, `ShiftLeft`.

        Holding down `Shift` will type the text that corresponds to the `key` in the upper case.

        If `key` is a single character, it is case-sensitive, so the values `a` and `A` will generate different respective
        texts.

        If `key` is a modifier key, `Shift`, `Meta`, `Control`, or `Alt`, subsequent key presses will be sent with that modifier
        active. To release the modifier key, use `keyboard.up()`.

        After the key is pressed once, subsequent calls to `keyboard.down()` will have
        [repeat](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/repeat) set to true. To release the key, use
        `keyboard.up()`.

        > NOTE: Modifier keys DO influence `keyboard.down`. Holding down `Shift` will type the text in upper case.

        Parameters
        ----------
        key : str
            Name of the key to press or a character to generate, such as `ArrowLeft` or `a`.
        """
        return mapping.from_maybe_impl(
            await self._async("keyboard.down", self._impl_obj.down(key=key))
        )

    async def up(self, key: str) -> NoneType:
        """Keyboard.up

        Dispatches a `keyup` event.

        Parameters
        ----------
        key : str
            Name of the key to press or a character to generate, such as `ArrowLeft` or `a`.
        """
        return mapping.from_maybe_impl(
            await self._async("keyboard.up", self._impl_obj.up(key=key))
        )

    async def insert_text(self, text: str) -> NoneType:
        """Keyboard.insert_text

        Dispatches only `input` event, does not emit the `keydown`, `keyup` or `keypress` events.

        ```py
        await page.keyboard.insert_text(\"嗨\")
        ```

        > NOTE: Modifier keys DO NOT effect `keyboard.insertText`. Holding down `Shift` will not type the text in upper case.

        Parameters
        ----------
        text : str
            Sets input to the specified text value.
        """
        return mapping.from_maybe_impl(
            await self._async(
                "keyboard.insert_text", self._impl_obj.insert_text(text=text)
            )
        )

    async def type(self, text: str, *, delay: float = None) -> NoneType:
        """Keyboard.type

        Sends a `keydown`, `keypress`/`input`, and `keyup` event for each character in the text.

        To press a special key, like `Control` or `ArrowDown`, use `keyboard.press()`.

        ```py
        await page.keyboard.type(\"Hello\") # types instantly
        await page.keyboard.type(\"World\", delay=100) # types slower, like a user
        ```

        > NOTE: Modifier keys DO NOT effect `keyboard.type`. Holding down `Shift` will not type the text in upper case.
        > NOTE: For characters that are not on a US keyboard, only an `input` event will be sent.

        Parameters
        ----------
        text : str
            A text to type into a focused element.
        delay : Union[float, NoneType]
            Time to wait between key presses in milliseconds. Defaults to 0.
        """
        return mapping.from_maybe_impl(
            await self._async(
                "keyboard.type", self._impl_obj.type(text=text, delay=delay)
            )
        )

    async def press(self, key: str, *, delay: float = None) -> NoneType:
        """Keyboard.press

        `key` can specify the intended [keyboardEvent.key](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key)
        value or a single character to generate the text for. A superset of the `key` values can be found
        [here](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values). Examples of the keys are:

        `F1` - `F12`, `Digit0`- `Digit9`, `KeyA`- `KeyZ`, `Backquote`, `Minus`, `Equal`, `Backslash`, `Backspace`, `Tab`,
        `Delete`, `Escape`, `ArrowDown`, `End`, `Enter`, `Home`, `Insert`, `PageDown`, `PageUp`, `ArrowRight`, `ArrowUp`, etc.

        Following modification shortcuts are also supported: `Shift`, `Control`, `Alt`, `Meta`, `ShiftLeft`.

        Holding down `Shift` will type the text that corresponds to the `key` in the upper case.

        If `key` is a single character, it is case-sensitive, so the values `a` and `A` will generate different respective
        texts.

        Shortcuts such as `key: \"Control+o\"` or `key: \"Control+Shift+T\"` are supported as well. When specified with the
        modifier, modifier is pressed and being held while the subsequent key is being pressed.

        ```py
        page = await browser.new_page()
        await page.goto(\"https://keycode.info\")
        await page.keyboard.press(\"a\")
        await page.screenshot(path=\"a.png\")
        await page.keyboard.press(\"ArrowLeft\")
        await page.screenshot(path=\"arrow_left.png\")
        await page.keyboard.press(\"Shift+O\")
        await page.screenshot(path=\"o.png\")
        await browser.close()
        ```

        Shortcut for `keyboard.down()` and `keyboard.up()`.

        Parameters
        ----------
        key : str
            Name of the key to press or a character to generate, such as `ArrowLeft` or `a`.
        delay : Union[float, NoneType]
            Time to wait between `keydown` and `keyup` in milliseconds. Defaults to 0.
        """
        return mapping.from_maybe_impl(
            await self._async(
                "keyboard.press", self._impl_obj.press(key=key, delay=delay)
            )
        )


# Register the impl<->wrapper pair so `mapping` can convert between them.
mapping.register(KeyboardImpl, Keyboard)
class Mouse(AsyncBase):
    async def move(self, x: float, y: float, *, steps: int = None) -> NoneType:
        """Mouse.move

        Dispatches a `mousemove` event at the given coordinates.

        Parameters
        ----------
        x : float
        y : float
        steps : Union[int, NoneType]
            Number of intermediate `mousemove` events to send. Defaults to 1.
        """
        op = self._impl_obj.move(x=x, y=y, steps=steps)
        return mapping.from_maybe_impl(await self._async("mouse.move", op))

    async def down(
        self,
        *,
        button: Literal["left", "middle", "right"] = None,
        click_count: int = None
    ) -> NoneType:
        """Mouse.down

        Dispatches a `mousedown` event.

        Parameters
        ----------
        button : Union["left", "middle", "right", NoneType]
            Mouse button to press. Defaults to `left`.
        click_count : Union[int, NoneType]
            See [UIEvent.detail]. Defaults to 1.
        """
        op = self._impl_obj.down(button=button, clickCount=click_count)
        return mapping.from_maybe_impl(await self._async("mouse.down", op))

    async def up(
        self,
        *,
        button: Literal["left", "middle", "right"] = None,
        click_count: int = None
    ) -> NoneType:
        """Mouse.up

        Dispatches a `mouseup` event.

        Parameters
        ----------
        button : Union["left", "middle", "right", NoneType]
            Mouse button to release. Defaults to `left`.
        click_count : Union[int, NoneType]
            See [UIEvent.detail]. Defaults to 1.
        """
        op = self._impl_obj.up(button=button, clickCount=click_count)
        return mapping.from_maybe_impl(await self._async("mouse.up", op))

    async def click(
        self,
        x: float,
        y: float,
        *,
        delay: float = None,
        button: Literal["left", "middle", "right"] = None,
        click_count: int = None
    ) -> NoneType:
        """Mouse.click

        Moves to (`x`, `y`) and clicks there: shortcut for `mouse.move()`,
        `mouse.down()` and `mouse.up()`.

        Parameters
        ----------
        x : float
        y : float
        delay : Union[float, NoneType]
            Milliseconds to wait between `mousedown` and `mouseup`. Defaults to 0.
        button : Union["left", "middle", "right", NoneType]
            Defaults to `left`.
        click_count : Union[int, NoneType]
            See [UIEvent.detail]. Defaults to 1.
        """
        op = self._impl_obj.click(
            x=x, y=y, delay=delay, button=button, clickCount=click_count
        )
        return mapping.from_maybe_impl(await self._async("mouse.click", op))

    async def dblclick(
        self,
        x: float,
        y: float,
        *,
        delay: float = None,
        button: Literal["left", "middle", "right"] = None
    ) -> NoneType:
        """Mouse.dblclick

        Double-clicks at (`x`, `y`): shortcut for `mouse.move()` followed by
        two `mouse.down()` / `mouse.up()` pairs.

        Parameters
        ----------
        x : float
        y : float
        delay : Union[float, NoneType]
            Milliseconds to wait between `mousedown` and `mouseup`. Defaults to 0.
        button : Union["left", "middle", "right", NoneType]
            Defaults to `left`.
        """
        op = self._impl_obj.dblclick(x=x, y=y, delay=delay, button=button)
        return mapping.from_maybe_impl(await self._async("mouse.dblclick", op))

    async def wheel(self, delta_x: float, delta_y: float) -> NoneType:
        """Mouse.wheel

        Dispatches a `wheel` event.

        > NOTE: Wheel events may cause scrolling if they are not handled; this
        method does not wait for any resulting scrolling to finish.

        Parameters
        ----------
        delta_x : float
            Pixels to scroll horizontally.
        delta_y : float
            Pixels to scroll vertically.
        """
        op = self._impl_obj.wheel(deltaX=delta_x, deltaY=delta_y)
        return mapping.from_maybe_impl(await self._async("mouse.wheel", op))
# Register the async wrapper so impl-side Mouse objects surface as this API class.
mapping.register(MouseImpl, Mouse)
class Touchscreen(AsyncBase):
    async def tap(self, x: float, y: float) -> NoneType:
        """Touchscreen.tap

        Dispatches a `touchstart` and `touchend` event with a single touch at
        position (`x`, `y`).

        Parameters
        ----------
        x : float
        y : float
        """
        op = self._impl_obj.tap(x=x, y=y)
        return mapping.from_maybe_impl(await self._async("touchscreen.tap", op))
# Register the async wrapper so impl-side Touchscreen objects surface as this API class.
mapping.register(TouchscreenImpl, Touchscreen)
class JSHandle(AsyncBase):
    async def evaluate(self, expression: str, arg: typing.Any = None) -> typing.Any:
        """JSHandle.evaluate

        Evaluates `expression` in the page, passing this handle as the first
        argument, and returns the result. If the expression returns a
        [Promise], it is awaited and its resolved value returned.

        Examples:

        ```py
        tweet_handle = await page.query_selector(\".tweet .retweets\")
        assert await tweet_handle.evaluate(\"node => node.innerText\") == \"10 retweets\"
        ```

        Parameters
        ----------
        expression : str
            JavaScript expression to be evaluated in the browser context. A
            function declaration is invoked; anything else is evaluated as an
            expression.
        arg : Union[Any, NoneType]
            Optional argument to pass to `expression`.

        Returns
        -------
        Any
        """
        op = self._impl_obj.evaluate(expression=expression, arg=mapping.to_impl(arg))
        return mapping.from_maybe_impl(await self._async("js_handle.evaluate", op))

    async def evaluate_handle(
        self, expression: str, arg: typing.Any = None
    ) -> "JSHandle":
        """JSHandle.evaluate_handle

        Like `evaluate()`, but returns the result wrapped in a `JSHandle`
        instead of a plain value. If `expression` returns a [Promise], it is
        awaited first. See `page.evaluate_handle()` for details.

        Parameters
        ----------
        expression : str
            JavaScript expression to be evaluated in the browser context. A
            function declaration is invoked; anything else is evaluated as an
            expression.
        arg : Union[Any, NoneType]
            Optional argument to pass to `expression`.

        Returns
        -------
        JSHandle
        """
        op = self._impl_obj.evaluate_handle(
            expression=expression, arg=mapping.to_impl(arg)
        )
        return mapping.from_impl(await self._async("js_handle.evaluate_handle", op))

    async def get_property(self, property_name: str) -> "JSHandle":
        """JSHandle.get_property

        Fetches a single property from the referenced object.

        Parameters
        ----------
        property_name : str
            Name of the property to get.

        Returns
        -------
        JSHandle
        """
        op = self._impl_obj.get_property(propertyName=property_name)
        return mapping.from_impl(await self._async("js_handle.get_property", op))

    async def get_properties(self) -> typing.Dict[str, "JSHandle"]:
        """JSHandle.get_properties

        Returns a map of the object's **own property names** to `JSHandle`
        instances for the property values.

        ```py
        handle = await page.evaluate_handle(\"{window, document}\")
        properties = await handle.get_properties()
        window_handle = properties.get(\"window\")
        document_handle = properties.get(\"document\")
        await handle.dispose()
        ```

        Returns
        -------
        Dict[str, JSHandle]
        """
        result = await self._async(
            "js_handle.get_properties", self._impl_obj.get_properties()
        )
        return mapping.from_impl_dict(result)

    def as_element(self) -> typing.Optional["ElementHandle"]:
        """JSHandle.as_element

        Returns the handle itself if it is an `ElementHandle`, otherwise `null`.

        Returns
        -------
        Union[ElementHandle, NoneType]
        """
        return mapping.from_impl_nullable(self._impl_obj.as_element())

    async def dispose(self) -> NoneType:
        """JSHandle.dispose

        Stops referencing the underlying object so it can be garbage collected.
        """
        result = await self._async("js_handle.dispose", self._impl_obj.dispose())
        return mapping.from_maybe_impl(result)

    async def json_value(self) -> typing.Any:
        """JSHandle.json_value

        Returns a JSON representation of the object. A `toJSON` function on the
        object **will not be called**.

        > NOTE: Returns an empty JSON object if the referenced object is not
        stringifiable, and throws if the object has circular references.

        Returns
        -------
        Any
        """
        result = await self._async("js_handle.json_value", self._impl_obj.json_value())
        return mapping.from_maybe_impl(result)
# Register the async wrapper so impl-side JSHandle objects surface as this API class.
mapping.register(JSHandleImpl, JSHandle)
class ElementHandle(JSHandle):
def as_element(self) -> typing.Optional["ElementHandle"]:
"""ElementHandle.as_element
Returns either `null` or the object handle itself, if the object handle is an instance of `ElementHandle`.
Returns
-------
Union[ElementHandle, NoneType]
"""
return mapping.from_impl_nullable(self._impl_obj.as_element())
async def owner_frame(self) -> typing.Optional["Frame"]:
"""ElementHandle.owner_frame
Returns the frame containing the given element.
Returns
-------
Union[Frame, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"element_handle.owner_frame", self._impl_obj.owner_frame()
)
)
async def content_frame(self) -> typing.Optional["Frame"]:
"""ElementHandle.content_frame
Returns the content frame for element handles referencing iframe nodes, or `null` otherwise
Returns
-------
Union[Frame, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"element_handle.content_frame", self._impl_obj.content_frame()
)
)
async def get_attribute(self, name: str) -> typing.Optional[str]:
"""ElementHandle.get_attribute
Returns element attribute value.
Parameters
----------
name : str
Attribute name to get the value for.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.get_attribute", self._impl_obj.get_attribute(name=name)
)
)
async def text_content(self) -> typing.Optional[str]:
"""ElementHandle.text_content
Returns the `node.textContent`.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.text_content", self._impl_obj.text_content()
)
)
async def inner_text(self) -> str:
"""ElementHandle.inner_text
Returns the `element.innerText`.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async("element_handle.inner_text", self._impl_obj.inner_text())
)
async def inner_html(self) -> str:
"""ElementHandle.inner_html
Returns the `element.innerHTML`.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async("element_handle.inner_html", self._impl_obj.inner_html())
)
async def is_checked(self) -> bool:
"""ElementHandle.is_checked
Returns whether the element is checked. Throws if the element is not a checkbox or radio input.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async("element_handle.is_checked", self._impl_obj.is_checked())
)
async def is_disabled(self) -> bool:
"""ElementHandle.is_disabled
Returns whether the element is disabled, the opposite of [enabled](./actionability.md#enabled).
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.is_disabled", self._impl_obj.is_disabled()
)
)
async def is_editable(self) -> bool:
"""ElementHandle.is_editable
Returns whether the element is [editable](./actionability.md#editable).
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.is_editable", self._impl_obj.is_editable()
)
)
async def is_enabled(self) -> bool:
"""ElementHandle.is_enabled
Returns whether the element is [enabled](./actionability.md#enabled).
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async("element_handle.is_enabled", self._impl_obj.is_enabled())
)
async def is_hidden(self) -> bool:
"""ElementHandle.is_hidden
Returns whether the element is hidden, the opposite of [visible](./actionability.md#visible).
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async("element_handle.is_hidden", self._impl_obj.is_hidden())
)
async def is_visible(self) -> bool:
"""ElementHandle.is_visible
Returns whether the element is [visible](./actionability.md#visible).
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async("element_handle.is_visible", self._impl_obj.is_visible())
)
async def dispatch_event(
self, type: str, event_init: typing.Dict = None
) -> NoneType:
"""ElementHandle.dispatch_event
The snippet below dispatches the `click` event on the element. Regardless of the visibility state of the element,
`click` is dispatched. This is equivalent to calling
[element.click()](https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/click).
```py
await element_handle.dispatch_event(\"click\")
```
Under the hood, it creates an instance of an event based on the given `type`, initializes it with `eventInit` properties
and dispatches it on the element. Events are `composed`, `cancelable` and bubble by default.
Since `eventInit` is event-specific, please refer to the events documentation for the lists of initial properties:
- [DragEvent](https://developer.mozilla.org/en-US/docs/Web/API/DragEvent/DragEvent)
- [FocusEvent](https://developer.mozilla.org/en-US/docs/Web/API/FocusEvent/FocusEvent)
- [KeyboardEvent](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/KeyboardEvent)
- [MouseEvent](https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/MouseEvent)
- [PointerEvent](https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/PointerEvent)
- [TouchEvent](https://developer.mozilla.org/en-US/docs/Web/API/TouchEvent/TouchEvent)
- [Event](https://developer.mozilla.org/en-US/docs/Web/API/Event/Event)
You can also specify `JSHandle` as the property value if you want live objects to be passed into the event:
```py
# note you can only create data_transfer in chromium and firefox
data_transfer = await page.evaluate_handle(\"new DataTransfer()\")
await element_handle.dispatch_event(\"#source\", \"dragstart\", {\"dataTransfer\": data_transfer})
```
Parameters
----------
type : str
DOM event type: `"click"`, `"dragstart"`, etc.
event_init : Union[Dict, NoneType]
Optional event-specific initialization properties.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.dispatch_event",
self._impl_obj.dispatch_event(
type=type, eventInit=mapping.to_impl(event_init)
),
)
)
async def scroll_into_view_if_needed(self, *, timeout: float = None) -> NoneType:
"""ElementHandle.scroll_into_view_if_needed
This method waits for [actionability](./actionability.md) checks, then tries to scroll element into view, unless it is
completely visible as defined by
[IntersectionObserver](https://developer.mozilla.org/en-US/docs/Web/API/Intersection_Observer_API)'s `ratio`.
Throws when `elementHandle` does not point to an element
[connected](https://developer.mozilla.org/en-US/docs/Web/API/Node/isConnected) to a Document or a ShadowRoot.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.scroll_into_view_if_needed",
self._impl_obj.scroll_into_view_if_needed(timeout=timeout),
)
)
async def hover(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
trial: bool = None
) -> NoneType:
"""ElementHandle.hover
This method hovers over the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to hover over the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.hover",
self._impl_obj.hover(
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
trial=trial,
),
)
)
async def click(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
click_count: int = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""ElementHandle.click
This method clicks the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
click_count : Union[int, NoneType]
defaults to 1. See [UIEvent.detail].
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.click",
self._impl_obj.click(
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
clickCount=click_count,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def dblclick(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""ElementHandle.dblclick
This method double clicks the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to double click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set. Note that if the
first click of the `dblclick()` triggers a navigation event, this method will throw.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `elementHandle.dblclick()` dispatches two `click` events and a single `dblclick` event.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.dblclick",
self._impl_obj.dblclick(
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def select_option(
self,
value: typing.Union[str, typing.List[str]] = None,
*,
index: typing.Union[int, typing.List[int]] = None,
label: typing.Union[str, typing.List[str]] = None,
element: typing.Union["ElementHandle", typing.List["ElementHandle"]] = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None
) -> typing.List[str]:
"""ElementHandle.select_option
This method waits for [actionability](./actionability.md) checks, waits until all specified options are present in the
`<select>` element and selects these options.
If the target element is not a `<select>` element, this method throws an error. However, if the element is inside the
`<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be used instead.
Returns the array of option values that have been successfully selected.
Triggers a `change` and `input` event once all the provided options have been selected.
```py
# single selection matching the value
await handle.select_option(\"blue\")
# single selection matching the label
await handle.select_option(label=\"blue\")
# multiple selection
await handle.select_option(value=[\"red\", \"green\", \"blue\"])
```
Parameters
----------
value : Union[List[str], str, NoneType]
Options to select by value. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
index : Union[List[int], int, NoneType]
Options to select by index. Optional.
label : Union[List[str], str, NoneType]
Options to select by label. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
element : Union[ElementHandle, List[ElementHandle], NoneType]
Option elements to select. Optional.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
Returns
-------
List[str]
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.select_option",
self._impl_obj.select_option(
value=mapping.to_impl(value),
index=mapping.to_impl(index),
label=mapping.to_impl(label),
element=mapping.to_impl(element),
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
),
)
)
async def tap(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""ElementHandle.tap
This method taps the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.touchscreen` to tap the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `elementHandle.tap()` requires that the `hasTouch` option of the browser context be set to true.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.tap",
self._impl_obj.tap(
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def fill(
self,
value: str,
*,
timeout: float = None,
no_wait_after: bool = None,
force: bool = None
) -> NoneType:
"""ElementHandle.fill
This method waits for [actionability](./actionability.md) checks, focuses the element, fills it and triggers an `input`
event after filling. Note that you can pass an empty string to clear the input field.
If the target element is not an `<input>`, `<textarea>` or `[contenteditable]` element, this method throws an error.
However, if the element is inside the `<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be filled
instead.
To send fine-grained keyboard events, use `element_handle.type()`.
Parameters
----------
value : str
Value to set for the `<input>`, `<textarea>` or `[contenteditable]` element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.fill",
self._impl_obj.fill(
value=value, timeout=timeout, noWaitAfter=no_wait_after, force=force
),
)
)
async def select_text(
self, *, force: bool = None, timeout: float = None
) -> NoneType:
"""ElementHandle.select_text
This method waits for [actionability](./actionability.md) checks, then focuses the element and selects all its text
content.
Parameters
----------
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.select_text",
self._impl_obj.select_text(force=force, timeout=timeout),
)
)
async def input_value(self, *, timeout: float = None) -> str:
"""ElementHandle.input_value
Returns `input.value` for `<input>` or `<textarea>` or `<select>` element. Throws for non-input elements.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.input_value",
self._impl_obj.input_value(timeout=timeout),
)
)
async def set_input_files(
self,
files: typing.Union[
str,
pathlib.Path,
FilePayload,
typing.List[typing.Union[str, pathlib.Path]],
typing.List[FilePayload],
],
*,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""ElementHandle.set_input_files
This method expects `elementHandle` to point to an
[input element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input).
Sets the value of the file input to these file paths or files. If some of the `filePaths` are relative paths, then they
are resolved relative to the the current working directory. For empty array, clears the selected files.
Parameters
----------
files : Union[List[Union[pathlib.Path, str]], List[{name: str, mimeType: str, buffer: bytes}], pathlib.Path, str, {name: str, mimeType: str, buffer: bytes}]
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.set_input_files",
self._impl_obj.set_input_files(
files=mapping.to_impl(files),
timeout=timeout,
noWaitAfter=no_wait_after,
),
)
)
async def focus(self) -> NoneType:
"""ElementHandle.focus
Calls [focus](https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/focus) on the element.
"""
return mapping.from_maybe_impl(
await self._async("element_handle.focus", self._impl_obj.focus())
)
async def type(
self,
text: str,
*,
delay: float = None,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""ElementHandle.type
Focuses the element, and then sends a `keydown`, `keypress`/`input`, and `keyup` event for each character in the text.
To press a special key, like `Control` or `ArrowDown`, use `element_handle.press()`.
```py
await element_handle.type(\"hello\") # types instantly
await element_handle.type(\"world\", delay=100) # types slower, like a user
```
An example of typing into a text field and then submitting the form:
```py
element_handle = await page.query_selector(\"input\")
await element_handle.type(\"some text\")
await element_handle.press(\"Enter\")
```
Parameters
----------
text : str
A text to type into a focused element.
delay : Union[float, NoneType]
Time to wait between key presses in milliseconds. Defaults to 0.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.type",
self._impl_obj.type(
text=text, delay=delay, timeout=timeout, noWaitAfter=no_wait_after
),
)
)
async def press(
self,
key: str,
*,
delay: float = None,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""ElementHandle.press
Focuses the element, and then uses `keyboard.down()` and `keyboard.up()`.
`key` can specify the intended [keyboardEvent.key](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key)
value or a single character to generate the text for. A superset of the `key` values can be found
[here](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values). Examples of the keys are:
`F1` - `F12`, `Digit0`- `Digit9`, `KeyA`- `KeyZ`, `Backquote`, `Minus`, `Equal`, `Backslash`, `Backspace`, `Tab`,
`Delete`, `Escape`, `ArrowDown`, `End`, `Enter`, `Home`, `Insert`, `PageDown`, `PageUp`, `ArrowRight`, `ArrowUp`, etc.
Following modification shortcuts are also supported: `Shift`, `Control`, `Alt`, `Meta`, `ShiftLeft`.
Holding down `Shift` will type the text that corresponds to the `key` in the upper case.
If `key` is a single character, it is case-sensitive, so the values `a` and `A` will generate different respective
texts.
Shortcuts such as `key: \"Control+o\"` or `key: \"Control+Shift+T\"` are supported as well. When specified with the
modifier, modifier is pressed and being held while the subsequent key is being pressed.
Parameters
----------
key : str
Name of the key to press or a character to generate, such as `ArrowLeft` or `a`.
delay : Union[float, NoneType]
Time to wait between `keydown` and `keyup` in milliseconds. Defaults to 0.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.press",
self._impl_obj.press(
key=key, delay=delay, timeout=timeout, noWaitAfter=no_wait_after
),
)
)
async def set_checked(
self,
checked: bool,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""ElementHandle.set_checked
This method checks or unchecks an element by performing the following steps:
1. Ensure that element is a checkbox or a radio input. If not, this method throws.
1. If the element already has the right checked state, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked or unchecked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
checked : bool
Whether to check or uncheck the checkbox.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.set_checked",
self._impl_obj.set_checked(
checked=checked,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def check(
self,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""ElementHandle.check
This method checks the element by performing the following steps:
1. Ensure that element is a checkbox or a radio input. If not, this method throws. If the element is already checked,
this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked. If not, this method throws.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.check",
self._impl_obj.check(
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def uncheck(
self,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""ElementHandle.uncheck
This method checks the element by performing the following steps:
1. Ensure that element is a checkbox or a radio input. If not, this method throws. If the element is already
unchecked, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now unchecked. If not, this method throws.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.uncheck",
self._impl_obj.uncheck(
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def bounding_box(self) -> typing.Optional[FloatRect]:
"""ElementHandle.bounding_box
This method returns the bounding box of the element, or `null` if the element is not visible. The bounding box is
calculated relative to the main frame viewport - which is usually the same as the browser window.
Scrolling affects the returned bonding box, similarly to
[Element.getBoundingClientRect](https://developer.mozilla.org/en-US/docs/Web/API/Element/getBoundingClientRect). That
means `x` and/or `y` may be negative.
Elements from child frames return the bounding box relative to the main frame, unlike the
[Element.getBoundingClientRect](https://developer.mozilla.org/en-US/docs/Web/API/Element/getBoundingClientRect).
Assuming the page is static, it is safe to use bounding box coordinates to perform input. For example, the following
snippet should click the center of the element.
```py
box = await element_handle.bounding_box()
await page.mouse.click(box[\"x\"] + box[\"width\"] / 2, box[\"y\"] + box[\"height\"] / 2)
```
Returns
-------
Union[{x: float, y: float, width: float, height: float}, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"element_handle.bounding_box", self._impl_obj.bounding_box()
)
)
async def screenshot(
self,
*,
timeout: float = None,
type: Literal["jpeg", "png"] = None,
path: typing.Union[str, pathlib.Path] = None,
quality: int = None,
omit_background: bool = None,
animations: Literal["disabled"] = None,
mask: typing.List["Locator"] = None
) -> bytes:
"""ElementHandle.screenshot
Returns the buffer with the captured screenshot.
This method waits for the [actionability](./actionability.md) checks, then scrolls element into view before taking a
screenshot. If the element is detached from DOM, the method throws an error.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
type : Union["jpeg", "png", NoneType]
Specify screenshot type, defaults to `png`.
path : Union[pathlib.Path, str, NoneType]
The file path to save the image to. The screenshot type will be inferred from file extension. If `path` is a relative
path, then it is resolved relative to the current working directory. If no path is provided, the image won't be saved to
the disk.
quality : Union[int, NoneType]
The quality of the image, between 0-100. Not applicable to `png` images.
omit_background : Union[bool, NoneType]
Hides default white background and allows capturing screenshots with transparency. Not applicable to `jpeg` images.
Defaults to `false`.
animations : Union["disabled", NoneType]
When set to `"disabled"`, stops CSS animations, CSS transitions and Web Animations. Animations get different treatment
depending on their duration:
- finite animations are fast-forwarded to completion, so they'll fire `transitionend` event.
- infinite animations are canceled to initial state, and then played over after the screenshot.
mask : Union[List[Locator], NoneType]
Specify locators that should be masked when the screenshot is taken. Masked elements will be overlayed with a pink box
`#FF00FF` that completely covers its bounding box.
Returns
-------
bytes
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.screenshot",
self._impl_obj.screenshot(
timeout=timeout,
type=type,
path=path,
quality=quality,
omitBackground=omit_background,
animations=animations,
mask=mapping.to_impl(mask),
),
)
)
async def query_selector(self, selector: str) -> typing.Optional["ElementHandle"]:
"""ElementHandle.query_selector
The method finds an element matching the specified selector in the `ElementHandle`'s subtree. See
[Working with selectors](./selectors.md) for more details. If no elements match the selector, returns `null`.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
Returns
-------
Union[ElementHandle, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"element_handle.query_selector",
self._impl_obj.query_selector(selector=selector),
)
)
async def query_selector_all(self, selector: str) -> typing.List["ElementHandle"]:
"""ElementHandle.query_selector_all
The method finds all elements matching the specified selector in the `ElementHandle`s subtree. See
[Working with selectors](./selectors.md) for more details. If no elements match the selector, returns empty array.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
Returns
-------
List[ElementHandle]
"""
return mapping.from_impl_list(
await self._async(
"element_handle.query_selector_all",
self._impl_obj.query_selector_all(selector=selector),
)
)
    async def eval_on_selector(
        self, selector: str, expression: str, arg: typing.Any = None
    ) -> typing.Any:
        """ElementHandle.eval_on_selector

        Returns the return value of `expression`.

        The method finds an element matching the specified selector in the `ElementHandle`s subtree and passes it as a first
        argument to `expression`. See [Working with selectors](./selectors.md) for more details. If no elements match the
        selector, the method throws an error.

        If `expression` returns a [Promise], then `element_handle.eval_on_selector()` would wait for the promise to resolve
        and return its value.

        Examples:

        ```py
        tweet_handle = await page.query_selector(\".tweet\")
        assert await tweet_handle.eval_on_selector(\".like\", \"node => node.innerText\") == \"100\"
        assert await tweet_handle.eval_on_selector(\".retweets\", \"node => node.innerText\") == \"10\"
        ```

        Parameters
        ----------
        selector : str
            A selector to query for. See [working with selectors](./selectors.md) for more details.
        expression : str
            JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
            as a function. Otherwise, evaluated as an expression.
        arg : Union[Any, NoneType]
            Optional argument to pass to `expression`.

        Returns
        -------
        Any
        """
        return mapping.from_maybe_impl(
            await self._async(
                "element_handle.eval_on_selector",
                self._impl_obj.eval_on_selector(
                    selector=selector, expression=expression, arg=mapping.to_impl(arg)
                ),
            )
        )
async def eval_on_selector_all(
self, selector: str, expression: str, arg: typing.Any = None
) -> typing.Any:
"""ElementHandle.eval_on_selector_all
Returns the return value of `expression`.
The method finds all elements matching the specified selector in the `ElementHandle`'s subtree and passes an array of
matched elements as a first argument to `expression`. See [Working with selectors](./selectors.md) for more details.
If `expression` returns a [Promise], then `element_handle.eval_on_selector_all()` would wait for the promise to
resolve and return its value.
Examples:
```html
<div class=\"feed\">
<div class=\"tweet\">Hello!</div>
<div class=\"tweet\">Hi!</div>
</div>
```
```py
feed_handle = await page.query_selector(\".feed\")
assert await feed_handle.eval_on_selector_all(\".tweet\", \"nodes => nodes.map(n => n.innerText)\") == [\"hello!\", \"hi!\"]
```
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.eval_on_selector_all",
self._impl_obj.eval_on_selector_all(
selector=selector, expression=expression, arg=mapping.to_impl(arg)
),
)
)
async def wait_for_element_state(
self,
state: Literal[
"disabled", "editable", "enabled", "hidden", "stable", "visible"
],
*,
timeout: float = None
) -> NoneType:
"""ElementHandle.wait_for_element_state
Returns when the element satisfies the `state`.
Depending on the `state` parameter, this method waits for one of the [actionability](./actionability.md) checks to pass.
This method throws when the element is detached while waiting, unless waiting for the `\"hidden\"` state.
- `\"visible\"` Wait until the element is [visible](./actionability.md#visible).
- `\"hidden\"` Wait until the element is [not visible](./actionability.md#visible) or
[not attached](./actionability.md#attached). Note that waiting for hidden does not throw when the element detaches.
- `\"stable\"` Wait until the element is both [visible](./actionability.md#visible) and
[stable](./actionability.md#stable).
- `\"enabled\"` Wait until the element is [enabled](./actionability.md#enabled).
- `\"disabled\"` Wait until the element is [not enabled](./actionability.md#enabled).
- `\"editable\"` Wait until the element is [editable](./actionability.md#editable).
If the element does not satisfy the condition for the `timeout` milliseconds, this method will throw.
Parameters
----------
state : Union["disabled", "editable", "enabled", "hidden", "stable", "visible"]
A state to wait for, see below for more details.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"element_handle.wait_for_element_state",
self._impl_obj.wait_for_element_state(state=state, timeout=timeout),
)
)
async def wait_for_selector(
self,
selector: str,
*,
state: Literal["attached", "detached", "hidden", "visible"] = None,
timeout: float = None,
strict: bool = None
) -> typing.Optional["ElementHandle"]:
"""ElementHandle.wait_for_selector
Returns element specified by selector when it satisfies `state` option. Returns `null` if waiting for `hidden` or
`detached`.
Wait for the `selector` relative to the element handle to satisfy `state` option (either appear/disappear from dom, or
become visible/hidden). If at the moment of calling the method `selector` already satisfies the condition, the method
will return immediately. If the selector doesn't satisfy the condition for the `timeout` milliseconds, the function will
throw.
```py
await page.set_content(\"<div><span></span></div>\")
div = await page.query_selector(\"div\")
# waiting for the \"span\" selector relative to the div.
span = await div.wait_for_selector(\"span\", state=\"attached\")
```
> NOTE: This method does not work across navigations, use `page.wait_for_selector()` instead.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
state : Union["attached", "detached", "hidden", "visible", NoneType]
Defaults to `'visible'`. Can be either:
- `'attached'` - wait for element to be present in DOM.
- `'detached'` - wait for element to not be present in DOM.
- `'visible'` - wait for element to have non-empty bounding box and no `visibility:hidden`. Note that element without
any content or with `display:none` has an empty bounding box and is not considered visible.
- `'hidden'` - wait for element to be either detached from DOM, or have an empty bounding box or `visibility:hidden`.
This is opposite to the `'visible'` option.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
Returns
-------
Union[ElementHandle, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"element_handle.wait_for_selector",
self._impl_obj.wait_for_selector(
selector=selector, state=state, timeout=timeout, strict=strict
),
)
)
# Associate the implementation class with its async API wrapper in the mapping registry.
mapping.register(ElementHandleImpl, ElementHandle)
class Accessibility(AsyncBase):
    async def snapshot(
        self, *, interesting_only: bool = None, root: "ElementHandle" = None
    ) -> typing.Optional[typing.Dict]:
        """Accessibility.snapshot

        Captures the current state of the accessibility tree. The returned object
        represents the root accessible node of the page.

        > NOTE: The Chromium accessibility tree contains nodes that go unused on most
        platforms and by most screen readers. Playwright discards them as well for an
        easier to process tree, unless `interestingOnly` is set to `false`.

        Dumping the entire accessibility tree:

        ```py
        snapshot = await page.accessibility.snapshot()
        print(snapshot)
        ```

        Logging the focused node's name:

        ```py
        def find_focused_node(node):
            if node.get(\"focused\"):
                return node
            for child in (node.get(\"children\") or []):
                found_node = find_focused_node(child)
                if found_node:
                    return found_node
            return None

        snapshot = await page.accessibility.snapshot()
        node = find_focused_node(snapshot)
        if node:
            print(node[\"name\"])
        ```

        Parameters
        ----------
        interesting_only : Union[bool, NoneType]
            Prune uninteresting nodes from the tree. Defaults to `true`.
        root : Union[ElementHandle, NoneType]
            The root DOM element for the snapshot. Defaults to the whole page.

        Returns
        -------
        Union[Dict, NoneType]
        """
        coro = self._impl_obj.snapshot(
            interestingOnly=interesting_only, root=mapping.to_impl(root)
        )
        return mapping.from_maybe_impl(
            await self._async("accessibility.snapshot", coro)
        )
# Associate the implementation class with its async API wrapper in the mapping registry.
mapping.register(AccessibilityImpl, Accessibility)
class FileChooser(AsyncBase):
    @property
    def page(self) -> "Page":
        """FileChooser.page

        The page this file chooser belongs to.

        Returns
        -------
        Page
        """
        page_impl = self._impl_obj.page
        return mapping.from_impl(page_impl)

    @property
    def element(self) -> "ElementHandle":
        """FileChooser.element

        The input element associated with this file chooser.

        Returns
        -------
        ElementHandle
        """
        element_impl = self._impl_obj.element
        return mapping.from_impl(element_impl)

    def is_multiple(self) -> bool:
        """FileChooser.is_multiple

        Whether this file chooser accepts multiple files.

        Returns
        -------
        bool
        """
        multiple = self._impl_obj.is_multiple()
        return mapping.from_maybe_impl(multiple)

    async def set_files(
        self,
        files: typing.Union[
            str,
            pathlib.Path,
            FilePayload,
            typing.List[typing.Union[str, pathlib.Path]],
            typing.List[FilePayload],
        ],
        *,
        timeout: float = None,
        no_wait_after: bool = None
    ) -> NoneType:
        """FileChooser.set_files

        Sets the value of the file input this chooser is associated with. Relative paths
        are resolved against the current working directory; passing an empty list clears
        the selected files.

        Parameters
        ----------
        files : Union[List[Union[pathlib.Path, str]], List[{name: str, mimeType: str, buffer: bytes}], pathlib.Path, str, {name: str, mimeType: str, buffer: bytes}]
        timeout : Union[float, NoneType]
            Maximum time in milliseconds. Defaults to 30 seconds; pass `0` to disable. The
            default can be changed via `browser_context.set_default_timeout()` or
            `page.set_default_timeout()`.
        no_wait_after : Union[bool, NoneType]
            Opt out of waiting for initiated navigations to finish loading. Only needed in
            exceptional cases such as navigating to inaccessible pages. Defaults to `false`.
        """
        coro = self._impl_obj.set_files(
            files=mapping.to_impl(files),
            timeout=timeout,
            noWaitAfter=no_wait_after,
        )
        return mapping.from_maybe_impl(
            await self._async("file_chooser.set_files", coro)
        )
# Associate the implementation class with its async API wrapper in the mapping registry.
mapping.register(FileChooserImpl, FileChooser)
class Frame(AsyncBase):
@property
def page(self) -> "Page":
"""Frame.page
Returns the page containing this frame.
Returns
-------
Page
"""
return mapping.from_impl(self._impl_obj.page)
@property
def name(self) -> str:
"""Frame.name
Returns frame's name attribute as specified in the tag.
If the name is empty, returns the id attribute instead.
> NOTE: This value is calculated once when the frame is created, and will not update if the attribute is changed later.
Returns
-------
str
"""
return mapping.from_maybe_impl(self._impl_obj.name)
@property
def url(self) -> str:
"""Frame.url
Returns frame's url.
Returns
-------
str
"""
return mapping.from_maybe_impl(self._impl_obj.url)
@property
def parent_frame(self) -> typing.Optional["Frame"]:
"""Frame.parent_frame
Parent frame, if any. Detached frames and main frames return `null`.
Returns
-------
Union[Frame, NoneType]
"""
return mapping.from_impl_nullable(self._impl_obj.parent_frame)
@property
def child_frames(self) -> typing.List["Frame"]:
"""Frame.child_frames
Returns
-------
List[Frame]
"""
return mapping.from_impl_list(self._impl_obj.child_frames)
async def goto(
self,
url: str,
*,
timeout: float = None,
wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None,
referer: str = None
) -> typing.Optional["Response"]:
"""Frame.goto
Returns the main resource response. In case of multiple redirects, the navigation will resolve with the response of the
last redirect.
The method will throw an error if:
- there's an SSL error (e.g. in case of self-signed certificates).
- target URL is invalid.
- the `timeout` is exceeded during navigation.
- the remote server does not respond or is unreachable.
- the main resource failed to load.
The method will not throw an error when any valid HTTP status code is returned by the remote server, including 404 \"Not
Found\" and 500 \"Internal Server Error\". The status code for such responses can be retrieved by calling
`response.status()`.
> NOTE: The method either throws an error or returns a main resource response. The only exceptions are navigation to
`about:blank` or navigation to the same URL with a different hash, which would succeed and return `null`.
> NOTE: Headless mode doesn't support navigation to a PDF document. See the
[upstream issue](https://bugs.chromium.org/p/chromium/issues/detail?id=761295).
Parameters
----------
url : str
URL to navigate frame to. The url should include scheme, e.g. `https://`.
timeout : Union[float, NoneType]
Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
changed by using the `browser_context.set_default_navigation_timeout()`,
`browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
`page.set_default_timeout()` methods.
wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
When to consider operation succeeded, defaults to `load`. Events can be either:
- `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
- `'load'` - consider operation to be finished when the `load` event is fired.
- `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
- `'commit'` - consider operation to be finished when network response is received and the document started loading.
referer : Union[str, NoneType]
Referer header value. If provided it will take preference over the referer header value set by
`page.set_extra_http_headers()`.
Returns
-------
Union[Response, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"frame.goto",
self._impl_obj.goto(
url=url, timeout=timeout, waitUntil=wait_until, referer=referer
),
)
)
def expect_navigation(
self,
*,
url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]] = None,
wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None,
timeout: float = None
) -> AsyncEventContextManager["Response"]:
"""Frame.expect_navigation
Waits for the frame navigation and returns the main resource response. In case of multiple redirects, the navigation
will resolve with the response of the last redirect. In case of navigation to a different anchor or navigation due to
History API usage, the navigation will resolve with `null`.
This method waits for the frame to navigate to a new URL. It is useful for when you run code which will indirectly cause
the frame to navigate. Consider this example:
```py
async with frame.expect_navigation():
await frame.click(\"a.delayed-navigation\") # clicking the link will indirectly cause a navigation
# Resolves after navigation has finished
```
> NOTE: Usage of the [History API](https://developer.mozilla.org/en-US/docs/Web/API/History_API) to change the URL is
considered a navigation.
Parameters
----------
url : Union[Callable[[str], bool], Pattern, str, NoneType]
A glob pattern, regex pattern or predicate receiving [URL] to match while waiting for the navigation. Note that if the
parameter is a string without wilcard characters, the method will wait for navigation to URL that is exactly equal to
the string.
wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
When to consider operation succeeded, defaults to `load`. Events can be either:
- `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
- `'load'` - consider operation to be finished when the `load` event is fired.
- `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
- `'commit'` - consider operation to be finished when network response is received and the document started loading.
timeout : Union[float, NoneType]
Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
changed by using the `browser_context.set_default_navigation_timeout()`,
`browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
`page.set_default_timeout()` methods.
Returns
-------
EventContextManager[Response]
"""
return AsyncEventContextManager(
self._impl_obj.expect_navigation(
url=self._wrap_handler(url), wait_until=wait_until, timeout=timeout
).future
)
async def wait_for_url(
self,
url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]],
*,
wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None,
timeout: float = None
) -> NoneType:
"""Frame.wait_for_url
Waits for the frame to navigate to the given URL.
```py
await frame.click(\"a.delayed-navigation\") # clicking the link will indirectly cause a navigation
await frame.wait_for_url(\"**/target.html\")
```
Parameters
----------
url : Union[Callable[[str], bool], Pattern, str]
A glob pattern, regex pattern or predicate receiving [URL] to match while waiting for the navigation. Note that if the
parameter is a string without wilcard characters, the method will wait for navigation to URL that is exactly equal to
the string.
wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
When to consider operation succeeded, defaults to `load`. Events can be either:
- `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
- `'load'` - consider operation to be finished when the `load` event is fired.
- `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
- `'commit'` - consider operation to be finished when network response is received and the document started loading.
timeout : Union[float, NoneType]
Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
changed by using the `browser_context.set_default_navigation_timeout()`,
`browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
`page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.wait_for_url",
self._impl_obj.wait_for_url(
url=self._wrap_handler(url), wait_until=wait_until, timeout=timeout
),
)
)
async def wait_for_load_state(
self,
state: Literal["domcontentloaded", "load", "networkidle"] = None,
*,
timeout: float = None
) -> NoneType:
"""Frame.wait_for_load_state
Waits for the required load state to be reached.
This returns when the frame reaches a required load state, `load` by default. The navigation must have been committed
when this method is called. If current document has already reached the required state, resolves immediately.
```py
await frame.click(\"button\") # click triggers navigation.
await frame.wait_for_load_state() # the promise resolves after \"load\" event.
```
Parameters
----------
state : Union["domcontentloaded", "load", "networkidle", NoneType]
Optional load state to wait for, defaults to `load`. If the state has been already reached while loading current
document, the method resolves immediately. Can be one of:
- `'load'` - wait for the `load` event to be fired.
- `'domcontentloaded'` - wait for the `DOMContentLoaded` event to be fired.
- `'networkidle'` - wait until there are no network connections for at least `500` ms.
timeout : Union[float, NoneType]
Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
changed by using the `browser_context.set_default_navigation_timeout()`,
`browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
`page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.wait_for_load_state",
self._impl_obj.wait_for_load_state(state=state, timeout=timeout),
)
)
async def frame_element(self) -> "ElementHandle":
"""Frame.frame_element
Returns the `frame` or `iframe` element handle which corresponds to this frame.
This is an inverse of `element_handle.content_frame()`. Note that returned handle actually belongs to the parent
frame.
This method throws an error if the frame has been detached before `frameElement()` returns.
```py
frame_element = await frame.frame_element()
content_frame = await frame_element.content_frame()
assert frame == content_frame
```
Returns
-------
ElementHandle
"""
return mapping.from_impl(
await self._async("frame.frame_element", self._impl_obj.frame_element())
)
async def evaluate(self, expression: str, arg: typing.Any = None) -> typing.Any:
"""Frame.evaluate
Returns the return value of `expression`.
If the function passed to the `frame.evaluate()` returns a [Promise], then `frame.evaluate()` would wait
for the promise to resolve and return its value.
If the function passed to the `frame.evaluate()` returns a non-[Serializable] value, then
`frame.evaluate()` returns `undefined`. Playwright also supports transferring some additional values that are
not serializable by `JSON`: `-0`, `NaN`, `Infinity`, `-Infinity`.
```py
result = await frame.evaluate(\"([x, y]) => Promise.resolve(x * y)\", [7, 8])
print(result) # prints \"56\"
```
A string can also be passed in instead of a function.
```py
print(await frame.evaluate(\"1 + 2\")) # prints \"3\"
x = 10
print(await frame.evaluate(f\"1 + {x}\")) # prints \"11\"
```
`ElementHandle` instances can be passed as an argument to the `frame.evaluate()`:
```py
body_handle = await frame.evaluate(\"document.body\")
html = await frame.evaluate(\"([body, suffix]) => body.innerHTML + suffix\", [body_handle, \"hello\"])
await body_handle.dispose()
```
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"frame.evaluate",
self._impl_obj.evaluate(
expression=expression, arg=mapping.to_impl(arg)
),
)
)
async def evaluate_handle(
self, expression: str, arg: typing.Any = None
) -> "JSHandle":
"""Frame.evaluate_handle
Returns the return value of `expression` as a `JSHandle`.
The only difference between `frame.evaluate()` and `frame.evaluate_handle()` is that
`frame.evaluate_handle()` returns `JSHandle`.
If the function, passed to the `frame.evaluate_handle()`, returns a [Promise], then
`frame.evaluate_handle()` would wait for the promise to resolve and return its value.
```py
a_window_handle = await frame.evaluate_handle(\"Promise.resolve(window)\")
a_window_handle # handle for the window object.
```
A string can also be passed in instead of a function.
```py
a_handle = await page.evaluate_handle(\"document\") # handle for the \"document\"
```
`JSHandle` instances can be passed as an argument to the `frame.evaluate_handle()`:
```py
a_handle = await page.evaluate_handle(\"document.body\")
result_handle = await page.evaluate_handle(\"body => body.innerHTML\", a_handle)
print(await result_handle.json_value())
await result_handle.dispose()
```
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
Returns
-------
JSHandle
"""
return mapping.from_impl(
await self._async(
"frame.evaluate_handle",
self._impl_obj.evaluate_handle(
expression=expression, arg=mapping.to_impl(arg)
),
)
)
async def query_selector(
self, selector: str, *, strict: bool = None
) -> typing.Optional["ElementHandle"]:
"""Frame.query_selector
Returns the ElementHandle pointing to the frame element.
> NOTE: The use of `ElementHandle` is discouraged, use `Locator` objects and web-first assertions instead.
The method finds an element matching the specified selector within the frame. See
[Working with selectors](./selectors.md) for more details. If no elements match the selector, returns `null`.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
Returns
-------
Union[ElementHandle, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"frame.query_selector",
self._impl_obj.query_selector(selector=selector, strict=strict),
)
)
async def query_selector_all(self, selector: str) -> typing.List["ElementHandle"]:
"""Frame.query_selector_all
Returns the ElementHandles pointing to the frame elements.
> NOTE: The use of `ElementHandle` is discouraged, use `Locator` objects instead.
The method finds all elements matching the specified selector within the frame. See
[Working with selectors](./selectors.md) for more details. If no elements match the selector, returns empty array.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
Returns
-------
List[ElementHandle]
"""
return mapping.from_impl_list(
await self._async(
"frame.query_selector_all",
self._impl_obj.query_selector_all(selector=selector),
)
)
async def wait_for_selector(
self,
selector: str,
*,
strict: bool = None,
timeout: float = None,
state: Literal["attached", "detached", "hidden", "visible"] = None
) -> typing.Optional["ElementHandle"]:
"""Frame.wait_for_selector
Returns when element specified by selector satisfies `state` option. Returns `null` if waiting for `hidden` or
`detached`.
> NOTE: Playwright automatically waits for element to be ready before performing an action. Using `Locator` objects and
web-first assertions make the code wait-for-selector-free.
Wait for the `selector` to satisfy `state` option (either appear/disappear from dom, or become visible/hidden). If at
the moment of calling the method `selector` already satisfies the condition, the method will return immediately. If the
selector doesn't satisfy the condition for the `timeout` milliseconds, the function will throw.
This method works across navigations:
```py
import asyncio
from playwright.async_api import async_playwright
async def run(playwright):
chromium = playwright.chromium
browser = await chromium.launch()
page = await browser.new_page()
for current_url in [\"https://google.com\", \"https://bbc.com\"]:
await page.goto(current_url, wait_until=\"domcontentloaded\")
element = await page.main_frame.wait_for_selector(\"img\")
print(\"Loaded image: \" + str(await element.get_attribute(\"src\")))
await browser.close()
async def main():
async with async_playwright() as playwright:
await run(playwright)
asyncio.run(main())
```
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
state : Union["attached", "detached", "hidden", "visible", NoneType]
Defaults to `'visible'`. Can be either:
- `'attached'` - wait for element to be present in DOM.
- `'detached'` - wait for element to not be present in DOM.
- `'visible'` - wait for element to have non-empty bounding box and no `visibility:hidden`. Note that element without
any content or with `display:none` has an empty bounding box and is not considered visible.
- `'hidden'` - wait for element to be either detached from DOM, or have an empty bounding box or `visibility:hidden`.
This is opposite to the `'visible'` option.
Returns
-------
Union[ElementHandle, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"frame.wait_for_selector",
self._impl_obj.wait_for_selector(
selector=selector, strict=strict, timeout=timeout, state=state
),
)
)
async def is_checked(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Frame.is_checked
Returns whether the element is checked. Throws if the element is not a checkbox or radio input.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"frame.is_checked",
self._impl_obj.is_checked(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_disabled(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Frame.is_disabled
Returns whether the element is disabled, the opposite of [enabled](./actionability.md#enabled).
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"frame.is_disabled",
self._impl_obj.is_disabled(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_editable(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Frame.is_editable
Returns whether the element is [editable](./actionability.md#editable).
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"frame.is_editable",
self._impl_obj.is_editable(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_enabled(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Frame.is_enabled
Returns whether the element is [enabled](./actionability.md#enabled).
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"frame.is_enabled",
self._impl_obj.is_enabled(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_hidden(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Frame.is_hidden
Returns whether the element is hidden, the opposite of [visible](./actionability.md#visible). `selector` that does not
match any elements is considered hidden.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
**DEPRECATED** This option is ignored. `frame.is_hidden()` does not wait for the element to become hidden and
returns immediately.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"frame.is_hidden",
self._impl_obj.is_hidden(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_visible(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Frame.is_visible
Returns whether the element is [visible](./actionability.md#visible). `selector` that does not match any elements is
considered not visible.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
**DEPRECATED** This option is ignored. `frame.is_visible()` does not wait for the element to become visible and
returns immediately.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"frame.is_visible",
self._impl_obj.is_visible(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def dispatch_event(
self,
selector: str,
type: str,
event_init: typing.Dict = None,
*,
strict: bool = None,
timeout: float = None
) -> NoneType:
"""Frame.dispatch_event
The snippet below dispatches the `click` event on the element. Regardless of the visibility state of the element,
`click` is dispatched. This is equivalent to calling
[element.click()](https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/click).
```py
await frame.dispatch_event(\"button#submit\", \"click\")
```
Under the hood, it creates an instance of an event based on the given `type`, initializes it with `eventInit` properties
and dispatches it on the element. Events are `composed`, `cancelable` and bubble by default.
Since `eventInit` is event-specific, please refer to the events documentation for the lists of initial properties:
- [DragEvent](https://developer.mozilla.org/en-US/docs/Web/API/DragEvent/DragEvent)
- [FocusEvent](https://developer.mozilla.org/en-US/docs/Web/API/FocusEvent/FocusEvent)
- [KeyboardEvent](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/KeyboardEvent)
- [MouseEvent](https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/MouseEvent)
- [PointerEvent](https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/PointerEvent)
- [TouchEvent](https://developer.mozilla.org/en-US/docs/Web/API/TouchEvent/TouchEvent)
- [Event](https://developer.mozilla.org/en-US/docs/Web/API/Event/Event)
You can also specify `JSHandle` as the property value if you want live objects to be passed into the event:
```py
# note you can only create data_transfer in chromium and firefox
data_transfer = await frame.evaluate_handle(\"new DataTransfer()\")
await frame.dispatch_event(\"#source\", \"dragstart\", { \"dataTransfer\": data_transfer })
```
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
type : str
DOM event type: `"click"`, `"dragstart"`, etc.
event_init : Union[Dict, NoneType]
Optional event-specific initialization properties.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.dispatch_event",
self._impl_obj.dispatch_event(
selector=selector,
type=type,
eventInit=mapping.to_impl(event_init),
strict=strict,
timeout=timeout,
),
)
)
async def eval_on_selector(
self,
selector: str,
expression: str,
arg: typing.Any = None,
*,
strict: bool = None
) -> typing.Any:
"""Frame.eval_on_selector
Returns the return value of `expression`.
> NOTE: This method does not wait for the element to pass actionability checks and therefore can lead to the flaky
tests. Use `locator.evaluate()`, other `Locator` helper methods or web-first assertions instead.
The method finds an element matching the specified selector within the frame and passes it as a first argument to
`expression`. See [Working with selectors](./selectors.md) for more details. If no elements match the selector, the
method throws an error.
If `expression` returns a [Promise], then `frame.eval_on_selector()` would wait for the promise to resolve and
return its value.
Examples:
```py
search_value = await frame.eval_on_selector(\"#search\", \"el => el.value\")
preload_href = await frame.eval_on_selector(\"link[rel=preload]\", \"el => el.href\")
html = await frame.eval_on_selector(\".main-container\", \"(e, suffix) => e.outerHTML + suffix\", \"hello\")
```
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"frame.eval_on_selector",
self._impl_obj.eval_on_selector(
selector=selector,
expression=expression,
arg=mapping.to_impl(arg),
strict=strict,
),
)
)
async def eval_on_selector_all(
self, selector: str, expression: str, arg: typing.Any = None
) -> typing.Any:
"""Frame.eval_on_selector_all
Returns the return value of `expression`.
> NOTE: In most cases, `locator.evaluate_all()`, other `Locator` helper methods and web-first assertions do a
better job.
The method finds all elements matching the specified selector within the frame and passes an array of matched elements
as a first argument to `expression`. See [Working with selectors](./selectors.md) for more details.
If `expression` returns a [Promise], then `frame.eval_on_selector_all()` would wait for the promise to resolve and
return its value.
Examples:
```py
divs_counts = await frame.eval_on_selector_all(\"div\", \"(divs, min) => divs.length >= min\", 10)
```
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"frame.eval_on_selector_all",
self._impl_obj.eval_on_selector_all(
selector=selector, expression=expression, arg=mapping.to_impl(arg)
),
)
)
async def content(self) -> str:
"""Frame.content
Gets the full HTML contents of the frame, including the doctype.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async("frame.content", self._impl_obj.content())
)
async def set_content(
self,
html: str,
*,
timeout: float = None,
wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None
) -> NoneType:
"""Frame.set_content
Parameters
----------
html : str
HTML markup to assign to the page.
timeout : Union[float, NoneType]
Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
changed by using the `browser_context.set_default_navigation_timeout()`,
`browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
`page.set_default_timeout()` methods.
wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
When to consider operation succeeded, defaults to `load`. Events can be either:
- `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
- `'load'` - consider operation to be finished when the `load` event is fired.
- `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
- `'commit'` - consider operation to be finished when network response is received and the document started loading.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.set_content",
self._impl_obj.set_content(
html=html, timeout=timeout, waitUntil=wait_until
),
)
)
def is_detached(self) -> bool:
"""Frame.is_detached
Returns `true` if the frame has been detached, or `false` otherwise.
Returns
-------
bool
"""
return mapping.from_maybe_impl(self._impl_obj.is_detached())
async def add_script_tag(
self,
*,
url: str = None,
path: typing.Union[str, pathlib.Path] = None,
content: str = None,
type: str = None
) -> "ElementHandle":
"""Frame.add_script_tag
Returns the added tag when the script's onload fires or when the script content was injected into frame.
Adds a `<script>` tag into the page with the desired url or content.
Parameters
----------
url : Union[str, NoneType]
URL of a script to be added.
path : Union[pathlib.Path, str, NoneType]
Path to the JavaScript file to be injected into frame. If `path` is a relative path, then it is resolved relative to the
current working directory.
content : Union[str, NoneType]
Raw JavaScript content to be injected into frame.
type : Union[str, NoneType]
Script type. Use 'module' in order to load a Javascript ES6 module. See
[script](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script) for more details.
Returns
-------
ElementHandle
"""
return mapping.from_impl(
await self._async(
"frame.add_script_tag",
self._impl_obj.add_script_tag(
url=url, path=path, content=content, type=type
),
)
)
async def add_style_tag(
self,
*,
url: str = None,
path: typing.Union[str, pathlib.Path] = None,
content: str = None
) -> "ElementHandle":
"""Frame.add_style_tag
Returns the added tag when the stylesheet's onload fires or when the CSS content was injected into frame.
Adds a `<link rel=\"stylesheet\">` tag into the page with the desired url or a `<style type=\"text/css\">` tag with the
content.
Parameters
----------
url : Union[str, NoneType]
URL of the `<link>` tag.
path : Union[pathlib.Path, str, NoneType]
Path to the CSS file to be injected into frame. If `path` is a relative path, then it is resolved relative to the
current working directory.
content : Union[str, NoneType]
Raw CSS content to be injected into frame.
Returns
-------
ElementHandle
"""
return mapping.from_impl(
await self._async(
"frame.add_style_tag",
self._impl_obj.add_style_tag(url=url, path=path, content=content),
)
)
async def click(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
click_count: int = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Frame.click
This method clicks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
click_count : Union[int, NoneType]
defaults to 1. See [UIEvent.detail].
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.click",
self._impl_obj.click(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
clickCount=click_count,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def dblclick(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Frame.dblclick
This method double clicks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to double click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set. Note that if the
first click of the `dblclick()` triggers a navigation event, this method will throw.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `frame.dblclick()` dispatches two `click` events and a single `dblclick` event.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.dblclick",
self._impl_obj.dblclick(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def tap(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Frame.tap
This method taps an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.touchscreen` to tap the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `frame.tap()` requires that the `hasTouch` option of the browser context be set to true.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.tap",
self._impl_obj.tap(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def fill(
self,
selector: str,
value: str,
*,
timeout: float = None,
no_wait_after: bool = None,
strict: bool = None,
force: bool = None
) -> NoneType:
"""Frame.fill
This method waits for an element matching `selector`, waits for [actionability](./actionability.md) checks, focuses the
element, fills it and triggers an `input` event after filling. Note that you can pass an empty string to clear the input
field.
If the target element is not an `<input>`, `<textarea>` or `[contenteditable]` element, this method throws an error.
However, if the element is inside the `<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be filled
instead.
To send fine-grained keyboard events, use `frame.type()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
value : str
Value to fill for the `<input>`, `<textarea>` or `[contenteditable]` element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.fill",
self._impl_obj.fill(
selector=selector,
value=value,
timeout=timeout,
noWaitAfter=no_wait_after,
strict=strict,
force=force,
),
)
)
def locator(
self,
selector: str,
*,
has_text: typing.Union[str, typing.Pattern] = None,
has: "Locator" = None
) -> "Locator":
"""Frame.locator
The method returns an element locator that can be used to perform actions in the frame. Locator is resolved to the
element immediately before performing an action, so a series of actions on the same locator can in fact be performed on
different DOM elements. That would happen if the DOM structure between those actions has changed.
Parameters
----------
selector : str
A selector to use when resolving DOM element. See [working with selectors](./selectors.md) for more details.
has_text : Union[Pattern, str, NoneType]
Matches elements containing specified text somewhere inside, possibly in a child or a descendant element. For example,
`"Playwright"` matches `<article><div>Playwright</div></article>`.
has : Union[Locator, NoneType]
Matches elements containing an element that matches an inner locator. Inner locator is queried against the outer one.
For example, `article` that has `text=Playwright` matches `<article><div>Playwright</div></article>`.
Note that outer and inner locators must belong to the same frame. Inner locator must not contain `FrameLocator`s.
Returns
-------
Locator
"""
return mapping.from_impl(
self._impl_obj.locator(
selector=selector, has_text=has_text, has=has._impl_obj if has else None
)
)
def frame_locator(self, selector: str) -> "FrameLocator":
"""Frame.frame_locator
When working with iframes, you can create a frame locator that will enter the iframe and allow selecting elements in
that iframe. Following snippet locates element with text \"Submit\" in the iframe with id `my-frame`, like `<iframe
id=\"my-frame\">`:
```py
locator = frame.frame_locator(\"#my-iframe\").locator(\"text=Submit\")
await locator.click()
```
Parameters
----------
selector : str
A selector to use when resolving DOM element. See [working with selectors](./selectors.md) for more details.
Returns
-------
FrameLocator
"""
return mapping.from_impl(self._impl_obj.frame_locator(selector=selector))
async def focus(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> NoneType:
"""Frame.focus
This method fetches an element with `selector` and focuses it. If there's no element matching `selector`, the method
waits until a matching element appears in the DOM.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.focus",
self._impl_obj.focus(selector=selector, strict=strict, timeout=timeout),
)
)
async def text_content(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> typing.Optional[str]:
"""Frame.text_content
Returns `element.textContent`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"frame.text_content",
self._impl_obj.text_content(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def inner_text(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> str:
"""Frame.inner_text
Returns `element.innerText`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"frame.inner_text",
self._impl_obj.inner_text(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def inner_html(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> str:
"""Frame.inner_html
Returns `element.innerHTML`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"frame.inner_html",
self._impl_obj.inner_html(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def get_attribute(
self, selector: str, name: str, *, strict: bool = None, timeout: float = None
) -> typing.Optional[str]:
"""Frame.get_attribute
Returns element attribute value.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
name : str
Attribute name to get the value for.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"frame.get_attribute",
self._impl_obj.get_attribute(
selector=selector, name=name, strict=strict, timeout=timeout
),
)
)
async def hover(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Frame.hover
This method hovers over an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to hover over the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.hover",
self._impl_obj.hover(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
strict=strict,
trial=trial,
),
)
)
async def drag_and_drop(
self,
source: str,
target: str,
*,
source_position: Position = None,
target_position: Position = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
timeout: float = None,
trial: bool = None
) -> NoneType:
"""Frame.drag_and_drop
Parameters
----------
source : str
target : str
source_position : Union[{x: float, y: float}, NoneType]
Clicks on the source element at this point relative to the top-left corner of the element's padding box. If not
specified, some visible point of the element is used.
target_position : Union[{x: float, y: float}, NoneType]
Drops on the target element at this point relative to the top-left corner of the element's padding box. If not
specified, some visible point of the element is used.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.drag_and_drop",
self._impl_obj.drag_and_drop(
source=source,
target=target,
sourcePosition=source_position,
targetPosition=target_position,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
timeout=timeout,
trial=trial,
),
)
)
async def select_option(
self,
selector: str,
value: typing.Union[str, typing.List[str]] = None,
*,
index: typing.Union[int, typing.List[int]] = None,
label: typing.Union[str, typing.List[str]] = None,
element: typing.Union["ElementHandle", typing.List["ElementHandle"]] = None,
timeout: float = None,
no_wait_after: bool = None,
strict: bool = None,
force: bool = None
) -> typing.List[str]:
"""Frame.select_option
This method waits for an element matching `selector`, waits for [actionability](./actionability.md) checks, waits until
all specified options are present in the `<select>` element and selects these options.
If the target element is not a `<select>` element, this method throws an error. However, if the element is inside the
`<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be used instead.
Returns the array of option values that have been successfully selected.
Triggers a `change` and `input` event once all the provided options have been selected.
```py
# single selection matching the value
await frame.select_option(\"select#colors\", \"blue\")
# single selection matching the label
await frame.select_option(\"select#colors\", label=\"blue\")
# multiple selection
await frame.select_option(\"select#colors\", value=[\"red\", \"green\", \"blue\"])
```
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
value : Union[List[str], str, NoneType]
Options to select by value. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
index : Union[List[int], int, NoneType]
Options to select by index. Optional.
label : Union[List[str], str, NoneType]
Options to select by label. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
element : Union[ElementHandle, List[ElementHandle], NoneType]
Option elements to select. Optional.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
Returns
-------
List[str]
"""
return mapping.from_maybe_impl(
await self._async(
"frame.select_option",
self._impl_obj.select_option(
selector=selector,
value=mapping.to_impl(value),
index=mapping.to_impl(index),
label=mapping.to_impl(label),
element=mapping.to_impl(element),
timeout=timeout,
noWaitAfter=no_wait_after,
strict=strict,
force=force,
),
)
)
async def input_value(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> str:
"""Frame.input_value
Returns `input.value` for the selected `<input>` or `<textarea>` or `<select>` element. Throws for non-input elements.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"frame.input_value",
self._impl_obj.input_value(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def set_input_files(
self,
selector: str,
files: typing.Union[
str,
pathlib.Path,
FilePayload,
typing.List[typing.Union[str, pathlib.Path]],
typing.List[FilePayload],
],
*,
strict: bool = None,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""Frame.set_input_files
This method expects `selector` to point to an
[input element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input).
Sets the value of the file input to these file paths or files. If some of the `filePaths` are relative paths, then they
are resolved relative to the the current working directory. For empty array, clears the selected files.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
files : Union[List[Union[pathlib.Path, str]], List[{name: str, mimeType: str, buffer: bytes}], pathlib.Path, str, {name: str, mimeType: str, buffer: bytes}]
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.set_input_files",
self._impl_obj.set_input_files(
selector=selector,
files=mapping.to_impl(files),
strict=strict,
timeout=timeout,
noWaitAfter=no_wait_after,
),
)
)
async def type(
self,
selector: str,
text: str,
*,
delay: float = None,
strict: bool = None,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""Frame.type
Sends a `keydown`, `keypress`/`input`, and `keyup` event for each character in the text. `frame.type` can be used to
send fine-grained keyboard events. To fill values in form fields, use `frame.fill()`.
To press a special key, like `Control` or `ArrowDown`, use `keyboard.press()`.
```py
await frame.type(\"#mytextarea\", \"hello\") # types instantly
await frame.type(\"#mytextarea\", \"world\", delay=100) # types slower, like a user
```
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
text : str
A text to type into a focused element.
delay : Union[float, NoneType]
Time to wait between key presses in milliseconds. Defaults to 0.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.type",
self._impl_obj.type(
selector=selector,
text=text,
delay=delay,
strict=strict,
timeout=timeout,
noWaitAfter=no_wait_after,
),
)
)
async def press(
self,
selector: str,
key: str,
*,
delay: float = None,
strict: bool = None,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""Frame.press
`key` can specify the intended [keyboardEvent.key](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key)
value or a single character to generate the text for. A superset of the `key` values can be found
[here](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values). Examples of the keys are:
`F1` - `F12`, `Digit0`- `Digit9`, `KeyA`- `KeyZ`, `Backquote`, `Minus`, `Equal`, `Backslash`, `Backspace`, `Tab`,
`Delete`, `Escape`, `ArrowDown`, `End`, `Enter`, `Home`, `Insert`, `PageDown`, `PageUp`, `ArrowRight`, `ArrowUp`, etc.
Following modification shortcuts are also supported: `Shift`, `Control`, `Alt`, `Meta`, `ShiftLeft`.
Holding down `Shift` will type the text that corresponds to the `key` in the upper case.
If `key` is a single character, it is case-sensitive, so the values `a` and `A` will generate different respective
texts.
Shortcuts such as `key: \"Control+o\"` or `key: \"Control+Shift+T\"` are supported as well. When specified with the
modifier, modifier is pressed and being held while the subsequent key is being pressed.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
key : str
Name of the key to press or a character to generate, such as `ArrowLeft` or `a`.
delay : Union[float, NoneType]
Time to wait between `keydown` and `keyup` in milliseconds. Defaults to 0.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.press",
self._impl_obj.press(
selector=selector,
key=key,
delay=delay,
strict=strict,
timeout=timeout,
noWaitAfter=no_wait_after,
),
)
)
async def check(
self,
selector: str,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Frame.check
This method checks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Ensure that matched element is a checkbox or a radio input. If not, this method throws. If the element is already
checked, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.check",
self._impl_obj.check(
selector=selector,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def uncheck(
self,
selector: str,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Frame.uncheck
This method checks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Ensure that matched element is a checkbox or a radio input. If not, this method throws. If the element is already
unchecked, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now unchecked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.uncheck",
self._impl_obj.uncheck(
selector=selector,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def wait_for_timeout(self, timeout: float) -> NoneType:
"""Frame.wait_for_timeout
Waits for the given `timeout` in milliseconds.
Note that `frame.waitForTimeout()` should only be used for debugging. Tests using the timer in production are going to
be flaky. Use signals such as network events, selectors becoming visible and others instead.
Parameters
----------
timeout : float
A timeout to wait for
"""
return mapping.from_maybe_impl(
await self._async(
"frame.wait_for_timeout",
self._impl_obj.wait_for_timeout(timeout=timeout),
)
)
async def wait_for_function(
self,
expression: str,
*,
arg: typing.Any = None,
timeout: float = None,
polling: typing.Union[float, Literal["raf"]] = None
) -> "JSHandle":
"""Frame.wait_for_function
Returns when the `expression` returns a truthy value, returns that value.
The `frame.wait_for_function()` can be used to observe viewport size change:
```py
import asyncio
from playwright.async_api import async_playwright
async def run(playwright):
webkit = playwright.webkit
browser = await webkit.launch()
page = await browser.new_page()
await page.evaluate(\"window.x = 0; setTimeout(() => { window.x = 100 }, 1000);\")
await page.main_frame.wait_for_function(\"() => window.x > 0\")
await browser.close()
async def main():
async with async_playwright() as playwright:
await run(playwright)
asyncio.run(main())
```
To pass an argument to the predicate of `frame.waitForFunction` function:
```py
selector = \".foo\"
await frame.wait_for_function(\"selector => !!document.querySelector(selector)\", selector)
```
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
timeout : Union[float, NoneType]
maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
polling : Union["raf", float, NoneType]
If `polling` is `'raf'`, then `expression` is constantly executed in `requestAnimationFrame` callback. If `polling` is a
number, then it is treated as an interval in milliseconds at which the function would be executed. Defaults to `raf`.
Returns
-------
JSHandle
"""
return mapping.from_impl(
await self._async(
"frame.wait_for_function",
self._impl_obj.wait_for_function(
expression=expression,
arg=mapping.to_impl(arg),
timeout=timeout,
polling=polling,
),
)
)
async def title(self) -> str:
"""Frame.title
Returns the page title.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async("frame.title", self._impl_obj.title())
)
async def set_checked(
self,
selector: str,
checked: bool,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Frame.set_checked
This method checks or unchecks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Ensure that matched element is a checkbox or a radio input. If not, this method throws.
1. If the element already has the right checked state, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked or unchecked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
checked : bool
Whether to check or uncheck the checkbox.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"frame.set_checked",
self._impl_obj.set_checked(
selector=selector,
checked=checked,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(FrameImpl, Frame)
class FrameLocator(AsyncBase):
    @property
    def first(self) -> "FrameLocator":
        """FrameLocator.first

        Locator for the first matching frame.

        Returns
        -------
        FrameLocator
        """
        return mapping.from_impl(self._impl_obj.first)

    @property
    def last(self) -> "FrameLocator":
        """FrameLocator.last

        Locator for the last matching frame.

        Returns
        -------
        FrameLocator
        """
        return mapping.from_impl(self._impl_obj.last)

    def locator(
        self,
        selector: str,
        *,
        has_text: typing.Union[str, typing.Pattern] = None,
        has: "Locator" = None
    ) -> "Locator":
        """FrameLocator.locator

        Finds an element matching `selector` in this FrameLocator's subtree.

        Parameters
        ----------
        selector : str
            A selector to use when resolving the DOM element. See
            [working with selectors](./selectors.md) for more details.
        has_text : Union[Pattern, str, NoneType]
            Match elements containing the specified text somewhere inside,
            possibly in a child or descendant element. For example,
            `"Playwright"` matches `<article><div>Playwright</div></article>`.
        has : Union[Locator, NoneType]
            Match elements containing an element matched by this inner locator,
            queried against the outer one. For example, `article` that has
            `text=Playwright` matches
            `<article><div>Playwright</div></article>`. Outer and inner
            locators must belong to the same frame; the inner locator must not
            contain `FrameLocator`s.

        Returns
        -------
        Locator
        """
        # Unwrap the inner locator (when supplied) to its implementation object.
        inner_impl = has._impl_obj if has else None
        impl = self._impl_obj.locator(
            selector=selector, has_text=has_text, has=inner_impl
        )
        return mapping.from_impl(impl)

    def frame_locator(self, selector: str) -> "FrameLocator":
        """FrameLocator.frame_locator

        Creates a frame locator that enters the iframe matched by `selector`
        and allows selecting elements inside that iframe.

        Parameters
        ----------
        selector : str
            A selector to use when resolving the DOM element. See
            [working with selectors](./selectors.md) for more details.

        Returns
        -------
        FrameLocator
        """
        return mapping.from_impl(self._impl_obj.frame_locator(selector=selector))

    def nth(self, index: int) -> "FrameLocator":
        """FrameLocator.nth

        Locator for the n-th matching frame.

        Parameters
        ----------
        index : int

        Returns
        -------
        FrameLocator
        """
        return mapping.from_impl(self._impl_obj.nth(index=index))
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(FrameLocatorImpl, FrameLocator)
class Worker(AsyncBase):
    def on(
        self,
        event: Literal["close"],
        f: typing.Callable[["Worker"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when this dedicated [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) is terminated."""
        # Delegate listener registration to the base event emitter.
        return super().on(event=event, f=f)

    def once(
        self,
        event: Literal["close"],
        f: typing.Callable[["Worker"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when this dedicated [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) is terminated."""
        # One-shot variant of `on`: the listener fires at most once.
        return super().once(event=event, f=f)

    @property
    def url(self) -> str:
        """Worker.url

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.url)

    async def evaluate(self, expression: str, arg: typing.Any = None) -> typing.Any:
        """Worker.evaluate

        Returns the return value of `expression`.

        If the function passed to `worker.evaluate()` returns a [Promise], the
        call waits for it to resolve and returns its value. If it returns a
        non-[Serializable] value, `undefined` is returned. Some additional
        values not serializable by `JSON` are supported: `-0`, `NaN`,
        `Infinity`, `-Infinity`.

        Parameters
        ----------
        expression : str
            JavaScript expression to be evaluated in the browser context;
            interpreted as a function when it looks like a function
            declaration, otherwise evaluated as an expression.
        arg : Union[Any, NoneType]
            Optional argument to pass to `expression`.

        Returns
        -------
        Any
        """
        impl_call = self._impl_obj.evaluate(
            expression=expression, arg=mapping.to_impl(arg)
        )
        result = await self._async("worker.evaluate", impl_call)
        return mapping.from_maybe_impl(result)

    async def evaluate_handle(
        self, expression: str, arg: typing.Any = None
    ) -> "JSHandle":
        """Worker.evaluate_handle

        Returns the return value of `expression` as a `JSHandle`.

        The only difference from `worker.evaluate()` is that this method
        returns a `JSHandle` instead of a serialized value. If the function
        passed returns a [Promise], the call waits for it to resolve first.

        Parameters
        ----------
        expression : str
            JavaScript expression to be evaluated in the browser context;
            interpreted as a function when it looks like a function
            declaration, otherwise evaluated as an expression.
        arg : Union[Any, NoneType]
            Optional argument to pass to `expression`.

        Returns
        -------
        JSHandle
        """
        impl_call = self._impl_obj.evaluate_handle(
            expression=expression, arg=mapping.to_impl(arg)
        )
        handle = await self._async("worker.evaluate_handle", impl_call)
        return mapping.from_impl(handle)
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(WorkerImpl, Worker)
class Selectors(AsyncBase):
    async def register(
        self,
        name: str,
        script: str = None,
        *,
        path: typing.Union[str, pathlib.Path] = None,
        content_script: bool = None
    ) -> NoneType:
        """Selectors.register

        Registers a custom selector engine. After registration, selectors may
        be prefixed with `"{name}="` in any method that accepts selectors. The
        engine script must define a `query(root, selector)` function returning
        the first matching element in the root's subtree, and a
        `queryAll(root, selector)` function returning all matching elements.

        For example, an engine registered as `"tag"` that queries by tag name
        enables selectors like `page.click('tag=div >> text="Click me"')` and
        `page.locator('tag=button').count()`.

        Parameters
        ----------
        name : str
            Name used in selectors as a prefix, e.g. `{name: 'foo'}` enables
            `foo=myselectorbody` selectors. May only contain `[a-zA-Z0-9_]`
            characters.
        script : Union[str, NoneType]
            Raw script content.
        path : Union[pathlib.Path, str, NoneType]
            Path to the JavaScript file; a relative path is resolved against
            the current working directory.
        content_script : Union[bool, NoneType]
            Run the selector engine in an isolated JavaScript environment that
            shares the DOM but not any JavaScript objects from the frame's
            scripts. Defaults to `false`. Running as a content script is not
            guaranteed when this engine is used together with other registered
            engines.
        """
        impl_call = self._impl_obj.register(
            name=name, script=script, path=path, contentScript=content_script
        )
        result = await self._async("selectors.register", impl_call)
        return mapping.from_maybe_impl(result)
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(SelectorsImpl, Selectors)
class ConsoleMessage(AsyncBase):
    @property
    def type(self) -> str:
        """ConsoleMessage.type

        Message type; one of the following values: `'log'`, `'debug'`,
        `'info'`, `'error'`, `'warning'`, `'dir'`, `'dirxml'`, `'table'`,
        `'trace'`, `'clear'`, `'startGroup'`, `'startGroupCollapsed'`,
        `'endGroup'`, `'assert'`, `'profile'`, `'profileEnd'`, `'count'`,
        `'timeEnd'`.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.type)

    @property
    def text(self) -> str:
        """ConsoleMessage.text

        The text of the console message.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.text)

    @property
    def args(self) -> typing.List["JSHandle"]:
        """ConsoleMessage.args

        List of arguments passed to the `console` function call. See also
        `page.on('console')`.

        Returns
        -------
        List[JSHandle]
        """
        return mapping.from_impl_list(self._impl_obj.args)

    @property
    def location(self) -> SourceLocation:
        """ConsoleMessage.location

        Source location that produced the message.

        Returns
        -------
        {url: str, lineNumber: int, columnNumber: int}
        """
        return mapping.from_impl(self._impl_obj.location)
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(ConsoleMessageImpl, ConsoleMessage)
class Dialog(AsyncBase):
    @property
    def type(self) -> str:
        """Dialog.type

        Dialog's type; one of `alert`, `beforeunload`, `confirm` or `prompt`.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.type)

    @property
    def message(self) -> str:
        """Dialog.message

        A message displayed in the dialog.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.message)

    @property
    def default_value(self) -> str:
        """Dialog.default_value

        Default prompt value when the dialog is a prompt; otherwise an empty
        string.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.default_value)

    async def accept(self, prompt_text: str = None) -> NoneType:
        """Dialog.accept

        Returns when the dialog has been accepted.

        Parameters
        ----------
        prompt_text : Union[str, NoneType]
            A text to enter in prompt. Does not cause any effects if the
            dialog's `type` is not prompt. Optional.
        """
        # snake_case kwarg maps to the camelCase name on the impl layer.
        impl_call = self._impl_obj.accept(promptText=prompt_text)
        return mapping.from_maybe_impl(await self._async("dialog.accept", impl_call))

    async def dismiss(self) -> NoneType:
        """Dialog.dismiss

        Returns when the dialog has been dismissed.
        """
        return mapping.from_maybe_impl(
            await self._async("dialog.dismiss", self._impl_obj.dismiss())
        )
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(DialogImpl, Dialog)
class Download(AsyncBase):
    @property
    def page(self) -> "Page":
        """Download.page

        The page that the download belongs to.

        Returns
        -------
        Page
        """
        return mapping.from_impl(self._impl_obj.page)

    @property
    def url(self) -> str:
        """Download.url

        Downloaded url.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.url)

    @property
    def suggested_filename(self) -> str:
        """Download.suggested_filename

        Suggested filename for this download, typically computed by the browser
        from the [`Content-Disposition`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition)
        response header or the `download` attribute (see the spec on
        [whatwg](https://html.spec.whatwg.org/#downloading-resources)).
        Different browsers can use different logic for computing it.

        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.suggested_filename)

    async def delete(self) -> NoneType:
        """Download.delete

        Deletes the downloaded file. Will wait for the download to finish if
        necessary.
        """
        return mapping.from_maybe_impl(
            await self._async("download.delete", self._impl_obj.delete())
        )

    async def failure(self) -> typing.Optional[str]:
        """Download.failure

        Returns the download error, if any. Will wait for the download to
        finish if necessary.

        Returns
        -------
        Union[str, NoneType]
        """
        return mapping.from_maybe_impl(
            await self._async("download.failure", self._impl_obj.failure())
        )

    async def path(self) -> typing.Optional[pathlib.Path]:
        """Download.path

        Returns the path to the downloaded file in case of a successful
        download. Will wait for the download to finish if necessary. Throws
        when connected remotely.

        Note that the download's file name on disk is a random GUID; use
        `download.suggested_filename()` to get a suggested file name.

        Returns
        -------
        Union[pathlib.Path, NoneType]
        """
        return mapping.from_maybe_impl(
            await self._async("download.path", self._impl_obj.path())
        )

    async def save_as(self, path: typing.Union[str, pathlib.Path]) -> NoneType:
        """Download.save_as

        Copies the download to a user-specified path. Safe to call while the
        download is still in progress; will wait for it to finish if necessary.

        Parameters
        ----------
        path : Union[pathlib.Path, str]
            Path where the download should be copied.
        """
        impl_call = self._impl_obj.save_as(path=path)
        return mapping.from_maybe_impl(
            await self._async("download.save_as", impl_call)
        )

    async def cancel(self) -> NoneType:
        """Download.cancel

        Cancels the download. Will not fail if the download is already finished
        or canceled. Upon successful cancellation, `download.failure()`
        resolves to `'canceled'`.
        """
        return mapping.from_maybe_impl(
            await self._async("download.cancel", self._impl_obj.cancel())
        )
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(DownloadImpl, Download)
class Video(AsyncBase):
    async def path(self) -> pathlib.Path:
        """Video.path

        Returns the file system path this video will be recorded to. The video
        is guaranteed to be written to the filesystem upon closing the browser
        context. Throws when connected remotely.

        Returns
        -------
        pathlib.Path
        """
        return mapping.from_maybe_impl(
            await self._async("video.path", self._impl_obj.path())
        )

    async def save_as(self, path: typing.Union[str, pathlib.Path]) -> NoneType:
        """Video.save_as

        Saves the video to a user-specified path. Safe to call while the video
        is still in progress, or after the page has closed; waits until the
        page is closed and the video is fully saved.

        Parameters
        ----------
        path : Union[pathlib.Path, str]
            Path where the video should be saved.
        """
        impl_call = self._impl_obj.save_as(path=path)
        return mapping.from_maybe_impl(await self._async("video.save_as", impl_call))

    async def delete(self) -> NoneType:
        """Video.delete

        Deletes the video file. Will wait for the video to finish if necessary.
        """
        return mapping.from_maybe_impl(
            await self._async("video.delete", self._impl_obj.delete())
        )
# Bind the sync implementation class to its async API facade in the mapping table.
mapping.register(VideoImpl, Video)
class Page(AsyncContextManager):
@typing.overload
def on(
self,
event: Literal["close"],
f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when the page closes."""
@typing.overload
def on(
self,
event: Literal["console"],
f: typing.Callable[
["ConsoleMessage"], "typing.Union[typing.Awaitable[None], None]"
],
) -> None:
"""
Emitted when JavaScript within the page calls one of console API methods, e.g. `console.log` or `console.dir`. Also
emitted if the page throws an error or a warning.
The arguments passed into `console.log` appear as arguments on the event handler.
An example of handling `console` event:
```py
async def print_args(msg):
values = []
for arg in msg.args:
values.append(await arg.json_value())
print(values)
page.on(\"console\", print_args)
await page.evaluate(\"console.log('hello', 5, {foo: 'bar'})\")
```"""
@typing.overload
def on(
self,
event: Literal["crash"],
f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when the page crashes. Browser pages might crash if they try to allocate too much memory. When the page crashes,
ongoing and subsequent operations will throw.
The most common way to deal with crashes is to catch an exception:
```py
try:
# crash might happen during a click.
await page.click(\"button\")
# or while waiting for an event.
await page.wait_for_event(\"popup\")
except Error as e:
# when the page crashes, exception message contains \"crash\".
```"""
@typing.overload
def on(
self,
event: Literal["dialog"],
f: typing.Callable[["Dialog"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when a JavaScript dialog appears, such as `alert`, `prompt`, `confirm` or `beforeunload`. Listener **must**
either `dialog.accept()` or `dialog.dismiss()` the dialog - otherwise the page will
[freeze](https://developer.mozilla.org/en-US/docs/Web/JavaScript/EventLoop#never_blocking) waiting for the dialog, and
actions like click will never finish.
> NOTE: When no `page.on('dialog')` listeners are present, all dialogs are automatically dismissed."""
@typing.overload
def on(
self,
event: Literal["domcontentloaded"],
f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when the JavaScript [`DOMContentLoaded`](https://developer.mozilla.org/en-US/docs/Web/Events/DOMContentLoaded)
event is dispatched."""
@typing.overload
def on(
self,
event: Literal["download"],
f: typing.Callable[["Download"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when attachment download started. User can access basic file operations on downloaded content via the passed
`Download` instance."""
@typing.overload
def on(
self,
event: Literal["filechooser"],
f: typing.Callable[
["FileChooser"], "typing.Union[typing.Awaitable[None], None]"
],
) -> None:
"""
Emitted when a file chooser is supposed to appear, such as after clicking the `<input type=file>`. Playwright can
respond to it via setting the input files using `file_chooser.set_files()` that can be uploaded after that.
```py
page.on(\"filechooser\", lambda file_chooser: file_chooser.set_files(\"/tmp/myfile.pdf\"))
```"""
@typing.overload
def on(
self,
event: Literal["frameattached"],
f: typing.Callable[["Frame"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when a frame is attached."""
@typing.overload
def on(
self,
event: Literal["framedetached"],
f: typing.Callable[["Frame"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when a frame is detached."""
@typing.overload
def on(
self,
event: Literal["framenavigated"],
f: typing.Callable[["Frame"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when a frame is navigated to a new url."""
@typing.overload
def on(
self,
event: Literal["load"],
f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when the JavaScript [`load`](https://developer.mozilla.org/en-US/docs/Web/Events/load) event is dispatched."""
@typing.overload
def on(
self,
event: Literal["pageerror"],
f: typing.Callable[["Error"], "typing.Union[typing.Awaitable[None], None]"],
) -> None:
"""
Emitted when an uncaught exception happens within the page."""
    # Typed overloads for Page.on(). These stubs exist only to give type checkers
    # and IDEs the correct callback signature per event name; the untyped
    # definition at the end performs the actual registration via the
    # EventEmitter base class.
    @typing.overload
    def on(
        self,
        event: Literal["popup"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when the page opens a new tab or window. This event is emitted in addition to the
        `browser_context.on('page')`, but only for popups relevant to this page.

        The earliest moment that page is available is when it has navigated to the initial url. For example, when opening a
        popup with `window.open('http://example.com')`, this event will fire when the network request to \"http://example.com\" is
        done and its response has started loading in the popup.

        ```py
        async with page.expect_event(\"popup\") as page_info:
            page.evaluate(\"window.open('https://example.com')\")
        popup = await page_info.value
        print(await popup.evaluate(\"location.href\"))
        ```

        > NOTE: Use `page.wait_for_load_state()` to wait until the page gets to a particular state (you should not need it
        in most cases)."""

    @typing.overload
    def on(
        self,
        event: Literal["request"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a page issues a request. The [request] object is read-only. In order to intercept and mutate requests, see
        `page.route()` or `browser_context.route()`."""

    @typing.overload
    def on(
        self,
        event: Literal["requestfailed"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request fails, for example by timing out.

        > NOTE: HTTP Error responses, such as 404 or 503, are still successful responses from HTTP standpoint, so request will
        complete with `page.on('request_finished')` event and not with `page.on('request_failed')`. A request will only be
        considered failed when the client cannot get an HTTP response from the server, e.g. due to network error
        net::ERR_FAILED."""

    @typing.overload
    def on(
        self,
        event: Literal["requestfinished"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request finishes successfully after downloading the response body. For a successful response, the
        sequence of events is `request`, `response` and `requestfinished`."""

    @typing.overload
    def on(
        self,
        event: Literal["response"],
        f: typing.Callable[["Response"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when [response] status and headers are received for a request. For a successful response, the sequence of events
        is `request`, `response` and `requestfinished`."""

    @typing.overload
    def on(
        self,
        event: Literal["websocket"],
        f: typing.Callable[["WebSocket"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when `WebSocket` request is sent."""

    @typing.overload
    def on(
        self,
        event: Literal["worker"],
        f: typing.Callable[["Worker"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a dedicated [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) is spawned by the
        page."""

    # Actual implementation: dispatches to the EventEmitter base class.
    def on(
        self,
        event: str,
        f: typing.Callable[..., typing.Union[typing.Awaitable[None], None]],
    ) -> None:
        return super().on(event=event, f=f)
    # Typed overloads for Page.once(). Same pattern as Page.on(): the stubs give
    # type checkers the event-specific callback signature; the untyped definition
    # at the end performs the actual one-shot registration via the EventEmitter
    # base class.
    @typing.overload
    def once(
        self,
        event: Literal["close"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when the page closes."""

    @typing.overload
    def once(
        self,
        event: Literal["console"],
        f: typing.Callable[
            ["ConsoleMessage"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Emitted when JavaScript within the page calls one of console API methods, e.g. `console.log` or `console.dir`. Also
        emitted if the page throws an error or a warning.

        The arguments passed into `console.log` appear as arguments on the event handler.

        An example of handling `console` event:

        ```py
        async def print_args(msg):
            values = []
            for arg in msg.args:
                values.append(await arg.json_value())
            print(values)

        page.on(\"console\", print_args)
        await page.evaluate(\"console.log('hello', 5, {foo: 'bar'})\")
        ```"""

    @typing.overload
    def once(
        self,
        event: Literal["crash"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when the page crashes. Browser pages might crash if they try to allocate too much memory. When the page crashes,
        ongoing and subsequent operations will throw.

        The most common way to deal with crashes is to catch an exception:

        ```py
        try:
            # crash might happen during a click.
            await page.click(\"button\")
            # or while waiting for an event.
            await page.wait_for_event(\"popup\")
        except Error as e:
            # when the page crashes, exception message contains \"crash\".
        ```"""

    @typing.overload
    def once(
        self,
        event: Literal["dialog"],
        f: typing.Callable[["Dialog"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a JavaScript dialog appears, such as `alert`, `prompt`, `confirm` or `beforeunload`. Listener **must**
        either `dialog.accept()` or `dialog.dismiss()` the dialog - otherwise the page will
        [freeze](https://developer.mozilla.org/en-US/docs/Web/JavaScript/EventLoop#never_blocking) waiting for the dialog, and
        actions like click will never finish.

        > NOTE: When no `page.on('dialog')` listeners are present, all dialogs are automatically dismissed."""

    @typing.overload
    def once(
        self,
        event: Literal["domcontentloaded"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when the JavaScript [`DOMContentLoaded`](https://developer.mozilla.org/en-US/docs/Web/Events/DOMContentLoaded)
        event is dispatched."""

    @typing.overload
    def once(
        self,
        event: Literal["download"],
        f: typing.Callable[["Download"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when attachment download started. User can access basic file operations on downloaded content via the passed
        `Download` instance."""

    @typing.overload
    def once(
        self,
        event: Literal["filechooser"],
        f: typing.Callable[
            ["FileChooser"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Emitted when a file chooser is supposed to appear, such as after clicking the `<input type=file>`. Playwright can
        respond to it via setting the input files using `file_chooser.set_files()` that can be uploaded after that.

        ```py
        page.on(\"filechooser\", lambda file_chooser: file_chooser.set_files(\"/tmp/myfile.pdf\"))
        ```"""

    @typing.overload
    def once(
        self,
        event: Literal["frameattached"],
        f: typing.Callable[["Frame"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a frame is attached."""

    @typing.overload
    def once(
        self,
        event: Literal["framedetached"],
        f: typing.Callable[["Frame"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a frame is detached."""

    @typing.overload
    def once(
        self,
        event: Literal["framenavigated"],
        f: typing.Callable[["Frame"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a frame is navigated to a new url."""

    @typing.overload
    def once(
        self,
        event: Literal["load"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when the JavaScript [`load`](https://developer.mozilla.org/en-US/docs/Web/Events/load) event is dispatched."""

    @typing.overload
    def once(
        self,
        event: Literal["pageerror"],
        f: typing.Callable[["Error"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when an uncaught exception happens within the page."""

    @typing.overload
    def once(
        self,
        event: Literal["popup"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when the page opens a new tab or window. This event is emitted in addition to the
        `browser_context.on('page')`, but only for popups relevant to this page.

        The earliest moment that page is available is when it has navigated to the initial url. For example, when opening a
        popup with `window.open('http://example.com')`, this event will fire when the network request to \"http://example.com\" is
        done and its response has started loading in the popup.

        ```py
        async with page.expect_event(\"popup\") as page_info:
            page.evaluate(\"window.open('https://example.com')\")
        popup = await page_info.value
        print(await popup.evaluate(\"location.href\"))
        ```

        > NOTE: Use `page.wait_for_load_state()` to wait until the page gets to a particular state (you should not need it
        in most cases)."""

    @typing.overload
    def once(
        self,
        event: Literal["request"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a page issues a request. The [request] object is read-only. In order to intercept and mutate requests, see
        `page.route()` or `browser_context.route()`."""

    @typing.overload
    def once(
        self,
        event: Literal["requestfailed"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request fails, for example by timing out.

        > NOTE: HTTP Error responses, such as 404 or 503, are still successful responses from HTTP standpoint, so request will
        complete with `page.on('request_finished')` event and not with `page.on('request_failed')`. A request will only be
        considered failed when the client cannot get an HTTP response from the server, e.g. due to network error
        net::ERR_FAILED."""

    @typing.overload
    def once(
        self,
        event: Literal["requestfinished"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request finishes successfully after downloading the response body. For a successful response, the
        sequence of events is `request`, `response` and `requestfinished`."""

    @typing.overload
    def once(
        self,
        event: Literal["response"],
        f: typing.Callable[["Response"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when [response] status and headers are received for a request. For a successful response, the sequence of events
        is `request`, `response` and `requestfinished`."""

    @typing.overload
    def once(
        self,
        event: Literal["websocket"],
        f: typing.Callable[["WebSocket"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when `WebSocket` request is sent."""

    @typing.overload
    def once(
        self,
        event: Literal["worker"],
        f: typing.Callable[["Worker"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a dedicated [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) is spawned by the
        page."""

    # Actual implementation: dispatches to the EventEmitter base class.
    def once(
        self,
        event: str,
        f: typing.Callable[..., typing.Union[typing.Awaitable[None], None]],
    ) -> None:
        return super().once(event=event, f=f)
@property
def accessibility(self) -> "Accessibility":
"""Page.accessibility
Returns
-------
Accessibility
"""
return mapping.from_impl(self._impl_obj.accessibility)
@property
def keyboard(self) -> "Keyboard":
"""Page.keyboard
Returns
-------
Keyboard
"""
return mapping.from_impl(self._impl_obj.keyboard)
@property
def mouse(self) -> "Mouse":
"""Page.mouse
Returns
-------
Mouse
"""
return mapping.from_impl(self._impl_obj.mouse)
@property
def touchscreen(self) -> "Touchscreen":
"""Page.touchscreen
Returns
-------
Touchscreen
"""
return mapping.from_impl(self._impl_obj.touchscreen)
@property
def context(self) -> "BrowserContext":
"""Page.context
Get the browser context that the page belongs to.
Returns
-------
BrowserContext
"""
return mapping.from_impl(self._impl_obj.context)
@property
def main_frame(self) -> "Frame":
"""Page.main_frame
The page's main frame. Page is guaranteed to have a main frame which persists during navigations.
Returns
-------
Frame
"""
return mapping.from_impl(self._impl_obj.main_frame)
@property
def frames(self) -> typing.List["Frame"]:
"""Page.frames
An array of all frames attached to the page.
Returns
-------
List[Frame]
"""
return mapping.from_impl_list(self._impl_obj.frames)
@property
def url(self) -> str:
"""Page.url
Shortcut for main frame's `frame.url()`.
Returns
-------
str
"""
return mapping.from_maybe_impl(self._impl_obj.url)
@property
def viewport_size(self) -> typing.Optional[ViewportSize]:
"""Page.viewport_size
Returns
-------
Union[{width: int, height: int}, NoneType]
"""
return mapping.from_impl_nullable(self._impl_obj.viewport_size)
@property
def workers(self) -> typing.List["Worker"]:
"""Page.workers
This method returns all of the dedicated [WebWorkers](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API)
associated with the page.
> NOTE: This does not contain ServiceWorkers
Returns
-------
List[Worker]
"""
return mapping.from_impl_list(self._impl_obj.workers)
@property
def request(self) -> "APIRequestContext":
"""Page.request
API testing helper associated with this page. Requests made with this API will use page cookies.
Returns
-------
APIRequestContext
"""
return mapping.from_impl(self._impl_obj.request)
@property
def video(self) -> typing.Optional["Video"]:
"""Page.video
Video object associated with this page.
Returns
-------
Union[Video, NoneType]
"""
return mapping.from_impl_nullable(self._impl_obj.video)
async def opener(self) -> typing.Optional["Page"]:
"""Page.opener
Returns the opener for popup pages and `null` for others. If the opener has been closed already the returns `null`.
Returns
-------
Union[Page, NoneType]
"""
return mapping.from_impl_nullable(
await self._async("page.opener", self._impl_obj.opener())
)
def frame(
self,
name: str = None,
*,
url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]] = None
) -> typing.Optional["Frame"]:
"""Page.frame
Returns frame matching the specified criteria. Either `name` or `url` must be specified.
```py
frame = page.frame(name=\"frame-name\")
```
```py
frame = page.frame(url=r\".*domain.*\")
```
Parameters
----------
name : Union[str, NoneType]
Frame name specified in the `iframe`'s `name` attribute. Optional.
url : Union[Callable[[str], bool], Pattern, str, NoneType]
A glob pattern, regex pattern or predicate receiving frame's `url` as a [URL] object. Optional.
Returns
-------
Union[Frame, NoneType]
"""
return mapping.from_impl_nullable(
self._impl_obj.frame(name=name, url=self._wrap_handler(url))
)
def set_default_navigation_timeout(self, timeout: float) -> NoneType:
"""Page.set_default_navigation_timeout
This setting will change the default maximum navigation time for the following methods and related shortcuts:
- `page.go_back()`
- `page.go_forward()`
- `page.goto()`
- `page.reload()`
- `page.set_content()`
- `page.expect_navigation()`
- `page.wait_for_url()`
> NOTE: `page.set_default_navigation_timeout()` takes priority over `page.set_default_timeout()`,
`browser_context.set_default_timeout()` and `browser_context.set_default_navigation_timeout()`.
Parameters
----------
timeout : float
Maximum navigation time in milliseconds
"""
return mapping.from_maybe_impl(
self._impl_obj.set_default_navigation_timeout(timeout=timeout)
)
def set_default_timeout(self, timeout: float) -> NoneType:
"""Page.set_default_timeout
This setting will change the default maximum time for all the methods accepting `timeout` option.
> NOTE: `page.set_default_navigation_timeout()` takes priority over `page.set_default_timeout()`.
Parameters
----------
timeout : float
Maximum time in milliseconds
"""
return mapping.from_maybe_impl(
self._impl_obj.set_default_timeout(timeout=timeout)
)
async def query_selector(
self, selector: str, *, strict: bool = None
) -> typing.Optional["ElementHandle"]:
"""Page.query_selector
> NOTE: The use of `ElementHandle` is discouraged, use `Locator` objects and web-first assertions instead.
The method finds an element matching the specified selector within the page. If no elements match the selector, the
return value resolves to `null`. To wait for an element on the page, use `locator.wait_for()`.
Shortcut for main frame's `frame.query_selector()`.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
Returns
-------
Union[ElementHandle, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"page.query_selector",
self._impl_obj.query_selector(selector=selector, strict=strict),
)
)
async def query_selector_all(self, selector: str) -> typing.List["ElementHandle"]:
"""Page.query_selector_all
> NOTE: The use of `ElementHandle` is discouraged, use `Locator` objects and web-first assertions instead.
The method finds all elements matching the specified selector within the page. If no elements match the selector, the
return value resolves to `[]`.
Shortcut for main frame's `frame.query_selector_all()`.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
Returns
-------
List[ElementHandle]
"""
return mapping.from_impl_list(
await self._async(
"page.query_selector_all",
self._impl_obj.query_selector_all(selector=selector),
)
)
async def wait_for_selector(
self,
selector: str,
*,
timeout: float = None,
state: Literal["attached", "detached", "hidden", "visible"] = None,
strict: bool = None
) -> typing.Optional["ElementHandle"]:
"""Page.wait_for_selector
Returns when element specified by selector satisfies `state` option. Returns `null` if waiting for `hidden` or
`detached`.
> NOTE: Playwright automatically waits for element to be ready before performing an action. Using `Locator` objects and
web-first assertions make the code wait-for-selector-free.
Wait for the `selector` to satisfy `state` option (either appear/disappear from dom, or become visible/hidden). If at
the moment of calling the method `selector` already satisfies the condition, the method will return immediately. If the
selector doesn't satisfy the condition for the `timeout` milliseconds, the function will throw.
This method works across navigations:
```py
import asyncio
from playwright.async_api import async_playwright
async def run(playwright):
chromium = playwright.chromium
browser = await chromium.launch()
page = await browser.new_page()
for current_url in [\"https://google.com\", \"https://bbc.com\"]:
await page.goto(current_url, wait_until=\"domcontentloaded\")
element = await page.wait_for_selector(\"img\")
print(\"Loaded image: \" + str(await element.get_attribute(\"src\")))
await browser.close()
async def main():
async with async_playwright() as playwright:
await run(playwright)
asyncio.run(main())
```
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
state : Union["attached", "detached", "hidden", "visible", NoneType]
Defaults to `'visible'`. Can be either:
- `'attached'` - wait for element to be present in DOM.
- `'detached'` - wait for element to not be present in DOM.
- `'visible'` - wait for element to have non-empty bounding box and no `visibility:hidden`. Note that element without
any content or with `display:none` has an empty bounding box and is not considered visible.
- `'hidden'` - wait for element to be either detached from DOM, or have an empty bounding box or `visibility:hidden`.
This is opposite to the `'visible'` option.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
Returns
-------
Union[ElementHandle, NoneType]
"""
return mapping.from_impl_nullable(
await self._async(
"page.wait_for_selector",
self._impl_obj.wait_for_selector(
selector=selector, timeout=timeout, state=state, strict=strict
),
)
)
async def is_checked(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Page.is_checked
Returns whether the element is checked. Throws if the element is not a checkbox or radio input.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"page.is_checked",
self._impl_obj.is_checked(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_disabled(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Page.is_disabled
Returns whether the element is disabled, the opposite of [enabled](./actionability.md#enabled).
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"page.is_disabled",
self._impl_obj.is_disabled(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_editable(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Page.is_editable
Returns whether the element is [editable](./actionability.md#editable).
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"page.is_editable",
self._impl_obj.is_editable(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_enabled(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Page.is_enabled
Returns whether the element is [enabled](./actionability.md#enabled).
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"page.is_enabled",
self._impl_obj.is_enabled(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_hidden(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Page.is_hidden
Returns whether the element is hidden, the opposite of [visible](./actionability.md#visible). `selector` that does not
match any elements is considered hidden.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
**DEPRECATED** This option is ignored. `page.is_hidden()` does not wait for the element to become hidden and
returns immediately.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"page.is_hidden",
self._impl_obj.is_hidden(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def is_visible(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> bool:
"""Page.is_visible
Returns whether the element is [visible](./actionability.md#visible). `selector` that does not match any elements is
considered not visible.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
**DEPRECATED** This option is ignored. `page.is_visible()` does not wait for the element to become visible and
returns immediately.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"page.is_visible",
self._impl_obj.is_visible(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def dispatch_event(
self,
selector: str,
type: str,
event_init: typing.Dict = None,
*,
timeout: float = None,
strict: bool = None
) -> NoneType:
"""Page.dispatch_event
The snippet below dispatches the `click` event on the element. Regardless of the visibility state of the element,
`click` is dispatched. This is equivalent to calling
[element.click()](https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/click).
```py
await page.dispatch_event(\"button#submit\", \"click\")
```
Under the hood, it creates an instance of an event based on the given `type`, initializes it with `eventInit` properties
and dispatches it on the element. Events are `composed`, `cancelable` and bubble by default.
Since `eventInit` is event-specific, please refer to the events documentation for the lists of initial properties:
- [DragEvent](https://developer.mozilla.org/en-US/docs/Web/API/DragEvent/DragEvent)
- [FocusEvent](https://developer.mozilla.org/en-US/docs/Web/API/FocusEvent/FocusEvent)
- [KeyboardEvent](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/KeyboardEvent)
- [MouseEvent](https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/MouseEvent)
- [PointerEvent](https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/PointerEvent)
- [TouchEvent](https://developer.mozilla.org/en-US/docs/Web/API/TouchEvent/TouchEvent)
- [Event](https://developer.mozilla.org/en-US/docs/Web/API/Event/Event)
You can also specify `JSHandle` as the property value if you want live objects to be passed into the event:
```py
# note you can only create data_transfer in chromium and firefox
data_transfer = await page.evaluate_handle(\"new DataTransfer()\")
await page.dispatch_event(\"#source\", \"dragstart\", { \"dataTransfer\": data_transfer })
```
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
type : str
DOM event type: `"click"`, `"dragstart"`, etc.
event_init : Union[Dict, NoneType]
Optional event-specific initialization properties.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
"""
return mapping.from_maybe_impl(
await self._async(
"page.dispatch_event",
self._impl_obj.dispatch_event(
selector=selector,
type=type,
eventInit=mapping.to_impl(event_init),
timeout=timeout,
strict=strict,
),
)
)
async def evaluate(self, expression: str, arg: typing.Any = None) -> typing.Any:
"""Page.evaluate
Returns the value of the `expression` invocation.
If the function passed to the `page.evaluate()` returns a [Promise], then `page.evaluate()` would wait
for the promise to resolve and return its value.
If the function passed to the `page.evaluate()` returns a non-[Serializable] value, then
`page.evaluate()` resolves to `undefined`. Playwright also supports transferring some additional values that are
not serializable by `JSON`: `-0`, `NaN`, `Infinity`, `-Infinity`.
Passing argument to `expression`:
```py
result = await page.evaluate(\"([x, y]) => Promise.resolve(x * y)\", [7, 8])
print(result) # prints \"56\"
```
A string can also be passed in instead of a function:
```py
print(await page.evaluate(\"1 + 2\")) # prints \"3\"
x = 10
print(await page.evaluate(f\"1 + {x}\")) # prints \"11\"
```
`ElementHandle` instances can be passed as an argument to the `page.evaluate()`:
```py
body_handle = await page.evaluate(\"document.body\")
html = await page.evaluate(\"([body, suffix]) => body.innerHTML + suffix\", [body_handle, \"hello\"])
await body_handle.dispose()
```
Shortcut for main frame's `frame.evaluate()`.
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"page.evaluate",
self._impl_obj.evaluate(
expression=expression, arg=mapping.to_impl(arg)
),
)
)
async def evaluate_handle(
self, expression: str, arg: typing.Any = None
) -> "JSHandle":
"""Page.evaluate_handle
Returns the value of the `expression` invocation as a `JSHandle`.
The only difference between `page.evaluate()` and `page.evaluate_handle()` is that
`page.evaluate_handle()` returns `JSHandle`.
If the function passed to the `page.evaluate_handle()` returns a [Promise], then `page.evaluate_handle()`
would wait for the promise to resolve and return its value.
```py
a_window_handle = await page.evaluate_handle(\"Promise.resolve(window)\")
a_window_handle # handle for the window object.
```
A string can also be passed in instead of a function:
```py
a_handle = await page.evaluate_handle(\"document\") # handle for the \"document\"
```
`JSHandle` instances can be passed as an argument to the `page.evaluate_handle()`:
```py
a_handle = await page.evaluate_handle(\"document.body\")
result_handle = await page.evaluate_handle(\"body => body.innerHTML\", a_handle)
print(await result_handle.json_value())
await result_handle.dispose()
```
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
Returns
-------
JSHandle
"""
return mapping.from_impl(
await self._async(
"page.evaluate_handle",
self._impl_obj.evaluate_handle(
expression=expression, arg=mapping.to_impl(arg)
),
)
)
async def eval_on_selector(
self,
selector: str,
expression: str,
arg: typing.Any = None,
*,
strict: bool = None
) -> typing.Any:
"""Page.eval_on_selector
> NOTE: This method does not wait for the element to pass actionability checks and therefore can lead to the flaky
tests. Use `locator.evaluate()`, other `Locator` helper methods or web-first assertions instead.
The method finds an element matching the specified selector within the page and passes it as a first argument to
`expression`. If no elements match the selector, the method throws an error. Returns the value of `expression`.
If `expression` returns a [Promise], then `page.eval_on_selector()` would wait for the promise to resolve and
return its value.
Examples:
```py
search_value = await page.eval_on_selector(\"#search\", \"el => el.value\")
preload_href = await page.eval_on_selector(\"link[rel=preload]\", \"el => el.href\")
html = await page.eval_on_selector(\".main-container\", \"(e, suffix) => e.outer_html + suffix\", \"hello\")
```
Shortcut for main frame's `frame.eval_on_selector()`.
Parameters
----------
selector : str
A selector to query for. See [working with selectors](./selectors.md) for more details.
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"page.eval_on_selector",
self._impl_obj.eval_on_selector(
selector=selector,
expression=expression,
arg=mapping.to_impl(arg),
strict=strict,
),
)
)
async def eval_on_selector_all(
    self, selector: str, expression: str, arg: typing.Any = None
) -> typing.Any:
    """Page.eval_on_selector_all

    > NOTE: In most cases, `locator.evaluate_all()`, other `Locator` helper methods and web-first assertions do a
    better job.

    The method finds all elements matching the specified selector within the page and passes an array of matched elements as
    a first argument to `expression`. Returns the result of `expression` invocation.

    If `expression` returns a [Promise], then `page.eval_on_selector_all()` would wait for the promise to resolve and
    return its value.

    Examples:

    ```py
    div_counts = await page.eval_on_selector_all(\"div\", \"(divs, min) => divs.length >= min\", 10)
    ```

    Parameters
    ----------
    selector : str
        A selector to query for. See [working with selectors](./selectors.md) for more details.
    expression : str
        JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
        as a function. Otherwise, evaluated as an expression.
    arg : Union[Any, NoneType]
        Optional argument to pass to `expression`.

    Returns
    -------
    Any
    """
    # Marshal `arg` to the impl layer and convert the JS result back to Python.
    return mapping.from_maybe_impl(
        await self._async(
            "page.eval_on_selector_all",
            self._impl_obj.eval_on_selector_all(
                selector=selector, expression=expression, arg=mapping.to_impl(arg)
            ),
        )
    )
async def add_script_tag(
    self,
    *,
    url: str = None,
    path: typing.Union[str, pathlib.Path] = None,
    content: str = None,
    type: str = None
) -> "ElementHandle":
    """Page.add_script_tag

    Adds a `<script>` tag into the page with the desired url or content. Returns the added tag when the script's onload
    fires or when the script content was injected into frame.

    Shortcut for main frame's `frame.add_script_tag()`.

    Parameters
    ----------
    url : Union[str, NoneType]
        URL of a script to be added.
    path : Union[pathlib.Path, str, NoneType]
        Path to the JavaScript file to be injected into frame. If `path` is a relative path, then it is resolved relative to the
        current working directory.
    content : Union[str, NoneType]
        Raw JavaScript content to be injected into frame.
    type : Union[str, NoneType]
        Script type. Use 'module' in order to load a Javascript ES6 module. See
        [script](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script) for more details.

    Returns
    -------
    ElementHandle
    """
    # Impl returns a handle object; wrap it in the public ElementHandle type.
    return mapping.from_impl(
        await self._async(
            "page.add_script_tag",
            self._impl_obj.add_script_tag(
                url=url, path=path, content=content, type=type
            ),
        )
    )
async def add_style_tag(
    self,
    *,
    url: str = None,
    path: typing.Union[str, pathlib.Path] = None,
    content: str = None
) -> "ElementHandle":
    """Page.add_style_tag

    Adds a `<link rel=\"stylesheet\">` tag into the page with the desired url or a `<style type=\"text/css\">` tag with the
    content. Returns the added tag when the stylesheet's onload fires or when the CSS content was injected into frame.

    Shortcut for main frame's `frame.add_style_tag()`.

    Parameters
    ----------
    url : Union[str, NoneType]
        URL of the `<link>` tag.
    path : Union[pathlib.Path, str, NoneType]
        Path to the CSS file to be injected into frame. If `path` is a relative path, then it is resolved relative to the
        current working directory.
    content : Union[str, NoneType]
        Raw CSS content to be injected into frame.

    Returns
    -------
    ElementHandle
    """
    # Impl returns a handle object; wrap it in the public ElementHandle type.
    return mapping.from_impl(
        await self._async(
            "page.add_style_tag",
            self._impl_obj.add_style_tag(url=url, path=path, content=content),
        )
    )
async def expose_function(self, name: str, callback: typing.Callable) -> NoneType:
    """Page.expose_function

    The method adds a function called `name` on the `window` object of every frame in the page. When called, the function
    executes `callback` and returns a [Promise] which resolves to the return value of `callback`.

    If the `callback` returns a [Promise], it will be awaited.

    See `browser_context.expose_function()` for context-wide exposed function.

    > NOTE: Functions installed via `page.expose_function()` survive navigations.

    An example of adding a `sha256` function to the page:

    ```py
    import asyncio
    import hashlib
    from playwright.async_api import async_playwright

    def sha256(text):
        m = hashlib.sha256()
        m.update(bytes(text, \"utf8\"))
        return m.hexdigest()

    async def run(playwright):
        webkit = playwright.webkit
        browser = await webkit.launch(headless=False)
        page = await browser.new_page()
        await page.expose_function(\"sha256\", sha256)
        await page.set_content(\"\"\"
            <script>
              async function onClick() {
                document.querySelector('div').textContent = await window.sha256('PLAYWRIGHT');
              }
            </script>
            <button onclick=\"onClick()\">Click me</button>
            <div></div>
        \"\"\")
        await page.click(\"button\")

    async def main():
        async with async_playwright() as playwright:
            await run(playwright)

    asyncio.run(main())
    ```

    Parameters
    ----------
    name : str
        Name of the function on the window object
    callback : Callable
        Callback function which will be called in Playwright's context.
    """
    # _wrap_handler keeps exceptions raised inside `callback` attributable to
    # the user's code rather than the dispatcher internals.
    return mapping.from_maybe_impl(
        await self._async(
            "page.expose_function",
            self._impl_obj.expose_function(
                name=name, callback=self._wrap_handler(callback)
            ),
        )
    )
async def expose_binding(
    self, name: str, callback: typing.Callable, *, handle: bool = None
) -> NoneType:
    """Page.expose_binding

    The method adds a function called `name` on the `window` object of every frame in this page. When called, the function
    executes `callback` and returns a [Promise] which resolves to the return value of `callback`. If the `callback` returns
    a [Promise], it will be awaited.

    The first argument of the `callback` function contains information about the caller: `{ browserContext: BrowserContext,
    page: Page, frame: Frame }`.

    See `browser_context.expose_binding()` for the context-wide version.

    > NOTE: Functions installed via `page.expose_binding()` survive navigations.

    An example of exposing page URL to all frames in a page:

    ```py
    import asyncio
    from playwright.async_api import async_playwright

    async def run(playwright):
        webkit = playwright.webkit
        browser = await webkit.launch(headless=False)
        context = await browser.new_context()
        page = await context.new_page()
        await page.expose_binding(\"pageURL\", lambda source: source[\"page\"].url)
        await page.set_content(\"\"\"
            <script>
              async function onClick() {
                document.querySelector('div').textContent = await window.pageURL();
              }
            </script>
            <button onclick=\"onClick()\">Click me</button>
            <div></div>
        \"\"\")
        await page.click(\"button\")

    async def main():
        async with async_playwright() as playwright:
            await run(playwright)

    asyncio.run(main())
    ```

    An example of passing an element handle:

    ```py
    async def print_text(source, element):
        print(await element.text_content())

    await page.expose_binding(\"clicked\", print_text, handle=True)
    await page.set_content(\"\"\"
        <script>
          document.addEventListener('click', event => window.clicked(event.target));
        </script>
        <div>Click me</div>
        <div>Or click me</div>
    \"\"\")
    ```

    Parameters
    ----------
    name : str
        Name of the function on the window object.
    callback : Callable
        Callback function that will be called in the Playwright's context.
    handle : Union[bool, NoneType]
        Whether to pass the argument as a handle, instead of passing by value. When passing a handle, only one argument is
        supported. When passing by value, multiple arguments are supported.
    """
    # _wrap_handler keeps exceptions raised inside `callback` attributable to
    # the user's code rather than the dispatcher internals.
    return mapping.from_maybe_impl(
        await self._async(
            "page.expose_binding",
            self._impl_obj.expose_binding(
                name=name, callback=self._wrap_handler(callback), handle=handle
            ),
        )
    )
async def set_extra_http_headers(self, headers: typing.Dict[str, str]) -> NoneType:
    """Page.set_extra_http_headers

    The extra HTTP headers will be sent with every request the page initiates.

    > NOTE: `page.set_extra_http_headers()` does not guarantee the order of headers in the outgoing requests.

    Parameters
    ----------
    headers : Dict[str, str]
        An object containing additional HTTP headers to be sent with every request. All header values must be strings.
    """
    # The header dict is converted to its impl representation before dispatch.
    return mapping.from_maybe_impl(
        await self._async(
            "page.set_extra_http_headers",
            self._impl_obj.set_extra_http_headers(headers=mapping.to_impl(headers)),
        )
    )
async def content(self) -> str:
    """Page.content

    Gets the full HTML contents of the page, including the doctype.

    Returns
    -------
    str
    """
    # No arguments to marshal; simply dispatch and unwrap the string result.
    return mapping.from_maybe_impl(
        await self._async("page.content", self._impl_obj.content())
    )
async def set_content(
    self,
    html: str,
    *,
    timeout: float = None,
    wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None
) -> NoneType:
    """Page.set_content

    Replaces the page's document with the given HTML markup, waiting until the
    requested load state is reached.

    Parameters
    ----------
    html : str
        HTML markup to assign to the page.
    timeout : Union[float, NoneType]
        Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
        changed by using the `browser_context.set_default_navigation_timeout()`,
        `browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
        `page.set_default_timeout()` methods.
    wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
        When to consider operation succeeded, defaults to `load`. Events can be either:
        - `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
        - `'load'` - consider operation to be finished when the `load` event is fired.
        - `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
        - `'commit'` - consider operation to be finished when network response is received and the document started loading.
    """
    # Note the snake_case -> camelCase rename (waitUntil) at the impl boundary.
    return mapping.from_maybe_impl(
        await self._async(
            "page.set_content",
            self._impl_obj.set_content(
                html=html, timeout=timeout, waitUntil=wait_until
            ),
        )
    )
async def goto(
    self,
    url: str,
    *,
    timeout: float = None,
    wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None,
    referer: str = None
) -> typing.Optional["Response"]:
    """Page.goto

    Returns the main resource response. In case of multiple redirects, the navigation will resolve with the response of the
    last redirect.

    The method will throw an error if:
    - there's an SSL error (e.g. in case of self-signed certificates).
    - target URL is invalid.
    - the `timeout` is exceeded during navigation.
    - the remote server does not respond or is unreachable.
    - the main resource failed to load.

    The method will not throw an error when any valid HTTP status code is returned by the remote server, including 404 \"Not
    Found\" and 500 \"Internal Server Error\". The status code for such responses can be retrieved by calling
    `response.status()`.

    > NOTE: The method either throws an error or returns a main resource response. The only exceptions are navigation to
    `about:blank` or navigation to the same URL with a different hash, which would succeed and return `null`.
    > NOTE: Headless mode doesn't support navigation to a PDF document. See the
    [upstream issue](https://bugs.chromium.org/p/chromium/issues/detail?id=761295).

    Shortcut for main frame's `frame.goto()`

    Parameters
    ----------
    url : str
        URL to navigate page to. The url should include scheme, e.g. `https://`. When a `baseURL` via the context options was
        provided and the passed URL is a path, it gets merged via the
        [`new URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor.
    timeout : Union[float, NoneType]
        Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
        changed by using the `browser_context.set_default_navigation_timeout()`,
        `browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
        `page.set_default_timeout()` methods.
    wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
        When to consider operation succeeded, defaults to `load`. Events can be either:
        - `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
        - `'load'` - consider operation to be finished when the `load` event is fired.
        - `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
        - `'commit'` - consider operation to be finished when network response is received and the document started loading.
    referer : Union[str, NoneType]
        Referer header value. If provided it will take preference over the referer header value set by
        `page.set_extra_http_headers()`.

    Returns
    -------
    Union[Response, NoneType]
    """
    # from_impl_nullable preserves the `None` result for about:blank /
    # same-URL-hash navigations documented above.
    return mapping.from_impl_nullable(
        await self._async(
            "page.goto",
            self._impl_obj.goto(
                url=url, timeout=timeout, waitUntil=wait_until, referer=referer
            ),
        )
    )
async def reload(
    self,
    *,
    timeout: float = None,
    wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None
) -> typing.Optional["Response"]:
    """Page.reload

    This method reloads the current page, in the same way as if the user had triggered a browser refresh. Returns the main
    resource response. In case of multiple redirects, the navigation will resolve with the response of the last redirect.

    Parameters
    ----------
    timeout : Union[float, NoneType]
        Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
        changed by using the `browser_context.set_default_navigation_timeout()`,
        `browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
        `page.set_default_timeout()` methods.
    wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
        When to consider operation succeeded, defaults to `load`. Events can be either:
        - `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
        - `'load'` - consider operation to be finished when the `load` event is fired.
        - `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
        - `'commit'` - consider operation to be finished when network response is received and the document started loading.

    Returns
    -------
    Union[Response, NoneType]
    """
    # Response may legitimately be None; use the nullable wrapper.
    return mapping.from_impl_nullable(
        await self._async(
            "page.reload",
            self._impl_obj.reload(timeout=timeout, waitUntil=wait_until),
        )
    )
async def wait_for_load_state(
    self,
    state: Literal["domcontentloaded", "load", "networkidle"] = None,
    *,
    timeout: float = None
) -> NoneType:
    """Page.wait_for_load_state

    Returns when the required load state has been reached.

    This resolves when the page reaches a required load state, `load` by default. The navigation must have been committed
    when this method is called. If current document has already reached the required state, resolves immediately.

    ```py
    await page.click(\"button\") # click triggers navigation.
    await page.wait_for_load_state() # the promise resolves after \"load\" event.
    ```

    ```py
    async with page.expect_popup() as page_info:
        await page.click(\"button\") # click triggers a popup.
    popup = await page_info.value
    # Following resolves after \"domcontentloaded\" event.
    await popup.wait_for_load_state(\"domcontentloaded\")
    print(await popup.title()) # popup is ready to use.
    ```

    Shortcut for main frame's `frame.wait_for_load_state()`.

    Parameters
    ----------
    state : Union["domcontentloaded", "load", "networkidle", NoneType]
        Optional load state to wait for, defaults to `load`. If the state has been already reached while loading current
        document, the method resolves immediately. Can be one of:
        - `'load'` - wait for the `load` event to be fired.
        - `'domcontentloaded'` - wait for the `DOMContentLoaded` event to be fired.
        - `'networkidle'` - wait until there are no network connections for at least `500` ms.
    timeout : Union[float, NoneType]
        Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
        changed by using the `browser_context.set_default_navigation_timeout()`,
        `browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
        `page.set_default_timeout()` methods.
    """
    # Pure delegation; no argument marshalling needed beyond pass-through.
    return mapping.from_maybe_impl(
        await self._async(
            "page.wait_for_load_state",
            self._impl_obj.wait_for_load_state(state=state, timeout=timeout),
        )
    )
async def wait_for_url(
    self,
    url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]],
    *,
    wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None,
    timeout: float = None
) -> NoneType:
    """Page.wait_for_url

    Waits for the main frame to navigate to the given URL.

    ```py
    await page.click(\"a.delayed-navigation\") # clicking the link will indirectly cause a navigation
    await page.wait_for_url(\"**/target.html\")
    ```

    Shortcut for main frame's `frame.wait_for_url()`.

    Parameters
    ----------
    url : Union[Callable[[str], bool], Pattern, str]
        A glob pattern, regex pattern or predicate receiving [URL] to match while waiting for the navigation. Note that if the
        parameter is a string without wildcard characters, the method will wait for navigation to URL that is exactly equal to
        the string.
    wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
        When to consider operation succeeded, defaults to `load`. Events can be either:
        - `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
        - `'load'` - consider operation to be finished when the `load` event is fired.
        - `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
        - `'commit'` - consider operation to be finished when network response is received and the document started loading.
    timeout : Union[float, NoneType]
        Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
        changed by using the `browser_context.set_default_navigation_timeout()`,
        `browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
        `page.set_default_timeout()` methods.
    """
    # `url` may be a predicate, so it is wrapped like an event handler.
    return mapping.from_maybe_impl(
        await self._async(
            "page.wait_for_url",
            self._impl_obj.wait_for_url(
                url=self._wrap_handler(url), wait_until=wait_until, timeout=timeout
            ),
        )
    )
async def wait_for_event(
    self, event: str, predicate: typing.Callable = None, *, timeout: float = None
) -> typing.Any:
    """Page.wait_for_event

    > NOTE: In most cases, you should use `page.expect_event()`.

    Waits for given `event` to fire. If predicate is provided, it passes event's value into the `predicate` function and
    waits for `predicate(event)` to return a truthy value. Will throw an error if the page is closed before the `event` is
    fired.

    Parameters
    ----------
    event : str
        Event name, same one typically passed into `*.on(event)`.
    predicate : Union[Callable, NoneType]
        Receives the event data and resolves to truthy value when the waiting should resolve.
    timeout : Union[float, NoneType]
        Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
        value can be changed by using the `browser_context.set_default_timeout()`.

    Returns
    -------
    Any
    """
    # Wrap the user predicate so errors raised inside it surface correctly.
    return mapping.from_maybe_impl(
        await self._async(
            "page.wait_for_event",
            self._impl_obj.wait_for_event(
                event=event,
                predicate=self._wrap_handler(predicate),
                timeout=timeout,
            ),
        )
    )
async def go_back(
    self,
    *,
    timeout: float = None,
    wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None
) -> typing.Optional["Response"]:
    """Page.go_back

    Returns the main resource response. In case of multiple redirects, the navigation will resolve with the response of the
    last redirect. If can not go back, returns `null`.

    Navigate to the previous page in history.

    Parameters
    ----------
    timeout : Union[float, NoneType]
        Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
        changed by using the `browser_context.set_default_navigation_timeout()`,
        `browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
        `page.set_default_timeout()` methods.
    wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
        When to consider operation succeeded, defaults to `load`. Events can be either:
        - `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
        - `'load'` - consider operation to be finished when the `load` event is fired.
        - `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
        - `'commit'` - consider operation to be finished when network response is received and the document started loading.

    Returns
    -------
    Union[Response, NoneType]
    """
    # None is returned when there is no history entry to go back to.
    return mapping.from_impl_nullable(
        await self._async(
            "page.go_back",
            self._impl_obj.go_back(timeout=timeout, waitUntil=wait_until),
        )
    )
async def go_forward(
    self,
    *,
    timeout: float = None,
    wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None
) -> typing.Optional["Response"]:
    """Page.go_forward

    Returns the main resource response. In case of multiple redirects, the navigation will resolve with the response of the
    last redirect. If can not go forward, returns `null`.

    Navigate to the next page in history.

    Parameters
    ----------
    timeout : Union[float, NoneType]
        Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
        changed by using the `browser_context.set_default_navigation_timeout()`,
        `browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
        `page.set_default_timeout()` methods.
    wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
        When to consider operation succeeded, defaults to `load`. Events can be either:
        - `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
        - `'load'` - consider operation to be finished when the `load` event is fired.
        - `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
        - `'commit'` - consider operation to be finished when network response is received and the document started loading.

    Returns
    -------
    Union[Response, NoneType]
    """
    # None is returned when there is no history entry to go forward to.
    return mapping.from_impl_nullable(
        await self._async(
            "page.go_forward",
            self._impl_obj.go_forward(timeout=timeout, waitUntil=wait_until),
        )
    )
async def emulate_media(
    self,
    *,
    media: Literal["print", "screen"] = None,
    color_scheme: Literal["dark", "light", "no-preference"] = None,
    reduced_motion: Literal["no-preference", "reduce"] = None,
    forced_colors: Literal["active", "none"] = None
) -> NoneType:
    """Page.emulate_media

    This method changes the `CSS media type` through the `media` argument, and/or the `'prefers-colors-scheme'` media
    feature, using the `colorScheme` argument.

    ```py
    await page.evaluate(\"matchMedia('screen').matches\")
    # → True
    await page.evaluate(\"matchMedia('print').matches\")
    # → False

    await page.emulate_media(media=\"print\")
    await page.evaluate(\"matchMedia('screen').matches\")
    # → False
    await page.evaluate(\"matchMedia('print').matches\")
    # → True

    await page.emulate_media()
    await page.evaluate(\"matchMedia('screen').matches\")
    # → True
    await page.evaluate(\"matchMedia('print').matches\")
    # → False
    ```

    ```py
    await page.emulate_media(color_scheme=\"dark\")
    await page.evaluate(\"matchMedia('(prefers-color-scheme: dark)').matches\")
    # → True
    await page.evaluate(\"matchMedia('(prefers-color-scheme: light)').matches\")
    # → False
    await page.evaluate(\"matchMedia('(prefers-color-scheme: no-preference)').matches\")
    # → False
    ```

    Parameters
    ----------
    media : Union["print", "screen", NoneType]
        Changes the CSS media type of the page. The only allowed values are `'screen'`, `'print'` and `null`. Passing `null`
        disables CSS media emulation.
    color_scheme : Union["dark", "light", "no-preference", NoneType]
        Emulates `'prefers-colors-scheme'` media feature, supported values are `'light'`, `'dark'`, `'no-preference'`. Passing
        `null` disables color scheme emulation.
    reduced_motion : Union["no-preference", "reduce", NoneType]
        Emulates `'prefers-reduced-motion'` media feature, supported values are `'reduce'`, `'no-preference'`. Passing `null`
        disables reduced motion emulation.
    forced_colors : Union["active", "none", NoneType]
        Emulates `'forced-colors'` media feature, supported values are `'active'` and `'none'`. Passing `null` disables forced
        colors emulation.

        > NOTE: It's not supported in WebKit, see [here](https://bugs.webkit.org/show_bug.cgi?id=225281) in their issue tracker.
    """
    # snake_case public names map to the impl layer's camelCase parameters.
    return mapping.from_maybe_impl(
        await self._async(
            "page.emulate_media",
            self._impl_obj.emulate_media(
                media=media,
                colorScheme=color_scheme,
                reducedMotion=reduced_motion,
                forcedColors=forced_colors,
            ),
        )
    )
async def set_viewport_size(self, viewport_size: ViewportSize) -> NoneType:
    """Page.set_viewport_size

    In the case of multiple pages in a single browser, each page can have its own viewport size. However,
    `browser.new_context()` allows to set viewport size (and more) for all pages in the context at once.

    `page.set_viewport_size()` will resize the page. A lot of websites don't expect phones to change size, so you
    should set the viewport size before navigating to the page. `page.set_viewport_size()` will also reset `screen`
    size, use `browser.new_context()` with `screen` and `viewport` parameters if you need better control of these
    properties.

    ```py
    page = await browser.new_page()
    await page.set_viewport_size({\"width\": 640, \"height\": 480})
    await page.goto(\"https://example.com\")
    ```

    Parameters
    ----------
    viewport_size : {width: int, height: int}
    """
    # The TypedDict is forwarded as-is under the impl's camelCase name.
    return mapping.from_maybe_impl(
        await self._async(
            "page.set_viewport_size",
            self._impl_obj.set_viewport_size(viewportSize=viewport_size),
        )
    )
async def bring_to_front(self) -> NoneType:
    """Page.bring_to_front

    Brings page to front (activates tab).
    """
    # Fire-and-await; the impl call carries no arguments and returns nothing.
    return mapping.from_maybe_impl(
        await self._async("page.bring_to_front", self._impl_obj.bring_to_front())
    )
async def add_init_script(
    self, script: str = None, *, path: typing.Union[str, pathlib.Path] = None
) -> NoneType:
    """Page.add_init_script

    Adds a script which would be evaluated in one of the following scenarios:
    - Whenever the page is navigated.
    - Whenever the child frame is attached or navigated. In this case, the script is evaluated in the context of the newly
      attached frame.

    The script is evaluated after the document was created but before any of its scripts were run. This is useful to amend
    the JavaScript environment, e.g. to seed `Math.random`.

    An example of overriding `Math.random` before the page loads:

    ```py
    # in your playwright script, assuming the preload.js file is in same directory
    await page.add_init_script(path=\"./preload.js\")
    ```

    > NOTE: The order of evaluation of multiple scripts installed via `browser_context.add_init_script()` and
    `page.add_init_script()` is not defined.

    Parameters
    ----------
    script : Union[str, NoneType]
        Script to be evaluated in all pages in the browser context. Optional.
    path : Union[pathlib.Path, str, NoneType]
        Path to the JavaScript file. If `path` is a relative path, then it is resolved relative to the current working
        directory. Optional.
    """
    # Exactly one of `script` / `path` is expected; both are forwarded and the
    # impl layer resolves which source to use.
    return mapping.from_maybe_impl(
        await self._async(
            "page.add_init_script",
            self._impl_obj.add_init_script(script=script, path=path),
        )
    )
async def route(
    self,
    url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]],
    handler: typing.Union[
        typing.Callable[["Route"], typing.Any],
        typing.Callable[["Route", "Request"], typing.Any],
    ],
    *,
    times: int = None
) -> NoneType:
    """Page.route

    Routing provides the capability to modify network requests that are made by a page.

    Once routing is enabled, every request matching the url pattern will stall unless it's continued, fulfilled or aborted.

    > NOTE: The handler will only be called for the first url if the response is a redirect.
    > NOTE: `page.route()` will not intercept requests intercepted by Service Worker. See
    [this](https://github.com/microsoft/playwright/issues/1090) issue. We recommend disabling Service Workers when using
    request interception. Via `await context.addInitScript(() => delete window.navigator.serviceWorker);`

    An example of a naive handler that aborts all image requests:

    ```py
    page = await browser.new_page()
    await page.route(\"**/*.{png,jpg,jpeg}\", lambda route: route.abort())
    await page.goto(\"https://example.com\")
    await browser.close()
    ```

    or the same snippet using a regex pattern instead:

    ```py
    page = await browser.new_page()
    await page.route(re.compile(r\"(\\.png$)|(\\.jpg$)\"), lambda route: route.abort())
    await page.goto(\"https://example.com\")
    await browser.close()
    ```

    It is possible to examine the request to decide the route action. For example, mocking all requests that contain some
    post data, and leaving all other requests as is:

    ```py
    async def handle_route(route):
        if \"my-string\" in route.request.post_data:
            await route.fulfill(body=\"mocked-data\")
        else:
            await route.continue_()

    await page.route(\"/api/**\", handle_route)
    ```

    Page routes take precedence over browser context routes (set up with `browser_context.route()`) when request
    matches both handlers.

    To remove a route with its handler you can use `page.unroute()`.

    > NOTE: Enabling routing disables http cache.

    Parameters
    ----------
    url : Union[Callable[[str], bool], Pattern, str]
        A glob pattern, regex pattern or predicate receiving [URL] to match while routing. When a `baseURL` via the context
        options was provided and the passed URL is a path, it gets merged via the
        [`new URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor.
    handler : Union[Callable[[Route, Request], Any], Callable[[Route], Any]]
        handler function to route the request.
    times : Union[int, NoneType]
        How often a route should be used. By default it will be used every time.
    """
    # Both the URL matcher (which may be a predicate) and the handler are
    # wrapped so user-code errors are reported against the caller's frame.
    return mapping.from_maybe_impl(
        await self._async(
            "page.route",
            self._impl_obj.route(
                url=self._wrap_handler(url),
                handler=self._wrap_handler(handler),
                times=times,
            ),
        )
    )
async def unroute(
    self,
    url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]],
    handler: typing.Union[
        typing.Callable[["Route"], typing.Any],
        typing.Callable[["Route", "Request"], typing.Any],
    ] = None,
) -> NoneType:
    """Page.unroute

    Removes a route created with `page.route()`. When `handler` is not specified, removes all routes for the `url`.

    Parameters
    ----------
    url : Union[Callable[[str], bool], Pattern, str]
        A glob pattern, regex pattern or predicate receiving [URL] to match while routing.
    handler : Union[Callable[[Route, Request], Any], Callable[[Route], Any], NoneType]
        Optional handler function to route the request.
    """
    # Wrapping must mirror route() so the impl can match the same handler
    # identity when removing it.
    return mapping.from_maybe_impl(
        await self._async(
            "page.unroute",
            self._impl_obj.unroute(
                url=self._wrap_handler(url), handler=self._wrap_handler(handler)
            ),
        )
    )
async def screenshot(
    self,
    *,
    timeout: float = None,
    type: Literal["jpeg", "png"] = None,
    path: typing.Union[str, pathlib.Path] = None,
    quality: int = None,
    omit_background: bool = None,
    full_page: bool = None,
    clip: FloatRect = None,
    animations: Literal["disabled"] = None,
    mask: typing.List["Locator"] = None
) -> bytes:
    """Page.screenshot

    Returns the buffer with the captured screenshot.

    Parameters
    ----------
    timeout : Union[float, NoneType]
        Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
        using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
    type : Union["jpeg", "png", NoneType]
        Specify screenshot type, defaults to `png`.
    path : Union[pathlib.Path, str, NoneType]
        The file path to save the image to. The screenshot type will be inferred from file extension. If `path` is a relative
        path, then it is resolved relative to the current working directory. If no path is provided, the image won't be saved to
        the disk.
    quality : Union[int, NoneType]
        The quality of the image, between 0-100. Not applicable to `png` images.
    omit_background : Union[bool, NoneType]
        Hides default white background and allows capturing screenshots with transparency. Not applicable to `jpeg` images.
        Defaults to `false`.
    full_page : Union[bool, NoneType]
        When true, takes a screenshot of the full scrollable page, instead of the currently visible viewport. Defaults to
        `false`.
    clip : Union[{x: float, y: float, width: float, height: float}, NoneType]
        An object which specifies clipping of the resulting image. Should have the following fields:
    animations : Union["disabled", NoneType]
        When set to `"disabled"`, stops CSS animations, CSS transitions and Web Animations. Animations get different treatment
        depending on their duration:
        - finite animations are fast-forwarded to completion, so they'll fire `transitionend` event.
        - infinite animations are canceled to initial state, and then played over after the screenshot.
    mask : Union[List[Locator], NoneType]
        Specify locators that should be masked when the screenshot is taken. Masked elements will be overlayed with a pink box
        `#FF00FF` that completely covers its bounding box.

    Returns
    -------
    bytes
    """
    # `mask` holds public Locator wrappers; to_impl unwraps them for the impl
    # layer. snake_case options map onto the impl's camelCase names.
    return mapping.from_maybe_impl(
        await self._async(
            "page.screenshot",
            self._impl_obj.screenshot(
                timeout=timeout,
                type=type,
                path=path,
                quality=quality,
                omitBackground=omit_background,
                fullPage=full_page,
                clip=clip,
                animations=animations,
                mask=mapping.to_impl(mask),
            ),
        )
    )
async def title(self) -> str:
"""Page.title
Returns the page's title. Shortcut for main frame's `frame.title()`.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async("page.title", self._impl_obj.title())
)
async def close(self, *, run_before_unload: bool = None) -> NoneType:
"""Page.close
If `runBeforeUnload` is `false`, does not run any unload handlers and waits for the page to be closed. If
`runBeforeUnload` is `true` the method will run unload handlers, but will **not** wait for the page to close.
By default, `page.close()` **does not** run `beforeunload` handlers.
> NOTE: if `runBeforeUnload` is passed as true, a `beforeunload` dialog might be summoned and should be handled manually
via `page.on('dialog')` event.
Parameters
----------
run_before_unload : Union[bool, NoneType]
Defaults to `false`. Whether to run the
[before unload](https://developer.mozilla.org/en-US/docs/Web/Events/beforeunload) page handlers.
"""
return mapping.from_maybe_impl(
await self._async(
"page.close", self._impl_obj.close(runBeforeUnload=run_before_unload)
)
)
def is_closed(self) -> bool:
"""Page.is_closed
Indicates that the page has been closed.
Returns
-------
bool
"""
return mapping.from_maybe_impl(self._impl_obj.is_closed())
async def click(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
click_count: int = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None,
strict: bool = None
) -> NoneType:
"""Page.click
This method clicks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Shortcut for main frame's `frame.click()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
click_count : Union[int, NoneType]
defaults to 1. See [UIEvent.detail].
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
"""
return mapping.from_maybe_impl(
await self._async(
"page.click",
self._impl_obj.click(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
clickCount=click_count,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
strict=strict,
),
)
)
async def dblclick(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Page.dblclick
This method double clicks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to double click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set. Note that if the
first click of the `dblclick()` triggers a navigation event, this method will throw.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `page.dblclick()` dispatches two `click` events and a single `dblclick` event.
Shortcut for main frame's `frame.dblclick()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"page.dblclick",
self._impl_obj.dblclick(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def tap(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Page.tap
This method taps an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.touchscreen` to tap the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `page.tap()` requires that the `hasTouch` option of the browser context be set to true.
Shortcut for main frame's `frame.tap()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"page.tap",
self._impl_obj.tap(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def fill(
self,
selector: str,
value: str,
*,
timeout: float = None,
no_wait_after: bool = None,
strict: bool = None,
force: bool = None
) -> NoneType:
"""Page.fill
This method waits for an element matching `selector`, waits for [actionability](./actionability.md) checks, focuses the
element, fills it and triggers an `input` event after filling. Note that you can pass an empty string to clear the input
field.
If the target element is not an `<input>`, `<textarea>` or `[contenteditable]` element, this method throws an error.
However, if the element is inside the `<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be filled
instead.
To send fine-grained keyboard events, use `page.type()`.
Shortcut for main frame's `frame.fill()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
value : str
Value to fill for the `<input>`, `<textarea>` or `[contenteditable]` element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"page.fill",
self._impl_obj.fill(
selector=selector,
value=value,
timeout=timeout,
noWaitAfter=no_wait_after,
strict=strict,
force=force,
),
)
)
def locator(
self,
selector: str,
*,
has_text: typing.Union[str, typing.Pattern] = None,
has: "Locator" = None
) -> "Locator":
"""Page.locator
The method returns an element locator that can be used to perform actions on the page. Locator is resolved to the
element immediately before performing an action, so a series of actions on the same locator can in fact be performed on
different DOM elements. That would happen if the DOM structure between those actions has changed.
Shortcut for main frame's `frame.locator()`.
Parameters
----------
selector : str
A selector to use when resolving DOM element. See [working with selectors](./selectors.md) for more details.
has_text : Union[Pattern, str, NoneType]
Matches elements containing specified text somewhere inside, possibly in a child or a descendant element. For example,
`"Playwright"` matches `<article><div>Playwright</div></article>`.
has : Union[Locator, NoneType]
Matches elements containing an element that matches an inner locator. Inner locator is queried against the outer one.
For example, `article` that has `text=Playwright` matches `<article><div>Playwright</div></article>`.
Note that outer and inner locators must belong to the same frame. Inner locator must not contain `FrameLocator`s.
Returns
-------
Locator
"""
return mapping.from_impl(
self._impl_obj.locator(
selector=selector, has_text=has_text, has=has._impl_obj if has else None
)
)
def frame_locator(self, selector: str) -> "FrameLocator":
"""Page.frame_locator
When working with iframes, you can create a frame locator that will enter the iframe and allow selecting elements in
that iframe. Following snippet locates element with text \"Submit\" in the iframe with id `my-frame`, like `<iframe
id=\"my-frame\">`:
```py
locator = page.frame_locator(\"#my-iframe\").locator(\"text=Submit\")
await locator.click()
```
Parameters
----------
selector : str
A selector to use when resolving DOM element. See [working with selectors](./selectors.md) for more details.
Returns
-------
FrameLocator
"""
return mapping.from_impl(self._impl_obj.frame_locator(selector=selector))
async def focus(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> NoneType:
"""Page.focus
This method fetches an element with `selector` and focuses it. If there's no element matching `selector`, the method
waits until a matching element appears in the DOM.
Shortcut for main frame's `frame.focus()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"page.focus",
self._impl_obj.focus(selector=selector, strict=strict, timeout=timeout),
)
)
async def text_content(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> typing.Optional[str]:
"""Page.text_content
Returns `element.textContent`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"page.text_content",
self._impl_obj.text_content(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def inner_text(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> str:
"""Page.inner_text
Returns `element.innerText`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"page.inner_text",
self._impl_obj.inner_text(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def inner_html(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> str:
"""Page.inner_html
Returns `element.innerHTML`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"page.inner_html",
self._impl_obj.inner_html(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def get_attribute(
self, selector: str, name: str, *, strict: bool = None, timeout: float = None
) -> typing.Optional[str]:
"""Page.get_attribute
Returns element attribute value.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
name : str
Attribute name to get the value for.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"page.get_attribute",
self._impl_obj.get_attribute(
selector=selector, name=name, strict=strict, timeout=timeout
),
)
)
async def hover(
self,
selector: str,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Page.hover
This method hovers over an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to hover over the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Shortcut for main frame's `frame.hover()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"page.hover",
self._impl_obj.hover(
selector=selector,
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
strict=strict,
trial=trial,
),
)
)
async def drag_and_drop(
self,
source: str,
target: str,
*,
source_position: Position = None,
target_position: Position = None,
force: bool = None,
no_wait_after: bool = None,
timeout: float = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Page.drag_and_drop
Parameters
----------
source : str
target : str
source_position : Union[{x: float, y: float}, NoneType]
Clicks on the source element at this point relative to the top-left corner of the element's padding box. If not
specified, some visible point of the element is used.
target_position : Union[{x: float, y: float}, NoneType]
Drops on the target element at this point relative to the top-left corner of the element's padding box. If not
specified, some visible point of the element is used.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"page.drag_and_drop",
self._impl_obj.drag_and_drop(
source=source,
target=target,
sourcePosition=source_position,
targetPosition=target_position,
force=force,
noWaitAfter=no_wait_after,
timeout=timeout,
strict=strict,
trial=trial,
),
)
)
async def select_option(
self,
selector: str,
value: typing.Union[str, typing.List[str]] = None,
*,
index: typing.Union[int, typing.List[int]] = None,
label: typing.Union[str, typing.List[str]] = None,
element: typing.Union["ElementHandle", typing.List["ElementHandle"]] = None,
timeout: float = None,
no_wait_after: bool = None,
force: bool = None,
strict: bool = None
) -> typing.List[str]:
"""Page.select_option
This method waits for an element matching `selector`, waits for [actionability](./actionability.md) checks, waits until
all specified options are present in the `<select>` element and selects these options.
If the target element is not a `<select>` element, this method throws an error. However, if the element is inside the
`<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be used instead.
Returns the array of option values that have been successfully selected.
Triggers a `change` and `input` event once all the provided options have been selected.
```py
# single selection matching the value
await page.select_option(\"select#colors\", \"blue\")
# single selection matching the label
await page.select_option(\"select#colors\", label=\"blue\")
# multiple selection
await page.select_option(\"select#colors\", value=[\"red\", \"green\", \"blue\"])
```
Shortcut for main frame's `frame.select_option()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
value : Union[List[str], str, NoneType]
Options to select by value. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
index : Union[List[int], int, NoneType]
Options to select by index. Optional.
label : Union[List[str], str, NoneType]
Options to select by label. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
element : Union[ElementHandle, List[ElementHandle], NoneType]
Option elements to select. Optional.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
Returns
-------
List[str]
"""
return mapping.from_maybe_impl(
await self._async(
"page.select_option",
self._impl_obj.select_option(
selector=selector,
value=mapping.to_impl(value),
index=mapping.to_impl(index),
label=mapping.to_impl(label),
element=mapping.to_impl(element),
timeout=timeout,
noWaitAfter=no_wait_after,
force=force,
strict=strict,
),
)
)
async def input_value(
self, selector: str, *, strict: bool = None, timeout: float = None
) -> str:
"""Page.input_value
Returns `input.value` for the selected `<input>` or `<textarea>` or `<select>` element. Throws for non-input elements.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"page.input_value",
self._impl_obj.input_value(
selector=selector, strict=strict, timeout=timeout
),
)
)
async def set_input_files(
self,
selector: str,
files: typing.Union[
str,
pathlib.Path,
FilePayload,
typing.List[typing.Union[str, pathlib.Path]],
typing.List[FilePayload],
],
*,
timeout: float = None,
strict: bool = None,
no_wait_after: bool = None
) -> NoneType:
"""Page.set_input_files
This method expects `selector` to point to an
[input element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input).
Sets the value of the file input to these file paths or files. If some of the `filePaths` are relative paths, then they
are resolved relative to the the current working directory. For empty array, clears the selected files.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
files : Union[List[Union[pathlib.Path, str]], List[{name: str, mimeType: str, buffer: bytes}], pathlib.Path, str, {name: str, mimeType: str, buffer: bytes}]
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"page.set_input_files",
self._impl_obj.set_input_files(
selector=selector,
files=mapping.to_impl(files),
timeout=timeout,
strict=strict,
noWaitAfter=no_wait_after,
),
)
)
async def type(
self,
selector: str,
text: str,
*,
delay: float = None,
timeout: float = None,
no_wait_after: bool = None,
strict: bool = None
) -> NoneType:
"""Page.type
Sends a `keydown`, `keypress`/`input`, and `keyup` event for each character in the text. `page.type` can be used to send
fine-grained keyboard events. To fill values in form fields, use `page.fill()`.
To press a special key, like `Control` or `ArrowDown`, use `keyboard.press()`.
```py
await page.type(\"#mytextarea\", \"hello\") # types instantly
await page.type(\"#mytextarea\", \"world\", delay=100) # types slower, like a user
```
Shortcut for main frame's `frame.type()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
text : str
A text to type into a focused element.
delay : Union[float, NoneType]
Time to wait between key presses in milliseconds. Defaults to 0.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
"""
return mapping.from_maybe_impl(
await self._async(
"page.type",
self._impl_obj.type(
selector=selector,
text=text,
delay=delay,
timeout=timeout,
noWaitAfter=no_wait_after,
strict=strict,
),
)
)
async def press(
self,
selector: str,
key: str,
*,
delay: float = None,
timeout: float = None,
no_wait_after: bool = None,
strict: bool = None
) -> NoneType:
"""Page.press
Focuses the element, and then uses `keyboard.down()` and `keyboard.up()`.
`key` can specify the intended [keyboardEvent.key](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key)
value or a single character to generate the text for. A superset of the `key` values can be found
[here](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values). Examples of the keys are:
`F1` - `F12`, `Digit0`- `Digit9`, `KeyA`- `KeyZ`, `Backquote`, `Minus`, `Equal`, `Backslash`, `Backspace`, `Tab`,
`Delete`, `Escape`, `ArrowDown`, `End`, `Enter`, `Home`, `Insert`, `PageDown`, `PageUp`, `ArrowRight`, `ArrowUp`, etc.
Following modification shortcuts are also supported: `Shift`, `Control`, `Alt`, `Meta`, `ShiftLeft`.
Holding down `Shift` will type the text that corresponds to the `key` in the upper case.
If `key` is a single character, it is case-sensitive, so the values `a` and `A` will generate different respective
texts.
Shortcuts such as `key: \"Control+o\"` or `key: \"Control+Shift+T\"` are supported as well. When specified with the
modifier, modifier is pressed and being held while the subsequent key is being pressed.
```py
page = await browser.new_page()
await page.goto(\"https://keycode.info\")
await page.press(\"body\", \"A\")
await page.screenshot(path=\"a.png\")
await page.press(\"body\", \"ArrowLeft\")
await page.screenshot(path=\"arrow_left.png\")
await page.press(\"body\", \"Shift+O\")
await page.screenshot(path=\"o.png\")
await browser.close()
```
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
key : str
Name of the key to press or a character to generate, such as `ArrowLeft` or `a`.
delay : Union[float, NoneType]
Time to wait between `keydown` and `keyup` in milliseconds. Defaults to 0.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
"""
return mapping.from_maybe_impl(
await self._async(
"page.press",
self._impl_obj.press(
selector=selector,
key=key,
delay=delay,
timeout=timeout,
noWaitAfter=no_wait_after,
strict=strict,
),
)
)
async def check(
self,
selector: str,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Page.check
This method checks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Ensure that matched element is a checkbox or a radio input. If not, this method throws. If the element is already
checked, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Shortcut for main frame's `frame.check()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"page.check",
self._impl_obj.check(
selector=selector,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def uncheck(
self,
selector: str,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Page.uncheck
This method unchecks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Ensure that matched element is a checkbox or a radio input. If not, this method throws. If the element is already
unchecked, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now unchecked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Shortcut for main frame's `frame.uncheck()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"page.uncheck",
self._impl_obj.uncheck(
selector=selector,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
async def wait_for_timeout(self, timeout: float) -> NoneType:
"""Page.wait_for_timeout
Waits for the given `timeout` in milliseconds.
Note that `page.waitForTimeout()` should only be used for debugging. Tests using the timer in production are going to be
flaky. Use signals such as network events, selectors becoming visible and others instead.
```py
# wait for 1 second
await page.wait_for_timeout(1000)
```
Shortcut for main frame's `frame.wait_for_timeout()`.
Parameters
----------
timeout : float
A timeout to wait for
"""
return mapping.from_maybe_impl(
await self._async(
"page.wait_for_timeout",
self._impl_obj.wait_for_timeout(timeout=timeout),
)
)
async def wait_for_function(
self,
expression: str,
*,
arg: typing.Any = None,
timeout: float = None,
polling: typing.Union[float, Literal["raf"]] = None
) -> "JSHandle":
"""Page.wait_for_function
Returns when the `expression` returns a truthy value. It resolves to a JSHandle of the truthy value.
The `page.wait_for_function()` can be used to observe viewport size change:
```py
import asyncio
from playwright.async_api import async_playwright
async def run(playwright):
webkit = playwright.webkit
browser = await webkit.launch()
page = await browser.new_page()
await page.evaluate(\"window.x = 0; setTimeout(() => { window.x = 100 }, 1000);\")
await page.wait_for_function(\"() => window.x > 0\")
await browser.close()
async def main():
async with async_playwright() as playwright:
await run(playwright)
asyncio.run(main())
```
To pass an argument to the predicate of `page.wait_for_function()` function:
```py
selector = \".foo\"
await page.wait_for_function(\"selector => !!document.querySelector(selector)\", selector)
```
Shortcut for main frame's `frame.wait_for_function()`.
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
timeout : Union[float, NoneType]
maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
polling : Union["raf", float, NoneType]
If `polling` is `'raf'`, then `expression` is constantly executed in `requestAnimationFrame` callback. If `polling` is a
number, then it is treated as an interval in milliseconds at which the function would be executed. Defaults to `raf`.
Returns
-------
JSHandle
"""
return mapping.from_impl(
await self._async(
"page.wait_for_function",
self._impl_obj.wait_for_function(
expression=expression,
arg=mapping.to_impl(arg),
timeout=timeout,
polling=polling,
),
)
)
async def pause(self) -> NoneType:
"""Page.pause
Pauses script execution. Playwright will stop executing the script and wait for the user to either press 'Resume' button
in the page overlay or to call `playwright.resume()` in the DevTools console.
User can inspect selectors or perform manual steps while paused. Resume will continue running the original script from
the place it was paused.
> NOTE: This method requires Playwright to be started in a headed mode, with a falsy `headless` value in the
`browser_type.launch()`.
"""
return mapping.from_maybe_impl(
await self._async("page.pause", self._impl_obj.pause())
)
async def pdf(
self,
*,
scale: float = None,
display_header_footer: bool = None,
header_template: str = None,
footer_template: str = None,
print_background: bool = None,
landscape: bool = None,
page_ranges: str = None,
format: str = None,
width: typing.Union[str, float] = None,
height: typing.Union[str, float] = None,
prefer_css_page_size: bool = None,
margin: PdfMargins = None,
path: typing.Union[str, pathlib.Path] = None
) -> bytes:
"""Page.pdf
Returns the PDF buffer.
> NOTE: Generating a pdf is currently only supported in Chromium headless.
`page.pdf()` generates a pdf of the page with `print` css media. To generate a pdf with `screen` media, call
`page.emulate_media()` before calling `page.pdf()`:
> NOTE: By default, `page.pdf()` generates a pdf with modified colors for printing. Use the
[`-webkit-print-color-adjust`](https://developer.mozilla.org/en-US/docs/Web/CSS/-webkit-print-color-adjust) property to
force rendering of exact colors.
```py
# generates a pdf with \"screen\" media type.
await page.emulate_media(media=\"screen\")
await page.pdf(path=\"page.pdf\")
```
The `width`, `height`, and `margin` options accept values labeled with units. Unlabeled values are treated as pixels.
A few examples:
- `page.pdf({width: 100})` - prints with width set to 100 pixels
- `page.pdf({width: '100px'})` - prints with width set to 100 pixels
- `page.pdf({width: '10cm'})` - prints with width set to 10 centimeters.
All possible units are:
- `px` - pixel
- `in` - inch
- `cm` - centimeter
- `mm` - millimeter
The `format` options are:
- `Letter`: 8.5in x 11in
- `Legal`: 8.5in x 14in
- `Tabloid`: 11in x 17in
- `Ledger`: 17in x 11in
- `A0`: 33.1in x 46.8in
- `A1`: 23.4in x 33.1in
- `A2`: 16.54in x 23.4in
- `A3`: 11.7in x 16.54in
- `A4`: 8.27in x 11.7in
- `A5`: 5.83in x 8.27in
- `A6`: 4.13in x 5.83in
> NOTE: `headerTemplate` and `footerTemplate` markup have the following limitations: > 1. Script tags inside templates
are not evaluated. > 2. Page styles are not visible inside templates.
Parameters
----------
scale : Union[float, NoneType]
Scale of the webpage rendering. Defaults to `1`. Scale amount must be between 0.1 and 2.
display_header_footer : Union[bool, NoneType]
Display header and footer. Defaults to `false`.
header_template : Union[str, NoneType]
HTML template for the print header. Should be valid HTML markup with following classes used to inject printing values
into them:
- `'date'` formatted print date
- `'title'` document title
- `'url'` document location
- `'pageNumber'` current page number
- `'totalPages'` total pages in the document
footer_template : Union[str, NoneType]
HTML template for the print footer. Should use the same format as the `headerTemplate`.
print_background : Union[bool, NoneType]
Print background graphics. Defaults to `false`.
landscape : Union[bool, NoneType]
Paper orientation. Defaults to `false`.
page_ranges : Union[str, NoneType]
Paper ranges to print, e.g., '1-5, 8, 11-13'. Defaults to the empty string, which means print all pages.
format : Union[str, NoneType]
Paper format. If set, takes priority over `width` or `height` options. Defaults to 'Letter'.
width : Union[float, str, NoneType]
Paper width, accepts values labeled with units.
height : Union[float, str, NoneType]
Paper height, accepts values labeled with units.
prefer_css_page_size : Union[bool, NoneType]
Give any CSS `@page` size declared in the page priority over what is declared in `width` and `height` or `format`
options. Defaults to `false`, which will scale the content to fit the paper size.
margin : Union[{top: Union[float, str, NoneType], right: Union[float, str, NoneType], bottom: Union[float, str, NoneType], left: Union[float, str, NoneType]}, NoneType]
Paper margins, defaults to none.
path : Union[pathlib.Path, str, NoneType]
The file path to save the PDF to. If `path` is a relative path, then it is resolved relative to the current working
directory. If no path is provided, the PDF won't be saved to the disk.
Returns
-------
bytes
"""
return mapping.from_maybe_impl(
await self._async(
"page.pdf",
self._impl_obj.pdf(
scale=scale,
displayHeaderFooter=display_header_footer,
headerTemplate=header_template,
footerTemplate=footer_template,
printBackground=print_background,
landscape=landscape,
pageRanges=page_ranges,
format=format,
width=width,
height=height,
preferCSSPageSize=prefer_css_page_size,
margin=margin,
path=path,
),
)
)
def expect_event(
self, event: str, predicate: typing.Callable = None, *, timeout: float = None
) -> AsyncEventContextManager:
"""Page.expect_event
Waits for event to fire and passes its value into the predicate function. Returns when the predicate returns truthy
value. Will throw an error if the page is closed before the event is fired. Returns the event data value.
```py
async with page.expect_event(\"framenavigated\") as event_info:
await page.click(\"button\")
frame = await event_info.value
```
Parameters
----------
event : str
Event name, same one typically passed into `*.on(event)`.
predicate : Union[Callable, NoneType]
Receives the event data and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager
"""
return AsyncEventContextManager(
self._impl_obj.expect_event(
event=event, predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
def expect_console_message(
self,
predicate: typing.Optional[typing.Callable[["ConsoleMessage"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["ConsoleMessage"]:
"""Page.expect_console_message
Performs action and waits for a `ConsoleMessage` to be logged by in the page. If predicate is provided, it passes
`ConsoleMessage` value into the `predicate` function and waits for `predicate(message)` to return a truthy value. Will
throw an error if the page is closed before the `page.on('console')` event is fired.
Parameters
----------
predicate : Union[Callable[[ConsoleMessage], bool], NoneType]
Receives the `ConsoleMessage` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[ConsoleMessage]
"""
return AsyncEventContextManager(
self._impl_obj.expect_console_message(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
def expect_download(
self,
predicate: typing.Optional[typing.Callable[["Download"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["Download"]:
"""Page.expect_download
Performs action and waits for a new `Download`. If predicate is provided, it passes `Download` value into the
`predicate` function and waits for `predicate(download)` to return a truthy value. Will throw an error if the page is
closed before the download event is fired.
Parameters
----------
predicate : Union[Callable[[Download], bool], NoneType]
Receives the `Download` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[Download]
"""
return AsyncEventContextManager(
self._impl_obj.expect_download(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
def expect_file_chooser(
self,
predicate: typing.Optional[typing.Callable[["FileChooser"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["FileChooser"]:
"""Page.expect_file_chooser
Performs action and waits for a new `FileChooser` to be created. If predicate is provided, it passes `FileChooser` value
into the `predicate` function and waits for `predicate(fileChooser)` to return a truthy value. Will throw an error if
the page is closed before the file chooser is opened.
Parameters
----------
predicate : Union[Callable[[FileChooser], bool], NoneType]
Receives the `FileChooser` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[FileChooser]
"""
return AsyncEventContextManager(
self._impl_obj.expect_file_chooser(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
def expect_navigation(
self,
*,
url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]] = None,
wait_until: Literal["commit", "domcontentloaded", "load", "networkidle"] = None,
timeout: float = None
) -> AsyncEventContextManager["Response"]:
"""Page.expect_navigation
Waits for the main frame navigation and returns the main resource response. In case of multiple redirects, the
navigation will resolve with the response of the last redirect. In case of navigation to a different anchor or
navigation due to History API usage, the navigation will resolve with `null`.
This resolves when the page navigates to a new URL or reloads. It is useful for when you run code which will indirectly
cause the page to navigate. e.g. The click target has an `onclick` handler that triggers navigation from a `setTimeout`.
Consider this example:
```py
async with page.expect_navigation():
await page.click(\"a.delayed-navigation\") # clicking the link will indirectly cause a navigation
# Resolves after navigation has finished
```
> NOTE: Usage of the [History API](https://developer.mozilla.org/en-US/docs/Web/API/History_API) to change the URL is
considered a navigation.
Shortcut for main frame's `frame.expect_navigation()`.
Parameters
----------
url : Union[Callable[[str], bool], Pattern, str, NoneType]
A glob pattern, regex pattern or predicate receiving [URL] to match while waiting for the navigation. Note that if the
parameter is a string without wilcard characters, the method will wait for navigation to URL that is exactly equal to
the string.
wait_until : Union["commit", "domcontentloaded", "load", "networkidle", NoneType]
When to consider operation succeeded, defaults to `load`. Events can be either:
- `'domcontentloaded'` - consider operation to be finished when the `DOMContentLoaded` event is fired.
- `'load'` - consider operation to be finished when the `load` event is fired.
- `'networkidle'` - consider operation to be finished when there are no network connections for at least `500` ms.
- `'commit'` - consider operation to be finished when network response is received and the document started loading.
timeout : Union[float, NoneType]
Maximum operation time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be
changed by using the `browser_context.set_default_navigation_timeout()`,
`browser_context.set_default_timeout()`, `page.set_default_navigation_timeout()` or
`page.set_default_timeout()` methods.
Returns
-------
EventContextManager[Response]
"""
return AsyncEventContextManager(
self._impl_obj.expect_navigation(
url=self._wrap_handler(url), wait_until=wait_until, timeout=timeout
).future
)
def expect_popup(
self,
predicate: typing.Optional[typing.Callable[["Page"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["Page"]:
"""Page.expect_popup
Performs action and waits for a popup `Page`. If predicate is provided, it passes [Popup] value into the `predicate`
function and waits for `predicate(page)` to return a truthy value. Will throw an error if the page is closed before the
popup event is fired.
Parameters
----------
predicate : Union[Callable[[Page], bool], NoneType]
Receives the `Page` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[Page]
"""
return AsyncEventContextManager(
self._impl_obj.expect_popup(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
def expect_request(
self,
url_or_predicate: typing.Union[
str, typing.Pattern, typing.Callable[["Request"], bool]
],
*,
timeout: float = None
) -> AsyncEventContextManager["Request"]:
"""Page.expect_request
Waits for the matching request and returns it. See [waiting for event](./events.md#waiting-for-event) for more details
about events.
```py
async with page.expect_request(\"http://example.com/resource\") as first:
await page.click('button')
first_request = await first.value
# or with a lambda
async with page.expect_request(lambda request: request.url == \"http://example.com\" and request.method == \"get\") as second:
await page.click('img')
second_request = await second.value
```
Parameters
----------
url_or_predicate : Union[Callable[[Request], bool], Pattern, str]
Request URL string, regex or predicate receiving `Request` object. When a `baseURL` via the context options was provided
and the passed URL is a path, it gets merged via the
[`new URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor.
timeout : Union[float, NoneType]
Maximum wait time in milliseconds, defaults to 30 seconds, pass `0` to disable the timeout. The default value can be
changed by using the `page.set_default_timeout()` method.
Returns
-------
EventContextManager[Request]
"""
return AsyncEventContextManager(
self._impl_obj.expect_request(
url_or_predicate=self._wrap_handler(url_or_predicate), timeout=timeout
).future
)
def expect_request_finished(
self,
predicate: typing.Optional[typing.Callable[["Request"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["Request"]:
"""Page.expect_request_finished
Performs action and waits for a `Request` to finish loading. If predicate is provided, it passes `Request` value into
the `predicate` function and waits for `predicate(request)` to return a truthy value. Will throw an error if the page is
closed before the `page.on('request_finished')` event is fired.
Parameters
----------
predicate : Union[Callable[[Request], bool], NoneType]
Receives the `Request` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[Request]
"""
return AsyncEventContextManager(
self._impl_obj.expect_request_finished(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
def expect_response(
self,
url_or_predicate: typing.Union[
str, typing.Pattern, typing.Callable[["Response"], bool]
],
*,
timeout: float = None
) -> AsyncEventContextManager["Response"]:
"""Page.expect_response
Returns the matched response. See [waiting for event](./events.md#waiting-for-event) for more details about events.
```py
async with page.expect_response(\"https://example.com/resource\") as response_info:
await page.click(\"input\")
response = await response_info.value
return response.ok
# or with a lambda
async with page.expect_response(lambda response: response.url == \"https://example.com\" and response.status == 200) as response_info:
await page.click(\"input\")
response = await response_info.value
return response.ok
```
Parameters
----------
url_or_predicate : Union[Callable[[Response], bool], Pattern, str]
Request URL string, regex or predicate receiving `Response` object. When a `baseURL` via the context options was
provided and the passed URL is a path, it gets merged via the
[`new URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor.
timeout : Union[float, NoneType]
Maximum wait time in milliseconds, defaults to 30 seconds, pass `0` to disable the timeout. The default value can be
changed by using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
EventContextManager[Response]
"""
return AsyncEventContextManager(
self._impl_obj.expect_response(
url_or_predicate=self._wrap_handler(url_or_predicate), timeout=timeout
).future
)
def expect_websocket(
self,
predicate: typing.Optional[typing.Callable[["WebSocket"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["WebSocket"]:
"""Page.expect_websocket
Performs action and waits for a new `WebSocket`. If predicate is provided, it passes `WebSocket` value into the
`predicate` function and waits for `predicate(webSocket)` to return a truthy value. Will throw an error if the page is
closed before the WebSocket event is fired.
Parameters
----------
predicate : Union[Callable[[WebSocket], bool], NoneType]
Receives the `WebSocket` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[WebSocket]
"""
return AsyncEventContextManager(
self._impl_obj.expect_websocket(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
def expect_worker(
self,
predicate: typing.Optional[typing.Callable[["Worker"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["Worker"]:
"""Page.expect_worker
Performs action and waits for a new `Worker`. If predicate is provided, it passes `Worker` value into the `predicate`
function and waits for `predicate(worker)` to return a truthy value. Will throw an error if the page is closed before
the worker event is fired.
Parameters
----------
predicate : Union[Callable[[Worker], bool], NoneType]
Receives the `Worker` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[Worker]
"""
return AsyncEventContextManager(
self._impl_obj.expect_worker(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
async def set_checked(
self,
selector: str,
checked: bool,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
strict: bool = None,
trial: bool = None
) -> NoneType:
"""Page.set_checked
This method checks or unchecks an element matching `selector` by performing the following steps:
1. Find an element matching `selector`. If there is none, wait until a matching element is attached to the DOM.
1. Ensure that matched element is a checkbox or a radio input. If not, this method throws.
1. If the element already has the right checked state, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked or unchecked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Shortcut for main frame's `frame.set_checked()`.
Parameters
----------
selector : str
A selector to search for an element. If there are multiple elements satisfying the selector, the first will be used. See
[working with selectors](./selectors.md) for more details.
checked : bool
Whether to check or uncheck the checkbox.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
strict : Union[bool, NoneType]
When true, the call requires selector to resolve to a single element. If given selector resolves to more then one
element, the call throws an exception.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"page.set_checked",
self._impl_obj.set_checked(
selector=selector,
checked=checked,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
strict=strict,
trial=trial,
),
)
)
# Bind the async `Page` wrapper to its sync implementation class so `mapping`
# can translate between impl objects and public API objects.
mapping.register(PageImpl, Page)
class BrowserContext(AsyncContextManager):
    # The @typing.overload stubs below exist only to give type checkers a precise
    # callback signature for each supported event name; the final untyped `on`
    # definition is the runtime implementation and simply delegates to the base
    # event emitter via super().on().
    @typing.overload
    def on(
        self,
        event: Literal["backgroundpage"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        > NOTE: Only works with Chromium browser's persistent context.
        Emitted when new background page is created in the context.
        ```py
        background_page = await context.wait_for_event(\"backgroundpage\")
        ```"""
    @typing.overload
    def on(
        self,
        event: Literal["close"],
        f: typing.Callable[
            ["BrowserContext"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Emitted when Browser context gets closed. This might happen because of one of the following:
        - Browser context is closed.
        - Browser application is closed or crashed.
        - The `browser.close()` method was called."""
    @typing.overload
    def on(
        self,
        event: Literal["page"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        The event is emitted when a new Page is created in the BrowserContext. The page may still be loading. The event will
        also fire for popup pages. See also `page.on('popup')` to receive events about popups relevant to a specific page.
        The earliest moment that page is available is when it has navigated to the initial url. For example, when opening a
        popup with `window.open('http://example.com')`, this event will fire when the network request to \"http://example.com\" is
        done and its response has started loading in the popup.
        ```py
        async with context.expect_page() as page_info:
            await page.click(\"a[target=_blank]\"),
        page = await page_info.value
        print(await page.evaluate(\"location.href\"))
        ```
        > NOTE: Use `page.wait_for_load_state()` to wait until the page gets to a particular state (you should not need it
        in most cases)."""
    @typing.overload
    def on(
        self,
        event: Literal["request"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request is issued from any pages created through this context. The [request] object is read-only. To only
        listen for requests from a particular page, use `page.on('request')`.
        In order to intercept and mutate requests, see `browser_context.route()` or `page.route()`."""
    @typing.overload
    def on(
        self,
        event: Literal["requestfailed"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request fails, for example by timing out. To only listen for failed requests from a particular page, use
        `page.on('request_failed')`.
        > NOTE: HTTP Error responses, such as 404 or 503, are still successful responses from HTTP standpoint, so request will
        complete with `browser_context.on('request_finished')` event and not with `browser_context.on('request_failed')`."""
    @typing.overload
    def on(
        self,
        event: Literal["requestfinished"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request finishes successfully after downloading the response body. For a successful response, the
        sequence of events is `request`, `response` and `requestfinished`. To listen for successful requests from a particular
        page, use `page.on('request_finished')`."""
    @typing.overload
    def on(
        self,
        event: Literal["response"],
        f: typing.Callable[["Response"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when [response] status and headers are received for a request. For a successful response, the sequence of events
        is `request`, `response` and `requestfinished`. To listen for response events from a particular page, use
        `page.on('response')`."""
    @typing.overload
    def on(
        self,
        event: Literal["serviceworker"],
        f: typing.Callable[["Worker"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        > NOTE: Service workers are only supported on Chromium-based browsers.
        Emitted when new service worker is created in the context."""
    # Runtime implementation: accepts any event name and forwards to the emitter.
    def on(
        self,
        event: str,
        f: typing.Callable[..., typing.Union[typing.Awaitable[None], None]],
    ) -> None:
        return super().on(event=event, f=f)
    # The @typing.overload stubs below mirror `on` but register one-shot
    # listeners; the final untyped `once` definition is the runtime
    # implementation and delegates to the base emitter via super().once().
    @typing.overload
    def once(
        self,
        event: Literal["backgroundpage"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        > NOTE: Only works with Chromium browser's persistent context.
        Emitted when new background page is created in the context.
        ```py
        background_page = await context.wait_for_event(\"backgroundpage\")
        ```"""
    @typing.overload
    def once(
        self,
        event: Literal["close"],
        f: typing.Callable[
            ["BrowserContext"], "typing.Union[typing.Awaitable[None], None]"
        ],
    ) -> None:
        """
        Emitted when Browser context gets closed. This might happen because of one of the following:
        - Browser context is closed.
        - Browser application is closed or crashed.
        - The `browser.close()` method was called."""
    @typing.overload
    def once(
        self,
        event: Literal["page"],
        f: typing.Callable[["Page"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        The event is emitted when a new Page is created in the BrowserContext. The page may still be loading. The event will
        also fire for popup pages. See also `page.on('popup')` to receive events about popups relevant to a specific page.
        The earliest moment that page is available is when it has navigated to the initial url. For example, when opening a
        popup with `window.open('http://example.com')`, this event will fire when the network request to \"http://example.com\" is
        done and its response has started loading in the popup.
        ```py
        async with context.expect_page() as page_info:
            await page.click(\"a[target=_blank]\"),
        page = await page_info.value
        print(await page.evaluate(\"location.href\"))
        ```
        > NOTE: Use `page.wait_for_load_state()` to wait until the page gets to a particular state (you should not need it
        in most cases)."""
    @typing.overload
    def once(
        self,
        event: Literal["request"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request is issued from any pages created through this context. The [request] object is read-only. To only
        listen for requests from a particular page, use `page.on('request')`.
        In order to intercept and mutate requests, see `browser_context.route()` or `page.route()`."""
    @typing.overload
    def once(
        self,
        event: Literal["requestfailed"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request fails, for example by timing out. To only listen for failed requests from a particular page, use
        `page.on('request_failed')`.
        > NOTE: HTTP Error responses, such as 404 or 503, are still successful responses from HTTP standpoint, so request will
        complete with `browser_context.on('request_finished')` event and not with `browser_context.on('request_failed')`."""
    @typing.overload
    def once(
        self,
        event: Literal["requestfinished"],
        f: typing.Callable[["Request"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when a request finishes successfully after downloading the response body. For a successful response, the
        sequence of events is `request`, `response` and `requestfinished`. To listen for successful requests from a particular
        page, use `page.on('request_finished')`."""
    @typing.overload
    def once(
        self,
        event: Literal["response"],
        f: typing.Callable[["Response"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when [response] status and headers are received for a request. For a successful response, the sequence of events
        is `request`, `response` and `requestfinished`. To listen for response events from a particular page, use
        `page.on('response')`."""
    @typing.overload
    def once(
        self,
        event: Literal["serviceworker"],
        f: typing.Callable[["Worker"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        > NOTE: Service workers are only supported on Chromium-based browsers.
        Emitted when new service worker is created in the context."""
    # Runtime implementation: accepts any event name and forwards to the emitter.
    def once(
        self,
        event: str,
        f: typing.Callable[..., typing.Union[typing.Awaitable[None], None]],
    ) -> None:
        return super().once(event=event, f=f)
@property
def pages(self) -> typing.List["Page"]:
"""BrowserContext.pages
Returns all open pages in the context.
Returns
-------
List[Page]
"""
return mapping.from_impl_list(self._impl_obj.pages)
@property
def browser(self) -> typing.Optional["Browser"]:
"""BrowserContext.browser
Returns the browser instance of the context. If it was launched as a persistent context null gets returned.
Returns
-------
Union[Browser, NoneType]
"""
return mapping.from_impl_nullable(self._impl_obj.browser)
@property
def background_pages(self) -> typing.List["Page"]:
"""BrowserContext.background_pages
> NOTE: Background pages are only supported on Chromium-based browsers.
All existing background pages in the context.
Returns
-------
List[Page]
"""
return mapping.from_impl_list(self._impl_obj.background_pages)
@property
def service_workers(self) -> typing.List["Worker"]:
"""BrowserContext.service_workers
> NOTE: Service workers are only supported on Chromium-based browsers.
All existing service workers in the context.
Returns
-------
List[Worker]
"""
return mapping.from_impl_list(self._impl_obj.service_workers)
@property
def tracing(self) -> "Tracing":
"""BrowserContext.tracing
Returns
-------
Tracing
"""
return mapping.from_impl(self._impl_obj.tracing)
@property
def request(self) -> "APIRequestContext":
"""BrowserContext.request
API testing helper associated with this context. Requests made with this API will use context cookies.
Returns
-------
APIRequestContext
"""
return mapping.from_impl(self._impl_obj.request)
def set_default_navigation_timeout(self, timeout: float) -> NoneType:
"""BrowserContext.set_default_navigation_timeout
This setting will change the default maximum navigation time for the following methods and related shortcuts:
- `page.go_back()`
- `page.go_forward()`
- `page.goto()`
- `page.reload()`
- `page.set_content()`
- `page.expect_navigation()`
> NOTE: `page.set_default_navigation_timeout()` and `page.set_default_timeout()` take priority over
`browser_context.set_default_navigation_timeout()`.
Parameters
----------
timeout : float
Maximum navigation time in milliseconds
"""
return mapping.from_maybe_impl(
self._impl_obj.set_default_navigation_timeout(timeout=timeout)
)
def set_default_timeout(self, timeout: float) -> NoneType:
"""BrowserContext.set_default_timeout
This setting will change the default maximum time for all the methods accepting `timeout` option.
> NOTE: `page.set_default_navigation_timeout()`, `page.set_default_timeout()` and
`browser_context.set_default_navigation_timeout()` take priority over `browser_context.set_default_timeout()`.
Parameters
----------
timeout : float
Maximum time in milliseconds
"""
return mapping.from_maybe_impl(
self._impl_obj.set_default_timeout(timeout=timeout)
)
async def new_page(self) -> "Page":
"""BrowserContext.new_page
Creates a new page in the browser context.
Returns
-------
Page
"""
return mapping.from_impl(
await self._async("browser_context.new_page", self._impl_obj.new_page())
)
async def cookies(
self, urls: typing.Union[str, typing.List[str]] = None
) -> typing.List[Cookie]:
"""BrowserContext.cookies
If no URLs are specified, this method returns all cookies. If URLs are specified, only cookies that affect those URLs
are returned.
Parameters
----------
urls : Union[List[str], str, NoneType]
Optional list of URLs.
Returns
-------
List[{name: str, value: str, domain: str, path: str, expires: float, httpOnly: bool, secure: bool, sameSite: Union["Lax", "None", "Strict"]}]
"""
return mapping.from_impl_list(
await self._async(
"browser_context.cookies",
self._impl_obj.cookies(urls=mapping.to_impl(urls)),
)
)
async def add_cookies(self, cookies: typing.List[SetCookieParam]) -> NoneType:
"""BrowserContext.add_cookies
Adds cookies into this browser context. All pages within this context will have these cookies installed. Cookies can be
obtained via `browser_context.cookies()`.
```py
await browser_context.add_cookies([cookie_object1, cookie_object2])
```
Parameters
----------
cookies : List[{name: str, value: str, url: Union[str, NoneType], domain: Union[str, NoneType], path: Union[str, NoneType], expires: Union[float, NoneType], httpOnly: Union[bool, NoneType], secure: Union[bool, NoneType], sameSite: Union["Lax", "None", "Strict", NoneType]}]
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.add_cookies",
self._impl_obj.add_cookies(cookies=mapping.to_impl(cookies)),
)
)
async def clear_cookies(self) -> NoneType:
"""BrowserContext.clear_cookies
Clears context cookies.
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.clear_cookies", self._impl_obj.clear_cookies()
)
)
async def grant_permissions(
self, permissions: typing.List[str], *, origin: str = None
) -> NoneType:
"""BrowserContext.grant_permissions
Grants specified permissions to the browser context. Only grants corresponding permissions to the given origin if
specified.
Parameters
----------
permissions : List[str]
A permission or an array of permissions to grant. Permissions can be one of the following values:
- `'geolocation'`
- `'midi'`
- `'midi-sysex'` (system-exclusive midi)
- `'notifications'`
- `'camera'`
- `'microphone'`
- `'background-sync'`
- `'ambient-light-sensor'`
- `'accelerometer'`
- `'gyroscope'`
- `'magnetometer'`
- `'accessibility-events'`
- `'clipboard-read'`
- `'clipboard-write'`
- `'payment-handler'`
origin : Union[str, NoneType]
The [origin] to grant permissions to, e.g. "https://example.com".
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.grant_permissions",
self._impl_obj.grant_permissions(
permissions=mapping.to_impl(permissions), origin=origin
),
)
)
    async def clear_permissions(self) -> NoneType:
        """BrowserContext.clear_permissions
        Clears all permission overrides for the browser context.
        ```py
        context = await browser.new_context()
        await context.grant_permissions([\"clipboard-read\"])
        # do stuff ..
        await context.clear_permissions()
        ```
        """
        return mapping.from_maybe_impl(
            await self._async(
                "browser_context.clear_permissions", self._impl_obj.clear_permissions()
            )
        )
async def set_geolocation(self, geolocation: Geolocation = None) -> NoneType:
"""BrowserContext.set_geolocation
Sets the context's geolocation. Passing `null` or `undefined` emulates position unavailable.
```py
await browser_context.set_geolocation({\"latitude\": 59.95, \"longitude\": 30.31667})
```
> NOTE: Consider using `browser_context.grant_permissions()` to grant permissions for the browser context pages to
read its geolocation.
Parameters
----------
geolocation : Union[{latitude: float, longitude: float, accuracy: Union[float, NoneType]}, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.set_geolocation",
self._impl_obj.set_geolocation(geolocation=geolocation),
)
)
async def set_extra_http_headers(self, headers: typing.Dict[str, str]) -> NoneType:
"""BrowserContext.set_extra_http_headers
The extra HTTP headers will be sent with every request initiated by any page in the context. These headers are merged
with page-specific extra HTTP headers set with `page.set_extra_http_headers()`. If page overrides a particular
header, page-specific header value will be used instead of the browser context header value.
> NOTE: `browser_context.set_extra_http_headers()` does not guarantee the order of headers in the outgoing requests.
Parameters
----------
headers : Dict[str, str]
An object containing additional HTTP headers to be sent with every request. All header values must be strings.
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.set_extra_http_headers",
self._impl_obj.set_extra_http_headers(headers=mapping.to_impl(headers)),
)
)
async def set_offline(self, offline: bool) -> NoneType:
"""BrowserContext.set_offline
Parameters
----------
offline : bool
Whether to emulate network being offline for the browser context.
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.set_offline",
self._impl_obj.set_offline(offline=offline),
)
)
async def add_init_script(
self, script: str = None, *, path: typing.Union[str, pathlib.Path] = None
) -> NoneType:
"""BrowserContext.add_init_script
Adds a script which would be evaluated in one of the following scenarios:
- Whenever a page is created in the browser context or is navigated.
- Whenever a child frame is attached or navigated in any page in the browser context. In this case, the script is
evaluated in the context of the newly attached frame.
The script is evaluated after the document was created but before any of its scripts were run. This is useful to amend
the JavaScript environment, e.g. to seed `Math.random`.
An example of overriding `Math.random` before the page loads:
```py
# in your playwright script, assuming the preload.js file is in same directory.
await browser_context.add_init_script(path=\"preload.js\")
```
> NOTE: The order of evaluation of multiple scripts installed via `browser_context.add_init_script()` and
`page.add_init_script()` is not defined.
Parameters
----------
script : Union[str, NoneType]
Script to be evaluated in all pages in the browser context. Optional.
path : Union[pathlib.Path, str, NoneType]
Path to the JavaScript file. If `path` is a relative path, then it is resolved relative to the current working
directory. Optional.
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.add_init_script",
self._impl_obj.add_init_script(script=script, path=path),
)
)
    async def expose_binding(
        self, name: str, callback: typing.Callable, *, handle: bool = None
    ) -> NoneType:
        """BrowserContext.expose_binding
        The method adds a function called `name` on the `window` object of every frame in every page in the context. When
        called, the function executes `callback` and returns a [Promise] which resolves to the return value of `callback`. If
        the `callback` returns a [Promise], it will be awaited.
        The first argument of the `callback` function contains information about the caller: `{ browserContext: BrowserContext,
        page: Page, frame: Frame }`.
        See `page.expose_binding()` for page-only version.
        An example of exposing page URL to all frames in all pages in the context:
        ```py
        import asyncio
        from playwright.async_api import async_playwright
        async def run(playwright):
            webkit = playwright.webkit
            browser = await webkit.launch(headless=False)
            context = await browser.new_context()
            await context.expose_binding(\"pageURL\", lambda source: source[\"page\"].url)
            page = await context.new_page()
            await page.set_content(\"\"\"
            <script>
              async function onClick() {
                document.querySelector('div').textContent = await window.pageURL();
              }
            </script>
            <button onclick=\"onClick()\">Click me</button>
            <div></div>
            \"\"\")
            await page.click(\"button\")
        async def main():
            async with async_playwright() as playwright:
                await run(playwright)
        asyncio.run(main())
        ```
        An example of passing an element handle:
        ```py
        async def print(source, element):
            print(await element.text_content())
        await context.expose_binding(\"clicked\", print, handle=True)
        await page.set_content(\"\"\"
        <script>
          document.addEventListener('click', event => window.clicked(event.target));
        </script>
        <div>Click me</div>
        <div>Or click me</div>
        \"\"\")
        ```
        Parameters
        ----------
        name : str
            Name of the function on the window object.
        callback : Callable
            Callback function that will be called in the Playwright's context.
        handle : Union[bool, NoneType]
            Whether to pass the argument as a handle, instead of passing by value. When passing a handle, only one argument is
            supported. When passing by value, multiple arguments are supported.
        """
        return mapping.from_maybe_impl(
            await self._async(
                "browser_context.expose_binding",
                self._impl_obj.expose_binding(
                    name=name, callback=self._wrap_handler(callback), handle=handle
                ),
            )
        )
async def expose_function(self, name: str, callback: typing.Callable) -> NoneType:
"""BrowserContext.expose_function
The method adds a function called `name` on the `window` object of every frame in every page in the context. When
called, the function executes `callback` and returns a [Promise] which resolves to the return value of `callback`.
If the `callback` returns a [Promise], it will be awaited.
See `page.expose_function()` for page-only version.
An example of adding a `sha256` function to all pages in the context:
```py
import asyncio
import hashlib
from playwright.async_api import async_playwright
def sha256(text):
m = hashlib.sha256()
m.update(bytes(text, \"utf8\"))
return m.hexdigest()
async def run(playwright):
webkit = playwright.webkit
browser = await webkit.launch(headless=False)
context = await browser.new_context()
await context.expose_function(\"sha256\", sha256)
page = await context.new_page()
await page.set_content(\"\"\"
<script>
async function onClick() {
document.querySelector('div').textContent = await window.sha256('PLAYWRIGHT');
}
</script>
<button onclick=\"onClick()\">Click me</button>
<div></div>
\"\"\")
await page.click(\"button\")
async def main():
async with async_playwright() as playwright:
await run(playwright)
asyncio.run(main())
```
Parameters
----------
name : str
Name of the function on the window object.
callback : Callable
Callback function that will be called in the Playwright's context.
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.expose_function",
self._impl_obj.expose_function(
name=name, callback=self._wrap_handler(callback)
),
)
)
    async def route(
        self,
        url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]],
        handler: typing.Union[
            typing.Callable[["Route"], typing.Any],
            typing.Callable[["Route", "Request"], typing.Any],
        ],
        *,
        times: int = None
    ) -> NoneType:
        """BrowserContext.route
        Routing provides the capability to modify network requests that are made by any page in the browser context. Once route
        is enabled, every request matching the url pattern will stall unless it's continued, fulfilled or aborted.
        > NOTE: `page.route()` will not intercept requests intercepted by Service Worker. See
        [this](https://github.com/microsoft/playwright/issues/1090) issue. We recommend disabling Service Workers when using
        request interception. Via `await context.addInitScript(() => delete window.navigator.serviceWorker);`
        An example of a naive handler that aborts all image requests:
        ```py
        context = await browser.new_context()
        page = await context.new_page()
        await context.route(\"**/*.{png,jpg,jpeg}\", lambda route: route.abort())
        await page.goto(\"https://example.com\")
        await browser.close()
        ```
        or the same snippet using a regex pattern instead:
        ```py
        context = await browser.new_context()
        page = await context.new_page()
        await context.route(re.compile(r\"(\\.png$)|(\\.jpg$)\"), lambda route: route.abort())
        page = await context.new_page()
        await page.goto(\"https://example.com\")
        await browser.close()
        ```
        It is possible to examine the request to decide the route action. For example, mocking all requests that contain some
        post data, and leaving all other requests as is:
        ```py
        def handle_route(route):
            if \"my-string\" in route.request.post_data:
                route.fulfill(body=\"mocked-data\")
            else:
                route.continue_()
        await context.route(\"/api/**\", handle_route)
        ```
        Page routes (set up with `page.route()`) take precedence over browser context routes when request matches both
        handlers.
        To remove a route with its handler you can use `browser_context.unroute()`.
        > NOTE: Enabling routing disables http cache.
        Parameters
        ----------
        url : Union[Callable[[str], bool], Pattern, str]
            A glob pattern, regex pattern or predicate receiving [URL] to match while routing. When a `baseURL` via the context
            options was provided and the passed URL is a path, it gets merged via the
            [`new URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor.
        handler : Union[Callable[[Route, Request], Any], Callable[[Route], Any]]
            handler function to route the request.
        times : Union[int, NoneType]
            How often a route should be used. By default it will be used every time.
        """
        return mapping.from_maybe_impl(
            await self._async(
                "browser_context.route",
                self._impl_obj.route(
                    url=self._wrap_handler(url),
                    handler=self._wrap_handler(handler),
                    times=times,
                ),
            )
        )
async def unroute(
self,
url: typing.Union[str, typing.Pattern, typing.Callable[[str], bool]],
handler: typing.Union[
typing.Callable[["Route"], typing.Any],
typing.Callable[["Route", "Request"], typing.Any],
] = None,
) -> NoneType:
"""BrowserContext.unroute
Removes a route created with `browser_context.route()`. When `handler` is not specified, removes all routes for
the `url`.
Parameters
----------
url : Union[Callable[[str], bool], Pattern, str]
A glob pattern, regex pattern or predicate receiving [URL] used to register a routing with
`browser_context.route()`.
handler : Union[Callable[[Route, Request], Any], Callable[[Route], Any], NoneType]
Optional handler function used to register a routing with `browser_context.route()`.
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.unroute",
self._impl_obj.unroute(
url=self._wrap_handler(url), handler=self._wrap_handler(handler)
),
)
)
def expect_event(
self, event: str, predicate: typing.Callable = None, *, timeout: float = None
) -> AsyncEventContextManager:
"""BrowserContext.expect_event
Waits for event to fire and passes its value into the predicate function. Returns when the predicate returns truthy
value. Will throw an error if the context closes before the event is fired. Returns the event data value.
```py
async with context.expect_event(\"page\") as event_info:
await page.click(\"button\")
page = await event_info.value
```
Parameters
----------
event : str
Event name, same one would pass into `browserContext.on(event)`.
predicate : Union[Callable, NoneType]
Receives the event data and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager
"""
return AsyncEventContextManager(
self._impl_obj.expect_event(
event=event, predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
async def close(self) -> NoneType:
"""BrowserContext.close
Closes the browser context. All the pages that belong to the browser context will be closed.
> NOTE: The default browser context cannot be closed.
"""
return mapping.from_maybe_impl(
await self._async("browser_context.close", self._impl_obj.close())
)
async def storage_state(
self, *, path: typing.Union[str, pathlib.Path] = None
) -> StorageState:
"""BrowserContext.storage_state
Returns storage state for this browser context, contains current cookies and local storage snapshot.
Parameters
----------
path : Union[pathlib.Path, str, NoneType]
The file path to save the storage state to. If `path` is a relative path, then it is resolved relative to current
working directory. If no path is provided, storage state is still returned, but won't be saved to the disk.
Returns
-------
{cookies: List[{name: str, value: str, domain: str, path: str, expires: float, httpOnly: bool, secure: bool, sameSite: Union["Lax", "None", "Strict"]}], origins: List[{origin: str, localStorage: List[{name: str, value: str}]}]}
"""
return mapping.from_impl(
await self._async(
"browser_context.storage_state", self._impl_obj.storage_state(path=path)
)
)
async def wait_for_event(
self, event: str, predicate: typing.Callable = None, *, timeout: float = None
) -> typing.Any:
"""BrowserContext.wait_for_event
> NOTE: In most cases, you should use `browser_context.expect_event()`.
Waits for given `event` to fire. If predicate is provided, it passes event's value into the `predicate` function and
waits for `predicate(event)` to return a truthy value. Will throw an error if the browser context is closed before the
`event` is fired.
Parameters
----------
event : str
Event name, same one typically passed into `*.on(event)`.
predicate : Union[Callable, NoneType]
Receives the event data and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"browser_context.wait_for_event",
self._impl_obj.wait_for_event(
event=event,
predicate=self._wrap_handler(predicate),
timeout=timeout,
),
)
)
def expect_page(
self,
predicate: typing.Optional[typing.Callable[["Page"], bool]] = None,
*,
timeout: float = None
) -> AsyncEventContextManager["Page"]:
"""BrowserContext.expect_page
Performs action and waits for a new `Page` to be created in the context. If predicate is provided, it passes `Page`
value into the `predicate` function and waits for `predicate(event)` to return a truthy value. Will throw an error if
the context closes before new `Page` is created.
Parameters
----------
predicate : Union[Callable[[Page], bool], NoneType]
Receives the `Page` object and resolves to truthy value when the waiting should resolve.
timeout : Union[float, NoneType]
Maximum time to wait for in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout. The default
value can be changed by using the `browser_context.set_default_timeout()`.
Returns
-------
EventContextManager[Page]
"""
return AsyncEventContextManager(
self._impl_obj.expect_page(
predicate=self._wrap_handler(predicate), timeout=timeout
).future
)
async def new_cdp_session(
self, page: typing.Union["Page", "Frame"]
) -> "CDPSession":
"""BrowserContext.new_cdp_session
> NOTE: CDP sessions are only supported on Chromium-based browsers.
Returns the newly created session.
Parameters
----------
page : Union[Frame, Page]
Target to create new session for. For backwards-compatibility, this parameter is named `page`, but it can be a `Page` or
`Frame` type.
Returns
-------
CDPSession
"""
return mapping.from_impl(
await self._async(
"browser_context.new_cdp_session",
self._impl_obj.new_cdp_session(page=page),
)
)
# Bind the public async wrapper class to its implementation type so that
# impl objects returned by the driver are converted to BrowserContext.
mapping.register(BrowserContextImpl, BrowserContext)
class CDPSession(AsyncBase):
    async def send(self, method: str, params: typing.Dict = None) -> typing.Dict:
        """CDPSession.send
        Sends a raw Chrome DevTools Protocol message and returns the protocol result.
        Parameters
        ----------
        method : str
            protocol method name
        params : Union[Dict, NoneType]
            Optional method parameters
        Returns
        -------
        Dict
        """
        # Convert the params dict to its impl representation before dispatching.
        outgoing = self._impl_obj.send(method=method, params=mapping.to_impl(params))
        reply = await self._async("cdp_session.send", outgoing)
        return mapping.from_maybe_impl(reply)
    async def detach(self) -> NoneType:
        """CDPSession.detach
        Detaches this session from its target. Once detached, the session no longer emits
        events and cannot be used to send messages.
        """
        detach_coro = self._impl_obj.detach()
        return mapping.from_maybe_impl(
            await self._async("cdp_session.detach", detach_coro)
        )
# Bind the public async wrapper class to its implementation type so that
# impl objects returned by the driver are converted to CDPSession.
mapping.register(CDPSessionImpl, CDPSession)
class Browser(AsyncContextManager):
    # NOTE: generated wrapper. Every method delegates to the implementation
    # object (self._impl_obj) and maps results back to public API types.
    def on(
        self,
        event: Literal["disconnected"],
        f: typing.Callable[["Browser"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when Browser gets disconnected from the browser application. This might happen because of one of the following:
        - Browser application is closed or crashed.
        - The `browser.close()` method was called."""
        return super().on(event=event, f=f)
    def once(
        self,
        event: Literal["disconnected"],
        f: typing.Callable[["Browser"], "typing.Union[typing.Awaitable[None], None]"],
    ) -> None:
        """
        Emitted when Browser gets disconnected from the browser application. This might happen because of one of the following:
        - Browser application is closed or crashed.
        - The `browser.close()` method was called."""
        return super().once(event=event, f=f)
    @property
    def contexts(self) -> typing.List["BrowserContext"]:
        """Browser.contexts
        Returns an array of all open browser contexts. In a newly created browser, this will return zero browser contexts.
        ```py
        browser = await pw.webkit.launch()
        print(len(browser.contexts)) # prints `0`
        context = await browser.new_context()
        print(len(browser.contexts)) # prints `1`
        ```
        Returns
        -------
        List[BrowserContext]
        """
        return mapping.from_impl_list(self._impl_obj.contexts)
    @property
    def version(self) -> str:
        """Browser.version
        Returns the browser version.
        Returns
        -------
        str
        """
        return mapping.from_maybe_impl(self._impl_obj.version)
    def is_connected(self) -> bool:
        """Browser.is_connected
        Indicates that the browser is connected.
        Returns
        -------
        bool
        """
        return mapping.from_maybe_impl(self._impl_obj.is_connected())
    async def new_context(
        self,
        *,
        viewport: ViewportSize = None,
        screen: ViewportSize = None,
        no_viewport: bool = None,
        ignore_https_errors: bool = None,
        java_script_enabled: bool = None,
        bypass_csp: bool = None,
        user_agent: str = None,
        locale: str = None,
        timezone_id: str = None,
        geolocation: Geolocation = None,
        permissions: typing.List[str] = None,
        extra_http_headers: typing.Optional[typing.Dict[str, str]] = None,
        offline: bool = None,
        http_credentials: HttpCredentials = None,
        device_scale_factor: float = None,
        is_mobile: bool = None,
        has_touch: bool = None,
        color_scheme: Literal["dark", "light", "no-preference"] = None,
        reduced_motion: Literal["no-preference", "reduce"] = None,
        forced_colors: Literal["active", "none"] = None,
        accept_downloads: bool = None,
        default_browser_type: str = None,
        proxy: ProxySettings = None,
        record_har_path: typing.Union[str, pathlib.Path] = None,
        record_har_omit_content: bool = None,
        record_video_dir: typing.Union[str, pathlib.Path] = None,
        record_video_size: ViewportSize = None,
        storage_state: typing.Union[StorageState, str, pathlib.Path] = None,
        base_url: str = None,
        strict_selectors: bool = None
    ) -> "BrowserContext":
        """Browser.new_context
        Creates a new browser context. It won't share cookies/cache with other browser contexts.
        ```py
        browser = await playwright.firefox.launch() # or \"chromium\" or \"webkit\".
        # create a new incognito browser context.
        context = await browser.new_context()
        # create a new page in a pristine context.
        page = await context.new_page()
        await page.goto(\"https://example.com\")
        ```
        Parameters
        ----------
        viewport : Union[{width: int, height: int}, NoneType]
            Sets a consistent viewport for each page. Defaults to an 1280x720 viewport. `no_viewport` disables the fixed viewport.
        screen : Union[{width: int, height: int}, NoneType]
            Emulates consistent window screen size available inside web page via `window.screen`. Is only used when the `viewport`
            is set.
        no_viewport : Union[bool, NoneType]
            Does not enforce fixed viewport, allows resizing window in the headed mode.
        ignore_https_errors : Union[bool, NoneType]
            Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
        java_script_enabled : Union[bool, NoneType]
            Whether or not to enable JavaScript in the context. Defaults to `true`.
        bypass_csp : Union[bool, NoneType]
            Toggles bypassing page's Content-Security-Policy.
        user_agent : Union[str, NoneType]
            Specific user agent to use in this context.
        locale : Union[str, NoneType]
            Specify user locale, for example `en-GB`, `de-DE`, etc. Locale will affect `navigator.language` value, `Accept-Language`
            request header value as well as number and date formatting rules.
        timezone_id : Union[str, NoneType]
            Changes the timezone of the context. See
            [ICU's metaZones.txt](https://cs.chromium.org/chromium/src/third_party/icu/source/data/misc/metaZones.txt?rcl=faee8bc70570192d82d2978a71e2a615788597d1)
            for a list of supported timezone IDs.
        geolocation : Union[{latitude: float, longitude: float, accuracy: Union[float, NoneType]}, NoneType]
        permissions : Union[List[str], NoneType]
            A list of permissions to grant to all pages in this context. See `browser_context.grant_permissions()` for more
            details.
        extra_http_headers : Union[Dict[str, str], NoneType]
            An object containing additional HTTP headers to be sent with every request.
        offline : Union[bool, NoneType]
            Whether to emulate network being offline. Defaults to `false`.
        http_credentials : Union[{username: str, password: str}, NoneType]
            Credentials for [HTTP authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication).
        device_scale_factor : Union[float, NoneType]
            Specify device scale factor (can be thought of as dpr). Defaults to `1`.
        is_mobile : Union[bool, NoneType]
            Whether the `meta viewport` tag is taken into account and touch events are enabled. Defaults to `false`. Not supported
            in Firefox.
        has_touch : Union[bool, NoneType]
            Specifies if viewport supports touch events. Defaults to false.
        color_scheme : Union["dark", "light", "no-preference", NoneType]
            Emulates `'prefers-colors-scheme'` media feature, supported values are `'light'`, `'dark'`, `'no-preference'`. See
            `page.emulate_media()` for more details. Defaults to `'light'`.
        reduced_motion : Union["no-preference", "reduce", NoneType]
            Emulates `'prefers-reduced-motion'` media feature, supported values are `'reduce'`, `'no-preference'`. See
            `page.emulate_media()` for more details. Defaults to `'no-preference'`.
        forced_colors : Union["active", "none", NoneType]
            Emulates `'forced-colors'` media feature, supported values are `'active'`, `'none'`. See `page.emulate_media()`
            for more details. Defaults to `'none'`.
            > NOTE: It's not supported in WebKit, see [here](https://bugs.webkit.org/show_bug.cgi?id=225281) in their issue tracker.
        accept_downloads : Union[bool, NoneType]
            Whether to automatically download all the attachments. Defaults to `true` where all the downloads are accepted.
        proxy : Union[{server: str, bypass: Union[str, NoneType], username: Union[str, NoneType], password: Union[str, NoneType]}, NoneType]
            Network proxy settings to use with this context.
            > NOTE: For Chromium on Windows the browser needs to be launched with the global proxy for this option to work. If all
            contexts override the proxy, global proxy will be never used and can be any string, for example `launch({ proxy: {
            server: 'http://per-context' } })`.
        record_har_path : Union[pathlib.Path, str, NoneType]
            Enables [HAR](http://www.softwareishard.com/blog/har-12-spec) recording for all pages into the specified HAR file on the
            filesystem. If not specified, the HAR is not recorded. Make sure to call `browser_context.close()` for the HAR to
            be saved.
        record_har_omit_content : Union[bool, NoneType]
            Optional setting to control whether to omit request content from the HAR. Defaults to `false`.
        record_video_dir : Union[pathlib.Path, str, NoneType]
            Enables video recording for all pages into the specified directory. If not specified videos are not recorded. Make sure
            to call `browser_context.close()` for videos to be saved.
        record_video_size : Union[{width: int, height: int}, NoneType]
            Dimensions of the recorded videos. If not specified the size will be equal to `viewport` scaled down to fit into
            800x800. If `viewport` is not configured explicitly the video size defaults to 800x450. Actual picture of each page will
            be scaled down if necessary to fit the specified size.
        storage_state : Union[pathlib.Path, str, {cookies: List[{name: str, value: str, domain: str, path: str, expires: float, httpOnly: bool, secure: bool, sameSite: Union["Lax", "None", "Strict"]}], origins: List[{origin: str, localStorage: List[{name: str, value: str}]}]}, NoneType]
            Populates context with given storage state. This option can be used to initialize context with logged-in information
            obtained via `browser_context.storage_state()`. Either a path to the file with saved storage, or an object with
            the following fields:
        base_url : Union[str, NoneType]
            When using `page.goto()`, `page.route()`, `page.wait_for_url()`, `page.expect_request()`,
            or `page.expect_response()` it takes the base URL in consideration by using the
            [`URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor for building the corresponding URL.
            Examples:
            - baseURL: `http://localhost:3000` and navigating to `/bar.html` results in `http://localhost:3000/bar.html`
            - baseURL: `http://localhost:3000/foo/` and navigating to `./bar.html` results in `http://localhost:3000/foo/bar.html`
            - baseURL: `http://localhost:3000/foo` (without trailing slash) and navigating to `./bar.html` results in
              `http://localhost:3000/bar.html`
        strict_selectors : Union[bool, NoneType]
            If specified, enables strict selectors mode for this context. In the strict selectors mode all operations on selectors
            that imply single target DOM element will throw when more than one element matches the selector. See `Locator` to learn
            more about the strict mode.
        Returns
        -------
        BrowserContext
        """
        return mapping.from_impl(
            await self._async(
                "browser.new_context",
                # Public snake_case option names are forwarded under the
                # camelCase names the impl layer expects.
                self._impl_obj.new_context(
                    viewport=viewport,
                    screen=screen,
                    noViewport=no_viewport,
                    ignoreHTTPSErrors=ignore_https_errors,
                    javaScriptEnabled=java_script_enabled,
                    bypassCSP=bypass_csp,
                    userAgent=user_agent,
                    locale=locale,
                    timezoneId=timezone_id,
                    geolocation=geolocation,
                    permissions=mapping.to_impl(permissions),
                    extraHTTPHeaders=mapping.to_impl(extra_http_headers),
                    offline=offline,
                    httpCredentials=http_credentials,
                    deviceScaleFactor=device_scale_factor,
                    isMobile=is_mobile,
                    hasTouch=has_touch,
                    colorScheme=color_scheme,
                    reducedMotion=reduced_motion,
                    forcedColors=forced_colors,
                    acceptDownloads=accept_downloads,
                    defaultBrowserType=default_browser_type,
                    proxy=proxy,
                    recordHarPath=record_har_path,
                    recordHarOmitContent=record_har_omit_content,
                    recordVideoDir=record_video_dir,
                    recordVideoSize=record_video_size,
                    storageState=storage_state,
                    baseURL=base_url,
                    strictSelectors=strict_selectors,
                ),
            )
        )
    async def new_page(
        self,
        *,
        viewport: ViewportSize = None,
        screen: ViewportSize = None,
        no_viewport: bool = None,
        ignore_https_errors: bool = None,
        java_script_enabled: bool = None,
        bypass_csp: bool = None,
        user_agent: str = None,
        locale: str = None,
        timezone_id: str = None,
        geolocation: Geolocation = None,
        permissions: typing.List[str] = None,
        extra_http_headers: typing.Optional[typing.Dict[str, str]] = None,
        offline: bool = None,
        http_credentials: HttpCredentials = None,
        device_scale_factor: float = None,
        is_mobile: bool = None,
        has_touch: bool = None,
        color_scheme: Literal["dark", "light", "no-preference"] = None,
        forced_colors: Literal["active", "none"] = None,
        reduced_motion: Literal["no-preference", "reduce"] = None,
        accept_downloads: bool = None,
        default_browser_type: str = None,
        proxy: ProxySettings = None,
        record_har_path: typing.Union[str, pathlib.Path] = None,
        record_har_omit_content: bool = None,
        record_video_dir: typing.Union[str, pathlib.Path] = None,
        record_video_size: ViewportSize = None,
        storage_state: typing.Union[StorageState, str, pathlib.Path] = None,
        base_url: str = None,
        strict_selectors: bool = None
    ) -> "Page":
        """Browser.new_page
        Creates a new page in a new browser context. Closing this page will close the context as well.
        This is a convenience API that should only be used for the single-page scenarios and short snippets. Production code and
        testing frameworks should explicitly create `browser.new_context()` followed by the
        `browser_context.new_page()` to control their exact life times.
        Parameters
        ----------
        viewport : Union[{width: int, height: int}, NoneType]
            Sets a consistent viewport for each page. Defaults to an 1280x720 viewport. `no_viewport` disables the fixed viewport.
        screen : Union[{width: int, height: int}, NoneType]
            Emulates consistent window screen size available inside web page via `window.screen`. Is only used when the `viewport`
            is set.
        no_viewport : Union[bool, NoneType]
            Does not enforce fixed viewport, allows resizing window in the headed mode.
        ignore_https_errors : Union[bool, NoneType]
            Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
        java_script_enabled : Union[bool, NoneType]
            Whether or not to enable JavaScript in the context. Defaults to `true`.
        bypass_csp : Union[bool, NoneType]
            Toggles bypassing page's Content-Security-Policy.
        user_agent : Union[str, NoneType]
            Specific user agent to use in this context.
        locale : Union[str, NoneType]
            Specify user locale, for example `en-GB`, `de-DE`, etc. Locale will affect `navigator.language` value, `Accept-Language`
            request header value as well as number and date formatting rules.
        timezone_id : Union[str, NoneType]
            Changes the timezone of the context. See
            [ICU's metaZones.txt](https://cs.chromium.org/chromium/src/third_party/icu/source/data/misc/metaZones.txt?rcl=faee8bc70570192d82d2978a71e2a615788597d1)
            for a list of supported timezone IDs.
        geolocation : Union[{latitude: float, longitude: float, accuracy: Union[float, NoneType]}, NoneType]
        permissions : Union[List[str], NoneType]
            A list of permissions to grant to all pages in this context. See `browser_context.grant_permissions()` for more
            details.
        extra_http_headers : Union[Dict[str, str], NoneType]
            An object containing additional HTTP headers to be sent with every request.
        offline : Union[bool, NoneType]
            Whether to emulate network being offline. Defaults to `false`.
        http_credentials : Union[{username: str, password: str}, NoneType]
            Credentials for [HTTP authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication).
        device_scale_factor : Union[float, NoneType]
            Specify device scale factor (can be thought of as dpr). Defaults to `1`.
        is_mobile : Union[bool, NoneType]
            Whether the `meta viewport` tag is taken into account and touch events are enabled. Defaults to `false`. Not supported
            in Firefox.
        has_touch : Union[bool, NoneType]
            Specifies if viewport supports touch events. Defaults to false.
        color_scheme : Union["dark", "light", "no-preference", NoneType]
            Emulates `'prefers-colors-scheme'` media feature, supported values are `'light'`, `'dark'`, `'no-preference'`. See
            `page.emulate_media()` for more details. Defaults to `'light'`.
        forced_colors : Union["active", "none", NoneType]
            Emulates `'forced-colors'` media feature, supported values are `'active'`, `'none'`. See `page.emulate_media()`
            for more details. Defaults to `'none'`.
            > NOTE: It's not supported in WebKit, see [here](https://bugs.webkit.org/show_bug.cgi?id=225281) in their issue tracker.
        reduced_motion : Union["no-preference", "reduce", NoneType]
            Emulates `'prefers-reduced-motion'` media feature, supported values are `'reduce'`, `'no-preference'`. See
            `page.emulate_media()` for more details. Defaults to `'no-preference'`.
        accept_downloads : Union[bool, NoneType]
            Whether to automatically download all the attachments. Defaults to `true` where all the downloads are accepted.
        proxy : Union[{server: str, bypass: Union[str, NoneType], username: Union[str, NoneType], password: Union[str, NoneType]}, NoneType]
            Network proxy settings to use with this context.
            > NOTE: For Chromium on Windows the browser needs to be launched with the global proxy for this option to work. If all
            contexts override the proxy, global proxy will be never used and can be any string, for example `launch({ proxy: {
            server: 'http://per-context' } })`.
        record_har_path : Union[pathlib.Path, str, NoneType]
            Enables [HAR](http://www.softwareishard.com/blog/har-12-spec) recording for all pages into the specified HAR file on the
            filesystem. If not specified, the HAR is not recorded. Make sure to call `browser_context.close()` for the HAR to
            be saved.
        record_har_omit_content : Union[bool, NoneType]
            Optional setting to control whether to omit request content from the HAR. Defaults to `false`.
        record_video_dir : Union[pathlib.Path, str, NoneType]
            Enables video recording for all pages into the specified directory. If not specified videos are not recorded. Make sure
            to call `browser_context.close()` for videos to be saved.
        record_video_size : Union[{width: int, height: int}, NoneType]
            Dimensions of the recorded videos. If not specified the size will be equal to `viewport` scaled down to fit into
            800x800. If `viewport` is not configured explicitly the video size defaults to 800x450. Actual picture of each page will
            be scaled down if necessary to fit the specified size.
        storage_state : Union[pathlib.Path, str, {cookies: List[{name: str, value: str, domain: str, path: str, expires: float, httpOnly: bool, secure: bool, sameSite: Union["Lax", "None", "Strict"]}], origins: List[{origin: str, localStorage: List[{name: str, value: str}]}]}, NoneType]
            Populates context with given storage state. This option can be used to initialize context with logged-in information
            obtained via `browser_context.storage_state()`. Either a path to the file with saved storage, or an object with
            the following fields:
        base_url : Union[str, NoneType]
            When using `page.goto()`, `page.route()`, `page.wait_for_url()`, `page.expect_request()`,
            or `page.expect_response()` it takes the base URL in consideration by using the
            [`URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor for building the corresponding URL.
            Examples:
            - baseURL: `http://localhost:3000` and navigating to `/bar.html` results in `http://localhost:3000/bar.html`
            - baseURL: `http://localhost:3000/foo/` and navigating to `./bar.html` results in `http://localhost:3000/foo/bar.html`
            - baseURL: `http://localhost:3000/foo` (without trailing slash) and navigating to `./bar.html` results in
              `http://localhost:3000/bar.html`
        strict_selectors : Union[bool, NoneType]
            If specified, enables strict selectors mode for this context. In the strict selectors mode all operations on selectors
            that imply single target DOM element will throw when more than one element matches the selector. See `Locator` to learn
            more about the strict mode.
        Returns
        -------
        Page
        """
        return mapping.from_impl(
            await self._async(
                "browser.new_page",
                # Public snake_case option names are forwarded under the
                # camelCase names the impl layer expects.
                self._impl_obj.new_page(
                    viewport=viewport,
                    screen=screen,
                    noViewport=no_viewport,
                    ignoreHTTPSErrors=ignore_https_errors,
                    javaScriptEnabled=java_script_enabled,
                    bypassCSP=bypass_csp,
                    userAgent=user_agent,
                    locale=locale,
                    timezoneId=timezone_id,
                    geolocation=geolocation,
                    permissions=mapping.to_impl(permissions),
                    extraHTTPHeaders=mapping.to_impl(extra_http_headers),
                    offline=offline,
                    httpCredentials=http_credentials,
                    deviceScaleFactor=device_scale_factor,
                    isMobile=is_mobile,
                    hasTouch=has_touch,
                    colorScheme=color_scheme,
                    forcedColors=forced_colors,
                    reducedMotion=reduced_motion,
                    acceptDownloads=accept_downloads,
                    defaultBrowserType=default_browser_type,
                    proxy=proxy,
                    recordHarPath=record_har_path,
                    recordHarOmitContent=record_har_omit_content,
                    recordVideoDir=record_video_dir,
                    recordVideoSize=record_video_size,
                    storageState=storage_state,
                    baseURL=base_url,
                    strictSelectors=strict_selectors,
                ),
            )
        )
    async def close(self) -> NoneType:
        """Browser.close
        In case this browser is obtained using `browser_type.launch()`, closes the browser and all of its pages (if any
        were opened).
        In case this browser is connected to, clears all created contexts belonging to this browser and disconnects from the
        browser server.
        The `Browser` object itself is considered to be disposed and cannot be used anymore.
        """
        return mapping.from_maybe_impl(
            await self._async("browser.close", self._impl_obj.close())
        )
    async def new_browser_cdp_session(self) -> "CDPSession":
        """Browser.new_browser_cdp_session
        > NOTE: CDP Sessions are only supported on Chromium-based browsers.
        Returns the newly created browser session.
        Returns
        -------
        CDPSession
        """
        return mapping.from_impl(
            await self._async(
                "browser.new_browser_cdp_session",
                self._impl_obj.new_browser_cdp_session(),
            )
        )
    async def start_tracing(
        self,
        *,
        page: "Page" = None,
        path: typing.Union[str, pathlib.Path] = None,
        screenshots: bool = None,
        categories: typing.List[str] = None
    ) -> NoneType:
        """Browser.start_tracing
        > NOTE: This API controls [Chromium Tracing](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool)
        which is a low-level chromium-specific debugging tool. API to control [Playwright Tracing](../trace-viewer) could be
        found [here](./class-tracing).
        You can use `browser.start_tracing()` and `browser.stop_tracing()` to create a trace file that can be
        opened in Chrome DevTools performance panel.
        ```py
        await browser.start_tracing(page, path=\"trace.json\")
        await page.goto(\"https://www.google.com\")
        await browser.stop_tracing()
        ```
        Parameters
        ----------
        page : Union[Page, NoneType]
            Optional, if specified, tracing includes screenshots of the given page.
        path : Union[pathlib.Path, str, NoneType]
            A path to write the trace file to.
        screenshots : Union[bool, NoneType]
            captures screenshots in the trace.
        categories : Union[List[str], NoneType]
            specify custom categories to use instead of default.
        """
        return mapping.from_maybe_impl(
            await self._async(
                "browser.start_tracing",
                self._impl_obj.start_tracing(
                    # Unwrap the public Page to its impl object; tracing is
                    # allowed without a page, hence the None fallback.
                    page=page._impl_obj if page else None,
                    path=path,
                    screenshots=screenshots,
                    categories=mapping.to_impl(categories),
                ),
            )
        )
    async def stop_tracing(self) -> bytes:
        """Browser.stop_tracing
        > NOTE: This API controls [Chromium Tracing](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool)
        which is a low-level chromium-specific debugging tool. API to control [Playwright Tracing](../trace-viewer) could be
        found [here](./class-tracing).
        Returns the buffer with trace data.
        Returns
        -------
        bytes
        """
        return mapping.from_maybe_impl(
            await self._async("browser.stop_tracing", self._impl_obj.stop_tracing())
        )
# Bind the public async wrapper class to its implementation type so that
# impl objects returned by the driver are converted to Browser.
mapping.register(BrowserImpl, Browser)
class BrowserType(AsyncBase):
@property
def name(self) -> str:
"""BrowserType.name
Returns browser name. For example: `'chromium'`, `'webkit'` or `'firefox'`.
Returns
-------
str
"""
return mapping.from_maybe_impl(self._impl_obj.name)
@property
def executable_path(self) -> str:
"""BrowserType.executable_path
A path where Playwright expects to find a bundled browser executable.
Returns
-------
str
"""
return mapping.from_maybe_impl(self._impl_obj.executable_path)
    async def launch(
        self,
        *,
        executable_path: typing.Union[str, pathlib.Path] = None,
        channel: str = None,
        args: typing.List[str] = None,
        ignore_default_args: typing.Union[bool, typing.List[str]] = None,
        handle_sigint: bool = None,
        handle_sigterm: bool = None,
        handle_sighup: bool = None,
        timeout: float = None,
        env: typing.Optional[typing.Dict[str, typing.Union[str, float, bool]]] = None,
        headless: bool = None,
        devtools: bool = None,
        proxy: ProxySettings = None,
        downloads_path: typing.Union[str, pathlib.Path] = None,
        slow_mo: float = None,
        traces_dir: typing.Union[str, pathlib.Path] = None,
        chromium_sandbox: bool = None,
        firefox_user_prefs: typing.Optional[
            typing.Dict[str, typing.Union[str, float, bool]]
        ] = None
    ) -> "Browser":
        """BrowserType.launch
        Returns the browser instance.
        You can use `ignoreDefaultArgs` to filter out `--mute-audio` from default arguments:
        ```py
        browser = await playwright.chromium.launch( # or \"firefox\" or \"webkit\".
            ignore_default_args=[\"--mute-audio\"]
        )
        ```
        > **Chromium-only** Playwright can also be used to control the Google Chrome or Microsoft Edge browsers, but it works
        best with the version of Chromium it is bundled with. There is no guarantee it will work with any other version. Use
        `executablePath` option with extreme caution.
        >
        > If Google Chrome (rather than Chromium) is preferred, a
        [Chrome Canary](https://www.google.com/chrome/browser/canary.html) or
        [Dev Channel](https://www.chromium.org/getting-involved/dev-channel) build is suggested.
        >
        > Stock browsers like Google Chrome and Microsoft Edge are suitable for tests that require proprietary media codecs for
        video playback. See
        [this article](https://www.howtogeek.com/202825/what%E2%80%99s-the-difference-between-chromium-and-chrome/) for other
        differences between Chromium and Chrome.
        [This article](https://chromium.googlesource.com/chromium/src/+/lkgr/docs/chromium_browser_vs_google_chrome.md)
        describes some differences for Linux users.
        Parameters
        ----------
        executable_path : Union[pathlib.Path, str, NoneType]
            Path to a browser executable to run instead of the bundled one. If `executablePath` is a relative path, then it is
            resolved relative to the current working directory. Note that Playwright only works with the bundled Chromium, Firefox
            or WebKit, use at your own risk.
        channel : Union[str, NoneType]
            Browser distribution channel. Supported values are "chrome", "chrome-beta", "chrome-dev", "chrome-canary", "msedge",
            "msedge-beta", "msedge-dev", "msedge-canary". Read more about using
            [Google Chrome and Microsoft Edge](./browsers.md#google-chrome--microsoft-edge).
        args : Union[List[str], NoneType]
            Additional arguments to pass to the browser instance. The list of Chromium flags can be found
            [here](http://peter.sh/experiments/chromium-command-line-switches/).
        ignore_default_args : Union[List[str], bool, NoneType]
            If `true`, Playwright does not pass its own configurations args and only uses the ones from `args`. If an array is
            given, then filters out the given default arguments. Dangerous option; use with care. Defaults to `false`.
        handle_sigint : Union[bool, NoneType]
            Close the browser process on Ctrl-C. Defaults to `true`.
        handle_sigterm : Union[bool, NoneType]
            Close the browser process on SIGTERM. Defaults to `true`.
        handle_sighup : Union[bool, NoneType]
            Close the browser process on SIGHUP. Defaults to `true`.
        timeout : Union[float, NoneType]
            Maximum time in milliseconds to wait for the browser instance to start. Defaults to `30000` (30 seconds). Pass `0` to
            disable timeout.
        env : Union[Dict[str, Union[bool, float, str]], NoneType]
            Specify environment variables that will be visible to the browser. Defaults to `process.env`.
        headless : Union[bool, NoneType]
            Whether to run browser in headless mode. More details for
            [Chromium](https://developers.google.com/web/updates/2017/04/headless-chrome) and
            [Firefox](https://developer.mozilla.org/en-US/docs/Mozilla/Firefox/Headless_mode). Defaults to `true` unless the
            `devtools` option is `true`.
        devtools : Union[bool, NoneType]
            **Chromium-only** Whether to auto-open a Developer Tools panel for each tab. If this option is `true`, the `headless`
            option will be set `false`.
        proxy : Union[{server: str, bypass: Union[str, NoneType], username: Union[str, NoneType], password: Union[str, NoneType]}, NoneType]
            Network proxy settings.
        downloads_path : Union[pathlib.Path, str, NoneType]
            If specified, accepted downloads are downloaded into this directory. Otherwise, temporary directory is created and is
            deleted when browser is closed. In either case, the downloads are deleted when the browser context they were created in
            is closed.
        slow_mo : Union[float, NoneType]
            Slows down Playwright operations by the specified amount of milliseconds. Useful so that you can see what is going on.
        traces_dir : Union[pathlib.Path, str, NoneType]
            If specified, traces are saved into this directory.
        chromium_sandbox : Union[bool, NoneType]
            Enable Chromium sandboxing. Defaults to `false`.
        firefox_user_prefs : Union[Dict[str, Union[bool, float, str]], NoneType]
            Firefox user preferences. Learn more about the Firefox user preferences at
            [`about:config`](https://support.mozilla.org/en-US/kb/about-config-editor-firefox).
        Returns
        -------
        Browser
        """
        return mapping.from_impl(
            await self._async(
                "browser_type.launch",
                # Public snake_case option names are forwarded under the
                # camelCase names the impl layer expects (e.g. handle_sigint ->
                # handleSIGINT).
                self._impl_obj.launch(
                    executablePath=executable_path,
                    channel=channel,
                    args=mapping.to_impl(args),
                    ignoreDefaultArgs=mapping.to_impl(ignore_default_args),
                    handleSIGINT=handle_sigint,
                    handleSIGTERM=handle_sigterm,
                    handleSIGHUP=handle_sighup,
                    timeout=timeout,
                    env=mapping.to_impl(env),
                    headless=headless,
                    devtools=devtools,
                    proxy=proxy,
                    downloadsPath=downloads_path,
                    slowMo=slow_mo,
                    tracesDir=traces_dir,
                    chromiumSandbox=chromium_sandbox,
                    firefoxUserPrefs=mapping.to_impl(firefox_user_prefs),
                ),
            )
        )
    async def launch_persistent_context(
        self,
        user_data_dir: typing.Union[str, pathlib.Path],
        *,
        channel: str = None,
        executable_path: typing.Union[str, pathlib.Path] = None,
        args: typing.List[str] = None,
        ignore_default_args: typing.Union[bool, typing.List[str]] = None,
        handle_sigint: bool = None,
        handle_sigterm: bool = None,
        handle_sighup: bool = None,
        timeout: float = None,
        env: typing.Optional[typing.Dict[str, typing.Union[str, float, bool]]] = None,
        headless: bool = None,
        devtools: bool = None,
        proxy: ProxySettings = None,
        downloads_path: typing.Union[str, pathlib.Path] = None,
        slow_mo: float = None,
        viewport: ViewportSize = None,
        screen: ViewportSize = None,
        no_viewport: bool = None,
        ignore_https_errors: bool = None,
        java_script_enabled: bool = None,
        bypass_csp: bool = None,
        user_agent: str = None,
        locale: str = None,
        timezone_id: str = None,
        geolocation: Geolocation = None,
        permissions: typing.List[str] = None,
        extra_http_headers: typing.Optional[typing.Dict[str, str]] = None,
        offline: bool = None,
        http_credentials: HttpCredentials = None,
        device_scale_factor: float = None,
        is_mobile: bool = None,
        has_touch: bool = None,
        color_scheme: Literal["dark", "light", "no-preference"] = None,
        reduced_motion: Literal["no-preference", "reduce"] = None,
        forced_colors: Literal["active", "none"] = None,
        accept_downloads: bool = None,
        traces_dir: typing.Union[str, pathlib.Path] = None,
        chromium_sandbox: bool = None,
        record_har_path: typing.Union[str, pathlib.Path] = None,
        record_har_omit_content: bool = None,
        record_video_dir: typing.Union[str, pathlib.Path] = None,
        record_video_size: ViewportSize = None,
        base_url: str = None,
        strict_selectors: bool = None
    ) -> "BrowserContext":
        """BrowserType.launch_persistent_context

        Returns the persistent browser context instance.

        Launches browser that uses persistent storage located at `userDataDir` and returns the only context. Closing this
        context will automatically close the browser.

        Parameters
        ----------
        user_data_dir : Union[pathlib.Path, str]
            Path to a User Data Directory, which stores browser session data like cookies and local storage. More details for
            [Chromium](https://chromium.googlesource.com/chromium/src/+/master/docs/user_data_dir.md#introduction) and
            [Firefox](https://developer.mozilla.org/en-US/docs/Mozilla/Command_Line_Options#User_Profile). Note that Chromium's user
            data directory is the **parent** directory of the "Profile Path" seen at `chrome://version`. Pass an empty string to use
            a temporary directory instead.
        channel : Union[str, NoneType]
            Browser distribution channel. Supported values are "chrome", "chrome-beta", "chrome-dev", "chrome-canary", "msedge",
            "msedge-beta", "msedge-dev", "msedge-canary". Read more about using
            [Google Chrome and Microsoft Edge](./browsers.md#google-chrome--microsoft-edge).
        executable_path : Union[pathlib.Path, str, NoneType]
            Path to a browser executable to run instead of the bundled one. If `executablePath` is a relative path, then it is
            resolved relative to the current working directory. Note that Playwright only works with the bundled Chromium, Firefox
            or WebKit, use at your own risk.
        args : Union[List[str], NoneType]
            Additional arguments to pass to the browser instance. The list of Chromium flags can be found
            [here](http://peter.sh/experiments/chromium-command-line-switches/).
        ignore_default_args : Union[List[str], bool, NoneType]
            If `true`, Playwright does not pass its own configurations args and only uses the ones from `args`. If an array is
            given, then filters out the given default arguments. Dangerous option; use with care. Defaults to `false`.
        handle_sigint : Union[bool, NoneType]
            Close the browser process on Ctrl-C. Defaults to `true`.
        handle_sigterm : Union[bool, NoneType]
            Close the browser process on SIGTERM. Defaults to `true`.
        handle_sighup : Union[bool, NoneType]
            Close the browser process on SIGHUP. Defaults to `true`.
        timeout : Union[float, NoneType]
            Maximum time in milliseconds to wait for the browser instance to start. Defaults to `30000` (30 seconds). Pass `0` to
            disable timeout.
        env : Union[Dict[str, Union[bool, float, str]], NoneType]
            Specify environment variables that will be visible to the browser. Defaults to `process.env`.
        headless : Union[bool, NoneType]
            Whether to run browser in headless mode. More details for
            [Chromium](https://developers.google.com/web/updates/2017/04/headless-chrome) and
            [Firefox](https://developer.mozilla.org/en-US/docs/Mozilla/Firefox/Headless_mode). Defaults to `true` unless the
            `devtools` option is `true`.
        devtools : Union[bool, NoneType]
            **Chromium-only** Whether to auto-open a Developer Tools panel for each tab. If this option is `true`, the `headless`
            option will be set `false`.
        proxy : Union[{server: str, bypass: Union[str, NoneType], username: Union[str, NoneType], password: Union[str, NoneType]}, NoneType]
            Network proxy settings.
        downloads_path : Union[pathlib.Path, str, NoneType]
            If specified, accepted downloads are downloaded into this directory. Otherwise, temporary directory is created and is
            deleted when browser is closed. In either case, the downloads are deleted when the browser context they were created in
            is closed.
        slow_mo : Union[float, NoneType]
            Slows down Playwright operations by the specified amount of milliseconds. Useful so that you can see what is going on.
        viewport : Union[{width: int, height: int}, NoneType]
            Sets a consistent viewport for each page. Defaults to an 1280x720 viewport. `no_viewport` disables the fixed viewport.
        screen : Union[{width: int, height: int}, NoneType]
            Emulates consistent window screen size available inside web page via `window.screen`. Is only used when the `viewport`
            is set.
        no_viewport : Union[bool, NoneType]
            Does not enforce fixed viewport, allows resizing window in the headed mode.
        ignore_https_errors : Union[bool, NoneType]
            Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
        java_script_enabled : Union[bool, NoneType]
            Whether or not to enable JavaScript in the context. Defaults to `true`.
        bypass_csp : Union[bool, NoneType]
            Toggles bypassing page's Content-Security-Policy.
        user_agent : Union[str, NoneType]
            Specific user agent to use in this context.
        locale : Union[str, NoneType]
            Specify user locale, for example `en-GB`, `de-DE`, etc. Locale will affect `navigator.language` value, `Accept-Language`
            request header value as well as number and date formatting rules.
        timezone_id : Union[str, NoneType]
            Changes the timezone of the context. See
            [ICU's metaZones.txt](https://cs.chromium.org/chromium/src/third_party/icu/source/data/misc/metaZones.txt?rcl=faee8bc70570192d82d2978a71e2a615788597d1)
            for a list of supported timezone IDs.
        geolocation : Union[{latitude: float, longitude: float, accuracy: Union[float, NoneType]}, NoneType]
            Geolocation to emulate for all pages in this context.
        permissions : Union[List[str], NoneType]
            A list of permissions to grant to all pages in this context. See `browser_context.grant_permissions()` for more
            details.
        extra_http_headers : Union[Dict[str, str], NoneType]
            An object containing additional HTTP headers to be sent with every request.
        offline : Union[bool, NoneType]
            Whether to emulate network being offline. Defaults to `false`.
        http_credentials : Union[{username: str, password: str}, NoneType]
            Credentials for [HTTP authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication).
        device_scale_factor : Union[float, NoneType]
            Specify device scale factor (can be thought of as dpr). Defaults to `1`.
        is_mobile : Union[bool, NoneType]
            Whether the `meta viewport` tag is taken into account and touch events are enabled. Defaults to `false`. Not supported
            in Firefox.
        has_touch : Union[bool, NoneType]
            Specifies if viewport supports touch events. Defaults to false.
        color_scheme : Union["dark", "light", "no-preference", NoneType]
            Emulates `'prefers-colors-scheme'` media feature, supported values are `'light'`, `'dark'`, `'no-preference'`. See
            `page.emulate_media()` for more details. Defaults to `'light'`.
        reduced_motion : Union["no-preference", "reduce", NoneType]
            Emulates `'prefers-reduced-motion'` media feature, supported values are `'reduce'`, `'no-preference'`. See
            `page.emulate_media()` for more details. Defaults to `'no-preference'`.
        forced_colors : Union["active", "none", NoneType]
            Emulates `'forced-colors'` media feature, supported values are `'active'`, `'none'`. See `page.emulate_media()`
            for more details. Defaults to `'none'`.

            > NOTE: It's not supported in WebKit, see [here](https://bugs.webkit.org/show_bug.cgi?id=225281) in their issue tracker.
        accept_downloads : Union[bool, NoneType]
            Whether to automatically download all the attachments. Defaults to `true` where all the downloads are accepted.
        traces_dir : Union[pathlib.Path, str, NoneType]
            If specified, traces are saved into this directory.
        chromium_sandbox : Union[bool, NoneType]
            Enable Chromium sandboxing. Defaults to `false`.
        record_har_path : Union[pathlib.Path, str, NoneType]
            Enables [HAR](http://www.softwareishard.com/blog/har-12-spec) recording for all pages into the specified HAR file on the
            filesystem. If not specified, the HAR is not recorded. Make sure to call `browser_context.close()` for the HAR to
            be saved.
        record_har_omit_content : Union[bool, NoneType]
            Optional setting to control whether to omit request content from the HAR. Defaults to `false`.
        record_video_dir : Union[pathlib.Path, str, NoneType]
            Enables video recording for all pages into the specified directory. If not specified videos are not recorded. Make sure
            to call `browser_context.close()` for videos to be saved.
        record_video_size : Union[{width: int, height: int}, NoneType]
            Dimensions of the recorded videos. If not specified the size will be equal to `viewport` scaled down to fit into
            800x800. If `viewport` is not configured explicitly the video size defaults to 800x450. Actual picture of each page will
            be scaled down if necessary to fit the specified size.
        base_url : Union[str, NoneType]
            When using `page.goto()`, `page.route()`, `page.wait_for_url()`, `page.expect_request()`,
            or `page.expect_response()` it takes the base URL in consideration by using the
            [`URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor for building the corresponding URL.
            Examples:
            - baseURL: `http://localhost:3000` and navigating to `/bar.html` results in `http://localhost:3000/bar.html`
            - baseURL: `http://localhost:3000/foo/` and navigating to `./bar.html` results in `http://localhost:3000/foo/bar.html`
            - baseURL: `http://localhost:3000/foo` (without trailing slash) and navigating to `./bar.html` results in
              `http://localhost:3000/bar.html`
        strict_selectors : Union[bool, NoneType]
            If specified, enables strict selectors mode for this context. In the strict selectors mode all operations on selectors
            that imply single target DOM element will throw when more than one element matches the selector. See `Locator` to learn
            more about the strict mode.

        Returns
        -------
        BrowserContext
        """

        return mapping.from_impl(
            await self._async(
                "browser_type.launch_persistent_context",
                self._impl_obj.launch_persistent_context(
                    userDataDir=user_data_dir,
                    channel=channel,
                    executablePath=executable_path,
                    args=mapping.to_impl(args),
                    ignoreDefaultArgs=mapping.to_impl(ignore_default_args),
                    handleSIGINT=handle_sigint,
                    handleSIGTERM=handle_sigterm,
                    handleSIGHUP=handle_sighup,
                    timeout=timeout,
                    env=mapping.to_impl(env),
                    headless=headless,
                    devtools=devtools,
                    proxy=proxy,
                    downloadsPath=downloads_path,
                    slowMo=slow_mo,
                    viewport=viewport,
                    screen=screen,
                    noViewport=no_viewport,
                    ignoreHTTPSErrors=ignore_https_errors,
                    javaScriptEnabled=java_script_enabled,
                    bypassCSP=bypass_csp,
                    userAgent=user_agent,
                    locale=locale,
                    timezoneId=timezone_id,
                    geolocation=geolocation,
                    permissions=mapping.to_impl(permissions),
                    extraHTTPHeaders=mapping.to_impl(extra_http_headers),
                    offline=offline,
                    httpCredentials=http_credentials,
                    deviceScaleFactor=device_scale_factor,
                    isMobile=is_mobile,
                    hasTouch=has_touch,
                    colorScheme=color_scheme,
                    reducedMotion=reduced_motion,
                    forcedColors=forced_colors,
                    acceptDownloads=accept_downloads,
                    tracesDir=traces_dir,
                    chromiumSandbox=chromium_sandbox,
                    recordHarPath=record_har_path,
                    recordHarOmitContent=record_har_omit_content,
                    recordVideoDir=record_video_dir,
                    recordVideoSize=record_video_size,
                    baseURL=base_url,
                    strictSelectors=strict_selectors,
                ),
            )
        )
async def connect_over_cdp(
self,
endpoint_url: str,
*,
timeout: float = None,
slow_mo: float = None,
headers: typing.Optional[typing.Dict[str, str]] = None
) -> "Browser":
"""BrowserType.connect_over_cdp
This methods attaches Playwright to an existing browser instance using the Chrome DevTools Protocol.
The default browser context is accessible via `browser.contexts()`.
> NOTE: Connecting over the Chrome DevTools Protocol is only supported for Chromium-based browsers.
Parameters
----------
endpoint_url : str
A CDP websocket endpoint or http url to connect to. For example `http://localhost:9222/` or
`ws://127.0.0.1:9222/devtools/browser/387adf4c-243f-4051-a181-46798f4a46f4`.
timeout : Union[float, NoneType]
Maximum time in milliseconds to wait for the connection to be established. Defaults to `30000` (30 seconds). Pass `0` to
disable timeout.
slow_mo : Union[float, NoneType]
Slows down Playwright operations by the specified amount of milliseconds. Useful so that you can see what is going on.
Defaults to 0.
headers : Union[Dict[str, str], NoneType]
Additional HTTP headers to be sent with connect request. Optional.
Returns
-------
Browser
"""
return mapping.from_impl(
await self._async(
"browser_type.connect_over_cdp",
self._impl_obj.connect_over_cdp(
endpointURL=endpoint_url,
timeout=timeout,
slow_mo=slow_mo,
headers=mapping.to_impl(headers),
),
)
)
async def connect(
self,
ws_endpoint: str,
*,
timeout: float = None,
slow_mo: float = None,
headers: typing.Optional[typing.Dict[str, str]] = None
) -> "Browser":
"""BrowserType.connect
This methods attaches Playwright to an existing browser instance.
Parameters
----------
ws_endpoint : str
A browser websocket endpoint to connect to.
timeout : Union[float, NoneType]
Maximum time in milliseconds to wait for the connection to be established. Defaults to `30000` (30 seconds). Pass `0` to
disable timeout.
slow_mo : Union[float, NoneType]
Slows down Playwright operations by the specified amount of milliseconds. Useful so that you can see what is going on.
Defaults to 0.
headers : Union[Dict[str, str], NoneType]
Additional HTTP headers to be sent with web socket connect request. Optional.
Returns
-------
Browser
"""
return mapping.from_impl(
await self._async(
"browser_type.connect",
self._impl_obj.connect(
ws_endpoint=ws_endpoint,
timeout=timeout,
slow_mo=slow_mo,
headers=mapping.to_impl(headers),
),
)
)
# Associate the private implementation class with its public async wrapper so
# that ``mapping`` can convert between the two at the API boundary.
mapping.register(BrowserTypeImpl, BrowserType)
class Playwright(AsyncBase):
    @property
    def devices(self) -> typing.Dict:
        """Playwright.devices

        Returns a dictionary of devices to be used with `browser.new_context()` or `browser.new_page()`.

        ```py
        import asyncio
        from playwright.async_api import async_playwright

        async def run(playwright):
            webkit = playwright.webkit
            iphone = playwright.devices[\"iPhone 6\"]
            browser = await webkit.launch()
            context = await browser.new_context(**iphone)
            page = await context.new_page()
            await page.goto(\"http://example.com\")
            # other actions...
            await browser.close()

        async def main():
            async with async_playwright() as playwright:
                await run(playwright)
        asyncio.run(main())
        ```

        Returns
        -------
        Dict
        """
        return mapping.from_maybe_impl(self._impl_obj.devices)

    @property
    def selectors(self) -> "Selectors":
        """Playwright.selectors

        Selectors can be used to install custom selector engines. See [Working with selectors](./selectors.md) for more
        information.

        Returns
        -------
        Selectors
        """
        return mapping.from_impl(self._impl_obj.selectors)

    @property
    def chromium(self) -> "BrowserType":
        """Playwright.chromium

        This object can be used to launch or connect to Chromium, returning instances of `Browser`.

        Returns
        -------
        BrowserType
        """
        return mapping.from_impl(self._impl_obj.chromium)

    @property
    def firefox(self) -> "BrowserType":
        """Playwright.firefox

        This object can be used to launch or connect to Firefox, returning instances of `Browser`.

        Returns
        -------
        BrowserType
        """
        return mapping.from_impl(self._impl_obj.firefox)

    @property
    def webkit(self) -> "BrowserType":
        """Playwright.webkit

        This object can be used to launch or connect to WebKit, returning instances of `Browser`.

        Returns
        -------
        BrowserType
        """
        return mapping.from_impl(self._impl_obj.webkit)

    @property
    def request(self) -> "APIRequest":
        """Playwright.request

        Exposes API that can be used for the Web API testing.

        Returns
        -------
        APIRequest
        """
        return mapping.from_impl(self._impl_obj.request)

    def __getitem__(self, value: str) -> "BrowserType":
        """Playwright.__getitem__

        Returns the `BrowserType` with the given name, e.g. ``playwright[\"chromium\"]``.

        Returns
        -------
        BrowserType
        """
        return mapping.from_impl(self._impl_obj.__getitem__(value=value))

    def stop(self) -> NoneType:
        """Playwright.stop

        Terminates this instance of Playwright in case it was created bypassing the Python context manager. This is useful in
        REPL applications.

        ```py
        >>> from playwright.sync_api import sync_playwright

        >>> playwright = sync_playwright().start()

        >>> browser = playwright.chromium.launch()
        >>> page = browser.new_page()
        >>> page.goto(\"http://whatsmyuseragent.org/\")
        >>> page.screenshot(path=\"example.png\")
        >>> browser.close()

        >>> playwright.stop()
        ```
        """
        return mapping.from_maybe_impl(self._impl_obj.stop())
# Associate the private implementation class with its public async wrapper so
# that ``mapping`` can convert between the two at the API boundary.
mapping.register(PlaywrightImpl, Playwright)
class Tracing(AsyncBase):
    async def start(
        self,
        *,
        name: str = None,
        title: str = None,
        snapshots: bool = None,
        screenshots: bool = None,
        sources: bool = None
    ) -> NoneType:
        """Tracing.start

        Start tracing.

        ```py
        await context.tracing.start(name=\"trace\", screenshots=True, snapshots=True)
        page = await context.new_page()
        await page.goto(\"https://playwright.dev\")
        await context.tracing.stop(path = \"trace.zip\")
        ```

        Parameters
        ----------
        name : Union[str, NoneType]
            If specified, the trace is saved into a file with this name inside the
            `tracesDir` folder specified in `browser_type.launch()`.
        title : Union[str, NoneType]
            Trace name to be shown in the Trace Viewer.
        snapshots : Union[bool, NoneType]
            If true, tracing captures a DOM snapshot on every action and records
            network activity.
        screenshots : Union[bool, NoneType]
            Whether to capture screenshots during tracing; used to build a timeline
            preview.
        sources : Union[bool, NoneType]
            Whether to include source files for trace actions.
        """
        impl_coro = self._impl_obj.start(
            name=name,
            title=title,
            snapshots=snapshots,
            screenshots=screenshots,
            sources=sources,
        )
        outcome = await self._async("tracing.start", impl_coro)
        return mapping.from_maybe_impl(outcome)

    async def start_chunk(self, *, title: str = None) -> NoneType:
        """Tracing.start_chunk

        Start a new trace chunk. To record multiple traces on the same
        `BrowserContext`, call `tracing.start()` once and then create multiple
        chunks with `tracing.start_chunk()` and `tracing.stop_chunk()`.

        ```py
        await context.tracing.start(name=\"trace\", screenshots=True, snapshots=True)
        page = await context.new_page()
        await page.goto(\"https://playwright.dev\")

        await context.tracing.start_chunk()
        await page.click(\"text=Get Started\")
        # Everything between start_chunk and stop_chunk will be recorded in the trace.
        await context.tracing.stop_chunk(path = \"trace1.zip\")

        await context.tracing.start_chunk()
        await page.goto(\"http://example.com\")
        # Save a second trace file with different actions.
        await context.tracing.stop_chunk(path = \"trace2.zip\")
        ```

        Parameters
        ----------
        title : Union[str, NoneType]
            Trace name to be shown in the Trace Viewer.
        """
        impl_coro = self._impl_obj.start_chunk(title=title)
        outcome = await self._async("tracing.start_chunk", impl_coro)
        return mapping.from_maybe_impl(outcome)

    async def stop_chunk(
        self, *, path: typing.Union[str, pathlib.Path] = None
    ) -> NoneType:
        """Tracing.stop_chunk

        Stop the trace chunk. See `tracing.start_chunk()` for more details about
        multiple trace chunks.

        Parameters
        ----------
        path : Union[pathlib.Path, str, NoneType]
            Export trace collected since the last `tracing.start_chunk()` call into
            the file with the given path.
        """
        impl_coro = self._impl_obj.stop_chunk(path=path)
        outcome = await self._async("tracing.stop_chunk", impl_coro)
        return mapping.from_maybe_impl(outcome)

    async def stop(self, *, path: typing.Union[str, pathlib.Path] = None) -> NoneType:
        """Tracing.stop

        Stop tracing.

        Parameters
        ----------
        path : Union[pathlib.Path, str, NoneType]
            Export trace into the file with the given path.
        """
        impl_coro = self._impl_obj.stop(path=path)
        outcome = await self._async("tracing.stop", impl_coro)
        return mapping.from_maybe_impl(outcome)
# Associate the private implementation class with its public async wrapper so
# that ``mapping`` can convert between the two at the API boundary.
mapping.register(TracingImpl, Tracing)
class Locator(AsyncBase):
@property
def page(self) -> "Page":
"""Locator.page
A page this locator belongs to.
Returns
-------
Page
"""
return mapping.from_impl(self._impl_obj.page)
@property
def first(self) -> "Locator":
"""Locator.first
Returns locator to the first matching element.
Returns
-------
Locator
"""
return mapping.from_impl(self._impl_obj.first)
@property
def last(self) -> "Locator":
"""Locator.last
Returns locator to the last matching element.
Returns
-------
Locator
"""
return mapping.from_impl(self._impl_obj.last)
    async def bounding_box(
        self, *, timeout: float = None
    ) -> typing.Optional[FloatRect]:
        """Locator.bounding_box

        This method returns the bounding box of the element, or `null` if the element is not visible. The bounding box is
        calculated relative to the main frame viewport - which is usually the same as the browser window.

        Scrolling affects the returned bounding box, similarly to
        [Element.getBoundingClientRect](https://developer.mozilla.org/en-US/docs/Web/API/Element/getBoundingClientRect). That
        means `x` and/or `y` may be negative.

        Elements from child frames return the bounding box relative to the main frame, unlike the
        [Element.getBoundingClientRect](https://developer.mozilla.org/en-US/docs/Web/API/Element/getBoundingClientRect).

        Assuming the page is static, it is safe to use bounding box coordinates to perform input. For example, the following
        snippet should click the center of the element.

        ```py
        box = await element.bounding_box()
        await page.mouse.click(box[\"x\"] + box[\"width\"] / 2, box[\"y\"] + box[\"height\"] / 2)
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
            using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.

        Returns
        -------
        Union[{x: float, y: float, width: float, height: float}, NoneType]
        """
        return mapping.from_impl_nullable(
            await self._async(
                "locator.bounding_box", self._impl_obj.bounding_box(timeout=timeout)
            )
        )
async def check(
self,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""Locator.check
This method checks the element by performing the following steps:
1. Ensure that element is a checkbox or a radio input. If not, this method throws. If the element is already checked,
this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked. If not, this method throws.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.check",
self._impl_obj.check(
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def click(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
click_count: int = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""Locator.click
This method clicks the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
click_count : Union[int, NoneType]
defaults to 1. See [UIEvent.detail].
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.click",
self._impl_obj.click(
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
clickCount=click_count,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def dblclick(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
delay: float = None,
button: Literal["left", "middle", "right"] = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""Locator.dblclick
This method double clicks the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to double click in the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set. Note that if the
first click of the `dblclick()` triggers a navigation event, this method will throw.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `element.dblclick()` dispatches two `click` events and a single `dblclick` event.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
delay : Union[float, NoneType]
Time to wait between `mousedown` and `mouseup` in milliseconds. Defaults to 0.
button : Union["left", "middle", "right", NoneType]
Defaults to `left`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.dblclick",
self._impl_obj.dblclick(
modifiers=mapping.to_impl(modifiers),
position=position,
delay=delay,
button=button,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def dispatch_event(
self, type: str, event_init: typing.Dict = None, *, timeout: float = None
) -> NoneType:
"""Locator.dispatch_event
The snippet below dispatches the `click` event on the element. Regardless of the visibility state of the element,
`click` is dispatched. This is equivalent to calling
[element.click()](https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/click).
```py
await element.dispatch_event(\"click\")
```
Under the hood, it creates an instance of an event based on the given `type`, initializes it with `eventInit` properties
and dispatches it on the element. Events are `composed`, `cancelable` and bubble by default.
Since `eventInit` is event-specific, please refer to the events documentation for the lists of initial properties:
- [DragEvent](https://developer.mozilla.org/en-US/docs/Web/API/DragEvent/DragEvent)
- [FocusEvent](https://developer.mozilla.org/en-US/docs/Web/API/FocusEvent/FocusEvent)
- [KeyboardEvent](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/KeyboardEvent)
- [MouseEvent](https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/MouseEvent)
- [PointerEvent](https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/PointerEvent)
- [TouchEvent](https://developer.mozilla.org/en-US/docs/Web/API/TouchEvent/TouchEvent)
- [Event](https://developer.mozilla.org/en-US/docs/Web/API/Event/Event)
You can also specify `JSHandle` as the property value if you want live objects to be passed into the event:
```py
# note you can only create data_transfer in chromium and firefox
data_transfer = await page.evaluate_handle(\"new DataTransfer()\")
await element.dispatch_event(\"#source\", \"dragstart\", {\"dataTransfer\": data_transfer})
```
Parameters
----------
type : str
DOM event type: `"click"`, `"dragstart"`, etc.
event_init : Union[Dict, NoneType]
Optional event-specific initialization properties.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.dispatch_event",
self._impl_obj.dispatch_event(
type=type, eventInit=mapping.to_impl(event_init), timeout=timeout
),
)
)
async def evaluate(
self, expression: str, arg: typing.Any = None, *, timeout: float = None
) -> typing.Any:
"""Locator.evaluate
Returns the return value of `expression`.
This method passes this handle as the first argument to `expression`.
If `expression` returns a [Promise], then `handle.evaluate` would wait for the promise to resolve and return its value.
Examples:
```py
tweets = page.locator(\".tweet .retweets\")
assert await tweets.evaluate(\"node => node.innerText\") == \"10 retweets\"
```
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
Any
"""
return mapping.from_maybe_impl(
await self._async(
"locator.evaluate",
self._impl_obj.evaluate(
expression=expression, arg=mapping.to_impl(arg), timeout=timeout
),
)
)
    async def evaluate_all(self, expression: str, arg: typing.Any = None) -> typing.Any:
        """Locator.evaluate_all
        The method finds all elements matching the specified locator and passes an array of matched elements as a first argument
        to `expression`. Returns the result of `expression` invocation.
        If `expression` returns a [Promise], then `locator.evaluate_all()` would wait for the promise to resolve and
        return its value.
        Examples:
        ```py
        elements = page.locator(\"div\")
        div_counts = await elements.evaluate_all(\"(divs, min) => divs.length >= min\", 10)
        ```
        Parameters
        ----------
        expression : str
            JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
            as a function. Otherwise, evaluated as an expression.
        arg : Union[Any, NoneType]
            Optional argument to pass to `expression`.
        Returns
        -------
        Any
        """
        return mapping.from_maybe_impl(
            await self._async(
                "locator.evaluate_all",
                self._impl_obj.evaluate_all(
                    expression=expression, arg=mapping.to_impl(arg)
                ),
            )
        )
async def evaluate_handle(
self, expression: str, arg: typing.Any = None, *, timeout: float = None
) -> "JSHandle":
"""Locator.evaluate_handle
Returns the return value of `expression` as a `JSHandle`.
This method passes this handle as the first argument to `expression`.
The only difference between `locator.evaluate()` and `locator.evaluate_handle()` is that
`locator.evaluate_handle()` returns `JSHandle`.
If the function passed to the `locator.evaluate_handle()` returns a [Promise], then
`locator.evaluate_handle()` would wait for the promise to resolve and return its value.
See `page.evaluate_handle()` for more details.
Parameters
----------
expression : str
JavaScript expression to be evaluated in the browser context. If it looks like a function declaration, it is interpreted
as a function. Otherwise, evaluated as an expression.
arg : Union[Any, NoneType]
Optional argument to pass to `expression`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
JSHandle
"""
return mapping.from_impl(
await self._async(
"locator.evaluate_handle",
self._impl_obj.evaluate_handle(
expression=expression, arg=mapping.to_impl(arg), timeout=timeout
),
)
)
async def fill(
self,
value: str,
*,
timeout: float = None,
no_wait_after: bool = None,
force: bool = None
) -> NoneType:
"""Locator.fill
This method waits for [actionability](./actionability.md) checks, focuses the element, fills it and triggers an `input`
event after filling. Note that you can pass an empty string to clear the input field.
If the target element is not an `<input>`, `<textarea>` or `[contenteditable]` element, this method throws an error.
However, if the element is inside the `<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be filled
instead.
To send fine-grained keyboard events, use `locator.type()`.
Parameters
----------
value : str
Value to set for the `<input>`, `<textarea>` or `[contenteditable]` element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.fill",
self._impl_obj.fill(
value=value, timeout=timeout, noWaitAfter=no_wait_after, force=force
),
)
)
def locator(
self,
selector: str,
*,
has_text: typing.Union[str, typing.Pattern] = None,
has: "Locator" = None
) -> "Locator":
"""Locator.locator
The method finds an element matching the specified selector in the `Locator`'s subtree.
Parameters
----------
selector : str
A selector to use when resolving DOM element. See [working with selectors](./selectors.md) for more details.
has_text : Union[Pattern, str, NoneType]
Matches elements containing specified text somewhere inside, possibly in a child or a descendant element. For example,
`"Playwright"` matches `<article><div>Playwright</div></article>`.
has : Union[Locator, NoneType]
Matches elements containing an element that matches an inner locator. Inner locator is queried against the outer one.
For example, `article` that has `text=Playwright` matches `<article><div>Playwright</div></article>`.
Note that outer and inner locators must belong to the same frame. Inner locator must not contain `FrameLocator`s.
Returns
-------
Locator
"""
return mapping.from_impl(
self._impl_obj.locator(
selector=selector, has_text=has_text, has=has._impl_obj if has else None
)
)
def frame_locator(self, selector: str) -> "FrameLocator":
"""Locator.frame_locator
When working with iframes, you can create a frame locator that will enter the iframe and allow selecting elements in
that iframe:
```py
locator = page.frame_locator(\"iframe\").locator(\"text=Submit\")
await locator.click()
```
Parameters
----------
selector : str
A selector to use when resolving DOM element. See [working with selectors](./selectors.md) for more details.
Returns
-------
FrameLocator
"""
return mapping.from_impl(self._impl_obj.frame_locator(selector=selector))
async def element_handle(self, *, timeout: float = None) -> "ElementHandle":
"""Locator.element_handle
Resolves given locator to the first matching DOM element. If no elements matching the query are visible, waits for them
up to a given timeout. If multiple elements match the selector, throws.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
ElementHandle
"""
return mapping.from_impl(
await self._async(
"locator.element_handle", self._impl_obj.element_handle(timeout=timeout)
)
)
async def element_handles(self) -> typing.List["ElementHandle"]:
"""Locator.element_handles
Resolves given locator to all matching DOM elements.
Returns
-------
List[ElementHandle]
"""
return mapping.from_impl_list(
await self._async(
"locator.element_handles", self._impl_obj.element_handles()
)
)
def nth(self, index: int) -> "Locator":
"""Locator.nth
Returns locator to the n-th matching element.
Parameters
----------
index : int
Returns
-------
Locator
"""
return mapping.from_impl(self._impl_obj.nth(index=index))
async def focus(self, *, timeout: float = None) -> NoneType:
"""Locator.focus
Calls [focus](https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/focus) on the element.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async("locator.focus", self._impl_obj.focus(timeout=timeout))
)
async def count(self) -> int:
"""Locator.count
Returns the number of elements matching given selector.
Returns
-------
int
"""
return mapping.from_maybe_impl(
await self._async("locator.count", self._impl_obj.count())
)
async def drag_to(
self,
target: "Locator",
*,
force: bool = None,
no_wait_after: bool = None,
timeout: float = None,
trial: bool = None,
source_position: Position = None,
target_position: Position = None
) -> NoneType:
"""Locator.drag_to
Parameters
----------
target : Locator
Locator of the element to drag to.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
source_position : Union[{x: float, y: float}, NoneType]
Clicks on the source element at this point relative to the top-left corner of the element's padding box. If not
specified, some visible point of the element is used.
target_position : Union[{x: float, y: float}, NoneType]
Drops on the target element at this point relative to the top-left corner of the element's padding box. If not
specified, some visible point of the element is used.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.drag_to",
self._impl_obj.drag_to(
target=target._impl_obj,
force=force,
noWaitAfter=no_wait_after,
timeout=timeout,
trial=trial,
sourcePosition=source_position,
targetPosition=target_position,
),
)
)
async def get_attribute(
self, name: str, *, timeout: float = None
) -> typing.Optional[str]:
"""Locator.get_attribute
Returns element attribute value.
Parameters
----------
name : str
Attribute name to get the value for.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"locator.get_attribute",
self._impl_obj.get_attribute(name=name, timeout=timeout),
)
)
async def hover(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
trial: bool = None
) -> NoneType:
"""Locator.hover
This method hovers over the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to hover over the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.hover",
self._impl_obj.hover(
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
trial=trial,
),
)
)
async def inner_html(self, *, timeout: float = None) -> str:
"""Locator.inner_html
Returns the `element.innerHTML`.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"locator.inner_html", self._impl_obj.inner_html(timeout=timeout)
)
)
async def inner_text(self, *, timeout: float = None) -> str:
"""Locator.inner_text
Returns the `element.innerText`.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"locator.inner_text", self._impl_obj.inner_text(timeout=timeout)
)
)
async def input_value(self, *, timeout: float = None) -> str:
"""Locator.input_value
Returns `input.value` for `<input>` or `<textarea>` or `<select>` element. Throws for non-input elements.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
str
"""
return mapping.from_maybe_impl(
await self._async(
"locator.input_value", self._impl_obj.input_value(timeout=timeout)
)
)
async def is_checked(self, *, timeout: float = None) -> bool:
"""Locator.is_checked
Returns whether the element is checked. Throws if the element is not a checkbox or radio input.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"locator.is_checked", self._impl_obj.is_checked(timeout=timeout)
)
)
async def is_disabled(self, *, timeout: float = None) -> bool:
"""Locator.is_disabled
Returns whether the element is disabled, the opposite of [enabled](./actionability.md#enabled).
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"locator.is_disabled", self._impl_obj.is_disabled(timeout=timeout)
)
)
async def is_editable(self, *, timeout: float = None) -> bool:
"""Locator.is_editable
Returns whether the element is [editable](./actionability.md#editable).
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"locator.is_editable", self._impl_obj.is_editable(timeout=timeout)
)
)
async def is_enabled(self, *, timeout: float = None) -> bool:
"""Locator.is_enabled
Returns whether the element is [enabled](./actionability.md#enabled).
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"locator.is_enabled", self._impl_obj.is_enabled(timeout=timeout)
)
)
async def is_hidden(self, *, timeout: float = None) -> bool:
"""Locator.is_hidden
Returns whether the element is hidden, the opposite of [visible](./actionability.md#visible).
Parameters
----------
timeout : Union[float, NoneType]
**DEPRECATED** This option is ignored. `locator.is_hidden()` does not wait for the element to become hidden and
returns immediately.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"locator.is_hidden", self._impl_obj.is_hidden(timeout=timeout)
)
)
async def is_visible(self, *, timeout: float = None) -> bool:
"""Locator.is_visible
Returns whether the element is [visible](./actionability.md#visible).
Parameters
----------
timeout : Union[float, NoneType]
**DEPRECATED** This option is ignored. `locator.is_visible()` does not wait for the element to become visible and
returns immediately.
Returns
-------
bool
"""
return mapping.from_maybe_impl(
await self._async(
"locator.is_visible", self._impl_obj.is_visible(timeout=timeout)
)
)
async def press(
self,
key: str,
*,
delay: float = None,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""Locator.press
Focuses the element, and then uses `keyboard.down()` and `keyboard.up()`.
`key` can specify the intended [keyboardEvent.key](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key)
value or a single character to generate the text for. A superset of the `key` values can be found
[here](https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values). Examples of the keys are:
`F1` - `F12`, `Digit0`- `Digit9`, `KeyA`- `KeyZ`, `Backquote`, `Minus`, `Equal`, `Backslash`, `Backspace`, `Tab`,
`Delete`, `Escape`, `ArrowDown`, `End`, `Enter`, `Home`, `Insert`, `PageDown`, `PageUp`, `ArrowRight`, `ArrowUp`, etc.
Following modification shortcuts are also supported: `Shift`, `Control`, `Alt`, `Meta`, `ShiftLeft`.
Holding down `Shift` will type the text that corresponds to the `key` in the upper case.
If `key` is a single character, it is case-sensitive, so the values `a` and `A` will generate different respective
texts.
Shortcuts such as `key: \"Control+o\"` or `key: \"Control+Shift+T\"` are supported as well. When specified with the
modifier, modifier is pressed and being held while the subsequent key is being pressed.
Parameters
----------
key : str
Name of the key to press or a character to generate, such as `ArrowLeft` or `a`.
delay : Union[float, NoneType]
Time to wait between `keydown` and `keyup` in milliseconds. Defaults to 0.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.press",
self._impl_obj.press(
key=key, delay=delay, timeout=timeout, noWaitAfter=no_wait_after
),
)
)
async def screenshot(
self,
*,
timeout: float = None,
type: Literal["jpeg", "png"] = None,
path: typing.Union[str, pathlib.Path] = None,
quality: int = None,
omit_background: bool = None,
animations: Literal["disabled"] = None,
mask: typing.List["Locator"] = None
) -> bytes:
"""Locator.screenshot
Returns the buffer with the captured screenshot.
This method waits for the [actionability](./actionability.md) checks, then scrolls element into view before taking a
screenshot. If the element is detached from DOM, the method throws an error.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
type : Union["jpeg", "png", NoneType]
Specify screenshot type, defaults to `png`.
path : Union[pathlib.Path, str, NoneType]
The file path to save the image to. The screenshot type will be inferred from file extension. If `path` is a relative
path, then it is resolved relative to the current working directory. If no path is provided, the image won't be saved to
the disk.
quality : Union[int, NoneType]
The quality of the image, between 0-100. Not applicable to `png` images.
omit_background : Union[bool, NoneType]
Hides default white background and allows capturing screenshots with transparency. Not applicable to `jpeg` images.
Defaults to `false`.
animations : Union["disabled", NoneType]
When set to `"disabled"`, stops CSS animations, CSS transitions and Web Animations. Animations get different treatment
depending on their duration:
- finite animations are fast-forwarded to completion, so they'll fire `transitionend` event.
- infinite animations are canceled to initial state, and then played over after the screenshot.
mask : Union[List[Locator], NoneType]
Specify locators that should be masked when the screenshot is taken. Masked elements will be overlayed with a pink box
`#FF00FF` that completely covers its bounding box.
Returns
-------
bytes
"""
return mapping.from_maybe_impl(
await self._async(
"locator.screenshot",
self._impl_obj.screenshot(
timeout=timeout,
type=type,
path=path,
quality=quality,
omitBackground=omit_background,
animations=animations,
mask=mapping.to_impl(mask),
),
)
)
async def scroll_into_view_if_needed(self, *, timeout: float = None) -> NoneType:
"""Locator.scroll_into_view_if_needed
This method waits for [actionability](./actionability.md) checks, then tries to scroll element into view, unless it is
completely visible as defined by
[IntersectionObserver](https://developer.mozilla.org/en-US/docs/Web/API/Intersection_Observer_API)'s `ratio`.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.scroll_into_view_if_needed",
self._impl_obj.scroll_into_view_if_needed(timeout=timeout),
)
)
async def select_option(
self,
value: typing.Union[str, typing.List[str]] = None,
*,
index: typing.Union[int, typing.List[int]] = None,
label: typing.Union[str, typing.List[str]] = None,
element: typing.Union["ElementHandle", typing.List["ElementHandle"]] = None,
timeout: float = None,
no_wait_after: bool = None,
force: bool = None
) -> typing.List[str]:
"""Locator.select_option
This method waits for [actionability](./actionability.md) checks, waits until all specified options are present in the
`<select>` element and selects these options.
If the target element is not a `<select>` element, this method throws an error. However, if the element is inside the
`<label>` element that has an associated
[control](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLabelElement/control), the control will be used instead.
Returns the array of option values that have been successfully selected.
Triggers a `change` and `input` event once all the provided options have been selected.
```py
# single selection matching the value
await element.select_option(\"blue\")
# single selection matching the label
await element.select_option(label=\"blue\")
# multiple selection
await element.select_option(value=[\"red\", \"green\", \"blue\"])
```
Parameters
----------
value : Union[List[str], str, NoneType]
Options to select by value. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
index : Union[List[int], int, NoneType]
Options to select by index. Optional.
label : Union[List[str], str, NoneType]
Options to select by label. If the `<select>` has the `multiple` attribute, all given options are selected, otherwise
only the first option matching one of the passed options is selected. Optional.
element : Union[ElementHandle, List[ElementHandle], NoneType]
Option elements to select. Optional.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
Returns
-------
List[str]
"""
return mapping.from_maybe_impl(
await self._async(
"locator.select_option",
self._impl_obj.select_option(
value=mapping.to_impl(value),
index=mapping.to_impl(index),
label=mapping.to_impl(label),
element=mapping.to_impl(element),
timeout=timeout,
noWaitAfter=no_wait_after,
force=force,
),
)
)
async def select_text(
self, *, force: bool = None, timeout: float = None
) -> NoneType:
"""Locator.select_text
This method waits for [actionability](./actionability.md) checks, then focuses the element and selects all its text
content.
Parameters
----------
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.select_text",
self._impl_obj.select_text(force=force, timeout=timeout),
)
)
async def set_input_files(
self,
files: typing.Union[
str,
pathlib.Path,
FilePayload,
typing.List[typing.Union[str, pathlib.Path]],
typing.List[FilePayload],
],
*,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""Locator.set_input_files
This method expects `element` to point to an
[input element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input).
Sets the value of the file input to these file paths or files. If some of the `filePaths` are relative paths, then they
are resolved relative to the the current working directory. For empty array, clears the selected files.
Parameters
----------
files : Union[List[Union[pathlib.Path, str]], List[{name: str, mimeType: str, buffer: bytes}], pathlib.Path, str, {name: str, mimeType: str, buffer: bytes}]
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.set_input_files",
self._impl_obj.set_input_files(
files=mapping.to_impl(files),
timeout=timeout,
noWaitAfter=no_wait_after,
),
)
)
async def tap(
self,
*,
modifiers: typing.Optional[
typing.List[Literal["Alt", "Control", "Meta", "Shift"]]
] = None,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""Locator.tap
This method taps the element by performing the following steps:
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.touchscreen` to tap the center of the element, or the specified `position`.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
> NOTE: `element.tap()` requires that the `hasTouch` option of the browser context be set to true.
Parameters
----------
modifiers : Union[List[Union["Alt", "Control", "Meta", "Shift"]], NoneType]
Modifier keys to press. Ensures that only these modifiers are pressed during the operation, and then restores current
modifiers back. If not specified, currently pressed modifiers are used.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.tap",
self._impl_obj.tap(
modifiers=mapping.to_impl(modifiers),
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def text_content(self, *, timeout: float = None) -> typing.Optional[str]:
"""Locator.text_content
Returns the `node.textContent`.
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
Returns
-------
Union[str, NoneType]
"""
return mapping.from_maybe_impl(
await self._async(
"locator.text_content", self._impl_obj.text_content(timeout=timeout)
)
)
async def type(
self,
text: str,
*,
delay: float = None,
timeout: float = None,
no_wait_after: bool = None
) -> NoneType:
"""Locator.type
Focuses the element, and then sends a `keydown`, `keypress`/`input`, and `keyup` event for each character in the text.
To press a special key, like `Control` or `ArrowDown`, use `locator.press()`.
```py
await element.type(\"hello\") # types instantly
await element.type(\"world\", delay=100) # types slower, like a user
```
An example of typing into a text field and then submitting the form:
```py
element = page.locator(\"input\")
await element.type(\"some text\")
await element.press(\"Enter\")
```
Parameters
----------
text : str
A text to type into a focused element.
delay : Union[float, NoneType]
Time to wait between key presses in milliseconds. Defaults to 0.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.type",
self._impl_obj.type(
text=text, delay=delay, timeout=timeout, noWaitAfter=no_wait_after
),
)
)
async def uncheck(
self,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""Locator.uncheck
This method checks the element by performing the following steps:
1. Ensure that element is a checkbox or a radio input. If not, this method throws. If the element is already
unchecked, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the element, unless `force` option is set.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now unchecked. If not, this method throws.
If the element is detached from the DOM at any moment during the action, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.uncheck",
self._impl_obj.uncheck(
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def all_inner_texts(self) -> typing.List[str]:
"""Locator.all_inner_texts
Returns an array of `node.innerText` values for all matching nodes.
Returns
-------
List[str]
"""
return mapping.from_maybe_impl(
await self._async(
"locator.all_inner_texts", self._impl_obj.all_inner_texts()
)
)
async def all_text_contents(self) -> typing.List[str]:
"""Locator.all_text_contents
Returns an array of `node.textContent` values for all matching nodes.
Returns
-------
List[str]
"""
return mapping.from_maybe_impl(
await self._async(
"locator.all_text_contents", self._impl_obj.all_text_contents()
)
)
async def wait_for(
self,
*,
timeout: float = None,
state: Literal["attached", "detached", "hidden", "visible"] = None
) -> NoneType:
"""Locator.wait_for
Returns when element specified by locator satisfies the `state` option.
If target element already satisfies the condition, the method returns immediately. Otherwise, waits for up to `timeout`
milliseconds until the condition is met.
```py
order_sent = page.locator(\"#order-sent\")
await order_sent.wait_for()
```
Parameters
----------
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
state : Union["attached", "detached", "hidden", "visible", NoneType]
Defaults to `'visible'`. Can be either:
- `'attached'` - wait for element to be present in DOM.
- `'detached'` - wait for element to not be present in DOM.
- `'visible'` - wait for element to have non-empty bounding box and no `visibility:hidden`. Note that element without
any content or with `display:none` has an empty bounding box and is not considered visible.
- `'hidden'` - wait for element to be either detached from DOM, or have an empty bounding box or `visibility:hidden`.
This is opposite to the `'visible'` option.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.wait_for",
self._impl_obj.wait_for(timeout=timeout, state=state),
)
)
async def set_checked(
self,
checked: bool,
*,
position: Position = None,
timeout: float = None,
force: bool = None,
no_wait_after: bool = None,
trial: bool = None
) -> NoneType:
"""Locator.set_checked
This method checks or unchecks an element by performing the following steps:
1. Ensure that matched element is a checkbox or a radio input. If not, this method throws.
1. If the element already has the right checked state, this method returns immediately.
1. Wait for [actionability](./actionability.md) checks on the matched element, unless `force` option is set. If the
element is detached during the checks, the whole action is retried.
1. Scroll the element into view if needed.
1. Use `page.mouse` to click in the center of the element.
1. Wait for initiated navigations to either succeed or fail, unless `noWaitAfter` option is set.
1. Ensure that the element is now checked or unchecked. If not, this method throws.
When all steps combined have not finished during the specified `timeout`, this method throws a `TimeoutError`. Passing
zero timeout disables this.
Parameters
----------
checked : bool
Whether to check or uncheck the checkbox.
position : Union[{x: float, y: float}, NoneType]
A point to use relative to the top-left corner of element padding box. If not specified, uses some visible point of the
element.
timeout : Union[float, NoneType]
Maximum time in milliseconds, defaults to 30 seconds, pass `0` to disable timeout. The default value can be changed by
using the `browser_context.set_default_timeout()` or `page.set_default_timeout()` methods.
force : Union[bool, NoneType]
Whether to bypass the [actionability](./actionability.md) checks. Defaults to `false`.
no_wait_after : Union[bool, NoneType]
Actions that initiate navigations are waiting for these navigations to happen and for pages to start loading. You can
opt out of waiting via setting this flag. You would only need this option in the exceptional cases such as navigating to
inaccessible pages. Defaults to `false`.
trial : Union[bool, NoneType]
When set, this method only performs the [actionability](./actionability.md) checks and skips the action. Defaults to
`false`. Useful to wait until the element is ready for the action without performing it.
"""
return mapping.from_maybe_impl(
await self._async(
"locator.set_checked",
self._impl_obj.set_checked(
checked=checked,
position=position,
timeout=timeout,
force=force,
noWaitAfter=no_wait_after,
trial=trial,
),
)
)
async def highlight(self) -> NoneType:
"""Locator.highlight
Highlight the corresponding element(s) on the screen. Useful for debugging, don't commit the code that uses
`locator.highlight()`.
"""
return mapping.from_maybe_impl(
await self._async("locator.highlight", self._impl_obj.highlight())
)
# Associate the internal Locator implementation with its public async wrapper.
mapping.register(LocatorImpl, Locator)
class APIResponse(AsyncBase):
    @property
    def ok(self) -> bool:
        """APIResponse.ok

        Boolean stating whether the response was successful (status in the
        range 200-299) or not.

        Returns
        -------
        bool
        """
        ok_flag = self._impl_obj.ok
        return mapping.from_maybe_impl(ok_flag)

    @property
    def url(self) -> str:
        """APIResponse.url

        The URL of the response.

        Returns
        -------
        str
        """
        response_url = self._impl_obj.url
        return mapping.from_maybe_impl(response_url)

    @property
    def status(self) -> int:
        """APIResponse.status

        The status code of the response (e.g., 200 for a success).

        Returns
        -------
        int
        """
        code = self._impl_obj.status
        return mapping.from_maybe_impl(code)

    @property
    def status_text(self) -> str:
        """APIResponse.status_text

        The status text of the response (e.g. usually an \"OK\" for a success).

        Returns
        -------
        str
        """
        text_value = self._impl_obj.status_text
        return mapping.from_maybe_impl(text_value)

    @property
    def headers(self) -> typing.Dict[str, str]:
        """APIResponse.headers

        An object with all the response HTTP headers associated with this
        response.

        Returns
        -------
        Dict[str, str]
        """
        header_map = self._impl_obj.headers
        return mapping.from_maybe_impl(header_map)

    @property
    def headers_array(self) -> typing.List[NameValue]:
        """APIResponse.headers_array

        An array with all the request HTTP headers associated with this
        response. Header names are not lower-cased; headers with multiple
        entries, such as `Set-Cookie`, appear in the array multiple times.

        Returns
        -------
        List[{name: str, value: str}]
        """
        header_list = self._impl_obj.headers_array
        return mapping.from_impl_list(header_list)

    async def body(self) -> bytes:
        """APIResponse.body

        Returns the buffer with response body.

        Returns
        -------
        bytes
        """
        raw = await self._async("api_response.body", self._impl_obj.body())
        return mapping.from_maybe_impl(raw)

    async def text(self) -> str:
        """APIResponse.text

        Returns the text representation of response body.

        Returns
        -------
        str
        """
        decoded = await self._async("api_response.text", self._impl_obj.text())
        return mapping.from_maybe_impl(decoded)

    async def json(self) -> typing.Any:
        """APIResponse.json

        Returns the JSON representation of response body.

        Throws if the response body is not parsable via `JSON.parse`.

        Returns
        -------
        Any
        """
        parsed = await self._async("api_response.json", self._impl_obj.json())
        return mapping.from_maybe_impl(parsed)

    async def dispose(self) -> NoneType:
        """APIResponse.dispose

        Disposes the body of this response. If not called, the body stays in
        memory until the context closes.
        """
        outcome = await self._async("api_response.dispose", self._impl_obj.dispose())
        return mapping.from_maybe_impl(outcome)
# Associate the internal APIResponse implementation with its public async wrapper.
mapping.register(APIResponseImpl, APIResponse)
class APIRequestContext(AsyncBase):
async def dispose(self) -> NoneType:
"""APIRequestContext.dispose
All responses returned by `a_pi_request_context.get()` and similar methods are stored in the memory, so that you
can later call `a_pi_response.body()`. This method discards all stored responses, and makes
`a_pi_response.body()` throw \"Response disposed\" error.
"""
return mapping.from_maybe_impl(
await self._async("api_request_context.dispose", self._impl_obj.dispose())
)
async def delete(
self,
url: str,
*,
params: typing.Optional[
typing.Dict[str, typing.Union[str, float, bool]]
] = None,
headers: typing.Optional[typing.Dict[str, str]] = None,
data: typing.Union[typing.Any, bytes, str] = None,
form: typing.Optional[typing.Dict[str, typing.Union[str, float, bool]]] = None,
multipart: typing.Optional[
typing.Dict[str, typing.Union[bytes, bool, float, str, FilePayload]]
] = None,
timeout: float = None,
fail_on_status_code: bool = None,
ignore_https_errors: bool = None
) -> "APIResponse":
"""APIRequestContext.delete
Sends HTTP(S) [DELETE](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/DELETE) request and returns its
response. The method will populate request cookies from the context and update context cookies from the response. The
method will automatically follow redirects.
Parameters
----------
url : str
Target URL.
params : Union[Dict[str, Union[bool, float, str]], NoneType]
Query parameters to be sent with the URL.
headers : Union[Dict[str, str], NoneType]
Allows to set HTTP headers.
data : Union[Any, bytes, str, NoneType]
Allows to set post data of the request. If the data parameter is an object, it will be serialized to json string and
`content-type` header will be set to `application/json` if not explicitly set. Otherwise the `content-type` header will
be set to `application/octet-stream` if not explicitly set.
form : Union[Dict[str, Union[bool, float, str]], NoneType]
Provides an object that will be serialized as html form using `application/x-www-form-urlencoded` encoding and sent as
this request body. If this parameter is specified `content-type` header will be set to
`application/x-www-form-urlencoded` unless explicitly provided.
multipart : Union[Dict[str, Union[bool, bytes, float, str, {name: str, mimeType: str, buffer: bytes}]], NoneType]
Provides an object that will be serialized as html form using `multipart/form-data` encoding and sent as this request
body. If this parameter is specified `content-type` header will be set to `multipart/form-data` unless explicitly
provided. File values can be passed either as [`fs.ReadStream`](https://nodejs.org/api/fs.html#fs_class_fs_readstream)
or as file-like object containing file name, mime-type and its content.
timeout : Union[float, NoneType]
Request timeout in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
fail_on_status_code : Union[bool, NoneType]
Whether to throw on response codes other than 2xx and 3xx. By default response object is returned for all status codes.
ignore_https_errors : Union[bool, NoneType]
Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
Returns
-------
APIResponse
"""
return mapping.from_impl(
await self._async(
"api_request_context.delete",
self._impl_obj.delete(
url=url,
params=mapping.to_impl(params),
headers=mapping.to_impl(headers),
data=mapping.to_impl(data),
form=mapping.to_impl(form),
multipart=mapping.to_impl(multipart),
timeout=timeout,
failOnStatusCode=fail_on_status_code,
ignoreHTTPSErrors=ignore_https_errors,
),
)
)
async def head(
self,
url: str,
*,
params: typing.Optional[
typing.Dict[str, typing.Union[str, float, bool]]
] = None,
headers: typing.Optional[typing.Dict[str, str]] = None,
timeout: float = None,
fail_on_status_code: bool = None,
ignore_https_errors: bool = None
) -> "APIResponse":
"""APIRequestContext.head
Sends HTTP(S) [HEAD](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) request and returns its response.
The method will populate request cookies from the context and update context cookies from the response. The method will
automatically follow redirects.
Parameters
----------
url : str
Target URL.
params : Union[Dict[str, Union[bool, float, str]], NoneType]
Query parameters to be sent with the URL.
headers : Union[Dict[str, str], NoneType]
Allows to set HTTP headers.
timeout : Union[float, NoneType]
Request timeout in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
fail_on_status_code : Union[bool, NoneType]
Whether to throw on response codes other than 2xx and 3xx. By default response object is returned for all status codes.
ignore_https_errors : Union[bool, NoneType]
Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
Returns
-------
APIResponse
"""
return mapping.from_impl(
await self._async(
"api_request_context.head",
self._impl_obj.head(
url=url,
params=mapping.to_impl(params),
headers=mapping.to_impl(headers),
timeout=timeout,
failOnStatusCode=fail_on_status_code,
ignoreHTTPSErrors=ignore_https_errors,
),
)
)
async def get(
self,
url: str,
*,
params: typing.Optional[
typing.Dict[str, typing.Union[str, float, bool]]
] = None,
headers: typing.Optional[typing.Dict[str, str]] = None,
timeout: float = None,
fail_on_status_code: bool = None,
ignore_https_errors: bool = None
) -> "APIResponse":
"""APIRequestContext.get
Sends HTTP(S) [GET](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/GET) request and returns its response. The
method will populate request cookies from the context and update context cookies from the response. The method will
automatically follow redirects.
Parameters
----------
url : str
Target URL.
params : Union[Dict[str, Union[bool, float, str]], NoneType]
Query parameters to be sent with the URL.
headers : Union[Dict[str, str], NoneType]
Allows to set HTTP headers.
timeout : Union[float, NoneType]
Request timeout in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
fail_on_status_code : Union[bool, NoneType]
Whether to throw on response codes other than 2xx and 3xx. By default response object is returned for all status codes.
ignore_https_errors : Union[bool, NoneType]
Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
Returns
-------
APIResponse
"""
return mapping.from_impl(
await self._async(
"api_request_context.get",
self._impl_obj.get(
url=url,
params=mapping.to_impl(params),
headers=mapping.to_impl(headers),
timeout=timeout,
failOnStatusCode=fail_on_status_code,
ignoreHTTPSErrors=ignore_https_errors,
),
)
)
async def patch(
self,
url: str,
*,
params: typing.Optional[
typing.Dict[str, typing.Union[str, float, bool]]
] = None,
headers: typing.Optional[typing.Dict[str, str]] = None,
data: typing.Union[typing.Any, bytes, str] = None,
form: typing.Optional[typing.Dict[str, typing.Union[str, float, bool]]] = None,
multipart: typing.Optional[
typing.Dict[str, typing.Union[bytes, bool, float, str, FilePayload]]
] = None,
timeout: float = None,
fail_on_status_code: bool = None,
ignore_https_errors: bool = None
) -> "APIResponse":
"""APIRequestContext.patch
Sends HTTP(S) [PATCH](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/PATCH) request and returns its response.
The method will populate request cookies from the context and update context cookies from the response. The method will
automatically follow redirects.
Parameters
----------
url : str
Target URL.
params : Union[Dict[str, Union[bool, float, str]], NoneType]
Query parameters to be sent with the URL.
headers : Union[Dict[str, str], NoneType]
Allows to set HTTP headers.
data : Union[Any, bytes, str, NoneType]
Allows to set post data of the request. If the data parameter is an object, it will be serialized to json string and
`content-type` header will be set to `application/json` if not explicitly set. Otherwise the `content-type` header will
be set to `application/octet-stream` if not explicitly set.
form : Union[Dict[str, Union[bool, float, str]], NoneType]
Provides an object that will be serialized as html form using `application/x-www-form-urlencoded` encoding and sent as
this request body. If this parameter is specified `content-type` header will be set to
`application/x-www-form-urlencoded` unless explicitly provided.
multipart : Union[Dict[str, Union[bool, bytes, float, str, {name: str, mimeType: str, buffer: bytes}]], NoneType]
Provides an object that will be serialized as html form using `multipart/form-data` encoding and sent as this request
body. If this parameter is specified `content-type` header will be set to `multipart/form-data` unless explicitly
provided. File values can be passed either as [`fs.ReadStream`](https://nodejs.org/api/fs.html#fs_class_fs_readstream)
or as file-like object containing file name, mime-type and its content.
timeout : Union[float, NoneType]
Request timeout in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
fail_on_status_code : Union[bool, NoneType]
Whether to throw on response codes other than 2xx and 3xx. By default response object is returned for all status codes.
ignore_https_errors : Union[bool, NoneType]
Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
Returns
-------
APIResponse
"""
return mapping.from_impl(
await self._async(
"api_request_context.patch",
self._impl_obj.patch(
url=url,
params=mapping.to_impl(params),
headers=mapping.to_impl(headers),
data=mapping.to_impl(data),
form=mapping.to_impl(form),
multipart=mapping.to_impl(multipart),
timeout=timeout,
failOnStatusCode=fail_on_status_code,
ignoreHTTPSErrors=ignore_https_errors,
),
)
)
async def put(
self,
url: str,
*,
params: typing.Optional[
typing.Dict[str, typing.Union[str, float, bool]]
] = None,
headers: typing.Optional[typing.Dict[str, str]] = None,
data: typing.Union[typing.Any, bytes, str] = None,
form: typing.Optional[typing.Dict[str, typing.Union[str, float, bool]]] = None,
multipart: typing.Optional[
typing.Dict[str, typing.Union[bytes, bool, float, str, FilePayload]]
] = None,
timeout: float = None,
fail_on_status_code: bool = None,
ignore_https_errors: bool = None
) -> "APIResponse":
"""APIRequestContext.put
Sends HTTP(S) [PUT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/PUT) request and returns its response. The
method will populate request cookies from the context and update context cookies from the response. The method will
automatically follow redirects.
Parameters
----------
url : str
Target URL.
params : Union[Dict[str, Union[bool, float, str]], NoneType]
Query parameters to be sent with the URL.
headers : Union[Dict[str, str], NoneType]
Allows to set HTTP headers.
data : Union[Any, bytes, str, NoneType]
Allows to set post data of the request. If the data parameter is an object, it will be serialized to json string and
`content-type` header will be set to `application/json` if not explicitly set. Otherwise the `content-type` header will
be set to `application/octet-stream` if not explicitly set.
form : Union[Dict[str, Union[bool, float, str]], NoneType]
Provides an object that will be serialized as html form using `application/x-www-form-urlencoded` encoding and sent as
this request body. If this parameter is specified `content-type` header will be set to
`application/x-www-form-urlencoded` unless explicitly provided.
multipart : Union[Dict[str, Union[bool, bytes, float, str, {name: str, mimeType: str, buffer: bytes}]], NoneType]
Provides an object that will be serialized as html form using `multipart/form-data` encoding and sent as this request
body. If this parameter is specified `content-type` header will be set to `multipart/form-data` unless explicitly
provided. File values can be passed either as [`fs.ReadStream`](https://nodejs.org/api/fs.html#fs_class_fs_readstream)
or as file-like object containing file name, mime-type and its content.
timeout : Union[float, NoneType]
Request timeout in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
fail_on_status_code : Union[bool, NoneType]
Whether to throw on response codes other than 2xx and 3xx. By default response object is returned for all status codes.
ignore_https_errors : Union[bool, NoneType]
Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
Returns
-------
APIResponse
"""
return mapping.from_impl(
await self._async(
"api_request_context.put",
self._impl_obj.put(
url=url,
params=mapping.to_impl(params),
headers=mapping.to_impl(headers),
data=mapping.to_impl(data),
form=mapping.to_impl(form),
multipart=mapping.to_impl(multipart),
timeout=timeout,
failOnStatusCode=fail_on_status_code,
ignoreHTTPSErrors=ignore_https_errors,
),
)
)
async def post(
self,
url: str,
*,
params: typing.Optional[
typing.Dict[str, typing.Union[str, float, bool]]
] = None,
headers: typing.Optional[typing.Dict[str, str]] = None,
data: typing.Union[typing.Any, bytes, str] = None,
form: typing.Optional[typing.Dict[str, typing.Union[str, float, bool]]] = None,
multipart: typing.Optional[
typing.Dict[str, typing.Union[bytes, bool, float, str, FilePayload]]
] = None,
timeout: float = None,
fail_on_status_code: bool = None,
ignore_https_errors: bool = None
) -> "APIResponse":
"""APIRequestContext.post
Sends HTTP(S) [POST](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/POST) request and returns its response.
The method will populate request cookies from the context and update context cookies from the response. The method will
automatically follow redirects.
Parameters
----------
url : str
Target URL.
params : Union[Dict[str, Union[bool, float, str]], NoneType]
Query parameters to be sent with the URL.
headers : Union[Dict[str, str], NoneType]
Allows to set HTTP headers.
data : Union[Any, bytes, str, NoneType]
Allows to set post data of the request. If the data parameter is an object, it will be serialized to json string and
`content-type` header will be set to `application/json` if not explicitly set. Otherwise the `content-type` header will
be set to `application/octet-stream` if not explicitly set.
form : Union[Dict[str, Union[bool, float, str]], NoneType]
Provides an object that will be serialized as html form using `application/x-www-form-urlencoded` encoding and sent as
this request body. If this parameter is specified `content-type` header will be set to
`application/x-www-form-urlencoded` unless explicitly provided.
multipart : Union[Dict[str, Union[bool, bytes, float, str, {name: str, mimeType: str, buffer: bytes}]], NoneType]
Provides an object that will be serialized as html form using `multipart/form-data` encoding and sent as this request
body. If this parameter is specified `content-type` header will be set to `multipart/form-data` unless explicitly
provided. File values can be passed either as [`fs.ReadStream`](https://nodejs.org/api/fs.html#fs_class_fs_readstream)
or as file-like object containing file name, mime-type and its content.
timeout : Union[float, NoneType]
Request timeout in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
fail_on_status_code : Union[bool, NoneType]
Whether to throw on response codes other than 2xx and 3xx. By default response object is returned for all status codes.
ignore_https_errors : Union[bool, NoneType]
Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
Returns
-------
APIResponse
"""
return mapping.from_impl(
await self._async(
"api_request_context.post",
self._impl_obj.post(
url=url,
params=mapping.to_impl(params),
headers=mapping.to_impl(headers),
data=mapping.to_impl(data),
form=mapping.to_impl(form),
multipart=mapping.to_impl(multipart),
timeout=timeout,
failOnStatusCode=fail_on_status_code,
ignoreHTTPSErrors=ignore_https_errors,
),
)
)
async def fetch(
self,
url_or_request: typing.Union[str, "Request"],
*,
params: typing.Optional[
typing.Dict[str, typing.Union[str, float, bool]]
] = None,
method: str = None,
headers: typing.Optional[typing.Dict[str, str]] = None,
data: typing.Union[typing.Any, bytes, str] = None,
form: typing.Optional[typing.Dict[str, typing.Union[str, float, bool]]] = None,
multipart: typing.Optional[
typing.Dict[str, typing.Union[bytes, bool, float, str, FilePayload]]
] = None,
timeout: float = None,
fail_on_status_code: bool = None,
ignore_https_errors: bool = None
) -> "APIResponse":
"""APIRequestContext.fetch
Sends HTTP(S) request and returns its response. The method will populate request cookies from the context and update
context cookies from the response. The method will automatically follow redirects.
Parameters
----------
url_or_request : Union[Request, str]
Target URL or Request to get all parameters from.
params : Union[Dict[str, Union[bool, float, str]], NoneType]
Query parameters to be sent with the URL.
method : Union[str, NoneType]
If set changes the fetch method (e.g. [PUT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/PUT) or
[POST](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/POST)). If not specified, GET method is used.
headers : Union[Dict[str, str], NoneType]
Allows to set HTTP headers.
data : Union[Any, bytes, str, NoneType]
Allows to set post data of the request. If the data parameter is an object, it will be serialized to json string and
`content-type` header will be set to `application/json` if not explicitly set. Otherwise the `content-type` header will
be set to `application/octet-stream` if not explicitly set.
form : Union[Dict[str, Union[bool, float, str]], NoneType]
Provides an object that will be serialized as html form using `application/x-www-form-urlencoded` encoding and sent as
this request body. If this parameter is specified `content-type` header will be set to
`application/x-www-form-urlencoded` unless explicitly provided.
multipart : Union[Dict[str, Union[bool, bytes, float, str, {name: str, mimeType: str, buffer: bytes}]], NoneType]
Provides an object that will be serialized as html form using `multipart/form-data` encoding and sent as this request
body. If this parameter is specified `content-type` header will be set to `multipart/form-data` unless explicitly
provided. File values can be passed either as [`fs.ReadStream`](https://nodejs.org/api/fs.html#fs_class_fs_readstream)
or as file-like object containing file name, mime-type and its content.
timeout : Union[float, NoneType]
Request timeout in milliseconds. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
fail_on_status_code : Union[bool, NoneType]
Whether to throw on response codes other than 2xx and 3xx. By default response object is returned for all status codes.
ignore_https_errors : Union[bool, NoneType]
Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
Returns
-------
APIResponse
"""
return mapping.from_impl(
await self._async(
"api_request_context.fetch",
self._impl_obj.fetch(
urlOrRequest=url_or_request,
params=mapping.to_impl(params),
method=method,
headers=mapping.to_impl(headers),
data=mapping.to_impl(data),
form=mapping.to_impl(form),
multipart=mapping.to_impl(multipart),
timeout=timeout,
failOnStatusCode=fail_on_status_code,
ignoreHTTPSErrors=ignore_https_errors,
),
)
)
    async def storage_state(
        self, *, path: typing.Union[str, pathlib.Path] = None
    ) -> StorageState:
        """APIRequestContext.storage_state

        Returns storage state for this request context, contains current cookies and local storage snapshot if it was passed to
        the constructor.

        Parameters
        ----------
        path : Union[pathlib.Path, str, NoneType]
            The file path to save the storage state to. If `path` is a relative path, then it is resolved relative to current
            working directory. If no path is provided, storage state is still returned, but won't be saved to the disk.

        Returns
        -------
        {cookies: List[{name: str, value: str, domain: str, path: str, expires: float, httpOnly: bool, secure: bool, sameSite: Union["Lax", "None", "Strict"]}], origins: List[{origin: str, localStorage: List[{name: str, value: str}]}]}
        """
        # Delegate to the underlying impl object and wrap the result in the
        # async facade type via the mapping registry.
        return mapping.from_impl(
            await self._async(
                "api_request_context.storage_state",
                self._impl_obj.storage_state(path=path),
            )
        )


# Associate the impl class with its async wrapper class.
mapping.register(APIRequestContextImpl, APIRequestContext)
class APIRequest(AsyncBase):
    """Entry point for Web API testing: a factory for `APIRequestContext`
    instances (see `new_context`)."""

    async def new_context(
        self,
        *,
        base_url: str = None,
        extra_http_headers: typing.Optional[typing.Dict[str, str]] = None,
        http_credentials: HttpCredentials = None,
        ignore_https_errors: bool = None,
        proxy: ProxySettings = None,
        user_agent: str = None,
        timeout: float = None,
        storage_state: typing.Union[StorageState, str, pathlib.Path] = None
    ) -> "APIRequestContext":
        """APIRequest.new_context

        Creates new instances of `APIRequestContext`.

        Parameters
        ----------
        base_url : Union[str, NoneType]
            Methods like `APIRequestContext.get()` take the base URL into consideration by using the
            [`URL()`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL) constructor for building the corresponding URL.
            Examples:
            - baseURL: `http://localhost:3000` and sending request to `/bar.html` results in `http://localhost:3000/bar.html`
            - baseURL: `http://localhost:3000/foo/` and sending request to `./bar.html` results in
              `http://localhost:3000/foo/bar.html`
            - baseURL: `http://localhost:3000/foo` (without trailing slash) and navigating to `./bar.html` results in
              `http://localhost:3000/bar.html`
        extra_http_headers : Union[Dict[str, str], NoneType]
            An object containing additional HTTP headers to be sent with every request.
        http_credentials : Union[{username: str, password: str}, NoneType]
            Credentials for [HTTP authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication).
        ignore_https_errors : Union[bool, NoneType]
            Whether to ignore HTTPS errors when sending network requests. Defaults to `false`.
        proxy : Union[{server: str, bypass: Union[str, NoneType], username: Union[str, NoneType], password: Union[str, NoneType]}, NoneType]
            Network proxy settings.
        user_agent : Union[str, NoneType]
            Specific user agent to use in this context.
        timeout : Union[float, NoneType]
            Maximum time in milliseconds to wait for the response. Defaults to `30000` (30 seconds). Pass `0` to disable timeout.
        storage_state : Union[pathlib.Path, str, {cookies: List[{name: str, value: str, domain: str, path: str, expires: float, httpOnly: bool, secure: bool, sameSite: Union["Lax", "None", "Strict"]}], origins: List[{origin: str, localStorage: List[{name: str, value: str}]}]}, NoneType]
            Populates context with given storage state. This option can be used to initialize context with logged-in information
            obtained via `BrowserContext.storage_state()` or `APIRequestContext.storage_state()`. Either a path to the
            file with saved storage, or the value returned by one of `BrowserContext.storage_state()` or
            `APIRequestContext.storage_state()` methods.

        Returns
        -------
        APIRequestContext
        """
        # Translate the Pythonic snake_case keyword arguments into the
        # camelCase names expected by the impl layer.
        return mapping.from_impl(
            await self._async(
                "api_request.new_context",
                self._impl_obj.new_context(
                    baseURL=base_url,
                    extraHTTPHeaders=mapping.to_impl(extra_http_headers),
                    httpCredentials=http_credentials,
                    ignoreHTTPSErrors=ignore_https_errors,
                    proxy=proxy,
                    userAgent=user_agent,
                    timeout=timeout,
                    storageState=storage_state,
                ),
            )
        )


# Associate the impl class with its async wrapper class.
mapping.register(APIRequestImpl, APIRequest)
class PageAssertions(AsyncBase):
    """Assertions on the state of a `Page` (its title and URL).

    Each assertion re-checks the page until it passes or the given
    `timeout` elapses."""

    async def to_have_title(
        self,
        title_or_reg_exp: typing.Union[typing.Pattern, str],
        *,
        timeout: float = None
    ) -> NoneType:
        """PageAssertions.to_have_title

        Ensures the page has the given title.

        ```py
        import re
        from playwright.async_api import expect

        # ...
        await expect(page).to_have_title(re.compile(r\".*checkout\"))
        ```

        Parameters
        ----------
        title_or_reg_exp : Union[Pattern, str]
            Expected title or RegExp.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "page_assertions.to_have_title",
                self._impl_obj.to_have_title(
                    title_or_reg_exp=title_or_reg_exp, timeout=timeout
                ),
            )
        )

    async def not_to_have_title(
        self,
        title_or_reg_exp: typing.Union[typing.Pattern, str],
        *,
        timeout: float = None
    ) -> NoneType:
        """PageAssertions.not_to_have_title

        The opposite of `page_assertions.to_have_title()`.

        Parameters
        ----------
        title_or_reg_exp : Union[Pattern, str]
            Expected title or RegExp.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "page_assertions.not_to_have_title",
                self._impl_obj.not_to_have_title(
                    title_or_reg_exp=title_or_reg_exp, timeout=timeout
                ),
            )
        )

    async def to_have_url(
        self,
        url_or_reg_exp: typing.Union[str, typing.Pattern],
        *,
        timeout: float = None
    ) -> NoneType:
        """PageAssertions.to_have_url

        Ensures the page is navigated to the given URL.

        ```py
        import re
        from playwright.async_api import expect

        # ...
        await expect(page).to_have_url(re.compile(\".*checkout\"))
        ```

        Parameters
        ----------
        url_or_reg_exp : Union[Pattern, str]
            Expected substring or RegExp.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "page_assertions.to_have_url",
                self._impl_obj.to_have_url(
                    url_or_reg_exp=url_or_reg_exp, timeout=timeout
                ),
            )
        )

    async def not_to_have_url(
        self,
        url_or_reg_exp: typing.Union[typing.Pattern, str],
        *,
        timeout: float = None
    ) -> NoneType:
        """PageAssertions.not_to_have_url

        The opposite of `page_assertions.to_have_url()`.

        Parameters
        ----------
        url_or_reg_exp : Union[Pattern, str]
            Expected substring or RegExp.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "page_assertions.not_to_have_url",
                self._impl_obj.not_to_have_url(
                    url_or_reg_exp=url_or_reg_exp, timeout=timeout
                ),
            )
        )


# Associate the impl class with its async wrapper class.
mapping.register(PageAssertionsImpl, PageAssertions)
class LocatorAssertions(AsyncBase):
    """Assertions on elements resolved by a `Locator` (text, attributes,
    state, count).

    Each assertion re-checks the element until it passes or the given
    `timeout` elapses; each `to_*` method has a `not_to_*` counterpart."""

    async def to_contain_text(
        self,
        expected: typing.Union[
            typing.List[typing.Union[typing.Pattern, str]], typing.Pattern, str
        ],
        *,
        use_inner_text: bool = None,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_contain_text

        Ensures the `Locator` points to an element that contains the given text. You can use regular expressions for the value
        as well.

        ```py
        import re
        from playwright.async_api import expect

        locator = page.locator('.title')
        await expect(locator).to_contain_text(\"substring\")
        await expect(locator).to_contain_text(re.compile(r\"\\d messages\"))
        ```

        Note that if array is passed as an expected value, entire lists of elements can be asserted:

        ```py
        import re
        from playwright.async_api import expect

        locator = page.locator(\"list > .list-item\")
        await expect(locator).to_contain_text([\"Text 1\", \"Text 4\", \"Text 5\"])
        ```

        Parameters
        ----------
        expected : Union[List[Union[Pattern, str]], Pattern, str]
            Expected substring or RegExp or a list of those.
        use_inner_text : Union[bool, NoneType]
            Whether to use `element.innerText` instead of `element.textContent` when retrieving DOM node text.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_contain_text",
                self._impl_obj.to_contain_text(
                    expected=mapping.to_impl(expected),
                    use_inner_text=use_inner_text,
                    timeout=timeout,
                ),
            )
        )

    async def not_to_contain_text(
        self,
        expected: typing.Union[
            typing.List[typing.Union[typing.Pattern, str]], typing.Pattern, str
        ],
        *,
        use_inner_text: bool = None,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_contain_text

        The opposite of `locator_assertions.to_contain_text()`.

        Parameters
        ----------
        expected : Union[List[Union[Pattern, str]], Pattern, str]
            Expected substring or RegExp or a list of those.
        use_inner_text : Union[bool, NoneType]
            Whether to use `element.innerText` instead of `element.textContent` when retrieving DOM node text.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_contain_text",
                self._impl_obj.not_to_contain_text(
                    expected=mapping.to_impl(expected),
                    use_inner_text=use_inner_text,
                    timeout=timeout,
                ),
            )
        )

    async def to_have_attribute(
        self,
        name: str,
        value: typing.Union[str, typing.Pattern],
        *,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_have_attribute

        Ensures the `Locator` points to an element with given attribute.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"input\")
        await expect(locator).to_have_attribute(\"type\", \"text\")
        ```

        Parameters
        ----------
        name : str
            Attribute name.
        value : Union[Pattern, str]
            Expected attribute value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_attribute",
                self._impl_obj.to_have_attribute(
                    name=name, value=value, timeout=timeout
                ),
            )
        )

    async def not_to_have_attribute(
        self,
        name: str,
        value: typing.Union[str, typing.Pattern],
        *,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_have_attribute

        The opposite of `locator_assertions.to_have_attribute()`.

        Parameters
        ----------
        name : str
            Attribute name.
        value : Union[Pattern, str]
            Expected attribute value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_attribute",
                self._impl_obj.not_to_have_attribute(
                    name=name, value=value, timeout=timeout
                ),
            )
        )

    async def to_have_class(
        self,
        expected: typing.Union[
            typing.List[typing.Union[typing.Pattern, str]], typing.Pattern, str
        ],
        *,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_have_class

        Ensures the `Locator` points to an element with given CSS class.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"#component\")
        await expect(locator).to_have_class(re.compile(r\"selected\"))
        ```

        Note that if array is passed as an expected value, entire lists of elements can be asserted:

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"list > .component\")
        await expect(locator).to_have_class([\"component\", \"component selected\", \"component\"])
        ```

        Parameters
        ----------
        expected : Union[List[Union[Pattern, str]], Pattern, str]
            Expected class or RegExp or a list of those.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_class",
                self._impl_obj.to_have_class(
                    expected=mapping.to_impl(expected), timeout=timeout
                ),
            )
        )

    async def not_to_have_class(
        self,
        expected: typing.Union[
            typing.List[typing.Union[typing.Pattern, str]], typing.Pattern, str
        ],
        *,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_have_class

        The opposite of `locator_assertions.to_have_class()`.

        Parameters
        ----------
        expected : Union[List[Union[Pattern, str]], Pattern, str]
            Expected class or RegExp or a list of those.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_class",
                self._impl_obj.not_to_have_class(
                    expected=mapping.to_impl(expected), timeout=timeout
                ),
            )
        )

    async def to_have_count(self, count: int, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_have_count

        Ensures the `Locator` resolves to an exact number of DOM nodes.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"list > .component\")
        await expect(locator).to_have_count(3)
        ```

        Parameters
        ----------
        count : int
            Expected count.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_count",
                self._impl_obj.to_have_count(count=count, timeout=timeout),
            )
        )

    async def not_to_have_count(self, count: int, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_have_count

        The opposite of `locator_assertions.to_have_count()`.

        Parameters
        ----------
        count : int
            Expected count.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_count",
                self._impl_obj.not_to_have_count(count=count, timeout=timeout),
            )
        )

    async def to_have_css(
        self,
        name: str,
        value: typing.Union[str, typing.Pattern],
        *,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_have_css

        Ensures the `Locator` resolves to an element with the given computed CSS style.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"button\")
        await expect(locator).to_have_css(\"display\", \"flex\")
        ```

        Parameters
        ----------
        name : str
            CSS property name.
        value : Union[Pattern, str]
            CSS property value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_css",
                self._impl_obj.to_have_css(name=name, value=value, timeout=timeout),
            )
        )

    async def not_to_have_css(
        self,
        name: str,
        value: typing.Union[str, typing.Pattern],
        *,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_have_css

        The opposite of `locator_assertions.to_have_css()`.

        Parameters
        ----------
        name : str
            CSS property name.
        value : Union[Pattern, str]
            CSS property value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_css",
                self._impl_obj.not_to_have_css(name=name, value=value, timeout=timeout),
            )
        )

    async def to_have_id(
        self, id: typing.Union[str, typing.Pattern], *, timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_have_id

        Ensures the `Locator` points to an element with the given DOM Node ID.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"input\")
        await expect(locator).to_have_id(\"lastname\")
        ```

        Parameters
        ----------
        id : Union[Pattern, str]
            Element id.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_id",
                self._impl_obj.to_have_id(id=id, timeout=timeout),
            )
        )

    async def not_to_have_id(
        self, id: typing.Union[str, typing.Pattern], *, timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_have_id

        The opposite of `locator_assertions.to_have_id()`.

        Parameters
        ----------
        id : Union[Pattern, str]
            Element id.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_id",
                self._impl_obj.not_to_have_id(id=id, timeout=timeout),
            )
        )

    async def to_have_js_property(
        self, name: str, value: typing.Any, *, timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_have_js_property

        Ensures the `Locator` points to an element with given JavaScript property. Note that this property can be of a primitive
        type as well as a plain serializable JavaScript object.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\".component\")
        await expect(locator).to_have_js_property(\"loaded\", True)
        ```

        Parameters
        ----------
        name : str
            Property name.
        value : Any
            Property value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_js_property",
                self._impl_obj.to_have_js_property(
                    name=name, value=mapping.to_impl(value), timeout=timeout
                ),
            )
        )

    async def not_to_have_js_property(
        self, name: str, value: typing.Any, *, timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_have_js_property

        The opposite of `locator_assertions.to_have_js_property()`.

        Parameters
        ----------
        name : str
            Property name.
        value : Any
            Property value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_js_property",
                self._impl_obj.not_to_have_js_property(
                    name=name, value=mapping.to_impl(value), timeout=timeout
                ),
            )
        )

    async def to_have_value(
        self, value: typing.Union[str, typing.Pattern], *, timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_have_value

        Ensures the `Locator` points to an element with the given input value. You can use regular expressions for the value as
        well.

        ```py
        import re
        from playwright.async_api import expect

        locator = page.locator(\"input[type=number]\")
        await expect(locator).to_have_value(re.compile(r\"[0-9]\"))
        ```

        Parameters
        ----------
        value : Union[Pattern, str]
            Expected value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_value",
                self._impl_obj.to_have_value(value=value, timeout=timeout),
            )
        )

    async def not_to_have_value(
        self, value: typing.Union[str, typing.Pattern], *, timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_have_value

        The opposite of `locator_assertions.to_have_value()`.

        Parameters
        ----------
        value : Union[Pattern, str]
            Expected value.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_value",
                self._impl_obj.not_to_have_value(value=value, timeout=timeout),
            )
        )

    async def to_have_text(
        self,
        expected: typing.Union[
            typing.List[typing.Union[typing.Pattern, str]], typing.Pattern, str
        ],
        *,
        use_inner_text: bool = None,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.to_have_text

        Ensures the `Locator` points to an element with the given text. You can use regular expressions for the value as well.

        ```py
        import re
        from playwright.async_api import expect

        locator = page.locator(\".title\")
        await expect(locator).to_have_text(re.compile(r\"Welcome, Test User\"))
        await expect(locator).to_have_text(re.compile(r\"Welcome, .*\"))
        ```

        Note that if array is passed as an expected value, entire lists of elements can be asserted:

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"list > .component\")
        await expect(locator).to_have_text([\"Text 1\", \"Text 2\", \"Text 3\"])
        ```

        Parameters
        ----------
        expected : Union[List[Union[Pattern, str]], Pattern, str]
            Expected substring or RegExp or a list of those.
        use_inner_text : Union[bool, NoneType]
            Whether to use `element.innerText` instead of `element.textContent` when retrieving DOM node text.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_have_text",
                self._impl_obj.to_have_text(
                    expected=mapping.to_impl(expected),
                    use_inner_text=use_inner_text,
                    timeout=timeout,
                ),
            )
        )

    async def not_to_have_text(
        self,
        expected: typing.Union[
            typing.List[typing.Union[typing.Pattern, str]], typing.Pattern, str
        ],
        *,
        use_inner_text: bool = None,
        timeout: float = None
    ) -> NoneType:
        """LocatorAssertions.not_to_have_text

        The opposite of `locator_assertions.to_have_text()`.

        Parameters
        ----------
        expected : Union[List[Union[Pattern, str]], Pattern, str]
            Expected substring or RegExp or a list of those.
        use_inner_text : Union[bool, NoneType]
            Whether to use `element.innerText` instead of `element.textContent` when retrieving DOM node text.
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_have_text",
                self._impl_obj.not_to_have_text(
                    expected=mapping.to_impl(expected),
                    use_inner_text=use_inner_text,
                    timeout=timeout,
                ),
            )
        )

    async def to_be_checked(
        self, *, timeout: float = None, checked: bool = None
    ) -> NoneType:
        """LocatorAssertions.to_be_checked

        Ensures the `Locator` points to a checked input.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\".subscribe\")
        await expect(locator).to_be_checked()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        checked : Union[bool, NoneType]
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_checked",
                self._impl_obj.to_be_checked(timeout=timeout, checked=checked),
            )
        )

    async def not_to_be_checked(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_checked

        The opposite of `locator_assertions.to_be_checked()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_checked",
                self._impl_obj.not_to_be_checked(timeout=timeout),
            )
        )

    async def to_be_disabled(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_be_disabled

        Ensures the `Locator` points to a disabled element.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"button.submit\")
        await expect(locator).to_be_disabled()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_disabled",
                self._impl_obj.to_be_disabled(timeout=timeout),
            )
        )

    async def not_to_be_disabled(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_disabled

        The opposite of `locator_assertions.to_be_disabled()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_disabled",
                self._impl_obj.not_to_be_disabled(timeout=timeout),
            )
        )

    async def to_be_editable(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_be_editable

        Ensures the `Locator` points to an editable element.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\".input\")
        await expect(locator).to_be_editable()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_editable",
                self._impl_obj.to_be_editable(timeout=timeout),
            )
        )

    async def not_to_be_editable(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_editable

        The opposite of `locator_assertions.to_be_editable()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_editable",
                self._impl_obj.not_to_be_editable(timeout=timeout),
            )
        )

    async def to_be_empty(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_be_empty

        Ensures the `Locator` points to an empty editable element or to a DOM node that has no text.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"div.warning\")
        await expect(locator).to_be_empty()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_empty",
                self._impl_obj.to_be_empty(timeout=timeout),
            )
        )

    async def not_to_be_empty(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_empty

        The opposite of `locator_assertions.to_be_empty()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_empty",
                self._impl_obj.not_to_be_empty(timeout=timeout),
            )
        )

    async def to_be_enabled(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_be_enabled

        Ensures the `Locator` points to an enabled element.

        ```py
        from playwright.async_api import expect

        locator = page.locator(\"button.submit\")
        await expect(locator).to_be_enabled()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_enabled",
                self._impl_obj.to_be_enabled(timeout=timeout),
            )
        )

    async def not_to_be_enabled(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_enabled

        The opposite of `locator_assertions.to_be_enabled()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_enabled",
                self._impl_obj.not_to_be_enabled(timeout=timeout),
            )
        )

    async def to_be_hidden(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_be_hidden

        Ensures the `Locator` points to a hidden DOM node, which is the opposite of [visible](./actionability.md#visible).

        ```py
        from playwright.async_api import expect

        locator = page.locator('.my-element')
        await expect(locator).to_be_hidden()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_hidden",
                self._impl_obj.to_be_hidden(timeout=timeout),
            )
        )

    async def not_to_be_hidden(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_hidden

        The opposite of `locator_assertions.to_be_hidden()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_hidden",
                self._impl_obj.not_to_be_hidden(timeout=timeout),
            )
        )

    async def to_be_visible(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_be_visible

        Ensures the `Locator` points to a [visible](./actionability.md#visible) DOM node.

        ```py
        from playwright.async_api import expect

        locator = page.locator('.my-element')
        await expect(locator).to_be_visible()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_visible",
                self._impl_obj.to_be_visible(timeout=timeout),
            )
        )

    async def not_to_be_visible(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_visible

        The opposite of `locator_assertions.to_be_visible()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_visible",
                self._impl_obj.not_to_be_visible(timeout=timeout),
            )
        )

    async def to_be_focused(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.to_be_focused

        Ensures the `Locator` points to a focused DOM node.

        ```py
        from playwright.async_api import expect

        locator = page.locator('input')
        await expect(locator).to_be_focused()
        ```

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.to_be_focused",
                self._impl_obj.to_be_focused(timeout=timeout),
            )
        )

    async def not_to_be_focused(self, *, timeout: float = None) -> NoneType:
        """LocatorAssertions.not_to_be_focused

        The opposite of `locator_assertions.to_be_focused()`.

        Parameters
        ----------
        timeout : Union[float, NoneType]
            Time to retry the assertion for.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "locator_assertions.not_to_be_focused",
                self._impl_obj.not_to_be_focused(timeout=timeout),
            )
        )


# Associate the impl class with its async wrapper class.
mapping.register(LocatorAssertionsImpl, LocatorAssertions)
class APIResponseAssertions(AsyncBase):
    """Assertions on an `APIResponse` (HTTP status-code checks)."""

    async def to_be_ok(self) -> NoneType:
        """APIResponseAssertions.to_be_ok

        Ensures the response status code is within [200..299] range.

        ```py
        from playwright.async_api import expect

        # ...
        await expect(response).to_be_ok()
        ```
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "api_response_assertions.to_be_ok", self._impl_obj.to_be_ok()
            )
        )

    async def not_to_be_ok(self) -> NoneType:
        """APIResponseAssertions.not_to_be_ok

        The opposite of `APIResponseAssertions.to_be_ok()`.
        """
        __tracebackhide__ = True

        return mapping.from_maybe_impl(
            await self._async(
                "api_response_assertions.not_to_be_ok", self._impl_obj.not_to_be_ok()
            )
        )


# Associate the impl class with its async wrapper class.
mapping.register(APIResponseAssertionsImpl, APIResponseAssertions)
| 40.348066
| 287
| 0.608277
|
21bea60146ac987bed3e96b45d7f3ef1d3283dda
| 38,998
|
py
|
Python
|
guild/util.py
|
jukiewiczm/guildai
|
478cc29cb102a8bd0bed693ce9626fe4949257a2
|
[
"Apache-2.0"
] | null | null | null |
guild/util.py
|
jukiewiczm/guildai
|
478cc29cb102a8bd0bed693ce9626fe4949257a2
|
[
"Apache-2.0"
] | null | null | null |
guild/util.py
|
jukiewiczm/guildai
|
478cc29cb102a8bd0bed693ce9626fe4949257a2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017-2019 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division

import chardet
import datetime
import errno
import fnmatch
import os
import logging
import platform
import re
import shlex
import shutil
import struct
import subprocess
import sys
import tempfile
import time
import threading

import six

# Shared logger for all guild utilities.
log = logging.getLogger("guild")

# Host OS name as reported by platform.system() (e.g. "Linux", "Darwin",
# "Windows").
PLATFORM = platform.system()

# Environment variable names that should not be copied from os.environ.
# NOTE(review): consumers of this set are outside this chunk; "_" is
# presumably excluded because the shell manages it -- confirm at call sites.
OS_ENVIRON_BLACKLIST = set(["_"])
class Stop(Exception):
    """Raise to stop loops started with `loop` (caught and swallowed there)."""
class TryFailed(RuntimeError):
    """Raise to indicate an attempt in `try_apply` failed.

    `try_apply` treats this as "try the next function"; when every
    function raises it, `try_apply` itself raises TryFailed(funs, args).
    """
def find_apply(funs, *args, **kw):
    """Apply each function in `funs` to `args` and return the first
    result that is not None.

    If every function returns None, return the value of the optional
    `default` keyword argument (None when not supplied).
    """
    fallback = kw.get("default")
    for fun in funs:
        value = fun(*args)
        if value is not None:
            return value
    return fallback
def try_apply(funs, *args):
    """Return the result of the first function in `funs` that does not
    raise `TryFailed` when applied to `args`.

    Raises TryFailed(funs, args) when every function fails.
    """
    for fun in funs:
        try:
            return fun(*args)
        except TryFailed:
            pass
    raise TryFailed(funs, args)
def ensure_dir(d):
    """Create directory `d` (including parents) if it does not exist.

    The path is resolved with os.path.realpath first. An existing
    directory is not an error; any other OSError propagates.
    """
    resolved = os.path.realpath(d)
    try:
        os.makedirs(resolved)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
def ensure_deleted(path):
    """Remove the file at `path`, ignoring the case where it is already
    absent. Any other OSError propagates.
    """
    try:
        os.remove(path)
    except OSError as e:
        if e.errno == errno.ENOENT:
            return
        raise
def try_read(path, default=None, apply=None):
    """Return the contents of the text file at `path`.

    Returns `default` when the file does not exist; any other IOError
    propagates. When `apply` is given (a single callable or a list of
    callables), each is applied in order to the contents and the final
    value is returned.
    """
    try:
        f = open(path, "r")
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return default
    try:
        out = f.read()
    finally:
        # Fix: the original never closed the file handle (resource leak).
        f.close()
    if apply:
        if not isinstance(apply, list):
            apply = [apply]
        for fun in apply:
            out = fun(out)
    return out
def pid_exists(pid, default=True):
    """Return True if a process with `pid` appears to exist.

    Strategies are tried in order: the /proc filesystem, then psutil;
    when neither can answer (both return None), `default` is returned.
    """
    return find_apply([
        _proc_pid_exists,
        _psutil_pid_exists,
        lambda _: default,
    ], pid)
def _proc_pid_exists(pid):
if os.path.exists("/proc"):
return os.path.exists(os.path.join("/proc", str(pid)))
return None
def _psutil_pid_exists(pid):
    """Check for `pid` via the third-party psutil package.

    Returns None when psutil cannot be imported (logged as a warning,
    with a traceback at DEBUG level) so the caller can fall back to
    another strategy.
    """
    try:
        import psutil
    except Exception as e:
        log.warning("cannot get status for pid %s: %s", pid, e)
        if log.getEffectiveLevel() <= logging.DEBUG:
            log.exception("importing psutil")
        return None
    return psutil.pid_exists(pid)
def free_port(start=None):
    """Return a TCP port on localhost that appears to be unused.

    If `start` is None, ports are probed at random within the IANA
    dynamic/private range (49152-65535); otherwise probing starts at
    `start` and increments by one. A port is considered free when a
    connect attempt times out or is refused.

    Raises RuntimeError after 100 failed attempts.
    """
    import random
    import socket
    min_port = 49152
    max_port = 65535
    max_attempts = 100
    attempts = 0
    if start is None:
        next_port = lambda _p: random.randint(min_port, max_port)
        port = next_port(None)
    else:
        next_port = lambda p: p + 1
        port = start
    while True:
        if attempts > max_attempts:
            raise RuntimeError("too many free port attempts")
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(0.1)
        try:
            try:
                sock.connect(('localhost', port))
            except socket.timeout:
                return port
            except socket.error as e:
                if e.errno == errno.ECONNREFUSED:
                    return port
        finally:
            # Fix: the original leaked the socket on the timeout/refused
            # return paths and when connect succeeded (port in use); it
            # closed only on an unexpected socket error.
            sock.close()
        attempts += 1
        port = next_port(port)
def open_url(url):
try:
_open_url_with_cmd(url)
except (OSError, URLOpenError):
_open_url_with_webbrowser(url)
class URLOpenError(Exception):
pass
def _open_url_with_cmd(url):
if sys.platform == "darwin":
args = ["open", url]
else:
args = ["xdg-open", url]
with open(os.devnull, "w") as null:
try:
subprocess.check_call(args, stderr=null, stdout=null)
except subprocess.CalledProcessError as e:
raise URLOpenError(url, e)
def _open_url_with_webbrowser(url):
import webbrowser
webbrowser.open(url)
def loop(cb, wait, interval, first_interval=None):
try:
_loop(cb, wait, interval, first_interval)
except Stop:
pass
except KeyboardInterrupt:
pass
def _loop(cb, wait, interval, first_interval):
loop_interval = first_interval if first_interval is not None else interval
start = time.time()
while True:
sleep = _sleep_interval(loop_interval, start)
loop_interval = interval
should_stop = wait(sleep)
if should_stop:
break
cb()
def _sleep_interval(interval, start):
if interval <= 0:
return 0
now_ms = int(time.time() * 1000)
interval_ms = int(interval * 1000)
start_ms = int(start * 1000)
sleep_ms = (
((now_ms - start_ms) // interval_ms + 1)
* interval_ms + start_ms - now_ms)
return sleep_ms / 1000
class LoopingThread(threading.Thread):
    """Thread that invokes `cb` every `interval` seconds until stopped.

    `first_interval` optionally overrides the delay before the first
    callback. `stop` signals the loop and waits up to `stop_timeout`
    seconds for the loop to finish.
    """

    def __init__(self, cb, interval, first_interval=None, stop_timeout=0):
        super(LoopingThread, self).__init__()
        self._cb = cb
        self._interval = interval
        self._first_interval = first_interval
        self._stop_timeout = stop_timeout
        self._stop = threading.Event()      # signals the loop to exit
        self._stopped = threading.Event()   # set once the loop has exited

    def run(self):
        # `loop` returns when the stop event is set (its wait function)
        loop(
            cb=self._cb,
            wait=self._stop.wait,
            interval=self._interval,
            first_interval=self._first_interval)
        self._stopped.set()

    def stop(self):
        """Signal the loop to stop and wait up to `stop_timeout` for it."""
        self._stop.set()
        self._stopped.wait(self._stop_timeout)
def safe_osenv():
return {
name: val
for name, val in os.environ.items()
if name not in OS_ENVIRON_BLACKLIST
}
def match_filters(filters, vals, match_any=False):
    """Return True if `filters` match `vals` (case-insensitive substrings).

    A filter matches when it is a substring of any value. With
    match_any=True a single matching filter suffices; otherwise every
    filter must match. An empty filter list matches (all semantics).
    """
    lowered_vals = [v.lower() for v in vals]

    def matches(f):
        f = f.lower()
        return any(f in v for v in lowered_vals)

    combine = any if match_any else all
    return combine(matches(f) for f in filters)
def split_description(s):
lines = s.split("\n")
return lines[0], _format_details(lines[1:])
def _format_details(details):
lines = []
for i, line in enumerate(details):
if i > 0:
lines.append("")
lines.append(line)
return lines
def file_sha256(path, use_cache=True):
if use_cache:
cached_sha = try_cached_sha(path)
if cached_sha:
return cached_sha
import hashlib
hash = hashlib.sha256()
with open(path, "rb") as f:
while True:
data = f.read(102400)
if not data:
break
hash.update(data)
return hash.hexdigest()
def try_cached_sha(for_file):
    """Return the cached sha256 digest for `for_file` or None.

    The digest is read from the `.guild-cache-NAME.sha` sidecar file
    written by `write_cached_sha`.
    """
    try:
        f = open(_cached_sha_filename(for_file), "r")
    except IOError:
        return None
    else:
        # Fix: the original leaked the file handle.
        with f:
            return f.read().rstrip()
def _cached_sha_filename(for_file):
parent, name = os.path.split(for_file)
return os.path.join(parent, ".guild-cache-%s.sha" % name)
def write_cached_sha(sha, for_file):
with open(_cached_sha_filename(for_file), "w") as f:
f.write(sha)
def file_md5(path):
    """Return the hex md5 digest of the file at `path`.

    The file is read in 100KB chunks so arbitrarily large files can be
    digested without loading them into memory.
    """
    import hashlib
    digest = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(102400), b""):
            digest.update(chunk)
    return digest.hexdigest()
def parse_url(url):
    """Return `url` parsed into a urlparse result.

    Imports the correct urlparse for the running Python version.
    """
    try:
        # Python 3
        from urllib.parse import urlparse
    except ImportError:
        # Python 2 fallback
        # pylint: disable=import-error,no-name-in-module
        from urlparse import urlparse
    return urlparse(url)
class TempBase(object):
def __init__(self, prefix="guild-", suffix="", keep=False):
self._prefix = prefix
self._suffix = suffix
self._keep = keep
self.path = self._init_temp(self._prefix, self._suffix)
def __enter__(self):
return self
@staticmethod
def _init_temp(prefix, suffix):
raise NotImplementedError()
def __exit__(self, *_exc):
if not self._keep:
self.delete()
@staticmethod
def delete():
raise NotImplementedError()
class TempDir(TempBase):
@staticmethod
def _init_temp(prefix, suffix):
return tempfile.mkdtemp(prefix=prefix, suffix=suffix)
def delete(self):
rmtempdir(self.path)
class TempFile(TempBase):
@staticmethod
def _init_temp(prefix, suffix):
f, path = tempfile.mkstemp(prefix=prefix, suffix=suffix)
os.close(f)
return path
def delete(self):
os.remove(self.path)
def mktempdir(prefix=None):
return tempfile.mkdtemp(prefix=prefix)
def rmtempdir(path):
assert os.path.dirname(path) == tempfile.gettempdir(), path
try:
shutil.rmtree(path)
except Exception as e:
if log.getEffectiveLevel() <= logging.DEBUG:
log.exception("rmtree %s", path)
else:
log.error("error removing %s: %s", path, e)
def safe_rmtree(path):
"""Removes path if it's not top level or user dir."""
assert not _top_level_dir(path), path
assert path != os.path.expanduser("~"), path
shutil.rmtree(path)
def _top_level_dir(path):
abs_path = os.path.abspath(path)
parts = [p for p in re.split(r"[/\\]", abs_path) if p]
if PLATFORM == "Windows":
return len(parts) <= 2
return len(parts) <= 1
class LogCapture(object):
    """Context manager that captures log records from all loggers.

    While active, installs itself as a filter on the root logger and
    every registered logger; captured records are collected rather than
    emitted by the normal handlers. With `stdout=True` records are also
    echoed to stdout as they arrive.
    """

    def __init__(self, use_root_handler=False, stdout=False,
                 strip_ansi_format=False):
        self._records = []
        self._use_root_handler = use_root_handler
        self._stdout = stdout
        self._strip_ansi_format = strip_ansi_format

    def __enter__(self):
        for logger in self._iter_loggers():
            logger.addFilter(self)
        self._records = []
        return self

    def __exit__(self, *exc):
        for logger in self._iter_loggers():
            logger.removeFilter(self)

    @staticmethod
    def _iter_loggers():
        # Root logger plus every logger registered with the manager
        # (skipping PlaceHolder entries, which are not real loggers)
        yield logging.root
        for logger in logging.Logger.manager.loggerDict.values():
            if isinstance(logger, logging.Logger):
                yield logger

    def filter(self, record):
        # Called by the logging framework. Implicitly returning None
        # (falsy) suppresses normal handling, so captured records are
        # not emitted elsewhere.
        self._records.append(record)
        if self._stdout:
            sys.stdout.write(self._format_record(record))
            sys.stdout.write("\n")

    def _format_record(self, r):
        msg = self._handler().format(r)
        if self._strip_ansi_format:
            # Drop ANSI color escape sequences
            msg = re.sub(r"\033\[[0-9]+m", "", msg)
        return msg

    def print_all(self):
        """Write all captured records to stdout."""
        for r in self._records:
            sys.stdout.write(self._format_record(r))
            sys.stdout.write("\n")

    def _handler(self):
        # Formatting handler: either the first root handler or guild's
        # console handler
        if self._use_root_handler:
            return logging.root.handlers[0]
        from guild import log
        return log.ConsoleLogHandler()

    def get_all(self):
        """Return the list of captured log records."""
        return self._records
def format_timestamp(ts, fmt=None):
    """Format microsecond epoch timestamp `ts` as local time.

    Returns "" for a falsy timestamp. `fmt` defaults to
    "%Y-%m-%d %H:%M:%S".
    """
    if not ts:
        return ""
    local_dt = datetime.datetime.fromtimestamp(ts / 1000000)
    return local_dt.strftime(fmt or "%Y-%m-%d %H:%M:%S")
def utcformat_timestamp(ts, fmt=None):
    """Format microsecond epoch timestamp `ts` as UTC.

    Returns None for a falsy timestamp. `fmt` defaults to
    "%Y-%m-%d %H:%M:%S UTC".
    """
    if not ts:
        return None
    default_fmt = "%Y-%m-%d %H:%M:%S UTC"
    utc_dt = datetime.datetime.utcfromtimestamp(ts / 1000000)
    return utc_dt.strftime(fmt or default_fmt)
_raise_error_marker = object()
def resolve_refs(val, kv, undefined=_raise_error_marker):
return _resolve_refs_recurse(val, kv, undefined, [])
def resolve_all_refs(kv, undefined=_raise_error_marker):
return {
name: _resolve_refs_recurse(kv[name], kv, undefined, [])
for name in sorted(kv)
}
def _resolve_refs_recurse(val, kv, undefined, stack):
if not isinstance(val, six.string_types):
return val
parts = [part for part in re.split(r"(\\?\${.+?})", val) if part != ""]
resolved = list(_iter_resolved_ref_parts(parts, kv, undefined, stack))
if len(resolved) == 1:
return resolved[0]
else:
return "".join([_resolved_part_str(part) for part in resolved])
def _resolved_part_str(part):
if part is None:
return "null"
return str(part)
def resolve_rel_paths(kv):
return {
name: _resolve_rel_path(kv[name])
for name in kv
}
def _resolve_rel_path(maybe_path):
if os.path.exists(maybe_path) and not os.path.isabs(maybe_path):
return os.path.abspath(maybe_path)
return maybe_path
class ReferenceCycleError(Exception):
pass
class UndefinedReferenceError(Exception):
def __init__(self, reference):
super(UndefinedReferenceError, self).__init__(reference)
self.reference = reference
def _iter_resolved_ref_parts(parts, kv, undefined, stack):
for part in parts:
if part.startswith("${") and part.endswith("}"):
ref_name = part[2:-1]
if ref_name in stack:
raise ReferenceCycleError(stack + [ref_name])
stack.append(ref_name)
ref_val = kv.get(ref_name, undefined)
if ref_val is _raise_error_marker:
raise UndefinedReferenceError(ref_name)
yield _resolve_refs_recurse(ref_val, kv, undefined, stack)
stack.pop()
elif part.startswith("\\${") and part.endswith("}"):
yield part[1:-1]
else:
yield part
def strip_trailing_sep(path):
    """Return `path` without a single trailing "/" or "\\" separator."""
    if not path:
        return path
    return path[:-1] if path[-1] in "/\\" else path
def strip_leading_sep(path):
    """Return `path` without a single leading "/" or "\\" separator."""
    if not path:
        return path
    return path[1:] if path[0] in "/\\" else path
def ensure_trailing_sep(path, sep=None):
    """Return `path` with `sep` (default os.path.sep) appended unless
    its last character is already `sep`."""
    sep = sep or os.path.sep
    if path[-1:] == sep:
        return path
    return path + sep
def subpath(path, start, sep=None):
if path == start:
raise ValueError(path, start)
start_with_sep = ensure_trailing_sep(start, sep)
if path.startswith(start_with_sep):
return path[len(start_with_sep):]
raise ValueError(path, start)
def which(cmd):
    """Return the full path to `cmd` or None if it isn't on PATH.

    Delegates to the platform's lookup command (`where` on Windows,
    `which` elsewhere).
    """
    which_cmd = "where" if PLATFORM == "Windows" else "which"
    # Fix: the original never closed the devnull file handle.
    with open(os.devnull, "w") as devnull:
        try:
            out = subprocess.check_output([which_cmd, cmd], stderr=devnull)
        except subprocess.CalledProcessError:
            return None
        else:
            return out.strip().decode("utf-8")
def symlink(target, link):
if PLATFORM == "Windows":
_windows_symlink(target, link)
else:
os.symlink(target, link)
def _windows_symlink(target, link):
if os.path.isdir(target):
args = ["mklink", "/D", link, target]
else:
args = ["mklink", link, target]
try:
subprocess.check_output(args, shell=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
log.error(e.output)
raise
_text_ext = set([
".csv",
".md",
".py",
".sh",
".txt",
])
_binary_ext = set([
".ai", ".bmp", ".gif", ".ico", ".jpeg", ".jpg", ".png", ".ps", ".psd",
".svg", ".tif", ".tiff",
".aif", ".mid", ".midi", ".mpa", ".mp3", ".ogg", ".wav", ".wma",
".avi", ".mov", ".mp4", ".mpeg", ".swf" ".wmv",
".7z", ".deb", ".gz", ".pkg", ".rar", ".rpm", ".tar", ".xz", ".z", ".zip",
".doc", ".docx", ".key", ".pdf", ".ppt", ".pptx", ".xlr", ".xls", ".xlsx",
".bin", ".pickle", ".pkl", ".pyc",
])
# Bytes considered "printable" for the text/binary heuristic used by
# is_text_file. Control characters commonly found in text are included.
_control_chars = b'\n\r\t\f\b'
if bytes is str:
    # Python 2: bytes is an alias of str, so build the tables with chr
    _printable_ascii = _control_chars + b"".join(
        [chr(x) for x in range(32, 127)])
    _printable_high_ascii = b"".join(
        [chr(x) for x in range(127, 256)])
else:
    # Python 3: bytes(range(...)) yields the raw byte values directly
    _printable_ascii = _control_chars + bytes(range(32, 127))
    _printable_high_ascii = bytes(range(127, 256))
def is_text_file(path, ignore_ext=False):
    """Return True if `path` appears to contain text.

    Unless `ignore_ext` is True, well-known extensions (_text_ext /
    _binary_ext) decide without reading the file. Otherwise the first
    1KB is sampled and classified by the ratio of non-printable and
    high bytes, consulting chardet for a confidently detected
    non-ASCII encoding.

    Raises OSError if `path` does not exist. Returns False for
    non-regular files and unreadable files.
    """
    # Adapted from https://github.com/audreyr/binaryornot under the
    # BSD 3-clause License
    if not os.path.exists(path):
        raise OSError("%s does not exist" % path)
    if not os.path.isfile(path):
        return False
    if not ignore_ext:
        ext = os.path.splitext(path)[1].lower()
        if ext in _text_ext:
            return True
        if ext in _binary_ext:
            return False
    try:
        with open(path, 'rb') as f:
            sample = f.read(1024)
    except IOError:
        return False
    if not sample:
        # An empty file is considered text
        return True
    # Ratio of control/unprintable low bytes in the sample
    low_chars = sample.translate(None, _printable_ascii)
    nontext_ratio1 = float(len(low_chars)) / float(len(sample))
    # Ratio of high (>= 127) bytes in the sample
    high_chars = sample.translate(None, _printable_high_ascii)
    nontext_ratio2 = float(len(high_chars)) / float(len(sample))
    likely_binary = (
        (nontext_ratio1 > 0.3 and nontext_ratio2 < 0.05) or
        (nontext_ratio1 > 0.8 and nontext_ratio2 > 0.8)
    )
    detected_encoding = chardet.detect(sample)
    decodable_as_unicode = False
    if (detected_encoding["confidence"] > 0.9 and
            detected_encoding["encoding"] != "ascii"):
        try:
            try:
                sample.decode(encoding=detected_encoding["encoding"])
            except TypeError:
                # Python 2 str.decode lacks the keyword form
                # pylint: disable=undefined-variable
                unicode(sample, encoding=detected_encoding["encoding"])
            decodable_as_unicode = True
        except LookupError:
            pass
        except UnicodeDecodeError:
            pass
    if likely_binary:
        return decodable_as_unicode
    else:
        if decodable_as_unicode:
            return True
        else:
            if b'\x00' in sample or b'\xff' in sample:
                # NUL or 0xFF bytes strongly suggest binary data
                return False
            return True
def safe_is_text_file(path, ignore_ext=False):
try:
return is_text_file(path, ignore_ext)
except OSError as e:
log.warning("could not check for text file %s: %s", path, e)
return False
def touch(filename):
    """Create `filename` if needed and set its atime/mtime to now."""
    with open(filename, "ab"):
        pass
    now = time.time()
    os.utime(filename, (now, now))
def getmtime(filename):
try:
return os.path.getmtime(filename)
except OSError:
return None
def kill_process_tree(pid, force=False, timeout=None):
    """Signal `pid` and all of its descendants to terminate.

    Sends SIGTERM, or SIGKILL when `force` is True, and returns the
    (gone, alive) result of psutil.wait_procs after waiting up to
    `timeout` seconds. Requires psutil.

    NOTE(review): signal.SIGKILL does not exist on Windows - presumably
    force=True is only used on POSIX; verify callers.
    """
    import psutil
    import signal
    if force:
        sig = signal.SIGKILL
    else:
        sig = signal.SIGTERM
    root = psutil.Process(pid)
    # Snapshot the tree before signaling so children aren't missed
    procs = [root] + root.children(recursive=True)
    for proc in procs:
        proc.send_signal(sig)
    return psutil.wait_procs(procs, timeout=timeout)
def safe_filesize(path):
try:
return os.path.getsize(path)
except OSError:
return None
def safe_mtime(path):
try:
return os.path.getmtime(path)
except OSError:
return None
def safe_list_remove(x, l):
safe_list_remove_all([x], l)
def safe_list_remove_all(xs, l):
    """Remove the first occurrence of each of `xs` from list `l`,
    ignoring values that aren't present."""
    for x in xs:
        if x in l:
            l.remove(x)
def local_server_url(host, port):
import socket
if not host or host == "0.0.0.0":
host = socket.gethostname()
try:
# Verify that configured hostname is valid
socket.gethostbyname(host)
except socket.gaierror:
host = "localhost"
return "http://{}:{}".format(host, port)
def format_duration(start_time, end_time=None):
    """Return "H:MM:SS" for the span between two microsecond timestamps.

    `end_time` defaults to the current time. Returns None when
    `start_time` is None.
    """
    if start_time is None:
        return None
    if end_time is None:
        end_time = time.time() * 1000000
    total_seconds = (end_time - start_time) // 1000000
    hours, rem = divmod(total_seconds, 3600)
    minutes, seconds = divmod(rem, 60)
    return "%d:%02d:%02d" % (hours, minutes, seconds)
def format_dir(dir):
return format_user_dir(os.path.abspath(dir))
def format_user_dir(s):
user_dir = os.path.expanduser("~")
if s.startswith(user_dir):
return os.path.join("~", s[len(user_dir)+1:])
return s
def apply_env(target, source, names):
    """Copy each of `names` present in `source` into `target` in place.

    Names missing from `source` are silently skipped.
    """
    for name in names:
        if name in source:
            target[name] = source[name]
def safe_filename(s):
if PLATFORM == "Windows":
return s.replace(":", "_")
return re.sub(r"[/\\]+", "_", s)
def wait_forever(sleep_interval=0.1):
while True:
time.sleep(sleep_interval)
class RunOutputReader(object):
    """Reads run output lines from a run directory.

    Output lines are stored in `<run_dir>/.guild/output` with a
    companion binary index `output.index` containing one packed
    (timestamp, stream) record (struct format "!QB") per line.
    """

    def __init__(self, run_dir):
        self.run_dir = run_dir
        self._lines = []      # cached (time, stream, line) tuples
        self._output = None   # output file handle, opened lazily
        self._index = None    # index file handle, opened lazily

    def read(self, start=0, end=None):
        """Read run output from start to end.

        Both start and end are zero-based indexes to run output lines
        and are both inclusive. Note this is different from the Python
        slice function where end is exclusive.
        """
        self._read_next(end)
        if end is None:
            slice_end = None
        else:
            slice_end = end + 1
        return self._lines[start:slice_end]

    def _read_next(self, end):
        # Read additional lines until `end` is covered (or EOF when end
        # is None); already-cached lines are not re-read.
        if end is not None and end < len(self._lines):
            return
        try:
            output, index = self._ensure_open()
        except IOError as e:
            # Fix: a missing output file raises ENOENT, not EEXIST -
            # the original test re-raised for runs that produced no
            # output instead of treating them as empty.
            if e.errno != errno.ENOENT:
                raise
        else:
            lines = self._lines
            while True:
                line = output.readline().rstrip().decode()
                if not line:
                    break
                time, stream = struct.unpack("!QB", index.read(9))
                lines.append((time, stream, line))
                if end is not None and end < len(self._lines):
                    break

    def _ensure_open(self):
        if self._output is None:
            guild_path = os.path.join(self.run_dir, ".guild")
            output = open(os.path.join(guild_path, "output"), "rb")
            index = open(os.path.join(guild_path, "output.index"), "rb")
            self._output, self._index = output, index
        assert self._output is not None
        assert self._index is not None
        return self._output, self._index

    def close(self):
        """Close any open output/index file handles."""
        self._try_close(self._output)
        self._try_close(self._index)

    @staticmethod
    def _try_close(f):
        if f is None:
            return
        try:
            f.close()
        except IOError:
            pass
def gpu_available():
    """Return True if a CUDA-capable GPU appears to be available.

    Detection is performed by attempting to load the platform's cuBLAS
    shared library; success implies a usable CUDA installation. Returns
    False when the library can't be loaded or the platform is
    unrecognized (a warning is logged in that case).
    """
    import ctypes
    # Pick the cuBLAS library name for this platform
    if "linux" in sys.platform:
        lib = "libcublas.so"
    elif sys.platform == "darwin":
        lib = "libcublas.dylib"
    elif sys.platform == "win32":
        lib = "cublas.dll"
    else:
        log.warning("unable to detect GPU for platform '%s'", sys.platform)
        lib = None
    if lib:
        log.debug("checking for GPU by loading %s", lib)
        try:
            ctypes.CDLL(lib)
        except OSError as e:
            log.debug("error loading '%s': %s", lib, e)
        else:
            log.debug("%s loaded", lib)
            return True
    return False
def render_label(template, vals):
tokens = re.split(r"(\${.+?})", template)
return "".join([_rendered_str(_render_token(t, vals)) for t in tokens])
def _render_token(token, vals):
m = re.match(r"\${(.+?)}", token)
if not m:
return token
ref_parts = m.group(1).split("|")
name = ref_parts[0]
transforms = ref_parts[1:]
val = vals.get(name)
for t in transforms:
val = _apply_template_transform(t, val)
return val
def _apply_template_transform(t, val):
    """Apply a single `name[:arg]` label transform to `val`.

    Supported transforms: a Python %-format spec (e.g. "%.2f"),
    "default:ARG" (substituted when val is None) and "basename".
    Unknown transforms log a warning and yield "#error#".
    """
    if hasattr(val, "wrapped_value"):
        val = val.wrapped_value
    parts = t.split(":", 1)
    if len(parts) == 1:
        name, arg = parts[0], None
    else:
        name, arg = parts
    if name[:1] == "%":
        return _t_python_format(val, name)
    elif name == "default":
        return _t_default(val, arg)
    elif name == "basename":
        if arg:
            # Fix: corrected garbled warning text ("argment to baseline")
            log.warning("ignoring argument to basename in %r", t)
        return _t_basename(val)
    else:
        log.warning("unsupported template transform: %r", t)
        return "#error#"
def _t_python_format(val, fmt):
try:
return fmt % val
except TypeError as e:
log.warning("error formatting %r with %r: %s", val, fmt, e)
return val
def _t_default(val, arg):
if val is None:
return arg or ""
return val
def _t_basename(val):
if not val:
return ""
return os.path.basename(strip_trailing_sep(val))
def _rendered_str(s):
if s is None:
return ""
return str(s)
def del_env(names):
    """Remove each of `names` from os.environ if present."""
    for name in names:
        os.environ.pop(name, None)
def python_interpreters():
import glob
bin_dir = os.path.dirname(sys.executable)
ret = []
for path in glob.glob(os.path.join(bin_dir, "python*")):
m = re.match(r"python([0-9\.]+)$", os.path.basename(path))
if m:
ret.append((path, m.group(1)))
return ret
def find_python_interpreter(version_spec):
import pkg_resources
try:
# Requirement.parse wants a package name, so we use 'python'
# here, but anything would do.
req = pkg_resources.Requirement.parse("python%s" % version_spec)
except pkg_resources.RequirementParseError:
raise ValueError(version_spec)
python_interps = {ver: path for path, ver in python_interpreters()}
matching = list(req.specifier.filter(sorted(python_interps)))
if matching:
matching_ver = matching[0]
return python_interps[matching_ver], matching_ver
return None
def is_executable_file(path):
    """Return True if `path` is a regular file with execute permission."""
    return os.access(path, os.X_OK) and os.path.isfile(path)
def copytree(src, dest, preserve_links=True):
from distutils import dir_util
dir_util.copy_tree(src, dest, preserve_symlinks=preserve_links)
def select_copytree(src, dest, config, copy_filter=None):
    """Copy files under `src` to `dest` as selected by `config` rules.

    `config` is a list of objects with a `specs` attribute; each spec
    has `patterns` (fnmatch patterns against relative paths) and a
    `type` ("include" or exclude) - the last matching spec decides
    whether a file is copied. `copy_filter`, when given, can veto
    directories and unmatched files and is notified before copying.

    Raises ValueError when `config` is not a list.
    """
    if not isinstance(config, list):
        raise ValueError("invalid config: expected list got %r" % config)
    log.debug("copying files from %s to %s", src, dest)
    to_copy = _select_files_to_copy(src, config, copy_filter)
    if not to_copy:
        log.debug("no files to copy")
        return
    for file_src, file_src_rel_path in to_copy:
        file_dest = os.path.join(dest, file_src_rel_path)
        log.debug("copying file %s to %s", file_src, file_dest)
        # Create intermediate directories as needed before copying
        ensure_dir(os.path.dirname(file_dest))
        _try_copy_file(file_src, file_dest)
def _select_files_to_copy(src_dir, config, copy_filter):
    """Return a sorted list of (abs_path, rel_path) files to copy.

    Walks `src_dir` following symlinks; real paths already seen are
    pruned from the walk to guard against symlink cycles. Excluded
    directories are removed from the walk in place.
    """
    to_copy = []
    seen_dirs = set()
    log.debug("generating file list from %s", src_dir)
    for root, dirs, files in os.walk(src_dir, followlinks=True):
        seen_dirs.add(os.path.realpath(root))
        _del_excluded_select_copy_dirs(
            dirs, src_dir, root, seen_dirs,
            config, copy_filter)
        for name in files:
            path = os.path.join(root, name)
            if not os.path.isfile(path):
                # Skip broken symlinks and other non-regular entries
                continue
            rel_path = os.path.relpath(path, src_dir)
            log.debug("considering file to copy %s", path)
            if _select_to_copy(path, rel_path, config, copy_filter):
                log.debug("seleted file to copy %s", path)
                to_copy.append((path, rel_path))
    # Sort before notifying copy_filter to have deterministic result.
    to_copy.sort()
    if copy_filter:
        copy_filter.pre_copy(to_copy)
    return to_copy
def _del_excluded_select_copy_dirs(dirs, src_dir, root, seen_dirs,
config, copy_filter):
_del_seen_dirs(dirs, root, seen_dirs)
_del_config_excluded_dirs(dirs, src_dir, root, config)
if copy_filter:
copy_filter.delete_excluded_dirs(root, dirs)
def _del_seen_dirs(dirs, root, seen):
for dir_name in dirs:
real_path = os.path.realpath(os.path.join(root, dir_name))
if real_path in seen:
dirs.remove(dir_name)
def _del_config_excluded_dirs(dirs, src_dir, root, config):
for name in list(dirs):
path = os.path.join(root, name)
rel_path = os.path.relpath(path, src_dir)
if not _select_to_copy(path, rel_path, config):
dirs.remove(name)
def _select_to_copy(path, rel_path, config, copy_filter=None):
assert isinstance(config, list)
last_match = None
for config_item in config:
for spec in config_item.specs:
if _select_file_match(rel_path, spec):
last_match = spec
if last_match:
return _select_to_copy_for_spec(last_match)
if copy_filter:
return copy_filter.default_select_path(path)
return True
def _select_file_match(rel_path, spec):
return any((fnmatch.fnmatch(rel_path, p) for p in spec.patterns))
def _select_to_copy_for_spec(spec):
return spec.type == "include"
def _try_copy_file(src, dest):
try:
shutil.copyfile(src, dest)
except (IOError, OSError) as e:
# This is not an error we want to stop an operation for. Log
# and continue.
if log.getEffectiveLevel() <= logging.DEBUG:
log.exception("copy %s to %s", src, dest)
else:
log.warning("could not copy source code file %s: %s", src, e)
def hostname():
return os.getenv("HOST") or _real_hostname()
def _real_hostname():
import socket
try:
return socket.gethostname()
except Exception:
if log.getEffectiveLevel() <= logging.DEBUG:
log.exception("socket.gethostname()")
return ""
def user():
return os.getenv("USER") or ""
def shlex_split(s):
    """Split command line string `s` shell-style.

    None is treated as an empty string; passing None directly to
    shlex.split would block (see https://bugs.python.org/issue27775).
    """
    return shlex.split(s or "")
def shlex_quote(s):
# If s can't be None in case where pipes.quote is used by six.
s = s or ""
return _simplify_shlex_quote(six.moves.shlex_quote(s))
def _simplify_shlex_quote(s):
repls = [
("''\"'\"'", "\"'"),
]
for pattern_start, repl_start in repls:
if not s.startswith(pattern_start):
continue
pattern_end = "".join(reversed(pattern_start))
if not s.endswith(pattern_end):
continue
repl_end = "".join(reversed(repl_start))
stripped = s[len(pattern_start):-len(pattern_end)]
return repl_start + stripped + repl_end
return s
def format_bytes(n):
    """Format byte count `n` in human readable form (K/M/G/T/P/E/Z).

    Values whose magnitude is below 1024 are returned verbatim; larger
    values are scaled by powers of 1024 and shown with one decimal.
    """
    if abs(n) < 1024:
        return str(n)
    value = float(n)
    for unit in ("K", "M", "G", "T", "P", "E"):
        value /= 1024.0
        if abs(value) < 1024:
            return "%3.1f%s" % (value, unit)
    return "%.1f%s" % (value / 1024.0, "Z")
class Chdir(object):
    """Context manager that changes the working directory to `path`
    and restores the previous directory on exit. Not reentrant."""

    _save = None  # directory to restore on exit

    def __init__(self, path):
        self._path = path

    def __enter__(self):
        self._save = os.getcwd()
        os.chdir(self._path)

    def __exit__(self, *_args):
        os.chdir(self._save)
def log_apply(f, *args, **kw):
level = kw.pop("logging_level", logging.DEBUG)
prefix = kw.pop("logging_prefix", "CALLING")
log.log(level, "%s %s", prefix, _log_apply_msg(f, args, kw))
return f(*args, **kw)
class _log_apply_msg(object):
def __init__(self, f, args, kw):
self.f = f
self.args = args
self.kw = kw
def __str__(self):
return "%s %s %s %s" % (
self.f.__module__, self.f.__name__, self.args, self.kw)
def encode_yaml(val):
import yaml
encoded = yaml.safe_dump(
val,
default_flow_style=False,
indent=2)
if encoded.endswith("\n...\n"):
encoded = encoded[:-4]
return encoded
def decode_yaml(s):
import yaml
try:
return yaml.safe_load(s)
except yaml.scanner.ScannerError as e:
raise ValueError(e)
def dir_size(dir):
    """Return the total size in bytes of entries under `dir`, recursive.

    Directory entries themselves are included via os.path.getsize on
    each walked name.
    """
    total = 0
    for root, dirs, names in os.walk(dir):
        for entry in dirs + names:
            total += os.path.getsize(os.path.join(root, entry))
    return total
def platform_info():
"""Returns a dict of system info."""
info = _platform_base_info()
info.update(_platform_psutil_info())
return info
def _platform_base_info():
return {
"architecture": " ".join(platform.architecture()),
"processor": platform.processor(),
"python_version": sys.version.replace("\n", ""),
"uname": " ".join(platform.uname()),
}
def _platform_psutil_info():
try:
import psutil
except ImportError:
return {}
else:
return {
"cpus": psutil.cpu_count(),
}
def guild_user_agent():
import guild
system, _node, release, _ver, machine, _proc = platform.uname()
return (
"python-guildai/%s (%s; %s; %s)" % (
guild.__version__,
system,
machine,
release))
def nested_config(kv, nested=None):
nested = nested or {}
for name, val in sorted(kv.items()):
_apply_nested(name, val, nested)
return nested
def _apply_nested(name, val, nested):
parts = name.split(".")
cur = nested
for i in range(0, len(parts) - 1):
cur = cur.setdefault(parts[i], {})
if not isinstance(cur, dict):
conflicts_with = ".".join(parts[0:i + 1])
raise ValueError(
"%r cannot be nested: conflicts with {%r: %s}"
% (name, conflicts_with, cur))
cur[parts[-1]] = val
def short_digest(s):
    """Return the first 8 chars of digest `s`, or "" if `s` is falsy."""
    return s[:8] if s else ""
def safe_listdir(path):
    """Return os.listdir(path), or [] if the directory can't be read
    (missing, not a directory, or permission denied)."""
    try:
        return os.listdir(path)
    except OSError:
        return []
def compare_paths(p1, p2):
return _abs_path_with_cache(p1) == _abs_path_with_cache(p2)
__abs_path = {}
def _abs_path_with_cache(p):
try:
return __abs_path[p]
except KeyError:
__abs_path[p] = abs = os.path.abspath(os.path.expanduser(p))
return abs
def shorten_dir(path, max_len=28, ellipsis="...", sep=os.path.sep):
    """Return `path` shortened to roughly `max_len` chars.

    Middle path parts are dropped and replaced with `ellipsis`,
    alternating between keeping rightmost and leftmost parts. The
    rightmost part is always kept; for relative paths the leftmost part
    is kept too. Returns `path` unchanged when it is already short
    enough or when shortening would not save characters.
    """
    if len(path) <= max_len:
        return path
    parts = _shorten_dir_split_path(path, sep)
    if len(parts) == 1:
        return parts[0]
    assert all(parts), parts
    r = [parts.pop()] # Always include rightmost part
    if parts[0][0] == sep:
        # Absolute path - no forced leftmost part, take from right first
        l = []
        pop_r = False
    else:
        # Relative path, always include leftmost part
        l = [parts.pop(0)]
        pop_r = True
    while parts:
        # Current budget used by each side (+1 per part for separators)
        len_l = sum([len(s) + 1 for s in l])
        len_r = sum([len(s) + 1 for s in r])
        part = parts.pop() if pop_r else parts.pop(0)
        side = r if pop_r else l
        if len_l + len_r + len(part) + len(ellipsis) >= max_len:
            # Taking this part would exceed the budget - stop here
            break
        side.append(part)
        pop_r = not pop_r
    shortened = os.path.sep.join([
        os.path.sep.join(l),
        ellipsis,
        os.path.sep.join(reversed(r))])
    if len(shortened) >= len(path):
        # Shortening didn't actually help - keep the original
        return path
    return shortened
def _shorten_dir_split_path(path, sep):
"""Splits path into parts.
Leading and repeated '/' chars are prepended to the
part. E.g. "/foo/bar" is returned as ["/foo", "bar"] and
"foo//bar" as ["foo", "/bar"].
"""
if not path:
return []
parts = path.split(sep)
packed = []
blanks = []
for part in parts:
if part == "":
blanks.append("")
else:
packed.append(sep.join(blanks + [part]))
blanks = []
if len(blanks) > 1:
packed.append(sep.join(blanks))
return packed
class HTTPResponse(object):
def __init__(self, resp):
self.status_code = resp.status
self.text = resp.read()
class HTTPConnectionError(Exception):
pass
def http_post(url, data, timeout=None):
headers = {
"User-Agent": guild_user_agent(),
"Content-type": "application/x-www-form-urlencoded",
}
return _http_request(url, headers, data, "POST", timeout)
def http_get(url, timeout=None):
return _http_request(url, timeout=timeout)
def _http_request(url, headers=None, data=None, method="GET", timeout=None):
    """Issue an HTTP(S) request to `url` and return an HTTPResponse.

    `data`, when given, is urlencoded and sent as the request body.
    Raises HTTPConnectionError when the connection is refused; other
    socket errors propagate.
    """
    import socket
    from six.moves import urllib
    headers = headers or {}
    url_parts = urllib.parse.urlparse(url)
    conn = _HTTPConnection(url_parts.scheme, url_parts.netloc, timeout)
    params = urllib.parse.urlencode(data) if data else ""
    try:
        conn.request(method, url_parts.path, params, headers)
    except socket.error as e:
        if e.errno == errno.ECONNREFUSED:
            raise HTTPConnectionError(url)
        raise
    else:
        return HTTPResponse(conn.getresponse())
def _HTTPConnection(scheme, netloc, timeout):
from six.moves import http_client
if scheme == "http":
return http_client.HTTPConnection(netloc, timeout=timeout)
elif scheme == "https":
return http_client.HTTPSConnection(netloc, timeout=timeout)
else:
raise ValueError(
"unsupported scheme '%s' - must be 'http' or 'https'"
% scheme)
class StdIOContextManager(object):
    """Wraps a stream in a no-op context manager interface.

    Lets a plain stream (e.g. sys.stdout) be used where a `with`
    statement expects a context manager; the stream is NOT closed on
    exit.
    """

    def __init__(self, stream):
        self.stream = stream

    def __enter__(self):
        return self.stream

    def __exit__(self, *_exc):
        pass
def check_env(env):
for name, val in env.items():
if not isinstance(name, six.string_types):
raise ValueError("non-string env name %r" % name)
if not isinstance(val, six.string_types):
raise ValueError("non-string env value for '%s': %r" % (name, val))
class SysArgv(object):
    """Context manager that temporarily replaces sys.argv[1:] with `args`.

    The previous arguments are restored on exit. Not reentrant.
    """

    def __init__(self, args):
        self._args = args
        self._save = None  # previous argv tail, while active

    def __enter__(self):
        assert self._save is None, self._save
        self._save = sys.argv[1:]
        sys.argv[1:] = self._args

    def __exit__(self, *_exc):
        assert self._save is not None
        sys.argv[1:] = self._save
        self._save = None
class StdinReader(object):
__enter__ = lambda self, *_args: self
__exit__ = lambda *_args: None
@staticmethod
def __iter__():
while True:
line = sys.stdin.readline()
if not line.strip():
break
yield line
def env_var_name(s):
    """Upper-case `s` and replace any non [A-Z0-9_] chars with "_"."""
    return re.sub(r"[^A-Z0-9_]", "_", s.upper())
def env_var_quote(s):
if s == "":
return ""
return shlex_quote(s)
def realpath(path):
# Workaround for https://bugs.python.org/issue9949
try:
link = os.readlink(path)
except OSError:
return os.path.realpath(path)
else:
path_dir = os.path.dirname(path)
return os.path.abspath(os.path.join(path_dir, link))
def norm_path_sep(path):
return path.replace(os.path.sep, "/")
def bind_method(obj, method_name, function):
setattr(obj, method_name, function.__get__(obj, obj.__class__))
def editor(s):
import click
try:
edited = click.edit(s, _try_editor())
except click.UsageError as e:
raise ValueError(e)
else:
if edited is not None:
return edited
return s
def _try_editor():
return find_apply([
_try_editor_env,
_try_editor_bin,
])
def _try_editor_env():
names = ("VISUAL", "EDITOR")
for name in names:
val = os.getenv(name)
if val:
return val
return None
def _try_editor_bin():
"""Returns /usr/bin/editor if it exists.
This is the path configured by `update-alternatives` on Ubuntu
systems.
"""
editor_bin = "/usr/bin/editor"
if os.path.exists(editor_bin):
return editor_bin
return None
| 28.116799
| 79
| 0.605467
|
bc5982c5b8d892b0cb6de24f3efc01c078e08bf5
| 1,000
|
py
|
Python
|
Code/Scripts/pyplot_test_hour_March_1.py
|
vr97/finalyearproject2018-19
|
4a19fa5f06d6ea37df3aa2cf50ad888278081300
|
[
"MIT"
] | 1
|
2020-03-11T14:32:28.000Z
|
2020-03-11T14:32:28.000Z
|
Code/Scripts/pyplot_test_hour_March_1.py
|
vr97/finalyearproject2018-19
|
4a19fa5f06d6ea37df3aa2cf50ad888278081300
|
[
"MIT"
] | null | null | null |
Code/Scripts/pyplot_test_hour_March_1.py
|
vr97/finalyearproject2018-19
|
4a19fa5f06d6ea37df3aa2cf50ad888278081300
|
[
"MIT"
] | null | null | null |
# Plots hourly popularity for March 1 as a plotly line chart.
# plotly.offline doesn't push your charts to the clouds
import plotly.offline as pyo
# allows us to create the Data and Figure objects
from plotly.graph_objs import *
# plotly.plotly pushes your charts to the cloud
# NOTE(review): `plotly.plotly` was removed in plotly v4 (moved to the
# separate `chart_studio` package) - confirm the pinned plotly version
# still provides this import.
import plotly.plotly as py
import plotly.io as pio
# pandas is a data analysis library
import pandas as pd
from pandas import DataFrame
# Annotated hourly popularity data (absolute Windows path - this script
# only runs on the author's machine as written)
popularityData = pd.read_excel("D:\\Users\\yashk\\Campaign-Assistant\\Data\\Annotated\\graph_hour_output_March_1.xls") ##get DATA
# Line trace of popularity (%) vs hour
trace1 = {'type': 'scatter',
          'mode': 'lines',
          'name': 'trace1',
          'x': popularityData['hour'],
          'y': popularityData['popularity']}
data = Data([trace1])
layout = {'title': 'Rahul Gandhi ',
          'xaxis': {'title': 'Hour'},
          'yaxis': {'title': 'Popularity in %'}}
fig = Figure(data=data, layout=layout)
# Render the chart to a local HTML file and open it in a browser
pyo.plot(fig,filename = 'line-demo')
# static_image_bytes = pio.to_image(fig, format='png')
# pio.write_image(fig, file='plotly_static_image.png', format='png')
| 33.333333
| 130
| 0.683
|
8681dc9beb7ce1fcfe008337221dc6feb16aedb5
| 1,888
|
py
|
Python
|
conan/tools/env/virtualrunenv.py
|
dscole/conan
|
ff7b8e6703e8407773968517d68424b9ec59aa30
|
[
"MIT"
] | null | null | null |
conan/tools/env/virtualrunenv.py
|
dscole/conan
|
ff7b8e6703e8407773968517d68424b9ec59aa30
|
[
"MIT"
] | 1
|
2019-06-07T03:02:02.000Z
|
2019-06-07T03:02:02.000Z
|
conan/tools/env/virtualrunenv.py
|
dscole/conan
|
ff7b8e6703e8407773968517d68424b9ec59aa30
|
[
"MIT"
] | 1
|
2021-08-20T19:47:51.000Z
|
2021-08-20T19:47:51.000Z
|
from conan.tools.env import Environment
def runenv_from_cpp_info(conanfile, cpp_info):
    """ return an Environment deducing the runtime information from a cpp_info

    Prepends the dependency's bin/lib/framework paths to the lookup
    variables used by the OS loader (PATH, LD_LIBRARY_PATH,
    DYLD_LIBRARY_PATH, DYLD_FRAMEWORK_PATH).
    """
    dyn_runenv = Environment(conanfile)
    if cpp_info is None:  # This happens when the dependency is a private one = BINARY_SKIP
        return dyn_runenv
    if cpp_info.bin_paths:  # cpp_info.exes is not defined yet
        dyn_runenv.prepend_path("PATH", cpp_info.bin_paths)
    # If it is a build_require this will be the build-os, otherwise it will be the host-os
    if cpp_info.lib_paths:
        # Shared-library search paths for Linux and macOS respectively
        dyn_runenv.prepend_path("LD_LIBRARY_PATH", cpp_info.lib_paths)
        dyn_runenv.prepend_path("DYLD_LIBRARY_PATH", cpp_info.lib_paths)
    if cpp_info.framework_paths:
        # macOS framework search path
        dyn_runenv.prepend_path("DYLD_FRAMEWORK_PATH", cpp_info.framework_paths)
    return dyn_runenv
class VirtualRunEnv:
    """ captures the conanfile environment that is defined from its
    dependencies, and also from profiles
    """

    def __init__(self, conanfile):
        # conanfile: the consumer recipe whose runtime dependencies are inspected.
        self._conanfile = conanfile

    def environment(self):
        """ collects the runtime information from dependencies. For normal libraries should be
        very occasional
        """
        runenv = Environment(self._conanfile)
        # FIXME: Missing profile info
        # FIXME: Cache value?
        host_req = self._conanfile.dependencies.host
        test_req = self._conanfile.dependencies.test
        for _, dep in list(host_req.items()) + list(test_req.items()):
            if dep.runenv_info:
                # Explicit runenv_info declared by the dependency is applied first,
                runenv.compose_env(dep.runenv_info)
            # then paths deduced from its cpp_info (PATH, LD_LIBRARY_PATH, ...).
            runenv.compose_env(runenv_from_cpp_info(self._conanfile, dep.cpp_info))
        return runenv

    def generate(self, auto_activate=False):
        # Write the collected environment as a "conanrunenv" activation script;
        # skipped entirely when the environment is empty (falsy).
        run_env = self.environment()
        if run_env:
            run_env.save_script("conanrunenv", auto_activate=auto_activate)
| 37.76
| 94
| 0.697034
|
e837b70dfe5858ccfaf8848ed5e2a36045d84919
| 2,976
|
py
|
Python
|
django_evolution/tests/delete_app.py
|
clones/django-evolution
|
34b1131873da463d22801d8b845a72f35cb367bd
|
[
"BSD-3-Clause"
] | 1
|
2016-05-09T04:23:18.000Z
|
2016-05-09T04:23:18.000Z
|
django_evolution/tests/delete_app.py
|
clones/django-evolution
|
34b1131873da463d22801d8b845a72f35cb367bd
|
[
"BSD-3-Clause"
] | null | null | null |
django_evolution/tests/delete_app.py
|
clones/django-evolution
|
34b1131873da463d22801d8b845a72f35cb367bd
|
[
"BSD-3-Clause"
] | null | null | null |
# Doctest suite for django-evolution's DeleteApplication mutation: it registers
# a set of models, simulates deleting the whole 'tests' application, and checks
# the SQL emitted both without and with an explicit database ('default').
# Written for Python 2 (bare `print` statements inside the doctests).  The
# %(...)s placeholders are filled from the backend-specific SQL mapping below.
from django_evolution.tests.utils import test_sql_mapping

tests = r"""
>>> from datetime import datetime
>>> from pprint import PrettyPrinter
>>> from django.db import models
>>> from django_evolution.mutations import AddField, DeleteField, DeleteApplication
>>> from django_evolution.tests.utils import test_proj_sig, execute_test_sql, register_models, deregister_models
>>> from django_evolution.diff import Diff
>>> from django_evolution import signature
>>> from django_evolution import models as test_app
>>> import copy
>>> class AppDeleteAnchor1(models.Model):
... value = models.IntegerField()
>>> class AppDeleteAnchor2(models.Model):
... value = models.IntegerField()
... class Meta:
... db_table = 'app_delete_custom_add_anchor_table'
>>> class AppDeleteBaseModel(models.Model):
... char_field = models.CharField(max_length=20)
... int_field = models.IntegerField()
... anchor_fk = models.ForeignKey(AppDeleteAnchor1)
... anchor_m2m = models.ManyToManyField(AppDeleteAnchor2)
>>> class AppDeleteCustomTableModel(models.Model):
... value = models.IntegerField()
... alt_value = models.CharField(max_length=20)
... class Meta:
... db_table = 'app_delete_custom_table_name'
# Store the base signatures, and populate the app cache
>>> anchors = [('AppDeleteAnchor1', AppDeleteAnchor1), ('AppDeleteAnchor2',AppDeleteAnchor2)]
>>> test_model = [('TestModel', AppDeleteBaseModel)]
>>> custom_model = [('CustomTestModel', AppDeleteCustomTableModel)]
>>> all_models = []
>>> all_models.extend(anchors)
>>> all_models.extend(test_model)
>>> all_models.extend(custom_model)
>>> start = register_models(*all_models)
>>> start_sig = test_proj_sig(*all_models)
# Copy the base signature, and delete the tests app.
>>> deleted_app_sig = copy.deepcopy(start_sig)
>>> deleted_app_sig = deleted_app_sig.pop('tests')
>>> d = Diff(start_sig, deleted_app_sig)
>>> print d.deleted
{'tests': ['AppDeleteAnchor1', 'AppDeleteAnchor2', 'TestModel', 'CustomTestModel']}
>>> test_sig = copy.deepcopy(start_sig)
>>> test_sql = []
>>> delete_app = DeleteApplication()
>>> for app_label in d.deleted.keys():
... test_sql.append(delete_app.mutate(app_label, test_sig))
... delete_app.simulate(app_label, test_sig)
>>> Diff(test_sig, deleted_app_sig).is_empty(ignore_apps=True)
True
>>> for sql_list in test_sql:
... for sql in sql_list:
... print sql
%(DeleteApplicationWithoutDatabase)s
>>> test_sql = []
>>> delete_app = DeleteApplication()
>>> for app_label in d.deleted.keys():
... test_sql.append(delete_app.mutate(app_label, test_sig, 'default'))
... delete_app.simulate(app_label, test_sig)
>>> Diff(test_sig, deleted_app_sig).is_empty(ignore_apps=True)
True
>>> for sql_list in test_sql:
... for sql in sql_list:
... print sql
%(DeleteApplication)s
# Clean up after the applications that were installed
>>> deregister_models()
""" % test_sql_mapping('delete_application')
| 32.703297
| 112
| 0.72379
|
f7617c3cfd4be1e85e7376d2ce6ae6e779351b16
| 5,746
|
py
|
Python
|
riptide_vision/scripts/riptide_vision/oculus.py
|
clabough2/riptide_software
|
3ac70fac9e2fb6a5a7f761939a9b5c401605ad27
|
[
"BSD-2-Clause"
] | null | null | null |
riptide_vision/scripts/riptide_vision/oculus.py
|
clabough2/riptide_software
|
3ac70fac9e2fb6a5a7f761939a9b5c401605ad27
|
[
"BSD-2-Clause"
] | null | null | null |
riptide_vision/scripts/riptide_vision/oculus.py
|
clabough2/riptide_software
|
3ac70fac9e2fb6a5a7f761939a9b5c401605ad27
|
[
"BSD-2-Clause"
] | 1
|
2019-08-29T03:39:56.000Z
|
2019-08-29T03:39:56.000Z
|
#!/usr/bin/env python
# oculus.py
# Subscribes to camera output, publishes data about what it sees.
# Determines what to look for based on what is being subscribed to.
import rospy
from cv_bridge import CvBridge
from sensor_msgs.msg import Image, CompressedImage
from riptide_vision import RiptideVision
from gate_processor import GateProcessor
from pole_processor import PoleProcessor
from riptide_msgs.msg import TaskAlignment, BoundingBox
from geometry_msgs.msg import Point
import time
class Oculus:
    """ROS vision node: subscribes to the forward camera, detects the gate or
    pole task depending on which alignment topic has subscribers, and publishes
    smoothed alignment data.  Python 2 code (bare print statements)."""

    # Class constants
    SHAKE_THRESHOLD = 20  # Allowable amount of difference between positions
    MAX_SAMPLES = 5  # Number of previous positions to store for averaging
    DEBUG = True  # Setting to true will publish processed images on debug topic

    MODE_NONE = -1
    MODE_GATE = 0  # Detect gate mode
    MODE_POLE = 1  # Detect pole mode

    def __init__(self):
        # Debug image out, alignment data out, raw camera frames in.
        self.image_pub = rospy.Publisher("/forward/processed/compressed", CompressedImage, queue_size=1)
        self.alignment_pub = rospy.Publisher("/task/gate/alignment", TaskAlignment, queue_size=1)
        self.fwd_sub = rospy.Subscriber("/forward/image_raw", Image, self.image_callback, queue_size=1)
        self.bridge = CvBridge()
        # Ring of recent detections used for shake rejection / averaging.
        self.prev_pos = list()
        self.mode = self.MODE_NONE
        self.gate_processor = GateProcessor()
        self.pole_processor = PoleProcessor()

    def update_mode(self, mode, topic=None):
        # Re-point alignment_pub at the topic for the new detection mode.
        self.alignment_pub.unregister()
        if (mode is not self.MODE_NONE):
            self.alignment_pub = rospy.Publisher(topic, TaskAlignment, queue_size=1)
            print "Publishing on " + topic + "."
        # NOTE(review): `is not` on small ints works only thanks to CPython
        # int caching; `!=` would be the robust comparison.
        self.mode = mode
        print "Switched to mode " + str(mode)

    # Called whenever a camera frame is availale.
    def image_callback(self, data):
        # Convert image message to something OpenCV can deal with
        cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")

        pos = None
        bbox = None

        # Process the image based on which topic is being subscribed to
        # Set the object data pub to publish on the correct topic
        # Use mode to avoid creating a new publisher each time
        if (self.gate_processor.IsConnected()):
            if (self.mode != self.MODE_GATE):
                self.update_mode(self.MODE_GATE, "task/gate/alignment")
            t = time.time()  # NOTE(review): unused; leftover timing probe
            pos, bbox = self.gate_processor.Process(cv_image, self.image_pub)
        elif (self.pole_processor.IsConnected()):
            if (self.mode != self.MODE_POLE):
                self.update_mode(self.MODE_POLE, "task/pole/alignment")
            pos, bbox = self.pole_processor.Process(cv_image, self.image_pub)
        else:
            # Nobody is listening: drop back to idle and clear history.
            if self.mode is not self.MODE_NONE:
                self.update_mode(self.MODE_NONE)
                self.reset_processor()

        if self.mode is not self.MODE_NONE:
            self.process_alignment_data(pos, bbox)

    # Function: reset_processor
    # Parameters:
    #   self
    # Description:
    #   Deletes any stored information in the processor to allow it to switch
    #   into a different processing mode with a clean slate
    def reset_processor(self):
        del self.prev_pos[:]

    # Function: pos_is_valid
    # Parameters:
    #   self
    #   pos: Position to check
    # Description:
    #   Returns whether or not a position is within the given
    #   SHAKE_THRESHOLD. Prevents sporatic false positives from skewing
    #   the average position.
    def pos_is_valid(self, pos):
        x = True
        y = True
        z = True
        if (len(self.prev_pos) > 0):
            # Compare against the most recent accepted position only.
            x = abs(self.prev_pos[0].x - pos.x) < self.SHAKE_THRESHOLD
            y = abs(self.prev_pos[0].y - pos.y) < self.SHAKE_THRESHOLD
            z = abs(self.prev_pos[0].z - pos.z) < self.SHAKE_THRESHOLD
        return x and y and z

    # Function: get_new_average_pos
    # Parameters:
    #   self
    #   new_pos: Position to be added to the average
    # Description:
    #   Returns an average position of *new_pos* and the previous
    #   *MAX_SAMPLES* positions
    def get_new_average_pos(self, new_pos):
        avg_pos = Point()
        length = len(self.prev_pos)
        if (length == self.MAX_SAMPLES):
            self.prev_pos.pop()
        self.prev_pos.insert(0, new_pos)
        length += 1
        # NOTE(review): when the buffer was already full, `length` ends up
        # MAX_SAMPLES + 1 while only MAX_SAMPLES positions are summed, so the
        # average is biased low — verify whether this is intended damping.
        xt = 0
        yt = 0
        zt = 0
        for p in self.prev_pos:
            xt += p.x
            yt += p.y
            zt += p.z
        avg_pos.x = xt / length
        avg_pos.y = yt / length
        avg_pos.z = zt / length
        return avg_pos

    # Function: process_object_data
    # Parameters:
    #   self
    #   pos: Position of the object
    # Description:
    #   Publishes an object data message using the *alignment_pub*.
    #   *pos* is used to generate a new average position that is added to
    #   the message.
    def process_alignment_data(self, pos, bbox):
        align_msg = TaskAlignment()
        align_msg.header.stamp = rospy.Time.now()  # Timestamp

        # Check if we saw the object
        # If yes, add the new position to the average and publish
        # If no, set visible to false and publish
        if (pos is not None):
            align_msg.visible = True
            if (self.pos_is_valid(pos)):
                align_msg.relative_pos = self.get_new_average_pos(pos)
                if bbox is not None:
                    align_msg.bbox = bbox
            else:
                # Outlier jump: discard the history rather than averaging it in.
                self.reset_processor()
        else:
            align_msg.visible = False

        self.alignment_pub.publish(align_msg)
def main():
    """Entry point: initialise the 'oculus' ROS node and run until shutdown."""
    rospy.init_node('oculus')
    node = Oculus()  # subscriptions are wired up in the constructor
    rospy.spin()


if __name__ == "__main__":
    main()
| 35.251534
| 104
| 0.626349
|
3c134ae24735cd132070e47f5cc33959d1fb05c4
| 19,708
|
py
|
Python
|
pointnav_vo/utils/rotation_utils.py
|
rxlqn/PointNav-VO
|
6f7672482a3f1628a3b11025709518ee166e812b
|
[
"Apache-2.0"
] | 25
|
2021-08-28T04:06:31.000Z
|
2022-03-02T23:03:13.000Z
|
pointnav_vo/utils/rotation_utils.py
|
rxlqn/PointNav-VO
|
6f7672482a3f1628a3b11025709518ee166e812b
|
[
"Apache-2.0"
] | 11
|
2021-10-01T07:03:11.000Z
|
2022-03-26T02:28:44.000Z
|
pointnav_vo/utils/rotation_utils.py
|
rxlqn/PointNav-VO
|
6f7672482a3f1628a3b11025709518ee166e812b
|
[
"Apache-2.0"
] | 5
|
2021-09-01T09:05:42.000Z
|
2022-01-27T10:11:37.000Z
|
# based on https://github.com/facebookresearch/pytorch3d/blob/7e986cfba8e8e09fbd24ffc1cbfef2914681e02c/pytorch3d/transforms/rotation_conversions.py#L216
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import functools
import numpy as np
from typing import Optional
import torch
import torch.nn.functional as F
"""
The transformation matrices returned from the functions in this file assume
the points on which the transformation will be applied are column vectors.
i.e. the R matrix is structured as
R = [
[Rxx, Rxy, Rxz],
[Ryx, Ryy, Ryz],
[Rzx, Rzy, Rzz],
] # (3, 3)
This matrix can be applied to column vectors by post multiplication
by the points e.g.
points = [[0], [1], [2]] # (3 x 1) xyz coordinates of a point
transformed_points = R * points
To apply the same matrix to points which are row vectors, the R matrix
can be transposed and pre multiplied by the points:
e.g.
points = [[0, 1, 2]] # (1 x 3) xyz coordinates of a point
transformed_points = points * R.transpose(1, 0)
"""
def quaternion_to_matrix(quaternions):
    """
    Convert rotations given as quaternions to rotation matrices.

    Args:
        quaternions: quaternions with real part first,
            as tensor of shape (..., 4).

    Returns:
        Rotation matrices as tensor of shape (..., 3, 3).
    """
    r, i, j, k = torch.unbind(quaternions, -1)
    # Scale factor 2 / |q|^2, so non-unit quaternions still yield proper
    # rotation matrices.
    two_s = 2.0 / (quaternions * quaternions).sum(-1)

    # Row-major entries of the standard quaternion-to-matrix formula.
    o = torch.stack(
        (
            1 - two_s * (j * j + k * k),
            two_s * (i * j - k * r),
            two_s * (i * k + j * r),
            two_s * (i * j + k * r),
            1 - two_s * (i * i + k * k),
            two_s * (j * k - i * r),
            two_s * (i * k - j * r),
            two_s * (j * k + i * r),
            1 - two_s * (i * i + j * j),
        ),
        -1,
    )
    return o.reshape(quaternions.shape[:-1] + (3, 3))
def _copysign(a, b):
"""
Return a tensor where each element has the absolute value taken from the,
corresponding element of a, with sign taken from the corresponding
element of b. This is like the standard copysign floating-point operation,
but is not careful about negative 0 and NaN.
Args:
a: source tensor.
b: tensor whose signs will be used, of the same shape as a.
Returns:
Tensor of the same shape as a with the signs of b.
"""
signs_differ = (a < 0) != (b < 0)
return torch.where(signs_differ, -a, a)
def _sqrt_positive_part(x):
"""
Returns torch.sqrt(torch.max(0, x))
but with a zero subgradient where x is 0.
"""
ret = torch.zeros_like(x)
positive_mask = x > 0
ret[positive_mask] = torch.sqrt(x[positive_mask])
return ret
def matrix_to_quaternion(matrix):
    """
    Convert rotations given as rotation matrices to quaternions.

    Args:
        matrix: Rotation matrices as tensor of shape (..., 3, 3).

    Returns:
        quaternions with real part first, as tensor of shape (..., 4).

    Raises:
        ValueError: if the trailing two dimensions are not 3x3.
    """
    if matrix.size(-1) != 3 or matrix.size(-2) != 3:
        # Fixed: the original message contained a stray 'f' ("shape f{...}").
        raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.")
    m00 = matrix[..., 0, 0]
    m11 = matrix[..., 1, 1]
    m22 = matrix[..., 2, 2]
    # Component magnitudes come from the diagonal ...
    o0 = 0.5 * _sqrt_positive_part(1 + m00 + m11 + m22)
    x = 0.5 * _sqrt_positive_part(1 + m00 - m11 - m22)
    y = 0.5 * _sqrt_positive_part(1 - m00 + m11 - m22)
    z = 0.5 * _sqrt_positive_part(1 - m00 - m11 + m22)
    # ... and signs from the skew-symmetric (off-diagonal) part.
    o1 = _copysign(x, matrix[..., 2, 1] - matrix[..., 1, 2])
    o2 = _copysign(y, matrix[..., 0, 2] - matrix[..., 2, 0])
    o3 = _copysign(z, matrix[..., 1, 0] - matrix[..., 0, 1])
    return torch.stack((o0, o1, o2, o3), -1)
def _axis_angle_rotation(axis: str, angle):
"""
Return the rotation matrices for one of the rotations about an axis
of which Euler angles describe, for each value of the angle given.
Args:
axis: Axis label "X" or "Y or "Z".
angle: any shape tensor of Euler angles in radians
Returns:
Rotation matrices as tensor of shape (..., 3, 3).
"""
cos = torch.cos(angle)
sin = torch.sin(angle)
one = torch.ones_like(angle)
zero = torch.zeros_like(angle)
if axis == "X":
R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos)
if axis == "Y":
R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos)
if axis == "Z":
R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one)
return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3))
def euler_angles_to_matrix(euler_angles, convention: str):
    """
    Convert rotations given as Euler angles in radians to rotation matrices.

    Args:
        euler_angles: Euler angles in radians as tensor of shape (..., 3).
        convention: Convention string of three uppercase letters from
            {"X", "Y", and "Z"}.

    Returns:
        Rotation matrices as tensor of shape (..., 3, 3).

    Raises:
        ValueError: for malformed euler_angles or convention.
    """
    if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3:
        raise ValueError("Invalid input euler angles.")
    if len(convention) != 3:
        raise ValueError("Convention must have 3 letters.")
    if convention[1] in (convention[0], convention[2]):
        raise ValueError(f"Invalid convention {convention}.")
    for letter in convention:
        if letter not in ("X", "Y", "Z"):
            raise ValueError(f"Invalid letter {letter} in convention string.")
    # One single-axis rotation per (letter, angle) pair, composed by matmul.
    matrices = map(_axis_angle_rotation, convention, torch.unbind(euler_angles, -1))
    return functools.reduce(torch.matmul, matrices)
def _angle_from_tan(
    axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool
):
    """
    Extract the first or third Euler angle from the two members of
    the matrix which are positive constant times its sine and cosine.

    Args:
        axis: Axis label "X" or "Y or "Z" for the angle we are finding.
        other_axis: Axis label "X" or "Y or "Z" for the middle axis in the
            convention.
        data: Rotation matrices as tensor of shape (..., 3, 3).
        horizontal: Whether we are looking for the angle for the third axis,
            which means the relevant entries are in the same row of the
            rotation matrix. If not, they are in the same column.
        tait_bryan: Whether the first and third axes in the convention differ.

    Returns:
        Euler Angles in radians for each matrix in data as a tensor
        of shape (...).
    """
    # Per axis: indices of the (sin, cos) entries within the relevant row/col.
    i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis]
    if horizontal:
        i2, i1 = i1, i2
    # Cyclic ("even") axis pairs keep the natural sign of the tangent args.
    even = (axis + other_axis) in ["XY", "YZ", "ZX"]
    if horizontal == even:
        return torch.atan2(data[..., i1], data[..., i2])
    if tait_bryan:
        return torch.atan2(-data[..., i2], data[..., i1])
    return torch.atan2(data[..., i2], -data[..., i1])
def _index_from_letter(letter: str):
if letter == "X":
return 0
if letter == "Y":
return 1
if letter == "Z":
return 2
def matrix_to_euler_angles(matrix, convention: str):
    """
    Convert rotations given as rotation matrices to Euler angles in radians.

    Args:
        matrix: Rotation matrices as tensor of shape (..., 3, 3).
        convention: Convention string of three uppercase letters.

    Returns:
        Euler angles in radians as tensor of shape (..., 3).

    Raises:
        ValueError: for a malformed convention string or matrix shape.
    """
    if len(convention) != 3:
        raise ValueError("Convention must have 3 letters.")
    if convention[1] in (convention[0], convention[2]):
        raise ValueError(f"Invalid convention {convention}.")
    for letter in convention:
        if letter not in ("X", "Y", "Z"):
            raise ValueError(f"Invalid letter {letter} in convention string.")
    if matrix.size(-1) != 3 or matrix.size(-2) != 3:
        # Fixed: the original message contained a stray 'f' ("shape f{...}").
        raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.")
    i0 = _index_from_letter(convention[0])
    i2 = _index_from_letter(convention[2])
    # Tait-Bryan conventions (distinct first/third axes, e.g. "XYZ") take the
    # middle angle from asin; proper Euler conventions (e.g. "ZYZ") from acos.
    tait_bryan = i0 != i2
    if tait_bryan:
        central_angle = torch.asin(
            matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0)
        )
    else:
        central_angle = torch.acos(matrix[..., i0, i0])

    o = (
        _angle_from_tan(
            convention[0], convention[1], matrix[..., i2], False, tait_bryan
        ),
        central_angle,
        _angle_from_tan(
            convention[2], convention[1], matrix[..., i0, :], True, tait_bryan
        ),
    )
    return torch.stack(o, -1)
def random_quaternions(
    n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False
):
    """
    Generate random quaternions representing rotations,
    i.e. versors with nonnegative real part.

    Args:
        n: Number of quaternions in a batch to return.
        dtype: Type to return.
        device: Desired device of returned tensor. Default:
            uses the current device for the default tensor type.
        requires_grad: Whether the resulting tensor should have the gradient
            flag set.

    Returns:
        Quaternions as tensor of shape (N, 4).
    """
    o = torch.randn((n, 4), dtype=dtype, device=device, requires_grad=requires_grad)
    s = (o * o).sum(1)
    # Divide by +/- |q| so the result is unit length AND has real part >= 0
    # (the sign comes from the real component o[:, 0]).
    o = o / _copysign(torch.sqrt(s), o[:, 0])[:, None]
    return o
def random_rotations(
    n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False
):
    """
    Generate random rotations as 3x3 rotation matrices.

    Args:
        n: Number of rotation matrices in a batch to return.
        dtype: Type to return.
        device: Device of returned tensor. Default: if None,
            uses the current device for the default tensor type.
        requires_grad: Whether the resulting tensor should have the gradient
            flag set.

    Returns:
        Rotation matrices as tensor of shape (n, 3, 3).
    """
    # Draw random unit quaternions and convert them to matrices.
    quaternions = random_quaternions(
        n, dtype=dtype, device=device, requires_grad=requires_grad
    )
    return quaternion_to_matrix(quaternions)
def random_rotation(
    dtype: Optional[torch.dtype] = None, device=None, requires_grad=False
):
    """
    Generate a single random 3x3 rotation matrix.

    Args:
        dtype: Type to return
        device: Device of returned tensor. Default: if None,
            uses the current device for the default tensor type
        requires_grad: Whether the resulting tensor should have the gradient
            flag set

    Returns:
        Rotation matrix as tensor of shape (3, 3).
    """
    # Batch-of-one convenience wrapper around random_rotations.
    return random_rotations(1, dtype, device, requires_grad)[0]
def standardize_quaternion(quaternions):
    """
    Convert a unit quaternion to a standard form: one in which the real
    part is non negative.  (q and -q describe the same rotation, so the
    whole quaternion is negated wherever the real component is negative.)

    Args:
        quaternions: Quaternions with real part first,
            as tensor of shape (..., 4).

    Returns:
        Standardized quaternions as tensor of shape (..., 4).
    """
    negative_real = quaternions[..., 0:1] < 0
    return torch.where(negative_real, -quaternions, quaternions)
def quaternion_raw_multiply(a, b):
    """
    Multiply two quaternions (Hamilton product).
    Usual torch rules for broadcasting apply.

    Args:
        a: Quaternions as tensor of shape (..., 4), real part first.
        b: Quaternions as tensor of shape (..., 4), real part first.

    Returns:
        The product of a and b, a tensor of quaternions shape (..., 4).
    """
    w1, x1, y1, z1 = torch.unbind(a, -1)
    w2, x2, y2, z2 = torch.unbind(b, -1)
    return torch.stack(
        (
            w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2,
            w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2,
            w1 * y2 - x1 * z2 + y1 * w2 + z1 * x2,
            w1 * z2 + x1 * y2 - y1 * x2 + z1 * w2,
        ),
        -1,
    )
def quaternion_multiply(a, b):
    """
    Multiply two quaternions representing rotations, returning the quaternion
    representing their composition, i.e. the versor with nonnegative real part.
    Usual torch rules for broadcasting apply.

    Args:
        a: Quaternions as tensor of shape (..., 4), real part first.
        b: Quaternions as tensor of shape (..., 4), real part first.

    Returns:
        The product of a and b, a tensor of quaternions of shape (..., 4).
    """
    ab = quaternion_raw_multiply(a, b)
    # q and -q encode the same rotation; return the canonical representative.
    return standardize_quaternion(ab)
def quaternion_invert(quaternion):
    """
    Given a quaternion representing rotation, get the quaternion representing
    its inverse.  For a versor the inverse is the conjugate: the imaginary
    components are negated.

    Args:
        quaternion: Quaternions as tensor of shape (..., 4), with real part
            first, which must be versors (unit quaternions).

    Returns:
        The inverse, a tensor of quaternions of shape (..., 4).
    """
    conjugate_signs = quaternion.new_tensor([1, -1, -1, -1])
    return quaternion * conjugate_signs
def quaternion_apply(quaternion, point):
    """
    Apply the rotation given by a quaternion to a 3D point.
    Usual torch rules for broadcasting apply.

    Args:
        quaternion: Tensor of quaternions, real part first, of shape (..., 4).
        point: Tensor of 3D points of shape (..., 3).

    Returns:
        Tensor of rotated points of shape (..., 3).

    Raises:
        ValueError: if the last dimension of *point* is not 3.
    """
    if point.size(-1) != 3:
        # Fixed: the original message contained a stray 'f' ("3D, f{...}").
        raise ValueError(f"Points are not in 3D, {point.shape}.")
    # Embed the point as a pure quaternion (0, p) and conjugate: q * p * q^-1.
    real_parts = point.new_zeros(point.shape[:-1] + (1,))
    point_as_quaternion = torch.cat((real_parts, point), -1)
    out = quaternion_raw_multiply(
        quaternion_raw_multiply(quaternion, point_as_quaternion),
        quaternion_invert(quaternion),
    )
    return out[..., 1:]
def axis_angle_to_matrix(axis_angle):
    """
    Convert rotations given as axis/angle to rotation matrices.

    Args:
        axis_angle: Rotations given as a vector in axis angle form,
            as a tensor of shape (..., 3), where the magnitude is
            the angle turned anticlockwise in radians around the
            vector's direction.

    Returns:
        Rotation matrices as tensor of shape (..., 3, 3).
    """
    # Composed conversion: axis/angle -> quaternion -> matrix.
    return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle))
def matrix_to_axis_angle(matrix):
    """
    Convert rotations given as rotation matrices to axis/angle.

    Args:
        matrix: Rotation matrices as tensor of shape (..., 3, 3).

    Returns:
        Rotations given as a vector in axis angle form, as a tensor
        of shape (..., 3), where the magnitude is the angle
        turned anticlockwise in radians around the vector's
        direction.
    """
    # Composed conversion: matrix -> quaternion -> axis/angle.
    return quaternion_to_axis_angle(matrix_to_quaternion(matrix))
def axis_angle_to_quaternion(axis_angle):
    """
    Convert rotations given as axis/angle to quaternions.

    Args:
        axis_angle: Rotations given as a vector in axis angle form,
            as a tensor of shape (..., 3), where the magnitude is
            the angle turned anticlockwise in radians around the
            vector's direction.

    Returns:
        quaternions with real part first, as tensor of shape (..., 4).
    """
    angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True)
    half_angles = angles * 0.5
    eps = 1e-6
    small = angles.abs() < eps
    sin_half_over_angle = torch.empty_like(angles)
    sin_half_over_angle[~small] = (
        torch.sin(half_angles[~small]) / angles[~small]
    )
    # Near zero use the Taylor expansion: sin(x/2) ~ x/2 - (x/2)^3/6,
    # hence sin(x/2)/x ~ 1/2 - x^2/48 (avoids 0/0).
    sin_half_over_angle[small] = (
        0.5 - (angles[small] * angles[small]) / 48
    )
    return torch.cat(
        [torch.cos(half_angles), axis_angle * sin_half_over_angle], dim=-1
    )
def quaternion_to_axis_angle(quaternions):
    """
    Convert rotations given as quaternions to axis/angle.

    Args:
        quaternions: quaternions with real part first,
            as tensor of shape (..., 4).

    Returns:
        Rotations given as a vector in axis angle form, as a tensor
        of shape (..., 3), where the magnitude is the angle
        turned anticlockwise in radians around the vector's
        direction.
    """
    norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True)
    # atan2(|imag|, real) is robust even for non-normalised quaternions.
    half_angles = torch.atan2(norms, quaternions[..., :1])
    angles = 2 * half_angles
    eps = 1e-6
    small_angles = angles.abs() < eps
    sin_half_angles_over_angles = torch.empty_like(angles)
    sin_half_angles_over_angles[~small_angles] = (
        torch.sin(half_angles[~small_angles]) / angles[~small_angles]
    )
    # for x small, sin(x/2) is about x/2 - (x/2)^3/6
    # so sin(x/2)/x is about 1/2 - (x*x)/48
    sin_half_angles_over_angles[small_angles] = (
        0.5 - (angles[small_angles] * angles[small_angles]) / 48
    )
    return quaternions[..., 1:] / sin_half_angles_over_angles
def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor:
    """
    Converts 6D rotation representation by Zhou et al. [1] to rotation matrix
    using Gram--Schmidt orthogonalisation per Section B of [1].

    Args:
        d6: 6D rotation representation, of size (*, 6)

    Returns:
        batch of rotation matrices of size (*, 3, 3)

    [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H.
    On the Continuity of Rotation Representations in Neural Networks.
    IEEE Conference on Computer Vision and Pattern Recognition, 2019.
    Retrieved from http://arxiv.org/abs/1812.07035
    """
    a1 = d6[..., :3]
    a2 = d6[..., 3:]
    # First basis row: a1 normalised.
    b1 = F.normalize(a1, dim=-1)
    # Second row: a2 minus its projection onto b1, renormalised.
    projection = (b1 * a2).sum(-1, keepdim=True)
    b2 = F.normalize(a2 - projection * b1, dim=-1)
    # Third row completes the right-handed orthonormal frame.
    b3 = torch.cross(b1, b2, dim=-1)
    return torch.stack((b1, b2, b3), dim=-2)
def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor:
    """
    Converts rotation matrices to 6D rotation representation by Zhou et al. [1]
    by dropping the last row. Note that 6D representation is not unique.

    Args:
        matrix: batch of rotation matrices of size (*, 3, 3)

    Returns:
        6D rotation representation, of size (*, 6)

    [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H.
    On the Continuity of Rotation Representations in Neural Networks.
    IEEE Conference on Computer Vision and Pattern Recognition, 2019.
    Retrieved from http://arxiv.org/abs/1812.07035
    """
    batch_dims = matrix.size()[:-2]
    # First two rows, flattened; clone() detaches the view from its storage.
    top_two_rows = matrix[..., :2, :].clone()
    return top_two_rows.reshape(batch_dims + (6,))
# ---------------------------------------------------------------------------------
# Lie functions
# adapted from https://github.com/sarthaksharma13/DeepVO/blob/master/lieFunctions.py
# Log map for SO(3). Returns a 3-vector of so(3) exponential coordinates
# Actually, this does vee(SO3_log(R)), also denoted Log(R) in a few conventions
# Not to be confused with log(R), which simply returns an element on the tangent space
# Convert a rotation matrix to an axis-angle vector. Does vee(SO3_log(rot))
def rotMat_to_axisAngle(rot):
    """
    Convert a rotation matrix to an axis-angle vector: vee(SO3_log(rot)).

    Args:
        rot: 3x3 rotation matrix as a numpy array.

    Returns:
        List of 3 floats: so(3) exponential coordinates (axis * angle).
    """
    trace = rot[0, 0] + rot[1, 1] + rot[2, 2]
    # The trace of a rotation matrix lies in [-1, 3].  Clamp slightly inside
    # that range so arccos stays defined and sin(theta) below is non-zero.
    # Fixed: the original clipped the LOWER bound at 0.0, which silently
    # corrupted every rotation with angle > 120 degrees (trace < 0).
    trace = np.clip(trace, -0.99999, 2.99999)
    theta = np.arccos((trace - 1.0) / 2.0)
    # Log map: skew-symmetric part scaled by theta / (2 sin(theta)).
    omega_cross = (theta / (2 * np.sin(theta))) * (rot - np.transpose(rot))
    return [omega_cross[2, 1], omega_cross[0, 2], omega_cross[1, 0]]
# Map an axis-angle vector to a rotation matrix. Does SO3_exp(hat(omega))
def axisAngle_to_rotMat(omega):
    """
    Map an axis-angle vector to a rotation matrix via the Rodrigues formula:
    SO3_exp(hat(omega)).

    Args:
        omega: length-3 axis-angle vector (indexable floats); its norm is the
            rotation angle in radians.

    Returns:
        3x3 numpy rotation matrix; the identity for near-zero angles.
    """
    theta = np.sqrt(omega[0] * omega[0] + omega[1] * omega[1] + omega[2] * omega[2])
    if theta < 1e-8:
        # The formula divides by theta; exp(0) is simply the identity.
        return np.eye(3, 3)
    # hat(omega): skew-symmetric cross-product matrix of omega.
    omega_cross = np.stack(
        [0.0, -omega[2], omega[1], omega[2], 0.0, -omega[0], -omega[1], omega[0], 0.0]
    )
    omega_cross = np.reshape(omega_cross, [3, 3])

    A = np.sin(theta) / theta
    B = (1.0 - np.cos(theta)) / (theta ** 2)
    # Rodrigues: R = I + A * hat(w) + B * hat(w)^2.
    # (The original also computed C = (1 - A) / theta^2, which belongs to the
    # SE(3) left-Jacobian and was dead code here; removed.)
    omega_cross_square = np.matmul(omega_cross, omega_cross)
    R = np.eye(3, 3) + A * omega_cross + B * omega_cross_square
    return R
| 33.178451
| 152
| 0.61782
|
44b0cfe3848ff552fe398a86ccb35dd75072bd64
| 25,392
|
py
|
Python
|
swagger_client/api/alliance_api.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
swagger_client/api/alliance_api.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
swagger_client/api/alliance_api.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online # noqa: E501
OpenAPI spec version: 0.8.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class AllianceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
    def __init__(self, api_client=None):
        # Fall back to a default ApiClient when the caller does not supply one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def get_alliances(self, **kwargs):  # noqa: E501
        """List all alliances  # noqa: E501

        List all active player alliances --- This route is cached for up to 3600 seconds  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_alliances(async=True)
        >>> result = thread.get()

        :param async bool
        :param str datasource: The server name you would like data from
        :param str user_agent: Client identifier, takes precedence over headers
        :param str x_user_agent: Client identifier, takes precedence over User-Agent
        :return: list[int]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE(review): 'async' became a reserved word in Python 3.7; this
        # generated client predates that and targets older interpreters.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_alliances_with_http_info(**kwargs)  # noqa: E501
        else:
            # Synchronous path: unwrap the (data, status, headers) tuple.
            (data) = self.get_alliances_with_http_info(**kwargs)  # noqa: E501
            return data
    def get_alliances_with_http_info(self, **kwargs):  # noqa: E501
        """List all alliances  # noqa: E501

        List all active player alliances --- This route is cached for up to 3600 seconds  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_alliances_with_http_info(async=True)
        >>> result = thread.get()

        :param async bool
        :param str datasource: The server name you would like data from
        :param str user_agent: Client identifier, takes precedence over headers
        :param str x_user_agent: Client identifier, takes precedence over User-Agent
        :return: list[int]
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['datasource', 'user_agent', 'x_user_agent']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the endpoint does not document.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_alliances" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'datasource' in params:
            query_params.append(('datasource', params['datasource']))  # noqa: E501
        if 'user_agent' in params:
            query_params.append(('user_agent', params['user_agent']))  # noqa: E501

        header_params = {}
        if 'x_user_agent' in params:
            header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async=` as a keyword is a SyntaxError on Python 3.7+;
        # this generated client only runs on older interpreters.
        return self.api_client.call_api(
            '/v1/alliances/', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[int]',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_alliances_alliance_id(self, alliance_id, **kwargs):  # noqa: E501
    """Get alliance information  # noqa: E501

    Public information about an alliance --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` is a reserved word in Python 3.7+, so it must be
    supplied via dict unpacking rather than as a literal keyword):
    >>> thread = api.get_alliances_alliance_id(alliance_id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int alliance_id: An EVE alliance ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetAlliancesAllianceIdOk
             If the method is called asynchronously,
             returns the request thread.
    """
    # The plain variant always wants just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        # Async path: hand back the request thread untouched.
        return self.get_alliances_alliance_id_with_http_info(alliance_id, **kwargs)  # noqa: E501
    # Sync path: the _with_http_info variant already returns only the data.
    return self.get_alliances_alliance_id_with_http_info(alliance_id, **kwargs)  # noqa: E501
def get_alliances_alliance_id_with_http_info(self, alliance_id, **kwargs):  # noqa: E501
    """Get alliance information  # noqa: E501

    Public information about an alliance --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` became a reserved word in Python 3.7, so it can
    no longer be written as a literal keyword argument):
    >>> thread = api.get_alliances_alliance_id_with_http_info(alliance_id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int alliance_id: An EVE alliance ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetAlliancesAllianceIdOk
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: if ``alliance_id`` is missing or < 1
    """
    all_params = ['alliance_id', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() replaces six.iteritems; identical behaviour on Python 3
    # and the call no longer needs the six compatibility shim.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_alliances_alliance_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'alliance_id' is set
    if ('alliance_id' not in params or
            params['alliance_id'] is None):
        raise ValueError("Missing the required parameter `alliance_id` when calling `get_alliances_alliance_id`")  # noqa: E501

    if 'alliance_id' in params and params['alliance_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `alliance_id` when calling `get_alliances_alliance_id`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'alliance_id' in params:
        path_params['alliance_id'] = params['alliance_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # `async` is reserved in Python 3.7+, so it is forwarded via dict
    # unpacking; api_client still receives the same 'async' keyword.
    return self.api_client.call_api(
        '/v3/alliances/{alliance_id}/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GetAlliancesAllianceIdOk',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def get_alliances_alliance_id_corporations(self, alliance_id, **kwargs):  # noqa: E501
    """List alliance's corporations  # noqa: E501

    List all current member corporations of an alliance --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` is a reserved word in Python 3.7+, so it must be
    supplied via dict unpacking rather than as a literal keyword):
    >>> thread = api.get_alliances_alliance_id_corporations(alliance_id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int alliance_id: An EVE alliance ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[int]
             If the method is called asynchronously,
             returns the request thread.
    """
    # The plain variant always wants just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        # Async path: hand back the request thread untouched.
        return self.get_alliances_alliance_id_corporations_with_http_info(alliance_id, **kwargs)  # noqa: E501
    # Sync path: the _with_http_info variant already returns only the data.
    return self.get_alliances_alliance_id_corporations_with_http_info(alliance_id, **kwargs)  # noqa: E501
def get_alliances_alliance_id_corporations_with_http_info(self, alliance_id, **kwargs):  # noqa: E501
    """List alliance's corporations  # noqa: E501

    List all current member corporations of an alliance --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` became a reserved word in Python 3.7, so it can
    no longer be written as a literal keyword argument):
    >>> thread = api.get_alliances_alliance_id_corporations_with_http_info(alliance_id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int alliance_id: An EVE alliance ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[int]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: if ``alliance_id`` is missing or < 1
    """
    all_params = ['alliance_id', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() replaces six.iteritems; identical behaviour on Python 3
    # and the call no longer needs the six compatibility shim.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_alliances_alliance_id_corporations" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'alliance_id' is set
    if ('alliance_id' not in params or
            params['alliance_id'] is None):
        raise ValueError("Missing the required parameter `alliance_id` when calling `get_alliances_alliance_id_corporations`")  # noqa: E501

    if 'alliance_id' in params and params['alliance_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `alliance_id` when calling `get_alliances_alliance_id_corporations`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'alliance_id' in params:
        path_params['alliance_id'] = params['alliance_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # `async` is reserved in Python 3.7+, so it is forwarded via dict
    # unpacking; api_client still receives the same 'async' keyword.
    return self.api_client.call_api(
        '/v1/alliances/{alliance_id}/corporations/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[int]',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def get_alliances_alliance_id_icons(self, alliance_id, **kwargs):  # noqa: E501
    """Get alliance icon  # noqa: E501

    Get the icon urls for a alliance --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` is a reserved word in Python 3.7+, so it must be
    supplied via dict unpacking rather than as a literal keyword):
    >>> thread = api.get_alliances_alliance_id_icons(alliance_id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int alliance_id: An EVE alliance ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetAlliancesAllianceIdIconsOk
             If the method is called asynchronously,
             returns the request thread.
    """
    # The plain variant always wants just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        # Async path: hand back the request thread untouched.
        return self.get_alliances_alliance_id_icons_with_http_info(alliance_id, **kwargs)  # noqa: E501
    # Sync path: the _with_http_info variant already returns only the data.
    return self.get_alliances_alliance_id_icons_with_http_info(alliance_id, **kwargs)  # noqa: E501
def get_alliances_alliance_id_icons_with_http_info(self, alliance_id, **kwargs):  # noqa: E501
    """Get alliance icon  # noqa: E501

    Get the icon urls for a alliance --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` became a reserved word in Python 3.7, so it can
    no longer be written as a literal keyword argument):
    >>> thread = api.get_alliances_alliance_id_icons_with_http_info(alliance_id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int alliance_id: An EVE alliance ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetAlliancesAllianceIdIconsOk
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: if ``alliance_id`` is missing or < 1
    """
    all_params = ['alliance_id', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() replaces six.iteritems; identical behaviour on Python 3
    # and the call no longer needs the six compatibility shim.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_alliances_alliance_id_icons" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'alliance_id' is set
    if ('alliance_id' not in params or
            params['alliance_id'] is None):
        raise ValueError("Missing the required parameter `alliance_id` when calling `get_alliances_alliance_id_icons`")  # noqa: E501

    if 'alliance_id' in params and params['alliance_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `alliance_id` when calling `get_alliances_alliance_id_icons`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'alliance_id' in params:
        path_params['alliance_id'] = params['alliance_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # `async` is reserved in Python 3.7+, so it is forwarded via dict
    # unpacking; api_client still receives the same 'async' keyword.
    return self.api_client.call_api(
        '/v1/alliances/{alliance_id}/icons/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GetAlliancesAllianceIdIconsOk',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def get_alliances_names(self, alliance_ids, **kwargs):  # noqa: E501
    """Get alliance names  # noqa: E501

    Resolve a set of alliance IDs to alliance names --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` is a reserved word in Python 3.7+, so it must be
    supplied via dict unpacking rather than as a literal keyword):
    >>> thread = api.get_alliances_names(alliance_ids, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param list[int] alliance_ids: A comma separated list of alliance IDs (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetAlliancesNames200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # The plain variant always wants just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        # Async path: hand back the request thread untouched.
        return self.get_alliances_names_with_http_info(alliance_ids, **kwargs)  # noqa: E501
    # Sync path: the _with_http_info variant already returns only the data.
    return self.get_alliances_names_with_http_info(alliance_ids, **kwargs)  # noqa: E501
def get_alliances_names_with_http_info(self, alliance_ids, **kwargs):  # noqa: E501
    """Get alliance names  # noqa: E501

    Resolve a set of alliance IDs to alliance names --- This route is cached for up to 3600 seconds  # noqa: E501
    Synchronous by default; to run asynchronously pass the ``async`` option
    through kwargs (``async`` became a reserved word in Python 3.7, so it can
    no longer be written as a literal keyword argument):
    >>> thread = api.get_alliances_names_with_http_info(alliance_ids, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param list[int] alliance_ids: A comma separated list of alliance IDs (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetAlliancesNames200Ok]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: if ``alliance_ids`` is missing or has fewer than 1
        or more than 100 entries
    """
    all_params = ['alliance_ids', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() replaces six.iteritems; identical behaviour on Python 3
    # and the call no longer needs the six compatibility shim.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_alliances_names" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'alliance_ids' is set
    if ('alliance_ids' not in params or
            params['alliance_ids'] is None):
        raise ValueError("Missing the required parameter `alliance_ids` when calling `get_alliances_names`")  # noqa: E501

    # The route accepts between 1 and 100 IDs per request.
    if ('alliance_ids' in params and
            len(params['alliance_ids']) > 100):
        raise ValueError("Invalid value for parameter `alliance_ids` when calling `get_alliances_names`, number of items must be less than or equal to `100`")  # noqa: E501
    if ('alliance_ids' in params and
            len(params['alliance_ids']) < 1):
        raise ValueError("Invalid value for parameter `alliance_ids` when calling `get_alliances_names`, number of items must be greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}

    query_params = []
    if 'alliance_ids' in params:
        query_params.append(('alliance_ids', params['alliance_ids']))  # noqa: E501
        collection_formats['alliance_ids'] = 'csv'  # noqa: E501
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # `async` is reserved in Python 3.7+, so it is forwarded via dict
    # unpacking; api_client still receives the same 'async' keyword.
    return self.api_client.call_api(
        '/v2/alliances/names/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[GetAlliancesNames200Ok]',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
| 44.16
| 187
| 0.631301
|
a0463744e8bf0eaa9e6650f0b00b58e52abf5a62
| 764
|
py
|
Python
|
denoiser.py
|
rodrivaldivia/TPP
|
c9e158d663c34fc9a4fedf116d2930f4d9ea675b
|
[
"MIT"
] | 1
|
2020-01-11T17:58:45.000Z
|
2020-01-11T17:58:45.000Z
|
denoiser.py
|
rodrivaldivia/TPP
|
c9e158d663c34fc9a4fedf116d2930f4d9ea675b
|
[
"MIT"
] | null | null | null |
denoiser.py
|
rodrivaldivia/TPP
|
c9e158d663c34fc9a4fedf116d2930f4d9ea675b
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image, ImageEnhance
def load_train_img(fileName):
    """Open a TIFF frame, rescale its intensities by 1/256 and convert to greyscale.

    The 1/256 factor presumably maps 16-bit samples into the 8-bit range
    expected by the 'L' conversion — TODO confirm against the source images.
    """
    image = Image.open(fileName)
    return image.point(lambda value: value * (1. / 256)).convert('L')
def load_img(num):
    """Load frame *num* of the canal_1 sequence (zero-padded 3-digit index)."""
    frame = '%0*d' % (3, num)
    return load_train_img('Images/canal_1/s_C001T' + frame + '.tif')
def save_img(img, num):
    """Write *img* into the results directory under the canonical frame name."""
    frame = '%0*d' % (3, num)
    img.save('Images/results/s_C001T' + frame + '.tif')
# Temporal denoising pass: blend each frame with the average of its two
# neighbours after a mild contrast boost.
# for i in range(2,93):
for frame_idx in range(2, 94):
    previous_frame = load_img(frame_idx - 1)
    current_frame = load_img(frame_idx)
    next_frame = load_img(frame_idx + 1)

    # Average of the two neighbouring frames.
    neighbour_avg = Image.blend(next_frame, previous_frame, 0.5)
    boosted = ImageEnhance.Contrast(current_frame).enhance(1.2)
    # blend(a, b, 0.75) weights the neighbour average at 0.75, the boosted
    # current frame at 0.25.
    denoised = Image.blend(boosted, neighbour_avg, 0.75)
    save_img(denoised, frame_idx)
| 23.151515
| 54
| 0.689791
|
a4954502f262a460938cfd0fc0909d31e453805d
| 19,664
|
py
|
Python
|
test/test_python_dispatch.py
|
PaliC/pytorch
|
d1a5612a3ea501b83a80b9ecfd6b4dd5eb125181
|
[
"Intel"
] | 60,067
|
2017-01-18T17:21:31.000Z
|
2022-03-31T21:37:45.000Z
|
test/test_python_dispatch.py
|
Jam3/pytorch
|
33d8769c285b51922c378d11a90a442a28e06762
|
[
"Intel"
] | 66,955
|
2017-01-18T17:21:38.000Z
|
2022-03-31T23:56:11.000Z
|
test/test_python_dispatch.py
|
Jam3/pytorch
|
33d8769c285b51922c378d11a90a442a28e06762
|
[
"Intel"
] | 19,210
|
2017-01-18T17:45:04.000Z
|
2022-03-31T23:51:56.000Z
|
import torch
from torch.testing._internal.common_utils import TestCase, run_tests
from torch.utils._pytree import tree_map
from torch.utils._python_dispatch import enable_python_mode
from typing import Iterator, List
import logging
import contextlib
import itertools
# TODO: move this into library proper
@contextlib.contextmanager
def no_dispatch() -> Iterator[None]:
    """Disable __torch_dispatch__ interposition for the duration of the block."""
    disable_guard = torch._C._DisableTorchDispatch()
    try:
        yield
    finally:
        # Dropping the guard restores normal dispatch behaviour.
        del disable_guard
# How the chain of calls works for LoggingTensor:
# 1. Call torch.sin
# 2. Attempt __torch_function__. In LoggingTensor torch function is disabled so we bypass it entirely
# 3. Enter dispatcher, wind your way through Autograd
# 4. Hit Python dispatch key, call __torch_dispatch__
# TODO: TensorBase should work
class LoggingTensor(torch.Tensor):
    """Wrapper tensor subclass that logs every op routed through __torch_dispatch__."""
    # The wrapped, "real" tensor.
    elem: torch.Tensor

    __slots__ = ['elem']

    @staticmethod
    def __new__(cls, elem, *args, **kwargs):
        # The wrapping tensor (LoggingTensor) shouldn't hold any memory for
        # the class in question, but it should still advertise the same
        # device/dtype/layout as the tensor it wraps.
        wrapper = torch.Tensor._make_wrapper_subclass(
            cls, elem.size(),
            # TODO: clone strides and storage aliasing
            dtype=elem.dtype, layout=elem.layout,
            device=elem.device, requires_grad=elem.requires_grad
        )
        # ...the real tensor is held as an element on the tensor.
        wrapper.elem = elem
        return wrapper

    def __repr__(self):
        return f"LoggingTensor({self.elem})"

    @classmethod
    def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
        def strip(e):
            # Peel the wrapper off before handing values to the real op.
            return e.elem if isinstance(e, LoggingTensor) else e

        def rewrap(e):
            # Re-wrap every tensor output so logging continues downstream.
            return LoggingTensor(e) if isinstance(e, torch.Tensor) else e

        # no_dispatch is only needed if you use enable_python_mode.
        # It prevents infinite recursion.
        with no_dispatch():
            rs = tree_map(rewrap, func(*tree_map(strip, args), **tree_map(strip, kwargs)))
        logging.getLogger("LoggingTensor").info(f"{func.__module__}.{func.__name__}", args, kwargs, rs)
        return rs
# https://stackoverflow.com/questions/36408496/python-logging-handler-to-append-to-list
class LoggingTensorHandler(logging.Handler):
    """Formats LoggingTensor log records into strings appended to a shared list.

    Tensors are rendered as short ids ($0, $1, ...) assigned in first-seen order.
    """
    log_list: List[str]
    next_shortid: int

    def __init__(self, log_list: List[str]) -> None:
        logging.Handler.__init__(self)
        self.log_list = log_list
        self.next_shortid = 0

    # WARNING: not deterministic over multiple threads, this matters for
    # autograd
    def _shortid(self, o: object) -> int:
        # Lazily stamp each object with the next id on first encounter.
        if not hasattr(o, '_shortid'):
            o._shortid = self.next_shortid
            self.next_shortid += 1
        return o._shortid

    def _fmt(self, a: object) -> str:
        if isinstance(a, LoggingTensor):
            return f'${self._shortid(a)}'
        return repr(a)

    def emit(self, record):
        # record.args carries (positional args, kwargs, returned value(s)).
        positional, keyword, returned = record.args
        fmt_args = ", ".join(itertools.chain(
            (self._fmt(a) for a in positional),
            (f"{k}={self._fmt(v)}" for k, v in keyword.items())
        ))
        if isinstance(returned, (list, tuple)):
            fmt_rets = ", ".join(self._fmt(a) for a in returned)
        else:
            fmt_rets = self._fmt(returned)
        self.log_list.append(f'{fmt_rets} = {record.msg}({fmt_args})')
def log_input(name: str, var: object) -> None:
    """Record *var* as a named input in the LoggingTensor log stream."""
    logger = logging.getLogger("LoggingTensor")
    logger.info("input", (name,), {}, (var,))
@contextlib.contextmanager
def capture_logs() -> Iterator[List[str]]:
    """Collect LoggingTensor log lines for the duration of the with-block."""
    logger = logging.getLogger("LoggingTensor")
    captured: List[str] = []
    handler = LoggingTensorHandler(captured)
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    # Keep records out of any root-logger handlers.
    logger.propagate = False
    try:
        yield captured
    finally:
        logger.removeHandler(handler)
class TestPythonDispatch(TestCase):
    """Tests for __torch_dispatch__ interposition and enable_python_mode."""

    def test_basic(self) -> None:
        """Multiplication and autograd through a LoggingTensor produce the expected op log."""
        with capture_logs() as logs:
            x = LoggingTensor(torch.tensor([3.0], requires_grad=True))
            log_input("x", x)
            y = x * x
            saved_x = y.grad_fn._saved_self
            grad_y = LoggingTensor(torch.tensor([1.0]))
            log_input("grad_y", grad_y)
            g, = torch.autograd.grad((y,), (x,), (grad_y,))

        self.assertEqual(g.elem, torch.tensor([6.0]))
        with torch.no_grad():
            self.assertEqual(saved_x, x)
            self.assertEqual(saved_x._version, x._version)
            x.add_(2)
            self.assertEqual(saved_x, x)
            # TODO: figure out why broken
            # self.assertEqual(saved_x._version, x._version)
        self.assertExpectedInline('\n'.join(logs), '''\
$0 = input('x')
$1 = torch._ops.aten.mul($0, $0)
$2 = input('grad_y')
$3 = torch._ops.aten.mul($2, $0)
$4 = torch._ops.aten.mul($2, $0)
$5 = torch._ops.aten.add($4, $3)''')

    def test_out(self) -> None:
        """out= variants dispatch through __torch_dispatch__ and log the out argument."""
        with capture_logs() as logs:
            x = LoggingTensor(torch.ones(1))
            y = LoggingTensor(torch.zeros(1))
            log_input("x", x)
            log_input("y", y)
            torch.abs(x, out=y)

        self.assertEqual(y.elem, torch.ones(1))
        # TODO: arguably this shouldn't pass and we should complain
        # that out isn't a kwarg
        self.assertExpectedInline('\n'.join(logs), '''\
$0 = input('x')
$1 = input('y')
$2 = torch._ops.aten.abs($0, out=$1)''')

    def test_kwarg_only(self) -> None:
        """Defaulted kwarg-only arguments are omitted from the dispatch log."""
        with capture_logs() as logs:
            x = LoggingTensor(torch.ones(1))
            y = LoggingTensor(torch.ones(1, 1))
            z = LoggingTensor(torch.ones(1))
            log_input("x", x)
            log_input("y", y)
            log_input("z", z)
            torch.addmv(x, y, z)
            torch.addmv(x, y, z, beta=1)
            torch.addmv(x, y, z, beta=2)
            torch.addmv(x, y, z, alpha=2)
            torch.addmv(x, y, z, beta=2, alpha=2)

        # The expectation is that beta/alpha don't show up when they're
        # defaulted.  This is even if the user explicitly specified it.
        self.assertExpectedInline('\n'.join(logs), '''\
$0 = input('x')
$1 = input('y')
$2 = input('z')
$3 = torch._ops.aten.addmv($0, $1, $2)
$4 = torch._ops.aten.addmv($0, $1, $2)
$5 = torch._ops.aten.addmv($0, $1, $2, beta=2)
$6 = torch._ops.aten.addmv($0, $1, $2, alpha=2)
$7 = torch._ops.aten.addmv($0, $1, $2, beta=2, alpha=2)''')

    def test_kwarg_only_and_positional_default(self) -> None:
        """A defaulted positional arg is omitted even when a later kwarg is set."""
        with capture_logs() as logs:
            x = LoggingTensor(torch.ones(1))
            y = LoggingTensor(torch.ones(1))
            log_input("x", x)
            log_input("y", y)
            torch.ops.aten.kl_div(x, y)
            torch.ops.aten.kl_div(x, y, 2)
            torch.ops.aten.kl_div(x, y, log_target=True)
            torch.ops.aten.kl_div(x, y, 2, log_target=True)

        # What we are testing here is that we omit reduction
        # if it is defaulted, even if a kwarg is set
        self.assertExpectedInline('\n'.join(logs), '''\
$0 = input('x')
$1 = input('y')
$2 = torch._ops.aten.kl_div($0, $1)
$3 = torch._ops.aten.kl_div($0, $1, 2)
$4 = torch._ops.aten.kl_div($0, $1, log_target=True)
$5 = torch._ops.aten.kl_div($0, $1, 2, log_target=True)''')

    def test_list_ret(self) -> None:
        """__torch_dispatch__ may return any sequence type for multi-output ops."""
        # test all sequence types are permissible returns
        for list_type in (list, tuple):
            class A(torch._C._TensorBase):
                @staticmethod
                def __new__(cls, elem):
                    return torch.Tensor._make_subclass(cls, elem, elem.requires_grad)

                @classmethod
                def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                    if func == torch.ops.aten.split:
                        with no_dispatch():
                            return list_type(torch.split(*args))
                    else:
                        raise AssertionError(f"unrecognized func: {func}")

            self.assertEqual(
                torch.split(A(torch.tensor([0, 1])), 2),
                torch.split(torch.tensor([0, 1]), 2)
            )

    def test_invalid_ret(self) -> None:
        """A non-Tensor return from __torch_dispatch__ raises a clear error."""
        # test invalid return gets reasonable error message
        class A(torch._C._TensorBase):
            @staticmethod
            def __new__(cls, elem):
                return torch.Tensor._make_subclass(cls, elem, elem.requires_grad)

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                return "arf"

        # Wobbles depending on NDEBUG mode of pybind11
        # assertRaisesRegex replaces the deprecated assertRaisesRegexp alias
        # (removed in Python 3.12); behaviour is identical.
        self.assertRaisesRegex(
            RuntimeError, "Unable to cast", lambda: A(torch.zeros(1)).neg(),
        )
        self.assertExpectedRaisesInline(
            RuntimeError, lambda: A(torch.zeros(1)).detach(),
            """detach returned invalid type str, expected Tensor"""
        )

    def test_metadata_change_not_allowed(self) -> None:
        """Resizing the .data view of a wrapper subclass is rejected."""
        x = LoggingTensor(torch.ones(1))
        y = x.data
        self.assertIsInstance(y, LoggingTensor)
        self.assertRaises(RuntimeError, lambda: y.resize_(4))

    def test_storage(self) -> None:
        """Accessing storage of a wrapper subclass raises rather than crashing."""
        # For now, just make sure it doesn't crash.  Ideally, we should
        # return some virtual storage that is safe to work with
        x = LoggingTensor(torch.ones(1))
        self.assertRaises(RuntimeError, lambda: x.storage())

    def test_make_wrapper_subclass_noalloc(self) -> None:
        """Wrapper subclasses advertise a size without allocating backing memory."""
        # This is ludicrously big (8TB) and this should pass because wrapper
        # subclasses don't allocate
        torch.Tensor._make_wrapper_subclass(LoggingTensor, (1000000000000,))

    def test_version(self) -> None:
        """Version counter bumps via detach() but not via the .data alias."""
        x = LoggingTensor(torch.ones(1))
        prev_vc = x._version
        x.detach().add_(2)
        cur_vc = x._version
        self.assertNotEqual(prev_vc, cur_vc)
        x.data.add_(2)
        self.assertEqual(cur_vc, x._version)

    def test_subclass_priority(self) -> None:
        """The most-derived subclass's __torch_dispatch__ wins on mixed-type ops."""
        class ErrorA(RuntimeError):
            pass

        class ErrorB(RuntimeError):
            pass

        # The big tests for code coverage are test_precedence_semantics in
        # test_overrides.py; this is just to make sure it is wired up at all
        # correctly for __torch_dispatch__
        class A(torch.Tensor):
            @staticmethod
            def __new__(cls, elem):
                return torch.Tensor._make_subclass(cls, elem, elem.requires_grad)

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                raise ErrorA

        class B(A):
            @staticmethod
            def __new__(cls, elem):
                return torch.Tensor._make_subclass(cls, elem, elem.requires_grad)

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                raise ErrorB

        self.assertRaises(ErrorA, lambda: torch.add(A(torch.empty(1)), A(torch.empty(1))))
        self.assertRaises(ErrorB, lambda: torch.add(A(torch.empty(1)), B(torch.empty(1))))
        self.assertRaises(ErrorB, lambda: torch.add(B(torch.empty(1)), A(torch.empty(1))))
        self.assertRaises(ErrorB, lambda: torch.add(B(torch.empty(1)), B(torch.empty(1))))

    def test_format(self) -> None:
        """str/repr/f-string all render the wrapper's custom __repr__."""
        x = LoggingTensor(torch.ones(1))
        s1 = str(x)
        s2 = repr(x)
        s3 = f"{x}"
        self.assertExpectedInline(s1, """LoggingTensor(tensor([1.]))""")
        self.assertEqual(s1, s2)
        self.assertEqual(s1, s3)

    def test_custom_autograd(self) -> None:
        """Custom autograd.Function forward/backward interoperate with LoggingTensor."""
        escape = [None]

        class Square(torch.autograd.Function):
            @staticmethod
            def forward(ctx, x):
                y = x ** 2
                ctx.save_for_backward(x)
                return y

            @staticmethod
            def backward(ctx, grad_output):
                assert isinstance(grad_output, LoggingTensor)
                x, = ctx.saved_tensors
                assert isinstance(x, LoggingTensor)
                escape[0] = x
                return grad_output * 2 * x

        with capture_logs() as logs:
            x = LoggingTensor(torch.ones(1, requires_grad=True))
            log_input("x", x)
            x.grad = LoggingTensor(torch.zeros(1))
            log_input("x.grad", x.grad)
            y = Square.apply(x)
            grad_output = LoggingTensor(torch.ones(1))
            log_input("grad_output", grad_output)
            y.backward(grad_output)

        with torch.no_grad():
            self.assertEqual(escape[0], x)
            self.assertEqual(escape[0]._version, x._version)
            # TODO: figure out why x.requires_grad = False doesn't
            # trigger an error for LoggingTensor
            x.add_(2)
            self.assertEqual(escape[0], x)
            # TODO: figure out why this is broken
            # self.assertEqual(escape[0]._version, x._version)

        self.assertExpectedInline('\n'.join(logs), '''\
$0 = input('x')
$1 = input('x.grad')
$2 = torch._ops.aten.pow($0, 2)
$3 = input('grad_output')
$4 = torch._ops.aten.mul($3, tensor(2))
$5 = torch._ops.aten.mul($4, $0)
$6 = torch._ops.aten.add_($1, $5)''')

    def test_subclass_creation(self):
        """Re-subclassing a tensor already bound to a Python subclass object errors cleanly."""
        # Make sure these statements runs without error
        # In particular checking that when internal detach returns
        # subclasses, these are cleanly overwritten.
        class Foo(torch.Tensor):
            pass

        err_msg = "subclass Foo but.*already associated to a python object of type LoggingTensor"
        with self.assertRaisesRegex(RuntimeError, err_msg):
            a = torch.Tensor._make_subclass(Foo, LoggingTensor(torch.rand(2)))
        with self.assertRaisesRegex(RuntimeError, err_msg):
            b = LoggingTensor(torch.rand(2)).as_subclass(Foo)
        with self.assertRaisesRegex(RuntimeError, err_msg):
            Foo(LoggingTensor(torch.rand(2)))

        with self.assertRaisesRegex(TypeError, "Foo must define __torch_dispatch__"):
            torch.Tensor._make_wrapper_subclass(Foo, (2, 2))

    def test_new_ones(self) -> None:
        """new_ones preserves the __torch_dispatch__ subclass of its result."""
        class MyTensor(torch.Tensor):
            __torch_function__ = torch._C._disabled_torch_function_impl

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                return MyTensor(3)

        self.assertEqual(type(MyTensor(2).new_ones(3)), MyTensor)

    def test_like(self) -> None:
        """*_like factory functions preserve the __torch_dispatch__ subclass."""
        class MyTensor(torch.Tensor):
            __torch_function__ = torch._C._disabled_torch_function_impl

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                return MyTensor(3)

        for f in ["empty", "ones", "rand", "randn", "zeros"]:
            f_name = f + "_like"
            self.assertEqual(type(getattr(torch, f_name)(MyTensor(2))), MyTensor)

        self.assertEqual(type(torch.full_like(MyTensor(2), 1.)), MyTensor)
        self.assertEqual(type(torch.randint_like(MyTensor(2), high=3)), MyTensor)

    def test_enable_python_mode_error(self) -> None:
        """enable_python_mode rejects types without __torch_dispatch__ and instances."""
        with self.assertRaisesRegex(ValueError, "__torch_dispatch__"):
            with enable_python_mode(torch.Tensor):
                pass
        z = LoggingTensor(torch.empty([]))
        with self.assertRaisesRegex(ValueError, "must be the type"):
            with enable_python_mode(z):
                pass

    def test_enable_python_mode_basic(self) -> None:
        """Factory calls inside enable_python_mode yield the mode's subclass."""
        with enable_python_mode(LoggingTensor):
            z = torch.empty([])
            self.assertTrue(isinstance(z, LoggingTensor))

    def test_enable_python_mode_unrelated_tensors(self) -> None:
        """Ops on plain tensors still route through the active python mode."""
        x = torch.randn([])
        y = torch.randn([])
        with enable_python_mode(LoggingTensor):
            z = x + y
            self.assertTrue(isinstance(z, LoggingTensor))

    def test_enable_python_mode_subclass_priority(self) -> None:
        """The subclass's dispatch outranks the active mode's when more derived."""
        class ErrorA(RuntimeError):
            pass

        class ErrorB(RuntimeError):
            pass

        class A(torch.Tensor):
            @staticmethod
            def __new__(cls, elem):
                return torch.Tensor._make_subclass(cls, elem, elem.requires_grad)

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                raise ErrorA

        class B(A):
            @staticmethod
            def __new__(cls, elem):
                return torch.Tensor._make_subclass(cls, elem, elem.requires_grad)

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                raise ErrorB

        a = A(torch.empty(1))
        b = B(torch.empty(1))
        with self.assertRaises(ErrorA):
            a + a

        # B has precedence over A due to the subclass relationship
        with self.assertRaises(ErrorB):
            with enable_python_mode(A):
                b + b
        with self.assertRaises(ErrorB):
            with enable_python_mode(B):
                a + a
        with self.assertRaises(ErrorB):
            with enable_python_mode(B):
                a + b

    def test_enable_python_mode_respects_no_dispatch(self) -> None:
        """no_dispatch() suppresses the active python mode."""
        with enable_python_mode(LoggingTensor):
            z = torch.ones([2, 3])
            self.assertTrue(isinstance(z, LoggingTensor))
            with no_dispatch():
                expected = torch.ones([2, 3])
                self.assertEqual(z.elem, expected)

    def test_nested_enable_python_mode(self) -> None:
        """Nesting enable_python_mode is rejected."""
        with self.assertRaisesRegex(RuntimeError, "has already been set"):
            with enable_python_mode(LoggingTensor):
                with enable_python_mode(LoggingTensor):
                    pass

    def test_tolist_numpy_with_python_mode(self) -> None:
        """tolist()/numpy() on wrapper subclasses raise informative errors."""
        x = LoggingTensor(torch.tensor([2.0, 3.0]))
        with self.assertRaisesRegex(RuntimeError, "is not supported for tensor subclasses."):
            x.tolist()
        with self.assertRaisesRegex(RuntimeError, "is not supported for tensor subclasses."):
            x.numpy()
        with self.assertRaises(AssertionError):
            self.assertEqual(x, None)

    def test_enable_python_mode_subclass_autograd_device_check(self) -> None:
        """Autograd works for a non-wrapper subclass whose advertised device differs."""
        class NonWrapperSublass(torch.Tensor):
            elem: torch.Tensor

            __slots__ = ['elem']

            @staticmethod
            def __new__(cls, elem, *args, **kwargs):
                # Wrong device here!
                r = torch.Tensor._make_subclass(cls, elem.to("meta"), elem.requires_grad)
                # ...the real tensor is held as an element on the tensor.
                r.elem = elem
                return r

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                def unwrap(e):
                    return e.elem if isinstance(e, NonWrapperSublass) else e

                def wrap(e):
                    return NonWrapperSublass(e) if isinstance(e, torch.Tensor) else e

                # no_dispatch is only needed if you use enable_python_mode.
                # It prevents infinite recursion.
                with no_dispatch():
                    rs = tree_map(wrap, func(*tree_map(unwrap, args), **tree_map(unwrap, kwargs)))
                logging.getLogger("NonWrapperSublass").info(f"{func.__module__}.{func.__name__}", args, kwargs, rs)
                return rs

        x = NonWrapperSublass(torch.tensor([3.0, 4.0], requires_grad=True))
        y = torch.randn(2, requires_grad=True)
        z = x * y
        self.assertIsInstance(z, NonWrapperSublass)
        z.sum().backward(torch.tensor(1))
        self.assertEqual(x.grad, y)
        self.assertEqual(y.grad, x)
if __name__ == '__main__':
run_tests()
| 37.38403
| 115
| 0.596928
|
9b7eacbed08c76a22d14a2b1abaf23e1bc3fc951
| 2,208
|
py
|
Python
|
tests/integration/internal/test_cache.py
|
Sage-Bionetworks/spccore
|
c63a88ef472b83be11594b820a072f6d79080a73
|
[
"Apache-2.0"
] | 1
|
2019-06-13T20:47:59.000Z
|
2019-06-13T20:47:59.000Z
|
tests/integration/internal/test_cache.py
|
Sage-Bionetworks/spccore
|
c63a88ef472b83be11594b820a072f6d79080a73
|
[
"Apache-2.0"
] | 12
|
2019-06-13T23:32:59.000Z
|
2019-08-27T01:24:57.000Z
|
tests/integration/internal/test_cache.py
|
Sage-Bionetworks/spccore
|
c63a88ef472b83be11594b820a072f6d79080a73
|
[
"Apache-2.0"
] | 3
|
2019-06-13T20:50:01.000Z
|
2019-08-29T19:34:31.000Z
|
from spccore.internal.cache import *
def test_setup(cache, file_handle_id, file_path):
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
file_path = normalize_path(file_path)
assert file_path not in exist_file_paths
def test_download_default_location_empty_cache(cache, file_handle_id, file_path):
download_dir = cache.get_cache_dir(file_handle_id)
# assume that we downloaded the file
os.makedirs(download_dir)
download_path = normalize_path(shutil.copy(file_path, download_dir))
cache.register(file_handle_id, download_path)
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
assert download_path in exist_file_paths
def test_download_default_location_not_empty_cache(cache, file_handle_id, file_path):
cache.register(file_handle_id, file_path)
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
download_dir = cache.get_cache_dir(file_handle_id)
download_path = normalize_path(shutil.copy(file_path, download_dir))
assert download_path not in exist_file_paths
cache.register(file_handle_id, download_path)
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
assert download_path in exist_file_paths
def test_after_modified(cache, file_handle_id, file_path):
cache.register(file_handle_id, file_path)
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
assert normalize_path(file_path) in exist_file_paths
time.sleep(1)
with open(file_path, 'w') as f:
f.write("some other text")
time.sleep(1)
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
assert len(exist_file_paths) == 0
def test_purge(cache, file_handle_id, file_path):
cache.register(file_handle_id, file_path)
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
assert normalize_path(file_path) in exist_file_paths
cache.purge(from_epoch_time_to_datetime(time.time()))
exist_file_paths = cache.get_all_unmodified_cached_file_paths(file_handle_id)
assert normalize_path(file_path) not in exist_file_paths
| 37.423729
| 85
| 0.806159
|
78d8609ad1eb50a393467ff76ac367831a839562
| 3,677
|
py
|
Python
|
selfdrive/car/hyundai/hyundaican.py
|
DYI-Hi/First
|
a79e53a98f424b54f078c617178493d643af71e0
|
[
"MIT"
] | null | null | null |
selfdrive/car/hyundai/hyundaican.py
|
DYI-Hi/First
|
a79e53a98f424b54f078c617178493d643af71e0
|
[
"MIT"
] | null | null | null |
selfdrive/car/hyundai/hyundaican.py
|
DYI-Hi/First
|
a79e53a98f424b54f078c617178493d643af71e0
|
[
"MIT"
] | null | null | null |
import crcmod
import copy
from common.params import Params
from selfdrive.car.hyundai.values import CAR, CHECKSUM
hyundai_checksum = crcmod.mkCrcFun(0x11D, initCrc=0xFD, rev=False, xorOut=0xdf)
def create_lkas11(packer, frame, car_fingerprint, apply_steer, steer_req,
lkas11, sys_warning, sys_state, CC, enabled, bus):
values = copy.deepcopy( lkas11 )
values["CF_Lkas_LdwsSysState"] = 3 if enabled else 1
#values["CF_Lkas_LdwsSysState"] = sys_state
values["CF_Lkas_SysWarning"] = 3 if sys_warning else 0
#values["CF_Lkas_LdwsLHWarning"] = left_lane_depart
#values["CF_Lkas_LdwsRHWarning"] = right_lane_depart
values["CR_Lkas_StrToqReq"] = apply_steer
values["CF_Lkas_ActToi"] = steer_req
values["CF_Lkas_ToiFlt"] = 0
values["CF_Lkas_MsgCount"] = frame % 0x10
values["CF_Lkas_Chksum"] = 0
if car_fingerprint == CAR.GENESIS: # 제네시스DH 인게이지 목표 작업중
values["CF_Lkas_Bca_R"] = 2 # 제네시스DH 계열등에서
values["CF_Lkas_SysWarning"] = lkas11["CF_Lkas_SysWarning"]
if car_fingerprint in [CAR.PALISADE, CAR.SANTAFE, CAR.KONA_EV]:
values["CF_Lkas_Bca_R"] = int(CC.hudControl.leftLaneVisible) + (int(CC.hudControl.rightLaneVisible) << 1)
values["CF_Lkas_LdwsOpt_USM"] = 2
# FcwOpt_USM 5 = Orange blinking car + lanes
# FcwOpt_USM 4 = Orange car + lanes
# FcwOpt_USM 3 = Green blinking car + lanes
# FcwOpt_USM 2 = Green car + lanes
# FcwOpt_USM 1 = White car + lanes
# FcwOpt_USM 0 = No car + lanes
values["CF_Lkas_FcwOpt_USM"] = 2 if enabled else 1
# SysWarning 4 = keep hands on wheel
# SysWarning 5 = keep hands on wheel (red)
# SysWarning 6 = keep hands on wheel (red) + beep
# Note: the warning is hidden while the blinkers are on
values["CF_Lkas_SysWarning"] = 4 if sys_warning else 0
params = Params()
ldws_car_fix = params.get("LdwsCarFix", encoding='utf8') == "1"
if ldws_car_fix:
values["CF_Lkas_LdwsOpt_USM"] = 3
dat = packer.make_can_msg("LKAS11", 0, values)[2]
if car_fingerprint in CHECKSUM["crc8"]:
# CRC Checksum as seen on 2019 Hyundai Santa Fe
dat = dat[:6] + dat[7:8]
checksum = hyundai_checksum(dat)
elif car_fingerprint in CHECKSUM["6B"]:
# Checksum of first 6 Bytes, as seen on 2018 Kia Sorento
checksum = sum(dat[:6]) % 256
else:
# Checksum of first 6 Bytes and last Byte as seen on 2018 Kia Stinger
checksum = (sum(dat[:6]) + dat[7]) % 256
values["CF_Lkas_Chksum"] = checksum
return packer.make_can_msg("LKAS11", bus, values)
def create_clu11(packer, frame, bus, clu11, button, speed = None):
values = copy.deepcopy( clu11 )
if speed != None:
values["CF_Clu_Vanz"] = speed
values["CF_Clu_CruiseSwState"] = button
values["CF_Clu_AliveCnt1"] = frame % 0x10
return packer.make_can_msg("CLU11", bus, values)
def create_mdps12(packer, frame, mdps12):
values = copy.deepcopy(mdps12) # values = mdps12
values["CF_Mdps_ToiActive"] = 0
values["CF_Mdps_ToiUnavail"] = 1
values["CF_Mdps_MsgCount2"] = frame % 0x100
values["CF_Mdps_Chksum2"] = 0
dat = packer.make_can_msg("MDPS12", 2, values)[2]
checksum = sum(dat) % 256
values["CF_Mdps_Chksum2"] = checksum
return packer.make_can_msg("MDPS12", 2, values)
def create_lfa_mfa(packer, frame, enabled):
values = {
"ACTIVE": enabled,
}
# ACTIVE 1 = Green steering wheel icon
# LFA_USM 2 & 3 = LFA cancelled, fast loud beeping
# LFA_USM 0 & 1 = No mesage
# LFA_SysWarning 1 = "Switching to HDA", short beep
# LFA_SysWarning 2 = "Switching to Smart Cruise control", short beep
# LFA_SysWarning 3 = LFA error
# ACTIVE2: nothing
# HDA_USM: nothing
return packer.make_can_msg("LFAHDA_MFC", 0, values)
| 34.364486
| 109
| 0.699483
|
6cfdaa48475cdbc9517a8bf0051c53aad5500508
| 10,118
|
py
|
Python
|
EHR_Only/GBT/Comp_D_SMOTE_GBT_EHR.py
|
shreyaskar123/EHR-Discontinuity
|
8d2becfd784b9cbe697f8308d60023701971ef5d
|
[
"MIT"
] | null | null | null |
EHR_Only/GBT/Comp_D_SMOTE_GBT_EHR.py
|
shreyaskar123/EHR-Discontinuity
|
8d2becfd784b9cbe697f8308d60023701971ef5d
|
[
"MIT"
] | null | null | null |
EHR_Only/GBT/Comp_D_SMOTE_GBT_EHR.py
|
shreyaskar123/EHR-Discontinuity
|
8d2becfd784b9cbe697f8308d60023701971ef5d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
medicare = pd.read_csv("/netapp2/home/se197/data/CMS/Data/medicare.csv")
train_set = medicare[medicare.Hospital != 'BWH'] # MGH; n = 204014
validation_set = medicare[medicare.Hospital == 'BWH'] # BWH and Neither; n = 115726
import numpy as np
fifty_perc_EHR_cont = np.percentile(medicare['Cal_MPEC_R0'],50)
train_set_high = train_set[train_set.Cal_MPEC_R0 >= fifty_perc_EHR_cont]
train_set_low= train_set[train_set.Cal_MPEC_R0 < fifty_perc_EHR_cont]
validation_set_high = validation_set[validation_set.Cal_MPEC_R0 >= fifty_perc_EHR_cont]
validation_set_low = validation_set[validation_set.Cal_MPEC_R0 < fifty_perc_EHR_cont]
predictor_variable = [
'Co_CAD_R0', 'Co_Embolism_R0', 'Co_DVT_R0', 'Co_PE_R0', 'Co_AFib_R0',
'Co_Hypertension_R0', 'Co_Hyperlipidemia_R0', 'Co_Atherosclerosis_R0',
'Co_HF_R0', 'Co_HemoStroke_R0', 'Co_IscheStroke_R0', 'Co_OthStroke_R0',
'Co_TIA_R0', 'Co_COPD_R0', 'Co_Asthma_R0', 'Co_Pneumonia_R0', 'Co_Alcoholabuse_R0',
'Co_Drugabuse_R0', 'Co_Epilepsy_R0', 'Co_Cancer_R0', 'Co_MorbidObesity_R0',
'Co_Dementia_R0', 'Co_Depression_R0', 'Co_Bipolar_R0', 'Co_Psychosis_R0',
'Co_Personalitydisorder_R0', 'Co_Adjustmentdisorder_R0', 'Co_Anxiety_R0',
'Co_Generalizedanxiety_R0', 'Co_OldMI_R0', 'Co_AcuteMI_R0', 'Co_PUD_R0',
'Co_UpperGIbleed_R0', 'Co_LowerGIbleed_R0', 'Co_Urogenitalbleed_R0',
'Co_Othbleed_R0', 'Co_PVD_R0', 'Co_LiverDisease_R0', 'Co_MRI_R0',
'Co_ESRD_R0', 'Co_Obesity_R0', 'Co_Sepsis_R0', 'Co_Osteoarthritis_R0',
'Co_RA_R0', 'Co_NeuroPain_R0', 'Co_NeckPain_R0', 'Co_OthArthritis_R0',
'Co_Osteoporosis_R0', 'Co_Fibromyalgia_R0', 'Co_Migraine_R0', 'Co_Headache_R0',
'Co_OthPain_R0', 'Co_GeneralizedPain_R0', 'Co_PainDisorder_R0',
'Co_Falls_R0', 'Co_CoagulationDisorder_R0', 'Co_WhiteBloodCell_R0', 'Co_Parkinson_R0',
'Co_Anemia_R0', 'Co_UrinaryIncontinence_R0', 'Co_DecubitusUlcer_R0',
'Co_Oxygen_R0', 'Co_Mammography_R0', 'Co_PapTest_R0', 'Co_PSATest_R0',
'Co_Colonoscopy_R0', 'Co_FecalOccultTest_R0', 'Co_FluShot_R0', 'Co_PneumococcalVaccine_R0', 'Co_RenalDysfunction_R0', 'Co_Valvular_R0', 'Co_Hosp_Prior30Days_R0',
'Co_RX_Antibiotic_R0', 'Co_RX_Corticosteroid_R0', 'Co_RX_Aspirin_R0', 'Co_RX_Dipyridamole_R0',
'Co_RX_Clopidogrel_R0', 'Co_RX_Prasugrel_R0', 'Co_RX_Cilostazol_R0', 'Co_RX_Ticlopidine_R0',
'Co_RX_Ticagrelor_R0', 'Co_RX_OthAntiplatelet_R0', 'Co_RX_NSAIDs_R0',
'Co_RX_Opioid_R0', 'Co_RX_Antidepressant_R0', 'Co_RX_AAntipsychotic_R0', 'Co_RX_TAntipsychotic_R0',
'Co_RX_Anticonvulsant_R0', 'Co_RX_PPI_R0', 'Co_RX_H2Receptor_R0', 'Co_RX_OthGastro_R0',
'Co_RX_ACE_R0', 'Co_RX_ARB_R0', 'Co_RX_BBlocker_R0', 'Co_RX_CCB_R0', 'Co_RX_Thiazide_R0',
'Co_RX_Loop_R0', 'Co_RX_Potassium_R0', 'Co_RX_Nitrates_R0', 'Co_RX_Aliskiren_R0',
'Co_RX_OthAntihypertensive_R0', 'Co_RX_Antiarrhythmic_R0', 'Co_RX_OthAnticoagulant_R0',
'Co_RX_Insulin_R0', 'Co_RX_Noninsulin_R0', 'Co_RX_Digoxin_R0', 'Co_RX_Statin_R0',
'Co_RX_Lipid_R0', 'Co_RX_Lithium_R0', 'Co_RX_Benzo_R0', 'Co_RX_ZDrugs_R0',
'Co_RX_OthAnxiolytic_R0', 'Co_RX_Barbiturate_R0', 'Co_RX_Dementia_R0', 'Co_RX_Hormone_R0',
'Co_RX_Osteoporosis_R0', 'Co_N_Drugs_R0', 'Co_N_Hosp_R0', 'Co_Total_HospLOS_R0',
'Co_N_MDVisit_R0', 'Co_RX_AnyAspirin_R0', 'Co_RX_AspirinMono_R0', 'Co_RX_ClopidogrelMono_R0',
'Co_RX_AspirinClopidogrel_R0', 'Co_RX_DM_R0', 'Co_RX_Antipsychotic_R0'
]
co_train_gpop = train_set[predictor_variable]
co_train_high = train_set_high[predictor_variable]
co_train_low = train_set_low[predictor_variable]
co_validation_gpop = validation_set[predictor_variable]
co_validation_high = validation_set_high[predictor_variable]
co_validation_low = validation_set_low[predictor_variable]
# In[4]:
# THIS IS WITH DEATH
"""
out_train_cardio_gpop = train_set['Out_comp_cardiovascular_nd_RC1']
out_train_cardio_high = train_set_high['Out_comp_cardiovascular_nd_RC1']
out_train_cardio_low = train_set_low['Out_comp_cardiovascular_nd_RC1']
out_validation_cardio_gpop = validation_set['Out_comp_cardiovascular_nd_RC1']
out_validation_cardio_high = validation_set_high['Out_comp_cardiovascular_nd_RC1']
out_validation_cardio_low = validation_set_low['Out_comp_cardiovascular_nd_RC1']
"""
out_train_cardio_gpop = train_set['Out_comp_cardiovascular_R1']
out_train_cardio_high = train_set_high['Out_comp_cardiovascular_R1']
out_train_cardio_low = train_set_low['Out_comp_cardiovascular_R1']
out_validation_cardio_gpop = validation_set['Out_comp_cardiovascular_R1']
out_validation_cardio_high = validation_set_high['Out_comp_cardiovascular_R1']
out_validation_cardio_low = validation_set_low['Out_comp_cardiovascular_R1']
# In[5]:
'''
NOT USING THIS
INSTEAD USING XGBOOST: A FASTER IMPLEMENTATION OF Gradient Boost
https://github.com/dmlc/xgboost/tree/master/python-package
def GBT(X,y):
from sklearn.model_selection import GridSearchCV
from sklearn.ensemble import GradientBoostingRegressor
from imblearn.over_sampling import SMOTE
param_grid = [{
'learning_rate': [0.05,0.1,0.2],
'n_estimators': [100,150,200]
}]
boost_clf = GradientBoostingRegressor()
boosting_grid_search = GridSearchCV(estimator = boost_clf, param_grid = param_grid)
best_clf = boosting_grid_search.fit(X, y)
return best_clf
'''
# In[6]:
def xgBoost(X,y):
from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV
model = XGBClassifier()
param_grid = [{
'max_depth': [2,3],
'n_estimators': [60,160],
}]
grid_search = GridSearchCV(
estimator=model,
param_grid=param_grid,
n_jobs = 10,
cv = 5,
verbose=True
)
best_clf = grid_search.fit(X,y)
return best_clf
# In[7]:
def scores(X,y):
from sklearn.metrics import accuracy_score
from sklearn.metrics import f1_score
from sklearn.metrics import fbeta_score
from sklearn.metrics import roc_auc_score
from sklearn.metrics import log_loss
pred = best_clf.predict(X)
actual = y
print(accuracy_score(actual,pred), file = open('comp_smote_gbt_ehr.out', 'a'))
print(f1_score(actual,pred), file = open('comp_smote_gbt_ehr.out', 'a'))
print(fbeta_score(actual,pred, average = 'macro', beta = 2), file = open('comp_smote_gbt_ehr.out', 'a'))
print(roc_auc_score(actual, best_clf.predict_proba(X)[:,1]), file = open('comp_smote_gbt_ehr.out', 'a'))
print(log_loss(actual,best_clf.predict_proba(X)[:,1]), file = open('comp_smote_gbt_ehr.out', 'a'))
# In[8]:
def cross_val(X,y):
from sklearn.model_selection import KFold
from sklearn.model_selection import cross_validate
from sklearn.metrics import log_loss
from sklearn.metrics import roc_auc_score
from sklearn.metrics import fbeta_score
import sklearn
import numpy as np
cv = KFold(n_splits=5, random_state=1, shuffle=True)
log_loss = []
auc = []
accuracy = []
f1 = []
f2 = []
for train_index, test_index in cv.split(X):
X_train, X_test, y_train, y_test = X.iloc[train_index], X.iloc[test_index], y.iloc[train_index], y.iloc[test_index]
model = xgBoost(X_train, y_train)
prob = model.predict_proba(X_test)[:,1] # prob is a vector of probabilities
pred = np.round(model.predict_proba(X_test)[:,1]) # pred is the rounded predictions
log_loss.append(sklearn.metrics.log_loss(y_test, prob))
auc.append(sklearn.metrics.roc_auc_score(y_test, prob))
accuracy.append(sklearn.metrics.accuracy_score(y_test, pred))
f1.append(sklearn.metrics.f1_score(y_test, pred, average = 'macro'))
f2.append(fbeta_score(y_test,pred, average = 'macro', beta = 2))
print(np.mean(accuracy), file = open('comp_smote_gbt_ehr.out', 'a'))
print(np.mean(f1), file = open('comp_smote_gbt_ehr.out', 'a'))
print(np.mean(f2), file = open('comp_smote_gbt_ehr.out', 'a'))
print(np.mean(auc), file = open('comp_smote_gbt_ehr.out', 'a'))
print(np.mean(log_loss), file = open('comp_smote_gbt_ehr.out', 'a'))
# # General Population
from imblearn.over_sampling import SMOTE
sm = SMOTE(random_state = 42)
co_train_gpop_sm,out_train_cardio_gpop_sm = sm.fit_resample(co_train_gpop,out_train_cardio_gpop)
best_clf = xgBoost(co_train_gpop_sm, out_train_cardio_gpop_sm)
print("Train gpop", file = open('comp_smote_gbt_ehr.out', 'a'))
cross_val(co_train_gpop_sm, out_train_cardio_gpop_sm)
print()
print("Original Train gpop", file = open('comp_smote_gbt_ehr.out', 'a'))
scores(co_train_gpop, out_train_cardio_gpop)
print()
print("Test gpop", file = open('comp_smote_gbt_ehr.out', 'a'))
scores(co_validation_gpop, out_validation_cardio_gpop)
# # High Continuity
from imblearn.over_sampling import SMOTE
sm = SMOTE(random_state = 42)
co_train_high_sm,out_train_cardio_high_sm = sm.fit_resample(co_train_high,out_train_cardio_high)
best_clf = xgBoost(co_train_high_sm, out_train_cardio_high_sm)
print("Train high", file = open('comp_smote_gbt_ehr.out', 'a'))
cross_val(co_train_high_sm, out_train_cardio_high_sm)
print()
print("Original Train high", file = open('comp_smote_gbt_ehr.out', 'a'))
scores(co_train_high, out_train_cardio_high)
print()
print("Test high", file = open('comp_smote_gbt_ehr.out', 'a'))
scores(co_validation_high, out_validation_cardio_high)
# # Low Continuity
from imblearn.over_sampling import SMOTE
sm = SMOTE(random_state = 42)
co_train_low_sm,out_train_cardio_low_sm = sm.fit_resample(co_train_low,out_train_cardio_low)
best_clf = xgBoost(co_train_low_sm, out_train_cardio_low_sm)
print("Train low", file = open('comp_smote_gbt_ehr.out', 'a'))
cross_val(co_train_low_sm, out_train_cardio_low_sm)
print()
print("Original Train low", file = open('comp_smote_gbt_ehr.out', 'a'))
scores(co_train_low, out_train_cardio_low)
print()
print("Test low", file = open('comp_smote_gbt_ehr.out', 'a'))
scores(co_validation_low, out_validation_cardio_low)
| 36.792727
| 169
| 0.746788
|
7049111a66ab9ff1170ef5dfd1392d143a82ddb8
| 4,035
|
py
|
Python
|
glearn/networks/layers/conv2d.py
|
glennpow/glearn
|
e50046cb76173668fec12c20b446be7457482528
|
[
"MIT"
] | null | null | null |
glearn/networks/layers/conv2d.py
|
glennpow/glearn
|
e50046cb76173668fec12c20b446be7457482528
|
[
"MIT"
] | null | null | null |
glearn/networks/layers/conv2d.py
|
glennpow/glearn
|
e50046cb76173668fec12c20b446be7457482528
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from .layer import NetworkLayer
class Conv2dLayer(NetworkLayer):
def __init__(self, network, index, filters, input_shape=None, strides=1,
padding="SAME", activation=tf.nn.relu, lrn=None,
pooling="max", pool_k=2, pool_strides=2,
batch_norm=None, weights_initializer=None, biases_initializer=None):
super().__init__(network, index, batch_norm=batch_norm)
self.filters = filters
self.input_shape = input_shape
self.strides = strides
self.padding = padding
self.activation = activation
self.lrn = lrn
self.pooling = pooling
self.pool_k = pool_k
self.pool_strides = pool_strides
self.weights_initializer = weights_initializer
self.biases_initializer = biases_initializer
def build(self, inputs):
# initializers
weights_initializer = self.load_initializer(self.weights_initializer,
tf.contrib.layers.xavier_initializer())
biases_initializer = self.load_initializer(self.biases_initializer,
tf.constant_initializer(0.0))
# prepare input
if self.input_shape is not None:
x = tf.reshape(inputs, [-1] + self.input_shape)
else:
x = inputs
x = tf.cast(x, tf.float32)
input_channels = x.shape[-1]
# create convolution layers
features = []
for i, filter in enumerate(self.filters):
scope = f"conv2d_{self.index}_{i}"
with tf.variable_scope(scope):
# create variables
height, width, output_channels = filter
W = self.get_variable("W", (height, width, input_channels, output_channels),
initializer=weights_initializer,
trainable=self.trainable, cpu=True)
b = self.get_variable("b", (output_channels),
initializer=biases_initializer,
trainable=self.trainable, cpu=True)
# batch normalization variables
self.prepare_batch_norm(output_channels)
# conv2d and biases
Z = tf.nn.conv2d(x, W, [1, self.strides, self.strides, 1], self.padding)
Z = tf.nn.bias_add(Z, b)
self.references["Z"] = Z
# apply batch norm
Z = self.apply_batch_norm(Z, [0, 1, 2])
# activation
if self.activation is not None:
activation_func = self.load_callable(self.activation)
A = activation_func(Z)
else:
A = Z
self.references["activation"] = A
# local response normalization (before max pooling)
lrn_order = None
if self.lrn is not None:
lrn_order, lrn_bias, lrn_alpha, lrn_beta = self.lrn
if lrn_order:
A = tf.nn.lrn(A, bias=lrn_bias, alpha=lrn_alpha, beta=lrn_beta)
# pooling
if self.pooling is not None:
self.references["unpooled"] = A
ksize = [1, self.pool_k, self.pool_k, 1]
strides = [1, self.pool_strides, self.pool_strides, 1]
pooling_op = tf.nn.max_pool if self.pooling == "max" else tf.nn.avg_pool
A = pooling_op(Z, ksize=ksize, strides=strides, padding=self.padding)
# local response normalization (after max pooling)
if lrn_order is False:
A = tf.nn.lrn(A, bias=lrn_bias, alpha=lrn_alpha, beta=lrn_beta)
features.append(A)
x = A
input_channels = output_channels
self.references["features"] = features
return x
| 41.597938
| 92
| 0.537794
|
1de56fda9da25d322af2c842d4f086e1ba02789f
| 6,279
|
py
|
Python
|
rcsdk/http/response_test.py
|
vyshakhbabji/ringcentral-python-sdk-with-slate-sample-code
|
489f35c779b7a2f617c767c489deefcfb8f446f7
|
[
"MIT"
] | null | null | null |
rcsdk/http/response_test.py
|
vyshakhbabji/ringcentral-python-sdk-with-slate-sample-code
|
489f35c779b7a2f617c767c489deefcfb8f446f7
|
[
"MIT"
] | null | null | null |
rcsdk/http/response_test.py
|
vyshakhbabji/ringcentral-python-sdk-with-slate-sample-code
|
489f35c779b7a2f617c767c489deefcfb8f446f7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
import unittest
from ..test import TestCase, Spy
from .response import Response
class TestSubscription(TestCase):
def test_multipart(self):
goodMultipartMixedResponse = "Content-Type: multipart/mixed; boundary=Boundary_1245_945802293_1394135045248\n" + \
"\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\r\n" + \
"\r\n" + \
"{\n" + \
" \"response\" : [ {\n" + \
" \"status\" : 200\n" + \
" }, {\n" + \
" \"status\" : 200\n" + \
" } ]\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"foo\" : \"bar\"\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"baz\" : \"qux\"\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248--\n"
multipartMixedResponseWithErrorPart = "Content-Type: multipart/mixed; boundary=Boundary_1245_945802293_1394135045248\n" + \
"\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"response\" : [ {\n" + \
" \"status\" : 200\n" + \
" }, {\n" + \
" \"status\" : 404\n" + \
" }, {\n" + \
" \"status\" : 200\n" + \
" } ]\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"foo\" : \"bar\"\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"message\" : \"object not found\"\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"baz\" : \"qux\"\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248--\n"
badMultipartMixedResponse = "Content-Type: \n" + \
"\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"THIS IS JUNK AND CANNOT BE PARSED AS JSON\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"foo\" : \"bar\"\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248\n" + \
"Content-Type: application/json\n" + \
"\n" + \
"{\n" + \
" \"baz\" : \"qux\"\n" + \
"}\n" + \
"--Boundary_1245_945802293_1394135045248--\n"
headers = {'content-type': 'multipart/mixed; boundary=Boundary_1245_945802293_1394135045248'}
r1 = Response(207, goodMultipartMixedResponse, headers)
self.assertEqual(2, len(r1.get_responses()))
self.assertEqual('bar', r1.get_responses()[0].get_json().foo)
self.assertEqual('qux', r1.get_responses()[1].get_json().baz)
r2 = Response(207, multipartMixedResponseWithErrorPart, headers)
self.assertEqual('bar', r2.get_responses()[0].get_json().foo)
self.assertEqual('object not found', r2.get_responses()[1].get_error())
self.assertEqual('qux', r2.get_responses()[2].get_json().baz)
r3 = Response(207, badMultipartMixedResponse, headers)
caught = False
try:
r3.get_responses()
except Exception:
caught = True
self.assertTrue(caught)
if __name__ == '__main__':
unittest.main()
| 54.6
| 131
| 0.309763
|
1cd997f4cd1f7a188881677333e404e1f5b3dff5
| 7,486
|
py
|
Python
|
syspy/spatial/geometries.py
|
opensystra/systra
|
442a884ba17dc0a04d0421d0072f7239ef28ab0d
|
[
"CECILL-B"
] | 3
|
2017-10-02T10:03:30.000Z
|
2019-02-25T15:35:52.000Z
|
syspy/spatial/geometries.py
|
opensystra/systra
|
442a884ba17dc0a04d0421d0072f7239ef28ab0d
|
[
"CECILL-B"
] | null | null | null |
syspy/spatial/geometries.py
|
opensystra/systra
|
442a884ba17dc0a04d0421d0072f7239ef28ab0d
|
[
"CECILL-B"
] | null | null | null |
"""
This module provides tools for geometry processing.
"""
__author__ = 'qchasserieau'
from tqdm import tqdm
import shapely
import json
from math import pi
import numpy as np
import pandas as pd
def reversed_polyline(polyline):
coords = list(polyline.coords)
return shapely.geometry.LineString(reversed(coords))
def linestring_geometry(row):
return shapely.geometry.LineString(
[
[row['x_origin'], row['y_origin']],
[row['x_destination'], row['y_destination']]
]
)
def point_geometry(row):
return shapely.geometry.Point(row['stop_lon'], row['stop_lat'])
def linestring_from_indexed_point_geometries(indexed, points):
try:
geometries = indexed.loc[points]
coordinates = []
for geometry in list(geometries):
coordinates += list(geometry.coords)
return shapely.geometry.linestring.LineString(coordinates)
except ValueError:
return None
def line_list_to_polyline(geometries):
coord_sequence = []
last = False
for geometry in geometries:
coords = list(geometry.coords)
coord_sequence += coords[1:] if last == coords[0] else coords
last = coords[-1]
try:
return shapely.geometry.linestring.LineString(coord_sequence)
except ValueError:
return None
def polyline_to_line_list(geometry, tolerance=0):
sequence = geometry.simplify(tolerance).coords if tolerance else geometry.coords
couples = [(sequence[i], sequence[i+1]) for i in range(len(sequence) - 1)]
return [shapely.geometry.linestring.LineString(couple) for couple in couples]
def string_to_geometry(string_series):
iterator = tqdm(list(string_series), 'string_to_geometry')
return [shapely.geometry.shape(json.loads(x)) for x in iterator]
def geometry_to_string(geometry_series):
iterator = tqdm(list(geometry_series), 'geometry_to_string')
return [json.dumps(shapely.geometry.mapping(x)) for x in iterator]
def coexist(
line_a,
line_b,
rate=0.25,
buffer=1e-4,
check_collinearity=True
):
buffer_a = line_a.buffer(buffer)
buffer_b = line_b.buffer(buffer)
min_area = min(buffer_a.area, buffer_b.area)
inter = buffer_a.intersection(buffer_b)
intersect = (inter.area / min_area) > rate
clause = True
if check_collinearity:
clause = collinear(line_a, line_b)
return intersect * clause
def angle(geometry):
xa = geometry.coords[0][0]
ya = geometry.coords[0][1]
xb = geometry.coords[-1][0]
yb = geometry.coords[-1][1]
if xb != xa:
tan = (yb - ya) / (xb - xa)
a = np.arctan(tan)
else:
a = 0
return (a + 2*pi) % (2*pi)
def delta_angle(g_a, g_b):
delta = angle(g_a) - angle(g_b)
return (delta + 2*pi) % (2*pi)
def collinear(g_a, g_b, tol=pi/4):
return np.absolute(delta_angle(g_a, g_b) - pi) >= (pi - tol)
def dissociate_collinear_lines(lines, coexist_kwargs={}):
conflicts = [
[
coexist(line_a, line_b, **coexist_kwargs)
for line_a in lines
]
for line_b in tqdm(lines)
]
df = pd.DataFrame(conflicts)
uniques = {i: None for i in range(len(conflicts))}
sorted_lines = list(df.sum().sort_values(ascending=True).index)
possibilities = {i for i in range(len(lines))}
for line in sorted_lines:
taken = {
uniques[other_line]
for other_line in sorted_lines
if conflicts[line][other_line] and
other_line != line
}
uniques[line] = min(possibilities - taken)
return uniques
def line_rows(row, tolerance):
"""
Splits the geometry of a row and returns the list of chunks as a series
The initial geometry is a polyline. It is simplified then cut at its checkpoints.
"""
line_list = polyline_to_line_list(row['geometry'], tolerance)
df = pd.DataFrame([row]*(len(line_list))).reset_index(drop=True)
df['geometry'] = pd.Series(line_list)
return df
def simplify(dataframe, tolerance=False):
"""
from a dataframe of polylines,
returns a longer dataframe of straight lines
"""
to_concat = []
for name, row in dataframe.iterrows():
to_concat.append(line_rows(row, tolerance))
return pd.concat(to_concat)
def cut_ab_at_c(geometry, intersection):
"""
Geometry is a line. intersection is a point.
returns two lines : origin->intersection and intersection->destination
"""
coords = list(geometry.coords)
a = coords[0]
b = coords[-1]
c = list(intersection.coords)[0]
if c in {a, b}:
return [geometry]
else:
return shapely.geometry.LineString([a, c]), shapely.geometry.LineString([c, b])
def add_centroid_to_polyline(polyline, polygon):
"""
polyline is actualy two points geometry. Returns a three points geometry
if the line intersects the polygon. The centroid of the polygon is added
to the line (in the midle)
"""
lines = polyline_to_line_list(polyline)
to_concatenate = []
centroid = polygon.centroid
for line in lines:
to_concatenate += cut_ab_at_c(line, centroid) if polygon.intersects(line) else [line]
chained = line_list_to_polyline(to_concatenate)
return chained
def add_centroids_to_polyline(geometry, intersections, buffer=1e-9):
"""
Recursive:
geometry is a line. Every point in itersections is added to it recursively.
In the end, a polyline is returned. All the points that were in intersections can
be found in the coordinates of the polyline.
"""
if not len(intersections):
return [geometry]
coords = list(geometry.coords)
remaining_intersections = intersections - set(geometry.coords)
coord_intersections = set(intersections).intersection(coords)
sequence_dict = {coords[i]: i for i in range(len(coords))}
cuts = sorted([0] + [sequence_dict[coord] for coord in coord_intersections] + [len(coords)-1])
coord_lists = [coords[cuts[i]: cuts[i+1] + 1] for i in range(len(cuts)-1)]
polylines = [shapely.geometry.LineString(coord_list) for coord_list in coord_lists if len(coord_list) > 1]
if len(remaining_intersections) == 0:
return polylines
else:
polygons = [shapely.geometry.point.Point(i).buffer(buffer) for i in remaining_intersections]
centroids = [polygon.centroid for polygon in polygons]
centroid_coords = {list(centroid.coords)[0] for centroid in centroids if len(centroid.coords)}
while len(polygons):
polygon = polygons.pop()
polylines = [add_centroid_to_polyline(polyline, polygon) for polyline in polylines]
# recursive
return add_centroids_to_polyline(
line_list_to_polyline(polylines),
coord_intersections.union(centroid_coords),
buffer
)
def intersects_in_between(geometry_a, geometry_b):
    """Return True when the two line geometries form a T- or cross-intersection.

    True means the geometries touch somewhere other than at a shared
    endpoint; disjoint geometries, and geometries meeting only at a common
    endpoint, yield False.
    """
    # No contact at all -> not an intersection.
    if not geometry_a.intersects(geometry_b):
        return False
    coords_a = list(geometry_a.coords)
    coords_b = list(geometry_b.coords)
    endpoints_a = {coords_a[0], coords_a[-1]}
    endpoints_b = {coords_b[0], coords_b[-1]}
    # Touching only at a shared endpoint does not count either.
    return not (endpoints_a & endpoints_b)
| 29.472441
| 110
| 0.669249
|
a5175332fd9c8c7c2e0269b553f925e31594efa9
| 13,926
|
py
|
Python
|
sdk/python/pulumi_azure_native/apimanagement/v20180601preview/api_operation_policy.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/apimanagement/v20180601preview/api_operation_policy.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/apimanagement/v20180601preview/api_operation_policy.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
# Public API of this generated module.
__all__ = ['ApiOperationPolicyArgs', 'ApiOperationPolicy']
@pulumi.input_type
class ApiOperationPolicyArgs:
    # Input-argument bag for the ApiOperationPolicy resource.
    # NOTE: generated by the Pulumi SDK Generator -- edits here should be
    # limited to comments; regenerate for behavioral changes.
    def __init__(__self__, *,
                 api_id: pulumi.Input[str],
                 operation_id: pulumi.Input[str],
                 policy_content: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 service_name: pulumi.Input[str],
                 content_format: Optional[pulumi.Input[Union[str, 'PolicyContentFormat']]] = None,
                 policy_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ApiOperationPolicy resource.
        :param pulumi.Input[str] api_id: API revision identifier. Must be unique in the current API Management service instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        :param pulumi.Input[str] operation_id: Operation identifier within an API. Must be unique in the current API Management service instance.
        :param pulumi.Input[str] policy_content: Json escaped Xml Encoded contents of the Policy.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] service_name: The name of the API Management service.
        :param pulumi.Input[Union[str, 'PolicyContentFormat']] content_format: Format of the policyContent.
        :param pulumi.Input[str] policy_id: The identifier of the Policy.
        """
        pulumi.set(__self__, "api_id", api_id)
        pulumi.set(__self__, "operation_id", operation_id)
        pulumi.set(__self__, "policy_content", policy_content)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "service_name", service_name)
        # Default the policy format to 'xml' when the caller did not pick one;
        # the second check is then always true (generator artifact).
        if content_format is None:
            content_format = 'xml'
        if content_format is not None:
            pulumi.set(__self__, "content_format", content_format)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
    @property
    @pulumi.getter(name="apiId")
    def api_id(self) -> pulumi.Input[str]:
        """
        API revision identifier. Must be unique in the current API Management service instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        """
        return pulumi.get(self, "api_id")
    @api_id.setter
    def api_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "api_id", value)
    @property
    @pulumi.getter(name="operationId")
    def operation_id(self) -> pulumi.Input[str]:
        """
        Operation identifier within an API. Must be unique in the current API Management service instance.
        """
        return pulumi.get(self, "operation_id")
    @operation_id.setter
    def operation_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation_id", value)
    @property
    @pulumi.getter(name="policyContent")
    def policy_content(self) -> pulumi.Input[str]:
        """
        Json escaped Xml Encoded contents of the Policy.
        """
        return pulumi.get(self, "policy_content")
    @policy_content.setter
    def policy_content(self, value: pulumi.Input[str]):
        pulumi.set(self, "policy_content", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="serviceName")
    def service_name(self) -> pulumi.Input[str]:
        """
        The name of the API Management service.
        """
        return pulumi.get(self, "service_name")
    @service_name.setter
    def service_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_name", value)
    @property
    @pulumi.getter(name="contentFormat")
    def content_format(self) -> Optional[pulumi.Input[Union[str, 'PolicyContentFormat']]]:
        """
        Format of the policyContent.
        """
        return pulumi.get(self, "content_format")
    @content_format.setter
    def content_format(self, value: Optional[pulumi.Input[Union[str, 'PolicyContentFormat']]]):
        pulumi.set(self, "content_format", value)
    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        The identifier of the Policy.
        """
        return pulumi.get(self, "policy_id")
    @policy_id.setter
    def policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_id", value)
class ApiOperationPolicy(pulumi.CustomResource):
    # Pulumi resource wrapping an Azure API Management operation-level policy.
    # NOTE: generated by the Pulumi SDK Generator -- edits here should be
    # limited to comments; regenerate for behavioral changes.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_id: Optional[pulumi.Input[str]] = None,
                 content_format: Optional[pulumi.Input[Union[str, 'PolicyContentFormat']]] = None,
                 operation_id: Optional[pulumi.Input[str]] = None,
                 policy_content: Optional[pulumi.Input[str]] = None,
                 policy_id: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 service_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Policy Contract details.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_id: API revision identifier. Must be unique in the current API Management service instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        :param pulumi.Input[Union[str, 'PolicyContentFormat']] content_format: Format of the policyContent.
        :param pulumi.Input[str] operation_id: Operation identifier within an API. Must be unique in the current API Management service instance.
        :param pulumi.Input[str] policy_content: Json escaped Xml Encoded contents of the Policy.
        :param pulumi.Input[str] policy_id: The identifier of the Policy.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] service_name: The name of the API Management service.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ApiOperationPolicyArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Policy Contract details.
        :param str resource_name: The name of the resource.
        :param ApiOperationPolicyArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above based on the actual args.
        resource_args, opts = _utilities.get_resource_args_opts(ApiOperationPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_id: Optional[pulumi.Input[str]] = None,
                 content_format: Optional[pulumi.Input[Union[str, 'PolicyContentFormat']]] = None,
                 operation_id: Optional[pulumi.Input[str]] = None,
                 policy_content: Optional[pulumi.Input[str]] = None,
                 policy_id: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 service_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # required inputs, applies defaults, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (not looking up an existing one).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ApiOperationPolicyArgs.__new__(ApiOperationPolicyArgs)
            if api_id is None and not opts.urn:
                raise TypeError("Missing required property 'api_id'")
            __props__.__dict__["api_id"] = api_id
            if content_format is None:
                content_format = 'xml'
            __props__.__dict__["content_format"] = content_format
            if operation_id is None and not opts.urn:
                raise TypeError("Missing required property 'operation_id'")
            __props__.__dict__["operation_id"] = operation_id
            if policy_content is None and not opts.urn:
                raise TypeError("Missing required property 'policy_content'")
            __props__.__dict__["policy_content"] = policy_content
            __props__.__dict__["policy_id"] = policy_id
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if service_name is None and not opts.urn:
                raise TypeError("Missing required property 'service_name'")
            __props__.__dict__["service_name"] = service_name
            __props__.__dict__["name"] = None
            __props__.__dict__["type"] = None
        # Aliases keep state continuity across provider/API-version renames.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20170301:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20180101:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20190101:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20191201:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20191201preview:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20200601preview:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20200601preview:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20201201:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20201201:ApiOperationPolicy"), pulumi.Alias(type_="azure-native:apimanagement/v20210101preview:ApiOperationPolicy"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20210101preview:ApiOperationPolicy")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(ApiOperationPolicy, __self__).__init__(
            'azure-native:apimanagement/v20180601preview:ApiOperationPolicy',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'ApiOperationPolicy':
        """
        Get an existing ApiOperationPolicy resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Output properties start as None; the engine fills them from state.
        __props__ = ApiOperationPolicyArgs.__new__(ApiOperationPolicyArgs)
        __props__.__dict__["content_format"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["policy_content"] = None
        __props__.__dict__["type"] = None
        return ApiOperationPolicy(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="contentFormat")
    def content_format(self) -> pulumi.Output[Optional[str]]:
        """
        Format of the policyContent.
        """
        return pulumi.get(self, "content_format")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="policyContent")
    def policy_content(self) -> pulumi.Output[str]:
        """
        Json escaped Xml Encoded contents of the Policy.
        """
        return pulumi.get(self, "policy_content")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type for API Management resource.
        """
        return pulumi.get(self, "type")
| 49.208481
| 1,593
| 0.670185
|
6734dd59a05a8782b0d4e666c59e512550989cf0
| 104
|
py
|
Python
|
models/experiment_5_no_dropout.py
|
VictorAtPL/Pascal-VOC12_Class-segmentation_Tensorflow-2.0.0
|
ec79e4347874550601f5c9f75b42f099d10b5a95
|
[
"MIT"
] | 1
|
2020-11-07T07:12:15.000Z
|
2020-11-07T07:12:15.000Z
|
models/experiment_5_no_dropout.py
|
VictorAtPL/Pascal-VOC12_Class-segmentation_Tensorflow-2.0.0
|
ec79e4347874550601f5c9f75b42f099d10b5a95
|
[
"MIT"
] | 1
|
2020-09-25T19:51:13.000Z
|
2020-09-25T19:51:13.000Z
|
models/experiment_5_no_dropout.py
|
VictorAtPL/Pascal-VOC12_Class-segmentation_Tensorflow-2.0.0
|
ec79e4347874550601f5c9f75b42f099d10b5a95
|
[
"MIT"
] | 1
|
2020-11-07T08:57:46.000Z
|
2020-11-07T08:57:46.000Z
|
from models import experiment_5
class Model(experiment_5.Model):
    """Experiment 5 variant with dropout disabled (rate 0.0)."""
    # Hyperparameter overrides of the experiment_5 base model.
    n_filters = 32
    # Dropout rate of 0.0 effectively turns dropout off for this run.
    dropout = .0
| 13
| 32
| 0.711538
|
a58fe5bad04cbad047d60dfa984c348f1f7c380d
| 6,174
|
py
|
Python
|
tests/integration/test_table.py
|
firebolt-db/firebolt-cli
|
6ca12722646d2a7a0f0abb1664d5eb48076354ed
|
[
"Apache-2.0"
] | 1
|
2022-03-09T18:57:43.000Z
|
2022-03-09T18:57:43.000Z
|
tests/integration/test_table.py
|
firebolt-db/firebolt-cli
|
6ca12722646d2a7a0f0abb1664d5eb48076354ed
|
[
"Apache-2.0"
] | 23
|
2022-02-15T09:49:38.000Z
|
2022-03-21T13:20:18.000Z
|
tests/integration/test_table.py
|
firebolt-db/firebolt-cli
|
6ca12722646d2a7a0f0abb1664d5eb48076354ed
|
[
"Apache-2.0"
] | 1
|
2022-03-18T08:37:35.000Z
|
2022-03-18T08:37:35.000Z
|
import pytest
import yaml
from click.testing import CliRunner
from firebolt_cli.main import main
def drop_table(table_name: str, cli_runner: CliRunner):
    """Drop the table identified by *table_name* through the CLI query command."""
    drop_sql = f"DROP table {table_name}"
    outcome = cli_runner.invoke(main, ["query", "--sql", drop_sql])
    assert outcome.exit_code == 0
def check_table_exists(
    table_config: dict,
    cli_runner: CliRunner,
    with_metadata: bool,
):
    """Assert the table exists and exposes the expected number of columns.

    Column count comes from information_schema; when file-metadata columns
    were requested at creation time, two extra columns are expected.
    """
    query = (
        "SELECT count(*) AS column_count "
        "FROM information_schema.columns "
        f"WHERE table_name = '{table_config['table_name']}'"
    )
    outcome = cli_runner.invoke(main, ["query", "--sql", query])
    expected_columns = len(table_config["columns"]) + (2 if with_metadata else 0)
    assert str(expected_columns) in outcome.stdout
    assert outcome.exit_code == 0
def check_tables_equal_row_count(
    cli_runner: CliRunner,
    table_name_1: str,
    table_name_2: str,
):
    """
    Check that the provided tables have the same number of rows.
    """
    # The query yields 1 when the two row counts are equal, 0 otherwise.
    sql = f"""
    SELECT (SELECT count(*) FROM {table_name_1}) ==
     (SELECT count(*) FROM {table_name_2}) as result
    """
    result = cli_runner.invoke(main, ["query", "--sql", sql, "--csv"])
    assert result.exit_code == 0
    # NOTE(review): substring check is fragile -- "1" also appears in e.g.
    # "10" or a header containing "1"; consider parsing the CSV value instead.
    assert "1" in result.stdout
@pytest.mark.parametrize("with_metadata", [True, False])
def test_create_internal_table(
    configure_cli: None,
    mock_table_config: dict,
    cli_runner: CliRunner,
    with_metadata: bool,
):
    """
    Create a fact table (with and without file-metadata columns), verify it
    exists with the expected column count, then drop it.
    """
    # Materialize the table definition as a YAML file for the CLI to read.
    with open("table_config.yaml", "w") as f:
        f.write(yaml.dump(mock_table_config))
    result = cli_runner.invoke(
        main,
        f"table create-fact "
        f"--file table_config.yaml "
        f"{'--add-file-metadata ' if with_metadata else ''}".split(),
    )
    assert result.exit_code == 0
    check_table_exists(
        mock_table_config,
        cli_runner,
        with_metadata=with_metadata,
    )
    # Cleanup so repeated runs start from a clean state.
    drop_table(
        mock_table_config["table_name"],
        cli_runner,
    )
def test_create_external_table(
    configure_cli: None,
    mock_table_config: dict,
    cli_runner: CliRunner,
    s3_url: str,
):
    """
    Create an external table from an S3 location, verify it exists, then
    drop it.
    """
    # Materialize the table definition as a YAML file for the CLI to read.
    with open("table_config.yaml", "w") as f:
        f.write(yaml.dump(mock_table_config))
    result = cli_runner.invoke(
        main,
        f"table create-external "
        f"--file table_config.yaml "
        f"--s3-url {s3_url}".split(),
    )
    assert result.exit_code == 0
    # External tables are created under an "ex_" prefix by the CLI.
    mock_table_config["table_name"] = f"ex_{mock_table_config['table_name']}"
    check_table_exists(
        mock_table_config,
        cli_runner,
        with_metadata=False,
    )
    drop_table(
        mock_table_config["table_name"],
        cli_runner,
    )
@pytest.mark.parametrize("mode", ["append", "overwrite"])
def test_ingest_full_overwrite(
    configure_cli: None,
    mock_table_config: dict,
    cli_runner: CliRunner,
    s3_url: str,
    mode: str,
):
    """
    Create external and fact tables, ingest the full external table into the
    fact table in both "append" and "overwrite" modes, and verify the row
    counts match. Cleans up both tables at the end.
    """
    with open("table_config.yaml", "w") as f:
        f.write(yaml.dump(mock_table_config))
    result = cli_runner.invoke(
        main,
        f"table create-external "
        f"--file table_config.yaml "
        f"--s3-url {s3_url}".split(),
    )
    assert result.exit_code == 0
    result = cli_runner.invoke(
        main,
        f"table create-fact " f"--file table_config.yaml --add-file-metadata".split(),
    )
    assert result.exit_code == 0
    fact_table_name = mock_table_config["table_name"]
    # External tables are created under an "ex_" prefix by the CLI.
    external_table_name = f"ex_{fact_table_name}"
    result = cli_runner.invoke(
        main,
        f"ingest "
        f"--fact-table-name {fact_table_name} "
        f"--external-table-name {external_table_name} "
        f"--mode {mode}".split(),
    )
    assert result.exit_code == 0, result.stderr
    check_tables_equal_row_count(cli_runner, fact_table_name, external_table_name)
    for table_name in [fact_table_name, external_table_name]:
        drop_table(
            table_name,
            cli_runner,
        )
def test_ingest_append(
    configure_cli: None,
    mock_table_config: dict,
    cli_runner: CliRunner,
    s3_url: str,
):
    """
    Create external and fact tables, then ingest incrementally: first a
    subset of the source files, then the full set, verifying row counts at
    each step. Cleans up all three tables at the end.
    """
    # Second config restricted to a subset of the source files.
    mock_table_config_sub = mock_table_config.copy()
    mock_table_config_sub["object_pattern"] = ["*2.parquet"]
    mock_table_config_sub["table_name"] = "lineitem_sub"
    # Create an external table for each config (full set and subset).
    for mock_table in [mock_table_config_sub, mock_table_config]:
        with open("table_config.yaml", "w") as f:
            f.write(yaml.dump(mock_table))
        result = cli_runner.invoke(
            main,
            f"table create-external "
            f"--file table_config.yaml "
            f"--s3-url {s3_url}".split(),
        )
        assert result.exit_code == 0
    # The fact table is created from the full config (the last one written).
    result = cli_runner.invoke(
        main,
        f"table create-fact " f"--file table_config.yaml --add-file-metadata".split(),
    )
    assert result.exit_code == 0
    fact_table_name = mock_table_config["table_name"]
    external_table_name = f"ex_{mock_table_config['table_name']}"
    external_table_name_sub = f"ex_{mock_table_config_sub['table_name']}"
    # First pass: ingest only the subset.
    result = cli_runner.invoke(
        main,
        f"ingest "
        f"--fact-table-name {fact_table_name} "
        f"--external-table-name {external_table_name_sub} "
        f"--mode append".split(),
    )
    assert result.exit_code == 0
    check_tables_equal_row_count(cli_runner, fact_table_name, external_table_name_sub)
    # Second pass: appending the full set should top the fact table up to
    # the full external table's row count.
    result = cli_runner.invoke(
        main,
        f"ingest "
        f"--fact-table-name {fact_table_name} "
        f"--external-table-name {external_table_name} "
        f"--mode append".split(),
    )
    assert result.exit_code == 0
    check_tables_equal_row_count(cli_runner, fact_table_name, external_table_name)
    for table_name in [fact_table_name, external_table_name, external_table_name_sub]:
        drop_table(
            table_name,
            cli_runner,
        )
| 25.725
| 86
| 0.631357
|
6e851e34e3972da0576f9d3e9be1882ca6b98dca
| 1,453
|
py
|
Python
|
manage.py
|
zacwolfe/songfoo-reports
|
e5c76f4209da064098ba7798eca34481b1ef931a
|
[
"MIT"
] | null | null | null |
manage.py
|
zacwolfe/songfoo-reports
|
e5c76f4209da064098ba7798eca34481b1ef931a
|
[
"MIT"
] | null | null | null |
manage.py
|
zacwolfe/songfoo-reports
|
e5c76f4209da064098ba7798eca34481b1ef931a
|
[
"MIT"
] | null | null | null |
import os
import json
import argparse
import requests
from songfoo_reports.core import db
from songfoo_reports.models import Post
def create_sample_db_entry(api_endpoint, payload):
    """POST *payload* as JSON to the local dev server under *api_endpoint*.

    Prints the server's response body. Assumes the app is running on
    localhost:5000.
    """
    url = 'http://localhost:5000/' + api_endpoint
    r = requests.post(
        url, data=json.dumps(payload),
        headers={'Content-Type': 'application/json'})
    # Python-2-only `print r.text` replaced with the function form,
    # which is valid in both Python 2 and 3.
    print(r.text)
def create_db():
    """Create all database tables registered on the shared ``db`` object."""
    db.create_all()
def drop_db():
    """Drop all database tables registered on the shared ``db`` object."""
    db.drop_all()
def main():
    """Command-line entry point for managing the Flask application.

    Supported commands:
      create_db              -- create all database tables
      delete_db              -- drop all database tables
      seed_db --seedfile F   -- read seed data from JSON file F and POST each
                                item to the running app's API endpoints

    Raises:
      Exception: if the command is unknown, or ``seed_db`` is given
        without ``--seedfile``.
    """
    parser = argparse.ArgumentParser(
        description='Manage this Flask application.')
    parser.add_argument(
        'command', help='the name of the command you want to run')
    parser.add_argument(
        '--seedfile', help='the file with data for seeding the database')
    args = parser.parse_args()
    if args.command == 'create_db':
        create_db()
        # print() function form works on both Python 2 and 3.
        print("DB created!")
    elif args.command == 'delete_db':
        drop_db()
        print("DB deleted!")
    elif args.command == 'seed_db' and args.seedfile:
        with open(args.seedfile, 'r') as f:
            # json.load reads directly from the file object.
            seed_data = json.load(f)
        # Seed data maps an API resource name to a list of item payloads.
        for item_class in seed_data:
            items = seed_data[item_class]
            print(items)
            for item in items:
                print(item)
                create_sample_db_entry('api/' + item_class, item)
        print("\nSample data added to database!")
    else:
        raise Exception('Invalid command')
# Script entry point: parse arguments and dispatch the requested command.
if __name__ == '__main__':
    main()
| 24.216667
| 73
| 0.624914
|
0e994dee750b9342ceb580bab82f7f25e18071fe
| 1,778
|
py
|
Python
|
shoppingcart/shoppingcart/urls.py
|
bsurajbh/shopping-cart
|
04588d5fe7b4afffc831f82209ec2e9949bd4ba0
|
[
"MIT"
] | null | null | null |
shoppingcart/shoppingcart/urls.py
|
bsurajbh/shopping-cart
|
04588d5fe7b4afffc831f82209ec2e9949bd4ba0
|
[
"MIT"
] | null | null | null |
shoppingcart/shoppingcart/urls.py
|
bsurajbh/shopping-cart
|
04588d5fe7b4afffc831f82209ec2e9949bd4ba0
|
[
"MIT"
] | null | null | null |
"""shoppingcart URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from shop import views
from django.conf import settings
from django.conf.urls.static import static
# Root URL routes: admin, catalogue index, and the shop/search/cart apps.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', views.all_products_catelog, name='index'),
    path('shop/', include('shop.urls')),
    path('search/', include('search.urls')),
    path('cart/', include('cart.urls')),
]

if settings.DEBUG:
    # In development only, serve static and media files from Django itself;
    # in production the web server is expected to handle these.
    urlpatterns += static(settings.STATIC_URL,
                          document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL,
                          document_root=settings.MEDIA_ROOT)

    import debug_toolbar
    urlpatterns = [
        path('__debug__/', include(debug_toolbar.urls)),
        # For django versions before 2.0:
        # url(r'^__debug__/', include(debug_toolbar.urls)),
    ] + urlpatterns
# Removed a fully commented-out duplicate of the DEBUG block that followed
# here; it repeated the live code above verbatim and was dead weight.
| 32.925926
| 77
| 0.663105
|
ef168d2d9217519aa2a2247011b45b1101e39337
| 290,845
|
py
|
Python
|
python/paddle/fluid/optimizer.py
|
xingjing1/Paddle
|
af886995ac38bd26588de33205a19eb1e72fecbf
|
[
"Apache-2.0"
] | 3
|
2017-05-11T11:10:13.000Z
|
2017-10-23T09:13:14.000Z
|
python/paddle/fluid/optimizer.py
|
betterpig/paddle_npu
|
74ad4b6a700795d5edce8dd49d6c2df6f15e8935
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/optimizer.py
|
betterpig/paddle_npu
|
74ad4b6a700795d5edce8dd49d6c2df6f15e8935
|
[
"Apache-2.0"
] | 1
|
2021-08-12T02:27:50.000Z
|
2021-08-12T02:27:50.000Z
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import six
import os
import logging
from collections import defaultdict
import paddle
from paddle.fluid.distribute_lookup_table import find_distributed_lookup_table
from paddle.fluid.framework import Program, Variable, name_scope, default_main_program, default_startup_program, device_guard
from . import framework
from . import layers
from . import unique_name
from .backward import append_backward, _some_in_set_, _append_grad_suffix_, _get_no_grad_set_name
from .clip import GradientClipBase, GradientClipByNorm, error_clip_callback, append_gradient_clip_ops, ClipGradByGlobalNorm
from .framework import program_guard
from .initializer import Constant
from .layer_helper import LayerHelper
from .layers import ops
from .dygraph import base as imperative_base
from .dygraph import no_grad
from .dygraph.learning_rate_scheduler import LearningRateDecay, _LearningRateEpochDecay
from paddle.fluid import core
from paddle.fluid.layers import tensor
from functools import reduce
from functools import cmp_to_key
from .wrapped_decorator import signature_safe_contextmanager
from .. import compat as cpt
import warnings
from paddle import _C_ops
# Public optimizer surface re-exported by paddle.fluid.
__all__ = [
    'SGD', 'Momentum', 'Adagrad', 'Adam', 'Adamax', 'Dpsgd', 'DecayedAdagrad',
    'Ftrl', 'SGDOptimizer', 'MomentumOptimizer', 'AdagradOptimizer',
    'AdamOptimizer', 'AdamaxOptimizer', 'DpsgdOptimizer',
    'DecayedAdagradOptimizer', 'RMSPropOptimizer', 'FtrlOptimizer', 'Adadelta',
    'AdadeltaOptimizer', 'ModelAverage', 'LarsMomentum',
    'LarsMomentumOptimizer', 'LambOptimizer', 'ExponentialMovingAverage',
    'PipelineOptimizer', 'LookaheadOptimizer', 'RecomputeOptimizer'
]
class Optimizer(object):
    """Optimizer base class.

    Defines the common interface of an optimizer: accumulator management,
    learning-rate bookkeeping, and state (de)serialization. Users should not
    use this class directly, but one of its concrete implementations.
    """
    @imperative_base.no_grad
    def __init__(self,
                 learning_rate,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 flatten_param_grads=False,
                 align_size=-1,
                 name=None):
        """
        Args:
            learning_rate (float|Variable|LRScheduler|LearningRateDecay): base
                learning rate; accepted types depend on dygraph vs static mode
                (validated below).
            parameter_list (list, optional): parameters to optimize; required
                in dygraph mode.
            regularization (WeightDecayRegularizer, optional): global
                regularization; per-parameter regularizers take precedence.
            grad_clip (GradientClipBase, optional): gradient clipping strategy.
            flatten_param_grads (bool, optional): Whether to flatten all the parameters and grads.
                If true, the parameters and gradients will be coalesce to contiguous mempry,
                and the grad_clip ops / optimizer ops will be fuse to one operator.
            align_size (int, optional): alignment used when flattening
                parameters/gradients. -- presumably bytes; TODO confirm.
            name (str, optional): name prefix for created variables.
        """
        # Because of the loop import, so place it in the function body
        from paddle.optimizer.lr import LRScheduler
        self._parameter_list = list(
            parameter_list) if parameter_list is not None else None
        self._name = name
        if framework.in_dygraph_mode():
            # Dygraph mode: lr must be float/decay/scheduler, and explicit
            # parameters are mandatory.
            if not isinstance(learning_rate,
                              (float, LearningRateDecay, LRScheduler)):
                raise TypeError(
                    "learning rate should be float or LRScheduler, got %s here"
                    % type(learning_rate))
            if self._parameter_list is None:
                raise AttributeError(
                    "parameter_list argument given to the Optimizer should not be None in dygraph mode."
                )
            if regularization is not None:
                # Warn once if any parameter carries its own regularizer,
                # which overrides the optimizer-level one.
                for param in self._parameter_list:
                    if param.regularizer is not None:
                        logging.info(
                            "If regularizer of a Parameter has been set by 'fluid.ParamAttr' or 'fluid.WeightNormParamAttr' already. "
                            "The Regularization[%s] in Optimizer will not take effect, and it will only be applied to other Parameters!"
                            % regularization.__str__())
                        break
        else:
            # Static-graph mode: lr may also be a Variable.
            if not isinstance(learning_rate,
                              (float, framework.Variable, LRScheduler)):
                raise TypeError(
                    "learning rate should be float or LRScheduler, got %s here"
                    % type(learning_rate))
        if grad_clip is not None:
            if not isinstance(grad_clip, GradientClipBase):
                raise TypeError(
                    "'grad_clip' should be an instance of GradientClipBase's derived class"
                )
        self.regularization = regularization
        self._grad_clip = grad_clip
        self._learning_rate = learning_rate
        self._flatten_param_grads = flatten_param_grads
        self._align_size = align_size
        self._dtype = None
        # Infer the dtype form parameter
        if self._parameter_list:
            self._dtype = self._parameter_list[0].dtype
        # each program should have a independent learning rate
        # program -> Variable(learning_rate)
        self._learning_rate_map = dict()
        if isinstance(self._learning_rate, framework.Variable):
            self._learning_rate_map[framework.default_main_program(
            )] = self._learning_rate
        # Dictionary of accumulators. Some optimizer subclasses need to
        # allocate and manage extra variables associated with the parameters
        # to train. These variables are called accumulators.
        # {accum_name : { paramter_name : accumulator_for_parameter, ...}, ...}
        self._accumulators = defaultdict(lambda: dict())
        # global_accumulator dict, {accum_name : acc_variable, ...}
        self._global_accumulators = {}
        self.helper = LayerHelper(self.__class__.__name__)
        self._opti_name_list = []
        self._accumulators_holder = {}
        self._param_device_map = dict()
        # NOTE(zhiqiu): sometimes we want to add some variables(Tenosr) to the optimizer for a specific optimization,
        # for example, we want to pass 'found_inf' to adam optimizer so it can skip update when found_inf is True.
        # And these variables should not be the parameters of Optimizer's construnctor (because not commonly used).
        # Use _auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var to achieve that.
        self._auxiliary_vars = dict()
@framework.dygraph_only
def state_dict(self):
'''
Get state dict information from optimizer. It contain all the variable used by optimizer. For Adam optimizer, contains beta1, beta2, momentum etc. If LearningRateDecay have been used, global_step will be include in state dict.
If the optimizer never be called(minimize function), the state_dict is empty.
Args: None
Return:
state_dict(dict) : dict contains all the variable used by optimizer
Examples:
.. code-block:: python
import paddle.fluid as fluid
with fluid.dygraph.guard():
emb = fluid.dygraph.Embedding([10, 10])
adam = fluid.optimizer.Adam(0.001, parameter_list=emb.parameters())
state_dict = adam.state_dict()
'''
from paddle.optimizer.lr import LRScheduler
state_dict = {}
for k, v in self._accumulators.items():
for para_name, var_tmp in v.items():
state_dict[var_tmp.name] = var_tmp
for k, v in self._global_accumulators.items():
state_dict[v.name] = v
# global step if use lr decay
if isinstance(self._learning_rate, LRScheduler):
state_dict["LR_Scheduler"] = self._learning_rate.state_dict()
return state_dict
if isinstance(self._learning_rate, LearningRateDecay):
state_dict["LR_Scheduler"] = self._learning_rate.state_dict()
if not isinstance(self._learning_rate, _LearningRateEpochDecay):
var_tmp = None
var_temp = framework._varbase_creator(
None, name='global_step', dtype='int32')
tensor.fill_constant(
[1], "int32", self._learning_rate.step_num, out=var_temp)
state_dict['global_step'] = var_temp
return state_dict
@framework.dygraph_only
def set_state_dict(self, state_dict):
    '''
    Load optimizer state dict. For Adam optimizer, contains beta1, beta2, momentum etc. If LearningRateDecay have been used, global_step will be changed.

    Args:
        state_dict(dict) : Dict contains all the Variable needed by optimizer

    Return:
        None

    Examples:
        .. code-block:: python

            import paddle
            import paddle.fluid as fluid

            paddle.disable_static()

            emb = paddle.nn.Embedding(10, 10)

            state_dict = emb.state_dict()
            fluid.save_dygraph(state_dict, "paddle_dy")

            scheduler = paddle.optimizer.lr.NoamDecay(
                d_model=0.01, warmup_steps=100, verbose=True)
            adam = paddle.optimizer.Adam(
                learning_rate=scheduler,
                parameters=emb.parameters())
            state_dict = adam.state_dict()
            fluid.save_dygraph(state_dict, "paddle_dy")

            para_state_dict, opti_state_dict = fluid.load_dygraph("paddle_dy")
    '''
    from paddle.optimizer.lr import LRScheduler
    # Restore scheduler state first; new-style LRScheduler and legacy
    # LearningRateDecay are handled by separate branches below.
    if isinstance(self._learning_rate, LRScheduler):
        self._learning_rate.set_dict(state_dict["LR_Scheduler"])

    if isinstance(self._learning_rate, LearningRateDecay):
        self._learning_rate.set_dict(state_dict["LR_Scheduler"])

        if not isinstance(self._learning_rate, _LearningRateEpochDecay):
            # Step-based decay also needs the saved global step counter,
            # which may arrive as a Variable or as a numpy array.
            assert 'global_step' in state_dict, \
                'Global step not in state dict, Dygraph use LearningRateDecay, global_step must in state_dict'
            global_step = state_dict['global_step']

            if isinstance(global_step, Variable):
                step_np = global_step
                step_np = np.array(step_np.value().get_tensor())
                assert step_np.shape == (1,), \
                    "global step shape is (1,), the shape is {}".format( step_np.shape )

                self._learning_rate.step_num = int(step_np[0])
            elif isinstance(global_step, np.ndarray):
                assert global_step.shape == (1,), \
                    "global step shape is (1,), the shape is {}".format( global_step.shape )
                self._learning_rate.step_num = global_step[0]
            else:
                raise RuntimeError(
                    "Type not supprt, value in state dict must be [VarBase, Variable, numpy], the type is ",
                    type(global_step))

    def _load_state_para(state_dict, param):
        # Copy one saved tensor from `state_dict` into `param` in place,
        # after checking that shape and dtype match exactly.
        var = param.value()
        tensor = var.get_tensor()
        model_np = np.array(tensor)
        load_para = state_dict[param.name]

        if isinstance(load_para, Variable):
            load_para_np = load_para.numpy()
        elif isinstance(load_para, core.VarBase):
            load_para_np = load_para.numpy()
        elif isinstance(load_para, np.ndarray):
            load_para_np = load_para
        else:
            raise RuntimeError("State dict type {} not supprt".format(
                str(type(load_para))))

        assert model_np.shape == load_para_np.shape, \
            "Parameter shape not match, Dygraph Parameter [ {} ] need tensor with shape {} but load tensor with shape {}".format(
                param.name, model_np.shape, load_para_np.shape)

        assert model_np.dtype == load_para_np.dtype, \
            "Parameter dtype not match, Dygraph Parameter [ {} ] need tensor with dtype {} but load tensor with dtype {}".format(
                param.name, model_np.dtype, load_para_np.dtype)

        tensor.set(load_para_np, framework._current_expected_place())

    # Keep the raw dict: accumulators created lazily later can also be
    # initialized from it (see _add_accumulator / _add_global_accumulator).
    self._accumulators_holder = state_dict
    for k, v in self._accumulators.items():
        for para_name, var_tmp in v.items():
            assert var_tmp.name in state_dict, \
                "optimizer variable {} not found".format( var_tmp.name )
            _load_state_para(state_dict, var_tmp)

    for k, v in self._global_accumulators.items():
        assert v.name in state_dict, \
            "optimizer variable {} not found".format( v.name )
        _load_state_para(state_dict, v)

# [aliases] Compatible with old method names
set_dict = set_state_dict
def get_opti_var_name_list(self):
    """Return the names of all variables this optimizer has created."""
    names = self._opti_name_list
    return names
def _set_auxiliary_var(self, key, val):
    """Stash an auxiliary value under *key* for later retrieval."""
    self._auxiliary_vars.update({key: val})
def _get_auxiliary_var(self, key):
    """Look up a previously stored auxiliary value; None when absent."""
    return self._auxiliary_vars.get(key, None)
def _create_global_learning_rate(self):
    """Ensure a global learning-rate Variable exists for the current main
    program, creating and initializing it when necessary.

    Handles three cases: a new-style ``LRScheduler``, dygraph mode
    (float or legacy ``LearningRateDecay``), and static-graph mode
    (float only).
    """
    from paddle.optimizer.lr import LRScheduler
    if isinstance(self._learning_rate, LRScheduler):
        lr_var = self._global_learning_rate()
        # only create global lr_var once
        if not isinstance(lr_var, framework.Variable):
            lr_name = unique_name.generate('learning_rate')
            self._learning_rate._var_name = lr_name
            lr_var = self.helper.create_global_variable(
                name=lr_name,
                shape=[1],
                persistable=True,
                stop_gradient=True,
                dtype='float32' if self._dtype is None else self._dtype)
            main_prog = framework.default_main_program()
            # NOTE(review): attribute is spelled 'lr_sheduler' (sic);
            # presumably read elsewhere under that exact name — do not "fix".
            main_prog.lr_sheduler = self._learning_rate
            main_prog.lr_var = lr_var
            self._learning_rate_map[framework.default_main_program(
            )] = lr_var

        # Seed the variable with the scheduler's current value.
        lr_value = float(self._learning_rate())
        self.helper.set_variable_initializer(
            lr_var, initializer=Constant(value=lr_value))
        return

    if imperative_base.enabled():
        # create learning rate Variable
        if isinstance(self._learning_rate, float):
            lr = self._global_learning_rate()

            if isinstance(lr, framework.Variable):
                return
            else:
                self._learning_rate_map[framework.default_main_program(
                )] = layers.create_global_var(
                    name=unique_name.generate("learning_rate"),
                    shape=[1],
                    value=float(self._learning_rate),
                    dtype='float32' if self._dtype is None else self._dtype,
                    persistable=True)
        # get learning rate Variable from LearningRateDecay
        elif isinstance(self._learning_rate, LearningRateDecay):
            self._learning_rate_map[framework.default_main_program(
            )] = self._learning_rate()
        else:
            raise TypeError(
                "optimizer's learning rate must be float or LearningRateDecay"
            )
    else:
        lr = self._global_learning_rate()

        if isinstance(lr, framework.Variable):
            return
        else:
            if not isinstance(self._learning_rate, float):
                raise TypeError(
                    "learning rate variable is create outside optimizer,"
                    "can not create new learning rate variable for new program"
                )

            # create learning rate in the current main program
            self._learning_rate_map[framework.default_main_program(
            )] = layers.create_global_var(
                name=unique_name.generate("learning_rate"),
                shape=[1],
                value=float(self._learning_rate),
                dtype='float32' if self._dtype is None else self._dtype,
                persistable=True)
@framework.dygraph_only
def set_lr(self, value):
    """
    :api_attr: imperative

    Set the value of the learning rate manually in the optimizer. If the optimizer use LearningRateDecay,
    this API cannot be invoked, because it will lead to conflict.

    Args:
        value (float|Variable): the value of learning rate

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            with fluid.dygraph.guard():
                linear = fluid.dygraph.nn.Linear(10, 10)

                adam = fluid.optimizer.Adam(0.1, parameter_list=linear.parameters())

                # set learning rate manually by python float value
                lr_list = [0.2, 0.3, 0.4, 0.5, 0.6]
                for i in range(5):
                    adam.set_lr(lr_list[i])
                    lr = adam.current_step_lr()
                    print("current lr is {}".format(lr))
                # Print:
                #    current lr is 0.2
                #    current lr is 0.3
                #    current lr is 0.4
                #    current lr is 0.5
                #    current lr is 0.6

                # set learning rate manually by framework Variable
                lr_var = fluid.layers.create_global_var(
                    shape=[1], value=0.7, dtype='float32')
                adam.set_lr(lr_var)
                lr = adam.current_step_lr()
                print("current lr is {}".format(lr))
                # Print:
                #    current lr is 0.7
    """
    if not isinstance(value, (framework.Variable, float)):
        raise TypeError(
            "The type of 'value' in optimizer.set_lr must be (float, Variable), but received %s."
            % (type(value)))
    if isinstance(self._learning_rate, LearningRateDecay):
        raise RuntimeError(
            "optimizer's learning rate can't be LearningRateDecay when invoke this API, because this will lead to conflict."
        )
    if isinstance(value, float):
        self._learning_rate = value
        current_lr = self._global_learning_rate()
        if current_lr is not None:
            # Overwrite the existing LR variable in place with a
            # fill_constant op so already-built programs see the new value.
            global_block = framework.default_main_program().global_block()
            global_block.append_op(
                type='fill_constant',
                outputs={'Out': [current_lr]},
                attrs={
                    'dtype': current_lr.dtype,
                    'shape': list(current_lr.shape),
                    'value': float(value)
                },
                stop_gradient=True)
    else:
        # A Variable learning rate must be a 1-D tensor with one element.
        assert len(value.shape) == 1 and value.shape[
            0] == 1, "optimizer's learning rate must be 1-D Tensor with shape[1]"
        self._learning_rate_map[framework.default_main_program()] = value
@framework.dygraph_only
def current_step_lr(self):
    """
    :api_attr: imperative

    Get current step learning rate. The return value is all the same When LearningRateDecay is not used,
    otherwise return the step learning rate.

    Returns:
        float: The learning rate of the current step.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import numpy as np

            # example1: LearningRateDecay is not used, return value is all the same
            with fluid.dygraph.guard():
                emb = fluid.dygraph.Embedding([10, 10])
                adam = fluid.optimizer.Adam(0.001, parameter_list = emb.parameters())
                lr = adam.current_step_lr()
                print(lr) # 0.001

            # example2: PiecewiseDecay is used, return the step learning rate
            with fluid.dygraph.guard():
                inp = np.random.uniform(-0.1, 0.1, [10, 10]).astype("float32")
                linear = fluid.dygraph.nn.Linear(10, 10)
                inp = fluid.dygraph.to_variable(inp)
                out = linear(inp)
                loss = fluid.layers.reduce_mean(out)

                bd = [2, 4, 6, 8]
                value = [0.2, 0.4, 0.6, 0.8, 1.0]
                adam = fluid.optimizer.Adam(fluid.dygraph.PiecewiseDecay(bd, value, 0),
                                   parameter_list=linear.parameters())

                # first step: learning rate is 0.2
                np.allclose(adam.current_step_lr(), 0.2, rtol=1e-06, atol=0.0) # True

                # learning rate for different steps
                ret = [0.2, 0.2, 0.4, 0.4, 0.6, 0.6, 0.8, 0.8, 1.0, 1.0, 1.0, 1.0]
                for i in range(12):
                    adam.minimize(loss)
                    lr = adam.current_step_lr()
                    np.allclose(lr, ret[i], rtol=1e-06, atol=0.0) # True
    """
    current_lr = self._global_learning_rate()
    if isinstance(current_lr, framework.Variable):
        # A materialized LR variable exists; read its concrete value.
        return self._global_learning_rate().numpy()[0]

    if isinstance(self._learning_rate, float):
        return self._learning_rate
    elif isinstance(self._learning_rate, _LearningRateEpochDecay):
        step_lr = self._learning_rate()
        return step_lr.numpy()[0]
    else:
        # Scheduler's step() may return a python number or a tensor,
        # depending on the scheduler implementation.
        step_lr = self._learning_rate.step()
        if isinstance(step_lr, (float, int)):
            return step_lr
        else:
            return step_lr.numpy()[0]
def _global_learning_rate(self, program=None):
    """Return the decayed global learning-rate variable for *program*.

    Falls back to the default main program when *program* is None and
    returns None when no learning rate has been created for it yet.
    """
    target = framework.default_main_program() if program is None else program
    return self._learning_rate_map.get(target, None)
def _append_optimize_op(self, block, param_and_grad):
    """Append optimize operator(s) to *block* and return them.

    The base optimizer defines no update rule of its own; subclasses
    must override this method.
    """
    raise NotImplementedError()
def _create_param_lr(self, param_and_grad):
    """Return the effective learning rate for one parameter.

    Per-parameter LR may be a Variable (used as-is), the default 1.0
    (global LR), or a float multiplier applied to the global LR.
    """
    # create learning rate variable for every parameter
    param = param_and_grad[0]
    param_lr = param.optimize_attr['learning_rate']
    # NOTE(review): exact-type check (not isinstance) kept as-is;
    # presumably intentional to exclude Variable subclasses — confirm.
    if type(param_lr) == Variable:
        return param_lr
    else:
        if param_lr == 1.0:
            return self._global_learning_rate()
        else:
            with default_main_program()._lr_schedule_guard(
                    is_with_opt=True), framework.name_scope(
                        'scale_with_param_lr'):
                # Scale op is emitted under a dedicated name scope.
                return self._global_learning_rate() * param_lr
def _create_accumulators(self, block, parameters):
    """Hook for subclasses to create the accumulators they need.

    Args:
        block: the block in which the loss variable is present.
        parameters: list of parameter variables for the optimizer.
    """
    # The base optimizer keeps no per-parameter state.
    pass
def _finish_update(self, block, parameters_and_grads):
    """Hook for custom updates that must run at the end of an
    optimization step.

    Args:
        block: the block in which the loss variable is present.
        parameters_and_grads: list of (parameter, gradient) pairs.

    Returns:
        None
    """
    # Nothing to finalize in the base optimizer.
    pass
def _add_accumulator(self,
                     name,
                     param,
                     dtype=None,
                     fill_value=0.0,
                     shape=None,
                     type=None,
                     device=None):
    """Utility function to add an accumulator for a parameter.

    Args:
        name: name of the accumulator.
        param: parameter variable for which the accumulator is added.
        dtype: data type of the accumulator (defaults to param's dtype).
        fill_value: value used to initialize the accumulator.
        shape: shape of the accumulator (defaults to param's shape).
        type: variable type of the accumulator (defaults to param's type).
        device: placement of the accumulator (defaults to the device
            inferred for the parameter).

    Returns:
        The created accumulator variable (in dygraph mode an already
        existing accumulator is returned instead of raising).

    Raises:
        Exception: in static-graph mode, when an accumulator with the
            same name already exists for the parameter.
    """
    if self._name is not None:
        name = self._name + "_" + name
    if (name in self._accumulators and
            param.name in self._accumulators[name]):
        if framework.in_dygraph_mode():
            # Dygraph may re-enter; reuse the existing accumulator.
            return self._accumulators[name][param.name]
        raise Exception("Accumulator {} already exists for parameter {}".
                        format(name, param.name))
    # Fix: compare to None with `is`, not `==` (PEP 8 E711).
    if shape is None:
        shape = param.shape
    assert isinstance(self.helper, LayerHelper)

    var_name = param.name + "_" + name
    var_name = unique_name.generate(var_name)
    self._opti_name_list.append(var_name)

    var = self.helper.create_global_variable(
        name=var_name,
        persistable=True,
        dtype=dtype or param.dtype,
        type=param.type if type is None else type,
        shape=shape,
        belong_to_optimizer=True)
    if device is None:
        device = self._get_device_for_param(param.name)
    with device_guard(device):
        self.helper.set_variable_initializer(
            var, initializer=Constant(value=float(fill_value)))

    if framework.in_dygraph_mode():
        if len(self._accumulators_holder) > 0:
            # State was loaded via set_state_dict before this accumulator
            # existed; initialize it from the held state dict.
            assert var_name in self._accumulators_holder, \
                "Optimizer set error, {} should in state dict".format( var_name )
            var.set_value(self._accumulators_holder[var_name])

    self._accumulators[name][param.name] = var
    return var
def _add_global_accumulator(self,
                            name,
                            dtype=None,
                            fill_value=0.0,
                            shape=None,
                            type=None,
                            device=None):
    """Utility function to add a global accumulator for all parameters in the model.

    Args:
        name: name of the accumulator.
        dtype: data type of the accumulator variable.
        fill_value: value to initialize the accumulator variable.
        shape: the shape of the accumulator (defaults to [1]).
        type: the variable type of the accumulator.
        device: the target place of the accumulator (defaults to 'cpu').

    Returns:
        The created accumulator variable (in dygraph mode an already
        existing accumulator is returned instead of raising).

    Raises:
        Exception: in static-graph mode, when a global accumulator with
            the same name already exists.
    """
    if self._name is not None:
        name = self._name + "_" + name
    if (name in self._global_accumulators):
        if framework.in_dygraph_mode():
            # Dygraph may re-enter; reuse the existing accumulator.
            return self._global_accumulators[name]
        raise Exception("Global accumulator {} already exists".format(name))
    # Fix: compare to None with `is`, not `==` (PEP 8 E711).
    if shape is None:
        shape = [1]  # most case, global accumulator is of shape [1]
    assert isinstance(self.helper, LayerHelper)

    var_name = name
    var_name = unique_name.generate(var_name)
    self._opti_name_list.append(var_name)

    var = self.helper.create_global_variable(
        name=var_name,
        persistable=True,
        dtype=dtype if dtype else self._dtype,
        type=type,
        shape=shape,
        belong_to_optimizer=True)
    if device is None:
        device = 'cpu'
    with device_guard(device):
        self.helper.set_variable_initializer(
            var, initializer=Constant(value=float(fill_value)))

    if framework.in_dygraph_mode():
        if len(self._accumulators_holder) > 0:
            # Initialize from state loaded earlier via set_state_dict.
            assert var_name in self._accumulators_holder, \
                "Optimizer set error, {} should in state dict".format( var_name )
            var.set_value(self._accumulators_holder[var_name])

    self._global_accumulators[name] = var
    return var
def _get_accumulator(self, name, param):
    """Fetch the accumulator named *name* for parameter *param*.

    Raises:
        Exception: when no such accumulator has been created.
    """
    full_name = name if self._name is None else self._name + "_" + name
    per_param = self._accumulators.get(full_name)
    if per_param is None or param.name not in per_param:
        raise Exception("Accumulator {} does not exist for parameter {}".
                        format(full_name, param.name))
    return per_param[param.name]
def _get_global_accumulator(self, name):
    """Fetch the global accumulator named *name*.

    Raises:
        Exception: when no such global accumulator has been created.
    """
    full_name = name if self._name is None else self._name + "_" + name
    try:
        return self._global_accumulators[full_name]
    except KeyError:
        raise Exception(
            "Global accumulator {} does not exist".format(full_name))
def _update_param_device_map(self, parameters_and_grads, target_block):
    """Record, for every trainable parameter, the device attribute of the
    first op in ``target_block`` that consumes it.

    The map is later used (see _get_device_for_param) to place optimize
    ops and accumulators on the same device as the forward computation.
    """
    for param_and_grad in parameters_and_grads:
        if param_and_grad[0].trainable is True:
            param_name = param_and_grad[0].name
            ops = target_block.ops
            device_attr_name = core.op_proto_and_checker_maker.kOpDeviceAttrName(
            )

            for op in ops:
                input_arg_names = op.input_arg_names
                if param_name in input_arg_names:
                    # First consumer wins; stop scanning this block.
                    self._param_device_map[param_name] = op.attr(
                        device_attr_name)
                    break
def _get_device_for_param(self, param_name):
    """Return the device recorded for *param_name*, or None if unknown."""
    return self._param_device_map.get(param_name, None)
def _create_optimization_pass(self, parameters_and_grads):
    """Add optimization operators to update gradients to variables.

    Args:
      parameters_and_grads(list(tuple(Variable, Variable))):
        a list of (variable, gradient) pair to update.

    Returns:
      return_op_list: a list of operators that will complete one step of
        optimization. This will include parameter update ops, global step
        update ops and any other custom ops required by subclasses to manage
        their internal state.
    """
    # This is a default implementation of create_optimization_pass that
    # can be shared by most optimizers. This implementation assumes that
    # the subclass will implement the _append_optimize_op method and the
    # _initialize_tensors method. The subclass can extend the
    # _create_accumulators method if it needs to create accumulators
    # for parameters and extend _finish_update method to add custom ops.

    # Allways called under program_guard use global block as loss block
    # But if current block is in control flow, append optimize op in the
    # grad block of current block

    global_block = framework.default_main_program().global_block()
    target_block = global_block
    current_block = framework.default_main_program().current_block()
    if current_block.idx != global_block.idx:
        assert current_block.backward_block_idx != -1, \
            "current block is not global_block, but it doesn't have backward block."
        target_block = framework.default_main_program().blocks[
            current_block.backward_block_idx]

    start = len(target_block.ops)

    self._update_param_device_map(parameters_and_grads, target_block)
    self._create_accumulators(
        target_block,
        [p[0] for p in parameters_and_grads if p[0].trainable])
    self._create_global_learning_rate()

    if framework.in_dygraph_mode():
        # Dygraph: apply updates eagerly; no name/device scoping needed.
        for param_and_grad in parameters_and_grads:
            if param_and_grad[1] is None:
                continue
            if param_and_grad[0].trainable is True:
                self._append_optimize_op(target_block, param_and_grad)
    else:
        for param_and_grad in parameters_and_grads:
            if param_and_grad[1] is None:
                continue
            with param_and_grad[0].block.program._optimized_guard(
                    param_and_grad), name_scope("optimizer"):
                if param_and_grad[0].trainable is True:
                    # Place the update op on the same device as the op
                    # that consumes the parameter (see device map above).
                    device = self._get_device_for_param(param_and_grad[0]
                                                        .name)
                    with device_guard(device):
                        optimize_op = self._append_optimize_op(
                            target_block, param_and_grad)

    # Get custom finish ops for subclasses
    # FIXME: Need to fix this once we figure out how to handle dependencies
    self._finish_update(target_block, parameters_and_grads)

    end = len(target_block.ops)
    return target_block._slice_ops(start, end)
def _process_distribute_lookuptable(self, param_grads):
    """
    Because distribute lookup table only support SGD optimizer for now, not support
    other optimizer and regularization, so we should find the table parameter out,
    and avoid to add regularization and other op for it, and add sgd optimize op
    for it independently.

    :param param_grads(list((Var, Var))): list of (param, grad) pair.
    :param loss: the loss variable.
    :param startup_program: the startup program

    Returns a tuple of (remaining (param, grad) pairs, the
    (table_param, table_grad) pair or (None, None), and the created
    sgd op or None).
    """
    program = framework.default_main_program()
    global_block = framework.default_main_program().global_block()
    table_name = find_distributed_lookup_table(program)
    table_param = None
    table_grad = None
    new_param_grads = []
    # Split the distributed lookup-table parameter (if any) from the rest.
    for p, g in param_grads:
        if p.name == table_name:
            if table_param is not None:
                raise RuntimeError(
                    "multi dist table var found, only support one now!")
            table_param = p
            table_grad = g
        else:
            new_param_grads.append((p, g))
    sgd_op = None
    if table_param is not None:
        param_and_grad = [table_param, table_grad]
        with table_param.block.program._optimized_guard(param_and_grad), \
                framework.name_scope("optimizer"):
            self._create_global_learning_rate()
            # create the optimize op
            sgd_op = global_block.append_op(
                type='sgd',
                inputs={
                    "Param": table_param,
                    "Grad": table_grad,
                    "LearningRate": self._create_param_lr(param_and_grad)
                },
                outputs={"ParamOut": param_and_grad[0]})
    return new_param_grads, (table_param, table_grad), sgd_op
def backward(self,
             loss,
             startup_program=None,
             parameter_list=None,
             no_grad_set=None,
             callbacks=None):
    """
    The first part of ``minimize``, do auto-diff to append backward operations for
    the current program.

    Args:
        loss (Variable): ``loss`` variable to run optimizations.
        startup_program (Program, optional): :ref:`api_fluid_Program` for
            initializing parameters in ``parameter_list``. The default value
            is None, at this time :ref:`api_fluid_default_startup_program` will be used.
        parameter_list (Iterable, optional): Iterable of ``Variable`` or ``Variable.name`` to update
            to minimize ``loss``. The default value is None, at this time all parameters
            will be updated.
        no_grad_set (set, optional): Set of ``Variable`` or ``Variable.name`` that don't need
            to be updated. The default value is None.
        callbacks (list, optional): list of callable objects to run when appending backward
            operator for one parameter. The default value is None.

    Return:
        list: list of (param, grad) variable pairs, param is ``Parameter``,
            grad is the gradient value corresponding to the parameter.

    Examples:
        See examples in ``apply_gradients``.
    """
    act_no_grad_set = None
    if framework.in_dygraph_mode():
        # Dygraph autograd does not consume no_grad_set here.
        pass
    else:
        act_no_grad_set = self._get_no_grad_set(loss, no_grad_set)

    # Infer dtype by loss if None
    if self._dtype is None:
        self._dtype = loss.dtype

    if framework.in_dygraph_mode():
        parameter_list = parameter_list if parameter_list \
            else self._parameter_list

        # Gradients were already computed by dygraph autograd; just
        # collect the (param, grad) pairs that have gradients.
        params_grads = []
        for param in parameter_list:
            if not param.trainable:
                continue
            if param._grad_ivar() is not None:
                # create gradient variable
                grad_var = param._grad_ivar()
                params_grads.append((param, grad_var))
    else:
        if callbacks is None:
            callbacks = [error_clip_callback]
        else:
            assert (isinstance(callbacks, list))
        program = loss.block.program
        assert len(loss.shape) == 1 and loss.shape[0] == 1, \
            "The loss.shape should be (1L,), but the current loss.shape is {}. " \
            "Maybe that you should call fluid.layers.mean to process the current loss.".format(
                loss.shape)
        parameter_list = parameter_list if parameter_list \
            else self._parameter_list
        with program_guard(program, startup_program):
            params_grads = append_backward(loss, parameter_list,
                                           act_no_grad_set, callbacks)
    return params_grads
def _create_regularization_of_grad(self, param, grad, regularization=None):
    """ Create and add backward regularization Operators

    Function helper of append_regularization_ops.

    Returns the gradient unchanged when there is nothing to regularize;
    otherwise returns grad + regularization_term. The per-parameter
    regularizer takes precedence over the global one.
    """
    # If no gradient or no regularization is specified, then we don't need to do anything
    if grad is None or ((not hasattr(param, 'regularizer') or
                         (hasattr(param, 'regularizer') and
                          param.regularizer is None)) and
                        regularization is None):
        return grad
    regularization_term = None
    if hasattr(param, 'regularizer') and param.regularizer is not None:
        # Add variable for regularization term in grad block
        regularization_term = param.regularizer(param, grad, grad.block)
    elif regularization is not None:
        regularization_term = regularization(param, grad, grad.block)

    assert regularization_term is not None

    if framework.in_dygraph_mode():
        # Dygraph: compute the sum eagerly, no op is appended.
        return _C_ops.sum([grad, regularization_term])

    new_grad = grad
    if grad.type == core.VarDesc.VarType.SELECTED_ROWS:
        # FIXME(zcd): If the grad is SELECTED_ROWS, after regularization,
        # the grad's type and name will be changed. But the gradient's name
        # is used in ParallelExecutor Reduce mode, so I add a flag for
        # the new_grad here.
        new_grad = grad.block.create_var(
            name=grad.name + core.kNewGradSuffix(),
            dtype=param.dtype,
            shape=param.shape,
            lod_level=param.lod_level,
            type=core.VarDesc.VarType.LOD_TENSOR)

    inputs = {"X": [grad, regularization_term]}
    outputs = {"Out": [new_grad]}
    grad.block.append_op(type='sum', inputs=inputs, outputs=outputs)

    return new_grad
def append_regularization_ops(self,
                              parameters_and_grads,
                              regularization=None):
    r"""Create and add backward regularization Operators

    Creates and adds backward regularization operators in the BlockDesc.
    This will add gradients of the regularizer function to the gradients
    of the parameters and return these modified gradients. This is the
    same as implementing weight decay in optimizers for regularization.

    Args:
        parameters_and_grads: A list of (parameters, gradients) pairs
                              that need to be regularized.
        regularization: A global regularizer. If the parameter is not
                        set. It will be applied with regularizer.

    Returns:
        list[(Variable, Variable)]: list of (parameters, gradients) \
        pair with the regularized gradient

    Raises:
        Exception: Unknown regularization type
    """
    params_and_grads = []
    if framework.in_dygraph_mode():
        for param, grad in parameters_and_grads:
            new_grad = self._create_regularization_of_grad(param, grad,
                                                           regularization)
            params_and_grads.append((param, new_grad))
    else:
        repeate_regularizer = False
        with framework.name_scope('regularization'):
            for param, grad in parameters_and_grads:
                # Warn only once when both a per-param regularizer and a
                # global regularizer are present (per-param wins).
                if not repeate_regularizer and getattr(
                        param, 'regularizer',
                        None) is not None and regularization is not None:
                    repeate_regularizer = True
                    logging.info(
                        "If regularizer of a Parameter has been set by 'fluid.ParamAttr' or 'fluid.WeightNormParamAttr' already. "
                        "The Regularization[%s] in Optimizer will not take effect, and it will only be applied to other Parameters!"
                        % regularization.__str__())
                with param.block.program._optimized_guard([param, grad]):
                    new_grad = self._create_regularization_of_grad(
                        param, grad, regularization)
                    params_and_grads.append((param, new_grad))
    return params_and_grads
def flatten_param_grads(self, params_grads):
    """Fuse all (param, grad) pairs into a single (flatten_param,
    flatten_grad) pair backed by contiguous storage, via the
    ``coalesce_tensor`` op.

    Returns ``params_grads`` unchanged (and disables flattening) when
    any parameter has ``need_clip == False`` or carries its own
    regularizer, since those need per-parameter treatment.
    """
    need_flatten_params = []
    need_flatten_grads = []
    for p, g in params_grads:
        if g is None:
            continue
        g.persistable = True
        if getattr(p, 'need_clip', True) is False or getattr(
                p, 'regularizer', None) is not None:
            warnings.warn(
                "flatten_param_grads=True will be discarded since paramter '{}''s need_clip is False or "
                "the regularizer is set".format(p.name))
            self._flatten_param_grads = False
            return params_grads

        need_flatten_params.append(p)
        need_flatten_grads.append(g)

    shape = [np.prod(p.shape) for p in need_flatten_params]
    block = need_flatten_params[0].block

    flatten_param = self.helper.create_global_variable(
        name='flatten_param',
        persistable=True,
        dtype=need_flatten_params[0].dtype,
        shape=[np.sum(shape)],
        belong_to_optimizer=True)

    # The fused variable inherits the first parameter's attributes.
    flatten_param.trainable = True
    flatten_param.optimize_attr = need_flatten_params[0].optimize_attr
    flatten_param.regularizer = need_flatten_params[0].regularizer

    flatten_grad = self.helper.create_global_variable(
        name='flatten_grad',
        persistable=True,
        dtype=need_flatten_grads[0].dtype,
        shape=[np.sum(shape)],
        belong_to_optimizer=True)

    with program_guard(default_main_program()):
        block.append_op(
            type="coalesce_tensor",
            inputs={"Input": need_flatten_params},
            outputs={
                "Output": need_flatten_params,
                "FusedOutput": flatten_param
            },
            attrs={
                "copy_data": True,
                "use_align": True,
                "align_size": self._align_size,
                "dtype": need_flatten_params[0].dtype
            })

        block.append_op(
            type="coalesce_tensor",
            inputs={"Input": need_flatten_grads},
            outputs={
                "Output": need_flatten_grads,
                "FusedOutput": flatten_grad
            },
            attrs={
                "copy_data": True,
                "use_align": True,
                "align_size": self._align_size,
                "dtype": need_flatten_grads[0].dtype
            })

    #NOTE(zhiqiu): the initializer should be set after coalesce_tensor op,
    # so the shape of flatten_param and flatten_grad will be inferred.
    self.helper.set_variable_initializer(
        flatten_param, initializer=Constant(0.0))
    self.helper.set_variable_initializer(
        flatten_grad, initializer=Constant(0.0))

    return [(flatten_param, flatten_grad)]
def apply_gradients(self, params_grads):
    """
    Second part of `minimize`, appending optimization operators for
    given `params_grads` pairs.

    Args:
        params_grads (list): list of (param, grad) pair to do optimization.

    Returns:
        list: A list of operators appended to the current program.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            loss = network()
            optimizer = fluid.optimizer.SGD(learning_rate=0.1)
            params_grads = optimizer.backward(loss)
            # you may append operations for params_grads here
            # ...
            optimizer.apply_gradients(params_grads)
    """
    # Deterministic ordering so op insertion does not depend on input order.
    params_grads = sorted(params_grads, key=lambda x: x[0].name)

    # NOTE(zhiqiu): currently, only support ClipGradByGlobalNorm and without regularization.
    if self._flatten_param_grads and self.regularization is None:
        # Fix: compare to None with `is`, not `==` (PEP 8 E711).
        if self._grad_clip is None or isinstance(self._grad_clip,
                                                 ClipGradByGlobalNorm):
            params_grads = self.flatten_param_grads(params_grads)

    # 'optimizer(grad_clip)' or 'set_gradient_clip'
    if self._grad_clip is not None:
        params_grads = self._grad_clip(params_grads)
    else:
        params_grads = append_gradient_clip_ops(params_grads)

    # Add regularization if any
    params_grads = self.append_regularization_ops(params_grads,
                                                  self.regularization)

    optimize_ops = self._create_optimization_pass(params_grads)
    return optimize_ops
def apply_optimize(self, loss, startup_program, params_grads):
    """
    Second part of `minimize`, appending optimization operators for
    given `params_grads` pairs.

    Args:
        loss (Variable): loss variable to run optimizations.
        startup_program (Program): startup_program for initializing parameters
            in `parameter_list`.
        params_grads (list): list of (param, grad) pair to do optimization.

    Returns:
        list: A list of operators appended to the current program.
    """
    if framework.in_dygraph_mode():
        # Dygraph: clip, regularize and update directly (apply_gradients
        # is bypassed since there is no static clip-op machinery).
        with program_guard(framework.default_main_program(),
                           framework.default_startup_program()):
            if self._grad_clip is not None:
                params_grads = self._grad_clip(params_grads)
            params_grads = self.append_regularization_ops(
                params_grads, self.regularization)
            optimize_ops = self._create_optimization_pass(params_grads)
    else:
        program = loss.block.program
        with program_guard(program, startup_program):
            optimize_ops = self.apply_gradients(params_grads)
    return optimize_ops
def _get_no_grad_set(self, loss, no_grad_set=None):
    """Normalize *no_grad_set* to variable names and add every
    non-trainable parameter of the loss's program."""
    names = _get_no_grad_set_name(no_grad_set)
    all_params = loss.block.program.global_block().all_parameters()
    # If the parameter is no trainable, it should not have a gradient.
    frozen = {p.name for p in all_params if p.trainable is False}
    names.update(frozen)
    return names
@framework.dygraph_only
def clear_gradients(self):
    """
    Clear the gradients of all optimized parameters for model.

    If not, new gradient will accumulat on previous gradient.

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import numpy as np

            with fluid.dygraph.guard():
                value = np.arange(26).reshape(2, 13).astype("float32")
                a = fluid.dygraph.to_variable(value)
                linear = fluid.Linear(13, 5, dtype="float32")
                # This can be any optimizer supported by dygraph.
                adam = fluid.optimizer.Adam(learning_rate = 0.01,
                                            parameter_list = linear.parameters())
                out = linear(a)
                out.backward()
                adam.minimize(out)
                adam.clear_gradients()
    """
    # Only trainable parameters carry gradients worth clearing.
    for param in self._parameter_list:
        if not param.trainable:
            continue
        param.clear_gradient()
@imperative_base.no_grad
def minimize(self,
             loss,
             startup_program=None,
             parameter_list=None,
             no_grad_set=None):
    """
    Add operations to minimize ``loss`` by updating ``parameter_list``.

    Args:
        loss (Variable): A ``Variable`` containing the value to minimize.
        startup_program (Program, optional): :ref:`api_fluid_Program` for
            initializing parameters in ``parameter_list``. The default value
            is None, at this time :ref:`api_fluid_default_startup_program` will be used.
        parameter_list (Iterable, optional): Iterable of ``Variable`` or ``Variable.name`` to update
            to minimize ``loss``. The default value is None, at this time all parameters
            will be updated.
        no_grad_set (set, optional): Set of ``Variable`` or ``Variable.name`` that don't need
            to be updated. The default value is None.

    Returns:
        tuple: tuple (optimize_ops, params_grads), A list of operators appended
            by minimize and a list of (param, grad) variable pairs, param is
            ``Parameter``, grad is the gradient value corresponding to the parameter.
            The returned tuple can be passed to ``fetch_list`` in ``Executor.run()`` to
            indicate program pruning. If so, the program will be pruned by ``feed`` and
            ``fetch_list`` before run, see details in ``Executor``.

    Examples:
        Please refer to the example of current Optimizer.
    """
    assert isinstance(loss, Variable), "The loss should be an Variable."

    parameter_list = parameter_list if parameter_list \
        else self._parameter_list

    # Phase 1: auto-diff, producing (param, grad) pairs.
    params_grads = self.backward(
        loss,
        startup_program=startup_program,
        parameter_list=parameter_list,
        no_grad_set=no_grad_set)

    # Phase 2: clip/regularize and append the update ops.
    optimize_ops = self.apply_optimize(
        loss, startup_program=startup_program, params_grads=params_grads)

    return optimize_ops, params_grads
class SGDOptimizer(Optimizer):
    r"""
    Optimizer of the stochastic gradient descent algorithm.

    .. math::

        param\_out = param - learning\_rate * grad

    Parameters:
        learning_rate (float|Variable): The learning rate used to update parameters. \
            Can be a float value or a Variable with one float value as data element.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
            :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient cliping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three cliping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): This parameter is used by developers to print debugging information. \
            For details, please refer to :ref:`api_guide_Name`. Default is None.

    Examples:
        .. code-block:: python

            import paddle
            import paddle.fluid as fluid
            import numpy as np

            place = fluid.CPUPlace()
            main = fluid.Program()
            with fluid.program_guard(main):
                x = fluid.layers.data(name='x', shape=[13], dtype='float32')
                y = fluid.layers.data(name='y', shape=[1], dtype='float32')
                y_predict = fluid.layers.fc(input=x, size=1, act=None)
                cost = fluid.layers.square_error_cost(input=y_predict, label=y)
                avg_cost = fluid.layers.mean(cost)

                sgd_optimizer = fluid.optimizer.SGD(learning_rate=0.001)
                sgd_optimizer.minimize(avg_cost)

                fetch_list = [avg_cost]
                train_reader = paddle.batch(
                    paddle.dataset.uci_housing.train(), batch_size=1)
                feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
                exe = fluid.Executor(place)
                exe.run(fluid.default_startup_program())
                for data in train_reader():
                    exe.run(main, feed=feeder.feed(data), fetch_list=fetch_list)
    """

    def __init__(self,
                 learning_rate,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        # A learning rate is mandatory for SGD.
        assert learning_rate is not None
        super(SGDOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "sgd"

    @no_grad
    def _append_optimize_op(self, block, param_and_grad):
        """Emit one SGD update (param <- param - lr * grad) for the pair."""
        param = param_and_grad[0]
        grad = param_and_grad[1]
        lr = self._create_param_lr(param_and_grad)

        if framework.in_dygraph_mode():
            # Eager mode: update the parameter in place; no op is recorded.
            _C_ops.sgd(param, lr, grad, param)
            return None

        assert isinstance(block, framework.Block)
        # Static graph: append a single in-place `sgd` op.
        return block.append_op(
            type=self.type,
            inputs={
                "Param": param,
                "Grad": grad,
                "LearningRate": lr
            },
            outputs={"ParamOut": param},
            stop_gradient=True)
class MomentumOptimizer(Optimizer):
    r"""
    Simple Momentum optimizer with velocity state

    This optimizer has a flag for Nesterov Momentum.

    The update equations are as follows:

    .. math::

        & velocity = mu * velocity + gradient

        & if (use\_nesterov):

        &\quad param = param - (gradient + mu * velocity) * learning\_rate

        & else:

        &\quad param = param - learning\_rate * velocity

    Parameters:
        learning_rate (float|Variable): The learning rate used to update parameters. \
            Can be a float value or a Variable with one float value as data element.
        momentum (float): Momentum factor
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        use_nesterov (bool, optional): Enables Nesterov momentum, default is false.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
            :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three clipping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): This parameter is used by developers to print debugging information. \
            For details, please refer to :ref:`api_guide_Name`. Default is None.

    Examples:
        .. code-block:: python

            import paddle
            import paddle.fluid as fluid
            import numpy as np

            place = fluid.CPUPlace()
            main = fluid.Program()
            with fluid.program_guard(main):
                x = fluid.layers.data(name='x', shape=[13], dtype='float32')
                y = fluid.layers.data(name='y', shape=[1], dtype='float32')
                y_predict = fluid.layers.fc(input=x, size=1, act=None)
                cost = fluid.layers.square_error_cost(input=y_predict, label=y)
                avg_cost = fluid.layers.mean(cost)

                moment_optimizer = fluid.optimizer.MomentumOptimizer(learning_rate=0.001, momentum=0.9)
                moment_optimizer.minimize(avg_cost)

                fetch_list = [avg_cost]
                train_reader = paddle.batch(
                    paddle.dataset.uci_housing.train(), batch_size=1)
                feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
                exe = fluid.Executor(place)
                exe.run(fluid.default_startup_program())
                for data in train_reader():
                    exe.run(main, feed=feeder.feed(data), fetch_list=fetch_list)
    """
    # Name under which the per-parameter velocity accumulator is registered.
    _velocity_acc_str = "velocity"

    def __init__(self,
                 learning_rate,
                 momentum,
                 parameter_list=None,
                 use_nesterov=False,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        assert learning_rate is not None
        assert momentum is not None
        super(MomentumOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "momentum"
        self._momentum = momentum
        self._use_nesterov = bool(use_nesterov)

    def _create_accumulators(self, block, parameters):
        """Create one velocity accumulator per parameter being optimized."""
        assert isinstance(block, framework.Block)

        for p in parameters:
            self._add_accumulator(self._velocity_acc_str, p)

    def _append_optimize_op(self, block, param_and_grad):
        """Append (or eagerly run, in dygraph) one momentum update for a parameter."""
        assert isinstance(block, framework.Block)
        velocity_acc = self._get_accumulator(self._velocity_acc_str,
                                             param_and_grad[0])
        lr = self._create_param_lr(param_and_grad)
        if framework.in_dygraph_mode():
            # Eager update: ParamOut/VelocityOut alias the inputs, so the
            # parameter and its velocity are updated in place.
            _, _ = _C_ops.momentum(param_and_grad[0], param_and_grad[1],
                                   velocity_acc, lr, param_and_grad[0],
                                   velocity_acc, 'mu', self._momentum,
                                   'use_nesterov', self._use_nesterov)
            return None

        attrs = {"mu": self._momentum, "use_nesterov": self._use_nesterov}
        inputs = {
            "Param": [param_and_grad[0]],
            "Grad": [param_and_grad[1]],
            "Velocity": [velocity_acc],
            "LearningRate": [lr]
        }

        outputs = {
            "ParamOut": [param_and_grad[0]],
            "VelocityOut": [velocity_acc]
        }
        # create the momentum optimize op
        momentum_op = block.append_op(
            type=self.type,
            inputs=inputs,
            outputs=outputs,
            attrs=attrs,
            stop_gradient=True)

        return momentum_op
class DGCMomentumOptimizer(Optimizer):
    r"""
	:api_attr: Static Graph

    DGC (Deep Gradient Compression) Momentum Optimizer. Original paper is https://arxiv.org/abs/1712.01887

    DGC reduces the communication bandwidth by sending only the important gradients (sparse update):\
        only gradients larger than a threshold are transmitted.

    To avoid losing information, DGC accumulates the rest of the gradients locally.

    Eventually, these gradients become large enough to be transmitted.

    Thus, DGC sends the large gradients immediately but eventually sends all of the gradients over time.

    To ensure no loss of accuracy, DGC employs momentum correction and local gradient clipping on top of the gradient sparsification to maintain model performance.

    DGC also uses momentum factor masking and warmup training to overcome the staleness problem caused by reduced communication.

    This optimizer will do two things:

        1. Compress the gradient by getting the TopK important values from the tensor \
            and use them for allreduce to reduce network bandwidth.

        2. Call momentum to optimize the cost.

    Args:
        learning_rate (float|Variable): The learning rate used to update parameters. \
            It can be a float value or a Variable with one float value as a data element.
        momentum (float): Momentum factor.
        rampup_begin_step (int): The beginning step from which gradient compression is implemented.
        rampup_step (int): Time steps used in sparsity warm-up periods. Default is 1.
            For example, if the sparsity is [0.75, 0.9375, 0.984375, 0.996, 0.999], and the rampup_step is 100, \
                it will use 0.75 at 0~19 steps, and 0.9375 at 20~39 steps, and so on. \
                And when reach sparsity array ends, it will use 0.999 then and after.
        sparsity (list[float], optional): Get top important element from gradient tensor, the ratio is (1 - current sparsity). \
            Default is None, which means [0.999]. For example, if the sparsity is [0.99, 0.999], \
                the top [1%, 0.1%] important element will be transmitted.
        parameter_list (Iterable, optional):  Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        use_nesterov (bool): Enables Nesterov momentum. True means use Nesterov. Default is False.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
             :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipByNorm, optional): Gradient clipping strategy. ``DGCMomentumOptimizer`` only support
            :ref:`api_fluid_clip_GradientClipByNorm` , and if not, it will raise TypeError. Default None,
            meaning there is no gradient clipping.
        name (str, optional): This parameter is used by developers to print debugging information. \
            For details, please refer to :ref:`api_guide_Name`. Default is None.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            optimizer = fluid.optimizer.DGCMomentumOptimizer(
                        learning_rate=0.0001,
                        momentum=0.9,
                        rampup_step=1000,
                        rampup_begin_step=1252,
                        sparsity=[0.999, 0.999])

    """
    # Accumulator names: "u" is the velocity reused by dgc/dgc_momentum ops,
    # "v" is the locally accumulated (not yet transmitted) gradient.
    _u_velocity_acc_str = "_dgc_u_"
    _v_velocity_acc_str = "_dgc_v_"

    def __init__(self,
                 learning_rate,
                 momentum,
                 rampup_begin_step,
                 rampup_step=1,
                 sparsity=None,
                 parameter_list=None,
                 use_nesterov=False,
                 num_trainers=None,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        if framework.in_dygraph_mode():
            raise Exception("In dygraph, don't support DGCMomentumOptimizer.")

        assert core.is_compiled_with_cuda(), \
            "Paddle is not compiled with CUDA. DGC is only support GPU for now."

        assert learning_rate is not None
        assert momentum is not None
        super(DGCMomentumOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "dgc_momentum"
        self._momentum = momentum
        self._use_nesterov = bool(use_nesterov)

        assert rampup_begin_step >= 0, "rampup_begin_step must >= 0"
        self._rampup_begin_step = rampup_begin_step
        self._rampup_step = rampup_step
        # NOTE: use a None sentinel instead of a mutable default argument
        # ([0.999]) so the default list cannot be shared across instances.
        self._sparsity = [0.999] if sparsity is None else sparsity

        self._rampup_begin_step_var = None
        self._global_step_var = None

        self._dgc_clip_norm = None
        if grad_clip is not None:
            if not isinstance(grad_clip, GradientClipByNorm):
                raise TypeError(
                    "The type of grad_clip should be 'GradientClipByNorm', because DGCMomentumOptimizer only support GradientClipByNorm"
                )
            assert isinstance(
                num_trainers, int
            ), "The type of num_trainers should be 'int', but received %s" % type(
                num_trainers)
            assert num_trainers > 0, "The value of num_trainers should be greater than 0!"

            self._num_trainers = num_trainers
            # Scale the clip norm by 1/sqrt(num_trainers) so that the global
            # (all-reduced) gradient norm stays within the requested bound.
            self._dgc_clip_norm = grad_clip.clip_norm * (num_trainers**-0.5)

        self.regular_type, self.regular_coeff = self._get_regularization_param(
            self.regularization)

    def _get_regularization_param(self, regularization):
        """Map a regularizer object to the (type, coeff) attrs the dgc op expects.

        Type encoding: 0 = no regularization, 1 = L1Decay, 2 = L2Decay.
        """
        regular_type = 0
        regular_coeff = 0.0

        if regularization is not None:
            regular_coeff = regularization._regularization_coeff
            from .regularizer import L1Decay, L2Decay
            if isinstance(regularization, L1Decay):
                regular_type = 1
            elif isinstance(regularization, L2Decay):
                regular_type = 2
            else:
                assert False, 'regularization must be None|L1Decay|L2Decay'
        return regular_type, regular_coeff

    def _is_use_dgc(self, param_var, grad_var):
        """Return True if this (param, grad) pair should go through DGC.

        Small tensors (< 16384 elements), selected-rows tensors and non-FP32
        parameters fall back to plain momentum.
        """
        var_numel = abs(reduce(lambda x, y: x * y, param_var.shape))
        if var_numel < 16384 or \
           param_var.type == core.VarDesc.VarType.SELECTED_ROWS  or \
           grad_var.type == core.VarDesc.VarType.SELECTED_ROWS  or  \
               param_var.dtype != core.VarDesc.VarType.FP32 :
            return False
        return True

    def _append_optimize_op(self, block, param_and_grad):
        """Append either a plain momentum op or a dgc_momentum op for one param."""
        assert isinstance(block, framework.Block)
        velocity_acc = self._get_accumulator(self._u_velocity_acc_str,
                                             param_and_grad[0])
        assert velocity_acc is not None

        inputs = {
            "Param": param_and_grad[0],
            "Grad": param_and_grad[1],
            "Velocity": velocity_acc,
            "LearningRate": self._create_param_lr(param_and_grad),
        }
        outputs = {
            "ParamOut": param_and_grad[0],
            "VelocityOut": velocity_acc,
        }
        attrs = {"mu": self._momentum, "use_nesterov": self._use_nesterov}

        if not self._is_use_dgc(param_and_grad[0], param_and_grad[1]):
            type = "momentum"
        else:
            type = "dgc_momentum"
            inputs.update({
                "current_step": self._global_step_var,
                "nranks": self._nranks_var
            })
            outputs.update({'Grad_out': param_and_grad[1]})
            attrs.update({"rampup_begin_step": float(self._rampup_begin_step)})

        # create the dgc momentum optimize op
        dgc_momentum_op = block.append_op(
            type=type,
            inputs=inputs,
            outputs=outputs,
            attrs=attrs,
            stop_gradient=True)
        return dgc_momentum_op

    def _add_auto_increment_var(self, counter_name, begin, step=1):
        """Create (or fetch) a persistable counter that is incremented by
        `step` at the very beginning of every iteration."""
        helper = LayerHelper('global_step_counter')
        counter, is_new_var = helper.create_or_get_global_variable(
            name=counter_name, dtype='float32', shape=[1], persistable=True)
        if is_new_var:
            helper.set_variable_initializer(
                counter,
                initializer=Constant(
                    value=float(begin - 1), force_cpu=True))
            helper.main_program.global_block()._prepend_op(
                type='increment',
                inputs={'X': [counter]},
                outputs={'Out': [counter]},
                attrs={'step': float(step)},
                stop_gradient=True)
            counter.stop_gradient = True

        return counter

    def _add_nranks_var(self, name, value=-1):
        """Create (or fetch) a persistable scalar holding the trainer count."""
        helper = LayerHelper('global_step_counter')
        counter, is_new_var = helper.create_or_get_global_variable(
            name=name, dtype='float32', shape=[1], persistable=True)
        if is_new_var:
            helper.set_variable_initializer(
                counter,
                initializer=Constant(
                    value=float(value), force_cpu=True))
            counter.stop_gradient = True

        return counter

    def _append_dgc_ops(self, param_and_grads):
        """Insert the DGC compression ops (and their state vars) for every
        eligible (param, grad) pair in the main program."""
        main_program = default_main_program()
        main_program._enable_dgc = True

        # step counter
        self._global_step_var = self._add_auto_increment_var(
            counter_name=core.dgc.kDGCCounterName(), begin=0)

        self._nranks_var = self._add_nranks_var(
            name=core.dgc.kDGCNRanksName(), value=-1)

        # rampup begin step var for all_reduce_op_handle
        self._rampup_begin_step_var = tensor.create_global_var(
            shape=[1],
            dtype=core.VarDesc.VarType.FP32,
            persistable=True,
            name=core.dgc.kDGCRampUpBeginStepName(),
            value=self._rampup_begin_step * 1.0,
            force_cpu=True)

        self.helper = LayerHelper(self.__class__.__name__)

        for param_var, grad_var in param_and_grads:
            # reuse velocity in dgc_op and dgc_momentum_op
            u_var = self._add_accumulator(self._u_velocity_acc_str, param_var)

            if not self._is_use_dgc(param_var, grad_var):
                continue

            v_var = self._add_accumulator(self._v_velocity_acc_str, param_var)

            k_var = tensor.create_global_var(
                shape=[1],
                dtype=param_var.dtype,
                persistable=True,
                name=param_var.name + core.dgc.kDGCKName(),
                value=0.0,
                force_cpu=True)

            encoded_var = tensor.create_global_var(
                shape=[1],
                dtype=param_var.dtype,
                persistable=True,
                name=param_var.name + core.dgc.kDGCEncodedName(),
                value=0.0,
                force_cpu=False)

            gather_var = tensor.create_global_var(
                shape=[1],
                dtype=param_var.dtype,
                persistable=True,
                name=param_var.name + core.dgc.kDGCGatherName(),
                value=0.0,
                force_cpu=False)

            # del back oprolevarname: strip this (param, grad) pair from the
            # backward op's role-var attribute so the default allreduce pass
            # does not handle it (DGC does its own communication).
            op_maker = core.op_proto_and_checker_maker
            for op in main_program.global_block().ops:
                if not self._is_the_backward_op(op):
                    continue

                var_attr = op.all_attrs()[op_maker.kOpRoleVarAttrName()]
                if param_var.name not in var_attr:
                    continue

                var_attr.remove(param_var.name)
                var_attr.remove(grad_var.name)
                if len(var_attr) > 1:
                    op._set_attr(op_maker.kOpRoleVarAttrName(), var_attr)
                else:
                    op._remove_attr(op_maker.kOpRoleVarAttrName())

            clip_var = grad_var
            if self._dgc_clip_norm is not None:
                clip_var = self._append_clip_norm(grad_var, self._dgc_clip_norm)
            self._dgc_op(param_var, clip_var, grad_var, u_var, v_var, k_var,
                         encoded_var, gather_var)

    def _is_the_backward_op(self, op):
        """Return True if `op` is a backward op carrying a role-var attribute."""
        op_maker = core.op_proto_and_checker_maker
        backward = core.op_proto_and_checker_maker.OpRole.Backward
        if op_maker.kOpRoleVarAttrName() in op.attr_names and \
                int(op.all_attrs()[op_maker.kOpRoleAttrName()]) == int(backward):
            return True
        return False

    def _clip_by_norm(self, x, max_norm, name=None):
        """Append a dgc_clip_by_norm op that clips `x` once the global step
        passes rampup_begin_step, and return the clipped variable."""
        args = {'x': x, 'max_norm': max_norm, 'name': name}

        helper = LayerHelper("dgc_clip_by_norm_op", **args)

        if name is None:
            name = unique_name.generate_with_ignorable_key(".".join(
                [helper.name, 'tmp']))

        out = helper.create_variable(
            type=x.type, name=name, dtype=x.dtype, persistable=False)

        helper.append_op(
            type="dgc_clip_by_norm",
            inputs={"X": x,
                    "current_step": self._global_step_var},
            attrs={
                "max_norm": max_norm,
                "rampup_begin_step": float(self._rampup_begin_step)
            },
            outputs={"Out": out})
        return out

    def _append_clip_norm(self, grad_var, clip_norm):
        with grad_var.block.program._backward_role_guard():
            return self._clip_by_norm(
                x=grad_var, max_norm=clip_norm, name=grad_var.name)

    def _dgc_op(self, param_var, clip_var, grad_var, u_var, v_var, k_var,
                encoded_var, gather_var):
        """Append the dgc op that sparsifies the gradient and tag it as a
        backward op owning this (param, grad) pair."""
        block = framework.default_main_program().global_block()
        op_maker = core.op_proto_and_checker_maker

        regular_type = self.regular_type
        regular_coeff = self.regular_coeff
        # The regularizer of the Parameters have higher priority
        if param_var.regularizer is not None:
            regular_type, regular_coeff = self._get_regularization_param(
                param_var.regularizer)

        dgc_op = block.append_op(
            type="dgc",
            inputs={
                "U": u_var,
                "V": v_var,
                "Grad": clip_var,
                "Param": param_var,
                "current_step": self._global_step_var,
                "nranks": self._nranks_var,
            },
            outputs={
                "U_out": u_var,
                "V_out": v_var,
                "EncodeGrad": encoded_var,
                "k": k_var,
                "Grad_out": grad_var,
                "GatherBuff": gather_var,
            },
            attrs={
                "m": self._momentum,
                "sparsity": self._sparsity,
                "use_nesterov": self._use_nesterov,
                "rampup_begin_step": float(self._rampup_begin_step),
                "rampup_step": float(self._rampup_step),
                "regular_coeff": float(regular_coeff),
                "regular_type": int(regular_type),
            },
            stop_gradient=True)

        backward = op_maker.OpRole.Backward
        dgc_op._set_attr(op_maker.kOpRoleAttrName(), backward)
        dgc_op._set_attr(op_maker.kOpRoleVarAttrName(),
                         [param_var.name, grad_var.name])

    @imperative_base.no_grad
    def apply_gradients(self, params_grads):
        """Apply clipping/regularization and the optimize ops, routing DGC
        and non-DGC parameters through their respective paths."""
        # Note: since we can't use all_reduce_op now,
        #  dgc_op should be the last op of one grad.
        # Maybe need a grad allreduce pass.
        self._append_dgc_ops(params_grads)

        params_grads = sorted(params_grads, key=lambda x: x[0].name)
        params_grads, table_param_and_grad, table_optimize_op = \
            self._process_distribute_lookuptable(params_grads)

        not_dgc_params_grads = []
        dgc_params_grads = []
        # DGC clip and regularization in optimizer.backward
        for param, grad in params_grads:
            if not self._is_use_dgc(param, grad):
                not_dgc_params_grads.append((param, grad))
            else:
                dgc_params_grads.append((param, grad))

        # 'optimizer(grad_clip)' or 'set_gradient_clip'
        if self._grad_clip is not None:
            not_dgc_params_grads = self._grad_clip(not_dgc_params_grads)
        else:
            not_dgc_params_grads = append_gradient_clip_ops(
                not_dgc_params_grads)

        not_dgc_params_grads = self.append_regularization_ops(
            not_dgc_params_grads, self.regularization)

        params_grads = not_dgc_params_grads + dgc_params_grads
        params_grads = sorted(params_grads, key=lambda x: x[0].name)

        optimize_ops = self._create_optimization_pass(params_grads)
        if table_optimize_op is not None:
            optimize_ops.append(table_optimize_op)
            params_grads.append(table_param_and_grad)

        return optimize_ops
class LarsMomentumOptimizer(Optimizer):
    r"""
    Momentum optimizer with LARS support

    The update equations are as follows:

    .. math::

        & local\_learning\_rate = learning\_rate * lars\_coeff * \\
        \\frac{||param||}{||gradient|| + lars\_weight\_decay * ||param||}

        & velocity = mu * velocity + local\_learning\_rate * (gradient + lars\_weight\_decay * param + epsilon)

        & param = param - velocity

    Parameters:
        learning_rate (float|Variable): The learning rate used to update parameters. \
            Can be a float value or a Variable with one float value as data element. \
            momentum (float): momentum factor
        lars_coeff (float): Defines how much we trust the layer to change its weights.
        lars_weight_decay (float): Weight decay coefficient for decaying using LARS.
        parameter_list (Iterable, optional):  Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
             :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three clipping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): This parameter is used by developers to print debugging information. \
            For details, please refer to :ref:`api_guide_Name`. Default is None.
        exclude_from_weight_decay (list[str], optional): Name string of layers which will be exclude from lars weight decay. Default is None.
        epsilon (float, optional): Epsilon to avoid Division by Zero when calculate local lr. Default is 0.
        multi_precision (bool, optional): Whether to use multi-precision during weight updating.
        rescale_grad (float, optional): Multiply the gradient with `rescale_grad` \
            before updating. Often choose to be `1.0/batch_size`.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import numpy as np

            np_inp = np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)
            inp = fluid.layers.data(
                name="inp", shape=[2, 2], append_batch_size=False)
            out = fluid.layers.fc(inp, size=3)
            out = fluid.layers.reduce_sum(out)
            optimizer = fluid.optimizer.LarsMomentumOptimizer(learning_rate=0.001, momentum=0.9)
            optimizer.minimize(out)

            exe = fluid.Executor(fluid.CPUPlace())
            exe.run(fluid.default_startup_program())
            exe.run(
                feed={"inp": np_inp},
                fetch_list=[out.name])
    """
    # Name under which the per-parameter velocity accumulator is registered.
    _velocity_acc_str = "velocity"

    def __init__(self,
                 learning_rate,
                 momentum,
                 lars_coeff=0.001,
                 lars_weight_decay=0.0005,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None,
                 exclude_from_weight_decay=None,
                 epsilon=0,
                 multi_precision=False,
                 rescale_grad=1.0):
        assert learning_rate is not None
        assert momentum is not None
        super(LarsMomentumOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "lars_momentum"
        self._momentum = momentum
        self._lars_coeff = float(lars_coeff)
        self._lars_weight_decay = float(lars_weight_decay)
        self._epsilon = float(epsilon)
        if exclude_from_weight_decay is None:
            self._exclude_from_weight_decay = []
        else:
            self._exclude_from_weight_decay = exclude_from_weight_decay
        self._multi_precision = multi_precision
        self._rescale_grad = float(rescale_grad)
        # Maps FP16 parameter name -> its FP32 master copy (multi-precision).
        self._master_weights = {}

    def _create_master_weight(self, param):
        """Create an FP32 master copy of an FP16 parameter and register it."""
        assert isinstance(self.helper, LayerHelper)

        var_name = param.name + '_fp32_master'
        var_name = unique_name.generate(var_name)
        var = layers.create_global_var(
            name=var_name,
            shape=param.shape,
            value=0,
            dtype='float32',
            persistable=True)
        block = self.helper.startup_program.global_block()
        # Initialize the master weight from the FP16 parameter at startup.
        block.append_op(
            type="cast",
            inputs={"X": [param]},
            outputs={"Out": [var]},
            attrs={
                "in_dtype": param.dtype,
                "out_dtype": core.VarDesc.VarType.FP32
            })
        self._master_weights[param.name] = var
        return var

    def _get_accumulator(self, name, param):
        """Utility function to fetch an accumulator for a parameter

        Args:
            name: name of the accumulator
            param: parameter variable for which accumulator is to be fetched

        Returns:
            accumulator variable for the parameter
        """
        if self._name is not None:
            name = self._name + "_" + name
        # For FP16 params in multi-precision mode, the accumulator is keyed
        # by the FP32 master weight instead of the FP16 parameter itself.
        find_master = self._multi_precision and param.dtype == core.VarDesc.VarType.FP16
        target_param = self._master_weights[
            param.name] if find_master else param
        target_name = target_param.name
        if (name not in self._accumulators or
                target_name not in self._accumulators[name]):
            raise Exception("Accumulator {} does not exist for parameter {}".
                            format(name, target_name))
        return self._accumulators[name][target_name]

    def _create_accumulators(self, block, parameters):
        """Create one velocity accumulator per parameter (on the FP32 master
        copy when multi-precision is enabled)."""
        assert isinstance(block, framework.Block)

        for p in parameters:
            if self._multi_precision and p.dtype == core.VarDesc.VarType.FP16:
                master_p = self._create_master_weight(p)
                self._add_accumulator(self._velocity_acc_str, master_p)
                continue
            if p.dtype == core.VarDesc.VarType.FP16 and not self._multi_precision:
                warnings.warn(
                    "Accumulating with FP16 in optimizer can lead to poor accuracy or slow convergence. "
                    "Consider using multi_precision=True option of the Lars optimizer."
                )
            self._add_accumulator(self._velocity_acc_str, p)

    def _append_optimize_op(self, block, param_and_grad):
        """Append one lars_momentum update op for a parameter."""
        assert isinstance(block, framework.Block)
        _lars_weight_decay = self._lars_weight_decay
        param_name = param_and_grad[0].name
        # Disable LARS weight decay for parameters whose name matches any
        # excluded substring (e.g. biases or norm layers).
        if len(self._exclude_from_weight_decay) > 0:
            for name in self._exclude_from_weight_decay:
                if name in param_name:
                    _lars_weight_decay = 0.0
                    break

        velocity_acc = self._get_accumulator(self._velocity_acc_str,
                                             param_and_grad[0])
        lr = self._create_param_lr(param_and_grad)

        find_master = self._multi_precision and param_and_grad[
            0].dtype == core.VarDesc.VarType.FP16
        master_weight = (self._master_weights[param_and_grad[0].name]
                         if find_master else None)

        attrs = {
            "mu": self._momentum,
            "lars_coeff": self._lars_coeff,
            "lars_weight_decay": _lars_weight_decay,
            "multi_precision": find_master,
            "rescale_grad": self._rescale_grad
        }

        inputs = {
            "Param": param_and_grad[0],
            "Grad": param_and_grad[1],
            "Velocity": velocity_acc,
            "LearningRate": lr
        }

        outputs = {"ParamOut": param_and_grad[0], "VelocityOut": velocity_acc}

        if find_master:
            inputs["MasterParam"] = master_weight
            outputs["MasterParamOut"] = master_weight

        # create the momentum optimize op
        momentum_op = block.append_op(
            type=self.type,
            inputs=inputs,
            outputs=outputs,
            attrs=attrs,
            stop_gradient=True)

        return momentum_op
class AdagradOptimizer(Optimizer):
    r"""
    The Adaptive Gradient optimizer (Adagrad for short) can adaptively assign
    different learning rates to individual parameters.

    The parameter ``param_out`` update rule with gradient ``grad``:

    .. math::

        moment\_out &= moment + grad * grad

        param\_out &= param - \\frac{learning\_rate * grad}{\sqrt{moment\_out} + \epsilon}

    Related paper: `Adaptive Subgradient Methods for Online Learning and
    Stochastic Optimization <http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf>`_.

    The original paper does not include ``epsilon``; it is added here (as in
    `Per-parameter adaptive learning rate methods
    <http://cs231n.github.io/neural-networks-3/#ada>`_) for numerical
    stability, guarding against division by zero.

    Args:
        learning_rate (float|Variable): The learning rate used to update ``Parameter``.
            It can be a float value or a ``Variable`` with a float type.
        epsilon (float, optional): A small float value for numerical stability.
            The default value is 1e-06.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
            :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three clipping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): Normally there is no need for user to set this property.
            For more information, please refer to :ref:`api_guide_Name`.
            The default value is None.
        initial_accumulator_value (float, optional): Initial value for moment accumulator.
            The default value is 0.0.

    Examples:
        .. code-block:: python

            import numpy as np
            import paddle.fluid as fluid

            np_inp = np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)
            inp = fluid.data(name="inp", shape=[2, 2])
            out = fluid.layers.fc(inp, size=3)
            out = fluid.layers.reduce_sum(out)
            optimizer = fluid.optimizer.AdagradOptimizer(learning_rate=0.2)
            optimizer.minimize(out)

            exe = fluid.Executor(fluid.CPUPlace())
            exe.run(fluid.default_startup_program())
            exe.run(
                feed={"inp": np_inp},
                fetch_list=[out.name])
    """
    # Name under which the squared-gradient accumulator is registered.
    _moment_acc_str = "moment"

    def __init__(self,
                 learning_rate,
                 epsilon=1.0e-6,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None,
                 initial_accumulator_value=0.0):
        assert learning_rate is not None
        assert epsilon is not None
        super(AdagradOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "adagrad"
        self._epsilon = epsilon
        self.initial_accumulator_value = initial_accumulator_value

    def _create_accumulators(self, block, parameters):
        """Create one moment accumulator (sum of squared grads) per parameter."""
        assert isinstance(block, framework.Block)

        for param in parameters:
            self._add_accumulator(
                self._moment_acc_str,
                param,
                fill_value=self.initial_accumulator_value)

    def _append_optimize_op(self, block, param_and_grad):
        """Append one adagrad update op for a parameter and return it."""
        assert isinstance(block, framework.Block)

        param = param_and_grad[0]
        grad = param_and_grad[1]
        moment_acc = self._get_accumulator(self._moment_acc_str, param)

        op_inputs = {
            "Param": param,
            "Grad": grad,
            "Moment": moment_acc,
            "LearningRate": self._create_param_lr(param_and_grad)
        }
        # ParamOut/MomentOut alias their inputs: the update is in place.
        op_outputs = {"ParamOut": param, "MomentOut": moment_acc}

        return block.append_op(
            type=self.type,
            inputs=op_inputs,
            outputs=op_outputs,
            attrs={"epsilon": self._epsilon},
            stop_gradient=True)
class AdamOptimizer(Optimizer):
r"""
The Adam optimizer uses an optimization described at the end
of section 2 of `Adam paper <https://arxiv.org/abs/1412.6980>`_ ,
it can dynamically adjusts the learning rate of each parameter using
the 1st moment estimates and the 2nd moment estimates of the gradient.
The parameter ``param_out`` update rule with gradient ``grad``:
.. math::
t & = t + 1
moment\_1\_out & = {\\beta}_1 * moment\_1 + (1 - {\\beta}_1) * grad
moment\_2\_out & = {\\beta}_2 * moment\_2 + (1 - {\\beta}_2) * grad * grad
learning\_rate & = learning\_rate * \\
\\frac{\sqrt{1 - {\\beta}_2^t}}{1 - {\\beta}_1^t}
param\_out & = param - learning\_rate * \\frac{moment\_1}{\sqrt{moment\_2} + \epsilon}
Related paper: `Adam: A Method for Stochastic Optimization <https://arxiv.org/abs/1412.6980>`_
Args:
learning_rate (float|Variable, optional): The learning rate used to update ``Parameter``.
It can be a float value or a ``Variable`` with a float type. The default value is 0.001.
beta1 (float|Variable, optional): The exponential decay rate for the 1st moment estimates.
It should be a float number or a Variable with shape [1] and data type as float32.
The default value is 0.9.
beta2 (float|Variable, optional): The exponential decay rate for the 2nd moment estimates.
It should be a float number or a Variable with shape [1] and data type as float32.
The default value is 0.999.
epsilon (float|Tensor, optional): A small float value for numerical stability.
It should be a float number or a Variable with shape [1] and data type as float32.
The default value is 1e-08.
parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
This parameter is required in dygraph mode. \
The default value is None in static mode, at this time all parameters will be updated.
regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
:ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
Default None, meaning there is no regularization.
grad_clip (GradientClipBase, optional): Gradient cliping strategy, it's an instance of
some derived class of ``GradientClipBase`` . There are three cliping strategies
( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
:ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
name (str, optional): Normally there is no need for user to set this property.
For more information, please refer to :ref:`api_guide_Name`.
The default value is None.
lazy_mode (bool, optional): The official Adam algorithm has two moving-average accumulators.
The accumulators are updated at every step. Every element of the two moving-average
is updated in both dense mode and sparse mode. If the size of parameter is very large,
then the update may be very slow. The lazy mode only update the element that has
gradient in current mini-batch, so it will be much more faster. But this mode has
different semantics with the original Adam algorithm and may lead to different result.
The default value is False.
use_global_beta_pow (bool, optional): Whether to use global beta_pow. If true, Adam will use global beta_pow
for whole model instead of creating beta_pow for each parameter. Default is false.
flatten_param_grads (bool, optional): Whether to flatten all parameters and gradients. Default is false.
align_size (int, optional): The alignment size when flatten parameters and gradients. Default is -1, which means
use same align_size as allocator.
Examples:
.. code-block:: python
import paddle
import paddle.fluid as fluid
place = fluid.CPUPlace()
main = fluid.Program()
with fluid.program_guard(main):
x = fluid.data(name='x', shape=[None, 13], dtype='float32')
y = fluid.data(name='y', shape=[None, 1], dtype='float32')
y_predict = fluid.layers.fc(input=x, size=1, act=None)
cost = fluid.layers.square_error_cost(input=y_predict, label=y)
avg_cost = fluid.layers.mean(cost)
adam_optimizer = fluid.optimizer.AdamOptimizer(0.01)
adam_optimizer.minimize(avg_cost)
fetch_list = [avg_cost]
train_reader = paddle.batch(
paddle.dataset.uci_housing.train(), batch_size=1)
feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
for data in train_reader():
exe.run(main, feed=feeder.feed(data), fetch_list=fetch_list)
.. code-block:: python
# Adam with beta1/beta2 as Variable
import paddle
import paddle.fluid as fluid
import paddle.fluid.layers.learning_rate_scheduler as lr_scheduler
place = fluid.CPUPlace()
main = fluid.Program()
with fluid.program_guard(main):
x = fluid.data(name='x', shape=[None, 13], dtype='float32')
y = fluid.data(name='y', shape=[None, 1], dtype='float32')
y_predict = fluid.layers.fc(input=x, size=1, act=None)
cost = fluid.layers.square_error_cost(input=y_predict, label=y)
avg_cost = fluid.layers.mean(cost)
# define beta decay variable
def get_decayed_betas(beta1_init, beta2_init, decay_steps, decay_rate, epsilon_init):
global_step = lr_scheduler._decay_step_counter()
beta1 = fluid.layers.create_global_var(
shape=[1],
value=float(beta1_init),
dtype='float32',
# set persistable for save checkpoints and resume
persistable=True,
name="beta1")
beta2 = fluid.layers.create_global_var(
shape=[1],
value=float(beta2_init),
dtype='float32',
# set persistable for save checkpoints and resume
persistable=True,
name="beta2")
epsilon = fluid.layers.create_global_var(
shape=[1],
value=float(epsilon_init),
dtype='float32',
# set persistable for save checkpoints and resume
persistable=True,
name="epsilon")
div_res = global_step / decay_steps
decayed_beta1 = beta1_init * (decay_rate**div_res)
decayed_beta2 = beta2_init * (decay_rate**div_res)
fluid.layers.assign(decayed_beta1, beta1)
fluid.layers.assign(decayed_beta2, beta2)
return beta1, beta2, epsilon
beta1, beta2, epsilon = get_decayed_betas(0.9, 0.99, 1e5, 0.9, 1e-8)
adam_optimizer = fluid.optimizer.AdamOptimizer(
learning_rate=0.01,
beta1=beta1,
beta2=beta2,
epsilon=epsilon)
adam_optimizer.minimize(avg_cost)
fetch_list = [avg_cost]
train_reader = paddle.batch(
paddle.dataset.uci_housing.train(), batch_size=1)
feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
for data in train_reader():
exe.run(main, feed=feeder.feed(data), fetch_list=fetch_list)
"""
# Names of the accumulators this optimizer creates for each parameter
# (or globally, for the beta-pow pair, when use_global_beta_pow=True).
_moment1_acc_str = "moment1"  # 1st-moment (mean of gradients) accumulator
_moment2_acc_str = "moment2"  # 2nd-moment (uncentered variance) accumulator
_beta1_pow_acc_str = "beta1_pow_acc"  # running product beta1^t
_beta2_pow_acc_str = "beta2_pow_acc"  # running product beta2^t
def __init__(self,
             learning_rate=0.001,
             beta1=0.9,
             beta2=0.999,
             epsilon=1e-8,
             parameter_list=None,
             regularization=None,
             grad_clip=None,
             name=None,
             lazy_mode=False,
             use_global_beta_pow=False,
             flatten_param_grads=False,
             align_size=-1):
    """Construct an Adam optimizer; see the class docstring for argument details."""
    # The four core hyper-parameters are mandatory — fail fast if any is missing.
    for hyper_param in (learning_rate, beta1, beta2, epsilon):
        assert hyper_param is not None
    super(AdamOptimizer, self).__init__(
        learning_rate=learning_rate,
        parameter_list=parameter_list,
        regularization=regularization,
        grad_clip=grad_clip,
        flatten_param_grads=flatten_param_grads,
        align_size=align_size,
        name=name)
    self.type = "adam"
    self._beta1 = beta1
    self._beta2 = beta2
    self._epsilon = epsilon
    self._lazy_mode = lazy_mode
    self._use_global_beta_pow = use_global_beta_pow
def _create_accumulators(self, block, parameters):
    """Create the moment and beta-pow accumulators used by the adam op."""
    assert isinstance(block, framework.Block)

    # When beta1/beta2 are Variables the accumulators are seeded with the
    # conventional defaults (0.9 / 0.999); otherwise with the given floats.
    beta1_fill = 0.9 if isinstance(self._beta1, Variable) else self._beta1
    beta2_fill = 0.999 if isinstance(self._beta2, Variable) else self._beta2

    # Per-parameter first/second moment estimates; beta-pow accumulators are
    # also per-parameter unless the global beta-pow mode is enabled.
    for param in parameters:
        self._add_accumulator(self._moment1_acc_str, param)
        self._add_accumulator(self._moment2_acc_str, param)
        if not self._use_global_beta_pow:
            self._add_accumulator(
                name=self._beta1_pow_acc_str,
                param=param,
                fill_value=beta1_fill,
                shape=[1],
                type=core.VarDesc.VarType.LOD_TENSOR,
                device='cpu')
            self._add_accumulator(
                name=self._beta2_pow_acc_str,
                param=param,
                fill_value=beta2_fill,
                shape=[1],
                type=core.VarDesc.VarType.LOD_TENSOR,
                device='cpu')

    if self._use_global_beta_pow:
        # A single beta-pow pair shared by the whole model.
        self._add_global_accumulator(
            name=self._beta1_pow_acc_str,
            fill_value=beta1_fill,
            shape=[1],
            type=core.VarDesc.VarType.LOD_TENSOR,
            device='cpu')
        self._add_global_accumulator(
            name=self._beta2_pow_acc_str,
            fill_value=beta2_fill,
            shape=[1],
            type=core.VarDesc.VarType.LOD_TENSOR,
            device='cpu')
def _append_optimize_op(self, block, param_and_grad):
    """Emit the adam update for a single (parameter, gradient) pair.

    In dygraph mode the update is executed eagerly through ``_C_ops.adam``
    and ``None`` is returned; in static-graph mode an ``adam`` operator is
    appended to ``block`` and returned.
    """
    assert isinstance(block, framework.Block)
    moment1 = self._get_accumulator(self._moment1_acc_str,
                                    param_and_grad[0])
    moment2 = self._get_accumulator(self._moment2_acc_str,
                                    param_and_grad[0])
    # The beta-pow accumulators live either globally (shared by the whole
    # model) or per-parameter, depending on how the optimizer was built.
    if self._use_global_beta_pow:
        beta1_pow_acc = self._get_global_accumulator(
            self._beta1_pow_acc_str)
        beta2_pow_acc = self._get_global_accumulator(
            self._beta2_pow_acc_str)
    else:
        beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                              param_and_grad[0])
        beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str,
                                              param_and_grad[0])
    lr = self._create_param_lr(param_and_grad)
    # create the adam optimize op
    if framework.in_dygraph_mode():
        # The eager kernel takes plain floats, so Variable-typed betas are
        # converted to scalars here.
        _beta1 = self._beta1 if not isinstance(
            self._beta1, Variable) else self._beta1.numpy().item(0)
        _beta2 = self._beta2 if not isinstance(
            self._beta2, Variable) else self._beta2.numpy().item(0)
        # Outputs alias the inputs (param/moments/beta-pows are updated
        # in place); the returned handles are not needed.
        _, _, _, _, _ = _C_ops.adam(
            param_and_grad[0], param_and_grad[1], lr, moment1, moment2,
            beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1,
            moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon,
            'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread',
            1000, 'beta1', _beta1, 'beta2', _beta2, 'use_global_beta_pow',
            self._use_global_beta_pow)
        return None
    inputs = {
        "Param": [param_and_grad[0]],
        "Grad": [param_and_grad[1]],
        "LearningRate": [lr],
        "Moment1": [moment1],
        "Moment2": [moment2],
        "Beta1Pow": [beta1_pow_acc],
        "Beta2Pow": [beta2_pow_acc]
    }
    # Pass found_inf to adam, to skip update for not only param, but also momentum and beta_pow
    found_inf = self._get_auxiliary_var('found_inf')
    if found_inf:
        inputs['SkipUpdate'] = found_inf
    outputs = {
        "ParamOut": [param_and_grad[0]],
        "Moment1Out": [moment1],
        "Moment2Out": [moment2],
        "Beta1PowOut": [beta1_pow_acc],
        "Beta2PowOut": [beta2_pow_acc],
    }
    attrs = {
        "lazy_mode": self._lazy_mode,
        "min_row_size_to_use_multithread": 1000,
        'use_global_beta_pow': self._use_global_beta_pow
    }
    # beta1/beta2/epsilon are fed as input tensors when they are Variables
    # (so schedules can change them at runtime); otherwise they are baked
    # into the op as literal attributes.
    if isinstance(self._beta1, Variable):
        inputs['Beta1Tensor'] = self._beta1
    else:
        attrs['beta1'] = self._beta1
    if isinstance(self._beta2, Variable):
        inputs['Beta2Tensor'] = self._beta2
    else:
        attrs['beta2'] = self._beta2
    if isinstance(self._epsilon, Variable):
        inputs['EpsilonTensor'] = self._epsilon
    else:
        attrs['epsilon'] = self._epsilon
    adam_op = block.append_op(
        type=self.type,
        inputs=inputs,
        outputs=outputs,
        attrs=attrs,
        stop_gradient=True)

    return adam_op
def _finish_update(self, block, parameters_and_grads):
    r"""Advance the global beta1_pow and beta2_pow accumulators by one step.

    Only does work when ``use_global_beta_pow`` is enabled; otherwise the
    per-parameter beta-pow accumulators are advanced by the adam op itself
    (it emits ``Beta1PowOut``/``Beta2PowOut``).  Each accumulator is
    multiplied in place by its beta: via ``elementwise_mul`` when the beta
    is a Variable, otherwise via a cheaper ``scale`` op with a literal
    scale attribute.

    The original implementation duplicated the same block of code for
    beta1 and beta2; this version folds the duplication into one loop
    with identical op emission order.
    """
    assert isinstance(block, framework.Block)
    if self._use_global_beta_pow:
        beta1_pow_acc = self._get_global_accumulator(
            self._beta1_pow_acc_str)
        beta2_pow_acc = self._get_global_accumulator(
            self._beta2_pow_acc_str)
        with block.program._optimized_guard([]):
            # Same update for both accumulators: pow_acc *= beta.
            for beta, pow_acc in ((self._beta1, beta1_pow_acc),
                                  (self._beta2, beta2_pow_acc)):
                inputs = {"X": pow_acc}
                outputs = {"Out": pow_acc}
                attrs = {}
                if isinstance(beta, Variable):
                    inputs["Y"] = beta
                    # use elementwise_mul for better performance
                    block.append_op(
                        type="elementwise_mul",
                        inputs=inputs,
                        outputs=outputs,
                        attrs=attrs,
                        stop_gradient=True)
                else:
                    attrs['scale'] = beta
                    block.append_op(
                        type="scale",
                        inputs=inputs,
                        outputs=outputs,
                        attrs=attrs,
                        stop_gradient=True)
class AdamaxOptimizer(Optimizer):
    r"""
    The Adamax optimizer is implemented based on the Adamax Optimization
    in Section 7 of `Adam paper <https://arxiv.org/abs/1412.6980>`_.
    The Adamax algorithm is a variant of the Adam algorithm based on the infinite norm,
    which makes the learning rate update algorithm more stable and simple.

    The parameter ``param_out`` update rule with gradient ``grad``:

    .. math::

        t & = t + 1

        moment\_out & = {\\beta}_1 * moment + (1 - {\\beta}_1) * grad

        inf\_norm\_out & = max({\\beta}_2 * inf\_norm + \epsilon, |grad|)

        learning\_rate & = \\frac{learning\_rate}{1 - {\\beta}_1^t}

        param\_out & = param - learning\_rate * \\frac{moment\_out}{inf\_norm\_out}

    Related paper: `Adam: A Method for Stochastic Optimization <https://arxiv.org/abs/1412.6980>`_

    The original paper does not have an ``epsilon`` attribute,
    it is added here for numerical stability to prevent the division by 0 error.

    Args:
        learning_rate (float|Variable, optional): The learning rate used to update ``Parameter``.
            It can be a float value or a ``Variable`` with a float type. The default value is 0.001.
        beta1 (float, optional): The exponential decay rate for the 1st moment estimates.
            The default value is 0.9.
        beta2 (float, optional): The exponential decay rate for the 2nd moment estimates.
            The default value is 0.999.
        epsilon (float, optional): A small float value for numerical stability.
            The default value is 1e-08.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
            :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three clipping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): Normally there is no need for user to set this property.
            For more information, please refer to :ref:`api_guide_Name`.
            The default value is None.

    **Notes**:
        **Currently, AdamaxOptimizer doesn't support sparse parameter optimization.**

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import numpy

            # First create the Executor.
            place = fluid.CPUPlace() # fluid.CUDAPlace(0)
            exe = fluid.Executor(place)

            train_program = fluid.Program()
            startup_program = fluid.Program()
            with fluid.program_guard(train_program, startup_program):
                data = fluid.data(name='X', shape=[None, 1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                adam = fluid.optimizer.AdamaxOptimizer(learning_rate=0.2)
                adam.minimize(loss)

            # Run the startup program once and only once.
            exe.run(startup_program)

            x = numpy.random.random(size=(10, 1)).astype('float32')
            outs = exe.run(program=train_program,
                           feed={'X': x},
                           fetch_list=[loss.name])
    """
    # Names of the per-parameter accumulators managed by this optimizer.
    _moment_acc_str = "moment"
    _inf_norm_acc_str = "inf_norm"
    _beta1_pow_acc_str = "beta1_pow_acc"

    def __init__(self,
                 learning_rate=0.001,
                 beta1=0.9,
                 beta2=0.999,
                 epsilon=1e-8,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        # The core hyper-parameters are mandatory.
        assert learning_rate is not None
        assert beta1 is not None
        assert beta2 is not None
        assert epsilon is not None
        super(AdamaxOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "adamax"
        self._beta1 = beta1
        self._beta2 = beta2
        self._epsilon = epsilon

    def _create_accumulators(self, block, parameters):
        """Create moment, infinity-norm and beta1-power accumulators."""
        # Create accumulator tensors for first moment and infinity norm
        for p in parameters:
            self._add_accumulator(self._moment_acc_str, p)
            self._add_accumulator(self._inf_norm_acc_str, p)
            # beta1^t accumulator, seeded with beta1 (i.e. t starts at 1).
            self._add_accumulator(
                name=self._beta1_pow_acc_str,
                param=p,
                fill_value=self._beta1,
                shape=[1])

    def _append_optimize_op(self, block, param_and_grad):
        """Append a single `adamax` op updating one (param, grad) pair."""
        assert isinstance(block, framework.Block)
        moment = self._get_accumulator(self._moment_acc_str, param_and_grad[0])
        inf_norm = self._get_accumulator(self._inf_norm_acc_str,
                                         param_and_grad[0])
        beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                              param_and_grad[0])
        # create the adamax optimize op
        adamax_op = block.append_op(
            type=self.type,
            inputs={
                "Param": param_and_grad[0],
                "Grad": param_and_grad[1],
                "LearningRate": self._create_param_lr(param_and_grad),
                "Moment": moment,
                "InfNorm": inf_norm,
                "Beta1Pow": beta1_pow_acc
            },
            outputs={
                "ParamOut": param_and_grad[0],
                "MomentOut": moment,
                "InfNormOut": inf_norm
            },
            attrs={
                "beta1": self._beta1,
                "beta2": self._beta2,
                "epsilon": self._epsilon
            },
            stop_gradient=True)

        return adamax_op

    def _finish_update(self, block, parameters_and_grads):
        """Update Beta1 Power accumulator

        The adamax op does not output an updated beta1 power (its outputs
        are ParamOut/MomentOut/InfNormOut only), so after all update ops
        are emitted, each trainable parameter's beta1_pow accumulator is
        scaled by beta1 once per step here.
        """
        assert isinstance(block, framework.Block)
        for param, grad in parameters_and_grads:
            # Skip parameters with no gradient or that are frozen.
            if grad is None or param.trainable is False:
                continue
            # NOTE(review): the scope name 'adamx' (sic) looks like a typo for
            # 'adamax' but is kept as-is — changing it would alter generated
            # op/variable names.
            with param.block.program._optimized_guard(
                [param, grad]), name_scope('adamx'):
                beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                                      param)
                block.append_op(
                    type="scale",
                    inputs={"X": beta1_pow_acc},
                    outputs={"Out": beta1_pow_acc},
                    attrs={"scale": self._beta1},
                    stop_gradient=True)
class DpsgdOptimizer(Optimizer):
    r"""
    We implement the Dpsgd optimizer according to CCS16 paper -
    Deep Learning with Differential Privacy.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import numpy

            # First create the Executor.
            place = fluid.CPUPlace() # fluid.CUDAPlace(0)
            exe = fluid.Executor(place)

            train_program = fluid.Program()
            startup_program = fluid.Program()
            with fluid.program_guard(train_program, startup_program):
                data = fluid.layers.data(name='X', shape=[1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                optimizer = fluid.optimizer.Dpsgd(learning_rate=0.01, clip=10.0, batch_size=16.0, sigma=1.0)
                optimizer.minimize(loss)

            # Run the startup program once and only once.
            exe.run(startup_program)

            x = numpy.random.random(size=(10, 1)).astype('float32')
            outs = exe.run(program=train_program,
                           feed={'X': x},
                           fetch_list=[loss.name])

    Args:
        learning_rate (float|Variable): the learning rate used to update parameters. \
            Can be a float value or a Variable with one float value as data element.
        clip (float): clipping threshold
        batch_size (float): batch size.
        sigma (float): for gaussian noise.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
    Notes:
        Currently, DpsgdOptimizer doesn't support sparse parameter optimization.
    """

    def __init__(self,
                 learning_rate=0.001,
                 clip=0.9,
                 batch_size=0.999,
                 sigma=1e-8,
                 parameter_list=None):
        # All hyper-parameters are mandatory; fail fast on missing values.
        assert learning_rate is not None
        assert clip is not None
        assert batch_size is not None
        assert sigma is not None
        super(DpsgdOptimizer, self).__init__(
            learning_rate=learning_rate, parameter_list=parameter_list)
        self.type = "dpsgd"
        self._clip = clip
        self._batch_size = batch_size
        self._sigma = sigma
        # Note(wangzhongpu):
        # This property is only used for debugging; there is normally no need
        # to set it.  The dpsgd operator uses time(NULL) as its random seed to
        # generate noise, so during debugging self._seed is set to a fixed
        # number to obtain a deterministic result.
        # (The original code expressed this note as a stray triple-quoted
        # string, which was a no-op statement; it is a comment now.)
        self._seed = None

    def _append_optimize_op(self, block, param_and_grad):
        """Append a single `dpsgd` op updating one parameter in place."""
        assert isinstance(block, framework.Block)

        # create the dpsgd optimize op
        # BUGFIX(idiom): compare against None with `is`, not `==` (PEP 8);
        # behavior is unchanged since _seed is only ever None or an int.
        if self._seed is None:
            self._seed = 0

        dpsgd_op = block.append_op(
            type=self.type,
            inputs={
                "Param": param_and_grad[0],
                "Grad": param_and_grad[1],
                "LearningRate": self._create_param_lr(param_and_grad)
            },
            outputs={"ParamOut": param_and_grad[0]},
            attrs={
                "clip": self._clip,
                "batch_size": self._batch_size,
                "sigma": self._sigma,
                "seed": self._seed
            },
            stop_gradient=True)

        return dpsgd_op
class DecayedAdagradOptimizer(Optimizer):
    r"""
    The Decayed Adagrad optimizer can be seen as an Adagrad algorithm that introduces
    the decay rate to solve the problem of a sharp drop in the learning rate
    during model training when using the AdagradOptimizer.

    The parameter ``param_out`` update rule with gradient ``grad``:

    .. math::

        moment\_out & = decay * moment + (1 - decay) * grad * grad

        param\_out & = param - \\frac{learning\_rate * grad}{\sqrt{moment\_out} + \epsilon}

    Related paper: `Adaptive Subgradient Methods for Online Learning and Stochastic
    Optimization <http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf>`_.

    The original paper does not have an ``epsilon`` attribute. It is added here for numerical
    stability to avoid the division by zero error.

    Args:
        learning_rate (float|Variable): The learning rate used to update ``Parameter``.
            It can be a float value or a ``Variable`` with a float type.
        decay (float, optional): The decay rate. The default value is 0.95.
        epsilon (float, optional): A small float value for numerical stability.
            The default value is 1e-06.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
            :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three clipping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): Normally there is no need for user to set this property.
            For more information, please refer to :ref:`api_guide_Name`.
            The default value is None.

    **Notes**:
        **Currently, DecayedAdagradOptimizer doesn't support sparse parameter optimization.**

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            x = fluid.data( name='x', shape=[None, 10], dtype='float32' )
            trans = fluid.layers.fc( x, 100 )
            cost = fluid.layers.reduce_mean( trans )
            optimizer = fluid.optimizer.DecayedAdagradOptimizer(learning_rate=0.2)
            optimizer.minimize(cost)
    """
    # Name of the single per-parameter accumulator (decayed sum of g^2).
    _moment_acc_str = "moment"

    def __init__(self,
                 learning_rate,
                 decay=0.95,
                 epsilon=1.0e-6,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        # All required hyper-parameters must be provided.
        for required in (learning_rate, decay, epsilon):
            assert required is not None
        super(DecayedAdagradOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "decayed_adagrad"
        self._decay = decay
        self._epsilon = epsilon

    def _create_accumulators(self, block, parameters):
        """Allocate one moment accumulator per parameter."""
        assert isinstance(block, framework.Block)
        for param in parameters:
            self._add_accumulator(self._moment_acc_str, param)

    def _append_optimize_op(self, block, param_and_grad):
        """Append the `decayed_adagrad` update op for one parameter."""
        assert isinstance(block, framework.Block)

        param, grad = param_and_grad
        moment_acc = self._get_accumulator(self._moment_acc_str, param)

        # Assemble the op description, then emit it onto the block.
        op_inputs = {
            "Param": param,
            "Grad": grad,
            "Moment": moment_acc,
            "LearningRate": self._create_param_lr(param_and_grad),
        }
        op_outputs = {"ParamOut": param, "MomentOut": moment_acc}
        op_attrs = {"epsilon": self._epsilon, "decay": self._decay}

        return block.append_op(
            type=self.type,
            inputs=op_inputs,
            outputs=op_outputs,
            attrs=op_attrs,
            stop_gradient=True)
class AdadeltaOptimizer(Optimizer):
    r"""
    **Notes: This API does not support sparse parameter optimization.**

    Adadelta Optimizer. Please refer to this for details:
    `ADADELTA: AN ADAPTIVE LEARNING RATE METHOD <https://arxiv.org/abs/1212.5701>`_.

    The update is done as follows:

    .. math::

        E(g_t^2) &= \\rho * E(g_{t-1}^2) + (1-\\rho) * g^2

        learning\_rate &= \sqrt{ ( E(dx_{t-1}^2) + \\epsilon ) / ( E(g_t^2) + \\epsilon ) }

        E(dx_t^2) &= \\rho * E(dx_{t-1}^2) + (1-\\rho) * (-g*learning\_rate)^2

    Args:
        learning_rate (float|Variable): global learning rate.
        epsilon (float): a small float number for numeric stability. Default 1.0e-6.
        rho (float): a floating point value indicating the decay rate. Default 0.95.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
            :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three clipping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): The default value is None. Normally there is no need for user
            to set this property. For more information, please refer to
            :ref:`api_guide_Name` .

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            image = fluid.data(name='image', shape=[None, 28], dtype='float32')
            fc = fluid.layers.fc(image, size=10)
            cost = fluid.layers.reduce_mean(fc)
            optimizer = fluid.optimizer.Adadelta(
                learning_rate=0.0003, epsilon=1.0e-6, rho=0.95)

            # optimizer_ops is a list of optimizer operators to update parameters
            # params_grads is a list of (param, param_grad), where param is each
            # parameter and param_grad is the gradient variable of param.
            optimizer_ops, params_grads = optimizer.minimize(cost)
    """
    # Names of the two per-parameter accumulators: E[g^2] and E[dx^2].
    _avg_squared_grad_acc_str = "_avg_squared_grad"
    _avg_squared_update_acc_str = "_avg_squared_update"

    def __init__(self,
                 learning_rate,
                 epsilon=1.0e-6,
                 rho=0.95,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        # Validate the required hyper-parameters before delegating to the
        # base-class constructor (order of the checks is significant for the
        # error reported when several arguments are None).
        if learning_rate is None:
            raise ValueError("learning_rate is not set.")
        if epsilon is None:
            raise ValueError("epsilon is not set.")
        if rho is None:
            raise ValueError("rho is not set.")
        super(AdadeltaOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        self.type = "adadelta"
        self._epsilon = epsilon
        self._rho = rho

    def _create_accumulators(self, block, parameters):
        """Create the E[g^2] and E[dx^2] accumulators for every parameter."""
        if not isinstance(block, framework.Block):
            raise TypeError("block is not instance of framework.Block.")

        for param in parameters:
            self._add_accumulator(self._avg_squared_grad_acc_str, param)
            self._add_accumulator(self._avg_squared_update_acc_str, param)

    def _append_optimize_op(self, block, param_and_grad):
        """Append the `adadelta` update op for one (param, grad) pair."""
        if not isinstance(block, framework.Block):
            raise TypeError("block is not instance of framework.Block.")

        param, grad = param_and_grad
        avg_squared_grad_acc = self._get_accumulator(
            self._avg_squared_grad_acc_str, param)
        avg_squared_update_acc = self._get_accumulator(
            self._avg_squared_update_acc_str, param)

        # Create the adadelta optimizer op; param and accumulators are
        # updated in place.
        return block.append_op(
            type=self.type,
            inputs={
                "Param": param,
                "Grad": grad,
                "AvgSquaredGrad": avg_squared_grad_acc,
                "AvgSquaredUpdate": avg_squared_update_acc,
            },
            outputs={
                "ParamOut": param,
                "AvgSquaredGradOut": avg_squared_grad_acc,
                "AvgSquaredUpdateOut": avg_squared_update_acc,
            },
            attrs={"epsilon": self._epsilon,
                   "rho": self._rho},
            stop_gradient=True)
class RMSPropOptimizer(Optimizer):
    r"""
    Root Mean Squared Propagation (RMSProp) is an unpublished, adaptive learning
    rate method. The original slides proposed RMSProp: Slide 29 of
    http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf .

    The original equation is as follows:

    .. math::

        r(w, t) & = \\rho r(w, t-1) + (1 - \\rho)(\\nabla Q_{i}(w))^2

        w & = w - \\frac{\\eta} {\\sqrt{r(w,t) + \\epsilon}} \\nabla Q_{i}(w)

    The first equation calculates moving average of the squared gradient for
    each weight. Then dividing the gradient by :math:`sqrt{v(w,t)}`.

    In some cases, adding a momentum term :math:`\\beta` is beneficial.
    In our implementation, Nesterov momentum is used:

    .. math::

        r(w, t) & = \\rho r(w, t-1) + (1 - \\rho)(\\nabla Q_{i}(w))^2

        v(w, t) & = \\beta v(w, t-1) + \\frac{\\eta} {\\sqrt{r(w,t) +
            \\epsilon}} \\nabla Q_{i}(w)

        w & = w - v(w, t)

    if centered is True:

    .. math::

        r(w, t) & = \\rho r(w, t-1) + (1 - \\rho)(\\nabla Q_{i}(w))^2

        g(w, t) & = \\rho g(w, t-1) + (1 - \\rho)\\nabla Q_{i}(w)

        v(w, t) & = \\beta v(w, t-1) + \\frac{\\eta} {\\sqrt{r(w,t) - (g(w, t))^2 +
            \\epsilon}} \\nabla Q_{i}(w)

        w & = w - v(w, t)

    where, :math:`\\rho` is a hyperparameter and typical values are 0.9, 0.95
    and so on. :math:`\\beta` is the momentum term. :math:`\\epsilon` is a
    smoothing term to avoid division by zero, usually set somewhere in range
    from 1e-4 to 1e-8.

    Parameters:
        learning_rate(float): Global learning rate.
        rho(float): rho is :math:`\\rho` in equation, default is 0.95.
        epsilon(float): :math:`\\epsilon` in equation is smoothing term to
            avoid division by zero, default is 1e-6.
        momentum(float): :math:`\\beta` in equation is the momentum term,
            default is 0.0.
        centered(bool): If True, gradients are normalized by the estimated variance of
            the gradient; if False, by the uncentered second moment. Setting this to
            True may help with training, but is slightly more expensive in terms of
            computation and memory. Defaults to False.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
            :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect. \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three clipping strategies
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.
        name (str, optional): This parameter is used by developers to print debugging information. \
            For details, please refer to :ref:`api_guide_Name`. Default is None.

    Raises:
        ValueError: If learning_rate, rho, epsilon, momentum are None.

    Examples:
          .. code-block:: python

            import paddle
            import paddle.fluid as fluid
            import numpy as np

            place = fluid.CPUPlace()
            main = fluid.Program()
            with fluid.program_guard(main):
                x = fluid.layers.data(name='x', shape=[13], dtype='float32')
                y = fluid.layers.data(name='y', shape=[1], dtype='float32')
                y_predict = fluid.layers.fc(input=x, size=1, act=None)
                cost = fluid.layers.square_error_cost(input=y_predict, label=y)
                avg_cost = fluid.layers.mean(cost)

                rms_optimizer = fluid.optimizer.RMSProp(learning_rate=0.1)
                rms_optimizer.minimize(avg_cost)

                fetch_list = [avg_cost]
                train_reader = paddle.batch(
                    paddle.dataset.uci_housing.train(), batch_size=1)
                feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
                exe = fluid.Executor(place)
                exe.run(fluid.default_startup_program())
                for data in train_reader():
                    exe.run(main, feed=feeder.feed(data), fetch_list=fetch_list)
    """
    # Names of the per-parameter accumulators managed by this optimizer.
    _momentum_acc_str = "momentum"
    _mean_square_acc_str = "mean_square"
    _mean_grad_acc_str = "mean_grad"

    def __init__(self,
                 learning_rate,
                 rho=0.95,
                 epsilon=1.0e-6,
                 momentum=0.0,
                 centered=False,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        # BUGFIX: validate the hyper-parameters *before* calling the base
        # class constructor.  Previously the super().__init__ call (which
        # consumes learning_rate) ran first, so a None learning_rate could
        # fail inside the base class instead of raising the documented
        # ValueError.  This also matches how AdadeltaOptimizer validates.
        if learning_rate is None:
            raise ValueError("learning_rate is not set.")
        if rho is None:
            raise ValueError("rho is not set.")
        if epsilon is None:
            raise ValueError("epsilon is not set.")
        if momentum is None:
            raise ValueError("momentum is not set.")
        super(RMSPropOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)

        self.type = "rmsprop"
        self._rho = rho
        self._epsilon = epsilon
        self._momentum = momentum
        self._centered = centered

    def _create_accumulators(self, block, parameters):
        """Create momentum, mean-square and mean-grad accumulators."""
        if not isinstance(block, framework.Block):
            raise TypeError("block is not instance of framework.Block.")

        for p in parameters:
            self._add_accumulator(self._momentum_acc_str, p)
            self._add_accumulator(self._mean_square_acc_str, p)
            # mean_grad is only consumed by the kernel when centered=True,
            # but it is always created so the op signature stays uniform.
            self._add_accumulator(self._mean_grad_acc_str, p)

    def _append_optimize_op(self, block, param_and_grad):
        """Append the `rmsprop` update op for one (param, grad) pair."""
        if not isinstance(block, framework.Block):
            raise TypeError("block is not instance of framework.Block.")

        momentum_acc = self._get_accumulator(self._momentum_acc_str,
                                             param_and_grad[0])
        mean_square_acc = self._get_accumulator(self._mean_square_acc_str,
                                                param_and_grad[0])
        mean_grad_acc = self._get_accumulator(self._mean_grad_acc_str,
                                              param_and_grad[0])
        # param and all accumulators are updated in place by the op.
        rmsprop_op = block.append_op(
            type=self.type,
            inputs={
                "Param": param_and_grad[0],
                "Grad": param_and_grad[1],
                "Moment": momentum_acc,
                "MeanSquare": mean_square_acc,
                "MeanGrad": mean_grad_acc,
                "LearningRate": self._create_param_lr(param_and_grad),
            },
            outputs={
                "ParamOut": param_and_grad[0],
                "MomentOut": momentum_acc,
                "MeanSquareOut": mean_square_acc,
                "MeanGradOut": mean_grad_acc
            },
            attrs={
                "epsilon": self._epsilon,
                "decay": self._rho,
                "momentum": self._momentum,
                "centered": self._centered
            },
            stop_gradient=True)

        return rmsprop_op
class FtrlOptimizer(Optimizer):
    r"""
    FTRL (Follow The Regularized Leader) Optimizer.

    Implements the FTRL-Proximal algorithm described in
    `Ad Click Prediction: a View from the Trenches
    <https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf>`_ .

    The update rule is:

    .. math::
        &new\_accum = squared\_accum + grad^2
        &if (lr\_power == -0.5):
        &\quad linear\_accum += grad - \\frac{\\sqrt{new\_accum} - \\sqrt{squared\_accum}}{learning\_rate * param}
        &else:
        &\quad linear\_accum += grad - \\frac{new\_accum^{-lr\_power} - accum^{-lr\_power}}{learning\_rate * param}
        &x = l1 * sign(linear\_accum) - linear\_accum
        &if (lr\_power == -0.5):
        &\quad y = \\frac{\\sqrt{new\_accum}}{learning\_rate} + (2 * l2)
        &\quad pre\_shrink = \\frac{x}{y}
        &\quad param = (abs(linear\_accum) > l1).select(pre\_shrink, 0.0)
        &else:
        &\quad y = \\frac{new\_accum^{-lr\_power}}{learning\_rate} + (2 * l2)
        &\quad pre\_shrink = \\frac{x}{y}
        &\quad param = (abs(linear\_accum) > l1).select(pre\_shrink, 0.0)
        &squared\_accum += grad^2

    Parameters:
        learning_rate (float|Variable): Global learning rate.
        l1 (float): L1 regularization strength, default is 0.0.
        l2 (float): L2 regularization strength, default is 0.0.
        lr_power (float): Learning Rate Power, default is -0.5.
        parameter_list (Iterable, optional): Iterable of ``Variable`` names to
            update to minimize ``loss``. This parameter is required in dygraph
            mode. The default value is None in static mode, at this time all
            parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The regularization
            strategy, one of :ref:`api_fluid_regularizer_L1Decay` or
            :ref:`api_fluid_regularizer_L2Decay` . If a parameter already has a
            regularizer set through :ref:`api_fluid_ParamAttr` , that
            per-parameter setting takes precedence and this one is ignored for
            it. Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient clipping strategy, an
            instance of some derived class of ``GradientClipBase``
            ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` ,
            :ref:`api_fluid_clip_GradientClipByNorm` ,
            :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning
            there is no gradient clipping.
        name (str, optional): This parameter is used by developers to print
            debugging information. For details, please refer to
            :ref:`api_guide_Name`. Default is None.
    Raises:
        ValueError: If learning_rate is None.
    Examples:
        .. code-block:: python
            import paddle
            import paddle.fluid as fluid
            import numpy as np
            place = fluid.CPUPlace()
            main = fluid.Program()
            with fluid.program_guard(main):
                x = fluid.layers.data(name='x', shape=[13], dtype='float32')
                y = fluid.layers.data(name='y', shape=[1], dtype='float32')
                y_predict = fluid.layers.fc(input=x, size=1, act=None)
                cost = fluid.layers.square_error_cost(input=y_predict, label=y)
                avg_cost = fluid.layers.mean(cost)
                ftrl_optimizer = fluid.optimizer.Ftrl(learning_rate=0.1)
                ftrl_optimizer.minimize(avg_cost)
                fetch_list = [avg_cost]
                train_reader = paddle.batch(
                    paddle.dataset.uci_housing.train(), batch_size=1)
                feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
                exe = fluid.Executor(place)
                exe.run(fluid.default_startup_program())
                for data in train_reader():
                    exe.run(main, feed=feeder.feed(data), fetch_list=fetch_list)
    NOTE:
       Currently, FtrlOptimizer doesn't support sparse parameter optimization.
    """

    # Accumulator keys registered with the Optimizer base class.
    _squared_acc_str = "squared"
    _linear_acc_str = "linear"

    def __init__(self,
                 learning_rate,
                 l1=0.0,
                 l2=0.0,
                 lr_power=-0.5,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 name=None):
        super(FtrlOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            name=name)
        if learning_rate is None:
            raise ValueError("learning_rate is not set.")
        self.type = "ftrl"
        self._l1 = l1
        self._l2 = l2
        self._lr_power = lr_power

    def _create_accumulators(self, block, parameters):
        """Register the squared- and linear-accumulator state per parameter."""
        if not isinstance(block, framework.Block):
            raise TypeError("block is not instance of framework.Block.")
        for param in parameters:
            for acc_name in (self._squared_acc_str, self._linear_acc_str):
                self._add_accumulator(acc_name, param)

    def _append_optimize_op(self, block, param_and_grad):
        """Append one ftrl op updating the parameter and its accumulators in
        place, and return the created op."""
        if not isinstance(block, framework.Block):
            raise TypeError("block is not instance of framework.Block.")
        param = param_and_grad[0]
        acc_squared = self._get_accumulator(self._squared_acc_str, param)
        acc_linear = self._get_accumulator(self._linear_acc_str, param)
        op_inputs = {
            "Param": param,
            "Grad": param_and_grad[1],
            "SquaredAccumulator": acc_squared,
            "LinearAccumulator": acc_linear,
            "LearningRate": self._create_param_lr(param_and_grad),
        }
        # Outputs alias the inputs: the op updates everything in place.
        op_outputs = {
            "ParamOut": param,
            "SquaredAccumOut": acc_squared,
            "LinearAccumOut": acc_linear,
        }
        op_attrs = {
            "l1": self._l1,
            "l2": self._l2,
            "lr_power": self._lr_power,
        }
        return block.append_op(
            type=self.type,
            inputs=op_inputs,
            outputs=op_outputs,
            attrs=op_attrs,
            stop_gradient=True)
class LambOptimizer(AdamOptimizer):
    r"""
    LAMB (Layer-wise Adaptive Moments optimizer for Batching training) Optimizer.
    LAMB Optimizer is designed to scale up the batch size of training without losing
    accuracy, which supports adaptive element-wise updating and accurate layer-wise
    correction. For more information, please refer to `Large Batch Optimization for
    Deep Learning: Training BERT in 76 minutes <https://arxiv.org/abs/1904.00962>`_ .
    The updating of parameters follows:
    .. math::
        m_t &= \\beta_1 m_{t - 1}+ (1 - \\beta_1)g_t
        v_t &= \\beta_2 v_{t - 1}  + (1 - \\beta_2)g_t^2
        m_t &= \\frac{m_t}{\\beta_1^t}
        v_t &= \\frac{v_t}{\\beta_2^t}
        r_t &= \\frac{m_t}{\\sqrt{v_t}+\\epsilon}
        w_t &= w_{t-1} -\\eta_t \\frac{\\left \| w_{t-1}\\right \|}{\\left \| r_t + \\lambda w_{t-1}\\right \|} (r_t + \\lambda w_{t-1})
    where :math:`m` is the 1st moment, and :math:`v` the 2nd moment, :math:`\\eta` the
    learning rate, :math:`\\lambda` the LAMB weight decay rate.
    Args:
        learning_rate (float|Variable, optional): the learning rate used to update parameters. \
            Can be a float value or a Variable with data type float32. Default 0.001.
        lamb_weight_decay (float, optional): The LAMB weight decay rate. Default 0.01.
        beta1 (float, optional): The exponential decay rate for the 1st moment estimates.
            Default 0.9.
        beta2 (float, optional): The exponential decay rate for the 2nd moment estimates.
            Default 0.999.
        epsilon (float, optional): A small float value for numerical stability. Default 1e-6.
        parameter_list (Iterable, optional):  Iterable of ``Variable`` names to update to minimize ``loss``. \
            This parameter is required in dygraph mode. \
            The default value is None in static mode, at this time all parameters will be updated.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
             :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect.  \
            Default None, meaning there is no regularization.
        grad_clip (GradientClipBase, optional): Gradient cliping strategy, it's an instance of
            some derived class of ``GradientClipBase`` . There are three cliping strategies
            ( :ref:`api_paddle_fluid_clip_ClipGradByGlobalNorm` , :ref:`api_paddle_fluid_clip_ClipGradByNorm` ,
            :ref:`api_paddle_fluid_clip_ClipGradByValue` ). If you want better convergence, it is recommended
            to use :ref:`api_paddle_fluid_clip_ClipGradByGlobalNorm` . Default None, meaning there is no gradient clipping.
        exclude_from_weight_decay_fn (function|None): Exclude a parameter from weight
            decay when **exclude_from_weight_decay_fn(parameter)** returns true.
            Default None.
        name(str|None): For detailed information, please refer to
            :ref:`api_guide_Name` . Usually name is no need to set and None by default.
    Examples:
        .. code-block:: python
            import paddle.fluid as fluid
            data = fluid.data(name='x', shape=[-1, 5], dtype='float32')
            hidden = fluid.layers.fc(input=data, size=10)
            cost = fluid.layers.mean(hidden)
            def exclude_fn(param):
                return param.name.endswith('.b_0')
            optimizer = fluid.optimizer.Lamb(learning_rate=0.002,
                                             exclude_from_weight_decay_fn=exclude_fn)
            optimizer.minimize(cost)
    """
    # Accumulator keys; the moment/beta-pow accumulators are created by the
    # AdamOptimizer base class and fetched by name in _append_optimize_op.
    _moment1_acc_str = "moment1"
    _moment2_acc_str = "moment2"
    _beta1_pow_acc_str = "beta1_pow_acc"
    _beta2_pow_acc_str = "beta2_pow_acc"
    def __init__(self,
                 learning_rate=0.001,
                 lamb_weight_decay=0.01,
                 beta1=0.9,
                 beta2=0.999,
                 epsilon=1e-6,
                 parameter_list=None,
                 regularization=None,
                 grad_clip=None,
                 exclude_from_weight_decay_fn=None,
                 name=None):
        # NOTE: asserts rather than ValueError, unlike sibling optimizers in
        # this file; they are stripped under `python -O`.
        assert learning_rate is not None
        assert lamb_weight_decay is not None
        assert beta1 is not None
        assert beta2 is not None
        assert epsilon is not None
        super(LambOptimizer, self).__init__(
            learning_rate=learning_rate,
            parameter_list=parameter_list,
            regularization=regularization,
            grad_clip=grad_clip,
            beta1=beta1,
            beta2=beta2,
            epsilon=epsilon,
            name=name)
        self.type = "lamb"
        self._weight_decay = lamb_weight_decay
        self._exclude_from_weight_decay_fn = exclude_from_weight_decay_fn
    def _append_optimize_op(self, block, param_and_grad):
        """Append one lamb op (or call the C op directly in dygraph mode)
        updating the parameter, both moments and both beta-pow accumulators
        in place. Returns the created op, or None in dygraph mode."""
        assert isinstance(block, framework.Block)
        block.program._use_lamb = True
        moment1 = self._get_accumulator(self._moment1_acc_str,
                                        param_and_grad[0])
        moment2 = self._get_accumulator(self._moment2_acc_str,
                                        param_and_grad[0])
        beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                              param_and_grad[0])
        beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str,
                                              param_and_grad[0])
        # Per-parameter opt-out of weight decay (e.g. biases/LayerNorm).
        if self._exclude_from_weight_decay_fn is not None \
            and self._exclude_from_weight_decay_fn(param_and_grad[0]):
            weight_decay = 0.0
        else:
            weight_decay = self._weight_decay
        lr = self._create_param_lr(param_and_grad)
        if framework.in_dygraph_mode():
            # Imperative fast path: positional inputs, then outputs aliasing
            # the same tensors (in-place update), then attr name/value pairs.
            _, _, _, _, _ = _C_ops.lamb(
                param_and_grad[0], param_and_grad[1], lr, moment1, moment2,
                beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1,
                moment2, beta1_pow_acc, beta2_pow_acc, 'beta1', self._beta1,
                'beta2', self._beta2, 'epsilon', self._epsilon, 'weight_decay',
                weight_decay)
            return None
        # create the lamb optimize op
        lamb_op = block.append_op(
            type=self.type,
            inputs={
                "Param": param_and_grad[0],
                "Grad": param_and_grad[1],
                "LearningRate": lr,
                "Moment1": moment1,
                "Moment2": moment2,
                "Beta1Pow": beta1_pow_acc,
                "Beta2Pow": beta2_pow_acc
            },
            outputs={
                "ParamOut": param_and_grad[0],
                "Moment1Out": moment1,
                "Moment2Out": moment2,
                "Beta1PowOut": beta1_pow_acc,
                "Beta2PowOut": beta2_pow_acc
            },
            attrs={
                "beta1": self._beta1,
                "beta2": self._beta2,
                "epsilon": self._epsilon,
                "weight_decay": weight_decay
            },
            stop_gradient=True)
        return lamb_op
# We shorten the class names, since users access the optimizers through the
# package name. The sample code:
#
# import paddle.fluid as fluid
#
# sgd = fluid.optimizer.SGD(...)
#
# There is no need to add an `Optimizer` suffix to the class name here.
SGD = SGDOptimizer
Momentum = MomentumOptimizer
Adagrad = AdagradOptimizer
Adam = AdamOptimizer
Adamax = AdamaxOptimizer
Dpsgd = DpsgdOptimizer
DecayedAdagrad = DecayedAdagradOptimizer
Adadelta = AdadeltaOptimizer
RMSProp = RMSPropOptimizer
Ftrl = FtrlOptimizer
LarsMomentum = LarsMomentumOptimizer
Lamb = LambOptimizer
class ModelAverage(Optimizer):
    r"""
    :api_attr: Static Graph
    The ModelAverage optimizer accumulates specific continuous historical parameters
    during training. The accumulated historical range can be controlled by the passed
    ``average_window_rate`` argument. The averaged ``Parameter`` are used in the prediction,
    which usually can improve the accuracy of the prediction.
    Accumulate the average of the ``Parameter`` in the sliding window, the result will be saved
    in a temporary variable, can be applied to the current model's ``Parameter`` by calling
    the ``apply()`` method, and the current model ``Parameter`` can be restored by calling
    the ``restore()`` method.
    The window size for calculating the average is determined by ``average_window_rate``,
    ``min_average_window``, ``max_average_window`` and the current ``Parameter`` update times (num_updates).
    When the cumulative times (num_accumulates) is greater than the specific window
    threshold (average_window), the accumulated ``Parameter`` temporary variable is set to 0.0.
    The following example will help to understand the role of these arguments:
    ::
        if num_accumulates >= min_average_window and num_accumulates >= min(max_average_window, num_updates * average_window_rate):
            num_accumulates = 0
    In the above conditional judgment statement, ``num_accumulates`` indicates the current
    accumulated number, which can be abstractly understood as the length of the cumulative window.
    The length of the window must be at least the length set by the ``min_average_window`` argument,
    and cannot exceed the length specified by the ``max_average_window`` argument or
    ``num_updates * average_window_rate``, where ``num_updates`` indicates the current ``Parameter``
    update times, ``average_window_rate`` is a coefficient that calculates the length of the window.
    Args:
        average_window_rate (float): The calculate ratio of the window length relative to ``Parameter`` update times.
        min_average_window (int, optional): the minimum size of average window length. The default value is 10000.
        max_average_window (int, optional): The maximum size of average window length. The default value is 10000.
        regularization (WeightDecayRegularizer, optional): The strategy of regularization. There are two method: \
             :ref:`api_fluid_regularizer_L1Decay` , :ref:`api_fluid_regularizer_L2Decay` . If a parameter has set \
            regularizer using :ref:`api_fluid_ParamAttr` already, the regularization setting here in optimizer will be \
            ignored for this parameter. Otherwise, the regularization setting here in optimizer will take effect.  \
            Default None, meaning there is no regularization.
        name (str, optional): Normally there is no need for user to set this property.
            For more information, please refer to :ref:`api_guide_Name`.
            The default value is None.
    Examples:
      .. code-block:: python
        import paddle.fluid as fluid
        import numpy
        # First create the Executor.
        place = fluid.CPUPlace()  # fluid.CUDAPlace(0)
        exe = fluid.Executor(place)
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            # build net
            data = fluid.data(name='X', shape=[None, 1], dtype='float32')
            hidden = fluid.layers.fc(input=data, size=10)
            loss = fluid.layers.mean(hidden)
            optimizer = fluid.optimizer.Momentum(learning_rate=0.2, momentum=0.1)
            optimizer.minimize(loss)
            # build ModelAverage optimizer
            model_average = fluid.optimizer.ModelAverage(0.15,
                                                         min_average_window=10000,
                                                         max_average_window=12500)
            exe.run(startup_program)
            for i in range(12500):
                x = numpy.random.random(size=(10, 1)).astype('float32')
                outs = exe.run(program=train_program,
                               feed={'X': x},
                               fetch_list=[loss.name])
            # apply ModelAverage
            with model_average.apply(exe):
                x = numpy.random.random(size=(10, 1)).astype('float32')
                exe.run(program=train_program,
                        feed={'X': x},
                        fetch_list=[loss.name])
    """
    def __init__(self,
                 average_window_rate,
                 min_average_window=10000,
                 max_average_window=10000,
                 regularization=None,
                 name=None):
        if framework.in_dygraph_mode():
            raise Exception("In dygraph, don't support ModelAverage.")
        # Learning rate is irrelevant here; 0.0 satisfies the base class.
        super(ModelAverage, self).__init__(
            0.0, regularization=regularization, name=name)
        self.average_window = average_window_rate
        self.min_average_window = min_average_window
        self.max_average_window = max_average_window
        # Collect every parameter of the default main program that opts in
        # to model averaging, paired with a temp var. Despite its name, the
        # `grad` temp is used only as a backup buffer for the parameter value
        # while the average is applied (see _add_average_apply_op).
        self.params_grads = []
        for param in framework.default_main_program().global_block(
        ).all_parameters():
            if param.do_model_average != False:
                grad = param.block.create_var(
                    name=unique_name.generate_with_ignorable_key(".".join(
                        [param.name, 'tmp'])),
                    dtype=param.dtype,
                    persistable=False,
                    stop_gradient=True)
                self.params_grads.append((param, grad))
        # Add the running accumulation ops to the training program.
        for param, grad in self.params_grads:
            if grad is None:
                continue
            with param.block.program._optimized_guard(
                [param, grad]), name_scope('move_average'):
                self._append_average_accumulate_op(param)
        # Build a standalone program that overwrites each parameter with its
        # accumulated average (run via apply()).
        self.apply_program = Program()
        block = self.apply_program.global_block()
        with program_guard(main_program=self.apply_program):
            for param_grad in self.params_grads:
                self._add_average_apply_op(block, param_grad)
        # Build the inverse program that restores the backed-up values
        # (run via restore()).
        self.restore_program = Program()
        block = self.restore_program.global_block()
        with program_guard(main_program=self.restore_program):
            for param_grad in self.params_grads:
                self._add_average_restore_op(block, param_grad)
    def _add_average_apply_op(self, block, param_grad):
        # Replace the parameter by its windowed average; the old value is
        # first backed up into the temp var so restore() can undo this.
        param = block._clone_variable(param_grad[0])
        grad = block._clone_variable(param_grad[1])
        sum_1 = block._clone_variable(self._get_accumulator('sum_1', param))
        sum_2 = block._clone_variable(self._get_accumulator('sum_2', param))
        sum_3 = block._clone_variable(self._get_accumulator('sum_3', param))
        num_accumulates = block._clone_variable(
            self._get_accumulator('num_accumulates', param))
        old_num_accumulates = block._clone_variable(
            self._get_accumulator('old_num_accumulates', param))
        # NOTE: cloned but not referenced below.
        num_updates = block._clone_variable(
            self._get_accumulator('num_updates', param))
        # backup param value to grad
        layers.assign(input=param, output=grad)
        # param = (sum_1 + sum_2 + sum_3) / (num_accumulates + old_num_accumulates)
        tmp = layers.sum(x=[num_accumulates, old_num_accumulates])
        sum = layers.sum(x=[sum_1, sum_2, sum_3])
        # Counts are int64; cast both operands to the parameter dtype
        # (float32 if the optimizer dtype is unset) before dividing.
        tmp = layers.cast(
            x=tmp, dtype='float32' if self._dtype == None else self._dtype)
        sum = layers.cast(
            x=sum, dtype='float32' if self._dtype == None else self._dtype)
        ops._elementwise_div(x=sum, y=tmp, out=param)
    def _add_average_restore_op(self, block, param_grad):
        # Copy the backed-up value from the temp var back into the parameter.
        param = block._clone_variable(param_grad[0])
        grad = block._clone_variable(param_grad[1])
        layers.assign(input=grad, output=param)
    def _append_average_accumulate_op(self, param):
        # Register the six per-parameter state vars and append one
        # average_accumulates op that updates them all in place each step.
        self.helper = LayerHelper("average_accumulate")
        sum_1 = self._add_accumulator('sum_1', param)
        sum_2 = self._add_accumulator('sum_2', param)
        sum_3 = self._add_accumulator('sum_3', param)
        num_accumulates = self._add_accumulator(
            'num_accumulates', param, dtype='int64', shape=[1])
        old_num_accumulates = self._add_accumulator(
            'old_num_accumulates', param, dtype='int64', shape=[1])
        num_updates = self._add_accumulator(
            'num_updates', param, dtype='int64', shape=[1])
        self.helper.append_op(
            type='average_accumulates',
            inputs={
                "param": param,
                "in_sum_1": sum_1,
                "in_sum_2": sum_2,
                "in_sum_3": sum_3,
                "in_num_accumulates": num_accumulates,
                "in_old_num_accumulates": old_num_accumulates,
                "in_num_updates": num_updates
            },
            outputs={
                "out_sum_1": sum_1,
                "out_sum_2": sum_2,
                "out_sum_3": sum_3,
                "out_num_accumulates": num_accumulates,
                "out_old_num_accumulates": old_num_accumulates,
                "out_num_updates": num_updates,
            },
            attrs={
                "average_window": self.average_window,
                "min_average_window": self.min_average_window,
                "max_average_window": self.max_average_window,
            },
            stop_gradient=True)
    @signature_safe_contextmanager
    def apply(self, executor, need_restore=True):
        """
        Apply the average of the cumulative ``Parameter`` to the parameters of the current model.
        Args:
            executor(fluid.Executor): The current network executor.
            need_restore(bool): Restore flag variable, if set to True, the network will restore
                the parameters of the network to the default value, if set to False,
                it will not be restored. The default value is True.
        Examples:
          .. code-block:: python
            import paddle.fluid as fluid
            import numpy
            # First create the Executor.
            place = fluid.CPUPlace()  # fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            train_program = fluid.Program()
            startup_program = fluid.Program()
            with fluid.program_guard(train_program, startup_program):
                # build net
                data = fluid.data(name='X', shape=[None, 1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                optimizer = fluid.optimizer.Momentum(learning_rate=0.2, momentum=0.1)
                optimizer.minimize(loss)
                # build ModelAverage optimizer
                model_average = fluid.optimizer.ModelAverage(0.15,
                                                             min_average_window=10000,
                                                             max_average_window=12500)
                exe.run(startup_program)
                for i in range(12500):
                    x = numpy.random.random(size=(10, 1)).astype('float32')
                    outs = exe.run(program=train_program,
                                   feed={'X': x},
                                   fetch_list=[loss.name])
                # apply ModelAverage
                with model_average.apply(exe):
                    x = numpy.random.random(size=(10, 1)).astype('float32')
                    exe.run(program=train_program,
                            feed={'X': x},
                            fetch_list=[loss.name])
        """
        executor.run(self.apply_program)
        try:
            yield
        finally:
            # Restore even if the caller's block raised.
            if need_restore:
                self.restore(executor)
    def restore(self, executor):
        """
        Restore ``Parameter`` values of current model.
        Args:
            executor(fluid.Executor): The current network executor.
        Examples:
          .. code-block:: python
            import paddle.fluid as fluid
            import numpy
            # First create the Executor.
            place = fluid.CPUPlace()  # fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            train_program = fluid.Program()
            startup_program = fluid.Program()
            with fluid.program_guard(train_program, startup_program):
                # build net
                data = fluid.data(name='X', shape=[None, 1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                optimizer = fluid.optimizer.Momentum(learning_rate=0.2, momentum=0.1)
                optimizer.minimize(loss)
                # build ModelAverage optimizer
                model_average = fluid.optimizer.ModelAverage(0.15,
                                                             min_average_window=10000,
                                                             max_average_window=12500)
                exe.run(startup_program)
                for i in range(12500):
                    x = numpy.random.random(size=(10, 1)).astype('float32')
                    outs = exe.run(program=train_program,
                                   feed={'X': x},
                                   fetch_list=[loss.name])
                # apply ModelAverage
                with model_average.apply(exe, False):
                    x = numpy.random.random(size=(10, 1)).astype('float32')
                    exe.run(program=train_program,
                            feed={'X': x},
                            fetch_list=[loss.name])
                # restore Parameters
                model_average.restore(exe)
        """
        executor.run(self.restore_program)
class ExponentialMovingAverage(object):
    r"""
    :api_attr: Static Graph
    Compute the moving average of parameters with exponential decay.
    Given a parameter :math:`\\theta`, its exponential moving average (EMA)
    will be
    ..  math::
        \\text{EMA}_0 & = 0
        \\text{EMA}_t & = \\text{decay} * \\text{EMA}_{t-1} + (1 - \\text{decay}) * \\theta_t
    The average results calculated by **update()** method will be saved in
    temporary variables which are created and maintained by the object, and can
    be applied to parameters of current model by calling **apply()** method. And
    the **restore()** method is used to restore the parameters.
    **Bias correction**. All EMAs are initialized to :math:`0` and hence they will be
    zero biased, which can be corrected by divided by a factor
    :math:`(1 - \\text{decay}^t)` , i.e., the actual EMAs applied to parameters
    when calling **apply()** method would be
    ..  math::
        \\widehat{\\text{EMA}}_t = \\frac{\\text{EMA}_t}{1 - \\text{decay}^t}
    **Decay rate scheduling**. A large decay rate very close to 1 would result
    in that the averages move very slowly. And a better strategy is to set a
    relative smaller decay rate in the very beginning. The argument **thres_steps**
    allows users to pass a Variable to schedule the decay rate, in this case,
    the actual decay rate becomes
    ..  math::
        \\min(\\text{decay}, \\frac{1 + \\text{thres_steps}}{10 + \\text{thres_steps}})
    Usually **thres_steps** can be the global training steps.
    Args:
        decay (float, optional): The exponential decay rate, usually close to 1, such as
            0.999, 0.9999, ... . Default 0.999.
        thres_steps (Variable|None): If not `None`, schedule the decay rate.
            Default None.
        name (str|None): For detailed information, please refer to
            :ref:`api_guide_Name`. Usually name is no need to set and None by
            default.
    Examples:
        .. code-block:: python
            import numpy
            import paddle
            import paddle.fluid as fluid
            data = fluid.data(name='x', shape=[-1, 5], dtype='float32')
            hidden = fluid.layers.fc(input=data, size=10)
            cost = fluid.layers.mean(hidden)
            test_program = fluid.default_main_program().clone(for_test=True)
            optimizer = fluid.optimizer.Adam(learning_rate=0.001)
            optimizer.minimize(cost)
            global_steps = fluid.layers.autoincreased_step_counter()
            ema = fluid.optimizer.ExponentialMovingAverage(0.999, thres_steps=global_steps)
            ema.update()
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(fluid.default_startup_program())
            for pass_id in range(3):
                for batch_id in range(6):
                    data = numpy.random.random(size=(10, 5)).astype('float32')
                    exe.run(program=fluid.default_main_program(),
                        feed={'x': data},
                        fetch_list=[cost.name])
                # usage 1
                with ema.apply(exe):
                    data = numpy.random.random(size=(10, 5)).astype('float32')
                    exe.run(program=test_program,
                        feed={'x': data},
                        fetch_list=[hidden.name])
                # usage 2
                with ema.apply(exe, need_restore=False):
                    data = numpy.random.random(size=(10, 5)).astype('float32')
                    exe.run(program=test_program,
                        feed={'x': data},
                        fetch_list=[hidden.name])
                ema.restore(exe)
    """
    def __init__(self, decay=0.999, thres_steps=None, name=None):
        if framework.in_dygraph_mode():
            raise Exception(
                "In dygraph, don't support ExponentialMovingAverage.")
        self._decay = decay
        self._thres_steps = thres_steps
        self._name = name if name is not None else ''
        # Scheduled decay-rate variable (may be capped by thres_steps).
        self._decay_var = self._get_ema_decay()
        self._step_counter_name = "@EMA_STEP_COUNTER@"
        # Pair each opted-in parameter with a temp var used to back up its
        # value while the EMA is applied.
        self._params_tmps = []
        for param in default_main_program().global_block().all_parameters():
            if param.do_model_average != False:
                tmp = param.block.create_var(
                    name=unique_name.generate(".".join(
                        [self._name + param.name, 'ema_tmp'])),
                    dtype=param.dtype,
                    persistable=False,
                    stop_gradient=True)
                self._params_tmps.append((param, tmp))
        # One persistent EMA variable per parameter, keyed by parameter name.
        self._ema_vars = {}
        for param, tmp in self._params_tmps:
            with param.block.program._optimized_guard(
                [param, tmp]), name_scope('moving_average'):
                self._ema_vars[param.name] = self._create_ema_vars(param)
        # Standalone program that backs up each parameter into its temp var
        # and overwrites it with the (bias-corrected) EMA.
        self.apply_program = Program()
        block = self.apply_program.global_block()
        with program_guard(main_program=self.apply_program):
            decay_pow, global_step = self._get_decay_pow(block)
            for param, tmp in self._params_tmps:
                param = block._clone_variable(param)
                tmp = block._clone_variable(tmp)
                ema = block._clone_variable(self._ema_vars[param.name])
                layers.assign(input=param, output=tmp)
                # bias correction
                with layers.control_flow.Switch() as switch:
                    with switch.case(global_step > 0):
                        layers.assign(
                            output=param, input=ema / (1.0 - decay_pow))
                    with switch.default():
                        layers.assign(output=param, input=ema)
        # Inverse program that restores the backed-up parameter values.
        self.restore_program = Program()
        block = self.restore_program.global_block()
        with program_guard(main_program=self.restore_program):
            for param, tmp in self._params_tmps:
                tmp = block._clone_variable(tmp)
                param = block._clone_variable(param)
                layers.assign(input=tmp, output=param)
    def _get_ema_decay(self):
        # Build a global decay-rate var; when thres_steps is given, cap the
        # rate at (thres_steps + 1) / (thres_steps + 10) for early training.
        with default_main_program()._lr_schedule_guard():
            decay_var = layers.tensor.create_global_var(
                shape=[1],
                value=self._decay,
                dtype='float32',
                persistable=True,
                name="scheduled_ema_decay_rate")
            if self._thres_steps is not None:
                decay_t = (self._thres_steps + 1.0) / (self._thres_steps + 10.0)
                with layers.control_flow.Switch() as switch:
                    with switch.case(decay_t < self._decay):
                        layers.tensor.assign(decay_t, decay_var)
                    with switch.default():
                        layers.tensor.assign(
                            np.array(
                                [self._decay], dtype=np.float32),
                            decay_var)
        return decay_var
    def _get_decay_pow(self, block):
        # decay^t (for bias correction) together with the step counter t.
        global_step = layers.create_global_var(
            name=self._step_counter_name,
            shape=[1],
            value=0,
            dtype='int64',
            persistable=True)
        global_step = layers.cast(global_step, "float32")
        decay_var = block._clone_variable(self._decay_var)
        decay_pow_acc = layers.elementwise_pow(decay_var, global_step)
        return decay_pow_acc, global_step
    def _create_ema_vars(self, param):
        # Persistent zero-initialized EMA buffer matching the parameter.
        param_ema = layers.create_global_var(
            name=unique_name.generate(self._name + param.name + '_ema'),
            shape=param.shape,
            value=0.0,
            dtype=param.dtype,
            persistable=True)
        return param_ema
    def update(self):
        """
        Update Exponential Moving Average. Should only call this method in
        train program.
        """
        global_step = layers.autoincreased_step_counter(
            counter_name=self._step_counter_name)
        param_master_emas = []
        for param, tmp in self._params_tmps:
            with param.block.program._optimized_guard(
                [param, tmp]), name_scope('moving_average'):
                param_ema = self._ema_vars[param.name]
                # NOTE(review): '.master' entries are presumably registered
                # elsewhere for fp16 parameters — they are only read here.
                if param.name + '.master' in self._ema_vars:
                    master_ema = self._ema_vars[param.name + '.master']
                    param_master_emas.append([param_ema, master_ema])
                else:
                    ema_t = param_ema * self._decay_var + param * (
                        1 - self._decay_var)
                    layers.assign(input=ema_t, output=param_ema)
        # for fp16 params
        for param_ema, master_ema in param_master_emas:
            default_main_program().global_block().append_op(
                type="cast",
                inputs={"X": master_ema},
                outputs={"Out": param_ema},
                attrs={
                    "in_dtype": master_ema.dtype,
                    "out_dtype": param_ema.dtype
                })
    @signature_safe_contextmanager
    def apply(self, executor, need_restore=True):
        """
        Apply moving average to parameters for evaluation.
        Args:
            executor (Executor): The Executor to execute applying.
            need_restore (bool, optional): Whether to restore parameters after
                applying. Default True.
        """
        executor.run(self.apply_program)
        try:
            yield
        finally:
            # Restore even if the caller's block raised.
            if need_restore:
                self.restore(executor)
    def restore(self, executor):
        """Restore parameters.
        Args:
            executor (Executor): The Executor to execute restoring.
        """
        executor.run(self.restore_program)
class PipelineOptimizer(object):
"""
:api_attr: Static Graph
Pipeline Optimizer: Make a program to run as pipeline, that is splitting a
program into multiple sections (sub-programs) and each section run on a
device to enable the training of large scale models and the use of
heterogeneous devices. Meanwhile, all sections run in the stype of pipeline.
Args:
optimizer (Optimizer): The optimizer to use, such as SGD.
num_microbatches (int): Number of microbatches. [Optional. Default:1].
start_cpu_core_id (int): The first cpu core id to use. [Optional. Default:0].
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle.fluid.layers as layers
with fluid.device_guard("gpu:0"):
x = fluid.layers.data(name='x', shape=[1], dtype='int64', lod_level=0)
y = fluid.layers.data(name='y', shape=[1], dtype='int64', lod_level=0)
data_loader = fluid.io.DataLoader.from_generator(
feed_list=[x, y],
capacity=64,
use_double_buffer=True,
iterable=False)
emb_x = layers.embedding(input=x, param_attr=fluid.ParamAttr(name="embx"), size=[10,2], is_sparse=False)
emb_y = layers.embedding(input=y, param_attr=fluid.ParamAttr(name="emby",learning_rate=0.9), size=[10,2], is_sparse=False)
with fluid.device_guard("gpu:1"):
concat = layers.concat([emb_x, emb_y], axis=1)
fc = layers.fc(input=concat, name="fc", size=1, num_flatten_dims=1, bias_attr=False)
loss = layers.reduce_mean(fc)
optimizer = fluid.optimizer.SGD(learning_rate=0.5)
optimizer = fluid.optimizer.PipelineOptimizer(optimizer)
optimizer.minimize(loss)
def train_reader():
for _ in range(4):
x = np.random.random(size=[1]).astype('int64')
y = np.random.random(size=[1]).astype('int64')
yield x, y
data_loader.set_sample_generator(train_reader, batch_size=1)
place = fluid.CUDAPlace(0)
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
batch_size = 1
data_loader.start()
exe.train_from_dataset(
fluid.default_main_program())
data_loader.reset()
"""
    def __init__(self, optimizer, num_microbatches=1, start_cpu_core_id=0):
        """Wrap `optimizer` for pipeline-parallel training.

        Raises:
            Exception: In dygraph mode (pipeline is static-graph only).
            ValueError: If `optimizer` is not a supported optimizer type.
        """
        # Pick the device kind this build of Paddle was compiled for;
        # NPU takes precedence over GPU, CPU is the fallback.
        self._device = 'cpu'
        if core.is_compiled_with_npu():
            self._device = "npu"
        elif core.is_compiled_with_cuda():
            self._device = "gpu"
        if framework.in_dygraph_mode():
            raise Exception("In dygraph, don't support PipelineOptimizer.")
        # Accept fluid optimizers, paddle 2.x optimizers, and AMP-wrapped ones.
        if not isinstance(optimizer, Optimizer) and not isinstance(
                optimizer, paddle.optimizer.Optimizer) and not isinstance(
                    optimizer, paddle.fluid.contrib.mixed_precision.decorator.
                    OptimizerWithMixedPrecision):
            raise ValueError("The 'optimizer' parameter for "
                             "PipelineOptimizer must be an instance of "
                             "Optimizer, but the given type is {}.".format(
                                 type(optimizer)))
        self._optimizer = optimizer
        # Get the original optimizer defined by users, such as SGD, by
        # unwrapping any meta-optimizer layers via their `inner_opt`.
        self._origin_optimizer = self._optimizer
        while hasattr(self._origin_optimizer, "inner_opt"):
            self._origin_optimizer = self._origin_optimizer.inner_opt
        assert num_microbatches >= 1, (
            "num_microbatches must be a positive value.")
        self._num_microbatches = num_microbatches
        assert start_cpu_core_id >= 0, (
            "start_cpu_core_id must be a non-negative integer.")
        self._start_cpu_core_id = start_cpu_core_id
        self._place_list = None
        # Cache the op attribute-name keys used when tagging ops with their
        # role (forward/backward/optimize) and target device.
        op_maker = core.op_proto_and_checker_maker
        self._op_role = op_maker.OpRole
        self._op_role_key = op_maker.kOpRoleAttrName()
        self._op_role_var_key = op_maker.kOpRoleVarAttrName()
        self._op_device_key = op_maker.kOpDeviceAttrName()
        self._param_device_map = None
        self._pipeline_pair = []
        self._pp_ring_map = dict()
        self.output_var_to_op = None
        self.input_var_to_op = None
    # insert allreduce op to sync global information for global
    # gradient clip and amp
    def _insert_allreduce_op(self, op_idx, block):
        """
        Insert allreduce op(s) after `block.ops[op_idx]` to sync global
        information for global gradient clip and amp.

        For a `reduce_any` op (bool output) the value is cast to int32,
        allreduce_max'ed, and cast back; any other op's output is
        allreduce_sum'ed in place.

        Returns:
            int: the number of ops inserted after `op_idx`.
        """
        op = block.ops[op_idx]
        out_name = op.desc.output_arg_names()[0]
        out_var = block.var(out_name)
        offset = 0
        if op.type == "reduce_any":
            # cast the bool var to int32 to use allreduce_max op
            temp_var_name = unique_name.generate(out_name + "_cast_int32")
            temp_var = block.create_var(
                name=temp_var_name, shape=[1], dtype="int32")
            block._insert_op(
                op_idx + 1 + offset,
                type='cast',
                inputs={'X': out_var},
                outputs={'Out': temp_var},
                attrs={
                    'in_dtype': out_var.dtype,
                    'out_dtype': temp_var.dtype,
                    self._op_role_key: self._op_role.Optimize
                })
            offset += 1
        # NOTE(review): self.global_ring_id is presumably set elsewhere
        # before this helper runs — it is not assigned in __init__.
        block._insert_op(
            op_idx + 1 + offset,
            type='c_allreduce_max'
            if op.type == "reduce_any" else 'c_allreduce_sum',
            inputs={'X': temp_var if op.type == "reduce_any" else out_var},
            outputs={'Out': temp_var if op.type == "reduce_any" else out_var},
            attrs={
                'ring_id': self.global_ring_id,
                self._op_role_key: self._op_role.Optimize,
                'use_calc_stream': True
            })
        offset += 1
        if op.type == "reduce_any":
            # Cast the reduced int32 value back into the original bool var.
            block._insert_op(
                op_idx + 1 + offset,
                type='cast',
                inputs={'X': temp_var},
                outputs={'Out': out_var},
                attrs={
                    'in_dtype': temp_var.dtype,
                    'out_dtype': out_var.dtype,
                    self._op_role_key: self._op_role.Optimize
                })
            offset += 1
        return offset
    def _create_vars(self, block, ori_block):
        """
        Copy into ``block`` every var referenced by its ops that exists in
        ``ori_block``, prune inputs of cross-device ops that do not exist on
        this device, and (when sharding is off) insert allreduce ops for amp /
        global-gradient-clip aggregation.
        """
        # Create vars for block, copied from ori_block
        used_var_set = set()
        added_op_num = 0
        op_idx = 0
        op_size = block.desc.op_size()
        # op_size/added_op_num are tracked manually because ops may be removed
        # from or inserted into the block while we iterate.
        while op_idx < op_size + added_op_num:
            # Whether to insert allreduce_sum or allreduce_max op.
            # For amp and global gradient clip strategies, we should
            # get the global information, so allreduce op is needed.
            should_insert = False
            op = block.ops[op_idx]
            # For op process vars on all devices, remove its input
            # vars not in this block
            reserved_x = []
            if op.type == 'reduce_any' and self._is_optimize_op(op):
                should_insert = True
            elif op.type == 'concat' and self._is_optimize_op(op):
                for input_name in op.desc.input("X"):
                    if block._find_var_recursive(input_name):
                        reserved_x.append(input_name)
                op.desc.set_input('X', reserved_x)
            elif op.type == 'update_loss_scaling':
                for input_name in op.desc.input("X"):
                    if block._find_var_recursive(input_name):
                        reserved_x.append(input_name)
                op.desc.set_input('X', reserved_x)
                op.desc.set_output('Out', reserved_x)
            elif op.type == 'check_finite_and_unscale':
                for input_name in op.desc.input("X"):
                    if block._find_var_recursive(input_name):
                        reserved_x.append(input_name)
                op.desc.set_input('X', reserved_x)
                op.desc.set_output('Out', reserved_x)
                # No surviving inputs on this device: drop the op entirely.
                if len(reserved_x) == 0:
                    block._remove_op(op_idx)
                    op_size -= 1
                    continue
            elif op.type == 'sum' and self._is_gradient_clip_op(op):
                for input_name in op.desc.input("X"):
                    if block._find_var_recursive(input_name):
                        reserved_x.append(input_name)
                op.desc.set_input('X', reserved_x)
                should_insert = True
            vars = op.desc.input_arg_names() + op.desc.output_arg_names()
            for var in vars:
                # a var whose name contains "blocking_queue"
                # only exists in startup program
                if var in used_var_set or "_blocking_queue" in var:
                    continue
                used_var_set.add(var)
                if block._find_var_recursive(str(var)): continue
                source_var = ori_block._var_recursive(str(var))
                if source_var.type == core.VarDesc.VarType.READER:
                    dest_var = block.create_var(
                        name=var,
                        type=core.VarDesc.VarType.READER,
                        persistable=source_var.persistable)
                else:
                    dest_var = block._clone_variable(source_var, False)
                dest_var.stop_gradient = source_var.stop_gradient
            # When use with sharding, allreduce_sum and allreduce_max
            # used for global gradient clip and amp will be added by sharding.
            op_idx += 1
            if self.use_sharding or not should_insert: continue
            inserted_ops = self._insert_allreduce_op(op_idx - 1, block)
            added_op_num += inserted_ops
            op_idx += inserted_ops
        block._sync_with_cpp()
def _is_loss_grad_op(self, op):
assert self._op_role_key in op.attr_names
op_role = int(op.attr(self._op_role_key))
return op_role & int(self._op_role.Backward) and op_role & int(
self._op_role.Loss)
def _is_backward_op(self, op):
return self._op_role_key in op.attr_names and (
int(op.attr(self._op_role_key)) & int(self._op_role.Backward))
def _is_loss_op(self, op):
assert self._op_role_key in op.attr_names
return int(op.attr(self._op_role_key)) == int(self._op_role.Loss)
def _is_optimize_op(self, op):
return self._op_role_key in op.attr_names and (
int(op.attr(self._op_role_key)) & int(self._op_role.Optimize))
def _is_update_op(self, op):
return 'Param' in op.input_names and 'Grad' in op.input_names and (
"LearningRate" in op.input_names)
def _split_program(self, main_program, devices):
"""
Split a program into sections according to devices that ops run on.
The op whose op_device attr is "gpu:all" is copied to all sections.
Args:
main_program (Program): the main program
devices: all used devices
"""
# Map from device to its corresponding section program info
device_program_map = defaultdict(Program)
block = main_program.block(0)
for op in block.ops:
device = op.attr(self._op_device_key)
# Copy ops whose op_device set to "gpu:all" to all sections.
if device == f"{self._device}:all":
for device in devices:
program = device_program_map[device]
op_desc = op.desc
ap_op = program.global_block().desc.append_op()
ap_op.copy_from(op_desc)
ap_op._set_attr(self._op_device_key, "")
else:
program = device_program_map[device]
op_desc = op.desc
ap_op = program.global_block().desc.append_op()
ap_op.copy_from(op_desc)
ap_op._set_attr(self._op_device_key, "")
program_list = []
for key in devices:
program = device_program_map[key]
program._sync_with_cpp()
program_list.append(program)
return program_list
def _get_op_device_for_startup_program(self, var_name):
"""
For adam optimizer, it will add accumulators and initialize them
with fill_constant, and force the op device to cpu. Hence, we should
get the real op_device attribute of the fill_constant as the device
where the corresponding parameters on.
"""
assert "beta1_pow_acc" in var_name or "beta2_pow_acc" in var_name, \
'For accumulators for Adam, the name must contain beta1_pow_acc ' \
'or beta2_pow_acc.'
param_name = var_name[0:var_name.index('_beta')]
device = self._param_device_map[param_name]
return device
    def _split_startup_program(self, startup_program, device_id):
        """
        Return a new Program containing the slice of ``startup_program``
        that belongs to pipeline stage ``device_id``.

        Ops pinned to "cpu" must be fill_constant ops for Adam accumulators;
        their real stage is recovered from the owning parameter's device.
        Ops with no device (LR related) are copied to every stage.
        """
        block = startup_program.global_block()
        new_startup_program = Program()
        for op in block.ops:
            device = op.attr(self._op_device_key)
            if device == "cpu":
                assert op.type == "fill_constant", (
                    "For ops in startup program with the op_device attribute "
                    "of cpu, they must be of type fill_constant.")
                output_var = op.output_arg_names[0]
                device = self._get_op_device_for_startup_program(output_var)
            if device:
                device_index = int(device.split(':')[1])
            else:
                # LR related ops
                device = None
            # Keep only this stage's ops; device-less ops pass the check
            # because the `device and` short-circuit skips the comparison.
            if device and device_index != device_id: continue
            op_desc = op.desc
            ap_op = new_startup_program.global_block().desc.append_op()
            ap_op.copy_from(op_desc)
            # Clear the per-op device tag: the whole section runs on one device.
            ap_op._set_attr(self._op_device_key, "")
        new_startup_program._sync_with_cpp()
        self._create_vars(new_startup_program.global_block(), block)
        return new_startup_program
def _find_post_op(self, index, var_name):
"""
Find the post op that has variable named var_name as input.
"""
# bugfix for uniform hybrid parallelism
if '.cast_fp32' in var_name:
var_name = var_name.replace('.cast_fp32', '')
if '.cast_fp16' in var_name:
var_name = var_name.replace('.cast_fp16', '')
post_ops = self.input_var_to_op[var_name]
if post_ops == None: return None
result_op = None
for post_op, post_idx in reversed(post_ops):
if post_idx > index:
result_op = post_op
break
return result_op
def _find_prev_op(self, index, var_name):
"""
Find the previous op of op with index that outputs
variable named var_name.
"""
prev_ops = self.output_var_to_op[var_name]
if prev_ops == None: return None
result_op = None
for prev_op, prev_idx in reversed(prev_ops):
if prev_idx < index:
result_op = prev_op
break
return result_op
def _rename_arg(self, op, old_name, new_name):
op._rename_input(old_name, new_name)
op._rename_output(old_name, new_name)
def _create_var(self, block, ref_var, name):
"""
Create a new var for block, which has the same type,
shape and dtype as ref_var, then rename it with the
name `name`.
"""
new_var = block.create_var(
name=name,
shape=ref_var.shape,
dtype=ref_var.dtype,
type=ref_var.type,
lod_level=ref_var.lod_level,
persistable=ref_var.persistable,
is_data=ref_var.is_data,
need_check_feed=ref_var.desc.need_check_feed())
new_var.stop_gradient = ref_var.stop_gradient
return new_var
def _strip_grad_suffix(self, name):
"""
Strip the grad suffix from the given variable name
"""
pos = name.find(core.grad_var_suffix())
return name[:pos] if pos != -1 else name
def _append_grad_suffix(self, name):
"""
Append grad suffix to the given variable name
"""
return name + core.grad_var_suffix()
def _get_op_device_attr(self, op):
"""
Get the op_device attribute of a op.
"""
device = op.attr(self._op_device_key) \
if op.has_attr(self._op_device_key) else None
if device:
assert device[0:3] == 'gpu' or device[0:3] == 'npu', "Now, only gpu and npu devices are " \
"supported in pipeline parallemism."
return device
    def _add_op_device_attr_for_op(self, op, idx, block):
        """
        Add op_device attrribute for ops that have not that attribute set.
        We use "gpu:all" to represent the op should be put on all
        sub-programs, such as lr-related ops. Note that: "gpu:all"
        is only used by pipeline as an indicator.

        Args:
            op: the op missing an op_device attribute.
            idx (int): index of ``op`` in ``block.ops``.
            block: the block containing ``op``.
        """
        lrsched_role = int(self._op_role.LRSched)
        if op.attr(self._op_role_key) == lrsched_role:
            # For LRSched ops, we should put them on all sub-programs to
            # make sure each sub-program update the lr correctly
            op._set_attr(self._op_device_key, f"{self._device}:all")
        # bugfix in hybrid parallelism
        elif op.type == "sum" and self._is_backward_op(op):
            # For sum ops that compute the sum of @RENAMED@ vars
            for name in op.desc.input_arg_names():
                assert '@RENAME@' in name, \
                    "The op must be sum used to accumulate renamed vars."
            assert len(op.desc.output_arg_names()) == 1
            out_name = op.desc.output_arg_names()[0]
            # The sum inherits the device of the op that consumes its output.
            post_op = self._find_post_op(idx, out_name)
            assert post_op.has_attr(
                'op_device'), "{} has no op_device attr for var {}".format(
                    post_op.type, out_name)
            device = post_op.attr(self._op_device_key)
            assert device, "The post op must have op_device set."
            op._set_attr(self._op_device_key, device)
        elif (op.type == "cast" or
              op.type == "scale") and self._is_backward_op(op):
            # cast/scale in backward inherits the producing op's device.
            prev_op = self._find_prev_op(idx, op.desc.input("X")[0])
            op._set_attr(self._op_device_key, prev_op.attr(self._op_device_key))
        elif op.type == "memcpy" and not self._is_optimize_op(op):
            # for checkpoint offloading
            assert len(op.input_arg_names) == 1 and len(
                op.output_arg_names) == 1
            input_name = op.input_arg_names[0]
            output_name = op.output_arg_names[0]
            if '@Fetch' in output_name:
                post_op = self._find_post_op(idx, output_name)
                op._set_attr(self._op_device_key,
                             post_op.attr(self._op_device_key))
            else:
                prev_op = self._find_prev_op(idx, op.desc.input("X")[0])
                op._set_attr(self._op_device_key,
                             prev_op.attr(self._op_device_key))
        elif self._is_loss_op(op):
            # For loss * loss_scaling op added by AMP
            # Walk forward to the first op that already has a device, and
            # stamp that device on all ops in between (and the loss op).
            offset = 1
            while (not block.ops[idx + offset].has_attr(self._op_device_key) or
                   not block.ops[idx + offset].attr(self._op_device_key)):
                offset += 1
            device = block.ops[idx + offset].attr(self._op_device_key)
            assert device, "Please put you program within device_guard scope."
            for i in range(offset):
                block.ops[idx + i]._set_attr(self._op_device_key, device)
        elif self._is_optimize_op(op) and op.type == "cast":
            # For fp16-->fp32 cast added by AMP
            grad_name = op.output('Out')
            assert len(grad_name) == 1
            param_name = self._strip_grad_suffix(grad_name[0])
            device = self._param_device_map[param_name]
            op._set_attr(self._op_device_key, device)
        elif self._is_gradient_clip_op(op) or self._is_regularization_op(op):
            # For gradient clip and regularization ops, we set their op_device
            # attribute to the device where their corresponding parameters on.
            assert self._op_role_var_key in op.attr_names, "gradient_clip " \
                "and regularization ops must have op_role_var attribute."
            op_role_var = op.attr(self._op_role_var_key)
            assert len(op_role_var) == 2, "op_role_var for gradient_clip " \
                "regularization ops must have two elements."
            param_name = op_role_var[0]
            device = self._param_device_map[param_name]
            # For sum op added by global gradient clip, it must be
            # put on all devices
            if (op.type == 'sum' or op.type == 'sqrt' or
                    op.type == 'fill_constant' or
                    op.type == 'elementwise_max' or
                    op.type == 'elementwise_div'):
                device = f"{self._device}:all"
            op._set_attr(self._op_device_key, device)
        elif self._is_weight_decay_op(op) and op.type == 'scale':
            # set AdamW decay_coeff to device:all
            op._set_attr(self._op_device_key, f"{self._device}:all")
        elif op.type == "alloc_float_status" or op.type == "clear_float_status":
            op._set_attr(self._op_device_key, f"{self._device}:all")
        else:
            other_known_ops = [
                'update_loss_scaling',
                'reduce_any',
                'concat',
                'sum',
                'check_finite_and_unscale',
                'alloc_float_status',
            ]
            assert op.type in other_known_ops, "For other ops without " \
                "op_device set, they must be one of {}, but it " \
                "is {}".format(other_known_ops, op.type)
            assert self._is_optimize_op(op)
            op._set_attr(self._op_device_key, f"{self._device}:all")
def _add_op_device_attr(self, block):
"""
Add op_device attrribute for ops in block that have
not that attribute set.
"""
for idx, op in enumerate(list(block.ops)):
if (op.type == "create_py_reader" or op.type == "read" or
op.type == "create_double_buffer_reader"):
# Copy read related ops to all section to make them exit
# after each epoch.
# We use "gpu:all" to represent the op should be put on all
# sub-programs, such as lr-related ops. Note that: "gpu:all"
# is only used by pipeline as an indicator.
op._set_attr(self._op_device_key, f"{self._device}:all")
continue
# op_device attribute has been set
if self._get_op_device_attr(op): continue
self._add_op_device_attr_for_op(op, idx, block)
    def _check_validation(self, block):
        """
        Check whether ops in a block have both the op_device and the
        op_role attributes set.
        Then, return all devices in order.

        Also validates that pipeline stage ids of consecutive non-optimize
        ops never jump by more than one, and increase monotonically in the
        forward phase.
        """
        device_list = []
        # Section worker only supports the following op_role
        valid_op_role_value = [
            int(self._op_role.LRSched),
            int(self._op_role.Forward),
            int(self._op_role.Backward),
            int(self._op_role.Loss),
            int(self._op_role.Optimize),
            int(self._op_role.Backward) | int(self._op_role.Loss),
        ]
        pre_stage_id = None
        # NOTE(review): decrease_flag is assigned but never used below.
        decrease_flag = False
        in_optimize = False
        in_forward = True
        for op in block.ops:
            if not op._has_kernel(op.type):
                assert op.type == "conditional_block" and (
                    op.attr(self._op_role_key) == int(self._op_role.LRSched)), (
                        "Now, the only supported op without kernel is "
                        "conditional_block, and its op role must be LRSched.")
            assert op.has_attr(self._op_role_key), (
                "op ({}) has no {} attribute.".format(op.type,
                                                      self._op_role_key))
            op_role = op.attr(self._op_role_key)
            assert int(op_role) in valid_op_role_value, \
                "op_role {} for op {} must be one of {}".format(
                    op_role,
                    op.type,
                    valid_op_role_value)
            # Once set, these flags stay set for the rest of the scan.
            if int(op_role) == int(self._op_role.Optimize):
                in_optimize = True
            if int(op_role) == int(self._op_role.Backward):
                in_forward = False
            assert op.has_attr(self._op_device_key), (
                "op ({}) has no {} attribute.".format(op.type,
                                                      self._op_device_key))
            device = op.attr(self._op_device_key)
            assert device, ("op_device attribute for op "
                            "{} has not been set.".format(op.type))
            # Ops marked ":all" run on every stage and are excluded from the
            # stage-ordering check.
            if device == f"{self._device}:all": continue
            dev_type = device.split(':')[0]
            stage_id = int(device.split(':')[1])
            assert dev_type == "gpu" or dev_type == 'npu', (
                "Now only gpu and npu devices are supported "
                "for pipeline parallelism.")
            if device not in device_list:
                device_list.append(device)
            if not in_optimize:
                if pre_stage_id is not None:
                    interval = stage_id - pre_stage_id
                    assert abs(interval) <= 1, \
                        "The stage interval of two consecutive ops in the pipeline must be < = 1," \
                        "but the interval of op={} and prev op is {}".format(op, interval)
                    # stage must be in order, such as Forward(0 1 2 3 4), Backward(4 3 2 1 0)
                    # if stage is unordered, such as Forward(0 1 2 3 4 3 4), will report error
                    if in_forward:
                        assert interval >= 0, \
                            "Pipeline stage must be sequential increment in Forward, prev_stage={}, " \
                            "please check the stage of op={}".format(pre_stage_id, op)
                    else:
                        # FIXME(wangxi): recompute check failed
                        pass
                        #assert interval <=0, \
                        #    "Pipeline stage must be sequential decrement in Backward, prev_stage={}, " \
                        #    "please check the stage of op={}".format(pre_stage_id, op)
                pre_stage_id = stage_id
        return device_list
    def _insert_sendrecv_ops_for_boundaries(self, block):
        """
        Insert a pair of send and recv ops for every two
        consecutive ops on different devices.

        For each op input produced on a different pipeline stage, a
        send/recv pair (one hop at a time — non-adjacent stages are bridged
        recursively) is inserted according to ``self.schedule_mode``
        ('F-then-B' or '1F1B').
        """
        # A map from var to device where op takes it as input,
        # avoiding multiple send and recv ops.
        input_var_to_device = dict()
        # bugfix hybrid parallelism
        first_optimize_index = None
        for index, op in enumerate(list(block.ops)):
            if self._is_optimize_op(op):
                first_optimize_index = index
                break
        # extra_index_info['index'] counts ops inserted so far, so that
        # insertion positions computed from the original indices stay valid.
        extra_index_info = {
            'index': 0,
            'first_optimize_index': first_optimize_index
        }
        for index, op in enumerate(list(block.ops)):
            cur_device = op.attr(self._op_device_key)
            if cur_device == f"{self._device}:all": continue
            for var_name in op.input_arg_names:
                var = block.var(var_name)
                # skip data var
                if var.is_data: continue
                prev_device = None
                generate_ops = self.output_var_to_op.get(var_name)
                if generate_ops is None:
                    if var_name not in self._param_device_map:
                        continue
                    prev_device = self._param_device_map[var_name]
                prev_op = self._find_prev_op(index, var_name)
                if not prev_device:
                    prev_device = prev_op.attr(self._op_device_key) \
                        if prev_op else None
                if prev_device is None or prev_device == f"{self._device}:all":
                    continue
                if prev_device == cur_device: continue
                if var_name not in input_var_to_device:
                    input_var_to_device[var_name] = []
                if (cur_device, prev_device) in input_var_to_device[var_name]:
                    continue
                device_type = cur_device.split(':')[0] + ':'

                def _insert_send_recv(cur_id, prev_id):
                    # Recursively bridge non-adjacent stages one hop at a
                    # time until |cur_id - prev_id| == 1, then insert the
                    # actual communication ops for that hop.
                    cur_dev = device_type + str(cur_id)
                    prev_dev = device_type + str(prev_id)
                    if (cur_dev, prev_dev) in input_var_to_device[var_name]:
                        return
                    if cur_id - prev_id > 1:
                        _insert_send_recv(cur_id - 1, prev_id)
                        _insert_send_recv(cur_id, cur_id - 1)
                        input_var_to_device[var_name].append(
                            (cur_dev, prev_dev))
                        return
                    elif cur_id - prev_id < -1:
                        _insert_send_recv(cur_id + 1, prev_id)
                        _insert_send_recv(cur_id, cur_id + 1)
                        input_var_to_device[var_name].append(
                            (cur_dev, prev_dev))
                        return
                    assert abs(cur_id - prev_id) == 1
                    input_var_to_device[var_name].append((cur_dev, prev_dev))
                    op_role = op.attr(self._op_role_key)
                    var = block.vars[var_name]
                    pair = (prev_id, cur_id)
                    # 1000 is just a magic number
                    pair_key = prev_id * 1000 + cur_id
                    if pair not in self._pipeline_pair:
                        self._pipeline_pair.append(pair)
                        self._pp_ring_map[pair_key] = self.ring_id
                        ring_id = self.ring_id
                        self.ring_id += 1
                    else:
                        ring_id = self._pp_ring_map[pair_key]
                    if self.schedule_mode == 'F-then-B':  # F-then-B
                        block._insert_op_without_sync(
                            index=index + extra_index_info['index'],
                            type='send_v2',
                            inputs={'X': var},
                            attrs={
                                self._op_device_key: prev_dev,
                                self._op_role_key: op_role,
                                'use_calc_stream': True,
                                'peer': 1,
                                'ring_id': ring_id
                            })
                        extra_index_info['index'] += 1
                        var_shape = list(var.shape)
                        # Replace a dynamic (-1) batch dim with the known
                        # micro batch size for the recv-side shape.
                        var_shape[0] = self.micro_batch_size if var_shape[
                            0] < 0 else var_shape[0]
                        block._insert_op_without_sync(
                            index=index + extra_index_info['index'],
                            type='recv_v2',
                            outputs={'Out': [var]},
                            attrs={
                                'out_shape': var_shape,
                                'dtype': var.dtype,
                                self._op_device_key: cur_dev,
                                self._op_role_key: op_role,
                                'use_calc_stream': True,
                                'peer': 0,
                                'ring_id': ring_id
                            })
                        extra_index_info['index'] += 1
                    elif self.schedule_mode == '1F1B':  # 1F1B
                        var_shape = list(var.shape)
                        var_shape[0] = self.micro_batch_size if var_shape[
                            0] < 0 else var_shape[0]
                        numel = np.prod(var.shape)
                        assert numel % self.mp_degree == 0, \
                            "The numel={} must be divisible by mp_degree={}".format(numel, self.mp_degree)
                        if 'subprog' in var.name:
                            # For recompute, if the checkpoints var is layer_norm_6.tmp_2
                            # this var will be sent twice, layer_norm_6.tmp_2 for forward pass,
                            # layer_norm_6.tmp_2.subprog_* for recompute pass.
                            # We can store the first sent var and copy the value to the
                            # second one to reduce one send/recv op.
                            # The origin_ckpt_name is layer_norm_6.tmp_2, which will be used
                            # to find the stored var for the forward pass.
                            origin_name = var.name.split('subprog')[0][0:-1]
                            associate_var = block.var(origin_name)
                            block._insert_op_without_sync(
                                index=index + extra_index_info['index'],
                                type='assign',
                                inputs={'X': [associate_var]},
                                outputs={'Out': [var]},
                                attrs={
                                    'out_shape': var_shape,
                                    'dtype': var.dtype,
                                    self._op_device_key: cur_dev,
                                    self._op_role_key: op_role,
                                    'use_calc_stream': True,
                                })
                            extra_index_info['index'] += 1
                            return
                        block._insert_op_without_sync(
                            index=index + extra_index_info['index'],
                            type='c_sync_calc_stream',
                            inputs={'X': [var]},
                            outputs={'Out': [var]},
                            attrs={
                                self._op_device_key: prev_dev,
                                self._op_role_key: op_role,
                            })
                        extra_index_info['index'] += 1
                        block._insert_op_without_sync(
                            index=index + extra_index_info['index'],
                            type='send_v2'
                            if self.mp_degree == 1 else 'partial_send',
                            inputs={'X': var},
                            attrs={
                                self._op_device_key: prev_dev,
                                self._op_role_key: op_role,
                                'use_calc_stream': False,
                                'ring_id': ring_id,
                                'peer': 1,
                                # if send_v2, num&id attr is not in op_attrs, will not insert
                                'num': self.mp_degree,
                                'id': self.mp_rank,
                            })
                        extra_index_info['index'] += 1
                        insert_index = None
                        if int(op_role) == int(self._op_role.Backward):
                            insert_index = extra_index_info[
                                'first_optimize_index']
                            new_op_role = self._op_role.Optimize
                        else:
                            insert_index = index
                            new_op_role = self._op_role.Backward
                        sync_comm_op = block._insert_op_without_sync(
                            index=insert_index + extra_index_info['index'],
                            type='c_sync_comm_stream',
                            inputs={'X': [var]},
                            outputs={'Out': [var]},
                            attrs={
                                self._op_device_key: prev_dev,
                                self._op_role_key: new_op_role,
                                'ring_id': ring_id,
                            })
                        if int(op_role) == int(self._op_role.Forward):
                            # Marked so _optimize_forward_send_sync can later
                            # remove this sync and replace it with a nop.
                            sync_comm_op._set_attr('pipeline_flag', '')
                        extra_index_info['index'] += 1
                        block._insert_op_without_sync(
                            index=index + extra_index_info['index'],
                            type='recv_v2'
                            if self.mp_degree == 1 else 'partial_recv',
                            outputs={'Out': [var]},
                            attrs={
                                'out_shape': var_shape,
                                'dtype': var.dtype,
                                self._op_device_key: cur_dev,
                                self._op_role_key: op_role,
                                'use_calc_stream': True,
                                'peer': 0,
                                'ring_id': ring_id,
                                # if recv_v2, num&id attr is not in op_attrs, will not insert
                                'num': self.mp_degree,
                                'id': self.mp_rank,
                            })
                        extra_index_info['index'] += 1
                        if self.mp_degree > 1:
                            block._insert_op_without_sync(
                                index=index + extra_index_info['index'],
                                type='partial_allgather',
                                inputs={'X': [var]},
                                outputs={'Out': [var]},
                                attrs={
                                    self._op_device_key: cur_dev,
                                    self._op_role_key: op_role,
                                    'use_calc_stream': True,
                                    'ring_id': 0,
                                    # if recv_v2, num&id attr is not in op_attrs, will not insert
                                    'nranks': self.mp_degree,
                                    'rank': self.mp_rank,
                                })
                            extra_index_info['index'] += 1
                    else:
                        raise ValueError(
                            "Now only 'F-then-B' and '1F1B' are supported."
                            "The given value is {}.".format(self.schedule_mode))

                _insert_send_recv(
                    int(cur_device.split(':')[1]),
                    int(prev_device.split(':')[1]))
        block._sync_with_cpp()
def _insert_loss_scale(self, block):
"""
Scale the loss corresponding to number of micro-batches.
"""
if self._num_microbatches == 1: return
for index, op in reversed(tuple(enumerate(list(block.ops)))):
if self._is_loss_grad_op(op):
loss_grad_var = block.vars[op.output_arg_names[0]]
block._insert_op(
index=index + 1,
type='scale',
inputs={'X': loss_grad_var},
outputs={'Out': loss_grad_var},
attrs={
'scale': 1.0 / self._num_microbatches,
self._op_role_key: self._op_role.Backward
})
break
def _rename_gradient_var_name(self, block):
for index, op in enumerate(block.ops):
if not self._is_optimize_op(op): continue
input_names = op.input_arg_names
output_names = op.output_arg_names
in_out_names = input_names + output_names
if op.type == 'cast' or op.type == "c_sync_comm_stream": continue
# append "MERGED" to the names of parameter gradients,
# and mofify the op_role_var attribute (by rename_arg func).
for name in in_out_names:
if not core.grad_var_suffix() in name: continue
param_name = name.strip(core.grad_var_suffix())
new_grad_name = name + "@MERGED"
self._rename_arg(op, name, new_grad_name)
    def _accumulate_gradients(self, block, pp_allreduce_in_optimize=False):
        """
        Create a new merged gradient for each parameter and accumulate the
        corresponding gradient to it.

        Iterates the block in reverse, removing AMP's fp16->fp32 grad cast
        ops, locating the boundary between backward and optimize phases,
        and inserting fill_constant + sum (plus cast for fp16 grads) ops
        that accumulate each micro-batch gradient into a persistable
        "<grad>@MERGED" var.

        Returns:
            list of merged gradient var names (None if there is no
            optimize phase).
        """
        merged_gradient_names = []
        first_opt_op_idx = None
        for index, op in reversed(tuple(enumerate(list(block.ops)))):
            # remove the cast op of fp16 grad to fp32 grad
            if self._is_optimize_op(op) and op.type == 'cast':
                in_name = op.input_arg_names[0]
                out_name = op.output_arg_names[0]
                # NOTE(review): str.strip('@GRAD') strips the character set
                # {@,G,R,A,D} from both ends, not just the "@GRAD" suffix —
                # presumably safe for typical param names, but verify.
                if out_name.strip('@GRAD') in self._param_device_map:
                    assert in_name.replace('.cast_fp16', '') == out_name
                    block._remove_op(index)
                    continue
            if self._is_backward_op(op) and not first_opt_op_idx:
                # first op after this one (in forward order) starts the
                # optimize phase
                first_opt_op_idx = index + 1
                # no optimize phase
                if first_opt_op_idx == len(block.ops): return
                if block.ops[first_opt_op_idx].type == "c_sync_comm_stream":
                    first_opt_op_idx += 1
            if self._is_backward_op(op) and (
                    self._op_role_var_key in op.attr_names):
                # op_role_var holds (param, grad) name pairs, flattened
                op_role_var = op.attr(self._op_role_var_key)
                if len(op_role_var) == 0: continue
                assert len(op_role_var) % 2 == 0
                for i in range(0, len(op_role_var), 2):
                    offset = 0
                    param_name = op_role_var[i]
                    if not block.has_var(param_name): continue
                    if '@BroadCast' in param_name: continue
                    param_grad_name = param_name + core.grad_var_suffix()
                    merged_param_grad_name = param_grad_name + '@MERGED'
                    if not block.has_var(merged_param_grad_name):
                        self._create_var(block, block.vars[param_name],
                                         merged_param_grad_name)
                    assert block.has_var(merged_param_grad_name)
                    param_grad_var = block.var(param_grad_name)
                    merged_param_grad_var = block.var(merged_param_grad_name)
                    merged_param_grad_var.persistable = True
                    # zero the merged grad once per mini-batch (LRSched role)
                    block._insert_op(
                        index=first_opt_op_idx + offset,
                        type='fill_constant',
                        inputs={},
                        outputs={'Out': [merged_param_grad_var]},
                        attrs={
                            'shape': merged_param_grad_var.shape,
                            'dtype': merged_param_grad_var.dtype,
                            'value': float(0),
                            # a trick to run this op once per mini-batch
                            self._op_role_key: self._op_role.Optimize.LRSched,
                        })
                    offset += 1
                    grad_name = op_role_var[i + 1]
                    grad_var = block.vars[grad_name]
                    if not 'cast_fp16' in grad_name:
                        block._insert_op(
                            index=first_opt_op_idx + offset,
                            type='sum',
                            inputs={'X': [grad_var, merged_param_grad_var]},
                            outputs={'Out': merged_param_grad_var},
                            attrs={
                                self._op_role_key: self._op_role.Backward,
                            })
                        offset += 1
                        merged_gradient_names.append(merged_param_grad_name)
                    else:
                        # cast gradient to fp32 to accumulate to merged gradient
                        cast_grad_var_name = param_grad_name + '@TMP'
                        cast_grad_var = self._create_var(block, param_grad_var,
                                                         cast_grad_var_name)
                        cast_grad_var.persistable = False
                        block._insert_op(
                            index=first_opt_op_idx + offset,
                            type='cast',
                            inputs={'X': grad_var},
                            outputs={'Out': cast_grad_var},
                            attrs={
                                'in_dtype': grad_var.dtype,
                                'out_dtype': cast_grad_var.dtype,
                                self._op_role_key: self._op_role.Backward,
                            })
                        offset += 1
                        block._insert_op(
                            index=first_opt_op_idx + offset,
                            type='sum',
                            inputs={
                                'X': [merged_param_grad_var, cast_grad_var]
                            },
                            outputs={'Out': merged_param_grad_var},
                            attrs={
                                self._op_role_key: self._op_role.Backward,
                            })
                        offset += 1
                        merged_gradient_names.append(merged_param_grad_name)
        return merged_gradient_names
    def _add_sub_blocks(self, main_block, program_list):
        """
        For every op in the section programs that owns a 'sub_block'
        attribute (e.g. conditional_block), clone the referenced sub-block
        from the main program into the section program and repoint the op's
        'sub_block' attribute at the clone.
        """
        main_program = main_block.program
        for prog in program_list:
            for op in prog.block(0).ops:
                if not op.has_attr('sub_block'):
                    continue
                origin_sub_block_id = op.attr('sub_block').id
                origin_sub_block = main_program.block(origin_sub_block_id)
                new_sub_block = prog._create_block(parent_idx=0)
                for sub_op in origin_sub_block.ops:
                    op_desc = sub_op.desc
                    ap_op = new_sub_block.desc.append_op()
                    ap_op.copy_from(op_desc)
                new_sub_block._sync_with_cpp()
                # copy over the vars referenced inside the cloned sub-block
                self._create_vars(new_sub_block, origin_sub_block)
                op._set_attr('sub_block', new_sub_block)
def _get_device_info(self, block):
for op in block.ops:
if not op._has_kernel(op.type): continue
op_device = op.attr(self._op_device_key)
return op_device
    def _process_persistable_vars_in_multi_sections(self, main_program,
                                                    startup_prog, program_list):
        """
        Special Case: process persistable vars that exist in
        multiple sections, e.g., shared weight

        For each persistable var present in more than one section program,
        find the single section that writes it and insert a send on that
        section plus a recv (and comm-stream sync) on every reading section,
        using the LRSched role so the transfer runs once per mini-batch.
        """
        # var_info = {var_name: [program1, program2...]},
        # persistable var only
        var_info = dict()
        for prog in program_list:
            block = prog.block(0)
            for var_name in block.vars:
                if var_name == "double_buffer_0": continue
                var = block.var(var_name)
                if not var.persistable: continue
                if not var_name in var_info:
                    var_info[var_name] = []
                if not prog in var_info[var_name]:
                    var_info[var_name].append(prog)
        # keep only vars shared by at least two sections
        for var_name in list(var_info.keys()):
            if len(var_info[var_name]) == 1:
                var_info.pop(var_name)
        # write_info = {var_name: program}, where program is the only program
        # in which the var named var_name is written.
        write_info = dict()
        for var_name in var_info.keys():
            for prog in var_info[var_name]:
                block = prog.block(0)
                for op in block.ops:
                    if op.type == "recv_v2" or op.type == "create_py_reader" or \
                            op.type == "read" or op.type == "update_loss_scaling":
                        continue
                    # We have processed lr related vars
                    if op.attr(self._op_role_key) == int(
                            self._op_role.Optimize.LRSched):
                        continue
                    if var_name in op.desc.output_arg_names():
                        assert var_name not in write_info, (
                            "two sections write the same var({}): second "
                            "op {}.".format(var_name, op))
                        write_info[var_name] = prog
                        break
        for var_name in var_info.keys():
            # Case 1: read only variables, no special process
            if not var_name in write_info: continue
            # Case 2: one write multiple reads
            write_prog = write_info[var_name]
            write_block = write_prog.block(0)
            write_device = self._get_device_info(write_block)
            write_dev_index = int(write_device.split(':')[1])
            all_progs = var_info[var_name]
            for prog in all_progs:
                if prog == write_prog: continue
                read_block = prog.block(0)
                read_device = self._get_device_info(read_block)
                read_dev_index = int(read_device.split(':')[1])
                # reuse an existing communication ring for this stage pair,
                # or allocate a new one
                pair = (write_dev_index, read_dev_index)
                pair_key = write_dev_index * 1000 + read_dev_index
                if pair not in self._pipeline_pair:
                    self._pipeline_pair.append(pair)
                    self._pp_ring_map[pair_key] = self.ring_id
                    ring_id = self.ring_id
                    self.ring_id += 1
                else:
                    ring_id = self._pp_ring_map[pair_key]
                write_block._insert_op(
                    index=0,
                    type='send_v2',
                    inputs={'X': write_block.var(var_name), },
                    attrs={
                        self._op_device_key: write_device,
                        'use_calc_stream': False,
                        # A trick to make the role LRSched to avoid copy every
                        # microbatch
                        self._op_role_key: self._op_role.LRSched,
                        'peer': read_dev_index,
                        'ring_id': ring_id
                    })
                read_block._insert_op(
                    index=0,
                    type='recv_v2',
                    outputs={'Out': [read_block.var(var_name)]},
                    attrs={
                        'out_shape': read_block.var(var_name).shape,
                        'dtype': read_block.var(var_name).dtype,
                        self._op_device_key: read_device,
                        'use_calc_stream': False,
                        # A trick to make the role LRSched to avoid copy every
                        # microbatch
                        self._op_role_key: self._op_role.LRSched,
                        'peer': write_dev_index,
                        'ring_id': ring_id
                    })
                read_block._insert_op(
                    index=1,
                    type='c_sync_comm_stream',
                    inputs={'X': [read_block.var(var_name)]},
                    outputs={'Out': [read_block.var(var_name)]},
                    attrs={
                        self._op_device_key: read_device,
                        # A trick to make the role LRSched to avoid copy every
                        # microbatch
                        self._op_role_key: self._op_role.LRSched,
                        'ring_id': ring_id
                    })
def _is_gradient_clip_op(self, op):
return op.desc.has_attr("op_namescope") \
and op.desc.attr("op_namescope").startswith("/gradient_clip")
def _is_regularization_op(self, op):
return op.desc.has_attr("op_namescope") \
and op.desc.attr("op_namescope").startswith("/regularization")
def _is_weight_decay_op(self, op):
# in AdamW namescope is /optimizer_*/weight decay/
return op.desc.has_attr("op_namescope") \
and 'weight decay' in op.desc.attr("op_namescope")
def _get_input_output_info(self, block):
'''
Get info of op input and output.
'''
# A map from output var to op which generate it.
output_var_to_op = defaultdict(list)
# A map from var to op which takes it as input.
input_var_to_op = defaultdict(list)
for index, op in enumerate(block.ops):
for var_name in op.input_arg_names:
input_var_to_op[var_name].append([op, index])
for var_name in op.output_arg_names:
output_var_to_op[var_name].append([op, index])
return output_var_to_op, input_var_to_op
    def _optimize_forward_send_sync(self, program):
        """
        Optimize the forward send's sync_comm_stream schedule (1F1B only).

        Removes the pipeline-flagged `c_sync_comm_stream` ops that sit before
        the first backward recv, and replaces each with a `nop` inserted right
        after that recv so the sent variable is kept alive until the send has
        certainly finished.
        """
        # Only the 1F1B schedule interleaves forward/backward in a way that
        # makes this overlap safe.
        if self.schedule_mode != '1F1B': return
        block = program.block(0)

        # With tensor (mp) parallelism the recv is a partial_recv.
        recv_type = 'recv_v2' if self.mp_degree == 1 else 'partial_recv'
        backward_recv_index = None
        for index, op in enumerate(block.ops):
            if op.type == recv_type and self._is_backward_op(op):
                backward_recv_index = index
                break

        # last pipeline stage: it has no backward recv, nothing to overlap.
        if backward_recv_index is None: return

        # `offset` compensates for ops already removed from `block` while we
        # iterate over the (frozen) snapshot `list(block.ops)`.
        offset = 0
        for index, op in enumerate(list(block.ops)):
            if index >= backward_recv_index: break
            if op.type == 'c_sync_comm_stream' and op.has_attr('pipeline_flag'):
                var_name = op.input_arg_names[0]
                var = block.var(var_name)
                block._remove_op(index + offset, sync=False)
                offset -= 1
                # NOTE:
                # 1. When the backward recv is completed, it indicates
                # that the forward send is completed too. So we only need
                # to use the NOP op to prevent memory release.
                # 2. Because we removed sync_comm_op,
                # we will insert NOP after recv_op.
                block._insert_op_without_sync(
                    index=backward_recv_index,
                    type='nop',
                    inputs={'X': [var]},
                    outputs={'Out': [var]},
                    attrs={self._op_role_key: self._op_role.Backward})
        block._sync_with_cpp()
    def _mv_head_recv(self, program):
        """
        A pass to move the recv op to the beginning of
        the forward/backward phase.

        Each recv-like op (`recv_v2`, `partial_recv`, `partial_allgather`,
        `nop`) is re-inserted at the current head of its phase and the stale
        copy removed, so receives are issued as early as possible.
        """
        forward_insert_index = 0
        backward_insert_index = None
        block = program.global_block()
        num_ops = len(program.global_block().ops)
        for i in range(num_ops):
            insert_index = None
            op = program.global_block().ops[i]
            op_role = int(op.attr(self._op_role_key))
            # First backward op marks the head of the backward phase.
            if op_role == int(
                    self._op_role.Backward) and backward_insert_index is None:
                backward_insert_index = i
            if op.type != "partial_recv" and op.type != "partial_allgather" and op.type != "nop" and op.type != "recv_v2":
                continue
            if op_role == int(self._op_role.Forward):
                # Already at the phase head: just advance the head marker.
                if i == forward_insert_index:
                    forward_insert_index += 1
                    continue
                insert_index = forward_insert_index
            elif op_role == int(self._op_role.Backward):
                if i == backward_insert_index:
                    backward_insert_index += 1
                    continue
                insert_index = backward_insert_index
            else:
                raise ValueError("Unknown op_role: {}".format(op_role))
            # Clone the op's inputs/outputs/attrs, re-insert it at the phase
            # head, then drop the original (now shifted to i + 1).
            op_inputs = dict()
            for name in op.input_names:
                op_inputs[name] = op.input(name)
            op_outputs = dict()
            for name in op.output_names:
                op_outputs[name] = op.output(name)
            block._insert_op_without_sync(
                index=insert_index,
                type=op.type,
                inputs=op_inputs,
                outputs=op_outputs,
                attrs=op.all_attrs())
            block._remove_op(i + 1)
            if op_role == int(self._op_role.Forward):
                forward_insert_index += 1
            elif op_role == int(self._op_role.Backward):
                backward_insert_index += 1
        block._sync_with_cpp()
    def minimize(self,
                 loss,
                 startup_program=None,
                 parameter_list=None,
                 no_grad_set=None):
        """Split the program into pipeline stages and set up the trainer.

        Runs the wrapped optimizer's minimize, then rewrites main/startup
        programs into per-device sections with send/recv ops between stage
        boundaries. Must be driven through fleet, which fills in
        `main_program._pipeline_opt`.

        Returns:
            (optimize_ops, params_grads, program_list, pipeline_pair,
            pp_ring_map)
        """
        main_block = loss.block
        self.origin_main_block = main_block
        main_program = main_block.program
        if startup_program is None:
            startup_program = default_startup_program()

        pipeline_opt = main_program._pipeline_opt
        assert pipeline_opt, 'Please use pipeline with fleet.'
        required_keys = [
            'local_rank',
            'schedule_mode',
            'micro_batch_size',
            'ring_id',
            'global_ring_id',
            'use_sharding',
            'mp_degree',
            'mp_rank',
        ]
        for key in required_keys:
            assert key in pipeline_opt, \
                'Please use pipeline with fleet to use {}.'.format(key)
        self.local_rank = pipeline_opt['local_rank']
        self.schedule_mode = pipeline_opt['schedule_mode']
        self.micro_batch_size = pipeline_opt['micro_batch_size']
        self.use_sharding = pipeline_opt['use_sharding']
        self.ring_id = pipeline_opt['ring_id']
        self.global_ring_id = pipeline_opt['global_ring_id']
        self.mp_degree = pipeline_opt['mp_degree']
        self.mp_rank = pipeline_opt['mp_rank']
        assert self.mp_degree >= 1
        assert 0 <= self.mp_rank < self.mp_degree

        optimize_ops, params_grads = self._optimizer.minimize(
            loss, startup_program, parameter_list, no_grad_set)
        self._param_device_map = self._origin_optimizer._param_device_map

        self.output_var_to_op, self.input_var_to_op = \
            self._get_input_output_info(main_block)
        # Step1: add default op_device attribute for ops.
        self._add_op_device_attr(main_block)
        device_list = self._check_validation(main_block)

        def device_cmp(device1, device2):
            # Compare devices like "gpu:3" by their numeric id.
            dev1_id = int(device1.split(':')[1])
            dev2_id = int(device2.split(':')[1])
            if dev1_id < dev2_id:
                return -1
            elif dev1_id > dev2_id:
                return 1
            else:
                return 0

        sorted_device_list = sorted(device_list, key=cmp_to_key(device_cmp))
        assert sorted_device_list == device_list, (
            "With pipeline parallelism, you must use gpu devices one after "
            "another in the order of their ids.")
        # Step2: add send and recv ops between section boundaries
        self._insert_sendrecv_ops_for_boundaries(main_block)

        # Step3: split program into sections and add pairs of
        # send and recv ops for data var.
        main_program = main_block.program
        program_list = self._split_program(main_program, device_list)
        for p in program_list:
            self._create_vars(p.global_block(), main_block)

        self.local_rank %= len(device_list)
        # Step3.5: optimize forward send sync_comm to overlap send and recv
        self._optimize_forward_send_sync(program_list[self.local_rank])

        # Step4: Special Case: process persistable vars that exist in
        # multiple sections
        # FIXME
        # self._process_persistable_vars_in_multi_sections(
        #     main_program, startup_program, program_list)

        # Step5: Add sub blocks for section programs
        self._add_sub_blocks(main_block, program_list)

        place_list = []
        for dev in device_list:
            dev_index = int(dev.split(":")[1])
            # NOTE(review): `dev_index % 1` is always 0 — presumably each
            # process only sees one device (selected via FLAGS_selected_*),
            # but confirm this is intentional.
            if core.is_compiled_with_cuda():
                place_list.append(core.CUDAPlace(dev_index % 1))
            elif core.is_compiled_with_npu():
                place_list.append(core.NPUPlace(dev_index % 1))

        # Step6: Split startup program
        new_startup_program = self._split_startup_program(startup_program,
                                                          self.local_rank)

        startup_program._pipeline_opt = {
            "startup_program": new_startup_program,
        }
        real_block = program_list[self.local_rank].global_block()
        self._insert_loss_scale(real_block)
        if not self.use_sharding:
            # Step7: clear gradients before each mini-batch and
            # accumulate gradients during backward
            self._rename_gradient_var_name(real_block)
            real_block._sync_with_cpp()
            self._accumulate_gradients(real_block)
            real_block._sync_with_cpp()

        if core.is_compiled_with_cuda():
            place_id = int(os.getenv("FLAGS_selected_gpus", "0"))
        elif core.is_compiled_with_npu():
            place_id = int(os.getenv("FLAGS_selected_npus", "0"))
        # A pass to move the recv op to the beginning of
        # the forward/backward phase
        self._mv_head_recv(program_list[self.local_rank])
        # Everything the PipelineTrainer/Section device worker needs at run
        # time is packed into _pipeline_opt of the original main program.
        main_program._pipeline_opt = {
            "trainer": "PipelineTrainer",
            "device_worker": "Section",
            "pipeline_stage": self.local_rank,
            "num_pipeline_stages": len(device_list),
            "schedule_mode": self.schedule_mode,
            "inner_parallelism": len(device_list),
            "section_program": program_list[self.local_rank],
            "place": place_list[self.local_rank],
            "place_id": place_id,
            "sync_steps": -1,
            "num_microbatches": self._num_microbatches,
            "start_cpu_core_id": self._start_cpu_core_id,
        }
        return optimize_ops, params_grads, program_list, self._pipeline_pair, self._pp_ring_map
class RecomputeOptimizer(Optimizer):
    """
    :api_attr: Static Graph

    Recompute Optimizer Wrapper

    Normally, a training step contains three sub-steps: first, run forward
    Operators to calculate the loss; second, run backward Operators to
    calculate gradient of the parameters; third, apply optimization method
    to update the value of the parameters.

    In the forward computation process, all variables that are needed by
    backward computation process will be kept in memory, which occupy a great
    amount of memory when the network becomes very deep.

    Recompute split the network to k segments. In each segment, It will
    recompute the forward Operators, before running backward operators. It is
    very helpful for saving memory.

    The Variables that separate a network to segments are called as checkpoints,
    and users should set it manually. The usage is very simple:

    Args:
        optimizer (Optimizer): The optimizer that is applied to parameters.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import numpy as np
            def gen_data():
                return {"x": np.random.random(size=(32, 32)).astype('float32'),
                "y": np.random.randint(2, size=(32, 1)).astype('int64')}
            def mlp(input_x, input_y, hid_dim=128, label_dim=2):
                print(input_x)
                fc_1 = fluid.layers.fc(input=input_x, size=hid_dim)
                prediction = fluid.layers.fc(input=[fc_1], size=label_dim, act='softmax')
                cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
                sum_cost = fluid.layers.reduce_mean(cost)
                return sum_cost, fc_1, prediction
            input_x = fluid.layers.data(name="x", shape=[32], dtype='float32')
            input_y = fluid.layers.data(name="y", shape=[1], dtype='int64')
            cost, fc_1, pred = mlp(input_x, input_y)

            sgd = fluid.optimizer.Adam(learning_rate=0.01)
            sgd = fluid.optimizer.RecomputeOptimizer(sgd)
            sgd._set_checkpoints([fc_1, pred])
            sgd.minimize(cost)

            print("Finished optimize")
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(fluid.default_startup_program())
            step = 10

            for i in range(step):
                cost_val = exe.run(feed=gen_data(),
                       program=fluid.default_main_program(),
                       fetch_list=[cost.name])
                print("step=%d cost=%f" % (i, cost_val[0]))
    """

    def __init__(self, optimizer):
        # Recompute rewrites the static graph; there is no dygraph support.
        if framework.in_dygraph_mode():
            raise Exception("In dygraph, don't support RecomputeOptimizer.")
        self._optimizer = optimizer
        # Filled in by _set_checkpoints(); backward() asserts it is set.
        self._checkpoints = None
        # Mirror the inner optimizer's learning-rate bookkeeping so this
        # wrapper can be used anywhere a plain Optimizer is expected.
        self._learning_rate = self._optimizer._learning_rate
        self._learning_rate_map = self._optimizer._learning_rate_map
        # Host-memory offload of checkpoints; toggled by _enable_offload().
        self.enable_offload = False
def _set_checkpoints(self, checkpoints):
"""
Args:
checkpoints (list): List of Variable or string
"""
assert isinstance(
checkpoints, list
), "_checkpoints should be a list of Variable or a list of String"
for ckpt in checkpoints:
assert (
isinstance(ckpt, six.string_types) or isinstance(ckpt, Variable)
), "_checkpoints should be a list of Variable or a list of String"
self._checkpoints = checkpoints
    # should enable offload before calling backward
    def _enable_offload(self):
        """Turn on host-memory offload of checkpoints (call before backward)."""
        self.enable_offload = True
    @framework.deprecate_stat_dict
    def load(self, state_dict):
        """
        :api_attr: Static Graph

        load function is not supported by Recompute Optimizer for now.
        :return: None

        Args:
            state_dict: the dict load by load_persistable method

        Raises:
            NotImplementedError: always — state loading is unimplemented
                for the recompute wrapper.

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                import paddle.compat as cpt

                def mlp(input_x, input_y, hid_dim=128, label_dim=2):
                    fc_1 = fluid.layers.fc(input=input_x, size=hid_dim)
                    prediction = fluid.layers.fc(input=[fc_1], size=label_dim, act='softmax')
                    cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
                    sum_cost = fluid.layers.reduce_mean(cost)
                    return sum_cost, fc_1, prediction

                input_x = fluid.layers.data(name="x", shape=[32], dtype='float32')
                input_y = fluid.layers.data(name="y", shape=[1], dtype='int64')
                cost, fc_1, pred = mlp(input_x, input_y)
                print("Finished FF")

                sgd = fluid.optimizer.Adam(learning_rate=0.01)
                sgd = fluid.optimizer.RecomputeOptimizer(sgd)
                sgd._set_checkpoints([fc_1, pred])
                try:
                    state_dict = {}
                    sgd.load(state_dict)
                except NotImplementedError as e:
                    print(cpt.get_exception_message(e))
        """
        raise NotImplementedError(
            "load function is not supported by Recompute Optimizer for now")
    def apply_gradients(self, params_grads):
        """
        call apply_gradients function of self._optimizer.

        Args:
            params_grads (list): list of (param, grad) pair to do optimization.

        Returns:
            list: A list of operators appended to the current program.

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                import paddle.fluid.framework as framework

                def mlp(input_x, input_y, hid_dim=128, label_dim=2):
                    fc_1 = fluid.layers.fc(input=input_x, size=hid_dim)
                    prediction = fluid.layers.fc(input=[fc_1], size=label_dim, act='softmax')
                    cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
                    sum_cost = fluid.layers.reduce_mean(cost)
                    return sum_cost, fc_1, prediction

                input_x = fluid.layers.data(name="x", shape=[32], dtype='float32')
                input_y = fluid.layers.data(name="y", shape=[1], dtype='int64')
                cost, fc_1, pred = mlp(input_x, input_y)
                print("Finished FF")

                sgd = fluid.optimizer.Adam(learning_rate=0.01)
                sgd = fluid.optimizer.RecomputeOptimizer(sgd)
                sgd._set_checkpoints([fc_1, pred])
                params_grads = sgd.backward(
                    cost,
                    startup_program=None,
                    parameter_list=None,
                    no_grad_set=None)

                program = cost.block.program
                with framework.program_guard(program, None):
                    optimize_ops = sgd.apply_gradients(params_grads)

                print("Finished apply gradients")
        """
        # The wrapper only rewrites backward; the update step is delegated
        # unchanged to the inner optimizer.
        return self._optimizer.apply_gradients(params_grads=params_grads)
def _creat_vars(self, varname):
pinned_var_name = unique_name.generate(varname + "@Pinned")
fetched_var_name = unique_name.generate(varname + "@Fetch")
pinned_var = self._main_program.global_block().create_var(
name=pinned_var_name,
shape=self.checkpoint_shape,
dtype=self._main_program.global_block().var(varname).dtype,
persistable=False,
stop_gradient=True)
fetch_var = self._main_program.global_block().create_var(
name=fetched_var_name,
shape=self.checkpoint_shape,
dtype=self._main_program.global_block().var(varname).dtype,
persistable=False,
stop_gradient=False)
return pinned_var_name, fetched_var_name
    def _append_fill_constant_ops(self, startup_program):
        """
        add fill_constant_ops to the end of the prog

        we should fill the pinned vars before runing the main_prog
        to instantiate their tensor hold_, which could tell us whether
        the host memory could hold all the checkpoints from all the
        GPU devices in this node.
        """
        # op_role 0 — presumably the Forward role; confirm against
        # OpRole enum if this is ever changed.
        op_role = 0
        block = startup_program.global_block()
        fill_constant_vars = self.checkpoint_name2pinned_name.values()
        OP_ROLE_KEY = core.op_proto_and_checker_maker.kOpRoleAttrName()
        for varname in fill_constant_vars:
            var = self._main_program.global_block().var(varname)
            # NOTE (JZ-LIANG) to pre-allocate the CUDAPinned MEM
            # (the returned handle is unused; create_var registers the var
            # in the startup block as a side effect)
            pinned_var = block.create_var(
                name=varname,
                shape=self.checkpoint_shape,
                dtype=self._main_program.global_block().var(var.name).dtype,
                persistable=False,
                stop_gradient=True)
            # place_type 2 — presumably CUDAPinnedPlace; confirm against the
            # fill_constant op's place_type attribute definition.
            block.append_op(
                type='fill_constant',
                outputs={'Out': varname},
                attrs={
                    "shape": var.shape,
                    "dtype": var.dtype,
                    "value": 0.0,
                    "place_type": 2,
                    OP_ROLE_KEY: op_role,
                })

        return
    def _insert_async_memcpy_op(self, insert_idx, src_varname, dst_varname,
                                op_role, dst_place_type):
        """Insert a `memcpy` op at `insert_idx` copying src -> dst.

        `dst_place_type` selects the destination memory; callers pass 1 when
        fetching back to device and 2 when offloading to pinned host memory —
        presumably matching the memcpy op's place-type encoding (confirm
        against the op definition).
        """
        OP_ROLE_KEY = core.op_proto_and_checker_maker.kOpRoleAttrName()
        self.block._insert_op_without_sync(
            insert_idx,
            type='memcpy',
            inputs={'X': [self._main_program.global_block().var(src_varname)]},
            outputs={
                'Out': [self._main_program.global_block().var(dst_varname)]
            },
            attrs={
                "dst_place_type": int(dst_place_type),
                OP_ROLE_KEY: op_role
            })
def _insert_fetch_op(self, idx, varname):
assert varname in self.checkpoint_name2pinned_name, "Try to fetch {} from Pinned Memory, but it is NOT a checkpoint".format(
varname)
pinned_varname = self.checkpoint_name2pinned_name[varname]
fetch_varname = self.checkpoint_name2fetch_name[varname]
self._insert_async_memcpy_op(idx, pinned_varname, fetch_varname, 1, 1)
def _insert_offload_op(self, idx, varname):
assert varname in self.checkpoint_name2pinned_name, "Try to offload {} to Pinned Memory, but it is NOT a checkpoint".format(
varname)
pinned_varname = self.checkpoint_name2pinned_name[varname]
self._insert_async_memcpy_op(idx, varname, pinned_varname, 0, 2)
    def _insert_sync_op(self, op_idx, checkpoint_name):
        """Insert a synchronization for an offloaded checkpoint.

        Intentionally a no-op: with a single stream the memcpy is already
        ordered with compute, so no explicit sync op is needed.
        """
        # single stream offload no need sync
        pass
def _record_fetch_op(self, idx):
assert len(self.un_fetch_checkpoint_names
) > 0, "Could NOT found checkpoint to fetch"
checkpoint_name = self.un_fetch_checkpoint_names.pop(-1)
logging.debug("Record fetch [{}]".format(checkpoint_name))
self.idx2insertions[idx] = ("fetch", checkpoint_name)
return checkpoint_name
def _record_offload_op(self, idx, checkpoint_name):
expected_checkpoint_name = self.un_offload_checkpoint_names.pop(0)
assert checkpoint_name == expected_checkpoint_name, "expected to offload [{}] but got [{}]".format(
expected_checkpoint_name, checkpoint_name)
logging.debug("Record offload [{}]".format(checkpoint_name))
self.idx2insertions[idx] = ("offload", checkpoint_name)
def _record_sync_op(self, idx, checkpoint_name):
assert checkpoint_name not in self.synced_checkpoints, "Try to sync the checkpoint [{}] twice".format(
checkpoint_name)
self.synced_checkpoints.add(checkpoint_name)
logging.debug("Record offload sync [{}]".format(checkpoint_name))
self.idx2insertions[idx] = ("sync", checkpoint_name)
    def _parse_backward(self):
        """Plan the backward pass for offload: decide where checkpoints must
        be fetched back from pinned memory and rename their uses.

        Fills `self.idx2insertions` with ("fetch", name) entries (applied
        later by `_update_backward`) and sets `self.bw_strart_op_idx`.
        """
        self.idx2insertions = {}
        # don't offload the last checkpoints, to favor throughput
        self.un_fetch_checkpoint_names = self.sorted_checkpoint_names[:]
        self.un_fetch_checkpoint_names.pop(-1)
        need_fetch_checkpoint_names = self.un_fetch_checkpoint_names[:]
        self.checkpoint_usage_count = {}
        for checkpoint_name in self.un_fetch_checkpoint_names:
            self.checkpoint_usage_count[checkpoint_name] = 0

        # Locate the first backward op (op_role == 1).
        self.bw_strart_op_idx = len(self.block.ops)
        for idx, op in enumerate(self.block.ops):
            if int(op.desc.attr("op_role")) == 1:
                self.bw_strart_op_idx = idx
                break

        assert self.bw_strart_op_idx < len(
            self.block.ops), "Could NOT found backword op in prog"

        # fetch second to last checkpoint at the beginning of BW
        fetched_checkpoint_varname = self._record_fetch_op(
            self.bw_strart_op_idx)
        # NOTE(review): this local is never read afterwards — looks vestigial.
        last_last_fetch_checkpoint = None

        for i, op in enumerate(self.block.ops[self.bw_strart_op_idx:]):
            idx = self.bw_strart_op_idx + i
            input_vars = op.desc.input_arg_names()
            for input_var in input_vars:
                if input_var in need_fetch_checkpoint_names:
                    if input_var not in self.un_fetch_checkpoint_names:
                        # fetch the offloade checkpoint when the first usage of its previous one
                        if self.checkpoint_usage_count[input_var] == 0:
                            # TODO (JZ-LIANG) sync memcpy_stream if extra stream for memcpy
                            second_to_last_fetch_checkpoint = fetched_checkpoint_varname
                            # there is NO fetch ahead the first checkpoint
                            if input_var != self.sorted_checkpoint_names[0]:
                                fetched_checkpoint_varname = self._record_fetch_op(
                                    idx)

                        # should check the current used checkpoint is ths last fetch one
                        # NOTE(review): `second_to_last_fetch_checkpoint` is
                        # only bound inside the count==0 branch above —
                        # presumably checkpoint uses always start with a first
                        # use; confirm it cannot be unbound here.
                        assert second_to_last_fetch_checkpoint == input_var, "Current recompute segment should use [{}] BUT got [{}]".format(
                            second_to_last_fetch_checkpoint, input_var)
                        # rename
                        self.block.ops[idx]._rename_input(
                            input_var,
                            self.checkpoint_name2fetch_name[input_var])
                        self.checkpoint_usage_count[input_var] += 1
                    else:
                        raise ValueError(
                            "use checkpoint [{}] before fetch in BW".format(
                                input_var))

        assert len(self.un_fetch_checkpoint_names
                   ) == 0, "{} checkpoints have NOT been Recorded".format(
                       self.un_fetch_checkpoint_names)
def _update_backward(self):
if len(self.idx2insertions) == 0:
return
total_op = len(self.block.ops)
for op_idx in reversed(range(self.bw_strart_op_idx, total_op)):
if op_idx in self.idx2insertions:
operation, checkpoint_name = self.idx2insertions[op_idx]
if operation == "fetch":
self._insert_fetch_op(op_idx, checkpoint_name)
logging.debug("Insert [{}] fetch op.".format(
checkpoint_name))
del self.idx2insertions[op_idx]
elif operation == "sync":
self._insert_sync_op(op_idx, checkpoint_name)
logging.debug("Sync [{}] fetch op.".format(checkpoint_name))
self.block._sync_with_cpp()
assert len(
self.idx2insertions) == 0, "{} checkpoints left un-Fecthed".format(
[ele[1] for ele in self.idx2insertions.values()])
def _parse_forward(self):
self.idx2insertions = {}
# don't offload the last checkpoints, faster, less memory saving
self.un_offload_checkpoint_names = self.sorted_checkpoint_names[:]
last_checkpoint = self.un_offload_checkpoint_names.pop(-1)
need_offload_checkpoint_names = self.un_offload_checkpoint_names[:]
self.checkpoint_usage_count_and_idx = {}
for checkpoint_name in self.un_offload_checkpoint_names:
self.checkpoint_usage_count_and_idx[checkpoint_name] = {
'count': 0,
'idx': -1
}
self.synced_checkpoints = set()
self.fw_strart_op_idx = len(self.block.ops)
for idx, op in enumerate(self.block.ops):
if int(op.desc.attr("op_role")) == 0:
self.fw_strart_op_idx = idx
break
assert self.fw_strart_op_idx < len(
self.block.ops), "Could NOT found Forward op in prog"
last_offload_checkpoint = None
for i, op in enumerate(self.block.ops[self.fw_strart_op_idx:
self.bw_strart_op_idx]):
idx = self.fw_strart_op_idx + i
output_vars = op.desc.output_arg_names()
input_vars = op.desc.input_arg_names()
for output_var in output_vars:
if output_var in need_offload_checkpoint_names:
assert len(
output_vars
) == 1, "chekpoint should be the only Output of a certain op, but [{}] is from [{}]".format(
output_var, op)
if output_var in self.un_offload_checkpoint_names:
# insert sync op if last checkpoint has not been sync
if last_offload_checkpoint != None:
if self.checkpoint_usage_count_and_idx[
last_offload_checkpoint]['count'] == 0:
self._record_sync_op(idx,
last_offload_checkpoint)
else:
last_usage_idx = self.checkpoint_usage_count_and_idx[
last_offload_checkpoint]['idx']
assert last_usage_idx > 0, "last_usage_idx of checkpoint [{}] should large than 0".format(
last_offload_checkpoint)
self._record_sync_op(last_usage_idx + 1,
last_offload_checkpoint)
# insert offload op after the checkpoint's generation op
self._record_offload_op(idx + 1, output_var)
last_offload_checkpoint = output_var
else:
raise ValueError(
"There should be just ONE op that output checkpoint [{}]".
format(output_var))
# need to sync the last need to offload checkpoint before the last checkpoint as output op
if output_var == last_checkpoint:
assert len(
output_vars
) == 1, "chekpoint should be the only Output of a certain op, but [{}] is from [{}]".format(
output_var, op)
assert last_offload_checkpoint == self.sorted_checkpoint_names[
-2], "the last offload chekpoint before [{}] is suppose to be [{}], but got [{}]".format(
last_checkpoint, self.sorted_checkpoint_names[-2],
last_offload_checkpoint)
# sync if last checkpoint has not been sync
if self.checkpoint_usage_count_and_idx[
last_offload_checkpoint]['idx'] == 0:
self._record_sync_op(idx, last_offload_checkpoint)
else:
last_usage_idx = self.checkpoint_usage_count_and_idx[
last_offload_checkpoint]['idx']
assert last_usage_idx > 0, "last_usage_idx of checkpoint [{}] should large than 0".format(
last_offload_checkpoint)
self._record_sync_op(last_usage_idx + 1,
last_offload_checkpoint)
# record checkpoint usage
for input_var in input_vars:
if input_var in need_offload_checkpoint_names:
assert input_var not in self.synced_checkpoints, "checkpoint [{}] used after sync".format(
input_var)
self.checkpoint_usage_count_and_idx[input_var]['count'] += 1
self.checkpoint_usage_count_and_idx[input_var]['idx'] = idx
assert len(self.un_offload_checkpoint_names
) == 0, "{} checkpoints have NOT been Recorded".format(
self.un_fetch_checkpoint_names)
assert len(self.synced_checkpoints) == len(
need_offload_checkpoint_names
), "{} checkpoints have NOT been Recorded".format(
set(need_offload_checkpoint_names) - set(self.synced_checkpoints))
    def _update_forward(self):
        """Apply the offload/sync insertions recorded by `_parse_forward`.

        Op indices are visited in reverse so that lower insertion points
        remain valid while new ops are inserted.
        """
        if len(self.idx2insertions) == 0:
            return
        for op_idx in reversed(
                range(self.fw_strart_op_idx, self.bw_strart_op_idx)):
            if op_idx in self.idx2insertions:
                operation, checkpoint_name = self.idx2insertions[op_idx]
                if operation == "offload":
                    self._insert_offload_op(op_idx, checkpoint_name)
                    logging.debug("Insert [{}] offload op.".format(
                        checkpoint_name))
                    del self.idx2insertions[op_idx]
                elif operation == "sync":
                    self._insert_sync_op(op_idx, checkpoint_name)
                    logging.debug("Insert [{}] offload_sync op.".format(
                        checkpoint_name))
                    del self.idx2insertions[op_idx]

        self.block._sync_with_cpp()
        # Every recorded insertion must have been consumed above.
        assert len(self.idx2insertions
                   ) == 0, "{} checkpoints left un-Offloaded".format(
                       [ele[1] for ele in self.idx2insertions.values()])
    def _check_offload_fetch(self):
        """Verify the offload/fetch plan; currently a no-op.

        With single-stream offload no extra synchronization needs checking.
        """
        # TODO(JZ-LIANG) the single stream offload need no sync
        pass
    def _offload(self, loss, startup_program=None):
        """
        core steps for recompute offload
        1. create pinned vars and temp vars
        2. parse & update the backward pass: rename, fetch, sync
        3. parse & update the forward pass: offload, sync
        4. verify the correctness
        """
        self._main_program = loss.block.program
        self.block = loss.block
        if startup_program == None:
            startup_program = fluid.default_startup_program()

        with program_guard(self._main_program, startup_program):
            # Offload requires a fully static checkpoint shape.
            assert len(self.checkpoint_shape) > 0, (
                "checkpoints shape {} should be an non empty list like: [12, 512, 1024]".
                format(self.checkpoint_shape))
            assert all([ele > 0 for ele in self.checkpoint_shape]), (
                "all ele in checkpoints shape {} should be a determined integer larger than 0".
                format(self.checkpoint_shape))
            # step 1. create each checkpoint's pinned/fetch twin vars.
            self.checkpoint_name2pinned_name = dict()
            self.checkpoint_name2fetch_name = dict()
            for checkpoint_varname in self.sorted_checkpoint_names:
                pinned_var_name, fetch_var_name = self._creat_vars(
                    checkpoint_varname)
                self.checkpoint_name2pinned_name[
                    checkpoint_varname] = pinned_var_name
                self.checkpoint_name2fetch_name[
                    checkpoint_varname] = fetch_var_name
            # Pre-allocate the pinned host buffers in the startup program.
            self._append_fill_constant_ops(startup_program)
            # TODO (JZ-LIANG) to provide two offload stragtegy in future
            # step 2. parse & update the backward pass first — it also sets
            # bw_strart_op_idx, which _parse_forward depends on.
            self._parse_backward()
            self._update_backward()
            # step 3. parse & update the forward pass: offload, sync
            self._parse_forward()
            self._update_forward()
            # step 4. verify the correctness
            self._check_offload_fetch()

        return
    def backward(self,
                 loss,
                 startup_program=None,
                 parameter_list=None,
                 no_grad_set=None,
                 callbacks=None):
        """
        call append_backward with checkpoints.

        Args:
            loss (Variable): loss variable to run optimizations.
            startup_program (Program): startup_program for initializing parameters
                in `parameter_list`.
            parameter_list (list): list of Variables or Variable.names to update.
            no_grad_set (set|None): set of Variables or Variables.names should be ignored.
            callbacks (list|None): list of callables to run when appending backward
                operator for one parameter.
            checkpoints (list): list of Variables as checkpoints

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid

                def mlp(input_x, input_y, hid_dim=128, label_dim=2):
                    fc_1 = fluid.layers.fc(input=input_x, size=hid_dim)
                    prediction = fluid.layers.fc(input=[fc_1], size=label_dim, act='softmax')
                    cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
                    sum_cost = fluid.layers.reduce_mean(cost)
                    return sum_cost, fc_1, prediction

                input_x = fluid.layers.data(name="x", shape=[32], dtype='float32')
                input_y = fluid.layers.data(name="y", shape=[1], dtype='int64')
                cost, fc_1, pred = mlp(input_x, input_y)
                print("Finished FF")

                sgd = fluid.optimizer.Adam(learning_rate=0.01)
                sgd = fluid.optimizer.RecomputeOptimizer(sgd)
                sgd._set_checkpoints([fc_1, pred])
                params_grads = sgd.backward(
                    cost,
                    startup_program=None,
                    parameter_list=None,
                    no_grad_set=None)
                print("Finished backward")
        """
        assert (self._checkpoints is not None
                ), "You should call _set_checkpoints first"

        if framework.in_dygraph_mode():
            raise NotImplementedError(
                "DyGraph current does not support recompute")

        self._dtype = loss.dtype
        program = loss.block.program
        with program_guard(program, startup_program):
            # String checkpoints are resolved to Variables of the loss block.
            checkpoint_vars = []
            for ckpt in self._checkpoints:
                if isinstance(ckpt, Variable):
                    checkpoint_vars.append(ckpt)
                else:
                    checkpoint_vars.append(loss.block.var(ckpt))

            # allow return to non-recompute when checkpoints is empty
            # (append_backward returns an extra value when checkpoints given).
            if len(checkpoint_vars) > 0:
                params_grads, sorted_checkpoint_names = append_backward(
                    loss,
                    parameter_list,
                    no_grad_set,
                    checkpoints=checkpoint_vars)
            else:
                params_grads = append_backward(
                    loss,
                    parameter_list,
                    no_grad_set,
                    checkpoints=checkpoint_vars)

        if self.enable_offload:
            # NOTE(review): if checkpoints were empty, sorted_checkpoint_names
            # is unbound here and this raises NameError — presumably offload
            # is only enabled together with non-empty checkpoints; confirm.
            self.sorted_checkpoint_names = sorted_checkpoint_names
            self._offload(loss, startup_program=startup_program)

        return params_grads
    def apply_optimize(self, loss, startup_program, params_grads):
        """
        call the apply_optimize function of self._optimizer.

        Args:
            loss (Variable): loss variable to run optimizations.
            startup_program (Program): startup_program for initializing parameters
                in `parameter_list`.
            params_grads (list): list of (param, grad) pair to do optimization.

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid

                def mlp(input_x, input_y, hid_dim=128, label_dim=2):
                    fc_1 = fluid.layers.fc(input=input_x, size=hid_dim)
                    prediction = fluid.layers.fc(input=[fc_1], size=label_dim, act='softmax')
                    cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
                    sum_cost = fluid.layers.reduce_mean(cost)
                    return sum_cost, fc_1, prediction

                input_x = fluid.layers.data(name="x", shape=[32], dtype='float32')
                input_y = fluid.layers.data(name="y", shape=[1], dtype='int64')
                cost, fc_1, pred = mlp(input_x, input_y)
                print("Finished FF")

                sgd = fluid.optimizer.Adam(learning_rate=0.01)
                sgd = fluid.optimizer.RecomputeOptimizer(sgd)
                sgd._set_checkpoints([fc_1, pred])
                params_grads = sgd.backward(
                    cost,
                    startup_program=None,
                    parameter_list=None,
                    no_grad_set=None)

                optimize_ops = sgd.apply_optimize(
                    cost, startup_program=None, params_grads=params_grads)

                print("Finished apply_optimize")
        """
        # Parameter updates are delegated untouched to the inner optimizer.
        return self._optimizer.apply_optimize(
            loss, startup_program=startup_program, params_grads=params_grads)
def minimize(self,
loss,
startup_program=None,
parameter_list=None,
no_grad_set=None):
assert isinstance(loss, Variable), "The loss should be an Variable."
assert (self._checkpoints is not None
), "You should call _set_checkpoints first"
if framework.in_dygraph_mode():
raise NotImplementedError(
"DyGraph current does not support recompute")
params_grads = self.backward(
loss,
startup_program=startup_program,
parameter_list=parameter_list,
no_grad_set=no_grad_set)
optimize_ops = self.apply_optimize(
loss, startup_program=startup_program, params_grads=params_grads)
return optimize_ops, params_grads
class LookaheadOptimizer(object):
r"""
:api_attr: Static Graph
This implements the Lookahead optimizer of the
paper : https://arxiv.org/abs/1907.08610.
Lookahead keeps two sets of params: the fast_params and
the slow_params. inner_optimizer update fast_params every
training step. Lookahead updates the slow_params and fast_params
every k training steps as follows:
.. math::
slow\_param_t &= slow\_param_{t-1} + \\alpha * (fast\_param_{t-1} - slow\_param_{t-1})
fast\_param_t &= slow\_param_t
Args:
inner_optimizer (Optimizer): The optimizer that update fast params step by step.
alpha (float): The learning rate of Lookahead.
k (int): The slow params is updated every k steps.
Examples:
.. code-block:: python
import paddle
import paddle.fluid as fluid
import numpy as np
import numpy.random as random
paddle.enable_static()
x = fluid.layers.data(name='x', shape=[2], dtype='float32')
label = fluid.layers.data(name="label", shape=[1], dtype="int64")
y = fluid.layers.fc(input=[x], size=2, act="softmax")
loss = fluid.layers.cross_entropy(input=y, label=label)
loss = fluid.layers.mean(x=loss)
sgd = fluid.optimizer.SGD(learning_rate=0.01)
optimizer = fluid.optimizer.LookaheadOptimizer(sgd,
alpha=0.5,
k=5)
optimizer.minimize(loss)
main_program = fluid.default_main_program()
place = fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
def train_reader(limit=5):
for i in range(limit):
yield random.random([2]).astype('float32'), random.random([1]).astype('int64')
feeder = fluid.DataFeeder(feed_list=[x, label], place=place)
reader = paddle.batch(paddle.reader.shuffle(train_reader, buf_size=50000),batch_size=1)
for batch_data in reader():
exe.run(fluid.default_main_program(),
feed=feeder.feed(batch_data))
"""
def __init__(self, inner_optimizer, alpha=0.5, k=5):
if framework.in_dygraph_mode():
raise Exception("In dygraph, don't support LookaheadOptimizer.")
assert (inner_optimizer is not None), "inner optimizer can not be None"
assert (
0.0 <= alpha <= 1.0
), "alpha should be larger or equal to 0.0, and less or equal than 1.0"
assert (isinstance(k, int) and k > 0), "k should be a positive integer"
self.inner_optimizer = inner_optimizer
self.alpha = alpha
self.k = k
self.type = "lookahead"
    def minimize(self, loss, startup_program=None):
        """Minimize ``loss`` with the inner optimizer and append the
        Lookahead slow-weight update to the main program.

        For every parameter P a persistable twin ``P@SLOW`` is created in
        both the main and startup programs; a Switch block then interpolates
        slow/fast weights every ``self.k`` steps.

        Args:
            loss (Variable): loss variable to minimize.
            startup_program (Program, optional): startup program; defaults
                to the global default startup program.

        Returns:
            The return value of the inner optimizer's ``minimize`` call.
        """
        # Apply inner optimizer to the main_program
        mini_out = self.inner_optimizer.minimize(
            loss, startup_program=startup_program)
        # Get startup_program and main_program
        if startup_program is None:
            startup_program = default_startup_program()
        main_block = loss.block
        # add some vars to the main_program
        params = [param.name for param in main_block.all_parameters()]
        param_to_slow = {}
        for param in params:
            fast_var = main_block.var(param)
            assert (fast_var is not None)
            # Persistable shadow copy of each parameter (the "slow" weights).
            slow_var = main_block.create_var(
                name=param + "@SLOW",
                shape=fast_var.shape,
                dtype=fast_var.dtype,
                persistable=True)
            param_to_slow[param] = slow_var
        # add some vars to the startup_program
        startup_block = startup_program.global_block()
        for param in params:
            fast_var = startup_block.var(param)
            assert (fast_var is not None)
            slow_var = startup_block.create_var(
                name=param + "@SLOW",
                shape=fast_var.shape,
                dtype=fast_var.dtype,
                persistable=True)
            # Initialize slow weights from the freshly-initialized fast ones.
            startup_block.append_op(
                type="assign",
                inputs={"X": fast_var},
                outputs={"Out": slow_var})
        with framework.program_guard(main_block.program, startup_program):
            # Add Var k to main prog and startup prog
            k = layers.create_global_var(
                name="lookahead_k",
                shape=[1],
                value=int(self.k),
                dtype='int32',
                persistable=True)
            # Add Var alpha to main prog and startup prog
            alpha = layers.create_global_var(
                name="lookahead_alpha",
                shape=[1],
                value=float(self.alpha),
                dtype='float32',
                persistable=True)
            # Add Var step
            step = layers.create_global_var(
                name="lookahead_step",
                shape=[1],
                value=int(0),
                dtype='int32',
                persistable=True)
            layers.increment(x=step, value=1.0, in_place=True)
            # lookahead
            zero_var = layers.fill_constant(
                shape=[1], dtype='float32', value=0.0)
            one_var = layers.fill_constant(
                shape=[1], dtype='float32', value=1.0)
            # mod == 0 means this is the k-th step since the last slow update.
            mod = layers.elementwise_mod(step, k)
            with layers.control_flow.Switch() as switch:
                with switch.case(step == one_var):
                    # First step: seed the slow weights from the fast ones.
                    for param_name in params:
                        fast_var = main_block.var(param_name)
                        slow_var = param_to_slow[param_name]
                        layers.assign(input=fast_var, output=slow_var)
                with switch.case(mod == zero_var):
                    # Every k steps: slow = alpha*fast + (1-alpha)*slow,
                    # then fast is reset to the new slow weights.
                    for param_name in params:
                        fast_var = main_block.var(param_name)
                        slow_var = param_to_slow[param_name]
                        tmp_var = layers.elementwise_add(
                            layers.elementwise_mul(fast_var, alpha),
                            layers.elementwise_mul(
                                slow_var,
                                layers.elementwise_sub(one_var, alpha)))
                        layers.assign(input=tmp_var, output=slow_var)
                        layers.assign(input=tmp_var, output=fast_var)
                with switch.default():
                    pass
        return mini_out
class GradientMergeOptimizer(object):
    """
    Gradient Merge, also called as Gradient Accumulation,
    is a training strategy for larger batches. With this strategy,
    the parameter will not be updated until specific steps.
    For each step, the forward network and the backward network
    will run to calculate the gradient of the parameters.
    For every k step, the optimization network will run,
    applying a specific optimization method (such as SGD, Adam)
    to the parameters.
    Args:
        inner_optimizer (Optimizer): The specific optimization (such as SGD, Adam)
            which update the parameters
        k_steps (int): the update period of the parameters
        avg (bool): whether to average the gradients of each mini-batch,
            the default value is `True`
    Examples:
        .. code-block:: python
        import paddle.fluid as fluid
        import numpy as np
        def gen_data(batch_size):
            return {"x": np.random.random(size=(batch_size, 32)).astype('float32'),
                    "y": np.random.random(size=(batch_size, 1)).astype('int64')}
        def mlp(input_x, input_y, hid_dim=128, label_dim=2):
            fc_1 = fluid.layers.fc(input=input_x, size=hid_dim)
            prediction = fluid.layers.fc(input=[fc_1], size=label_dim, act='softmax')
            cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
            sum_cost = fluid.layers.reduce_mean(cost)
            return sum_cost, fc_1, prediction
        input_x = fluid.layers.data(name="x", shape=[32], dtype='float32')
        input_y = fluid.layers.data(name="y", shape=[1], dtype='int64')
        cost, fc_1, pred = mlp(input_x, input_y)
        sgd = fluid.optimizer.Adam(learning_rate=0.01)
        sgd = fluid.optimizer.GradientMergeOptimizer(sgd, k_steps=4, avg=True)
        sgd.minimize(cost)
        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())
        for i in range(10):
            cost_val = exe.run(feed=gen_data(32),
                               program=fluid.default_main_program(),
                               fetch_list=[cost.name])
            print("step=%d, cost=%f" % (i, cost_val[0]))
    """
    # Op attribute key under which the name of the merge-condition variable
    # is stored (consumed by later graph passes, see _add_gm_op_role_var).
    GRAD_MERGE_COND_NAME = "grad_merge_cond_name"
    def __init__(self, inner_optimizer, k_steps=1, avg=True):
        """Wrap ``inner_optimizer`` so parameters update only every
        ``k_steps`` mini-batches (static graph mode only).

        Raises:
            Exception: when invoked under dygraph mode.
        """
        if framework.in_dygraph_mode():
            raise Exception(
                "In dygraph, we don't support GradientMergeOptimizer."
                "You can do Gradient merge by yourself with k-times forward + backward, "
                "and one-time optimizer.minimize()")
        assert (inner_optimizer is not None), "inner optimizer can not be None"
        assert (isinstance(k_steps, int) and
                k_steps > 0), "k_steps should be a positive integer"
        self.inner_optimizer = inner_optimizer
        self.k_steps = k_steps
        self.type = "gradient_merge"
        self.avg = avg
        # Cached result of the last inner apply_gradients call (set inside
        # the conditional block built in apply_gradients).
        self._optimize_ops = None
    def _set_k_steps(self, k_steps):
        # Internal setter used by distributed strategies to retune the period.
        self.k_steps = k_steps
    def _set_avg(self, avg):
        # Internal setter toggling gradient averaging.
        self.avg = avg
    def backward(self,
                 loss,
                 startup_program=None,
                 parameter_list=None,
                 no_grad_set=None,
                 callbacks=None):
        """Delegate backward to the inner optimizer.

        ``parameter_list``/``no_grad_set`` are rejected because the merge
        bookkeeping below assumes all parameters participate.
        """
        assert isinstance(loss, Variable), "The loss should be an Variable."
        assert (
            parameter_list is None
        ), "The parameter_list should be None when using GradientMergeOptimizer"
        assert (
            no_grad_set is None
        ), "The no_grad_set should be None when using GradientMergeOptimizer"
        params_grads = self.inner_optimizer.backward(
            loss, startup_program=startup_program)
        return params_grads
    def apply_optimize(self, loss, startup_program, params_grads):
        """Apply gradients inside a program guard on the loss's program."""
        program = loss.block.program
        with program_guard(program, startup_program):
            optimize_ops = self.apply_gradients(params_grads)
        return optimize_ops
    def _is_the_backward_op(self, op):
        """Return True if ``op`` carries both the OpRoleVar attribute and the
        Backward op-role flag."""
        op_maker = core.op_proto_and_checker_maker
        backward = core.op_proto_and_checker_maker.OpRole.Backward
        if op_maker.kOpRoleVarAttrName() in op.attr_names and \
                int(op.all_attrs()[op_maker.kOpRoleAttrName()]) == int(backward):
            return True
        return False
    def _remove_op_role_var(self, param, grad):
        """Strip (param, grad) from the producing backward op's op_role_var
        attribute, so downstream passes act on the merged grad instead."""
        op_maker = core.op_proto_and_checker_maker
        op = grad.op
        assert self._is_the_backward_op(op), \
            'grad.op={} is not the backward op which produces the grad={}' \
            .format(op, grad.name)
        block = grad.block
        var_attr = op.all_attrs()[op_maker.kOpRoleVarAttrName()]
        assert param.name in var_attr, \
            'when using GradientMergeOptimizer, param={} must be in var_attr={}' \
            .format(param.name, var_attr)
        assert grad.name in var_attr, \
            'when using GradientMergeOptimizer, grad={} must be in var_attr={}' \
            .format(param.name, var_attr)
        # remove (param, grad) from op_role_var
        var_attr.remove(param.name)
        var_attr.remove(grad.name)
        if len(var_attr) > 1:
            op._set_attr(op_maker.kOpRoleVarAttrName(), var_attr)
        else:
            op._remove_attr(op_maker.kOpRoleVarAttrName())
    def _add_gm_op_role_var(self, op, param, grad, cond):
        """Tag the accumulate op with the merge-condition var name and
        backward role so distributed passes allreduce only when cond holds."""
        grad.op = op
        op_maker = core.op_proto_and_checker_maker
        backward = op_maker.OpRole.Backward
        # NOTE(wangxi). When distributed, we will insert grad_merge_all_reduce_op_handle
        # in multi_devices_graph_pass, which will allreduce(grad) if cond is True, else
        # do nothing.
        # In this way, the gradient can be merged first, and then communicate when the
        # condition is met, reducing the number of communications to increase the
        # speed.
        op._set_attr(self.GRAD_MERGE_COND_NAME, cond.name)
        op._set_attr(op_maker.kOpRoleAttrName(), backward)
        op._set_attr(op_maker.kOpRoleVarAttrName(), [param.name, grad.name])
    def _get_gm_cond_var(self, main_block):
        """Build and return the boolean condition variable that is True on
        every k-th step (step counter incremented and tested on CPU)."""
        # Add const var
        k_step_var = layers.create_global_var(
            name="gradient_merge_k",
            shape=[1],
            value=int(self.k_steps),
            dtype='int32',
            persistable=True,
            force_cpu=True)
        zero_var = layers.create_global_var(
            name="gradient_merge_zero",
            shape=[1],
            value=int(0),
            dtype='int32',
            persistable=True,
            force_cpu=True)
        # Add step var & cond var
        step_var = layers.create_global_var(
            name="gradient_merge_step",
            shape=[1],
            value=int(0),
            dtype='int32',
            persistable=True,
            force_cpu=True)
        cond_var = layers.create_global_var(
            name="gradient_merge_cond",
            shape=[1],
            value=bool(0),
            dtype='bool',
            persistable=True,
            force_cpu=True)
        with device_guard("cpu"):
            # step_var = (step_var + 1) % k_step
            layers.increment(x=step_var, value=1.0, in_place=True)
            main_block.append_op(
                type='elementwise_mod',
                inputs={'X': step_var,
                        'Y': k_step_var},
                outputs={'Out': step_var},
                attrs={'axis': -1,
                       'use_mkldnn': False})
            # cond_var = (step_var == 0)
            main_block.append_op(
                type='equal',
                inputs={'X': step_var,
                        'Y': zero_var},
                outputs={'Out': cond_var})
        return cond_var
    def apply_gradients(self, params_grads):
        """Accumulate each grad into a persistable ``@GRAD@GradientMerge``
        buffer and, under a conditional block that fires every k steps,
        (optionally average,) apply the inner optimizer and zero the buffers.
        """
        main_program = default_main_program()
        startup_program = default_startup_program()
        main_block = main_program.global_block()
        startup_block = startup_program.global_block()
        cond = self._get_gm_cond_var(main_block)
        #TODO(mapingshuo) support sparse embedding
        # step1: remove grad.op's op_role_var
        for param, grad in params_grads:
            assert (
                param.type != core.VarDesc.VarType.SELECTED_ROWS
            ), "SELECTED_ROWS is not supported in GradientMergeOptimizer for now"
            self._remove_op_role_var(param, grad)
        param_to_grad = {k.name: v for (k, v) in params_grads}
        param_names = param_to_grad.keys()
        param_to_gradient_merge = {}
        new_params_grads = []
        # step2: create gradient_merge var and init with 0
        # and update op_role_var
        for param, grad in params_grads:
            param_name = param.name
            param_var = main_block.var(param_name)
            assert (param_var is not None)
            gradient_merge_var = main_block.create_var(
                name=param_name + "@GRAD@GradientMerge",
                shape=param_var.shape,
                dtype=param_var.dtype,
                persistable=True)
            param_to_gradient_merge[param_name] = gradient_merge_var
            startup_gradient_merge_var = startup_block.create_var(
                name=param_name + "@GRAD@GradientMerge",
                shape=param_var.shape,
                dtype=param_var.dtype,
                persistable=True)
            # Zero-initialize the accumulation buffer in the startup program.
            startup_block.append_op(
                type="fill_constant",
                outputs={"Out": startup_gradient_merge_var},
                attrs={
                    "shape": param_var.shape,
                    "dtype": param_var.dtype,
                    "value": float(0),
                })
            # grad_merge += grad
            new_grad_op = main_block.append_op(
                type="elementwise_add",
                inputs={'X': grad,
                        'Y': gradient_merge_var},
                outputs={'Out': gradient_merge_var},
                attrs={'axis': -1,
                       'use_mkldnn': False})
            self._add_gm_op_role_var(new_grad_op, param, gradient_merge_var,
                                     cond)
            new_params_grads.append([param, gradient_merge_var])
        def true_apply_gradient():
            # Body of the conditional block: runs only on every k-th step.
            cur_block_idx = main_program.current_block_idx
            cur_block = main_program.current_block()
            # cur_block's forward_block & backward_block is itself
            cur_block._set_forward_block_idx(cur_block_idx)
            if self.avg:
                for param, new_grad in new_params_grads:
                    # grad /= k_steps
                    cur_block.append_op(
                        type='scale',
                        inputs={'X': new_grad},
                        outputs={'Out': new_grad},
                        attrs={
                            'scale': 1.0 / self.k_steps,
                            'bias': 0.0,
                            'bias_after_scale': False
                        })
            for param, new_grad in new_params_grads:
                # NOTE. regularization will append ops to grad.block,
                # while new_grad's real block is global_block,
                # but we want append regularization ops to cur_block,
                # so we set new_grad.block = cur_block
                new_grad.block = cur_block
            self._optimize_ops = self.inner_optimizer.apply_gradients(
                new_params_grads)
            # clear gradient_merge_vars
            for param, new_grad in new_params_grads:
                layers.fill_constant(
                    shape=new_grad.shape,
                    dtype=new_grad.dtype,
                    value=0.0,
                    out=new_grad)
        # step3. apply gradient
        layers.cond(cond, true_fn=true_apply_gradient, false_fn=None)
        return self._optimize_ops
    def minimize(self,
                 loss,
                 startup_program=None,
                 parameter_list=None,
                 no_grad_set=None):
        """Standard minimize entry point: backward then conditional apply.

        Returns:
            (optimize_ops, params_grads) tuple, mirroring other optimizers.
        """
        assert isinstance(loss, Variable), "The loss should be an Variable."
        params_grads = self.backward(
            loss,
            startup_program=startup_program,
            parameter_list=parameter_list,
            no_grad_set=no_grad_set)
        optimize_ops = self.apply_optimize(
            loss, startup_program=startup_program, params_grads=params_grads)
        return optimize_ops, params_grads
| 43.325637
| 234
| 0.574653
|
54594900f1ceb25b6798dc6d10a3b867195106de
| 2,271
|
py
|
Python
|
migrations/versions/ae445547adca_.py
|
Andrewowalla/pitches
|
eb15ae12ab33241b09e6e4e00a9e99a6c341159c
|
[
"MIT"
] | null | null | null |
migrations/versions/ae445547adca_.py
|
Andrewowalla/pitches
|
eb15ae12ab33241b09e6e4e00a9e99a6c341159c
|
[
"MIT"
] | null | null | null |
migrations/versions/ae445547adca_.py
|
Andrewowalla/pitches
|
eb15ae12ab33241b09e6e4e00a9e99a6c341159c
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: ae445547adca
Revises: 746a4f80890c
Create Date: 2022-02-14 16:37:48.478006
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# This migration is applied on top of 746a4f80890c; Alembic chains
# migrations via these two identifiers.
revision = 'ae445547adca'
down_revision = '746a4f80890c'
branch_labels = None
depends_on = None
def upgrade():
    """Create the pitches and comments tables and extend users.

    pitches is created before comments because comments carries a foreign
    key into pitches; users gains auth/profile columns plus a unique index
    on email.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('pitches',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('category', sa.String(), nullable=True),
    sa.Column('posted', sa.DateTime(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('pitch', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_pitches_category'), 'pitches', ['category'], unique=False)
    op.create_table('comments',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('comment', sa.String(length=240), nullable=True),
    sa.Column('posted', sa.DateTime(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('pitch_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['pitch_id'], ['pitches.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.add_column('users', sa.Column('email', sa.String(length=255), nullable=True))
    op.add_column('users', sa.Column('bio', sa.String(length=255), nullable=True))
    op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True))
    op.add_column('users', sa.Column('password_hash', sa.String(length=255), nullable=True))
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the new users columns/index and both tables.

    comments is dropped before pitches (FK dependency), mirroring creation
    order in upgrade().
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_column('users', 'password_hash')
    op.drop_column('users', 'profile_pic_path')
    op.drop_column('users', 'bio')
    op.drop_column('users', 'email')
    op.drop_table('comments')
    op.drop_index(op.f('ix_pitches_category'), table_name='pitches')
    op.drop_table('pitches')
    # ### end Alembic commands ###
| 37.85
| 92
| 0.674593
|
a07cc7aa610c0a4ed4ccf9df508952d1e6f04364
| 2,193
|
py
|
Python
|
hbridgeTest.py
|
Jelby/HatalogicoPython
|
2cc00fd3ec4d5f3dfcca2608c2cfcbe62dc75520
|
[
"MIT"
] | null | null | null |
hbridgeTest.py
|
Jelby/HatalogicoPython
|
2cc00fd3ec4d5f3dfcca2608c2cfcbe62dc75520
|
[
"MIT"
] | null | null | null |
hbridgeTest.py
|
Jelby/HatalogicoPython
|
2cc00fd3ec4d5f3dfcca2608c2cfcbe62dc75520
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# ===========================================================================
# Hatalogico Example H-Bridge Motor Driver - powered by Adafruit's Libraries
# -------------------------------------------------
# Date: 21/4/2015
# Written By: John Lumley
#
# BIG THANKS TO ADAFRUIT INDUSTRIES FOR MAKING THIS POSSIBLE AND EASY
# ===========================================================================
import time, os, sys
# DETERMINE CURRENT PATH
scriptPath = os.path.realpath(os.path.dirname(sys.argv[0]))
os.chdir(scriptPath)
# APPEND FOLDER OF REQUIRED LIBRARY
sys.path.append("Adafruit/Adafruit_PWM_Servo_Driver")
# FINALLY LOAD THE LIBRARY
from Adafruit_PWM_Servo_Driver import PWM
# NOTE(review): the two lines below recompute/re-chdir the same path already
# set above — harmless but redundant; only the sys.path.append is needed.
scriptPath = os.path.realpath(os.path.dirname(sys.argv[0]))
os.chdir(scriptPath)
sys.path.append("Adafruit/Adafruit_ADS1x15")
from Adafruit_ADS1x15 import ADS1x15
# Motor duty-cycle bounds (12-bit PWM, max 4095).
# NOTE(review): these four constants are never read in this script.
fMotorMin = 1500 #MINIMUM TO TURN THE MOTOR OVER
fMotorMax = 4095
bMotorMin = 2500 #MINIMUM TO TURN THE MOTOR OVER
bMotorMax = 4095
# PWM controller at I2C address 0x70
pwm = PWM(0x70)
# SET FREQUENCY
pwm.setPWMFreq(120)
# Start with all four H-bridge channels off.
pwm.setPWM(0, 0, 0)
pwm.setPWM(1, 0, 0)
pwm.setPWM(2, 0, 0)
pwm.setPWM(3, 0, 0)
time.sleep(5)
gain = 6144 # +/- 6.144V
sps = 250 # 250 samples per second
ADS1015 = 0x00
# FIRE UP THE ADCS
# NOTE(review): adc1 is initialized but never read in this test script.
adc1 = ADS1x15(address=0x49, ic=ADS1015)
def goForward(speedReq):
    """Drive forward: release channel 3, energize channel 2 at the
    requested duty cycle, clamped to the 12-bit PWM maximum (4095)."""
    pwm.setPWM(3, 0, 0)
    pwm.setPWM(2, 0, min(speedReq, 4095))
def goBackward(speedReq):
    """Drive backward: release channel 2, energize channel 3 at the
    requested duty cycle, clamped to the 12-bit PWM maximum (4095)."""
    pwm.setPWM(2, 0, 0)
    pwm.setPWM(3, 0, min(speedReq, 4095))
def goLeft(speedReq):
    """Steer left: release channel 0, energize channel 1 at the
    requested duty cycle, clamped to the 12-bit PWM maximum (4095)."""
    pwm.setPWM(0, 0, 0)
    pwm.setPWM(1, 0, min(speedReq, 4095))
def goRight(speedReq):
    """Steer right: release channel 1, energize channel 0 at the
    requested duty cycle, clamped to the 12-bit PWM maximum (4095)."""
    pwm.setPWM(1, 0, 0)
    pwm.setPWM(0, 0, min(speedReq, 4095))
# BASIC LOOP (NOT CONTROLLER INPUT)
# Endless scripted drive pattern exercising both bridges at increasing
# duty cycles; terminate with Ctrl-C.
while (True):
    # RANDOM DRIVING TO TEST FUNCTIONALITY
    # Pass 1: gentle speeds
    goLeft(2400)
    goForward(1800)
    time.sleep(5)
    goBackward(1000)
    time.sleep(5)
    # Pass 2: medium speeds, opposite steering
    goRight(2400)
    goForward(2500)
    time.sleep(5)
    goBackward(1800)
    time.sleep(5)
    # Pass 3: full duty cycle both directions
    goLeft(4095)
    goForward(4095)
    time.sleep(5)
    goBackward(4095)
    time.sleep(5)
    # Wiggle steering with drive motor stopped
    goRight(4095)
    goForward(0)
    time.sleep(2)
    goLeft(4095)
    time.sleep(1)
    goRight(4095)
    time.sleep(1)
| 20.305556
| 77
| 0.652987
|
295191dadc479ae027a77ddec1159f387c5c9bce
| 9,367
|
py
|
Python
|
tests/test_resolver.py
|
ulb-sachsen-anhalt/core
|
9567190aaceba7f6d24d169633d698382065533a
|
[
"Apache-2.0"
] | 91
|
2018-05-23T12:52:11.000Z
|
2022-03-19T20:43:49.000Z
|
tests/test_resolver.py
|
ulb-sachsen-anhalt/core
|
9567190aaceba7f6d24d169633d698382065533a
|
[
"Apache-2.0"
] | 636
|
2018-04-23T15:57:31.000Z
|
2022-03-31T11:46:11.000Z
|
tests/test_resolver.py
|
ulb-sachsen-anhalt/core
|
9567190aaceba7f6d24d169633d698382065533a
|
[
"Apache-2.0"
] | 25
|
2018-05-22T11:53:09.000Z
|
2021-07-20T13:07:43.000Z
|
# -*- coding: utf-8 -*-
import os
import shutil
from pathlib import (
Path
)
from unittest import (
mock
)
from PIL import (
Image
)
from ocrd_models.ocrd_page import OcrdPage
import pytest
from tests.base import (
assets,
main
)
from ocrd.resolver import Resolver
from ocrd_utils import pushd_popd
# set pylint once on module level
# pylint: disable=protected-access
# Remote METS fixture served from the OCR-D assets repository.
METS_HEROLD = assets.url_of('SBB0000F29300010000/data/mets.xml')
# Local checkout of the kant_aufklaerung_1784 example workspace.
FOLDER_KANT = assets.path_to('kant_aufklaerung_1784')
# Maps a fixture file's basename to (absolute path, MIME type); served to
# the tests through the mocked requests.get in request_behavior() below.
DATA_KANT = {'mets.xml': (os.path.join(FOLDER_KANT, 'data', 'mets.xml'), 'text/xml'),
             'INPUT_0017.tif': (os.path.join(FOLDER_KANT, 'data', 'OCR-D-IMG', 'INPUT_0017.tif'), 'image/tiff'),
             'INPUT_0020.tif': (os.path.join(FOLDER_KANT, 'data', 'OCR-D-IMG', 'INPUT_0020.tif'), 'image/tiff'),
             'PAGE_0017_ALTO.xml': (os.path.join(FOLDER_KANT, 'data', 'OCR-D-GT-ALTO', 'PAGE_0017_ALTO.xml'), 'text/xml'),
             'PAGE_0020_ALTO.xml': (os.path.join(FOLDER_KANT, 'data', 'OCR-D-GT-ALTO', 'PAGE_0020_ALTO.xml'), 'text/xml'),
             'PAGE_0017_PAGE.xml': (os.path.join(FOLDER_KANT, 'data', 'OCR-D-GT-PAGE', 'PAGE_0017_PAGE.xml'), 'text/xml'),
             'PAGE_0020_PAGE.xml': (os.path.join(FOLDER_KANT, 'data', 'OCR-D-GT-PAGE', 'PAGE_0020_PAGE.xml'), 'text/xml'),
             }
def _get_kant_data(key):
    """Return ``(content_bytes, mime_type)`` for a known Kant fixture file.

    Args:
        key (str): basename registered in DATA_KANT.

    Returns:
        tuple[bytes, str] for a known key, or None for an unknown key
        (the original implementation fell through to an implicit None;
        made explicit here).
    """
    # Membership test on the dict directly — 'in DATA_KANT.keys()' builds
    # the same answer with extra indirection.
    if key not in DATA_KANT:
        return None
    path, mime = DATA_KANT[key]
    with open(path, mode='rb') as _file:
        return (_file.read(), mime)
def request_behavior(*args):
    """Stand-in for ``requests.get``: serve Kant fixture files keyed by the
    final path segment of the requested URL.

    Unknown URLs still yield a 200 response, but with empty headers and no
    content attribute set.
    """
    response = mock.Mock()
    response.status_code = 200
    response.headers = {}
    requested = args[0].rsplit('/', 1)[-1]
    if requested in DATA_KANT:
        content, mime = _get_kant_data(requested)
        response.content = content
        response.headers = {'Content-Type': mime}
    return response
def test_workspace_from_url_bad():
    """workspace_from_url() must reject a missing mets_url argument."""
    with pytest.raises(Exception) as exc:
        Resolver().workspace_from_url(None)
    # check exception: match against the raised exception itself rather than
    # the ExceptionInfo wrapper (str(exc) also embeds file/line context)
    assert "Must pass 'mets_url'" in str(exc.value)
@mock.patch("requests.get")
def test_workspace_from_url_kant(mock_request, tmp_path):
    """Downloading only the METS stores it under the requested basename and
    issues exactly one HTTP request (served by the request_behavior mock)."""
    # arrange
    url_src = 'https://raw.githubusercontent.com/OCR-D/assets/master/data/kant_aufklaerung_1784/data/mets.xml'
    mock_request.side_effect = request_behavior
    dst_dir = tmp_path / 'workspace_kant'
    dst_dir.mkdir()
    # act
    resolver = Resolver()
    resolver.workspace_from_url(url_src, mets_basename='foo.xml', dst_dir=dst_dir)
    # assert
    local_path = dst_dir / 'foo.xml'
    assert os.path.isfile(str(local_path))
    # 1 time data was requested
    assert mock_request.call_count == 1
@mock.patch("requests.get")
def test_workspace_from_url_kant_with_resources(mock_request, tmp_path):
    """With download=True all referenced resources are fetched into the
    workspace: 1 METS + 2 images + 4 OCR files = 7 mocked requests."""
    # arrange
    url_src = 'https://raw.githubusercontent.com/OCR-D/assets/master/data/kant_aufklaerung_1784/data/mets.xml'
    mock_request.side_effect = request_behavior
    dst_dir = tmp_path / 'workspace_kant'
    dst_dir.mkdir()
    # act
    resolver = Resolver()
    resolver.workspace_from_url(url_src, mets_basename='kant_aufklaerung_1784.xml', dst_dir=dst_dir, download=True)
    # assert files present under local tmp_path
    local_path_mets = dst_dir / 'kant_aufklaerung_1784.xml'
    assert os.path.isfile(str(local_path_mets))
    local_path_img1 = dst_dir / 'OCR-D-IMG' / 'INPUT_0017.tif'
    assert os.path.isfile(str(local_path_img1))
    local_path_page1 = dst_dir / 'OCR-D-GT-PAGE' / 'PAGE_0017_PAGE.xml'
    assert os.path.isfile(str(local_path_page1))
    # 1 METS/MODS + 2 images + 4 OCR files = 7 requests
    assert mock_request.call_count == 7
@mock.patch("requests.get")
def test_workspace_from_url_kant_with_resources_existing_local(mock_request, tmp_path):
    """With a METS already present locally and clobber_mets=False, no HTTP
    request must be made at all."""
    # arrange
    url_src = 'https://raw.githubusercontent.com/OCR-D/assets/master/data/kant_aufklaerung_1784/data/mets.xml'
    mock_request.side_effect = request_behavior
    dst_dir = tmp_path / 'workspace_kant'
    dst_dir.mkdir()
    src_mets = Path(assets.path_to('kant_aufklaerung_1784-binarized/data/mets.xml'))
    dst_mets = Path(dst_dir, 'mets.xml')
    shutil.copyfile(src_mets, dst_mets)
    # act
    Resolver().workspace_from_url(url_src, clobber_mets=False, dst_dir=dst_dir)
    # assert
    # no real request was made, since mets already present
    assert mock_request.call_count == 0
@mock.patch("requests.get")
def test_workspace_from_url_404(mock_request):
    """Expected behavior when try create workspace from invalid online target
    """
    # arrange: any network access raises
    url_404 = 'https://raw.githubusercontent.com/OCR-D/assets/master/data/kant_aufklaerung_1784/data/mets.xmlX'
    mock_request.side_effect = Exception('HTTP request failed')
    with pytest.raises(Exception) as exc:
        Resolver().workspace_from_url(mets_url=url_404)
    # assert on the raised exception itself, not the ExceptionInfo wrapper
    assert "HTTP request failed" in str(exc.value)
    assert mock_request.call_count == 1
def test_workspace_from_url_with_rel_dir(tmp_path):
    """A deeply relative dst_dir must be normalized to the absolute path."""
    # climb far past the filesystem root so normalization lands on tmp_path
    bogus_dst_dir = '../../../../../../../../../../../../../../../../%s' % str(tmp_path)[1:]
    # act
    with pushd_popd(FOLDER_KANT):
        ws1 = Resolver().workspace_from_url('data/mets.xml', dst_dir=bogus_dst_dir)
    # assert
    assert os.path.join(tmp_path, 'mets.xml') == ws1.mets_target
    assert str(tmp_path) == ws1.directory
def test_workspace_from_url0():
    """A file from the Herold workspace downloads to its fileGrp subdir."""
    # act
    workspace = Resolver().workspace_from_url(METS_HEROLD)
    input_files = workspace.mets.find_all_files(fileGrp='OCR-D-IMG')
    image_file = input_files[0]
    f = workspace.download_file(image_file)
    # assert
    assert '%s.tif' % f.ID == 'FILE_0001_IMAGE.tif'
    assert f.local_filename == 'OCR-D-IMG/FILE_0001_IMAGE.tif'
def test_resolve_image0():
    """_resolve_image_as_pil returns the full image, or the crop given by a
    [[x0, y0], [x1, y1]] coordinate box."""
    workspace = Resolver().workspace_from_url(METS_HEROLD)
    input_files = workspace.mets.find_all_files(fileGrp='OCR-D-IMG')
    f = input_files[0]
    img_pil1 = workspace._resolve_image_as_pil(f.url)
    assert img_pil1.size == (2875, 3749)
    # crop to the 1x1 pixel box [[0,0],[1,1]]
    img_pil2 = workspace._resolve_image_as_pil(f.url, [[0, 0], [1, 1]])
    assert img_pil2.size == (1, 1)
@pytest.mark.parametrize(
    "image_url,size_pil",
    [('OCR-D-IMG-NRM/OCR-D-IMG-NRM_0017.png', (1, 1)),
     ('OCR-D-IMG-1BIT/OCR-D-IMG-1BIT_0017.png', (1, 1)),
     ])
def test_resolve_image_as_pil(image_url, size_pil):
    """Cropping to a 1x1 box works for normalized and bitonal images alike."""
    url_path = assets.url_of('kant_aufklaerung_1784-binarized/data/mets.xml')
    workspace = Resolver().workspace_from_url(url_path)
    img_pil = workspace._resolve_image_as_pil(image_url, [[0, 0], [1, 1]])
    assert img_pil.size == size_pil
def test_resolve_image_as_pil_deprecated():
    """The public resolve_image_as_pil wrapper must emit a DeprecationWarning
    pointing to the private implementation."""
    url_path = os.path.join(assets.url_of('kant_aufklaerung_1784-binarized'), 'data/mets.xml')
    workspace = Resolver().workspace_from_url(url_path)
    with pytest.warns(DeprecationWarning) as record:
        workspace.resolve_image_as_pil('OCR-D-IMG-NRM/OCR-D-IMG-NRM_0017.png')
    # assert
    assert len(record) == 1
    assert 'Call to deprecated method resolve_image_as_pil.' in str(record[0].message)
def test_workspace_from_nothing():
    """Creating a workspace with no directory still yields a METS object."""
    workspace = Resolver().workspace_from_nothing(None)
    assert workspace.mets
def test_workspace_from_nothing_makedirs(tmp_path):
    """A missing target directory is created on the fly."""
    target = tmp_path / 'target'
    workspace = Resolver().workspace_from_nothing(target)
    assert workspace.directory == target
def test_workspace_from_nothing_noclobber(tmp_path):
    """Attempt to re-create workspace shall fail because already created
    """
    ws2 = Resolver().workspace_from_nothing(tmp_path)
    assert ws2.directory == tmp_path
    with pytest.raises(Exception) as exc:
        Resolver().workspace_from_nothing(tmp_path)
    # assert: match against the raised exception itself, not the
    # ExceptionInfo wrapper (str(exc) also embeds file/line context)
    the_msg = "METS 'mets.xml' already exists in '%s' and clobber_mets not set" % tmp_path
    assert the_msg in str(exc.value)
@pytest.mark.parametrize("url,basename,exc_msg",
                         [(None, None, "'url' must be a string"),
                          (None, 'foo', "'directory' must be a string")]
                         )
def test_download_to_directory_with_badargs(url, basename, exc_msg):
    """download_to_directory must validate its string arguments."""
    with pytest.raises(Exception) as exc:
        Resolver().download_to_directory(url, basename)
    # assert exception message contained — checked on the exception itself,
    # not the ExceptionInfo wrapper
    assert exc_msg in str(exc.value)
@pytest.fixture(name='fixture_copy_kant')
def _fixture_copy_kant(tmp_path):
    """Yield a throw-away copy of the Kant asset tree under ``tmp_path``."""
    workdir = tmp_path / 'kant_aufklaerung_1784'
    shutil.copytree(FOLDER_KANT, workdir)
    yield workdir
def test_download_to_directory_default(fixture_copy_kant):
    """Without extra options the downloaded file keeps its basename."""
    target_root = fixture_copy_kant.parent
    mets_source = fixture_copy_kant / 'data' / 'mets.xml'
    filename = Resolver().download_to_directory(str(target_root), str(mets_source))
    assert (target_root / filename).exists()
    assert filename == 'mets.xml'
def test_download_to_directory_basename(fixture_copy_kant):
    """An explicit basename overrides the source file's name."""
    target_root = fixture_copy_kant.parent
    mets_source = fixture_copy_kant / 'data' / 'mets.xml'
    filename = Resolver().download_to_directory(str(target_root), str(mets_source), basename='foo')
    assert (target_root / filename).exists()
    assert filename == 'foo'
def test_download_to_directory_subdir(fixture_copy_kant):
    """A subdir option prefixes the returned relative filename."""
    target_root = fixture_copy_kant.parent
    mets_source = fixture_copy_kant / 'data' / 'mets.xml'
    filename = Resolver().download_to_directory(str(target_root), str(mets_source), subdir='baz')
    assert (target_root / filename).exists()
    assert filename == 'baz/mets.xml'
# Allow running this test module directly via the project's test runner.
if __name__ == '__main__':
    main(__file__)
| 33.09894
| 122
| 0.69873
|
2659ccf977a89d6aa984c105d9d2c397596298fd
| 4,269
|
py
|
Python
|
back-end/www/model/pytorch_cnn_tc.py
|
yenchiah/deep-smoke-machine
|
5f779f723a3c891145db43663c8825f9ab55dc74
|
[
"BSD-3-Clause"
] | 88
|
2019-05-29T07:38:45.000Z
|
2022-03-17T01:50:50.000Z
|
back-end/www/model/pytorch_cnn_tc.py
|
yenchiah/deep-smoke-machine
|
5f779f723a3c891145db43663c8825f9ab55dc74
|
[
"BSD-3-Clause"
] | 6
|
2019-05-30T08:47:07.000Z
|
2021-09-01T07:45:54.000Z
|
back-end/www/model/pytorch_cnn_tc.py
|
yenchiah/deep-smoke-machine
|
5f779f723a3c891145db43663c8825f9ab55dc74
|
[
"BSD-3-Clause"
] | 22
|
2019-06-17T01:15:35.000Z
|
2021-11-17T10:29:00.000Z
|
import torch
import torch.nn as nn
import torchvision
import numpy as np
from model.pytorch_i3d import Unit3D
from model.timeception.nets import timeception_pytorch
# 2D CNN + Timeception
# Timeception for Complex Action Recognition
# https://arxiv.org/abs/1812.01289
class CnnTc(nn.Module):
    """Per-frame 2D CNN backbone (GoogLeNet) followed by Timeception
    temporal convolutions and a 3D 1x1x1 logits head.

    __init__ runs a dummy forward pass with a zero tensor to discover the
    intermediate sizes needed to construct the Timeception and logits
    layers.
    """
    def __init__(self, input_size, num_classes=2, num_tc_layers=1, dropout_keep_prob=0.5, freeze_cnn=False):
        """Build the model.

        Args:
            input_size: mutable sequence (batch, channel, time, height,
                width); element 0 is overwritten with 4 for the sizing pass.
            num_classes: output classes for the logits head.
            num_tc_layers: number of Timeception layers.
            dropout_keep_prob: passed to nn.Dropout — NOTE(review): despite
                the name this is used as the DROP probability, not keep.
            freeze_cnn: if True, put the CNN backbone in eval mode.
        """
        super().__init__()
        print("Initialize R2D+Timeception model")
        print("num_tc_layers: " + str(num_tc_layers))
        print("freeze_cnn: " + str(freeze_cnn))
        # Set the first dimension of the input size to be 4, to reduce the amount of computation
        input_size[0] = 4
        # Input has shape (batch_size, 3, 36, 224, 224)
        # (batch_size, channel, time, height, width)
        a = torch.tensor(np.zeros(input_size), dtype=torch.float32)
        print("Input size:")
        print("\t", a.size())
        # 2D CNN backbone — GoogLeNet pretrained on ImageNet (the frames are
        # folded into the batch dimension so it sees ordinary 2D images)
        b = a.transpose(1, 2) # (batch_size, time, channel, height, width)
        bs = b.size()
        b = b.reshape(bs[0]*bs[1], bs[2], bs[3], bs[4]) # (batch_size X time, channel, height, width)
        self.cnn = torchvision.models.googlenet(pretrained=True, progress=True)
        num_features = self.cnn.fc.in_features
        # Drop the classifier head: the backbone emits pooled features.
        self.cnn.fc = nn.Identity()
        if freeze_cnn:
            print("Freeze CNN model")
            self.cnn.train(False)
        b = self.cnn(b) # (batch_size X time, num_features)
        print("CNN model output size:")
        print("\t", b.size())
        # Timeception — operates over the time axis of per-frame features
        c = b.reshape(bs[0], bs[1], -1) # (batch_size, time, num_features)
        cs = c.size()
        c = c.reshape(cs[0], cs[1], cs[2], 1, 1) # (batch_size, time, num_features, 1, 1)
        c = c.transpose(1, 2) # (batch_size, num_features, time, 1, 1)
        self.tc = timeception_pytorch.Timeception(c.size(), n_layers=num_tc_layers)
        c = self.tc(c) # (batch_size, 640, 18, 1, 1) if num_tc_layers=1
        print("Timeception model output size:")
        print("\t", c.size())
        # Logits — temporal average pool, dropout, then 1x1x1 3D conv head
        self.avg_pool = nn.AvgPool3d(kernel_size=[2, 1, 1], stride=(1, 1, 1))
        self.dropout = nn.Dropout(dropout_keep_prob)
        self.logits_in_channels = c.size(1)
        self.logits = Unit3D(in_channels=self.logits_in_channels, output_channels=num_classes,
                             kernel_shape=[1, 1, 1],
                             padding=0,
                             activation_fn=None,
                             use_batch_norm=False,
                             use_bias=True,
                             name='logits')
        d = self.logits(self.dropout(self.avg_pool(c))).squeeze(3).squeeze(3) # (batch, num_classes, time)
        print("Final layer output size:")
        print("\t", d.size())
        # We need to set the fully connected layer for loading self-trained models
        self.cnn.fc = nn.Linear(num_features, num_classes)
    def replace_logits(self, num_classes):
        """Swap the logits head for a new one with ``num_classes`` outputs
        (used after loading a checkpoint trained on a different task)."""
        self.cnn.fc = nn.Identity() # delete the fully connected layer
        self.logits = Unit3D(in_channels=self.logits_in_channels, output_channels=num_classes,
                             kernel_shape=[1, 1, 1],
                             padding=0,
                             activation_fn=None,
                             use_batch_norm=False,
                             use_bias=True,
                             name='logits')
    def forward(self, x):
        """Run the full pipeline; returns per-timestep class logits of shape
        (batch, num_classes, time)."""
        # x has shape (batch_size, channel, time, height, width)
        x = x.transpose(1, 2) # (batch_size, time, channel, height, width)
        xs = x.size()
        x = x.reshape(xs[0]*xs[1], xs[2], xs[3], xs[4]) # (batch_size X time, channel, height, width)
        x = self.cnn(x) # (batch_size X time, num_features)
        x = x.reshape(xs[0], xs[1], -1) # (batch_size, time, num_features)
        xs = x.size()
        x = x.reshape(xs[0], xs[1], xs[2], 1, 1) # (batch_size, time, num_features, 1, 1)
        x = x.transpose(1, 2) # (batch_size, num_features, time, 1, 1)
        x = self.tc(x) # (batch_size, 640, 18, 1, 1) if num_tc_layers=1
        x = self.logits(self.dropout(self.avg_pool(x))).squeeze(3).squeeze(3) # (batch, num_classes, time)
        return x
| 45.414894
| 108
| 0.57859
|
702e5e22e6840dd3214132bba22046f5e2e80ed2
| 52
|
py
|
Python
|
ssseg/modules/models/segmentors/__init__.py
|
zhizhangxian/sssegmentation
|
90613f6e0abf4cdd729cf382ab2a915e106d8649
|
[
"MIT"
] | 41
|
2021-08-28T01:29:19.000Z
|
2022-03-30T11:28:37.000Z
|
ssseg/modules/models/segmentors/__init__.py
|
zhizhangxian/sssegmentation
|
90613f6e0abf4cdd729cf382ab2a915e106d8649
|
[
"MIT"
] | 6
|
2021-08-31T08:54:39.000Z
|
2021-11-02T10:45:47.000Z
|
ssseg/modules/models/segmentors/__init__.py
|
zhizhangxian/sssegmentation
|
90613f6e0abf4cdd729cf382ab2a915e106d8649
|
[
"MIT"
] | 1
|
2021-09-08T01:41:10.000Z
|
2021-09-08T01:41:10.000Z
|
'''initialize'''
from .builder import BuildSegmentor
| 26
| 35
| 0.788462
|
7284377bc0d41aa1ceb1b9232a038e9cad01d1f0
| 6,909
|
py
|
Python
|
bindings/python/ensmallen_graph/datasets/string/microbacteriumparaoxydansdh1b.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/string/microbacteriumparaoxydansdh1b.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/string/microbacteriumparaoxydansdh1b.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
"""
This file offers the methods to automatically retrieve the graph Microbacterium paraoxydans DH1b.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-03 22:46:39.280906
The undirected graph Microbacterium paraoxydans DH1b has 3360 nodes and
226410 weighted edges, of which none are self-loops. The graph is dense
as it has a density of 0.04012 and has 17 connected components, where the
component with most nodes has 3317 nodes and the component with the least
nodes has 2 nodes. The graph median node degree is 107, the mean node degree
is 134.77, and the node degree mode is 2. The top 5 most central nodes
are 1416752.AYME01000003_gene2842 (degree 900), 1416752.AYME01000003_gene2850
(degree 894), 1416752.AYME01000003_gene2443 (degree 892), 1416752.AYME01000004_gene2026
(degree 882) and 1416752.AYME01000004_gene2044 (degree 843).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import MicrobacteriumParaoxydansDh1b
# Then load the graph
graph = MicrobacteriumParaoxydansDh1b()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
    # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def MicrobacteriumParaoxydansDh1b(
    directed: bool = False,
    verbose: int = 2,
    cache_path: str = "graphs/string",
    **additional_graph_kwargs: Dict
) -> EnsmallenGraph:
    """Return new instance of the Microbacterium paraoxydans DH1b graph.

    The graph is automatically retrieved from the STRING repository.

    Parameters
    -------------------
    directed: bool = False,
        Whether to load the graph as directed or undirected.
        By default false.
    verbose: int = 2,
        Whether to show loading bars during the retrieval and building
        of the graph.
    cache_path: str = "graphs",
        Where to store the downloaded graphs.
    additional_graph_kwargs: Dict,
        Additional graph kwargs.

    Returns
    -----------------------
    Instance of Microbacterium paraoxydans DH1b graph.

    References
    -----------------------
    Please cite the following if you use the data:

    @article{szklarczyk2019string,
        title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
        author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
        journal={Nucleic acids research},
        volume={47},
        number={D1},
        pages={D607--D613},
        year={2019},
        publisher={Oxford University Press}
    }

    Usage example
    -----------------------
    .. code:: python

        from ensmallen_graph.datasets.string import MicrobacteriumParaoxydansDh1b

        graph = MicrobacteriumParaoxydansDh1b()
        print(graph)
    """
    # Build the retriever first, then invoke it: the call performs the
    # actual download (cached under *cache_path*) and graph construction.
    retriever = AutomaticallyRetrievedGraph(
        graph_name="MicrobacteriumParaoxydansDh1b",
        dataset="string",
        directed=directed,
        verbose=verbose,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return retriever()
| 36.172775
| 223
| 0.712404
|
b92861d6ad8e471b26f744fa26062be7f6686dcb
| 389
|
py
|
Python
|
solved/solved/asgi.py
|
Keril-png/solved-project
|
48082adc66f0e738b23e6e11693b4000a4ca39fd
|
[
"MIT"
] | null | null | null |
solved/solved/asgi.py
|
Keril-png/solved-project
|
48082adc66f0e738b23e6e11693b4000a4ca39fd
|
[
"MIT"
] | null | null | null |
solved/solved/asgi.py
|
Keril-png/solved-project
|
48082adc66f0e738b23e6e11693b4000a4ca39fd
|
[
"MIT"
] | null | null | null |
"""
ASGI config for solved project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'solved.settings')
application = get_asgi_application()
| 22.882353
| 78
| 0.784062
|
18bf14ea5f79026caa63ee777b20ebf73cec5152
| 1,068
|
py
|
Python
|
rpcgrid/aio/providers/local.py
|
urands/rpcgrid
|
352d757828918798053d583596cdd3e174a9ec51
|
[
"Apache-2.0"
] | 2
|
2021-05-19T14:08:29.000Z
|
2021-10-06T09:18:51.000Z
|
rpcgrid/aio/providers/local.py
|
urands/rpcgrid
|
352d757828918798053d583596cdd3e174a9ec51
|
[
"Apache-2.0"
] | null | null | null |
rpcgrid/aio/providers/local.py
|
urands/rpcgrid
|
352d757828918798053d583596cdd3e174a9ec51
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
from rpcgrid.aio.providers.base import AsyncBaseProvider
class LocalProvider(AsyncBaseProvider):
    """In-process provider that exchanges protocol-encoded tasks between
    two paired endpoints through asyncio queues (no real transport)."""

    _protocol = None
    _queue = None
    _remote_queue = None
    _timeout = None

    def __init__(self, protocol):
        self._protocol = protocol
        self._queue = asyncio.Queue()

    def is_connected(self):
        # A purely local pair has no transport that could drop.
        return True

    def set_remote_provider(self, remote):
        # Outgoing messages are pushed straight into the peer's inbound queue.
        self._remote_queue = remote.provider._queue

    # Server side
    async def create(self):
        pass

    # Client side
    async def open(self):
        pass

    async def close(self):
        # A None payload unblocks any recv() waiting on the queue.
        await self._queue.put(None)

    # Any side
    async def send(self, task):
        encoded = self._protocol.encode(task)
        return await self._remote_queue.put(encoded)

    async def recv(self):
        # Wait for the next message; _timeout of None waits forever.
        try:
            raw = await asyncio.wait_for(
                self._queue.get(), timeout=self._timeout
            )
            message = self._protocol.decode(raw)
        except (asyncio.CancelledError, asyncio.TimeoutError):
            return None
        return message
| 23.217391
| 72
| 0.627341
|
e8e4d43f10fe5e673f9038194c5c83c396faec73
| 4,093
|
py
|
Python
|
ppocr/data/imaug/make_shrink_map.py
|
lzmisscc/PaddleOCR
|
c4620dbdb4757858c3d2986dc9cdebe96e66beca
|
[
"Apache-2.0"
] | 4
|
2021-03-09T03:31:51.000Z
|
2021-06-11T04:13:35.000Z
|
ppocr/data/imaug/make_shrink_map.py
|
lzmisscc/PaddleOCR
|
c4620dbdb4757858c3d2986dc9cdebe96e66beca
|
[
"Apache-2.0"
] | null | null | null |
ppocr/data/imaug/make_shrink_map.py
|
lzmisscc/PaddleOCR
|
c4620dbdb4757858c3d2986dc9cdebe96e66beca
|
[
"Apache-2.0"
] | 1
|
2021-04-13T08:08:47.000Z
|
2021-04-13T08:08:47.000Z
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import cv2
from shapely.geometry import Polygon
import pyclipper
__all__ = ['MakeShrinkMap']
class MakeShrinkMap(object):
    r'''
    Making binary mask from detection data with ICDAR format.
    Typically following the process of class `MakeICDARData`: produces a
    shrunk text-region map (``shrink_map``) and a validity mask
    (``shrink_mask``) for DB-style text detection training.
    '''

    def __init__(self, min_text_size=8, shrink_ratio=0.4, **kwargs):
        # Polygons whose shortest side is below min_text_size are ignored.
        self.min_text_size = min_text_size
        # Initial Vatti shrink ratio; increased adaptively when shrinking
        # splits a polygon into multiple pieces.
        self.shrink_ratio = shrink_ratio

    def __call__(self, data):
        """Add float32 'shrink_map' and 'shrink_mask' (both HxW) to *data*."""
        image = data['image']
        text_polys = data['polys']
        ignore_tags = data['ignore_tags']
        h, w = image.shape[:2]
        text_polys, ignore_tags = self.validate_polygons(text_polys,
                                                         ignore_tags, h, w)
        gt = np.zeros((h, w), dtype=np.float32)
        mask = np.ones((h, w), dtype=np.float32)
        for i in range(len(text_polys)):
            polygon = text_polys[i]
            height = max(polygon[:, 1]) - min(polygon[:, 1])
            width = max(polygon[:, 0]) - min(polygon[:, 0])
            if ignore_tags[i] or min(height, width) < self.min_text_size:
                # Too small (or already ignored): blank it out of the mask.
                cv2.fillPoly(mask,
                             polygon.astype(np.int32)[np.newaxis, :, :], 0)
                ignore_tags[i] = True
            else:
                polygon_shape = Polygon(polygon)
                subject = [tuple(l) for l in polygon]
                padding = pyclipper.PyclipperOffset()
                padding.AddPath(subject, pyclipper.JT_ROUND,
                                pyclipper.ET_CLOSEDPOLYGON)
                shrinked = []
                # Increase the shrink ratio every time the offset operation
                # returns multiple polygons.
                # BUG FIX: np.append returns a new array and the original
                # code discarded its result, so ratio 1 (no shrink) was
                # never actually tried as the final fallback.
                possible_ratios = np.arange(self.shrink_ratio, 1,
                                            self.shrink_ratio)
                possible_ratios = np.append(possible_ratios, 1)
                for ratio in possible_ratios:
                    # Offset distance from the DB paper: A * (1 - r^2) / L.
                    distance = polygon_shape.area * (
                        1 - np.power(ratio, 2)) / polygon_shape.length
                    shrinked = padding.Execute(-distance)
                    if len(shrinked) == 1:
                        break
                if shrinked == []:
                    # Shrinking degenerated the polygon entirely: ignore it.
                    cv2.fillPoly(mask,
                                 polygon.astype(np.int32)[np.newaxis, :, :], 0)
                    ignore_tags[i] = True
                    continue
                for each_shrink in shrinked:
                    shrink = np.array(each_shrink).reshape(-1, 2)
                    cv2.fillPoly(gt, [shrink.astype(np.int32)], 1)
        data['shrink_map'] = gt
        data['shrink_mask'] = mask
        return data

    def validate_polygons(self, polygons, ignore_tags, h, w):
        '''
        polygons (numpy.array, required): of shape (num_instances, num_points, 2)

        Clips polygons to the image bounds, flags degenerate ones
        (|area| < 1) as ignored, and normalises vertex orientation.
        '''
        if len(polygons) == 0:
            return polygons, ignore_tags
        assert len(polygons) == len(ignore_tags)
        for polygon in polygons:
            polygon[:, 0] = np.clip(polygon[:, 0], 0, w - 1)
            polygon[:, 1] = np.clip(polygon[:, 1], 0, h - 1)
        for i in range(len(polygons)):
            area = self.polygon_area(polygons[i])
            if abs(area) < 1:
                ignore_tags[i] = True
            if area > 0:
                # Reverse vertex order so all polygons share one orientation.
                polygons[i] = polygons[i][::-1, :]
        return polygons, ignore_tags

    def polygon_area(self, polygon):
        """Signed area of *polygon* via the shoelace formula.

        BUG FIX: the original summed dx * dy instead of dx * (y2 + y1),
        which is not an area at all (it yields 0 for an axis-aligned
        square), so valid polygons could be flagged as degenerate and
        orientation normalisation never triggered.
        """
        edge = 0
        for i in range(polygon.shape[0]):
            next_index = (i + 1) % polygon.shape[0]
            edge += (polygon[next_index, 0] - polygon[i, 0]) * (
                polygon[next_index, 1] + polygon[i, 1])
        return edge / 2.
| 38.252336
| 93
| 0.536281
|
637fdbeb5f19566016822566fefd39854900a8c3
| 121
|
py
|
Python
|
my3d/__init__.py
|
zachbateman/my3d
|
27ad10bd3025727fedc71ba0a3efda11c1e783dc
|
[
"MIT"
] | null | null | null |
my3d/__init__.py
|
zachbateman/my3d
|
27ad10bd3025727fedc71ba0a3efda11c1e783dc
|
[
"MIT"
] | null | null | null |
my3d/__init__.py
|
zachbateman/my3d
|
27ad10bd3025727fedc71ba0a3efda11c1e783dc
|
[
"MIT"
] | null | null | null |
from .world import World
from . import entities
from .entities import Entity, Point, Line, TextPane, Pipe, Sphere, Plane
| 30.25
| 72
| 0.77686
|
3fe5de342d8cbb6ed28a458cff56e33f0a180054
| 1,160
|
py
|
Python
|
messengerext/home/migrations/0011_event.py
|
groupsome/groupsome
|
4edcf30d66ff458c4df37d3198ef187219a768d7
|
[
"MIT"
] | 6
|
2016-10-07T13:43:17.000Z
|
2017-10-07T22:34:44.000Z
|
messengerext/home/migrations/0011_event.py
|
groupsome/groupsome
|
4edcf30d66ff458c4df37d3198ef187219a768d7
|
[
"MIT"
] | null | null | null |
messengerext/home/migrations/0011_event.py
|
groupsome/groupsome
|
4edcf30d66ff458c4df37d3198ef187219a768d7
|
[
"MIT"
] | 1
|
2020-07-15T04:29:31.000Z
|
2020-07-15T04:29:31.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-01 08:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: introduces the Event model
    # (name/date/location/allday/start) with a cascading FK to home.Group.
    # Do not edit by hand once applied.
    dependencies = [
        ('home', '0010_auto_20160530_2015'),
    ]
    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('date', models.DateField(verbose_name='Date')),
                ('location', models.CharField(max_length=150, verbose_name='Location')),
                ('allday', models.NullBooleanField(verbose_name='Allday')),
                ('start', models.TimeField(verbose_name='Start')),
                ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='home.Group')),
            ],
            options={
                'verbose_name': 'Event',
                'verbose_name_plural': 'Events',
            },
        ),
    ]
| 35.151515
| 114
| 0.583621
|
5d8c3c593f147358a7c14aaec1c1f2bc347ee464
| 3,175
|
py
|
Python
|
src/win.py
|
Mattias1/graph-tools
|
173c3601b91e12ebc8dbb8c27d243a74f6f4613b
|
[
"MIT"
] | null | null | null |
src/win.py
|
Mattias1/graph-tools
|
173c3601b91e12ebc8dbb8c27d243a74f6f4613b
|
[
"MIT"
] | null | null | null |
src/win.py
|
Mattias1/graph-tools
|
173c3601b91e12ebc8dbb8c27d243a74f6f4613b
|
[
"MIT"
] | null | null | null |
"""
Module containing Win class.
The Win class is meant to hide some common interaction with the canvas
and provide some default or needed functionality for all window objects.
"""
from tkinter import *
from PIL import Image, ImageTk, ImageDraw
from .settings import *
from .colors import *
class Win:
    """Abstract window class.

    Hides common canvas interaction and provides default behaviour
    (event hooks, geometry checks, drawing helpers) for all window
    objects. Subclasses override the hooks and ``draw``.
    """

    def __init__(self, settings, app, pos):
        self.settings = settings
        self.colors = settings.colors
        self.enabled = True
        self.app = app
        self.g = app.canvas
        self.pos = pos
        self.size = Size(50, 50)  # Should be set in the resize method

    def enable(self):
        """Enable this window."""
        self.enabled = True

    def disable(self):
        """Disable this window."""
        self.enabled = False

    def quit(self):
        """Quit the application"""
        self.app.quit()

    def containsPos(self, p):
        """Return True when point *p* lies inside this window's rectangle."""
        return self.pos.x <= p.x <= self.pos.x + self.size.w and self.pos.y <= p.y <= self.pos.y + self.size.h

    # Event hooks: no-ops by default, subclasses override what they need.
    def onMouseDown(self, p, btnNr):
        pass

    def onMouseMove(self, p, btnNr):
        pass

    def onMouseUp(self, p, btnNr):
        pass

    def onKeyDown(self, c):
        pass

    def onMouseScroll(self, p, factor):
        pass

    def onMouseDownDouble(self, p, btnNr):
        pass

    def onMouseDownTriple(self, p, btnNr):
        pass

    def resize(self, draw=True):
        """Resize window."""
        pass

    def loop(self):
        """This method is being called every X miliseconds"""
        return False

    def draw(self):
        """This draw method needs to be overridden to draw the window content."""
        pass

    # Some draw methods to make sure all my subclasses don't have to bother
    # about tkinters canvas. Positions are window-relative (offset by self.pos).
    def drawString(self, text, c, p, anchor='nw'):
        self.g.create_text((self.pos + p).t, anchor=anchor, text=text, fill=c, font=self.settings.font)

    def drawLine(self, c, p, q, w=1):
        # BUG FIX: the width parameter was accepted but never passed on
        # (old "TODO: Use width"); width=1 matches the previous default look.
        self.g.create_line((self.pos + p).t, (self.pos + q).t, fill=c, width=w)

    def drawHorizontalLine(self, c, h, w=1):
        self.drawLine(c, Pos(0, h), Pos(self.size.w, h), w)

    def drawRect(self, c, p, s):
        self.drawRectBorder(c, p, s, 0)

    def drawRectBorder(self, c, p, s, borderw=1):
        self.g.create_rectangle((self.pos + p).t, (self.pos + p + s).t, fill=c, width=borderw)

    def drawDisc(self, c, p, r, **kwargs):
        # NOTE(review): unlike the other helpers this draws in absolute
        # canvas coordinates (no self.pos offset) — confirm intentional.
        self.g.create_oval(p.x-r, p.y-r, p.x+r, p.y+r, fill=c, outline="", **kwargs)

    def loadImgPIL(self, path):
        return Image.open('img/' + path)

    def loadImgTk(self, img):
        return ImageTk.PhotoImage(img)

    def loadImg(self, path):
        return self.loadImgTk(self.loadImgPIL(path))

    def drawImg(self, p, img, anchor='nw'):
        self.g.create_image((self.pos + p).t, image=img, anchor=anchor)

    def fullClear(self):
        self.g.delete(ALL)
        self.clear(self.colors.bg)

    def clear(self, c):
        self.drawRect(c, Pos(0, 0), self.size)

    def drawCursorLine(self, p, cursorvisible):
        if cursorvisible:
            self.drawLine(self.colors.text, p, p + (0, self.settings.userfontsize.h))
| 30.825243
| 110
| 0.605039
|
d4c60b349ed62b3c6dd41c698bfc6a76c56c6fe8
| 3,611
|
py
|
Python
|
tempest/api/compute/admin/test_simple_tenant_usage.py
|
mail2nsrajesh/tempest
|
1a3b3dc50b418d3a15839830d7d1ff88c8c76cff
|
[
"Apache-2.0"
] | 2
|
2015-08-13T00:07:49.000Z
|
2020-08-07T06:38:44.000Z
|
tempest/api/compute/admin/test_simple_tenant_usage.py
|
mail2nsrajesh/tempest
|
1a3b3dc50b418d3a15839830d7d1ff88c8c76cff
|
[
"Apache-2.0"
] | 1
|
2019-08-08T10:36:44.000Z
|
2019-08-09T05:58:23.000Z
|
tempest/api/compute/admin/test_simple_tenant_usage.py
|
mail2nsrajesh/tempest
|
1a3b3dc50b418d3a15839830d7d1ff88c8c76cff
|
[
"Apache-2.0"
] | 5
|
2016-06-24T20:03:52.000Z
|
2020-02-05T10:14:54.000Z
|
# Copyright 2013 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from tempest.api.compute import base
from tempest.lib.common.utils import test_utils
from tempest.lib import decorators
from tempest.lib import exceptions as e
# Seconds to keep retrying until the usage API returns a valid response.
# TODO(takmatsu): Ideally this value would come from configuration.
VALID_WAIT = 30
class TenantUsagesTestJSON(base.BaseV2ComputeAdminTest):
    """Tests for the Nova os-simple-tenant-usage admin API."""
    @classmethod
    def setup_clients(cls):
        # Admin client for cross-tenant queries, primary for own-tenant ones.
        super(TenantUsagesTestJSON, cls).setup_clients()
        cls.adm_client = cls.os_admin.tenant_usages_client
        cls.client = cls.os_primary.tenant_usages_client
    @classmethod
    def resource_setup(cls):
        # Boot one server so usage data exists, and build a +-1 day window
        # around "now" for all the queries below.
        super(TenantUsagesTestJSON, cls).resource_setup()
        cls.tenant_id = cls.client.tenant_id
        # Create a server in the demo tenant
        cls.create_test_server(wait_until='ACTIVE')
        now = datetime.datetime.now()
        cls.start = cls._parse_strtime(now - datetime.timedelta(days=1))
        cls.end = cls._parse_strtime(now + datetime.timedelta(days=1))
    @classmethod
    def _parse_strtime(cls, at):
        # Returns formatted datetime (ISO-like, with microseconds)
        return at.strftime('%Y-%m-%dT%H:%M:%S.%f')
    def call_until_valid(self, func, duration, *args, **kwargs):
        # Call until get valid response for "duration"
        # because tenant usage doesn't become available immediately
        # after create VM.
        def is_valid():
            try:
                self.resp = func(*args, **kwargs)
                return True
            except e.InvalidHTTPResponseBody:
                return False
        self.assertEqual(test_utils.call_until_true(is_valid, duration, 1),
                         True, "%s not return valid response in %s secs" % (
                             func.__name__, duration))
        return self.resp
    @decorators.idempotent_id('062c8ae9-9912-4249-8b51-e38d664e926e')
    def test_list_usage_all_tenants(self):
        # Get usage for all tenants
        tenant_usage = self.call_until_valid(
            self.adm_client.list_tenant_usages, VALID_WAIT,
            start=self.start, end=self.end, detailed="1")['tenant_usages'][0]
        self.assertEqual(len(tenant_usage), 8)
    @decorators.idempotent_id('94135049-a4c5-4934-ad39-08fa7da4f22e')
    def test_get_usage_tenant(self):
        # Get usage for a specific tenant
        tenant_usage = self.call_until_valid(
            self.adm_client.show_tenant_usage, VALID_WAIT,
            self.tenant_id, start=self.start, end=self.end)['tenant_usage']
        self.assertEqual(len(tenant_usage), 8)
    @decorators.idempotent_id('9d00a412-b40e-4fd9-8eba-97b496316116')
    def test_get_usage_tenant_with_non_admin_user(self):
        # Get usage for a specific tenant with non admin user
        tenant_usage = self.call_until_valid(
            self.client.show_tenant_usage, VALID_WAIT,
            self.tenant_id, start=self.start, end=self.end)['tenant_usage']
        self.assertEqual(len(tenant_usage), 8)
| 39.25
| 78
| 0.683744
|
457e24632a032a8570bd0202286ce93c186089e2
| 1,285
|
py
|
Python
|
tests/test_init.py
|
Oprax/backup-utils
|
8de928d5257c9a67c65ca906e49596abe1e3b1ba
|
[
"MIT"
] | null | null | null |
tests/test_init.py
|
Oprax/backup-utils
|
8de928d5257c9a67c65ca906e49596abe1e3b1ba
|
[
"MIT"
] | null | null | null |
tests/test_init.py
|
Oprax/backup-utils
|
8de928d5257c9a67c65ca906e49596abe1e3b1ba
|
[
"MIT"
] | null | null | null |
import sys
import json
from pathlib import Path
from unittest.mock import patch
import pytest
from conftest import FakeFile, config
@patch.object(sys, "argv", ["backup_utils", "-v"])
def test_version(capsys):
from backup_utils import main, __version__
with pytest.raises(SystemExit):
main()
captured = capsys.readouterr()
assert __version__ in captured.out
@patch.object(sys, "argv", ["backup_utils", "-d" "./"])
def test_dir():
p = str(Path(".").resolve())
cfg = {}
with FakeFile("~/.config/bak-utils/config.json") as cfg_file:
from backup_utils import main
main()
cfg.update(json.loads(cfg_file.read_text()))
assert "directories" in cfg.keys()
dirs = cfg.get("directories")
assert len(dirs) == 1
assert p in dirs
# @patch.object(sys, "argv", ["backup_utils", "-r"])
# @patch("backup_utils.databases.databases")
# @patch("backup_utils.syncs.syncs")
# @patch("backup_utils.tasks.tasks")
# def test_run(mock_tasks, mock_syncs, mock_databases, config):
# cfg = config.copy()
# cfg.update({"notifier": {"driver": "print"}})
# with FakeFile(
# "~/.config/bak-utils/config.json", content=json.dumps(cfg)
# ) as cfg_file:
# from backup_utils import main
# main()
| 25.7
| 68
| 0.650584
|
5450f219423e469bc6eecfbd4687158cb6c3b9a7
| 2,420
|
py
|
Python
|
evaluate/Fly-LSH-master/.ipynb_checkpoints/flylsh-checkpoint.py
|
andrewluetgers/flylsh
|
7a1210e59a0e16ea7fcf111e94b6dd5e5f51a56f
|
[
"0BSD"
] | 5
|
2017-11-19T09:06:31.000Z
|
2021-07-29T21:23:32.000Z
|
evaluate/Fly-LSH-master/.ipynb_checkpoints/flylsh-checkpoint.py
|
andrewluetgers/flylsh
|
7a1210e59a0e16ea7fcf111e94b6dd5e5f51a56f
|
[
"0BSD"
] | null | null | null |
evaluate/Fly-LSH-master/.ipynb_checkpoints/flylsh-checkpoint.py
|
andrewluetgers/flylsh
|
7a1210e59a0e16ea7fcf111e94b6dd5e5f51a56f
|
[
"0BSD"
] | 2
|
2019-10-29T02:32:40.000Z
|
2021-07-29T16:54:11.000Z
|
import numpy as np
class flylsh(object):
    """Fly locality-sensitive hashing.

    Projects mean-centred data through a sparse random binary matrix and
    keeps the top-``hash_length`` activations per sample as a binary hash.
    """

    def __init__(self, data, hash_length, sampling_ratio, embedding_size):
        """
        data: Nxd matrix
        hash_length: scalar
        sampling_ratio: fraction of input dims to sample from when producing a hash
        embedding_size: dimensionality of projection space, m
        Note that in Flylsh, the hash length and embedding_size are NOT the same
        whereas in usual LSH they are
        """
        self.embedding_size = embedding_size
        # Centre each sample (row) around its own mean.
        self.data = (data - np.mean(data, axis=1)[:, None])
        weights = np.random.random((data.shape[1], embedding_size))
        self.weights = (weights > 1 - sampling_ratio)  # sparse projection vectors
        all_activations = (self.data @ self.weights)
        # Per-sample threshold selecting the top `hash_length` activations.
        threshold = np.sort(all_activations, axis=1)[:, -hash_length][:, None]
        self.hashes = (all_activations >= threshold)  # choose topk activations

    def query(self, qidx, nnn):
        """Return indices of the nnn nearest neighbours of sample qidx in hash space.

        BUG FIX: the original computed np.abs(bool - bool); subtracting
        boolean arrays raises TypeError on modern NumPy. Counting
        mismatched bits (XOR) gives the identical L1/Hamming distance.
        """
        L1_distances = np.sum(self.hashes[qidx, :] != self.hashes, axis=1)
        NNs = L1_distances.argsort()[1:nnn + 1]
        # An interesting property of this hash is that the L1 distances are
        # always even: both hashes activate the same number of bits.
        return NNs

    def true_nns(self, qidx, nnn):
        """Exact nnn nearest neighbours of qidx in (centred) input space."""
        sample = self.data[qidx, :]
        return np.sum((self.data - sample) ** 2, axis=1).argsort()[1:nnn + 1]

    def construct_true_nns(self, indices, nnn):
        """Exact nnn nearest neighbours for every sample index in *indices*."""
        all_NNs = np.zeros((len(indices), nnn))
        for idx1, idx2 in enumerate(indices):
            all_NNs[idx1, :] = self.true_nns(idx2, nnn)
        return all_NNs

    def AP(self, predictions, truth):
        """Average precision of *predictions* against ground truth *truth*."""
        assert len(predictions) == len(truth)
        precisions = [len(list(set(predictions[:idx]).intersection(set(truth[:idx])))) / idx
                      for idx in range(1, len(truth) + 1)]
        return np.mean(precisions)

    def findmAP(self, nnn, n_points):
        """Mean average precision over a random contiguous run of n_points queries."""
        start = np.random.randint(low=0, high=self.data.shape[0] - n_points)
        sample_indices = np.arange(start, start + n_points)
        all_NNs = self.construct_true_nns(sample_indices, nnn)
        self.allAPs = []
        for eidx, didx in enumerate(sample_indices):
            # eidx: enumeration id, didx: index of sample in self.data
            this_nns = self.query(didx, nnn)
            this_AP = self.AP(list(this_nns), list(all_NNs[eidx, :]))
            self.allAPs.append(this_AP)
        return np.mean(self.allAPs)
| 43.214286
| 112
| 0.647521
|
936977ad51968a090a61b7defaf1447962400b72
| 1,500
|
py
|
Python
|
scrapers/reddit-scraper/cloudsearch.py
|
BU-Spark/scraping_resources
|
09fc426d16977caab2dd85246e0b56cd773103f2
|
[
"MIT"
] | 8
|
2020-10-14T13:38:14.000Z
|
2021-10-03T19:14:41.000Z
|
scrapers/reddit-scraper/cloudsearch.py
|
BU-Spark/scraping_resources
|
09fc426d16977caab2dd85246e0b56cd773103f2
|
[
"MIT"
] | null | null | null |
scrapers/reddit-scraper/cloudsearch.py
|
BU-Spark/scraping_resources
|
09fc426d16977caab2dd85246e0b56cd773103f2
|
[
"MIT"
] | 4
|
2020-10-14T13:38:46.000Z
|
2021-03-02T23:35:28.000Z
|
import os
import sys
import praw
from datetime import datetime
from config import *
from image_resolver import *
from task_downloader import *
if __name__ == '__main__':
    # Authenticated Reddit client; credentials come from config.py.
    reddit = praw.Reddit(client_id=CLIENT_ID,
                         client_secret=CLIENT_SECRET,
                         password=PASSWORD,
                         user_agent=USER_AGENT,
                         username=USERNAME)
    # Start from "now" as a Unix timestamp and walk backwards in time.
    last_upper = int((datetime.utcnow() - datetime(1970, 1, 1)).total_seconds())
    download_data = []
    # Checkpoint: resume from the last saved upper bound if present.
    if os.path.isfile('checkpoint_cs.txt'):
        with open('checkpoint_cs.txt', 'r') as file:
            last_upper = int(file.read())
        print("Loaded Checkpoint,", last_upper)
    # Scrape one day-long window per iteration, forever (stop with Ctrl-C).
    while True:
        print("Collecting")
        download_data = []
        upper = last_upper
        lower = upper - 86400  # one day earlier
        # Cloudsearch timestamp range query for submissions in [lower, upper].
        query = 'timestamp:%d..%d' % (lower, upper)
        generator = reddit.subreddit(SUBREDDIT).search(query, sort='new', limit=100, syntax='cloudsearch')
        for submission in generator:
            # Resolve the submission URL to a direct image link (None = skip).
            link = parse_url(submission.url)
            id_ = submission.fullname
            if link is not None:
                download_data.append((link, id_))
        print("Downloading", len(download_data))
        download_images(download_data)
        print('Done')
        # Checkpoint AFTER the downloads so a crash re-does this window.
        with open('checkpoint_cs.txt', 'w') as file:
            file.write(str(last_upper))
        print('Checkpointing')
        print('')
        last_upper = lower
| 27.777778
| 106
| 0.586
|
3b47bf1604e75bd21e58551829ea7a20fae2097e
| 5,097
|
py
|
Python
|
pybamm/models/submodels/thermal/pouch_cell/pouch_cell_1D_current_collectors.py
|
NunoEdgarGFlowHub/PyBaMM
|
4e4e1ab8c488b0c0a6efdb9934c5ac59e947a190
|
[
"BSD-3-Clause"
] | 1
|
2021-03-06T15:10:34.000Z
|
2021-03-06T15:10:34.000Z
|
pybamm/models/submodels/thermal/pouch_cell/pouch_cell_1D_current_collectors.py
|
NunoEdgarGFlowHub/PyBaMM
|
4e4e1ab8c488b0c0a6efdb9934c5ac59e947a190
|
[
"BSD-3-Clause"
] | null | null | null |
pybamm/models/submodels/thermal/pouch_cell/pouch_cell_1D_current_collectors.py
|
NunoEdgarGFlowHub/PyBaMM
|
4e4e1ab8c488b0c0a6efdb9934c5ac59e947a190
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Class for one-dimensional thermal submodel for use in the "1+1D" pouch cell model
#
import pybamm
from ..base_thermal import BaseThermal
class CurrentCollector1D(BaseThermal):
    """
    Class for one-dimensional thermal submodel for use in the "1+1D" pouch cell
    model. The thermal model is averaged in the x-direction and is therefore referred
    to as 'x-lumped'. For more information see [1]_ and [2]_.

    Parameters
    ----------
    param : parameter class
        The parameters to use for this submodel

    References
    ----------
    .. [1] R Timms, SG Marquis, V Sulzer, CP Please and SJ Chapman. "Asymptotic
           Reduction of a Lithium-ion Pouch Cell Model". In preparation, 2020.
    .. [2] SG Marquis, R Timms, V Sulzer, CP Please and SJ Chapman. "A Suite of
           Reduced-Order Models of a Single-Layer Lithium-ion Pouch Cell". In
           preparation, 2020.

    **Extends:** :class:`pybamm.thermal.BaseThermal`
    """
    def __init__(self, param):
        # cc_dimension=1: temperature varies along the current collectors (z).
        super().__init__(param, cc_dimension=1)
    def get_fundamental_variables(self):
        # Single x-averaged temperature, broadcast to every through-cell
        # domain; the current-collector temperatures equal the cell average.
        T_x_av = pybamm.standard_variables.T_av
        T_vol_av = self._yz_average(T_x_av)
        T_cn = T_x_av
        T_n = pybamm.PrimaryBroadcast(T_x_av, "negative electrode")
        T_s = pybamm.PrimaryBroadcast(T_x_av, "separator")
        T_p = pybamm.PrimaryBroadcast(T_x_av, "positive electrode")
        T_cp = T_x_av
        variables = self._get_standard_fundamental_variables(
            T_cn, T_n, T_s, T_p, T_cp, T_x_av, T_vol_av
        )
        return variables
    def get_coupled_variables(self, variables):
        variables.update(self._get_standard_coupled_variables(variables))
        return variables
    def set_rhs(self, variables):
        # 1D heat equation in z with volumetric heating and surface cooling.
        T_av = variables["X-averaged cell temperature"]
        Q_av = variables["X-averaged total heating"]
        T_amb = variables["Ambient temperature"]
        # Account for surface area to volume ratio of pouch cell in cooling
        # coefficient. Note: the factor 1/delta^2 comes from the choice of
        # non-dimensionalisation
        cell_volume = self.param.l * self.param.l_y * self.param.l_z
        yz_surface_area = self.param.l_y * self.param.l_z
        yz_surface_cooling_coefficient = (
            -(self.param.h_cn + self.param.h_cp)
            * yz_surface_area
            / cell_volume
            / (self.param.delta ** 2)
        )
        side_edge_area = 2 * self.param.l_z * self.param.l
        side_edge_cooling_coefficient = (
            -self.param.h_edge * side_edge_area / cell_volume / self.param.delta
        )
        total_cooling_coefficient = (
            yz_surface_cooling_coefficient + side_edge_cooling_coefficient
        )
        self.rhs = {
            T_av: (
                pybamm.laplacian(T_av)
                + self.param.B * Q_av
                + total_cooling_coefficient * (T_av - T_amb)
            )
            / (self.param.C_th * self.param.rho)
        }
    def set_boundary_conditions(self, variables):
        # Neumann (cooling-flux) conditions at the bottom (z=0, "left") and
        # top (z=L_z, "right") of the cell.
        T_amb = variables["Ambient temperature"]
        T_av = variables["X-averaged cell temperature"]
        T_av_top = pybamm.boundary_value(T_av, "right")
        T_av_bottom = pybamm.boundary_value(T_av, "left")
        # Tab cooling only implemented for both tabs at the top.
        negative_tab_area = self.param.l_tab_n * self.param.l_cn
        positive_tab_area = self.param.l_tab_p * self.param.l_cp
        total_top_area = self.param.l * self.param.l_y
        non_tab_top_area = total_top_area - negative_tab_area - positive_tab_area
        # Top cooling is an area-weighted mix of tab and edge coefficients.
        negative_tab_cooling_coefficient = (
            self.param.h_tab_n / self.param.delta * negative_tab_area / total_top_area
        )
        positive_tab_cooling_coefficient = (
            self.param.h_tab_p / self.param.delta * positive_tab_area / total_top_area
        )
        top_edge_cooling_coefficient = (
            self.param.h_edge / self.param.delta * non_tab_top_area / total_top_area
        )
        bottom_edge_cooling_coefficient = (
            self.param.h_edge / self.param.delta * total_top_area / total_top_area
        )
        total_top_cooling_coefficient = (
            negative_tab_cooling_coefficient
            + positive_tab_cooling_coefficient
            + top_edge_cooling_coefficient
        )
        total_bottom_cooling_coefficient = bottom_edge_cooling_coefficient
        # just use left and right for clarity
        # left = bottom of cell (z=0)
        # right = top of cell (z=L_z)
        self.boundary_conditions = {
            T_av: {
                "left": (
                    total_bottom_cooling_coefficient * (T_av_bottom - T_amb),
                    "Neumann",
                ),
                "right": (
                    -total_top_cooling_coefficient * (T_av_top - T_amb),
                    "Neumann",
                ),
            }
        }
    def set_initial_conditions(self, variables):
        # Uniform initial temperature.
        T_av = variables["X-averaged cell temperature"]
        self.initial_conditions = {T_av: self.param.T_init}
| 35.151724
| 86
| 0.623112
|
b6d9ad9a1bf29c6923ac0de39f72ea76eb967892
| 3,121
|
py
|
Python
|
fairing/deployers/gcp/gcpserving.py
|
rbrishabh/fairing
|
929cfdb455366a9bebc0df676f3461308db32f1b
|
[
"Apache-2.0"
] | null | null | null |
fairing/deployers/gcp/gcpserving.py
|
rbrishabh/fairing
|
929cfdb455366a9bebc0df676f3461308db32f1b
|
[
"Apache-2.0"
] | null | null | null |
fairing/deployers/gcp/gcpserving.py
|
rbrishabh/fairing
|
929cfdb455366a9bebc0df676f3461308db32f1b
|
[
"Apache-2.0"
] | null | null | null |
from fairing.deployers.deployer import DeployerInterface
from fairing.cloud.gcp import guess_project_name
from fairing import http_utils
from googleapiclient import discovery
from googleapiclient import errors
# TODO: Implement predict and delete methods.
class GCPServingDeployer(DeployerInterface):
    """Handle deploying a trained model to GCP (Cloud ML Engine serving)."""
    def __init__(self, model_dir, model_name, version_name, project_id=None,
                 **deploy_kwargs):
        # Fall back to the ambient GCP project when none is supplied.
        self._project_id = project_id or guess_project_name()
        self._model_dir = model_dir
        self._model_name = model_name
        self._version_name = version_name
        self._deploy_kwargs = deploy_kwargs
        self._ml = discovery.build('ml', 'v1')
        self._ml._http = http_utils.configure_http_instance(self._ml._http) #pylint:disable=protected-access
        # Set default deploy kwargs
        if 'runtime_version' not in self._deploy_kwargs:
            self._deploy_kwargs['runtime_version'] = '1.13'
        if 'python_version' not in self._deploy_kwargs:
            self._deploy_kwargs['python_version'] = '3.5'
    def deploy(self, pod_template_spec):
        """Deploys the model to Cloud ML Engine.

        Creates the model resource if it does not exist, then submits the
        version. NOTE(review): errors are printed and the method returns
        None rather than raising — callers get no failure signal; confirm
        this best-effort behaviour is intended. `pod_template_spec` is
        unused here (interface requirement).
        """
        # Check if the model exists
        try:
            res = self._ml.projects().models().get(
                name='projects/{}/models/{}'.format(self._project_id, self._model_name)
            ).execute()
        except errors.HttpError as err:
            if err.resp['status'] == '404':
                # Model not found
                res = None
            else:
                # Other error with the command
                print('Error retrieving the model: {}'.format(err))
                return
        if res is None:
            # Create the model
            try:
                model_body = {'name': self._model_name}
                res = self._ml.projects().models().create(
                    parent='projects/{}'.format(self._project_id),
                    body=model_body
                ).execute()
            except errors.HttpError as err:
                print('Error creating the model: {}'.format(err))
                return
        # Create the version
        try:
            version_body = self._deploy_kwargs
            version_body['name'] = self._version_name
            version_body['deploymentUri'] = self._model_dir
            res = self._ml.projects().models().versions().create(
                parent='projects/{}/models/{}'.format(
                    self._project_id, self._model_name),
                body=version_body
            ).execute()
        except errors.HttpError as err:
            print('Error creating the version: {}'.format(err))
            return
        print('Version submitted successfully. Access the version at the following URL:')
        print('https://console.cloud.google.com/mlengine/models/{}/versions/{}?project={}'.format(
            self._model_name, self._version_name, self._project_id))
    def get_logs(self):
        # Log retrieval is not part of this deployer's capabilities.
        raise NotImplementedError('Retrieving logs is not supported for the GCP Serving deployer.')
| 40.532468
| 108
| 0.607498
|
82470e90dac3e8ff0d90a0b59c52cf15f084e038
| 3,065
|
py
|
Python
|
drafts/overdata/overdata/worker.py
|
tekhnus/misc
|
cf4c6e29434c546e3c29f24f7bb16a0ac65005f5
|
[
"Unlicense"
] | null | null | null |
drafts/overdata/overdata/worker.py
|
tekhnus/misc
|
cf4c6e29434c546e3c29f24f7bb16a0ac65005f5
|
[
"Unlicense"
] | null | null | null |
drafts/overdata/overdata/worker.py
|
tekhnus/misc
|
cf4c6e29434c546e3c29f24f7bb16a0ac65005f5
|
[
"Unlicense"
] | null | null | null |
import json
import pathlib
class Worker:
    """Base class for streaming (key, row) processors.

    Subclasses override ``process`` to consume one input pair and yield
    zero or more output pairs; ``close`` is called once the stream ends
    to flush any buffered state, and yields nothing by default.
    """

    def process(self, key, row):
        """Consume one (key, row) pair; must be overridden."""
        raise NotImplementedError("process not implemented")

    def close(self):
        """Emit any remaining output; the default has none."""
        return iter(())
class Compose(Worker):
    """Chain two workers: every output of *left* is fed into *right*."""

    def __init__(self, left, right):
        self._left = left
        self._right = right

    def process(self, key, row):
        for mid_key, mid_row in self._left.process(key, row):
            yield from self._right.process(mid_key, mid_row)

    def close(self):
        # Flush the left worker through the right one first...
        for mid_key, mid_row in self._left.close():
            yield from self._right.process(mid_key, mid_row)
        # ...then let the right worker flush its own state.
        yield from self._right.close()
class ExtractColumn(Worker):
    """Project each row down to a single named column."""

    def __init__(self, column):
        self._column = column

    def process(self, key, row):
        value = row[self._column]
        yield key, {self._column: value}
class Product(Worker):
    """Run two workers in lockstep and merge their output rows."""

    def __init__(self, one, another):
        self._one = one
        self._another = another

    def process(self, key, row):
        pairs = zip(self._one.process(key, row),
                    self._another.process(key, row))
        for (out_key, first_row), (_other_key, second_row) in pairs:
            # The second worker's columns win on key collisions.
            yield out_key, dict(first_row, **second_row)

    def close(self):
        pairs = zip(self._one.close(), self._another.close())
        for (out_key, first_row), (_other_key, second_row) in pairs:
            yield out_key, dict(first_row, **second_row)
class ConcatenateJSON(Worker):
    """Pass rows through unchanged, then append one row per JSON line of a file on close."""

    def __init__(self, filename):
        self._filename = pathlib.Path(filename)

    def process(self, key, row):
        # Rows are forwarded as-is but re-keyed to None.
        yield None, row

    def close(self):
        with self._filename.open() as handle:
            for line in handle:
                yield None, json.loads(line)
class BinaryOperator(Worker):
    """Combine the single values of two workers' output rows with a binary function."""

    def __init__(self, left, operation, right):
        self._left = left
        self._operation = operation
        self._right = right

    def process(self, key, row):
        left_stream = self._left.process(key, row)
        right_stream = self._right.process(key, row)
        for (out_key, left_row), (_unused, right_row) in zip(left_stream, right_stream):
            # Each side is assumed to produce single-column rows; take the
            # sole value from each.
            first = next(iter(left_row.values()))
            second = next(iter(right_row.values()))
            yield out_key, {"_unnamed_": self._operation(first, second)}
class Accumulator(Worker):
    """Fold the values produced by *root* per output key; emit totals on close."""

    def __init__(self, root, accumulator):
        self._root = root
        self._accumulator = accumulator
        self._init = {}

    def process(self, key, row):
        # Note: not a generator — the fold happens eagerly on each call,
        # matching the original's ``return iter([])`` shape.
        for out_key, root_row in self._root.process(key, row):
            value = next(iter(root_row.values()))
            if out_key not in self._init:
                self._init[out_key] = value
            else:
                self._init[out_key] = self._accumulator(self._init[out_key], value)
        return iter(())

    def close(self):
        for out_key, total in self._init.items():
            yield out_key, {"_unnamed_": total}
class PartitionBy(Worker):
    """Re-key each row by the repr of the key computed by *keyer*."""

    def __init__(self, keyer):
        self._keyer = keyer

    def process(self, key, row):
        _discarded, computed_key = next(self._keyer.process(key, row))
        # repr() makes the partition key hashable/stable regardless of type.
        yield repr(computed_key), row
| 28.915094
| 104
| 0.602284
|
a8a3596ff22145fe57876339448ce76ab7bb8a47
| 1,445
|
py
|
Python
|
mlmodels/linearregression.py
|
BoxFishLab/Pandora.Wiki
|
d6bb80b740493a52e7e75729b36b487c91d173f6
|
[
"MIT"
] | 2
|
2018-12-13T08:46:09.000Z
|
2019-09-23T03:57:40.000Z
|
mlmodels/linearregression.py
|
BoxFishLab/Pandora.Wiki
|
d6bb80b740493a52e7e75729b36b487c91d173f6
|
[
"MIT"
] | 2
|
2021-09-25T02:52:26.000Z
|
2021-09-25T02:52:42.000Z
|
mlmodels/linearregression.py
|
Charben/Pandora.Wiki
|
d6bb80b740493a52e7e75729b36b487c91d173f6
|
[
"MIT"
] | 1
|
2021-06-28T06:13:27.000Z
|
2021-06-28T06:13:27.000Z
|
#/usr/bin/env python
#-*- coding: utf-8 -*-
'''
线性回归模型
'''
from .basicmodel import BasicModule #基本模型
import torch as t
from torch import nn
from torch.autograd import Variable as V
from torch import optim
class LinearRegression(BasicModule):
    """Linear regression model (y = X.w + b) trained with plain SGD."""

    def __init__(self, data, max_item=100000, aphla=0.01):
        """
        :param data: sample tensor, used only to size the parameters
        :param max_item: maximum number of training iterations
        :param aphla: learning rate (name kept as-is for backward compatibility)
        """
        super(LinearRegression, self).__init__()
        self.DefaultParameter(data)
        self.aphla = aphla
        self.max_item = max_item

    # Default parameter initialisation
    def DefaultParameter(self, data):
        data_size = data.size()
        # One weight per feature column; one bias per sample row
        # (matches the addition in forward()).
        self.wights = nn.Parameter(t.randn(data_size[1], 1))
        self.b = nn.Parameter(t.randn(data_size[0], 1))

    # Forward pass
    def forward(self, x):
        y = x.mm(self.wights) + self.b
        return y

    def fit(self, lr, x, y, flag=t.cuda.is_available()):
        """Train ``lr`` on (x, y) with SGD, early-stopping on small loss.

        NOTE: the default for ``flag`` is evaluated once at import time,
        not per call.
        """
        if flag:
            model = lr.cuda()
        else:
            model = lr
        loss_func = nn.MSELoss()
        optimizer = optim.SGD(params=model.parameters(), lr=model.aphla)
        for epoch in range(model.max_item):
            if flag:
                x = V(x).cuda()
                y = V(y).cuda()
            else:
                x = V(x)
                y = V(y)
            y_out = model(x)
            loss = loss_func(y_out, y)
            # BUGFIX: ``loss.data[0]`` indexes a 0-dim tensor and raises an
            # error on PyTorch >= 0.4; ``loss.item()`` is the supported
            # scalar accessor.
            break_loss = loss.item()
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            if (epoch + 1) % 20 == 0:
                items = (epoch + 1) / 20
                print("Epoch{0}次,代价函数为loss:{1:.6f}".format(items, loss.item()))
            # Early stopping once the loss is small enough.
            if break_loss < 1e-3:
                break
        model.save(model)

    def predict(self, lr, x, path):
        """Load weights from ``path`` and print predictions for ``x``.

        TODO: prediction post-processing still needs to be implemented.
        """
        lr.eval()
        lr.load(path)
        y = lr(x)
        print(y)
| 21.567164
| 66
| 0.642907
|
2b82b446ea3d6a41404d6547d21bf6a5ff86de84
| 6,770
|
py
|
Python
|
test/functional/feature_nulldummy.py
|
taler-project/taler
|
0abb53afc6c8c4593411499f012fa75114313f3e
|
[
"MIT"
] | 16
|
2017-11-21T10:42:25.000Z
|
2021-02-26T11:30:53.000Z
|
test/functional/feature_nulldummy.py
|
taler-project/taler
|
0abb53afc6c8c4593411499f012fa75114313f3e
|
[
"MIT"
] | 2
|
2018-01-07T12:47:43.000Z
|
2018-12-06T11:42:20.000Z
|
test/functional/feature_nulldummy.py
|
taler-project/taler
|
0abb53afc6c8c4593411499f012fa75114313f3e
|
[
"MIT"
] | 15
|
2017-11-22T05:46:27.000Z
|
2021-02-06T18:10:27.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2016-2017 The Taler Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test NULLDUMMY softfork.
Connect to a single node.
Generate 2 blocks (save the coinbases for later).
Generate 427 more blocks.
[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block.
[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block.
[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, network_thread_start
from test_framework.blocktools import create_coinbase, create_block, add_witness_commitment
from test_framework.script import CScript
from io import BytesIO
import time
NULLDUMMY_ERROR = "64: non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)"
def trueDummy(tx):
    """Replace the NULLDUMMY (empty) first scriptSig element with OP_1.

    The transaction is mutated in place and re-hashed, producing a
    script that violates the NULLDUMMY rule.
    """
    elements = []
    for element in CScript(tx.vin[0].scriptSig):
        if elements:
            elements.append(element)
        else:
            # The dummy element must originally be empty.
            assert(len(element) == 0)
            elements.append(b'\x51')
    tx.vin[0].scriptSig = CScript(elements)
    tx.rehash()
class NULLDUMMYTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        # This script tests NULLDUMMY activation, which is part of the 'segwit' deployment, so we go through
        # normal segwit activation here (and don't use the default always-on behaviour).
        self.extra_args = [['-whitelist=127.0.0.1', '-walletprematurewitness', '-vbparams=segwit:0:999999999999', '-addresstype=legacy', "-deprecatedrpc=addwitnessaddress"]]
    def run_test(self):
        """Drive the activation phases described in the module docstring:
        blocks 430 (pre-activation), 431 (last non-enforced) and 432
        (enforced)."""
        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(1,[self.address])['address']
        self.wit_address = self.nodes[0].addwitnessaddress(self.address)
        self.wit_ms_address = self.nodes[0].addmultisigaddress(1, [self.address], '', 'p2sh-segwit')['address']
        network_thread_start()
        self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.nodes[0].generate(427) # Block 429
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        # Manually tracked chain state, advanced by block_submit().
        self.lastblockheight = 429
        self.lastblocktime = int(time.time()) + 429
        self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
        test1txs = [self.create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, 49)]
        txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
        test1txs.append(self.create_transaction(self.nodes[0], txid1, self.ms_address, 48))
        txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
        test1txs.append(self.create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, 49))
        txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test1txs, False, True)
        self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
        test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 47)
        trueDummy(test2tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
        self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
        self.block_submit(self.nodes[0], [test2tx], False, True)
        self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
        # Keep a NULLDUMMY-compliant copy before mangling, for Test 6.
        test4tx = self.create_transaction(self.nodes[0], test2tx.hash, self.address, 46)
        test6txs=[CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test4tx])
        self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
        test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48)
        test6txs.append(CTransaction(test5tx))
        # Corrupt the witness dummy element (must be empty under NULLDUMMY).
        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test5tx], True)
        self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
        for i in test6txs:
            self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test6txs, True, True)
    def create_transaction(self, node, txid, to_address, amount):
        """Build and sign a 1-in/1-out transaction spending vout 0 of txid."""
        inputs = [{ "txid" : txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = node.createrawtransaction(inputs, outputs)
        signresult = node.signrawtransaction(rawtx)
        tx = CTransaction()
        f = BytesIO(hex_str_to_bytes(signresult['hex']))
        tx.deserialize(f)
        return tx
    def block_submit(self, node, txs, witness = False, accept = False):
        """Mine ``txs`` into a block on our manually tracked tip and submit it.

        If ``accept`` the block must become the new best block; otherwise
        the node must reject it and keep the previous tip.
        """
        block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
        block.nVersion = 4
        for tx in txs:
            tx.rehash()
            block.vtx.append(tx)
        block.hashMerkleRoot = block.calc_merkle_root()
        witness and add_witness_commitment(block)
        block.rehash()
        block.solve()
        node.submitblock(bytes_to_hex_str(block.serialize(True)))
        if (accept):
            assert_equal(node.getbestblockhash(), block.hash)
            self.tip = block.sha256
            self.lastblockhash = block.hash
            self.lastblocktime += 1
            self.lastblockheight += 1
        else:
            assert_equal(node.getbestblockhash(), self.lastblockhash)
# Script entry point: run the functional test when executed directly.
if __name__ == '__main__':
    NULLDUMMYTest().main()
| 50.522388
| 173
| 0.701625
|
55a44853afb41d148d9232da59faeb1541bb0890
| 1,661
|
py
|
Python
|
goatools/grouper/tasks.py
|
flying-sheep/goatools
|
1e3a74faa17cbdeef02550c7ddf17b65cf47d34a
|
[
"BSD-2-Clause"
] | 477
|
2015-02-10T06:54:42.000Z
|
2022-03-15T12:36:11.000Z
|
goatools/grouper/tasks.py
|
flying-sheep/goatools
|
1e3a74faa17cbdeef02550c7ddf17b65cf47d34a
|
[
"BSD-2-Clause"
] | 174
|
2015-02-05T18:11:14.000Z
|
2022-03-29T10:24:19.000Z
|
goatools/grouper/tasks.py
|
flying-sheep/goatools
|
1e3a74faa17cbdeef02550c7ddf17b65cf47d34a
|
[
"BSD-2-Clause"
] | 202
|
2015-01-21T12:29:23.000Z
|
2022-03-01T13:26:05.000Z
|
"""Tasks for grouping."""
__copyright__ = "Copyright (C) 2016-2018, DV Klopfenstein, H Tang, All rights reserved."
__author__ = "DV Klopfenstein"
from goatools.grouper.hdrgos import HdrgosSections
class SummarySec2dHdrGos(object):
    """Summary for a sections variable containing sets of header GO IDs."""

    def summarize_sec2hdrgos(self, sec2d_hdrgos):
        """Get counts of header GO IDs and sections.

        Args:
            sec2d_hdrgos: iterable of (section_name, set_of_GO_IDs) pairs.

        Returns:
            dict with keys:
                'G': header GO IDs appearing in any named (non-default) section
                'S': the named section names encountered
                'U': header GO IDs appearing only in the default section
        """
        hdrgos_all = set()
        hdrgos_grouped = set()
        sections_grouped = set()
        for sectionname, hdrgos in sec2d_hdrgos:
            self._chk_hdrgoids(hdrgos)
            hdrgos_all.update(hdrgos)
            # GO IDs that only ever appear under the default section name
            # are considered "ungrouped".
            if sectionname != HdrgosSections.secdflt:
                hdrgos_grouped.update(hdrgos)
                sections_grouped.add(sectionname)
        return {'G': hdrgos_grouped,
                'S': sections_grouped,
                'U': hdrgos_all.difference(hdrgos_grouped)}

    def summarize_sec2hdrnts(self, sec2d_hdrnts):
        """Given namedtuples in each section, get counts of header GO IDs and sections."""
        sec2d_hdrgos = [(s, set(nt.GO for nt in nts)) for s, nts in sec2d_hdrnts]
        return self.summarize_sec2hdrgos(sec2d_hdrgos)

    @staticmethod
    def _chk_hdrgoids(hdrgos):
        """Check that hdrgos is a set of GO IDs; raise AssertionError otherwise."""
        goid = next(iter(hdrgos))
        if isinstance(goid, str) and goid[:3] == "GO:":
            return
        # Raise explicitly instead of ``assert False`` so the validation
        # still runs when Python is started with -O (asserts stripped).
        raise AssertionError("HDRGOS DO NOT CONTAIN GO IDs: {E}".format(E=goid))
# Copyright (C) 2016-2018, DV Klopfenstein, H Tang, All rights reserved.
| 37.75
| 89
| 0.637568
|
9cfb810d45bbb8049f282d0d6e369131f7fb9ae0
| 10,337
|
py
|
Python
|
budgetportal/tests/test_summaries.py
|
d3ft0uch/datamanager
|
60f2f9d5278d20ae553bb063dcedaf206bb3ab29
|
[
"MIT"
] | 3
|
2019-08-31T03:08:22.000Z
|
2020-04-03T13:09:20.000Z
|
budgetportal/tests/test_summaries.py
|
d3ft0uch/datamanager
|
60f2f9d5278d20ae553bb063dcedaf206bb3ab29
|
[
"MIT"
] | 97
|
2019-04-16T07:54:38.000Z
|
2022-02-10T07:25:48.000Z
|
budgetportal/tests/test_summaries.py
|
OpenUpSA/budget-portal
|
879c5875b1d438b9287c38d6730c86be69051ac5
|
[
"MIT"
] | 14
|
2019-04-23T09:48:17.000Z
|
2021-04-13T17:48:40.000Z
|
"""
Tests of budgetportal.summaries
"""
import json
from budgetportal.models import Department, FinancialYear, Government, Sphere
from budgetportal.openspending import BabbageFiscalDataset
from budgetportal.summaries import (
get_consolidated_expenditure_treemap,
get_focus_area_preview,
get_preview_page,
)
from django.test import TestCase
from mock import MagicMock, Mock, patch
# Fixture payloads (canned OpenSpending-style responses) loaded once at
# import time so every test class below can reuse them.
with open("budgetportal/tests/test_data/consolidated_treemap.json", "r") as f:
    CONSOLIDATED_MOCK_DATA = json.load(f)
with open(
    "budgetportal/tests/test_data/test_summaries_focus_area_pages_provincial.json", "r"
) as f:
    FOCUS_AREA_PROVINCIAL_MOCK_DATA = json.load(f)
with open(
    "budgetportal/tests/test_data/test_summaries_focus_area_pages_national.json", "r"
) as f:
    FOCUS_AREA_NATIONAL_MOCK_DATA = json.load(f)
with open(
    "budgetportal/tests/test_data/test_summaries_focus_area_pages_national_subprogrammes.json",
    "r",
) as f:
    FOCUS_AREA_NATIONAL_SUBPROGRAMMES_MOCK_DATA = json.load(f)
with open(
    "budgetportal/tests/test_data/test_national_department_preview.json", "r"
) as f:
    NATIONAL_DEPARTMENT_PREVIEW_MOCK_DATA = json.load(f)
class ConsolidatedTreemapTestCase(TestCase):
    """ Unit tests for the consolidated treemap function(s) """

    def setUp(self):
        # A real FinancialYear row plus a fully mocked OpenSpending API whose
        # aggregate() returns the canned "complete" fixture loaded above.
        self.year = FinancialYear.objects.create(slug="2019-20")
        self.mock_openspending_api = Mock()
        self.mock_openspending_api.get_adjustment_kind_ref = Mock(
            return_value="adjustment_kind_ref"
        )
        self.mock_openspending_api.aggregate = Mock(
            return_value={"cells": CONSOLIDATED_MOCK_DATA["complete"]}
        )
        self.mock_openspending_api.get_function_ref = Mock(
            return_value="function_group.function_group"
        )
        self.mock_openspending_api.get_year_ref = Mock(
            return_value="function_group.function_group"
        )
        self.mock_openspending_api.get_financial_year_ref = Mock(
            return_value="financial_year.financial_year"
        )
        self.mock_dataset = Mock()
        self.mock_dataset.get_openspending_api = Mock(
            return_value=self.mock_openspending_api
        )

    @patch("budgetportal.summaries.get_consolidated_expenditure_budget_dataset")
    def test_complete_data(self, mock_get_dataset):
        """The treemap payload exposes items/total and the per-item fields
        that the front end renders."""
        mock_get_dataset.return_value = self.mock_dataset
        result = get_consolidated_expenditure_treemap(self.year)
        data = result["data"]
        self.assertEqual(len(data), 2)
        data_keys = data.keys()
        self.assertIn("items", data_keys)
        self.assertIn("total", data_keys)
        expenditure_keys = data["items"][0].keys()
        self.assertIn("name", expenditure_keys)
        self.assertIn("amount", expenditure_keys)
        self.assertIn("percentage", expenditure_keys)
        self.assertIn("id", expenditure_keys)
        self.assertIn("url", expenditure_keys)
class FocusAreaPagesTestCase(TestCase):
    """ Integration test focus area page data generation """

    def setUp(self):
        # Build a minimal national + provincial government hierarchy with one
        # department in each sphere.
        self.year = FinancialYear.objects.create(slug="2019-20")
        self.year.save()
        national = Sphere(financial_year=self.year, name="national")
        national.save()
        provincial = Sphere(financial_year=self.year, name="provincial")
        provincial.save()
        southafrica = Government(sphere=national, name="South Africa")
        southafrica.save()
        province = Government(sphere=provincial, name="Test Province 1")
        province.save()
        Department(
            government=southafrica,
            name="TP1 National Test Dept 2",
            vote_number=1,
            is_vote_primary=True,
            intro="",
        ).save()
        Department(
            government=province,
            name="TP1 Provincial Test Dept 2",
            vote_number=1,
            is_vote_primary=True,
            intro="",
        ).save()
        self.mock_dataset = Mock()
        self.mock_openspending_api = Mock()
        self.mock_openspending_api.get_geo_ref = Mock(
            return_value="geo_source.government"
        )
        self.mock_openspending_api.get_function_ref = Mock(
            return_value="function_group.function_group"
        )
        self.mock_openspending_api.get_year_ref = Mock(
            return_value="function_group.function_group"
        )
        self.mock_openspending_api.get_financial_year_ref = Mock(
            return_value="financial_year.financial_year"
        )
        self.mock_openspending_api.get_department_name_ref = Mock(
            return_value="vote_number.department"
        )
        self.mock_openspending_api.get_subprogramme_name_ref = Mock(
            return_value="subprogramme.subprogramme"
        )
        self.mock_dataset.get_openspending_api = Mock(
            return_value=self.mock_openspending_api
        )

        # Stand-in for get_focus_area_data: dispatch the right fixture by
        # sphere.  The first parameter is unused by this fake.
        def mock_get_focus_area_data(__, sphere):
            if sphere == "provincial":
                cells = FOCUS_AREA_PROVINCIAL_MOCK_DATA
            elif sphere == "national":
                cells = FOCUS_AREA_NATIONAL_MOCK_DATA
            return cells, self.mock_openspending_api

        self.mock_get_focus_area_data = mock_get_focus_area_data

    @patch("budgetportal.summaries.get_prov_eq_share")
    @patch("budgetportal.summaries.get_focus_area_data")
    def test_get_focus_area_preview(
        self, mock_get_focus_area_data, mock_get_prov_eq_share
    ):
        """Focus area preview groups departments by focus group per sphere
        and emits slugged, year-qualified URLs."""
        mock_get_focus_area_data.side_effect = self.mock_get_focus_area_data
        mock_get_prov_eq_share.return_value = ("untested", 123)
        result = get_focus_area_preview(self.year)
        focus_areas = result["data"]["items"]
        fg1 = [f for f in focus_areas if f["slug"] == "test-fg-1"][0]
        fg2 = [f for f in focus_areas if f["slug"] == "test-fg-2"][0]
        self.assertEqual("Test FG 1", fg1["title"])
        self.assertEqual("Test FG 2", fg2["title"])
        self.assertEqual(6, len(fg1["national"]["data"]))
        self.assertEqual(6, len(fg1["provincial"]["data"]))
        nat_dept_data = [
            dept
            for dept in fg1["national"]["data"]
            if dept["title"] == "TP1 National Test Dept 2"
        ][0]
        self.assertTrue(nat_dept_data["slug"] in nat_dept_data["url"])
        self.assertTrue("2019-20" in nat_dept_data["url"])
        prov_dept_data = [
            dept
            for dept in fg1["provincial"]["data"]
            if dept["name"] == "TP1 Provincial Test Dept 2"
        ][0]
        self.assertTrue(prov_dept_data["slug"] in prov_dept_data["url"])
        self.assertTrue("2019-20" in prov_dept_data["url"])
class NationalDepartmentPreviewTestCase(TestCase):
    """ Unit tests for the national department preview department function. """

    def setUp(self):
        self.mock_data = NATIONAL_DEPARTMENT_PREVIEW_MOCK_DATA
        year = FinancialYear.objects.create(slug="2019-20")
        sphere = Sphere.objects.create(financial_year=year, name="national")
        government = Government.objects.create(sphere=sphere, name="South Africa")
        self.department = Department(
            government=government,
            name="Fake",
            vote_number=1,
            is_vote_primary=True,
            intro="",
        )
        # Mocked OpenSpending API: every *_ref accessor returns the fixture's
        # column name, and aggregate()/filter_by_ref_exclusion() return the
        # canned programme cells.
        self.mock_openspending_api = Mock()
        self.mock_openspending_api.get_adjustment_kind_ref = Mock(
            return_value="adjustment_kind_ref"
        )
        self.mock_openspending_api.get_phase_ref = Mock(
            return_value="budget_phase.budget_phase"
        )
        self.mock_openspending_api.get_programme_name_ref = Mock(
            return_value="programme_number.programme"
        )
        self.mock_openspending_api.get_department_name_ref = Mock(
            return_value="vote_number.department"
        )
        self.mock_openspending_api.get_geo_ref = Mock(
            return_value="geo_source.government"
        )
        self.mock_openspending_api.get_function_ref = Mock(
            return_value="function_group_1.function_group_1"
        )
        self.mock_openspending_api.get_financial_year_ref = Mock(
            return_value="financial_year.financial_year"
        )
        self.mock_openspending_api.aggregate = Mock(
            return_value={"cells": self.mock_data["programmes"]}
        )
        self.mock_openspending_api.filter_by_ref_exclusion = Mock(
            return_value=self.mock_data["programmes"]
        )
        self.mock_openspending_api.aggregate_url = Mock
        self.mock_dataset = Mock()
        self.mock_dataset.get_openspending_api = Mock(
            return_value=self.mock_openspending_api
        )
        # One Department row per department named in the fixture.
        vote_number = 1
        for mock_object in self.mock_data["departments"]:
            Department.objects.create(
                government=government,
                is_vote_primary=True,
                name=mock_object["vote_number.department"],
                vote_number=vote_number,
            )
            vote_number += 1

    @patch(
        "budgetportal.models.Department.get_all_budget_totals_by_year_and_phase",
        return_value=MagicMock(),
    )
    @patch("budgetportal.summaries.get_expenditure_time_series_dataset")
    def test_complete_data(self, mock_get_dataset, total_budgets_mock):
        """Preview page payload exposes the per-department fields rendered
        by the front end, including focus areas."""
        # Use the real aggregation helper against the mocked cells.
        self.mock_openspending_api.aggregate_by_refs = (
            BabbageFiscalDataset.aggregate_by_refs
        )
        mock_get_dataset.return_value = self.mock_dataset
        result = get_preview_page(
            financial_year_id="2019-20",
            phase_slug="original",
            government_slug="south-africa",
            sphere_slug="national",
        )
        data = result["data"]
        self.assertEqual(len(data), 1)
        data_keys = data.keys()
        self.assertIn("items", data_keys)
        expenditure_keys = data["items"][0].keys()
        self.assertIn("title", expenditure_keys)
        self.assertIn("description", expenditure_keys)
        self.assertIn("percentage_of_budget", expenditure_keys)
        self.assertIn("programmes", expenditure_keys)
        self.assertIn("slug", expenditure_keys)
        self.assertIn("focus_areas", expenditure_keys)
        self.assertEqual(
            data["items"][0]["focus_areas"][0]["slug"], "economic-development"
        )
| 37.31769
| 95
| 0.657154
|
be62e93e415d1f3a3507ac0e577d0bf5feca52c9
| 542
|
py
|
Python
|
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/black-sunset-28769
|
61ebb01b9f40182a0936a8846c8277f754d93938
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/black-sunset-28769
|
61ebb01b9f40182a0936a8846c8277f754d93938
|
[
"FTL",
"AML",
"RSA-MD"
] | 17
|
2021-07-12T18:32:12.000Z
|
2021-07-12T18:32:14.000Z
|
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/black-sunset-28769
|
61ebb01b9f40182a0936a8846c8277f754d93938
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
from django.db import migrations
def create_site(apps, schema_editor):
    """Create or update the default django.contrib.sites Site (pk=1)."""
    site_model = apps.get_model("sites", "Site")
    custom_domain = "black-sunset-28769.botics.co"
    site_params = {"name": "Black Sunset"}
    if custom_domain:
        site_params["domain"] = custom_domain
    site_model.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
    """Initial data migration: seed the sites framework with this app's domain."""

    # Requires the unique-domain constraint from the sites app.
    dependencies = [
        ("sites", "0002_alter_domain_unique"),
    ]

    operations = [
        migrations.RunPython(create_site),
    ]
| 20.846154
| 61
| 0.656827
|
9d95b60ec8bc8452c6517b793e89b78acb297efe
| 354
|
py
|
Python
|
pacman-arch/test/pacman/tests/remove040.py
|
Maxython/pacman-for-termux
|
3b208eb9274cbfc7a27fca673ea8a58f09ebad47
|
[
"MIT"
] | 23
|
2021-05-21T19:11:06.000Z
|
2022-03-31T18:14:20.000Z
|
source/pacman-6.0.1/test/pacman/tests/remove040.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | 11
|
2021-05-21T12:08:44.000Z
|
2021-12-21T08:30:08.000Z
|
source/pacman-6.0.1/test/pacman/tests/remove040.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | 1
|
2021-09-26T08:44:40.000Z
|
2021-09-26T08:44:40.000Z
|
# pacman pmtest case: ``self`` (the test object) and ``pmpkg`` are injected
# by the test framework that executes this file.
self.description = "Remove a package that requires another package"

# pkg1 depends on the virtual package "imaginary"...
lp1 = pmpkg("pkg1")
lp1.depends = ["imaginary"]
self.addpkg2db("local", lp1)

# ...which pkg2 provides, so pkg2 must survive pkg1's removal.
lp2 = pmpkg("pkg2")
lp2.provides = ["imaginary"]
self.addpkg2db("local", lp2)

self.args = "-R %s" % lp1.name

# Expect: pacman succeeds, pkg1 is gone, pkg2 is untouched.
self.addrule("PACMAN_RETCODE=0")
self.addrule("!PKG_EXIST=pkg1")
self.addrule("PKG_EXIST=pkg2")
| 22.125
| 67
| 0.70904
|
c367da53e32b17cfa309050b81548e03bc1ece88
| 2,014
|
py
|
Python
|
evalml/tests/data_checks_tests/test_ts_splitting_data_check.py
|
Mahesh1822/evalml
|
aa0ec2379aeba12bbd0dcaaa000f9a2a62064169
|
[
"BSD-3-Clause"
] | null | null | null |
evalml/tests/data_checks_tests/test_ts_splitting_data_check.py
|
Mahesh1822/evalml
|
aa0ec2379aeba12bbd0dcaaa000f9a2a62064169
|
[
"BSD-3-Clause"
] | null | null | null |
evalml/tests/data_checks_tests/test_ts_splitting_data_check.py
|
Mahesh1822/evalml
|
aa0ec2379aeba12bbd0dcaaa000f9a2a62064169
|
[
"BSD-3-Clause"
] | null | null | null |
import pandas as pd
import pytest
from evalml.data_checks import (
DataCheckMessageCode,
TimeSeriesSplittingDataCheck,
)
def test_time_series_splitting_data_check_raises_value_error():
    """The data check rejects problem types it does not support
    (regression targets have no classes to balance across splits)."""
    with pytest.raises(
        ValueError,
        match="Valid splitting of labels in time series",
    ):
        TimeSeriesSplittingDataCheck("time series regression", n_splits=3)
@pytest.mark.parametrize(
    "problem_type", ["time series binary", "time series multiclass"]
)
@pytest.mark.parametrize("is_valid", [True, False])
def test_time_series_param_data_check(is_valid, problem_type):
    """TimeSeriesSplittingDataCheck errors on splits whose train/validation
    windows do not contain every target class, and passes otherwise."""
    X = None
    invalid_splits = {}
    if not is_valid:
        if problem_type == "time series binary":
            # The second half is all 1s, so later validation windows
            # never see class 0.
            y = pd.Series([i % 2 if i < 25 else 1 for i in range(100)])
            invalid_splits = {
                1: {"Validation": [25, 50]},
                2: {"Validation": [50, 75]},
                3: {"Validation": [75, 100]},
            }
        elif problem_type == "time series multiclass":
            # The first two thirds are all 2s, so early windows lack
            # classes 0 and 1.
            y = pd.Series([i % 3 if i > 65 else 2 for i in range(100)])
            invalid_splits = {
                1: {"Training": [0, 25], "Validation": [25, 50]},
                2: {"Training": [0, 50]},
            }
    else:
        if problem_type == "time series binary":
            y = pd.Series([i % 2 for i in range(100)])
        elif problem_type == "time series multiclass":
            y = pd.Series([i % 3 for i in range(100)])

    # BUGFIX: use the parametrized ``problem_type`` instead of the previous
    # hard-coded "time series binary", so the multiclass cases actually
    # exercise the multiclass validation path.
    data_check = TimeSeriesSplittingDataCheck(problem_type, 3)
    results = data_check.validate(X, y)
    code = DataCheckMessageCode.TIMESERIES_TARGET_NOT_COMPATIBLE_WITH_SPLIT.name
    if not is_valid:
        assert len(results["errors"]) == 1
        assert results["errors"][0]["details"] == {
            "columns": None,
            "rows": None,
            "invalid_splits": invalid_splits,
        }
        assert results["errors"][0]["code"] == code
    else:
        assert results == {"warnings": [], "errors": [], "actions": []}
| 33.566667
| 80
| 0.583913
|
4ba28075cca782f5f46e9f1d1bee6277f1ca7129
| 6,007
|
py
|
Python
|
shutterstock_api/models/redownload_image.py
|
Lumen5/shutterstock-api
|
d26db2c9cd6688cf828ad15478bf1b4701150a3f
|
[
"Adobe-Glyph"
] | 1
|
2021-02-23T16:15:16.000Z
|
2021-02-23T16:15:16.000Z
|
shutterstock_api/models/redownload_image.py
|
Lumen5/shutterstock-api
|
d26db2c9cd6688cf828ad15478bf1b4701150a3f
|
[
"Adobe-Glyph"
] | 17
|
2019-07-13T01:23:08.000Z
|
2022-03-21T07:17:35.000Z
|
shutterstock_api/models/redownload_image.py
|
Lumen5/shutterstock-api
|
d26db2c9cd6688cf828ad15478bf1b4701150a3f
|
[
"Adobe-Glyph"
] | 1
|
2021-03-07T19:16:27.000Z
|
2021-03-07T19:16:27.000Z
|
# coding: utf-8
"""
Shutterstock API Reference
The Shutterstock API provides access to Shutterstock's library of media, as well as information about customers' accounts and the contributors that provide the media. # noqa: E501
OpenAPI spec version: 1.0.11
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from shutterstock_api.models.cookie import Cookie # noqa: F401,E501
class RedownloadImage(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'auth_cookie': 'Cookie',
        'show_modal': 'bool',
        'size': 'str',
        'verification_code': 'str'
    }

    attribute_map = {
        'auth_cookie': 'auth_cookie',
        'show_modal': 'show_modal',
        'size': 'size',
        'verification_code': 'verification_code'
    }

    def __init__(self, auth_cookie=None, show_modal=None, size=None, verification_code=None):  # noqa: E501
        """RedownloadImage - a model defined in Swagger"""  # noqa: E501
        self._auth_cookie = None
        self._show_modal = None
        self._size = None
        self._verification_code = None
        self.discriminator = None
        # Assign through the property setters so validation (e.g. on
        # ``size``) runs; None values are simply left unset.
        if auth_cookie is not None:
            self.auth_cookie = auth_cookie
        if show_modal is not None:
            self.show_modal = show_modal
        if size is not None:
            self.size = size
        if verification_code is not None:
            self.verification_code = verification_code

    @property
    def auth_cookie(self):
        """Gets the auth_cookie of this RedownloadImage.  # noqa: E501

        :return: The auth_cookie of this RedownloadImage.  # noqa: E501
        :rtype: Cookie
        """
        return self._auth_cookie

    @auth_cookie.setter
    def auth_cookie(self, auth_cookie):
        """Sets the auth_cookie of this RedownloadImage.

        :param auth_cookie: The auth_cookie of this RedownloadImage.  # noqa: E501
        :type: Cookie
        """
        self._auth_cookie = auth_cookie

    @property
    def show_modal(self):
        """Gets the show_modal of this RedownloadImage.  # noqa: E501

        (Deprecated)  # noqa: E501

        :return: The show_modal of this RedownloadImage.  # noqa: E501
        :rtype: bool
        """
        return self._show_modal

    @show_modal.setter
    def show_modal(self, show_modal):
        """Sets the show_modal of this RedownloadImage.

        (Deprecated)  # noqa: E501

        :param show_modal: The show_modal of this RedownloadImage.  # noqa: E501
        :type: bool
        """
        self._show_modal = show_modal

    @property
    def size(self):
        """Gets the size of this RedownloadImage.  # noqa: E501

        Size of the image  # noqa: E501

        :return: The size of this RedownloadImage.  # noqa: E501
        :rtype: str
        """
        return self._size

    @size.setter
    def size(self, size):
        """Sets the size of this RedownloadImage.

        Size of the image  # noqa: E501

        :param size: The size of this RedownloadImage.  # noqa: E501
        :type: str
        """
        # Only these enum values are accepted by the API.
        allowed_values = ["small", "medium", "huge", "supersize"]  # noqa: E501
        if size not in allowed_values:
            raise ValueError(
                "Invalid value for `size` ({0}), must be one of {1}"  # noqa: E501
                .format(size, allowed_values)
            )
        self._size = size

    @property
    def verification_code(self):
        """Gets the verification_code of this RedownloadImage.  # noqa: E501

        (Deprecated)  # noqa: E501

        :return: The verification_code of this RedownloadImage.  # noqa: E501
        :rtype: str
        """
        return self._verification_code

    @verification_code.setter
    def verification_code(self, verification_code):
        """Sets the verification_code of this RedownloadImage.

        (Deprecated)  # noqa: E501

        :param verification_code: The verification_code of this RedownloadImage.  # noqa: E501
        :type: str
        """
        self._verification_code = verification_code

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(RedownloadImage, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, RedownloadImage):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 28.879808
| 184
| 0.586149
|
0096bf6eb3a4a5f5cb2b518ce96211d0309e4f72
| 164
|
py
|
Python
|
job/urls.py
|
pincoin/thai-online-judge
|
454c129541783a67b33113851318b808c722de9e
|
[
"MIT"
] | null | null | null |
job/urls.py
|
pincoin/thai-online-judge
|
454c129541783a67b33113851318b808c722de9e
|
[
"MIT"
] | 5
|
2020-06-05T22:01:14.000Z
|
2021-06-09T18:10:50.000Z
|
job/urls.py
|
pincoin/pincoin-online-judge
|
454c129541783a67b33113851318b808c722de9e
|
[
"MIT"
] | null | null | null |
from django.urls import path

from . import views

app_name = 'job'

urlpatterns = [
    # Job board landing page.
    path('', views.JobListView.as_view(), name='job_list'),
]
| 12.615385
| 55
| 0.609756
|
3cf461f93e8078801c2e0b395e8dae554ad6ef5b
| 517
|
py
|
Python
|
ex18_zed.py
|
iXylph/Zedd
|
2b93bb3293019d6d3c9e2a65f1a91e7702980797
|
[
"bzip2-1.0.6"
] | null | null | null |
ex18_zed.py
|
iXylph/Zedd
|
2b93bb3293019d6d3c9e2a65f1a91e7702980797
|
[
"bzip2-1.0.6"
] | null | null | null |
ex18_zed.py
|
iXylph/Zedd
|
2b93bb3293019d6d3c9e2a65f1a91e7702980797
|
[
"bzip2-1.0.6"
] | null | null | null |
# this one is like your scripts with argv
def print_two(*args):
    """Print two positional arguments using %r formatting.

    Fixed: the body used the Python 2 ``print`` statement, which is a
    syntax error on Python 3; output is unchanged.
    """
    arg1, arg2 = args
    print("arg1: %r, arg2: %r" % (arg1, arg2))
# ok, that *args is actually pointless, we can just do this
def print_two_again(arg1, arg2):
    """Print two named arguments (Python 3 print(); same output as before)."""
    print("arg1: %r, arg2: %r" % (arg1, arg2))
# this just takes one argument
def print_one(arg1):
    """Print a single argument (Python 3 print(); same output as before)."""
    print("arg1: %r" % arg1)
# this one takes no arguments
def print_none():
    """Print a fixed message (Python 3 print(); same output as before)."""
    print("I got nothin'.")
# Exercise each flavor of function defined above.
print_two("Zed","Shaw")
print_two_again("Zed","Shaw")
print_one("First!")
print_none()
| 23.5
| 59
| 0.663443
|
258e1d9dbcf871e601a5284b9779dee8d44207c8
| 2,611
|
py
|
Python
|
examples/webgl/line_compare.py
|
goncaloperes/bokeh
|
b857d2d17d7c19779bb0a7be2601d8238fb1d5e9
|
[
"BSD-3-Clause"
] | 1
|
2021-10-30T00:32:00.000Z
|
2021-10-30T00:32:00.000Z
|
examples/webgl/line_compare.py
|
Deng-Fankang/bokeh
|
894731860c53b7c9ddd0057dee85cf064278dc0e
|
[
"BSD-3-Clause"
] | 12
|
2020-08-26T20:19:29.000Z
|
2020-08-26T20:19:52.000Z
|
examples/webgl/line_compare.py
|
Deng-Fankang/bokeh
|
894731860c53b7c9ddd0057dee85cf064278dc0e
|
[
"BSD-3-Clause"
] | 2
|
2021-01-12T18:22:24.000Z
|
2021-10-30T00:32:02.000Z
|
""" Compare WebGL, SVG with canvas line.
"""
import numpy as np
from bokeh.layouts import column, row
from bokeh.models import CustomJS, Dropdown, Slider
from bokeh.plotting import figure, output_file, show
p1 = figure(title="Canvas", plot_width=400, plot_height= 400, output_backend="canvas")
p2 = figure(title="SVG", plot_width=400, plot_height= 400, output_backend="svg")
p3 = figure(title="WebGL", plot_width=400, plot_height= 400, output_backend="webgl")
ys = 10 # yscale, to increase anisotropy
lines = []
for p in (p1, p2, p3):
t = np.linspace(0, 2 * np.pi, 50)
x = np.sin(t) * 10
y = np.cos(t) * 10
l1 = p.line(x, y * ys, color="#2222aa",
line_width=6, line_cap='butt',
line_join='round', line_dash=(10, 6, 3, 6, 3, 6))
t = np.linspace(0, 4 * np.pi, 150)
x = t - 5
y = np.sin(t) + 5
l2 = p.line(x, y * ys, color="#22aa22",
line_width=6, line_cap='butt', line_join='round')
t = np.arange(10)
x = t - 5
y = 0.3 * (t + 1) * ((t % 2) * 2 - 1) - 6
l3 = p.line(x, y * ys, color="#aa2222",
line_width=6, line_cap='butt',
line_join='round', line_dash=(10, 10))
l4 = p.line(y, x * ys, color="#aa2222",
line_width=6, line_cap='butt',
line_join='round', line_dash=(10, 10))
lines.extend([l1, l2, l3, l4])
def make_callback(widget, prop):
    # NOTE(review): dead stub — never called anywhere in this example and
    # returns None; candidate for removal.
    return
def make_slider(prop, start, end, value):
    """Build a slider whose value is pushed onto *prop* of every line glyph."""
    # The JS callback runs client-side and updates all glyphs in lockstep.
    cb = CustomJS(args=dict(lines=lines, prop=prop), code="""
    for (var i = 0; i < lines.length; i++) {
        const glyph = lines[i].glyph;
        glyph[prop] = cb_obj.value;
    }
    """)
    slider = Slider(title=prop, start=start, end=end, value=value)
    slider.js_on_change('value', cb)
    return slider
def make_dropdown(prop, menu):
    """Build a dropdown whose selection is pushed onto *prop* of every line glyph."""
    # The JS callback runs client-side and updates all glyphs in lockstep.
    cb = CustomJS(args=dict(lines=lines, prop=prop), code="""
    for (var i = 0; i < lines.length; i++) {
        const glyph = lines[i].glyph;
        glyph[prop] = cb_obj.item;
    }
    """)
    dropdown = Dropdown(label=prop, menu=menu)
    dropdown.js_on_click(cb)
    return dropdown
# Widgets that drive the same line property on all twelve glyphs at once.
sliders = [
    make_slider('line_width', start=0.2, end=16, value=5),
    make_slider('line_dash_offset', start=0, end=100, value=1),
    make_dropdown('line_cap', [("butt", "butt"), ("round", "round"), ("square", "square")]),
    make_dropdown('line_join', [("miter", "miter"), ("round", "round"), ("bevel", "bevel")]),
]
sliders = column(*sliders)

output_file("line_compare.html", title="line_compare.py example")

# NOTE(review): `sliders` is already a column; wrapping it in column() again
# here is redundant but harmless.
show(row(column(sliders), column(p1, p2, p3)))
| 31.457831
| 93
| 0.585599
|
9bf9ebe0e6b0931c2e01e63b59c3ac51f8b5e98b
| 14,576
|
py
|
Python
|
usersimulator/ConfusionModel.py
|
hys90/pydial
|
8bf7e36a8126cc678e5ce518892d12e0335f56c6
|
[
"Apache-2.0"
] | 3
|
2019-09-27T06:07:12.000Z
|
2020-01-06T19:00:34.000Z
|
usersimulator/ConfusionModel.py
|
hys90/pydial
|
8bf7e36a8126cc678e5ce518892d12e0335f56c6
|
[
"Apache-2.0"
] | null | null | null |
usersimulator/ConfusionModel.py
|
hys90/pydial
|
8bf7e36a8126cc678e5ce518892d12e0335f56c6
|
[
"Apache-2.0"
] | 1
|
2019-11-27T09:28:10.000Z
|
2019-11-27T09:28:10.000Z
|
###############################################################################
# PyDial: Multi-domain Statistical Spoken Dialogue System Software
###############################################################################
#
# Copyright 2015 - 2017
# Cambridge University Engineering Department Dialogue Systems Group
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###############################################################################
'''
ConfusionModel.py - handcrafted SemI error creator
===================================================
Copyright CUED Dialogue Systems Group 2015 - 2017
.. seealso:: CUED Imports/Dependencies:
import :mod:`utils.DiaAct` |.|
import :mod:`ontology.Ontology` |.|
import :mod:`utils.Settings` |.|
import :mod:`utils.ContextLogger`
************************
'''
__author__ = "cued_dialogue_systems_group"
import copy
from utils import Settings
from utils import DiaAct
from ontology import Ontology
from utils import ContextLogger
import numpy as np
logger = ContextLogger.getLogger('')
class EMConfusionModel(object):
    '''Base class for EMRandomConfusionModel.

    .. Note::
        Used through derived class only. Subclasses must provide
        :meth:`confuse_hyp`.
    '''

    def create_wrong_hyp(self, a_u):
        '''Create a wrong hypothesis for a_u

        Repeatedly calls :meth:`confuse_hyp` until it produces an act that
        differs from ``a_u`` (and is not ``bye``), giving up after 25 tries.

        :param a_u: of :class:`DiaAct`
        :type a_u: instance
        :returns: (instance) of :class:`DiaAct` - modified input act
        '''
        confact_is_same = True
        num_attempts = 0
        max_num_attempts = 25
        conf_act = None
        while confact_is_same and num_attempts < max_num_attempts:
            conf_act = self.confuse_hyp(a_u)
            # A 'bye' confusion is rejected too: it would make the system
            # finish the dialogue after the confusion.
            confact_is_same = (conf_act == a_u) or conf_act.act == 'bye'
            num_attempts += 1
        # BUG FIX: the original tested ``num_attempts == max_num_attempts``,
        # which discarded a valid confusion found on the very last attempt.
        # Test whether the final candidate was actually rejected instead.
        if confact_is_same:
            logger.warning("Confused act same after %d attempts: return null() instead." % max_num_attempts)
            #return DiaAct.DiaAct('null()')
            return a_u
        return conf_act
class EMRandomConfusionModel(EMConfusionModel):
    '''Derived class from :class:`EMConfusionModel`.

    Confuses a user act by replacing its type, one slot name, or one slot
    value with a uniformly random alternative drawn from the domain ontology.

    :param None:
    '''
    def __init__(self, domainString):
        # Domain whose ontology supplies the confusion candidates.
        self.domainString = domainString
        # Probabilities of confusing the act type / a slot name / a slot
        # value (they sum to 1.0).
        self.CONFUSE_TYPE = 0.2
        self.CONFUSE_SLOT = 0.3
        self.CONFUSE_VALUE = 0.5
        # Candidate dialogue act types used when the act type is confused.
        self.newUserActs = ['hello',
                            'thankyou',
                            'ack',
                            'bye',
                            'inform',
                            'request',
                            'reqalts',
                            'reqmore',
                            'confirm',
                            'affirm',
                            'negate',
                            'deny',
                            'repeat',
                            'null']
        self.nNewUserActs = len(self.newUserActs)

    def confuse_hyp(self, a_u):
        '''Randomly confuse the act type, slot or value.

        :param a_u: of :class:`DiaAct`
        :type a_u: instance
        :returns: (instance) of :class:`DiaAct` - modified input act
        '''
        wHyp = copy.deepcopy(a_u)
        # Identify if this diaact type takes 0, 1, or 2 arguments
        nSlotVal = wHyp.getDiaItemFormat()
        # Make a choice to confuse the type, slot or value
        choice = Settings.random.choice([0, 1, 2], p=[self.CONFUSE_TYPE, self.CONFUSE_SLOT, self.CONFUSE_VALUE])
        # Acts carrying no slot (or no value) cannot have it confused, so cap
        # the choice at the number of arguments the act actually takes.
        choice = min(choice, nSlotVal)
        if choice == 0:
            wHyp = self._confuse_type(wHyp)
        elif choice == 1:
            wHyp = self._confuse_slot(wHyp)
        elif choice == 2:
            wHyp = self._confuse_value(wHyp)
        else:
            logger.error('Invalid choice ' + str(choice))
        return wHyp

    def _confuse_dia_act_type(self, oldtype):
        '''
        Randomly select a dialogue act type different from oldtype.
        '''
        acttypeset = copy.copy(self.newUserActs)
        acttypeset.remove(oldtype)
        return Settings.random.choice(acttypeset)

    def _confuse_slot_name(self, old_name):
        '''
        Randomly select a slot name that is different from the given old_name
        '''
        slots = Ontology.global_ontology.get_requestable_slots(self.domainString)
        if old_name in slots:
            slots.remove(old_name)
        # if old_name not in slots:
        #     logger.error('Slot "%s" is not found in ontology.' % old_name)
        return Settings.random.choice(slots)

    def _get_confused_value_for_slot(self, slot, old_val):
        '''
        Randomly select a slot value for the given slot s different from old_val.
        '''
        return Ontology.global_ontology.getRandomValueForSlot(self.domainString, slot=slot, notthese=[old_val])

    def _confuse_type(self, hyp):
        '''
        Create a wrong hypothesis, where the dialogue act type is different.

        The original slot-value items are discarded and replaced by random
        ones matching the arity of the new act type.
        '''
        hyp.items = []
        hyp.act = self._confuse_dia_act_type(hyp.act)
        item_format = DiaAct.actTypeToItemFormat[hyp.act]
        if item_format == 0:
            return hyp
        elif item_format == 1:
            new_slot_name = Ontology.global_ontology.get_random_slot_name(self.domainString)
            hyp.append(new_slot_name, None)
        elif item_format == 2:
            new_slot_name = Ontology.global_ontology.get_random_slot_name(self.domainString)
            assert new_slot_name is not None
            new_slot_val = Ontology.global_ontology.getRandomValueForSlot(self.domainString, slot=new_slot_name)
            hyp.append(new_slot_name, new_slot_val)
        # TODO: If item_format is 3, it doesn't confuse slot-values.
        # This might be a bug in the original implementation.
        return hyp

    def _confuse_slot(self, hyp):
        '''
        Create a wrong hypothesis, where the slot names are different.

        Values are re-randomized to match the new slot, since the old value
        is unlikely to be valid for it.
        '''
        for dip in hyp.items:
            # If the slot is empty, just break
            if dip.slot is None:
                break
            slot = dip.slot
            if slot == 'more':
                break
            dip.slot = self._confuse_slot_name(slot)
            if dip.val is not None:
                dip.val = Ontology.global_ontology.getRandomValueForSlot(self.domainString, slot=dip.slot)
        return hyp

    def _confuse_value(self, a_u):
        '''
        Create a wrong hypothesis, where one (randomly chosen) slot value is
        different.
        '''
        rand = Settings.random.randint(len(a_u.items))
        a_u_i = a_u.items[rand]
        if a_u_i.slot is not None and a_u_i.val is not None:
            a_u.items[rand].val = self._get_confused_value_for_slot(a_u_i.slot, a_u_i.val)
        return a_u
class EMLevenshteinConfusionModel(EMConfusionModel):
    '''Derived class from :class:`EMConfusionModel`.

    Like :class:`EMRandomConfusionModel`, but replacement candidates are
    weighted by string similarity: values closer in Levenshtein distance to
    the original are more likely to be chosen, mimicking ASR-style errors.

    :param None:
    '''
    def __init__(self, domainString):
        self.domainString = domainString
        # Probabilities of confusing the act type / a slot name / a slot
        # value (they sum to 1.0).
        self.CONFUSE_TYPE = 0.2
        self.CONFUSE_SLOT = 0.3
        self.CONFUSE_VALUE = 0.5
        # Number of nearest neighbours kept in each confusion distribution.
        self.len_confusion_list = 6
        self.newUserActs = ['hello',
                            'thankyou',
                            'ack',
                            'bye',
                            'inform',
                            'request',
                            'reqalts',
                            'reqmore',
                            'confirm',
                            'affirm',
                            'negate',
                            'deny',
                            'repeat',
                            'null']
        self.nNewUserActs = len(self.newUserActs)
        # Precompute similarity-weighted confusion distributions for act
        # types, slot names, and the values of each system-requestable slot.
        self.type_confusions = self.get_confusion_distributions(self.newUserActs, offset=0.15)
        self.slot_confusions = self.get_confusion_distributions(Ontology.global_ontology.get_requestable_slots(self.domainString), offset=0.15)
        self.slot_value_confusions = {}
        # NOTE: `unicode` is the Python 2 builtin — this module targets py2.
        for slot in Ontology.global_ontology.get_system_requestable_slots(self.domainString) + [unicode('name')]:
            self.slot_value_confusions[slot] = self.get_confusion_distributions(
                Ontology.global_ontology.get_informable_slot_values(self.domainString, slot) + [unicode('dontcare')], offset=0.15)

    def get_confusion_distributions(self, word_list, offset=0.15):
        '''
        Build, for every word, a probability distribution over its nearest
        neighbours (by Levenshtein distance) in the same list.

        :param word_list: The list of words to be confused
        :param offset: Distribution softening factor, the largest the softer the distribution will be
        :return: dictionary mapping word -> {'wlist': neighbours, 'dist': probs}
        '''
        wlist = list(word_list)
        Settings.random.shuffle(wlist)
        distributions = {}
        # Full pairwise distance matrix.
        distances = [[self.levenshteinDistance(w1,w2) for w1 in wlist] for w2 in wlist]
        for i in range(len(wlist)):
            word = wlist[i]
            distributions[word] = {}
            # Skip index 0 of the sort: that is the word itself (distance 0).
            sorted_indexes = np.argsort(distances[i])[1:self.len_confusion_list+1]
            sorted_wordlist = np.array(wlist)[sorted_indexes]
            distribution = np.array(distances[i])[sorted_indexes]
            # Inverse-distance weights, normalized, then softened by `offset`
            # and re-normalized.
            distribution = 1./distribution
            distribution /= sum(distribution)
            distribution += offset
            distribution /= sum(distribution)
            distributions[word]['wlist'] = sorted_wordlist
            distributions[word]['dist'] = distribution
        return distributions

    def confuse_hyp(self, a_u):
        '''Randomly confuse the act type, slot or value.

        :param a_u: of :class:`DiaAct`
        :type a_u: instance
        :returns: (instance) of :class:`DiaAct` - modified input act
        '''
        wHyp = copy.deepcopy(a_u)
        # Identify if this diaact type takes 0, 1, or 2 arguments
        nSlotVal = wHyp.getDiaItemFormat()
        # Make a choice to confuse the type, slot or value
        choice = Settings.random.choice([0, 1, 2], p=[self.CONFUSE_TYPE, self.CONFUSE_SLOT, self.CONFUSE_VALUE])
        # Acts carrying no slot (or no value) cannot have it confused.
        choice = min(choice, nSlotVal)
        if choice == 0:
            wHyp = self._confuse_type(wHyp)
        elif choice == 1:
            wHyp = self._confuse_slot(wHyp)
        elif choice == 2:
            wHyp = self._confuse_value(wHyp)
        else:
            logger.error('Invalid choice ' + str(choice))
        return wHyp

    def _confuse_dia_act_type(self, oldtype):
        '''
        Select a dialogue act type different from oldtype, weighted by
        string similarity.
        '''
        return Settings.random.choice(self.type_confusions[oldtype]['wlist'], p=self.type_confusions[oldtype]['dist'])

    def _confuse_slot_name(self, old_name):
        '''
        Randomly select a slot name that is different from the given old_name,
        weighted by string similarity.
        '''
        return Settings.random.choice(self.slot_confusions[old_name]['wlist'], p=self.slot_confusions[old_name]['dist'])

    def _get_confused_value_for_slot(self, slot, old_val):
        '''
        Randomly select a slot value for the given slot s different from old_val.

        Falls back to a uniform ontology draw when no precomputed
        distribution exists for the slot or the value.
        '''
        if slot not in self.slot_value_confusions.keys():
            return Ontology.global_ontology.getRandomValueForSlot(self.domainString, slot=slot, notthese=[old_val])
        elif old_val not in self.slot_value_confusions[slot]:
            return Ontology.global_ontology.getRandomValueForSlot(self.domainString, slot=slot, notthese=[old_val])
        else:
            return Settings.random.choice(self.slot_value_confusions[slot][old_val]['wlist'], p=self.slot_value_confusions[slot][old_val]['dist'])

    def _confuse_type(self, hyp):
        '''
        Create a wrong hypothesis, where the dialogue act type is different.

        The original slot-value items are discarded and replaced by random
        ones matching the arity of the new act type.
        '''
        hyp.items = []
        hyp.act = self._confuse_dia_act_type(hyp.act)
        item_format = DiaAct.actTypeToItemFormat[hyp.act]
        if item_format == 0:
            return hyp
        elif item_format == 1:
            new_slot_name = Ontology.global_ontology.get_random_slot_name(self.domainString)
            hyp.append(new_slot_name, None)
        elif item_format == 2:
            new_slot_name = Ontology.global_ontology.get_random_slot_name(self.domainString)
            assert new_slot_name is not None
            new_slot_val = Ontology.global_ontology.getRandomValueForSlot(self.domainString, slot=new_slot_name)
            hyp.append(new_slot_name, new_slot_val)
        # TODO: If item_format is 3, it doesn't confuse slot-values.
        # This might be a bug in the original implementation.
        return hyp

    def _confuse_slot(self, hyp):
        '''
        Create a wrong hypothesis, where the slot names are different.

        Unlike the random model, the 'type' slot is also left untouched.
        '''
        for dip in hyp.items:
            # If the slot is empty, just break
            if dip.slot is None:
                break
            slot = dip.slot
            if slot == 'more' or slot == 'type':
                break
            dip.slot = self._confuse_slot_name(slot)
            if dip.val is not None:
                dip.val = Ontology.global_ontology.getRandomValueForSlot(self.domainString, slot=dip.slot)
        return hyp

    def _confuse_value(self, a_u):
        '''
        Create a wrong hypothesis, where one (randomly chosen) slot value is
        different; the 'type' slot is never confused.
        '''
        rand = Settings.random.randint(len(a_u.items))
        a_u_i = a_u.items[rand]
        if a_u_i.slot is not None and a_u_i.val is not None and a_u_i.slot != 'type':
            a_u.items[rand].val = self._get_confused_value_for_slot(a_u_i.slot, a_u_i.val)
        return a_u

    def levenshteinDistance(self, s1, s2):
        # Classic two-row dynamic-programming edit distance (insert, delete,
        # substitute each cost 1). Swapping ensures s1 is the shorter string.
        if len(s1) > len(s2):
            s1, s2 = s2, s1
        distances = range(len(s1) + 1)
        for i2, c2 in enumerate(s2):
            distances_ = [i2 + 1]
            for i1, c1 in enumerate(s1):
                if c1 == c2:
                    distances_.append(distances[i1])
                else:
                    distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1])))
            distances = distances_
        return distances[-1]
#END OF FILE
| 36.901266
| 146
| 0.587953
|
957fcb3bbb961803a60c0aee3892f74f0caa452f
| 993
|
py
|
Python
|
hard/python/c0066_329_longest-increasing-path-in-a-matrix/00_leetcode_0066.py
|
drunkwater/leetcode
|
8cc4a07763e71efbaedb523015f0c1eff2927f60
|
[
"Ruby"
] | null | null | null |
hard/python/c0066_329_longest-increasing-path-in-a-matrix/00_leetcode_0066.py
|
drunkwater/leetcode
|
8cc4a07763e71efbaedb523015f0c1eff2927f60
|
[
"Ruby"
] | null | null | null |
hard/python/c0066_329_longest-increasing-path-in-a-matrix/00_leetcode_0066.py
|
drunkwater/leetcode
|
8cc4a07763e71efbaedb523015f0c1eff2927f60
|
[
"Ruby"
] | 3
|
2018-02-09T02:46:48.000Z
|
2021-02-20T08:32:03.000Z
|
# DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#329. Longest Increasing Path in a Matrix
#Given an integer matrix, find the length of the longest increasing path.
#From each cell, you can either move to four directions: left, right, up or down. You may NOT move diagonally or move outside of the boundary (i.e. wrap-around is not allowed).
#Example 1:
#nums = [
# [9,9,4],
# [6,6,8],
# [2,1,1]
#]
#Return 4
#The longest increasing path is [1, 2, 6, 9].
#Example 2:
#nums = [
# [3,4,5],
# [3,2,6],
# [2,2,1]
#]
#Return 4
#The longest increasing path is [3, 4, 5, 6]. Moving diagonally is not allowed.
#Credits:
#Special thanks to @dietpepsi for adding this problem and creating all test cases.
#class Solution(object):
# def longestIncreasingPath(self, matrix):
# """
# :type matrix: List[List[int]]
# :rtype: int
# """
# Time Is Money
| 29.205882
| 176
| 0.679758
|
d82f1f0da7c8837570ea12a98c049e115ca4bb9c
| 9,585
|
py
|
Python
|
mcp/service/sftp.py
|
fkmclane/MCP
|
e80c3e12c163e9a67870d83340f434c5ed94e075
|
[
"MIT"
] | null | null | null |
mcp/service/sftp.py
|
fkmclane/MCP
|
e80c3e12c163e9a67870d83340f434c5ed94e075
|
[
"MIT"
] | 15
|
2015-06-13T22:37:25.000Z
|
2018-07-07T12:56:35.000Z
|
mcp/service/sftp.py
|
lilyinstarlight/MCP
|
e80c3e12c163e9a67870d83340f434c5ed94e075
|
[
"MIT"
] | null | null | null |
import multiprocessing
import os
import os.path
import re
import select
import socket
import paramiko
import mcp.config
import mcp.error
import mcp.model.server
import mcp.model.user
# inspired by https://github.com/rspivak/sftpserver/blob/master/src/sftpserver/stub_sftp.py
class SFTPHandle(paramiko.SFTPHandle):
    """Open-file handle supporting stat and attribute changes on the file."""

    def stat(self):
        """Return SFTP attributes for the open file, or an SFTP errno code."""
        try:
            st = os.fstat(self.readfile.fileno())
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTPAttributes.from_stat(st)

    def chattr(self, attr):
        """Apply the requested attribute changes to the underlying file."""
        try:
            paramiko.SFTPServer.set_file_attr(self.filename, attr)
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK
class SFTPServerInterface(paramiko.SFTPServerInterface):
    """Per-session SFTP backend that confines each authenticated user to the
    server directories they own under ``mcp.config.prefix`` (admins see all).
    """

    def __init__(self, server, *args, **kwargs):
        # The ServerInterface that authenticated this session stored the
        # User object on itself; keep it for per-path permission checks.
        self.user = server.user
        super().__init__(server, *args, **kwargs)

    def forbidden(self, path, modify=False):
        """Return True when *path* (an absolute real path) must be denied.

        With ``modify=True`` the check also forbids changes to the prefix
        root and to a server's top-level directory itself.
        """
        # The prefix root is browsable but never modifiable.
        if path == os.path.join(mcp.config.prefix, ''):
            return modify
        # Path must name a known server directory (or something inside one).
        match = re.match('^' + os.path.join(mcp.config.prefix, '') + '(?P<server>' + mcp.model.server.servers_allowed + ')(?:$|/)', path)
        if not match:
            return True
        server = match.group('server')
        # The server directory itself may be read but not renamed/removed.
        if modify and (path == os.path.join(mcp.config.prefix, server) or path == os.path.join(mcp.config.prefix, server, '')):
            return True
        if self.user.admin:
            return False
        # Regular users only reach servers assigned to them.
        return server not in self.user.servers

    def realpath(self, path, readlink=False):
        """Map a client path to a real path rooted at the prefix.

        With ``readlink=True`` symlinks are resolved (staying under the
        prefix).
        """
        rpath = os.path.join(mcp.config.prefix, self.canonicalize(path)[1:])
        if readlink:
            # NOTE(review): a symlink cycle would loop forever here —
            # confirm links under the prefix cannot form cycles.
            while os.path.islink(rpath):
                rpath = os.path.join(mcp.config.prefix, self.canonicalize(os.path.join(os.path.dirname(path), os.readlink(rpath)))[1:])
        return rpath

    def chattr(self, path, attr):
        """Change file attributes; denied paths get SFTP_PERMISSION_DENIED."""
        if self.forbidden(self.realpath(path), True):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            paramiko.SFTPServer.set_file_attr(self.realpath(path), attr)
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK

    def list_folder(self, path):
        """List a directory, hiding root-level servers the user cannot see."""
        if self.forbidden(self.realpath(path, True)):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            rpath = self.realpath(path, True)
            folder = []
            files = os.listdir(rpath)
            for filename in files:
                # At the prefix root, non-admins only see their own servers.
                if rpath != os.path.join(mcp.config.prefix, '') or self.user.admin or filename in self.user.servers:
                    attr = paramiko.SFTPAttributes.from_stat(os.lstat(os.path.join(rpath, filename)))
                    attr.filename = filename
                    folder.append(attr)
            return folder
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)

    def lstat(self, path):
        """stat() without following a final symlink."""
        if self.forbidden(self.realpath(path)):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            return paramiko.SFTPAttributes.from_stat(os.lstat(self.realpath(path)))
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)

    def mkdir(self, path, attr):
        """Create a directory, optionally applying requested attributes."""
        if self.forbidden(self.realpath(path), True):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            os.mkdir(self.realpath(path))
            if attr is not None:
                paramiko.SFTPServer.set_file_attr(self.realpath(path), attr)
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK

    def open(self, path, flags, attr):
        """Open a file and return an :class:`SFTPHandle` (or an errno code).

        Translates the client's O_* flags into an os.open() call plus a
        matching buffered-file mode string.
        """
        if self.forbidden(self.realpath(path, True), True):
            return paramiko.SFTP_PERMISSION_DENIED
        rpath = self.realpath(path)
        rflags = flags
        rattr = attr
        try:
            # O_BINARY exists only on Windows; 0 elsewhere.
            binary_flag = getattr(os, 'O_BINARY', 0)
            rflags |= binary_flag
            mode = getattr(attr, 'st_mode', None)
            fd = os.open(rpath, rflags, mode if mode is not None else 0o666)
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        if flags & os.O_CREAT and rattr is not None:
            # Permissions were already applied by os.open(); don't set twice.
            rattr._flags &= ~rattr.FLAG_PERMISSIONS
            paramiko.SFTPServer.set_file_attr(rpath, rattr)
        # Derive the fdopen() mode string from the access flags.
        if flags & os.O_WRONLY:
            if flags & os.O_APPEND:
                fstr = 'ab'
            else:
                fstr = 'wb'
        elif flags & os.O_RDWR:
            if flags & os.O_APPEND:
                fstr = 'a+b'
            else:
                fstr = 'r+b'
        else:
            fstr = 'rb'
        try:
            f = os.fdopen(fd, fstr)
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        fobj = SFTPHandle(rflags)
        fobj.filename = rpath
        # One buffered file object serves both read and write sides.
        fobj.readfile = f
        fobj.writefile = f
        return fobj

    def posix_rename(self, oldpath, newpath):
        """POSIX rename (overwrites the target if it exists)."""
        if self.forbidden(self.realpath(oldpath), True) or self.forbidden(self.realpath(newpath), True):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            os.rename(self.realpath(oldpath), self.realpath(newpath))
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK

    def readlink(self, path):
        """Read a symlink target, exposed relative to the prefix root."""
        if self.forbidden(self.realpath(path)):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            symlink = os.readlink(self.realpath(path))
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        # Only links that stay under the prefix are visible to clients.
        if not symlink.startswith(mcp.config.prefix + os.sep):
            return paramiko.SFTP_NO_SUCH_FILE
        return symlink[len(mcp.config.prefix + os.sep):]

    def remove(self, path):
        """Delete a file."""
        if self.forbidden(self.realpath(path), True):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            os.remove(self.realpath(path))
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK

    def rename(self, oldpath, newpath):
        """SFTP rename; os.replace() gives atomic overwrite semantics."""
        if self.forbidden(self.realpath(oldpath), True) or self.forbidden(self.realpath(newpath), True):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            os.replace(self.realpath(oldpath), self.realpath(newpath))
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK

    def rmdir(self, path):
        """Delete an (empty) directory."""
        if self.forbidden(self.realpath(path), True):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            os.rmdir(self.realpath(path))
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK

    def stat(self, path):
        """stat() following symlinks."""
        if self.forbidden(self.realpath(path, True)):
            return paramiko.SFTP_PERMISSION_DENIED
        try:
            return paramiko.SFTPAttributes.from_stat(os.stat(self.realpath(path, True)))
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)

    def symlink(self, target_path, path):
        """Create a symlink; absolute targets are re-rooted at the prefix."""
        if self.forbidden(self.realpath(path), True):
            return paramiko.SFTP_PERMISSION_DENIED
        rpath = self.realpath(path)
        if len(target_path) > 0 and target_path[0] == '/':
            rtpath = self.realpath(target_path)
        else:
            rtpath = os.path.normpath(target_path)
        try:
            os.symlink(rtpath, rpath)
        except OSError as err:
            return paramiko.SFTPServer.convert_errno(err.errno)
        return paramiko.SFTP_OK
class ServerInterface(paramiko.ServerInterface):
    """SSH server policy: password auth against the MCP user database,
    session channels only."""

    def check_auth_password(self, username, password):
        """Authenticate and remember the user; fail on an unknown account."""
        try:
            self.user = mcp.model.user.check_user(username, password)
        except mcp.error.NoUserError:
            return paramiko.AUTH_FAILED
        return paramiko.AUTH_SUCCESSFUL

    def check_channel_request(self, kind, chanid):
        """Allow only 'session' channels."""
        if kind != 'session':
            return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
        return paramiko.OPEN_SUCCEEDED

    def get_allowed_auths(self, username):
        """Only password authentication is offered."""
        return 'password'
def run():
    """Accept loop for the SFTP service; exits when ``running.value`` clears.

    Fixes: the host key is now loaded once instead of re-read from disk for
    every connection, the listening socket is closed on exit, and unused
    bindings (``addr``, ``channel``) are dropped.
    """
    # The host key does not change between connections — load it once.
    host_key = paramiko.RSAKey.from_private_key_file(mcp.config.sftpkey)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
        sock.bind(mcp.config.sftpaddr)
        sock.listen()
        while running.value:
            # Poll so the shared stop flag is rechecked periodically.
            read, _, _ = select.select([sock], [], [], mcp.config.poll_interval)
            if read:
                conn, _ = sock.accept()
                transport = paramiko.Transport(conn)
                transport.add_server_key(host_key)
                transport.set_subsystem_handler('sftp', paramiko.SFTPServer, SFTPServerInterface)
                transport.start_server(server=ServerInterface())
                # Block until the client opens its channel (handled by the
                # subsystem); the channel object itself is not needed here.
                transport.accept()
    finally:
        sock.close()
# Shared flag polled by the service loop; 'b' (signed char) used as a boolean.
running = multiprocessing.Value('b', False)
# Handle of the worker process; None while the service is stopped.
process = None
def start():
    """Launch the SFTP service in a worker process (no-op if running)."""
    global process
    if process:
        return
    # Set the flag before starting so the loop condition is true on entry.
    running.value = True
    process = multiprocessing.Process(target=run, name='mcp-sftp')
    process.start()
def stop():
    """Signal the SFTP service to stop and wait for the process to exit.

    No-op when the service is not running. (The needless ``global running``
    was dropped — only the shared value's attribute is mutated, never the
    name itself.)
    """
    global process
    if not process:
        return
    # Clearing the shared flag makes the run() loop exit within one poll
    # interval; then reap the process.
    running.value = False
    process.join()
    process = None
| 29.859813
| 137
| 0.621701
|
9463ca90cc580772e7ea0e7f804a60617f8e8d46
| 202
|
py
|
Python
|
crudproject/crudapp/admin.py
|
KonstantineDM/django-crud-tokenauth
|
4e432619ae388d38325e9501c2e3d5a693e36ed8
|
[
"MIT"
] | null | null | null |
crudproject/crudapp/admin.py
|
KonstantineDM/django-crud-tokenauth
|
4e432619ae388d38325e9501c2e3d5a693e36ed8
|
[
"MIT"
] | null | null | null |
crudproject/crudapp/admin.py
|
KonstantineDM/django-crud-tokenauth
|
4e432619ae388d38325e9501c2e3d5a693e36ed8
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from crudapp import models
class AccountAdmin(admin.ModelAdmin):
    # Columns shown in the admin changelist for Account rows.
    list_display = ['id', 'username', 'email', ]

# Register Account with the customized changelist above.
admin.site.register(models.Account, AccountAdmin)
| 18.363636
| 49
| 0.752475
|
7f91c9925c15676fe33b4e3087f89ca94c91f186
| 568
|
py
|
Python
|
backend.py
|
YamiAtem/YTDownloader
|
01839122a8fecdca2a62e621b4edfa606c8745b8
|
[
"MIT"
] | 6
|
2021-07-05T04:31:39.000Z
|
2021-12-17T01:48:15.000Z
|
backend.py
|
YamiAtem/YTDownloader
|
01839122a8fecdca2a62e621b4edfa606c8745b8
|
[
"MIT"
] | 1
|
2021-12-17T03:36:03.000Z
|
2021-12-17T03:36:03.000Z
|
backend.py
|
YamiAtem/YTDownloader
|
01839122a8fecdca2a62e621b4edfa606c8745b8
|
[
"MIT"
] | 2
|
2021-12-16T11:12:36.000Z
|
2022-01-22T04:17:18.000Z
|
import os
import requests
from PySide6.QtCore import Slot, QObject
from pytube import YouTube, exceptions
class BackEnd(QObject):
    """Qt bridge object exposing backend operations to the UI via slots."""

    def __init__(self, app):
        super().__init__()
        self.app = app
        # Presumably tracks the size of the file being downloaded — set
        # elsewhere in the class; confirm against the full source.
        self.file_size = 0

    @Slot(result=bool)
    def check_internet(self):
        """Return True when the project page answers within 5 seconds.

        Fixed: the response was bound to an unused variable, and the success
        return now sits outside the try so only the request itself is guarded.
        """
        url = "https://yamiatem.github.io/YTDownloader/"
        timeout = 5
        try:
            requests.get(url, timeout=timeout)
        except (requests.ConnectionError, requests.Timeout):
            return False
        return True
| 24.695652
| 73
| 0.626761
|
9de24defeeb5ea2c18d40bfc2aa3c6d5b121a9e4
| 3,796
|
py
|
Python
|
scripts/checking/check_feature.py
|
yeastgenome/SGDBackend-Nex2
|
8bd14e45def10361d1c176ce2825a8eff621dea1
|
[
"MIT"
] | 5
|
2015-11-24T23:09:46.000Z
|
2019-11-06T17:48:13.000Z
|
scripts/checking/check_feature.py
|
yeastgenome/SGDBackend-Nex2
|
8bd14e45def10361d1c176ce2825a8eff621dea1
|
[
"MIT"
] | 188
|
2017-08-28T22:39:03.000Z
|
2022-03-02T14:53:46.000Z
|
scripts/checking/check_feature.py
|
yeastgenome/SGDBackend-Nex2
|
8bd14e45def10361d1c176ce2825a8eff621dea1
|
[
"MIT"
] | 7
|
2018-05-13T01:58:07.000Z
|
2021-06-25T19:08:33.000Z
|
from sqlalchemy import or_
import sys
from src.models import Taxonomy, Dbentity, Locusdbentity, So, Sgdid, Dnasequenceannotation
from scripts.loading.database_session import get_session
import logging
__author__ = 'sweng66'

# Bare-message log format so the report reads as a plain checklist.
logging.basicConfig(format='%(message)s')
log = logging.getLogger()
log.setLevel(logging.INFO)

# Taxon identifier used to select annotations — presumably S. cerevisiae
# S288C (taxid 559292); confirm against the taxonomy table.
TAXON = 'TAX:559292'
def check_data():
    """Run all feature-consistency reports against the database.

    Each sub-check writes its findings under a section header via ``log``.
    """
    nex_session = get_session()
    taxonomy = nex_session.query(Taxonomy).filter_by(taxid=TAXON).one_or_none()
    taxonomy_id = taxonomy.taxonomy_id
    locusData = nex_session.query(Locusdbentity).all()
    # Map each feature to its (contig, sequence-ontology term) from the
    # GENOMIC annotation for the reference taxon.
    dbentity_id_to_contig_so_id = dict([(x.dbentity_id, (x.contig_id, x.so_id)) for x in nex_session.query(Dnasequenceannotation).filter_by(dna_type='GENOMIC', taxonomy_id=taxonomy_id).all()])
    ###### features
    log.info("\n* Features with not unique gene name:\n")
    check_unique_gene_name(nex_session, locusData)
    log.info("\n* Features with genetic position information not associated with a chromosome:\n")
    check_genetic_mapped_genes(nex_session, locusData, dbentity_id_to_contig_so_id)
    log.info("\n* Features with qualifier not associated with ORF:\n")
    check_orf_features(nex_session, locusData, dbentity_id_to_contig_so_id)
    log.info("\n* Features ('Active') with 'Deleted' SGDIDs:\n")
    # NOTE(review): the SGDID check below is disabled, yet its section
    # header is still logged — the report section will always be empty.
    # check_sgdid_status_for_active_dbentity_rows(nex_session)
    nex_session.close()
def check_sgdid_status_for_active_dbentity_rows(nex_session):
    """Report dbentity rows whose SGDID is marked 'Deleted' (or missing).

    Equivalent SQL:
    select * from nex.dbentity where dbentity_status = 'Active' and sgdid in
    (select display_name from nex.sgdid where sgdid_status = 'Deleted');
    """
    sgdid_to_status= dict([(x.display_name, x.sgdid_status) for x in nex_session.query(Sgdid).all()])
    for subclass in ['LOCUS', 'REFERENCE', 'ALLELE', 'STRAIN', 'COMPLEX', 'FILE', 'PATHWAY', 'TRANSCRIPT']:
        for x in nex_session.query(Dbentity).filter_by(subclass=subclass).all():
            if x.sgdid not in sgdid_to_status:
                log.info("\tSGDID: " + x.sgdid + " is not in sgdid table.")
                continue
            if sgdid_to_status[x.sgdid] == 'Deleted':
                log.info("\t" + x.subclass + ": " + x.display_name)
def check_orf_features(nex_session, locusData, dbentity_id_to_contig_so_id):
    """Log 'Active' features that carry a qualifier but either have no
    GENOMIC sequence annotation or are not annotated with the ORF SO term.

    Args:
        nex_session: open SQLAlchemy session.
        locusData: list of Locusdbentity rows to examine.
        dbentity_id_to_contig_so_id: dbentity_id -> (contig_id, so_id)
            mapping built from GENOMIC Dnasequenceannotation rows.
    """
    ## STA1
    so = nex_session.query(So).filter_by(display_name='ORF').one_or_none()
    for x in locusData:
        if x.dbentity_status != 'Active':
            continue
        if x.qualifier:
            if x.dbentity_id not in dbentity_id_to_contig_so_id:
                # Qualifier present but no GENOMIC annotation at all.
                log.info("\t" + x.systematic_name)
                continue
            (contig_id, so_id) = dbentity_id_to_contig_so_id[x.dbentity_id]
            # BUG FIX: the original comparison was `so_id != so_id`, a
            # tautology that is always False, so non-ORF features with a
            # qualifier were never reported. Compare the annotation's SO id
            # against the id of the 'ORF' SO term instead.
            if so is not None and so_id != so.so_id:
                log.info("\t" + x.systematic_name)
def check_genetic_mapped_genes(nex_session, locusData, dbentity_id_to_contig_so_id):
    """Log features that have a genetic position but no GENOMIC sequence
    annotation (i.e. no chromosome/contig association)."""
    for locus in locusData:
        if locus.genetic_position is None:
            continue
        if locus.dbentity_id in dbentity_id_to_contig_so_id:
            continue
        # Prefer "GENE/SYSTEMATIC" when a distinct gene name exists.
        display = locus.systematic_name
        if locus.gene_name and locus.gene_name != display:
            display = locus.gene_name + "/" + display
        log.info("\t" + display)
def check_unique_gene_name(nex_session, locusData):
    """Log gene names that are attached to more than one systematic name."""
    # update nex.locusdbentity set gene_name = 'ACT1' where systematic_name = 'YDL041W';
    first_seen = {}
    for locus in locusData:
        gene = locus.gene_name
        if gene is None:
            continue
        if gene in first_seen:
            # Duplicate: report against the first systematic name seen.
            log.info("\t" + gene + ": associated with " + first_seen[gene] + " and " + locus.systematic_name)
        else:
            first_seen[gene] = locus.systematic_name
# Entry point: run all consistency checks and log the findings.
if __name__ == '__main__':
    check_data()
| 37.584158
| 192
| 0.662276
|
bb558ce5580160c1bf22266a4cfe9c422d18060c
| 4,026
|
py
|
Python
|
embiggen/embedders/ensmallen_embedders/spine.py
|
monarch-initiative/N2V
|
8ae02ca125f1d24ca158c2849f2d9bb1711920b9
|
[
"BSD-3-Clause"
] | 2
|
2020-01-30T11:57:37.000Z
|
2020-05-02T00:05:49.000Z
|
embiggen/embedders/ensmallen_embedders/spine.py
|
monarch-initiative/N2V
|
8ae02ca125f1d24ca158c2849f2d9bb1711920b9
|
[
"BSD-3-Clause"
] | 93
|
2020-01-26T00:43:51.000Z
|
2020-05-10T03:29:54.000Z
|
embiggen/embedders/ensmallen_embedders/spine.py
|
monarch-initiative/N2V
|
8ae02ca125f1d24ca158c2849f2d9bb1711920b9
|
[
"BSD-3-Clause"
] | 5
|
2020-02-13T07:18:11.000Z
|
2020-03-19T08:03:34.000Z
|
"""Module providing abstract Node2Vec implementation."""
from typing import Optional, Dict, Any, Union
from ensmallen import Graph
import numpy as np
import pandas as pd
from ensmallen import models
from embiggen.utils.abstract_models import AbstractEmbeddingModel, EmbeddingResult
class SPINE(AbstractEmbeddingModel):
    """Class implementing the SPINE algorithm."""
    def __init__(
        self,
        embedding_size: int = 100,
        dtype: Optional[str] = "u8",
        enable_cache: bool = False
    ):
        """Create a new SPINE embedding model.
        Parameters
        --------------------------
        embedding_size: int = 100
            Dimension of the embedding.
        dtype: Optional[str] = "u8"
            Dtype to use for the embedding. Note that an improper dtype may cause overflows.
        enable_cache: bool = False
            Whether to enable the cache, that is to
            store the computed embedding.
        """
        self._dtype = dtype
        self._model = models.SPINE(embedding_size=embedding_size)
        super().__init__(
            embedding_size=embedding_size,
            enable_cache=enable_cache
        )
    def parameters(self) -> Dict[str, Any]:
        """Returns parameters of the model."""
        return {
            **super().parameters(),
            **dict(
                dtype=self._dtype
            )
        }
    @staticmethod
    def smoke_test_parameters() -> Dict[str, Any]:
        """Returns parameters for smoke test."""
        return dict(
            embedding_size=5,
        )
    def _fit_transform(
        self,
        graph: Graph,
        return_dataframe: bool = True,
        verbose: bool = True
    ) -> EmbeddingResult:
        """Return node embedding for *graph*, optionally as a DataFrame
        indexed by node name."""
        # The Rust model returns features-by-nodes; transpose to nodes-by-features.
        node_embedding = self._model.fit_transform(
            graph,
            dtype=self._dtype,
            verbose=verbose,
        ).T
        if return_dataframe:
            node_embedding = pd.DataFrame(
                node_embedding,
                index=graph.get_node_names()
            )
        return EmbeddingResult(
            embedding_method_name=self.model_name(),
            node_embeddings=node_embedding
        )
    @staticmethod
    def task_name() -> str:
        """Returns name of the task this model solves."""
        return "Node Embedding"
    @staticmethod
    def model_name() -> str:
        """Returns name of the model."""
        return "SPINE"
    @staticmethod
    def library_name() -> str:
        """Returns name of the backing library."""
        return "Ensmallen"
    @staticmethod
    def requires_nodes_sorted_by_decreasing_node_degree() -> bool:
        """Returns whether nodes must be degree-sorted before fitting."""
        return False
    @staticmethod
    def is_topological() -> bool:
        """Returns whether the embedding is purely topological."""
        return True
    @staticmethod
    def requires_node_types() -> bool:
        """Returns whether the model requires node types."""
        return False
    @staticmethod
    def requires_edge_types() -> bool:
        """Returns whether the model requires edge types."""
        return False
    @staticmethod
    def requires_edge_weights() -> bool:
        """Returns whether the model requires edge weights."""
        return False
    @staticmethod
    def requires_positive_edge_weights() -> bool:
        """Returns whether the model requires strictly positive edge weights."""
        return False
    @staticmethod
    def can_use_edge_weights() -> bool:
        """Returns whether the model can optionally use edge weights."""
        return False
    def is_using_edge_weights(self) -> bool:
        """Returns whether the model is parametrized to use edge weights."""
        return False
    @staticmethod
    def can_use_node_types() -> bool:
        """Returns whether the model can optionally use node types."""
        return False
    def is_using_node_types(self) -> bool:
        """Returns whether the model is parametrized to use node types."""
        return False
    @staticmethod
    def can_use_edge_types() -> bool:
        """Returns whether the model can optionally use edge types."""
        return False
    def is_using_edge_types(self) -> bool:
        """Returns whether the model is parametrized to use edge types."""
        return False
    @staticmethod
    def is_stocastic() -> bool:
        """Returns whether the model is stocastic and has therefore a random state."""
        return False
| 28.153846
| 92
| 0.604322
|
dc7eea5567e4eaa1c9abda4fbe4d6990d013e22d
| 657
|
py
|
Python
|
20_golang/day01/map/a.py
|
HanTianPeng/fullstackreview
|
250523e017e2f87a2792751e64db9515e5395209
|
[
"MIT"
] | 2
|
2020-11-26T16:06:26.000Z
|
2020-11-26T16:07:10.000Z
|
20_golang/day01/map/a.py
|
HanTianPeng/fullstackreview
|
250523e017e2f87a2792751e64db9515e5395209
|
[
"MIT"
] | 1
|
2020-11-25T16:48:26.000Z
|
2020-11-25T16:48:26.000Z
|
20_golang/day01/map/a.py
|
HanTianPeng/fullstackreview
|
250523e017e2f87a2792751e64db9515e5395209
|
[
"MIT"
] | null | null | null |
def lengthOfLongestSubstring(s):
    """Return the length of the longest substring of *s* without
    repeating characters, using a sliding window in O(len(s)).

    :type s: str
    :rtype: int

    >>> lengthOfLongestSubstring("abcabcbb")
    3
    """
    last_seen = {}   # character -> index of its most recent occurrence
    start = 0        # left edge of the current repeat-free window
    longest = 0
    for index, ch in enumerate(s):
        prev = last_seen.get(ch)
        # BUG FIX: the original guard was `if str(prev) and ...`, which is
        # always truthy (str(None) == 'None'), so `None >= start` raised a
        # TypeError on Python 3 at the first occurrence of any character.
        # Also removed the per-iteration debug prints and the shadowing of
        # the builtin `max`.
        if prev is not None and prev >= start:
            # Repeat inside the window: move the window past the old copy.
            start = prev + 1
        if longest < index - start + 1:
            longest = index - start + 1
        last_seen[ch] = index
    return longest


value = lengthOfLongestSubstring("abcabcbb")
print("value===>", value)
| 25.269231
| 49
| 0.412481
|
1e06ccdbef2234bdecca4b6840b9c6e8c983ea90
| 4,784
|
py
|
Python
|
blade_runner/controllers/verification_controller.py
|
univ-of-utah-marriott-library-apple/blade_runner
|
8581055553ba1f8094f64d96222f4e87c6c981b1
|
[
"MIT"
] | 23
|
2019-05-22T01:49:14.000Z
|
2021-09-01T18:02:35.000Z
|
blade_runner/controllers/verification_controller.py
|
univ-of-utah-marriott-library-apple/blade_runner
|
8581055553ba1f8094f64d96222f4e87c6c981b1
|
[
"MIT"
] | 1
|
2019-05-30T16:12:00.000Z
|
2019-05-30T16:12:00.000Z
|
blade_runner/controllers/verification_controller.py
|
univ-of-utah-marriott-library-apple/blade_runner
|
8581055553ba1f8094f64d96222f4e87c6c981b1
|
[
"MIT"
] | 3
|
2019-05-30T04:12:44.000Z
|
2020-07-10T16:40:14.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
# Copyright (c) 2019 University of Utah Student Computing Labs.
# All Rights Reserved.
#
# Author: Thackery Archuletta
# Creation Date: Oct 2018
# Last Updated: March 2019
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appears in all copies and
# that both that copyright notice and this permission notice appear
# in supporting documentation, and that the name of The University
# of Utah not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission. This software is supplied as is without expressed or
# implied warranties of any kind.
################################################################################
import logging
from blade_runner.controllers.entry_controller import EntryController
from blade_runner.views.verification_view import VerifyView
logging.getLogger(__name__).addHandler(logging.NullHandler())
class VerificationController(EntryController):
    """Controller for VerificationView.

    NOTE(review): relies on helpers not visible in this file
    (_store_user_entry, _fill_user_entry, _grid_user_widget,
    _set_to_middle) — presumably provided by EntryController; confirm.
    """
    def __init__(self, master, computer, verify_params, search_params):
        """Set up the verification controller.
        Args:
            master: Parent Tk window.
            computer (Computer): Stores information about the computer.
            verify_params (VerifyParams): Verification parameters.
            search_params (SearchParams): Search parameters.
        """
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        self.logger = logging.getLogger(__name__)
        # Save the view in the superclass (EntryController).
        # The view must exist before super().__init__ is called.
        view = VerifyView(master, self)
        super(VerificationController, self).__init__(computer, view)
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Store inputs.
        self.proceed = False
        self.verify_params = verify_params
        self.search_params = search_params
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Grid user entries.
        self._grid_user_widgets()
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Fill user entries.
        self._fill_user_entries()
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Set view to middle of screen.
        self._set_to_middle(view)
    def proceed_operation(self):
        """If user continues with operation, store entries, set proceed to True, and destroy the view.
        Returns:
            void
        """
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Store user entries into computer object.
        self._store_user_entries()
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Set proceed flag
        self.proceed = True
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Destroy the view.
        self.entry_view.destroy()
    def _store_user_entries(self):
        """Store user entries.
        Returns:
            void
        """
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # For every enabled verification parameter, store the value in its corresponding entry.
        for param in self.verify_params.enabled:
            self._store_user_entry(self.computer, param)
    def _fill_user_entries(self):
        """Fill user entries with information from the computer object.
        Returns:
            void
        """
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # For every enabled verification parameter, set its value in its corresponding entry.
        for param in self.verify_params.enabled:
            self._fill_user_entry(self.computer, param)
    def _grid_user_widgets(self):
        """Grid user widgets into the view.
        Returns:
            void
        """
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # Grid the user label.
        self.entry_view.user_lbl.grid(row=0, column=1)
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        # For every enabled verification parameter, grid its corresponding widget.
        for param in self.verify_params.enabled:
            self._grid_user_widget(param)
    def cancel_operation(self):
        """If user cancels operation, set proceed to False and destroy the view.
        Returns:
            void
        """
        # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
        self.proceed = False
        self.entry_view.destroy()
| 38.580645
| 102
| 0.519231
|
9c0cb890a9febc8d6b10eb6b566365aaa84b5a3b
| 4,133
|
py
|
Python
|
swagger_client/models/get_universe_system_jumps_200_ok.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
swagger_client/models/get_universe_system_jumps_200_ok.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
swagger_client/models/get_universe_system_jumps_200_ok.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online # noqa: E501
OpenAPI spec version: 0.8.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class GetUniverseSystemJumps200Ok(object):
    """Swagger model for one entry of the universe system-jumps endpoint.

    NOTE: This class is auto generated by the swagger code generator program;
    do not edit the class manually.
    """

    # swagger_types: attribute name -> python type
    # attribute_map: attribute name -> JSON key in the API definition
    swagger_types = {
        'system_id': 'int',
        'ship_jumps': 'int'
    }
    attribute_map = {
        'system_id': 'system_id',
        'ship_jumps': 'ship_jumps'
    }

    def __init__(self, system_id=None, ship_jumps=None):  # noqa: E501
        """GetUniverseSystemJumps200Ok - a model defined in Swagger.

        Both fields are required; the property setters validate them.
        """
        self._system_id = None
        self._ship_jumps = None
        self.discriminator = None
        self.system_id = system_id
        self.ship_jumps = ship_jumps

    @property
    def system_id(self):
        """int: the solar-system identifier (required)."""
        return self._system_id

    @system_id.setter
    def system_id(self, system_id):
        """Set system_id; a non-None value is mandatory."""
        if system_id is None:
            raise ValueError("Invalid value for `system_id`, must not be `None`")  # noqa: E501
        self._system_id = system_id

    @property
    def ship_jumps(self):
        """int: the number of ship jumps in the system (required)."""
        return self._ship_jumps

    @ship_jumps.setter
    def ship_jumps(self, ship_jumps):
        """Set ship_jumps; a non-None value is mandatory."""
        if ship_jumps is None:
            raise ValueError("Invalid value for `ship_jumps`, must not be `None`")  # noqa: E501
        self._ship_jumps = ship_jumps

    def to_dict(self):
        """Returns the model properties as a dict, recursing into nested
        models, lists and dicts that expose to_dict()."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: val.to_dict() if hasattr(val, "to_dict") else val
                                for key, val in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is the same model type with equal state."""
        if not isinstance(other, GetUniverseSystemJumps200Ok):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| 28.503448
| 96
| 0.586257
|
9085654a6c093166ce117df1ec0e0aad54fb22bb
| 237
|
py
|
Python
|
PROJECTS/p002/PSEF_SCRIPTS/pa_cfg_correction.py
|
nihole/PSEFABRIC
|
366461ab86f99665bf310425c6ce05a216343ec9
|
[
"Apache-2.0",
"MIT"
] | 11
|
2017-06-29T10:12:39.000Z
|
2020-03-12T07:19:11.000Z
|
PROJECTS/p002/PSEF_SCRIPTS/pa_cfg_correction.py
|
nihole/PSEFABRIC
|
366461ab86f99665bf310425c6ce05a216343ec9
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
PROJECTS/p002/PSEF_SCRIPTS/pa_cfg_correction.py
|
nihole/PSEFABRIC
|
366461ab86f99665bf310425c6ce05a216343ec9
|
[
"Apache-2.0",
"MIT"
] | 3
|
2019-04-18T06:44:21.000Z
|
2021-06-26T14:22:55.000Z
|
def pa_cli_correction(cfg_txt):
    """Return *cfg_txt* with duplicate lines removed, keeping only the
    first occurrence of each line (original order preserved)."""
    # dict preserves insertion order (Python 3.7+), so fromkeys() acts as
    # an order-preserving de-duplication of the command lines.
    return '\n'.join(dict.fromkeys(cfg_txt.splitlines()))
| 15.8
| 37
| 0.628692
|
643be27a5e2810563c7d28e56afc4163edbfe3e4
| 1,716
|
py
|
Python
|
rules/quant/scripts/unitas_mirtable.py
|
gcfntnu/small-rna
|
459f946dfb338017e8beb350da215aaac6a37c59
|
[
"MIT"
] | 1
|
2021-01-20T08:49:12.000Z
|
2021-01-20T08:49:12.000Z
|
rules/quant/scripts/unitas_mirtable.py
|
gcfntnu/small-rna
|
459f946dfb338017e8beb350da215aaac6a37c59
|
[
"MIT"
] | 4
|
2021-01-06T12:47:14.000Z
|
2021-01-19T14:05:45.000Z
|
rules/quant/scripts/unitas_mirtable.py
|
gcfntnu/small-rna
|
459f946dfb338017e8beb350da215aaac6a37c59
|
[
"MIT"
] | null | null | null |
#!/usr/bin env python
""" Parse the unitas simplified mirna table.
Counts in this table is different from counts of micro-rna in allfeatures table as this table is not doing fractionated counts on sequences with multiple annotations.
"""
import sys
import os
import glob
import argparse
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
def samplesheet_ids(fn, sep='\t'):
    """Return the values of the Sample_ID column from a delimited sample sheet.

    Args:
        fn: path to the sample sheet; the first line must be a header row.
        sep: column separator (default: tab).

    Returns:
        list of sample ids, in file order.

    Raises:
        ValueError: if the header has no `Sample_ID` column.
    """
    with open(fn) as fh:
        lines = fh.read().splitlines()
    header = lines.pop(0).split(sep)
    if 'Sample_ID' not in header:
        raise ValueError('`Sample_ID` column not found in samplesheet')
    # BUG FIX: the original split data rows on a literal '\t' regardless of
    # `sep`, and always took column 0 even though the header only checked
    # membership. Use `sep` consistently and locate the column by name
    # (backward compatible: Sample_ID is column 0 in tab-separated sheets).
    col = header.index('Sample_ID')
    return [line.split(sep)[col] for line in lines]
def argparser():
    """Build the command-line interface and parse sys.argv.

    Returns the parsed argparse.Namespace with attributes
    `samples`, `output` and `filenames`.
    """
    parser = argparse.ArgumentParser(description='Aggregate unitas mirtables')
    parser.add_argument(
        '--sample-sheet', dest='samples',
        help='Optional sample sheet. Will subset aggregated table if needed')
    parser.add_argument(
        '-o ', '--output',
        help='Output filename. Will default to stdout.')
    parser.add_argument('filenames', nargs='+')
    return parser.parse_args()
# Aggregate per-sample unitas miRNA tables into one wide matrix.
if __name__ == '__main__':
    args = argparser()
    # NOTE(review): args.samples (the optional sample sheet) is parsed but
    # never used here — subsetting appears unimplemented; confirm.
    import pandas as pd
    df_list = []
    for fn in args.filenames:
        # The sample id is the name of the file's parent directory.
        sample_id = os.path.dirname(fn).split(os.path.sep)[-1]
        df = pd.read_csv(fn, sep='\t', index_col=0)
        df.columns = [sample_id]
        df_list.append(df)
    # Outer join keeps every miRNA observed in any sample.
    DF = pd.concat(df_list, axis=1, join='outer', sort=False)
    if args.output is None:
        out_fn = sys.stdout
    else:
        out_fn = args.output
    # Missing miRNA/sample combinations become zero counts.
    DF.fillna(0, inplace=True)
    DF.index.name = 'mirna_id'
    DF.to_csv(out_fn, sep='\t')
| 31.777778
| 166
| 0.660839
|
f566f73ef367d317cd0566765d6ae757d8880743
| 13,330
|
py
|
Python
|
resources/usr/local/lib/python2.7/dist-packages/sklearn/externals/joblib/test/test_memory.py
|
edawson/parliament2
|
2632aa3484ef64c9539c4885026b705b737f6d1e
|
[
"Apache-2.0"
] | null | null | null |
resources/usr/local/lib/python2.7/dist-packages/sklearn/externals/joblib/test/test_memory.py
|
edawson/parliament2
|
2632aa3484ef64c9539c4885026b705b737f6d1e
|
[
"Apache-2.0"
] | null | null | null |
resources/usr/local/lib/python2.7/dist-packages/sklearn/externals/joblib/test/test_memory.py
|
edawson/parliament2
|
2632aa3484ef64c9539c4885026b705b737f6d1e
|
[
"Apache-2.0"
] | 1
|
2020-05-28T23:01:44.000Z
|
2020-05-28T23:01:44.000Z
|
"""
Test the memory module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import pickle
import warnings
import io
import sys
import nose
from ..memory import Memory, MemorizedFunc
from .common import with_numpy, np
###############################################################################
# Module-level variables for the tests
def f(x, y=1):
    """Module-level helper used by the memory tests: returns x**2 + y."""
    return x * x + y
###############################################################################
# Test fixtures
# Shared fixture state: env['dir'] holds the temporary cache directory.
env = dict()
def setup_module():
    """ Test setup.
    Creates a fresh temporary directory path for the Memory cache and
    stores it in the module-level `env` fixture.
    """
    cachedir = mkdtemp()
    #cachedir = 'foobar'
    env['dir'] = cachedir
    if os.path.exists(cachedir):
        shutil.rmtree(cachedir)
    # Don't make the cachedir, Memory should be able to do that on the fly
    print(80 * '_')
    print('test_memory setup')
    print(80 * '_')
def _rmtree_onerror(func, path, excinfo):
print('!' * 79)
print('os function failed: %r' % func)
print('file to be removed: %s' % path)
print('exception was: %r' % excinfo[1])
print('!' * 79)
def teardown_module():
    """ Test teardown.
    Removes the temporary cache directory created by setup_module;
    removal failures are reported (not raised) via _rmtree_onerror.
    """
    shutil.rmtree(env['dir'], False, _rmtree_onerror)
    print(80 * '_')
    print('test_memory teardown')
    print(80 * '_')
###############################################################################
# Helper function for the tests
def check_identity_lazy(func, accumulator):
    """ Given a function and an accumulator (a list that grows every
        time the function is called), check that the function can be
        decorated by memory to be a lazy identity.

        Yields nose-style (assert_func, actual, expected) tuples.
    """
    # Call each function with several arguments, and check that it is
    # evaluated only once per argument.
    memory = Memory(cachedir=env['dir'], verbose=0)
    memory.clear(warn=False)
    func = memory.cache(func)
    for i in range(3):
        for _ in range(2):
            # Second call with the same argument must hit the cache.
            yield nose.tools.assert_equal, func(i), i
        # The accumulator grows by exactly one per distinct argument.
        yield nose.tools.assert_equal, len(accumulator), i + 1
###############################################################################
# Tests
def test_memory_integration():
""" Simple test of memory lazy evaluation.
"""
accumulator = list()
# Rmk: this function has the same name than a module-level function,
# thus it serves as a test to see that both are identified
# as different.
def f(l):
accumulator.append(1)
return l
for test in check_identity_lazy(f, accumulator):
yield test
# Now test clearing
for compress in (False, True):
# We turn verbosity on to smoke test the verbosity code, however,
# we capture it, as it is ugly
try:
# To smoke-test verbosity, we capture stdout
orig_stdout = sys.stdout
orig_stderr = sys.stdout
if sys.version_info[0] == 3:
sys.stderr = io.StringIO()
sys.stderr = io.StringIO()
else:
sys.stdout = io.BytesIO()
sys.stderr = io.BytesIO()
memory = Memory(cachedir=env['dir'], verbose=10, compress=compress)
# First clear the cache directory, to check that our code can
# handle that
# NOTE: this line would raise an exception, as the database file is
# still open; we ignore the error since we want to test what
# happens if the directory disappears
shutil.rmtree(env['dir'], ignore_errors=True)
g = memory.cache(f)
g(1)
g.clear(warn=False)
current_accumulator = len(accumulator)
out = g(1)
finally:
sys.stdout = orig_stdout
sys.stderr = orig_stderr
yield nose.tools.assert_equal, len(accumulator), \
current_accumulator + 1
# Also, check that Memory.eval works similarly
yield nose.tools.assert_equal, memory.eval(f, 1), out
yield nose.tools.assert_equal, len(accumulator), \
current_accumulator + 1
# Now do a smoke test with a function defined in __main__, as the name
# mangling rules are more complex
f.__module__ = '__main__'
memory = Memory(cachedir=env['dir'], verbose=0)
memory.cache(f)(1)
def test_no_memory():
""" Test memory with cachedir=None: no memoize """
accumulator = list()
def ff(l):
accumulator.append(1)
return l
mem = Memory(cachedir=None, verbose=0)
gg = mem.cache(ff)
for _ in range(4):
current_accumulator = len(accumulator)
gg(1)
yield nose.tools.assert_equal, len(accumulator), \
current_accumulator + 1
def test_memory_kwarg():
" Test memory with a function with keyword arguments."
accumulator = list()
def g(l=None, m=1):
accumulator.append(1)
return l
for test in check_identity_lazy(g, accumulator):
yield test
memory = Memory(cachedir=env['dir'], verbose=0)
g = memory.cache(g)
# Smoke test with an explicit keyword argument:
nose.tools.assert_equal(g(l=30, m=2), 30)
def test_memory_lambda():
" Test memory with a function with a lambda."
accumulator = list()
def helper(x):
""" A helper function to define l as a lambda.
"""
accumulator.append(1)
return x
l = lambda x: helper(x)
for test in check_identity_lazy(l, accumulator):
yield test
def test_memory_name_collision():
" Check that name collisions with functions will raise warnings"
memory = Memory(cachedir=env['dir'], verbose=0)
@memory.cache
def name_collision(x):
""" A first function called name_collision
"""
return x
a = name_collision
@memory.cache
def name_collision(x):
""" A second function called name_collision
"""
return x
b = name_collision
if not hasattr(warnings, 'catch_warnings'):
# catch_warnings is new in Python 2.6
return
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
a(1)
b(1)
yield nose.tools.assert_equal, len(w), 1
yield nose.tools.assert_true, "collision" in str(w[-1].message)
def test_memory_warning_lambda_collisions():
# Check that multiple use of lambda will raise collisions
memory = Memory(cachedir=env['dir'], verbose=0)
# For isolation with other tests
memory.clear()
a = lambda x: x
a = memory.cache(a)
b = lambda x: x + 1
b = memory.cache(b)
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
nose.tools.assert_equal(0, a(0))
nose.tools.assert_equal(2, b(1))
nose.tools.assert_equal(1, a(1))
# In recent Python versions, we can retrieve the code of lambdas,
# thus nothing is raised
nose.tools.assert_equal(len(w), 4)
def test_memory_warning_collision_detection():
# Check that collisions impossible to detect will raise appropriate
# warnings.
memory = Memory(cachedir=env['dir'], verbose=0)
# For isolation with other tests
memory.clear()
a1 = eval('lambda x: x')
a1 = memory.cache(a1)
b1 = eval('lambda x: x+1')
b1 = memory.cache(b1)
if not hasattr(warnings, 'catch_warnings'):
# catch_warnings is new in Python 2.6
return
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
a1(1)
b1(1)
a1(0)
yield nose.tools.assert_equal, len(w), 2
yield nose.tools.assert_true, \
"cannot detect" in str(w[-1].message).lower()
def test_memory_partial():
" Test memory with functools.partial."
accumulator = list()
def func(x, y):
""" A helper function to define l as a lambda.
"""
accumulator.append(1)
return y
import functools
function = functools.partial(func, 1)
for test in check_identity_lazy(function, accumulator):
yield test
def test_memory_eval():
" Smoke test memory with a function with a function defined in an eval."
memory = Memory(cachedir=env['dir'], verbose=0)
m = eval('lambda x: x')
mm = memory.cache(m)
yield nose.tools.assert_equal, 1, mm(1)
def count_and_append(x=[]):
    """ A function with a side effect in its arguments.
    Return the length of its argument and append one element.

    NOTE: the mutable default argument is intentional here — the test
    test_argument_change relies on the shared default list growing across
    calls. Do not "fix" it.
    """
    len_x = len(x)
    x.append(None)
    return len_x
def test_argument_change():
    """ Check that if a function has a side effect in its arguments, it
        should use the hash of changing arguments.
    """
    mem = Memory(cachedir=env['dir'], verbose=0)
    func = mem.cache(count_and_append)
    # call the function for the first time, is should cache it with
    # argument x=[]
    assert func() == 0
    # the second time the argument is x=[None], which is not cached
    # yet, so the functions should be called a second time
    assert func() == 1
@with_numpy
def test_memory_numpy():
" Test memory with a function with numpy arrays."
# Check with memmapping and without.
for mmap_mode in (None, 'r'):
accumulator = list()
def n(l=None):
accumulator.append(1)
return l
memory = Memory(cachedir=env['dir'], mmap_mode=mmap_mode,
verbose=0)
memory.clear(warn=False)
cached_n = memory.cache(n)
rnd = np.random.RandomState(0)
for i in range(3):
a = rnd.random_sample((10, 10))
for _ in range(3):
yield nose.tools.assert_true, np.all(cached_n(a) == a)
yield nose.tools.assert_equal, len(accumulator), i + 1
def test_memory_exception():
""" Smoketest the exception handling of Memory.
"""
memory = Memory(cachedir=env['dir'], verbose=0)
class MyException(Exception):
pass
@memory.cache
def h(exc=0):
if exc:
raise MyException
# Call once, to initialise the cache
h()
for _ in range(3):
# Call 3 times, to be sure that the Exception is always raised
yield nose.tools.assert_raises, MyException, h, 1
def test_memory_ignore():
" Test the ignore feature of memory "
memory = Memory(cachedir=env['dir'], verbose=0)
accumulator = list()
@memory.cache(ignore=['y'])
def z(x, y=1):
accumulator.append(1)
yield nose.tools.assert_equal, z.ignore, ['y']
z(0, y=1)
yield nose.tools.assert_equal, len(accumulator), 1
z(0, y=1)
yield nose.tools.assert_equal, len(accumulator), 1
z(0, y=2)
yield nose.tools.assert_equal, len(accumulator), 1
def test_func_dir():
# Test the creation of the memory cache directory for the function.
memory = Memory(cachedir=env['dir'], verbose=0)
path = __name__.split('.')
path.append('f')
path = os.path.join(env['dir'], 'joblib', *path)
g = memory.cache(f)
# Test that the function directory is created on demand
yield nose.tools.assert_equal, g._get_func_dir(), path
yield nose.tools.assert_true, os.path.exists(path)
# Test that the code is stored.
yield nose.tools.assert_false, \
g._check_previous_func_code()
yield nose.tools.assert_true, \
os.path.exists(os.path.join(path, 'func_code.py'))
yield nose.tools.assert_true, \
g._check_previous_func_code()
# Test the robustness to failure of loading previous results.
dir, _ = g.get_output_dir(1)
a = g(1)
yield nose.tools.assert_true, os.path.exists(dir)
os.remove(os.path.join(dir, 'output.pkl'))
yield nose.tools.assert_equal, a, g(1)
def test_persistence():
# Test the memorized functions can be pickled and restored.
memory = Memory(cachedir=env['dir'], verbose=0)
g = memory.cache(f)
output = g(1)
h = pickle.loads(pickle.dumps(g))
output_dir, _ = g.get_output_dir(1)
yield nose.tools.assert_equal, output, h.load_output(output_dir)
memory2 = pickle.loads(pickle.dumps(memory))
yield nose.tools.assert_equal, memory.cachedir, memory2.cachedir
# Smoke test that pickling a memory with cachedir=None works
memory = Memory(cachedir=None, verbose=0)
pickle.loads(pickle.dumps(memory))
def test_format_signature():
# Test the signature formatting.
func = MemorizedFunc(f, cachedir=env['dir'])
path, sgn = func.format_signature(f, list(range(10)))
yield nose.tools.assert_equal, \
sgn, \
'f([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])'
path, sgn = func.format_signature(f, list(range(10)),
y=list(range(10)))
yield nose.tools.assert_equal, \
sgn, \
'f([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], y=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])'
@with_numpy
def test_format_signature_numpy():
""" Test the format signature formatting with numpy.
"""
| 29.041394
| 79
| 0.607652
|
4f36c7a01493d0d5f6aff9be78c03a5961787a71
| 814
|
py
|
Python
|
try_django/views.py
|
filwillian/try_django
|
c9807add932ecf697cf85622fbdbdf6c00b6fd0f
|
[
"MIT"
] | null | null | null |
try_django/views.py
|
filwillian/try_django
|
c9807add932ecf697cf85622fbdbdf6c00b6fd0f
|
[
"MIT"
] | 6
|
2019-12-04T23:51:23.000Z
|
2021-06-10T18:33:24.000Z
|
try_django/views.py
|
filwillian/try_django
|
c9807add932ecf697cf85622fbdbdf6c00b6fd0f
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.models import User
from blog.models import BlogPost
from django.shortcuts import render
from rest_framework import viewsets
from .serializers import UserSerializer, PostSerializer
def homepage(request):
    """Render the site home page."""
    return render(request, 'home.html')


def aboutpage(request):
    """Render the about page."""
    return render(request, 'about.html')


def contactpage(request):
    """Render the contacts page."""
    return render(request, 'contacts.html')
class UserViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows users to be viewed or edited.
    Users are returned newest-joined first.
    """
    queryset = User.objects.all().order_by('-date_joined')
    serializer_class = UserSerializer
class PostViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows blog posts to be viewed or edited.
    """
    queryset = BlogPost.objects.all()
    serializer_class = PostSerializer
| 29.071429
| 58
| 0.746929
|
2967b191e23ab9b47cef930546e02b26cbce4b28
| 47
|
py
|
Python
|
python/fire_rs/__init__.py
|
arthur-bit-monnot/fire-rs-saop
|
321e16fceebf44e8e97b482c24f37fbf6dd7d162
|
[
"BSD-2-Clause"
] | 13
|
2018-11-19T15:51:23.000Z
|
2022-01-16T11:24:21.000Z
|
python/fire_rs/__init__.py
|
fire-rs-laas/fire-rs-saop
|
321e16fceebf44e8e97b482c24f37fbf6dd7d162
|
[
"BSD-2-Clause"
] | 14
|
2017-10-12T16:19:19.000Z
|
2018-03-12T12:07:56.000Z
|
python/fire_rs/__init__.py
|
fire-rs-laas/fire-rs-saop
|
321e16fceebf44e8e97b482c24f37fbf6dd7d162
|
[
"BSD-2-Clause"
] | 4
|
2018-03-12T12:28:55.000Z
|
2021-07-07T18:32:17.000Z
|
# Subpackages re-exported by `from fire_rs import *`.
__all__ = ['firemodel', 'geodata', 'planning']
| 23.5
| 46
| 0.659574
|
449aa57efdd42a91edce7f45950a4eaa584c6ee5
| 3,732
|
py
|
Python
|
robotidy/transformers/ext_AddMissingResources.py
|
josflorap/robotframework-tidy
|
9d4e1ccc6a50c415187468305235830f80f3373b
|
[
"Apache-2.0"
] | null | null | null |
robotidy/transformers/ext_AddMissingResources.py
|
josflorap/robotframework-tidy
|
9d4e1ccc6a50c415187468305235830f80f3373b
|
[
"Apache-2.0"
] | null | null | null |
robotidy/transformers/ext_AddMissingResources.py
|
josflorap/robotframework-tidy
|
9d4e1ccc6a50c415187468305235830f80f3373b
|
[
"Apache-2.0"
] | null | null | null |
from robot.api.parsing import ModelTransformer, get_model, ModelVisitor, Token, ResourceImport
import os, sys
# Module-level state shared between the transformer and the visitors below.
wholekeywordlist = {}      # resource file path -> keyword names it defines (lower-cased)
importedkeywordlist = {}   # already-imported resource path -> keyword names it provides
used_keywords = []         # tokens referenced by the suite currently being transformed
resourcestoadd = []        # '${KEYWORD_PATH}...' imports that must be inserted
class ext_AddMissingResources(ModelTransformer):
    """Robotidy transformer that inserts missing ``Resource`` imports.

    It scans every ``.robot`` file under ``<cwd>/keywords`` for keyword
    definitions, compares them with the keywords used by the suite being
    transformed, and appends a ``Resource`` import (via the
    ``${KEYWORD_PATH}`` variable) for each keyword that is used but not
    yet imported.
    """
    def __init__(self):
        # Unused counter kept for compatibility.
        self.cont = 0
    def visit_File(self, node):
        # Get keywords in resource files
        # NOTE(review): the '\\\\' doubling assumes Windows-style paths —
        # confirm behaviour on POSIX systems.
        for path, subdirs, files in os.walk(os.getcwd().replace('\\', '\\\\') + '\\\\keywords'):
            for name in files:
                if('.robot' in name):
                    model = get_model(os.path.join(path, name))
                    printer = TestNamePrinter()
                    printer.visit(model)
        # Get keywords already imported in the test
        model = get_model(node.source)
        printer = GetImportedResources()
        printer.visit(model)
        # Get keywords used in the test
        model = get_model(node.source)
        printer = KeywordsNamePrinter()
        printer.visit(model)
        # dict.fromkeys preserves order while removing duplicates.
        used_keywords_clean = list(dict.fromkeys(used_keywords))
        for keywordused in used_keywords_clean:
            alreadyImported = False
            for implist in list(importedkeywordlist.values()):
                if keywordused in implist:
                    alreadyImported = True
                    # print(keywordused + ' IS ALREADY IMPORTED')
            if not alreadyImported:
                for wholelist in list(wholekeywordlist.values()):
                    if keywordused in wholelist:
                        # First resource file that defines this keyword wins.
                        key = [k for k, v in wholekeywordlist.items() if keywordused in v][0]
                        # print(keywordused + ' CAN BE IMPORTED FROM ' + key)
                        if('${KEYWORD_PATH}' + key.split('keywords')[1].replace('\\', '/') not in resourcestoadd):
                            resourcestoadd.append('${KEYWORD_PATH}' + key.split('keywords')[1].replace('\\', '/'))
        self.generic_visit(node)
    def visit_SettingSection(self,node):
        # Insert the collected resource imports after the last existing one.
        pos_to_insert = 0
        for i, token in enumerate(node.body):
            if 'ResourceImport' in str(token):
                indent = token.get_token(Token.SEPARATOR)
                pos_to_insert = i + 1
        # NOTE(review): if the section contains no ResourceImport, `indent`
        # is never assigned and the insert below raises NameError — confirm
        # whether that case can occur.
        for resourcetoadd in resourcestoadd:
            node.body.insert(pos_to_insert, ResourceImport.from_params(resourcetoadd, separator=indent.value))
        return node
class TestNamePrinter(ModelVisitor):
    """Collect the lower-cased keyword names defined in one resource file.

    After visiting, the names are stored in the module-level
    ``wholekeywordlist`` under the file's source path.
    """
    def __init__(self):
        self.klist = []
    def visit_File(self, node):
        self.generic_visit(node)
        wholekeywordlist[str(node.source)] = self.klist
    def visit_KeywordName(self, node):
        # print(node.name)
        self.klist.append(node.name.lower())
class KeywordsNamePrinter(ModelVisitor):
    """Record every keyword-call data token into module-level ``used_keywords``.

    NOTE(review): this appends *all* data tokens of a KeywordCall, so call
    arguments are collected alongside keyword names — presumably harmless
    for the later membership checks, but worth confirming.
    """
    def visit_KeywordCall(self, node):
        for token in node.data_tokens:
            used_keywords.append(token.value.lower())
class GetImportedResources(ModelVisitor):
    """Collect keywords provided by resources the suite already imports.

    For every ``Resource`` import whose path uses the ``${KEYWORD_PATH}``
    variable, the referenced resource file is parsed and its keyword names
    are recorded (via ImportedTestNamePrinter) into the module-level
    ``importedkeywordlist``.
    """
    def visit_ResourceImport(self,node):
        if any('${KEYWORD_PATH}' in string for string in [token.value for token in node.data_tokens]):
            for res in node.data_tokens:
                if '${KEYWORD_PATH}' in res.value:
                    # BUGFIX: str.lstrip() treats its argument as a *set of
                    # characters*, not a prefix, so the original
                    # lstrip('${KEYWORD_PATH}') could also eat leading
                    # characters of the real path (any of '$', '{', '}' or
                    # the letters of KEYWORD_PATH). Remove the literal
                    # prefix instead.
                    relative = str(res).replace('${KEYWORD_PATH}', '', 1)
                    path = os.getcwd().replace('\\', '\\\\') + '\\\\keywords' + relative.replace('/', '\\\\')
                    model = get_model(path)
                    printer = ImportedTestNamePrinter()
                    printer.visit(model)
class ImportedTestNamePrinter(ModelVisitor):
    """Collect lower-cased keyword names from an already-imported resource.

    After visiting, the names are stored in the module-level
    ``importedkeywordlist`` under the file's source path.
    """
    def __init__(self):
        self.klist = []
    def visit_File(self, node):
        self.generic_visit(node)
        importedkeywordlist[str(node.source)] = self.klist
    def visit_KeywordName(self, node):
        self.klist.append(node.name.lower())
| 40.565217
| 135
| 0.60209
|
70edc56f6e0704534a1289041d8e292a4d0d53c6
| 227
|
py
|
Python
|
happy_repo/__init__.py
|
przemekkot/python_happy_repo
|
29355a43d6c5d722b2b1eb5820ce56bdda1eccf8
|
[
"MIT"
] | null | null | null |
happy_repo/__init__.py
|
przemekkot/python_happy_repo
|
29355a43d6c5d722b2b1eb5820ce56bdda1eccf8
|
[
"MIT"
] | null | null | null |
happy_repo/__init__.py
|
przemekkot/python_happy_repo
|
29355a43d6c5d722b2b1eb5820ce56bdda1eccf8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Top-level package for happy_repo."""
# BUGFIX: the docstring above originally appeared *after* the import, so it
# was a plain expression statement and module.__doc__ stayed unset (PEP 257
# requires the docstring to be the first statement). Moved it to the top.
from .happy_repo import simple_function

__all__ = ['simple_function', ]
__author__ = """Oren Kot"""
__email__ = 'przemyslaw.kot@gmail.com'
__version__ = '0.1.0'
| 22.7
| 39
| 0.687225
|
e5a053bc5f5239b284402ddc2f75355069f2329f
| 1,501
|
py
|
Python
|
src/server/introducer_peers.py
|
13767849/chia-blockchain
|
ad7d7e0cced7f2f6deddc9e006dbaeee6dab8f66
|
[
"Apache-2.0"
] | null | null | null |
src/server/introducer_peers.py
|
13767849/chia-blockchain
|
ad7d7e0cced7f2f6deddc9e006dbaeee6dab8f66
|
[
"Apache-2.0"
] | null | null | null |
src/server/introducer_peers.py
|
13767849/chia-blockchain
|
ad7d7e0cced7f2f6deddc9e006dbaeee6dab8f66
|
[
"Apache-2.0"
] | null | null | null |
import random
import time
from typing import Dict, List, Optional
from src.types.blockchain_format.sized_bytes import bytes32
from src.types.peer_info import PeerInfo
from src.util.ints import uint64
class IntroducerPeers:
    """
    Has the list of known full node peers that are already connected or may be
    connected to, and the time that they were last added.
    """

    def __init__(self) -> None:
        # Peers in insertion order, plus the timestamp each was first recorded.
        self._peers: List[PeerInfo] = []
        self.time_added: Dict[bytes32, uint64] = {}

    def add(self, peer: Optional[PeerInfo]) -> bool:
        """Record a peer; returns False for a missing peer or missing port."""
        if peer is None or not peer.port:
            return False
        if peer not in self._peers:
            self._peers.append(peer)
            self.time_added[peer.get_hash()] = uint64(int(time.time()))
        return True

    def remove(self, peer: Optional[PeerInfo]) -> bool:
        """Drop a peer from the list; returns True when it was present."""
        if peer is None or not peer.port:
            return False
        try:
            self._peers.remove(peer)
        except ValueError:
            return False
        return True

    def get_peers(self, max_peers: int = 0, randomize: bool = False, recent_threshold=9999999) -> List[PeerInfo]:
        """Return up to ``max_peers`` peers added within ``recent_threshold`` seconds."""
        candidates = [
            peer for peer in self._peers
            if time.time() - self.time_added[peer.get_hash()] < recent_threshold
        ]
        limit = max_peers if max_peers and max_peers <= len(candidates) else len(candidates)
        if randomize:
            random.shuffle(candidates)
        return candidates[:limit]
| 32.630435
| 113
| 0.636909
|
02ae756ba6a3a70f01f3dac7e2e0a77c25bb4914
| 3,288
|
py
|
Python
|
utils.py
|
nicholasneo78/wav2vec2-kenlm
|
08d473322fe879faf397f5558f4bf6066cbcc9ba
|
[
"MIT"
] | 43
|
2021-09-15T05:12:44.000Z
|
2022-03-31T06:49:42.000Z
|
utils.py
|
nicholasneo78/wav2vec2-kenlm
|
08d473322fe879faf397f5558f4bf6066cbcc9ba
|
[
"MIT"
] | 1
|
2021-09-28T15:53:05.000Z
|
2021-09-28T15:53:05.000Z
|
utils.py
|
nicholasneo78/wav2vec2-kenlm
|
08d473322fe879faf397f5558f4bf6066cbcc9ba
|
[
"MIT"
] | 7
|
2021-09-17T18:53:30.000Z
|
2022-03-28T05:36:00.000Z
|
"""
@author
______ _ _
| ____| (_) /\ | |
| |__ __ _ _ __ _ ___ / \ | | __ _ ___ _ __ ___ __ _ _ __ _ _
| __/ _` | '__| / __| / /\ \ | |/ _` / __| '_ ` _ \ / _` | '__| | | |
| | | (_| | | | \__ \ / ____ \| | (_| \__ \ | | | | | (_| | | | |_| |
|_| \__,_|_| |_|___/ /_/ \_\_|\__,_|___/_| |_| |_|\__,_|_| \__, |
__/ |
|___/
Email: farisalasmary@gmail.com
Date: Oct 11, 2021
"""
import torch
import librosa
import ctc_segmentation
def load_audio_files(audio_files_paths):
    """Load a batch of audio files, resampled to 16 kHz.

    Parameters
    ----------
    audio_files_paths : iterable of str
        Paths of the audio files to decode.

    Returns
    -------
    (list, int)
        The decoded waveforms and the common sampling rate (always 16000,
        because librosa is asked to resample with sr=16_000).
    """
    # BUGFIX: initialise sampling_rate up front — the original only assigned
    # it inside the loop, so an empty input list raised UnboundLocalError.
    sampling_rate = 16_000
    batch_audio_files = []
    for audio_file_path in audio_files_paths:
        speech_array, sampling_rate = librosa.load(audio_file_path, sr=16_000)
        batch_audio_files.append(speech_array)
    return batch_audio_files, sampling_rate
def get_logits(batch_audio_files, model, processor, device='cpu'):
    """Run the acoustic model over a batch of 16 kHz waveforms.

    Parameters
    ----------
    batch_audio_files : list
        Decoded waveforms (as produced by ``load_audio_files``).
    model : torch.nn.Module
        Wav2Vec2-style model producing ``.logits``.
    processor :
        Wav2Vec2Processor used to pad/convert the batch.
    device : str
        Torch device to run inference on.

    Returns
    -------
    (torch.Tensor, int)
        The model logits and the padded (common) signal length of the batch.
    """
    processed_batch = processor(batch_audio_files, sampling_rate=16_000, return_tensors="pt", padding=True).input_values
    # FIX: the original moved the tensor to `device` twice in a row; once
    # is enough.
    net_input = processed_batch.to(device)
    model = model.eval().to(device)
    with torch.no_grad():
        logits = model(net_input).logits
    # the Wav2Vec2Processor will pad the batch with the max signal length in
    # the batch so that ALL audio files have the same length
    max_signal_length = processed_batch.shape[1]
    return logits, max_signal_length
def get_segments(logits, decoded_output, max_signal_length, sampling_rate, vocab_list):
    """Compute per-word time alignments via CTC segmentation.

    For each item in the batch, aligns the most probable transcription
    against the CTC log-posteriors and returns, per word, a dict with
    start_time, end_time, duration (seconds) and the word itself.
    """
    # CTC log posteriors inference
    with torch.no_grad():
        softmax = torch.nn.LogSoftmax(dim=-1)
        lpzs = softmax(logits).cpu().numpy()
    batch_segments_list = []
    for i in range(len(decoded_output)):
        lpz = lpzs[i] # select the logits of ith file
        transcription = decoded_output[i][0] # 0 means the most probable transcription
        text = transcription.split()
        # CTC segmentation preparation
        config = ctc_segmentation.CtcSegmentationParameters(char_list=vocab_list)
        # Seconds of audio represented by one logits frame.
        config.index_duration = max_signal_length / lpz.shape[0] / sampling_rate
        # CTC segmentation
        ground_truth_mat, utt_begin_indices = ctc_segmentation.prepare_text(config, text)
        timings, char_probs, state_list = ctc_segmentation.ctc_segmentation(config, lpz, ground_truth_mat)
        segments = ctc_segmentation.determine_utterance_segments(config, utt_begin_indices, char_probs, timings, text)
        segments_list = []
        for word, segment in zip(text, segments):
            # segment is (start_time, end_time, min_avg confidence score).
            start_time, end_time, min_avg = segment
            segment_dict = {
                'start_time': start_time,
                'end_time': end_time,
                'duration': (end_time - start_time),
                'word': word
            }
            segments_list.append(segment_dict)
        batch_segments_list.append(segments_list)
    return batch_segments_list
| 38.682353
| 120
| 0.565998
|
f87197ccac1900c66ba5ec9f6832c6871ffdec18
| 636
|
py
|
Python
|
src/python/pants/backend/codegen/thrift/java/register.py
|
revl/pants
|
8ad83e4ca80c095d44efceafd8b41e575da39c65
|
[
"Apache-2.0"
] | 1
|
2021-05-05T18:58:28.000Z
|
2021-05-05T18:58:28.000Z
|
src/python/pants/backend/codegen/thrift/java/register.py
|
revl/pants
|
8ad83e4ca80c095d44efceafd8b41e575da39c65
|
[
"Apache-2.0"
] | null | null | null |
src/python/pants/backend/codegen/thrift/java/register.py
|
revl/pants
|
8ad83e4ca80c095d44efceafd8b41e575da39c65
|
[
"Apache-2.0"
] | 3
|
2020-06-30T08:28:13.000Z
|
2021-07-28T09:35:57.000Z
|
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.backend.codegen.thrift.java.apache_thrift_java_gen import ApacheThriftJavaGen
from pants.backend.codegen.thrift.java.java_thrift_library import JavaThriftLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.task_registrar import TaskRegistrar as task
def build_file_aliases():
    """Expose the `java_thrift_library` target alias to BUILD files."""
    targets = {"java_thrift_library": JavaThriftLibrary}
    return BuildFileAliases(targets=targets)
def register_goals():
    # Install the Apache Thrift Java generator task under the `gen` goal.
    task(name="thrift-java", action=ApacheThriftJavaGen).install("gen")
| 39.75
| 88
| 0.825472
|
9ec247e268dcb511469f43c74abf312dae6ffce9
| 404
|
py
|
Python
|
episimmer/location.py
|
healthbadge/episimmer
|
fcb3f7df812be045e2a6d031cac42080ad850d60
|
[
"BSD-3-Clause"
] | 16
|
2021-04-26T14:52:32.000Z
|
2022-01-22T07:13:06.000Z
|
episimmer/location.py
|
healthbadge/episimmer
|
fcb3f7df812be045e2a6d031cac42080ad850d60
|
[
"BSD-3-Clause"
] | 34
|
2021-05-21T12:53:24.000Z
|
2022-02-09T16:30:40.000Z
|
episimmer/location.py
|
healthbadge/episimmer
|
fcb3f7df812be045e2a6d031cac42080ad850d60
|
[
"BSD-3-Clause"
] | 4
|
2021-04-08T07:52:06.000Z
|
2021-05-29T05:58:15.000Z
|
class Location():
    """A single location in the epidemic simulation.

    Tracks the events scheduled at this location during the current time
    step and whether the location is currently locked down.
    """

    def __init__(self, info_dict):
        # Keep the raw info dict and pull out the index used for lookups.
        self.info = info_dict
        self.index = info_dict['Location Index']
        self.lock_down_state = False
        self.events = []

    def new_time_step(self):
        """Reset per-time-step state: lift any lockdown and clear events."""
        self.events = []
        self.lock_down_state = False

    def add_event(self, event_info):
        """Register an event at this location unless it is locked down."""
        if self.lock_down_state:
            return
        self.events.append(event_info)
| 26.933333
| 48
| 0.616337
|
334af6bfbfdf26d382f0228655a20eab40a95654
| 7,889
|
py
|
Python
|
workflows/management/commands/export_package.py
|
xflows/clowdflows
|
697b36ebc976d1ba4ab726bda2fc4593422af080
|
[
"MIT"
] | 38
|
2015-11-21T08:16:14.000Z
|
2021-06-22T16:14:12.000Z
|
workflows/management/commands/export_package.py
|
chimeng089/clowdflows
|
e19bf57906e893d8f0be93329168b76eae758384
|
[
"MIT"
] | 21
|
2015-12-29T16:34:48.000Z
|
2022-03-11T23:14:48.000Z
|
workflows/management/commands/export_package.py
|
chimeng089/clowdflows
|
e19bf57906e893d8f0be93329168b76eae758384
|
[
"MIT"
] | 26
|
2016-01-11T17:51:07.000Z
|
2022-02-24T11:49:40.000Z
|
from unicodedata import category
from django.core.management.base import BaseCommand, CommandError
from workflows.models import Category, AbstractWidget, AbstractInput, AbstractOutput, AbstractOption
from django.core import serializers
from optparse import make_option
import uuid
import os
import sys
from django.conf import settings
from django.core.management.color import color_style
import json
def add_category(category, categories):
    """Add the pk of `category` and of all its ancestors to `categories`."""
    node = category
    categories.add(node.pk)
    # Walk up the parent chain instead of recursing.
    while node.parent:
        node = node.parent
        categories.add(node.pk)
def ensure_dir(directory):
    """Create `directory` (and its parents) if it does not exist yet."""
    if os.path.exists(directory):
        return
    os.makedirs(directory)
def choice(choices,question="Your choice: "):
    """Interactively ask the user to pick one item from `choices`.

    Prints a numbered menu, reads a number from stdin (Python 2
    ``raw_input``) and returns the chosen item; re-prompts on invalid
    input.
    """
    choice = None
    while 1:
        if not choice:
            input_msg = ""
            for i in range(0,len(choices)):
                input_msg += "["+str(i)+"] "+str(choices[i])+"\n"
            choice_number = raw_input(input_msg + question)
            try:
                choice = choices[int(choice_number)]
                return choice
            except:
                # Non-numeric input or out-of-range index: ask again.
                sys.stderr.write("Error: Wrong choice.\n")
def serialize_widget(aw):
    """Serialize an AbstractWidget plus its inputs/outputs/options to JSON dicts.

    Database pks and the owning user are stripped; foreign keys are replaced
    by the stable ``uid`` fields so the result is portable between databases.
    Returns a flat list: [widget, *inputs, *outputs, *options].
    (Python 2 code: uses dict.has_key().)
    """
    data = json.loads(serializers.serialize("json",[aw,]))[0]
    if data.has_key('pk'):
        data.pop('pk')
    if data['fields'].has_key('user'):
        data['fields'].pop('user')
    if not data['fields']['category'] is None:
        data['fields']['category'] = aw.category.uid
    input_data = json.loads(serializers.serialize("json",aw.inputs.all().order_by('uid')))
    for i in input_data:
        if i.has_key('pk'):
            i.pop('pk')
        # Reference the parent widget by uid instead of database pk.
        i['fields']['widget']=aw.uid
    output_data = json.loads(serializers.serialize("json",aw.outputs.all().order_by('uid')))
    for i in output_data:
        if i.has_key('pk'):
            i.pop('pk')
        i['fields']['widget']=aw.uid
    options_data = json.loads(serializers.serialize("json",AbstractOption.objects.filter(abstract_input__widget=aw).order_by('uid')))
    for o in options_data:
        if o.has_key('pk'):
            o.pop('pk')
        o['fields']['abstract_input']=AbstractInput.objects.get(id=o['fields']['abstract_input']).uid
    return [data,]+input_data+output_data+options_data
def serialize_category(c):
    """Serialize a Category to a portable JSON dict.

    Drops the pk, user and workflow fields and replaces the parent foreign
    key with the parent's ``uid``. (Python 2 code: uses dict.has_key().)
    """
    data = json.loads(serializers.serialize("json",[c,]))[0]
    if data.has_key('pk'):
        data.pop('pk')
    if not data['fields']['parent'] is None:
        c2 = Category.objects.get(id=data['fields']['parent'])
        data['fields']['parent'] = c2.uid
    if data['fields'].has_key('workflow'):
        data['fields'].pop('workflow')
    if data['fields'].has_key('user'):
        data['fields'].pop('user')
    return data
def export_package(package_name,writer,dest_folder=None):
    """Export all widgets and categories of `package_name` to JSON files.

    Writes one JSON file per widget/category under the package's
    ``package_data`` directory (or under `dest_folder` for external
    packages), skipping files whose content is unchanged. `writer` is a
    file-like object used for progress output (e.g. a Command's stdout).
    Raises CommandError on inconsistent arguments or unknown packages.
    """
    style = color_style()
    external = package_name in settings.INSTALLED_APPS_EXTERNAL_PACKAGES
    if external and not dest_folder:
        raise CommandError("You must provide a destination folder when exporting external packages.")
    if not external and dest_folder:
        raise CommandError("You can't use a custom destination folder when exporting local packages.")
    if 'workflows.'+package_name not in settings.INSTALLED_APPS and not external:
        raise CommandError("Package not found in INSTALLED_APPS.")
    #here we check the integrity of the package
    # Duplicate uids would overwrite each other's export files, so the user
    # must pick which widget gets a fresh uid.
    aws = AbstractWidget.objects.filter(package=package_name)
    for aw in aws:
        if aw.uid:
            for bw in aws:
                if bw.uid == aw.uid and bw.id != aw.id:
                    writer.write("Found two widgets with the same UID. Please select a widget to assign new UID to.\n")
                    selected_widget = choice([aw,bw],"Select a widget: ")
                    selected_widget.set_uid(commit=True)
    #first we check if package_data directory exists and make it if it doesn't
    if external:
        package_directory = os.path.join(dest_folder,'package_data')
    else:
        package_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)),'../../'+package_name+"/package_data/")
    ensure_dir(package_directory)
    widgets_directory = os.path.join(package_directory,"widgets")
    deprecated_widgets_directory = os.path.join(package_directory,"deprecated_widgets")
    ensure_dir(widgets_directory)
    categories_directory = os.path.join(package_directory,"categories")
    ensure_dir(categories_directory)
    writer.write(" > Ensuring package directory for "+package_name+".\n")
    categories = set()
    writer.write(" > Exporting widgets\n")
    global_change = False
    for aw in aws:
        aw.update_uid()
        # Widgets marked deprecated on disk are never re-exported.
        if os.path.isfile(os.path.join(deprecated_widgets_directory,aw.uid+'.json')):
            writer.write(style.ERROR(" - Deprecated widget "+str(aw)+" found! Please import package to remove it. This widget has NOT been exported.\n"))
            continue
        add_category(aw.category,categories)
        serialized_widget = serialize_widget(aw)
        created = True
        change = True
        # Compare against the existing export (if any) to skip no-op writes.
        try:
            widget_file = open(os.path.join(widgets_directory,aw.uid+'.json'),'r')
            created = False
            w_data = json.loads(widget_file.read())
            widget_file.close()
            if w_data == serialized_widget:
                change = False
        except:
            created = True
            change = True
        if change:
            global_change = True
            if created:
                writer.write(" + Exporting widget "+str(aw)+"\n")
            else:
                writer.write(" + Updating widget "+str(aw)+"\n")
            widget_data = json.dumps(serialized_widget,indent=2)
            widget_file = open(os.path.join(widgets_directory,aw.uid+'.json'),'w')
            widget_file.write(widget_data)
            widget_file.close()
    if not global_change:
        writer.write(" No changes in the widgets detected!\n")
    writer.write(" > Exporting categories\n")
    global_change = False
    # Export every category referenced (transitively) by the widgets above.
    for category in categories:
        c = Category.objects.get(id=category)
        c.update_uid()
        data = serialize_category(c)
        created = True
        change = True
        try:
            category_file = open(os.path.join(categories_directory,c.uid+'.json'),'r')
            created = False
            c_data = json.loads(category_file.read())
            category_file.close()
            if c_data == data:
                change = False
        except:
            created = True
            change = True
        if change:
            global_change = True
            if created:
                writer.write(" + Exporting category "+str(c)+"\n")
            else:
                writer.write(" + Updating category "+str(c)+"\n")
            category_data = json.dumps(data,indent=2)
            category_file = open(os.path.join(categories_directory,c.uid+'.json'),'w')
            category_file.write(category_data)
            category_file.close()
    if not global_change:
        writer.write(" No changes in the categories detected!\n")
class Command(BaseCommand):
    """Management command: export a workflows package to JSON files."""
    args = 'package_name [external_destination_folder]'
    help = 'Exports the package "package_name".'
    def handle(self, *args, **options):
        # Validate positional args, then delegate to export_package().
        if len(args) < 1:
            raise CommandError('Argument "package_name" is required.')
        dest_folder = None
        if len(args) == 2:
            dest_folder = args[1]
        package_name = args[0]
        writer = self.stdout
        export_package(package_name,writer,dest_folder=dest_folder)
        writer.write('Thanks for using the new export command. You rock.\n')
| 37.746411
| 158
| 0.604259
|
07fbe50e32198ee47703f0ffdc7c9317d07ffab1
| 2,559
|
py
|
Python
|
videointelligence/synth.py
|
Kami/google-cloud-python
|
a14ffbaa50f7823c2792e91413a37cbc3ce687f5
|
[
"Apache-2.0"
] | 1
|
2019-06-14T10:11:59.000Z
|
2019-06-14T10:11:59.000Z
|
videointelligence/synth.py
|
Kami/google-cloud-python
|
a14ffbaa50f7823c2792e91413a37cbc3ce687f5
|
[
"Apache-2.0"
] | null | null | null |
videointelligence/synth.py
|
Kami/google-cloud-python
|
a14ffbaa50f7823c2792e91413a37cbc3ce687f5
|
[
"Apache-2.0"
] | 1
|
2020-04-14T10:47:41.000Z
|
2020-04-14T10:47:41.000Z
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp

# Generator/template helpers provided by synthtool.
gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()

# Every API version we generate a GAPIC surface for.
versions = ["v1beta1", "v1beta2", "v1p1beta1", "v1p2beta1", "v1p3beta1", "v1"]

# ----------------------------------------------------------------------------
# Generate videointelligence GAPIC layer
# ----------------------------------------------------------------------------
for version in versions:
    library = gapic.py_library(
        "videointelligence",
        version,
        artman_output_name=f"video-intelligence-{version}",
        include_protos=True,
    )

    # TODO: stop excluding tests and nox.py (excluded as we lack system tests)
    s.move(
        library,
        excludes=[
            "setup.py",
            "nox*.py",
            "README.rst",
            "docs/index.rst",
            f"tests/system/gapic/{version}/"
            f"test_system_video_intelligence_service_{version}.py",
            # f'tests/unit/gapic/{version}/'
            # f'test_video_intelligence_service_client_{version}.py',
        ],
    )

    # The generated client advertises the wrong distribution name; fix it.
    s.replace(
        f"google/cloud/videointelligence_{version}/gapic/"
        f"*video_intelligence_service_client.py",
        "google-cloud-video-intelligence",
        "google-cloud-videointelligence",
    )

# FIX: use raw strings for the regex pattern / group references — the
# original relied on Python leaving unknown escape sequences like "\s" and
# "\(" intact, which raises DeprecationWarning on modern interpreters.
# The literal "\n" (a real newline in the replacement) stays non-raw.
s.replace(
    "tests/unit/gapic/**/test_video_intelligence_service_client_*.py",
    r"^(\s+)expected_request = video_intelligence_pb2.AnnotateVideoRequest\(\)",
    r"\g<1>expected_request = video_intelligence_pb2.AnnotateVideoRequest(" "\n"
    r"\g<1> input_uri=input_uri, features=features)",
)

# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
templated_files = common.py_library(unit_cov_level=97, cov_level=100)
s.move(templated_files)

s.shell.run(["nox", "-s", "blacken"], hide_output=False)
| 35.541667
| 79
| 0.601407
|
3fe783de5731d486fe7aa1a4987e5218b07150b9
| 6,500
|
py
|
Python
|
src/evalutate_test.py
|
juliandarley/neural-punctuator
|
2b3ff7e052380ec463b90a74c6960e1e90515c05
|
[
"MIT"
] | 31
|
2021-01-15T11:31:07.000Z
|
2022-03-31T14:56:25.000Z
|
src/evalutate_test.py
|
juliandarley/neural-punctuator
|
2b3ff7e052380ec463b90a74c6960e1e90515c05
|
[
"MIT"
] | 8
|
2021-02-05T13:17:17.000Z
|
2021-09-03T03:01:05.000Z
|
src/evalutate_test.py
|
juliandarley/neural-punctuator
|
2b3ff7e052380ec463b90a74c6960e1e90515c05
|
[
"MIT"
] | 10
|
2021-01-29T18:18:07.000Z
|
2022-03-09T23:13:50.000Z
|
import os
from glob import glob
from tqdm.notebook import tqdm
import matplotlib.pyplot as plt
from neural_punctuator.utils.data import get_config_from_yaml
from neural_punctuator.models.BertPunctuator import BertPunctuator
import torch
from torch.utils.data import DataLoader
from tqdm import tqdm
from neural_punctuator.data.dataloader import collate, get_data_loaders, get_datasets
from neural_punctuator.models.BertPunctuator import BertPunctuator
from torch.optim import AdamW
from torch import nn
from neural_punctuator.utils.io import save, load
from neural_punctuator.utils.metrics import get_total_grad_norm, get_eval_metrics
import numpy as np
import pickle
from torch.utils.data import Dataset, DataLoader
from itertools import product
def load_scores(model_path):
    """Return the `metrics` entry stored in a model checkpoint file."""
    return torch.load(model_path)['metrics']
def get_strict_f_score(report):
    """Mean F1 over the period/question/comma classes of a `cls_report`."""
    per_class = report['cls_report']
    total = 0.0
    for label in ('period', 'question', 'comma'):
        total += float(per_class[label]['f1-score'])
    return total / 3
def best_epoch_by_f_score(metrics):
    """Return (epoch index, score) of the highest strict F-score.

    Ties keep the earliest epoch (strict '>' comparison).
    """
    leader = 0
    leader_score = metrics[0]['strict_f_score']
    for idx in range(1, len(metrics)):
        score = metrics[idx]['strict_f_score']
        if score > leader_score:
            leader, leader_score = idx, score
    return leader, leader_score
def best_epoch_by_loss(metrics):
    """Return (epoch index, loss) of the lowest validation loss.

    Ties keep the earliest epoch (strict '<' comparison).
    """
    leader = 0
    leader_loss = metrics[0]['loss']
    for idx in range(1, len(metrics)):
        loss = metrics[idx]['loss']
        if loss < leader_loss:
            leader, leader_loss = idx, loss
    return leader, leader_loss
# NOTE(review): DEAD CODE — this first `combine` is immediately shadowed by
# the second `def combine(...)` below and is never callable. Kept only for
# reference; consider deleting it.
def combine(pred_num, all_valid_preds):
    """(Superseded) average overlapping window predictions in probability space."""
    # Take one window per prediction offset.
    relevant_preds = all_valid_preds[::pred_num]
    ps = []
    for i in range(relevant_preds.shape[0]): # +512//pred_num-1):
        # ps.append(relevant_preds[i, :pred_num])
        start_idx = max(0, i - 512 // pred_num + 1)
        end_idx = min(relevant_preds.shape[0], i + 1)
        p = []
        for j, k in enumerate(range(start_idx, end_idx)):
            j = end_idx - start_idx - j - 1
            # print(k, j, relevant_preds[k][j*pred_num:(j+1)*pred_num].mean())
            p.append(relevant_preds[k][j * pred_num:(j + 1) * pred_num])
        # print()
        p = np.stack(p)
        if p.shape[0] > 2:
            p = p[1:-1, :, :]
        # Average in probability space, return to log space.
        ps.append(np.log(np.exp(p).mean(0)))
    ps = np.concatenate(ps)
    return ps
def combine(pred_num, preds):
    """Average overlapping sliding-window predictions in probability space.

    `preds` holds windows of length 512 taken every 512 // pred_num tokens;
    the `pred_num` shifted views are aligned, trimmed to their common span,
    averaged after exponentiation and returned in log space.
    """
    step = 512 // pred_num
    channels = preds.shape[-1]
    usable_rows = (preds.shape[0] - (pred_num - 1) * 2) * step
    aligned = []
    for offset in range(pred_num):
        flat = preds[offset::pred_num].reshape(-1, channels)
        start = (pred_num - offset - 1) * step
        aligned.append(flat[start:start + usable_rows])
    stacked = np.stack(aligned)
    return np.log(np.exp(stacked).mean(0))
class BertDataset(Dataset):
    """Sliding-window dataset over a tokenized, pickled corpus.

    Loads ``<data_path><prefix>_data.pkl`` (a pickle of ``(texts, targets)``
    nested lists), flattens them, and pads both ends with one full window
    (512 tokens of id 0 / target -1) so that every ``predict_step`` offset
    yields a valid window of ``seq_len`` tokens.
    """
    def __init__(self, prefix, config, is_train=False):
        self.config = config
        self.is_train = is_train
        with open(self.config.data.data_path + prefix + "_data.pkl", 'rb') as f:
            texts, targets = pickle.load(f)
        # Pad with one full window on each side; -1 marks padding targets.
        self.encoded_texts = 512 * [0] + [word for t in texts for word in t] + 512 * [0]
        self.targets = 512 * [-1] + [t for ts in targets for t in ts] + 512 * [-1]

    def __getitem__(self, idx):
        # FIX: removed a leftover `if idx == 164: pass` debugging stub.
        start_idx = (1 + idx) * self.config.model.predict_step
        end_idx = start_idx + self.config.model.seq_len
        return torch.LongTensor(self.encoded_texts[start_idx: end_idx]), \
               torch.LongTensor(self.targets[start_idx: end_idx])

    def __len__(self):
        # Number of predict_step-sized strides over the unpadded text.
        return int(np.ceil((len(self.encoded_texts) - 1024) // self.config.model.predict_step))
def evaluate_multiple_predictions(model_name, model_type, predict_step, device):
    """Evaluate a trained punctuator on the test set with overlapping windows.

    Picks the best checkpoint epoch for `model_name` (by F-score or by loss,
    per `model_type`), runs inference with windows strided by `predict_step`,
    merges the overlapping predictions via `combine`, and returns the
    evaluation report from `get_eval_metrics`.

    NOTE(review): reads the module-level `metrics` dict populated in the
    __main__ block below — this function is not usable standalone.
    """
    print(model_name, model_type)
    if model_type == 'by_f_score':
        epoch, _ = best_epoch_by_f_score(metrics[model_name])
    elif model_type == 'by_loss':
        epoch, _ = best_epoch_by_loss(metrics[model_name])
    else:
        raise ValueError("Model type not valid, options: by_f_score/by_loss")
    config = get_config_from_yaml(f'neural_punctuator/configs/config-{model_name}-unfreeze.yaml')
    # Checkpoint files are 1-based; `epoch` is a 0-based index.
    config.trainer.load_model = f"{model_name}-epoch-{epoch + 1}.pth"
    config.model.predict_step = predict_step
    config.predict.batch_size = 128
    model = BertPunctuator(config)
    model.to(device)
    load(model, None, config)
    test_dataset = BertDataset("test", config)
    test_loader = DataLoader(test_dataset, batch_size=config.predict.batch_size, collate_fn=collate)
    model.eval()
    all_test_preds = []
    for data in tqdm(test_loader):
        text, targets = data
        with torch.no_grad():
            preds, _ = model(text.to(device))
        all_test_preds.append(preds.detach().cpu().numpy())
    # Strip the 512-token padding added by BertDataset on each side.
    all_test_target = test_dataset.targets[512:-512]
    all_test_preds = np.concatenate(all_test_preds)
    pred_num = config.model.seq_len // config.model.predict_step
    ps = combine(pred_num, all_test_preds)
    # Drop positions whose target is the -1 padding marker.
    _targets = np.array(all_test_target[:ps.shape[0]])
    ps = ps[_targets != -1]
    _targets = _targets[_targets != -1]
    report = get_eval_metrics(_targets, ps, config)
    return report
if __name__ == "__main__":
    # Hard-coded experiment setup: locate checkpoints, load cached metrics,
    # then evaluate one model with overlapping-window prediction merging.
    data_path = "/userhome/student/bial/neural-punctuator/models/"
    model_names = ["bert-base-uncased", "bert-base-cased", "albert-base-v1"]
    files = {}
    for model_name in model_names:
        # Checkpoints sorted by modification time == training order.
        f = sorted(glob(data_path + f"{model_name}-epoch*.*"), key=os.path.getmtime)
        files[model_name] = f
    # metrics = {}
    # for model_name in model_names:
    #     m = []
    #     for file in tqdm(files[model_name]):
    #         m.append(load_scores(file))
    #     metrics[model_name] = m
    # with open('metrics.pkl', 'wb') as f:
    #     pickle.dump(metrics, f)
    # Metrics were extracted once (commented-out block above) and cached.
    with open('metrics.pkl', 'rb') as f:
        metrics = pickle.load(f)
    for _, m in metrics.items():
        for epoch in m:
            epoch['strict_f_score'] = get_strict_f_score(epoch)
    device = torch.device('cuda:0')
    torch.cuda.set_device(device)
    # for model_name, model_type in product(model_names, ('by_loss', 'by_f_score')):
    model_name = "albert-base-v1"
    model_type = "by_f_score"
    # 4 overlapping predictions per token -> stride of 128.
    pred_num_for_token = 4
    predict_step = 512 // pred_num_for_token
    report = evaluate_multiple_predictions(model_name, model_type, predict_step, device)
| 31.862745
| 120
| 0.660615
|
8b777a64796d64b6656ae65886b40556e55a0cd8
| 3,297
|
py
|
Python
|
Views/UserContributedManagementView.py
|
JHP4911/PyDoc
|
34cadcd2ebce134171d1522d9c499a720d8f311f
|
[
"MIT"
] | 1
|
2019-12-02T01:07:59.000Z
|
2019-12-02T01:07:59.000Z
|
Views/UserContributedManagementView.py
|
tdamdouni/PyDoc
|
34cadcd2ebce134171d1522d9c499a720d8f311f
|
[
"MIT"
] | null | null | null |
Views/UserContributedManagementView.py
|
tdamdouni/PyDoc
|
34cadcd2ebce134171d1522d9c499a720d8f311f
|
[
"MIT"
] | null | null | null |
import ui
class UserContributedManagementView (object):
    """Pythonista `ui.TableView` delegate/data source listing user-contributed
    docsets, with a per-row download/delete button.

    `self.data` rows are objects exposing .name, .status, .stats,
    .authorName, .image and .path — presumably docset descriptors; confirm
    against the caller of refresh_view().
    """
    def __init__(self, download_action, refresh_main_view, delete_action, refresh_usercontributed_action):
        self.data = []
        self.delete_action = delete_action
        self.download_action = download_action
        self.refresh_main_view = refresh_main_view
        self.refresh_usercontributed_action = refresh_usercontributed_action
    def tableview_did_select(self, tableview, section, row):
        pass
    def tableview_number_of_sections(self, tableview):
        return 1
    def tableview_number_of_rows(self, tableview, section):
        return len(self.data)
    def tableview_cell_for_row(self, tableview, section, row):
        # Build a subtitle-style cell: name, status (or live download stats),
        # contributor, optional icon, and a trailing action button.
        status = self.data[row].status
        cell = ui.TableViewCell('subtitle')
        cell.text_label.text = self.data[row].name
        if not status == 'downloading':
            cell.detail_text_label.text = status
        else:
            cell.detail_text_label.text = self.data[row].stats
        cell.detail_text_label.text = cell.detail_text_label.text + ' - Contributed by ' + self.data[row].authorName
        if not self.data[row].image == None:
            cell.image_view.image = self.data[row].image
        iv = self.__getDetailButtonForStatus(status, cell.height, self.action, self.data[row])
        iv.x = cell.content_view.width - (iv.width * 1.5)
        iv.y = (cell.content_view.height) - (iv.height * 1.05)
        iv.flex = 'L'
        cell.content_view.add_subview(iv)
        cell.selectable = False
        return cell
    def __getDetailImageForStatus(self, status):
        # Cloud-download icon for downloadable states, close icon otherwise.
        if status == 'online' or status == 'updateAvailable':
            return 'iob:ios7_cloud_download_outline_24'
        else:
            return 'iob:ios7_close_outline_24'
    def __getDetailButtonForStatus(self, status, height, action, row):
        # Button scaled to the cell height, keeping the icon aspect ratio.
        img = ui.Image.named(self.__getDetailImageForStatus(status))
        button = ui.Button()
        button.image = img
        size = img.size
        ratio = size.y / size.x
        button.height = height * 0.9
        button.width = button.height * ratio
        ca = CustomAction(button)
        ca.action = self.action
        ca.row = row
        button.action = ca
        return button
    def refresh_all_views(self):
        self.refresh_main_view()
        d = self.refresh_usercontributed_action()
        refresh_view(d)
    def action(self, sender):
        # A set row.path means the docset is installed -> delete; otherwise
        # start a download.
        if not sender.action.row.path == None:
            self.delete_action(sender.action.row, self.refresh_all_views)
            sender.action.row.path = None
        else:
            self.download_action(sender.action.row, self.refresh, self.refresh_all_views)
    def refresh(self):
        # NOTE(review): uses tv.reload() while refresh_view() uses
        # tv.reload_data() — confirm both exist on ui.TableView.
        tv.reload()
class CustomAction(object):
    """Callable wrapper binding a ui control to an action and a data row."""

    def __init__(self, parent):
        self.obj = parent
        self.row = None
        # Placeholder callback; callers overwrite `action` after creation.
        self.action = self.real_action

    def __call__(self, sender):
        # Delegate so the instance itself can serve as a ui action callback.
        return self.action(sender)

    def real_action(self, sender):
        print('Did you need to set the action?')
tv = ui.TableView()
def get_view(download_action, refresh_all_views, delete_action, refresh_usercontributed_action):
    """Configure and return the shared docset table view.

    Sizes the module-level `tv` to the screen and wires a
    UserContributedManagementView as its delegate and data source.
    """
    w,h = ui.get_screen_size()
    tv.width = w
    tv.height = h
    tv.flex = 'WH'
    tv.name = 'User Contributed Docsets'
    data = UserContributedManagementView(download_action, refresh_all_views, delete_action, refresh_usercontributed_action)
    tv.delegate = data
    tv.data_source = data
    return tv
def refresh_view(data):
    """Replace the table's backing rows and redraw the shared `tv`."""
    tv.data_source.data = data
    tv.reload_data()
if __name__ == '__main__':
    # Demo / smoke test.
    # BUGFIX: the original called get_view() with a single list argument,
    # but get_view() requires four callables, so the demo raised TypeError
    # before presenting. Supply no-op callbacks and push the sample rows
    # through refresh_view() instead.
    def _noop(*args, **kwargs):
        return []
    view = get_view(_noop, _noop, _noop, _noop)
    refresh_view([{'name': 'test', 'status': 'online'},
                  {'name': 'test2', 'status': 'downloaded'}])
    view.present()
| 31.103774
| 120
| 0.737944
|
ab1b700b8f48773db1a7d03f2c6e041dfe2ece3c
| 2,940
|
py
|
Python
|
mvpa2/testing/regress.py
|
nno/PyMVPA
|
a125596bf81b8e9848768852f697bd3cff9674c4
|
[
"MIT"
] | 227
|
2015-01-17T20:13:54.000Z
|
2022-01-26T21:14:30.000Z
|
mvpa2/testing/regress.py
|
nno/PyMVPA
|
a125596bf81b8e9848768852f697bd3cff9674c4
|
[
"MIT"
] | 364
|
2015-01-05T21:55:09.000Z
|
2021-09-09T20:37:55.000Z
|
mvpa2/testing/regress.py
|
nno/PyMVPA
|
a125596bf81b8e9848768852f697bd3cff9674c4
|
[
"MIT"
] | 111
|
2015-01-06T19:26:41.000Z
|
2022-01-26T21:14:31.000Z
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Convinience functions to generate/update datasets for regression testing
"""
__docformat__ = 'restructuredtext'
from os.path import join as pathjoin
import hashlib
import mvpa2
from mvpa2 import pymvpa_dataroot, externals
def get_testing_fmri_dataset_filename():
    """Generate path to the testing filename based on mvpa2/nibabel versions
    """
    # explicitly so we do not anyhow depend on dict ordering
    # BUGFIX: hashlib.md5() requires bytes on Python 3 — encode the joined
    # version string (UTF-8 of this ASCII text hashes identically to the
    # Python 2 str it replaced).
    versions_hash = hashlib.md5(
        "_".join(["%s:%s" % (k, externals.versions[k])
                  for k in sorted(externals.versions)]).encode('utf-8')
    ).hexdigest()[:6]
    filename = 'mvpa-%s_nibabel-%s-%s.hdf5' % (
        mvpa2.__version__,
        externals.versions['nibabel'],
        versions_hash)
    return pathjoin(pymvpa_dataroot, 'testing', 'fmri_dataset', filename)
get_testing_fmri_dataset_filename.__test__ = False
def generate_testing_fmri_dataset(filename=None):
"""Helper to generate a dataset for regression testing of mvpa2/nibabel
Parameters
----------
filename : str
Filename of a dataset file to store. If not provided, it is composed
using :func:`get_testing_fmri_dataset_filename`
Returns
-------
Dataset, string
Generated dataset, filename to the HDF5 where it was stored
"""
import mvpa2
from mvpa2.base.hdf5 import h5save
from mvpa2.datasets.sources import load_example_fmri_dataset
# Load our sample dataset
ds_full = load_example_fmri_dataset(name='1slice', literal=False)
# Subselect a small "ROI"
ds = ds_full[20:23, 10:14]
# collect all versions/dependencies for possible need to troubleshoot later
ds.a['wtf'] = mvpa2.wtf()
ds.a['versions'] = mvpa2.externals.versions
# save to a file identified by version of PyMVPA and nibabel and hash of
# all other versions
out_filename = filename or get_testing_fmri_dataset_filename()
h5save(out_filename, ds, compression=9)
# ATM it produces >700kB .hdf5 which is this large because of
# the ds.a.mapper with both Flatten and StaticFeatureSelection occupying
# more than 190kB each, with ds.a.mapper as a whole generating 570kB file
# Among those .ca seems to occupy notable size, e.g. 130KB for the FlattenMapper
# even though no heavy storage is really needed for any available value --
# primarily all is meta-information embedded into hdf5 to describe our things
return ds, out_filename
generate_testing_fmri_dataset.__test__ = False
if __name__ == '__main__':
generate_testing_fmri_dataset()
| 36.296296
| 84
| 0.67483
|
0d24b71c50b03f5f3df533b944e0299c6f203ec5
| 5,699
|
py
|
Python
|
panel/pane/vega.py
|
Jacob-Barhak/panel
|
04cad38ea703e4e69fb76f063a27f4ffe40688e8
|
[
"BSD-3-Clause"
] | 1
|
2021-06-21T19:10:01.000Z
|
2021-06-21T19:10:01.000Z
|
panel/pane/vega.py
|
Jacob-Barhak/panel
|
04cad38ea703e4e69fb76f063a27f4ffe40688e8
|
[
"BSD-3-Clause"
] | 2
|
2022-01-13T03:54:51.000Z
|
2022-03-12T01:01:00.000Z
|
panel/pane/vega.py
|
Jacob-Barhak/panel
|
04cad38ea703e4e69fb76f063a27f4ffe40688e8
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
import param
import numpy as np
from bokeh.models import ColumnDataSource
from pyviz_comms import JupyterComm
from ..viewable import Layoutable
from ..util import lazy_load, string_types
from .base import PaneBase
def ds_as_cds(dataset):
"""
Converts Vega dataset into Bokeh ColumnDataSource data
"""
if len(dataset) == 0:
return {}
data = {k: [] for k, v in dataset[0].items()}
for item in dataset:
for k, v in item.items():
data[k].append(v)
data = {k: np.asarray(v) for k, v in data.items()}
return data
class Vega(PaneBase):
"""
Vega panes allow rendering Vega plots and traces.
For efficiency any array objects found inside a Figure are added
to a ColumnDataSource which allows using binary transport to sync
the figure on bokeh server and via Comms.
"""
margin = param.Parameter(default=(5, 5, 30, 5), doc="""
Allows to create additional space around the component. May
be specified as a two-tuple of the form (vertical, horizontal)
or a four-tuple (top, right, bottom, left).""")
priority = 0.8
_updates = True
@classmethod
def is_altair(cls, obj):
if 'altair' in sys.modules:
import altair as alt
return isinstance(obj, alt.api.TopLevelMixin)
return False
@classmethod
def applies(cls, obj):
if isinstance(obj, dict) and 'vega' in obj.get('$schema', '').lower():
return True
return cls.is_altair(obj)
@classmethod
def _to_json(cls, obj):
if isinstance(obj, dict):
json = dict(obj)
if 'data' in json:
data = json['data']
if isinstance(data, dict):
json['data'] = dict(data)
elif isinstance(data, list):
json['data'] = [dict(d) for d in data]
return json
return obj.to_dict()
def _get_sources(self, json, sources):
datasets = json.get('datasets', {})
for name in list(datasets):
if name in sources or isinstance(datasets[name], dict):
continue
data = datasets.pop(name)
if isinstance(data, list) and any(isinstance(d, dict) and 'geometry' in d for d in data):
# Handle geometry records types
datasets[name] = data
continue
columns = set(data[0]) if data else []
if self.is_altair(self.object):
import altair as alt
if (not isinstance(self.object.data, (alt.Data, alt.UrlData, type(alt.Undefined))) and
columns == set(self.object.data)):
data = ColumnDataSource.from_df(self.object.data)
else:
data = ds_as_cds(data)
sources[name] = ColumnDataSource(data=data)
else:
sources[name] = ColumnDataSource(data=ds_as_cds(data))
data = json.get('data', {})
if isinstance(data, dict):
data = data.pop('values', {})
if data:
sources['data'] = ColumnDataSource(data=ds_as_cds(data))
elif isinstance(data, list):
for d in data:
if 'values' in d:
sources[d['name']] = ColumnDataSource(data=ds_as_cds(d.pop('values')))
@classmethod
def _get_dimensions(cls, json, props):
if json is None:
return
if 'config' in json and 'view' in json['config']:
size_config = json['config']['view']
else:
size_config = json
view = {}
for w in ('width', 'continuousWidth'):
if w in size_config:
view['width'] = size_config[w]
for h in ('height', 'continuousHeight'):
if h in size_config:
view['height'] = size_config[h]
for p in ('width', 'height'):
if p not in view or isinstance(view[p], string_types):
continue
if props.get(p) is None or p in view and props.get(p) < view[p]:
v = view[p]
props[p] = v+22 if isinstance(v, int) else v
responsive_height = json.get('height') == 'container'
responsive_width = json.get('width') == 'container'
if responsive_height and responsive_width:
props['sizing_mode'] = 'stretch_both'
elif responsive_width:
props['sizing_mode'] = 'stretch_width'
elif responsive_height:
props['sizing_mode'] = 'stretch_height'
def _get_model(self, doc, root=None, parent=None, comm=None):
VegaPlot = lazy_load('panel.models.vega', 'VegaPlot', isinstance(comm, JupyterComm))
sources = {}
if self.object is None:
json = None
else:
json = self._to_json(self.object)
self._get_sources(json, sources)
props = self._process_param_change(self._init_params())
self._get_dimensions(json, props)
model = VegaPlot(data=json, data_sources=sources, **props)
if root is None:
root = model
self._models[root.ref['id']] = (model, parent)
return model
def _update(self, ref=None, model=None):
if self.object is None:
json = None
else:
json = self._to_json(self.object)
self._get_sources(json, model.data_sources)
props = {p : getattr(self, p) for p in list(Layoutable.param)
if getattr(self, p) is not None}
self._get_dimensions(json, props)
props['data'] = json
model.update(**props)
| 34.539394
| 102
| 0.565011
|
2690a552b7e02105ba110b4fa9324c31f1340e79
| 5,736
|
py
|
Python
|
gen_images.py
|
kromond/stylegan3
|
26593fc289e7ed3ae0219614a16917ffa5fe811f
|
[
"BSD-Source-Code"
] | 47
|
2021-10-12T07:53:24.000Z
|
2022-03-26T15:47:34.000Z
|
gen_images.py
|
kromond/stylegan3
|
26593fc289e7ed3ae0219614a16917ffa5fe811f
|
[
"BSD-Source-Code"
] | null | null | null |
gen_images.py
|
kromond/stylegan3
|
26593fc289e7ed3ae0219614a16917ffa5fe811f
|
[
"BSD-Source-Code"
] | 14
|
2021-10-11T22:24:22.000Z
|
2022-03-31T14:08:39.000Z
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
"""Generate images using pretrained network pickle."""
import os
import re
from typing import List, Optional, Tuple, Union
import click
import dnnlib
import numpy as np
import PIL.Image
import torch
import legacy
#----------------------------------------------------------------------------
def parse_range(s: Union[str, List]) -> List[int]:
'''Parse a comma separated list of numbers or ranges and return a list of ints.
Example: '1,2,5-10' returns [1, 2, 5, 6, 7]
'''
if isinstance(s, list): return s
ranges = []
range_re = re.compile(r'^(\d+)-(\d+)$')
for p in s.split(','):
m = range_re.match(p)
if m:
ranges.extend(range(int(m.group(1)), int(m.group(2))+1))
else:
ranges.append(int(p))
return ranges
#----------------------------------------------------------------------------
def parse_vec2(s: Union[str, Tuple[float, float]]) -> Tuple[float, float]:
'''Parse a floating point 2-vector of syntax 'a,b'.
Example:
'0,1' returns (0,1)
'''
if isinstance(s, tuple): return s
parts = s.split(',')
if len(parts) == 2:
return (float(parts[0]), float(parts[1]))
raise ValueError(f'cannot parse 2-vector {s}')
#----------------------------------------------------------------------------
def make_transform(translate: Tuple[float,float], angle: float):
m = np.eye(3)
s = np.sin(angle/360.0*np.pi*2)
c = np.cos(angle/360.0*np.pi*2)
m[0][0] = c
m[0][1] = s
m[0][2] = translate[0]
m[1][0] = -s
m[1][1] = c
m[1][2] = translate[1]
return m
#----------------------------------------------------------------------------
@click.command()
@click.option('--network', 'network_pkl', help='Network pickle filename', required=True)
@click.option('--seeds', type=parse_range, help='List of random seeds (e.g., \'0,1,4-6\')', required=True)
@click.option('--trunc', 'truncation_psi', type=float, help='Truncation psi', default=1, show_default=True)
@click.option('--class', 'class_idx', type=int, help='Class label (unconditional if not specified)')
@click.option('--noise-mode', help='Noise mode', type=click.Choice(['const', 'random', 'none']), default='const', show_default=True)
@click.option('--translate', help='Translate XY-coordinate (e.g. \'0.3,1\')', type=parse_vec2, default='0,0', show_default=True, metavar='VEC2')
@click.option('--rotate', help='Rotation angle in degrees', type=float, default=0, show_default=True, metavar='ANGLE')
@click.option('--outdir', help='Where to save the output images', type=str, required=True, metavar='DIR')
def generate_images(
network_pkl: str,
seeds: List[int],
truncation_psi: float,
noise_mode: str,
outdir: str,
translate: Tuple[float,float],
rotate: float,
class_idx: Optional[int]
):
"""Generate images using pretrained network pickle.
Examples:
\b
# Generate an image using pre-trained AFHQv2 model ("Ours" in Figure 1, left).
python gen_images.py --outdir=out --trunc=1 --seeds=2 \\
--network=https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/stylegan3-r-afhqv2-512x512.pkl
\b
# Generate uncurated images with truncation using the MetFaces-U dataset
python gen_images.py --outdir=out --trunc=0.7 --seeds=600-605 \\
--network=https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/stylegan3-t-metfacesu-1024x1024.pkl
"""
print('Loading networks from "%s"...' % network_pkl)
device = torch.device('cuda')
with dnnlib.util.open_url(network_pkl) as f:
G = legacy.load_network_pkl(f)['G_ema'].to(device) # type: ignore
os.makedirs(outdir, exist_ok=True)
# Labels.
label = torch.zeros([1, G.c_dim], device=device)
if G.c_dim != 0:
if class_idx is None:
raise click.ClickException('Must specify class label with --class when using a conditional network')
label[:, class_idx] = 1
else:
if class_idx is not None:
print ('warn: --class=lbl ignored when running on an unconditional network')
# Generate images.
for seed_idx, seed in enumerate(seeds):
print('Generating image for seed %d (%d/%d) ...' % (seed, seed_idx, len(seeds)))
z = torch.from_numpy(np.random.RandomState(seed).randn(1, G.z_dim)).to(device)
# Construct an inverse rotation/translation matrix and pass to the generator. The
# generator expects this matrix as an inverse to avoid potentially failing numerical
# operations in the network.
if hasattr(G.synthesis, 'input'):
m = make_transform(translate, rotate)
m = np.linalg.inv(m)
G.synthesis.input.transform.copy_(torch.from_numpy(m))
img = G(z, label, truncation_psi=truncation_psi, noise_mode=noise_mode)
img = (img.permute(0, 2, 3, 1) * 127.5 + 128).clamp(0, 255).to(torch.uint8)
PIL.Image.fromarray(img[0].cpu().numpy(), 'RGB').save(f'{outdir}/seed{seed:04d}.png')
#----------------------------------------------------------------------------
if __name__ == "__main__":
generate_images() # pylint: disable=no-value-for-parameter
#----------------------------------------------------------------------------
| 39.287671
| 144
| 0.605997
|
bd5fe1c76ed2274ffc8f6801bd85e12d6cbede66
| 2,703
|
py
|
Python
|
tests/mesh/test_grd.py
|
pmav99/pyschism
|
b533d29b881da6cd4f408e91a4749f86451c042a
|
[
"Apache-2.0"
] | 17
|
2020-02-02T09:48:20.000Z
|
2022-02-02T19:28:58.000Z
|
tests/mesh/test_grd.py
|
pmav99/pyschism
|
b533d29b881da6cd4f408e91a4749f86451c042a
|
[
"Apache-2.0"
] | 20
|
2020-03-04T13:40:22.000Z
|
2022-02-10T15:30:42.000Z
|
tests/mesh/test_grd.py
|
pmav99/pyschism
|
b533d29b881da6cd4f408e91a4749f86451c042a
|
[
"Apache-2.0"
] | 12
|
2020-03-04T09:54:57.000Z
|
2022-02-10T00:14:25.000Z
|
#! /usr/bin/env python
import unittest
import pathlib
import tempfile
from pyschism.mesh.grd import reader, writer, euclidean_mesh
class GrdTestCase(unittest.TestCase):
def setUp(self):
self.nodes = {
'1': ((0., 0.), -99999.),
'2': ((.5, 0.), -99999.),
'3': ((1., 0.), -99999.),
'4': ((1., 1.), -99999.),
'5': ((0., 1.), -99999.),
'6': ((.5, 1.5), -99999.),
'7': ((.33, .33), -99999.),
'8': ((.66, .33), -99999.),
'9': ((.5, .66), -99999.),
'10': ((-1., 1.), -99999.),
'11': ((-1., 0.), -99999.),
}
self.elements = {
'1': ['5', '7', '9'],
'2': ['1', '2', '7'],
'3': ['2', '3', '8'],
'4': ['8', '7', '2'],
'5': ['3', '4', '8'],
'6': ['4', '9', '8'],
'7': ['4', '6', '5'],
'8': ['5', '10', '11', '1'],
'9': ['9', '4', '5'],
'10': ['5', '1', '7']
}
self.boundaries = dict()
self.boundaries[None] = { # "open" boundaries
0: {'indexes': ['10', '11', '1', '2']},
1: {'indexes': ['2', '3', '4']}
}
self.boundaries[0] = { # "land" boundaries
0: {'indexes': ['4', '6']},
1: {'indexes': ['6', '5', '10']}
}
self.boundaries[1] = { # "interior" boundary
0: {'indexes': ['7', '8', '9', '7']}
}
self.grd = {
'nodes': self.nodes,
'elements': self.elements,
'boundaries': self.boundaries,
'description': 'grd_unittest'
}
def test_write_read(self):
tmpdir = tempfile.TemporaryDirectory()
tmpfile = pathlib.Path(tmpdir.name) / 'hgrid.grd'
writer(self.grd, pathlib.Path(tmpfile))
self.assertDictEqual(reader(pathlib.Path(tmpfile)), self.grd)
def test_overwrite(self):
tmpdir = tempfile.TemporaryDirectory()
writer(self.grd, pathlib.Path(tmpdir.name) / 'hgrid.grd')
self.assertRaises(
Exception,
writer,
self.grd,
pathlib.Path(tmpdir.name) / 'hgrid.grd'
)
def test_no_ocean_bnd(self):
tmpdir = tempfile.TemporaryDirectory()
tmpfile = pathlib.Path(tmpdir.name) / 'hgrid.grd'
self.grd['boundaries'].pop(None)
writer(self.grd, pathlib.Path(tmpfile))
self.assertDictEqual(reader(pathlib.Path(tmpfile)), self.grd)
def test_euclidean_mesh(self):
self.assertIsInstance(euclidean_mesh(self.grd), dict)
if __name__ == '__main__':
unittest.main()
| 30.370787
| 69
| 0.443951
|
76fa95c0c5b806b1c61f6abc88175d717a6af33e
| 42
|
py
|
Python
|
blotter/__init__.py
|
matthewgilbert/blotter
|
067cc55bd4b55252f4d8de160703884ff01b9391
|
[
"MIT"
] | 16
|
2017-06-06T10:11:02.000Z
|
2022-03-28T02:24:29.000Z
|
blotter/__init__.py
|
altfund/blotter
|
067cc55bd4b55252f4d8de160703884ff01b9391
|
[
"MIT"
] | null | null | null |
blotter/__init__.py
|
altfund/blotter
|
067cc55bd4b55252f4d8de160703884ff01b9391
|
[
"MIT"
] | 7
|
2017-06-23T15:51:35.000Z
|
2020-10-18T10:00:46.000Z
|
from ._version import __version__ # NOQA
| 21
| 41
| 0.785714
|
996f276f6a87761bb61ad151764cddd7184cecef
| 9,061
|
py
|
Python
|
sdk/python/pulumi_azure_native/hybridcompute/v20200815preview/get_machine_extension.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/hybridcompute/v20200815preview/get_machine_extension.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/hybridcompute/v20200815preview/get_machine_extension.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetMachineExtensionResult',
'AwaitableGetMachineExtensionResult',
'get_machine_extension',
]
@pulumi.output_type
class GetMachineExtensionResult:
"""
Describes a Machine Extension.
"""
def __init__(__self__, auto_upgrade_minor_version=None, force_update_tag=None, id=None, instance_view=None, location=None, name=None, protected_settings=None, provisioning_state=None, publisher=None, settings=None, tags=None, type=None, type_handler_version=None):
if auto_upgrade_minor_version and not isinstance(auto_upgrade_minor_version, bool):
raise TypeError("Expected argument 'auto_upgrade_minor_version' to be a bool")
pulumi.set(__self__, "auto_upgrade_minor_version", auto_upgrade_minor_version)
if force_update_tag and not isinstance(force_update_tag, str):
raise TypeError("Expected argument 'force_update_tag' to be a str")
pulumi.set(__self__, "force_update_tag", force_update_tag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if instance_view and not isinstance(instance_view, dict):
raise TypeError("Expected argument 'instance_view' to be a dict")
pulumi.set(__self__, "instance_view", instance_view)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if protected_settings and not isinstance(protected_settings, dict):
raise TypeError("Expected argument 'protected_settings' to be a dict")
pulumi.set(__self__, "protected_settings", protected_settings)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if publisher and not isinstance(publisher, str):
raise TypeError("Expected argument 'publisher' to be a str")
pulumi.set(__self__, "publisher", publisher)
if settings and not isinstance(settings, dict):
raise TypeError("Expected argument 'settings' to be a dict")
pulumi.set(__self__, "settings", settings)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if type_handler_version and not isinstance(type_handler_version, str):
raise TypeError("Expected argument 'type_handler_version' to be a str")
pulumi.set(__self__, "type_handler_version", type_handler_version)
@property
@pulumi.getter(name="autoUpgradeMinorVersion")
def auto_upgrade_minor_version(self) -> Optional[bool]:
"""
Indicates whether the extension should use a newer minor version if one is available at deployment time. Once deployed, however, the extension will not upgrade minor versions unless redeployed, even with this property set to true.
"""
return pulumi.get(self, "auto_upgrade_minor_version")
@property
@pulumi.getter(name="forceUpdateTag")
def force_update_tag(self) -> Optional[str]:
"""
How the extension handler should be forced to update even if the extension configuration has not changed.
"""
return pulumi.get(self, "force_update_tag")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="instanceView")
def instance_view(self) -> Optional['outputs.MachineExtensionPropertiesResponseInstanceView']:
"""
The machine extension instance view.
"""
return pulumi.get(self, "instance_view")
@property
@pulumi.getter
def location(self) -> str:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="protectedSettings")
def protected_settings(self) -> Optional[Any]:
"""
The extension can contain either protectedSettings or protectedSettingsFromKeyVault or no protected settings at all.
"""
return pulumi.get(self, "protected_settings")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state, which only appears in the response.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def publisher(self) -> Optional[str]:
"""
The name of the extension handler publisher.
"""
return pulumi.get(self, "publisher")
@property
@pulumi.getter
def settings(self) -> Optional[Any]:
"""
Json formatted public settings for the extension.
"""
return pulumi.get(self, "settings")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="typeHandlerVersion")
def type_handler_version(self) -> Optional[str]:
"""
Specifies the version of the script handler.
"""
return pulumi.get(self, "type_handler_version")
class AwaitableGetMachineExtensionResult(GetMachineExtensionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetMachineExtensionResult(
auto_upgrade_minor_version=self.auto_upgrade_minor_version,
force_update_tag=self.force_update_tag,
id=self.id,
instance_view=self.instance_view,
location=self.location,
name=self.name,
protected_settings=self.protected_settings,
provisioning_state=self.provisioning_state,
publisher=self.publisher,
settings=self.settings,
tags=self.tags,
type=self.type,
type_handler_version=self.type_handler_version)
def get_machine_extension(extension_name: Optional[str] = None,
name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetMachineExtensionResult:
"""
Describes a Machine Extension.
:param str extension_name: The name of the machine extension.
:param str name: The name of the machine containing the extension.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['extensionName'] = extension_name
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:hybridcompute/v20200815preview:getMachineExtension', __args__, opts=opts, typ=GetMachineExtensionResult).value
return AwaitableGetMachineExtensionResult(
auto_upgrade_minor_version=__ret__.auto_upgrade_minor_version,
force_update_tag=__ret__.force_update_tag,
id=__ret__.id,
instance_view=__ret__.instance_view,
location=__ret__.location,
name=__ret__.name,
protected_settings=__ret__.protected_settings,
provisioning_state=__ret__.provisioning_state,
publisher=__ret__.publisher,
settings=__ret__.settings,
tags=__ret__.tags,
type=__ret__.type,
type_handler_version=__ret__.type_handler_version)
| 40.09292
| 268
| 0.670456
|
643c2168fd6ad8701d8daa06bd874ea68bc9e819
| 944
|
py
|
Python
|
logRegProfiler/predict.py
|
tv-vicomtech/deviceCharacterisationMethods
|
1351da43b2fd469ff9c910ef2dcda9c2bd7f559a
|
[
"Apache-2.0"
] | null | null | null |
logRegProfiler/predict.py
|
tv-vicomtech/deviceCharacterisationMethods
|
1351da43b2fd469ff9c910ef2dcda9c2bd7f559a
|
[
"Apache-2.0"
] | null | null | null |
logRegProfiler/predict.py
|
tv-vicomtech/deviceCharacterisationMethods
|
1351da43b2fd469ff9c910ef2dcda9c2bd7f559a
|
[
"Apache-2.0"
] | null | null | null |
from sklearn.externals import joblib
import csv
cls = joblib.load('model.h5')
fo = open("userAgents5.csv", "r")
lines=fo.readlines()
vec = joblib.load('vec_count.joblib')
with open('results_logRegfinal.csv', mode='w') as results_file:
results_writer = csv.writer(results_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
correct = 0
cnt = 0
for x in range(1,len(lines)):
cnt += 1
if x <= 1000:
real_val = 'Mobile Phone'
elif x <= 2000:
real_val = 'Tablet'
elif x <= 3000:
real_val = 'Desktop'
else:
real_val = 'TV Device'
userAgent = lines[x]
features = vec.transform(
[userAgent.lower()]
)
features_userAgent = features.toarray()
pred = cls.predict(features_userAgent)[0]
results_writer.writerow([x, pred])
if real_val == pred:
correct += 1
#print(cls.predict(features_userAgent))
print('Model Logistic Regression - Accuracy on test data = {0:.4f}'.format(correct / cnt))
| 26.222222
| 99
| 0.680085
|
7f4a05ed45d16d76dc0e85bc54bc8197fb1c6e5d
| 6,267
|
py
|
Python
|
rant/build.py
|
lrvick/rant
|
1b2990448269f27d330b8df8d860429cf92b2529
|
[
"Apache-2.0"
] | 5
|
2016-07-09T13:18:36.000Z
|
2020-10-29T21:46:32.000Z
|
rant/build.py
|
lrvick/rant
|
1b2990448269f27d330b8df8d860429cf92b2529
|
[
"Apache-2.0"
] | 6
|
2016-07-11T23:50:10.000Z
|
2016-10-20T01:46:24.000Z
|
rant/build.py
|
lrvick/rant
|
1b2990448269f27d330b8df8d860429cf92b2529
|
[
"Apache-2.0"
] | 2
|
2016-07-11T17:05:09.000Z
|
2016-10-17T19:14:10.000Z
|
from io import open
import os
import re
import yaml
import time
import logging
from datetime import datetime
from fnmatch import fnmatch
from jinja2 import Environment, FileSystemLoader
from rant.parse import Parser
from distutils.dir_util import copy_tree
class Builder(object):
"""Generate web-ready static files from templates/data/config"""
def __init__(self, source_dir='.', dest_dir='./deploy'):
self._source_dir = source_dir
self._dest_dir = dest_dir
with open('%s/config.yml' % source_dir, 'r') as fh:
self.config = yaml.load(fh)
fh.close()
self._page_files = self._find_source_files('page')
self._post_files = self._find_source_files('post')
self._per_page = self.config['paginate']
self._navigation = self._get_navigation()
self._env = Environment(
loader=FileSystemLoader('%s/layouts/' % self._source_dir)
)
def _find_source_files(self, layout):
source_files = []
file_names = os.listdir('%s/%ss' % (self._source_dir, layout))
for file_name in file_names:
if fnmatch(file_name, '*.md'):
full_filename = '%s/%ss/%s' % (self._source_dir,
layout,
file_name)
source_files.append(full_filename)
return source_files
def _get_navigation(self):
navigation = ['blog']
for filepath in self._page_files:
filename = os.path.split(filepath)[1]
nav_item = filename.split('.')[0].replace('_', ' ').lower()
navigation.append(nav_item)
return navigation
def _render_html(self, context):
template = self._env.get_template('%s.html' % context['layout'])
current_page = context['permalink']
if context['layout'] == 'post':
current_page = 'blog'
context['config'] = self.config
context['navigation'] = self._navigation
context['current_page'] = current_page
return template.render(context)
def _write_file(self, content, permalink, filename='index.html'):
save_folder = '%s/%s' % (self._dest_dir, permalink)
if not os.path.isdir(save_folder):
os.makedirs(save_folder)
filepath = "%s/%s" % (save_folder, filename)
with open(filepath, 'w', 1) as save_fh:
save_fh.write(content)
save_fh.close()
logging.info("-> '%s'" % filepath)
def _gen_contexts(self, filenames):
contexts = []
for filename in filenames:
context = Parser(filename).parse()
if context is None:
break
context['rendered_html'] = self._render_html(context)
contexts.append(context)
contexts.sort(key=lambda c: c['date'])
return contexts
def _write_contexts(self, contexts):
for context in contexts:
self._write_file(context['rendered_html'], context['permalink'])
def _render_blog_index_page(self, page_posts, page_num):
post_count = len(self._post_files)
total_index_pages = int(round(post_count / self._per_page, 0))
index_template = self._env.get_template('blog_index.html')
rendered_page = index_template.render(
config=self.config,
page_posts=page_posts,
total_pages=total_index_pages,
page_num=page_num,
navigation=self._navigation,
current_page='blog',
)
return rendered_page
def _write_blog_index_page(self, page_posts, page_num):
rendered_page = self._render_blog_index_page(page_posts, page_num)
if page_num == 1:
self._write_file(rendered_page, '')
self._write_file(rendered_page, 'blog')
self._write_file(
rendered_page,
'blog/pages/%s' % page_num
)
def _write_blog_index(self, posts):
index_posts = []
processed = 0
page_num = 1
for post in posts:
index_posts.append(post)
processed += 1
if len(index_posts) == self._per_page or processed == len(posts):
self._write_blog_index_page(index_posts, page_num)
page_num += 1
index_posts = []
def _write_feed(self, schema, posts):
template = self._env.get_template('%s.xml' % schema)
rendered_feed = template.render(
config=self.config,
posts=posts,
current_date=datetime.fromtimestamp(time.time()),
)
self._write_file(rendered_feed, 'blog', '%s.xml' % schema)
def _write_sitemap(self, posts, pages):
template = self._env.get_template('sitemap.xml')
rendered_feed = template.render(
config=self.config,
posts=posts,
pages=pages,
current_date=datetime.fromtimestamp(time.time()),
)
self._write_file(rendered_feed, '', 'sitemap.xml')
def _copy_static(self):
copy_tree("%s/static" % self._source_dir, self._dest_dir)
def build(self):
start_time = time.time()
logging.info("\nGenerating Pages...")
logging.info(("="*50))
page_contexts = self._gen_contexts(self._page_files)
self._write_contexts(page_contexts)
logging.info("\nGenerating Posts...")
logging.info(("="*50))
post_contexts = self._gen_contexts(self._post_files)
self._write_contexts(post_contexts)
logging.info("\nGenerating Blog Index...")
logging.info(("="*50))
self._write_blog_index(post_contexts)
logging.info("\nGenerating Feeds...")
logging.info(("="*50))
self._write_feed('atom', post_contexts)
self._write_feed('rss', post_contexts)
logging.info("\nGenerating Sitemap...")
logging.info(("="*50))
self._write_sitemap(post_contexts, page_contexts)
logging.info("\nCopying Static Files...")
logging.info(("="*50))
total_time = round(time.time() - start_time, 2)
logging.info("\nGeneration Completed in %s seconds" % total_time)
self._copy_static()
| 35.40678
| 77
| 0.602042
|
e2e8bbd0ff6bdd1b14089292bae61896e3c2fef4
| 470
|
py
|
Python
|
pasta/app/models.py
|
andreassjoberg/pasta
|
2214c6c70b97cf5b758bd263eb3b808850fb69f1
|
[
"MIT"
] | 1
|
2019-02-06T07:48:07.000Z
|
2019-02-06T07:48:07.000Z
|
pasta/app/models.py
|
andreassjoberg/pasta
|
2214c6c70b97cf5b758bd263eb3b808850fb69f1
|
[
"MIT"
] | null | null | null |
pasta/app/models.py
|
andreassjoberg/pasta
|
2214c6c70b97cf5b758bd263eb3b808850fb69f1
|
[
"MIT"
] | null | null | null |
"""
Definition of models.
"""
import datetime
from django.db import models
class ProgramStat(models.Model):
program_name = models.CharField(max_length=64)
program_started_datetime = models.DateTimeField()
program_ended_datetime = models.DateTimeField()
customer_name = models.CharField(max_length=64)
def __str__(self):
return '%s %s (%s)' % (self.customer_name, self.program_name, self.program_started_datetime.strftime("%Y-%m-%d %H:%M"))
| 31.333333
| 127
| 0.729787
|
75f0465833d55efe92fcd107a423c7d3111d15d7
| 926
|
py
|
Python
|
weaved.py
|
hrishikesh195/noburglar
|
af78d826c16aac0c0199aa74d39ee8b2bfd24049
|
[
"BSD-2-Clause"
] | null | null | null |
weaved.py
|
hrishikesh195/noburglar
|
af78d826c16aac0c0199aa74d39ee8b2bfd24049
|
[
"BSD-2-Clause"
] | null | null | null |
weaved.py
|
hrishikesh195/noburglar
|
af78d826c16aac0c0199aa74d39ee8b2bfd24049
|
[
"BSD-2-Clause"
] | null | null | null |
"""Wrapper for operations to the Weaved Smart Plug"""
import os
import logging
# Until the IR blaster can be used through a REST API, we just use SSH for everything;
# This can then be changed to do REST calls
#
# Assumes key based authentication is already set up
class Plug(object):
    """Wrapper for operations on a Weaved smart plug, driven over SSH.

    Each operation shells out to ``ssh user@ip <cmd>`` via ``os.system``
    and returns its exit status (0 on success).  Assumes key-based SSH
    authentication is already set up for ``user`` on the plug.
    """

    def __init__(self, ip='192.168.1.201', user='root'):
        self.ip = ip
        self.user = user

    def power_on(self):
        """Switch the relay on; returns the ssh exit status."""
        return self.run_ssh("weavediot_relay_on.sh")

    def power_off(self):
        """Switch the relay off; returns the ssh exit status."""
        return self.run_ssh("weavediot_relay_off.sh")

    def send_ir_code(self, code):
        """Transmit raw IR *code* via the plug's serial-attached blaster."""
        # The quoted compound command configures the serial port first.
        return self.run_ssh("'stty -F /dev/ttyS0 115200; ir tx " + code + "'")

    def run_ssh(self, cmd):
        """Run *cmd* on the plug over ssh and return the exit status.

        FIX: the original used the Python-2-only backtick repr syntax
        (`` `cmd` ``) and logged "Running command" only *after* the command
        had already run; use ``repr()`` and log before executing.
        """
        logging.debug('Running command - ' + repr(cmd))
        ret = os.system('ssh ' + self.user + '@' + self.ip + ' ' + cmd)
        if ret != 0:
            logging.error('Command ' + repr(cmd) + ' failed: ' + repr(ret))
        return ret
| 29.870968
| 86
| 0.614471
|
1eda8af5ba775e59f1ad69934de18bfd3aa67053
| 1,703
|
py
|
Python
|
opt/modems.py
|
SEL-Columbia/easygsm
|
f0185dee7c55aa0330754002202a586756fb4ecf
|
[
"Apache-2.0"
] | 1
|
2020-08-13T14:44:41.000Z
|
2020-08-13T14:44:41.000Z
|
opt/modems.py
|
SEL-Columbia/easygsm
|
f0185dee7c55aa0330754002202a586756fb4ecf
|
[
"Apache-2.0"
] | null | null | null |
opt/modems.py
|
SEL-Columbia/easygsm
|
f0185dee7c55aa0330754002202a586756fb4ecf
|
[
"Apache-2.0"
] | null | null | null |
# opt/modems.py
import sys, os.path
sys.path.append(os.path.abspath('.')) # really, nothing better?
from serialmodem.lib.protocol import SerialModemProtocol
class Telit(SerialModemProtocol):
    """Telit GSM modem configured for North American quad-band operation."""

    # AT commands issued once at startup to bring the modem to a known state.
    init_sequence = [
        "ATE0",                # command echo disabled
        "AT",
        "AT+CMEE=2",           # verbose debug
        "AT#BND=3",            # North America
        "AT#AUTOBND=2",        # quad-band
        "AT+CNMI=2,1,0,0,0"    # setup sms indicator
    ]

    def __init__(self, mode=1, gsc=None, ssc=None):
        SerialModemProtocol.__init__(
            self,
            self.init_sequence,
            mode,
            got_sms_callback=gsc,
            sent_sms_callback=ssc,
            process_unread_messages=False,
            delete_existing_messages=True,
            stagger_sends=False,
            send_interval=(0, 0),
        )
class Multitech(SerialModemProtocol):
    """Multitech GSM modem with a minimal init sequence."""

    init_sequence = [
        "ATE0",               # command echo disabled
        "AT",
        "AT+CNMI=2,1,0,0,0"   # setup message indicator
    ]

    def __init__(self, mode=1, gsc=None, ssc=None):
        # BUG FIX: the original passed ``mode=1`` to the superclass,
        # silently discarding the caller-supplied ``mode`` argument.
        SerialModemProtocol.__init__(
            self,
            self.init_sequence,
            mode=mode,
            got_sms_callback=gsc,
            sent_sms_callback=ssc,
            stagger_sends=False,
            send_interval=(4, 10),
        )
| 34.755102
| 67
| 0.439812
|
b621d1e6f9838cc1916573c8c0ee8645e223ef90
| 663
|
py
|
Python
|
ex42.py
|
SuMonHlaing/python-exercises
|
367315f76917a053e8854be14559061616b26d73
|
[
"MIT"
] | null | null | null |
ex42.py
|
SuMonHlaing/python-exercises
|
367315f76917a053e8854be14559061616b26d73
|
[
"MIT"
] | null | null | null |
ex42.py
|
SuMonHlaing/python-exercises
|
367315f76917a053e8854be14559061616b26d73
|
[
"MIT"
] | null | null | null |
# Class-hierarchy exercise: animals, people, and fish.

class Animal(object):
    """Root of the pet hierarchy."""
    pass


class Dog(Animal):
    """A dog, identified by its name."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """A cat, identified by its name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """A person who may own a single pet (``pet`` starts as None)."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """A person who also earns a salary."""

    def __init__(self, name, salary):
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    """Root of the fish hierarchy."""
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


# Build a small object graph exercising the classes above.
rover = Dog("Rover")
satan = Cat("Satan")

mary = Person("Mary")
mary.pet = satan

frank = Employee("Frank", 120000)
frank.pet = rover

flipper = Fish()
crouse = Salmon()
harry = Halibut()
| 10.359375
| 39
| 0.665158
|
e5db3aac8a04235b88d29fcb61375b3b7345ccd0
| 117,432
|
py
|
Python
|
gui/qt/main_window.py
|
RdeWilde/ion-electrum-new
|
7d83d0a2316080ece628448dce879cdbe64a5c6c
|
[
"MIT"
] | 1
|
2018-02-19T01:23:59.000Z
|
2018-02-19T01:23:59.000Z
|
gui/qt/main_window.py
|
rdewilde-ion/ion-electrum
|
7d83d0a2316080ece628448dce879cdbe64a5c6c
|
[
"MIT"
] | null | null | null |
gui/qt/main_window.py
|
rdewilde-ion/ion-electrum
|
7d83d0a2316080ece628448dce879cdbe64a5c6c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys, time, threading
import os, json, traceback
import shutil
import socket
import weakref
import webbrowser
import csv
from decimal import Decimal
import base64
from functools import partial
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
import icons_rc
from electrum import keystore
from electrum.bitcoin import COIN, is_valid, TYPE_ADDRESS
from electrum.plugins import run_hook
from electrum.i18n import _
from electrum.util import (block_explorer, block_explorer_info, format_time,
block_explorer_URL, format_satoshis, PrintError,
format_satoshis_plain, NotEnoughFunds,
UserCancelled)
from electrum import Transaction, mnemonic
from electrum import util, bitcoin, commands, coinchooser
from electrum import SimpleConfig, paymentrequest
from electrum.wallet import Wallet, Multisig_Wallet
from amountedit import AmountEdit, BTCAmountEdit, MyLineEdit, BTCkBEdit
from network_dialog import NetworkDialog
from qrcodewidget import QRCodeWidget, QRDialog
from qrtextedit import ShowQRTextEdit
from transaction_dialog import show_transaction
from fee_slider import FeeSlider
from electrum import ELECTRUM_VERSION
import re
from util import *
class StatusBarButton(QPushButton):
    """Small fixed-size icon button used in the status bar.

    Invokes *func* on a mouse click or when Return is pressed while the
    button has keyboard focus.
    """

    def __init__(self, icon, tooltip, func):
        QPushButton.__init__(self, icon, '')
        self.func = func
        self.setToolTip(tooltip)
        # self.setFlat(True)
        self.setMaximumWidth(25)
        self.setIconSize(QSize(25, 25))
        self.clicked.connect(self.onPress)

    def onPress(self, checked=False):
        '''Drops the unwanted PyQt4 "checked" argument'''
        self.func()

    def keyPressEvent(self, e):
        # Treat Return like a click.
        if e.key() == QtCore.Qt.Key_Return:
            self.func()
from electrum.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
class ElectrumWindow(QMainWindow, MessageBoxMixin, PrintError):
def __init__(self, gui_object, wallet):
QMainWindow.__init__(self)
self.setObjectName("main_window_container")
self.gui_object = gui_object
self.config = config = gui_object.config
self.network = gui_object.daemon.network
self.fx = gui_object.daemon.fx
self.invoices = wallet.invoices
self.contacts = wallet.contacts
self.tray = gui_object.tray
self.app = gui_object.app
self.cleaned_up = False
self.is_max = False
self.payment_request = None
self.checking_accounts = False
self.qr_window = None
self.not_enough_funds = False
self.pluginsdialog = None
self.require_fee_update = False
self.tx_notifications = []
self.tl_windows = []
self.create_status_bar()
self.need_update = threading.Event()
self.decimal_point = config.get('decimal_point', 8)
self.num_zeros = int(config.get('num_zeros',0))
self.completions = QStringListModel()
self.tabs = tabs = QTabWidget(self)
self.send_tab = self.create_send_tab()
self.receive_tab = self.create_receive_tab()
self.addresses_tab = self.create_addresses_tab()
self.utxo_tab = self.create_utxo_tab()
tabs.addTab(self.create_history_tab(), _('History') )
tabs.addTab(self.send_tab, _('Send') )
tabs.addTab(self.receive_tab, _('Receive') )
if self.config.get('show_addresses_tab', False):
tabs.addTab(self.addresses_tab, _('Addresses'))
if self.config.get('show_utxo_tab', False):
tabs.addTab(self.utxo_tab, _('Coins'))
tabs.addTab(self.create_contacts_tab(), _('Contacts') )
tabs.addTab(self.create_console_tab(), _('Console') )
tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
tabs.setObjectName("main_window_nav_bar")
self.setCentralWidget(tabs)
if self.config.get("is_maximized"):
self.showMaximized()
self.setWindowIcon(QIcon(":icons/electrum.png"))
self.init_menubar()
wrtabs = weakref.proxy(tabs)
QShortcut(QKeySequence("Ctrl+W"), self, self.close)
QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() - 1)%wrtabs.count()))
QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() + 1)%wrtabs.count()))
for i in range(wrtabs.count()):
QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: wrtabs.setCurrentIndex(i))
self.connect(self, QtCore.SIGNAL('payment_request_ok'), self.payment_request_ok)
self.connect(self, QtCore.SIGNAL('payment_request_error'), self.payment_request_error)
self.history_list.setFocus(True)
# network callbacks
if self.network:
self.connect(self, QtCore.SIGNAL('network'), self.on_network_qt)
interests = ['updated', 'new_transaction', 'status',
'banner', 'verified', 'fee']
# To avoid leaking references to "self" that prevent the
# window from being GC-ed when closed, callbacks should be
# methods of this class only, and specifically not be
# partials, lambdas or methods of subobjects. Hence...
self.network.register_callback(self.on_network, interests)
# set initial message
self.console.showMessage(self.network.banner)
self.network.register_callback(self.on_quotes, ['on_quotes'])
self.network.register_callback(self.on_history, ['on_history'])
self.connect(self, SIGNAL('new_fx_quotes'), self.on_fx_quotes)
self.connect(self, SIGNAL('new_fx_history'), self.on_fx_history)
# update fee slider in case we missed the callback
self.fee_slider.update()
self.load_wallet(wallet)
self.connect_slots(gui_object.timer)
self.fetch_alias()
def on_history(self, b):
self.emit(SIGNAL('new_fx_history'))
def on_fx_history(self):
self.history_list.refresh_headers()
self.history_list.update()
def on_quotes(self, b):
self.emit(SIGNAL('new_fx_quotes'))
def on_fx_quotes(self):
self.update_status()
# Refresh edits with the new rate
edit = self.fiat_send_e if self.fiat_send_e.is_last_edited else self.amount_e
edit.textEdited.emit(edit.text())
edit = self.fiat_receive_e if self.fiat_receive_e.is_last_edited else self.receive_amount_e
edit.textEdited.emit(edit.text())
# History tab needs updating if it used spot
if self.fx.history_used_spot:
self.history_list.update()
def toggle_addresses_tab(self):
show = not self.config.get('show_addresses_tab', False)
self.config.set_key('show_addresses_tab', show)
if show:
self.tabs.insertTab(3, self.addresses_tab, _('Addresses'))
else:
i = self.tabs.indexOf(self.addresses_tab)
self.tabs.removeTab(i)
def toggle_utxo_tab(self):
show = not self.config.get('show_utxo_tab', False)
self.config.set_key('show_utxo_tab', show)
if show:
self.tabs.insertTab(3, self.utxo_tab, _('Coins'))
else:
i = self.tabs.indexOf(self.utxo_tab)
self.tabs.removeTab(i)
def push_top_level_window(self, window):
'''Used for e.g. tx dialog box to ensure new dialogs are appropriately
parented. This used to be done by explicitly providing the parent
window, but that isn't something hardware wallet prompts know.'''
self.tl_windows.append(window)
def pop_top_level_window(self, window):
self.tl_windows.remove(window)
def top_level_window(self):
'''Do the right thing in the presence of tx dialog windows'''
override = self.tl_windows[-1] if self.tl_windows else None
return self.top_level_window_recurse(override)
def diagnostic_name(self):
return "%s/%s" % (PrintError.diagnostic_name(self),
self.wallet.basename() if self.wallet else "None")
def is_hidden(self):
return self.isMinimized() or self.isHidden()
def show_or_hide(self):
if self.is_hidden():
self.bring_to_top()
else:
self.hide()
def bring_to_top(self):
self.show()
self.raise_()
def on_error(self, exc_info):
if not isinstance(exc_info[1], UserCancelled):
traceback.print_exception(*exc_info)
self.show_error(str(exc_info[1]))
    def on_network(self, event, *args):
        """Network-thread callback; must not touch Qt widgets directly.

        'updated' and 'new_transaction' only mutate thread-safe state; every
        other known event is forwarded to the GUI thread via a Qt signal.
        """
        if event == 'updated':
            self.need_update.set()
        elif event == 'new_transaction':
            # Queued here, announced later by notify_transactions().
            self.tx_notifications.append(args[0])
        elif event in ['status', 'banner', 'verified', 'fee']:
            # Handle in GUI thread
            self.emit(QtCore.SIGNAL('network'), event, *args)
        else:
            self.print_error("unexpected network message:", event, args)
    def on_network_qt(self, event, *args):
        # Handle a network message in the GUI thread
        # (dispatched from on_network via the 'network' signal).
        if event == 'status':
            self.update_status()
        elif event == 'banner':
            self.console.showMessage(args[0])
        elif event == 'verified':
            self.history_list.update_item(*args)
        elif event == 'fee':
            # Only dynamic-fee mode cares about fee estimate updates.
            if self.config.is_dynfee():
                self.fee_slider.update()
                self.do_update_fee()
        else:
            self.print_error("unexpected network_qt signal:", event, args)
    def fetch_alias(self):
        """Resolve the configured OpenAlias in a background daemon thread.

        Resets ``self.alias_info`` first; on completion the worker stores the
        resolution result and emits 'alias_received' for the GUI to pick up.
        """
        self.alias_info = None
        alias = self.config.get('alias')
        if alias:
            alias = str(alias)
            def f():
                # Network lookup — kept off the GUI thread.
                self.alias_info = self.contacts.resolve_openalias(alias)
                self.emit(SIGNAL('alias_received'))
            t = threading.Thread(target=f)
            t.setDaemon(True)
            t.start()
def close_wallet(self):
if self.wallet:
self.print_error('close_wallet', self.wallet.storage.path)
run_hook('close_wallet', self.wallet)
def load_wallet(self, wallet):
wallet.thread = TaskThread(self, self.on_error)
self.wallet = wallet
self.update_recently_visited(wallet.storage.path)
# address used to create a dummy transaction and estimate transaction fee
self.history_list.update()
self.address_list.update()
self.utxo_list.update()
self.need_update.set()
# Once GUI has been initialized check if we want to announce something since the callback has been called before the GUI was initialized
self.notify_transactions()
# update menus
self.seed_menu.setEnabled(self.wallet.has_seed())
self.mpk_menu.setEnabled(self.wallet.is_deterministic())
self.update_lock_icon()
self.update_buttons_on_seed()
self.update_console()
self.clear_receive_tab()
self.request_list.update()
self.tabs.show()
self.init_geometry()
if self.config.get('hide_gui') and self.gui_object.tray.isVisible():
self.hide()
else:
self.show()
self.watching_only_changed()
run_hook('load_wallet', wallet, self)
def init_geometry(self):
winpos = self.wallet.storage.get("winpos-qt")
try:
screen = self.app.desktop().screenGeometry()
assert screen.contains(QRect(*winpos))
self.setGeometry(*winpos)
except:
self.print_error("using default geometry")
self.setGeometry(100, 100, 840, 400)
def watching_only_changed(self):
title = 'Electrum-ion %s - %s' % (self.wallet.electrum_version,
self.wallet.basename())
extra = [self.wallet.storage.get('wallet_type', '?')]
if self.wallet.is_watching_only():
self.warn_if_watching_only()
extra.append(_('watching only'))
title += ' [%s]'% ', '.join(extra)
self.setWindowTitle(title)
self.password_menu.setEnabled(self.wallet.can_change_password())
self.import_privkey_menu.setVisible(self.wallet.can_import_privkey())
self.import_address_menu.setVisible(self.wallet.can_import_address())
self.export_menu.setEnabled(self.wallet.can_export())
def warn_if_watching_only(self):
if self.wallet.is_watching_only():
msg = ' '.join([
_("This wallet is watching-only."),
_("This means you will not be able to spend ION with it."),
_("Make sure you own the seed phrase or the private keys, before you request ION to be sent to this wallet.")
])
self.show_warning(msg, title=_('Information'))
def open_wallet(self):
wallet_folder = self.get_wallet_folder()
filename = unicode(QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder))
if not filename:
return
self.gui_object.new_window(filename)
def backup_wallet(self):
path = self.wallet.storage.path
wallet_folder = os.path.dirname(path)
filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder) )
if not filename:
return
new_path = os.path.join(wallet_folder, filename)
if new_path != path:
try:
shutil.copy2(path, new_path)
self.show_message(_("A copy of your wallet file was created in")+" '%s'" % str(new_path), title=_("Wallet backup created"))
except (IOError, os.error), reason:
self.show_critical(_("electrum-ion was unable to copy your wallet file to the specified location.") + "\n" + str(reason), title=_("Unable to create backup"))
def update_recently_visited(self, filename):
recent = self.config.get('recently_open', [])
if filename in recent:
recent.remove(filename)
recent.insert(0, filename)
recent = recent[:5]
self.config.set_key('recently_open', recent)
self.recently_visited_menu.clear()
for i, k in enumerate(sorted(recent)):
b = os.path.basename(k)
def loader(k):
return lambda: self.gui_object.new_window(k)
self.recently_visited_menu.addAction(b, loader(k)).setShortcut(QKeySequence("Ctrl+%d"%(i+1)))
self.recently_visited_menu.setEnabled(len(recent))
def get_wallet_folder(self):
return os.path.dirname(os.path.abspath(self.config.get_wallet_path()))
def new_wallet(self):
wallet_folder = self.get_wallet_folder()
i = 1
while True:
filename = "wallet_%d" % i
if filename in os.listdir(wallet_folder):
i += 1
else:
break
filename = line_dialog(self, _('New Wallet'), _('Enter file name')
+ ':', _('OK'), filename)
if not filename:
return
full_path = os.path.join(wallet_folder, filename)
if os.path.exists(full_path):
self.show_critical(_("File exists"))
return
self.gui_object.start_new_window(full_path, None)
def init_menubar(self):
menubar = QMenuBar()
file_menu = menubar.addMenu(_("&File"))
self.recently_visited_menu = file_menu.addMenu(_("&Recently open"))
file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)
file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)
file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)
file_menu.addSeparator()
file_menu.addAction(_("&Quit"), self.close)
wallet_menu = menubar.addMenu(_("&Wallet"))
self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)
self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)
self.mpk_menu = wallet_menu.addAction(_("&Master Public Keys"), self.show_master_public_keys)
self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))
self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)
self.import_privkey_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)
self.export_menu = self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)
self.import_address_menu = wallet_menu.addAction(_("Import addresses"), self.import_addresses)
wallet_menu.addSeparator()
labels_menu = wallet_menu.addMenu(_("&Labels"))
labels_menu.addAction(_("&Import"), self.do_import_labels)
labels_menu.addAction(_("&Export"), self.do_export_labels)
contacts_menu = wallet_menu.addMenu(_("Contacts"))
contacts_menu.addAction(_("&New"), self.new_contact_dialog)
contacts_menu.addAction(_("Import"), lambda: self.contact_list.import_contacts())
invoices_menu = wallet_menu.addMenu(_("Invoices"))
invoices_menu.addAction(_("Import"), lambda: self.invoice_list.import_invoices())
hist_menu = wallet_menu.addMenu(_("&History"))
hist_menu.addAction("Plot", self.plot_history_dialog)
hist_menu.addAction("Export", self.export_history_dialog)
wallet_menu.addSeparator()
wallet_menu.addAction(_("Find"), self.toggle_search).setShortcut(QKeySequence("Ctrl+F"))
wallet_menu.addAction(_("Addresses"), self.toggle_addresses_tab).setShortcut(QKeySequence("Ctrl+A"))
wallet_menu.addAction(_("Coins"), self.toggle_utxo_tab)
tools_menu = menubar.addMenu(_("&Tools"))
# Settings / Preferences are all reserved keywords in OSX using this as work around
tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)
tools_menu.addAction(_("&Network"), self.run_network_dialog)
tools_menu.addAction(_("&Plugins"), self.plugins_dialog)
tools_menu.addSeparator()
tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)
tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)
tools_menu.addSeparator()
paytomany_menu = tools_menu.addAction(_("&Pay to many"), self.paytomany)
raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)
raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)
raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)
raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode)
self.raw_transaction_menu = raw_transaction_menu
help_menu = menubar.addMenu(_("&Help"))
help_menu.addAction(_("&About"), self.show_about)
help_menu.addAction(_("&Official website"), lambda: webbrowser.open("http://electrum.org"))
help_menu.addSeparator()
help_menu.addAction(_("&Documentation"), lambda: webbrowser.open("http://docs.electrum.org/")).setShortcut(QKeySequence.HelpContents)
help_menu.addAction(_("&Report Bug"), self.show_report_bug)
help_menu.addSeparator()
help_menu.addAction(_("&Donate to server"), self.donate_to_server)
self.setMenuBar(menubar)
def donate_to_server(self):
d = self.network.get_donation_address()
if d:
host = self.network.get_parameters()[0]
self.pay_to_URI('ion:%s?message=donation for %s'%(d, host))
else:
self.show_error(_('No donation address for this server'))
def show_about(self):
QMessageBox.about(self, "electrum-ion",
_("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" + _("electrum-ion's focus is speed, with low resource usage and simplifying ION. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the ION system."))
def show_report_bug(self):
msg = ' '.join([
_("Please report any bugs as issues on github:<br/>"),
"<a href=\"https://github.com/ionomy/electrum-ion/issues\">https://github.com/ionomy/electrum-ion/issues</a><br/><br/>",
_("Before reporting a bug, upgrade to the most recent version of electrum-ion (latest release or git HEAD), and include the version number in your report."),
_("Try to explain not only what the bug is, but how it occurs.")
])
self.show_message(msg, title="electrum-ion - " + _("Reporting Bugs"))
def notify_transactions(self):
if not self.network or not self.network.is_connected():
return
self.print_error("Notifying GUI")
if len(self.tx_notifications) > 0:
# Combine the transactions if there are more then three
tx_amount = len(self.tx_notifications)
if(tx_amount >= 3):
total_amount = 0
for tx in self.tx_notifications:
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if(v > 0):
total_amount += v
self.notify(_("%(txs)s new transactions received. Total amount received in the new transactions %(amount)s") \
% { 'txs' : tx_amount, 'amount' : self.format_amount_and_units(total_amount)})
self.tx_notifications = []
else:
for tx in self.tx_notifications:
if tx:
self.tx_notifications.remove(tx)
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if(v > 0):
self.notify(_("New transaction received. %(amount)s") % { 'amount' : self.format_amount_and_units(v)})
def notify(self, message):
if self.tray:
self.tray.showMessage("Electrum-ion", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
fileName = unicode( QFileDialog.getOpenFileName(self, title, directory, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def getSaveFileName(self, title, filename, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
path = os.path.join( directory, filename )
fileName = unicode( QFileDialog.getSaveFileName(self, title, path, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def connect_slots(self, sender):
self.connect(sender, QtCore.SIGNAL('timersignal'), self.timer_actions)
    def timer_actions(self):
        """Periodic housekeeping driven by the application timer."""
        # Note this runs in the GUI thread
        if self.need_update.is_set():
            self.need_update.clear()
            self.update_wallet()
        # resolve aliases
        self.payto_e.resolve()
        # update fee (flag set elsewhere to coalesce repeated requests)
        if self.require_fee_update:
            self.do_update_fee()
            self.require_fee_update = False
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, is_diff, self.num_zeros, self.decimal_point, whitespaces)
def format_amount_and_units(self, amount):
text = self.format_amount(amount) + ' '+ self.base_unit()
x = self.fx.format_amount_and_units(amount)
if text and x:
text += ' (%s)'%x
return text
def get_decimal_point(self):
return self.decimal_point
def base_unit(self):
assert self.decimal_point in [2, 5, 8]
if self.decimal_point == 2:
return 'uION'
if self.decimal_point == 5:
return 'mION'
if self.decimal_point == 8:
return 'ION'
raise Exception('Unknown base unit')
def connect_fields(self, window, btc_e, fiat_e, fee_e):
def edit_changed(edit):
if edit.follows:
return
edit.setStyleSheet(BLACK_FG)
fiat_e.is_last_edited = (edit == fiat_e)
amount = edit.get_amount()
rate = self.fx.exchange_rate() if self.fx else None
if rate is None or amount is None:
if edit is fiat_e:
btc_e.setText("")
if fee_e:
fee_e.setText("")
else:
fiat_e.setText("")
else:
if edit is fiat_e:
btc_e.follows = True
btc_e.setAmount(int(amount / Decimal(rate) * COIN))
btc_e.setStyleSheet(BLUE_FG)
btc_e.follows = False
if fee_e:
window.update_fee()
else:
fiat_e.follows = True
fiat_e.setText(self.fx.ccy_amount_str(
amount * Decimal(rate) / COIN, False))
fiat_e.setStyleSheet(BLUE_FG)
fiat_e.follows = False
btc_e.follows = False
fiat_e.follows = False
fiat_e.textChanged.connect(partial(edit_changed, fiat_e))
btc_e.textChanged.connect(partial(edit_changed, btc_e))
fiat_e.is_last_edited = False
def update_status(self):
if not self.wallet:
return
if self.network is None or not self.network.is_running():
text = _("Offline")
icon = QIcon(":icons/status_disconnected.png")
elif self.network.is_connected():
server_height = self.network.get_server_height()
server_lag = self.network.get_local_height() - server_height
# Server height can be 0 after switching to a new server
# until we get a headers subscription request response.
# Display the synchronizing message in that case.
if not self.wallet.up_to_date or server_height == 0:
text = _("Synchronizing...")
icon = QIcon(":icons/status_waiting.png")
elif server_lag > 1:
text = _("Server is lagging (%d blocks)"%server_lag)
icon = QIcon(":icons/status_lagging.png")
else:
c, u, x = self.wallet.get_balance()
text = _("Balance" ) + ": %s "%(self.format_amount_and_units(c))
if u:
text += " [%s unconfirmed]"%(self.format_amount(u, True).strip())
if x:
text += " [%s unmatured]"%(self.format_amount(x, True).strip())
# append fiat balance and price
if self.fx.is_enabled():
text += self.fx.get_fiat_status_text(c + u + x) or ''
if not self.network.proxy:
icon = QIcon(":icons/status_connected.png")
else:
icon = QIcon(":icons/status_connected_proxy.png")
else:
text = _("Not connected")
icon = QIcon(":icons/status_disconnected.png")
self.tray.setToolTip("%s (%s)" % (text, self.wallet.basename()))
self.balance_label.setText(text)
self.status_button.setIcon( icon )
def update_wallet(self):
self.update_status()
if self.wallet.up_to_date or not self.network or not self.network.is_connected():
self.update_tabs()
def update_tabs(self):
self.history_list.update()
self.request_list.update()
self.address_list.update()
self.utxo_list.update()
self.contact_list.update()
self.invoice_list.update()
self.update_completions()
def create_history_tab(self):
from history_list import HistoryList
self.history_list = l = HistoryList(self)
l.setObjectName("history_container")
l.searchable_list = l
return l
def show_address(self, addr):
import address_dialog
d = address_dialog.AddressDialog(self, addr)
d.exec_()
def show_transaction(self, tx, tx_desc = None):
'''tx_desc is set only for txs created in the Send tab'''
show_transaction(tx, self, tx_desc)
def create_receive_tab(self):
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
self.receive_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
self.receive_address_e = ButtonsLineEdit()
self.receive_address_e.addCopyButton(self.app)
self.receive_address_e.setReadOnly(True)
msg = _('ION address where the payment should be received. Note that each payment request uses a different ION address.')
self.receive_address_label = HelpLabel(_('Receiving address'), msg)
self.receive_address_e.textChanged.connect(self.update_receive_qr)
self.receive_address_e.setFocusPolicy(Qt.NoFocus)
grid.addWidget(self.receive_address_label, 0, 0)
grid.addWidget(self.receive_address_e, 0, 1, 1, -1)
self.receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self.receive_message_e, 1, 1, 1, -1)
self.receive_message_e.textChanged.connect(self.update_receive_qr)
self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self.receive_amount_e, 2, 1)
self.receive_amount_e.textChanged.connect(self.update_receive_qr)
self.fiat_receive_e = AmountEdit(self.fx.get_currency if self.fx else '')
if not self.fx or not self.fx.is_enabled():
self.fiat_receive_e.setVisible(False)
grid.addWidget(self.fiat_receive_e, 2, 2, Qt.AlignLeft)
self.connect_fields(self, self.receive_amount_e, self.fiat_receive_e, None)
self.expires_combo = QComboBox()
self.expires_combo.addItems(map(lambda x:x[0], expiration_values))
self.expires_combo.setCurrentIndex(3)
self.expires_combo.setFixedWidth(self.receive_amount_e.width())
msg = ' '.join([
_('Expiration date of your request.'),
_('This information is seen by the recipient if you send them a signed payment request.'),
_('Expired requests have to be deleted manually from your list, in order to free the corresponding ION addresses.'),
_('The ION address never expires and will always be part of this electrum-ion wallet.'),
])
grid.addWidget(HelpLabel(_('Request expires'), msg), 3, 0)
grid.addWidget(self.expires_combo, 3, 1)
self.expires_label = QLineEdit('')
self.expires_label.setReadOnly(1)
self.expires_label.setFocusPolicy(Qt.NoFocus)
self.expires_label.hide()
grid.addWidget(self.expires_label, 3, 1)
self.save_request_button = QPushButton(_('Save'))
self.save_request_button.clicked.connect(self.save_payment_request)
self.new_request_button = QPushButton(_('New'))
self.new_request_button.clicked.connect(self.new_payment_request)
self.receive_qr = QRCodeWidget(fixedSize=200)
self.receive_qr.mouseReleaseEvent = lambda x: self.toggle_qr_window()
self.receive_qr.enterEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
self.receive_qr.leaveEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
self.receive_buttons = buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.save_request_button)
buttons.addWidget(self.new_request_button)
grid.addLayout(buttons, 4, 1, 1, 2)
self.receive_requests_label = QLabel(_('Requests'))
from request_list import RequestList
self.request_list = RequestList(self)
# layout
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addStretch()
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addWidget(self.receive_qr)
w = QWidget()
w.setObjectName("receive_container")
w.searchable_list = self.request_list
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.receive_requests_label)
vbox.addWidget(self.request_list)
vbox.setStretchFactor(self.request_list, 1000)
return w
def delete_payment_request(self, addr):
    """Remove the stored payment request bound to addr and refresh the receive UI."""
    self.wallet.remove_payment_request(addr, self.config)
    self.request_list.update()
    self.clear_receive_tab()
def get_request_URI(self, addr):
    """Build the BIP21-style payment URI for the stored request at addr.

    Appends non-standard 'time', 'exp', 'name' and 'sig' query parameters
    when present on the request; the signature is re-encoded from hex to
    base58 for URI compactness.
    """
    req = self.wallet.receive_requests[addr]
    message = self.wallet.labels.get(addr, '')
    amount = req['amount']
    URI = util.create_URI(addr, amount, message)
    if req.get('time'):
        URI += "&time=%d"%req.get('time')
    if req.get('exp'):
        URI += "&exp=%d"%req.get('exp')
    if req.get('name') and req.get('sig'):
        # Python 2 idiom: str.decode('hex') converts the hex signature to raw bytes
        sig = req.get('sig').decode('hex')
        sig = bitcoin.base_encode(sig, base=58)
        URI += "&name=" + req['name'] + "&sig="+sig
    return str(URI)
def sign_payment_request(self, addr):
    """Sign the payment request at addr with the configured OpenAlias identity.

    Only signs when an alias is configured, it has resolved
    (self.alias_info is set), the alias address belongs to this wallet,
    and the user supplies a password. Any other case returns silently;
    signing errors are shown to the user.
    """
    # Removed the unused local 'alias_privkey = None' from the original.
    alias = self.config.get('alias')
    if alias and self.alias_info:
        alias_addr, alias_name, validated = self.alias_info
        if alias_addr:
            if self.wallet.is_mine(alias_addr):
                msg = _('This payment request will be signed.') + '\n' + _('Please enter your password')
                password = self.password_dialog(msg)
                if password:
                    try:
                        self.wallet.sign_payment_request(addr, alias, alias_addr, password)
                    except Exception as e:
                        self.show_error(str(e))
                        return
                else:
                    return
            else:
                return
def save_payment_request(self):
    """Create and store a payment request from the receive-tab fields.

    Returns False (after an error popup) when neither a message nor an
    amount was entered; otherwise saves, signs, and refreshes the
    request and address lists.
    """
    addr = str(self.receive_address_e.text())
    amount = self.receive_amount_e.get_amount()
    message = unicode(self.receive_message_e.text())
    if not message and not amount:
        self.show_error(_('No message or amount'))
        return False
    i = self.expires_combo.currentIndex()
    # Python 2: map() returns a list, so it can be indexed by the combo index
    expiration = map(lambda x: x[1], expiration_values)[i]
    req = self.wallet.make_payment_request(addr, amount, message, expiration)
    self.wallet.add_payment_request(req, self.config)
    self.sign_payment_request(addr)
    self.request_list.update()
    self.address_list.update()
    self.save_request_button.setEnabled(False)
def view_and_paste(self, title, msg, data):
    """Show data in a modal dialog with a QR/text view and a copy-and-close button."""
    dialog = WindowModalDialog(self, title)
    vbox = QVBoxLayout()
    label = QLabel(msg)
    label.setWordWrap(True)
    vbox.addWidget(label)
    pr_e = ShowQRTextEdit(text=data)
    vbox.addWidget(pr_e)
    vbox.addLayout(Buttons(CopyCloseButton(pr_e.text, self.app, dialog)))
    dialog.setLayout(vbox)
    dialog.exec_()
def export_payment_request(self, addr):
    """Serialize the request at addr to a BIP70 file chosen by the user."""
    r = self.wallet.receive_requests.get(addr)
    pr = paymentrequest.serialize_request(r).SerializeToString()
    name = r['id'] + '.bip70'
    fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")
    if fileName:
        with open(fileName, "wb+") as f:
            f.write(str(pr))
        self.show_message(_("Request saved successfully"))
        self.saved = True
def new_payment_request(self):
    """Select a fresh unused receiving address for a new payment request.

    Non-deterministic wallets cannot create addresses, so an explanatory
    message is shown instead. Deterministic wallets that are out of
    unused addresses get a gap-limit warning before a new address is
    created.
    """
    # Removed the original's unused local import of Imported_Wallet.
    addr = self.wallet.get_unused_address()
    if addr is None:
        if not self.wallet.is_deterministic():
            msg = [
                _('No more addresses in your wallet.'),
                _('You are using a non-deterministic wallet, which cannot create new addresses.'),
                _('If you want to create new addresses, use a deterministic wallet instead.')
            ]
            self.show_message(' '.join(msg))
            return
        if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):
            return
        addr = self.wallet.create_new_address(False)
    self.set_receive_address(addr)
    self.expires_label.hide()
    self.expires_combo.show()
    self.new_request_button.setEnabled(False)
    self.receive_message_e.setFocus(1)
def set_receive_address(self, addr):
    """Point the receive tab at addr, clearing any message/amount entered."""
    self.receive_address_e.setText(addr)
    self.receive_message_e.setText('')
    self.receive_amount_e.setAmount(None)
def clear_receive_tab(self):
    """Reset the receive tab to the wallet's current receiving address."""
    addr = self.wallet.get_receiving_address()
    if addr:
        self.receive_address_e.setText(addr)
    self.receive_message_e.setText('')
    self.receive_amount_e.setAmount(None)
    self.expires_label.hide()
    self.expires_combo.show()
def toggle_qr_window(self):
    """Show or hide the detached QR window, preserving its geometry across toggles."""
    import qrwindow
    if not self.qr_window:
        self.qr_window = qrwindow.QR_Window(self)
        self.qr_window.setVisible(True)
        self.qr_window_geometry = self.qr_window.geometry()
    else:
        if not self.qr_window.isVisible():
            self.qr_window.setVisible(True)
            self.qr_window.setGeometry(self.qr_window_geometry)
        else:
            # Remember where the user put it before hiding
            self.qr_window_geometry = self.qr_window.geometry()
            self.qr_window.setVisible(False)
    self.update_receive_qr()
def show_send_tab(self):
    """Switch the main tab widget to the Send tab."""
    self.tabs.setCurrentIndex(self.tabs.indexOf(self.send_tab))
def show_receive_tab(self):
    """Switch the main tab widget to the Receive tab."""
    self.tabs.setCurrentIndex(self.tabs.indexOf(self.receive_tab))
def receive_at(self, addr):
    """Open the receive tab pre-filled with addr (ignored if invalid)."""
    if not bitcoin.is_address(addr):
        return
    self.show_receive_tab()
    self.receive_address_e.setText(addr)
    self.new_request_button.setEnabled(True)
def update_receive_qr(self):
    """Regenerate the receive QR code (and detached QR window) from the tab fields."""
    addr = str(self.receive_address_e.text())
    amount = self.receive_amount_e.get_amount()
    message = unicode(self.receive_message_e.text()).encode('utf8')
    # Save only makes sense once there is something worth saving
    self.save_request_button.setEnabled((amount is not None) or (message != ""))
    uri = util.create_URI(addr, amount, message)
    self.receive_qr.setData(uri)
    if self.qr_window and self.qr_window.isVisible():
        self.qr_window.set_content(addr, amount, message, uri)
def create_send_tab(self):
    """Build and return the Send tab widget.

    Lays out the pay-to/description/amount/fee grid, the action buttons,
    and the invoice list, and wires up all the signal handlers that keep
    the fee, max-spend and fiat fields in sync.
    """
    # A 4-column grid layout. All the stretch is in the last column.
    # The exchange rate plugin adds a fiat widget in column 2
    self.send_grid = grid = QGridLayout()
    grid.setSpacing(8)
    grid.setColumnStretch(3, 1)
    from paytoedit import PayToEdit
    self.amount_e = BTCAmountEdit(self.get_decimal_point)
    self.payto_e = PayToEdit(self)
    msg = _('Recipient of the funds.') + '\n\n'\
          + _('You may enter a ION address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a ION address)')
    payto_label = HelpLabel(_('Pay to'), msg)
    grid.addWidget(payto_label, 1, 0)
    grid.addWidget(self.payto_e, 1, 1, 1, -1)
    # Autocomplete pay-to entries from the contacts list
    completer = QCompleter()
    completer.setCaseSensitivity(False)
    self.payto_e.setCompleter(completer)
    completer.setModel(self.completions)
    msg = _('Description of the transaction (not mandatory).') + '\n\n'\
          + _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')
    description_label = HelpLabel(_('Description'), msg)
    grid.addWidget(description_label, 2, 0)
    self.message_e = MyLineEdit()
    grid.addWidget(self.message_e, 2, 1, 1, -1)
    # 'From' row is only shown when coins were explicitly selected
    self.from_label = QLabel(_('From'))
    grid.addWidget(self.from_label, 3, 0)
    self.from_list = MyTreeWidget(self, self.from_list_menu, ['',''])
    self.from_list.setHeaderHidden(True)
    self.from_list.setMaximumHeight(80)
    grid.addWidget(self.from_list, 3, 1, 1, -1)
    self.set_pay_from([])
    msg = _('Amount to be sent.') + '\n\n' \
          + _('The amount will be displayed in red if you do not have enough funds in your wallet.') + ' ' \
          + _('Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') + '\n\n' \
          + _('Keyboard shortcut: type "!" to send all your coins.')
    amount_label = HelpLabel(_('Amount'), msg)
    grid.addWidget(amount_label, 4, 0)
    grid.addWidget(self.amount_e, 4, 1)
    # Fiat twin of the amount field; hidden unless the fx plugin is active
    self.fiat_send_e = AmountEdit(self.fx.get_currency if self.fx else '')
    if not self.fx or not self.fx.is_enabled():
        self.fiat_send_e.setVisible(False)
    grid.addWidget(self.fiat_send_e, 4, 2)
    self.amount_e.frozen.connect(
        lambda: self.fiat_send_e.setFrozen(self.amount_e.isReadOnly()))
    self.max_button = EnterButton(_("Max"), self.spend_max)
    self.max_button.setFixedWidth(140)
    grid.addWidget(self.max_button, 4, 3)
    hbox = QHBoxLayout()
    hbox.addStretch(1)
    grid.addLayout(hbox, 4, 4)
    msg = _('Bitcoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
          + _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
          + _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
    self.fee_e_label = HelpLabel(_('Fee'), msg)
    def fee_cb(dyn, pos, fee_rate):
        # Slider callback: persist either the dynamic fee level or the
        # static fee/kB, then recompute the transaction
        if dyn:
            self.config.set_key('fee_level', pos, False)
        else:
            self.config.set_key('fee_per_kb', fee_rate, False)
        self.spend_max() if self.is_max else self.update_fee()
    self.fee_slider = FeeSlider(self, self.config, fee_cb)
    self.fee_slider.setFixedWidth(140)
    self.fee_e = BTCAmountEdit(self.get_decimal_point)
    if not self.config.get('show_fee', False):
        self.fee_e.setVisible(False)
    self.fee_e.textEdited.connect(self.update_fee)
    # This is so that when the user blanks the fee and moves on,
    # we go back to auto-calculate mode and put a fee back.
    self.fee_e.editingFinished.connect(self.update_fee)
    self.connect_fields(self, self.amount_e, self.fiat_send_e, self.fee_e)
    self.rbf_checkbox = QCheckBox(_('Replaceable'))
    msg = [_('If you check this box, your transaction will be marked as non-final,'),
           _('and you will have the possiblity, while it is unconfirmed, to replace it with a transaction that pays a higher fee.'),
           _('Note that some merchants do not accept non-final transactions until they are confirmed.')]
    self.rbf_checkbox.setToolTip('<p>' + ' '.join(msg) + '</p>')
    self.rbf_checkbox.setVisible(False)
    grid.addWidget(self.fee_e_label, 5, 0)
    grid.addWidget(self.fee_slider, 5, 1)
    grid.addWidget(self.fee_e, 5, 2)
    grid.addWidget(self.rbf_checkbox, 5, 3)
    self.preview_button = EnterButton(_("Preview"), self.do_preview)
    self.preview_button.setToolTip(_('Display the details of your transactions before signing it.'))
    self.send_button = EnterButton(_("Send"), self.do_send)
    self.clear_button = EnterButton(_("Clear"), self.do_clear)
    buttons = QHBoxLayout()
    buttons.addStretch(1)
    buttons.addWidget(self.clear_button)
    buttons.addWidget(self.preview_button)
    buttons.addWidget(self.send_button)
    grid.addLayout(buttons, 6, 1, 1, 3)
    self.amount_e.shortcut.connect(self.spend_max)
    self.payto_e.textChanged.connect(self.update_fee)
    self.amount_e.textEdited.connect(self.update_fee)
    def reset_max(t):
        # Any manual edit of the amount cancels "send all" mode
        self.is_max = False
        self.max_button.setEnabled(not bool(t))
    self.amount_e.textEdited.connect(reset_max)
    self.fiat_send_e.textEdited.connect(reset_max)
    def entry_changed():
        # Color the amount/fee fields to reflect which value drives which,
        # and surface "not enough funds" in the status bar
        text = ""
        if self.not_enough_funds:
            amt_color, fee_color = RED_FG, RED_FG
            text = _( "Not enough funds" )
            c, u, x = self.wallet.get_frozen_balance()
            if c+u+x:
                text += ' (' + self.format_amount(c+u+x).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
        elif self.fee_e.isModified():
            amt_color, fee_color = BLACK_FG, BLACK_FG
        elif self.amount_e.isModified():
            amt_color, fee_color = BLACK_FG, BLUE_FG
        else:
            amt_color, fee_color = BLUE_FG, BLUE_FG
        self.statusBar().showMessage(text)
        self.amount_e.setStyleSheet(amt_color)
        self.fee_e.setStyleSheet(fee_color)
    self.amount_e.textChanged.connect(entry_changed)
    self.fee_e.textChanged.connect(entry_changed)
    self.invoices_label = QLabel(_('Invoices'))
    from invoice_list import InvoiceList
    self.invoice_list = InvoiceList(self)
    vbox0 = QVBoxLayout()
    vbox0.addLayout(grid)
    hbox = QHBoxLayout()
    hbox.addLayout(vbox0)
    w = QWidget()
    w.setObjectName("send_container")
    vbox = QVBoxLayout(w)
    vbox.addLayout(hbox)
    vbox.addStretch(1)
    vbox.addWidget(self.invoices_label)
    vbox.addWidget(self.invoice_list)
    vbox.setStretchFactor(self.invoice_list, 1000)
    w.searchable_list = self.invoice_list
    run_hook('create_send_tab', grid)
    return w
def spend_max(self):
    """Enter 'send all coins' mode and recompute amount and fee."""
    self.is_max = True
    self.do_update_fee()
def update_fee(self):
    """Request a fee recomputation; the timer loop picks this flag up and calls do_update_fee."""
    self.require_fee_update = True
def get_payto_or_dummy(self):
    """Return the Pay-To field's recipient, or a dummy wallet address
    placeholder (used for fee estimation before a recipient is entered)."""
    recipient = self.payto_e.get_recipient()
    return recipient or (TYPE_ADDRESS, self.wallet.dummy_address())
def do_update_fee(self):
    '''Recalculate the fee. If the fee was manually input, retain it, but
    still build the TX to see if there are enough funds.
    '''
    # Dynamic fees need server estimates; bail out until they arrive
    if not self.config.get('offline') and self.config.is_dynfee() and not self.config.has_fee_estimates():
        self.statusBar().showMessage(_('Waiting for fee estimates...'))
        return False
    # A manually-edited, non-empty (or focused) fee field is "frozen":
    # we keep the user's value instead of overwriting it
    freeze_fee = (self.fee_e.isModified()
                  and (self.fee_e.text() or self.fee_e.hasFocus()))
    amount = '!' if self.is_max else self.amount_e.get_amount()
    if amount is None:
        if not freeze_fee:
            self.fee_e.setAmount(None)
        self.not_enough_funds = False
        self.statusBar().showMessage('')
    else:
        fee = self.fee_e.get_amount() if freeze_fee else None
        outputs = self.payto_e.get_outputs(self.is_max)
        if not outputs:
            # No recipient yet: estimate against a dummy output
            _type, addr = self.get_payto_or_dummy()
            outputs = [(_type, addr, amount)]
        try:
            tx = self.wallet.make_unsigned_transaction(self.get_coins(), outputs, self.config, fee)
            self.not_enough_funds = False
        except NotEnoughFunds:
            self.not_enough_funds = True
            return
        except BaseException:
            return
        if not freeze_fee:
            fee = None if self.not_enough_funds else tx.get_fee()
            self.fee_e.setAmount(fee)
        if self.is_max:
            amount = tx.output_value()
            self.amount_e.setAmount(amount)
        if fee is None:
            return
        # Decide whether to offer the RBF checkbox:
        # 0 = always, 1 = only at the lowest/static fee levels, 2 = never
        rbf_policy = self.config.get('rbf_policy', 1)
        if rbf_policy == 0:
            b = True
        elif rbf_policy == 1:
            fee_rate = fee * 1000 / tx.estimated_size()
            try:
                c = self.config.reverse_dynfee(fee_rate)
                b = c in [-1, 25]
            except:
                b = False
        elif rbf_policy == 2:
            b = False
        self.rbf_checkbox.setVisible(b)
        self.rbf_checkbox.setChecked(b)
def from_list_delete(self, item):
    """Remove a selected coin from the 'pay from' list and recompute the fee."""
    i = self.from_list.indexOfTopLevelItem(item)
    self.pay_from.pop(i)
    self.redraw_from_list()
    self.update_fee()
def from_list_menu(self, position):
    """Context menu for the 'pay from' coin list (offers only Remove)."""
    item = self.from_list.itemAt(position)
    menu = QMenu()
    menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
    menu.exec_(self.from_list.viewport().mapToGlobal(position))
def set_pay_from(self, coins):
    """Set the explicit coin selection used as transaction inputs and redraw."""
    self.pay_from = coins
    self.redraw_from_list()
def redraw_from_list(self):
    """Repopulate the 'pay from' widget; hide it entirely when no coins are selected."""
    self.from_list.clear()
    self.from_label.setHidden(len(self.pay_from) == 0)
    self.from_list.setHidden(len(self.pay_from) == 0)
    def format(x):
        # Abbreviate the outpoint as "txid[:10]...txid[-10:]:n <address>"
        h = x.get('prevout_hash')
        return h[0:10] + '...' + h[-10:] + ":%d"%x.get('prevout_n') + u'\t' + "%s"%x.get('address')
    for item in self.pay_from:
        self.from_list.addTopLevelItem(QTreeWidgetItem( [format(item), self.format_amount(item['value']) ]))
def get_contact_payto(self, key):
    """Render a contact as a Pay-To line: "label <key>" for plain address
    contacts, otherwise the key itself (e.g. an OpenAlias)."""
    kind, label = self.contacts.get(key)
    if kind == 'address':
        return label + ' <' + key + '>'
    return key
def update_completions(self):
    """Refresh the Pay-To field's autocompletion model from the contacts store."""
    self.completions.setStringList(
        [self.get_contact_payto(contact_key) for contact_key in self.contacts.keys()])
def protected(func):
    '''Password request wrapper. The password is passed to the function
    as the 'password' named argument. "None" indicates either an
    unencrypted wallet, or the user cancelled the password request.
    An empty input is passed as the empty string.'''
    # NOTE: defined in the class body and applied to methods as @protected.
    def request_password(self, *args, **kwargs):
        parent = self.top_level_window()
        password = None
        # Keep prompting until the password verifies or the user cancels
        while self.wallet.has_password():
            password = self.password_dialog(parent=parent)
            if password is None:
                # User cancelled password input
                return
            try:
                self.wallet.check_password(password)
                break
            except Exception as e:
                self.show_error(str(e), parent=parent)
                continue
        kwargs['password'] = password
        return func(self, *args, **kwargs)
    return request_password
def read_send_tab(self):
    """Collect and validate the send-tab inputs.

    Returns (outputs, fee, label, coins) ready for transaction
    construction, or None when validation fails or the user aborts.
    False is returned for 'label' only implicitly via the tuple; callers
    treat a falsy return as abort.
    """
    if self.payment_request and self.payment_request.has_expired():
        self.show_error(_('Payment request has expired'))
        return
    label = unicode( self.message_e.text() )
    if self.payment_request:
        # BIP70 payment request dictates the outputs
        outputs = self.payment_request.get_outputs()
    else:
        errors = self.payto_e.get_errors()
        if errors:
            self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
            return
        outputs = self.payto_e.get_outputs(self.is_max)
        if self.payto_e.is_alias and self.payto_e.validated is False:
            alias = self.payto_e.toPlainText()
            # Interpolate AFTER translation so the msgid matches the
            # gettext catalog (the original formatted inside _()).
            msg = _('WARNING: the alias "%s" could not be validated via an additional security check, DNSSEC, and thus may not be correct.') % alias + '\n'
            msg += _('Do you wish to continue?')
            if not self.question(msg):
                return
    if not outputs:
        self.show_error(_('No outputs'))
        return
    for _type, addr, amount in outputs:
        if addr is None:
            self.show_error(_('Bitcoin Address is None'))
            return
        if _type == TYPE_ADDRESS and not bitcoin.is_address(addr):
            self.show_error(_('Invalid Bitcoin Address'))
            return
        if amount is None:
            self.show_error(_('Invalid Amount'))
            return
    # A visible, user-edited fee field "freezes" the fee at the typed value
    freeze_fee = self.fee_e.isVisible() and self.fee_e.isModified() and (self.fee_e.text() or self.fee_e.hasFocus())
    fee = self.fee_e.get_amount() if freeze_fee else None
    coins = self.get_coins()
    return outputs, fee, label, coins
def do_preview(self):
    """Build the transaction and show it in the details dialog without sending."""
    self.do_send(preview = True)
def do_send(self, preview = False):
    """Build, confirm, sign and broadcast the transaction from the send tab.

    With preview=True the unsigned transaction is shown in a dialog
    instead of being signed and broadcast.
    """
    # Plugins may veto the send entirely
    if run_hook('abort_send', self):
        return
    r = self.read_send_tab()
    if not r:
        return
    outputs, fee, tx_desc, coins = r
    try:
        tx = self.wallet.make_unsigned_transaction(coins, outputs, self.config, fee)
    except NotEnoughFunds:
        self.show_message(_("Insufficient funds"))
        return
    except BaseException as e:
        traceback.print_exc(file=sys.stdout)
        self.show_message(str(e))
        return
    amount = tx.output_value() if self.is_max else sum(map(lambda x:x[2], outputs))
    fee = tx.get_fee()
    use_rbf = self.rbf_checkbox.isChecked()
    if use_rbf:
        tx.set_rbf(True)
    # Reject transactions that would be dropped by relay nodes
    if fee < self.wallet.relayfee() * tx.estimated_size() / 1000 and tx.requires_fee(self.wallet):
        self.show_error(_("This transaction requires a higher fee, or it will not be propagated by the network"))
        return
    if preview:
        self.show_transaction(tx, tx_desc)
        return
    # confirmation dialog
    confirm_amount = self.config.get('confirm_amount', COIN)
    msg = [
        _("Amount to be sent") + ": " + self.format_amount_and_units(amount),
        _("Fee") + ": " + self.format_amount_and_units(fee),
    ]
    extra_fee = run_hook('get_additional_fee', self.wallet, tx)
    if extra_fee:
        msg.append( _("Additional fees") + ": " + self.format_amount_and_units(extra_fee) )
    confirm_rate = 2 * self.config.max_fee_rate()
    if fee > confirm_rate * tx.estimated_size() / 1000:
        msg.append(_('Warning') + ': ' + _("The fee for this transaction seems unusually high."))
    if self.wallet.has_password():
        msg.append("")
        msg.append(_("Enter your password to proceed"))
        password = self.password_dialog('\n'.join(msg))
        if not password:
            return
    else:
        msg.append(_('Proceed?'))
        password = None
        if not self.question('\n'.join(msg)):
            return
    def sign_done(success):
        # Runs on the GUI thread after the signing WaitingDialog finishes
        if success:
            if not tx.is_complete():
                # Partially signed (e.g. multisig): show for export instead
                self.show_transaction(tx)
                self.do_clear()
            else:
                self.broadcast_transaction(tx, tx_desc)
    self.sign_tx_with_password(tx, sign_done, password)
@protected
def sign_tx(self, tx, callback, password):
    """Prompt for the wallet password (via @protected) and sign tx asynchronously."""
    self.sign_tx_with_password(tx, callback, password)
def sign_tx_with_password(self, tx, callback, password):
    '''Sign the transaction in a separate thread. When done, calls
    the callback with a success code of True or False.
    '''
    # call hook to see if plugin needs gui interaction
    run_hook('sign_tx', self, tx)
    def on_signed(result):
        callback(True)
    def on_failed(exc_info):
        self.on_error(exc_info)
        callback(False)
    task = partial(self.wallet.sign_transaction, tx, password)
    WaitingDialog(self, _('Signing transaction...'), task,
                  on_signed, on_failed)
def broadcast_transaction(self, tx, tx_desc):
    """Broadcast tx on a worker thread, then report the result in the GUI.

    If a payment request is pending, marks it paid and sends the BIP70
    payment ACK with a refund address.
    """
    def broadcast_thread():
        # non-GUI thread
        pr = self.payment_request
        if pr and pr.has_expired():
            self.payment_request = None
            return False, _("Payment request has expired")
        status, msg = self.network.broadcast(tx)
        if pr and status is True:
            self.invoices.set_paid(pr, tx.txid())
            self.invoices.save()
            self.payment_request = None
            refund_address = self.wallet.get_receiving_addresses()[0]
            ack_status, ack_msg = pr.send_ack(str(tx), refund_address)
            if ack_status:
                msg = ack_msg
        return status, msg
    # Capture current TL window; override might be removed on return
    parent = self.top_level_window()
    def broadcast_done(result):
        # GUI thread
        if result:
            status, msg = result
            if status:
                if tx_desc is not None and tx.is_complete():
                    self.wallet.set_label(tx.txid(), tx_desc)
                parent.show_message(_('Payment sent.') + '\n' + msg)
                self.invoice_list.update()
                self.do_clear()
            else:
                parent.show_error(msg)
    WaitingDialog(self, _('Broadcasting transaction...'),
                  broadcast_thread, broadcast_done, self.on_error)
def query_choice(self, msg, choices):
    """Modal radio-button chooser; returns the selected index or None on cancel."""
    # Needed by QtHandler for hardware wallets
    dialog = WindowModalDialog(self.top_level_window())
    clayout = ChoicesLayout(msg, choices)
    vbox = QVBoxLayout(dialog)
    vbox.addLayout(clayout.layout())
    vbox.addLayout(Buttons(OkButton(dialog)))
    if not dialog.exec_():
        return None
    return clayout.selected_index()
def lock_amount(self, b):
    """Freeze/unfreeze the amount field (and the Max button inversely)."""
    self.amount_e.setFrozen(b)
    self.max_button.setEnabled(not b)
def prepare_for_payment_request(self):
    """Freeze the send tab while an external payment request is being fetched."""
    self.show_send_tab()
    self.payto_e.is_pr = True
    for e in [self.payto_e, self.amount_e, self.message_e]:
        e.setFrozen(True)
    self.payto_e.setText(_("please wait..."))
    return True
def delete_invoice(self, key):
    """Remove the invoice identified by key and refresh the invoice list."""
    self.invoices.remove(key)
    self.invoice_list.update()
def payment_request_ok(self):
    """Fill the send tab from a successfully verified payment request."""
    pr = self.payment_request
    key = self.invoices.add(pr)
    status = self.invoices.get_status(key)
    self.invoice_list.update()
    if status == PR_PAID:
        self.show_message("invoice already paid")
        self.do_clear()
        self.payment_request = None
        return
    self.payto_e.is_pr = True
    # Green = still valid, red/expired styling otherwise
    if not pr.has_expired():
        self.payto_e.setGreen()
    else:
        self.payto_e.setExpired()
    self.payto_e.setText(pr.get_requestor())
    self.amount_e.setText(format_satoshis_plain(pr.get_amount(), self.decimal_point))
    self.message_e.setText(pr.get_memo())
    # signal to set fee
    self.amount_e.textEdited.emit("")
def payment_request_error(self):
    """Report a payment-request verification failure and reset the send tab."""
    self.show_message(self.payment_request.error)
    self.payment_request = None
    self.do_clear()
def on_pr(self, request):
    """Network-thread callback for a fetched payment request; relays the
    outcome to the GUI thread via Qt signals."""
    self.payment_request = request
    if self.payment_request.verify(self.contacts):
        self.emit(SIGNAL('payment_request_ok'))
    else:
        self.emit(SIGNAL('payment_request_error'))
def pay_to_URI(self, URI):
    """Populate the send tab from a BIP21 payment URI.

    URIs carrying a request URL ('r') or a signed request ('name'+'sig')
    trigger an asynchronous fetch via on_pr instead of direct fill-in.
    """
    if not URI:
        return
    try:
        out = util.parse_URI(unicode(URI), self.on_pr)
    except BaseException as e:
        self.show_error(_('Invalid bitcoin URI:') + '\n' + str(e))
        return
    self.show_send_tab()
    r = out.get('r')
    sig = out.get('sig')
    name = out.get('name')
    if r or (name and sig):
        # Payment request will arrive asynchronously; freeze the tab
        self.prepare_for_payment_request()
        return
    address = out.get('address')
    amount = out.get('amount')
    label = out.get('label')
    message = out.get('message')
    # use label as description (not BIP21 compliant)
    if label and not message:
        message = label
    if address:
        self.payto_e.setText(address)
    if message:
        self.message_e.setText(message)
    if amount:
        self.amount_e.setAmount(amount)
        self.amount_e.textEdited.emit("")
def do_clear(self):
    """Reset the whole send tab: fields, coin selection, RBF and PR state."""
    self.is_max = False
    self.not_enough_funds = False
    self.payment_request = None
    self.payto_e.is_pr = False
    for e in [self.payto_e, self.message_e, self.amount_e, self.fiat_send_e, self.fee_e]:
        e.setText('')
        e.setFrozen(False)
    self.set_pay_from([])
    self.rbf_checkbox.setChecked(False)
    self.update_status()
    run_hook('do_clear', self)
def set_frozen_state(self, addrs, freeze):
    """(Un)freeze addrs in the wallet and refresh all affected views."""
    self.wallet.set_frozen_state(addrs, freeze)
    self.address_list.update()
    self.utxo_list.update()
    self.update_fee()
def create_list_tab(self, l):
    """Wrap a list widget l in a tab container and register it as the tab's searchable list."""
    w = QWidget()
    w.searchable_list = l
    vbox = QVBoxLayout()
    w.setLayout(vbox)
    vbox.setMargin(0)
    vbox.setSpacing(0)
    vbox.addWidget(l)
    buttons = QWidget()
    vbox.addWidget(buttons)
    return w
def create_addresses_tab(self):
    """Build and return the Addresses tab."""
    from address_list import AddressList
    self.address_list = l = AddressList(self)
    l.setObjectName("addresses_container")
    return self.create_list_tab(l)
def create_utxo_tab(self):
    """Build and return the Coins (UTXO) tab."""
    from utxo_list import UTXOList
    self.utxo_list = l = UTXOList(self)
    return self.create_list_tab(l)
def create_contacts_tab(self):
    """Build and return the Contacts tab."""
    from contact_list import ContactList
    self.contact_list = l = ContactList(self)
    l.setObjectName("contacts_container")
    return self.create_list_tab(l)
def remove_address(self, addr):
    """Delete addr from the wallet after user confirmation and refresh views."""
    if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
        self.wallet.delete_address(addr)
        self.address_list.update()
        self.history_list.update()
def get_coins(self):
    """Coins to use as inputs: the manually selected 'pay from' set when
    present, otherwise every spendable coin across the wallet's addresses."""
    if not self.pay_from:
        domain = self.wallet.get_addresses()
        return self.wallet.get_spendable_coins(domain)
    return self.pay_from
def spend_coins(self, coins):
    """Open the send tab with coins pre-selected as transaction inputs."""
    self.set_pay_from(coins)
    self.show_send_tab()
    self.update_fee()
def paytomany(self):
    """Switch the Pay-To field into multi-output mode and explain the format."""
    self.show_send_tab()
    self.payto_e.paytomany()
    msg = '\n'.join([
        _('Enter a list of outputs in the \'Pay to\' field.'),
        _('One output per line.'),
        _('Format: address, amount'),
        _('You may load a CSV file using the file icon.')
    ])
    self.show_message(msg, title=_('Pay to many'))
def payto_contacts(self, labels):
    """Fill the send tab's Pay-To field from one or more selected contacts.

    Multiple contacts produce pay-to-many lines with a 0 placeholder amount.
    """
    paytos = [self.get_contact_payto(label) for label in labels]
    self.show_send_tab()
    if len(paytos) == 1:
        self.payto_e.setText(paytos[0])
        self.amount_e.setFocus()
    else:
        text = "\n".join([payto + ", 0" for payto in paytos])
        self.payto_e.setText(text)
        self.payto_e.setFocus()
def set_contact(self, label, address):
    """Add or update a contact; returns True on success, False for an invalid address."""
    if not is_valid(address):
        self.show_error(_('Invalid Address'))
        self.contact_list.update() # Displays original unchanged value
        return False
    self.contacts[address] = ('address', label)
    self.contact_list.update()
    self.history_list.update()
    self.update_completions()
    return True
def delete_contacts(self, labels):
    """Remove the given contacts after confirmation and refresh dependent views."""
    if not self.question(_("Remove %s from your list of contacts?")
                         % " + ".join(labels)):
        return
    for label in labels:
        self.contacts.pop(label)
    self.history_list.update()
    self.contact_list.update()
    self.update_completions()
def show_invoice(self, key):
    """Re-verify the invoice at key and display its details dialog."""
    pr = self.invoices.get(key)
    pr.verify(self.contacts)
    self.show_pr_details(pr)
def show_pr_details(self, pr):
    """Display a modal dialog with the details of invoice pr.

    Shows requestor, outputs, memo, signature status and expiry, and
    offers Save (BIP70 export) and Delete actions.
    """
    key = pr.get_id()
    d = WindowModalDialog(self, _("Invoice"))
    vbox = QVBoxLayout(d)
    grid = QGridLayout()
    grid.addWidget(QLabel(_("Requestor") + ':'), 0, 0)
    grid.addWidget(QLabel(pr.get_requestor()), 0, 1)
    grid.addWidget(QLabel(_("Amount") + ':'), 1, 0)
    outputs_str = '\n'.join(map(lambda x: self.format_amount(x[2])+ self.base_unit() + ' @ ' + x[1], pr.get_outputs()))
    grid.addWidget(QLabel(outputs_str), 1, 1)
    expires = pr.get_expiration_date()
    grid.addWidget(QLabel(_("Memo") + ':'), 2, 0)
    grid.addWidget(QLabel(pr.get_memo()), 2, 1)
    grid.addWidget(QLabel(_("Signature") + ':'), 3, 0)
    grid.addWidget(QLabel(pr.get_verify_status()), 3, 1)
    if expires:
        grid.addWidget(QLabel(_("Expires") + ':'), 4, 0)
        grid.addWidget(QLabel(format_time(expires)), 4, 1)
    vbox.addLayout(grid)
    def do_export():
        # Fixed: use a save-file dialog (the original open-file dialog could
        # not name a new file), suggest a default filename, and interpolate
        # the filename AFTER translation so the msgid matches the catalog.
        fn = self.getSaveFileName(_("Save invoice to file"), key + '.bip70', "*.bip70")
        if not fn:
            return
        with open(fn, 'w') as f:
            f.write(pr.raw)
        self.show_message(_('Invoice saved as') + ' ' + fn)
    exportButton = EnterButton(_('Save'), do_export)
    def do_delete():
        if self.question(_('Delete invoice?')):
            self.invoices.remove(key)
            self.history_list.update()
            d.close()
    deleteButton = EnterButton(_('Delete'), do_delete)
    vbox.addLayout(Buttons(exportButton, deleteButton, CloseButton(d)))
    d.exec_()
def do_pay_invoice(self, key):
    """Load the invoice at key into the send tab, verifying it first."""
    pr = self.invoices.get(key)
    self.payment_request = pr
    self.prepare_for_payment_request()
    if pr.verify(self.contacts):
        self.payment_request_ok()
    else:
        self.payment_request_error()
def create_console_tab(self):
    """Build and return the Console tab widget."""
    from console import Console
    self.console = console = Console()
    console.setObjectName("console_container")
    return console
def update_console(self):
    """(Re)populate the console namespace: wallet objects, util modules,
    and one wrapper per Commands method that routes through the password dialog."""
    console = self.console
    console.history = self.config.get("console-history",[])
    console.history_index = len(console.history)
    console.updateNamespace({'wallet' : self.wallet,
                             'network' : self.network,
                             'plugins' : self.gui_object.plugins,
                             'window': self})
    console.updateNamespace({'util' : util, 'bitcoin':bitcoin})
    c = commands.Commands(self.config, self.wallet, self.network, lambda: self.console.set_json(True))
    methods = {}
    def mkfunc(f, method):
        # Python 2 builtin apply(); binds 'method' per iteration so each
        # console command dispatches to the right Commands method
        return lambda *args: apply( f, (method, args, self.password_dialog ))
    for m in dir(c):
        if m[0]=='_' or m in ['network','wallet']: continue
        methods[m] = mkfunc(c._run, m)
    console.updateNamespace(methods)
def create_status_bar(self):
    """Build the status bar: balance label, search box, and the
    password/preferences/seed/network buttons."""
    sb = QStatusBar()
    sb.setFixedHeight(35)
    qtVersion = qVersion()
    self.balance_label = QLabel("")
    self.balance_label.setObjectName("main_window_balance")
    sb.addWidget(self.balance_label)
    self.search_box = QLineEdit()
    self.search_box.textChanged.connect(self.do_search)
    self.search_box.hide()
    sb.addPermanentWidget(self.search_box)
    # Icon is filled in later by update_lock_icon()
    self.lock_icon = QIcon()
    self.password_button = StatusBarButton(self.lock_icon, _("Password"), self.change_password_dialog )
    sb.addPermanentWidget(self.password_button)
    sb.addPermanentWidget(StatusBarButton(QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) )
    self.seed_button = StatusBarButton(QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog )
    sb.addPermanentWidget(self.seed_button)
    self.status_button = StatusBarButton(QIcon(":icons/status_disconnected.png"), _("Network"), self.run_network_dialog )
    sb.addPermanentWidget(self.status_button)
    run_hook('create_status_bar', sb)
    self.setStatusBar(sb)
def update_lock_icon(self):
    """Reflect wallet encryption state on the status-bar password button."""
    if self.wallet.has_password():
        icon = QIcon(":icons/lock.png")
    else:
        icon = QIcon(":icons/unlock.png")
    self.password_button.setIcon(icon)
def update_buttons_on_seed(self):
    """Show/hide seed, password and send buttons based on wallet capabilities."""
    self.seed_button.setVisible(self.wallet.has_seed())
    self.password_button.setVisible(self.wallet.can_change_password())
    self.send_button.setVisible(not self.wallet.is_watching_only())
def change_password_dialog(self):
    """Run the change-password dialog and apply the result to the wallet."""
    from password_dialog import ChangePasswordDialog
    d = ChangePasswordDialog(self, self.wallet)
    ok, password, new_password, encrypt_file = d.run()
    if not ok:
        return
    try:
        self.wallet.update_password(password, new_password, encrypt_file)
    except BaseException as e:
        self.show_error(str(e))
        return
    except:
        # Non-Exception errors (e.g. hardware wallet failures)
        traceback.print_exc(file=sys.stdout)
        self.show_error(_('Failed to update password'))
        return
    msg = _('Password was updated successfully') if new_password else _('Password is disabled, this wallet is not protected')
    self.show_message(msg, title=_("Success"))
    self.update_lock_icon()
def toggle_search(self):
    """Show/hide the status-bar search box; clearing the filter when hiding."""
    self.search_box.setHidden(not self.search_box.isHidden())
    if not self.search_box.isHidden():
        self.search_box.setFocus(1)
    else:
        self.do_search('')
def do_search(self, t):
    """Apply filter text t to the current tab's searchable list, if it has one."""
    tab = self.tabs.currentWidget()
    if hasattr(tab, 'searchable_list'):
        tab.searchable_list.filter(t)
def new_contact_dialog(self):
    """Prompt for a new contact's address and name, then store it via set_contact."""
    d = WindowModalDialog(self, _("New Contact"))
    vbox = QVBoxLayout(d)
    vbox.addWidget(QLabel(_('New Contact') + ':'))
    grid = QGridLayout()
    line1 = QLineEdit()
    line1.setFixedWidth(280)
    line2 = QLineEdit()
    line2.setFixedWidth(280)
    grid.addWidget(QLabel(_("Address")), 1, 0)
    grid.addWidget(line1, 1, 1)
    grid.addWidget(QLabel(_("Name")), 2, 0)
    grid.addWidget(line2, 2, 1)
    vbox.addLayout(grid)
    vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
    if d.exec_():
        self.set_contact(unicode(line2.text()), str(line1.text()))
def show_master_public_keys(self):
    """Show the wallet's master public key(s) in a modal dialog with QR.

    Multisig wallets get a selector with one "cosigner N" label per key;
    single-key wallets show the key directly with no selector.
    """
    dialog = WindowModalDialog(self, "Master Public Keys")
    mpk_list = self.wallet.get_master_public_keys()
    vbox = QVBoxLayout()
    mpk_text = ShowQRTextEdit()
    mpk_text.setMaximumHeight(100)
    mpk_text.addCopyButton(self.app)
    def show_mpk(index):
        mpk_text.setText(mpk_list[index])
    # only show the combobox in case multiple accounts are available
    if len(mpk_list) > 1:
        def label(key):
            # Fixed: use the function's own argument. The original returned
            # str(i+1), closing over the list-comprehension variable 'i' —
            # that only produced the right numbers via Python 2 scope leakage.
            if isinstance(self.wallet, Multisig_Wallet):
                return _("cosigner") + ' ' + str(key+1)
            return ''
        labels = [ label(i) for i in range(len(mpk_list))]
        on_click = lambda clayout: show_mpk(clayout.selected_index())
        labels_clayout = ChoicesLayout(_("Master Public Keys"), labels, on_click)
        vbox.addLayout(labels_clayout.layout())
    show_mpk(0)
    vbox.addWidget(mpk_text)
    vbox.addLayout(Buttons(CloseButton(dialog)))
    dialog.setLayout(vbox)
    dialog.exec_()
@protected
def show_seed_dialog(self, password):
    """Decrypt and display the wallet seed (password supplied by @protected)."""
    if not self.wallet.has_seed():
        self.show_message(_('This wallet has no seed'))
        return
    keystore = self.wallet.get_keystore()
    try:
        seed = keystore.get_seed(password)
        passphrase = keystore.get_passphrase(password)
    except BaseException as e:
        self.show_error(str(e))
        return
    from seed_dialog import SeedDialog
    d = SeedDialog(self, seed, passphrase)
    d.exec_()
def show_qrcode(self, data, title = _("QR code"), parent=None):
    """Pop up a modal QR-code dialog for data; no-op on empty input."""
    if not data:
        return
    d = QRDialog(data, parent or self, title)
    d.exec_()
def show_public_keys(self, address):
    """Display the public key(s) controlling address in a QR-capable dialog."""
    if not address: return
    try:
        pubkey_list = self.wallet.get_public_keys(address)
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        self.show_message(str(e))
        return
    d = WindowModalDialog(self, _("Public key"))
    d.setMinimumSize(600, 200)
    vbox = QVBoxLayout()
    vbox.addWidget( QLabel(_("Address") + ': ' + address))
    vbox.addWidget(QLabel(_("Public key") + ':'))
    keys_e = ShowQRTextEdit(text='\n'.join(pubkey_list))
    keys_e.addCopyButton(self.app)
    vbox.addWidget(keys_e)
    vbox.addLayout(Buttons(CloseButton(d)))
    d.setLayout(vbox)
    d.exec_()
@protected
def show_private_key(self, address, password):
    """Decrypt and display the private key(s) for address (password via @protected)."""
    if not address:
        return
    try:
        pk_list = self.wallet.get_private_key(address, password)
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        self.show_message(str(e))
        return
    d = WindowModalDialog(self, _("Private key"))
    d.setMinimumSize(600, 200)
    vbox = QVBoxLayout()
    vbox.addWidget( QLabel(_("Address") + ': ' + address))
    vbox.addWidget( QLabel(_("Private key") + ':'))
    keys_e = ShowQRTextEdit(text='\n'.join(pk_list))
    keys_e.addCopyButton(self.app)
    vbox.addWidget(keys_e)
    vbox.addLayout(Buttons(CloseButton(d)))
    d.setLayout(vbox)
    d.exec_()
@protected
def do_sign(self, address, message, signature, password):
    """Sign *message* with the key of *address* (sign/verify dialog handler).

    address/message/signature are the Qt input widgets; the base64
    signature is written back into the signature widget on success.
    Password-gated via @protected; the signing itself runs on the wallet
    thread so the GUI stays responsive.
    """
    address = str(address.text()).strip()
    message = unicode(message.toPlainText()).encode('utf-8').strip()
    if not bitcoin.is_address(address):
        self.show_message('Invalid Bitcoin address.')
        return
    if not bitcoin.is_p2pkh(address):
        # message signing is only defined for p2pkh addresses here
        self.show_message('Cannot sign messages with this type of address.')
        return
    if not self.wallet.is_mine(address):
        self.show_message('Address not in wallet.')
        return
    task = partial(self.wallet.sign_message, address, message, password)
    def show_signed_message(sig):
        signature.setText(base64.b64encode(sig))
    self.wallet.thread.add(task, on_success=show_signed_message)
def do_verify(self, address, message, signature):
    """Verify a signed message against an address (sign/verify dialog handler).

    address/message/signature are the Qt input widgets; shows a message
    box with the verification result.
    """
    address = str(address.text()).strip()
    message = unicode(message.toPlainText()).encode('utf-8').strip()
    if not bitcoin.is_address(address):
        self.show_message('Invalid Bitcoin address.')
        return
    if not bitcoin.is_p2pkh(address):
        self.show_message('Cannot verify messages with this type of address.')
        return
    try:
        # This can throw on invalid base64
        sig = base64.b64decode(str(signature.toPlainText()))
        verified = bitcoin.verify_message(address, sig, message)
    except Exception:
        # Narrowed from a bare 'except:': a malformed signature still just
        # means "not verified", but SystemExit/KeyboardInterrupt are no
        # longer swallowed.
        verified = False
    if verified:
        self.show_message(_("Signature verified"))
    else:
        self.show_error(_("Wrong signature"))
def sign_verify_message(self, address=''):
    """Open the combined sign/verify message dialog, optionally pre-filled
    with *address*."""
    d = WindowModalDialog(self, _('Sign/verify Message'))
    d.setMinimumSize(410, 290)
    layout = QGridLayout(d)
    message_e = QTextEdit()
    layout.addWidget(QLabel(_('Message')), 1, 0)
    layout.addWidget(message_e, 1, 1)
    layout.setRowStretch(2,3)
    address_e = QLineEdit()
    address_e.setText(address)
    layout.addWidget(QLabel(_('Address')), 2, 0)
    layout.addWidget(address_e, 2, 1)
    signature_e = QTextEdit()
    layout.addWidget(QLabel(_('Signature')), 3, 0)
    layout.addWidget(signature_e, 3, 1)
    layout.setRowStretch(3,1)
    hbox = QHBoxLayout()
    # Sign / Verify delegate to do_sign / do_verify with the live widgets
    b = QPushButton(_("Sign"))
    b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
    hbox.addWidget(b)
    b = QPushButton(_("Verify"))
    b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
    hbox.addWidget(b)
    b = QPushButton(_("Close"))
    b.clicked.connect(d.accept)
    hbox.addWidget(b)
    layout.addLayout(hbox, 4, 1)
    d.exec_()
@protected
def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
    """Decrypt the ciphertext with the wallet key for pubkey_e.

    Password-gated via @protected; runs on the wallet thread and writes
    the plaintext back into message_e on success.
    """
    cyphertext = str(encrypted_e.toPlainText())
    task = partial(self.wallet.decrypt_message, str(pubkey_e.text()),
                   cyphertext, password)
    self.wallet.thread.add(task, on_success=message_e.setText)
def do_encrypt(self, message_e, pubkey_e, encrypted_e):
    """Encrypt the message in message_e to the public key in pubkey_e and
    write the result into encrypted_e."""
    message = unicode(message_e.toPlainText())
    message = message.encode('utf-8')
    try:
        encrypted = bitcoin.encrypt_message(message, str(pubkey_e.text()))
        encrypted_e.setText(encrypted)
    except BaseException as e:
        # e.g. malformed public key; show the error instead of crashing
        traceback.print_exc(file=sys.stdout)
        self.show_warning(str(e))
def encrypt_message(self, address = ''):
    """Open the encrypt/decrypt message dialog.

    If *address* is given and belongs to the wallet, its public key is
    pre-filled.
    """
    d = WindowModalDialog(self, _('Encrypt/decrypt Message'))
    d.setMinimumSize(610, 490)
    layout = QGridLayout(d)
    message_e = QTextEdit()
    layout.addWidget(QLabel(_('Message')), 1, 0)
    layout.addWidget(message_e, 1, 1)
    layout.setRowStretch(2,3)
    pubkey_e = QLineEdit()
    if address:
        pubkey = self.wallet.get_public_key(address)
        pubkey_e.setText(pubkey)
    layout.addWidget(QLabel(_('Public key')), 2, 0)
    layout.addWidget(pubkey_e, 2, 1)
    encrypted_e = QTextEdit()
    layout.addWidget(QLabel(_('Encrypted')), 3, 0)
    layout.addWidget(encrypted_e, 3, 1)
    layout.setRowStretch(3,1)
    hbox = QHBoxLayout()
    b = QPushButton(_("Encrypt"))
    b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
    hbox.addWidget(b)
    b = QPushButton(_("Decrypt"))
    b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
    hbox.addWidget(b)
    b = QPushButton(_("Close"))
    b.clicked.connect(d.accept)
    hbox.addWidget(b)
    layout.addLayout(hbox, 4, 1)
    d.exec_()
def password_dialog(self, msg=None, parent=None):
    """Prompt the user for the wallet password; return it (None on cancel)."""
    from password_dialog import PasswordDialog
    return PasswordDialog(parent or self, msg).run()
def tx_from_text(self, txt):
    """Parse a serialized transaction string.

    Returns a Transaction, or None after showing a critical error dialog
    when parsing fails.
    """
    from electrum.transaction import tx_from_str, Transaction
    try:
        tx = tx_from_str(txt)
        return Transaction(tx)
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are no longer silently swallowed; parse errors are still
        # reported to the user rather than raised.
        traceback.print_exc(file=sys.stdout)
        self.show_critical(_("Electrum was unable to parse your transaction"))
        return
def read_tx_from_qrcode(self):
    """Scan a QR code from the camera: a payment URI or an offline signed tx."""
    from electrum import qrscanner
    try:
        data = qrscanner.scan_barcode(self.config.get_video_device())
    except BaseException as e:
        self.show_error(str(e))
        return
    if not data:
        return
    # if the user scanned a bitcoin URI
    if data.startswith("ion:"):
        self.pay_to_URI(data)
        return
    # else if the user scanned an offline signed tx
    # transactions are binary, but qrcode seems to return utf8...
    data = data.decode('utf8')
    # QR transactions are base43-encoded; re-hex before parsing
    z = bitcoin.base_decode(data, length=None, base=43)
    data = ''.join(chr(ord(b)) for b in z).encode('hex')
    tx = self.tx_from_text(data)
    if not tx:
        return
    self.show_transaction(tx)
def read_tx_from_file(self):
    """Let the user pick a *.txn file; return the parsed transaction or None."""
    fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
    if not fileName:
        return
    try:
        with open(fileName, "r") as f:
            file_content = f.read()
    except (ValueError, IOError, os.error) as reason:
        self.show_critical(_("Electrum was unable to open your transaction file") + "\n" + str(reason), title=_("Unable to read file or no transaction found"))
        return
    return self.tx_from_text(file_content)
def do_process_from_text(self):
    """Ask the user to paste a raw transaction and display it if it parses."""
    raw = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
    if raw:
        tx = self.tx_from_text(raw)
        if tx:
            self.show_transaction(tx)
def do_process_from_file(self):
    """Load a transaction from a user-selected file and display it."""
    loaded = self.read_tx_from_file()
    if loaded:
        self.show_transaction(loaded)
def do_process_from_txid(self):
    """Fetch a transaction by txid from the server and display it."""
    from electrum import transaction
    txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
    if ok and txid:
        txid = str(txid).strip()
        try:
            # blocking request to the Electrum server
            r = self.network.synchronous_get(('blockchain.transaction.get',[txid]))
        except BaseException as e:
            self.show_message(str(e))
            return
        tx = transaction.Transaction(r)
        self.show_transaction(tx)
@protected
def export_privkeys_dialog(self, password):
    """Export all wallet private keys to a CSV/JSON file.

    Keys are derived on a background thread (throttled, one address at a
    time) while the dialog reports progress through custom Qt signals.
    Password-gated via @protected.
    """
    if self.wallet.is_watching_only():
        self.show_message(_("This is a watching-only wallet"))
        return
    d = WindowModalDialog(self, _('Private keys'))
    d.setMinimumSize(850, 300)
    vbox = QVBoxLayout(d)
    msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
                          _("Exposing a single private key can compromise your entire wallet!"),
                          _("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
    vbox.addWidget(QLabel(msg))
    e = QTextEdit()
    e.setReadOnly(True)
    vbox.addWidget(e)
    defaultname = 'electrum-private-keys.csv'
    select_msg = _('Select file to export your private keys to')
    hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
    vbox.addLayout(hbox)
    b = OkButton(d, _('Export'))
    # Export stays disabled until all keys have been derived
    b.setEnabled(False)
    vbox.addLayout(Buttons(CancelButton(d), b))
    private_keys = {}
    addresses = self.wallet.get_addresses()
    done = False
    def privkeys_thread():
        for addr in addresses:
            time.sleep(0.1)
            if done:
                # dialog was cancelled; stop deriving
                break
            private_keys[addr] = "\n".join(self.wallet.get_private_key(addr, password))
            d.emit(SIGNAL('computing_privkeys'))
        d.emit(SIGNAL('show_privkeys'))
    def show_privkeys():
        s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items()))
        e.setText(s)
        b.setEnabled(True)
    # progress and completion are signalled back onto the GUI thread
    d.connect(d, QtCore.SIGNAL('computing_privkeys'), lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses))))
    d.connect(d, QtCore.SIGNAL('show_privkeys'), show_privkeys)
    threading.Thread(target=privkeys_thread).start()
    if not d.exec_():
        done = True
        return
    filename = filename_e.text()
    if not filename:
        return
    try:
        self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
    except (IOError, os.error) as reason:
        txt = "\n".join([
            _("Electrum was unable to produce a private key-export."),
            str(reason)
        ])
        self.show_critical(txt, title=_("Unable to create csv"))
    except Exception as e:
        self.show_message(str(e))
        return
    self.show_message(_("Private keys exported."))
def do_export_privkeys(self, fileName, pklist, is_csv):
    """Write the {address: private_key} mapping to fileName.

    CSV output has an 'address,private_key' header row with addresses
    right-padded to 34 characters; otherwise the mapping is dumped as
    indented JSON.
    """
    with open(fileName, "w+") as f:
        if is_csv:
            writer = csv.writer(f)
            writer.writerow(["address", "private_key"])
            for addr, pk in pklist.items():
                writer.writerow(["%34s" % addr, pk])
        else:
            import json
            f.write(json.dumps(pklist, indent = 4))
def do_import_labels(self):
    """Import wallet labels from a user-selected JSON file."""
    labelsFile = self.getOpenFileName(_("Open labels file"), "*.json")
    if not labelsFile: return
    try:
        # 'with' guarantees the handle is closed even if json.loads or
        # set_label raises (the original leaked the handle in that case).
        with open(labelsFile, 'r') as f:
            data = f.read()
        for key, value in json.loads(data).items():
            self.wallet.set_label(key, value)
        self.show_message(_("Your labels were imported from") + " '%s'" % str(labelsFile))
    except (IOError, os.error) as reason:
        self.show_critical(_("Electrum was unable to import your labels.") + "\n" + str(reason))
def do_export_labels(self):
    """Export wallet labels to a user-selected JSON file."""
    labels = self.wallet.labels
    try:
        fileName = self.getSaveFileName(_("Select file to save your labels"), 'electrum_labels.json', "*.json")
        if fileName:
            with open(fileName, 'w+') as f:
                json.dump(labels, f, indent=4, sort_keys=True)
            # NOTE(review): "where exported" typo lives in the translation
            # catalogue; left as-is so existing translations keep matching.
            self.show_message(_("Your labels where exported to") + " '%s'" % str(fileName))
    except (IOError, os.error) as reason:
        # fixed the Python-2-only 'except ..., reason' comma syntax
        self.show_critical(_("Electrum was unable to export your labels.") + "\n" + str(reason))
def export_history_dialog(self):
    """Export the wallet transaction history to a CSV/JSON file."""
    d = WindowModalDialog(self, _('Export History'))
    d.setMinimumSize(400, 200)
    vbox = QVBoxLayout(d)
    defaultname = os.path.expanduser('~/electrum-ion-history.csv')
    select_msg = _('Select file to export your wallet transactions to')
    hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
    vbox.addLayout(hbox)
    vbox.addStretch(1)
    hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
    vbox.addLayout(hbox)
    # let plugins extend the dialog (e.g. extra export options)
    run_hook('export_history_dialog', self, hbox)
    self.update()
    if not d.exec_():
        return
    filename = filename_e.text()
    if not filename:
        return
    try:
        self.do_export_history(self.wallet, filename, csv_button.isChecked())
    except (IOError, os.error) as reason:
        # fixed the Python-2-only 'except ..., reason' comma syntax
        export_error_label = _("Electrum was unable to produce a transaction export.")
        self.show_critical(export_error_label + "\n" + str(reason), title=_("Unable to export history"))
        return
    self.show_message(_("Your wallet history has been successfully exported."))
def plot_history_dialog(self):
    """Plot the wallet history via electrum.plot (requires matplotlib)."""
    try:
        from electrum.plot import plot_history
        wallet = self.wallet
        history = wallet.get_history()
        if len(history) > 0:
            plt = plot_history(self.wallet, history)
            plt.show()
    except BaseException as e:
        # e.g. matplotlib missing; show the error instead of crashing
        self.show_error(str(e))
        return
def do_export_history(self, wallet, fileName, is_csv):
    """Write the wallet history to fileName as CSV or indented JSON."""
    history = wallet.get_history()
    lines = []
    for item in history:
        tx_hash, height, confirmations, timestamp, value, balance = item
        if height>0:
            if timestamp is not None:
                time_string = format_time(timestamp)
            else:
                time_string = _("unverified")
        else:
            time_string = _("unconfirmed")
        if value is not None:
            value_string = format_satoshis(value, True)
        else:
            value_string = '--'
        if tx_hash:
            label = wallet.get_label(tx_hash)
            label = label.encode('utf-8')
        else:
            label = ""
        # CSV rows are positional; JSON entries carry explicit field names
        if is_csv:
            lines.append([tx_hash, label, confirmations, value_string, time_string])
        else:
            lines.append({'txid':tx_hash, 'date':"%16s"%time_string, 'label':label, 'value':value_string})
    with open(fileName, "w+") as f:
        if is_csv:
            transaction = csv.writer(f, lineterminator='\n')
            transaction.writerow(["transaction_hash","label", "confirmations", "value", "timestamp"])
            for line in lines:
                transaction.writerow(line)
        else:
            import json
            f.write(json.dumps(lines, indent = 4))
def sweep_key_dialog(self):
    """Sweep funds from externally supplied private keys into this wallet."""
    d = WindowModalDialog(self, title=_('Sweep private keys'))
    d.setMinimumSize(600, 300)
    vbox = QVBoxLayout(d)
    vbox.addWidget(QLabel(_("Enter private keys:")))
    keys_e = QTextEdit()
    keys_e.setTabChangesFocus(True)
    vbox.addWidget(keys_e)
    addresses = self.wallet.get_unused_addresses()
    h, address_e = address_field(addresses)
    vbox.addLayout(h)
    vbox.addStretch(1)
    button = OkButton(d, _('Sweep'))
    vbox.addLayout(Buttons(CancelButton(d), button))
    button.setEnabled(False)
    def get_address():
        # valid destination address, or None
        addr = str(address_e.text()).strip()
        if bitcoin.is_address(addr):
            return addr
    def get_pk():
        # parsed private keys from the text box, or None
        text = str(keys_e.toPlainText())
        return keystore.get_private_keys(text)
    # Sweep is enabled only when both the address and keys parse
    f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None)
    on_address = lambda text: address_e.setStyleSheet(BLACK_FG if get_address() else RED_FG)
    keys_e.textChanged.connect(f)
    address_e.textChanged.connect(f)
    address_e.textChanged.connect(on_address)
    if not d.exec_():
        return
    try:
        tx = self.wallet.sweep(get_pk(), self.network, self.config, get_address(), None)
    except BaseException as e:
        self.show_message(str(e))
        return
    self.warn_if_watching_only()
    self.show_transaction(tx)
def _do_import(self, title, msg, func):
    """Generic import helper: prompt for whitespace-separated keys and
    apply *func* to each.

    func(key) should return the resulting address. Failures are collected
    and reported at the end instead of aborting the whole import.
    """
    text = text_dialog(self, title, msg + ' :', _('Import'))
    if not text:
        return
    bad = []
    good = []
    for key in str(text).split():
        try:
            good.append(func(key))
        except BaseException:
            # one bad entry must not abort the rest of the import
            # (dropped the unused 'as e' binding and redundant 'continue')
            bad.append(key)
    if good:
        self.show_message(_("The following addresses were added") + ':\n' + '\n'.join(good))
    if bad:
        self.show_critical(_("The following inputs could not be imported") + ':\n'+ '\n'.join(bad))
    self.address_list.update()
    self.history_list.update()
def import_addresses(self):
    """Import watch-only addresses, if this wallet type supports it."""
    if self.wallet.can_import_address():
        self._do_import(_('Import addresses'), _("Enter addresses"),
                        self.wallet.import_address)
@protected
def do_import_privkey(self, password):
    """Import private keys typed by the user.

    Password-gated via @protected; no-op for wallet types that cannot
    import keys.
    """
    if not self.wallet.can_import_privkey():
        return
    title, msg = _('Import private keys'), _("Enter private keys")
    self._do_import(title, msg, lambda x: self.wallet.import_key(x, password))
def update_fiat(self):
    """Show or hide the fiat amount widgets according to the FX settings."""
    visible = self.fx and self.fx.is_enabled()
    self.fiat_send_e.setVisible(visible)
    self.fiat_receive_e.setVisible(visible)
    self.history_list.refresh_headers()
    self.history_list.update()
    self.update_status()
def settings_dialog(self):
    """Build and run the Preferences dialog.

    Tabs: Fees, Transactions, Appearance, Fiat, Identity. Each tab is a
    list of (label, widget) rows; closures wired to widget signals write
    changes straight into self.config / self.wallet.
    """
    self.need_restart = False
    d = WindowModalDialog(self, _('Preferences'))
    vbox = QVBoxLayout()
    tabs = QTabWidget()
    gui_widgets = []
    fee_widgets = []
    tx_widgets = []
    id_widgets = []
    # language
    lang_help = _('Select which language is used in the GUI (after restart).')
    lang_label = HelpLabel(_('Language') + ':', lang_help)
    lang_combo = QComboBox()
    from electrum.i18n import languages
    lang_combo.addItems(languages.values())
    try:
        index = languages.keys().index(self.config.get("language",''))
    except Exception:
        index = 0
    lang_combo.setCurrentIndex(index)
    if not self.config.is_modifiable('language'):
        for w in [lang_combo, lang_label]: w.setEnabled(False)
    def on_lang(x):
        lang_request = languages.keys()[lang_combo.currentIndex()]
        if lang_request != self.config.get('language'):
            self.config.set_key("language", lang_request, True)
            self.need_restart = True
    lang_combo.currentIndexChanged.connect(on_lang)
    gui_widgets.append((lang_label, lang_combo))
    # number of displayed decimal zeros
    nz_help = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
    nz_label = HelpLabel(_('Zeros after decimal point') + ':', nz_help)
    nz = QSpinBox()
    nz.setMinimum(0)
    nz.setMaximum(self.decimal_point)
    nz.setValue(self.num_zeros)
    if not self.config.is_modifiable('num_zeros'):
        for w in [nz, nz_label]: w.setEnabled(False)
    def on_nz():
        value = nz.value()
        if self.num_zeros != value:
            self.num_zeros = value
            self.config.set_key('num_zeros', value, True)
            self.history_list.update()
            self.address_list.update()
    nz.valueChanged.connect(on_nz)
    gui_widgets.append((nz_label, nz))
    # fee settings
    def on_dynfee(x):
        self.config.set_key('dynamic_fees', x == Qt.Checked)
        self.fee_slider.update()
        update_maxfee()
    dynfee_cb = QCheckBox(_('Use dynamic fees'))
    dynfee_cb.setChecked(self.config.is_dynfee())
    dynfee_cb.setToolTip(_("Use fees recommended by the server."))
    fee_widgets.append((dynfee_cb, None))
    dynfee_cb.stateChanged.connect(on_dynfee)
    def on_maxfee(x):
        m = maxfee_e.get_amount()
        if m: self.config.set_key('max_fee_rate', m)
        self.fee_slider.update()
    def update_maxfee():
        # the static max-fee field only applies when dynamic fees are off
        d = self.config.is_dynfee()
        maxfee_e.setDisabled(d)
        maxfee_label.setDisabled(d)
    maxfee_label = HelpLabel(_('Max static fee'), _('Max value of the static fee slider'))
    maxfee_e = BTCkBEdit(self.get_decimal_point)
    maxfee_e.setAmount(self.config.max_fee_rate())
    maxfee_e.textChanged.connect(on_maxfee)
    update_maxfee()
    fee_widgets.append((maxfee_label, maxfee_e))
    feebox_cb = QCheckBox(_('Edit fees manually'))
    feebox_cb.setChecked(self.config.get('show_fee', False))
    feebox_cb.setToolTip(_("Show fee edit box in send tab."))
    def on_feebox(x):
        self.config.set_key('show_fee', x == Qt.Checked)
        self.fee_e.setVisible(bool(x))
    feebox_cb.stateChanged.connect(on_feebox)
    fee_widgets.append((feebox_cb, None))
    rbf_policy = self.config.get('rbf_policy', 1)
    rbf_label = HelpLabel(_('Propose Replace-By-Fee') + ':', '')
    rbf_combo = QComboBox()
    rbf_combo.addItems([_('Always'), _('If the fee is low'), _('Never')])
    rbf_combo.setCurrentIndex(rbf_policy)
    def on_rbf(x):
        self.config.set_key('rbf_policy', x)
    rbf_combo.currentIndexChanged.connect(on_rbf)
    fee_widgets.append((rbf_label, rbf_combo))
    # OpenAlias identity
    msg = _('OpenAlias record, used to receive coins and to sign payment requests.') + '\n\n'\
          + _('The following alias providers are available:') + '\n'\
          + '\n'.join(['https://cryptoname.co/', 'http://xmr.link']) + '\n\n'\
          + 'For more information, see http://openalias.org'
    alias_label = HelpLabel(_('OpenAlias') + ':', msg)
    alias = self.config.get('alias','')
    alias_e = QLineEdit(alias)
    def set_alias_color():
        if not self.config.get('alias'):
            alias_e.setStyleSheet("")
            return
        if self.alias_info:
            alias_addr, alias_name, validated = self.alias_info
            alias_e.setStyleSheet(GREEN_BG if validated else RED_BG)
        else:
            alias_e.setStyleSheet(RED_BG)
    def on_alias_edit():
        alias_e.setStyleSheet("")
        alias = str(alias_e.text())
        self.config.set_key('alias', alias, True)
        if alias:
            self.fetch_alias()
    set_alias_color()
    # re-color the field when the async alias lookup completes
    self.connect(self, SIGNAL('alias_received'), set_alias_color)
    alias_e.editingFinished.connect(on_alias_edit)
    id_widgets.append((alias_label, alias_e))
    # SSL certificate
    msg = ' '.join([
        _('SSL certificate used to sign payment requests.'),
        _('Use setconfig to set ssl_chain and ssl_privkey.'),
    ])
    if self.config.get('ssl_privkey') or self.config.get('ssl_chain'):
        try:
            SSL_identity = paymentrequest.check_ssl_config(self.config)
            SSL_error = None
        except BaseException as e:
            SSL_identity = "error"
            SSL_error = str(e)
    else:
        SSL_identity = ""
        SSL_error = None
    SSL_id_label = HelpLabel(_('SSL certificate') + ':', msg)
    SSL_id_e = QLineEdit(SSL_identity)
    SSL_id_e.setStyleSheet(RED_BG if SSL_error else GREEN_BG if SSL_identity else '')
    if SSL_error:
        SSL_id_e.setToolTip(SSL_error)
    SSL_id_e.setReadOnly(True)
    id_widgets.append((SSL_id_label, SSL_id_e))
    # base unit
    units = ['ION', 'mION', 'ions']
    msg = _('Base unit of your wallet.')\
          + '\n1ION=1000mION.\n' \
          + _(' These settings affects the fields in the Send tab')+' '
    unit_label = HelpLabel(_('Base unit') + ':', msg)
    unit_combo = QComboBox()
    unit_combo.addItems(units)
    unit_combo.setCurrentIndex(units.index(self.base_unit()))
    def on_unit(x):
        unit_result = units[unit_combo.currentIndex()]
        if self.base_unit() == unit_result:
            return
        # preserve the entered amounts across the unit change
        edits = self.amount_e, self.fee_e, self.receive_amount_e
        amounts = [edit.get_amount() for edit in edits]
        if unit_result == 'ION':
            self.decimal_point = 8
        elif unit_result == 'mION':
            self.decimal_point = 5
        elif unit_result == 'ions':
            self.decimal_point = 2
        else:
            raise Exception('Unknown base unit')
        self.config.set_key('decimal_point', self.decimal_point, True)
        self.history_list.update()
        self.request_list.update()
        self.address_list.update()
        for edit, amount in zip(edits, amounts):
            edit.setAmount(amount)
        self.update_status()
    unit_combo.currentIndexChanged.connect(on_unit)
    gui_widgets.append((unit_label, unit_combo))
    # block explorer
    block_explorers = sorted(block_explorer_info.keys())
    msg = _('Choose which online block explorer to use for functions that open a web browser')
    block_ex_label = HelpLabel(_('Online Block Explorer') + ':', msg)
    block_ex_combo = QComboBox()
    block_ex_combo.addItems(block_explorers)
    block_ex_combo.setCurrentIndex(block_ex_combo.findText(block_explorer(self.config)))
    def on_be(x):
        be_result = block_explorers[block_ex_combo.currentIndex()]
        self.config.set_key('block_explorer', be_result, True)
    block_ex_combo.currentIndexChanged.connect(on_be)
    gui_widgets.append((block_ex_label, block_ex_combo))
    # QR scanner video device
    from electrum import qrscanner
    system_cameras = qrscanner._find_system_cameras()
    qr_combo = QComboBox()
    qr_combo.addItem("Default","default")
    for camera, device in system_cameras.items():
        qr_combo.addItem(camera, device)
    #combo.addItem("Manually specify a device", config.get("video_device"))
    index = qr_combo.findData(self.config.get("video_device"))
    qr_combo.setCurrentIndex(index)
    msg = _("Install the zbar package to enable this.")
    qr_label = HelpLabel(_('Video Device') + ':', msg)
    qr_combo.setEnabled(qrscanner.libzbar is not None)
    on_video_device = lambda x: self.config.set_key("video_device", str(qr_combo.itemData(x).toString()), True)
    qr_combo.currentIndexChanged.connect(on_video_device)
    gui_widgets.append((qr_label, qr_combo))
    # change address policy
    usechange_cb = QCheckBox(_('Use change addresses'))
    usechange_cb.setChecked(self.wallet.use_change)
    if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
    def on_usechange(x):
        usechange_result = x == Qt.Checked
        if self.wallet.use_change != usechange_result:
            self.wallet.use_change = usechange_result
            self.wallet.storage.put('use_change', self.wallet.use_change)
            multiple_cb.setEnabled(self.wallet.use_change)
    usechange_cb.stateChanged.connect(on_usechange)
    usechange_cb.setToolTip(_('Using change addresses makes it more difficult for other people to track your transactions.'))
    tx_widgets.append((usechange_cb, None))
    def on_multiple(x):
        multiple = x == Qt.Checked
        if self.wallet.multiple_change != multiple:
            self.wallet.multiple_change = multiple
            self.wallet.storage.put('multiple_change', multiple)
    multiple_change = self.wallet.multiple_change
    multiple_cb = QCheckBox(_('Use multiple change addresses'))
    multiple_cb.setEnabled(self.wallet.use_change)
    multiple_cb.setToolTip('\n'.join([
        _('In some cases, use up to 3 change addresses in order to break '
          'up large coin amounts and obfuscate the recipient address.'),
        _('This may result in higher transactions fees.')
    ]))
    multiple_cb.setChecked(multiple_change)
    multiple_cb.stateChanged.connect(on_multiple)
    tx_widgets.append((multiple_cb, None))
    # coin chooser
    def fmt_docs(key, klass):
        lines = [ln.lstrip(" ") for ln in klass.__doc__.split("\n")]
        return '\n'.join([key, "", " ".join(lines)])
    choosers = sorted(coinchooser.COIN_CHOOSERS.keys())
    chooser_name = coinchooser.get_name(self.config)
    msg = _('Choose coin (UTXO) selection method. The following are available:\n\n')
    msg += '\n\n'.join(fmt_docs(*item) for item in coinchooser.COIN_CHOOSERS.items())
    chooser_label = HelpLabel(_('Coin selection') + ':', msg)
    chooser_combo = QComboBox()
    chooser_combo.addItems(choosers)
    i = choosers.index(chooser_name) if chooser_name in choosers else 0
    chooser_combo.setCurrentIndex(i)
    def on_chooser(x):
        chooser_name = choosers[chooser_combo.currentIndex()]
        self.config.set_key('coin_chooser', chooser_name)
    chooser_combo.currentIndexChanged.connect(on_chooser)
    tx_widgets.append((chooser_label, chooser_combo))
    # Fiat Currency
    hist_checkbox = QCheckBox()
    ccy_combo = QComboBox()
    ex_combo = QComboBox()
    def update_currencies():
        if not self.fx: return
        currencies = sorted(self.fx.get_currencies(self.fx.get_history_config()))
        ccy_combo.clear()
        ccy_combo.addItems([_('None')] + currencies)
        if self.fx.is_enabled():
            ccy_combo.setCurrentIndex(ccy_combo.findText(self.fx.get_currency()))
    def update_history_cb():
        if not self.fx: return
        hist_checkbox.setChecked(self.fx.get_history_config())
        hist_checkbox.setEnabled(self.fx.is_enabled())
    def update_exchanges():
        if not self.fx: return
        b = self.fx.is_enabled()
        ex_combo.setEnabled(b)
        if b:
            h = self.fx.get_history_config()
            c = self.fx.get_currency()
            exchanges = self.fx.get_exchanges_by_ccy(c, h)
        else:
            exchanges = self.fx.get_exchanges_by_ccy('USD', False)
        ex_combo.clear()
        ex_combo.addItems(sorted(exchanges))
        ex_combo.setCurrentIndex(ex_combo.findText(self.fx.config_exchange()))
    def on_currency(hh):
        if not self.fx: return
        # index 0 is _('None'); any other index enables fiat display
        b = bool(ccy_combo.currentIndex())
        ccy = str(ccy_combo.currentText()) if b else None
        self.fx.set_enabled(b)
        if b and ccy != self.fx.ccy:
            self.fx.set_currency(ccy)
        update_history_cb()
        update_exchanges()
        self.update_fiat()
    def on_exchange(idx):
        exchange = str(ex_combo.currentText())
        if self.fx and self.fx.is_enabled() and exchange and exchange != self.fx.exchange.name():
            self.fx.set_exchange(exchange)
    def on_history(checked):
        if not self.fx: return
        self.fx.set_history_config(checked)
        update_exchanges()
        self.history_list.refresh_headers()
        if self.fx.is_enabled() and checked:
            # reset timeout to get historical rates
            self.fx.timeout = 0
    update_currencies()
    update_history_cb()
    update_exchanges()
    ccy_combo.currentIndexChanged.connect(on_currency)
    hist_checkbox.stateChanged.connect(on_history)
    ex_combo.currentIndexChanged.connect(on_exchange)
    fiat_widgets = []
    fiat_widgets.append((QLabel(_('Fiat currency')), ccy_combo))
    fiat_widgets.append((QLabel(_('Show history rates')), hist_checkbox))
    fiat_widgets.append((QLabel(_('Source')), ex_combo))
    # assemble the tabs from the (label, widget) row lists
    tabs_info = [
        (fee_widgets, _('Fees')),
        (tx_widgets, _('Transactions')),
        (gui_widgets, _('Appearance')),
        (fiat_widgets, _('Fiat')),
        (id_widgets, _('Identity')),
    ]
    for widgets, name in tabs_info:
        tab = QWidget()
        grid = QGridLayout(tab)
        grid.setColumnStretch(0,1)
        for a,b in widgets:
            i = grid.rowCount()
            if b:
                if a:
                    grid.addWidget(a, i, 0)
                grid.addWidget(b, i, 1)
            else:
                # a row with a single widget spans both columns
                grid.addWidget(a, i, 0, 1, 2)
        tabs.addTab(tab, name)
    vbox.addWidget(tabs)
    vbox.addStretch(1)
    vbox.addLayout(Buttons(CloseButton(d)))
    d.setLayout(vbox)
    # run the dialog
    d.exec_()
    if self.fx:
        self.fx.timeout = 0
    self.disconnect(self, SIGNAL('alias_received'), set_alias_color)
    run_hook('close_settings_dialog')
    if self.need_restart:
        self.show_warning(_('Please restart Electrum to activate the new GUI settings'), title=_('Success'))
def run_network_dialog(self):
    """Open the network settings dialog; warn instead when offline."""
    if self.network:
        NetworkDialog(self.wallet.network, self.config, self).do_exec()
    else:
        self.show_warning(_('You are using Electrum in offline mode; restart Electrum if you want to get connected'), title=_('Offline'))
def closeEvent(self, event):
    """Qt close handler; ensures clean_up() runs exactly once."""
    # It seems in some rare cases this closeEvent() is called twice
    if not self.cleaned_up:
        self.cleaned_up = True
        self.clean_up()
    event.accept()
def clean_up(self):
    """Persist window/console state and release wallet/network resources."""
    self.wallet.thread.stop()
    if self.network:
        self.network.unregister_callback(self.on_network)
    self.config.set_key("is_maximized", self.isMaximized())
    if not self.isMaximized():
        # remember the window geometry for the next session
        g = self.geometry()
        self.wallet.storage.put("winpos-qt", [g.left(),g.top(),
                                              g.width(),g.height()])
    # keep only the last 50 console commands
    self.config.set_key("console-history", self.console.history[-50:],
                        True)
    if self.qr_window:
        self.qr_window.close()
    self.close_wallet()
    self.gui_object.close_window(self)
def plugins_dialog(self):
    """Show the plugins list with enable checkboxes and per-plugin settings."""
    self.pluginsdialog = d = WindowModalDialog(self, _('Electrum Plugins'))
    plugins = self.gui_object.plugins
    vbox = QVBoxLayout(d)
    # plugins
    scroll = QScrollArea()
    scroll.setEnabled(True)
    scroll.setWidgetResizable(True)
    scroll.setMinimumSize(400,250)
    vbox.addWidget(scroll)
    w = QWidget()
    scroll.setWidget(w)
    w.setMinimumHeight(plugins.count() * 35)
    grid = QGridLayout()
    grid.setColumnStretch(0,1)
    w.setLayout(grid)
    settings_widgets = {}
    def enable_settings_widget(p, name, i):
        # lazily create the plugin's settings widget the first time the
        # plugin is enabled; afterwards just toggle its enabled state
        widget = settings_widgets.get(name)
        if not widget and p and p.requires_settings():
            widget = settings_widgets[name] = p.settings_widget(d)
            grid.addWidget(widget, i, 1)
        if widget:
            widget.setEnabled(bool(p and p.is_enabled()))
    def do_toggle(cb, name, i):
        p = plugins.toggle(name)
        cb.setChecked(bool(p))
        enable_settings_widget(p, name, i)
        run_hook('init_qt', self.gui_object)
    for i, descr in enumerate(plugins.descriptions.values()):
        name = descr['__name__']
        p = plugins.get(name)
        if descr.get('registers_keystore'):
            # hardware keystore plugins are managed elsewhere
            continue
        try:
            cb = QCheckBox(descr['fullname'])
            cb.setEnabled(plugins.is_available(name, self.wallet))
            cb.setChecked(p is not None and p.is_enabled())
            grid.addWidget(cb, i, 0)
            enable_settings_widget(p, name, i)
            cb.clicked.connect(partial(do_toggle, cb, name, i))
            msg = descr['description']
            if descr.get('requires'):
                msg += '\n\n' + _('Requires') + ':\n' + '\n'.join(map(lambda x: x[1], descr.get('requires')))
            grid.addWidget(HelpButton(msg), i, 2)
        except Exception:
            self.print_msg("error: cannot display plugin", name)
            traceback.print_exc(file=sys.stdout)
    grid.setRowStretch(i+1,1)
    vbox.addLayout(Buttons(CloseButton(d)))
    d.exec_()
def cpfp(self, parent_tx, new_tx):
    """Child-Pays-For-Parent dialog: choose a fee for a child transaction
    that pays to confirm the unconfirmed *parent_tx*."""
    total_size = parent_tx.estimated_size() + new_tx.estimated_size()
    d = WindowModalDialog(self, _('Child Pays for Parent'))
    vbox = QVBoxLayout(d)
    msg = (
        "A CPFP is a transaction that sends an unconfirmed output back to "
        "yourself, with a high fee. The goal is to have miners confirm "
        "the parent transaction in order to get the fee attached to the "
        "child transaction.")
    vbox.addWidget(WWLabel(_(msg)))
    msg2 = ("The proposed fee is computed using your "
            "fee/kB settings, applied to the total size of both child and "
            "parent transactions. After you broadcast a CPFP transaction, "
            "it is normal to see a new unconfirmed transaction in your history.")
    vbox.addWidget(WWLabel(_(msg2)))
    grid = QGridLayout()
    grid.addWidget(QLabel(_('Total size') + ':'), 0, 0)
    grid.addWidget(QLabel('%d bytes'% total_size), 0, 1)
    # the whole unconfirmed output is available; fee cannot exceed it
    max_fee = new_tx.output_value()
    grid.addWidget(QLabel(_('Input amount') + ':'), 1, 0)
    grid.addWidget(QLabel(self.format_amount(max_fee) + ' ' + self.base_unit()), 1, 1)
    output_amount = QLabel('')
    grid.addWidget(QLabel(_('Output amount') + ':'), 2, 0)
    grid.addWidget(output_amount, 2, 1)
    fee_e = BTCAmountEdit(self.get_decimal_point)
    def f(x):
        # output = input minus the chosen fee
        a = max_fee - fee_e.get_amount()
        output_amount.setText((self.format_amount(a) + ' ' + self.base_unit()) if a else '')
    fee_e.textChanged.connect(f)
    fee = self.config.fee_per_kb() * total_size / 1000
    fee_e.setAmount(fee)
    grid.addWidget(QLabel(_('Fee' + ':')), 3, 0)
    grid.addWidget(fee_e, 3, 1)
    def on_rate(dyn, pos, fee_rate):
        fee = fee_rate * total_size / 1000
        fee = min(max_fee, fee)
        fee_e.setAmount(fee)
    fee_slider = FeeSlider(self, self.config, on_rate)
    fee_slider.update()
    grid.addWidget(fee_slider, 4, 1)
    vbox.addLayout(grid)
    vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
    if not d.exec_():
        return
    fee = fee_e.get_amount()
    if fee > max_fee:
        self.show_error(_('Max fee exceeded'))
        return
    new_tx = self.wallet.cpfp(parent_tx, fee)
    new_tx.set_rbf(True)
    self.show_transaction(new_tx)
def bump_fee_dialog(self, tx):
    """Replace-By-Fee dialog: build a replacement for *tx* with a higher fee."""
    is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
    tx_label = self.wallet.get_label(tx.txid())
    tx_size = tx.estimated_size()
    d = WindowModalDialog(self, _('Bump Fee'))
    vbox = QVBoxLayout(d)
    vbox.addWidget(QLabel(_('Current fee') + ': %s'% self.format_amount(fee) + ' ' + self.base_unit()))
    vbox.addWidget(QLabel(_('New fee' + ':')))
    fee_e = BTCAmountEdit(self.get_decimal_point)
    # suggest a 50% higher fee as the starting point
    fee_e.setAmount(fee * 1.5)
    vbox.addWidget(fee_e)
    def on_rate(dyn, pos, fee_rate):
        fee = fee_rate * tx_size / 1000
        fee_e.setAmount(fee)
    fee_slider = FeeSlider(self, self.config, on_rate)
    vbox.addWidget(fee_slider)
    cb = QCheckBox(_('Final'))
    vbox.addWidget(cb)
    vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
    if not d.exec_():
        return
    is_final = cb.isChecked()
    new_fee = fee_e.get_amount()
    delta = new_fee - fee
    if delta < 0:
        self.show_error("fee too low")
        return
    try:
        new_tx = self.wallet.bump_fee(tx, delta)
    except BaseException as e:
        self.show_error(str(e))
        return
    if is_final:
        # mark the replacement as non-replaceable
        new_tx.set_rbf(False)
    self.show_transaction(new_tx, tx_label)
| 40.354639
| 448
| 0.606198
|
96611fa1e1a87c03f9e0ade56d643793fb4463f5
| 200
|
py
|
Python
|
python_work/cidade.py
|
lucas-jsvd/python_crash_course_2nd
|
8404e7769bef7b90b9b0897996c3a3f969bb72bd
|
[
"Unlicense"
] | null | null | null |
python_work/cidade.py
|
lucas-jsvd/python_crash_course_2nd
|
8404e7769bef7b90b9b0897996c3a3f969bb72bd
|
[
"Unlicense"
] | null | null | null |
python_work/cidade.py
|
lucas-jsvd/python_crash_course_2nd
|
8404e7769bef7b90b9b0897996c3a3f969bb72bd
|
[
"Unlicense"
] | null | null | null |
def describe_city(city, country="Brasil"):
    """Print a sentence saying which country *city* is in.

    Args:
        city: Name of the city.
        country: Country the city belongs to (defaults to "Brasil").

    Returns:
        The formatted sentence (also printed), so callers and tests can
        reuse it.  Backward compatible: existing callers ignore the
        return value.
    """
    message = f'A cidade {city} fica no país {country}'
    print(message)
    return message


# Demo calls: positional, positional-with-country, and keyword order.
describe_city("Aracaju")
describe_city("New York", "EUA")
describe_city(country="China", city="Huan")
| 25
| 52
| 0.715
|
2740319e04a441d5db844b0c5dde20eab4b273b4
| 118,205
|
py
|
Python
|
python/machinetalk/protobuf/status_pb2.py
|
luminize/machinetalk-protobuf
|
6ca7c99806401179ece164b07dc87852bfa8df9c
|
[
"MIT"
] | null | null | null |
python/machinetalk/protobuf/status_pb2.py
|
luminize/machinetalk-protobuf
|
6ca7c99806401179ece164b07dc87852bfa8df9c
|
[
"MIT"
] | null | null | null |
python/machinetalk/protobuf/status_pb2.py
|
luminize/machinetalk-protobuf
|
6ca7c99806401179ece164b07dc87852bfa8df9c
|
[
"MIT"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: machinetalk/protobuf/status.proto
import sys
# Py2/Py3 shim: on Python 2 the serialized descriptor literals below are
# already byte strings; on Python 3 they must be encoded to latin-1 bytes.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
# Default symbol database; every descriptor built in this module is
# registered here so messages can be resolved by their full name.
_sym_db = _symbol_database.Default()
from machinetalk.protobuf import nanopb_pb2 as machinetalk_dot_protobuf_dot_nanopb__pb2
from machinetalk.protobuf import types_pb2 as machinetalk_dot_protobuf_dot_types__pb2
from machinetalk.protobuf import preview_pb2 as machinetalk_dot_protobuf_dot_preview__pb2
from machinetalk.protobuf import emcclass_pb2 as machinetalk_dot_protobuf_dot_emcclass__pb2
from machinetalk.protobuf import motcmds_pb2 as machinetalk_dot_protobuf_dot_motcmds__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='machinetalk/protobuf/status.proto',
package='machinetalk',
syntax='proto2',
serialized_pb=_b('\n!machinetalk/protobuf/status.proto\x12\x0bmachinetalk\x1a!machinetalk/protobuf/nanopb.proto\x1a machinetalk/protobuf/types.proto\x1a\"machinetalk/protobuf/preview.proto\x1a#machinetalk/protobuf/emcclass.proto\x1a\"machinetalk/protobuf/motcmds.proto\"\xc6\x01\n\x0b\x45mcToolData\x12\r\n\x05index\x18\x01 \x02(\x05\x12\n\n\x02id\x18\x02 \x01(\x05\x12\x10\n\x08\x64iameter\x18\x0c \x01(\x01\x12\x12\n\nfrontangle\x18\r \x01(\x01\x12\x11\n\tbackangle\x18\x0e \x01(\x01\x12\x13\n\x0borientation\x18\x0f \x01(\x05\x12%\n\x06offset\x18\x10 \x01(\x0b\x32\x15.machinetalk.Position\x12\x0f\n\x07\x63omment\x18\x11 \x01(\t\x12\x0e\n\x06pocket\x18\x12 \x01(\x05:\x06\x92?\x03H\xcc\x08\"\xd5\x02\n\x13\x45mcStatusMotionAxis\x12\r\n\x05index\x18\x01 \x02(\x05\x12\x0f\n\x07\x65nabled\x18\x02 \x01(\x08\x12\r\n\x05\x66\x61ult\x18\x03 \x01(\x08\x12\x16\n\x0e\x66\x65rror_current\x18\x04 \x01(\x01\x12\x17\n\x0f\x66\x65rror_highmark\x18\x05 \x01(\x01\x12\r\n\x05homed\x18\x06 \x01(\x08\x12\x0e\n\x06homing\x18\x07 \x01(\x08\x12\r\n\x05inpos\x18\x08 \x01(\x08\x12\r\n\x05input\x18\t \x01(\x01\x12\x16\n\x0emax_hard_limit\x18\n \x01(\x08\x12\x16\n\x0emax_soft_limit\x18\x0b \x01(\x08\x12\x16\n\x0emin_hard_limit\x18\x0c \x01(\x08\x12\x16\n\x0emin_soft_limit\x18\r \x01(\x08\x12\x0e\n\x06output\x18\x0e \x01(\x01\x12\x17\n\x0foverride_limits\x18\x0f \x01(\x08\x12\x10\n\x08velocity\x18\x10 \x01(\x01:\x06\x92?\x03H\xcd\x08\"\xa6\x02\n\x13\x45mcStatusConfigAxis\x12\r\n\x05index\x18\x01 \x02(\x05\x12+\n\taxis_type\x18\x02 \x01(\x0e\x32\x18.machinetalk.EmcAxisType\x12\x10\n\x08\x62\x61\x63klash\x18\x03 \x01(\x01\x12\x12\n\nmax_ferror\x18\x04 \x01(\x01\x12\x1a\n\x12max_position_limit\x18\x05 \x01(\x01\x12\x12\n\nmin_ferror\x18\x06 \x01(\x01\x12\x1a\n\x12min_position_limit\x18\x07 \x01(\x01\x12\x15\n\rhome_sequence\x18\t \x01(\x05\x12\x18\n\x10max_acceleration\x18\n \x01(\x01\x12\x14\n\x0cmax_velocity\x18\x0b \x01(\x01\x12\x12\n\nincrements\x18\x0c 
\x01(\t:\x06\x92?\x03H\xce\x08\"?\n\x13\x45mcProgramExtension\x12\r\n\x05index\x18\x01 \x02(\x05\x12\x11\n\textension\x18\x02 \x01(\t:\x06\x92?\x03H\xcf\x08\"9\n\x11\x45mcStatusAnalogIO\x12\r\n\x05index\x18\x01 \x02(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x06\x92?\x03H\xd0\x08\":\n\x12\x45mcStatusDigitalIO\x12\r\n\x05index\x18\x01 \x02(\x05\x12\r\n\x05value\x18\x02 \x01(\x08:\x06\x92?\x03H\xd1\x08\"6\n\x0e\x45mcStatusLimit\x12\r\n\x05index\x18\x01 \x02(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x06\x92?\x03H\xd2\x08\"6\n\x0e\x45mcStatusGCode\x12\r\n\x05index\x18\x01 \x02(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x06\x92?\x03H\xd3\x08\"6\n\x0e\x45mcStatusMCode\x12\r\n\x05index\x18\x01 \x02(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x06\x92?\x03H\xd4\x08\"8\n\x10\x45mcStatusSetting\x12\r\n\x05index\x18\x01 \x02(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x06\x92?\x03H\xd5\x08\"\xba\t\n\x0f\x45mcStatusConfig\x12\x1c\n\x14\x64\x65\x66\x61ult_acceleration\x18\x01 \x01(\x01\x12\x0c\n\x04\x61xes\x18\x03 \x01(\x05\x12.\n\x04\x61xis\x18\x04 \x03(\x0b\x32 .machinetalk.EmcStatusConfigAxis\x12\x11\n\taxis_mask\x18\x05 \x01(\x05\x12\x12\n\ncycle_time\x18\x06 \x01(\x01\x12\r\n\x05\x64\x65\x62ug\x18\x07 \x01(\x05\x12\x37\n\x0fkinematics_type\x18\x08 \x01(\x0e\x32\x1e.machinetalk.EmcKinematicsType\x12\x18\n\x10max_acceleration\x18\n \x01(\x01\x12\x14\n\x0cmax_velocity\x18\x0b \x01(\x01\x12\x35\n\x0clinear_units\x18\x0c \x01(\x0e\x32\x1f.machinetalk.EmcLinearUnitsType\x12\x18\n\x10\x64\x65\x66\x61ult_velocity\x18\r \x01(\x01\x12;\n\x11program_extension\x18\x0e \x03(\x0b\x32 .machinetalk.EmcProgramExtension\x12;\n\x0fposition_offset\x18\x0f \x01(\x0e\x32\".machinetalk.EmcPositionOffsetType\x12?\n\x11position_feedback\x18\x10 \x01(\x0e\x32$.machinetalk.EmcPositionFeedbackType\x12\x19\n\x11max_feed_override\x18\x11 \x01(\x01\x12\x19\n\x11min_feed_override\x18\x12 \x01(\x01\x12\x1c\n\x14max_spindle_override\x18\x13 \x01(\x01\x12\x1c\n\x14min_spindle_override\x18\x14 
\x01(\x01\x12\x1d\n\x15\x64\x65\x66\x61ult_spindle_speed\x18\x15 \x01(\x01\x12\x1f\n\x17\x64\x65\x66\x61ult_linear_velocity\x18\x16 \x01(\x01\x12\x14\n\x0cmin_velocity\x18\x17 \x01(\x01\x12\x1b\n\x13max_linear_velocity\x18\x18 \x01(\x01\x12\x1b\n\x13min_linear_velocity\x18\x19 \x01(\x01\x12 \n\x18\x64\x65\x66\x61ult_angular_velocity\x18\x1a \x01(\x01\x12\x1c\n\x14max_angular_velocity\x18\x1b \x01(\x01\x12\x1c\n\x14min_angular_velocity\x18\x1c \x01(\x01\x12\x12\n\nincrements\x18\x1d \x01(\t\x12\r\n\x05grids\x18\x1e \x01(\t\x12\r\n\x05lathe\x18\x1f \x01(\x08\x12\x10\n\x08geometry\x18 \x01(\t\x12\x13\n\x0b\x61rcdivision\x18! \x01(\r\x12\x17\n\x0fno_force_homing\x18\" \x01(\x08\x12\x13\n\x0bremote_path\x18# \x01(\t\x12\x31\n\ntime_units\x18$ \x01(\x0e\x32\x1d.machinetalk.EmcTimeUnitsType\x12\x0c\n\x04name\x18% \x01(\t\x12\x37\n\x0cuser_command\x18& \x03(\x0b\x32!.machinetalk.EmcStatusUserCommand\x12\x37\n\rangular_units\x18\' \x01(\x0e\x32 .machinetalk.EmcAngularUnitsType:\x06\x92?\x03H\xd6\x08\"\xfb\x0b\n\x0f\x45mcStatusMotion\x12\x14\n\x0c\x61\x63tive_queue\x18\x01 \x01(\x05\x12.\n\x0f\x61\x63tual_position\x18\x02 \x01(\x0b\x32\x15.machinetalk.Position\x12\x1d\n\x15\x61\x64\x61ptive_feed_enabled\x18\x03 \x01(\x08\x12+\n\x03\x61in\x18\x04 \x03(\x0b\x32\x1e.machinetalk.EmcStatusAnalogIO\x12,\n\x04\x61out\x18\x05 \x03(\x0b\x32\x1e.machinetalk.EmcStatusAnalogIO\x12.\n\x04\x61xis\x18\x06 \x03(\x0b\x32 .machinetalk.EmcStatusMotionAxis\x12\x14\n\x0c\x62lock_delete\x18\x07 \x01(\x08\x12\x14\n\x0c\x63urrent_line\x18\x08 \x01(\x05\x12\x13\n\x0b\x63urrent_vel\x18\t \x01(\x01\x12\x12\n\ndelay_left\x18\n \x01(\x01\x12,\n\x03\x64in\x18\x0b \x03(\x0b\x32\x1f.machinetalk.EmcStatusDigitalIO\x12\x16\n\x0e\x64istance_to_go\x18\x0c \x01(\x01\x12-\n\x04\x64out\x18\r \x03(\x0b\x32\x1f.machinetalk.EmcStatusDigitalIO\x12\"\n\x03\x64tg\x18\x0e \x01(\x0b\x32\x15.machinetalk.Position\x12\x0f\n\x07\x65nabled\x18\x0f \x01(\x08\x12\x19\n\x11\x66\x65\x65\x64_hold_enabled\x18\x10 
\x01(\x08\x12\x1d\n\x15\x66\x65\x65\x64_override_enabled\x18\x11 \x01(\x08\x12\x10\n\x08\x66\x65\x65\x64rate\x18\x12 \x01(\x01\x12+\n\tg5x_index\x18\x13 \x01(\x0e\x32\x18.machinetalk.OriginIndex\x12)\n\ng5x_offset\x18\x14 \x01(\x0b\x32\x15.machinetalk.Position\x12)\n\ng92_offset\x18\x15 \x01(\x0b\x32\x15.machinetalk.Position\x12\n\n\x02id\x18\x17 \x01(\x05\x12\r\n\x05inpos\x18\x18 \x01(\x08\x12\x34\n\x15joint_actual_position\x18\x19 \x01(\x0b\x32\x15.machinetalk.Position\x12-\n\x0ejoint_position\x18\x1a \x01(\x0b\x32\x15.machinetalk.Position\x12*\n\x05limit\x18\x1b \x03(\x0b\x32\x1b.machinetalk.EmcStatusLimit\x12\x13\n\x0bmotion_line\x18\x1c \x01(\x05\x12,\n\x0bmotion_type\x18\x1d \x01(\x0e\x32\x17.machinetalk.MotionType\x12\x37\n\x0bmotion_mode\x18\x1e \x01(\x0e\x32\".machinetalk.EmcTrajectoryModeType\x12\x0e\n\x06paused\x18\x1f \x01(\x08\x12\'\n\x08position\x18 \x01(\x0b\x32\x15.machinetalk.Position\x12\x15\n\rprobe_tripped\x18! \x01(\x08\x12\x11\n\tprobe_val\x18\" \x01(\x05\x12.\n\x0fprobed_position\x18# \x01(\x0b\x32\x15.machinetalk.Position\x12\x0f\n\x07probing\x18$ \x01(\x08\x12\r\n\x05queue\x18% \x01(\x05\x12\x12\n\nqueue_full\x18& \x01(\x08\x12\x13\n\x0brotation_xy\x18\' \x01(\x01\x12\x15\n\rspindle_brake\x18( \x01(\x08\x12\x19\n\x11spindle_direction\x18) \x01(\x05\x12\x17\n\x0fspindle_enabled\x18* \x01(\x08\x12\x1a\n\x12spindle_increasing\x18+ \x01(\x05\x12 \n\x18spindle_override_enabled\x18, \x01(\x08\x12\x15\n\rspindle_speed\x18- \x01(\x01\x12\x13\n\x0bspindlerate\x18. 
\x01(\x01\x12&\n\x05state\x18/ \x01(\x0e\x32\x17.machinetalk.RCS_STATUS\x12\x14\n\x0cmax_velocity\x18\x30 \x01(\x01\x12\x18\n\x10max_acceleration\x18\x31 \x01(\x01\x12\x11\n\trapidrate\x18\x32 \x01(\x01:\x06\x92?\x03H\xd7\x08\"\xee\x01\n\x0b\x45mcStatusIo\x12\r\n\x05\x65stop\x18\x01 \x01(\x08\x12\r\n\x05\x66lood\x18\x02 \x01(\x08\x12\x0c\n\x04lube\x18\x03 \x01(\x08\x12\x12\n\nlube_level\x18\x04 \x01(\x08\x12\x0c\n\x04mist\x18\x05 \x01(\x08\x12*\n\x0btool_offset\x18\x08 \x01(\x0b\x32\x15.machinetalk.Position\x12,\n\ntool_table\x18\t \x03(\x0b\x32\x18.machinetalk.EmcToolData\x12\x16\n\x0epocket_prepped\x18\n \x01(\x05\x12\x17\n\x0ftool_in_spindle\x18\x0b \x01(\x05:\x06\x92?\x03H\xd8\x08\"\xc7\x02\n\rEmcStatusTask\x12\x1a\n\x12\x65\x63ho_serial_number\x18\x01 \x01(\x05\x12\x35\n\nexec_state\x18\x02 \x01(\x0e\x32!.machinetalk.EmcTaskExecStateType\x12\x0c\n\x04\x66ile\x18\x03 \x01(\t\x12\x15\n\rinput_timeout\x18\x04 \x01(\x08\x12\x15\n\roptional_stop\x18\x05 \x01(\x08\x12\x11\n\tread_line\x18\x06 \x01(\x05\x12/\n\ttask_mode\x18\x07 \x01(\x0e\x32\x1c.machinetalk.EmcTaskModeType\x12\x13\n\x0btask_paused\x18\x08 \x01(\x05\x12\x31\n\ntask_state\x18\t \x01(\x0e\x32\x1d.machinetalk.EmcTaskStateType\x12\x13\n\x0btotal_lines\x18\n \x01(\x05:\x06\x92?\x03H\xd9\x08\"\xe4\x02\n\x0f\x45mcStatusInterp\x12\x0f\n\x07\x63ommand\x18\x01 \x01(\t\x12+\n\x06gcodes\x18\x02 \x03(\x0b\x32\x1b.machinetalk.EmcStatusGCode\x12\x35\n\x0cinterp_state\x18\x03 \x01(\x0e\x32\x1f.machinetalk.EmcInterpStateType\x12?\n\x13interpreter_errcode\x18\x04 \x01(\x0e\x32\".machinetalk.EmcInterpExitCodeType\x12+\n\x06mcodes\x18\x05 \x03(\x0b\x32\x1b.machinetalk.EmcStatusMCode\x12/\n\x08settings\x18\x06 \x03(\x0b\x32\x1d.machinetalk.EmcStatusSetting\x12\x35\n\rprogram_units\x18\x07 \x01(\x0e\x32\x1e.machinetalk.EmcCanonUnitsType:\x06\x92?\x03H\xda\x08\"\xe2\x03\n\x14\x45mcCommandParameters\x12\r\n\x05index\x18\x01 \x01(\r\x12\x13\n\x0b\x64\x65\x62ug_level\x18\x02 \x01(\r\x12\x13\n\x0bline_number\x18\x03 
\x01(\x05\x12\r\n\x05scale\x18\x04 \x01(\x01\x12\x10\n\x08velocity\x18\x05 \x01(\x01\x12\x10\n\x08\x64istance\x18\x06 \x01(\x01\x12\r\n\x05value\x18\x07 \x01(\x01\x12\x0e\n\x06\x65nable\x18\x08 \x01(\x08\x12\x0f\n\x07\x63ommand\x18\t \x01(\t\x12\x0c\n\x04path\x18\n \x01(\t\x12/\n\ttask_mode\x18\x64 \x01(\x0e\x32\x1c.machinetalk.EmcTaskModeType\x12\x31\n\ntask_state\x18\x65 \x01(\x0e\x32\x1d.machinetalk.EmcTaskStateType\x12\x35\n\ttraj_mode\x18\x66 \x01(\x0e\x32\".machinetalk.EmcTrajectoryModeType\x12\"\n\x04pose\x18g \x01(\x0b\x32\x14.machinetalk.EmcPose\x12+\n\ttool_data\x18h \x01(\x0b\x32\x18.machinetalk.EmcToolData\x12,\n\ntool_table\x18i \x03(\x0b\x32\x18.machinetalk.EmcToolData:\x06\x92?\x03H\xdb\x08\">\n\x14\x45mcStatusUserCommand\x12\r\n\x05index\x18\x01 \x02(\x05\x12\x0f\n\x07\x63ommand\x18\x02 \x01(\t:\x06\x92?\x03H\xdc\x08\"\xa7\x02\n\x0b\x45mcStatusUI\x12\x1d\n\x15spindle_brake_visible\x18\x01 \x01(\x08\x12\x1a\n\x12spindle_cw_visible\x18\x02 \x01(\x08\x12\x1b\n\x13spindle_ccw_visible\x18\x03 \x01(\x08\x12\x1c\n\x14spindle_stop_visible\x18\x04 \x01(\x08\x12\x1c\n\x14spindle_plus_visible\x18\x05 \x01(\x08\x12\x1d\n\x15spindle_minus_visible\x18\x06 \x01(\x08\x12 \n\x18spindle_override_visible\x18\x07 \x01(\x08\x12\x1d\n\x15\x63oolant_flood_visible\x18\x08 \x01(\x08\x12\x1c\n\x14\x63oolant_mist_visible\x18\t \x01(\x08:\x06\x92?\x03H\xdd\x08*\xe7\x02\n\x14\x45mcTaskExecStateType\x12\x17\n\x13\x45MC_TASK_EXEC_ERROR\x10\x01\x12\x16\n\x12\x45MC_TASK_EXEC_DONE\x10\x02\x12$\n EMC_TASK_EXEC_WAITING_FOR_MOTION\x10\x03\x12*\n&EMC_TASK_EXEC_WAITING_FOR_MOTION_QUEUE\x10\x04\x12 
\n\x1c\x45MC_TASK_EXEC_WAITING_FOR_IO\x10\x05\x12+\n\'EMC_TASK_EXEC_WAITING_FOR_MOTION_AND_IO\x10\x07\x12#\n\x1f\x45MC_TASK_EXEC_WAITING_FOR_DELAY\x10\x08\x12(\n$EMC_TASK_EXEC_WAITING_FOR_SYSTEM_CMD\x10\t\x12.\n*EMC_TASK_EXEC_WAITING_FOR_SPINDLE_ORIENTED\x10\n*\x84\x01\n\x12\x45mcInterpStateType\x12\x18\n\x14\x45MC_TASK_INTERP_IDLE\x10\x01\x12\x1b\n\x17\x45MC_TASK_INTERP_READING\x10\x02\x12\x1a\n\x16\x45MC_TASK_INTERP_PAUSED\x10\x03\x12\x1b\n\x17\x45MC_TASK_INTERP_WAITING\x10\x04*\xc8\x01\n\x15\x45mcInterpExitCodeType\x12\x16\n\x12\x45MC_INTERP_EXIT_OK\x10\x00\x12\x18\n\x14\x45MC_INTERP_EXIT_EXIT\x10\x01\x12\"\n\x1e\x45MC_INTERP_EXIT_EXECUTE_FINISH\x10\x02\x12\x1b\n\x17\x45MC_INTERP_EXIT_ENDFILE\x10\x03\x12!\n\x1d\x45MC_INTERP_EXIT_FILE_NOT_OPEN\x10\x04\x12\x19\n\x15\x45MC_INTERP_EXIT_ERROR\x10\x05*{\n\x11\x45mcKinematicsType\x12\x17\n\x13KINEMATICS_IDENTITY\x10\x01\x12\x1b\n\x17KINEMATICS_FORWARD_ONLY\x10\x02\x12\x1b\n\x17KINEMATICS_INVERSE_ONLY\x10\x03\x12\x13\n\x0fKINEMATICS_BOTH\x10\x04*b\n\x15\x45mcTrajectoryModeType\x12\x16\n\x12\x45MC_TRAJ_MODE_FREE\x10\x01\x12\x17\n\x13\x45MC_TRAJ_MODE_COORD\x10\x02\x12\x18\n\x14\x45MC_TRAJ_MODE_TELEOP\x10\x03*Q\n\x11\x45mcCanonUnitsType\x12\x14\n\x10\x43\x41NON_UNITS_INCH\x10\x01\x12\x12\n\x0e\x43\x41NON_UNITS_MM\x10\x02\x12\x12\n\x0e\x43\x41NON_UNITS_CM\x10\x03*U\n\x12\x45mcLinearUnitsType\x12\x15\n\x11LINEAR_UNITS_INCH\x10\x01\x12\x13\n\x0fLINEAR_UNITS_MM\x10\x02\x12\x13\n\x0fLINEAR_UNITS_CM\x10\x03*b\n\x13\x45mcAngularUnitsType\x12\x19\n\x15\x41NGULAR_UNITS_DEGREES\x10\x01\x12\x18\n\x14\x41NGULAR_UNITS_RADIAN\x10\x02\x12\x16\n\x12\x41NGULAR_UNITS_GRAD\x10\x03*@\n\x10\x45mcTimeUnitsType\x12\x15\n\x11TIME_UNITS_MINUTE\x10\x01\x12\x15\n\x11TIME_UNITS_SECOND\x10\x02*Z\n\x0f\x45mcTaskModeType\x12\x18\n\x14\x45MC_TASK_MODE_MANUAL\x10\x01\x12\x16\n\x12\x45MC_TASK_MODE_AUTO\x10\x02\x12\x15\n\x11\x45MC_TASK_MODE_MDI\x10\x03*{\n\x10\x45mcTaskStateType\x12\x18\n\x14\x45MC_TASK_STATE_ESTOP\x10\x01\x12\x1e\n\x1a\x45MC_TASK_STATE_ESTO
P_RESET\x10\x02\x12\x16\n\x12\x45MC_TASK_STATE_OFF\x10\x03\x12\x15\n\x11\x45MC_TASK_STATE_ON\x10\x04*8\n\x0b\x45mcAxisType\x12\x13\n\x0f\x45MC_AXIS_LINEAR\x10\x01\x12\x14\n\x10\x45MC_AXIS_ANGULAR\x10\x02*V\n\x15\x45mcPositionOffsetType\x12\x1e\n\x1a\x45MC_CONFIG_RELATIVE_OFFSET\x10\x01\x12\x1d\n\x19\x45MC_CONFIG_MACHINE_OFFSET\x10\x02*\\\n\x17\x45mcPositionFeedbackType\x12\x1e\n\x1a\x45MC_CONFIG_ACTUAL_FEEDBACK\x10\x01\x12!\n\x1d\x45MC_CONFIG_COMMANDED_FEEDBACK\x10\x02')
,
dependencies=[machinetalk_dot_protobuf_dot_nanopb__pb2.DESCRIPTOR,machinetalk_dot_protobuf_dot_types__pb2.DESCRIPTOR,machinetalk_dot_protobuf_dot_preview__pb2.DESCRIPTOR,machinetalk_dot_protobuf_dot_emcclass__pb2.DESCRIPTOR,machinetalk_dot_protobuf_dot_motcmds__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_EMCTASKEXECSTATETYPE = _descriptor.EnumDescriptor(
name='EmcTaskExecStateType',
full_name='machinetalk.EmcTaskExecStateType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_ERROR', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_DONE', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_WAITING_FOR_MOTION', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_WAITING_FOR_MOTION_QUEUE', index=3, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_WAITING_FOR_IO', index=4, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_WAITING_FOR_MOTION_AND_IO', index=5, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_WAITING_FOR_DELAY', index=6, number=8,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_WAITING_FOR_SYSTEM_CMD', index=7, number=9,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_EXEC_WAITING_FOR_SPINDLE_ORIENTED', index=8, number=10,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6005,
serialized_end=6364,
)
_sym_db.RegisterEnumDescriptor(_EMCTASKEXECSTATETYPE)
EmcTaskExecStateType = enum_type_wrapper.EnumTypeWrapper(_EMCTASKEXECSTATETYPE)
_EMCINTERPSTATETYPE = _descriptor.EnumDescriptor(
name='EmcInterpStateType',
full_name='machinetalk.EmcInterpStateType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_TASK_INTERP_IDLE', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_INTERP_READING', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_INTERP_PAUSED', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_INTERP_WAITING', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6367,
serialized_end=6499,
)
_sym_db.RegisterEnumDescriptor(_EMCINTERPSTATETYPE)
EmcInterpStateType = enum_type_wrapper.EnumTypeWrapper(_EMCINTERPSTATETYPE)
_EMCINTERPEXITCODETYPE = _descriptor.EnumDescriptor(
name='EmcInterpExitCodeType',
full_name='machinetalk.EmcInterpExitCodeType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_INTERP_EXIT_OK', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_INTERP_EXIT_EXIT', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_INTERP_EXIT_EXECUTE_FINISH', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_INTERP_EXIT_ENDFILE', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_INTERP_EXIT_FILE_NOT_OPEN', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_INTERP_EXIT_ERROR', index=5, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6502,
serialized_end=6702,
)
_sym_db.RegisterEnumDescriptor(_EMCINTERPEXITCODETYPE)
EmcInterpExitCodeType = enum_type_wrapper.EnumTypeWrapper(_EMCINTERPEXITCODETYPE)
_EMCKINEMATICSTYPE = _descriptor.EnumDescriptor(
name='EmcKinematicsType',
full_name='machinetalk.EmcKinematicsType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='KINEMATICS_IDENTITY', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KINEMATICS_FORWARD_ONLY', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KINEMATICS_INVERSE_ONLY', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KINEMATICS_BOTH', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6704,
serialized_end=6827,
)
_sym_db.RegisterEnumDescriptor(_EMCKINEMATICSTYPE)
EmcKinematicsType = enum_type_wrapper.EnumTypeWrapper(_EMCKINEMATICSTYPE)
_EMCTRAJECTORYMODETYPE = _descriptor.EnumDescriptor(
name='EmcTrajectoryModeType',
full_name='machinetalk.EmcTrajectoryModeType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_TRAJ_MODE_FREE', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TRAJ_MODE_COORD', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TRAJ_MODE_TELEOP', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6829,
serialized_end=6927,
)
_sym_db.RegisterEnumDescriptor(_EMCTRAJECTORYMODETYPE)
EmcTrajectoryModeType = enum_type_wrapper.EnumTypeWrapper(_EMCTRAJECTORYMODETYPE)
_EMCCANONUNITSTYPE = _descriptor.EnumDescriptor(
name='EmcCanonUnitsType',
full_name='machinetalk.EmcCanonUnitsType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CANON_UNITS_INCH', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANON_UNITS_MM', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANON_UNITS_CM', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6929,
serialized_end=7010,
)
_sym_db.RegisterEnumDescriptor(_EMCCANONUNITSTYPE)
EmcCanonUnitsType = enum_type_wrapper.EnumTypeWrapper(_EMCCANONUNITSTYPE)
_EMCLINEARUNITSTYPE = _descriptor.EnumDescriptor(
name='EmcLinearUnitsType',
full_name='machinetalk.EmcLinearUnitsType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='LINEAR_UNITS_INCH', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LINEAR_UNITS_MM', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LINEAR_UNITS_CM', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7012,
serialized_end=7097,
)
_sym_db.RegisterEnumDescriptor(_EMCLINEARUNITSTYPE)
EmcLinearUnitsType = enum_type_wrapper.EnumTypeWrapper(_EMCLINEARUNITSTYPE)
_EMCANGULARUNITSTYPE = _descriptor.EnumDescriptor(
name='EmcAngularUnitsType',
full_name='machinetalk.EmcAngularUnitsType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ANGULAR_UNITS_DEGREES', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ANGULAR_UNITS_RADIAN', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ANGULAR_UNITS_GRAD', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7099,
serialized_end=7197,
)
_sym_db.RegisterEnumDescriptor(_EMCANGULARUNITSTYPE)
EmcAngularUnitsType = enum_type_wrapper.EnumTypeWrapper(_EMCANGULARUNITSTYPE)
_EMCTIMEUNITSTYPE = _descriptor.EnumDescriptor(
name='EmcTimeUnitsType',
full_name='machinetalk.EmcTimeUnitsType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='TIME_UNITS_MINUTE', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TIME_UNITS_SECOND', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7199,
serialized_end=7263,
)
_sym_db.RegisterEnumDescriptor(_EMCTIMEUNITSTYPE)
EmcTimeUnitsType = enum_type_wrapper.EnumTypeWrapper(_EMCTIMEUNITSTYPE)
_EMCTASKMODETYPE = _descriptor.EnumDescriptor(
name='EmcTaskModeType',
full_name='machinetalk.EmcTaskModeType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_TASK_MODE_MANUAL', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_MODE_AUTO', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_MODE_MDI', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7265,
serialized_end=7355,
)
_sym_db.RegisterEnumDescriptor(_EMCTASKMODETYPE)
EmcTaskModeType = enum_type_wrapper.EnumTypeWrapper(_EMCTASKMODETYPE)
_EMCTASKSTATETYPE = _descriptor.EnumDescriptor(
name='EmcTaskStateType',
full_name='machinetalk.EmcTaskStateType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_TASK_STATE_ESTOP', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_STATE_ESTOP_RESET', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_STATE_OFF', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_TASK_STATE_ON', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7357,
serialized_end=7480,
)
_sym_db.RegisterEnumDescriptor(_EMCTASKSTATETYPE)
EmcTaskStateType = enum_type_wrapper.EnumTypeWrapper(_EMCTASKSTATETYPE)
_EMCAXISTYPE = _descriptor.EnumDescriptor(
name='EmcAxisType',
full_name='machinetalk.EmcAxisType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_AXIS_LINEAR', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_AXIS_ANGULAR', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7482,
serialized_end=7538,
)
_sym_db.RegisterEnumDescriptor(_EMCAXISTYPE)
EmcAxisType = enum_type_wrapper.EnumTypeWrapper(_EMCAXISTYPE)
_EMCPOSITIONOFFSETTYPE = _descriptor.EnumDescriptor(
name='EmcPositionOffsetType',
full_name='machinetalk.EmcPositionOffsetType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_CONFIG_RELATIVE_OFFSET', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_CONFIG_MACHINE_OFFSET', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7540,
serialized_end=7626,
)
_sym_db.RegisterEnumDescriptor(_EMCPOSITIONOFFSETTYPE)
EmcPositionOffsetType = enum_type_wrapper.EnumTypeWrapper(_EMCPOSITIONOFFSETTYPE)
_EMCPOSITIONFEEDBACKTYPE = _descriptor.EnumDescriptor(
name='EmcPositionFeedbackType',
full_name='machinetalk.EmcPositionFeedbackType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='EMC_CONFIG_ACTUAL_FEEDBACK', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMC_CONFIG_COMMANDED_FEEDBACK', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7628,
serialized_end=7720,
)
_sym_db.RegisterEnumDescriptor(_EMCPOSITIONFEEDBACKTYPE)
EmcPositionFeedbackType = enum_type_wrapper.EnumTypeWrapper(_EMCPOSITIONFEEDBACKTYPE)
# Flat module-level aliases for the enum values declared above, as emitted
# by protoc for proto2 enums (mirrors the C++ scoping rules).
# EmcTaskExecStateType values:
EMC_TASK_EXEC_ERROR = 1
EMC_TASK_EXEC_DONE = 2
EMC_TASK_EXEC_WAITING_FOR_MOTION = 3
EMC_TASK_EXEC_WAITING_FOR_MOTION_QUEUE = 4
EMC_TASK_EXEC_WAITING_FOR_IO = 5
EMC_TASK_EXEC_WAITING_FOR_MOTION_AND_IO = 7
EMC_TASK_EXEC_WAITING_FOR_DELAY = 8
EMC_TASK_EXEC_WAITING_FOR_SYSTEM_CMD = 9
EMC_TASK_EXEC_WAITING_FOR_SPINDLE_ORIENTED = 10
# EmcInterpStateType values:
EMC_TASK_INTERP_IDLE = 1
EMC_TASK_INTERP_READING = 2
EMC_TASK_INTERP_PAUSED = 3
EMC_TASK_INTERP_WAITING = 4
# EmcInterpExitCodeType values:
EMC_INTERP_EXIT_OK = 0
EMC_INTERP_EXIT_EXIT = 1
EMC_INTERP_EXIT_EXECUTE_FINISH = 2
EMC_INTERP_EXIT_ENDFILE = 3
EMC_INTERP_EXIT_FILE_NOT_OPEN = 4
EMC_INTERP_EXIT_ERROR = 5
# EmcKinematicsType values:
KINEMATICS_IDENTITY = 1
KINEMATICS_FORWARD_ONLY = 2
KINEMATICS_INVERSE_ONLY = 3
KINEMATICS_BOTH = 4
# EmcTrajectoryModeType values:
EMC_TRAJ_MODE_FREE = 1
EMC_TRAJ_MODE_COORD = 2
EMC_TRAJ_MODE_TELEOP = 3
# EmcCanonUnitsType values:
CANON_UNITS_INCH = 1
CANON_UNITS_MM = 2
CANON_UNITS_CM = 3
# EmcLinearUnitsType values:
LINEAR_UNITS_INCH = 1
LINEAR_UNITS_MM = 2
LINEAR_UNITS_CM = 3
# EmcAngularUnitsType values:
ANGULAR_UNITS_DEGREES = 1
ANGULAR_UNITS_RADIAN = 2
ANGULAR_UNITS_GRAD = 3
# EmcTimeUnitsType values:
TIME_UNITS_MINUTE = 1
TIME_UNITS_SECOND = 2
# EmcTaskModeType values:
EMC_TASK_MODE_MANUAL = 1
EMC_TASK_MODE_AUTO = 2
EMC_TASK_MODE_MDI = 3
# EmcTaskStateType values:
EMC_TASK_STATE_ESTOP = 1
EMC_TASK_STATE_ESTOP_RESET = 2
EMC_TASK_STATE_OFF = 3
EMC_TASK_STATE_ON = 4
# EmcAxisType values:
EMC_AXIS_LINEAR = 1
EMC_AXIS_ANGULAR = 2
# EmcPositionOffsetType values:
EMC_CONFIG_RELATIVE_OFFSET = 1
EMC_CONFIG_MACHINE_OFFSET = 2
# EmcPositionFeedbackType values:
EMC_CONFIG_ACTUAL_FEEDBACK = 1
EMC_CONFIG_COMMANDED_FEEDBACK = 2
_EMCTOOLDATA = _descriptor.Descriptor(
name='EmcToolData',
full_name='machinetalk.EmcToolData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='index', full_name='machinetalk.EmcToolData.index', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='id', full_name='machinetalk.EmcToolData.id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='diameter', full_name='machinetalk.EmcToolData.diameter', index=2,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='frontangle', full_name='machinetalk.EmcToolData.frontangle', index=3,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='backangle', full_name='machinetalk.EmcToolData.backangle', index=4,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='orientation', full_name='machinetalk.EmcToolData.orientation', index=5,
number=15, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='offset', full_name='machinetalk.EmcToolData.offset', index=6,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='comment', full_name='machinetalk.EmcToolData.comment', index=7,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pocket', full_name='machinetalk.EmcToolData.pocket', index=8,
number=18, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\314\010')),
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=229,
serialized_end=427,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusMotionAxis: per-axis motion status (homing/limit/fault
# flags plus commanded/actual values).  Field `index` (number=1) is required
# (label=2); all other fields are optional (label=1).  Wire-type codes follow
# FieldDescriptorProto: type=5/cpp_type=1 int32, type=8/cpp_type=7 bool,
# type=1/cpp_type=5 double.  The options blob and the serialized_start/end
# byte offsets index into this module's serialized FileDescriptorProto —
# do not edit by hand; regenerate from the .proto instead.
_EMCSTATUSMOTIONAXIS = _descriptor.Descriptor(
  name='EmcStatusMotionAxis',
  full_name='machinetalk.EmcStatusMotionAxis',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusMotionAxis.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='enabled', full_name='machinetalk.EmcStatusMotionAxis.enabled', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fault', full_name='machinetalk.EmcStatusMotionAxis.fault', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ferror_current', full_name='machinetalk.EmcStatusMotionAxis.ferror_current', index=3,
      number=4, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ferror_highmark', full_name='machinetalk.EmcStatusMotionAxis.ferror_highmark', index=4,
      number=5, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='homed', full_name='machinetalk.EmcStatusMotionAxis.homed', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='homing', full_name='machinetalk.EmcStatusMotionAxis.homing', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='inpos', full_name='machinetalk.EmcStatusMotionAxis.inpos', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input', full_name='machinetalk.EmcStatusMotionAxis.input', index=8,
      number=9, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_hard_limit', full_name='machinetalk.EmcStatusMotionAxis.max_hard_limit', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_soft_limit', full_name='machinetalk.EmcStatusMotionAxis.max_soft_limit', index=10,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_hard_limit', full_name='machinetalk.EmcStatusMotionAxis.min_hard_limit', index=11,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_soft_limit', full_name='machinetalk.EmcStatusMotionAxis.min_soft_limit', index=12,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output', full_name='machinetalk.EmcStatusMotionAxis.output', index=13,
      number=14, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='override_limits', full_name='machinetalk.EmcStatusMotionAxis.override_limits', index=14,
      number=15, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='velocity', full_name='machinetalk.EmcStatusMotionAxis.velocity', index=15,
      number=16, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  # Serialized MessageOptions blob (custom option bytes) — opaque, keep exact.
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\315\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte span of this message's DescriptorProto within the serialized file.
  serialized_start=430,
  serialized_end=771,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusConfigAxis: static per-axis configuration (limits,
# ferror bounds, max accel/velocity, jog increments).  `index` (number=1) is
# required; the rest optional.  `axis_type` is an enum field (type=14) with
# default enum value 1; its enum_type cross-reference is None here.  Do not
# edit by hand — regenerate from the .proto instead.
_EMCSTATUSCONFIGAXIS = _descriptor.Descriptor(
  name='EmcStatusConfigAxis',
  full_name='machinetalk.EmcStatusConfigAxis',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusConfigAxis.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis_type', full_name='machinetalk.EmcStatusConfigAxis.axis_type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='backlash', full_name='machinetalk.EmcStatusConfigAxis.backlash', index=2,
      number=3, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_ferror', full_name='machinetalk.EmcStatusConfigAxis.max_ferror', index=3,
      number=4, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_position_limit', full_name='machinetalk.EmcStatusConfigAxis.max_position_limit', index=4,
      number=5, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_ferror', full_name='machinetalk.EmcStatusConfigAxis.min_ferror', index=5,
      number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_position_limit', full_name='machinetalk.EmcStatusConfigAxis.min_position_limit', index=6,
      number=7, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # Note the gap in field numbers (8 is unused in the .proto).
    _descriptor.FieldDescriptor(
      name='home_sequence', full_name='machinetalk.EmcStatusConfigAxis.home_sequence', index=7,
      number=9, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_acceleration', full_name='machinetalk.EmcStatusConfigAxis.max_acceleration', index=8,
      number=10, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_velocity', full_name='machinetalk.EmcStatusConfigAxis.max_velocity', index=9,
      number=11, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='increments', full_name='machinetalk.EmcStatusConfigAxis.increments', index=10,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\316\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=774,
  serialized_end=1068,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcProgramExtension: an (index, extension-string) pair, used as
# a repeated entry by EmcStatusConfig.program_extension below.  Do not edit
# by hand — regenerate from the .proto instead.
_EMCPROGRAMEXTENSION = _descriptor.Descriptor(
  name='EmcProgramExtension',
  full_name='machinetalk.EmcProgramExtension',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcProgramExtension.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='extension', full_name='machinetalk.EmcProgramExtension.extension', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\317\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1070,
  serialized_end=1133,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusAnalogIO: an indexed analog I/O value (required int32
# index, optional double value).  Do not edit by hand — regenerate from the
# .proto instead.
_EMCSTATUSANALOGIO = _descriptor.Descriptor(
  name='EmcStatusAnalogIO',
  full_name='machinetalk.EmcStatusAnalogIO',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusAnalogIO.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='machinetalk.EmcStatusAnalogIO.value', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\320\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1135,
  serialized_end=1192,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusDigitalIO: an indexed digital I/O value (required
# int32 index, optional bool value).  Do not edit by hand — regenerate from
# the .proto instead.
_EMCSTATUSDIGITALIO = _descriptor.Descriptor(
  name='EmcStatusDigitalIO',
  full_name='machinetalk.EmcStatusDigitalIO',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusDigitalIO.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='machinetalk.EmcStatusDigitalIO.value', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\321\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1194,
  serialized_end=1252,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusLimit: an indexed limit value (required int32 index,
# optional int32 value).  Do not edit by hand — regenerate from the .proto
# instead.
_EMCSTATUSLIMIT = _descriptor.Descriptor(
  name='EmcStatusLimit',
  full_name='machinetalk.EmcStatusLimit',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusLimit.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='machinetalk.EmcStatusLimit.value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\322\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1254,
  serialized_end=1308,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusGCode: an indexed active-G-code entry (required int32
# index, optional int32 value).  Do not edit by hand — regenerate from the
# .proto instead.
_EMCSTATUSGCODE = _descriptor.Descriptor(
  name='EmcStatusGCode',
  full_name='machinetalk.EmcStatusGCode',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusGCode.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='machinetalk.EmcStatusGCode.value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\323\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1310,
  serialized_end=1364,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusMCode: an indexed active-M-code entry (required int32
# index, optional int32 value) — same shape as EmcStatusGCode above.  Do not
# edit by hand — regenerate from the .proto instead.
_EMCSTATUSMCODE = _descriptor.Descriptor(
  name='EmcStatusMCode',
  full_name='machinetalk.EmcStatusMCode',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusMCode.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='machinetalk.EmcStatusMCode.value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\324\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1366,
  serialized_end=1420,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusSetting: an indexed setting value (required int32
# index, optional double value).  Do not edit by hand — regenerate from the
# .proto instead.
_EMCSTATUSSETTING = _descriptor.Descriptor(
  name='EmcStatusSetting',
  full_name='machinetalk.EmcStatusSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusSetting.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='machinetalk.EmcStatusSetting.value', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\325\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1422,
  serialized_end=1478,
)
# protoc-generated descriptor for the proto2 message
# machinetalk.EmcStatusConfig: machine-wide configuration snapshot (axis
# counts, velocity/acceleration limits, feed/spindle override bounds, units,
# UI strings such as increments/grids/geometry).  All fields are optional;
# label=3 fields (axis, program_extension, user_command) are repeated
# message fields whose message_type cross-references are None here.
# type=13/cpp_type=3 (arcdivision) is uint32; type=14 fields are enums with
# default enum value 1.  Field numbers are non-contiguous (2 and 9 unused in
# the .proto).  Do not edit by hand — regenerate from the .proto instead.
_EMCSTATUSCONFIG = _descriptor.Descriptor(
  name='EmcStatusConfig',
  full_name='machinetalk.EmcStatusConfig',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='default_acceleration', full_name='machinetalk.EmcStatusConfig.default_acceleration', index=0,
      number=1, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axes', full_name='machinetalk.EmcStatusConfig.axes', index=1,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='machinetalk.EmcStatusConfig.axis', index=2,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis_mask', full_name='machinetalk.EmcStatusConfig.axis_mask', index=3,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cycle_time', full_name='machinetalk.EmcStatusConfig.cycle_time', index=4,
      number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug', full_name='machinetalk.EmcStatusConfig.debug', index=5,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kinematics_type', full_name='machinetalk.EmcStatusConfig.kinematics_type', index=6,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_acceleration', full_name='machinetalk.EmcStatusConfig.max_acceleration', index=7,
      number=10, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_velocity', full_name='machinetalk.EmcStatusConfig.max_velocity', index=8,
      number=11, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='linear_units', full_name='machinetalk.EmcStatusConfig.linear_units', index=9,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='default_velocity', full_name='machinetalk.EmcStatusConfig.default_velocity', index=10,
      number=13, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='program_extension', full_name='machinetalk.EmcStatusConfig.program_extension', index=11,
      number=14, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='position_offset', full_name='machinetalk.EmcStatusConfig.position_offset', index=12,
      number=15, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='position_feedback', full_name='machinetalk.EmcStatusConfig.position_feedback', index=13,
      number=16, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_feed_override', full_name='machinetalk.EmcStatusConfig.max_feed_override', index=14,
      number=17, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_feed_override', full_name='machinetalk.EmcStatusConfig.min_feed_override', index=15,
      number=18, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_spindle_override', full_name='machinetalk.EmcStatusConfig.max_spindle_override', index=16,
      number=19, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_spindle_override', full_name='machinetalk.EmcStatusConfig.min_spindle_override', index=17,
      number=20, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='default_spindle_speed', full_name='machinetalk.EmcStatusConfig.default_spindle_speed', index=18,
      number=21, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='default_linear_velocity', full_name='machinetalk.EmcStatusConfig.default_linear_velocity', index=19,
      number=22, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_velocity', full_name='machinetalk.EmcStatusConfig.min_velocity', index=20,
      number=23, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_linear_velocity', full_name='machinetalk.EmcStatusConfig.max_linear_velocity', index=21,
      number=24, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_linear_velocity', full_name='machinetalk.EmcStatusConfig.min_linear_velocity', index=22,
      number=25, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='default_angular_velocity', full_name='machinetalk.EmcStatusConfig.default_angular_velocity', index=23,
      number=26, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_angular_velocity', full_name='machinetalk.EmcStatusConfig.max_angular_velocity', index=24,
      number=27, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_angular_velocity', full_name='machinetalk.EmcStatusConfig.min_angular_velocity', index=25,
      number=28, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='increments', full_name='machinetalk.EmcStatusConfig.increments', index=26,
      number=29, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='grids', full_name='machinetalk.EmcStatusConfig.grids', index=27,
      number=30, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lathe', full_name='machinetalk.EmcStatusConfig.lathe', index=28,
      number=31, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='geometry', full_name='machinetalk.EmcStatusConfig.geometry', index=29,
      number=32, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='arcdivision', full_name='machinetalk.EmcStatusConfig.arcdivision', index=30,
      number=33, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='no_force_homing', full_name='machinetalk.EmcStatusConfig.no_force_homing', index=31,
      number=34, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='remote_path', full_name='machinetalk.EmcStatusConfig.remote_path', index=32,
      number=35, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='time_units', full_name='machinetalk.EmcStatusConfig.time_units', index=33,
      number=36, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name', full_name='machinetalk.EmcStatusConfig.name', index=34,
      number=37, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='user_command', full_name='machinetalk.EmcStatusConfig.user_command', index=35,
      number=38, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='angular_units', full_name='machinetalk.EmcStatusConfig.angular_units', index=36,
      number=39, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\326\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1481,
  serialized_end=2691,
)
_EMCSTATUSMOTION = _descriptor.Descriptor(
name='EmcStatusMotion',
full_name='machinetalk.EmcStatusMotion',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='active_queue', full_name='machinetalk.EmcStatusMotion.active_queue', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='actual_position', full_name='machinetalk.EmcStatusMotion.actual_position', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='adaptive_feed_enabled', full_name='machinetalk.EmcStatusMotion.adaptive_feed_enabled', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ain', full_name='machinetalk.EmcStatusMotion.ain', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='aout', full_name='machinetalk.EmcStatusMotion.aout', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='axis', full_name='machinetalk.EmcStatusMotion.axis', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='block_delete', full_name='machinetalk.EmcStatusMotion.block_delete', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='current_line', full_name='machinetalk.EmcStatusMotion.current_line', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='current_vel', full_name='machinetalk.EmcStatusMotion.current_vel', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='delay_left', full_name='machinetalk.EmcStatusMotion.delay_left', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='din', full_name='machinetalk.EmcStatusMotion.din', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='distance_to_go', full_name='machinetalk.EmcStatusMotion.distance_to_go', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dout', full_name='machinetalk.EmcStatusMotion.dout', index=12,
number=13, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dtg', full_name='machinetalk.EmcStatusMotion.dtg', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='enabled', full_name='machinetalk.EmcStatusMotion.enabled', index=14,
number=15, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='feed_hold_enabled', full_name='machinetalk.EmcStatusMotion.feed_hold_enabled', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='feed_override_enabled', full_name='machinetalk.EmcStatusMotion.feed_override_enabled', index=16,
number=17, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='feedrate', full_name='machinetalk.EmcStatusMotion.feedrate', index=17,
number=18, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='g5x_index', full_name='machinetalk.EmcStatusMotion.g5x_index', index=18,
number=19, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='g5x_offset', full_name='machinetalk.EmcStatusMotion.g5x_offset', index=19,
number=20, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='g92_offset', full_name='machinetalk.EmcStatusMotion.g92_offset', index=20,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='id', full_name='machinetalk.EmcStatusMotion.id', index=21,
number=23, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inpos', full_name='machinetalk.EmcStatusMotion.inpos', index=22,
number=24, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='joint_actual_position', full_name='machinetalk.EmcStatusMotion.joint_actual_position', index=23,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='joint_position', full_name='machinetalk.EmcStatusMotion.joint_position', index=24,
number=26, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='limit', full_name='machinetalk.EmcStatusMotion.limit', index=25,
number=27, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='motion_line', full_name='machinetalk.EmcStatusMotion.motion_line', index=26,
number=28, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='motion_type', full_name='machinetalk.EmcStatusMotion.motion_type', index=27,
number=29, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='motion_mode', full_name='machinetalk.EmcStatusMotion.motion_mode', index=28,
number=30, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='paused', full_name='machinetalk.EmcStatusMotion.paused', index=29,
number=31, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='position', full_name='machinetalk.EmcStatusMotion.position', index=30,
number=32, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='probe_tripped', full_name='machinetalk.EmcStatusMotion.probe_tripped', index=31,
number=33, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='probe_val', full_name='machinetalk.EmcStatusMotion.probe_val', index=32,
number=34, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='probed_position', full_name='machinetalk.EmcStatusMotion.probed_position', index=33,
number=35, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='probing', full_name='machinetalk.EmcStatusMotion.probing', index=34,
number=36, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='queue', full_name='machinetalk.EmcStatusMotion.queue', index=35,
number=37, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='queue_full', full_name='machinetalk.EmcStatusMotion.queue_full', index=36,
number=38, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rotation_xy', full_name='machinetalk.EmcStatusMotion.rotation_xy', index=37,
number=39, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spindle_brake', full_name='machinetalk.EmcStatusMotion.spindle_brake', index=38,
number=40, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spindle_direction', full_name='machinetalk.EmcStatusMotion.spindle_direction', index=39,
number=41, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spindle_enabled', full_name='machinetalk.EmcStatusMotion.spindle_enabled', index=40,
number=42, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spindle_increasing', full_name='machinetalk.EmcStatusMotion.spindle_increasing', index=41,
number=43, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spindle_override_enabled', full_name='machinetalk.EmcStatusMotion.spindle_override_enabled', index=42,
number=44, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spindle_speed', full_name='machinetalk.EmcStatusMotion.spindle_speed', index=43,
number=45, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spindlerate', full_name='machinetalk.EmcStatusMotion.spindlerate', index=44,
number=46, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='state', full_name='machinetalk.EmcStatusMotion.state', index=45,
number=47, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=-1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_velocity', full_name='machinetalk.EmcStatusMotion.max_velocity', index=46,
number=48, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_acceleration', full_name='machinetalk.EmcStatusMotion.max_acceleration', index=47,
number=49, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rapidrate', full_name='machinetalk.EmcStatusMotion.rapidrate', index=48,
number=50, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\327\010')),
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2694,
serialized_end=4225,
)
# Generated Descriptor for proto2 message machinetalk.EmcStatusIo: machine I/O
# state (estop, coolant flood/mist, lube, tool offset/table, spindle tool slots).
# Emitted by protoc — do not edit by hand; field numbers, wire types and the
# serialized_start/serialized_end offsets must stay in sync with the .proto file.
# NOTE(review): the options byte blob presumably encodes a machinetalk-specific
# message option (msgid?) — confirm against the source .proto before relying on it.
_EMCSTATUSIO = _descriptor.Descriptor(
  name='EmcStatusIo',
  full_name='machinetalk.EmcStatusIo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='estop', full_name='machinetalk.EmcStatusIo.estop', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='flood', full_name='machinetalk.EmcStatusIo.flood', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lube', full_name='machinetalk.EmcStatusIo.lube', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lube_level', full_name='machinetalk.EmcStatusIo.lube_level', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mist', full_name='machinetalk.EmcStatusIo.mist', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # type=11 (TYPE_MESSAGE) fields: the actual message_type is wired up after
    # all descriptors are constructed (see the assignments at module tail).
    _descriptor.FieldDescriptor(
      name='tool_offset', full_name='machinetalk.EmcStatusIo.tool_offset', index=5,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tool_table', full_name='machinetalk.EmcStatusIo.tool_table', index=6,
      number=9, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pocket_prepped', full_name='machinetalk.EmcStatusIo.pocket_prepped', index=7,
      number=10, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tool_in_spindle', full_name='machinetalk.EmcStatusIo.tool_in_spindle', index=8,
      number=11, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\330\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition inside DESCRIPTOR's serialized
  # FileDescriptorProto.
  serialized_start=4228,
  serialized_end=4466,
  )
# Generated Descriptor for proto2 message machinetalk.EmcStatusTask: task-level
# interpreter/executor state (serial number, exec/task state enums, loaded file,
# line counters, pause flags). Emitted by protoc — do not edit by hand.
# enum-typed fields (type=14) get their enum_type resolved after construction.
_EMCSTATUSTASK = _descriptor.Descriptor(
  name='EmcStatusTask',
  full_name='machinetalk.EmcStatusTask',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='echo_serial_number', full_name='machinetalk.EmcStatusTask.echo_serial_number', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='exec_state', full_name='machinetalk.EmcStatusTask.exec_state', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file', full_name='machinetalk.EmcStatusTask.file', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_timeout', full_name='machinetalk.EmcStatusTask.input_timeout', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='optional_stop', full_name='machinetalk.EmcStatusTask.optional_stop', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='read_line', full_name='machinetalk.EmcStatusTask.read_line', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='task_mode', full_name='machinetalk.EmcStatusTask.task_mode', index=6,
      number=7, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='task_paused', full_name='machinetalk.EmcStatusTask.task_paused', index=7,
      number=8, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='task_state', full_name='machinetalk.EmcStatusTask.task_state', index=8,
      number=9, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='total_lines', full_name='machinetalk.EmcStatusTask.total_lines', index=9,
      number=10, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\331\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets within the serialized FileDescriptorProto.
  serialized_start=4469,
  serialized_end=4796,
  )
# Generated Descriptor for proto2 message machinetalk.EmcStatusInterp: G-code
# interpreter state (current command string, active g/m-codes, settings,
# interpreter state/error enums, program units). Emitted by protoc — do not edit.
_EMCSTATUSINTERP = _descriptor.Descriptor(
  name='EmcStatusInterp',
  full_name='machinetalk.EmcStatusInterp',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='command', full_name='machinetalk.EmcStatusInterp.command', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # label=3 marks repeated fields (gcodes, mcodes, settings).
    _descriptor.FieldDescriptor(
      name='gcodes', full_name='machinetalk.EmcStatusInterp.gcodes', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='interp_state', full_name='machinetalk.EmcStatusInterp.interp_state', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='interpreter_errcode', full_name='machinetalk.EmcStatusInterp.interpreter_errcode', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mcodes', full_name='machinetalk.EmcStatusInterp.mcodes', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='settings', full_name='machinetalk.EmcStatusInterp.settings', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='program_units', full_name='machinetalk.EmcStatusInterp.program_units', index=6,
      number=7, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\332\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets within the serialized FileDescriptorProto.
  serialized_start=4799,
  serialized_end=5155,
  )
# Generated Descriptor for proto2 message machinetalk.EmcCommandParameters: the
# parameter bag attached to EMC command messages (scalar args in numbers 1-10,
# enum/message-typed args in the 100+ range). Emitted by protoc — do not edit.
_EMCCOMMANDPARAMETERS = _descriptor.Descriptor(
  name='EmcCommandParameters',
  full_name='machinetalk.EmcCommandParameters',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcCommandParameters.index', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug_level', full_name='machinetalk.EmcCommandParameters.debug_level', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='line_number', full_name='machinetalk.EmcCommandParameters.line_number', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='machinetalk.EmcCommandParameters.scale', index=3,
      number=4, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='velocity', full_name='machinetalk.EmcCommandParameters.velocity', index=4,
      number=5, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='distance', full_name='machinetalk.EmcCommandParameters.distance', index=5,
      number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='machinetalk.EmcCommandParameters.value', index=6,
      number=7, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='enable', full_name='machinetalk.EmcCommandParameters.enable', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='command', full_name='machinetalk.EmcCommandParameters.command', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='path', full_name='machinetalk.EmcCommandParameters.path', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='task_mode', full_name='machinetalk.EmcCommandParameters.task_mode', index=10,
      number=100, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='task_state', full_name='machinetalk.EmcCommandParameters.task_state', index=11,
      number=101, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='traj_mode', full_name='machinetalk.EmcCommandParameters.traj_mode', index=12,
      number=102, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pose', full_name='machinetalk.EmcCommandParameters.pose', index=13,
      number=103, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tool_data', full_name='machinetalk.EmcCommandParameters.tool_data', index=14,
      number=104, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tool_table', full_name='machinetalk.EmcCommandParameters.tool_table', index=15,
      number=105, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\333\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets within the serialized FileDescriptorProto.
  serialized_start=5158,
  serialized_end=5640,
  )
# Generated Descriptor for proto2 message machinetalk.EmcStatusUserCommand: an
# indexed user command string. Note 'index' is label=2 (required) — the only
# required field in this chunk. Emitted by protoc — do not edit by hand.
_EMCSTATUSUSERCOMMAND = _descriptor.Descriptor(
  name='EmcStatusUserCommand',
  full_name='machinetalk.EmcStatusUserCommand',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='machinetalk.EmcStatusUserCommand.index', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='command', full_name='machinetalk.EmcStatusUserCommand.command', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\334\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets within the serialized FileDescriptorProto.
  serialized_start=5642,
  serialized_end=5704,
  )
# Generated Descriptor for proto2 message machinetalk.EmcStatusUI: UI visibility
# flags (nine booleans for spindle/coolant controls), field numbers 1-9.
# Emitted by protoc — do not edit by hand.
_EMCSTATUSUI = _descriptor.Descriptor(
  name='EmcStatusUI',
  full_name='machinetalk.EmcStatusUI',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='spindle_brake_visible', full_name='machinetalk.EmcStatusUI.spindle_brake_visible', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='spindle_cw_visible', full_name='machinetalk.EmcStatusUI.spindle_cw_visible', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='spindle_ccw_visible', full_name='machinetalk.EmcStatusUI.spindle_ccw_visible', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='spindle_stop_visible', full_name='machinetalk.EmcStatusUI.spindle_stop_visible', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='spindle_plus_visible', full_name='machinetalk.EmcStatusUI.spindle_plus_visible', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='spindle_minus_visible', full_name='machinetalk.EmcStatusUI.spindle_minus_visible', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='spindle_override_visible', full_name='machinetalk.EmcStatusUI.spindle_override_visible', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='coolant_flood_visible', full_name='machinetalk.EmcStatusUI.coolant_flood_visible', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='coolant_mist_visible', full_name='machinetalk.EmcStatusUI.coolant_mist_visible', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\335\010')),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets within the serialized FileDescriptorProto.
  serialized_start=5707,
  serialized_end=6002,
  )
# ---------------------------------------------------------------------------
# Generated by the protocol buffer compiler — DO NOT EDIT BY HAND.
# The statements below finish building the descriptors declared above:
#   1. resolve message/enum cross-references on every composite field,
#   2. register message and enum descriptors on the file DESCRIPTOR,
#   3. synthesize the concrete Python message classes and register them
#      with the symbol database,
#   4. attach per-message serialized MessageOptions blobs.
# ---------------------------------------------------------------------------
# 1. Cross-reference resolution: point each message-/enum-typed field at its
#    descriptor (including types imported from the preview, types, motcmds
#    and emcclass proto modules).
_EMCTOOLDATA.fields_by_name['offset'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSCONFIGAXIS.fields_by_name['axis_type'].enum_type = _EMCAXISTYPE
_EMCSTATUSCONFIG.fields_by_name['axis'].message_type = _EMCSTATUSCONFIGAXIS
_EMCSTATUSCONFIG.fields_by_name['kinematics_type'].enum_type = _EMCKINEMATICSTYPE
_EMCSTATUSCONFIG.fields_by_name['linear_units'].enum_type = _EMCLINEARUNITSTYPE
_EMCSTATUSCONFIG.fields_by_name['program_extension'].message_type = _EMCPROGRAMEXTENSION
_EMCSTATUSCONFIG.fields_by_name['position_offset'].enum_type = _EMCPOSITIONOFFSETTYPE
_EMCSTATUSCONFIG.fields_by_name['position_feedback'].enum_type = _EMCPOSITIONFEEDBACKTYPE
_EMCSTATUSCONFIG.fields_by_name['time_units'].enum_type = _EMCTIMEUNITSTYPE
_EMCSTATUSCONFIG.fields_by_name['user_command'].message_type = _EMCSTATUSUSERCOMMAND
_EMCSTATUSCONFIG.fields_by_name['angular_units'].enum_type = _EMCANGULARUNITSTYPE
_EMCSTATUSMOTION.fields_by_name['actual_position'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['ain'].message_type = _EMCSTATUSANALOGIO
_EMCSTATUSMOTION.fields_by_name['aout'].message_type = _EMCSTATUSANALOGIO
_EMCSTATUSMOTION.fields_by_name['axis'].message_type = _EMCSTATUSMOTIONAXIS
_EMCSTATUSMOTION.fields_by_name['din'].message_type = _EMCSTATUSDIGITALIO
_EMCSTATUSMOTION.fields_by_name['dout'].message_type = _EMCSTATUSDIGITALIO
_EMCSTATUSMOTION.fields_by_name['dtg'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['g5x_index'].enum_type = machinetalk_dot_protobuf_dot_types__pb2._ORIGININDEX
_EMCSTATUSMOTION.fields_by_name['g5x_offset'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['g92_offset'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['joint_actual_position'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['joint_position'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['limit'].message_type = _EMCSTATUSLIMIT
_EMCSTATUSMOTION.fields_by_name['motion_type'].enum_type = machinetalk_dot_protobuf_dot_motcmds__pb2._MOTIONTYPE
_EMCSTATUSMOTION.fields_by_name['motion_mode'].enum_type = _EMCTRAJECTORYMODETYPE
_EMCSTATUSMOTION.fields_by_name['position'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['probed_position'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSMOTION.fields_by_name['state'].enum_type = machinetalk_dot_protobuf_dot_types__pb2._RCS_STATUS
_EMCSTATUSIO.fields_by_name['tool_offset'].message_type = machinetalk_dot_protobuf_dot_preview__pb2._POSITION
_EMCSTATUSIO.fields_by_name['tool_table'].message_type = _EMCTOOLDATA
_EMCSTATUSTASK.fields_by_name['exec_state'].enum_type = _EMCTASKEXECSTATETYPE
_EMCSTATUSTASK.fields_by_name['task_mode'].enum_type = _EMCTASKMODETYPE
_EMCSTATUSTASK.fields_by_name['task_state'].enum_type = _EMCTASKSTATETYPE
_EMCSTATUSINTERP.fields_by_name['gcodes'].message_type = _EMCSTATUSGCODE
_EMCSTATUSINTERP.fields_by_name['interp_state'].enum_type = _EMCINTERPSTATETYPE
_EMCSTATUSINTERP.fields_by_name['interpreter_errcode'].enum_type = _EMCINTERPEXITCODETYPE
_EMCSTATUSINTERP.fields_by_name['mcodes'].message_type = _EMCSTATUSMCODE
_EMCSTATUSINTERP.fields_by_name['settings'].message_type = _EMCSTATUSSETTING
_EMCSTATUSINTERP.fields_by_name['program_units'].enum_type = _EMCCANONUNITSTYPE
_EMCCOMMANDPARAMETERS.fields_by_name['task_mode'].enum_type = _EMCTASKMODETYPE
_EMCCOMMANDPARAMETERS.fields_by_name['task_state'].enum_type = _EMCTASKSTATETYPE
_EMCCOMMANDPARAMETERS.fields_by_name['traj_mode'].enum_type = _EMCTRAJECTORYMODETYPE
_EMCCOMMANDPARAMETERS.fields_by_name['pose'].message_type = machinetalk_dot_protobuf_dot_emcclass__pb2._EMCPOSE
_EMCCOMMANDPARAMETERS.fields_by_name['tool_data'].message_type = _EMCTOOLDATA
_EMCCOMMANDPARAMETERS.fields_by_name['tool_table'].message_type = _EMCTOOLDATA
# 2. Register message and enum descriptors by name on the file descriptor.
DESCRIPTOR.message_types_by_name['EmcToolData'] = _EMCTOOLDATA
DESCRIPTOR.message_types_by_name['EmcStatusMotionAxis'] = _EMCSTATUSMOTIONAXIS
DESCRIPTOR.message_types_by_name['EmcStatusConfigAxis'] = _EMCSTATUSCONFIGAXIS
DESCRIPTOR.message_types_by_name['EmcProgramExtension'] = _EMCPROGRAMEXTENSION
DESCRIPTOR.message_types_by_name['EmcStatusAnalogIO'] = _EMCSTATUSANALOGIO
DESCRIPTOR.message_types_by_name['EmcStatusDigitalIO'] = _EMCSTATUSDIGITALIO
DESCRIPTOR.message_types_by_name['EmcStatusLimit'] = _EMCSTATUSLIMIT
DESCRIPTOR.message_types_by_name['EmcStatusGCode'] = _EMCSTATUSGCODE
DESCRIPTOR.message_types_by_name['EmcStatusMCode'] = _EMCSTATUSMCODE
DESCRIPTOR.message_types_by_name['EmcStatusSetting'] = _EMCSTATUSSETTING
DESCRIPTOR.message_types_by_name['EmcStatusConfig'] = _EMCSTATUSCONFIG
DESCRIPTOR.message_types_by_name['EmcStatusMotion'] = _EMCSTATUSMOTION
DESCRIPTOR.message_types_by_name['EmcStatusIo'] = _EMCSTATUSIO
DESCRIPTOR.message_types_by_name['EmcStatusTask'] = _EMCSTATUSTASK
DESCRIPTOR.message_types_by_name['EmcStatusInterp'] = _EMCSTATUSINTERP
DESCRIPTOR.message_types_by_name['EmcCommandParameters'] = _EMCCOMMANDPARAMETERS
DESCRIPTOR.message_types_by_name['EmcStatusUserCommand'] = _EMCSTATUSUSERCOMMAND
DESCRIPTOR.message_types_by_name['EmcStatusUI'] = _EMCSTATUSUI
DESCRIPTOR.enum_types_by_name['EmcTaskExecStateType'] = _EMCTASKEXECSTATETYPE
DESCRIPTOR.enum_types_by_name['EmcInterpStateType'] = _EMCINTERPSTATETYPE
DESCRIPTOR.enum_types_by_name['EmcInterpExitCodeType'] = _EMCINTERPEXITCODETYPE
DESCRIPTOR.enum_types_by_name['EmcKinematicsType'] = _EMCKINEMATICSTYPE
DESCRIPTOR.enum_types_by_name['EmcTrajectoryModeType'] = _EMCTRAJECTORYMODETYPE
DESCRIPTOR.enum_types_by_name['EmcCanonUnitsType'] = _EMCCANONUNITSTYPE
DESCRIPTOR.enum_types_by_name['EmcLinearUnitsType'] = _EMCLINEARUNITSTYPE
DESCRIPTOR.enum_types_by_name['EmcAngularUnitsType'] = _EMCANGULARUNITSTYPE
DESCRIPTOR.enum_types_by_name['EmcTimeUnitsType'] = _EMCTIMEUNITSTYPE
DESCRIPTOR.enum_types_by_name['EmcTaskModeType'] = _EMCTASKMODETYPE
DESCRIPTOR.enum_types_by_name['EmcTaskStateType'] = _EMCTASKSTATETYPE
DESCRIPTOR.enum_types_by_name['EmcAxisType'] = _EMCAXISTYPE
DESCRIPTOR.enum_types_by_name['EmcPositionOffsetType'] = _EMCPOSITIONOFFSETTYPE
DESCRIPTOR.enum_types_by_name['EmcPositionFeedbackType'] = _EMCPOSITIONFEEDBACKTYPE
# 3. Create the concrete Python message classes from the descriptors and
#    register them with the default symbol database.
EmcToolData = _reflection.GeneratedProtocolMessageType('EmcToolData', (_message.Message,), dict(
  DESCRIPTOR = _EMCTOOLDATA,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcToolData)
  ))
_sym_db.RegisterMessage(EmcToolData)
EmcStatusMotionAxis = _reflection.GeneratedProtocolMessageType('EmcStatusMotionAxis', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSMOTIONAXIS,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusMotionAxis)
  ))
_sym_db.RegisterMessage(EmcStatusMotionAxis)
EmcStatusConfigAxis = _reflection.GeneratedProtocolMessageType('EmcStatusConfigAxis', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSCONFIGAXIS,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusConfigAxis)
  ))
_sym_db.RegisterMessage(EmcStatusConfigAxis)
EmcProgramExtension = _reflection.GeneratedProtocolMessageType('EmcProgramExtension', (_message.Message,), dict(
  DESCRIPTOR = _EMCPROGRAMEXTENSION,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcProgramExtension)
  ))
_sym_db.RegisterMessage(EmcProgramExtension)
EmcStatusAnalogIO = _reflection.GeneratedProtocolMessageType('EmcStatusAnalogIO', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSANALOGIO,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusAnalogIO)
  ))
_sym_db.RegisterMessage(EmcStatusAnalogIO)
EmcStatusDigitalIO = _reflection.GeneratedProtocolMessageType('EmcStatusDigitalIO', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSDIGITALIO,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusDigitalIO)
  ))
_sym_db.RegisterMessage(EmcStatusDigitalIO)
EmcStatusLimit = _reflection.GeneratedProtocolMessageType('EmcStatusLimit', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSLIMIT,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusLimit)
  ))
_sym_db.RegisterMessage(EmcStatusLimit)
EmcStatusGCode = _reflection.GeneratedProtocolMessageType('EmcStatusGCode', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSGCODE,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusGCode)
  ))
_sym_db.RegisterMessage(EmcStatusGCode)
EmcStatusMCode = _reflection.GeneratedProtocolMessageType('EmcStatusMCode', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSMCODE,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusMCode)
  ))
_sym_db.RegisterMessage(EmcStatusMCode)
EmcStatusSetting = _reflection.GeneratedProtocolMessageType('EmcStatusSetting', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSSETTING,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusSetting)
  ))
_sym_db.RegisterMessage(EmcStatusSetting)
EmcStatusConfig = _reflection.GeneratedProtocolMessageType('EmcStatusConfig', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSCONFIG,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusConfig)
  ))
_sym_db.RegisterMessage(EmcStatusConfig)
EmcStatusMotion = _reflection.GeneratedProtocolMessageType('EmcStatusMotion', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSMOTION,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusMotion)
  ))
_sym_db.RegisterMessage(EmcStatusMotion)
EmcStatusIo = _reflection.GeneratedProtocolMessageType('EmcStatusIo', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSIO,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusIo)
  ))
_sym_db.RegisterMessage(EmcStatusIo)
EmcStatusTask = _reflection.GeneratedProtocolMessageType('EmcStatusTask', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSTASK,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusTask)
  ))
_sym_db.RegisterMessage(EmcStatusTask)
EmcStatusInterp = _reflection.GeneratedProtocolMessageType('EmcStatusInterp', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSINTERP,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusInterp)
  ))
_sym_db.RegisterMessage(EmcStatusInterp)
EmcCommandParameters = _reflection.GeneratedProtocolMessageType('EmcCommandParameters', (_message.Message,), dict(
  DESCRIPTOR = _EMCCOMMANDPARAMETERS,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcCommandParameters)
  ))
_sym_db.RegisterMessage(EmcCommandParameters)
EmcStatusUserCommand = _reflection.GeneratedProtocolMessageType('EmcStatusUserCommand', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSUSERCOMMAND,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusUserCommand)
  ))
_sym_db.RegisterMessage(EmcStatusUserCommand)
EmcStatusUI = _reflection.GeneratedProtocolMessageType('EmcStatusUI', (_message.Message,), dict(
  DESCRIPTOR = _EMCSTATUSUI,
  __module__ = 'machinetalk.protobuf.status_pb2'
  # @@protoc_insertion_point(class_scope:machinetalk.EmcStatusUI)
  ))
_sym_db.RegisterMessage(EmcStatusUI)
# 4. Attach per-message serialized MessageOptions blobs (the '\222?...' bytes
#    are the pre-serialized custom option payloads emitted by protoc —
#    presumably machinetalk msgid annotations; confirm against the .proto).
_EMCTOOLDATA.has_options = True
_EMCTOOLDATA._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\314\010'))
_EMCSTATUSMOTIONAXIS.has_options = True
_EMCSTATUSMOTIONAXIS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\315\010'))
_EMCSTATUSCONFIGAXIS.has_options = True
_EMCSTATUSCONFIGAXIS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\316\010'))
_EMCPROGRAMEXTENSION.has_options = True
_EMCPROGRAMEXTENSION._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\317\010'))
_EMCSTATUSANALOGIO.has_options = True
_EMCSTATUSANALOGIO._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\320\010'))
_EMCSTATUSDIGITALIO.has_options = True
_EMCSTATUSDIGITALIO._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\321\010'))
_EMCSTATUSLIMIT.has_options = True
_EMCSTATUSLIMIT._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\322\010'))
_EMCSTATUSGCODE.has_options = True
_EMCSTATUSGCODE._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\323\010'))
_EMCSTATUSMCODE.has_options = True
_EMCSTATUSMCODE._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\324\010'))
_EMCSTATUSSETTING.has_options = True
_EMCSTATUSSETTING._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\325\010'))
_EMCSTATUSCONFIG.has_options = True
_EMCSTATUSCONFIG._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\326\010'))
_EMCSTATUSMOTION.has_options = True
_EMCSTATUSMOTION._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\327\010'))
_EMCSTATUSIO.has_options = True
_EMCSTATUSIO._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\330\010'))
_EMCSTATUSTASK.has_options = True
_EMCSTATUSTASK._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\331\010'))
_EMCSTATUSINTERP.has_options = True
_EMCSTATUSINTERP._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\332\010'))
_EMCCOMMANDPARAMETERS.has_options = True
_EMCCOMMANDPARAMETERS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\333\010'))
_EMCSTATUSUSERCOMMAND.has_options = True
_EMCSTATUSUSERCOMMAND._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\334\010'))
_EMCSTATUSUI.has_options = True
_EMCSTATUSUI._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\222?\003H\335\010'))
# @@protoc_insertion_point(module_scope)
| 47.414761
| 13,484
| 0.751339
|
3cf64f3a5efad2ab70e2ef7b515fee5c9fffd510
| 1,257
|
py
|
Python
|
google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/enums/types/ad_serving_optimization_status.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 7
|
2021-02-21T10:39:41.000Z
|
2021-12-07T07:31:28.000Z
|
google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/enums/types/ad_serving_optimization_status.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 6
|
2021-02-02T23:46:11.000Z
|
2021-11-15T01:46:02.000Z
|
google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/enums/types/ad_serving_optimization_status.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 4
|
2021-01-28T23:25:45.000Z
|
2021-08-30T01:55:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
# Generated by the gapic code generator — do not edit by hand.
# Declares the proto-plus module manifest for this enum wrapper.
__protobuf__ = proto.module(
    package='google.ads.googleads.v7.enums',
    marshal='google.ads.googleads.v7',
    manifest={
        'AdServingOptimizationStatusEnum',
    },
)
class AdServingOptimizationStatusEnum(proto.Message):
    r"""Possible ad serving statuses of a campaign.
    """
    class AdServingOptimizationStatus(proto.Enum):
        r"""Enum describing possible serving statuses."""
        # Numeric values are wire-format values fixed by the Google Ads API
        # proto definition; they must never be renumbered.
        UNSPECIFIED = 0
        UNKNOWN = 1
        OPTIMIZE = 2
        CONVERSION_OPTIMIZE = 3
        ROTATE = 4
        ROTATE_INDEFINITELY = 5
        UNAVAILABLE = 6
# Public API of this module is exactly the manifest declared above.
__all__ = tuple(sorted(__protobuf__.manifest))
| 29.232558
| 74
| 0.70008
|
53f3f04082698e9f1805080ec268f4a4de9614ee
| 3,727
|
py
|
Python
|
zhym/TestImg_hande_only_table1.py
|
yanmeizhao/MyMmdetection
|
ae369f88b1faa87b32688c7ca770ec67f0a5c8d1
|
[
"Apache-2.0"
] | 1
|
2019-07-30T01:31:30.000Z
|
2019-07-30T01:31:30.000Z
|
zhym/TestImg_hande_only_table1.py
|
yanmeizhao/MyMmdetection
|
ae369f88b1faa87b32688c7ca770ec67f0a5c8d1
|
[
"Apache-2.0"
] | null | null | null |
zhym/TestImg_hande_only_table1.py
|
yanmeizhao/MyMmdetection
|
ae369f88b1faa87b32688c7ca770ec67f0a5c8d1
|
[
"Apache-2.0"
] | null | null | null |
import os
import cv2
import mmcv
import numpy as np
from mmcv.image import imread, imwrite
from mmcv import color_val
from mmdet.apis import init_detector, inference_detector
# --- Detector configuration (paths are machine-specific) ---
config_file = 'configs_zhym/faster_rcnn_r50_fpn_1x_voc_handeonlytable.py'
checkpoint_file = 'work_dirs/faster_rcnn_r50_fpn_1x_handeonlytable/epoch_10.pth'
#config_file = 'configs_zhym/cascade_mask_rcnn_r101_fpn_1x_four_points.py'
#checkpoint_file = 'work_dirs/cascade_mask_rcnn_r101_fpn_1x/epoch_12.pth'
# build the model from a config file and a checkpoint file
model = init_detector(config_file, checkpoint_file, device='cuda:0')
# test a single image and show the results
img_root_dir = '/home/zhaoyanmei/data/HANDE/HandeOnlyTable/PDF4_new_JPEGs/'
#img_root_dir = '/home/zhaoyanmei/mmdetection/data/CoCoFourPoint/test/'
dst_dir = '/home/zhaoyanmei/data/HANDE/HandeOnlyTable/visualize_PDF4/'
dst_pred_txt = dst_dir + 'pred_result.txt'
# NOTE(review): this file handle stays open for the whole script lifetime and
# is never explicitly closed — predictions are flushed only at interpreter exit.
pred_txt_file = open(dst_pred_txt, 'w')
def show_result(img, result, class_names, score_thr=0.5, out_file=None):
    """Draw the detections produced for a single image and save/show them.

    ``img`` is a path to the image file; ``result`` is either a
    ``(bbox_result, segm_result)`` tuple or a bare per-class bbox list;
    ``class_names`` maps class indices to display names.  When ``out_file``
    is None the image is shown instead of written.
    """
    assert isinstance(class_names, (tuple, list))
    image_name = os.path.basename(img)
    image = mmcv.imread(img)
    # The detector returns a tuple when segmentation masks are predicted.
    bbox_result, segm_result = result if isinstance(result, tuple) else (result, None)
    all_bboxes = np.vstack(bbox_result)
    # One label id per box: class i contributed bbox_result[i].shape[0] boxes.
    label_chunks = []
    for class_id, class_bboxes in enumerate(bbox_result):
        label_chunks.append(np.full(class_bboxes.shape[0], class_id, dtype=np.int32))
    all_labels = np.concatenate(label_chunks)
    imshow_det_bboxes(
        image.copy(),
        all_bboxes,
        all_labels,
        image_name,
        class_names=class_names,
        score_thr=score_thr,
        show=out_file is None,
        out_file=out_file)
def imshow_det_bboxes(img, bboxes, labels, img_name, class_names=None, score_thr=0.7,
                      bbox_color='green', text_color='green', thickness=1,
                      font_scale=0.5, show=True, win_name='', wait_time=0,
                      out_file=None):
    """Draw labelled bounding boxes on ``img`` and log kept detections.

    bboxes: (n, 4) or (n, 5) array, last column is the score when present.
    labels: (n,) int array of class indices aligned with ``bboxes``.
    Side effect: appends one CSV line per drawn box to the module-level
    ``pred_txt_file``.

    Bug fixes vs. the original:
    * score filtering assigned to a typo'd name (``bboxeses``), so labels were
      filtered while bboxes were not, pairing boxes with the wrong labels;
    * ``imshow`` was called but never imported (NameError when show=True) —
      use ``mmcv.imshow`` instead.
    """
    assert bboxes.ndim == 2
    assert labels.ndim == 1
    assert bboxes.shape[0] == labels.shape[0]
    assert bboxes.shape[1] == 4 or bboxes.shape[1] == 5
    img = imread(img)
    if score_thr > 0:
        # Scores are only present in the 5-column layout.
        assert bboxes.shape[1] == 5
        scores = bboxes[:, -1]
        inds = scores > score_thr
        bboxes = bboxes[inds, :]  # fix: filter boxes together with labels
        labels = labels[inds]
    bbox_color = color_val(bbox_color)
    text_color = color_val(text_color)
    for bbox, label in zip(bboxes, labels):
        bbox_int = bbox.astype(np.int32)
        left_top = (bbox_int[0], bbox_int[1])
        right_bottom = (bbox_int[2], bbox_int[3])
        cv2.rectangle(img, left_top, right_bottom, bbox_color, thickness=thickness)
        label_text = class_names[label] if class_names is not None else 'cls {}'.format(label)
        if len(bbox) > 4:
            label_text += '|{:.02f}'.format(bbox[-1])
        cv2.putText(img, label_text, (bbox_int[0], bbox_int[1]-2),
                    cv2.FONT_HERSHEY_COMPLEX, font_scale, text_color)
        # Persist the detection: "<image>,<x1>,<y1>,<x2>,<y2>[,<score>],<label>"
        bbox_str = [str(bbox[i]) for i in range(len(bbox))]
        bbox_str.insert(0, img_name)
        bbox_str.append(label_text)
        pred_txt_file.write(','.join(bbox_str) + '\n')
    if show:
        mmcv.imshow(img, win_name, wait_time)  # fix: imshow was not imported
    if out_file is not None:
        imwrite(img, out_file)
# Run inference on every image under img_root_dir, writing an annotated copy
# to dst_dir (imshow_det_bboxes also appends detection lines to pred_txt_file).
for i, img_file in enumerate(os.listdir(img_root_dir)):
    print(i)  # progress indicator
    img = os.path.join(img_root_dir, img_file)
    result = inference_detector(model, img)
    show_result(img, result, model.CLASSES, out_file=os.path.join(dst_dir, img_file))
# test a list of images and write the results to image files
#imgs = ['000000000060.jpg']
#for i, result in enumerate(inference_detector(model, imgs)):
#    show_result(imgs[i], result, model.CLASSES, out_file='result_{}.jpg'.format(i))
| 39.231579
| 204
| 0.696002
|
45980d5debee01bd1d715b1b7da511c2a2840496
| 1,258
|
py
|
Python
|
missing_values-101703547-simran_kaur/missing_values.py
|
simrankaur7575/missing_values-101703547-simran_kaur
|
5d293a7ea8a6aa73e427f4008cf9dc6fa3d9a1df
|
[
"MIT"
] | null | null | null |
missing_values-101703547-simran_kaur/missing_values.py
|
simrankaur7575/missing_values-101703547-simran_kaur
|
5d293a7ea8a6aa73e427f4008cf9dc6fa3d9a1df
|
[
"MIT"
] | null | null | null |
missing_values-101703547-simran_kaur/missing_values.py
|
simrankaur7575/missing_values-101703547-simran_kaur
|
5d293a7ea8a6aa73e427f4008cf9dc6fa3d9a1df
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on sun Feb 16 14:39:30 2020
@author: simran kaur
"""
#importing libraries
import numpy as np
import pandas as pd
import sys
import datawig
def missing(data):
    """Impute missing values in *data* with datawig and return the result.

    For each column containing NaNs, trains a datawig SimpleImputer on the
    rows where that column is present (using all other columns as features)
    and predicts the missing entries.  Prints a per-column summary of how
    many values were replaced.

    Returns the DataFrame with NaNs filled; an empty DataFrame is returned
    unchanged.
    """
    if data.shape[0] == 0:
        # Fix: the original did `return print(...)`, i.e. returned None,
        # which crashed callers chaining .to_csv() on the result.
        print("empty dataset")
        return data
    col_null = data.columns[data.isnull().any()]
    # Predicted replacements, aligned with data's row positions.
    data_out = pd.DataFrame(0, index=np.arange(len(data)), columns=col_null)
    pstatement = []
    for nul_col in col_null:
        cnull = data[nul_col].isnull()
        cwnull = data[nul_col].notnull()
        # Train on rows where the target column is present.
        imputer = datawig.SimpleImputer(data.columns[data.columns != nul_col], nul_col, 'imputer_model')
        imputer.fit(data[cwnull])
        final = imputer.predict(data[cnull])
        # Index-aligned assignment: only the rows with missing values get
        # predictions; the rest stay untouched by fillna below.
        data_out[nul_col] = final[nul_col + '_imputed']
        pstatement.append("number of missing values replaced in " + str(nul_col) + " is " + str(final.shape[0]))
    data = data.fillna(data_out)
    print("\n\n\n")
    for i in pstatement:
        print("\n", i)
    return data
def main():
    """Command-line entry point: impute NaNs in one CSV file, in place.

    Usage: python <programName> <DataFile>
    The imputed DataFrame overwrites the input file.
    """
    if len(sys.argv) != 2:
        # Fix: the original usage string promised a separate output file,
        # but the script takes exactly one CSV and overwrites it in place.
        print("Incorrect parameters. Input format: python <programName> <DataFile>")
        exit(1)
    else:
        data = pd.read_csv(sys.argv[1])
        # index=False: without it every run prepends an unnamed index column
        # into the overwritten input file.
        missing(data).to_csv(sys.argv[1], index=False)
if __name__ == "__main__":
    main()
| 26.765957
| 110
| 0.643084
|
dbc77ed57f3331451836084de56d235af740ba59
| 475
|
py
|
Python
|
maceoutliner/users/urls.py
|
maceoutliner/maceoutliner
|
259c8e1d38a454f83e738e8d13e9e7a580f22c9a
|
[
"BSD-3-Clause-Clear"
] | 1
|
2018-04-23T11:05:45.000Z
|
2018-04-23T11:05:45.000Z
|
maceoutliner/users/urls.py
|
maceoutliner/maceoutliner
|
259c8e1d38a454f83e738e8d13e9e7a580f22c9a
|
[
"BSD-3-Clause-Clear"
] | 474
|
2018-03-28T17:37:41.000Z
|
2021-08-11T03:29:31.000Z
|
maceoutliner/users/urls.py
|
maceoutliner/maceoutliner
|
259c8e1d38a454f83e738e8d13e9e7a580f22c9a
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
from django.conf.urls import url
from . import views
# Namespace under which these route names are reversed (e.g.
# "maceoutliner.users:detail").
app_name = "maceoutliner.users"
# URL routes for the users app, dispatching to the class-based views above.
urlpatterns = [
    # "" -> UserListView
    url(regex=r"^$", view=views.UserListView.as_view(), name="list"),
    # "~redirect/" -> UserRedirectView
    url(regex=r"^~redirect/$", view=views.UserRedirectView.as_view(), name="redirect"),
    # "<username>/" -> UserDetailView; username allows word chars plus . @ + -
    url(
        regex=r"^(?P<username>[\w.@+-]+)/$",
        view=views.UserDetailView.as_view(),
        name="detail",
    ),
    # "~update/" -> UserUpdateView
    url(regex=r"^~update/$", view=views.UserUpdateView.as_view(), name="update"),
]
| 27.941176
| 87
| 0.627368
|
78af781534d10fdfd950aabd709542bff6fd1ea4
| 1,062
|
py
|
Python
|
var/spack/repos/builtin/packages/r-proxy/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360
|
2017-11-06T08:47:01.000Z
|
2022-03-31T14:45:33.000Z
|
var/spack/repos/builtin/packages/r-proxy/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838
|
2017-11-04T07:49:45.000Z
|
2022-03-31T23:38:39.000Z
|
var/spack/repos/builtin/packages/r-proxy/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793
|
2017-11-04T07:45:50.000Z
|
2022-03-30T14:31:53.000Z
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RProxy(RPackage):
    """Distance and Similarity Measures

    Provides an extensible framework for the efficient calculation of auto- and
    cross-proximities, along with implementations of the most popular ones."""

    # CRAN metadata: package page, tarball template, and archive listing.
    homepage = "https://cloud.r-project.org/package=proxy"
    url      = "https://cloud.r-project.org/src/contrib/proxy_0.4-19.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/proxy"

    # SHA-256 checksums of the published release tarballs.
    version('0.4-24', sha256='8cff9bf036475941a7c44ba9bb5e2f6d4777d49ab3daaeb52d23f4b2af6d9c7c')
    version('0.4-23', sha256='9dd4eb0978f40e4fcb55c8a8a26266d32eff9c63ac9dfe70cf1f664ca9c3669d')
    version('0.4-19', sha256='6b27e275018366e6024382704da9a9757c8878535dbcd7d450824b70e2e34d51')

    # R version requirements; proxy 0.4-21 and later need R >= 3.4.0.
    depends_on('r@3.3.2:', type=('build', 'run'))
    depends_on('r@3.4.0:', when='@0.4-21:', type=('build', 'run'))
| 42.48
| 96
| 0.739171
|
acea38caa1924bed5bb0b44eda7575e4c43ee9a0
| 3,977
|
py
|
Python
|
nemo_nowcast/workers/rotate_logs.py
|
douglatornell/nemo_nowcast
|
1b9181c29eee34a83e34869d13a0c0bf607882fa
|
[
"Apache-2.0"
] | 1
|
2020-02-06T01:10:27.000Z
|
2020-02-06T01:10:27.000Z
|
nemo_nowcast/workers/rotate_logs.py
|
douglatornell/nemo_nowcast
|
1b9181c29eee34a83e34869d13a0c0bf607882fa
|
[
"Apache-2.0"
] | 3
|
2021-04-06T18:03:49.000Z
|
2021-12-13T21:17:34.000Z
|
nemo_nowcast/workers/rotate_logs.py
|
43ravens/NEMO_Nowcast
|
1b9181c29eee34a83e34869d13a0c0bf607882fa
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016-2021 Doug Latornell, 43ravens
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NEMO_Nowcast framework rotate_logs worker.
Iterate through the nowcast system logging handlers, calling the
:py:meth:`doRollover` method on any that are instances of
:py:class:`logging.handlers.RotatingFileHandler`.
This worker is normally launched in automation at the end of a nowcast
processing cycle (e.g. end of the day).
It can also be launched from the command-line by the nowcast administrator
as necessary for system maintenance.
"""
import logging
import logging.config
from pathlib import Path
from nemo_nowcast import NowcastWorker
from nemo_nowcast.fileutils import FilePerms
# Worker name; doubles as the name of this module's logger.
NAME = "rotate_logs"
logger = logging.getLogger(NAME)
def main():
    """Configure and launch the rotate_logs worker.

    For command-line usage see:
    :command:`python -m nemo_nowcast.workers.rotate_logs --help`
    """
    rotate_logs_worker = NowcastWorker(
        NAME, description=__doc__, package="nemo_nowcast.workers"
    )
    rotate_logs_worker.init_cli()
    rotate_logs_worker.run(rotate_logs, success, failure)
def success(parsed_args):
    """Report success and return the "success" message type."""
    # logger_name is required because file system handlers get loaded in
    # rotate_logs()
    logger.info("log files rotated", extra={"logger_name": NAME})
    return "success"
def failure(parsed_args):
    """Report failure and return the "failure" message type."""
    # logger_name is required because file system handlers get loaded in
    # rotate_logs()
    logger.critical("failed to rotate log files", extra={"logger_name": NAME})
    return "failure"
def rotate_logs(parsed_args, config, *args):
    """Rotate the nowcast system's rotating log files.

    Flushes and rolls over every root/checklist logging handler that has a
    ``doRollover()`` method (skipping timed handlers, identified by their
    ``when`` attribute), resets the new file's permissions to rw-rw-r--,
    and returns a checklist of the rotated file paths.
    """
    # logger_name is required because file system handlers get loaded below
    logger.info("rotating log files", extra={"logger_name": NAME})
    checklist = {"log files": []}
    checklist_logger = logging.getLogger("checklist")
    if "aggregator" in config["logging"]:
        # Mirror the publisher's checklist handler/logger into the aggregator
        # logging config before loading it.
        pub_handlers = config["logging"]["publisher"]["handlers"]
        if "checklist" in pub_handlers:
            pub_loggers = config["logging"]["publisher"]["loggers"]
            config["logging"]["aggregator"]["handlers"]["checklist"] = pub_handlers[
                "checklist"
            ]
            try:
                # Bug fix: the original looked up
                # pub_handlers["loggers"]["checklist"], which always raised
                # KeyError (the handlers dict has no "loggers" key) and so the
                # fallback below clobbered any existing aggregator loggers.
                config["logging"]["aggregator"]["loggers"].update(
                    {"checklist": pub_loggers["checklist"]}
                )
            except KeyError:
                # Aggregator config has no "loggers" section yet; create it.
                config["logging"]["aggregator"].update(
                    {"loggers": {"checklist": pub_loggers["checklist"]}}
                )
        logging.config.dictConfig(config["logging"]["aggregator"])
    for handler in logger.root.handlers + checklist_logger.handlers:
        # Timed rotating handlers (which have a "when" attribute) manage
        # their own rollover schedule; skip them.
        if not hasattr(handler, "when"):
            try:
                handler.flush()
                handler.doRollover()
            except AttributeError:
                # Handler without a doRollover() method;
                # Probably a StreamHandler
                continue
            logger.info(
                f"log file rotated: {handler.baseFilename}", extra={"logger_name": NAME}
            )
            p = Path(handler.baseFilename)
            p.chmod(int(FilePerms(user="rw", group="rw", other="r")))
            logger.debug(
                f"new {handler.baseFilename} log file permissions set to rw-rw-r--",
                extra={"logger_name": NAME},
            )
            checklist["log files"].append(handler.baseFilename)
    return checklist
# Script entry point when run as `python -m nemo_nowcast.workers.rotate_logs`.
if __name__ == "__main__":
    main()  # pragma: no cover
| 35.828829
| 88
| 0.657279
|
a5141ac8de32a76f99d6b6a2c0962d0e9492d1d3
| 1,220
|
py
|
Python
|
modules/gsl_specfun/doc/gsl_sf_expint_e1.py
|
brycelelbach/nt2
|
73d7e8dd390fa4c8d251c6451acdae65def70e0b
|
[
"BSL-1.0"
] | 1
|
2022-03-24T03:35:10.000Z
|
2022-03-24T03:35:10.000Z
|
modules/gsl_specfun/doc/gsl_sf_expint_e1.py
|
brycelelbach/nt2
|
73d7e8dd390fa4c8d251c6451acdae65def70e0b
|
[
"BSL-1.0"
] | null | null | null |
modules/gsl_specfun/doc/gsl_sf_expint_e1.py
|
brycelelbach/nt2
|
73d7e8dd390fa4c8d251c6451acdae65def70e0b
|
[
"BSL-1.0"
] | null | null | null |
# Code-generation/test specification for the nt2 gsl_specfun binding of
# gsl_sf_expint_e1: a list of dicts consumed by the nt2 tooling describing
# the functor signature ('functor') and the unit-test generation parameters
# ('unit').  NOTE(review): field semantics are defined by the nt2 generator —
# confirm against the other files in modules/gsl_specfun/doc before changing.
[ ## this file was manually modified by jt
 {
  'functor' : {
   'arity' : '1',
   'call_types' : [],
   'ret_arity' : '0',
   'rturn' : {
    'default' : 'T',
   },
   'simd_types' : [],
   'special' : ['gsl_specfun'],
   'type_defs' : [],
   'types' : ['real_'],
  },
  'info' : 'manually modified',
  'unit' : {
   'global_header' : {
    'first_stamp' : 'created by jt the 10/03/2011',
    'included' : [],
    'no_ulp' : 'False',
    'notes' : [],
    'stamp' : 'modified by jt the 10/03/2011',
   },
   'ranges' : {
    'default' : [['T(0)', 'T(10)']],
   },
   'specific_values' : {
   },
   'verif_test' : {
    'property_call' : {
     'default' : ['nt2::gsl_specfun::gsl_sf_expint_e1(a0)'],
    },
    'property_value' : {
     'default' : ['nt2::gsl_specfun::gsl_sf_expint_e1(a0)'],
    },
    'simd' : {
    },
    'ulp_thresh' : {
     'default' : ['1'],
    },
   },
  },
 },
]
| 27.111111
| 72
| 0.340984
|
d74b03c9addaee28eb32582233bf72a6678e216d
| 9,766
|
py
|
Python
|
bcbio/variation/validateplot.py
|
SciLifeLab/bcbio-nextgen
|
370b3f316c423b41523accc5e212d51a5b7ecaa9
|
[
"MIT"
] | 3
|
2015-11-18T07:17:54.000Z
|
2021-04-28T13:58:37.000Z
|
bcbio/variation/validateplot.py
|
SciLifeLab/bcbio-nextgen
|
370b3f316c423b41523accc5e212d51a5b7ecaa9
|
[
"MIT"
] | null | null | null |
bcbio/variation/validateplot.py
|
SciLifeLab/bcbio-nextgen
|
370b3f316c423b41523accc5e212d51a5b7ecaa9
|
[
"MIT"
] | null | null | null |
"""Plot validation results from variant calling comparisons.
Handles data normalization and plotting, emphasizing comparisons on methodology
differences.
"""
import collections
import os
import numpy as np
import pandas as pd
try:
import matplotlib as mpl
mpl.use('Agg', force=True)
import matplotlib.pyplot as plt
except ImportError:
mpl, plt = None, None
try:
import seaborn as sns
except ImportError:
sns = None
from bcbio.log import logger
from bcbio import utils
from bcbio.variation import bamprep
def create_from_csv(in_csv, config=None, outtype="pdf", title=None, size=None):
    """Load validation results from a CSV file and plot them via create()."""
    plot_df = pd.read_csv(in_csv)
    out_base = os.path.splitext(in_csv)[0]
    create(plot_df, None, 0, config or {}, out_base, outtype, title, size)
def create(plot_data, header, ploti, sample_config, out_file_base, outtype="pdf",
           title=None, size=None):
    """Create plots of validation results for a sample, labeling prep strategies.

    plot_data: rows of validation results (or a ready DataFrame when header is
    None); header: column names for plot_data.  Returns the list of plot file
    names, or None when the optional plotting libraries are unavailable.
    """
    if mpl is None or plt is None or sns is None:
        # Report which optional plotting imports failed (they are bound to
        # None at import time above).  Fix: the original built this message
        # with eval() over name strings; a direct mapping avoids eval.
        maybe_missing = {"mpl": mpl, "plt": plt, "sns": sns}
        not_found = ", ".join(name for name, mod in maybe_missing.items() if mod is None)
        logger.info("No validation plot. Missing imports: %s" % not_found)
        return None
    if header:
        df = pd.DataFrame(plot_data, columns=header)
    else:
        df = plot_data
    # Annotate each sample with its aligner and BAM-prep strategy.
    df["aligner"] = [get_aligner(x, sample_config) for x in df["sample"]]
    df["bamprep"] = [get_bamprep(x, sample_config) for x in df["sample"]]
    # Floor values per category/variant type so plots stay comparable
    # (get_group_floors / get_floor_value are defined elsewhere in this module).
    floors = get_group_floors(df, cat_labels)
    df["value.floor"] = [get_floor_value(x, cat, vartype, floors)
                         for (x, cat, vartype) in zip(df["value"], df["category"], df["variant.type"])]
    out = []
    # One figure per BAM preparation method.
    for i, prep in enumerate(df["bamprep"].unique()):
        out.append(plot_prep_methods(df, prep, i + ploti, out_file_base, outtype, title, size))
    return out
# Human-readable labels for the plots, keyed by the raw values that appear in
# the validation dataframe ("category", "variant.type", "bamprep", "caller").
cat_labels = {"concordant": "Concordant",
              "discordant-missing-total": "Discordant (missing)",
              "discordant-extra-total": "Discordant (extra)",
              "discordant-shared-total": "Discordant (shared)"}
vtype_labels = {"snp": "SNPs", "indel": "Indels"}
prep_labels = {"gatk": "GATK best-practice BAM preparation (recalibration, realignment)",
               "none": "Minimal BAM preparation (samtools de-duplication only)"}
caller_labels = {"ensemble": "Ensemble", "freebayes": "FreeBayes",
                 "gatk": "GATK Unified\nGenotyper", "gatk-haplotype": "GATK Haplotype\nCaller"}
def plot_prep_methods(df, prep, prepi, out_file_base, outtype, title=None,
                      size=None):
    """Plot comparison between BAM preparation methods.

    Returns the path of the written image file.
    """
    prep_samples = df[df["bamprep"] == prep]["sample"].unique()
    assert len(prep_samples) >= 1, prep_samples
    out_file = "%s-%s.%s" % (out_file_base, prep_samples[0], outtype)
    # Restrict to the known comparison categories before plotting.
    plot_df = df[df["category"].isin(cat_labels)]
    _seaborn(plot_df, prep, prepi, out_file, title, size)
    return out_file
def _seaborn(df, prep, prepi, out_file, title=None, size=None):
    """Render the validation grid using seaborn styling over matplotlib.

    One subplot row per variant type and one column per category; each
    subplot is a bar chart with one bar per caller.
    """
    plt.ioff()
    sns.set(style='dark')
    vtypes = df["variant.type"].unique()
    callers = sorted(df["caller"].unique())
    cats = _check_cats(["concordant", "discordant-missing-total",
                        "discordant-extra-total", "discordant-shared-total"],
                       vtypes, df, prep, callers)
    fig, axs = plt.subplots(len(vtypes), len(cats))
    bar_width = 0.8
    positions = np.arange(len(callers))
    last_row = len(vtypes) - 1
    for row, vtype in enumerate(vtypes):
        row_axes = axs[row] if len(vtypes) > 1 else axs
        for col, cat in enumerate(cats):
            heights, bar_labels, ymax = _get_chart_info(df, vtype, cat, prep, callers)
            ax = row_axes[col]
            if row == 0:
                ax.set_title(cat_labels[cat], size=14)
            ax.get_yaxis().set_ticks([])
            if col == 0:
                ax.set_ylabel(vtype_labels[vtype], size=14)
            ax.bar(positions, heights, width=bar_width)
            ax.set_ylim(0, ymax)
            if row == last_row:
                # Only the bottom row carries caller tick labels.
                ax.set_xticks(positions + bar_width / 2.0)
                tick_names = [caller_labels.get(c, c).replace("__", "\n") if c else ""
                              for c in callers]
                ax.set_xticklabels(tick_names, size=8, rotation=45)
            else:
                ax.get_xaxis().set_ticks([])
            _annotate(ax, bar_labels, heights, positions, bar_width)
    header_text = prep_labels.get(prep, "") if title is None else title
    fig.text(.5, .95, header_text, horizontalalignment='center', size=16)
    fig.subplots_adjust(left=0.05, right=0.95, top=0.87, bottom=0.15,
                        wspace=0.1, hspace=0.1)
    x, y = (10, 5) if size is None else size
    fig.set_size_inches(x, y)
    fig.savefig(out_file)
def _check_cats(cats, vtypes, df, prep, callers):
    """Only include categories in the final output if they have values.

    A category is kept when its mean bar height across variant types is
    above the placeholder level (placeholder bars have height 1).  Falls
    back to all categories when none qualify.
    """
    out = []
    for cat in cats:
        all_vals = []
        for vtype in vtypes:
            vals, _labels, _maxval = _get_chart_info(df, vtype, cat, prep, callers)
            all_vals.extend(vals)
        # Guard against empty vtypes/callers, which previously raised
        # ZeroDivisionError; true division replaces the py2 float() idiom.
        if all_vals and sum(all_vals) / len(all_vals) > 2:
            out.append(cat)
    return out if out else cats
def _get_chart_info(df, vtype, cat, prep, callers):
"""Retrieve values for a specific variant type, category and prep method.
"""
maxval_raw = max(list(df["value.floor"]))
curdf = df[(df["variant.type"] == vtype) & (df["category"] == cat)
& (df["bamprep"] == prep)]
vals = []
labels = []
for c in callers:
row = curdf[df["caller"] == c]
if len(row) > 0:
vals.append(list(row["value.floor"])[0])
labels.append(list(row["value"])[0])
else:
vals.append(1)
labels.append("")
return vals, labels, maxval_raw
def _annotate(ax, annotate, height, left, width):
    """Annotate axis with labels.

    ax -- matplotlib Axes to draw on.
    annotate -- iterable of per-bar labels; if not iterable, labels are
        derived from the heights themselves.
    height -- bar heights; labels sit just above (or below, for negative
        bars) each bar.
    left -- left x positions of the bars.
    width -- bar width, used to center labels horizontally.
    """
    # collections.Iterable was removed in Python 3.10; the ABC lives in
    # collections.abc.
    from collections.abc import Iterable
    annotate_yrange_factor = 0.010
    xticks = np.array(left) + width / 2.0
    ymin, ymax = ax.get_ylim()
    yrange = ymax - ymin
    # Reset ymax and ymin so there's enough room to see the annotation of
    # the top-most
    if ymax > 0:
        ymax += yrange * 0.15
    if ymin < 0:
        ymin -= yrange * 0.15
    ax.set_ylim(ymin, ymax)
    yrange = ymax - ymin
    offset_ = yrange * annotate_yrange_factor
    if isinstance(annotate, Iterable):
        annotations = map(str, annotate)
    else:
        # np.float64 replaces np.float_, which was removed in numpy 2.0
        # (np.float_ was an alias of np.float64, so behavior is unchanged).
        annotations = ['%.3f' % h if type(h) is np.float64 else str(h)
                       for h in height]
    for x, h, annotation in zip(xticks, height, annotations):
        # Adjust the offset to account for negative bars
        offset = offset_ if h >= 0 else -1 * offset_
        verticalalignment = 'bottom' if h >= 0 else 'top'
        # Shrink the font for long annotations so they fit over the bar.
        if len(str(annotation)) > 6:
            size = 7
        elif len(str(annotation)) > 5:
            size = 8
        else:
            size = 10
        # Finally, add the text to the axes
        ax.annotate(annotation, (x, h + offset),
                    verticalalignment=verticalalignment,
                    horizontalalignment='center',
                    size=size)
def _ggplot(df, out_file):
    """Plot faceted items with ggplot wrapper on top of matplotlib.
    XXX Not yet functional
    """
    # Imported locally: the third-party "ggplot" package is only needed
    # for this experimental, currently unused code path.
    import ggplot as gg
    # Map raw identifiers onto display labels before plotting.
    # NOTE(review): mutates the caller's DataFrame in place, and
    # caller_labels.get(x, None) would yield None labels for unknown
    # callers — acceptable while this path is non-functional.
    df["variant.type"] = [vtype_labels[x] for x in df["variant.type"]]
    df["category"] = [cat_labels[x] for x in df["category"]]
    df["caller"] = [caller_labels.get(x, None) for x in df["caller"]]
    # Bar chart of floored values, faceted by variant type and category.
    p = (gg.ggplot(df, gg.aes(x="caller", y="value.floor")) + gg.geom_bar()
         + gg.facet_wrap("variant.type", "category")
         + gg.theme_seaborn())
    gg.ggsave(p, out_file)
def get_floor_value(x, cat, vartype, floors):
    """Modify values so all have the same relative scale for differences.

    Using the chosen base heights, adjusts an individual sub-plot to be
    consistent relative to that height.  Values never drop below 1 so a
    visible bar remains.
    """
    base_height = floors[vartype]
    cat_height = floors[(cat, vartype)]
    if cat_height <= base_height:
        return x
    return max(1, x - (cat_height - base_height))
def get_group_floors(df, cat_labels):
    """Retrieve the floor for a given row of comparisons, creating a normalized set of differences.

    We need to set non-zero floors so large numbers (like concordance) don't drown out small
    numbers (like discordance). This defines the height for a row of comparisons as either
    the minimum height of any sub-plot, or the maximum difference between higher and lower
    (plus 10%).

    Returns a dict keyed both by variant type (e.g. "snp") and by
    (category, variant type) tuples.
    """
    group_maxes = collections.defaultdict(list)
    group_diffs = collections.defaultdict(list)
    diff_pad = 0.1  # 10% padding onto difference to avoid large numbers looking like zero
    for name, group in df.groupby(["category", "variant.type"]):
        label, stype = name
        if label in cat_labels:
            diff = max(group["value"]) - min(group["value"])
            group_diffs[stype].append(diff + int(diff_pad * diff))
            group_maxes[stype].append(max(group["value"]))
        group_maxes[name].append(max(group["value"]))
    out = {}
    # items() replaces the py2-only iteritems(), which raised
    # AttributeError on Python 3.
    for key, maxes in group_maxes.items():
        if key in group_diffs:
            # Use this key's own diffs: the old code read the leaked loop
            # variable `stype` here, mixing diffs across variant types.
            out[key] = max(max(group_diffs[key]), min(maxes))
        else:
            out[key] = min(maxes)
    return out
def get_aligner(x, config):
    # x (the sample name) is unused: the aligner is defined once in the
    # run configuration rather than per-sample. Returns "" when unset.
    return utils.get_in(config, ("algorithm", "aligner"), "")
def get_bamprep(x, config):
    """Classify the BAM preparation strategy from the algorithm config.

    Returns "gatk" (realignment and recalibration with GATK), "none"
    (neither step), "mixed" (exactly one step), or "" otherwise.
    """
    params = bamprep._get_prep_params({"config": {"algorithm": config.get("algorithm", {})}})
    realign = params["realign"]
    recal = params["recal"]
    if realign == "gatk" and recal == "gatk":
        return "gatk"
    if not realign and not recal:
        return "none"
    if not recal or not realign:
        return "mixed"
    return ""
| 38.753968
| 115
| 0.608847
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.