"""
This module contains the HiSockClient, used to power the client
side of HiSock, and a `connect` function that passes in some key
arguments automatically. It is strongly advised to use `connect`
over instantiating `HiSockClient` directly, as `connect` fills in
arguments that `HiSockClient` does not provide defaults for.
====================================
Copyright SSS_Says_Snek, 2021-present
====================================
"""
# Imports
from __future__ import annotations # Remove when 3.10 is used by majority
import socket
import inspect # Type-hinting detection for type casting
import json # Handle sending dictionaries
import errno # Handle fatal errors with the server
import warnings # Non-severe errors
import sys # Utilize stderr
import threading # Threaded client and decorators
import traceback # Error handling
from typing import Callable, Union, Any # Type hints
from ipaddress import IPv4Address # Comparisons
from time import time # Unix timestamp support
try:
# Pip builds require relative import
from .utils import (
ClientException,
ClientNotFound,
ServerException,
FunctionNotFoundException,
FunctionNotFoundWarning,
ServerNotRunning,
MessageCacheMember,
Sendable,
Client,
_removeprefix,
_type_cast,
_str_type_to_type_annotations_dict,
make_header,
iptup_to_str,
validate_ipv4,
validate_command_not_reserved,
)
except ImportError:
# Relative import doesn't work for non-pip builds
from utils import (
ClientException,
ClientNotFound,
ServerException,
FunctionNotFoundException,
FunctionNotFoundWarning,
ServerNotRunning,
MessageCacheMember,
Sendable,
Client,
_removeprefix,
_type_cast,
_str_type_to_type_annotations_dict,
make_header,
iptup_to_str,
validate_ipv4,
validate_command_not_reserved,
)
# ░█████╗░░█████╗░██╗░░░██╗████████╗██╗░█████╗░███╗░░██╗██╗
# ██╔══██╗██╔══██╗██║░░░██║╚══██╔══╝██║██╔══██╗████╗░██║██║
# ██║░░╚═╝███████║██║░░░██║░░░██║░░░██║██║░░██║██╔██╗██║██║
# ██║░░██╗██╔══██║██║░░░██║░░░██║░░░██║██║░░██║██║╚████║╚═╝
# ╚█████╔╝██║░░██║╚██████╔╝░░░██║░░░██║╚█████╔╝██║░╚███║██╗
# ░╚════╝░╚═╝░░╚═╝░╚═════╝░░░░╚═╝░░░╚═╝░╚════╝░╚═╝░░╚══╝╚═╝
# Change this code only if you know what you are doing!
# If this code is changed, the client may not work properly
class HiSockClient:
"""
The client class for :mod:`HiSock`.
:param addr: A two-element tuple, containing the IP address and the
port number of where the server is hosted.
**Only IPv4 is currently supported.**
:type addr: tuple
    :param name: Either a string or NoneType, representing the name the client
        goes by. Having a name provides an easy interface for sending
        data to a specific client and for identifying clients. It is therefore
        highly recommended to pass in a name.
        Pass in NoneType for no name (:meth:`connect` should handle that)
:type name: str, optional
:param group: Either a string or NoneType representing the group the client
is in. Being in a group provides an easy interface of sending
data to multiple specific clients, and identifying multiple clients.
It is highly recommended to provide a group for complex servers.
Pass in NoneType for no group (:meth:`connect` should handle that)
:type group: str, optional
    :param blocking: A boolean, set to whether the client should block the loop
        while waiting for messages or not.
        Default is True.
:type blocking: bool, optional
:param header_len: An integer defining the header length of every message.
A larger header length would mean a larger maximum message length
(about 10**header_len).
**MUST** be the same header length as the server, or else it will crash
(hard to debug too!).
        Defaults to 16 (maximum content length of about 10 quadrillion bytes).
:type header_len: int, optional
:ivar tuple addr: A two-element tuple containing the IP address and the
port number of the server.
:ivar int header_len: An integer storing the header length of each "message".
:ivar str name: A string representing the name of the client to identify by.
Default is None.
:ivar str group: A string representing the group of the client to identify by.
Default is None.
    :ivar dict funcs: A dictionary of commands mapped to the functions registered
        with the decorator :meth:`on`.
        **This is mainly used for under-the-hood code.**
    :ivar int connect_time: An integer storing the Unix timestamp of when the
client connected to the server.
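
    Example (a minimal sketch; the address, name, and group below are
    placeholders, and :func:`connect` is the recommended entry point):

    .. code-block:: python

        client = HiSockClient(("192.168.1.133", 5000), name="Sheep", group="Animals")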
"""
def __init__(
self,
addr: tuple[str, int],
name: Union[str, None],
group: Union[str, None],
blocking: bool = True,
header_len: int = 16,
cache_size: int = -1,
):
self.addr = addr
self.name = name
self.group = group
self.header_len = header_len
# Socket initialization
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.sock.connect(self.addr)
except ConnectionRefusedError:
raise ServerNotRunning(
"Server is not running! Aborting..."
) from ConnectionRefusedError
# Function related storage
# {"command": {"func": Callable, "name": str, "type_hint": Any, "threaded": bool}}
self.funcs = {}
# Stores the names of the reserved functions
# Used for the `on` decorator
        # These are lists (not tuples) so overriding can remove entries from them
        self._reserved_functions = [
            "client_connect",
            "client_disconnect",
            "force_disconnect",
        ]
        # Stores the number of parameters each reserved function takes
        # Used for the `on` decorator
        self._reserved_functions_parameters_num = [
            1,  # client_connect
            1,  # client_disconnect
            0,  # force_disconnect
        ]
        # Cache
        self.cache_size = cache_size
        # cache_size < 0: no cache; cache_size == 0: unlimited cache;
        # cache_size > 0: cache capped at cache_size entries
        self.cache = []
# TLS arguments
self.tls_arguments = {"tls": False} # If TLS is false, then no TLS
# Flags
self.closed = False
# If `update` is trying to receive while `recv_raw` is running, bad things happen.
self._receiving_data = False
        # The data received by `update` if this is set to `I NEED YOUR DATA`... see
        # `update` and `recv_raw` for info
self._recv_data = ""
self.connected = False
self.connect_time = 0 # Unix timestamp
self.sock.setblocking(blocking)
# Send client hello
self._send_client_hello()
def __str__(self) -> str:
"""Example: <HiSockClient connected to 192.168.1.133:5000>"""
return f"<HiSockClient connected to {iptup_to_str(self.addr)}>"
def __repr__(self):
return self.__str__()
    def __len__(self):
        """Returns how many clients are connected (not tracked client-side)"""
        raise NotImplementedError(
            "HiSockClient does not track how many clients are connected"
        )

    # Comparisons
def __gt__(self, other: Union[HiSockClient, str]) -> bool:
"""Example: HiSockClient(...) > "192.168.1.133:5000" """
if type(other) not in [HiSockClient, str]:
raise TypeError("Type not supported for > comparison")
if isinstance(other, HiSockClient):
return IPv4Address(self.addr[0]) > IPv4Address(other.addr[0])
ip = other.split(":")
return IPv4Address(self.addr[0]) > IPv4Address(ip[0])
def __ge__(self, other: Union[HiSockClient, str]) -> bool:
"""Example: HiSockClient(...) >= "192.168.1.133:5000" """
if type(other) not in [HiSockClient, str]:
raise TypeError("Type not supported for >= comparison")
if isinstance(other, HiSockClient):
return IPv4Address(self.addr[0]) >= IPv4Address(other.addr[0])
ip = other.split(":")
return IPv4Address(self.addr[0]) >= IPv4Address(ip[0])
def __lt__(self, other: Union[HiSockClient, str]) -> bool:
"""Example: HiSockClient(...) < "192.168.1.133:5000" """
if type(other) not in [HiSockClient, str]:
raise TypeError("Type not supported for < comparison")
if isinstance(other, HiSockClient):
return IPv4Address(self.addr[0]) < IPv4Address(other.addr[0])
ip = other.split(":")
return IPv4Address(self.addr[0]) < IPv4Address(ip[0])
def __le__(self, other: Union[HiSockClient, str]) -> bool:
"""Example: HiSockClient(...) <= "192.168.1.133:5000" """
if type(other) not in [HiSockClient, str]:
raise TypeError("Type not supported for <= comparison")
if isinstance(other, HiSockClient):
return IPv4Address(self.addr[0]) <= IPv4Address(other.addr[0])
ip = other.split(":")
return IPv4Address(self.addr[0]) <= IPv4Address(ip[0])
def __eq__(self, other: Union[HiSockClient, str]) -> bool:
"""Example: HiSockClient(...) == "192.168.1.133:5000" """
if type(other) not in [HiSockClient, str]:
raise TypeError("Type not supported for == comparison")
if isinstance(other, HiSockClient):
return IPv4Address(self.addr[0]) == IPv4Address(other.addr[0])
ip = other.split(":")
return IPv4Address(self.addr[0]) == IPv4Address(ip[0])
# Internal methods
def _send_client_hello(self):
"""
Sends a hello to the server for the first connection
:raises ClientException: If the client is already connected
"""
if self.connected:
raise ClientException(
f"Client is already connected! (connected {time() - self.connect_time} seconds ago)"
)
hello_dict = {"name": self.name, "group": self.group}
self.send_raw(f"$CLTHELLO$ {json.dumps(hello_dict)}")
self.connected = True
self.connect_time = time()
def _handle_keepalive(self):
"""Handle a keepalive sent from the server."""
self.send_raw(f"$KEEPACK${iptup_to_str(self.get_client_addr())}")
def _send_type_cast(self, content: Sendable) -> bytes:
"""
Type casting content for the send methods.
This method exists so type casting can easily be changed without changing
all the send methods.
:param content: The content to type cast
:type content: Sendable
:return: The type casted content
:rtype: bytes
:raise InvalidTypeCast: If the content cannot be type casted
"""
return _type_cast(bytes, content, "<client sending function>")
# On decorator
def _call_function(self, func_name: str, sort_by_name: bool, *args, **kwargs):
"""
        Calls a function with the given arguments.
        :param func_name: The name of the function to call.
        :type func_name: str
        :param sort_by_name: Whether to look the function up by its name
            (instead of by its command) or not.
        :type sort_by_name: bool
:param args: The arguments to pass to the function.
:param kwargs: The keyword arguments to pass to the function.
:raise FunctionNotFoundException: If the function is not found.
"""
        func: str
# Find the function by the function name
if sort_by_name:
for func_command, func_data in self.funcs.items():
if func_data["name"] == func_name:
func = func_command
break
else:
raise FunctionNotFoundException(
f"Function with name {func_name} not found"
)
# Find the function by the function command
else:
if func_name not in self.funcs:
raise FunctionNotFoundException(
f"Function with command {func_name} not found"
)
func = func_name
# Normal
if not self.funcs[func]["threaded"]:
self.funcs[func]["func"](*args, **kwargs)
return
# Threaded
function_thread = threading.Thread(
target=self.funcs[func]["func"],
args=args,
kwargs=kwargs,
daemon=True,
)
function_thread.start()
class _on:
"""Decorator used to handle something when receiving command"""
def __init__(
self, outer: HiSockClient, command: str, threaded: bool, override: bool
):
self.outer = outer
self.command = command
self.threaded = threaded
self.override = override
validate_command_not_reserved(self.command)
def __call__(self, func: Callable) -> Callable:
"""Adds a function that gets called when the client receives a matching command"""
func_args = inspect.getfullargspec(func).args
            # Overriding a reserved command: remove it from the reserved functions
            if self.override:
                if self.command in self.outer._reserved_functions:
                    # Drop any previously registered handler for this command
                    self.outer.funcs.pop(self.command, None)
                    index = self.outer._reserved_functions.index(self.command)
                    self.outer._reserved_functions.pop(index)
                    self.outer._reserved_functions_parameters_num.pop(index)
                else:
                    warnings.warn(
                        f"Unnecessary override for {self.command}.", UserWarning
                    )
self._assert_num_func_args_valid(len(func_args))
annotations = _str_type_to_type_annotations_dict(
inspect.getfullargspec(func).annotations
) # {"param": type}
parameter_annotations = {}
# Process unreserved commands
if self.command not in self.outer._reserved_functions:
# Map function arguments into type hint compliant ones
# Note: this is the same code as in `HiSockServer` which is why
# this could support multiple arguments. However, for now, the
# only argument is `message`.
for func_argument, argument_name in zip(func_args, ("message",)):
if func_argument not in annotations:
continue
parameter_annotations[argument_name] = annotations[func_argument]
# Add function
self.outer.funcs[self.command] = {
"func": func,
"name": func.__name__,
"type_hint": parameter_annotations,
"threaded": self.threaded,
}
# Decorator stuff
return func
def _assert_num_func_args_valid(self, number_of_func_args: int):
"""
Asserts the number of function arguments is valid.
Unreserved commands can have either 0 or 1 arguments.
For reserved commands, refer to
:ivar:`HiSockClient._reserved_functions_parameters_num`.
:raises TypeError: If the number of function arguments is invalid.
"""
            valid = False
            needed_number_of_args = "0-1"
            # Reserved commands
            try:
                index_of_reserved_command = self.outer._reserved_functions.index(
                    self.command
                )
                needed_number_of_args = self.outer._reserved_functions_parameters_num[
                    index_of_reserved_command
                ]
                valid = number_of_func_args == needed_number_of_args
            # Unreserved commands
            except ValueError:
                valid = number_of_func_args <= 1
if not valid:
raise TypeError(
f"{self.command} command must have {needed_number_of_args} "
f"arguments, not {number_of_func_args}"
)
def on(
self, command: str, threaded: bool = False, override: bool = False
) -> Callable:
"""
A decorator that adds a function that gets called when the client
receives a matching command
        Reserved functions are functions that get activated on
        specific events, and they are:

        1. ``client_connect`` - Activated when a client connects to the server
        2. ``client_disconnect`` - Activated when a client disconnects from the server
        3. ``force_disconnect`` - Activated when the server forcibly disconnects this client

        The parameters of the function depend on the command to listen to.
        For example, the reserved functions ``client_connect`` and
        ``client_disconnect`` get the client's data passed in as an argument.
        All other unreserved functions get the message passed in.
        In addition, certain type casting is available to unreserved functions.
        That means that, using type hints, received data can automatically be
        converted to the needed types. The type casting currently supports:
- ``bytes``
- ``str``
- ``int``
- ``float``
- ``bool``
- ``None``
- ``list`` (with the types listed here)
- ``dict`` (with the types listed here)
For more information, read the wiki for type casting.
:param command: A string, representing the command the function should activate
when receiving it.
:type command: str
:param threaded: A boolean, representing if the function should be run in a thread
in order to not block the update() loop.
Default is False.
:type threaded: bool, optional
:param override: A boolean representing if the function should override the
reserved function with the same name and to treat it as an unreserved function.
Default is False.
:type override: bool, optional
:return: The same function (the decorator just appended the function to a stack)
:rtype: function
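
        Example (a sketch; the ``"ping"`` command and handler below are
        illustrative, not part of the library):

        .. code-block:: python

            @client.on("ping")
            def on_ping(message: str):
                # `message` is type cast to str because of the type hint
                print(f"Server sent: {message}")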
"""
# Passes in outer to _on decorator/class
return self._on(self, command, threaded, override)
# Getters
def get_cache(
self,
idx: Union[int, slice, None] = None,
) -> list[MessageCacheMember]:
"""
Gets the message cache.
:param idx: An integer or ``slice``, specifying what specific message caches to return.
Default is None (Retrieves the entire cache)
:type idx: Union[int, slice], optional
        :return: A list of MessageCacheMember objects, representing the cache
        :rtype: list[MessageCacheMember]
"""
if idx is None:
return self.cache
return self.cache[idx]
def get_client(self, client: Client):
"""
Gets the client data for a client.
:param client: The client name or IP+port to get.
:type client: Client
:return: The client data.
:rtype: dict
:raises ValueError: If the client IP is invalid.
:raise ClientNotFound: If the client couldn't be found.
:raise ServerException: If another error occurred.
"""
try:
validate_ipv4(iptup_to_str(client))
except ValueError as e:
# Names are allowed, too.
if not isinstance(client, str):
raise e
self.send_raw(f"$GETCLT$ {client}")
response = self.recv_raw()
response = _type_cast(dict, response, "<get_client response>")
# Validate response
if "traceback" not in response:
return response
if response["traceback"] == "$NOEXIST$":
raise ClientNotFound(f"Client {client} not connected to the server.")
raise ServerException(
f"Failed to get client from server: {response['traceback']}"
)
def get_server_addr(self) -> tuple[str, int]:
"""
        Gets the address of the server that the hisock client is connected to.
:return: A tuple, with the format (str IP, int port)
:rtype: tuple[str, int]
"""
return self.addr
def get_client_addr(self) -> tuple[str, int]:
"""
Gets the address of the hisock client that is connected
to the server.
:return: A tuple, with the format (str IP, int port)
:rtype: tuple[str, int]
"""
return self.sock.getsockname()
# Transmit data
def send(self, command: str, content: Sendable = None):
"""
Sends a command & content to the server.
:param command: A string, containing the command to send
:type command: str
:param content: The message / content to send
:type content: Sendable, optional
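
        Example (a sketch; the command names and payloads are illustrative):

        .. code-block:: python

            client.send("send_message", "Hello, server!")
            client.send("player_data", {"name": "Sheep", "hp": 100})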
"""
data_to_send = (
b"$CMD$" + command.encode() + b"$MSG$" + self._send_type_cast(content)
)
content_header = make_header(data_to_send, self.header_len)
self.sock.send(content_header + data_to_send)
def send_raw(self, content: Sendable = None):
"""
        Sends a message to the server: NO COMMAND REQUIRED.
        This is preferable in some situations, where the client needs to send
        data to the server multiple times, without overcomplicating it with commands.
:param content: The message / content to send
:type content: Sendable, optional
"""
data_to_send = self._send_type_cast(content)
header = make_header(data_to_send, self.header_len)
self.sock.send(header + data_to_send)
def recv_raw(self, ignore_reserved: bool = False) -> bytes:
"""
Waits (blocks) until a message is sent, and returns that message.
This is not recommended for content with commands attached;
it is meant to be used alongside with :func:`HiSockServer.send_client_raw` and
:func:`HiSockServer.send_group_raw`
:param ignore_reserved: A boolean, representing if the function should ignore
reserved commands.
Default is False.
:type ignore_reserved: bool, optional
.. note::
If the message is a keepalive, the client will send an acknowledgement and
then ignore it, even if ``ignore_reserved`` is False.
:return: A bytes-like object, containing the content/message
the client first receives
:rtype: bytes
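
        Example (a sketch; assumes the server replies with raw data to the
        illustrative ``"set_timer"`` command):

        .. code-block:: python

            client.send("set_timer", "5")
            response = client.recv_raw()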
"""
def _handle_data(data: bytes):
# Reserved commands
reserved_command = False
try:
validate_command_not_reserved(str(data))
except ValueError:
reserved_command = True
if reserved_command:
# Was there a keepalive?
if data == b"$KEEPALIVE$":
self._handle_keepalive()
return self.recv_raw()
if not ignore_reserved:
return self.recv_raw()
return data
# Sometimes, `update` can be running at the same time as this is running
# (e.x. if this is in a thread). In this case, `update` will receive the data
# and send it to us, as we cannot receive data at the same time as it receives
# data.
if self._receiving_data:
self._recv_data = "I NEED YOUR DATA"
            # Busy-wait until `update` hands over the data
            while self._recv_data == "I NEED YOUR DATA":
                pass
# Data is received
data_received = self._recv_data
self._recv_data = ""
return _handle_data(data_received)
self._receiving_data = True
message_len = int(self.sock.recv(self.header_len).decode())
data_received = self.sock.recv(message_len)
self._receiving_data = False
return _handle_data(data_received)
# Changers
def change_name(self, new_name: Union[str, None]):
"""
Changes the name of the client
:param new_name: The new name for the client to be called
If left blank, then the name will be reset.
:type new_name: str, optional
"""
data_to_send = "$CHNAME$" + (f" {new_name}" or "")
self.send_raw(data_to_send)
def change_group(self, new_group: Union[str, None]):
"""
Changes the client's group.
:param new_group: The new group name of the client
:type new_group: Union[str, None]
"""
data_to_send = "$CHGROUP$" + (f" {new_group}" or "")
self.send_raw(data_to_send)
# Update
def update(self):
"""
        Handles newly received messages, excluding the messages requested by :meth:`recv_raw`.
        This method must be called every iteration of a while loop, so as not to lose valuable info.
        In some cases, it is recommended to run this in a thread, so as not to block the
        program.
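
        Example (a sketch of a typical main loop):

        .. code-block:: python

            while not client.closed:
                client.update()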
"""
if self.closed:
return
try:
self._receiving_data = True
# Receive header
try:
content_header = self.sock.recv(self.header_len)
except ConnectionResetError:
raise ServerNotRunning(
"Server has stopped running, aborting..."
) from ConnectionResetError
            except ConnectionAbortedError:
                # Keepalive timeout reached; the server most likely stopped responding
                self.closed = True
                return
# Most likely server has stopped running
if not content_header:
print("Connection forcibly closed by server, exiting...")
raise SystemExit
data = self.sock.recv(int(content_header.decode()))
self._receiving_data = False
# Handle keepalive
if data == b"$KEEPALIVE$":
self._handle_keepalive()
return
# `update` can be called and run at the same time as `recv_raw`, so we need
# to make sure receiving data doesn't clash.
# If `recv_raw` would like the data, send it to them and don't process it.
if self._recv_data == "I NEED YOUR DATA":
self._recv_data = data
return
### Reserved ###
# Handle force disconnection
if data == b"$DISCONN$":
self.close()
if "force_disconnect" in self.funcs:
self._call_function("force_disconnect", False)
return
# Handle new client connection
if data.startswith(b"$CLTCONN$"):
if "client_connect" not in self.funcs:
warnings.warn("client_connect", FunctionNotFoundWarning)
return
client_content = json.loads(_removeprefix(data, b"$CLTCONN$ "))
self._call_function("client_connect", False, client_content)
return
# Handle client disconnection
if data.startswith(b"$CLTDISCONN$"):
if "client_disconnect" not in self.funcs:
warnings.warn("client_disconnect", FunctionNotFoundWarning)
return
client_content = json.loads(_removeprefix(data, b"$CLTDISCONN$ "))
self._call_function("client_disconnect", False, client_content)
return
### Unreserved ###
has_corresponding_function = False # For cache
decoded_data = data.decode()
if decoded_data.startswith("$CMD$"):
                command = _removeprefix(decoded_data, "$CMD$").split("$MSG$")[0]
content = _removeprefix(decoded_data, "$CMD$" + command + "$MSG$")
# No content? (_removeprefix didn't do anything)
if not content or content == decoded_data:
content = None
# This shouldn't happen, but we'll handle it anyway
if command in self._reserved_functions:
print("Reserved command received, but not handled properly.")
return
                for matching_command, func in self.funcs.items():
                    if command != matching_command:
                        continue
                    has_corresponding_function = True
                    # Call the function, type casting the content if a type hint was given
                    if len(func["type_hint"]) != 0:
                        content = _type_cast(
                            func["type_hint"]["message"], content, func["name"]
                        )
                        self._call_function(func["name"], True, content)
                    else:
                        self._call_function(func["name"], True)
                    break
                # No function found
                if not has_corresponding_function:
                    warnings.warn(
                        f"No function found for command {command}",
                        FunctionNotFoundWarning,
                    )
# Caching
if self.cache_size >= 0:
if has_corresponding_function:
cache_content = content
else:
cache_content = data
self.cache.append(
MessageCacheMember(
{
"header": content_header,
"content": cache_content,
"called": has_corresponding_function,
"command": command,
}
)
)
# Pop oldest from stack
if 0 < self.cache_size < len(self.cache):
self.cache.pop(0)
        except IOError as e:
            # Normal, means message has ended
            if e.errno in (errno.EAGAIN, errno.EWOULDBLOCK) or self.closed:
                return
# Fatal error, abort client
traceback.print_exception(type(e), e, e.__traceback__, file=sys.stderr)
print(
"\nServer error encountered, aborting client...",
file=sys.stderr,
)
self.close()
raise SystemExit
def close(self, emit_leave: bool = True):
"""
Closes the client; running ``client.update()`` won't do anything now
:param emit_leave: Decides if the client will emit `leave` to the server or not
:type emit_leave: bool
"""
self.closed = True
if emit_leave:
close_header = make_header(b"$USRCLOSE$", self.header_len)
self.sock.send(close_header + b"$USRCLOSE$")
self.sock.close()
class ThreadedHiSockClient(HiSockClient):
"""
    A downside of :class:`HiSockClient` is that you need to constantly call
    :meth:`update` in a while loop, which may block the program. Fortunately,
    in Python, you can use threads to do two different things at once. Using
    :class:`ThreadedHiSockClient`, you can run other blocking code without
    worrying about the client blocking your program.
.. note::
In some cases though, :class:`HiSockClient` offers more control than
:class:`ThreadedHiSockClient`, so be careful about when to use
:class:`ThreadedHiSockClient` over :class:`HiSockClient`
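
    Example (a sketch; :func:`threaded_connect` is the recommended way to
    construct this class):

    .. code-block:: python

        client = threaded_connect(("192.168.1.133", 5000), name="Sheep", group="Animals")
        client.start_client()
        # ... do other (blocking) work on the main thread ...
        client.stop_client()
        client.join()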
"""
def __init__(
self, addr, name=None, group=None, blocking=True, header_len=16, cache_size=-1
):
super().__init__(addr, name, group, blocking, header_len, cache_size)
self._thread = threading.Thread(target=self._run)
self._stop_event = threading.Event()
def stop_client(self):
"""Stops the client"""
self.closed = True
self._stop_event.set()
self.sock.close()
def _run(self):
"""
The main while loop to run the thread
Refer to :class:`HiSockClient` for more details (:meth:`update`)
        .. warning::
           This method should **NOT** be called by user code. It is used
           internally by the thread; interact with the client through
           :meth:`start_client`, :meth:`stop_client`, and :meth:`join` instead.
"""
while not (self._stop_event.is_set() or self.closed):
try:
self.update()
except (OSError, ValueError):
break
def start_client(self):
"""Starts the main server loop"""
self._thread.start()
def join(self):
"""Waits for the thread to be killed"""
self._thread.join()
def connect(addr, name=None, group=None, blocking=True, header_len=16, cache_size=-1):
"""
    Creates a :class:`HiSockClient` instance. See :class:`HiSockClient` for more details.
:param addr: A two-element tuple containing the IP address and
the port number of the server.
:type addr: tuple
:param name: A string containing the name of what the client should go by.
This argument is optional.
:type name: str, optional
:param group: A string, containing the "group" the client is in.
Groups can be utilized to send specific messages to them only.
This argument is optional.
:type group: str, optional
:param blocking: A boolean specifying if the client should block or not
in the socket.
Default is True.
:type blocking: bool, optional
    :param header_len: An integer defining the header length of every message.
        Default is 16.
    :type header_len: int, optional
    :return: A :class:`HiSockClient` instance.
    :rtype: HiSockClient
.. note::
A simple way to use this function is to use :func:`utils.input_client_config`
which will ask you for the server IP, port, name, and group. Then, you can
call this function by simply doing ``connect(*input_client_config())``
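
    Example (a sketch; the address, name, and group are placeholders):

    .. code-block:: python

        client = connect(("192.168.1.133", 5000), name="Sheep", group="Animals")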
"""
return HiSockClient(addr, name, group, blocking, header_len, cache_size)
def threaded_connect(
addr, name=None, group=None, blocking=True, header_len=16, cache_size=-1
):
"""
Creates a :class:`ThreadedHiSockClient` instance. See :class:`ThreadedHiSockClient`
and :func:`connect` for more details.
:return: A :class:`ThreadedHiSockClient` instance
"""
return ThreadedHiSockClient(addr, name, group, blocking, header_len, cache_size)
if __name__ == "__main__":
# Tests
client = connect(
("127.0.0.1", int(input("Port: "))),
name=input("Name: "),
group=input("Group: "),
)
print(
"The HiSock police are on to you. "
"You must change your name and group before they catch you."
)
client.change_name(input("New name: "))
client.change_group(input("New group: "))
@client.on("client_connect")
def on_connect(client_data: dict):
print(
f'{client_data["name"]} has joined! '
f'Their IP is {iptup_to_str(client_data["ip"])}. '
f'Their group is {client_data["group"]}.'
)
@client.on("client_disconnect")
def on_disconnect(client_data: dict):
print(f'{client_data["name"]} disconnected from the server.')
@client.on("force_disconnect")
def on_force_disconnect():
print("You have been disconnected from the server.")
raise SystemExit
@client.on("message", threaded=True)
def on_message(message: str):
print(f"Message received:\n{message}")
@client.on("genocide")
def on_genocide():
print("It's time to die!")
exit(69)
    def choices():
        print(
            "Your choices are:"
            "\n\tsend\n\tping\n\tchange_name\n\tchange_group\n\tset_timer"
            "\n\tget_all_clients\n\tstop\n\tgenocide"
        )
while True:
choice = input("What would you like to do? ")
if choice == "send":
client.send("broadcast_message", input("Message: "))
elif choice == "ping":
client.send("ping", b"")
elif choice == "change_name":
client.change_name(input("New name: "))
elif choice == "change_group":
client.change_group(input("New group: "))
elif choice == "set_timer":
client.send("set_timer", input("Seconds: "))
print(client.recv_raw())
print("Timer done!")
elif choice == "get_all_clients":
client.send("all_clients", b"")
print(client.recv_raw())
elif choice == "stop":
client.close()
return
elif choice == "genocide":
input("You will kill many people. Do you wish to proceed?")
print("Just kidding, your input had no effect. Time for genocide!")
client.send(
"set_timer", input("How many seconds for the genocide to last?")
)
print(client.recv_raw())
print("Genociding (it is a word)...")
client.send("commit_genocide")
else:
print("Invalid choice.")
function_thread = threading.Thread(target=choices, daemon=True)
function_thread.start()
while not client.closed:
client.update()
|
#!/usr/bin/env python3
# -*- encoding=utf-8 -*-
# description:
# author:jack
# create_time: 2018/9/19
from dueros.directive.Display.BaseRenderPlayerInfo import BaseRenderPlayerInfo
class RenderAudioPlayerInfo(BaseRenderPlayerInfo):
    def __init__(self, content=None, controls=None):
        # Avoid a mutable default argument for `controls`
        if controls is None:
            controls = []
        super(RenderAudioPlayerInfo, self).__init__('Display.RenderAudioPlayerInfo', content, controls)
|
import os
from random import randint
import praw
from dotenv import load_dotenv
load_dotenv()
client_id = os.environ.get("CLIENT_ID")
client_secret = os.environ.get("CLIENT_SERVER")
user_agent = os.environ.get("USER_AGENT")
reddit = praw.Reddit(client_id=client_id,
client_secret=client_secret,
user_agent=user_agent)
subreddit = reddit.subreddit('shitpostcrusaders')
reddit_url = "https://www.reddit.com"
def return_post():
post = subreddit.random().permalink
return reddit_url + post
if __name__ == "__main__":
print(return_post())
|
from models.decoders.decoder import PredictionDecoder
from models.decoders.utils import bivariate_gaussian_activation
import torch
import torch.nn as nn
from typing import Dict
class MTP(PredictionDecoder):
def __init__(self, args):
"""
Prediction decoder for MTP
args to include:
num_modes: int number of modes K
op_len: int prediction horizon
hidden_size: int hidden layer size
encoding_size: int size of context encoding
use_variance: Whether to output variance params along with mean predicted locations
"""
super().__init__()
self.agg_type = args['agg_type']
self.num_modes = args['num_modes']
self.op_len = args['op_len']
self.use_variance = args['use_variance']
self.op_dim = 5 if self.use_variance else 2
self.hidden = nn.Linear(args['encoding_size'], args['hidden_size'])
self.traj_op = nn.Linear(args['hidden_size'], args['op_len'] * self.op_dim * self.num_modes)
self.prob_op = nn.Linear(args['hidden_size'], self.num_modes)
self.leaky_relu = nn.LeakyReLU(0.01)
self.log_softmax = nn.LogSoftmax(dim=1)
def forward(self, agg_encoding: torch.Tensor) -> Dict:
"""
Forward pass for MTP
:param agg_encoding: aggregated context encoding
:return predictions: dictionary with 'traj': K predicted trajectories and
'probs': K corresponding probabilities
"""
h = self.leaky_relu(self.hidden(agg_encoding))
batch_size = h.shape[0]
traj = self.traj_op(h)
probs = self.log_softmax(self.prob_op(h))
traj = traj.reshape(batch_size, self.num_modes, self.op_len, self.op_dim)
probs = probs.squeeze(dim=-1)
traj = bivariate_gaussian_activation(traj) if self.use_variance else traj
predictions = {'traj': traj, 'probs': probs}
return predictions
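
# A minimal usage sketch (assumes this repo's models package is importable; the
# argument values below are illustrative only):
#
#   args = {'agg_type': 'concat', 'num_modes': 10, 'op_len': 12,
#           'hidden_size': 128, 'encoding_size': 64, 'use_variance': False}
#   decoder = MTP(args)
#   agg_encoding = torch.randn(8, 64)   # batch of 8 aggregated context encodings
#   out = decoder(agg_encoding)
#   # out['traj'] has shape (8, 10, 12, 2); out['probs'] has shape (8, 10)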
|
from pycspr.crypto import cl_checksum
from pycspr.serialisation.binary.cl_value import encode as encode_cl_value
from pycspr.serialisation.json.cl_type import encode as encode_cl_type
from pycspr.serialisation.utils import cl_value_to_cl_type
from pycspr.serialisation.utils import cl_value_to_parsed
from pycspr.types import cl_values
def encode(entity: cl_values.CL_Value) -> dict:
"""Encodes a CL value as a JSON compatible dictionary.
:param entity: A CL value to be encoded.
:returns: A JSON compatible dictionary.
"""
return {
"cl_type": encode_cl_type(cl_value_to_cl_type(entity)),
"bytes": cl_checksum.encode(encode_cl_value(entity)),
"parsed": cl_value_to_parsed(entity)
}
|
#!/usr/bin/env python
__copyright__ = """
Copyright (c) 2020 Tananaev Denis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions: The above copyright notice and this permission
notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import os
import glob
import datetime
import numpy as np
import matplotlib.pyplot as plt
from detection_3d.tools.file_io import read_json, save_plot_to_image
def parse_report_for_epoch_metrics(report_dict, metrics=["train_mIoU"]):
"""
    Parse the report for the given metrics over epochs and collect them in lists
    Arguments:
        report_dict: dictionary with raw report data
        metrics: list of metrics to search over epochs
    Returns:
        result: a dictionary of the form {metric: {"epoch": [], "value": []}}
"""
result = {metric: {"epoch": [], "value": []} for metric in metrics}
for epoch in report_dict["epoch_metrics"]:
epoch_num = int(epoch)
for metric in metrics:
value = float(report_dict["epoch_metrics"][epoch][metric])
result[metric]["epoch"].append(epoch_num)
result[metric]["value"].append(value)
return result
def get_report_json(
checkpoints_dir, report_dir, plot_metrics, main_metric, full_report=False
):
"""
    Parse checkpoints_dir and return a json report with all information
    Arguments:
        checkpoints_dir: directory containing all checkpoints from training
        report_dir: directory to save the report
        plot_metrics: metrics to plot
        main_metric: main metric to define the best epoch
        full_report: generate a .json file containing all information
            from all epochs (not only from the best)
Returns:
report_dict: the dictionary with information for report
"""
search_string = os.path.join(checkpoints_dir, "*")
model_list = sorted(glob.glob(search_string))
date_now = datetime.datetime.now().strftime("%d %B %Y")
report_dict = {
"model_name": None,
"date": date_now,
"parameters": None,
"best_epoch": None,
"main_metric": main_metric,
"epoch_metrics": None,
"plot_metrics": None,
}
    if len(model_list) == 0:
        raise ValueError("The checkpoint folder {} is empty".format(checkpoints_dir))
else:
epoch_metrics = {}
for model_folder in model_list:
model_name, epoch = model_folder.split("/")[-1].split("-")
# Fill header
if report_dict["model_name"] is None and report_dict["parameters"] is None:
param_filename = os.path.join(model_folder, "parameters.json")
report_dict["model_name"] = model_name
report_dict["parameters"] = read_json(param_filename)
# Check that we have only one model name inside checkpoints
            if model_name != report_dict["model_name"]:
                raise ValueError(
                    "model name in report {} is not "
                    "same as current model folder {}".format(
                        report_dict["model_name"], model_name
                    )
                )
# Fill epoch metrics
metrics_filename = os.path.join(model_folder, "epoch_metrics.json")
epoch_metrics[epoch] = read_json(metrics_filename)
# Find best epoch by metric
report_dict["epoch_metrics"] = epoch_metrics
# Get metrics to plot
plot_metrics = parse_report_for_epoch_metrics(report_dict, metrics=plot_metrics)
# Find best checkpoint idx, epoch
best_ckpt_idx = np.argmax(plot_metrics[main_metric]["value"])
best_epoch = "{0:04d}".format(plot_metrics[main_metric]["epoch"][best_ckpt_idx])
report_dict["best_epoch"] = best_epoch
report_dict["plot_metrics"] = plot_metrics
if not full_report:
# Override the epoch metrics with info from only best epoch
report_dict["epoch_metrics"] = {best_epoch: epoch_metrics[best_epoch]}
return report_dict
def plot_metric_to_image(
metrics_dict,
filename_to_save,
plot_title="epoch_metrics",
loc="best",
best_epoch=None,
):
"""
    Plot metrics over multiple epochs (e.g. train/val loss) as a graph saved to an image
Arguments:
metrics_dict: dict of the type metric : {"epoch": [], "value": []}
filename_to_save: image filename to save plot
plot_title: title of plot
loc: location of the legend (see matplotlib options)
best_epoch: plot dotted line for best epoch
"""
plt.style.use("seaborn-whitegrid")
figure = plt.figure()
plt.title(plot_title)
plt.xlabel("epoch")
plt.ylabel("value")
if best_epoch is not None:
plt.axvline(
best_epoch,
0,
1,
linestyle=":",
color="tab:red",
alpha=1.0,
label="best checkpoint",
)
for metric in metrics_dict:
color = "tab:blue" if "val" in metric else "tab:orange"
x = metrics_dict[metric]["epoch"]
y = metrics_dict[metric]["value"]
plt.plot(x, y, label=metric, color=color, linewidth=1, alpha=1.0)
plt.legend(loc=loc)
save_plot_to_image(filename_to_save, figure)
plt.style.use("classic") # Change back to classic style
|
import pandas as pd
import numpy as np
def safeSubset(lst, idx):
    # Return the element at idx, or NaN if idx is out of range
    if idx >= len(lst):
        return np.nan
    else:
        return lst[idx]
def getLat(lst):
rawlat = safeSubset(lst, 7)
    if pd.isna(rawlat):
return rawlat
if rawlat[-1] == 'N':
sign = 1
else:
sign = -1
return float(rawlat[:-1]) / 10 * sign
def getLon(lst):
rawlon = safeSubset(lst, 8)
    if pd.isna(rawlon):
return rawlon
if rawlon[-1] == 'W':
sign = 1
else:
sign = -1
return float(rawlon[:-1]) / 10 * sign
def getTime(lst):
timestamp = safeSubset(lst, 2)
    if pd.isna(timestamp):
return timestamp
return timestamp[-3:-1] + '00'
def getDate(lst):
    ts = safeSubset(lst, 2)
    if pd.isna(ts):
        return ts
    return '-'.join([ts[:4], ts[4:6], ts[6:8]])
def getL(lst):
return np.nan
def getClass(lst):
return safeSubset(lst, 10)
def getSeason(lst):
ts = safeSubset(lst, 2)
    if pd.isna(ts):
return ts
return ts[:4]
def getNum(lst):
rawnum = safeSubset(lst, 1)
return int(rawnum)
def getID(lst):
    season = getSeason(lst)
    basin = safeSubset(lst, 0)
    num = getNum(lst)
    return basin + str(num) + season
def getTimestamp(lst):
ts = safeSubset(lst, 2)
    if pd.isna(ts):
return ts
year = int(ts[:4])
month = int(ts[4:6])
day = int(ts[6:8])
hour = int(ts[8:10])
return pd.Timestamp(
year=year,
month=month,
day=day,
hour=hour,
)
def getWind(lst):
return int(safeSubset(lst, 8))
def getPress(lst):
return int(safeSubset(lst, 9))
|
from unittest import TestCase
from nba_data.data.season import Season
from nba_data.data.season_range import SeasonRange
class TestSeasonRange(TestCase):
def test_instantiation(self):
start_season = Season.season_2015
end_season = Season.season_2016
self.assertIsNotNone(SeasonRange(start=start_season, end=end_season))
|
import discord
import asyncio
import time
from nltk.corpus import wordnet
from collections import defaultdict
import re
import requests
from model import *
def generate_image(keyword):
url = 'https://image.baidu.com/search/flip?tn=baiduimage&ie=utf-8&word='+keyword+'&ct=201326592&v=flip'
result = requests.get(url)
html = result.text
pic_url = re.findall('"objURL":"(.*?)",',html,re.S)
i = 0
return pic_url[0]
PARTS_OF_SPEECH = {
wordnet.NOUN: "Noun",
wordnet.ADJ: "Adjective",
wordnet.VERB: "Verb",
wordnet.ADJ_SAT: "Adjective Satellite",
wordnet.ADV: "Adverb"
}
def format_meaning(word, synsets):
reply = f'**{word}**\n\n'
# Group by POS
grouped = defaultdict(list)
for synset in synsets:
grouped[PARTS_OF_SPEECH[synset.pos()]].append(synset.definition())
for pos, definitions in grouped.items():
reply += f'*{pos}*\n'
for counter, definition in enumerate(definitions, 1):
reply += f' {counter}. {definition}\n'
return reply
async def handle_message(message):
word = message.content[5:]
embed = discord.Embed(title=f"Let the {word} be made known to thee")
embed.set_image(url=generate_image(word))
await message.channel.send(embed=embed)
try:
synsets = wordnet.synsets(word)
if synsets:
reply = format_meaning(word, synsets)
else:
reply = f'No extra information could be acquired.'
except:
reply = 'Sorry, an error occurred while fetching that definition.'
await message.channel.send(reply)
messages = joined = 0
client = discord.Client()
async def update_stats():
await client.wait_until_ready()
global messages, joined
while not client.is_closed():
try:
with open("stats.txt", "a") as f:
f.write(f"Time: {int(time.time())}, Messages: {messages}, Members Joined: {joined}\n")
messages = 0
joined = 0
await asyncio.sleep(5)
except Exception as e:
print(e)
await asyncio.sleep(5)
@client.event
async def on_message(message):
global messages
messages += 1
    guild = client.get_guild(709006557358063636)  # Avoid shadowing the built-in `id`
# Printing the message to the console
print(message.content)
print("--------------------------------------------------------------------")
channels = ["commands", "general"]
users = ["dynamic#6160"]
if str(message.channel) in channels and str(message.author) in users:
if message.content.find("awaken") != -1:
await message.channel.send("Ravel in my presence my brethren")
elif message.content == "!users":
await message.channel.send("## of Members: ", id.member_count)
if message.content.startswith('seek '):
await handle_message(message)
elif message.content.startswith("preach"):
await message.channel.send(str(gennames.generate()))
@client.event
async def on_member_join(member):
global joined
joined += 1
for channel in member.guild.channels:
if str(channel) == "general": # We check to make sure we are sending the message in the general channel
            await channel.send(f"Welcome to the server {member.mention}")
client.loop.create_task(update_stats())
client.run("get_ur_own")
|
from google.appengine.ext import ndb
#
# One row per match. Each match has two players, a winner and a loser.
# The match takes place at a club, in a tourney.
#
import clubs
import tourneys
import players
class Match(ndb.Model):
"""Models a match between two players."""
matchid = ndb.IntegerProperty()
club = ndb.KeyProperty(kind=clubs.Club)
tourney = ndb.KeyProperty(kind=tourneys.Tourney)
playerA = ndb.KeyProperty(kind=players.Player)
handicapA = ndb.StringProperty()
scoreA = ndb.IntegerProperty()
targetA = ndb.IntegerProperty()
playerB = ndb.KeyProperty(kind=players.Player)
handicapB = ndb.StringProperty()
scoreB = ndb.IntegerProperty()
targetB = ndb.IntegerProperty()
winner = ndb.IntegerProperty(default = 0)
|
# Don't call this flask.py!
# Documentation for Flask can be found at:
# https://flask.palletsprojects.com/en/1.1.x/quickstart/
from flask import Flask, render_template, request, session, redirect, url_for, jsonify, abort
import os
app = Flask(__name__)
app.secret_key = b'REPLACE_ME_x#pi*CO0@^z'
@app.route('/')
def index():
return redirect(url_for('fortune'))
@app.route('/fortune/')
def fortune():
out = ""
os.system("fortune > fortune.txt")
f = open("fortune.txt", "r")
out = f.read()
out = "<pre>" + out +"</pre>"
os.system("re fortune.txt")
return out
@app.route('/cowsay/<message>/')
def cowsay(message):
out2 =""
os.system("cowsay " + message + " > cowsay.txt")
f = open("cowsay.txt", "r")
out2 = f.read()
out2 = "<pre>" + out2 + "</pre>"
os.system("rm cowsay.txt")
return out2
@app.route('/cowfortune/')
def cowfortune():
out3 = ""
os.system("'fortune' | cowsay > cowfortune.txt")
f = open("cowfortune.txt", "r")
out3 = f.read()
out3 = "<pre>" + out3 + "</pre>"
os.system("rm cowfortune.txt")
return out3
|
import requests
from bs4 import BeautifulSoup
import xlsxwriter
from fake_headers import Headers
workbook = xlsxwriter.Workbook('resault_amazon.xlsx')
worksheet = workbook.add_worksheet()
search_text = input('please enter book name: ')
search_text = search_text.replace(' ', '+')
url = 'https://www.amazon.com/s?k=' + search_text + '&i=stripbooks-intl-ship'
#print(url)
headers = Headers(
browser="chrome", # Generate only Chrome UA
os="win", # Generate ony Windows platform
headers=True # generate misc headers
).generate()
r = requests.get(url, headers=headers)
soup = BeautifulSoup(r.text, 'html.parser')
#print(soup)
title_soup = soup.select('.a-color-base.a-text-normal')
writer_soup = soup.select('.a-color-secondary .a-row')
book = {}
title_list = []
writer_list = []
for i in title_soup:
title = i.get_text()
title_list.append(title)
#print(title_list)
for x in writer_soup:
writer = x.get_text()
writer_list.append(writer)
#print(writer_list)
#workbook.close()
for i, x in zip(title_list, writer_list):
book[i] = x
print(book)
book_div_list = []
book_price_list = []
for i in range(0, 17):
book_div = soup('div', {'data-index': i})
book_div_list.append(book_div[0].get_text())
if 'Paperback' in book_div_list[i]:
book_price = book_div_list[i]
paperback = book_div_list[i].find('Paperback')+11
#print(paperback)
paperback_price = book_price[paperback:paperback+5]
if '$' in paperback_price:
paperback_price = paperback_price.replace('$', '')
try:
book_price_list.append(float(paperback_price))
except:
book_price_list.append('*')
else:
book_price_list.append('*')
#print(book_price_list)
#min_price = min(book_price_list.astype('float'))
min_price = min(x for x in book_price_list if x != '*' and x != '\n')
#print(min_price)
min_price_index = book_price_list.index(min_price)
#print(min_price_index)
min_final = {}
min_final[title_list[min_price_index]] = min_price
print(min_final)
#print(book_div_list)
#print(len(book_div_list))
|
import unittest
from linear_algebra.matrix import Matrix
A = Matrix(dims=(3, 3), fill=1.0)
B = Matrix(dims=(3, 3), fill=2.0)
class Testclass(unittest.TestCase):
def test_matrix_A(self):
T = [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]
self.assertEqual(A.A, T, "Left add (Matrix-Matrix)")
def test_matrix_B(self):
T = [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]]
self.assertEqual(B.A, T, "Left add (Matrix-Matrix)")
def test_left_add_matrix_matrix(self):
C = A + B
T = [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]]
self.assertEqual(C.A, T, "Left add (Matrix-Matrix)")
def test_left_add_matrix_scalar(self):
C = A + 10.0
T = [[11.0, 11.0, 11.0], [11.0, 11.0, 11.0], [11.0, 11.0, 11.0]]
self.assertEqual(C.A, T, "Left add (Matrix-Scalar)")
def test_standard_multiplication_matrix_matrix(self):
C = A @ B
T = [[-16.0, -16.0, -16.0], [6.0, 6.0, 6.0], [6.0, 6.0, 6.0]]
self.assertEqual(C.A, T, "Standard multiplication (Matrix-Matrix):")
def test_right_add_01(self):
C = 20.0 + A
T = [[21.0, 21.0, 21.0], [21.0, 21.0, 21.0], [21.0, 21.0, 21.0]]
self.assertEqual(C.A, T, "Right add:")
def test_right_add_02(self):
C = A * B
T = [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]]
self.assertEqual(C.A, T, "Right add:")
def test_right_add_03(self):
C = A * 0.5
T = [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [0.5, 0.5, 0.5]]
self.assertEqual(C.A, T, "Right add:")
def test_right_add_04(self):
C = 0.5 * A
T = [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [0.5, 0.5, 0.5]]
self.assertEqual(C.A, T, "Right add:")
def test_right_add_05(self):
C = A[0, 0]
self.assertEqual(C, 1.0, "Right add:")
def test_right_add_06(self):
C = A
C[0, 0] = -10.0
self.assertEqual(C[0, 0], -10.0, "Right add:")
if __name__ == '__main__':
unittest.main()
|
import json as json_handler
from yahoo_weather.classes.errors import Error
class Location:
    def __init__(self, woeid, city, country, lat, long, region, timezone_id):
self.woeid = woeid
self.city = city
self.country = country
self.lat = lat
self.long = long
self.region = region
self.timezone_id = timezone_id
def as_dict(self):
return self.__dict__
@classmethod
def load_from_json(cls, json):
if isinstance(json, dict):
json_dict = json
elif isinstance(json, str):
json_dict = json_handler.loads(json)
else:
raise ValueError("{}: {} : {}".format("Location", Error.unacceptable_json, json))
woeid = json_dict.get("woeid")
city = json_dict.get("city")
country = json_dict.get("country")
lat = json_dict.get("lat")
long = json_dict.get("long")
region = json_dict.get("region")
timezone_id = json_dict.get("timezone_id")
return cls(woeid=woeid, city=city, country=country, lat=lat, long=long, region=region, timezone_id=timezone_id)
|
import ctypes
so3 = ctypes.CDLL("./so3.so")
a = 1
b = 2
c = so3.add(a, b)
print(c)
|
# -*- coding=utf-8 -*-
from SplineInterpolator import SplineInterpolator
__author__ = 'balin'
# noinspection PyPep8Naming
def generateSplineFunction(array1, array2):
interpolator = SplineInterpolator()
return interpolator.interpolate(array1, array2)
def interpolate(spline, x):
# noinspection PyBroadException
try:
return spline.value(x)
except:
knots = spline.Knots
return spline.value(knots[0 if x < knots[0] else spline.n - 1])
|
import asyncio
import hashlib
import logging
import magic
import os
from abc import ABC, abstractmethod
from polyswarmclient import Client
from polyswarmclient.events import SettleBounty
from polyswarmclient.exceptions import LowBalanceError, FatalError
from polyswarmclient.utils import asyncio_stop
logger = logging.getLogger(__name__) # Initialize logger
BOUNTY_QUEUE_SIZE = int(os.environ.get('BOUNTY_QUEUE_SIZE', 10))
MAX_BOUNTIES_IN_FLIGHT = int(os.environ.get('MAX_BOUNTIES_IN_FLIGHT', 10))
MAX_BOUNTIES_PER_BLOCK = int(os.environ.get('MAX_BOUNTIES_PER_BLOCK', 1))
BLOCK_DIVISOR = int(os.environ.get('BLOCK_DIVISOR', 1))
class QueuedBounty(object):
def __init__(self, artifact_type, amount, ipfs_uri, duration, api_key=None, metadata=None):
self.amount = amount
self.ipfs_uri = ipfs_uri
self.duration = duration
self.api_key = api_key
self.artifact_type = artifact_type
self.metadata = metadata
def __repr__(self):
return f'({self.artifact_type}, {self.amount}, {self.ipfs_uri}, {self.duration}, {self.metadata})'
class AbstractAmbassador(ABC):
def __init__(self, client, testing=0, chains=None, watchdog=0, submission_rate=0):
self.client = client
self.chains = chains
self.client.on_run.register(self.__handle_run)
self.client.on_new_block.register(self.__handle_new_block)
self.client.on_quorum_reached.register(self.__handle_quorum_reached)
self.client.on_settled_bounty.register(self.__handle_settled_bounty)
self.client.on_settle_bounty_due.register(self.__handle_settle_bounty)
self.client.on_deprecated.register(self.__handle_deprecated)
# Initialize in run_task to ensure we're on the right loop
self.bounty_queues = {}
self.bounty_semaphores = {}
self.block_events = {}
self.watchdog = watchdog
self.first_block = 0
self.last_block = 0
self.last_bounty_count = {}
self.testing = testing
self.bounties_posted = {}
self.bounties_posted_locks = {}
self.bounties_pending = {}
self.bounties_pending_locks = {}
self.settles_posted = {}
self.settles_posted_locks = {}
self.submission_rate = submission_rate
@classmethod
def connect(cls, polyswarmd_addr, keyfile, password, api_key=None, testing=0, chains=None, watchdog=0,
submission_rate=0):
"""Connect the Ambassador to a Client.
Args:
polyswarmd_addr (str): URL of polyswarmd you are referring to.
keyfile (str): Keyfile filename.
password (str): Password associated with Keyfile.
api_key (str): Your PolySwarm API key.
testing (int): Number of testing bounties to use.
chains (set(str)): Set of chains you are acting on.
Returns:
AbstractAmbassador: Ambassador instantiated with a Client.
"""
client = Client(polyswarmd_addr, keyfile, password, api_key, testing > 0)
return cls(client, testing, chains, watchdog, submission_rate)
@staticmethod
def generate_metadata(content):
""" Generate a bunch of metadata for a given bytestream from a file
Args:
content: bytes-like object (or string)
Returns:
dictionary of metadata about a file
"""
# Force to be bytes-like
try:
content = content.encode()
except AttributeError:
pass
return {
'sha256': hashlib.sha256(content).hexdigest(),
'md5': hashlib.md5(content).hexdigest(),
'size': len(content),
'sha1': hashlib.sha1(content).hexdigest(),
'mimetype': magic.from_buffer(content, mime=True),
'extended_type': magic.from_buffer(content),
}
@abstractmethod
async def generate_bounties(self, chain):
"""Override this to submit bounties to the queue (using the push_bounty method)
Args:
chain (str): Chain we are operating on.
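
        Example (a sketch; the artifact type, amount, URI, and duration below are
        illustrative only)::

            async def generate_bounties(self, chain):
                amount = 10 * 10 ** 18  # amount in NCT base units, illustrative
                await self.push_bounty(ArtifactType.FILE, amount,
                                       'QmSomeIpfsUri', 20, chain)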
"""
pass
async def push_bounty(self, artifact_type, amount, ipfs_uri, duration, chain, api_key=None, metadata=None):
"""Push a bounty onto the queue for submission
Args:
artifact_type (ArtifactType): Type of artifact being pushed
amount (int): Amount of NCT to place on the bounty
ipfs_uri (str): URI for artifact(s) to be analyzed
duration (int): Duration in blocks to accept assertions
chain (str): Chain to submit the bounty
api_key (str): API key to use to submit, if None use default from client
metadata (str): json blob of metadata
"""
bounty = QueuedBounty(artifact_type, amount, ipfs_uri, duration, api_key=api_key, metadata=metadata)
logger.info('Queueing bounty %s', bounty)
await self.bounty_queues[chain].put(bounty)
def run(self):
"""Run the Client on all of our chains."""
self.client.run(self.chains)
async def run_task(self, chain):
"""Iterate through the bounties an Ambassador wants to post on a given chain.
Post each bounty to polyswarmd and schedule the bounty to be settled.
Args:
chain (str): Name of the chain to post bounties to.
"""
self.bounty_queues[chain] = asyncio.Queue(maxsize=BOUNTY_QUEUE_SIZE)
self.bounty_semaphores[chain] = asyncio.Semaphore(value=MAX_BOUNTIES_IN_FLIGHT)
self.block_events[chain] = asyncio.Event()
self.bounties_posted_locks[chain] = asyncio.Lock()
self.bounties_pending_locks[chain] = asyncio.Lock()
self.settles_posted_locks[chain] = asyncio.Lock()
# Producer task
asyncio.get_event_loop().create_task(self.generate_bounties(chain))
# Consumer
while True:
# Delay submissions
await asyncio.sleep(self.submission_rate)
# Wait for a block
await self.block_events[chain].wait()
self.block_events[chain].clear()
bounties_this_block = 0
while bounties_this_block < MAX_BOUNTIES_PER_BLOCK:
# Exit if we are in testing mode
async with self.bounties_posted_locks[chain]:
bounties_posted = self.bounties_posted.get(chain, 0)
if 0 < self.testing <= bounties_posted:
logger.info('All testing bounties submitted')
return
try:
bounty = self.bounty_queues[chain].get_nowait()
except asyncio.queues.QueueEmpty:
await self._handle_empty_queue()
continue
if bounty is None:
logger.info('Got None for bounty value, moving on to next block')
break
bounties_this_block += 1
await self.bounty_semaphores[chain].acquire()
asyncio.get_event_loop().create_task(self.submit_bounty(bounty, chain))
async def submit_bounty(self, bounty, chain):
"""Submit a bounty in a new task
Args:
bounty (QueuedBounty): Bounty to submit
chain: Name of the chain to post to
"""
async with self.client.liveness_recorder.waiting_task(bounty.ipfs_uri, self.last_block):
bounty_fee = await self.client.bounties.parameters[chain].get('bounty_fee')
try:
await self.client.balances.raise_for_low_balance(bounty.amount + bounty_fee, chain)
except LowBalanceError as e:
await self.on_bounty_post_failed(bounty.artifact_type, bounty.amount, bounty.ipfs_uri, bounty.duration,
chain, metadata=bounty.metadata)
self.bounty_queues[chain].task_done()
self.bounty_semaphores[chain].release()
if self.client.tx_error_fatal:
raise FatalError('Failed to post bounty due to low balance') from e
else:
return
assertion_reveal_window = await self.client.bounties.parameters[chain].get('assertion_reveal_window')
arbiter_vote_window = await self.client.bounties.parameters[chain].get('arbiter_vote_window')
metadata = None
if bounty.metadata is not None:
metadata = await self.client.bounties.post_metadata(bounty.metadata, chain)
await self.on_before_bounty_posted(bounty.artifact_type, bounty.amount, bounty.ipfs_uri, bounty.duration, chain)
bounties = await self.client.bounties.post_bounty(bounty.artifact_type, bounty.amount, bounty.ipfs_uri,
bounty.duration, chain, api_key=bounty.api_key,
metadata=metadata)
if not bounties:
await self.on_bounty_post_failed(bounty.artifact_type, bounty.amount, bounty.ipfs_uri, bounty.duration,
chain, metadata=bounty.metadata)
else:
async with self.bounties_posted_locks[chain]:
bounties_posted = self.bounties_posted.get(chain, 0)
logger.info('Submitted bounty %s', bounties_posted, extra={'extra': bounty})
self.bounties_posted[chain] = bounties_posted + len(bounties)
async with self.bounties_pending_locks[chain]:
bounties_pending = self.bounties_pending.get(chain, set())
self.bounties_pending[chain] = bounties_pending | {b.get('guid') for b in bounties if 'guid' in b}
for b in bounties:
guid = b.get('guid')
expiration = int(b.get('expiration', 0))
if guid is None or expiration == 0:
logger.error('Processing invalid bounty, not scheduling settle')
continue
# Handle any additional steps in derived implementations
await self.on_after_bounty_posted(guid, bounty.artifact_type, bounty.amount, bounty.ipfs_uri,
expiration, chain, metadata=bounty.metadata)
sb = SettleBounty(guid)
self.client.schedule(expiration + assertion_reveal_window + arbiter_vote_window, sb, chain)
self.bounty_queues[chain].task_done()
self.bounty_semaphores[chain].release()
async def on_before_bounty_posted(self, artifact_type, amount, ipfs_uri, duration, chain, metadata=None):
"""Override this to implement additional steps before the bounty is posted
Args:
artifact_type (ArtifactType): Type of artifact for the soon to be posted bounty
amount (int): Amount to place this bounty for
ipfs_uri (str): IPFS URI of the artifact to post
duration (int): Duration of the bounty in blocks
chain (str): Chain we are operating on
metadata (dict): Optional dict of metadata
"""
pass
async def on_bounty_post_failed(self, artifact_type, amount, ipfs_uri, duration, chain, metadata=None):
"""Override this to implement additional steps when a bounty fails to post
Args:
artifact_type (ArtifactType): Type of artifact for the failed bounty
amount (int): Amount to place this bounty for
ipfs_uri (str): IPFS URI of the artifact to post
duration (int): Duration of the bounty in blocks
chain (str): Chain we are operating on
metadata (dict): Optional dict of metadata
"""
pass
async def on_after_bounty_posted(self, guid, artifact_type, amount, ipfs_uri, expiration, chain, metadata=None):
"""Override this to implement additional steps after bounty is posted
Args:
guid (str): GUID of the posted bounty
artifact_type (ArtifactType): Type of artifact for the posted bounty
amount (int): Amount of the posted bounty
ipfs_uri (str): URI of the artifact submitted
expiration (int): Block number of bounty expiration
chain (str): Chain we are operating on
metadata (dict): Optional dict of metadata
"""
pass
async def __handle_run(self, chain):
"""Asynchronously run a task on a given chain.
Args:
chain (str): Name of the chain to run.
"""
asyncio.get_event_loop().create_task(self.run_task(chain))
async def __handle_deprecated(self, rollover, block_number, txhash, chain):
asyncio.get_event_loop().create_task(self.client.bounties.settle_all_bounties(chain))
return []
async def __handle_new_block(self, number, chain):
if number <= self.last_block:
return
self.last_block = number
event = self.block_events.get(chain)
if event is not None and number % BLOCK_DIVISOR == 0:
event.set()
if not self.watchdog:
return
if not self.first_block:
self.first_block = number
return
blocks = number - self.first_block
async with self.bounties_posted_locks[chain]:
bounties_posted = self.bounties_posted.get(chain, 0)
last_bounty_count = self.last_bounty_count.get(chain, 0)
if blocks % self.watchdog == 0 and bounties_posted == last_bounty_count:
raise FatalError('Bounties not processing')
self.last_bounty_count[chain] = bounties_posted
async def __handle_quorum_reached(self, bounty_guid, block_number, txhash, chain):
return await self.__settle_bounty(bounty_guid, chain)
async def __handle_settle_bounty(self, bounty_guid, chain):
return await self.__settle_bounty(bounty_guid, chain)
async def __handle_settled_bounty(self, bounty_guid, settler, payout, block_number, txhash, chain):
return await self.__settle_bounty(bounty_guid, chain)
async def __settle_bounty(self, bounty_guid, chain):
"""
When a bounty is scheduled to be settled, actually settle the bounty to the given chain.
Args:
bounty_guid (str): GUID of the bounty to be submitted.
chain (str): Name of the chain where the bounty is to be posted.
Returns:
Response JSON parsed from polyswarmd containing emitted events.
"""
async with self.bounties_pending_locks[chain]:
bounties_pending = self.bounties_pending.get(chain, set())
if bounty_guid not in bounties_pending:
logger.debug('Bounty %s already settled', bounty_guid)
return []
self.bounties_pending[chain] = bounties_pending - {bounty_guid}
last_settle = False
async with self.settles_posted_locks[chain]:
settles_posted = self.settles_posted.get(chain, 0)
self.settles_posted[chain] = settles_posted + 1
if self.testing > 0:
if self.settles_posted[chain] > self.testing:
logger.warning('Scheduled settle, but finished with testing mode')
return []
elif self.settles_posted[chain] == self.testing:
last_settle = True
logger.info('Testing mode, %s settles remaining', self.testing - self.settles_posted[chain])
ret = await self.client.bounties.settle_bounty(bounty_guid, chain)
if last_settle:
logger.info('All testing bounties complete, exiting')
asyncio_stop()
return ret
async def _handle_empty_queue(self):
"""
Just debug log on empty queue.
"""
# Just a forced await to share
await asyncio.sleep(0)
logger.debug('Queue empty, waiting for next window')
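# Illustrative sketch (not from the original module): the on_before/on_after/
# on_bounty_post_failed coroutines above are no-op hooks meant to be overridden
# by a concrete ambassador. The base class name `Ambassador` below is an
# assumption, since the enclosing class is defined earlier in this file.
#
# class AuditingAmbassador(Ambassador):
#     async def on_after_bounty_posted(self, guid, artifact_type, amount, ipfs_uri,
#                                       expiration, chain, metadata=None):
#         # submit_bounty() has already scheduled the settle; only record it here.
#         logger.info('Posted bounty %s on %s (expires at block %s)', guid, chain, expiration)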
|
def func2():
pass
ctrl5 = root.Controls['ConnFrame']
ctrl6 = ctrl5.Controls['layTable']
ctrl8 = ctrl6.Controls['tbxDataSource']
ctrl8.Text = 'localhost';DoEvents()
ctrl10 = ctrl6.Controls['tbxLogin']
ctrl10.Text = 'root';DoEvents()
ctrl12 = ctrl6.Controls['tbxPassword']
ctrl12.Text = 'n3j4k3h3sl0';DoEvents()
ctrl4 = root.Controls['btnOk']
ctrl4.PerformClick();DoEvents()
root.Close();DoEvents()
def func1():
pass
ctrl6 = root.Controls['lbxGroups']
if ctrl6.FocusedItem is not None: ctrl6.FocusedItem.Selected = False
ctrl6.FocusedItem = ctrl6.Items.Find('connections', False)[0]; DoEvents()
ctrl6.FocusedItem.Selected = True; DoEvents()
ctrl5 = root.Controls['lbxItems']
if ctrl5.FocusedItem is not None: ctrl5.FocusedItem.Selected = False
ctrl5.FocusedItem = ctrl5.Items.Find('mysql', False)[0]; DoEvents()
ctrl5.FocusedItem.Selected = True; DoEvents()
ctrl2 = root.Controls['tbxNewName']
SetWindowProc('func2')
ctrl2.Text = 'test-mysql';DoEvents()
ctrl4 = root.Controls['btnOk']
ctrl4.PerformClick();DoEvents()
root.Close();DoEvents()
def main():
pass
SetWindowProc('func1')
ctrl77 = root.Controls['tstMain']
menu45 = ctrl77.Items['btnConnect']
menu45.PerformClick();DoEvents()
if procedure is None: main()
if procedure == 'main': main()
if procedure == 'func1': func1()
if procedure == 'func2': func2()
|
from datetime import datetime
from django.core.management.base import BaseCommand
from app.posts.models import Post
class Command(BaseCommand):
"""Add specifique commande to manage.py
:param BaseCommand: Legacy from BaseCommand class
:type BaseCommand: BaseCommand
"""
help = "Post all queued posts whose scheduled time has arrived"
def handle(self, *args, **options):
"""Methode to launch the specifque commande."""
all_post = Post.objects.get_all_post_on_queue()
for post in all_post:
if post.schedule_time <= datetime.now():
post.post_on_Linkedin()
print(post.schedule_time)
|
from turtle import Screen
from paddle import Paddle
from ball import Ball
from scoreboard import Scoreboard
import time
pantalla = Screen()
pantalla.setup(width=800, height=600)
pantalla.bgcolor("black")
pantalla.title("Pong")
pantalla.tracer(0)
player_2 = Paddle(350)
player_1 = Paddle(-350)
ball = Ball()
scoreboard = Scoreboard()
pantalla.listen()
pantalla.onkeypress(player_2.up, "P")
pantalla.onkeypress(player_2.down, "L")
pantalla.onkeypress(player_2.up, "p")
pantalla.onkeypress(player_2.down, "l")
pantalla.onkeypress(player_1.up, "W")
pantalla.onkeypress(player_1.down, "S")
pantalla.onkeypress(player_1.up, "w")
pantalla.onkeypress(player_1.down, "s")
while True:
time.sleep(ball.frame)
pantalla.update()
ball.move()
if ball.xcor() >= 330 and ball.should_bounce(player_2):
ball.bounce()
if ball.xcor() <= -330 and ball.should_bounce(player_1):
ball.bounce()
if ball.xcor() > 380:
ball.reset_position()
scoreboard.score(left=True)
scoreboard.print_score()
if ball.xcor() < -380:
ball.reset_position()
scoreboard.score(left=False)
scoreboard.print_score()
|
"""
Abstract Syntax Trees: https://docs.python.org/3/library/ast.html
"""
import ast as _ast
import sys
from typing import Type
from ._base import AST, immutable
if (3, 7) <= sys.version_info < (3, 8):
import asttrs._py3_7 as _asttrs
from asttrs._py3_7 import * # noqa
pass
elif (3, 8) <= sys.version_info < (3, 9):
import asttrs._py3_8 as _asttrs
from asttrs._py3_8 import * # noqa
pass
elif (3, 9) <= sys.version_info < (3, 10):
import asttrs._py3_9 as _asttrs
from asttrs._py3_9 import * # noqa
pass
elif (3, 10) <= sys.version_info < (3, 11):
import asttrs._py3_10 as _asttrs
from asttrs._py3_10 import * # noqa
pass
else:
raise ImportError("Only Python 3.7 through 3.10 is supported")
stmt = getattr(_asttrs, "stmt", AST)
@immutable
class Comment(stmt):
"""A Comment wrapper for convenient purpose, since there's no comment node in ast.
Args:
body: comment string
>>> Comment(body="This is a comment").to_source().strip()
'# This is a comment'
"""
body: str
@classmethod
def infer_ast_type(cls) -> Type[_ast.AST]:
return _ast.Expr
def to_ast(self) -> _ast.AST:
from asttrs import Expr, Name, Store
lines = [body.strip() for body in self.body.split("\n") if body.strip()]
cmt: str = "\n".join(
[body if body.startswith("#") else f"# {body}" for body in lines]
)
return Expr(value=Name(id=cmt, ctx=Store())).to_ast()
|
import battlecode as bc
class FactoryManager():
def __init__(self, controller):
self.controller = controller
def handle(self, unit):
pass
|
"""Export all the data from an assessment within Gophish into a single JSON file.
Usage:
gophish-export [--log-level=LEVEL] ASSESSMENT_ID SERVER API_KEY
gophish-export (-h | --help)
gophish-export --version
Options:
API_KEY Gophish API key.
ASSESSMENT_ID ID of the assessment to export data from.
SERVER Full URL to Gophish server.
-h --help Show this screen.
--version Show version.
-l --log-level=LEVEL If specified, then the log level will be set to
the specified value. Valid values are "debug", "info",
"warning", "error", and "critical". [default: info]
"""
# Standard Python Libraries
from datetime import datetime
import hashlib
import json
import logging
import re
import sys
from typing import Dict
# Third-Party Libraries
from docopt import docopt
import httpagentparser
import requests
# cisagov Libraries
from tools.connect import connect_api
from util.validate import validate_assessment_id
from ._version import __version__
# Disable "Insecure Request" warning: Gophish uses a self-signed certificate
# as default for https connections, which can not be verified by a third
# party; thus, an SSL insecure request warning is produced.
requests.packages.urllib3.disable_warnings()
def assessment_exists(api, assessment_id):
"""Check if Gophish has at least one campaign for designated assessment.
Args:
api (Gophish API): Connection to Gophish server via the API.
assessment_id (string): Assessment identifier to get campaigns from.
Returns:
boolean: Indicates if a campaign is found starting with assessment_id.
"""
allCampaigns = api.campaigns.get()
for campaign in allCampaigns:
if campaign.name.startswith(assessment_id):
return True
return False
def export_targets(api, assessment_id):
"""Add all targets to a list.
Achieved by pulling the group IDs for any group starting with
the assessment id. The targets within the group are then parsed
into a targets list of target dicts. Each target dict includes a
sha256 hash of the target's email and assessment id with any labels.
Args:
api (Gophish API): Connection to Gophish server via the API.
assessment_id (string): Assessment identifier to get campaigns from.
Returns:
List of targets from the assessment's group(s).
"""
groupIDs = get_group_ids(api, assessment_id)
targets = list()
for group_id in groupIDs:
# Gets target list for parsing.
raw_targets = api.groups.get(group_id).as_dict()["targets"]
for raw_target in raw_targets:
target = dict()
target["id"] = hashlib.sha256(
raw_target["email"].encode("utf-8")
).hexdigest()
target["customer_defined_labels"] = dict()
if "position" in raw_target:
target["customer_defined_labels"][assessment_id] = [
raw_target["position"]
]
targets.append(target)
logging.info(
"%d email targets found for assessment %s.", len(targets), assessment_id
)
return targets
def get_group_ids(api, assessment_id):
"""Return a list of group IDs for all groups starting with specified assessment_id."""
rawGroup = api.groups.get() # Holds raw list of campaigns from Gophish.
groups = list() # Holds list of campaign IDs that match the assessment.
for group in rawGroup:
group = group.as_dict()
if group["name"].startswith(assessment_id):
groups.append(group["id"])
return groups
def export_campaigns(api, assessment_id):
"""Add all the campaigns' data for an assessment to a list.
Args:
api (Gophish API): Connection to Gophish server via the API.
assessment_id (string): Assessment identifier to get campaigns from.
Returns:
List of the assessment's campaigns with data.
"""
campaignIDs = get_campaign_ids(api, assessment_id)
campaigns = list()
for campaign_id in campaignIDs:
campaigns.append(get_campaign_data(api, campaign_id))
logging.info("%d campaigns found for assessment %s.", len(campaigns), assessment_id)
return campaigns
def get_campaign_ids(api, assessment_id):
"""Return a list of campaign IDs for all campaigns starting with specified assessment_id."""
rawCampaigns = api.campaigns.get() # Holds raw list of campaigns from Gophish.
campaigns = list() # Holds list of campaign IDs that match the assessment.
for campaign in rawCampaigns:
campaign = campaign.as_dict()
if campaign["name"].startswith(assessment_id):
campaigns.append(campaign["id"])
return campaigns
def get_campaign_data(api, campaign_id):
"""Return campaign metadata for the given campaign ID."""
campaign = dict()
# Pulls the campaign data as dict from Gophish.
rawCampaign: dict = api.campaigns.get(campaign_id).as_dict()
campaign["id"] = rawCampaign["name"]
campaign["start_time"] = rawCampaign["launch_date"]
campaign["end_time"] = rawCampaign["completed_date"]
campaign["url"] = rawCampaign["url"]
campaign["subject"] = rawCampaign["template"]["subject"]
# Get the template ID from the Gophish template name.
campaign["template"] = (
api.templates.get(rawCampaign["template"]["id"]).as_dict()["name"].split("-")[2]
)
campaign["clicks"] = get_click_data(api, campaign_id)
# Get the e-mail send status from Gophish.
campaign["status"] = get_email_status(api, campaign_id)
return campaign
def get_click_data(api, campaign_id):
"""Return a list of all clicks for a given campaign."""
rawEvents = api.campaigns.get(campaign_id).as_dict()["timeline"]
clicks = list() # Holds list of all users that clicked.
for rawEvent in rawEvents:
if rawEvent["message"] == "Clicked Link":
click = dict()
# Builds out click document.
click["user"] = hashlib.sha256(
rawEvent["email"].encode("utf-8")
).hexdigest()
click["source_ip"] = rawEvent["details"]["browser"]["address"]
click["time"] = rawEvent["time"]
click["application"] = get_application(rawEvent)
clicks.append(click)
return clicks
def get_email_status(api, campaign_id):
"""Return the email send status and time."""
rawEvents = api.campaigns.get(campaign_id).as_dict()["timeline"]
status = list()
for rawEvent in rawEvents:
email = dict()
if rawEvent["message"] == "Email Sent":
email["user"] = hashlib.sha256(
rawEvent["email"].encode("utf-8")
).hexdigest()
email["time"] = rawEvent["time"]
email["status"] = "SUCCESS"
elif rawEvent["message"] == "Error Sending Email":
email["user"] = hashlib.sha256(
rawEvent["email"].encode("utf-8")
).hexdigest()
# Trim microseconds before converting to datetime.
rawEvent["time"] = datetime.strptime(
rawEvent["time"].split(".")[0], "%Y-%m-%dT%H:%M:%S"
)
email["time"] = rawEvent["time"]
email["status"] = "Failed"
if email:
status.append(email)
return status
def get_application(rawEvent):
"""Return application details."""
application = dict()
application["external_ip"] = rawEvent["details"]["browser"]["address"]
# Process user agent string.
userAgent = rawEvent["details"]["browser"]["user-agent"]
application["name"] = httpagentparser.detect(userAgent)["platform"]["name"]
application["version"] = httpagentparser.detect(userAgent)["platform"]["version"]
return application
def find_unique_target_clicks_count(clicks):
"""Return the number of unique clicks in a click set."""
uniq_users = set()
for click in clicks:
uniq_users.add(click["user"])
return len(uniq_users)
def write_campaign_summary(api, assessment_id):
"""Output a campaign summary report to JSON, console, and a text file."""
campaign_ids = get_campaign_ids(api, assessment_id)
campaign_data_template = "campaign_data.json"
campaign_summary_json = f"{assessment_id}_campaign_data.json"
campaign_summary_textfile = f"{assessment_id}_summary_{datetime.strftime(datetime.now(), '%Y-%m-%dT%H:%M:%S')}.txt"
with open(campaign_data_template) as template:
campaign_data = json.load(template)
logging.info("Writing campaign summary report to %s", campaign_summary_textfile)
file_out = open(campaign_summary_textfile, "w+")
file_out.write("Campaigns for Assessment: " + assessment_id)
regex = re.compile(r"^.*_(?P<level>level-[1-6])$")
for campaign_id in campaign_ids:
campaign = api.campaigns.get(campaign_id)
match = regex.fullmatch(campaign.name)
if match:
level = match.group("level")
else:
logging.warning(
"Encountered campaign (%s) that is unable to be processed for campaign summary export. \n"
"Campaign name is not properly suffixed with the campaign level number (e.g. '_level-1')\n"
"Skipping campaign",
campaign.name,
)
continue
logging.info(level)
clicks = get_click_data(api, campaign_id)
total_clicks = api.campaigns.summary(campaign_id=campaign_id).stats.clicked
unique_clicks = find_unique_target_clicks_count(clicks)
if total_clicks > 0:
percent_clicks = unique_clicks / float(total_clicks)
else:
percent_clicks = 0.0
campaign_data[level]["subject"] = campaign.template.subject
campaign_data[level]["sender"] = campaign.smtp.from_address
campaign_data[level]["start_date"] = campaign.launch_date
campaign_data[level]["end_date"] = campaign.completed_date
campaign_data[level]["redirect"] = campaign.url
campaign_data[level]["clicks"] = total_clicks
campaign_data[level]["unique_clicks"] = unique_clicks
campaign_data[level]["percent_clicks"] = percent_clicks
file_out.write("\n")
file_out.write("-" * 50)
file_out.write("\nCampaign: %s" % campaign.name)
file_out.write("\nSubject: %s" % campaign_data[level]["subject"])
file_out.write("\nSender: %s" % campaign_data[level]["sender"])
file_out.write("\nStart Date: %s" % campaign_data[level]["start_date"])
file_out.write("\nEnd Date: %s" % campaign_data[level]["end_date"])
file_out.write("\nRedirect: %s" % campaign_data[level]["redirect"])
file_out.write("\nClicks: %d" % campaign_data[level]["clicks"])
file_out.write("\nUnique Clicks: %d" % campaign_data[level]["unique_clicks"])
file_out.write(
"\nPercentage Clicks: %f" % campaign_data[level]["percent_clicks"]
)
file_out.close()
logging.info("Writing out summary JSON to %s", campaign_summary_json)
with open(campaign_summary_json, "w") as fp:
json.dump(campaign_data, fp, indent=4)
def export_user_reports(api, assessment_id):
"""Build and export a user_report JSON file for each campaign in an assessment."""
campaign_ids = get_campaign_ids(api, assessment_id)
for campaign_id in campaign_ids:
first_report = None
user_report_doc = dict()
campaign = get_campaign_data(api, campaign_id)
# iterate over clicks and find the earliest click
for click in campaign["clicks"]:
click_time = datetime.strptime(
click["time"].split(".")[0], "%Y-%m-%dT%H:%M:%S"
)
if first_report is None or click_time < first_report:
first_report = click_time
# The "customer" field is a placeholder added for operator convenience when
# working with the JSON file created.
user_report_doc["customer"] = ""
user_report_doc["assessment"] = assessment_id
# get_campaign_ids() returns integers, but user_report_doc["campaign"]
# expects a string
user_report_doc["campaign"] = str(campaign_id)
if first_report is not None:
user_report_doc["first_report"] = datetime.strftime(
first_report, "%Y-%m-%dT%H:%M:%S"
)
else:
user_report_doc["first_report"] = "No clicks reported"
user_report_doc["total_num_reports"] = api.campaigns.summary(
campaign_id=campaign_id
).stats.clicked
logging.info(
"Writing out user report for campaign %s in assessment %s",
campaign["id"],
assessment_id,
)
with open(f"{assessment_id}_{campaign_id}_user_report_doc.json", "w") as fp:
json.dump(user_report_doc, fp, indent=4)
def main() -> None:
"""Set up logging, connect to API, export all assessment data."""
args: Dict[str, str] = docopt(__doc__, version=__version__)
# Set up logging
log_level = args["--log-level"]
try:
logging.basicConfig(
format="\n%(levelname)s: %(message)s", level=log_level.upper()
)
except ValueError:
logging.critical(
'"%s" is not a valid logging level. Possible values are debug, info, warning, and error.',
log_level,
)
sys.exit(1)
else:
# Connect to API
try:
api = connect_api(args["API_KEY"], args["SERVER"])
logging.debug("Connected to: %s", args["SERVER"])
except Exception as e:
logging.critical(e.args[0])
sys.exit(1)
if not validate_assessment_id(args["ASSESSMENT_ID"]):
logging.critical(
'"%s" is an invalid assessment_id format. Assessment identifiers begin with RV and are followed by '
" a 4 or 5 digit numerical sequence. Examples: RV1234, RV12345",
args["ASSESSMENT_ID"],
)
sys.exit(1)
if assessment_exists(api, args["ASSESSMENT_ID"]):
assessment_dict: Dict = dict()
# Add targets list to assessment dict.
assessment_dict["targets"] = export_targets(api, args["ASSESSMENT_ID"])
# Add campaigns list to the assessment dict.
assessment_dict["campaigns"] = export_campaigns(api, args["ASSESSMENT_ID"])
with open(f'data_{args["ASSESSMENT_ID"]}.json', "w") as fp:
json.dump(assessment_dict, fp, indent=4)
logging.info("Data written to data_%s.json", args["ASSESSMENT_ID"])
export_user_reports(api, args["ASSESSMENT_ID"])
write_campaign_summary(api, args["ASSESSMENT_ID"])
else:
logging.error(
'Assessment "%s" does not exist in Gophish.', args["ASSESSMENT_ID"]
)
sys.exit(1)
|
import argparse
import shlex
from discord.ext import commands
class FlagConverter(
commands.FlagConverter, prefix="--", delimiter=" ", case_insensitive=True
):
pass
class _Arguments(argparse.ArgumentParser):
def error(self, message):
raise RuntimeError(message)
class LegacyFlagItems:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class LegacyFlagConverter:
def __init__(self, l: list[LegacyFlagItems]):
self.data = l
self.parser = _Arguments(add_help=False, allow_abbrev=False)
for i in l:
self.parser.add_argument(*i.args, **i.kwargs)
def convert(self, argument: str):
if argument:
return self.parser.parse_args(shlex.split(argument))
else:
return self.parser.parse_args([])
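# Minimal usage sketch (not part of the original module): build a converter from
# LegacyFlagItems exactly the way an argparse parser is configured, then parse a
# user-supplied flag string. The flag names below are illustrative assumptions.
_example_converter = LegacyFlagConverter([
    LegacyFlagItems("--limit", type=int, default=10),
    LegacyFlagItems("--reverse", action="store_true"),
])
_example_args = _example_converter.convert("--limit 5 --reverse")
# _example_args.limit == 5 and _example_args.reverse is True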
|
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Example script to train the Relational Memory Core.
This is a reduced size version of the "Learning To Execute" (LTE) task defined
in:
https://arxiv.org/abs/1806.01822
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
# Dependency imports
from absl import flags
import six
import sonnet as snt
from sonnet.examples import learn_to_execute
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_float("learning_rate", 1e-4, "Initial learning rate.")
flags.DEFINE_float("min_learning_rate", 8e-5, "Minimum learning rate.")
flags.DEFINE_integer("batch_size", 1600, "Batch size.")
flags.DEFINE_integer("head_size", 2048, "Total memory size for the RMC.")
flags.DEFINE_integer("num_heads", 1, "Attention heads for RMC.")
flags.DEFINE_integer("num_mems", 4, "Number of memories for RMC.")
flags.DEFINE_integer("num_blocks", 1, "Number of attention blocks for RMC.")
flags.DEFINE_string("gate_style", "unit", "Gating style for RMC.")
flags.DEFINE_integer("max_length", 5, "LTE max literal length.")
flags.DEFINE_integer("max_nest", 2, "LTE max nesting level.")
flags.DEFINE_integer("epochs", 1000000, "Total training epochs.")
flags.DEFINE_integer("log_stride", 500, "Iterations between reports.")
class SequenceModel(snt.AbstractModule):
"""Seq2Seq Model to process LTE sequence batches."""
def __init__(
self,
core,
target_size,
final_mlp,
name="sequence_model"):
super(SequenceModel, self).__init__(name=name)
self._core = core
self._target_size = target_size
self._final_mlp = final_mlp
def _build(
self, inputs, targets, input_sequence_length, output_sequence_length):
"""Dynamic unroll across input objects.
Args:
inputs: tensor (input_sequence_length x batch x feature_size). Encoder
sequence.
targets: tensor (output_sequence_length x batch x feature_size). Decoder
sequence.
input_sequence_length: tensor (batch). Size of each batched input
sequence.
output_sequence_length: tensor (batch). Size of each batched target
sequence.
Returns:
Tensor (batch x num_objects); logits indicating the reference objects.
"""
# Connect decoding steps.
batch_size = inputs.get_shape()[1]
initial_state = self._core.initial_state(batch_size, trainable=False)
_, state = tf.nn.dynamic_rnn(
cell=self._core,
inputs=inputs,
sequence_length=input_sequence_length,
time_major=True,
initial_state=initial_state
)
# Connect decoding steps.
zero_input = tf.zeros(shape=targets.get_shape())
output_sequence, _ = tf.nn.dynamic_rnn(
cell=self._core,
inputs=zero_input, # Non-autoregressive model. Zeroed input.
sequence_length=output_sequence_length,
initial_state=state,
time_major=True)
outputs = snt.BatchApply(self._final_mlp)(output_sequence)
logits = snt.BatchApply(snt.Linear(self._target_size))(outputs)
tf.logging.info("Connected seq2seq model.")
return logits
def build_and_train(iterations, log_stride, test=False):
"""Construct the data, model, loss and optimizer then train."""
# Test mode settings.
batch_size = 2 if test else FLAGS.batch_size
num_mems = 2 if test else FLAGS.num_mems
num_heads = 1 if test else FLAGS.num_heads
num_blocks = 1 if test else FLAGS.num_blocks
head_size = 4 if test else FLAGS.head_size
max_length = 3 if test else FLAGS.max_length
max_nest = 2 if test else FLAGS.max_nest
mlp_size = (20,) if test else (256, 256, 256, 256)
with tf.Graph().as_default():
t0 = time.time()
# Initialize the dataset.
lte_train = learn_to_execute.LearnToExecute(
batch_size, max_length, max_nest)
lte_test = learn_to_execute.LearnToExecute(
batch_size, max_length, max_nest, mode=learn_to_execute.Mode.TEST)
train_data_iter = lte_train.make_one_shot_iterator().get_next()
test_data_iter = lte_test.make_one_shot_iterator().get_next()
output_size = lte_train.state.vocab_size
# Create the model.
core = snt.RelationalMemory(
mem_slots=num_mems,
head_size=head_size,
num_heads=num_heads,
num_blocks=num_blocks,
gate_style=FLAGS.gate_style)
final_mlp = snt.nets.MLP(
output_sizes=mlp_size,
activate_final=True)
model = SequenceModel(
core=core,
target_size=output_size,
final_mlp=final_mlp)
tf.logging.info("Instantiated models ({:3f})".format(time.time() - t0))
# Define the loss & accuracy.
def loss_fn(inputs, targets, input_sequence_length, output_sequence_length):
"""Creates the loss and the exports."""
logits = model(
inputs, targets, input_sequence_length, output_sequence_length)
targets = tf.cast(targets, tf.int32)
sq_sz_out_max = targets.shape[0].value
# Create a mask to ignore accuracy on buffer characters.
sequence_sizes = tf.cast(output_sequence_length, tf.float32)
lengths_transposed = tf.expand_dims(sequence_sizes, 1)
range_row = tf.expand_dims(
tf.range(0, sq_sz_out_max, 1, dtype=tf.float32), 0)
mask = tf.cast(tf.transpose(tf.less(range_row, lengths_transposed)),
tf.float32)
# Compute token accuracy and solved.
correct = tf.equal(tf.argmax(logits, 2), tf.argmax(targets, 2))
solved = tf.reduce_all(tf.boolean_mask(correct, tf.squeeze(mask)), axis=0)
token_acc = tf.reduce_sum(tf.cast(correct, tf.float32) * mask)
token_acc /= tf.reduce_sum(sequence_sizes)
# Compute Loss.
mask = tf.cast(tf.tile(tf.expand_dims(mask, 2), (1, 1, logits.shape[2])),
tf.float32)
masked_logits = logits * mask
masked_target = tf.cast(targets, tf.float32) * mask
logits_flat = tf.reshape(masked_logits,
[sq_sz_out_max * batch_size, -1])
target_flat = tf.reshape(masked_target,
[sq_sz_out_max * batch_size, -1])
xent = tf.nn.softmax_cross_entropy_with_logits(logits=logits_flat,
labels=target_flat)
loss = tf.reduce_mean(xent)
return loss, token_acc, solved
# Get training step counter.
global_step = tf.train.get_or_create_global_step()
# Create the optimizer.
learning_rate_op = tf.reduce_max([
tf.train.exponential_decay(
FLAGS.learning_rate,
global_step,
decay_steps=FLAGS.epochs // 100,
decay_rate=0.9,
staircase=False),
FLAGS.min_learning_rate
])
optimizer = tf.train.AdamOptimizer(learning_rate_op)
# Compute loss, accuracy & the step op.
inputs, targets, _, input_lengths, output_lengths = train_data_iter
train_loss, train_acc, train_sol = loss_fn(
inputs, targets, input_lengths, output_lengths)
step_op = optimizer.minimize(train_loss, global_step=global_step)
inputs, targets, _, input_lengths, output_lengths = test_data_iter
_, test_acc, test_sol = loss_fn(
inputs, targets, input_lengths, output_lengths)
tf.logging.info("Created losses and optimizers ({:3f})".format(
time.time() - t0))
# Begin Training.
t0 = time.time()
tf.logging.info("Starting training ({:3f})".format(time.time() - t0))
with tf.train.SingularMonitoredSession() as sess:
for it in six.moves.range(iterations):
sess.run([step_op, learning_rate_op])
if it % log_stride == 0:
loss_v, train_acc_v, test_acc_v, train_sol_v, test_sol_v = sess.run([
train_loss, train_acc, test_acc, train_sol, test_sol])
elapsed = time.time() - t0
tf.logging.info(
"iter: {:2d}, train loss {:3f}; train acc {:3f}; test acc {:3f};"
" train solved {:3f}; test solved {:3f}; ({:3f})".format(
it, loss_v, train_acc_v, test_acc_v, train_sol_v, test_sol_v,
elapsed))
def main(unused_argv):
build_and_train(FLAGS.epochs, FLAGS.log_stride, test=True)
if __name__ == "__main__":
tf.app.run()
|
# MIT License
#
# Copyright (c) 2021, Bosch Rexroth AG
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import datalayer
from datalayer.provider_node import ProviderNodeCallbacks, NodeCallback
from datalayer.variant import Result, Variant
import flatbuffers
from comm.datalayer import Metadata, NodeClass, AllowedOperations, Reference
class MyProviderNode:
def __init__(self,
provider: datalayer.provider,
typeAddress: str,
address: str,
name: str,
unit: str,
description: str,
initialValue: Variant):
self.cbs = ProviderNodeCallbacks(
self.__on_create,
self.__on_remove,
self.__on_browse,
self.__on_read,
self.__on_write,
self.__on_metadata
)
self.providerNode = datalayer.provider_node.ProviderNode(self.cbs)
self.provider = provider
self.address = address
self.data = initialValue
self.metadata = Variant()
self.create_metadata(typeAddress, name, unit, description)
def register_node(self):
self.provider.register_node(self.address, self.providerNode)
def unregister_node(self):
self.provider.unregister_node(self.address, self.providerNode)
def set_value(self, value: Variant):
self.data = value
def create_metadata(self, typeAddress: str, name: str, unit: str, description: str):
# Create a FlatBufferBuilder instance with an initial size of 1024 bytes (grows automatically if needed)
builder = flatbuffers.Builder(1024)
# Serialize AllowedOperations data
AllowedOperations.AllowedOperationsStart(builder)
AllowedOperations.AllowedOperationsAddRead(builder, True)
AllowedOperations.AllowedOperationsAddWrite(builder, True)
AllowedOperations.AllowedOperationsAddCreate(builder, False)
AllowedOperations.AllowedOperationsAddDelete(builder, False)
operations = AllowedOperations.AllowedOperationsEnd(builder)
# Metadata description strings
descriptionBuilderString = builder.CreateString(description)
urlBuilderString = builder.CreateString("tbd")
displayNameString = builder.CreateString(name)
unitString = builder.CreateString(unit)
# Store string parameter into builder
readTypeBuilderString = builder.CreateString("readType")
writeTypeBuilderString = builder.CreateString("writeType")
#createTypeBuilderString = builder.CreateString("createType")
targetAddressBuilderString = builder.CreateString(typeAddress)
# Serialize Reference data (for read operation)
Reference.ReferenceStart(builder)
Reference.ReferenceAddType(builder, readTypeBuilderString)
Reference.ReferenceAddTargetAddress(
builder, targetAddressBuilderString)
reference_read = Reference.ReferenceEnd(builder)
# Serialize Reference data (for write operation)
Reference.ReferenceStart(builder)
Reference.ReferenceAddType(builder, writeTypeBuilderString)
Reference.ReferenceAddTargetAddress(
builder, targetAddressBuilderString)
reference_write = Reference.ReferenceEnd(builder)
# Create FlatBuffer vector and prepend reference data. Note: Since we prepend the data, prepend them in reverse order.
Metadata.MetadataStartReferencesVector(builder, 2)
# builder.PrependSOffsetTRelative(reference_create)
builder.PrependSOffsetTRelative(reference_write)
builder.PrependSOffsetTRelative(reference_read)
references = builder.EndVector(2)
# Serialize Metadata data
Metadata.MetadataStart(builder)
Metadata.MetadataAddNodeClass(builder, NodeClass.NodeClass.Variable)
Metadata.MetadataAddOperations(builder, operations)
Metadata.MetadataAddDescription(builder, descriptionBuilderString)
Metadata.MetadataAddDescriptionUrl(builder, urlBuilderString)
Metadata.MetadataAddDisplayName(builder, displayNameString)
Metadata.MetadataAddUnit(builder, unitString)
# Metadata reference table
Metadata.MetadataAddReferences(builder, references)
metadata = Metadata.MetadataEnd(builder)
# Closing operation
builder.Finish(metadata)
result = self.metadata.set_flatbuffers(builder.Output())
if result != datalayer.variant.Result.OK:
print("ERROR creating metadata failed with: ", result)
def __on_create(self, userdata: datalayer.clib.userData_c_void_p, address: str, data: Variant, cb: NodeCallback):
print("__on_create()", "address:", address, "userdata:", userdata)
cb(Result.OK, data)
def __on_remove(self, userdata: datalayer.clib.userData_c_void_p, address: str, cb: NodeCallback):
print("__on_remove()", "address:", address, "userdata:", userdata)
cb(Result.UNSUPPORTED, None)
def __on_browse(self, userdata: datalayer.clib.userData_c_void_p, address: str, cb: NodeCallback):
print("__on_browse()", "address:", address, "userdata:", userdata)
new_data = Variant()
new_data.set_array_string([])
cb(Result.OK, new_data)
def __on_read(self, userdata: datalayer.clib.userData_c_void_p, address: str, data: Variant, cb: NodeCallback):
print("__on_read()", "address:", address,
"data:", self.data, "userdata:", userdata)
new_data = self.data
cb(Result.OK, new_data)
def __on_write(self, userdata: datalayer.clib.userData_c_void_p, address: str, data: Variant, cb: NodeCallback):
print("__on_write()", "address:", address,
"data:", data, "userdata:", userdata)
if self.data.get_type() != data.get_type():
cb(Result.TYPE_MISMATCH, None)
return
result, self.data = data.clone()
cb(Result.OK, self.data)
def __on_metadata(self, userdata: datalayer.clib.userData_c_void_p, address: str, cb: NodeCallback):
print("__on_metadata()", "address:", address,
"metadata:", self.metadata, "userdata:", userdata)
cb(Result.OK, self.metadata)
|
from workers.base import contextualProcess
from services.config import get_cameras, get_stream_dir, get_capture_command
from time import sleep
from pathlib import Path
from subprocess import Popen, DEVNULL
class capturerProcess(contextualProcess):
def __init__(self, context):
super().__init__(context, 'Capturer')
self.streams = self.context['streams']
self.configLock = self.context['configLock']
self.processes = {}
def make_dir(self):
try:
self.configLock.acquire()
stdir = get_stream_dir()
Path(stdir).mkdir(parents=True, exist_ok=True)
finally:
self.configLock.release()
return stdir
def load_streams(self):
# Fetch the stream list from settings
streams = self.streams
try:
self.configLock.acquire()
status = get_cameras()
st_dir = get_stream_dir()
for i in status:
i['capture_cmd'] = get_capture_command(
directory=st_dir,
camera_id=i['id'],
url=i['url']
)
i['status'] = 'pending'
status = {i['id']: i for i in status}
streams.status = status
finally:
self.configLock.release()
def start_stream(self, k):
self.logger.info('Starting capture for {}'.format(k))
streams_ = self.streams.status
cmd_ = streams_[k]['capture_cmd']
self.logger.debug('Executing {}'.format(cmd_))
self.processes[k] = Popen(cmd_, stderr=DEVNULL, stdout=DEVNULL)
# self.processes[k] = Popen(cmd_)
streams_[k]['status'] = 'running'
self.streams.status = streams_
def stop_stream(self, k):
self.logger.info('Terminating capturer {}'.format(k))
streams_ = self.streams.status
if k in self.processes and self.processes[k].poll() is None:
self.processes[k].terminate()
streams_[k]['status'] = 'terminated'
self.streams.status = streams_
def run(self):
self.logger.info('Started.')
self.logger.debug('Creating stream directory.')
self.logger.debug('Stream dir created: {}'.format(self.make_dir()))
self.logger.info('Loading camera streams.')
self.load_streams()
self.logger.debug(
"Loaded {} streams: {}".format(
len(self.streams.status),
'; '.join(i for i in self.streams.status)
)
)
self.logger.info('Starting to spawn capturers.')
try:
while True:
sleep(1)
streams_ = self.streams.status
for k, v in streams_.items():
if v['status'] == 'running':
# Poll the stream
if self.processes.get(k) is None or self.processes[k].poll() is not None:
# Process has terminated
v['status'] = 'dead'
exitcode = self.processes[k].poll()
if exitcode == 0:
self.logger.error('Capturer for {} terminated normally.'.format(k))
else:
self.logger.error('Capturer for {} abnormal exit with code {}.'.format(k, exitcode))
self.streams.status = streams_
else:
# Bring up the process
self.start_stream(k)
except (KeyboardInterrupt, SystemExit):
pass
finally:
for k in self.streams.status:
self.stop_stream(k)
|
from src.gamelogic.board import Board, Color
class Game:
def __init__(self, size):
self.current_color = Color.BLACK
self.history = [Board(size)]
def place_token(self, position, color):
if self.current_color != color:
return False
next_board = self.current_board.set_color(position, color)
removed_amount = 0
for neighbour in next_board.get_neighbours(position):
if not next_board.is_in_board(neighbour):
continue
removal_result = next_board.remove_tokens(neighbour)
next_board = removal_result.board
removed_amount += removal_result.amount
if self.is_turn_valid(next_board, position, color):
self.history.append(next_board)
self.turn_made()
return True
return False
def turn_made(self):
if self.current_color == Color.BLACK:
self.current_color = Color.WHITE
else:
self.current_color = Color.BLACK
@staticmethod
def from_board(board):
game = Game(board.size)
game.history[0] = board
return game
@property
def current_board(self):
return self.history[-1]
def is_ko(self, board):
if len(self.history) < 2:
return False
second_last_board = self.history[-2]
return second_last_board == board
def is_turn_valid(self, next_board, position, color):
if self.current_board.get_color(position) != Color.NEUTRAL:
return False
if self.is_ko(next_board):
return False
if not next_board.has_freedom(position):
return False
return True
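# Minimal usage sketch (illustrative, not part of the original module): alternate
# turns on a small board. Board and Color come from src.gamelogic.board as
# imported above; the 9x9 size and the tuple positions are assumptions.
if __name__ == "__main__":
    game = Game(9)
    game.place_token((2, 2), Color.BLACK)   # accepted: black always moves first
    game.place_token((4, 4), Color.BLACK)   # rejected: it is white's turn now
    game.place_token((4, 4), Color.WHITE)   # accepted: white's turn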
|
for i in range(1, 101):
if i ** 0.5 % 1:
state = 'close'
else:
state = 'open'
print("Door {} {}".format(i, state))
|
from django.db.models import F
from django.db.models.query import QuerySet
from django.utils.translation import get_language
from .compat import Manager
class TranslatableFilterMixin(object):
def translate(self, language=None):
if language is None:
language = get_language()
return self.filter(language=language)
def translations(self, translation_set):
if isinstance(translation_set, self.model):
translation_set = translation_set.translation_set_id
return self.filter(translation_set=translation_set)
def translation_set_parents(self):
"""
Only return objects which are translation set parents, i.e. where
``self.translation_set == self``.
"""
return self.filter(translation_set__pk=F('pk'))
class TranslatableQuerySet(TranslatableFilterMixin, QuerySet):
pass
TranslatableManager = Manager.from_queryset(TranslatableQuerySet)
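# Minimal usage sketch (assumption, not part of this module): attach the manager
# to a translatable model so translate()/translations() can be chained on its
# querysets. The Article model and its fields are illustrative only.
#
# class Article(models.Model):
#     language = models.CharField(max_length=10)
#     translation_set = models.ForeignKey('self', null=True, on_delete=models.CASCADE)
#     objects = TranslatableManager()
#
# Article.objects.translate('de')            # rows in the "de" (or active) language
# Article.objects.translation_set_parents()  # rows where translation_set == self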
|
#! /usr/bin/env python
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Mylog module
Wraps the standard Python logging module and adds some convenience to its use,
Like being able to set GMT timestamps
"""
import sys
import time
import logging
def logg(label, lfile=None, llevel='WARN', fmt=None, gmt=False, cnsl=None, sh=None):
r"""
Constructor for logging module
string:label set the name of the logging provider
string:lfile pathname of file to log to, default is no logging to a
file
string:llevel string of the loglevel
string:fmt custom format string, default will use built-in
bool:gmt set to True to log timestamps in GMT
and reflect it in the logs
bool:cnsl set to True if you want to log to console
int:sh file descriptor for log stream defaults to sys.stderr
returns: a singleton object
************************* doctest *************************************
# when invoking mylog() set sh=sys.stdout this is needed for doctest
>>> t = logg("test logger", cnsl=True, sh=sys.stdout)
>>> print t # doctest: +ELLIPSIS
<...logging.Logger object at 0x...>
>>> t.warn("hello world!") #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
2... WARNING :test logger [...] hello world!
>>> t.error("should see this")#doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
2... ERROR :test logger [...] should see this
>>> t.info("should not see this")
>>> t.debug("0x1337") # or this
>>>
***********************************************************************
"""
log = logging.getLogger(label)
n_level = getattr(logging, llevel.upper(), None)
if not isinstance(n_level, int):
raise ValueError('Invalid log level: %s' % llevel)
log.setLevel(n_level)
if fmt :
formatter = logging.Formatter(fmt)
elif gmt :
formatter = logging.Formatter(
'%(asctime)s:Z %(levelname)s: %(name)s:%(lineno)d] %(message)s')
else :
formatter = logging.Formatter(
'%(asctime)s %(levelname)s: %(name)s:%(lineno)d] %(message)s')
if gmt:
logging.Formatter.converter = time.gmtime
try:
if lfile :
fh = logging.FileHandler(lfile)
fh.setFormatter(formatter)
log.addHandler(fh)
except IOError :
print("Can't open location %s" % fh)
if cnsl :
if sh :
ch = logging.StreamHandler(sh)
else:
ch = logging.StreamHandler()
ch.setFormatter(formatter)
log.addHandler(ch)
return log
def printlog(logger, msg, level="WARN"):
print( '%s:%s' % (level.upper(), msg))
if level.upper() == 'INFO':
logger.info(msg)
elif level.upper() == 'WARN' or level.upper() == 'WARNING':
logger.warning(msg)
elif level.upper() == 'ERROR':
logger.error(msg)
elif level.upper() == 'CRITICAL':
logger.critical(msg)
elif level.upper() == 'DEBUG':
logger.debug(msg)
def main():
logger = logg("Test Logger",llevel='INFO', cnsl=True,sh=sys.stdout)
logger.info("Hello World")
logger.warn("Danger Will Robinson")
logger.critical("Time to Die")
logger.debug("0x1337")
# import doctest
# doctest.testmod()
# return 0
if __name__ == "__main__" :
main()
sys.exit(0)
|
class Config():
SECRET_KEY = "7c9270027a164800f09e52vb828q1384523667f"
#change if not using gmail
MAIL_SERVER = "smtp.gmail.com"
MAIL_PORT = 465
MAIL_USE_SSL = True
#mail port info ends
#add email and password
MAIL_USERNAME = "pariksha.contact@gmail.com"
MAIL_PASSWORD = "pycavmail"
SQLALCHEMY_DATABASE_URI = "sqlite:///site.db"
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted News: A NNTP-based news service.
"""
|
import shutil
from easydict import EasyDict
from ding.league import BaseLeague, ActivePlayer
class DemoLeague(BaseLeague):
# override
def _get_job_info(self, player: ActivePlayer, eval_flag: bool = False) -> dict:
assert isinstance(player, ActivePlayer), player.__class__
player_job_info = EasyDict(player.get_job(eval_flag))
return {
'agent_num': 2,
'launch_player': player.player_id,
'player_id': [player.player_id, player_job_info.opponent.player_id],
'checkpoint_path': [player.checkpoint_path, player_job_info.opponent.checkpoint_path],
'player_active_flag': [isinstance(p, ActivePlayer) for p in [player, player_job_info.opponent]],
}
# override
def _mutate_player(self, player: ActivePlayer):
pass
# override
def _update_player(self, player: ActivePlayer, player_info: dict) -> None:
assert isinstance(player, ActivePlayer)
if 'learner_step' in player_info:
player.total_agent_step = player_info['learner_step']
# override
@staticmethod
def save_checkpoint(src_checkpoint_path: str, dst_checkpoint_path: str) -> None:
shutil.copy(src_checkpoint_path, dst_checkpoint_path)
|
"""
Copyright (c) 2021, Electric Power Research Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of DER-VET nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
CAESTech.py
This Python class contains methods and attributes specific for technology analysis within StorageVet.
"""
from storagevet.Technology.EnergyStorage import EnergyStorage
import cvxpy as cvx
import pandas as pd
import numpy as np
import storagevet.Library as Lib
class CAES(EnergyStorage):
""" CAES class that inherits from Storage.
"""
def __init__(self, params):
""" Initializes a CAES class that inherits from the technology class.
It sets the type and physical constraints of the technology.
Args:
params (dict): params dictionary from dataframe for one case
"""
# create generic technology object
super().__init__(params)
# add CAES specific attributes
self.tag = 'CAES'
self.heat_rate_high = params['heat_rate_high']
self.fuel_price = params['fuel_price'] # $/MillionBTU per month
def grow_drop_data(self, years, frequency, load_growth):
""" Adds data by growing the given data OR drops any extra data that might have slipped in.
Update variables that hold timeseries data after adding growth data. This method should be called after
add_growth_data and before the optimization is run.
Args:
years (List): list of years for which analysis will occur on
frequency (str): period frequency of the timeseries data
load_growth (float): percent/ decimal value of the growth rate of loads in this simulation
"""
self.fuel_price = Lib.fill_extra_data(self.fuel_price, years, 0, frequency)
self.fuel_price = Lib.drop_extra_data(self.fuel_price, years)
def objective_function(self, mask, annuity_scalar=1):
""" Generates the objective costs for fuel cost and O&M cost
Args:
mask (Series): Series of booleans used, the same length as case.power_kw
annuity_scalar (float): a scalar value to be multiplied by any yearly cost or benefit that helps capture the cost/benefit over
the entire project lifetime (only to be set iff sizing)
Returns:
costs (Dict): Dict of objective costs
"""
# get generic Tech objective costs
costs = super().objective_function(mask, annuity_scalar)
# add fuel cost expression
fuel_exp = cvx.sum(cvx.multiply(self.fuel_price[mask].values, self.variables_dict['dis'] + self.variables_dict['udis'])
* self.heat_rate_high * self.dt * 1e-6 * annuity_scalar)
costs.update({self.name + 'CAES_fuel_cost': fuel_exp})
return costs
def calc_operating_cost(self, energy_rate, fuel_rate):
""" Calculates operating cost in dollars per MWh_out
Args:
energy_rate (float): energy rate [=] $/kWh
fuel_rate (float): natural gas price rate [=] $/MillionBTU
Returns:
Value of Operating Cost [=] $/MWh_out
Note: not used
"""
fuel_cost = fuel_rate*self.heat_rate_high*1e3/1e6
om = self.get_fixed_om()
energy = energy_rate*1e3/self.rte
return fuel_cost + om + energy
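# Worked example (illustrative numbers only): with fuel_rate = $3/MillionBTU and
# heat_rate_high = 4500 BTU/kWh, fuel_cost = 3 * 4500 * 1e3 / 1e6 = $13.5/MWh_out;
# the fixed O&M term and the charging-energy term (energy_rate * 1e3 / rte) are
# then added on top to give the total operating cost per MWh_out.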
def timeseries_report(self):
""" Summaries the optimization results for this DER.
Returns: A timeseries dataframe with user-friendly column headers that summarize the results
pertaining to this instance
"""
tech_id = self.unique_tech_id()
results = super().timeseries_report()
results[tech_id + ' Natural Gas Price ($/MillionBTU)'] = self.fuel_price
return results
def proforma_report(self, apply_inflation_rate_func, fill_forward_func, results):
""" Calculates the proforma that corresponds to participation in this value stream
Args:
apply_inflation_rate_func:
fill_forward_func:
results (pd.DataFrame):
Returns: A DataFrame with each year in opt_year as the index and
the corresponding value this stream provided.
"""
pro_forma = super().proforma_report(apply_inflation_rate_func, fill_forward_func, results)
fuel_col_name = self.unique_tech_id() + ' Natural Gas Costs'
analysis_years = self.variables_df.index.year.unique()
fuel_costs_df = pd.DataFrame()
for year in analysis_years:
fuel_price_sub = self.fuel_price.loc[self.fuel_price.index.year == year]
fuel_costs_df.loc[pd.Period(year=year, freq='y'), fuel_col_name] = -np.sum(fuel_price_sub*self.heat_rate_high*1e3/1e6)
# fill forward
fuel_costs_df = fill_forward_func(fuel_costs_df, None)
# append to the super class's proforma
pro_forma = pd.concat([pro_forma, fuel_costs_df], axis=1)
return pro_forma
|
from sqlalchemy import String
class StringTypes:
SHORT_STRING = String(16)
MEDIUM_STRING = String(64)
LONG_STRING = String(255)
LONG_LONG_STRING = String(4096)
I18N_KEY = String(255)
LOCALE_CODE = String(5)
PASSWORD_HASH = String(128)
class QueryArgumentError(Exception):
def __init__(self, message, code, *args):
""" represents an error in the query arguments of a request """
super().__init__(message, code, *args)
self.message = message
self.code = code
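# Minimal usage sketch (illustrative, not part of this module): the String presets
# above are meant to be dropped into SQLAlchemy column definitions, and
# QueryArgumentError carries a message plus an HTTP-style code for request handlers.
# declarative_base() from sqlalchemy.orm assumes SQLAlchemy 1.4+.
from sqlalchemy import Column, Integer
from sqlalchemy.orm import declarative_base

_ExampleBase = declarative_base()

class _ExampleAccount(_ExampleBase):
    """Illustrative model showing the StringTypes presets in column definitions."""
    __tablename__ = "example_accounts"
    id = Column(Integer, primary_key=True)
    username = Column(StringTypes.MEDIUM_STRING, nullable=False)
    password_hash = Column(StringTypes.PASSWORD_HASH, nullable=False)

# raise QueryArgumentError("'limit' must be a positive integer", 422)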
|
'''
An ugly number is a positive integer whose prime factors are limited to 2, 3, and 5.
Given an integer n, return the nth ugly number.
Example 1:
Input: n = 10
Output: 12
Explanation: [1, 2, 3, 4, 5, 6, 8, 9, 10, 12] is the sequence of the first 10 ugly numbers.
Example 2:
Input: n = 1
Output: 1
Explanation: 1 has no prime factors, therefore all of its prime factors are limited to 2, 3, and 5.
Constraints:
1 <= n <= 1690
'''
import heapq
class Solution:
def nthUglyNumber(self, n: int) -> int:
if n == 0:
return None
result = []
duplicate = set()
final = []
heapq.heappush(result,1)
count = 0
while count < n:
minvalue = heapq.heappop(result)
final.append(minvalue)
count += 1
for i in [2, 3, 5]:
tmp = i * minvalue
if tmp not in duplicate:
heapq.heappush(result, tmp)
duplicate.add(tmp)
return final[n - 1]
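# Quick self-check (illustrative, not part of the original solution): reproduce
# the docstring examples.
if __name__ == "__main__":
    assert Solution().nthUglyNumber(1) == 1
    assert Solution().nthUglyNumber(10) == 12
    print([Solution().nthUglyNumber(k) for k in range(1, 11)])  # [1, 2, 3, 4, 5, 6, 8, 9, 10, 12]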
|
import numpy as np
scoreboard = np.array(range(0, 441))
team1 = {
'char1': [
1, 2, 3,
50, 49, 48,
53, 54, 55,
102, 101, 100,
105, 106, 107,
],
'char2': [
5, 6, 7,
46, 45, 44,
57, 58, 59,
98, 97, 96,
109, 110, 111
],
'char3': [
9, 10, 11,
42, 41, 40,
61, 62, 63,
94, 93, 92,
113, 114, 115
]
}
team2 = {
'char1': [
14, 15, 16,
37, 36, 35,
66, 67, 68,
89, 88, 87,
118, 119, 120
],
'char2': [
18, 19, 20,
33, 32, 31,
70, 71, 72,
85, 84, 83,
122, 123, 124
],
'char3': [
22, 23, 24,
29, 28, 27,
74, 75, 76,
81, 80, 79,
126, 127, 128
]
}
score1 = {
'dig1_char1': [
161, 162, 163,
202, 201, 200,
213, 214, 215,
254, 253, 252,
265, 266, 267
],
'dig1_char2': [
157, 158, 159,
206, 205, 204,
209, 210, 211,
258, 257, 256,
261, 262, 263
],
'dig1_char3': [
165, 166, 167,
198, 197, 196,
217, 218, 219,
250, 249, 248,
269, 270, 271
],
'dig2_char1': [
159, 160, 161,
204, 203, 202,
211, 212, 213,
256, 255, 254,
263, 264, 265
],
'dig2_char2': [
163, 164, 165,
200, 199, 198,
215, 216, 217,
252, 251, 250,
267, 268, 269
],
'dig2_char3': [
157, 158,
206, 205,
209, 210,
258, 257,
261, 262,
166, 167,
197, 196,
218, 219,
249, 248,
270, 271
],
'dig3_char1': [
157, 158, 159,
206, 205, 204,
209, 210, 211,
258, 257, 256,
261, 262, 263
],
'dig3_char2': [
161, 162, 163,
202, 201, 200,
213, 214, 215,
254, 253, 252,
265, 266, 267
],
'dig3_char3': [
165, 166, 167,
198, 197, 196,
217, 218, 219,
250, 249, 248,
269, 270, 271
]
}
score2 = {
'dig1_char1': [
174, 175, 176,
189, 188, 187,
226, 227, 228,
241, 240, 239,
278, 279, 280
],
'dig1_char2': [
170, 171, 172,
193, 192, 191,
222, 223, 224,
245, 244, 243,
274, 275, 276
],
'dig1_char3': [
178, 179, 180,
185, 184, 183,
230, 231, 232,
237, 236, 235,
282, 283, 284
],
'dig2_char1': [
172, 173, 174,
191, 190, 189,
224, 225, 226,
243, 242, 241,
276, 277, 278
],
'dig2_char2': [
176, 177, 178,
187, 186, 185,
228, 229, 230,
239, 238, 237,
280, 281, 282
],
'dig2_char3': [
170, 171,
193, 192,
222, 223,
245, 244,
274, 275,
179, 180,
184, 183,
231, 232,
236, 235,
283, 284
],
'dig3_char1': [
170, 171, 172,
193, 192, 191,
222, 223, 224,
245, 244, 243,
274, 275, 276
],
'dig3_char2': [
174, 175, 176,
189, 188, 187,
226, 227, 228,
241, 240, 239,
278, 279, 280
],
'dig3_char3': [
178, 179, 180,
185, 184, 183,
230, 231, 232,
237, 236, 235,
282, 283, 284
]
}
time_clock = {
'clock_dig1': [
317, 318, 319,
358, 357, 356,
369, 370, 371,
410, 409, 408,
421, 422, 423
],
'clock_dig2': [
321, 322, 323,
354, 353, 352,
373, 374, 375,
406, 405, 404,
425, 426, 427
],
'clock_dig3': [
326, 327, 328,
349, 348, 347,
378, 379, 380,
401, 400, 399,
430, 431, 432
],
'clock_dig4': [
330, 331, 332,
345, 344, 343,
382, 383, 384,
397, 396, 395,
434, 435, 436
]
}
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.postgres.operations import HStoreExtension
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('raster_aggregation', '0002_aggregationlayer_modified'),
]
operations = [
HStoreExtension(),
]
|
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (C) 2017-2020, SCANOSS Ltd. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import hashlib
def parse_diff(src):
"""
Parse a commit diff.
This function parses a diff string and generates an output dictionary containing as keys the filenames in the changeset.
Each value is an array with the strings representing additions (starting with a '+').
There are only entries for changesets with additions, as these are the only
ones that we are interested in.
Parameters
----------
src : str
The contents of the diff in raw format.
"""
output = {}
currentfile = ""
currentlines = []
all_lines = ""
for line in src.splitlines():
if line.startswith("+++"):
# We are only interested in additions
if currentfile and currentlines:
output[currentfile] = currentlines
currentlines = []
# Typically a file line starts with "+++ b/...."
            currentfile = line[6:] if line.startswith('+++ b/') else line[4:]
# Other lines starting with '+' are additions
elif line.startswith('+'):
currentlines.append(line[1:])
all_lines += line[1:]
# Wrap
if currentfile and currentlines:
output[currentfile] = currentlines
return output, hashlib.md5(all_lines.encode('utf-8')).hexdigest()
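# A minimal usage sketch of parse_diff (the diff text below is illustrative,
# not taken from the original project).
if __name__ == "__main__":
    sample_diff = (
        "--- a/hello.py\n"
        "+++ b/hello.py\n"
        "@@ -1,1 +1,2 @@\n"
        " print('hi')\n"
        "+print('added line')\n"
    )
    additions, digest = parse_diff(sample_diff)
    print(additions)  # expected: {'hello.py': ["print('added line')"]}
    print(digest)     # MD5 hex digest of the concatenated added lines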
|
# Generated by Django 2.1.1 on 2018-11-13 02:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('api', '0004_points_round'),
]
operations = [
migrations.CreateModel(
name='Club',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=512)),
],
),
migrations.CreateModel(
name='Judge',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=512)),
('clubID', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Club')),
],
),
migrations.CreateModel(
name='JudgePoints',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('points', models.FloatField()),
('judgeID', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Judge')),
],
),
migrations.CreateModel(
name='MatchUp',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('judgeID', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Judge')),
],
),
migrations.CreateModel(
name='Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=512)),
('clubID', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Club')),
],
),
migrations.CreateModel(
name='MemberPoints',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('StylePoints', models.FloatField()),
('ContentPoints', models.FloatField()),
('StratergyPoints', models.FloatField()),
('memberID', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Member')),
],
),
migrations.RemoveField(
model_name='round',
name='opposition',
),
migrations.RemoveField(
model_name='round',
name='panelist',
),
migrations.RemoveField(
model_name='round',
name='points',
),
migrations.RemoveField(
model_name='round',
name='proposition',
),
migrations.RemoveField(
model_name='team',
name='member1',
),
migrations.RemoveField(
model_name='team',
name='member2',
),
migrations.RemoveField(
model_name='team',
name='member3',
),
migrations.AddField(
model_name='round',
name='statement',
field=models.CharField(default='default', max_length=4096),
preserve_default=False,
),
migrations.AlterField(
model_name='round',
name='chair',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Judge'),
),
migrations.AlterField(
model_name='round',
name='decision',
field=models.CharField(choices=[('Split', 'Split'), ('Unaminous', 'Unaminous')], default='Split', max_length=20),
),
migrations.AlterField(
model_name='round',
name='round',
field=models.IntegerField(),
),
migrations.AlterField(
model_name='team',
name='clubName',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Club'),
),
migrations.DeleteModel(
name='Points',
),
migrations.AddField(
model_name='memberpoints',
name='roundID',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Round'),
),
migrations.AddField(
model_name='member',
name='teamID',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Team'),
),
migrations.AddField(
model_name='matchup',
name='oppID',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='opposition', to='api.Team'),
),
migrations.AddField(
model_name='matchup',
name='propID',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='proposition', to='api.Team'),
),
migrations.AddField(
model_name='matchup',
name='roundID',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Round'),
),
migrations.AddField(
model_name='judgepoints',
name='roundID',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Round'),
),
migrations.AddField(
model_name='judge',
name='teamID',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Team'),
),
]
|
# Written By - Shaswat Lenka
# DATE: 22nd October 2019
# Code Review Requested.
from scrapers.helpers import Webrequests
from bs4 import BeautifulSoup
import csv
# Get the drug name from a drug database
# Get all the discussions regarding the drug from medications.com
# Preprocess and save the same to disc
# testing for a single drug
# TODO: write a function to get the drug name dynamically from a source
def get_all_urls(html):
"""
    Gets the URLs of all the comments on a drug page.
    @params:
    html: BeautifulSoup object
    @returns: list() of comment URLs
"""
urls = list()
for title in html.find_all("span", class_="post-title"):
urls.append(title.a['href'])
return urls
def get_text_from_url(url):
"""get the text from a given comment url
    @params: url - String (assumed to be in the same format as the 'urls' returned by get_all_urls)
@returns: String - text corpus
"""
full_url = "http://medications.com" + url
w = Webrequests()
raw_html = w.simple_get(full_url)
html = BeautifulSoup(raw_html, 'html.parser')
first_content = html.find("span", class_="first_content")
if first_content is not None:
first_content_text = first_content.p.text
else:
return None
more_content = html.find("span", class_="more_content")
if more_content is not None:
more_content_text = more_content.p.text
else:
return None
return first_content_text + more_content_text
def get_comments(drugname):
"""Get the discussions and comments on medications.com about the given drug
@params:
drugname - String representing the drug name
@returns:
list() of all text based comments on the given drug
"""
w = Webrequests()
drug = drugname
url = "http://medications.com/" + drug
raw_html = w.simple_get(url)
text_corpus = list()
if raw_html is not None:
html = BeautifulSoup(raw_html, 'html.parser')
        # list of all URLs of the post titles for the given drug
urls = get_all_urls(html)
for url in urls:
text = get_text_from_url(url)
if text is not None:
text_corpus.append(text)
else:
continue
else:
# raise an exception if we failed to get any data from url
raise Exception("Error retrieving contents from {}".format(url))
return text_corpus
def save_to_disk(text_corpus, drugname):
"""save the scrapped text to csv
@params:
"""
with open("/Users/jarvis/Desktop/CODE/sadrat/datasets/medications_dot_com_data.csv", 'a') as file:
for i in range(len(text_corpus)):
row = [i, drugname, text_corpus[i]]
writer = csv.writer(file)
writer.writerow(row)
        file.close()
# Driver Code
druglist = ["lisinopril", "avelox", "prednisone", "cipro", "floxin", "elavil", "norvasc", "cozaar", "hyzaar", "femara",
"methylpred-dp","aricept", "versed", "questran", "welchol", "venofer", "avalide", "topamax", "yaz", "geodon"
, "warfarin-sodium"]
for drug in druglist:
comments = get_comments(drug)
save_to_disk(comments, drug)
|
from fastapi import FastAPI
# custom modules
from models.algebra import array
app = FastAPI()
@app.get("/")
def read_root():
return {"msg": "Hello Universe !"}
@app.get("/array")
def get_array():
return {"result": array.get_random().tolist()}
|
"""
Script for building the example.
Usage:
python setup.py py2app
"""
from setuptools import setup
plist = dict(CFBundleName='FieldGraph')
setup(
name="FieldGraph",
app=["Main.py"],
setup_requires=["py2app"],
data_files=["English.lproj", 'CrossCursor.tiff', 'Map.png'],
options=dict(py2app=dict(plist=plist)),
)
|
# https://www.pygame.org/docs/ref/sprite.html
# https://www.pygame.org/docs/ref/rect.html
import pygame
import random
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
class Game_object(pygame.sprite.Sprite):
    # Constructor. Pass in the game, the sprite image,
    # and its (x, y) position
def __init__(self, game, image, position=(0, 0)):
# Call the parent class (Sprite) constructor
super().__init__()
self._game = game
        # Set the sprite image (passed in by the caller;
        # it could also be an image loaded from disk).
self.image = image
# Fetch the rectangle object that has the dimensions of the image
# Update the position of this object by setting the values of rect.x and rect.y
self.rect = self.image.get_rect(topleft=position)
class Ship(Game_object):
def update(self):
self.rect.x = self._game.m_position[0]
self.rect.y = self._game.m_position[1]
class Asteroid(Game_object):
def __init__(self, game, image, position=(0, 0)):
super().__init__(game, image, position)
class Game():
def __init__(self):
self._running = True
self._size = (1000, 1000)
self.screen = pygame.display.set_mode(self._size)
self._clock = pygame.time.Clock()
self._ship_image = pygame.image.load("ship.png")
self._user = Ship(self, self._ship_image)
self.m_position = (0, 0)
self._all = pygame.sprite.Group()
self._asteroids = pygame.sprite.Group()
self._user_group = pygame.sprite.GroupSingle()
self._user_group.add(self._user)
self._all.add(self._user)
self._asteroid_image = pygame.image.load("asteroid.png")
for x in range(0, 20):
size = self._asteroid_image.get_size()
position = (random.randint(0 + size[0], self._size[0] - size[0]),
random.randint(0 + size[1], self._size[1] - size[1]))
block = Asteroid(self, self._asteroid_image, position=position)
self._asteroids.add(block)
self._all.add(block)
def run(self):
# main loop
while self._running:
# fill screen
self.screen.fill(BLACK)
# handling events
for event in pygame.event.get():
if event.type == pygame.QUIT:
self._running = False
elif event.type == pygame.MOUSEMOTION:
self.m_position = pygame.mouse.get_pos()
# update sprite
self._user_group.update()
collisions = pygame.sprite.groupcollide(self._user_group, self._asteroids, False, False)
if collisions:
for x in collisions[self._user]:
if pygame.sprite.collide_mask(self._user, x):
x.kill()
self._all.draw(self.screen)
print(len(self._asteroids.sprites()))
            # cap the loop at 60 frames per second
self._clock.tick(60)
# clock.get_fps()
# update
pygame.display.flip()
if __name__ == "__main__":
pygame.init()
game = Game()
game.run()
pygame.quit()
|
"""
OAIPJsfopiasjd
"""
first_name = input("Enter your first name: ")
middle_name = input("Enter your middle name: ")
last_name = input("Enter your last name: ")
#Ahfojf
full_name = first_name + " " + middle_name + " " + last_name
print full_name
|
#!/usr/bin/env python
"""
train_SVM.py
VARPA, University of Coruna
Mondejar Guerra, Victor M.
15 Dec 2017
"""
import os
import csv
import gc
import cPickle as pickle
import time
from imblearn.over_sampling import SMOTE, ADASYN
from imblearn.combine import SMOTEENN, SMOTETomek
import collections
from sklearn import svm
import numpy as np
cpu_threads = 7
# http://contrib.scikit-learn.org/imbalanced-learn/stable/auto_examples/combine/plot_comparison_combine.html#sphx-glr-auto-examples-combine-plot-comparison-combine-py
# Perform the oversampling method over the descriptor data
def perform_oversampling(oversamp_method, db_path, oversamp_features_name, tr_features, tr_labels):
start = time.time()
oversamp_features_pickle_name = db_path + oversamp_features_name + '_' + oversamp_method + '.p'
print(oversamp_features_pickle_name)
if True:
print("Oversampling method:\t" + oversamp_method + " ...")
# 1 SMOTE
if oversamp_method == 'SMOTE':
#kind={'borderline1', 'borderline2', 'svm'}
svm_model = svm.SVC(C=0.001, kernel='rbf', degree=3, gamma='auto', decision_function_shape='ovo')
oversamp = SMOTE(ratio='auto', random_state=None, k_neighbors=5, m_neighbors=10, out_step=0.5, kind='svm', svm_estimator=svm_model, n_jobs=1)
            # TRY SMOTE WITH ANOTHER KIND
elif oversamp_method == 'SMOTE_regular_min':
oversamp = SMOTE(ratio='minority', random_state=None, k_neighbors=5, m_neighbors=10, out_step=0.5, kind='regular', svm_estimator=None, n_jobs=1)
elif oversamp_method == 'SMOTE_regular':
oversamp = SMOTE(ratio='auto', random_state=None, k_neighbors=5, m_neighbors=10, out_step=0.5, kind='regular', svm_estimator=None, n_jobs=1)
elif oversamp_method == 'SMOTE_border':
oversamp = SMOTE(ratio='auto', random_state=None, k_neighbors=5, m_neighbors=10, out_step=0.5, kind='borderline1', svm_estimator=None, n_jobs=1)
# 2 SMOTEENN
elif oversamp_method == 'SMOTEENN':
oversamp = SMOTEENN()
# 3 SMOTE TOMEK
# NOTE: http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.65.3904&rep=rep1&type=pdf
elif oversamp_method == 'SMOTETomek':
oversamp = SMOTETomek()
# 4 ADASYN
elif oversamp_method == 'ADASYN':
oversamp = ADASYN(ratio='auto', random_state=None, k=None, n_neighbors=5, n_jobs=cpu_threads)
tr_features_balanced, tr_labels_balanced = oversamp.fit_sample(tr_features, tr_labels)
# TODO Write data oversampled!
print("Writing oversampled data at: " + oversamp_features_pickle_name + " ...")
np.savetxt('mit_db/' + oversamp_features_name + '_DS1_labels.csv', tr_labels_balanced.astype(int), '%.0f')
f = open(oversamp_features_pickle_name, 'wb')
pickle.dump(tr_features_balanced, f, 2)
        f.close()
end = time.time()
count = collections.Counter(tr_labels_balanced)
print("Oversampling balance")
print(count)
print("Time required: " + str(format(end - start, '.2f')) + " sec" )
return tr_features_balanced, tr_labels_balanced
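# A minimal toy-data sketch of the oversampling step in isolation, using the same
# imbalanced-learn API as the function above (newer releases renamed fit_sample to
# fit_resample and dropped several keywords). The arrays below are illustrative only.
if __name__ == "__main__":
    X_demo = np.random.rand(100, 4)
    y_demo = np.array([0] * 90 + [1] * 10)  # imbalanced toy labels
    X_bal, y_bal = SMOTEENN().fit_sample(X_demo, y_demo)
    print(collections.Counter(y_bal))  # classes are (approximately) rebalanced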
|
# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Servus(CMakePackage):
"""Servus is a small C++ network utility library that provides a zeroconf
API, URI parsing and UUIDs."""
homepage = "https://github.com/HPBVIS/Servus"
git = "https://github.com/HBPVIS/Servus.git"
generator = 'Ninja'
version('develop', submodules=True)
version('1.5.2', tag='1.5.2', submodules=True, preferred=True)
depends_on('cmake@3.1:', type='build')
depends_on('ninja', type='build')
depends_on('boost', type='build')
|
# Import the libraries that will be needed
import matplotlib.pyplot as plt
import time
import numpy as np
import random
# Global variable to keep track of the iteration number (used when printing the results):
iteracion = 1
# Define the function to integrate:
def function(x):
return 2*x
# Function that uses lists and loops to compute the integral
def integra_mc_listas(fun, a ,b, num_puntos = 10000):
start = time.process_time()
axis_x = [random.uniform(a, b) for n in range(num_puntos)]
aux = [fun(n) for n in axis_x]
max_fun = max(aux)
maxs = [random.uniform(0, max_fun) for n in range(num_puntos)]
booleans = [ maxs[i] <= aux[i] for i in range(0,num_puntos)]
points = booleans.count(True)
print("Iteracion ", iteracion, " (listas) -> ", (points / num_puntos)*(b - a)* max_fun)
stop = time.process_time()
return 1000 * (stop - start)
# Function that uses vectorized (NumPy) operations to compute the integral
def integra_mc_vectorial(fun, a ,b, num_puntos = 10000):
start = time.process_time()
axis_x = np.random.uniform(a, b, num_puntos)
aux = np.apply_along_axis(fun, 0, axis_x)
max_fun = aux.max()
maxs = np.random.uniform(0, max_fun, num_puntos)
booleans = maxs <= aux
points = np.sum(booleans)
print("Iteracion ", iteracion, " (vectorial) -> ", (points / num_puntos) * (b - a) * max_fun)
stop = time.process_time()
return 1000 * (stop - start)
# Function that creates the figure requested in the assignment
def compara_tiempos(fun):
global iteracion
sizes = np.linspace(100,10000000,20)
sizes.sort()
times_vector=[]
times_list=[]
for size in sizes:
times_vector += [integra_mc_vectorial(fun, 1, 4, num_puntos=int(size))]
times_list += [integra_mc_listas(fun, 1, 4, num_puntos=int(size))]
iteracion += 1
plt.figure()
    plt.scatter(sizes, times_vector, c='red', label='Vectorized operations (NumPy)')
    plt.scatter(sizes, times_list, c='blue', label='Loops')
plt.legend()
plt.savefig('time.png')
print("Finished")
# Call the function to create the plot
compara_tiempos(function)
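# Quick sanity check of the hit-or-miss estimator used above: the integral is
# approximated as (hits / N) * (b - a) * max_f, i.e. the area of the bounding
# rectangle scaled by the fraction of random points that fall under the curve.
# For f(x) = 2x on [1, 4] the exact value is 4**2 - 1**2 = 15.
x_chk = np.random.uniform(1, 4, 100000)
y_chk = np.random.uniform(0, 8, 100000)  # max of f on [1, 4] is f(4) = 8
estimate = np.mean(y_chk <= 2 * x_chk) * (4 - 1) * 8
print("Sanity check (should be close to 15):", estimate)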
|
"""
Main file for training multi-camera pose
"""
import sys
import time
import traceback
import itertools as it
from joblib import Parallel, delayed
import cPickle as pickle
import optparse
from copy import deepcopy
import numpy as np
import scipy.misc as sm
import scipy.ndimage as nd
import Image
import cv2
import skimage
from skimage import color
from skimage.draw import line, circle
from skimage.color import rgb2gray,gray2rgb, rgb2lab
from skimage.feature import local_binary_pattern, match_template, peak_local_max
# from RGBDActionDatasets.dataset_readers.KinectPlayer import *KinectPlayer, display_help
from RGBDActionDatasets.dataset_readers.RealtimePlayer import RealtimePlayer
# from pyKinectTools.dataset_readers.KinectPlayer import KinectPlayer, display_help
# from pyKinectTools.dataset_readers.RealtimePlayer import RealtimePlayer
from RGBDActionDatasets.dataset_readers.MHADPlayer import MHADPlayer
# from pyKinectTools.dataset_readers.MHADPlayer import MHADPlayer
from pyKinectTools.utils.DepthUtils import *
from pyKinectTools.utils.SkeletonUtils import display_skeletons, transform_skels, kinect_to_msr_skel, msr_to_kinect_skel
from pyKinectTools.algs.GeodesicSkeleton import *
from pyKinectTools.algs.PoseTracking import *
from sklearn.mixture import GMM
from sklearn.cluster import KMeans
from IPython import embed
np.seterr(all='ignore')
# -------------------------MAIN------------------------------------------
def main(visualize=False, learn=False, actions=[1], subjects=[1], n_frames=220):
# search_joints=[0,2,4,5,7,10,13]
search_joints=range(14)
# interactive = True
interactive = False
save_results = False
if 0:
learn = False
# learn = learn
else:
learn = True
actions = [1]
subjects = [1]
# actions = range(1,10)
# subjects = range(1,9)
if 1:
dataset = 'MHAD'
cam = MHADPlayer(base_dir='/Users/colin/Data/BerkeleyMHAD/', kinect=1, actions=actions, subjects=subjects, reps=[1], get_depth=True, get_color=True, get_skeleton=True, fill_images=False)
elif 0:
dataset = 'JHU'
cam = KinectPlayer(base_dir='./', device=1, bg_subtraction=True, get_depth=True, get_color=True, get_skeleton=True, fill_images=False)
bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/CIRL_Background_A.tif')
# bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/Wall_Background_A.tif')
# bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/Office_Background_A.tif')
# bg = Image.open('/Users/colin/Data/WICU_May2013_C2/WICU_C2_Background.tif')
# cam = KinectPlayer(base_dir='./', device=2, bg_subtraction=True, get_depth=True, get_color=True, get_skeleton=True, fill_images=False)
# bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/CIRL_Background_B.tif')
cam.bgSubtraction.backgroundModel = np.array(bg.getdata()).reshape([240,320]).clip(0, 4500) - 000.
else:
# Realtime
dataset = 'RT'
cam = RealtimePlayer(device=0, edit=True, get_depth=True, get_color=True, get_skeleton=True)
# cam.set_bg_model('box', 2500)
tmp = cam.depthIm
tmp[tmp>4000] = 4000
cam.set_bg_model(bg_type='static', param=tmp)
# embed()
height, width = cam.depthIm.shape
skel_previous = None
face_detector = FaceDetector()
hand_detector = HandDetector(cam.depthIm.shape)
n_joints = 14
# gmm = GMM(n_components=n_joints)
kmeans = KMeans(n_clusters=n_joints, n_init=4, max_iter=100)
# Video writer
# video_writer = cv2.VideoWriter("/Users/colin/Desktop/test.avi", cv2.cv.CV_FOURCC('M','J','P','G'), 15, (640,480))
# Save Background model
# im = Image.fromarray(cam.depthIm.astype(np.int32), 'I')
# im.save("/Users/Colin/Desktop/k2.png")
# Setup pose database
append = True
# append = False
# pose_database = PoseDatabase("PoseDatabase.pkl", learn=learn, search_joints=[0,4,7,10,13], append=append)
# pose_database = PoseDatabase("PoseDatabase.pkl", learn=learn, search_joints=search_joints,
# append=append, scale=1.1, n_clusters=-1)#1000
pose_database = PoseDatabase("PoseDatabase.pkl", learn=learn, search_joints=search_joints,
append=append, scale=1.0, n_clusters=1500)
pose_prob = np.ones(len(pose_database.database), dtype=np.float)/len(pose_database.database)
# embed()
# Setup Tracking
skel_init, joint_size, constraint_links, features_joints,skel_parts, convert_to_kinect = get_14_joint_properties()
constraint_values = []
for c in constraint_links:
constraint_values += [np.linalg.norm(skel_init[c[0]]-skel_init[c[1]], 2)]
constraint_values = np.array(constraint_values)
skel_current = None#skel_init.copy()
skel_previous = None#skel_current.copy()
skel_previous_uv = None
# Evaluation
accuracy_all_db = []
accuracy_all_track = []
joint_accuracy_db = []
joint_accuracy_track = []
if not learn:
try:
results = pickle.load(open('Accuracy_Results.pkl'))
except:
results = { 'subject':[], 'action':[], 'accuracy_all':[],
'accuracy_mean':[], 'joints_all':[],
'joint_mean':[], 'joint_median':[]}
frame_count = 0
frame_rate = 10
if dataset == 'JHU':
cam.next(350)
# cam.next(700)
pass
frame_prev = 0
try:
# if 1:
while cam.next(frame_rate):# and frame_count < n_frames:
# Print every once in a while
if frame_count - frame_prev > 99:
print ""
print "Frame #{0:d}".format(frame_count)
frame_prev = frame_count
if dataset in ['MHAD', 'JHU']:
users = deepcopy(cam.users)
else:
users = deepcopy(cam.user_skels)
ground_truth = False
if dataset in ['RT','JHU']:
if len(users) > 0:
if not np.any(users[0][0] == -1):
ground_truth = True
users[0][:,1] *= -1
cam.users_uv_msr = [cam.camera_model.world2im(users[0], cam.depthIm.shape)]
else:
ground_truth = True
# Apply mask to image
mask = cam.get_person(200) == 1 # > 0
# cv2.imshow('bg',(mask*255).astype(np.uint8))
# cv2.imshow('bg',cam.colorIm)
# cv2.waitKey(1)
if type(mask)==bool or np.all(mask==False):
# print "No mask"
continue
# cv2.imshow('bg',cam.bgSubtraction.backgroundModel)
# cv2.imshow('bg',(mask*255).astype(np.uint8))
im_depth = cam.depthIm
# if dataset in ['RT']:
# cam.depthIm[cam.depthIm>2500] = 0
if cam.colorIm is not None:
im_color = cam.colorIm*mask[:,:,None]
cam.colorIm *= mask[:,:,None]
if ground_truth:
pose_truth = users[0]
pose_truth_uv = cam.users_uv_msr[0]
# Get bounding box around person
box = nd.find_objects(mask)[0]
d = 20
# Widen box
box = (slice(np.maximum(box[0].start-d, 0), \
np.minimum(box[0].stop+d, height-1)), \
slice(np.maximum(box[1].start-d, 0), \
np.minimum(box[1].stop+d, width-1)))
box_corner = [box[0].start,box[1].start]
mask_box = mask[box]
''' ---------- ----------------------------------- --------'''
''' ---------- ----------------------------------- --------'''
''' ---- Calculate Detectors ---- '''
# Face detection
# face_detector.run(im_color[box])
# Skin detection
# hand_markers = hand_detector.run(im_color[box], n_peaks=3)
hand_markers = []
# Calculate Geodesic Extrema
im_pos = cam.camera_model.im2PosIm(cam.depthIm*mask)[box]
# geodesic_markers = geodesic_extrema_MPI(im_pos, iterations=5, visualize=False)
if 1:
''' Find pts using kmeans or gmm '''
pts = im_pos[np.nonzero(im_pos)].reshape([-1,3])
# gmm.fit(pts)
kmeans.fit(pts)
# pts = cam.camera_model.world2im(gmm.means_)
pts = cam.camera_model.world2im(kmeans.cluster_centers_)
geodesic_markers = pts[:,:2] - box_corner
else:
''' Find pts using geodesic extrema '''
geodesic_markers = geodesic_extrema_MPI(im_pos, iterations=10, visualize=False)
if len(geodesic_markers) == 0:
print "No markers"
continue
# Concatenate markers
markers = list(geodesic_markers) + list(hand_markers) #+ list(lop_markers) + curve_markers
markers = np.array([list(x) for x in markers])
if np.any(markers==0):
print "Bad markers"
continue
''' ---- Database lookup ---- '''
time_t0 = time.time()
pts_mean = im_pos[(im_pos!=0)[:,:,2]].mean(0)
if learn and ground_truth:
# pose_uv = pose_truth_uv
if np.any(pose_truth_uv==0):
frame_count += frame_rate
if not interactive:
continue
# Markers can be just outside of bounds
markers = list(geodesic_markers) + hand_markers
markers = np.array([list(x) for x in markers])
# pose_database.update(pose_truth-pts_mean, keys=im_pos[markers[:,0],markers[:,1]]-pts_mean)
pose_database.update(pose_truth-pts_mean)
if not interactive:
continue
# else:
if 1:
# Normalize pose
pts = im_pos[markers[:,0], markers[:,1]]
pts = np.array([x for x in pts if x[0] != 0])
pts -= pts_mean
# Get closest pose
# Based on markers/raw positions
# poses_obs, pose_error = pose_database.query(pts, knn=1, return_error=True)
pose_error = pose_query(pts, np.array(pose_database.database), search_joints=search_joints)
# pose_error = query_error(pts, pose_database.trees, search_joints=search_joints)
# Based on markers/keys:
# pts = im_pos[markers[:,0], markers[:,1]] - pts_mean
# # poses, pose_error = pose_database.query_tree(pts, knn=len(pose_database.database), return_error=True)
# # poses, pose_error = pose_database.query_flann(pts, knn=len(pose_database.database), return_error=True)
# pose_error = np.sqrt(np.sum((pose_database.keys - pts.reshape([27]))**2, 1))
observation_variance = 100.
prob_obervation = np.exp(-pose_error / observation_variance) / np.sum(np.exp(-pose_error/observation_variance))
# subplot(2,2,1)
# plot(prob_obervation)
# subplot(2,2,2)
# plot(prob_motion)
# subplot(2,2,3)
# plot(pose_prob_new)
# subplot(2,2,4)
# plot(pose_prob)
# show()
# inference = 'NN'
inference = 'Bayes'
# inference = 'PF'
if inference=='NN': # Nearest neighbor
poses_obs, _ = pose_database.query(pts, knn=1, return_error=True)
poses = [poses_obs[0]]
elif inference=='Bayes': # Bayes
                    if frame_count == 0:
poses_obs, _ = pose_database.query(pts, knn=1, return_error=True)
skel_previous = poses_obs[0].copy()
# poses_m, pose_m_error = pose_database.query(skel_previous-pts_mean, knn=1, return_error=True)
pose_m_error = pose_query(skel_previous-pts_mean, np.array(pose_database.database), search_joints=search_joints)
# poses_m, pose_m_error = pose_database.query(skel_previous-pts_mean+(np.random.random([3,14])-.5).T*30, knn=5, return_error=True)
motion_variance = 10000.
prob_motion = np.exp(-pose_m_error / motion_variance) / np.sum(np.exp(-pose_m_error/motion_variance))
pose_prob_new = prob_obervation*prob_motion
if pose_prob_new.shape == pose_prob.shape:
pose_prob = (pose_prob_new+pose_prob).T/2.
else:
pose_prob = pose_prob_new.T
prob_sorted = np.argsort(pose_prob)
poses = [pose_database.database[np.argmax(pose_prob)]]
# poses = pose_database.database[prob_sorted[-1:]]
# Particle Filter
elif inference=='PF':
prob_sorted = np.argsort(pose_prob)
poses = pose_database.database[prob_sorted[-5:]]
## ICP
# im_pos -= pts_mean
# R,t = IterativeClosestPoint(pose, im_pos.reshape([-1,3])-pts_mean, max_iters=5, min_change=.001, pt_tolerance=10000)
# pose = np.dot(R.T, pose.T).T - t
# pose = np.dot(R, pose.T).T + t
# scale = 1.
# poses *= scale
poses += pts_mean
# print "DB time:", time.time() - time_t0
''' ---- Tracker ---- '''
surface_map = nd.distance_transform_edt(-nd.binary_erosion(mask_box), return_distances=False, return_indices=True)
if skel_previous_uv is None:
skel_previous = poses[0].copy()
skel_current = poses[0].copy()
pose_tmp = cam.camera_model.world2im(poses[0], cam.depthIm.shape)
skel_previous_uv = pose_tmp.copy()
skel_current_uv = pose_tmp.copy()
pose_weights = np.zeros(len(poses), dtype=np.float)
pose_updates = []
pose_updates_uv = []
time_t0 = time.time()
# 2) Sample poses
if inference in ['PF', 'Bayes']:
for pose_i, pose in enumerate(poses):
skel_current = skel_previous.copy()
skel_current_uv = skel_previous_uv.copy()
pose_uv = cam.camera_model.world2im(pose, cam.depthIm.shape)
try:
pose_uv[:,:2] = surface_map[:, pose_uv[:,0]-box_corner[0], pose_uv[:,1]-box_corner[1]].T + [box_corner[0], box_corner[1]]
except:
pass
pose = cam.camera_model.im2world(pose_uv, cam.depthIm.shape)
# ---- (Step 2) Update pose state, x ----
correspondence_displacement = skel_previous - pose
lambda_p = .0
lambda_c = 1.
skel_prev_difference = (skel_current - skel_previous)
# print skel_prev_difference
skel_current = skel_previous \
+ lambda_p * skel_prev_difference \
- lambda_c * correspondence_displacement#\
# ---- (Step 3) Add constraints ----
# A: Link lengths / geometry
# skel_current = link_length_constraints(skel_current, constraint_links, constraint_values, alpha=.5)
# skel_current = geometry_constraints(skel_current, joint_size, alpha=0.5)
# skel_current = collision_constraints(skel_current, constraint_links)
skel_current_uv = (cam.camera_model.world2im(skel_current, cam.depthIm.shape) - [box[0].start, box[1].start, 0])#/mask_interval
skel_current_uv = skel_current_uv.clip([0,0,0], [box[0].stop-box[0].start-1, box[1].stop-box[1].start-1, 9999])
# B: Ray-cast constraints
skel_current, skel_current_uv = ray_cast_constraints(skel_current, skel_current_uv, im_pos, surface_map, joint_size)
# Map back from mask to image
# try:
# skel_current_uv[:,:2] = surface_map[:, skel_current_uv[:,0], skel_current_uv[:,1]].T# + [box_corner[0], box_corner[1]]
# except:
# pass
# ---- (Step 4) Update the confidence ----
if inference=='PF':
time_t1 = time.time()
## Calc distance between each pixel and all joints
px_corr = np.zeros([im_pos.shape[0], im_pos.shape[1], 14])
for i,s in enumerate(skel_current):
px_corr[:,:,i] = np.sqrt(np.sum((im_pos - s)**2, -1))# / joint_size[i]**2
# for i,s in enumerate(pose_uv):
# for i,s in enumerate(skel_current_uv):
# ''' Problem: need to constrain pose_uv to mask '''
# _, geo_map = geodesic_extrema_MPI(im_pos, [s[0],s[1]], iterations=1, visualize=True)
# px_corr[:,:,i] = geo_map
# subplot(2,7,i+1)
# imshow(geo_map, vmin=0, vmax=2000)
# axis('off')
# px_corr[geo_map==0,i] = 9999
px_label = np.argmin(px_corr, -1)*mask_box
px_label_flat = px_label[mask_box].flatten()
# cv2.imshow('gMap', (px_corr.argmin(-1)+1)/15.*mask_box)
# cv2.waitKey(1)
# Project distance to joint's radius
px_joint_displacement = im_pos[mask_box] - skel_current[px_label_flat]
px_joint_magnitude = np.sqrt(np.sum(px_joint_displacement**2,-1))
joint_mesh_pos = skel_current[px_label_flat] + px_joint_displacement*(joint_size[px_label_flat]/px_joint_magnitude)[:,None]
px_joint_displacement = joint_mesh_pos - im_pos[mask_box]
# Ensure pts aren't too far away (these are noise!)
px_joint_displacement[np.abs(px_joint_displacement) > 500] = 0
if 0:
x = im_pos.copy()*0
x[mask_box] = joint_mesh_pos
for i in range(3):
subplot(1,4,i+1)
imshow(x[:,:,i])
axis('off')
subplot(1,4,4)
imshow((px_label+1)*mask_box)
# Calc the correspondance change in position for each joint
correspondence_displacement = np.zeros([len(skel_current), 3])
ii = 0
for i,_ in enumerate(skel_current):
labels = px_label_flat==i
correspondence_displacement[i] = np.sum(px_joint_displacement[px_label_flat==ii], 0) / np.sum(px_joint_displacement[px_label_flat==ii]!=0)
ii+=1
correspondence_displacement = np.nan_to_num(correspondence_displacement)
# print "time:", time.time() - time_t1
# Likelihood
motion_variance = 500
prob_motion = np.exp(-np.mean(np.sum((pose-skel_previous)**2,1)/motion_variance**2))
if inference == 'PF':
correspondence_variance = 40
prob_coor = np.exp(-np.mean(np.sum(correspondence_displacement**2,1)/correspondence_variance**2))
prob = prob_motion * prob_coor
prob = prob_motion
# Viz correspondences
# x = im_pos.copy()*0
# x[mask_box] = px_joint_displacement
# for i in range(3):
# subplot(1,4,i+1)
# imshow(x[:,:,i])
# axis('off')
# subplot(1,4,4)
# imshow((px_label+1)*mask_box)
# # embed()
# # for j in range(3):
# # for i in range(14):
# # subplot(3,14,j*14+i+1)
# # imshow(x[:,:,j]*((px_label==i)*mask_box))
# # axis('off')
# show()
# prob = link_length_probability(skel_current, constraint_links, constraint_values, 100)
# print frame_count
# print "Prob:", np.mean(prob)#, np.min(prob), prob
# thresh = .05
# if np.min(prob) < thresh:
# # print 'Resetting pose'
# for c in constraint_links[prob<thresh]:
# for cc in c:
# skel_current_uv[c] = pose_uv[c] - [box[0].start, box[1].start, 0]
# skel_current[c] = pose[c]
# skel_current_uv = pose_uv.copy() - [box[0].start, box[1].start, 0]
# skel_current = pose.copy()
skel_current_uv = skel_current_uv + [box[0].start, box[1].start, 0]
skel_current = cam.camera_model.im2world(skel_current_uv, cam.depthIm.shape)
# print 'Error:', np.sqrt(np.sum((pose_truth-skel_current)**2, 0))
pose_weights[pose_i] = prob
# pose_updates += [skel_current.copy()]
# pose_updates_uv += [skel_current_uv.copy()]
pose_updates += [pose.copy()]
pose_updates_uv += [pose_uv.copy()]
if cam.colorIm is not None:
cam.colorIm = display_skeletons(cam.colorIm, skel_current_uv, skel_type='Kinect', color=(0,0,pose_i*40+50))
else:
cam.depthIm = display_skeletons(cam.depthIm, skel_current_uv, skel_type='Kinect', color=(0,0,pose_i*40+50))
# cam.colorIm = display_skeletons(cam.colorIm, pose_uv, skel_type='Kinect', color=(0,pose_i*40+50,pose_i*40+50))
# print "Tracking time:", time.time() - time_t0
# Update for next round
pose_ind = np.argmax(pose_weights)
# print "Pickled:", pose_ind
skel_previous = pose_updates[pose_ind].copy()
skel_previous_uv = pose_updates_uv[pose_ind].copy()
# print pose_weights
else:
pose = poses[0]
skel_previous = pose.copy()
pose_uv = cam.camera_model.world2im(skel_previous, cam.depthIm.shape)
skel_current_uv = pose_uv.copy()
skel_previous_uv = pose_uv.copy()
''' ---- Accuracy ---- '''
if ground_truth:
error_track = pose_truth - skel_previous
error_track *= np.any(pose_truth!=0, 1)[:,None]
error_l2_track = np.sqrt(np.sum(error_track**2, 1))
joint_accuracy_track += [error_l2_track]
accuracy_track = np.sum(error_l2_track < 150) / n_joints
accuracy_all_track += [accuracy_track]
print "Current track: {}% {} mm".format(accuracy_track, error_l2_track.mean())
print "Running avg (track):", np.mean(accuracy_all_track)
# print "Joint avg (overall track):", np.mean(joint_accuracy_track)
print ""
''' --- Visualization --- '''
if visualize:
display_markers(cam.colorIm, hand_markers[:2], box, color=(0,250,0))
if len(hand_markers) > 2:
display_markers(cam.colorIm, [hand_markers[2]], box, color=(0,200,0))
display_markers(cam.colorIm, geodesic_markers, box, color=(200,0,0))
# display_markers(cam.colorIm, curve_markers, box, color=(0,100,100))
# display_markers(cam.colorIm, lop_markers, box, color=(0,0,200))
if ground_truth:
cam.colorIm = display_skeletons(cam.colorIm, pose_truth_uv, skel_type='Kinect', color=(0,255,0))
cam.colorIm = display_skeletons(cam.colorIm, skel_current_uv, skel_type='Kinect', color=(255,0,0))
cam.visualize(color=True, depth=False)
# ------------------------------------------------------------
# video_writer.write((geo_clf_map/float(geo_clf_map.max())*255.).astype(np.uint8))
# video_writer.write(cam.colorIm[:,:,[2,1,0]])
frame_count += frame_rate
print "Frame:", frame_count
except:
traceback.print_exc(file=sys.stdout)
pass
try:
print "-- Results for subject {:d} action {:d}".format(subjects[0],actions[0])
except:
pass
# print "Running avg (db):", np.mean(accuracy_all_db)
print "Running mean (track):", np.mean(accuracy_all_track)
# print "Joint avg (overall db):", np.mean(joint_accuracy_db)
print "Joint mean (overall track):", np.mean(joint_accuracy_track)
print "Joint median (overall track):", np.median(joint_accuracy_track)
# print 'Done'
embed()
if learn:
pose_database.save()
elif save_results:
# Save results:
results['subject'] += [subjects[0]]
results['action'] += [actions[0]]
results['accuracy_all'] += [accuracy_all_track]
results['accuracy_mean'] += [np.mean(accuracy_all_track)]
results['joints_all'] += [joint_accuracy_track]
results['joint_mean'] += [np.mean(joint_accuracy_track)]
results['joint_median'] += [np.median(joint_accuracy_track)]
pickle.dump(results, open('/Users/colin/Data/BerkeleyMHAD/Accuracy_Results.pkl', 'w'))
if __name__=="__main__":
parser = optparse.OptionParser()
parser.add_option('-v', '--visualize', dest='viz', action="store_true", default=False, help='Enable visualization')
parser.add_option('-l', '--learn', dest='learn', action="store_true", default=False, help='Training phase')
    parser.add_option('-a', '--actions', dest='actions', type='int', action='append', default=[], help='Actions to use (can be passed multiple times)')
    parser.add_option('-s', '--subjects', dest='subjects', type='int', action='append', default=[], help='Subjects to use (can be passed multiple times)')
(opt, args) = parser.parse_args()
main(visualize=opt.viz, learn=opt.learn, actions=opt.actions, subjects=opt.subjects)
|
# -*- coding: utf-8 -*-
"""
Create at 16/12/13
"""
__author__ = 'TT'
import os
import tornado.options
import tornado.ioloop
from tornado.options import options
import tornado.web
from tornado.httpserver import HTTPServer
from controllers.index import Index, Index1
from controllers.wx import WX, WXUser
class Application(tornado.web.Application):
""""""
def __init__(self):
urls = [
(r'/?', Index),
(r'/wx/comment/?', WX),
(r'/wx/userinfo.html/?', WXUser),
(r'/index1/?', Index1),
]
settings = dict(
xsrf_cookies=False,
debug=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
)
tornado.web.Application.__init__(self, urls, **settings)
# if __name__ == '__main__':
# options.define(name='config', default='tt')
# options.define(name='port', default=31833)
# options.define(name='process', default=2)
#
# tornado.options.parse_command_line()
# app = Application()
# app.config = options.config
#
# server = HTTPServer(app)
# server.bind(int(options.port))
# server.start(num_processes=int(options.process))
# tornado.ioloop.IOLoop.instance().start()
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
def make_app():
return tornado.web.Application([
(r"/", MainHandler),
])
if __name__ == "__main__":
app = Application()
app.listen(31831)
tornado.ioloop.IOLoop.current().start()
|
"""The tests for Philips Hue device triggers for V1 bridge."""
from homeassistant.components import automation, hue
from homeassistant.components.device_automation import DeviceAutomationType
from homeassistant.components.hue.v1 import device_trigger
from homeassistant.setup import async_setup_component
from .conftest import setup_platform
from .test_sensor_v1 import HUE_DIMMER_REMOTE_1, HUE_TAP_REMOTE_1
from tests.common import assert_lists_same, async_get_device_automations
REMOTES_RESPONSE = {"7": HUE_TAP_REMOTE_1, "8": HUE_DIMMER_REMOTE_1}
async def test_get_triggers(hass, mock_bridge_v1, device_reg):
"""Test we get the expected triggers from a hue remote."""
mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE)
await setup_platform(hass, mock_bridge_v1, ["sensor", "binary_sensor"])
assert len(mock_bridge_v1.mock_requests) == 1
# 2 remotes, just 1 battery sensor
assert len(hass.states.async_all()) == 1
# Get triggers for specific tap switch
hue_tap_device = device_reg.async_get_device(
{(hue.DOMAIN, "00:00:00:00:00:44:23:08")}
)
triggers = await async_get_device_automations(
hass, DeviceAutomationType.TRIGGER, hue_tap_device.id
)
expected_triggers = [
{
"platform": "device",
"domain": hue.DOMAIN,
"device_id": hue_tap_device.id,
"type": t_type,
"subtype": t_subtype,
}
for t_type, t_subtype in device_trigger.HUE_TAP_REMOTE
]
assert_lists_same(triggers, expected_triggers)
# Get triggers for specific dimmer switch
hue_dimmer_device = device_reg.async_get_device(
{(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")}
)
triggers = await async_get_device_automations(
hass, DeviceAutomationType.TRIGGER, hue_dimmer_device.id
)
trigger_batt = {
"platform": "device",
"domain": "sensor",
"device_id": hue_dimmer_device.id,
"type": "battery_level",
"entity_id": "sensor.hue_dimmer_switch_1_battery_level",
}
expected_triggers = [
trigger_batt,
*(
{
"platform": "device",
"domain": hue.DOMAIN,
"device_id": hue_dimmer_device.id,
"type": t_type,
"subtype": t_subtype,
}
for t_type, t_subtype in device_trigger.HUE_DIMMER_REMOTE
),
]
assert_lists_same(triggers, expected_triggers)
async def test_if_fires_on_state_change(hass, mock_bridge_v1, device_reg, calls):
"""Test for button press trigger firing."""
mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE)
await setup_platform(hass, mock_bridge_v1, ["sensor", "binary_sensor"])
assert len(mock_bridge_v1.mock_requests) == 1
assert len(hass.states.async_all()) == 1
# Set an automation with a specific tap switch trigger
hue_tap_device = device_reg.async_get_device(
{(hue.DOMAIN, "00:00:00:00:00:44:23:08")}
)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": hue.DOMAIN,
"device_id": hue_tap_device.id,
"type": "remote_button_short_press",
"subtype": "button_4",
},
"action": {
"service": "test.automation",
"data_template": {
"some": "B4 - {{ trigger.event.data.event }}"
},
},
},
{
"trigger": {
"platform": "device",
"domain": hue.DOMAIN,
"device_id": "mock-device-id",
"type": "remote_button_short_press",
"subtype": "button_1",
},
"action": {
"service": "test.automation",
"data_template": {
"some": "B1 - {{ trigger.event.data.event }}"
},
},
},
]
},
)
# Fake that the remote is being pressed.
new_sensor_response = dict(REMOTES_RESPONSE)
new_sensor_response["7"] = dict(new_sensor_response["7"])
new_sensor_response["7"]["state"] = {
"buttonevent": 18,
"lastupdated": "2019-12-28T22:58:02",
}
mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
# Force updates to run again
await mock_bridge_v1.sensor_manager.coordinator.async_refresh()
await hass.async_block_till_done()
assert len(mock_bridge_v1.mock_requests) == 2
assert len(calls) == 1
assert calls[0].data["some"] == "B4 - 18"
# Fake another button press.
new_sensor_response["7"] = dict(new_sensor_response["7"])
new_sensor_response["7"]["state"] = {
"buttonevent": 34,
"lastupdated": "2019-12-28T22:58:05",
}
mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
# Force updates to run again
await mock_bridge_v1.sensor_manager.coordinator.async_refresh()
await hass.async_block_till_done()
assert len(mock_bridge_v1.mock_requests) == 3
assert len(calls) == 1
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_datatab.ui'
#
# Created by: PyQt5 UI code generator 5.14.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_data(object):
def setupUi(self, data):
data.setObjectName("data")
data.resize(761, 857)
self.verticalLayout = QtWidgets.QVBoxLayout(data)
self.verticalLayout.setObjectName("verticalLayout")
self.layoutVolume = QtWidgets.QVBoxLayout()
self.layoutVolume.setObjectName("layoutVolume")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.verticalLayout_6 = QtWidgets.QVBoxLayout()
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.label_13 = QtWidgets.QLabel(data)
self.label_13.setObjectName("label_13")
self.verticalLayout_6.addWidget(self.label_13)
self.horizontalLayout_8 = QtWidgets.QHBoxLayout()
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.checkBoxLeftView = QtWidgets.QCheckBox(data)
self.checkBoxLeftView.setText("")
self.checkBoxLeftView.setChecked(True)
self.checkBoxLeftView.setObjectName("checkBoxLeftView")
self.horizontalLayout_8.addWidget(self.checkBoxLeftView)
self.checkBoxCentralView = QtWidgets.QCheckBox(data)
self.checkBoxCentralView.setText("")
self.checkBoxCentralView.setChecked(True)
self.checkBoxCentralView.setObjectName("checkBoxCentralView")
self.horizontalLayout_8.addWidget(self.checkBoxCentralView)
self.checkBoxRightView = QtWidgets.QCheckBox(data)
self.checkBoxRightView.setText("")
self.checkBoxRightView.setChecked(True)
self.checkBoxRightView.setObjectName("checkBoxRightView")
self.horizontalLayout_8.addWidget(self.checkBoxRightView)
self.verticalLayout_6.addLayout(self.horizontalLayout_8)
self.line_6 = QtWidgets.QFrame(data)
self.line_6.setFrameShape(QtWidgets.QFrame.HLine)
self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_6.setObjectName("line_6")
self.verticalLayout_6.addWidget(self.line_6)
self.label = QtWidgets.QLabel(data)
self.label.setObjectName("label")
self.verticalLayout_6.addWidget(self.label)
self.horizontalLayoutExtraButtons = QtWidgets.QHBoxLayout()
self.horizontalLayoutExtraButtons.setObjectName("horizontalLayoutExtraButtons")
self.pushButtonManagerRed = QtWidgets.QPushButton(data)
self.pushButtonManagerRed.setText("")
self.pushButtonManagerRed.setObjectName("pushButtonManagerRed")
self.horizontalLayoutExtraButtons.addWidget(self.pushButtonManagerRed)
self.pushButtonManagerBlue = QtWidgets.QPushButton(data)
self.pushButtonManagerBlue.setText("")
self.pushButtonManagerBlue.setObjectName("pushButtonManagerBlue")
self.horizontalLayoutExtraButtons.addWidget(self.pushButtonManagerBlue)
self.pushButtonManagerGreen = QtWidgets.QPushButton(data)
self.pushButtonManagerGreen.setText("")
self.pushButtonManagerGreen.setObjectName("pushButtonManagerGreen")
self.horizontalLayoutExtraButtons.addWidget(self.pushButtonManagerGreen)
self.verticalLayout_6.addLayout(self.horizontalLayoutExtraButtons)
self.horizontalLayout_9 = QtWidgets.QHBoxLayout()
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.pushButtonManagerOrange = QtWidgets.QPushButton(data)
self.pushButtonManagerOrange.setText("")
self.pushButtonManagerOrange.setObjectName("pushButtonManagerOrange")
self.horizontalLayout_9.addWidget(self.pushButtonManagerOrange)
self.pushButtonManagerGrey = QtWidgets.QPushButton(data)
self.pushButtonManagerGrey.setText("")
self.pushButtonManagerGrey.setObjectName("pushButtonManagerGrey")
self.horizontalLayout_9.addWidget(self.pushButtonManagerGrey)
self.pushButtonManagerCyan = QtWidgets.QPushButton(data)
self.pushButtonManagerCyan.setText("")
self.pushButtonManagerCyan.setObjectName("pushButtonManagerCyan")
self.horizontalLayout_9.addWidget(self.pushButtonManagerCyan)
self.verticalLayout_6.addLayout(self.horizontalLayout_9)
self.horizontalLayout.addLayout(self.verticalLayout_6)
self.line_7 = QtWidgets.QFrame(data)
self.line_7.setFrameShape(QtWidgets.QFrame.VLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.horizontalLayout.addWidget(self.line_7)
self.verticalLayout_7 = QtWidgets.QVBoxLayout()
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.checkBoxLinkViews = QtWidgets.QCheckBox(data)
self.checkBoxLinkViews.setChecked(True)
self.checkBoxLinkViews.setObjectName("checkBoxLinkViews")
self.verticalLayout_7.addWidget(self.checkBoxLinkViews)
self.checkBox6Views = QtWidgets.QCheckBox(data)
self.checkBox6Views.setObjectName("checkBox6Views")
self.verticalLayout_7.addWidget(self.checkBox6Views)
self.horizontalLayout.addLayout(self.verticalLayout_7)
self.line_8 = QtWidgets.QFrame(data)
self.line_8.setFrameShape(QtWidgets.QFrame.VLine)
self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_8.setObjectName("line_8")
self.horizontalLayout.addWidget(self.line_8)
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.verticalLayout_9 = QtWidgets.QVBoxLayout()
self.verticalLayout_9.setObjectName("verticalLayout_9")
self.label_2 = QtWidgets.QLabel(data)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setObjectName("label_2")
self.verticalLayout_9.addWidget(self.label_2)
self.comboBoxOrientation = QComboBoxIgnoreSCroll(data)
self.comboBoxOrientation.setObjectName("comboBoxOrientation")
self.comboBoxOrientation.addItem("")
self.comboBoxOrientation.addItem("")
self.comboBoxOrientation.addItem("")
self.comboBoxOrientation.addItem("")
self.verticalLayout_9.addWidget(self.comboBoxOrientation)
self.verticalLayout_5.addLayout(self.verticalLayout_9)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.pushButtonLoadAtlasMeta = QtWidgets.QPushButton(data)
self.pushButtonLoadAtlasMeta.setObjectName("pushButtonLoadAtlasMeta")
self.horizontalLayout_4.addWidget(self.pushButtonLoadAtlasMeta)
self.labelNewVersion = QtWidgets.QLabel(data)
self.labelNewVersion.setText("")
self.labelNewVersion.setObjectName("labelNewVersion")
self.horizontalLayout_4.addWidget(self.labelNewVersion)
self.verticalLayout_5.addLayout(self.horizontalLayout_4)
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.pushButtonScaleBarColor = QtWidgets.QPushButton(data)
self.pushButtonScaleBarColor.setObjectName("pushButtonScaleBarColor")
self.horizontalLayout_5.addWidget(self.pushButtonScaleBarColor)
self.pushButtonScreenShot = QtWidgets.QPushButton(data)
self.pushButtonScreenShot.setStyleSheet("border: none")
self.pushButtonScreenShot.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/icons/Camera_font_awesome.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButtonScreenShot.setIcon(icon)
self.pushButtonScreenShot.setIconSize(QtCore.QSize(24, 24))
self.pushButtonScreenShot.setObjectName("pushButtonScreenShot")
self.horizontalLayout_5.addWidget(self.pushButtonScreenShot)
self.verticalLayout_5.addLayout(self.horizontalLayout_5)
self.horizontalLayout.addLayout(self.verticalLayout_5)
self.layoutVolume.addLayout(self.horizontalLayout)
self.line_5 = QtWidgets.QFrame(data)
self.line_5.setFrameShape(QtWidgets.QFrame.HLine)
self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_5.setObjectName("line_5")
self.layoutVolume.addWidget(self.line_5)
self.horizontalLayout_12 = QtWidgets.QHBoxLayout()
self.horizontalLayout_12.setObjectName("horizontalLayout_12")
self.labelVolume = QtWidgets.QLabel(data)
self.labelVolume.setStyleSheet("background-color:rgb(102, 95, 95);\n"
"padding: 3px;\n"
"color: rgb(223, 223, 223);")
self.labelVolume.setObjectName("labelVolume")
self.horizontalLayout_12.addWidget(self.labelVolume)
self.layoutVolume.addLayout(self.horizontalLayout_12)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.comboBoxVolume = QComboBoxIgnoreSCroll(data)
self.comboBoxVolume.setObjectName("comboBoxVolume")
self.horizontalLayout_2.addWidget(self.comboBoxVolume)
self.pushButtonEditVolName = QtWidgets.QPushButton(data)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButtonEditVolName.sizePolicy().hasHeightForWidth())
self.pushButtonEditVolName.setSizePolicy(sizePolicy)
self.pushButtonEditVolName.setStyleSheet("border:none")
self.pushButtonEditVolName.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/icons/edit.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButtonEditVolName.setIcon(icon1)
self.pushButtonEditVolName.setIconSize(QtCore.QSize(20, 20))
self.pushButtonEditVolName.setObjectName("pushButtonEditVolName")
self.horizontalLayout_2.addWidget(self.pushButtonEditVolName)
self.layoutVolume.addLayout(self.horizontalLayout_2)
self.horizontalLayoutVol1Levels = QtWidgets.QHBoxLayout()
self.horizontalLayoutVol1Levels.setObjectName("horizontalLayoutVol1Levels")
self.comboBoxVolumeLut = QComboBoxIgnoreSCroll(data)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBoxVolumeLut.sizePolicy().hasHeightForWidth())
self.comboBoxVolumeLut.setSizePolicy(sizePolicy)
self.comboBoxVolumeLut.setObjectName("comboBoxVolumeLut")
self.horizontalLayoutVol1Levels.addWidget(self.comboBoxVolumeLut)
self.layoutVolume.addLayout(self.horizontalLayoutVol1Levels)
self.verticalLayout.addLayout(self.layoutVolume)
self.line = QtWidgets.QFrame(data)
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.verticalLayout.addWidget(self.line)
self.vol2Widget_layout = QtWidgets.QVBoxLayout()
self.vol2Widget_layout.setObjectName("vol2Widget_layout")
self.volume2Label_layout = QtWidgets.QHBoxLayout()
self.volume2Label_layout.setObjectName("volume2Label_layout")
self.checkBoxShowVol2Controls = QtWidgets.QCheckBox(data)
self.checkBoxShowVol2Controls.setMaximumSize(QtCore.QSize(20, 16777215))
self.checkBoxShowVol2Controls.setText("")
self.checkBoxShowVol2Controls.setObjectName("checkBoxShowVol2Controls")
self.volume2Label_layout.addWidget(self.checkBoxShowVol2Controls)
self.label_14 = QtWidgets.QLabel(data)
self.label_14.setStyleSheet("background-color:rgb(102, 95, 95);\n"
"padding: 3px;\n"
"color: rgb(223, 223, 223);")
self.label_14.setObjectName("label_14")
self.volume2Label_layout.addWidget(self.label_14)
self.checkBoxVisibilityVol2 = QtWidgets.QCheckBox(data)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBoxVisibilityVol2.sizePolicy().hasHeightForWidth())
self.checkBoxVisibilityVol2.setSizePolicy(sizePolicy)
self.checkBoxVisibilityVol2.setText("")
self.checkBoxVisibilityVol2.setChecked(True)
self.checkBoxVisibilityVol2.setObjectName("checkBoxVisibilityVol2")
self.volume2Label_layout.addWidget(self.checkBoxVisibilityVol2)
self.vol2Widget_layout.addLayout(self.volume2Label_layout)
self.vol2ControlsWidget = QtWidgets.QWidget(data)
self.vol2ControlsWidget.setMinimumSize(QtCore.QSize(300, 80))
self.vol2ControlsWidget.setObjectName("vol2ControlsWidget")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.vol2ControlsWidget)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.layoutVolume2 = QtWidgets.QVBoxLayout()
self.layoutVolume2.setObjectName("layoutVolume2")
self.comboBoxVolume2 = QtWidgets.QComboBox(self.vol2ControlsWidget)
self.comboBoxVolume2.setObjectName("comboBoxVolume2")
self.layoutVolume2.addWidget(self.comboBoxVolume2)
self.horizontalLayoutVol2Levels = QtWidgets.QHBoxLayout()
self.horizontalLayoutVol2Levels.setObjectName("horizontalLayoutVol2Levels")
self.comboBoxVolumeLut2 = QtWidgets.QComboBox(self.vol2ControlsWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBoxVolumeLut2.sizePolicy().hasHeightForWidth())
self.comboBoxVolumeLut2.setSizePolicy(sizePolicy)
self.comboBoxVolumeLut2.setObjectName("comboBoxVolumeLut2")
self.horizontalLayoutVol2Levels.addWidget(self.comboBoxVolumeLut2)
self.doubleSpinBoxVol2Opacity = QtWidgets.QDoubleSpinBox(self.vol2ControlsWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.doubleSpinBoxVol2Opacity.sizePolicy().hasHeightForWidth())
self.doubleSpinBoxVol2Opacity.setSizePolicy(sizePolicy)
self.doubleSpinBoxVol2Opacity.setObjectName("doubleSpinBoxVol2Opacity")
self.horizontalLayoutVol2Levels.addWidget(self.doubleSpinBoxVol2Opacity)
self.label_3 = QtWidgets.QLabel(self.vol2ControlsWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3.sizePolicy().hasHeightForWidth())
self.label_3.setSizePolicy(sizePolicy)
self.label_3.setObjectName("label_3")
self.horizontalLayoutVol2Levels.addWidget(self.label_3)
self.layoutVolume2.addLayout(self.horizontalLayoutVol2Levels)
self.verticalLayout_3.addLayout(self.layoutVolume2)
self.vol2Widget_layout.addWidget(self.vol2ControlsWidget)
self.verticalLayout.addLayout(self.vol2Widget_layout)
self.line_2 = QtWidgets.QFrame(data)
self.line_2.setLineWidth(5)
self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.verticalLayout.addWidget(self.line_2)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.checkBoxShowDataControls = QtWidgets.QCheckBox(data)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBoxShowDataControls.sizePolicy().hasHeightForWidth())
self.checkBoxShowDataControls.setSizePolicy(sizePolicy)
self.checkBoxShowDataControls.setText("")
self.checkBoxShowDataControls.setObjectName("checkBoxShowDataControls")
self.horizontalLayout_3.addWidget(self.checkBoxShowDataControls)
self.labelData = QtWidgets.QLabel(data)
self.labelData.setStyleSheet("background-color:rgb(102, 95, 95);\n"
"padding: 3px;\n"
"color: rgb(223, 223, 223);")
self.labelData.setObjectName("labelData")
self.horizontalLayout_3.addWidget(self.labelData)
self.checkBoxVisibilityHeatmap = QtWidgets.QCheckBox(data)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBoxVisibilityHeatmap.sizePolicy().hasHeightForWidth())
self.checkBoxVisibilityHeatmap.setSizePolicy(sizePolicy)
self.checkBoxVisibilityHeatmap.setText("")
self.checkBoxVisibilityHeatmap.setChecked(True)
self.checkBoxVisibilityHeatmap.setObjectName("checkBoxVisibilityHeatmap")
self.horizontalLayout_3.addWidget(self.checkBoxVisibilityHeatmap)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.dataWidget = QtWidgets.QWidget(data)
self.dataWidget.setMinimumSize(QtCore.QSize(0, 170))
self.dataWidget.setObjectName("dataWidget")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.dataWidget)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.verticalLayout_8 = QtWidgets.QVBoxLayout()
self.verticalLayout_8.setObjectName("verticalLayout_8")
self.comboBoxData = QtWidgets.QComboBox(self.dataWidget)
self.comboBoxData.setObjectName("comboBoxData")
self.verticalLayout_8.addWidget(self.comboBoxData)
self.horizontalLayout_11 = QtWidgets.QHBoxLayout()
self.horizontalLayout_11.setObjectName("horizontalLayout_11")
self.comboBoxLutHeatmap = QtWidgets.QComboBox(self.dataWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBoxLutHeatmap.sizePolicy().hasHeightForWidth())
self.comboBoxLutHeatmap.setSizePolicy(sizePolicy)
self.comboBoxLutHeatmap.setObjectName("comboBoxLutHeatmap")
self.horizontalLayout_11.addWidget(self.comboBoxLutHeatmap)
self.verticalLayout_8.addLayout(self.horizontalLayout_11)
self.verticalLayout_4.addLayout(self.verticalLayout_8)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.doubleSpinBoxNegThresh = QtWidgets.QDoubleSpinBox(self.dataWidget)
self.doubleSpinBoxNegThresh.setObjectName("doubleSpinBoxNegThresh")
self.horizontalLayout_6.addWidget(self.doubleSpinBoxNegThresh)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem)
self.pushButtonRecalcConnectComponents = QtWidgets.QPushButton(self.dataWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButtonRecalcConnectComponents.sizePolicy().hasHeightForWidth())
self.pushButtonRecalcConnectComponents.setSizePolicy(sizePolicy)
self.pushButtonRecalcConnectComponents.setObjectName("pushButtonRecalcConnectComponents")
self.horizontalLayout_6.addWidget(self.pushButtonRecalcConnectComponents)
self.doubleSpinBoxPosThresh = QtWidgets.QDoubleSpinBox(self.dataWidget)
self.doubleSpinBoxPosThresh.setObjectName("doubleSpinBoxPosThresh")
self.horizontalLayout_6.addWidget(self.doubleSpinBoxPosThresh)
self.verticalLayout_4.addLayout(self.horizontalLayout_6)
self.labelFdrThresholds = QtWidgets.QLabel(self.dataWidget)
self.labelFdrThresholds.setObjectName("labelFdrThresholds")
self.verticalLayout_4.addWidget(self.labelFdrThresholds)
self.gridLayoutFdrButtons = QtWidgets.QGridLayout()
self.gridLayoutFdrButtons.setObjectName("gridLayoutFdrButtons")
self.verticalLayout_4.addLayout(self.gridLayoutFdrButtons)
self.horizontalLayoutDataSliders = QtWidgets.QHBoxLayout()
self.horizontalLayoutDataSliders.setObjectName("horizontalLayoutDataSliders")
self.verticalLayout_4.addLayout(self.horizontalLayoutDataSliders)
self.horizontalLayout_10 = QtWidgets.QHBoxLayout()
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_10.addItem(spacerItem1)
self.verticalLayout_4.addLayout(self.horizontalLayout_10)
self.verticalLayoutConnectedComponents = QtWidgets.QHBoxLayout()
self.verticalLayoutConnectedComponents.setObjectName("verticalLayoutConnectedComponents")
self.verticalLayout_4.addLayout(self.verticalLayoutConnectedComponents)
self.labelFdrThresholds.raise_()
self.verticalLayout.addWidget(self.dataWidget)
self.line_4 = QtWidgets.QFrame(data)
self.line_4.setFrameShape(QtWidgets.QFrame.HLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.verticalLayout.addWidget(self.line_4)
self.verticalLayoutVectors = QtWidgets.QVBoxLayout()
self.verticalLayoutVectors.setObjectName("verticalLayoutVectors")
self.horizontalLayoutVectorHeader = QtWidgets.QHBoxLayout()
self.horizontalLayoutVectorHeader.setObjectName("horizontalLayoutVectorHeader")
self.checkBoxShowVectorControls = QtWidgets.QCheckBox(data)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBoxShowVectorControls.sizePolicy().hasHeightForWidth())
self.checkBoxShowVectorControls.setSizePolicy(sizePolicy)
self.checkBoxShowVectorControls.setText("")
self.checkBoxShowVectorControls.setObjectName("checkBoxShowVectorControls")
self.horizontalLayoutVectorHeader.addWidget(self.checkBoxShowVectorControls)
self.label_9 = QtWidgets.QLabel(data)
self.label_9.setStyleSheet("background-color:rgb(102, 95, 95);\n"
"padding: 3px;\n"
"color: rgb(223, 223, 223);")
self.label_9.setObjectName("label_9")
self.horizontalLayoutVectorHeader.addWidget(self.label_9)
self.verticalLayoutVectors.addLayout(self.horizontalLayoutVectorHeader)
self.vectorWidget = QtWidgets.QWidget(data)
self.vectorWidget.setMinimumSize(QtCore.QSize(0, 100))
self.vectorWidget.setObjectName("vectorWidget")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.vectorWidget)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.comboBoxVectors = QtWidgets.QComboBox(self.vectorWidget)
self.comboBoxVectors.setObjectName("comboBoxVectors")
self.verticalLayout_2.addWidget(self.comboBoxVectors)
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.label_10 = QtWidgets.QLabel(self.vectorWidget)
self.label_10.setObjectName("label_10")
self.horizontalLayout_7.addWidget(self.label_10)
self.spinBoxVectorSubsampling = QtWidgets.QSpinBox(self.vectorWidget)
self.spinBoxVectorSubsampling.setObjectName("spinBoxVectorSubsampling")
self.horizontalLayout_7.addWidget(self.spinBoxVectorSubsampling)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_7.addItem(spacerItem2)
self.label_11 = QtWidgets.QLabel(self.vectorWidget)
self.label_11.setObjectName("label_11")
self.horizontalLayout_7.addWidget(self.label_11)
self.spinBoxVectorScale = QtWidgets.QSpinBox(self.vectorWidget)
self.spinBoxVectorScale.setObjectName("spinBoxVectorScale")
self.horizontalLayout_7.addWidget(self.spinBoxVectorScale)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_7.addItem(spacerItem3)
self.label_12 = QtWidgets.QLabel(self.vectorWidget)
self.label_12.setObjectName("label_12")
self.horizontalLayout_7.addWidget(self.label_12)
self.pushButtonVectorColor = QtWidgets.QPushButton(self.vectorWidget)
self.pushButtonVectorColor.setText("")
self.pushButtonVectorColor.setObjectName("pushButtonVectorColor")
self.horizontalLayout_7.addWidget(self.pushButtonVectorColor)
self.verticalLayout_2.addLayout(self.horizontalLayout_7)
self.pushButtonVectorMagnitudeFilter = QtWidgets.QPushButton(self.vectorWidget)
self.pushButtonVectorMagnitudeFilter.setObjectName("pushButtonVectorMagnitudeFilter")
self.verticalLayout_2.addWidget(self.pushButtonVectorMagnitudeFilter)
self.horizontalLayoutMagnitudeSlider = QtWidgets.QHBoxLayout()
self.horizontalLayoutMagnitudeSlider.setObjectName("horizontalLayoutMagnitudeSlider")
self.verticalLayout_2.addLayout(self.horizontalLayoutMagnitudeSlider)
self.verticalLayoutVectors.addWidget(self.vectorWidget)
self.verticalLayout.addLayout(self.verticalLayoutVectors)
self.line_3 = QtWidgets.QFrame(data)
self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.verticalLayout.addWidget(self.line_3)
self.gridLayoutScalebar = QtWidgets.QGridLayout()
self.gridLayoutScalebar.setObjectName("gridLayoutScalebar")
self.doubleSpinBoxVoxelSize = QtWidgets.QDoubleSpinBox(data)
self.doubleSpinBoxVoxelSize.setObjectName("doubleSpinBoxVoxelSize")
self.gridLayoutScalebar.addWidget(self.doubleSpinBoxVoxelSize, 0, 1, 1, 1)
self.label_15 = QtWidgets.QLabel(data)
self.label_15.setObjectName("label_15")
self.gridLayoutScalebar.addWidget(self.label_15, 0, 0, 1, 1)
self.doubleSpinBoxScaleBarLength = QtWidgets.QDoubleSpinBox(data)
self.doubleSpinBoxScaleBarLength.setObjectName("doubleSpinBoxScaleBarLength")
self.gridLayoutScalebar.addWidget(self.doubleSpinBoxScaleBarLength, 0, 4, 1, 1)
self.label_16 = QtWidgets.QLabel(data)
self.label_16.setObjectName("label_16")
self.gridLayoutScalebar.addWidget(self.label_16, 0, 3, 1, 1)
self.checkBoxScaleBarLabel = QtWidgets.QCheckBox(data)
self.checkBoxScaleBarLabel.setObjectName("checkBoxScaleBarLabel")
self.gridLayoutScalebar.addWidget(self.checkBoxScaleBarLabel, 0, 2, 1, 1)
self.verticalLayout.addLayout(self.gridLayoutScalebar)
self.line_9 = QtWidgets.QFrame(data)
self.line_9.setLineWidth(4)
self.line_9.setMidLineWidth(2)
self.line_9.setFrameShape(QtWidgets.QFrame.HLine)
self.line_9.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_9.setObjectName("line_9")
self.verticalLayout.addWidget(self.line_9)
self.verticalLayoutColorScale = QtWidgets.QVBoxLayout()
self.verticalLayoutColorScale.setObjectName("verticalLayoutColorScale")
self.verticalLayout.addLayout(self.verticalLayoutColorScale)
spacerItem4 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem4)
self.retranslateUi(data)
QtCore.QMetaObject.connectSlotsByName(data)
def retranslateUi(self, data):
_translate = QtCore.QCoreApplication.translate
data.setWindowTitle(_translate("data", "Form"))
        self.label_13.setText(_translate("data", "Viewport visibility"))
self.label.setText(_translate("data", "Edit viewport"))
self.checkBoxLinkViews.setText(_translate("data", "Link views"))
self.checkBox6Views.setText(_translate("data", "Two row view"))
self.label_2.setText(_translate("data", "Orientation"))
self.comboBoxOrientation.setItemText(0, _translate("data", "sagittal"))
self.comboBoxOrientation.setItemText(1, _translate("data", "coronal"))
self.comboBoxOrientation.setItemText(2, _translate("data", "axial"))
self.comboBoxOrientation.setItemText(3, _translate("data", "Isosurface - Not implemented"))
self.pushButtonLoadAtlasMeta.setText(_translate("data", "Load atlas metadata"))
self.pushButtonScaleBarColor.setText(_translate("data", "scale bar color"))
self.labelVolume.setText(_translate("data", "Volume 1"))
self.label_14.setText(_translate("data", "Volume 2 / Atlas"))
self.label_3.setText(_translate("data", "opacity"))
self.labelData.setText(_translate("data", "Heatmap Data"))
self.pushButtonRecalcConnectComponents.setText(_translate("data", "Find blobs"))
self.labelFdrThresholds.setText(_translate("data", "FDR thresholds"))
self.label_9.setText(_translate("data", "Vectors"))
self.label_10.setText(_translate("data", "Subsample"))
self.label_11.setText(_translate("data", "Scale"))
self.label_12.setText(_translate("data", "Arrow colour"))
self.pushButtonVectorMagnitudeFilter.setText(_translate("data", "filter magnitude"))
self.label_15.setText(_translate("data", "Voxel size"))
self.label_16.setText(_translate("data", "Scale bar length"))
self.checkBoxScaleBarLabel.setText(_translate("data", "scale label"))
from vpv.ui.views.combobox_ignore_scroll import QComboBoxIgnoreSCroll
import resources_rc
|
# Copyright (c) 2019-2020 SAP SE or an SAP affiliate company. All rights reserved. This file is
# licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import shlex
from concourse.model.base import ScriptType
from concourse.model.step import (
PipelineStep,
StepNotificationPolicy,
)
class PipelineStepTest(unittest.TestCase):
def _examinee(self, name='dontcare', **kwargs):
return PipelineStep(
name=name,
is_synthetic=False,
notification_policy=StepNotificationPolicy.NOTIFY_PULL_REQUESTS,
script_type=ScriptType.BOURNE_SHELL,
raw_dict=kwargs,
)
def test_image(self):
examinee = self._examinee(image='an_image:1.2.3')
self.assertEqual(examinee.image(), 'an_image:1.2.3')
def test__argv(self):
# argv defaults to [step.name]
examinee = self._examinee(name='a_name')
self.assertEqual(examinee._argv(), ['a_name'])
# executable may be overwritten
examinee = self._examinee(execute='another_executable')
self.assertEqual(examinee._argv(), ['another_executable'])
# executable may be a list
examinee = self._examinee(execute=['a', 'b'])
self.assertEqual(examinee._argv(), ['a', 'b'])
def test_executable(self):
examinee = self._examinee(name='x')
self.assertEqual(examinee.executable(), 'x')
self.assertEqual(examinee.executable(prefix='foo'), 'foo/x')
self.assertEqual(examinee.executable(prefix=('foo',)), 'foo/x')
self.assertEqual(examinee.executable(prefix=('foo','bar')), 'foo/bar/x')
examinee = self._examinee(execute='another_executable')
self.assertEqual(examinee.executable(), 'another_executable')
examinee = self._examinee(execute=['exec', 'arg 1', 'arg2'])
self.assertEqual(examinee.executable(), 'exec')
def test_execute(self):
examinee = self._examinee(execute=['exec', 'arg1'])
self.assertEqual(examinee.execute(), 'exec arg1')
# whitespace must be quoted
examinee = self._examinee(execute=['e x', 'a r g'])
self.assertEqual(examinee.execute(), ' '.join(map(shlex.quote, ('e x', 'a r g'))))
|
# coding=utf-8
import unittest
__author__ = 'Lorenzo'
from src.stock import Stock
from datetime import datetime
class StockTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
def setUp(self):
self.stock = Stock("GOOG")
def trend_fixture(self, prices):
"""Create new prices in price history from a list of Float.
To be used in the tests below."""
return [self.stock.update(datetime(2015, 12, i+1), price=p)
for i, p in enumerate(prices)]
def test_should_create_Stock_object(self):
self.assertIsInstance(self.stock, Stock)
self.assertRaises(
ValueError,
self.trend_fixture,
[80, -78.3, 81.1]
)
def test_price_of_a_new_stock_class_should_be_None(self):
self.assertIsNone(self.stock.price)
def test_should_update_price(self):
"""Requirement: * Price has to update properly via a `update` method, with a
timestamp also"
"""
self.stock.update(datetime(2015, 12, 5), price=80)
self.assertEqual(self.stock.price, 80)
def test_negative_price_should_raise_valuerror(self):
"""Requirement: * Price cannot be negative"""
        # assertRaises returns a context manager! Cool
with self.assertRaises(ValueError):
self.stock.update(datetime(2015, 12, 5), -1)
def test_after_multiple_updates_should_return_ordered(self):
"""Requirement: * After multiple updates, elements in stock.price_history
should be ordered from the latest to the oldest"""
self.stock.update(datetime(2015, 12, 5), price=80)
self.stock.update(datetime(2015, 12, 6), price=82.6)
self.stock.update(datetime(2015, 12, 4), price=81)
self.stock.update(datetime(2015, 12, 9), price=87.6)
self.stock.update(datetime(2015, 12, 7), price=81.9)
self.stock.update(datetime(2015, 12, 8), price=84.9)
self.assertEqual(
[s[1] for s in self.stock.price_history],
[81, 80, 82.6, 81.9, 84.9, 87.6]
)
def test_after_multiple_updates_should_return_last(self):
"""Requirement: * After multiple updates, stock.price returns the latest
price"""
self.stock.update(datetime(2015, 12, 5), price=80)
self.stock.update(datetime(2015, 12, 6), price=82.6)
self.stock.update(datetime(2015, 12, 4), price=81)
self.stock.update(datetime(2015, 12, 9), price=87.6)
self.stock.update(datetime(2015, 12, 7), price=81.9)
self.stock.update(datetime(2015, 12, 8), price=84.9)
self.assertAlmostEqual(self.stock.price, 87.6, delta=0.1)
def tearDown(self):
        if self.stock.price is not None:
            print(self.stock)
del self.stock
pass
@classmethod
def tearDownClass(cls):
pass
class StockTrendTest(StockTest):
"""Requirement: * implement a method to check if a a stock has a incremental
trend (if the last three quotes are increasing)"""
@classmethod
def setUpClass(cls):
pass
# inherit setUp() from super()
def test_trend_should_return_last_three_prices(self):
"""Test the stock.trend method"""
self.trend_fixture([82, 79, 80, 78.3, 81.1])
        self.assertEqual(self.stock.trend, [80, 78.3, 81.1])
def test_trend_should_be_incremental(self):
"""Pass three recent growing prices in the method"""
self.trend_fixture([82, 79, 81.1, 82.6, 84.9])
self.assertTrue(self.stock.trend_is_incremental())
def test_trend_should_be_decremental(self):
"""Pass three recent decrementing prices in the method"""
self.trend_fixture([82, 79, 84.1, 82.6, 80.9])
self.assertFalse(self.stock.trend_is_incremental())
def test_trend_should_be_none(self):
"""Pass three recent not trended prices in the method"""
self.trend_fixture([82, 79, 84.1, 85.6, 80.9])
self.assertIsNone(self.stock.trend_is_incremental())
def tearDown(self):
pass
@classmethod
def tearDownClass(cls):
pass
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
# coding: utf-8
# In[19]:
idade = input("Informe a sua idade: ")
print(idade, type(idade))
# In[24]:
idade = int(idade)
print(idade, type(idade))
# In[26]:
print(float('123.52'))
print(str(123.25))
print(bool(''))
print(bool('abc'))
print(bool(0))
print(bool(-2))
# In[32]:
salario = input("Qual o seu salário?")
salario = float(salario)
gasto = input("Gastos totais")
gasto = float(gasto)
salario_total = salario * 12
gasto_total = gasto * 12
montante = salario_total - gasto_total
print("O montante economizado é", montante)
# In[ ]:
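# Worked example (hypothetical values, not part of the original notebook):
# with salario = 3000.0 and gasto = 2000.0 per month, salario_total = 36000.0,
# gasto_total = 24000.0, and the script prints "O montante economizado é 12000.0".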
|
# Generated by Django 3.1.1 on 2020-09-26 21:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('accounts', '0003_auto_20200926_2241'),
]
operations = [
migrations.CreateModel(
name='Donator',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=50)),
('last_name', models.TextField(max_length=50)),
('phone', models.CharField(max_length=10)),
],
),
migrations.CreateModel(
name='DonationDetail',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField()),
('donator', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='accounts.donator')),
('ngo', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='accounts.ngo')),
('requirement', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='accounts.requirement')),
],
),
]
|
from hypothesis import given
from hypothesis_bio import protein
from .minimal import minimal
@given(protein())
def test_protein_type(seq):
assert type(seq) == str
def test_smallest_example():
assert minimal(protein()) == ""
def test_smallest_example_3_letter_abbrv():
assert minimal(protein(single_letter_protein=False)) == ""
def test_smallest_non_empty_example():
assert minimal(protein(min_size=1)) == "A"
def test_smallest_non_empty_example_3_letter_abbrv():
seq = minimal(protein(single_letter_protein=False, min_size=1))
assert len(seq) == 3
assert seq == "Ala"
def test_2_mer():
assert minimal(protein(min_size=2)) == "AA"
def test_2_mer_3_letter_abbrv():
seq = minimal(protein(single_letter_protein=False, min_size=2))
assert len(seq) == 6
assert seq == "AlaAla"
@given(protein(max_size=10))
def test_max_size(seq):
assert len(seq) <= 10
def test_max_size_3_letter_abbrv():
seq = minimal(protein(single_letter_protein=False, max_size=10))
assert len(seq) <= 30
assert len(seq) % 3 == 0
def test_allow_ambiguous():
seq = minimal(
protein(
single_letter_protein=True,
allow_ambiguous=True,
allow_extended=False,
min_size=1,
max_size=1,
),
lambda x: x not in norm_AA.keys(),
)
assert seq == "X"
def test_allow_ambiguous_3_letter_abbrv():
seq = minimal(
protein(
single_letter_protein=False,
allow_ambiguous=True,
allow_extended=False,
min_size=1,
max_size=1,
),
lambda x: x not in norm_AA.values(),
)
assert seq == "Xaa"
def test_allow_extended():
seq = minimal(
protein(
single_letter_protein=True,
allow_ambiguous=False,
allow_extended=True,
min_size=1,
max_size=1,
),
lambda x: x not in norm_AA.keys(),
)
assert seq == "B"
def test_allow_extended_3_letter_abbrv():
seq = minimal(
protein(
single_letter_protein=False,
allow_ambiguous=False,
allow_extended=True,
min_size=1,
max_size=1,
),
lambda x: x not in norm_AA.values(),
)
assert seq == "Asx"
def test_uppercase_only():
seq = minimal(protein(single_letter_protein=True, uppercase_only=True, min_size=1))
assert seq == "A"
def test_uppercase_only_3_letter_abbrv():
seq = minimal(protein(single_letter_protein=False, uppercase_only=True, min_size=1))
assert seq == "ALA"
def test_not_uppercase_only():
seq = minimal(
protein(single_letter_protein=True, uppercase_only=False, min_size=1),
lambda x: all(not c.isupper() for c in x),
)
assert seq == "a"
def test_not_uppercase_only_3_letter_abbrv():
seq = minimal(
protein(single_letter_protein=False, uppercase_only=False, min_size=1),
# lambda x: all(not c.isupper() for c in x)
)
assert seq == "Ala"
norm_AA = {
"A": "Ala",
"C": "Cys",
"D": "Asp",
"E": "Glu",
"F": "Phe",
"G": "Gly",
"H": "His",
"I": "Ile",
"K": "Lys",
"L": "Leu",
"M": "Met",
"N": "Asn",
"P": "Pro",
"Q": "Gln",
"R": "Arg",
"S": "Ser",
"T": "Thr",
"V": "Val",
"W": "Trp",
"Y": "Tyr",
}
|
import torch
import torch.distributed as dist
from oslo.torch.distributed._seed.helper import moe_set_seed
def _check_sanity(parallel_context):
if (
parallel_context.tensor_parallel_size > 1
or parallel_context.pipeline_parallel_size > 1
or parallel_context.sequence_parallel_size > 1
):
raise NotImplementedError(
"Expert parallelism is not compatible with "
"tensor or pipeline or sequence parallel at present."
)
class ExpertParallelInfo(object):
"""
    A class to describe information about expert parallelization and expert data parallelization
Args:
ep_size: the number of nodes in expert parallel group
dp_size: the number of nodes in expert data parallel group
parallel_context: global parallel context
"""
def __init__(self, ep_size, dp_size, parallel_context):
self.ep_size = ep_size
self.dp_size = dp_size
self.ep_group = None
self.dp_group = None
self.ep_group_ranks = None
self.dp_group_ranks = None
# Create expert parallel group
rank = parallel_context.get_global_rank()
for i in range(dp_size):
ranks = [i * ep_size + j for j in range(ep_size)]
group = dist.new_group(ranks)
if rank in ranks:
self.ep_group = group
self.ep_group_ranks = ranks
self.ep_local_rank = ranks.index(rank)
# Create expert data parallel group
for j in range(ep_size):
ranks = [i * ep_size + j for i in range(dp_size)]
group = dist.new_group(ranks)
if rank in ranks:
self.dp_group = group
self.dp_group_ranks = ranks
self.dp_local_rank = ranks.index(rank)
def get_dp_group(self):
return self.dp_group
def get_ep_group(self):
return self.ep_group
def get_dp_local_rank(self):
return self.dp_local_rank
def get_ep_local_rank(self):
return self.ep_local_rank
def get_ep_group_ranks(self):
return self.ep_group_ranks
def get_dp_group_ranks(self):
return self.dp_group_ranks
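# Illustration of the grouping above (assumed values, not part of the original code):
# with 8 global ranks, ep_size=4 and dp_size=2, the loops in __init__ build
#   expert parallel groups:      [0, 1, 2, 3] and [4, 5, 6, 7]
#   expert data parallel groups: [0, 4], [1, 5], [2, 6], [3, 7]
# so every rank belongs to exactly one group of each kind.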
class ExpertParallelContext(object):
"""
    A class to describe the expert parallel context
Args:
parallel_context: global parallel context
use_kernel_optim: flag to use kernel optimization
"""
def __init__(self, parallel_context, use_kernel_optim):
self.world_size = parallel_context.expert_parallel_size
self.parallel_context = parallel_context
self.use_kernel_optim = use_kernel_optim
self.min_dp_size = 1
self.aux_loss = None
self.has_setup = False
self._parallel_info_dict = dict()
@property
def parallel_info_dict(self):
return self._parallel_info_dict
@property
def is_initialized(self):
return self.has_setup
def setup(self, seed: int):
"""
Set base information about expert parallel context
Args:
seed: random seed value for expert parallel
"""
assert (
not self.is_initialized
), "MoE distributed context shouldn't be set up again"
_check_sanity(self.parallel_context)
assert (
self.world_size % self.parallel_context.expert_parallel_size == 0
), "Maximum expert parallel size must be a factor of the number of GPUs"
self.min_dp_size = self.world_size // self.parallel_context.expert_parallel_size
moe_set_seed(self.parallel_context, seed)
self.has_setup = True
def get_info(self, num_experts: int):
"""
        If there is no cached information for the given num_experts, create, cache, and return it.
        Otherwise, return the cached expert parallel information.
        Args:
            num_experts: the total number of experts
        Returns:
            num_local_experts: the number of experts local to each device, together with
            the matching ExpertParallelInfo instance
"""
gt_flag = (
num_experts % self.parallel_context.expert_parallel_size == 0
) # check whether num_experts is greater
lt_flag = (
self.parallel_context.expert_parallel_size % num_experts == 0
) # check whether num_experts is less
assert gt_flag or lt_flag, (
"Automatic experts placement dose not not support expert number"
" is not a multiple of ep size or vice versa."
)
        # If the number of experts is greater than the maximum expert parallel size (a.k.a. ep_size),
        # each GPU holds multiple, different experts, so the expert data parallel size is 1.
        # Otherwise, there is only one expert per GPU and the data parallel size
        # has to be calculated from the expert parallel size and the number of experts.
dp_size = (
1 if gt_flag else self.parallel_context.expert_parallel_size // num_experts
)
ep_size = self.parallel_context.expert_parallel_size // dp_size
# Calculate the number of experts for each GPU
num_local_experts = (
1 if lt_flag else num_experts // self.parallel_context.expert_parallel_size
)
# Don't forget to multiply minimum data parallel size
dp_size *= self.min_dp_size
        if ep_size not in self.parallel_info_dict:
self.parallel_info_dict[ep_size] = ExpertParallelInfo(
ep_size, dp_size, self.parallel_context
)
return num_local_experts, self.parallel_info_dict[ep_size]
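    # Illustration of get_info (assumed values, not part of the original code):
    # with expert_parallel_size = 4 and num_experts = 8, gt_flag is True, so
    # dp_size starts at 1 (before being multiplied by min_dp_size), ep_size = 4,
    # and num_local_experts = 8 // 4 = 2, i.e. two experts per device.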
def set_kernel_not_use(self):
self.use_kernel_optim = False
def reset_loss(self):
self.aux_loss = 0
def add_loss(self, loss):
self.aux_loss += loss
def get_loss(self):
return self.aux_loss
def get_world_size(self):
return self.world_size
|
from datetime import datetime
import hashlib
def construct_url(base: str, endpoint: str) -> str:
"""Construct a URL from a base URL and an API endpoint.
Args:
base: The root address, e.g. http://api.backblaze.com/b2api/v1.
endpoint: The path of the endpoint, e.g. /list_buckets.
Returns:
A URL based on the info.
"""
return ''.join((base, endpoint))
def read_file(file_name: str) -> bytes:
"""Reads the bytes of a file.
Args:
file_name: The file to read.
Returns:
The bytes of the file.
"""
with open(file_name, 'rb') as f:
return f.read()
def write_file(file_name: str, contents: bytes):
    """Writes bytes to a file.
    Args:
        file_name: The file to write.
        contents: The bytes to write.
    Returns:
        The number of bytes written.
    """
with open(file_name, 'wb') as f:
return f.write(contents)
def sha1(contents: bytes) -> str:
"""
Args:
contents: The bytes to hash.
Returns:
The sha1 hash of the contents.
"""
return hashlib.sha1(contents).hexdigest()
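# Minimal usage sketch (hypothetical values, not part of the original module):
if __name__ == '__main__':
    url = construct_url('http://api.backblaze.com/b2api/v1', '/list_buckets')
    print(url)                    # http://api.backblaze.com/b2api/v1/list_buckets
    print(sha1(b'hello, world'))  # hex digest of the given bytes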
|
from pylab import *
from scipy.signal import *
from velocity import *
# Plotting, velocity curves and derivatives
def plotcurves(curves, titles, vel_yrange=None, dif_yrange=None):
for n, v in enumerate(curves):
acc = v-vel
subplot(len(curves),2,n*2+1)
plot(time, v)
        if vel_yrange is not None:
axis([time[0],time[-1],vel_yrange[0],vel_yrange[1]])
title(titles[n]+': velocity')
subplot(len(curves),2,n*2+2)
plot(time, acc)
        if dif_yrange is not None:
axis([time[0],time[-1],dif_yrange[0],dif_yrange[1]])
title(titles[n]+': ideal difference')
if __name__=="__main__":
# Constants
    sr = 1000.0
    T = 1/sr
    r = int(sr/100)
    noise_max = 1e-05  # This is ||e_k||inf
# Define a velocity curve
vel = array([0.]*(15*r) + [1.]*(4*r) + [2.]*(25*r) + [0.]*(5*r)
+ [-1.]*(3*r) + [-1.]*(20*r))
    time = arange(len(vel))/float(sr)
# Another interesting test signal
# vel = (((0.5+sin(time*50)*pow(2,-time*1))
# + (0.2+sin(time*500)*0.2*pow(2,-time*1)))
# *concatenate((ones(len(time)/2),
# zeros(len(time)/2))))
# Integrate it to get position
pos = lfilter([1], [1,-1], vel)*T
# Add some noise
pos = pos + rand(len(pos))*noise_max
# Finite difference
fdvel = lfilter([1,-1],[1],pos)/T
# Butterworth 300 Hz
[B,A] = butter(2, 300/(sr/2))
bwvel = lfilter(B,A,fdvel)
# FD skip 3
dist = 3
fd3vel = lfilter(array([1]+[0]*(dist-1)+[-1])/float(dist),[1],pos)/T
lsvel = lfilter(leastsquares(15), 1, pos)/T
levantvel1 = levant(pos, sr, C=max(abs(vel[1:]-vel[:-1]))/T, rk=1)
levantvel2 = levant(pos, sr, C=max(abs(vel[1:]-vel[:-1]))/T, rk=2)
levantvel4 = levant(pos, sr, C=max(abs(vel[1:]-vel[:-1]))/T, rk=4)
endfitfoawvel = foaw(pos, sr, noise_max, n=16, best=False)
bestfitfoawvel = foaw(pos, sr, noise_max, n=16, best=True)
mpos = median_filter(pos, n=3)
endfitfoawvelm = foaw(mpos, sr, noise_max, n=16, best=False)
bestfitfoawvelm = foaw(mpos, sr, noise_max, n=16, best=True)
curves = [fdvel, fd3vel, bwvel, lsvel]
titles = ['Simple Finite Difference',
'Finite difference 3',
'Butterworth 300 Hz',
'Least Squares']
figure(1)
clf()
plotcurves(curves, titles, vel_yrange = [-1.5, 2.5],
dif_yrange = [-0.3, 0.3])
curves = [endfitfoawvel,bestfitfoawvel,endfitfoawvelm,bestfitfoawvelm]
titles = ['end-fit-FOAW','best-fit-FOAW','end-fit-FOAW w/ median',
'best-fit-FOAW w/ median']
figure(2)
clf()
plotcurves(curves, titles, vel_yrange = [-1.5, 2.5],
dif_yrange = [-0.3, 0.3])
curves = [levantvel1, levantvel2, levantvel4]
titles = ['Levant RK=1',
'Levant RK=2',
'Levant RK=4']
figure(3)
clf()
plotcurves(curves, titles, vel_yrange = [-1.5, 2.5],
dif_yrange = [-0.3, 0.3])
figure(4)
clf()
plot(vel, label='ideal')
plot(lsvel, label='ls')
plot(bestfitfoawvel, label='bf-foaw')
plot(levantvel1, label='levant1')
plot(levantvel2, label='levant2')
plot(levantvel4, label='levant4')
legend()
def rms(x):
return sqrt(sum((x[r:] - vel[r:])*(x[r:] - vel[r:])))
    r = len(levantvel1)//5
    print('bf-foaw error (%d Hz) =' % sr, rms(bestfitfoawvel))
    print('ef-foaw error (%d Hz) =' % sr, rms(endfitfoawvel))
    print('bw2-300 error (%d Hz) =' % sr, rms(bwvel))
    print('levant1 error (%d Hz) =' % sr, rms(levantvel1))
    print('levant2 error (%d Hz) =' % sr, rms(levantvel2))
    print('levant4 error (%d Hz) =' % sr, rms(levantvel4))
    print('fd error (%d Hz) =' % sr, rms(fdvel))
show()
|
import asyncio
import contextlib
import pytest
from qurpc import *
class Error(QuLabRPCError):
pass
class MySrv:
def __init__(self, sid=''):
self.sid = sid
class Test:
def hello(self):
return "hello, world"
self.sub = Test()
def add(self, a, b):
return a + b
async def add_async(self, a, b):
await asyncio.sleep(0.2)
return a + b
def add_fut(self, a, b):
return self.add_async(a, b)
def error(self):
raise Error('error')
def serverError(self):
return 1 / 0
async def sleep(self, t):
await asyncio.sleep(t)
def gen(self):
get = None
for i in range(10):
get = yield i if get is None else get
async def async_gen(self):
get = None
for i in range(10):
await asyncio.sleep(0.01)
get = yield i if get is None else get
@contextlib.contextmanager
def context(self, x):
try:
yield x
finally:
pass
async def async_context(self, x):
class AContext():
def __init__(self, x):
self.x = x
async def __aenter__(self):
return self.x
async def __aexit__(self, exc_type, exc_value, traceback):
pass
return AContext(x)
@pytest.fixture()
def server(event_loop):
s = ZMQServer(loop=event_loop)
s.set_module(MySrv())
s.start()
yield s
s.close()
@pytest.mark.asyncio
async def test_gen(server, event_loop):
c = ZMQClient('tcp://127.0.0.1:%d' % server.port,
timeout=0.7,
loop=event_loop)
await c.connect()
v1 = []
v2 = list(range(10))
async for v in c.gen():
v1.append(v)
assert len(v1) == len(v2)
assert v1 == v2
@pytest.mark.asyncio
async def test_async_gen(server, event_loop):
c = ZMQClient('tcp://127.0.0.1:%d' % server.port,
timeout=0.7,
loop=event_loop)
await c.connect()
v1 = []
v2 = list(range(10))
async for v in c.async_gen():
v1.append(v)
assert len(v1) == len(v2)
assert v1 == v2
|
f = float(input('Podaj liczbe zmiennoprzecinkowa: '))
d = int('%1d' % f)
x = d % 10
z = float('%.1f' % f) - d
y = int(z * 10)
print('Cyfra przed przecinkiem: ', x)
print('Cyfra po przecinku: ', y)
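# Worked example (hypothetical input, not part of the original script): for f = 12.34,
# d = 12, so x = 2 is the digit just before the decimal point, and
# z = 12.3 - 12 is about 0.3, so y = 3 is the first digit after it.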
|
# Copyright 2020 The Kraken Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
import xml.etree.ElementTree as ET
from . import utils
from . import tool
log = logging.getLogger(__name__)
def collect_tests(step):
params = step.get('params', '')
cwd = step.get('cwd', '.')
pytest_exe = step.get('pytest_exe', 'pytest-3')
params = params.replace('-vv', '')
params = params.replace('-v', '')
pypath = step.get('pythonpath', '')
if pypath:
pypath = ':' + pypath
cmd = 'PYTHONPATH=`pwd`%s %s --collect-only -q %s | head -n -2' % (pypath, pytest_exe, params)
ret, out = utils.execute(cmd, cwd=cwd, out_prefix='')
if ret != 0:
log.error('problem with collecting tests:\n%s', out)
raise Exception('problem with collecting tests')
tests = out
tests = tests.splitlines()
tests2 = []
for t in tests:
if not t:
continue
if '= warnings summary =' in t:
break
tests2.append(t)
return tests2
def run_tests(step, report_result=None):
params = step.get('params', '')
tests = step['tests']
pytest_exe = step.get('pytest_exe', 'pytest-3')
cwd = step.get('cwd', '.')
pypath = step.get('pythonpath', '')
if pypath:
pypath = ':' + pypath
for test in tests:
cmd = 'PYTHONPATH=`pwd`%s %s -vv -r ap --junit-xml=result.xml %s %s' % (pypath, pytest_exe, params, test)
ret, _ = utils.execute(cmd, cwd=cwd, out_prefix='')
result = dict(cmd=cmd, test=test)
if ret != 0:
result['status'] = 3 # error
report_result(result)
continue
tree = ET.parse(os.path.join(cwd, 'result.xml'))
root = tree.getroot()
errors = 0
if root.get('errors'):
errors = int(root.get('errors'))
failures = 0
if root.get('failures'):
failures = int(root.get('failures'))
skips = 0
if root.get('skips'):
skips = int(root.get('skips'))
if errors > 0:
result['status'] = 3 # error
elif failures > 0:
result['status'] = 2 # failed
elif skips > 0:
result['status'] = 4 # disabled
else:
result['status'] = 1 # passed
report_result(result)
return 0, ''
if __name__ == '__main__':
tool.main()
|
# A function that returns the 'year' value:
def myFunc(e):
return e['year']
# A lambda used as the key function to sort by year
myF = lambda e: e['year']
cars = [
{'car': 'Ford', 'year': 2005},
{'car': 'Mitsubishi', 'year': 2000},
{'car': 'BMW', 'year': 2019},
{'car': 'VW', 'year': 2011}
]
cars.sort(key=myF)
print(cars)
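# Expected output for the list above (sorted by year, ascending):
# [{'car': 'Mitsubishi', 'year': 2000}, {'car': 'Ford', 'year': 2005},
#  {'car': 'VW', 'year': 2011}, {'car': 'BMW', 'year': 2019}]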
|
from .metacrypt import MetaCrypt
|
"""Implementation of SMS"""
|
def run_this_command(cmd):
import subprocess
res = subprocess.run(cmd.split(' '), stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
return res.stdout.decode('ascii').strip(), res.returncode == 0
build_hash = "N/A"
build_tag = "untagged build"
build_branch = "unknown branch"
_, this_is_a_git_repo = run_this_command("git status")
if this_is_a_git_repo:
build_hash, _ = run_this_command("git log --pretty=format:%h -n 1")
_, every_files_clean = run_this_command("git diff --quiet --exit-code")
if not every_files_clean:
build_hash += "+"
tag, ok = run_this_command("git describe --exact-match --tags")
if ok:
build_tag = tag
build_branch, _ = run_this_command("git rev-parse --abbrev-ref HEAD")
try:
with open("version.cpp", "r") as f:
content = f.read()
except:
content = ""
output = [ '#include "version.h"']
output += [f'const char* things::build_hash = "{build_hash}";']
output += [f'const char* things::build_tag = "{build_tag}";']
output += [f'const char* things::build_branch = "{build_branch}";']
output = "\n".join(output)
if content != output:
with open("version.cpp", "w") as f:
f.seek(0)
f.write(output)
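# Example of the generated version.cpp (hash, tag and branch values are hypothetical):
#   #include "version.h"
#   const char* things::build_hash = "1a2b3c4";
#   const char* things::build_tag = "untagged build";
#   const char* things::build_branch = "main";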
|
'''
MEDIUM 74. Search a 2D Matrix
Write an efficient algorithm that searches for a value in an m x n matrix. This matrix has the following properties:
Integers in each row are sorted from left to right.
The first integer of each row is greater than the last integer of the previous row.
Example 1:
Input: matrix = [[1,3,5,7],[10,11,16,20],[23,30,34,60]], target = 3
Output: true
Example 2:
Input: matrix = [[1,3,5,7],[10,11,16,20],[23,30,34,60]], target = 13
Output: false
'''
from typing import List


class Solution:
def searchMatrix(self, matrix: List[List[int]], target: int) -> bool:
row = len(matrix)
col = len(matrix[0])
top = row * col - 1
low = 0
while (low <= top):
mid = (top + low) // 2
currVal = matrix[int(mid // col)][int(mid % col)]
if (currVal == target):
return True
if (currVal < target):
low = mid + 1
else:
top = mid - 1
return False
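# Quick check (not part of the original solution): reproduces the two examples above.
if __name__ == '__main__':
    matrix = [[1, 3, 5, 7], [10, 11, 16, 20], [23, 30, 34, 60]]
    print(Solution().searchMatrix(matrix, 3))   # True
    print(Solution().searchMatrix(matrix, 13))  # False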
|
"""
A discogs_client based search query utility with filtering support.
"""
import configparser
import discogs_client
class Conf(configparser.ConfigParser):
"""A class to read and store configuration items."""
def __init__(self, filename):
super(Conf, self).__init__()
self.filename = filename
self.read(self.filename)
def _write(self):
with open(self.filename, 'w') as configfile:
self.write(configfile)
def save(self):
"""Write current config to file and read it again."""
self._write()
self.read(self.filename)
def enabled(self, section):
"""Return all enabled config keys for a given section"""
enabled = []
for item in self[section].keys():
if self[section].getboolean(item):
enabled.append(item)
return enabled
def query(catalog_number):
"""
Search query for catalog number.
Return a list of discogs_client.Results.
"""
api_conf = Conf("search/api.ini")
connection = discogs_client.Client(api_conf['client']['user_agent'],
user_token=api_conf['user']['user_token'])
results = connection.search(catalog_number, type='catno')
return results
def result_filter(search_result):
"""Filter key values from a configured list of search results."""
filter_conf = Conf("search/filter.ini")
dict_filter = filter_conf["filter"]
filter_true = filter(dict_filter.getboolean, dict_filter)
shown_result = {}
try:
for key in filter_true:
shown_result[key] = search_result[key]
except KeyError as e:
pass
# print("The result from the query did not have field requested in filter config. {}".format(e))
return shown_result
def search(catalog_number):
"""
A top level `catalog_number` search that returns a list of result dicts.
    Usually catalog numbers are unique, but not always, hence the returned list.
"""
results = query(catalog_number)
result_list = []
for result in results:
dict_result = vars(result)["data"]
result_list.append(result_filter(dict_result))
return result_list
def test_search():
"""Top level test for this package."""
search("W-90629")
return True
def test_config():
"""Test to see if config can read and write."""
filter_conf = Conf("search/filter.ini")
item = "Test"
filter_conf[item] = {} # Create section
filter_conf[item][item] = item # Create option
filter_conf.save()
if filter_conf[item][item] == item:
filter_conf.remove_option(item, item)
filter_conf.remove_section(item)
filter_conf.save()
if not filter_conf.has_option(item, item):
return True
return False
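# Typical usage sketch (requires valid credentials in search/api.ini; the catalog
# number is the one used by test_search above):
#   for entry in search("W-90629"):
#       print(entry)
# Each entry is a dict containing only the keys enabled in search/filter.ini.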
|
from typing import Any, Dict
from weakref import ref
from flask import Blueprint
from kombu import Connection
from rethinkdb.ast import Table
from pysite.constants import (
BOT_EVENT_QUEUE, BotEventTypes,
RMQ_HOST, RMQ_PASSWORD, RMQ_PORT, RMQ_USERNAME
)
from pysite.database import RethinkDB
from pysite.oauth import OAuthBackend
BOT_EVENT_REQUIRED_PARAMS = {
"mod_log": ("level", "title", "message"),
"send_message": ("target", "message"),
"send_embed": ("target",),
"add_role": ("target", "role_id", "reason"),
"remove_role": ("target", "role_id", "reason")
}
class DBMixin:
"""
Mixin for classes that make use of RethinkDB. It can automatically create a table with the specified primary
key using the attributes set at class-level.
This class is intended to be mixed in alongside one of the other view classes. For example:
>>> class MyView(APIView, DBMixin):
... name = "my_view" # Flask internal name for this route
... path = "/my_view" # Actual URL path to reach this route
... table_name = "my_table" # Name of the table to create
... table_primary_key = "username" # Primary key to set for this table
This class will also work with Websockets:
>>> class MyWebsocket(WS, DBMixin):
... name = "my_websocket"
... path = "/my_websocket"
... table_name = "my_table"
... table_primary_key = "username"
    You may omit `table_primary_key`; it will default to RethinkDB's default column - "id".
"""
table_name = "" # type: str
table_primary_key = "id" # type: str
@classmethod
def setup(cls: "DBMixin", manager: "pysite.route_manager.RouteManager", blueprint: Blueprint):
"""
Set up the view by creating the table specified by the class attributes - this will also deal with multiple
inheritance by calling `super().setup()` as appropriate.
:param manager: Instance of the current RouteManager (used to get a handle for the database object)
:param blueprint: Current Flask blueprint
"""
if hasattr(super(), "setup"):
super().setup(manager, blueprint) # pragma: no cover
cls._db = ref(manager.db)
@property
def table(self) -> Table:
return self.db.query(self.table_name)
@property
def db(self) -> RethinkDB:
return self._db()
class RMQMixin:
"""
Mixin for classes that make use of RabbitMQ. It allows routes to send JSON-encoded messages to specific RabbitMQ
queues.
This class is intended to be mixed in alongside one of the other view classes. For example:
>>> class MyView(APIView, RMQMixin):
... name = "my_view" # Flask internal name for this route
... path = "/my_view" # Actual URL path to reach this route
... queue_name = "my_queue" # Name of the RabbitMQ queue to send on
Note that the queue name is optional if all you want to do is send bot events.
This class will also work with Websockets:
>>> class MyWebsocket(WS, RMQMixin):
... name = "my_websocket"
... path = "/my_websocket"
... queue_name = "my_queue"
"""
queue_name = ""
@classmethod
def setup(cls: "RMQMixin", manager: "pysite.route_manager.RouteManager", blueprint: Blueprint):
"""
Set up the view by calling `super().setup()` as appropriate.
:param manager: Instance of the current RouteManager (used to get a handle for the database object)
:param blueprint: Current Flask blueprint
"""
if hasattr(super(), "setup"):
super().setup(manager, blueprint) # pragma: no cover
@property
def rmq_connection(self) -> Connection:
"""
Get a Kombu AMQP connection object - use this in a context manager so that it gets closed after you're done
If you're just trying to send a message, check out `rmq_send` and `rmq_bot_event` instead.
"""
return Connection(hostname=RMQ_HOST, userid=RMQ_USERNAME, password=RMQ_PASSWORD, port=RMQ_PORT)
def rmq_send(self, data: Dict[str, Any], routing_key: str = None):
"""
Send some data to the RabbitMQ queue
>>> self.rmq_send({
... "text": "My hovercraft is full of eels!",
... "source": "Dirty Hungarian Phrasebook"
... })
...
This will be delivered to the queue immediately.
"""
if routing_key is None:
routing_key = self.queue_name
with self.rmq_connection as c:
producer = c.Producer()
producer.publish(data, routing_key=routing_key)
def rmq_bot_event(self, event_type: BotEventTypes, data: Dict[str, Any]):
"""
Send an event to the queue responsible for delivering events to the bot
>>> self.rmq_bot_event(BotEventTypes.send_message, {
... "channel": CHANNEL_MOD_LOG,
... "message": "This is a plain-text message for @everyone, from the site!"
... })
...
This will be delivered to the bot and actioned immediately, or when the bot comes online if it isn't already
connected.
"""
if not isinstance(event_type, BotEventTypes):
raise ValueError("`event_type` must be a member of the the `pysite.constants.BotEventTypes` enum")
event_type = event_type.value
required_params = BOT_EVENT_REQUIRED_PARAMS[event_type]
for param in required_params:
if param not in data:
raise KeyError(f"Event is missing required parameter: {param}")
return self.rmq_send(
{"event": event_type, "data": data},
routing_key=BOT_EVENT_QUEUE,
)
class OAuthMixin:
"""
    Mixin for classes that need access to a logged-in user's information. This class should be used
    to grant routes access to user information, such as name, email, and ID.
    There will almost never be a need for someone to inherit this, as BaseView does that for you.
    This class will add 3 properties to your route:
    * logged_in (bool): True if the user is registered with the site, False otherwise.
    * user_data (dict): A dict that looks like this:
        {
            "user_id": Their discord ID,
            "username": Their discord username (without discriminator),
            "discriminator": Their discord discriminator,
            "email": Their email address, which is connected to discord
        }
      user_data returns None if the user isn't logged in.
* oauth (OAuthBackend): The instance of pysite.oauth.OAuthBackend, connected to the RouteManager.
"""
@classmethod
def setup(cls: "OAuthMixin", manager: "pysite.route_manager.RouteManager", blueprint: Blueprint):
if hasattr(super(), "setup"):
super().setup(manager, blueprint) # pragma: no cover
cls._oauth = ref(manager.oauth_backend)
@property
def logged_in(self) -> bool:
return self.user_data is not None
@property
def user_data(self) -> dict:
return self.oauth.user_data()
@property
def oauth(self) -> OAuthBackend:
return self._oauth()
|
import random
from manga_py.crypt import ManhuaGuiComCrypt
from manga_py.provider import Provider
from .helpers.std import Std
class ManhuaGuiCom(Provider, Std):
servers = [
'i.hamreus.com:8080',
'us.hamreus.com:8080',
'dx.hamreus.com:8080',
'eu.hamreus.com:8080',
'lt.hamreus.com:8080',
]
def _get_ch_idx(self):
chapter = self.chapter
return self.re.search(r'/comic/[^/]+/(\d+)', chapter.get('href')).group(1)
def get_archive_name(self) -> str:
return super().get_archive_name() + '-' + self._get_ch_idx()
def get_chapter_index(self) -> str:
chapter = self.chapter
span = chapter.cssselect('span')
idx = self._get_ch_idx()
if span:
span = span[0].text_content()
i = self.re.search(r'(\d+)', span).group(1)
return '{}-{}'.format(i, idx)
return '0-{}'.format(idx)
def get_content(self):
_ = self._get_name(r'/comic/(\d+)')
return self.http_get('{}/comic/{}/'.format(self.domain, _))
def get_manga_name(self) -> str:
url = self.get_url()
selector = 'h1'
if self.re.search(r'/comic/\d+/\d+\.html', url):
selector = 'h1 > a'
return self.html_fromstring(url, selector, 0).text_content()
def get_chapters(self):
parser = self.document_fromstring(self.content)
chapters = parser.cssselect('.chapter-list li > a')
if not len(chapters):
code = parser.cssselect('#__VIEWSTATE')[0].get('value')
manhuagui = ManhuaGuiComCrypt()
js = manhuagui.decrypt('LZString.decompressFromBase64("' + code + '")', '<a></a>')
chapters = self.document_fromstring(js, '.chapter-list li > a')
return chapters
def parse_files_data(self, data):
images = []
md5 = data.get('sl', {}).get('md5', '')
cid = data.get('cid', '')
for i in data.get('files', []):
prior = 3
ln = len(self.servers)
server = int(random.random() * (ln + prior))
server = 0 if server < prior else server - prior
images.append('http://{}{}{}?cid={}&md5={}'.format(
self.servers[server],
data.get('path', ''),
i, cid, md5
))
return images
def get_files(self):
url = self.chapter
self._storage['referer'] = url
content = self.http_get(url)
js = self.re.search(r'\](\(function\(.+\))\s?<', content)
if not js:
return []
manhuagui = ManhuaGuiComCrypt()
data = manhuagui.decrypt(js.group(1), '')
data = self.re.search(r'\(({.+})\)', data)
if not data:
return []
data = self.json.loads(data.group(1))
return self.parse_files_data(data)
def get_cover(self):
return self._cover_from_content('.hcover img')
def book_meta(self) -> dict:
# todo meta
pass
main = ManhuaGuiCom
|
"""
prepacked queries
"""
import warnings
warnings.warn("prepacked_queries are deprecated", DeprecationWarning, stacklevel=2)
DEPRECATION_MESSAGE = (
"The prepacked_queries modules will be removed. A replacement is under consideration but not guaranteed."
)
|
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from optparse import make_option
import sys
class GenericCommand(BaseCommand):
''' paloma postfix management
'''
args = ''
help = ''
model = None
option_list = BaseCommand.option_list + (
make_option(
'--id',
action='store',
dest='id',
default=None,
            help=u'entity id (message, user, ...)'
),
make_option(
'-s',
'--sync',
action='store_true',
dest='sync',
default=False,
help=u'Synchronous Call'),
make_option(
'--file',
action='store',
dest='file',
default='stdin',
help=u'flle'),
make_option(
'--description',
action='store',
dest='description',
default=None,
help=u'Description'),
make_option(
'--eta',
action='store',
dest='eta',
default=None,
help=u'Estimated Time of Arrival'),
make_option(
'--encoding',
action='store',
dest='encoding',
default='utf-8',
help=u'encoding'),
make_option(
'-d', '--dryrun',
action='store_true',
dest='dryrun', default=False,
            help=u'''False (default): modify data on storage;
                    True: print data to the console
                  '''),
make_option(
'--async',
action='store',
dest='async',
default=True,
help=u'Asynchronos Execution'),
)
''' Command Option '''
def open_file(self, options):
fp = sys.stdin if options['file'] == 'stdin' else open(options['file'])
return fp
def handle_count(self, *args, **option):
if self.model:
            print(self.model, self.model.objects.count())
def handle_help(self, *args, **options):
''' help
'''
import re
for i in dir(self):
m = re.search('^handle_(.*)$', i)
if m is None:
continue
print "subcommand:", m.group(1)
print args
print options
def handle(self, *args, **options):
''' command main '''
if len(args) < 1:
self.handle_help(*args, **options)
return "a sub command must be specfied"
self.command = args[0]
getattr(self,
'handle_%s' % self.command,
GenericCommand.handle_help)(*args[1:], **options)
|
from .manager import AppManager
from .deploy import deploy
from .test import test
from .show import show
from .build import build
import click
@click.group()
@click.pass_context
def kctl(ctx):
"""
kctl controls the \033[1;3;4;34mKoursaros\033[0m platform.
Find more information at: https://github.com/koursaros-ai/koursaros
"""
ctx.obj = AppManager()
kctl.add_command(deploy)
kctl.add_command(test)
kctl.add_command(show)
kctl.add_command(build)
def main():
kctl(prog_name=__package__)
if __name__ == "__main__":
main()
|
import unittest
import tempfile
from xl_helper.FileUtils import FileUtils
from tests.util.TestingUtils import TestingUtils
class TestWithTempDirs(unittest.TestCase):
default_temp = tempfile.mkdtemp()
created_dirs = [default_temp]
test_config = TestingUtils.get_test_config()
@classmethod
def tearDownClass(cls):
super(TestWithTempDirs, cls).tearDownClass()
for td in TestWithTempDirs.created_dirs:
print("Removing temporary directory %s" % td)
FileUtils.delete_dirs(td)
def create_temp_dir(self):
d = tempfile.mkdtemp()
TestWithTempDirs.created_dirs.append(d)
return d
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-20 13:26
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('published_from', models.DateTimeField(default=django.utils.timezone.now, verbose_name='published from')),
('published_to', models.DateTimeField(blank=True, null=True, verbose_name='published from')),
('title', models.CharField(max_length=200, unique=True, verbose_name='title')),
('verbose_address', models.TextField(blank=True, verbose_name='Verbose address')),
],
options={
'verbose_name_plural': 'buildings',
'verbose_name': 'building',
},
),
migrations.CreateModel(
name='Office',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('published_from', models.DateTimeField(default=django.utils.timezone.now, verbose_name='published from')),
('published_to', models.DateTimeField(blank=True, null=True, verbose_name='published from')),
('title', models.CharField(max_length=200, unique=True, verbose_name='title')),
],
options={
'verbose_name_plural': 'offices',
'verbose_name': 'office',
},
),
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('published_from', models.DateTimeField(default=django.utils.timezone.now, verbose_name='published from')),
('published_to', models.DateTimeField(blank=True, null=True, verbose_name='published from')),
('title', models.CharField(max_length=500, unique=True, verbose_name='title')),
('email_domain', models.CharField(max_length=100, unique=True, verbose_name='email domain')),
('filter_label', models.CharField(blank=True, max_length=20, verbose_name='filter label')),
('email_background_color', models.CharField(blank=True, default='lightskyblue', max_length=20)),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='paorganizations.Organization', verbose_name='parent organization')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('published_from', models.DateTimeField(default=django.utils.timezone.now, verbose_name='published from')),
('published_to', models.DateTimeField(blank=True, null=True, verbose_name='published from')),
('first_name', models.CharField(blank=True, max_length=50, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=50, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'people',
'verbose_name': 'person',
},
),
migrations.AddField(
model_name='office',
name='components',
field=models.ManyToManyField(blank=True, to='paorganizations.Person', verbose_name='components'),
),
migrations.AddField(
model_name='office',
name='organization',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='paorganizations.Organization', verbose_name='organization'),
),
migrations.AddField(
model_name='office',
name='parent',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='paorganizations.Office', verbose_name='parent organization'),
),
migrations.AddField(
model_name='building',
name='organization',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='paorganizations.Organization', verbose_name='organization'),
),
]
|
from __future__ import print_function
import numpy as np
from scipy.stats import multivariate_normal
import matplotlib.pyplot as plt
from matplotlib import cm
import pyplasma as plasma
import pyplasmaDev as pdev
import initialize as init
import injector
from visualize_amr import plot2DSlice
def filler(xloc, uloc, ispcs, conf):
delgam = np.sqrt(1.0)
mean = [3.0, 0.0, 0.0]
cov = np.zeros((3,3))
cov[0,0] = 1.0
cov[1,1] = 2.0
cov[2,2] = 5.0
f = multivariate_normal.pdf(uloc, mean, cov)
return f
def insert_em(node, conf):
for i in range(node.getNx()):
for j in range(node.getNy()):
c = node.getCellPtr(i,j)
yee = c.getYee(0)
for q in range(conf.NxMesh):
for k in range(conf.NyMesh):
yee.ex[q,k,0] = 0.0
yee.ey[q,k,0] = 0.0
yee.ez[q,k,0] = 0.0
yee.bx[q,k,0] = 0.0
yee.by[q,k,0] = 0.0
yee.bz[q,k,0] = 0.1
class Conf:
outdir = "out"
#--------------------------------------------------
# space parameters
Nx = 1
Ny = 1
dt = 1.0
dx = 1.0
dy = 1.0
dz = 1.0
NxMesh = 1
NyMesh = 1
NzMesh = 1
qm = -1.0
#--------------------------------------------------
# velocity space parameters
vxmin = -10.0
vymin = -10.0
vzmin = -10.0
vxmax = 10.0
vymax = 10.0
vzmax = 10.0
Nxv = 20
Nyv = 20
Nzv = 20
#vmesh refinement
refinement_level = 2
clip = True
clipThreshold = 1.0e-5
def saveVisz(lap, conf):
slap = str(lap).rjust(4, '0')
fname = conf.outdir + '/amr2_{}.png'.format(slap)
plt.savefig(fname)
def plotAll(axs, node, conf, lap):
#for i in range(node.getNx()):
# for j in range(node.getNy()):
# cid = node.cellId(i,j)
# c = node.getCellPtr(cid)
# blocks = c.getPlasma(0,0)
# vmesh = block[0,0,0]
# print("xx", vmesh.get_cells(True))
#get first of first velomesh from cell
cid = node.cellId(0,0)
c = node.getCellPtr(cid) #get cell ptr
block = c.getPlasmaSpecies(0,0) # timestep 0
vmesh = block[0,0,0]
#print("xx", vmesh.get_cells(True))
rfl = conf.refinement_level
args = {"dir":"xy",
"q": "mid",
"rfl": rfl }
plot2DSlice(axs[0], vmesh, args)
args = {"dir":"xz",
"q": "mid",
"rfl": rfl }
plot2DSlice(axs[1], vmesh, args)
args = {"dir":"yz",
"q": "mid",
"rfl": rfl }
plot2DSlice(axs[2], vmesh, args)
#set_xylims(axs)
saveVisz(lap, conf)
# dig out velocity meshes from containers and feed to actual adapter
def adapt(node):
cid = node.cellId(0,0)
cell = node.getCellPtr(cid)
block = cell.getPlasmaSpecies(0,0)
vmesh = block[0,0,0]
adaptVmesh(vmesh)
def adaptVmesh(vmesh):
adapter = pdev.Adapter()
sweep = 1
while(True):
adapter.check(vmesh)
adapter.refine(vmesh)
print("cells to refine: {}".format( len(adapter.cells_to_refine)))
for cid in adapter.cells_created:
rfl = vmesh.get_refinement_level(cid)
indx = vmesh.get_indices(cid)
uloc = vmesh.get_center(indx, rfl)
val = pdev.tricubic_interp(vmesh, indx, [0.5,0.5,0.5], rfl)
vmesh[indx[0], indx[1], indx[2], rfl] = val
adapter.unrefine(vmesh)
print("cells to be removed: {}".format( len(adapter.cells_removed)))
sweep += 1
if sweep > conf.refinement_level: break
if conf.clip:
print("clipping...")
vmesh.clip_cells(conf.clipThreshold)
if __name__ == "__main__":
##################################################
# set up plotting and figure
plt.fig = plt.figure(1, figsize=(12,20))
plt.rc('font', family='serif', size=12)
plt.rc('xtick')
plt.rc('ytick')
gs = plt.GridSpec(3, 1)
gs.update(hspace = 0.5)
axs = []
axs.append( plt.subplot(gs[0,0]) )
axs.append( plt.subplot(gs[1,0]) )
axs.append( plt.subplot(gs[2,0]) )
##################################################
# node configuration
conf = Conf()
node = plasma.Grid(conf.Nx, conf.Ny)
xmin = 0.0
xmax = conf.dx*conf.Nx*conf.NxMesh
ymin = 0.0
ymax = conf.dy*conf.Ny*conf.NyMesh
node.setGridLims(xmin, xmax, ymin, ymax)
init.loadCells(node, conf)
##################################################
# load values into cells
injector.inject(node, filler, conf)
insert_em(node, conf)
#visualize initial condition
plotAll(axs, node, conf, 0)
vsol = pdev.AmrMomentumLagrangianSolver()
print("solving momentum space push")
cid = node.cellId(0,0)
cell = node.getCellPtr(cid)
for lap in range(1,20):
print("-------lap {} -------".format(lap))
vsol.solve(cell)
cell.cycle()
if conf.clip:
cell.clip()
plotAll(axs, node, conf, lap)
|
from distutils.version import LooseVersion
from typing import Tuple, Union
import torch
from torch_complex.tensor import ComplexTensor
from espnet2.diar.layers.tcn_nomask import TemporalConvNet
from espnet2.enh.layers.complex_utils import is_complex
from espnet2.enh.separator.abs_separator import AbsSeparator
is_torch_1_9_plus = LooseVersion(torch.__version__) >= LooseVersion("1.9.0")
class TCNSeparatorNomask(AbsSeparator):
def __init__(
self,
input_dim: int,
layer: int = 8,
stack: int = 3,
bottleneck_dim: int = 128,
hidden_dim: int = 512,
kernel: int = 3,
causal: bool = False,
norm_type: str = "gLN",
):
"""Temporal Convolution Separator
Note that this separator is equivalent to TCNSeparator except
for not having the mask estimation part.
This separator outputs the intermediate bottleneck feats
(which is used as the input to diarization branch in enh_diar task).
This separator is followed by MultiMask module,
which estimates the masks.
Args:
input_dim: input feature dimension
layer: int, number of layers in each stack.
stack: int, number of stacks
bottleneck_dim: bottleneck dimension
hidden_dim: number of convolution channels
kernel: int, kernel size.
causal: bool, default False.
norm_type: str, choose from 'BN', 'gLN', 'cLN'
"""
super().__init__()
self.tcn = TemporalConvNet(
N=input_dim,
B=bottleneck_dim,
H=hidden_dim,
P=kernel,
X=layer,
R=stack,
norm_type=norm_type,
causal=causal,
)
self._output_dim = bottleneck_dim
def forward(
self, input: Union[torch.Tensor, ComplexTensor], ilens: torch.Tensor
) -> Tuple[torch.Tensor, torch.Tensor]:
"""Forward.
Args:
input (torch.Tensor or ComplexTensor): Encoded feature [B, T, N]
ilens (torch.Tensor): input lengths [Batch]
Returns:
feats (torch.Tensor): [B, T, bottleneck_dim]
ilens (torch.Tensor): (B,)
"""
# if complex spectrum
if is_complex(input):
feature = abs(input)
else:
feature = input
feature = feature.transpose(1, 2) # B, N, L
feats = self.tcn(feature) # [B, bottleneck_dim, L]
feats = feats.transpose(1, 2) # B, L, bottleneck_dim
return feats, ilens
@property
def output_dim(self) -> int:
return self._output_dim
@property
def num_spk(self):
return None
|
import sys
def usage():
print 'Usage: jython jython_checker.py <module name created by make_checker>'
sys.exit(1)
if not len(sys.argv) == 2:
usage()
checker_name = sys.argv[1].split('.')[0]#pop off the .py if needed
try:
checker = __import__(checker_name)
except:
print 'No module "%s" found' % checker_name
usage()
import make_checker
ignored_types = ['frame',
'code',
'traceback']
checks = []
for check in checker.checks:
index, expected_type, expected_bases, expected_dict = check
if checker.names[index] in ignored_types:
print 'Skipping', checker.names[index]
continue
checks.append(check)
ignored_members = ['__getattribute__', '__doc__']
ok, missing, bad_type, different = make_checker.do_check(checker.names, checks)
def strip_ignored(differences, key, ignored):
if not key in differences:
return
problems = differences[key]
for member in ignored:
if member in problems:
problems.remove(member)
for t, name, differences in different:
strip_ignored(differences, 'missing', ignored_members)
strip_ignored(differences, 'extras', ignored_members)
make_checker.report(ok, missing, bad_type, different)
|
#
# PySNMP MIB module ZHONE-MAU-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZHONE-MAU-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:41:32 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint")
ifName, ifOperStatus, ifAdminStatus, InterfaceIndexOrZero, ifIndex = mibBuilder.importSymbols("IF-MIB", "ifName", "ifOperStatus", "ifAdminStatus", "InterfaceIndexOrZero", "ifIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, MibIdentifier, ModuleIdentity, Integer32, ObjectIdentity, iso, NotificationType, Counter64, Counter32, Bits, TimeTicks, Gauge32, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "MibIdentifier", "ModuleIdentity", "Integer32", "ObjectIdentity", "iso", "NotificationType", "Counter64", "Counter32", "Bits", "TimeTicks", "Gauge32", "Unsigned32")
TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "DisplayString", "TextualConvention")
zhoneModules, zhoneEnet = mibBuilder.importSymbols("Zhone", "zhoneModules", "zhoneEnet")
phyEnetMauMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 5504, 6, 8))
phyEnetMauMib.setRevisions(('2013-10-13 17:08', '2012-05-25 14:55', '2009-02-03 01:39', '2009-01-19 21:44', '2008-08-14 07:17', '2008-03-10 12:03', '2007-11-01 14:37', '2007-06-24 23:11', '2007-05-22 16:05', '2005-10-13 16:55', '2000-09-12 18:01',))
if mibBuilder.loadTexts: phyEnetMauMib.setLastUpdated('201310131500Z')
if mibBuilder.loadTexts: phyEnetMauMib.setOrganization('Zhone Technologies, Inc.')
zhoneIfMauTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1), )
if mibBuilder.loadTexts: zhoneIfMauTable.setStatus('current')
zhoneIfMauEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1), ).setIndexNames((0, "ZHONE-MAU-MIB", "zhoneMauIfIndex"))
if mibBuilder.loadTexts: zhoneIfMauEntry.setStatus('current')
zhoneMauIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauIfIndex.setStatus('current')
zhoneMauType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 5, 10, 11, 15, 16, 22, 23, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40))).clone(namedValues=NamedValues(("mauOther", 1), ("mau10BaseT", 5), ("mau10BaseTHD", 10), ("mau10BaseTFD", 11), ("mau100BaseTXHD", 15), ("mau100BaseTXFD", 16), ("mau1000BaseLXHD", 22), ("mau1000BaseLXFD", 23), ("mau1000BaseSXHD", 25), ("mau1000BaseSXFD", 26), ("mau1000BaseTHD", 29), ("mau1000BaseTFD", 30), ("mau10gBaseX", 31), ("mau10gBaseR", 32), ("mau10gBaseW", 33), ("mau10gBaseLX4", 34), ("mau10gBaseSR", 35), ("mau10gBaseSW", 36), ("mau10gBaseLR", 37), ("mau10gBaseLW", 38), ("mau10gBaseER", 39), ("mau10gBaseEW", 40)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauType.setStatus('current')
zhoneMauOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("other", 1), ("unknown", 2), ("operational", 3), ("standby", 4), ("shutdown", 5), ("reset", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauOperStatus.setStatus('current')
zhoneMauMediaAvailable = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("other", 1), ("unknown", 2), ("available", 3), ("notAvailable", 4), ("remoteFault", 5), ("invalidSignal", 6), ("remoteJabber", 7), ("remoteLinkLoss", 8), ("remoteTest", 9), ("offline", 10), ("autoNegError", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauMediaAvailable.setStatus('current')
zhoneMauMediaAvailStateExits = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauMediaAvailStateExits.setStatus('current')
zhoneMauJabberState = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("unknown", 2), ("noJabber", 3), ("jabbering", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauJabberState.setStatus('current')
zhoneMauJabberingStateEnters = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauJabberingStateEnters.setStatus('current')
zhoneMauFalseCarriers = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauFalseCarriers.setStatus('current')
zhoneMauDefaultType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 5, 10, 11, 15, 16, 22, 23, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40))).clone(namedValues=NamedValues(("mauOther", 1), ("mau10BaseT", 5), ("mau10BaseTHD", 10), ("mau10BaseTFD", 11), ("mau100BaseTXHD", 15), ("mau100BaseTXFD", 16), ("mau1000BaseLXHD", 22), ("mau1000BaseLXFD", 23), ("mau1000BaseSXHD", 25), ("mau1000BaseSXFD", 26), ("mau1000BaseTHD", 29), ("mau1000BaseTFD", 30), ("mau10gBaseX", 31), ("mau10gBaseR", 32), ("mau10gBaseW", 33), ("mau10gBaseLX4", 34), ("mau10gBaseSR", 35), ("mau10gBaseSW", 36), ("mau10gBaseLR", 37), ("mau10gBaseLW", 38), ("mau10gBaseER", 39), ("mau10gBaseEW", 40)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauDefaultType.setStatus('current')
zhoneMauAutoNegSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 10), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauAutoNegSupported.setStatus('current')
zhoneMauTypeListBits = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 11), Bits().clone(namedValues=NamedValues(("bOther", 0), ("bAUI", 1), ("b10base5", 2), ("bFoirl", 3), ("b10base2", 4), ("b10baseT", 5), ("b10baseFP", 6), ("b10baseFB", 7), ("b10baseFL", 8), ("b10broad36", 9), ("b10baseTHD", 10), ("b10baseTFD", 11), ("b10baseFLHD", 12), ("b10baseFLFD", 13), ("b100baseT4", 14), ("b100baseTXHD", 15), ("b100baseTXFD", 16), ("b100baseFXHD", 17), ("b100baseFXFD", 18), ("b100baseT2HD", 19), ("b100baseT2FD", 20), ("b1000baseXHD", 21), ("b1000baseXFD", 22), ("b1000baseLXHD", 23), ("b1000baseLXFD", 24), ("b1000baseSXHD", 25), ("b1000baseSXFD", 26), ("b1000baseCXHD", 27), ("b1000baseCXFD", 28), ("b1000baseTHD", 29), ("b1000baseTFD", 30), ("b10gbaseX", 31), ("b10gbaseR", 32), ("b10gbaseW", 33), ("b10gbaseLX4", 34), ("b10gbaseSR", 35), ("b10gbaseSW", 36), ("b10gbaseLR", 37), ("b10gbaseLW", 38), ("b10gbaseER", 39), ("b10gbaseEW", 40)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauTypeListBits.setStatus('current')
zhoneMauClkSrc = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unused", 1), ("automatic", 2), ("master", 3), ("slave", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauClkSrc.setStatus('current')
zhoneMauPauseFlowControl = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("disabled", 1), ("asymmetricTx", 2), ("asymmetricRx", 3), ("symmetric", 4), ("passthrough", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauPauseFlowControl.setStatus('current')
zhoneMauAggregationMode = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("passive", 3), ("active", 4))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauAggregationMode.setStatus('current')
zhoneMauLinkStateMirror = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 15), InterfaceIndexOrZero()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauLinkStateMirror.setStatus('current')
zhoneMauSetPauseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauSetPauseTime.setStatus('current')
zhoneMauMaxFrameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15360))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauMaxFrameSize.setStatus('current')
zhoneMauIngressRate = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10240000))).setUnits('Kbps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauIngressRate.setStatus('current')
zhoneMauIngressBurstSize = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 512000))).setUnits('Kbits').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauIngressBurstSize.setStatus('current')
zhoneMauEgressRate = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10240000))).setUnits('Kbps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauEgressRate.setStatus('current')
zhoneMauEgressBurstSize = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 512000))).setUnits('Kbits').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauEgressBurstSize.setStatus('current')
zhoneMauAutoNegTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2), )
if mibBuilder.loadTexts: zhoneMauAutoNegTable.setStatus('current')
zhoneMauAutoNegEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1), ).setIndexNames((0, "ZHONE-MAU-MIB", "zhoneMauIfIndex"))
if mibBuilder.loadTexts: zhoneMauAutoNegEntry.setStatus('current')
zhoneMauAutoNegAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauAutoNegAdminStatus.setStatus('current')
zhoneMauAutoNegRemoteSignaling = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("detected", 1), ("notdetected", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauAutoNegRemoteSignaling.setStatus('current')
zhoneMauAutoNegConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("other", 1), ("configuring", 2), ("complete", 3), ("disabled", 4), ("parallelDetectFail", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauAutoNegConfig.setStatus('current')
zhoneMauAutoNegRestart = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("restart", 1), ("norestart", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauAutoNegRestart.setStatus('current')
zhoneMauAutoNegCapabilityBits = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 5), Bits().clone(namedValues=NamedValues(("bOther", 0), ("b10baseT", 1), ("b10baseTFD", 2), ("b100baseT4", 3), ("b100baseTX", 4), ("b100baseTXFD", 5), ("b100baseT2", 6), ("b100baseT2FD", 7), ("bfdxPause", 8), ("bfdxAPause", 9), ("bfdxSPause", 10), ("bfdxBPause", 11), ("b1000baseX", 12), ("b1000baseXFD", 13), ("b1000baseT", 14), ("b1000baseTFD", 15), ("b10gbaseX", 16), ("b10gbaseR", 17), ("b10gbaseW", 18), ("b10gbaseLX4", 19), ("b10gbaseSR", 20), ("b10gbaseSW", 21), ("b10gbaseLR", 22), ("b10gbaseLW", 23), ("b10gbaseER", 24), ("b10gbaseEW", 25)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauAutoNegCapabilityBits.setStatus('current')
zhoneMauAutoNegCapAdvertBits = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 6), Bits().clone(namedValues=NamedValues(("bOther", 0), ("b10baseT", 1), ("b10baseTFD", 2), ("b100baseT4", 3), ("b100baseTX", 4), ("b100baseTXFD", 5), ("b100baseT2", 6), ("b100baseT2FD", 7), ("bFdxPause", 8), ("bFdxAPause", 9), ("bFdxSPause", 10), ("bFdxBPause", 11), ("b1000baseX", 12), ("b1000baseXFD", 13), ("b1000baseT", 14), ("b1000baseTFD", 15), ("b10gbaseX", 16), ("b10gbaseR", 17), ("b10gbaseW", 18), ("b10gbaseLX4", 19), ("b10gbaseSR", 20), ("b10gbaseSW", 21), ("b10gbaseLR", 22), ("b10gbaseLW", 23), ("b10gbaseER", 24), ("b10gbaseEW", 25)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauAutoNegCapAdvertBits.setStatus('current')
zhoneMauAutoNegCapRecvBits = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 7), Bits().clone(namedValues=NamedValues(("bOther", 0), ("b10baseT", 1), ("b10baseTFD", 2), ("b100baseT4", 3), ("b100baseTX", 4), ("b100baseTXFD", 5), ("b100baseT2", 6), ("b100baseT2FD", 7), ("bFdxPause", 8), ("bFdxAPause", 9), ("bFdxSPause", 10), ("bFdxBPause", 11), ("b1000baseX", 12), ("b1000baseXFD", 13), ("b1000baseT", 14), ("b1000baseTFD", 15), ("b10gbaseX", 16), ("b10gbaseR", 17), ("b10gbaseW", 18), ("b10gbaseLX4", 19), ("b10gbaseSR", 20), ("b10gbaseSW", 21), ("b10gbaseLR", 22), ("b10gbaseLW", 23), ("b10gbaseER", 24), ("b10gbaseEW", 25)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauAutoNegCapRecvBits.setStatus('current')
zhoneMauAutoNegRemoteFaultAdvert = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("noError", 1), ("offline", 2), ("linkFailure", 3), ("autoNegError", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneMauAutoNegRemoteFaultAdvert.setStatus('current')
zhoneMauAutoNegRemoteFaultRecv = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("noError", 1), ("offline", 2), ("linkFailure", 3), ("autoNegError", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneMauAutoNegRemoteFaultRecv.setStatus('current')
zhoneEnetTraps = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3))
if mibBuilder.loadTexts: zhoneEnetTraps.setStatus('current')
enetV2Traps = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3, 0))
if mibBuilder.loadTexts: enetV2Traps.setStatus('current')
rprEastSpanTrap = NotificationType((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3, 0, 1)).setObjects(("IF-MIB", "ifIndex"), ("IF-MIB", "ifAdminStatus"), ("IF-MIB", "ifOperStatus"), ("IF-MIB", "ifName"))
if mibBuilder.loadTexts: rprEastSpanTrap.setStatus('current')
rprWestSpanTrap = NotificationType((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3, 0, 2)).setObjects(("IF-MIB", "ifIndex"), ("IF-MIB", "ifAdminStatus"), ("IF-MIB", "ifOperStatus"), ("IF-MIB", "ifName"))
if mibBuilder.loadTexts: rprWestSpanTrap.setStatus('current')
linkAggLink1Trap = NotificationType((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3, 0, 3))
if mibBuilder.loadTexts: linkAggLink1Trap.setStatus('current')
linkAggLink2Trap = NotificationType((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3, 0, 4))
if mibBuilder.loadTexts: linkAggLink2Trap.setStatus('current')
rprSpanGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3, 2)).setObjects(("ZHONE-MAU-MIB", "rprEastSpanTrap"), ("ZHONE-MAU-MIB", "rprWestSpanTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rprSpanGroup = rprSpanGroup.setStatus('current')
linkAggLinkGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 5504, 5, 1, 3, 3)).setObjects(("ZHONE-MAU-MIB", "linkAggLink1Trap"), ("ZHONE-MAU-MIB", "linkAggLink2Trap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
linkAggLinkGroup = linkAggLinkGroup.setStatus('current')
zhoneLinkAggTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 1, 4), )
if mibBuilder.loadTexts: zhoneLinkAggTable.setStatus('current')
zhoneLinkAggEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 1, 4, 1), ).setIndexNames((0, "ZHONE-MAU-MIB", "zhoneLinkAggIfIndex"))
if mibBuilder.loadTexts: zhoneLinkAggEntry.setStatus('current')
zhoneLinkAggIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneLinkAggIfIndex.setStatus('current')
zhoneLinkAggPartnerSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 4, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 18))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneLinkAggPartnerSystem.setStatus('current')
zhoneLinkAggPartnerSystemPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneLinkAggPartnerSystemPriority.setStatus('current')
zhoneLinkAggPartnerGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 1, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneLinkAggPartnerGroupId.setStatus('current')
mibBuilder.exportSymbols("ZHONE-MAU-MIB", zhoneMauDefaultType=zhoneMauDefaultType, zhoneMauType=zhoneMauType, zhoneEnetTraps=zhoneEnetTraps, zhoneIfMauTable=zhoneIfMauTable, zhoneMauIngressBurstSize=zhoneMauIngressBurstSize, zhoneMauAutoNegRemoteFaultAdvert=zhoneMauAutoNegRemoteFaultAdvert, zhoneLinkAggIfIndex=zhoneLinkAggIfIndex, zhoneIfMauEntry=zhoneIfMauEntry, zhoneMauAutoNegSupported=zhoneMauAutoNegSupported, zhoneMauAutoNegAdminStatus=zhoneMauAutoNegAdminStatus, zhoneMauAutoNegEntry=zhoneMauAutoNegEntry, zhoneMauAutoNegConfig=zhoneMauAutoNegConfig, zhoneMauMediaAvailable=zhoneMauMediaAvailable, zhoneMauMaxFrameSize=zhoneMauMaxFrameSize, zhoneMauIngressRate=zhoneMauIngressRate, zhoneMauAutoNegCapAdvertBits=zhoneMauAutoNegCapAdvertBits, zhoneLinkAggTable=zhoneLinkAggTable, zhoneMauAutoNegTable=zhoneMauAutoNegTable, zhoneMauEgressRate=zhoneMauEgressRate, zhoneLinkAggEntry=zhoneLinkAggEntry, zhoneMauOperStatus=zhoneMauOperStatus, zhoneLinkAggPartnerSystemPriority=zhoneLinkAggPartnerSystemPriority, zhoneMauAutoNegCapRecvBits=zhoneMauAutoNegCapRecvBits, zhoneMauTypeListBits=zhoneMauTypeListBits, zhoneLinkAggPartnerSystem=zhoneLinkAggPartnerSystem, enetV2Traps=enetV2Traps, linkAggLink1Trap=linkAggLink1Trap, zhoneMauAutoNegRestart=zhoneMauAutoNegRestart, zhoneMauAggregationMode=zhoneMauAggregationMode, zhoneMauJabberState=zhoneMauJabberState, linkAggLink2Trap=linkAggLink2Trap, phyEnetMauMib=phyEnetMauMib, zhoneLinkAggPartnerGroupId=zhoneLinkAggPartnerGroupId, zhoneMauClkSrc=zhoneMauClkSrc, zhoneMauMediaAvailStateExits=zhoneMauMediaAvailStateExits, zhoneMauSetPauseTime=zhoneMauSetPauseTime, rprSpanGroup=rprSpanGroup, zhoneMauAutoNegRemoteSignaling=zhoneMauAutoNegRemoteSignaling, rprWestSpanTrap=rprWestSpanTrap, zhoneMauPauseFlowControl=zhoneMauPauseFlowControl, zhoneMauJabberingStateEnters=zhoneMauJabberingStateEnters, zhoneMauLinkStateMirror=zhoneMauLinkStateMirror, zhoneMauAutoNegRemoteFaultRecv=zhoneMauAutoNegRemoteFaultRecv, rprEastSpanTrap=rprEastSpanTrap, zhoneMauFalseCarriers=zhoneMauFalseCarriers, linkAggLinkGroup=linkAggLinkGroup, PYSNMP_MODULE_ID=phyEnetMauMib, zhoneMauEgressBurstSize=zhoneMauEgressBurstSize, zhoneMauIfIndex=zhoneMauIfIndex, zhoneMauAutoNegCapabilityBits=zhoneMauAutoNegCapabilityBits)
|
#!/usr/bin/env python
# Copyright 2011 Jonathan Kinred
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import sys
import time
from psphere import config, template
from psphere.client import Client
from psphere.errors import TemplateNotFoundError
from psphere.soap import VimFault
def create_vm(client, name, compute_resource, datastore, disksize, nics,
memory, num_cpus, guest_id, host=None):
"""Create a virtual machine using the specified values.
:param name: The name of the VM to create.
:type name: str
:param compute_resource: The name of a ComputeResource in which to \
create the VM.
:type compute_resource: str
:param datastore: The name of the datastore on which to create the VM.
:type datastore: str
:param disksize: The size of the disk, specified in KB, MB or GB. e.g. \
20971520KB, 20480MB, 20GB.
:type disksize: str
:param nics: The NICs to create, specified in a list of dict's which \
contain a "network_name" and "type" key. e.g. \
{"network_name": "VM Network", "type": "VirtualE1000"}
:type nics: list of dict's
:param memory: The amount of memory for the VM. Specified in KB, MB or \
GB. e.g. 2097152KB, 2048MB, 2GB.
:type memory: str
:param num_cpus: The number of CPUs the VM will have.
:type num_cpus: int
:param guest_id: The vSphere string of the VM guest you are creating. \
The list of VMs can be found at \
http://pubs.vmware.com/vsphere-50/index.jsp?topic=/com.vmware.wssdk.apiref.doc_50/right-pane.html
:type guest_id: str
:param host: The name of the host (default: None), if you want to \
provision the VM on a \ specific host.
:type host: str
"""
print("Creating VM %s" % name)
# If the host is not set, use the ComputeResource as the target
if host is None:
target = client.find_entity_view("ComputeResource",
filter={"name": compute_resource})
resource_pool = target.resourcePool
else:
target = client.find_entity_view("HostSystem", filter={"name": host})
resource_pool = target.parent.resourcePool
disksize_pattern = re.compile(r"^\d+[KMG]B")
if disksize_pattern.match(disksize) is None:
print("Disk size %s is invalid. Try \"12G\" or similar" % disksize)
sys.exit(1)
if disksize.endswith("GB"):
disksize_kb = int(disksize[:-2]) * 1024 * 1024
elif disksize.endswith("MB"):
disksize_kb = int(disksize[:-2]) * 1024
elif disksize.endswith("KB"):
disksize_kb = int(disksize[:-2])
else:
print("Disk size %s is invalid. Try \"12G\" or similar" % disksize)
memory_pattern = re.compile(r"^\d+[KMG]B")
if memory_pattern.match(memory) is None:
print("Memory size %s is invalid. Try \"12G\" or similar" % memory)
sys.exit(1)
if memory.endswith("GB"):
memory_mb = int(memory[:-2]) * 1024
elif memory.endswith("MB"):
memory_mb = int(memory[:-2])
elif memory.endswith("KB"):
memory_mb = int(memory[:-2]) / 1024
else:
print("Memory size %s is invalid. Try \"12G\" or similar" % memory)
# A list of devices to be assigned to the VM
vm_devices = []
# Create a disk controller
controller = create_controller(client, "VirtualLsiLogicController")
vm_devices.append(controller)
ds_to_use = None
for ds in target.datastore:
if ds.name == datastore:
ds_to_use = ds
break
if ds_to_use is None:
print("Could not find datastore on %s with name %s" %
(target.name, datastore))
sys.exit(1)
# Ensure the datastore is accessible and has enough space
if ds_to_use.summary.accessible is not True:
print("Datastore (%s) exists, but is not accessible" %
ds_to_use.summary.name)
sys.exit(1)
if ds_to_use.summary.freeSpace < disksize_kb * 1024:
print("Datastore (%s) exists, but does not have sufficient"
" free space." % ds_to_use.summary.name)
sys.exit(1)
disk = create_disk(client, datastore=ds_to_use, disksize_kb=disksize_kb)
vm_devices.append(disk)
for nic in nics:
nic_spec = create_nic(client, target, nic)
if nic_spec is None:
print("Could not create spec for NIC")
sys.exit(1)
# Append the nic spec to the vm_devices list
vm_devices.append(nic_spec)
vmfi = client.create("VirtualMachineFileInfo")
vmfi.vmPathName = "[%s]" % ds_to_use.summary.name
vm_config_spec = client.create("VirtualMachineConfigSpec")
vm_config_spec.name = name
vm_config_spec.memoryMB = memory_mb
vm_config_spec.files = vmfi
vm_config_spec.annotation = "Auto-provisioned by psphere"
vm_config_spec.numCPUs = num_cpus
vm_config_spec.guestId = guest_id
vm_config_spec.deviceChange = vm_devices
# Find the datacenter of the target
if target.__class__.__name__ == "HostSystem":
datacenter = target.parent.parent.parent
else:
datacenter = target.parent.parent
try:
task = datacenter.vmFolder.CreateVM_Task(config=vm_config_spec,
pool=resource_pool)
except VimFault as e:
print("Failed to create %s: %s" % (name, e))
sys.exit()
while task.info.state in ["queued", "running"]:
time.sleep(5)
task.update()
print("Waiting 5 more seconds for VM creation")
if task.info.state == "success":
elapsed_time = task.info.completeTime - task.info.startTime
print("Successfully created new VM %s. Server took %s seconds." %
(name, elapsed_time.seconds))
elif task.info.state == "error":
print("ERROR: The task for creating the VM has finished with"
" an error. If an error was reported it will follow.")
try:
print("ERROR: %s" % task.info.error.localizedMessage)
except AttributeError:
print("ERROR: There is no error message available.")
else:
print("UNKNOWN: The task reports an unknown state %s" %
task.info.state)
def create_nic(client, target, nic):
"""Return a NIC spec"""
# Iterate through the networks and look for one matching
# the requested name
for network in target.network:
if network.name == nic["network_name"]:
net = network
break
else:
return None
# Success! Create a nic attached to this network
backing = client.create("VirtualEthernetCardNetworkBackingInfo")
backing.deviceName = nic["network_name"]
backing.network = net
connect_info = client.create("VirtualDeviceConnectInfo")
connect_info.allowGuestControl = True
connect_info.connected = False
connect_info.startConnected = True
new_nic = client.create(nic["type"])
new_nic.backing = backing
new_nic.key = 2
# TODO: Work out a way to automatically increment this
new_nic.unitNumber = 1
new_nic.addressType = "generated"
new_nic.connectable = connect_info
nic_spec = client.create("VirtualDeviceConfigSpec")
nic_spec.device = new_nic
nic_spec.fileOperation = None
operation = client.create("VirtualDeviceConfigSpecOperation")
nic_spec.operation = (operation.add)
return nic_spec
def create_controller(client, controller_type):
controller = client.create(controller_type)
controller.key = 0
controller.device = [0]
controller.busNumber = 0
controller.sharedBus = client.create("VirtualSCSISharing").noSharing
spec = client.create("VirtualDeviceConfigSpec")
spec.device = controller
spec.fileOperation = None
spec.operation = client.create("VirtualDeviceConfigSpecOperation").add
return spec
def create_disk(client, datastore, disksize_kb):
backing = client.create("VirtualDiskFlatVer2BackingInfo")
backing.datastore = None
backing.diskMode = "persistent"
backing.fileName = "[%s]" % datastore.summary.name
backing.thinProvisioned = True
disk = client.create("VirtualDisk")
disk.backing = backing
disk.controllerKey = 0
disk.key = 0
disk.unitNumber = 0
disk.capacityInKB = disksize_kb
disk_spec = client.create("VirtualDeviceConfigSpec")
disk_spec.device = disk
file_op = client.create("VirtualDeviceConfigSpecFileOperation")
disk_spec.fileOperation = file_op.create
operation = client.create("VirtualDeviceConfigSpecOperation")
disk_spec.operation = operation.add
return disk_spec
def main(name, options):
"""The main method for this script.
:param name: The name of the VM to create.
:type name: str
:param template_name: The name of the template to use for creating \
the VM.
:type template_name: str
"""
server = config._config_value("general", "server", options.server)
if server is None:
raise ValueError("server must be supplied on command line"
" or in configuration file.")
username = config._config_value("general", "username", options.username)
if username is None:
raise ValueError("username must be supplied on command line"
" or in configuration file.")
password = config._config_value("general", "password", options.password)
if password is None:
raise ValueError("password must be supplied on command line"
" or in configuration file.")
vm_template = None
if options.template is not None:
try:
vm_template = template.load_template(options.template)
except TemplateNotFoundError:
print("ERROR: Template \"%s\" could not be found." % options.template)
sys.exit(1)
expected_opts = ["compute_resource", "datastore", "disksize", "nics",
"memory", "num_cpus", "guest_id", "host"]
vm_opts = {}
for opt in expected_opts:
vm_opts[opt] = getattr(options, opt)
if vm_opts[opt] is None:
if vm_template is None:
raise ValueError("%s not specified on the command line and"
" you have not specified any template to"
" inherit the value from." % opt)
try:
vm_opts[opt] = vm_template[opt]
except AttributeError:
raise ValueError("%s not specified on the command line and"
" no value is provided in the specified"
" template." % opt)
client = Client(server=server, username=username, password=password)
create_vm(client, name, vm_opts["compute_resource"], vm_opts["datastore"],
vm_opts["disksize"], vm_opts["nics"], vm_opts["memory"],
vm_opts["num_cpus"], vm_opts["guest_id"], host=vm_opts["host"])
client.logout()
if __name__ == "__main__":
from optparse import OptionParser
usage = "Usage: %prog [options] name"
parser = OptionParser(usage=usage)
parser.add_option("--server", dest="server",
help="The server to connect to for provisioning")
parser.add_option("--username", dest="username",
help="The username used to connect to the server")
parser.add_option("--password", dest="password",
help="The password used to connect to the server")
parser.add_option("--template", dest="template",
help="The template used to create the VM")
parser.add_option("--compute_resource", dest="compute_resource",
help="The ComputeResource in which to provision the VM")
parser.add_option("--datastore", dest="datastore",
help="The datastore on which to provision the VM")
parser.add_option("--disksize", dest="disksize",
help="The size of the VM disk")
parser.add_option("--nics", dest="nics",
help="The nics for the VM")
parser.add_option("--memory", dest="memory",
help="The amount of memory for the VM")
parser.add_option("--num_cpus", dest="num_cpus",
help="The number of CPUs for the VM")
parser.add_option("--guest_id", dest="guest_id",
help="The guest_id of the VM (see vSphere doco)")
parser.add_option("--host", dest="host",
help="Specify this if you want to provision the VM on a"
" specific host in the ComputeResource")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.print_help()
sys.exit(1)
main(args[0], options)
|
class MaxStack:
def __init__(self):
"""
initialize your data structure here.
"""
self.stack = []
self.maxs = []
def push(self, x):
"""
:type x: int
:rtype: void
"""
self.stack.append(x)
if not self.maxs or x > self.maxs[-1]:
self.maxs.append(x)
else:
self.maxs.append(self.maxs[-1])
def pop(self):
"""
:rtype: int
"""
self.maxs.pop()
return self.stack.pop()
def top(self):
"""
:rtype: int
"""
return self.stack[-1]
def peekMax(self):
"""
:rtype: int
"""
return self.maxs[-1]
def popMax(self):
"""
:rtype: int
"""
item = self.maxs.pop()
b = []
# unwind the stack until the current max is on top, buffering the popped values
while self.stack[-1] != item:
b.append(self.pop())
# discard the max itself, then restore the buffered values (push recomputes self.maxs)
self.stack.pop()
for num in reversed(b):
self.push(num)
return item
# Your MaxStack object will be instantiated and called as such:
# obj = MaxStack()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.top()
# param_4 = obj.peekMax()
# param_5 = obj.popMax()
|
from distutils.core import setup
from setuptools import find_packages
import re
# http://bugs.python.org/issue15881
try:
import multiprocessing
except ImportError:
pass
def parse_requirements(file_name):
requirements = []
for line in open(file_name, 'r').read().split('\n'):
if re.match(r'(\s*#)|(\s*$)', line):
continue
if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', line))
elif re.match(r'\s*-f\s+', line):
pass
else:
requirements.append(line)
return requirements
def parse_dependency_links(file_name):
dependency_links = []
for line in open(file_name, 'r').read().split('\n'):
if re.match(r'\s*-[ef]\s+', line):
dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
return dependency_links
setup(name='django-experiments',
version='1.2.0',
description='Python Django AB Testing Framework',
author='Mixcloud',
author_email='technical@mixcloud.com',
url='https://github.com/mixcloud/django-experiments',
packages=find_packages(exclude=["example_project"]),
include_package_data=True,
license="MIT license, see LICENSE file",
install_requires=parse_requirements('requirements.txt'),
dependency_links=parse_dependency_links('requirements.txt'),
long_description=open('README.rst').read(),
test_suite="testrunner.runtests",
)
|
from .settings import *
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'.treehouse-app.com',
]
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from benchmarks import silk_flags
import ct_benchmarks_util
from measurements import smoothness
import page_sets
from page_sets import repaint_helpers
from telemetry import benchmark
class _Repaint(perf_benchmark.PerfBenchmark):
@classmethod
def AddBenchmarkCommandLineArgs(cls, parser):
parser.add_option('--mode', type='string',
default='viewport',
help='Invalidation mode. '
'Supported values: fixed_size, layer, random, viewport.')
parser.add_option('--width', type='int',
default=None,
help='Width of invalidations for fixed_size mode.')
parser.add_option('--height', type='int',
default=None,
help='Height of invalidations for fixed_size mode.')
@classmethod
def Name(cls):
return 'repaint'
def CreateStorySet(self, options):
return page_sets.KeyMobileSitesRepaintPageSet(
options.mode, options.width, options.height)
def CreatePageTest(self, options):
return smoothness.Repaint()
# crbug.com/499320
#@benchmark.Enabled('android')
@benchmark.Disabled('all')
class RepaintKeyMobileSites(_Repaint):
"""Measures repaint performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
@classmethod
def Name(cls):
return 'repaint.key_mobile_sites_repaint'
# crbug.com/502179
@benchmark.Enabled('android')
@benchmark.Disabled('all')
class RepaintGpuRasterizationKeyMobileSites(_Repaint):
"""Measures repaint performance on the key mobile sites with forced GPU
rasterization.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
tag = 'gpu_rasterization'
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@classmethod
def Name(cls):
return 'repaint.gpu_rasterization.key_mobile_sites_repaint'
# Disabled because we do not plan on running CT benchmarks on the perf
# waterfall any time soon.
@benchmark.Disabled('all')
class RepaintCT(_Repaint):
"""Measures repaint performance for Cluster Telemetry."""
@classmethod
def Name(cls):
return 'repaint_ct'
@classmethod
def AddBenchmarkCommandLineArgs(cls, parser):
_Repaint.AddBenchmarkCommandLineArgs(parser)
ct_benchmarks_util.AddBenchmarkCommandLineArgs(parser)
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
ct_benchmarks_util.ValidateCommandLineArgs(parser, args)
def CreateStorySet(self, options):
return page_sets.CTPageSet(
options.urls_list, options.user_agent, options.archive_data_file,
run_page_interaction_callback=repaint_helpers.WaitThenRepaint)
|
# %% [markdown]
'''
# The hidden secrets of the bitcoin price
'''
# %% [markdown]
'''
Bitcoin is a digital currency created in 2009 by Satoshi Nakamoto, who describes it as a "peer-to-peer version of electronic cash" <cite> nakamoto2019bitcoin </cite>.
One big advantage of bitcoin (and other cryptocurrencies) is that all the data is open and immutable, residing inside the blockchain.
The openness and immutability of the data have made blockchain research really active, mostly on price forecasting (<cite> jang2017empirical </cite>, <cite> mudassir2020time </cite>).
Many, rightfully, rush into the blockchain data (such as addresses, transactions, etc.), but I will show in this post that the bitcoin price itself is already really informative.
Understanding how the price behaves makes a substantial difference in the choice of models and parameters for predicting it.
The behaviour of the price is best understood via two main properties of time series: stationarity and seasonality. For example, a stationary time series can be much easier to model than a non-stationary one.
In what follows, I will share my thought process in looking at the price, using statistical tools and Python programming.
'''
# %% [markdown]
'''
<binder></binder>
'''
# %% [markdown]
'''
## tl;dr
1. Two important properties of a time series: stationarity (the distribution does not depend on time) and seasonality (recurrent patterns in the data).
2. Auto-correlation to check whether data is non-stationary; differentiation or data filtering/subtraction to remove the non-stationary component.
3. FFT and short-time FFT to analyse the seasonality.
'''
# %% [markdown]
'''
## 1. A quick primer on time series
'''
# %% [markdown]
'''
As I said, there are two important properties attached to time series: seasonality and stationarity.
A stationary process means that the distribution (statistical properties) of the data does not change over time, which is why it is much easier to model.
Seasonality represents how frequently patterns in the data recur (for the bitcoin price, we can express it in cycles per day), and also when they start.
We will first focus on the analysis of stationarity, and then on seasonality.
'''
# %% [markdown]
'''
### 1.1. Stationarity
'''
# %% [markdown]
'''
One way to detect whether data is stationary is to compute its autocorrelation: if it decays quickly, the data is stationary.
There are many different types of non-stationary data in the literature, so I suggest reading the [following post](https://towardsdatascience.com/stationarity-in-time-series-analysis-90c94f27322)
if you want to learn more about it. Check also [this figure](https://otexts.com/fpp2/stationarity.html) and try to guess which time series are stationary!
'''
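# %% [markdown]
'''
As a quick, self-contained illustration (not part of the analysis below), the sketch in the next cell compares the autocorrelation of a synthetic stationary signal (white noise) with a non-stationary one (a random walk): the former decays almost immediately while the latter stays close to 1 for long delays. The signals and the `acf` helper are made up for this example.
'''
# %% [code]
### minimal sketch: autocorrelation of stationary vs non-stationary synthetic data
import numpy as np

def acf(x, max_lag):
    """Normalized autocorrelation of x for lags 0..max_lag."""
    x = x - x.mean()
    return np.array([1.0 if lag == 0 else np.corrcoef(x[:-lag], x[lag:])[0, 1]
                     for lag in range(max_lag + 1)])

rng = np.random.default_rng(0)
noise = rng.normal(size=5000)   # stationary: white noise
walk = np.cumsum(noise)         # non-stationary: random walk
print("white noise ACF at lag 10:", round(acf(noise, 10)[-1], 3))  # close to 0
print("random walk ACF at lag 10:", round(acf(walk, 10)[-1], 3))   # close to 1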
# %% [markdown]
'''
### 1.2. Seasonality
'''
# %% [markdown]
'''
To analyse the seasonality of the bitcoin price, we can perform a [Fourier analysis](https://www.ritchievink.com/blog/2017/04/23/understanding-the-fourier-transform-by-example/) to extract the most prominent frequencies.
The magnitude of the FFT tells us how much a given frequency component affects the price. On the other hand, the phase of the FFT tells us when the dynamics of the price start.
If the magnitude or phase looks like random white noise, then there is no evidence of a dominant component.
Check this nice [blog post](https://machinelearningmastery.com/time-series-seasonality-with-python/) if you want to learn more about seasonality.
'''
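# %% [markdown]
'''
Again as an illustration only, the sketch below builds a synthetic hourly signal with a known daily cycle and recovers its dominant frequency from the FFT magnitude. All names and values here are made up for the example; the actual analysis of the bitcoin price follows in section 2.
'''
# %% [code]
### minimal sketch: recovering a known 1 cycle/day component with the FFT
import numpy as np

sampling_interval = 1 / 24                       # hourly samples, expressed in days
t = np.arange(24 * 365) * sampling_interval      # one year of hourly timestamps (in days)
signal = np.sin(2 * np.pi * 1.0 * t) + 0.3 * np.random.randn(t.size)  # 1 cycle/day + noise

magnitude = np.abs(np.fft.rfft(signal))
freqs = np.fft.rfftfreq(signal.size, d=sampling_interval)  # frequencies in cycles per day
dominant = freqs[1 + np.argmax(magnitude[1:])]             # skip the DC component
print("dominant frequency: %.2f cycles/day" % dominant)    # expected: ~1.00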
# %% [markdown]
'''
## 2. Code
'''
# %% [markdown]
'''
### 2.1. Loading the data
'''
# %% [markdown]
'''
The hourly USD price for bitcoin can be collected using [glassnode](https://studio.glassnode.com/pricing), with their advanced subscription.
If you don’t want to pay for it, the 24-hour data comes free of charge.
Here we will use hourly data to get a more precise analysis.
'''
# %% [code]
### imports
import os
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import warnings
warnings.filterwarnings("ignore")
# %% [markdown]
'''
First, we want to create a function to load the data.
'''
# %% [code]
# function to read the data
def read_data(filepath):
price = []
time = []
with open(filepath) as f:
lines = f.readlines()[1:]
for data in lines:
curr_time = float(data.split(",")[0])
curr_price = -1
if data.split(",")[1][:-1]:
curr_price = float(data.split(",")[1][:-1])
time += [curr_time]
price += [curr_price]
return np.array(price, dtype=np.float32), np.array(time, dtype=int)
# %% [markdown]
'''
Now we will load the data by skipping the first year.
'''
# %% [code]
# define paths
filepath = "data/market/price_usd_close_BTC_1h"
figure_dir = ""
# loading the hourly data; to avoid uninformative data, we skip the first year (8760 h)
price, time = read_data(filepath)
time_shifted = time - time[0]
price = price[8760:]
time_shifted = time_shifted[8760:]
# %% [markdown]
'''
Let's look at the bitcoin price over the time,
'''
# %% [code]
### plot
plt.figure()
plt.plot(time_shifted, price)
plt.title("Bitcoin price over time (USD)")
plt.ylabel("price (USD)")
plt.xlabel("time (h)")
if figure_dir:
plt.savefig(os.path.join(figure_dir, "price.png"))
plt.show()
plt.close()
# %% [markdown]
'''
The non-stationary behaviour of the data is obvious when looking at the bitcoin price.
We can also clearly see the big rises of Dec 2017 and Dec 2020 there.
'''
# %% [markdown]
'''
### 2.2. Stationarity
'''
# %% [markdown]
'''
One way to remove the non-stationary component from the data is to compute its derivative.
Another way is to filter the data with a Gaussian kernel and subtract the result from the original price data.
'''
# %% code
# derivative
price_dt = price[1:] - price[:-1]
# filter
filter_width = 12
def gaussian_kernel_1d(filter_width):
# sigma chosen so that ~99% of the Gaussian mass lies within the filter width (2.33 ≈ 99th-percentile z-score)
sigma = (filter_width)/2.33
norm = 1.0 / (np.sqrt(2*np.pi) * sigma)
kernel = [norm * np.exp((-1)*(x**2)/(2 * sigma**2)) for x in range(-filter_width, filter_width + 1)]
return np.float32(kernel / np.sum(kernel))
f = tf.reshape(gaussian_kernel_1d(filter_width), [-1, 1, 1])
tf_price = tf.reshape(tf.constant(price, dtype=tf.float32), [1, -1, 1])
tf_price = tf.reshape(tf.nn.conv1d(tf_price, filters=f, stride=1, padding='VALID'), [-1])
# padding is necessary to keep same dim
tf_price = tf.concat([ tf.constant(tf_price[0].numpy(), shape=filter_width), tf_price ], axis=0)
filt_price = tf.concat([ tf_price,tf.constant(tf_price[-1].numpy(), shape=filter_width) ], axis=0).numpy()
price_centered = price - filt_price
# %% [markdown]
'''
Comparing the two methods (derivative and filtering), we see that the resulting prices are now zero-centered.
They are shown in orange in the charts below:
'''
# %% code
### plot
fig, axes = plt.subplots(2, figsize=(12, 8))
axes[0].plot(time_shifted, price, label="non-stationnary bitcoin price")
axes[0].plot(time_shifted[:-1], price_dt, label="stationnary bitcoin price")
axes[0].set_title('Derivative method')
axes[0].legend(loc="upper left")
axes[1].plot(time_shifted, price, label="non-stationary bitcoin price")
axes[1].plot(time_shifted, price_centered, label="stationary bitcoin price")
axes[1].plot(time_shifted, filt_price, label="filtered bitcoin price")
axes[1].set_title('Filtering and subtraction method')
axes[1].legend(loc="upper left")
if figure_dir:
plt.savefig(os.path.join(figure_dir, "price_stationnarity.png"))
plt.show()
plt.close()
# %% [markdown]
'''
In order to verify the quality of the process, one can check the auto-correlation of both the raw price data (blue line) and the stationary price data obtained with the filtering method (green line).
This tells us how stationary the data is after the process.
We will compute the auto-correlation for delays of up to 2 days, in steps of one hour.
'''
# %% [code]
### auto-correlation function
def autocorr(input, delay):
input = tf.constant(input, dtype=tf.float32)
input_delayed = tf.roll(input, shift=delay, axis=0)
x1 = tf.reshape(input, [1, -1, 1])
x2 = tf.reshape(input_delayed, [-1, 1, 1])
return tf.reshape(tf.nn.conv1d(x1, filters=x2, stride=1, padding='VALID'), [-1])
# %% [code]
# autocorrelation of the price for different delays
delays = np.arange(0, 48)
# raw price data
autocorr_price = []
for hour in delays:
autocorr_price += [autocorr(price, hour)]
# stationary data
autocorr_centered_price = []
for hour in delays:
autocorr_centered_price += [autocorr(price_centered, hour)]
# %% [markdown]
'''
Looking at the plot, it is clear that the auto-correlation of the stationary data decays much faster than that of the raw price data.
This means that we successfully removed the non-stationary component of the price!
'''
# %% [code]
### plot
fig, axes = plt.subplots(2, figsize=(12, 8))
axes[0].stem(delays, autocorr_centered_price, linefmt='b-', markerfmt='bo', basefmt='', use_line_collection=True)
axes[0].set_title('stationary bitcoin price auto-correlation')
axes[1].stem(delays, autocorr_price, linefmt='b-', markerfmt='bo', basefmt='', use_line_collection=True)
axes[1].set_title('raw bitcoin price auto-correlation')
axes[1].set(xlabel='delay (h)', ylabel='amplitude')
if figure_dir:
plt.savefig(os.path.join(figure_dir, "check_stationnarity.png"))
plt.show()
plt.close()
# %% [markdown]
'''
Looking into stationarity also allows us to determine the prediction window that is most suitable for the data,
for example by checking how fast, for a given timestamp, the distribution of the raw price differs from that of its neighbours.
By comparing the histogram (i.e. computing the correlation) of each timestamp with those of its neighbours, one can get an overview of the acceptable range for a prediction,
the idea being that if the distributions are close to each other, prediction is obviously easier.
'''
# %% code
### histogram function
def data_distribution(inp):
return np.histogram(inp, range=(0, 20000), bins=500, density=True)[0]
# %% code
win_size = 2*24 #distribution of the data is calculated within 2 days (in hours)
slide = 5*24 #we slide up to -/+ 5 days
corr = []
# loop through all timestamps
timestamps_range = np.arange(slide + int(win_size/2), len(price) - slide - int(win_size/2), 72)
sliding_range = np.arange(-slide, slide + 1)
for i in timestamps_range:
idx = i-int(win_size/2)
# distribution of the price over a 2-day window centred on the current timestamp (the fixed distribution)
fixed_price = price[idx:int(idx + win_size)]
fixed_distrib = data_distribution(fixed_price)
curr_corr = []
# compare with the distribution at offset timestamps (sliding from -120 h to +120 h), the moving distribution
for offset in sliding_range:
idx = offset + i - int(win_size/2)
moving_price = price[idx:(idx + win_size)]
moving_distrib = data_distribution(moving_price)
curr_corr += [np.correlate(fixed_distrib, moving_distrib)]
curr_corr = curr_corr / np.max(curr_corr)
corr += [curr_corr]
if i%7992 == 0:
print("day {}/{}".format(i/24, len(price)/24))
output = np.array(corr)[:, :, 0]
# %% [markdown]
'''
In the following plot, the y-axis describes samples taken at different timestamps of the bitcoin price.
From top to bottom, it follows chronological order, but this is not important since each sample can be taken independently.
The x-axis describes the different offsets used to compute the histograms (from -120 hours to +120 hours).
The colour is the resulting correlation between these distributions and the distribution at timestamp $h_0$ (the current timestamp of the sample).
'''
# %% [code]
### plot
plt.imshow(output, cmap="gray")
plt.axis("tight")
idx_sliding_range = np.arange(0, len(sliding_range), 30)
plt.xticks([i for i in idx_sliding_range], ["h{}".format(sliding_range[i]) for i in idx_sliding_range])
plt.xlabel("time offset (h)")
plt.ylabel("samples")
cbar = plt.colorbar()
cbar.set_label('correlation value')
if figure_dir:
plt.imsave(os.path.join(figure_dir, "range_accuracy.png"), output, cmap="gray")
plt.show()
plt.close()
# %% [markdown]
'''
Looking at it, we can say that the acceptable range for prediction is around +/-15 hours.
>**Note**
>The colour scale is very granular, and sometimes constant.
>This is because of the number of bins in the histogram (500) and price values ranging from 0 to 20k\$, meaning the precision is about ~40\$.
>So if the price moves within a 40\$ range over a certain period, the histograms will match perfectly.
'''
# %% [markdown]
'''
### 2.3. Seasonality
'''
# %% [markdown]
'''
Let's now switch to the seasonality analysis by computing the FFT and extracting its magnitude and phase components.
As explained before, the FFT is used here to understand the recurrent patterns in the data.
Because the FFT works better on LTI (linear and time-invariant) systems, it cannot be applied to the raw bitcoin price (which is not stationary!). Therefore we will apply it to the stationary bitcoin price.
'''
# %% code
# fft
price_fouried = tf.signal.fft(price_centered)
T = 1/24 # sampling interval in days
N = price_fouried.shape[0]
frequencies = np.linspace(0, 1 / T, N)
# %% [markdown]
'''
In the figure below, there is no clear evidence of a single pattern, although we see important frequencies ranging from 1 to 1.9 cycles per day, with a small peak at 1.52.
This means that the bitcoin price can "generally" be explained by a sinusoid with a period of ~15.8 hours.
'''
# %% code
### plot
fig, axes = plt.subplots(2, figsize=(12, 8))
axes[0].plot(frequencies[:N // 2], tf.abs(price_fouried)[:N // 2] * 1 / N)
axes[0].set_title('FFT magnitude')
axes[1].plot(frequencies[:N // 2], tf.math.angle(price_fouried)[:N // 2])
axes[1].set_title('FFT phase')
axes[1].set(xlabel='cycles per day', ylabel='phase (rad)')
if figure_dir:
plt.savefig(os.path.join(figure_dir, "fft.png"))
plt.show()
plt.close()
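# %% [markdown]
'''
For reference, converting a peak frequency (in cycles per day) into a period (in hours) is a one-line computation; the 1.52 cycles/day value below is simply read off the magnitude plot above, not detected automatically.
'''
# %% code
# period (in hours) corresponding to the small peak of the magnitude spectrum
peak_frequency = 1.52  # cycles per day, read off the plot
period_hours = 24 / peak_frequency
print("period: ~{:.1f} hours".format(period_hours))  # -> ~15.8 hours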
# %% [markdown]
'''
Another way to analyse seasonality in non-stationary data is to compute its spectrogram (derived from a time-frequency analysis).
A spectrogram is a visual representation of how a signal's spectrum of frequencies evolves over time. It is commonly used (for example by [spleeter](https://github.com/deezer/spleeter)) to extract voice from audio signals.
The spectrogram can be computed using a short-time Fourier transform (STFT), which runs a Fourier transform on a short window sliding through all the data.
Here, we will use a window of 48 samples (hours), a step of 1, and an FFT length of 125 (63 frequency bins).
'''
# %% [code]
# tensorflow provides a fast implementation of the short-time fourier transform (based on the FFT).
stft = tf.signal.stft(price, frame_length=48, frame_step=1, fft_length=125, pad_end=True)
spectrogram = tf.abs(stft).numpy()
# %% [code]
### plot
# inspired from https://www.tensorflow.org/tutorials/audio/simple_audio
# convert to log scale and transpose so that the time is represented in the x-axis (columns).
fig, axes = plt.subplots(2, figsize=(12, 8))
max_time = np.max(time_shifted)
axes[0].plot(time_shifted, price)
axes[0].set_xlim([0, max_time])
axes[0].set_title('non-stationary bitcoin price')
log_spec = np.log(spectrogram.T + np.finfo(float).eps)  # eps avoids log(0) on zero-padded frames
axes[1].pcolormesh(time_shifted, np.arange(log_spec.shape[0]), log_spec)
axes[1].set_xlim([0, max_time])
axes[1].set_title('Spectrogram (short-fft)')
axes[1].set(xlabel='time (h)', ylabel='frequencies')
if figure_dir:
plt.savefig(os.path.join(figure_dir, "spectrogram.png"))
plt.show()
plt.close()
# %% [markdown]
'''
Looking at the figure, whenever there are big changes in the data (for example Dec. 2017), there is a much higher magnitude response.
Generally speaking, the rest of the spectrum looks like white noise most of the time.
'''
# %% [markdown]
'''
## Conclusion
'''
# %% [markdown]
'''
In light of the properties we saw above, one thing can be said with certainty: predicting the bitcoin price is no easy task because of its time dependency.
Fortunately, we found a way to simplify the process by removing the non-stationary component of the data (so it no longer depends on time).
This allowed us to analyse redundant patterns in the data, and we found that such patterns exist.
The recurrent patterns are interesting because they can later be used as a new feature in a predictive model (think of adding the time of day to a weather prediction model, for example).
These findings open up new ways to build an accurate predictive model for the bitcoin price, but that is another story...
'''
# %% [markdown]
'''
## To go further
'''
# %% [markdown]
'''
I strongly suggest reading the book by Hyndman <cite> hyndman2018forecasting </cite>; it covers best practices for time-series forecasting as well as coding examples.
The online version is available [here](https://otexts.com/fpp2/index.html).
'''
# %% [markdown]
'''
## Acknowledgement
'''
# %% [markdown]
'''
Thanks to [Vahid Zarifpayam](https://twitter.com/Vahidzarif1) for the review of this post.
Credit goes to [Bitprobe](https://bitprobe.io/).
'''
# %% [markdown]
'''
## Tags
'''
# %% [markdown]
'''
Data-Science; Cryptocurrency; Statistics;
'''
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import os
import numpy as np
from ...testing import utils
from .. import nilearn as iface
from ...pipeline import engine as pe
import pytest
import numpy.testing as npt
no_nilearn = True
try:
__import__('nilearn')
no_nilearn = False
except ImportError:
pass
@pytest.mark.skipif(no_nilearn, reason="the nilearn library is not available")
class TestSignalExtraction():
filenames = {
'in_file': 'fmri.nii',
'label_files': 'labels.nii',
'4d_label_file': '4dlabels.nii',
'out_file': 'signals.tsv'
}
labels = ['CSF', 'GrayMatter', 'WhiteMatter']
global_labels = ['GlobalSignal'] + labels
@pytest.fixture(autouse=True, scope='class')
def setup_class(self, tmpdir_factory):
tempdir = tmpdir_factory.mktemp("test")
self.orig_dir = tempdir.chdir()
utils.save_toy_nii(self.fake_fmri_data, self.filenames['in_file'])
utils.save_toy_nii(self.fake_label_data, self.filenames['label_files'])
def test_signal_extract_no_shared(self):
# run
iface.SignalExtraction(in_file=self.filenames['in_file'],
label_files=self.filenames['label_files'],
class_labels=self.labels,
incl_shared_variance=False).run()
# assert
self.assert_expected_output(self.labels, self.base_wanted)
def test_signal_extr_bad_label_list(self):
# run
with pytest.raises(ValueError):
iface.SignalExtraction(in_file=self.filenames['in_file'],
label_files=self.filenames['label_files'],
class_labels=['bad'],
incl_shared_variance=False).run()
def test_signal_extr_equiv_4d_no_shared(self):
self._test_4d_label(self.base_wanted, self.fake_equiv_4d_label_data,
incl_shared_variance=False)
def test_signal_extr_4d_no_shared(self):
# set up & run & assert
self._test_4d_label(self.fourd_wanted, self.fake_4d_label_data, incl_shared_variance=False)
def test_signal_extr_global_no_shared(self):
# set up
wanted_global = [[-4./6], [-1./6], [3./6], [-1./6], [-7./6]]
for i, vals in enumerate(self.base_wanted):
wanted_global[i].extend(vals)
# run
iface.SignalExtraction(in_file=self.filenames['in_file'],
label_files=self.filenames['label_files'],
class_labels=self.labels,
include_global=True,
incl_shared_variance=False).run()
# assert
self.assert_expected_output(self.global_labels, wanted_global)
def test_signal_extr_4d_global_no_shared(self):
# set up
wanted_global = [[3./8], [-3./8], [1./8], [-7./8], [-9./8]]
for i, vals in enumerate(self.fourd_wanted):
wanted_global[i].extend(vals)
# run & assert
self._test_4d_label(wanted_global, self.fake_4d_label_data,
include_global=True, incl_shared_variance=False)
def test_signal_extr_shared(self):
# set up
wanted = []
for vol in range(self.fake_fmri_data.shape[3]):
volume = self.fake_fmri_data[:, :, :, vol].flatten()
wanted_row = []
for reg in range(self.fake_4d_label_data.shape[3]):
region = self.fake_4d_label_data[:, :, :, reg].flatten()
wanted_row.append((volume*region).sum()/(region*region).sum())
wanted.append(wanted_row)
# run & assert
self._test_4d_label(wanted, self.fake_4d_label_data)
def test_signal_extr_traits_valid(self):
''' Test a node using the SignalExtraction interface.
Unlike interface.run(), node.run() checks the traits
'''
# run
node = pe.Node(iface.SignalExtraction(in_file=os.path.abspath(self.filenames['in_file']),
label_files=os.path.abspath(self.filenames['label_files']),
class_labels=self.labels,
incl_shared_variance=False),
name='SignalExtraction')
node.run()
# assert
# just checking that it passes trait validations
def _test_4d_label(self, wanted, fake_labels, include_global=False, incl_shared_variance=True):
# set up
utils.save_toy_nii(fake_labels, self.filenames['4d_label_file'])
# run
iface.SignalExtraction(in_file=self.filenames['in_file'],
label_files=self.filenames['4d_label_file'],
class_labels=self.labels,
incl_shared_variance=incl_shared_variance,
include_global=include_global).run()
wanted_labels = self.global_labels if include_global else self.labels
# assert
self.assert_expected_output(wanted_labels, wanted)
def assert_expected_output(self, labels, wanted):
with open(self.filenames['out_file'], 'r') as output:
got = [line.split() for line in output]
labels_got = got.pop(0) # remove header
assert labels_got == labels
assert len(got) == self.fake_fmri_data.shape[3], 'num rows and num volumes'
# convert from string to float
got = [[float(num) for num in row] for row in got]
for i, time in enumerate(got):
assert len(labels) == len(time)
for j, segment in enumerate(time):
npt.assert_almost_equal(segment, wanted[i][j], decimal=1)
# dj: self doesn't have orig_dir at this point; not sure how to change it. Should work without it.
# def teardown_class(self):
# self.orig_dir.chdir()
fake_fmri_data = np.array([[[[2, -1, 4, -2, 3],
[4, -2, -5, -1, 0]],
[[-2, 0, 1, 4, 4],
[-5, 3, -3, 1, -5]]],
[[[2, -2, -1, -2, -5],
[3, 0, 3, -5, -2]],
[[-4, -2, -2, 1, -2],
[3, 1, 4, -3, -2]]]])
fake_label_data = np.array([[[1, 0],
[3, 1]],
[[2, 0],
[1, 3]]])
fake_equiv_4d_label_data = np.array([[[[1., 0., 0.],
[0., 0., 0.]],
[[0., 0., 1.],
[1., 0., 0.]]],
[[[0., 1., 0.],
[0., 0., 0.]],
[[1., 0., 0.],
[0., 0., 1.]]]])
base_wanted = [[-2.33333, 2, .5],
[0, -2, .5],
[-.3333333, -1, 2.5],
[0, -2, .5],
[-1.3333333, -5, 1]]
fake_4d_label_data = np.array([[[[0.2, 0.3, 0.5],
[0.1, 0.1, 0.8]],
[[0.1, 0.3, 0.6],
[0.3, 0.4, 0.3]]],
[[[0.2, 0.2, 0.6],
[0., 0.3, 0.7]],
[[0.3, 0.3, 0.4],
[0.3, 0.4, 0.3]]]])
fourd_wanted = [[-5.0652173913, -5.44565217391, 5.50543478261],
[-7.02173913043, 11.1847826087, -4.33152173913],
[-19.0869565217, 21.2391304348, -4.57608695652],
[5.19565217391, -3.66304347826, -1.51630434783],
[-12.0, 3., 0.5]]
|
import argparse
import copy
import random
from generator import makeBasicProject, addSpriteSheet, makeBackground, makeScene, makeActor, addSymmetricSceneConnections, makeMusic, reverse_direction, initializeGenerator, writeProjectToDisk
def SachitasGame():
"""
Create an empty world as an example to build future projects from.
"""
# Set up a barebones project
project = makeBasicProject()
# Create sprite sheet for the player sprite
player_sprite_sheet = addSpriteSheet(project, "actor_animated.png", "actor_animated", "actor_animated")
project.settings["playerSpriteSheetId"] = player_sprite_sheet["id"]
# add a sprite we can use for the duck
duck_sprite = addSpriteSheet(project, "duck.png", "duck", "animated", 2)
actor2 = makeActor(duck_sprite, 1, 30, "animated")
doorway_sprite = addSpriteSheet(project, "tower.png", "tower", "static")
actor3 = makeActor(doorway_sprite, 29, 2)
a_rock_sprite = addSpriteSheet(project, "rock.png", "rock", "static")
actor4 = makeActor(a_rock_sprite, 15, 30)
a_rock_sprite = addSpriteSheet(project, "rock.png", "rock", "static")
# Add a background image
default_bkg = makeBackground("stars.png", "stars")
project.backgrounds.append(default_bkg)
a_scene = copy.deepcopy(makeScene("Scene", default_bkg))
project.scenes.append(a_scene)
#make a function for collisions
# Get information about the background
bkg_x = default_bkg["imageWidth"]
bkg_y = default_bkg["imageHeight"]
bkg_width = default_bkg["width"]
bkg_height = default_bkg["height"]
actor = makeActor(a_rock_sprite, 1, 1)
actor2 = makeActor(duck_sprite, 5,5)
a_scene['actors'].append(actor3)
amount = random.randint(1, 5)
arr = [[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]
increment = 0
#for x in range(amount): #choosing a random point and making a line of sprites. The amount is the loop #
# increment = increment + 2
#actor = makeActor(a_rock_sprite, xPos, increment, "static")
#a_scene['actors'].append(actor)
#amount = random.randint(1, 10)
for i in range(1, 20):
amount = random.randint(3, 5)
xPos = random.randint(1, 30)
yPos = random.randint(1, 30)
increment = 0
edgeBlock = 0
while xPos == 31 or xPos == 30 or xPos == 29:
if arr[0][xPos-1] == 1:
xPos = random.randint(3, 29)
edgeBlock = 1
else:
edgeBlock = 1
break
while yPos == 31 or yPos == 30 or yPos == 29:
if arr[1][yPos-1] == 1:
yPos = random.randint(3, 29)
edgeBlock = 1
else:
edgeBlock = 1
break
#avoiding out of bounds error
if edgeBlock == 1:
actor = makeActor(a_rock_sprite, xPos - increment, yPos - increment, "static")
a_scene['actors'].append(actor)
increment = increment + 2
while arr[0][xPos-1] == 1 or arr[0][xPos + 1] == 1 or arr[0][ xPos - 3] == 1:
xPos = random.randint(3, 29)
while arr[1][yPos-1] == 1 or arr[1][yPos + 1] == 1 or arr[1][yPos - 3] == 1:
yPos = random.randint(3, 29)
if(0 <= xPos <= 15) and (0 <= yPos <= 15):
choose = random.randint(1,2)
if(choose == 1):
for a in range (amount):
increment = increment + 2
actor = makeActor(a_rock_sprite, xPos, yPos + increment, "static")
a_scene['actors'].append(actor)
arr[0][ xPos-1] = 1
arr[1][ yPos-1 + increment] = 1
if(choose == 2):
for b in range (amount):
increment = increment + 2
actor = makeActor(a_rock_sprite, xPos + increment, yPos, "static")
a_scene['actors'].append(actor)
arr[0][xPos-1 + increment] = 1
arr[1][yPos-1] = 1
if(0 <= xPos <= 15) and (15 <= yPos <= 28):
choose = random.randint(1,2)
if(choose == 1):
for c in range (amount):
increment = increment + 2
actor = makeActor(a_rock_sprite, xPos + increment, yPos, "static")
a_scene['actors'].append(actor)
arr[0][xPos -1 + increment] = 1
arr[1][yPos-1] = 1
if(choose == 2):
for d in range (amount):
increment = increment + 2
actor = makeActor(a_rock_sprite, xPos, yPos - increment, "static")
a_scene['actors'].append(actor)
arr[0][xPos-1] = 1
arr[1][yPos-1 + increment] = 1
if(15 <= xPos <= 28) and (15 <= yPos <= 28):
choose = random.randint(1,2)
if(choose == 1):
for e in range (1, amount):
actor = makeActor(a_rock_sprite, xPos, yPos - increment, "static")
a_scene['actors'].append(actor)
arr[0][xPos-1] = 1
arr[1][yPos-1 - increment] = 1
if(choose == 2):
for f in range (1, amount):
increment = increment + 2
actor = makeActor(a_rock_sprite, xPos - increment, yPos, "static")
a_scene['actors'].append(actor)
arr[0][xPos-1 - increment] = 1
arr[1][yPos-1] = 1
increment = increment + 2
if(15 <= xPos <= 28) and (0 <= yPos <= 15):
choose = random.randint(1,2)
if(choose == 1):
for g in range (1, amount):
actor = makeActor(a_rock_sprite, xPos, yPos + increment, "static")
a_scene['actors'].append(actor)
arr[0][xPos-1] = 1
arr[1][yPos -1 - increment] = 1
increment = increment + 2
if(choose == 2):
for h in range (1, amount):
actor = makeActor(a_rock_sprite, xPos - increment, yPos, "static")
a_scene['actors'].append(actor)
arr[0][xPos -1 + increment] = 1
arr[1][yPos-1] = 1
increment = increment + 2
#import pdb; pdb.set_trace()
# add a sprite to indicate the location of a doorway
# a better way to do this in the actual levels is to alter the background image instead
# Add some music
project.music.append(makeMusic("template", "template.mod"))
# Set the starting scene
project.settings["startSceneId"] = project.scenes[0]["id"]
return project
# Utilities
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
### Run the generator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Generate a Game Boy ROM via a GB Studio project file.")
parser.add_argument('--destination', '-d', type=str, help="destination folder name", default="../gbprojects/projects/")
parser.add_argument('--assets', '-a', type=str, help="asset folder name", default="../assets/")
args = parser.parse_args()
initializeGenerator(asset_folder = args.assets)
project = SachitasGame()
writeProjectToDisk(project, output_path = args.destination, assets_path="assets/")
if args.destination == "../gbprojects/projects/":
print(f"{bcolors.WARNING}NOTE: Used default output directory, change with the -d flag{bcolors.ENDC}")
print(f"{bcolors.OKBLUE}See generate.py --help for more options{bcolors.ENDC}")
|
passw = "123"
uid = "xiaoming"
if passw=="123" and uid=="xiaoming" :
print("login success")
|
#!/usr/bin/python3
"""Saved contour plots for the ETGM paper"""
# Standard Packages
import sys; sys.path.insert(0, '../'); sys.path.insert(0, '../../')
import logging
# 3rd Party Packages
import matplotlib.pyplot as plt
# Local Packages
import modules.options # Import here even though it's unused to avoid issues
from plotting.modules.plotstyles import PlotStyles, StyleType
from plotting.plot_contour import main
_log = logging.getLogger(__name__)
# Run this file directly to plot scanned variable profiles from previously created scanned data
if __name__ == '__main__':
scan_data = {}
PlotStyles(
axes=StyleType.Axes.WHITE,
lines=StyleType.Lines.RHO_MMM,
layout=StyleType.Layout.AIP,
)
plt.rcParams.update({
'savefig.format': 'pdf', # Common save formats: png, pdf, eps
# 'text.usetex': True,
})
# Text to append to the end of the generated save name
savenameend = ''
'''
Input options:
* vars_to_plot (list): List of output variables to plot
Examples:
* vars_to_plot = ['gmaETGM']
* vars_to_plot = ['xteMTM', 'xteETGM', 'xteETG', 'gmaMTM', 'omgMTM', 'dbsqprf']
* vars_to_plot = OutputVariables().get_all_output_vars()
* vars_to_plot = OutputVariables().get_etgm_vars()
'''
# vars_to_plot = ['gmaETGM', 'omgETGM', 'xteETGM', 'xte2ETGM', 'kyrhoeETGM', 'kyrhosETGM', 'gave', 'var_to_scan']
# vars_to_plot = ['gmaETGM', 'omgETGM', 'xteETGM']
# vars_to_plot = ['var_to_scan', 'gmaETGM', 'lareunit', 'alphamhdunit', 'xteETGM', 'xte2ETGM', 'gaveETGM']
# vars_to_plot = ['var_to_scan']
'''
Scan Data:
* Uncomment the lines you wish to use
- keys (str): The runid of the scan
- values (list of int): The scan_numbers to plot from
'''
"""exbs off"""
vars_to_plot = [
'gmaETGM', 'omgETGM', 'xteETGM', 'xte2ETGM',
'xteETG', 'walfvenunit', 'phi2ETGM', 'Apara2ETGM', 'satETGM',
'gaveETGM', 'kyrhosETGM', 'kyrhoeETGM', 'kpara2ETGM', 'fleETGM', 'omegateETGM',
'omegadETGM', 'omegad_gaveETGM', 'omegasETGM', 'omegasetaETGM', 'omegadiffETGM', 'gammadiffETGM',
'gne', 'gte', 'shat_gxi', 'etae', 'betaeunit', 'wexbs', 'bunit', 'te', 'ne', 'q'
]
"""SUMMED MODES"""
# scan_data['121123K55'] = [4]
# scan_data['120968A02'] = [4]
# scan_data['120982A09'] = [4]
# scan_data['129016A04'] = [4]
# scan_data['129017A04'] = [4]
# scan_data['129018A02'] = [4]
# scan_data['129019A02'] = [4]
# scan_data['129020A02'] = [4]
# scan_data['129041A10'] = [4]
# scan_data['138536A01'] = [1475] # 1236 (7), 1256 (8), 1269 (9)
# scan_data['141007A10'] = [4]
# scan_data['141031A01'] = [4]
# scan_data['141032A01'] = [4]
# scan_data['141040A01'] = [4]
# scan_data['141716A80'] = [4]
# scan_data['132017T01'] = [4]
# scan_data['141552A01'] = [4]
"""MAX MODE"""
# scan_data['121123K55'] = [5]
# scan_data['120968A02'] = [5]
# scan_data['120982A09'] = [5]
# scan_data['129016A04'] = [5]
# scan_data['129017A04'] = [5]
# scan_data['129018A02'] = [5]
# scan_data['129019A02'] = [5]
# scan_data['129020A02'] = [5]
# scan_data['129041A10'] = [5]
# scan_data['138536A01'] = [1476] # 1236 (7), 1256 (8), 1269 (9)
# scan_data['141007A10'] = [5]
# scan_data['141031A01'] = [5]
# scan_data['141032A01'] = [5]
# scan_data['141040A01'] = [5]
# scan_data['141716A80'] = [5]
# scan_data['132017T01'] = [5]
# scan_data['141552A01'] = [5]
"""exbs on"""
# vars_to_plot = ['gmaETGM', 'xteETGM', 'xte2ETGM']
# scan_data['121123K55'] = [2]
# scan_data['120968A02'] = [2]
# scan_data['120982A09'] = [2]
# scan_data['129016A04'] = [2]
# scan_data['129017A04'] = [2]
# scan_data['129018A02'] = [2]
# scan_data['129019A02'] = [2]
# scan_data['129020A02'] = [2]
# scan_data['129041A10'] = [2]
# scan_data['138536A01'] = [1237]
# scan_data['141007A10'] = [2]
# scan_data['141031A01'] = [2]
# scan_data['141032A01'] = [2]
# scan_data['141040A01'] = [2]
# scan_data['141716A80'] = [2]
# scan_data['132017T01'] = [2]
# scan_data['141552A01'] = [2]
# vars_to_plot = ['gmaETGM', 'omgETGM', 'xteETGM', 'xte2ETGM']
# vars_to_plot = ['xteETGM', 'xte2ETGM']
# vars_to_plot = ['gmaDBM', 'omgDBM', 'xteDBM', 'xtiDBM']
# vars_to_plot = ['gmaMTM', 'omgMTM', 'xteMTM', 'kyrhosMTM']
# scan_data['138536A01'] = [1866]
# scan_data['138536A01'] = [1925]
# scan_data['121123K55'] = [7]
# scan_data['120968A02'] = [6]
# scan_data['129020A02'] = [6]
# scan_data['141007A10'] = [6]
# scan_data['101381T31'] = [1]
# scan_data['129016A04'] = [6]
# scan_data['129041A10'] = [6]
# scan_data['132017T01'] = [6]
scan_data['138536A01'] = [1]
# scan_data['138536A01'] = [i for i in range(1716, 1738 + 1)]
# scan_data['138536A01'] = [i for i in range(1749, 1750 + 1)]
# scan_data['138536A01'] = [i for i in range(1756, 1763 + 1)]
# scan_data['138536A01'] = [i for i in range(1779, 1780 + 1)]
# scan_data['138536A01'] = [i for i in range(1924, 1925 + 1)]
# scan_data['138536A01'] = [
# i for i in [
# *range(1716, 1738 + 1),
# *range(1749, 1750 + 1),
# *range(1756, 1763 + 1),
# *range(1779, 1780 + 1),
# *range(1924, 1925 + 1),
# ]
# ]
main(vars_to_plot, scan_data, savenameend=savenameend, savefig=1, savedata=1)
|
# pylint: disable=W0603
'''Logging share library.'''
import logging
from logging.handlers import TimedRotatingFileHandler
from enum import Enum
SLOG_LOGGER = None
SLOG_AMOUNT_OF_KEEPED_LOG_FILE = 7
SLOG_DEFAULT_FILE = 'log/system.log'
class SLogLevel(Enum):
'''Define logging level Enumerations.'''
DEBUG = 1
INFO = 2
WARNING = 3
ERROR = 4
class SLog():
'''Logging with default settings.'''
@staticmethod
def init(level = SLogLevel.INFO, file=None):
'''Initialize the logging system.'''
global SLOG_LOGGER
# format the log record
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
if file is None:
file = SLOG_DEFAULT_FILE
handler = TimedRotatingFileHandler(file,
when='midnight',
backupCount=SLOG_AMOUNT_OF_KEEPED_LOG_FILE)
handler.setFormatter(formatter)
SLOG_LOGGER = logging.getLogger(__name__)
SLOG_LOGGER.addHandler(handler)
# The default logging level is INFO
SLog.set_level(level)
@staticmethod
def set_level(level = SLogLevel.INFO):
'''Set logging level.'''
if level == SLogLevel.ERROR:
SLOG_LOGGER.setLevel(logging.ERROR)
elif level == SLogLevel.WARNING:
SLOG_LOGGER.setLevel(logging.WARNING)
elif level == SLogLevel.INFO:
SLOG_LOGGER.setLevel(logging.INFO)
else:
SLOG_LOGGER.setLevel(logging.DEBUG)
@staticmethod
def debug(msg):
'''Log debug level message.'''
SLOG_LOGGER.debug(msg)
@staticmethod
def info(msg):
'''Log info level message.'''
SLOG_LOGGER.info(msg)
@staticmethod
def warning(msg):
'''Log warning level message.'''
SLOG_LOGGER.warning(msg)
@staticmethod
def error(msg):
'''Log error level message.'''
SLOG_LOGGER.error(msg)
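# --- Usage sketch (illustrative addition, not part of the original module) ---
# A minimal example of how this module is meant to be used, assuming the
# 'log/' directory already exists so TimedRotatingFileHandler can open the file.
if __name__ == '__main__':
    SLog.init(level=SLogLevel.DEBUG, file='log/system.log')
    SLog.debug('debug message')
    SLog.info('service started')
    SLog.warning('low disk space')
    SLog.error('something went wrong')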
|
import numpy as np
import pandas as pd
from sqlalchemy import MetaData, Table, Column
import config
def load_query(query):
"""
loads data from db, based on the given sql query
:param query: sql query in string
:return: dataframe
"""
df = pd.read_sql_query(query, con=config.db_connection)
return df
def load_table(table):
"""
loads table based from postgres and returns dataframe of the table
:param table: string, table name
:return: dataframe
"""
df = pd.read_sql_table(table, con=config.db_connection)
return df
def load_experiment_names():
query = """SELECT experiment_id, filename FROM "Experiment" """
return load_query(query)
def initialize_table(table_name, list_columns_types):
"""
Creation (initialization) of postgressql table
:return: empty table
"""
con = config.db_connection
if not con.dialect.has_table(con, table_name):
metadata = MetaData(config.db_connection)
Table(table_name, metadata, *(Column(**i) for i in list_columns_types))
metadata.create_all()
print("Table '{}' created".format(table_name))
else:
print("Table '{}' already exists, so the initialization will be skipped".format(table_name))
def append_to_experiment(table_name, column_names, check_existing=None):
if check_existing is not None:
col_name = check_existing
value = column_names[col_name]
sql = """SELECT {0} FROM "{1}" WHERE {0} = '{2}'""".format(col_name, table_name, value)
if config.db_connection.execute(sql).fetchone() is not None:
print("Value '{}' already exists in column '{}' in table '{}'. " +
"The value will not be added to the table".format(value, col_name, table_name))
return None
metadata = MetaData(bind=config.db_connection)
mytable = Table(table_name, metadata, autoload=True)
ins = mytable.insert()
new_row = ins.values(column_names)
config.db_connection.execute(new_row)
print("Added values {} to table {}".format(column_names, table_name))
def return_heritage_id(file_name):
"""
Check if file_name exists in heritage table. If so, return the heritage_id.
:param file_name: name of file
:return: heritage id that belongs to filename if it exists in the heritage_table
"""
sql = """SELECT name, heritage_id FROM "Heritage" WHERE name = %s """
result = config.db_connection.execute(sql, file_name).fetchone()
if result is None:
raise ValueError("file '{}' does not exist in 'Heritage' table. " +
"Check file name and/or add file name to 'Heritage table'"
.format(file_name))
return result[1]
def load_peaks(experiment):
if experiment == 'test':
return pd.DataFrame(columns=['rt', 'mz', 'intensity', 'formula', 'label'], data=[[100, 85, 120, 'CO', 'blanco'],
[100, 95, 10, 'COH2', ''],
[100, 100.1, 30, 'COH3', ''],
[90, 100.1, 50, 'COH3', '']])
if experiment == 'random':
n = 200
formulas = ['CO', 'COH2', 'COH3', None]
weights = [.05, .15, .1, .7]
peaks = np.random.randint(1, n, (n, 3))
labels = np.random.choice(formulas, (n, 1), p=weights)
blanco = np.random.choice(['blanco', None], (n, 1))
data = np.concatenate((peaks, labels, blanco), axis=1)
return pd.DataFrame(columns=['rt', 'mz', 'intensity', 'formula', 'label'], data=data)
#
# query = """SELECT
# spec.time_passed_since_start AS rt,
# peak.mz AS mz,
# peak.intensity AS intensity,
# fiv.base_formula AS formula,
# '' AS label --- TODO this could be more than one
# FROM "Experiment" exp
# JOIN "Spectrum" spec ON exp.experiment_id = spec.experiment_id
# JOIN "Peak" peak ON spec.spectrum_id = peak.spectrum_id AND spec.experiment_id = peak.experiment_id
# JOIN "PeakMatch" pm ON pm.spectrum_id = peak.spectrum_id AND pm.experiment_id = peak.experiment_id
# JOIN "FormulaIsotopeVariant" fiv ON pm.isotope_id = fiv.id
# JOIN "Label" label ON label.formula = fiv.base_formula
# where exp.experiment_id = {}""".format(experiment)
#
# # TODO deze staat erin omdat we de shortcut trivialpeakmatch gebruiken
# # TODO dit kan problemen opleveren als we meerdere labels hebben
# if True:
query = """SELECT
spec.time_passed_since_start AS rt,
peak.mz AS mz,
peak.intensity AS intensity,
tpm.formula as formula
FROM "Experiment" exp
JOIN "Spectrum" spec ON exp.experiment_id = spec.experiment_id
JOIN "Peak" peak ON spec.spectrum_id = peak.spectrum_id AND spec.experiment_id = peak.experiment_id
LEFT JOIN "TrivialPeakMatch" tpm on tpm.matched_mass = peak.mz
where exp.experiment_id = {}""".format(experiment)
return load_query(query)
def return_query_values_per_experiment(experiment_id):
query = '''
SELECT spec.time_passed_since_start AS rt, peak.mz AS mz, peak.intensity AS intensity, tpm.formula as formula, lab.label_name as label, exp.filename
FROM "Experiment" exp
JOIN "Spectrum" spec ON exp.experiment_id = spec.experiment_id
JOIN "Peak" peak ON spec.spectrum_id = peak.spectrum_id AND spec.experiment_id = peak.experiment_id
JOIN "TrivialPeakMatch" tpm on tpm.matched_mass = peak.mz LEFT JOIN "Label" lab on lab.formula = tpm.formula
where exp.experiment_id = {} '''.format(experiment_id)
return query
def load_df_per_exp(exp_id):
df = load_query(return_query_values_per_experiment(exp_id))
return df
def load_peaks_for_mz(experiment_id, min_value, max_value):
"""
load peaks in an experiment where min_value <= mz < max_value per rt
sums intensity of the peak where more than one mz on a rt
concatenates the known formulas per rt, mz combination
:param experiment_id: unique identifier of experiment (int)
:param min_value: lower mz bound (float)
:param max_value: upper mz bound (float)
:return: pandas dataframe; rt, intensity, formulas
"""
query = '''
select
spec.time_passed_since_start AS rt,
sum(peak.intensity) AS intensity,
string_agg(distinct tpm.formula, ', ') as formulas,
string_agg(DISTINCT cn.name, ', ') AS names
FROM "Experiment" exp
left JOIN "Spectrum" spec ON exp.experiment_id = spec.experiment_id
left JOIN "Peak" peak ON spec.spectrum_id = peak.spectrum_id AND spec.experiment_id = peak.experiment_id
left JOIN "TrivialPeakMatch" tpm on tpm.matched_mass = peak.mz and tpm.spectrum_id = spec.spectrum_id and exp.experiment_id = tpm.experiment_id
left JOIN "ChemicalName" cn ON cn.formula = tpm.formula
where
exp.experiment_id = {0}
and round(peak.mz::numeric,3) >= {1}
and round(peak.mz::numeric,3) < {2}
group by
spec.time_passed_since_start '''.format(experiment_id, min_value, max_value)
return load_query(query)
def load_peaks_for_rt(experiment_id, min_value, max_value):
"""
load peaks in an experiment where min_value <= rt < max_value per mz
sums intensity of the peaks where more than one mz on an rt
concatenates the known formulas per rt, mz combination
:param experiment_id: unique identifier of experiment (int)
:param min_value: lower rt bound (float)
:param max_value: upper rt bound (float)
:return: pandas dataframe; mz, intensity, formulas
"""
query = '''
select
round(peak.mz::numeric,3) AS mz,
sum(peak.intensity) AS intensity,
string_agg(distinct tpm.formula, ', ') as formulas,
string_agg(DISTINCT cn.name, ', ') AS names
FROM "Experiment" exp
left JOIN "Spectrum" spec ON exp.experiment_id = spec.experiment_id
left JOIN "Peak" peak ON spec.spectrum_id = peak.spectrum_id AND spec.experiment_id = peak.experiment_id
left JOIN "TrivialPeakMatch" tpm on tpm.matched_mass = peak.mz and tpm.spectrum_id = spec.spectrum_id and exp.experiment_id = tpm.experiment_id
left JOIN "ChemicalName" cn ON cn.formula = tpm.formula
where
exp.experiment_id = {0}
and spec.time_passed_since_start >= {1}
and spec.time_passed_since_start < {2}
and not peak.mz is null
group by
round(peak.mz::numeric,3) '''.format(experiment_id, min_value, max_value)
return load_query(query)
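# --- Usage sketch (illustrative addition, not part of the original module) ---
# A minimal example of how these helpers fit together, assuming that
# config.db_connection points at a live PostgreSQL engine and that the column
# names and file name below match the actual schema/data. The function is not
# called on import; invoke it manually if desired.
def _example_usage():
    from sqlalchemy import Integer, String

    # create the table if it does not exist yet; each dict is unpacked into Column(**kwargs)
    initialize_table('Heritage', [
        {'name': 'heritage_id', 'type_': Integer, 'primary_key': True},
        {'name': 'name', 'type_': String},
    ])
    # insert a row, skipping it when a row with the same 'name' already exists
    append_to_experiment('Heritage', {'heritage_id': 1, 'name': 'example_file.mzML'},
                         check_existing='name')
    # look up the heritage_id that belongs to the file we just inserted
    return return_heritage_id('example_file.mzML')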
|
class Config():
'''
This is the setting file. You can change some parameters here.
'''
'''
--------------------
主机的地址和端口号
Host and port of your server
'''
HOST = '0.0.0.0'
PORT = '6116'
'''
--------------------
'''
'''
--------------------
游戏API地址前缀
Game API's URL prefix
'''
GAME_API_PREFIX = '/years/19'
'''
--------------------
'''
'''
--------------------
允许使用的游戏版本,若为空,则默认全部允许
Allowed game versions
If it is blank, all are allowed.
'''
ALLOW_APPVERSION = []
'''
--------------------
'''
'''
--------------------
联机功能的端口号,若为空,则默认不开启联机功能
Port of your link play server
If it is blank, link play will be unavailable.
'''
UDP_PORT = '10900'
'''
--------------------
'''
'''
--------------------
联机功能地址,留空则自动获取
Link Play address
If left blank, it will be obtained automatically.
'''
LINK_PLAY_HOST = '' # ***.com
'''
--------------------
'''
'''
--------------------
SSL证书路径
留空则使用HTTP
SSL certificate path
If left blank, use HTTP.
'''
SSL_CERT = '' # *.pem
SSL_KEY = '' # *.key
'''
--------------------
'''
'''
--------------------
愚人节模式开关
Switch of April Fool's Day
'''
IS_APRILFOOLS = True
'''
--------------------
'''
'''
--------------------
世界排名的最大显示数量
The largest number of global rank
'''
WORLD_RANK_MAX = 200
'''
--------------------
'''
'''
--------------------
世界模式当前活动图设置
Current available maps in world mode
'''
AVAILABLE_MAP = [] # Ex. ['test', 'test2']
'''
--------------------
'''
'''
--------------------
Web后台管理页面的用户名和密码
Username and password of web background management page
'''
USERNAME = 'admin'
PASSWORD = 'admin'
'''
--------------------
'''
'''
--------------------
Web后台管理页面的session秘钥,如果不知道是什么,请不要修改
Session key of web background management page
If you don't know what it is, please don't modify it.
'''
SECRET_KEY = '1145141919810'
'''
--------------------
'''
'''
--------------------
API接口完全控制权限Token,留空则不使用
API interface full control permission Token
If you don't want to use it, leave it blank.
'''
API_TOKEN = ''
'''
--------------------
'''
'''
--------------------
歌曲下载地址前缀,留空则自动获取
Song download address prefix
If left blank, it will be obtained automatically.
'''
DOWNLOAD_LINK_PREFIX = '' # http://***.com/download/
'''
--------------------
'''
'''
--------------------
玩家歌曲下载的24小时次数限制,每个文件算一次
Player's song download limit times in 24 hours, once per file
'''
DOWNLOAD_TIMES_LIMIT = 3000
'''
歌曲下载链接的有效时长,单位:秒
Effective duration of song download link, unit: seconds
'''
DOWNLOAD_TIME_GAP_LIMIT = 1000
'''
--------------------
'''
'''
--------------------
Arcaea登录的最大允许设备数量,最小值为1
The maximum number of devices allowed to log in Arcaea, minimum: 1
'''
LOGIN_DEVICE_NUMBER_LIMIT = 100
'''
是否允许同设备多应用共存登录
请注意,这个选项设置为True时,下一个选项将自动变为False
If logging in from multiple applications on the same device is allowed
Note that when this option is set to True, the next option automatically becomes False
'''
ALLOW_LOGIN_SAME_DEVICE = True
'''
24小时内登陆设备数超过最大允许设备数量时,是否自动封号(1天、3天、7天、15天、31天)
When the number of login devices exceeds the maximum number of devices allowed to log in Arcaea within 24 hours, whether the account will be automatically banned (1 day, 3 days, 7 days, 15 days, 31 days)
'''
ALLOW_BAN_MULTIDEVICE_USER_AUTO = False
'''
--------------------
'''
'''
--------------------
是否记录详细的服务器日志
If recording detailed server logs is enabled
'''
ALLOW_LOG_INFO = False
'''
--------------------
'''
'''
--------------------
用户注册时的默认记忆源点数量
The default amount of memories at the time of user registration
'''
DEFAULT_MEMORIES = 6116
'''
--------------------
'''
'''
--------------------
数据库更新时,是否采用最新的角色数据,如果你想采用最新的官方角色数据
注意:如果是,旧的数据将丢失;如果否,某些角色的数据变动将无法同步
If using the latest character data when updating database. If you want to only keep newest official character data, please set it `True`.
Note: If `True`, the old data will be lost; If `False`, the data changes of some characters will not be synchronized.
'''
UPDATE_WITH_NEW_CHARACTER_DATA = True
'''
--------------------
'''
'''
--------------------
是否全解锁搭档
If unlocking all partners is enabled
'''
CHARACTER_FULL_UNLOCK = True
'''
--------------------
'''
'''
--------------------
是否全解锁世界歌曲
If unlocking all world songs is enabled
'''
WORLD_SONG_FULL_UNLOCK = True
'''
--------------------
'''
'''
--------------------
是否全解锁世界场景
If unlocking all world sceneries is enabled
'''
WORLD_SCENERY_FULL_UNLOCK = True
'''
--------------------
'''
'''
--------------------
是否强制使用全解锁云端存档
If forcing full unlocked cloud save is enabled
'''
SAVE_FULL_UNLOCK = False
'''
--------------------
'''
'''
--------------------
是否使用最好的 10 条记录(而不是最近的 30 条记录中较好的 10 条)来计算 PTT
Calculate PTT with best 10 instead of recent best 10
'''
USE_B10_AS_R10 = False
'''
--------------------
'''
|
"""
:module:
:synopsis:
:author: Julian Sobott
"""
import os
import unittest
from OpenDrive.server_side import database, paths as server_paths
from tests.server_side.database.helper_database import h_setup_server_database
class TestDatabaseConnections(unittest.TestCase):
def setUp(self) -> None:
h_setup_server_database()
def test_create_database_non_existing(self):
self.assertTrue(os.path.exists(server_paths.SERVER_DB_PATH),
"Database file was not created or created at the wrong place!")
def test_tables_creation(self):
sql = "SELECT name FROM sqlite_master WHERE type='table'"
with database.DBConnection(server_paths.SERVER_DB_PATH) as db:
ret = db.get(sql)
tables = [table_name for table_name, in ret]
self.assertTrue("users" in tables)
self.assertTrue("devices" in tables)
self.assertTrue("folders" in tables)
|
__author__ = 'kocsen'
import logging
import time
import json
import mysql.connector
from mysql.connector import errorcode
from mysql.connector.errors import IntegrityError
"""
Used to write app feature data to a DB.
NOTE: This script is VERY specifically tied to the way data is modeled
in the existing Database AppDataDB and the naming conventions of the apk's.
"""
def write_app_data(app, config_filename):
"""
:param app: the Android Application object whose extracted features should be stored
:param config_filename: path to a JSON file containing the MySQL connection settings
:return: None
"""
config = parse_config(config_filename)
cnx = None
try:
# Establish a connection
cnx = mysql.connector.connect(**config)
write(app, cnx)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
logging.error("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
logging.error("Database does not exist")
else:
logging.error(str(err))
finally:
# Close no matter what
if cnx is not None:
cnx.close()
def write(app, cnx):
"""
Given an app and a SQL connection, write the app features
into the feature table.
:param app: The Android Application object with the data
:param cnx: SQL Connection
:return:
"""
cursor = cnx.cursor()
results = app.features
table_name = 'features'
split = app.name.split("-")
if len(split) != 3:
exit()
logging.debug("getting foregin id")
foreign_key_id = get_version_id(split[0], split[1], split[2], cnx)
add_feature_query = ("INSERT INTO version_features "
"(app_version_id, internet, account_manager, uses_ssl, sharing_sending, translation) "
"VALUES (%s, %s, %s, %s, %s, %s)")
feature_data = (
foreign_key_id,
results['Internet'],
results['Account Manager'],
results['Use SSL'],
results['Sharing-Sending'],
results['Internationalization']
)
try:
cursor.execute(add_feature_query, feature_data)
except IntegrityError as e:
logging.warning("It seems we already analyzed this app." + str(e))
# commit & actually save
cnx.commit()
def get_version_id(app_package, version_code, raw_date, cnx):
"""
Gets the id of the app found inside of the `version_details` table
in the Database. Used for adding a foreign key to the features table
:param app_package: The name, such as com.google.gmail (without the .apk ending)
:param version_code: The version code used in the DB
:param raw_date: The date as appears on apk name in the format YYYY_MM_DD
:return: id - as integer
"""
cursor = cnx.cursor()
uid = None
logging.debug("App package: \t" + app_package)
logging.debug("Version code:\t" + version_code)
logging.debug("Raw date: \t" + raw_date)
parsed_date = time.strftime("%b %-d, %Y", time.strptime(raw_date, "%Y_%m_%d"))
logging.debug("Parsed date: \t" + parsed_date)
# Select id FROM version_details WHERE
# docid = app package,
# details_appDetails_versionCode = version_code
# details_appDetails_uploadDate = parsed_date // Maybe use %LIKE%
query = ("SELECT id FROM version_details WHERE "
"docid = %s AND "
"details_appDetails_versionCode = %s AND "
"details_appDetails_uploadDate = %s")
cursor.execute(query, (app_package, version_code, parsed_date))
row = cursor.fetchone()
uid = row[0]
logging.debug("Retrieved FKey ID:\t" + str(uid))
cursor.close()
return uid
def parse_config(filename):
try:
# TODO: Validate configuration contents
with open(filename) as f:
config = json.load(f)
return config
except Exception as e:
logging.error(str(e))
exit(2)
|
nome = input('Write a sentence: ').strip().lower()
print('How many times does the letter "A" appear? {} time(s)!'.format(nome.count('a')))
print('At what position does the letter "A" appear first? Position {}!'.format(nome.find('a')))
print('At what position does the letter "A" appear last? Position {}!'.format(nome.rfind('a')))
|
from datetime import datetime, timedelta
import calendar
from dateutil.relativedelta import relativedelta
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from timezone_field import TimeZoneField
import fleming
import pytz
from .fields import DurationField
INTERVAL_CHOICES = (
('DAY', 'Day'),
('WEEK', 'Week'),
('MONTH', 'Month'),
('QUARTER', 'Quarter'),
('YEAR', 'Year'),
)
class LocalizedRecurrenceQuerySet(models.query.QuerySet):
def update_schedule(self, time=None):
"""Update the schedule times for all the provided recurrences.
:type time: :py:class:`datetime.datetime`
:param time: The time the schedule was checked. If ``None``,
defaults to ``datetime.utcnow()``.
In the common case, this can be called without any arguments.
.. code-block:: python
>>> past_due = LocalizedRecurrence.objects.filter(
... next_scheduled__lte=datetime.utcnow()
... )
>>> # Do something with past_due recurrences
>>> past_due.update_schedule()
The code above will ensure that all the processed recurrences
are re-scheduled for their next recurrence.
Calling this function has the side effect that the
``next_scheduled`` attribute of every recurrence in the
queryset will be updated to the new time in utc.
"""
_update_schedule(self, time=time)
class LocalizedRecurrenceManager(models.Manager):
def get_queryset(self):
return LocalizedRecurrenceQuerySet(self.model)
def update_schedule(self, time=None):
"""Update the schedule times for all recurrences.
Functions exactly the same as the method on the querysets. The
following two calls are equivalent:
.. code-block:: python
>>> LocalizedRecurrence.objects.all().update_schedule()
>>> LocalizedRecurrence.objects.update_schedule()
Calling this function has the side effect that the
``next_scheduled`` attribute of every recurrence will be
updated to the new time in utc.
"""
self.get_queryset().update_schedule(time=time)
@python_2_unicode_compatible
class LocalizedRecurrence(models.Model):
"""The information necessary to act on events in users local
times. Can be instantiated with ``LocalizedRecurrence.objects.create``
:type interval: str
:param interval: The interval at which the event recurs.
One of ``'DAY'``, ``'WEEK'``, ``'MONTH'``, ``'QUARTER'``, ``'YEAR'``.
:type offset: :py:class:`datetime.timedelta`
:param offset: The amount of time into the interval that the event
occurs at.
If the interval is monthly, quarterly, or yearly, the number
of days in the interval are variable. In the case of offsets
with more days than the number of days in the interval,
updating the schedule will not raise an error, but will update
to the last day in the interval if necessary.
:type timezone: pytz.timezone
:param timezone: The local timezone for the user.
Localized recurrences are simply objects in the database. They can
be created with standard django ORM tools:
.. code-block:: python
>>> from datetime import datetime, timedelta
>>> my_lr = LocalizedRecurrence.objects.create(
... interval='DAY',
...     offset=timedelta(hours=15),
...     timezone=pytz.timezone('US/Eastern'),
... )
Once instantiated it is simple to check if a localized recurrence
is due to be acted upon.
.. code-block:: python
>>> my_lr.next_scheduled < datetime.utcnow()
True
After a recurrence has been acted upon, its schedule can be
simply reset to occur at the prescribed time in the next interval.
.. code-block:: python
>>> my_lr.update_schedule()
>>> my_lr.next_scheduled < datetime.utcnow()
False
"""
interval = models.CharField(max_length=18, default='DAY', choices=INTERVAL_CHOICES)
offset = DurationField(default=timedelta(0))
timezone = TimeZoneField(default='UTC')
previous_scheduled = models.DateTimeField(default=datetime(1970, 1, 1))
next_scheduled = models.DateTimeField(default=datetime(1970, 1, 1))
objects = LocalizedRecurrenceManager()
def __str__(self):
return 'ID: {0}, Interval: {1}, Next Scheduled: {2}'.format(self.id, self.interval, self.next_scheduled)
def update(self, **updates):
"""Updates fields in the localized recurrence."""
for update in updates:
setattr(self, update, updates[update])
return self.save()
def update_schedule(self, time=None):
"""Update the schedule for this recurrence or an object it tracks.
:type time: :py:class:`datetime.datetime`
:param time: The time the schedule was checked. If ``None``,
defaults to ``datetime.utcnow()``.
Calling this function has the side effect that the
``next_scheduled`` attribute will be updated to the new time
in utc.
"""
_update_schedule([self], time)
def utc_of_next_schedule(self, current_time):
"""The time in UTC of this instance's next recurrence.
:type current_time: :py:class:`datetime.datetime`
:param current_time: The current time in utc.
Usually this function does not need to be called directly, but
will be used by ``update_schedule``. If however, you need to
check when the next recurrence of an instance would happen,
without persisting an update to the schedule, this function
can be called without side-effect.
"""
local_time = fleming.convert_to_tz(current_time, self.timezone)
local_scheduled_time = fleming.fleming.dst_normalize(
_replace_with_offset(local_time, self.offset, self.interval))
utc_scheduled_time = fleming.convert_to_tz(local_scheduled_time, pytz.utc, return_naive=True)
if utc_scheduled_time <= current_time:
additional_time = {
'DAY': timedelta(days=1),
'WEEK': timedelta(weeks=1),
'MONTH': relativedelta(months=1),
'QUARTER': relativedelta(months=3),
'YEAR': relativedelta(years=1),
}
utc_scheduled_time = fleming.add_timedelta(
utc_scheduled_time, additional_time[self.interval], within_tz=self.timezone)
return utc_scheduled_time
def _update_schedule(recurrences, time=None):
"""Update the schedule times for all the provided recurrences.
"""
time = time or datetime.utcnow()
for recurrence in recurrences:
recurrence.next_scheduled = recurrence.utc_of_next_schedule(time)
recurrence.previous_scheduled = time
recurrence.save()
def _replace_with_offset(dt, offset, interval):
"""Replace components of a datetime with those of a timedelta.
This replacement is done within the given interval. This means the
the final result, will the be a datetime, at the desired offset
given the interval.
"""
hours, minutes, seconds = offset.seconds // 3600, (offset.seconds // 60) % 60, offset.seconds % 60
interval = interval.lower()
if interval == 'day':
dt_out = dt.replace(hour=hours, minute=minutes, second=seconds)
elif interval == 'week':
dt_out = dt + timedelta(days=offset.days - dt.weekday())
dt_out = dt_out.replace(hour=hours, minute=minutes, second=seconds)
elif interval == 'month':
_, last_day = calendar.monthrange(dt.year, dt.month)
day = (offset.days + 1) if (offset.days + 1) <= last_day else last_day
dt_out = dt.replace(day=day, hour=hours, minute=minutes, second=seconds)
elif interval == 'quarter':
month_range = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]][int((dt.month - 1) / 3)]
quarter_days = sum(calendar.monthrange(dt.year, month)[1] for month in month_range)
days = offset.days if offset.days <= (quarter_days - 1) else (quarter_days - 1)
dt_out = fleming.floor(dt, month=3).replace(hour=hours, minute=minutes, second=seconds)
dt_out += timedelta(days)
elif interval == 'year':
leap_year_extra_days = 1 if calendar.isleap(dt.year) else 0
days = offset.days if offset.days <= 364 + leap_year_extra_days else 364 + leap_year_extra_days
dt_out = fleming.floor(dt, year=1).replace(hour=hours, minute=minutes, second=seconds)
dt_out += timedelta(days)
else:
raise ValueError('{i} is not a proper interval value'.format(i=interval))
return dt_out
|
#!/usr/bin/env python3
# Copyright (c) 2020 Bitcoin Association
# Distributed under the Open BSV software license, see the accompanying file LICENSE.
from test_framework.blocktools import create_block, create_coinbase, create_transaction
from test_framework.test_framework import BitcoinTestFramework, ChainManager
from test_framework.util import assert_equal, wait_until, assert_raises_rpc_error
from test_framework.mininode import ToHex, msg_block, msg_tx, CBlock, CTransaction, CTxIn, COutPoint, CTxOut
from test_framework.script import CScript, OP_TRUE, OP_RETURN
from test_framework.cdefs import BUFFER_SIZE_HttpTextWriter
from binascii import b2a_hex
def b2x(b):
return b2a_hex(b).decode('ascii')
class GetBlockTemplateRPCTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.chain = ChainManager()
def check_mempool(self, rpc, should_be_in_mempool):
wait_until(lambda: {t.hash for t in should_be_in_mempool}.issubset(set(rpc.getrawmempool())))
def createLargeTransaction(self, size, depends):
tx = CTransaction()
for depend in depends:
tx.vin.append(CTxIn(COutPoint(depend.sha256, 0), b''))
tx.vout.append(CTxOut(int(100), CScript([OP_RETURN, b"a" * size])))
tx.rehash()
return tx
def create_bad_block(self, template):
coinbase_tx = create_coinbase(height=int(template["height"]) + 1)
coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
coinbase_tx.rehash()
block = CBlock()
block.nVersion = template["version"]
block.nTime = template["curtime"]
block.nBits = int(template["bits"], 16)
block.nNonce = 0
block.vtx = [coinbase_tx]
# Make this block incorrect.
block.hashPrevBlock = 123
block.hashMerkleRoot = block.calc_merkle_root()
return block
def checkBlockTemplate(self, template, txs, dependingTx):
assert 'capabilities' in template
assert_equal('proposal', template['capabilities'][0])
assert 'version' in template
assert 'previousblockhash' in template
assert_equal(self.nodes[0].getbestblockhash(), template['previousblockhash'])
assert 'transactions' in template
# check if hex data was parsed correctly
txs_data = [tx['data'] for tx in template['transactions']]
assert(ToHex(dependingTx) in txs_data)
for tx in txs:
assert(ToHex(tx) in txs_data)
# check dependencies
depending_indices = []
depending_txs_hash = [tx.hash for tx in txs]
for i in range(len(template['transactions'])):
if template['transactions'][i]['hash'] in depending_txs_hash:
depending_indices.append(i+1)
for tmpl_tx in template['transactions']:
if tmpl_tx['hash'] == dependingTx.hash:
assert_equal(2, len(tmpl_tx['depends']))
assert_equal(set(tmpl_tx['depends']), set(depending_indices))
break
assert 'coinbaseaux' in template
assert 'coinbasevalue' in template
assert 'longpollid' in template
assert 'target' in template
assert 'mintime' in template
assert 'mutable' in template
assert 'noncerange' in template
assert 'sizelimit' in template
assert 'curtime' in template
assert 'bits' in template
assert 'height' in template
assert_equal(self.nodes[0].getblock(self.nodes[0].getbestblockhash())['height'] + 1, template['height'])
def run_test(self):
self.stop_node(0)
with self.run_node_with_connections("test getblocktemplate RPC call", 0, ["-blockmintxfee=0.0000001"], 1) as connections:
connection = connections[0]
# Preparation.
self.chain.set_genesis_hash(int(self.nodes[0].getbestblockhash(), 16))
starting_blocks = 101
block_count = 0
for i in range(starting_blocks):
block = self.chain.next_block(block_count)
block_count += 1
self.chain.save_spendable_output()
connection.cb.send_message(msg_block(block))
out = []
for i in range(starting_blocks):
out.append(self.chain.get_spendable_output())
self.nodes[0].waitforblockheight(starting_blocks)
# Create and send 2 transactions.
transactions = []
for i in range(2):
tx = create_transaction(out[i].tx, out[i].n, b"", 100000, CScript([OP_TRUE]))
connection.cb.send_message(msg_tx(tx))
transactions.append(tx)
self.check_mempool(self.nodes[0], transactions)
# Create large transaction that depends on previous two transactions.
# If transaction pubkey contains 1/2 of BUFFER_SIZE_HttpTextWriter of data, it means that final result will for sure be chunked.
largeTx = self.createLargeTransaction(int(BUFFER_SIZE_HttpTextWriter/2), transactions)
connection.cb.send_message(msg_tx(largeTx))
self.check_mempool(self.nodes[0], [largeTx])
# Check getblocktemplate response.
template = self.nodes[0].getblocktemplate()
self.checkBlockTemplate(template, transactions, largeTx)
# Check getblocktemplate with an invalid response
block = self.create_bad_block(template)
rsp = self.nodes[0].getblocktemplate({'data': b2x(block.serialize()), 'mode': 'proposal'})
assert_equal(rsp, "inconclusive-not-best-prevblk")
assert_raises_rpc_error(-22, "Block decode failed", self.nodes[0].getblocktemplate,
{'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'})
# Test getblocktemplate in a batch
batch = self.nodes[0].batch([
self.nodes[0].getblockbyheight.get_request(100),
self.nodes[0].getblocktemplate.get_request(),
self.nodes[0].getblockcount.get_request(),
self.nodes[0].undefinedmethod.get_request()])
assert_equal(batch[0]["error"], None)
assert_equal(batch[1]["error"], None)
assert_equal(batch[1]["result"], template)
assert_equal(batch[2]["error"], None)
assert_equal(batch[3]["error"]["message"], "Method not found")
assert_equal(batch[3]["result"], None)
if __name__ == '__main__':
GetBlockTemplateRPCTest().main()
|
from .pram2mesa import pram2mesa
|