import pytest
from protean import BaseEventSourcedAggregate
from protean.fields import Integer, String
from protean.utils import fully_qualified_name
class User(BaseEventSourcedAggregate):
name = String()
age = Integer()
def test_registering_an_event_sourced_aggregate_manually(test_domain):
try:
test_domain.register(User)
except Exception:
pytest.fail("Failed to register an Event Sourced Aggregate")
assert fully_qualified_name(User) in test_domain.registry.event_sourced_aggregates
def test_registering_an_event_sourced_aggregate_via_annotation(test_domain):
try:
@test_domain.event_sourced_aggregate
class Person:
name = String()
age = Integer()
except Exception:
pytest.fail("Failed to register an Event Sourced Aggregate via annotation")
assert fully_qualified_name(Person) in test_domain.registry.event_sourced_aggregates
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ishell.command import Command
from ishell.console import Console
from ishell.utils import _print
import getpass
class InterfaceConsole(Command):
"""Interface Console.
Parameters:
interface name -- Press tab for options
"""
def args(self):
return ["FastEthernet0/0", "FastEthernet1/0"]
def run(self, line):
interface = line.split()[-1]
self.interface = interface
self.prompt = "RouterA(config-if-%s)" % self.interface
self.prompt_delim = "#"
ip = Command("ip", help="Configure ip parameters: address")
address = Command("address")
address.run = self.set_address
self.addChild(ip).addChild(address)
self.loop()
def set_address(self, line):
addr = line.split()[-1]
_print("Setting interface %s address %s" % (self.interface, addr))
class RouteAdd(Command):
"""RouteAdd Command.
Parameters:
network gateway - Example: ip route add 10.0.0.0/8 192.168.0.1
"""
def run(self, line):
_print("Route added!")
class ConfigureTerminal(Command):
"""Configure Console.
Children:
interface -- Configure mode for interface
ip -- IP configuration: route add
"""
def run(self, line):
self.prompt = "RouterA(config)"
self.prompt_delim = "#"
ip = Command("ip", help="Set ip configurations")
route = Command("route")
add = RouteAdd("add")
interface = InterfaceConsole("interface", dynamic_args=True,
help="Configure interface parameters")
self.addChild(interface)
self.addChild(ip).addChild(route).addChild(add)
self.loop()
class Show(Command):
"""Show Command.
Children:
running-config -- Show RAM configuration
startup-config -- Show NVRAM configuration
"""
def args(self):
return ["running-config", "startup-config"]
def run(self, line):
arg = line.split()[-1]
if arg not in self.args():
_print("%% Invalid argument: %s" % arg)
_print("\tUse:")
_print("\trunning-config -- Show RAM configuration")
_print("\tstartup-config -- Show NVRAM configuration")
return
_print("Executing show %s..." % arg)
class Enable(Command):
"""Enable Command.
Default password is 'python'; if typed correctly you get access
to the router's enable console.
Children:
configure -- Enter configure mode
show -- Show configurations
"""
def run(self, line):
self.prompt = "RouterA"
self.prompt_delim = "#"
password = getpass.getpass()
if password != 'python':
_print("%% Invalid Password")
return
configure = Command("configure", help="Enter configure mode")
terminal = ConfigureTerminal("terminal")
configure.addChild(terminal)
show = Show('show', help="Show configurations", dynamic_args=True)
self.addChild(configure)
self.addChild(show)
self.loop()
class PingCommand(Command):
"""Ping Command.
Parameters:
destination -- Destination ip
"""
def run(self, line):
destination = line.split()[-1]
import subprocess
try:
subprocess.call(['ping', '-c5', '%s' % destination])
except KeyboardInterrupt:
_print("ping canceled.")
return
class TraceRouteCommand(Command):
"""TraceRoute Command.
Parameters:
destination -- Destination ip
"""
def run(self, line):
destination = line.split()[-1]
import subprocess
try:
subprocess.call(['traceroute', '%s' % destination])
except KeyboardInterrupt:
_print("Traceroute canceled.")
return
def main():
console = Console("RouterA")
enable = Enable("enable", help="Enter enable mode")
ping = PingCommand('ping', help="Ping destination ip. Ex: ping 8.8.8.8")
traceroute = TraceRouteCommand('traceroute', help="Trace route to destination ip. Ex: traceroute 8.8.8.8")
console.addChild(enable)
console.addChild(ping)
console.addChild(traceroute)
console.loop()
if __name__ == '__main__':
main()
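# A hedged sketch (not in the original script) of the two hooks an ishell
# Command can override: args() feeds tab completion and run() handles the
# committed line. The command name and messages below are made up.
#
# class ReloadCommand(Command):
#     """Reload Command."""
#     def args(self):
#         return ["now", "later"]
#     def run(self, line):
#         _print("Reload scheduled: %s" % line.split()[-1])
#
# console.addChild(ReloadCommand("reload", dynamic_args=True, help="Reload the router"))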
|
from app.utils import ServiceSettingsBase
class MongoDBSettings(ServiceSettingsBase):
mongodb_dsn: str
mongodb_min_connections_count: int = 2
mongodb_max_connections_count: int = 8
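# A minimal usage sketch (illustrative, not part of the original module). It
# assumes ServiceSettingsBase follows pydantic BaseSettings semantics, i.e.
# fields are populated from environment variables; the DSN is a made-up value.
if __name__ == "__main__":
    import os
    os.environ.setdefault("MONGODB_DSN", "mongodb://localhost:27017/app")
    settings = MongoDBSettings()
    print(settings.mongodb_dsn, settings.mongodb_max_connections_count)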
|
from src.functions.hello import say_hello
def test_say_hello():
assert say_hello('Sample') == 'Hello Sample!'
|
from .models import Bubble, BubbleMenu
from django import forms
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponseRedirect as redirect, \
HttpResponse as response, Http404
from django.shortcuts import render
from django.template import RequestContext
import json
from what_apps.cms.models import ContentBlock
from what_apps.do.models import Task
from what_apps.people.models import GenericParty
def main_cms(request):
pass
def main_landing(request):
'''
TODO: Bring this back for a menu-driven front page.
'''
disable_incoming_calls = True
blog_blocks = ContentBlock.objects.filter(published=True, tags__name__in=["public", "blog"]).order_by('-created').distinct()[:4]
return render(request, "main/main_landing.html", locals())
#If the user is logging in on the front page, they have new bubbles coming their way.
def loggedInBubbles(request):
'''
Where are we going with this view? Can we disappear this thing?
'''
#Which menu are we rendering? TODO: Un-hardcode menu reference. (For that matter, we probably ought to take members to a completely different function)
initial_menu = BubbleMenu.objects.get(launch_name="admin_main")
#Get the menu tree for this menu - this will recursively get the sub-menus of its bubbles.
menu_tree=initial_menu.getMenuTree()
#Tell the template that we want this menu to be first.
initial_menu.first = True
#TODO: Move all of this stuff to an admin-only function.
#Get the last 10 sales for display in the POS.
from products.models import BeverageInstance
sales = BeverageInstance.objects.order_by('-created')[:10]  # newest first; the old order_by('created')[:10].reverse() took the oldest 10, not the last 10
'''
The following is heavily borrowed from (and probably rightly belongs in) hwtrack.views.
I'm putting it here mostly for testing.
Since it's here, it doesn't need to load via ajax.
It's probably best to re-ajax it.
-Justin
'''
from what_apps.hwtrack.models import Device
from what_apps.people.models import Client
class AddClientForm(forms.ModelForm):
class Meta:
model=Client
class AddDeviceForm(forms.ModelForm):
class Meta:
model=Device
# class AddServiceForm(forms.ModelForm):
# class Meta:
# model=RenderDeviceService
service_check_in_forms=(AddClientForm(), AddDeviceForm())
#END AREA OF ADMIN-ONLY STUFF
return render(request, 'main/logged_in_bubbles.html', locals())
def rightSideWidgets(request):
from django.contrib.sessions.models import Session
from django.contrib.auth.models import User
user_ids = []
for session in Session.objects.all():
try:
id = session.get_decoded()['_auth_user_id']
user_ids.append(id)
except KeyError:
pass
tasks = Task.objects.filter_active_by_owner(request.user)
logged_in_users = User.objects.filter(id__in=user_ids).distinct()
#TODO: This needs to be all the widgets, not just whoisloggedin
return render(request, 'widgets/whoIsLoggedIn.html', locals())
def hack_and_tell(request):
return render(request,'main/hack_and_tell/info.html', locals())
def moving(request):
moving_content_blocks = ContentBlock.objects.filter(tags__name='moving_2012_blocks')
invisible_footer = True
return render(request, 'main/moving.html', locals())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Provides robot dynamic transition functions
"""
# TODO: to implement
from pyrobolearn.robots.robot import Robot
from pyrobolearn.dynamics.dynamic import DynamicModel
__author__ = "Brian Delhaisse"
__copyright__ = "Copyright 2019, PyRoboLearn"
__credits__ = ["Brian Delhaisse"]
__license__ = "GNU GPLv3"
__version__ = "1.0.0"
__maintainer__ = "Brian Delhaisse"
__email__ = "briandelhaisse@gmail.com"
__status__ = "Development"
class PhysicalDynamicModel(DynamicModel):
r"""Physical Dynamic Model
Dynamic model described by mathematical/physical equations.
"""
def __init__(self, state, action):
super(PhysicalDynamicModel, self).__init__(state, action)
class RobotDynamicModel(PhysicalDynamicModel):
r"""Robot Dynamical Model
This is the mathematical model of the robots.
Limitations:
* mathematical assumptions such as rigid bodies
* approximation of the dynamics
* the states/actions have to be robot states/actions
"""
def __init__(self, state, action, robot):
super(RobotDynamicModel, self).__init__(state, action)
self.robot = robot
##############
# Properties #
##############
@property
def robot(self):
"""Return the robot instance."""
return self._robot
@robot.setter
def robot(self, robot):
"""Set the robot instance."""
if not isinstance(robot, Robot):
raise TypeError("Expecting the given robot to be an instance of `Robot`, instead got: "
"{}".format(type(robot)))
self._robot = robot
|
import conf
from keras.applications.resnet50 import ResNet50
from keras.preprocessing.image import load_img
from keras.preprocessing.image import img_to_array
from keras.applications.resnet50 import preprocess_input
from keras.applications.resnet50 import decode_predictions
from sys import argv
from pprint import pprint
# load the model
model = ResNet50()
print(argv[1])
# Load the image from the script's first argument.
# Resize it to fit the model's required input.
image = load_img(argv[1], target_size=(224, 224))
# Convert image to numpy array.
image = img_to_array(image)
# Reshape image to fit the model's requirements.
# First argument is the number of images we plan
# to classify using the model.
image = image.reshape((1, image.shape[0], image.shape[1], image.shape[2]))
# Prepare the image in the same way as
# the images the model was trained on.
image = preprocess_input(image)
# Get the probabilities for each class.
predictions = model.predict(image)
# Convert them to human-readable labels.
labels = decode_predictions(predictions)
pprint(labels)
# Get the class that is the best match
# (has the highest probability)
label = labels[0][0]
# Show the most probable class and its probability percentage.
print('%s: %.2f%%' % (label[1], label[2]*100))
|
import matplotlib.colors as mcolors
import numpy as np
import plotly.graph_objects as go
from plotly.subplots import make_subplots
def plotly_history(
history,
title=None,
fontsize=14,
yscale=None,
yticks=10,
xticks=10,
ax_width=515,
ax_height=450,
width=None,
height=None,
):
n = len(history.keys())
if n > 1:
ncols = 2
nrows = int(np.ceil(n / ncols))
if width is None:
width = 2 * ax_width
if height is None:
height = ax_height * nrows
else:
ncols = nrows = 1
if width is None:
width = ax_width
if height is None:
height = ax_height
rows_and_cols = [(row, col) for row in
range(1, nrows + 1) for col in range(1, ncols + 1)]
fig = make_subplots(rows=nrows,
cols=ncols,
subplot_titles=list(history.keys()),
horizontal_spacing=0.05,
vertical_spacing=0.08 / (nrows - 1) if nrows > 1
else 0)
colors = list(mcolors.TABLEAU_COLORS.values())
for subplot_no, ((key, labels), (row, col)) in enumerate(
zip(
history.items(),
rows_and_cols
)
):
subplot_no += 1
max_y = -np.inf
max_x = -np.inf
legend_text = ""
for idx, (label, values) in enumerate(labels.items()):
if legend_text != "":
legend_text += "<br>"
legend_text += f'<span style="color:{colors[idx]};"><b>{label}</b></span>'
xs = list(range(1, len(values) + 1))
max_y = max(max_y, max(values))
max_x = max(max_x, len(values) + 1)
fig.add_trace(
go.Scatter(x=xs, y=values,
line=dict(color=colors[idx]),
mode="lines+markers",
name="", legendgroup=key, showlegend=False,
hovertemplate=f"{label}: " + "%{y}",
marker=dict(size=0.1)),
row=row, col=col
)
fig.add_annotation(xref=f"x{subplot_no}",
yref=f"y{subplot_no}",
showarrow=False,
x=max_x, y=max_y, yshift=10,
text=legend_text, bgcolor="white",
align="left")
width = int(width * 0.9)
height = int(height * 0.9)
fig.update_layout(width=width, height=height,
margin=dict(l=0, r=0, t=50, b=0),
font_size=fontsize,
title=title,
title_x=0.5,
template="simple_white",
hovermode="x unified")
fig.update_yaxes(showgrid=True)
fig.update_xaxes(showgrid=True)
if yscale is not None:
fig.update_yaxes(type=yscale)
if yticks is not None:
fig.update_yaxes(nticks=yticks)
if xticks is not None:
fig.update_xaxes(nticks=xticks)
fig.update_annotations({"yanchor": "bottom", "xanchor": "right"})
return fig
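# A short usage sketch (added for illustration). plotly_history expects
# `history` as a dict mapping subplot titles to {trace label: list of values};
# the numbers below are made up.
if __name__ == "__main__":
    history = {
        "loss": {"train": [0.9, 0.6, 0.4, 0.3], "val": [1.0, 0.7, 0.5, 0.45]},
        "accuracy": {"train": [0.5, 0.7, 0.8, 0.85], "val": [0.45, 0.65, 0.75, 0.8]},
    }
    plotly_history(history, title="Training history").show()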
|
# Copyright 2011-2013 Colin Scott
# Copyright 2012-2013 Andrew Or
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Host, switch and link GUI entities that belong to the QGraphicsScene in TopologyView
'''
import math
from PyQt4 import QtGui, QtCore
class GuiNode(QtGui.QGraphicsItem):
'''
Abstract Interactive Node
If switch, id = switch.dpid
If host, id = host.hid
'''
def __init__(self, graphics_scene, _id):
QtGui.QGraphicsItem.__init__(self)
self.graphics_scene = graphics_scene
self.id = _id
self.linkList = []
self.newPos = QtCore.QPointF()
self.setFlag(QtGui.QGraphicsItem.ItemIsMovable)
self.setFlag(QtGui.QGraphicsItem.ItemSendsGeometryChanges)
self.setZValue(1)
self.setAcceptHoverEvents(True)
def addLink(self, link):
self.linkList.append(link)
link.adjust()
def links(self):
return self.linkList
def boundingRect(self):
adjust = 2.0
return QtCore.QRectF(-10 - adjust, -10 - adjust, 23 + adjust, 23 + adjust)
def shape(self):
path = QtGui.QPainterPath()
path.addEllipse(-10, -10, 20, 20)
return path
def itemChange(self, change, value):
if change == QtGui.QGraphicsItem.ItemPositionChange:
for link in self.linkList:
link.adjust()
return QtGui.QGraphicsItem.itemChange(self, change, value)
def mousePressEvent(self, event):
self.still_hover = False
self.update()
QtGui.QGraphicsItem.mousePressEvent(self, event)
def mouseDoubleClickEvent(self, event):
QtGui.QGraphicsItem.mouseDoubleClickEvent(self, event)
@QtCore.pyqtSlot()
def popupnode_detailsMenu(self):
if self.still_hover:
self.node_details.exec_(self.hover_pos)
def hoverLeaveEvent(self, event):
self.still_hover = False
class GuiHost(GuiNode):
'''
Interactive Host
'''
def paint(self, painter, option, widget):
painter.setPen(QtCore.Qt.NoPen)
painter.setBrush(QtGui.QColor(QtCore.Qt.darkGray).light(25))
painter.drawRect(-9, -9, 15, 15)
gradient = QtGui.QRadialGradient(-3, -3, 10)
color = QtGui.QColor(QtCore.Qt.yellow)
if option.state & QtGui.QStyle.State_Sunken:
gradient.setCenter(3, 3)
gradient.setFocalPoint(3, 3)
gradient.setColorAt(1, color.light(100))
gradient.setColorAt(0, color.light(30))
else:
gradient.setColorAt(0, color.light(85))
gradient.setColorAt(1, color.light(25))
painter.setBrush(QtGui.QBrush(gradient))
painter.setPen(QtGui.QPen(QtCore.Qt.black, 0))
painter.drawRect(-10, -10, 15, 15)
# Draw hid
text_rect = self.boundingRect()
message = str(self.id)
font = painter.font()
font.setBold(True)
painter.setFont(font)
painter.setPen(QtCore.Qt.gray)
painter.drawText(text_rect.translated(0.1, 0.1), message)
painter.setPen(QtGui.QColor(QtCore.Qt.gray).light(130))
painter.drawText(text_rect.translated(0, 0), message)
def mouseReleaseEvent(self, event):
self.update()
QtGui.QGraphicsItem.mouseReleaseEvent(self, event)
class GuiSwitch(GuiNode):
'''
Interactive Switch
'''
def paint(self, painter, option, widget):
painter.setPen(QtCore.Qt.NoPen)
painter.setBrush(QtGui.QColor(QtCore.Qt.darkGray).light(25))
painter.drawEllipse(-9, -9, 20, 20)
gradient = QtGui.QRadialGradient(-3, -3, 10)
color = QtGui.QColor(QtCore.Qt.green)
if option.state & QtGui.QStyle.State_Sunken:
gradient.setCenter(3, 3)
gradient.setFocalPoint(3, 3)
gradient.setColorAt(1, color.light(100))
gradient.setColorAt(0, color.light(30))
else:
gradient.setColorAt(0, color.light(85))
gradient.setColorAt(1, color.light(25))
painter.setBrush(QtGui.QBrush(gradient))
painter.setPen(QtGui.QPen(QtCore.Qt.black, 0))
painter.drawEllipse(-10, -10, 20, 20)
# Draw dpid
text_rect = self.boundingRect()
message = str(self.id)
font = painter.font()
font.setBold(True)
painter.setFont(font)
painter.setPen(QtCore.Qt.gray)
painter.drawText(text_rect.translated(0.1, 0.1), message)
painter.setPen(QtGui.QColor(QtCore.Qt.gray).light(130))
painter.drawText(text_rect.translated(0, 0), message)
def mouseReleaseEvent(self, event):
'''
Popup menu contains:
Add Link To --
Remove Link To --
Attach Host
Remove Host --
Remove Switch
'''
if event.button() == QtCore.Qt.RightButton:
popup = QtGui.QMenu()
# Hack against python scoping
def create_network_link_lambda(from_dpid, to_dpid):
return lambda: self.graphics_scene.syncer.create_network_link(from_dpid, to_dpid)
def remove_network_link_lambda(from_dpid, to_dpid):
return lambda: self.graphics_scene.syncer.remove_network_link(from_dpid, to_dpid)
def attach_host_lambda(dpid):
return lambda: self.graphics_scene.syncer.attach_host_to_switch(dpid)
def remove_host_lambda(hid):
return lambda: self.graphics_scene.syncer.remove_host(hid)
def remove_switch_lambda(dpid):
return lambda: self.graphics_scene.syncer.remove_switch(dpid)
addLinkMenu = QtGui.QMenu("Add Link To")
removeLinkMenu = QtGui.QMenu("Remove Link To")
removeHostMenu = QtGui.QMenu("Remove Host")
# Find network links to potentially create
dpids_to_add = []
for dpid in self.graphics_scene.syncer.dpid2switch.keys():
switch = self.graphics_scene.syncer.dpid2switch[dpid]
if switch is self:
continue
for link in self.linkList:
if link.dest is switch:
break
else:
dpids_to_add.append(switch.id)
for dpid in sorted(dpids_to_add):
addLinkMenu.addAction(str(dpid), create_network_link_lambda(self.id, dpid))
if len(dpids_to_add) > 0:
popup.addMenu(addLinkMenu)
# Find network links to potentially remove
dpids_to_remove = []
for link in self.linkList:
if isinstance(link.dest, GuiSwitch) and link.dest is not self:
dpids_to_remove.append(link.dest.id)
for dpid in sorted(dpids_to_remove):
removeLinkMenu.addAction(str(dpid), remove_network_link_lambda(self.id, dpid))
if len(dpids_to_remove) > 0:
popup.addMenu(removeLinkMenu)
# Attach a host to the given switch
popup.addAction("Attach Host", attach_host_lambda(self.id))
# Find hosts to potentially remove
hids_to_remove = []
for link in self.linkList:
if isinstance(link.source, GuiHost) and link.dest is self:
hids_to_remove.append(link.source.id)
for hid in hids_to_remove:
removeHostMenu.addAction(str(hid), remove_host_lambda(hid))
if len(hids_to_remove) > 0:
popup.addMenu(removeHostMenu)
# Remove self
popup.addAction("Remove Switch", remove_switch_lambda(self.id))
popup.exec_(event.lastScreenPos())
self.update()
QtGui.QGraphicsItem.mouseReleaseEvent(self, event)
class GuiLink(QtGui.QGraphicsItem):
'''
Interactive Link
'''
def __init__(self, graphics_scene, source_node, dest_node):
QtGui.QGraphicsItem.__init__(self)
self.graphics_scene = graphics_scene
self.arrow_size = 10.0
self.source_point = QtCore.QPointF()
self.dest_point = QtCore.QPointF()
self.setFlag(QtGui.QGraphicsItem.ItemIsMovable)
self.setAcceptedMouseButtons(QtCore.Qt.RightButton)
self.setAcceptHoverEvents(False)
self.source = source_node
self.dest = dest_node
self.draw_arrow = True
self.source.addLink(self)
self.dest.addLink(self)
self.adjust()
def adjust(self):
if not self.source or not self.dest:
return
line = QtCore.QLineF(self.mapFromItem(self.source, 0, 0), \
self.mapFromItem(self.dest, 0, 0))
length = line.length()
if length == 0.0:
return
linkOffset = QtCore.QPointF((line.dx() * 10) / length, (line.dy() * 10) / length)
self.prepareGeometryChange()
self.source_point = line.p1() + linkOffset
self.dest_point = line.p2() - linkOffset
def boundingRect(self):
if not self.source or not self.dest:
return QtCore.QRectF()
penWidth = 1
extra = (penWidth + self.arrow_size) / 2.0
rect = QtCore.QRectF(self.source_point,
QtCore.QSizeF(self.dest_point.x() - self.source_point.x(),
self.dest_point.y() - self.source_point.y())).normalized()
return rect.adjusted(-extra, -extra, extra, extra)
def paint(self, painter, option, widget):
if not self.source or not self.dest:
return
line = QtCore.QLineF(self.source_point, self.dest_point)
if line.length() == 0.0:
return
color = QtCore.Qt.gray
pattern = QtCore.Qt.SolidLine
# Highlight when clicked/held
if option.state & QtGui.QStyle.State_Sunken:
color = QtGui.QColor(color).light(256)
else:
color = QtGui.QColor(color).light(90)
painter.setPen(QtGui.QPen(color, 1,
pattern, QtCore.Qt.RoundCap, QtCore.Qt.RoundJoin))
painter.drawLine(line)
# Draw the arrows if there's enough room.
angle = math.acos(line.dx() / line.length())
if line.dy() >= 0:
angle = 2 * math.pi - angle
dest_arrow_p1 = self.dest_point + \
QtCore.QPointF(math.sin(angle - math.pi / 3) * self.arrow_size,
math.cos(angle - math.pi / 3) * self.arrow_size)
dest_arrow_p2 = self.dest_point + \
QtCore.QPointF(math.sin(angle - math.pi + math.pi / 3) * self.arrow_size,
math.cos(angle - math.pi + math.pi / 3) * self.arrow_size)
if self.draw_arrow:
painter.setBrush(color)
painter.drawPolygon(QtGui.QPolygonF([line.p2(), \
dest_arrow_p1, dest_arrow_p2]))
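# A minimal display sketch (illustrative; the context-menu actions expect the
# scene to carry a `syncer` attribute, which this standalone snippet does not
# exercise).
if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    scene = QtGui.QGraphicsScene()
    host, switch = GuiHost(scene, 1), GuiSwitch(scene, 1)
    switch.setPos(60, 0)
    for item in (host, switch, GuiLink(scene, host, switch)):
        scene.addItem(item)
    view = QtGui.QGraphicsView(scene)
    view.show()
    sys.exit(app.exec_())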
|
from schafkopf.players.mc_tree import MCTree
from schafkopf.players.mc_node import MCNode
from schafkopf.players.nn_player import NNPlayer
from schafkopf.players.uct_player import UCTPlayer
from schafkopf.tournaments.game_states_trick_play import sample_game_states
def main():
sim_player_list = [NNPlayer(game_mode_nn='../players/models/bigger_classifier200.hdf5',
partner_nn='../players/models/partner_model_wider_data_2.hdf5',
solo_nn='../players/models/solo_model_wider_data_10.hdf5',
wenz_nn='../players/models/wenz_model_wider_data_10.hdf5'),
NNPlayer(game_mode_nn='../players/models/bigger_classifier200.hdf5',
partner_nn='../players/models/partner_model_wider_data_2.hdf5',
solo_nn='../players/models/solo_model_wider_data_10.hdf5',
wenz_nn='../players/models/wenz_model_wider_data_10.hdf5'),
NNPlayer(game_mode_nn='../players/models/bigger_classifier200.hdf5',
partner_nn='../players/models/partner_model_wider_data_2.hdf5',
solo_nn='../players/models/solo_model_wider_data_10.hdf5',
wenz_nn='../players/models/wenz_model_wider_data_10.hdf5'),
NNPlayer(game_mode_nn='../players/models/bigger_classifier200.hdf5',
partner_nn='../players/models/partner_model_wider_data_2.hdf5',
solo_nn='../players/models/solo_model_wider_data_10.hdf5',
wenz_nn='../players/models/wenz_model_wider_data_10.hdf5')]
game_state = sample_game_states[15]
num_simulations = 100
ucb_constant = 0.1
player = UCTPlayer(ucb_const=ucb_constant,
num_samples=1,
num_simulations=num_simulations,
simulation_player_list=sim_player_list)
root_node = MCNode(game_state=game_state)
mc_tree = MCTree(root_node=root_node)
for sim_num in range(1, player.num_simulations + 1):
selected_node = player.selection(mc_tree)
rewards = player.simulation(selected_node)
mc_tree.backup_rewards(leaf_node=selected_node, rewards=rewards)
mc_tree.visualize_tree(ucb=player.ucb_const,
filename="Tree_{}nodes{}ucb_const{}game_mode".format(num_simulations,
ucb_constant,
game_state["game_mode"]))
if __name__ == "__main__":
main()
|
-- Write a query to find the shortest distance between two points in these points.
-- | x |
-- |-----|
-- | -1 |
-- | 0 |
-- | 2 |
-- The shortest distance is '1' obviously, which is from point '-1' to '0'. So the output is as below:
-- | shortest|
-- |---------|
-- | 1 |
# Write your MySQL query statement below
select min(abs(p1.x-p2.x)) as shortest
from point p1
join point p2
on p1.x!=p2.x
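-- Equivalent reasoning outside SQL (illustrative Python, values hard-coded):
-- only adjacent values in sorted order can realize the minimum gap, giving an
-- O(n log n) alternative to the O(n^2) self-join above.
--   xs = sorted([-1, 0, 2])
--   shortest = min(b - a for a, b in zip(xs, xs[1:]))  # -> 1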
|
"""
analytic_generator.py is a tool for generating logic-tables for random logic-operators
as csv-files for machine learning.
"""
import numpy as np
import itertools
import hashlib
class LogicOperators:
"""LogicOperators.
Attributes
----------
logic operator functions:
1. lg_and
2. lg_or
3. lg_xor
4. lg_not_and
5. lg_not_or
plus get_operator, a dispatcher that calls them by name
"""
@staticmethod
def lg_and(x1, x2, dtype=int):  # np.int was removed in NumPy 1.24; use the builtin
"""lg_and.
Parameters
----------
x1 :
x1
x2 :
x2
dtype :
dtype
"""
return np.logical_and(x1, x2, dtype=dtype)
@staticmethod
def lg_or(x1, x2, dtype=int):
"""lg_or.
Parameters
----------
x1 :
x1
x2 :
x2
dtype :
dtype
"""
return np.logical_or(x1, x2, dtype=dtype)
@staticmethod
def lg_xor(x1, x2, dtype=int):
"""lg_xor.
Parameters
----------
x1 :
x1
x2 :
x2
dtype :
dtype
"""
return np.logical_xor(x1, x2, dtype=dtype)
@staticmethod
def lg_not_and(x1, x2, dtype=int):
"""lg_not_and.
Parameters
----------
x1 :
x1
x2 :
x2
dtype :
dtype
"""
return np.logical_not(np.logical_and(x1, x2, dtype=dtype))
@staticmethod
def lg_not_or(x1, x2, dtype=int):
"""lg_not_or.
Parameters
----------
x1 :
x1
x2 :
x2
dtype :
dtype
"""
return np.logical_not(np.logical_or(x1, x2, dtype=dtype))
@staticmethod
def get_operator(lgopt, x1, x2):
"""Get the logic operators.
get_operator is calling the static logic functions from above:
1. lg_and
2. lg_or
3. lg_xor
4. lg_not_and
5. lg_not_or
Parameters
----------
lgopt : str
Initial str for calling the logic operators
x1 : int or bool as array-like
x1, first value of the logic operator
x2 : int or bool as array-like
x2, second value of the logic operator
Return
------
: bool
Returns the bool single or bool array depending on the logic operators
"""
if lgopt.lower() == "and":
return LogicOperators.lg_and(x1, x2)
elif lgopt.lower() == "or":
return LogicOperators.lg_or(x1, x2)
elif lgopt.lower() == "xor":
return LogicOperators.lg_xor(x1, x2)
elif lgopt.lower() == "nand":
return LogicOperators.lg_not_and(x1, x2)
elif lgopt.lower() == "nor":
return LogicOperators.lg_not_or(x1, x2)
class LogicGenerator(LogicOperators, object):
"""LogicGenerator.
"""
__lst = [0, 1]
# only input parameters
logic_items = 0
# optional input
fname = None
# internal variables, non-private
logic_variables = 0
logic_init_mat = np.array([])
logic_result = np.array([])
logic_operators = []
# export parameters
logic_res_mat = np.array([])
logic_expression = ""
def __init__(self, logic_items=5, fname=None):
"""__init__.
Parameters
----------
logic_items : int, optional
Total number of logical operators, by default 5.
fname : str, optional
Filename to save as txt, by default None
"""
super().__init__()
self.logic_items = logic_items
self.logic_variables = self.logic_items + 1
self.logic_operations()
self.fname = fname
def logic_operations(self):
"""Generate a random connections of logic opperators."""
self.logic_init_mat = self.logic_matrices()
ref_list = ["and", "or", "xor", "nand", "nor"]
rnd_range = np.random.randint(len(ref_list), size=self.logic_items)
for i in rnd_range:
self.logic_operators.append(ref_list[i])
def logic_matrices(self):
"""Generate a logic matrice with all possible combinations."""
logic_mat = np.asarray(
list(itertools.product(self.__lst, repeat=self.logic_variables))
)
return logic_mat
def generator(self):
"""generator.
"""
self.logic_init_mat = self.logic_matrices()
self.logic_result = self.get_operator(
self.logic_operators[0],
self.logic_init_mat[:, 0],
self.logic_init_mat[:, 1],
)
for i, lopt in enumerate(self.logic_operators[1:]):
self.logic_result = self.get_operator(
lopt, self.logic_result, self.logic_init_mat[:, i + 2]
)
# print(self.logic_result.astype(dtype=bool))
# print(b)
X = self.logic_init_mat
y = self.logic_result.astype(dtype=int).reshape(len(self.logic_result), 1)
self.logic_res_mat = np.concatenate((X, y), axis=1)
self.export()
def export(self):
"""export the random-generated truth-tables as csv.
The header inculdes all x-variables like x_0, x_1, ... and y as truht or false-value.
The footer includes the logical expression like: `or(x_5, or(x_4, ...`. The function
goes always from the inner to the outer ring. As result of the bijektivity, capsuled
expression can be generated by re-wirrting the expression by switching the variables.
Notes
-----
If `self.fname == None` the csv-name is based on the math-expression of the logical
operators exported to an hashlib-value.
"""
header = ""
for i in range(self.logic_variables):
header += "x_{}, ".format(str(i))
header += " y"
# Generate math expression as footer
# Start with the inner-function
exprs = self.logic_operators[0] + "(x_0, x_1)"
# Add the rest of the logical expressions
for i, item in enumerate(self.logic_operators[1:]):
exprs = "{}(x_{}, {})".format(item, str(i + 2), exprs)
exprs += ",-" # Finisher for beeing consitent in the csv-style
# Generate the export-name
if not self.fname:
self.fname = "{}.csv".format(
hashlib.sha256(exprs.encode("utf-8")).hexdigest()
)
# Export
np.savetxt(
self.fname,
self.logic_res_mat,
delimiter=",",
header=header,
footer=exprs,
fmt="%5i",
)
if __name__ == "__main__":
# print(LogicOperators().lg_and([1,0], [1, 1]))
import argparse
parser = argparse.ArgumentParser(description="Generating random truth-tables.")
parser.add_argument(
"n_items", type=int, default=5, help="Number of elements, default 5"
)
args = parser.parse_args()
items = args.n_items
LogicGenerator(logic_items=items).generator()
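    # Programmatic usage sketch (illustrative): pass a fixed filename instead
    # of the hash-based default. For logic_items=2 the exported footer looks
    # like e.g. `xor(x_2, and(x_0, x_1)),-`.
    # LogicGenerator(logic_items=2, fname="example.csv").generator()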
|
# Copyright (c) 2014-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
# Stdlib imports
from functools import wraps
# Third party imports
# Cloudify imports
from cloudify import ctx
from cloudify import context
# This package imports
from .constants import (
ASYNC_TASK_ID,
DELETE_NODE_ACTION,
)
from .clients import VsphereClient # noqa
from .clients.server import ServerClient
from .clients.network import NetworkClient
from .clients.storage import StorageClient, RawVolumeClient
def remove_runtime_properties():
# cleanup runtime properties
# need to convert the generator to a list for Python 3
for prop_key in list(ctx.instance.runtime_properties.keys()):
del ctx.instance.runtime_properties[prop_key]
# save flag as current state before external call
ctx.instance.update()
def run_deferred_task(client, instance=None):
instance = instance or ctx.instance
if instance.runtime_properties.get(ASYNC_TASK_ID):
client._wait_for_task(instance=instance)
def _with_client(client_name, client):
def decorator(f):
@wraps(f)
def wrapper(connection_config, *args, **kwargs):
kwargs[client_name] = client(
ctx_logger=ctx.logger).get(config=connection_config)
if not hasattr(f, '__wrapped__'):
# don't pass connection_config to the real operation
kwargs.pop('connection_config', None)
try:
# check unfinished tasks
if ctx.type == context.NODE_INSTANCE:
run_deferred_task(client=kwargs[client_name])
elif ctx.type == context.RELATIONSHIP_INSTANCE:
run_deferred_task(client=kwargs[client_name],
instance=ctx.source.instance)
run_deferred_task(client=kwargs[client_name],
instance=ctx.target.instance)
# run real task
result = f(*args, **kwargs)
# in delete action
current_action = ctx.operation.name
if current_action == DELETE_NODE_ACTION and \
ctx.type == context.NODE_INSTANCE:
# no retry actions
if not ctx.instance.runtime_properties.get(ASYNC_TASK_ID):
ctx.logger.info('Cleanup resource.')
# cleanup runtime
remove_runtime_properties()
# return result
return result
except Exception:
raise
wrapper.__wrapped__ = f
return wrapper
return decorator
with_server_client = _with_client('server_client', ServerClient)
with_network_client = _with_client('network_client', NetworkClient)
with_storage_client = _with_client('storage_client', StorageClient)
with_rawvolume_client = _with_client('rawvolume_client', RawVolumeClient)
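# A hedged sketch (not from the original module) of how such a decorated
# operation is meant to look: the decorator builds the named client from the
# `connection_config` input and injects it as a keyword argument. The
# operation body and method call below are hypothetical.
#
# @with_server_client
# def start(server_client=None, **kwargs):
#     # `server_client` is ready to use here; connection_config was consumed.
#     server_client.start_server(server=ctx.instance.runtime_properties.get('name'))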
|
#
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
"""
Handler module for gathering configuration data.
"""
from .env import ENVIRONMENT
CLOWDER_ENABLED = ENVIRONMENT.bool("CLOWDER_ENABLED", default=False)
if CLOWDER_ENABLED:
from app_common_python import ObjectBuckets, LoadedConfig, KafkaTopics, DependencyEndpoints
class Configurator:
"""Obtain configuration based on mode."""
@staticmethod
def is_clowder_enabled():
"""Check if clowder is enabled."""
return CLOWDER_ENABLED
@staticmethod
def get_in_memory_db_host():
"""Obtain in memory (redis) db host."""
pass
@staticmethod
def get_in_memory_db_port():
"""Obtain in memory (redis) db port."""
pass
@staticmethod
def get_kafka_broker_host():
"""Obtain kafka broker host address."""
pass
@staticmethod
def get_kafka_broker_port():
"""Obtain kafka broker port."""
pass
@staticmethod
def get_kafka_topic(requestedName: str):
"""Obtain kafka topic."""
pass
@staticmethod
def get_cloudwatch_access_id():
"""Obtain cloudwatch access id."""
pass
@staticmethod
def get_cloudwatch_access_key():
"""Obtain cloudwatch access key."""
pass
@staticmethod
def get_cloudwatch_region():
"""Obtain cloudwatch region."""
pass
@staticmethod
def get_cloudwatch_log_group():
"""Obtain cloudwatch log group."""
pass
@staticmethod
def get_object_store_endpoint():
"""Obtain object store endpoint."""
pass
@staticmethod
def get_object_store_host():
"""Obtain object store host."""
pass
@staticmethod
def get_object_store_port():
"""Obtain object store port."""
pass
@staticmethod
def get_object_store_tls():
"""Obtain object store secret key."""
pass
@staticmethod
def get_object_store_access_key(requestedName: str = ""):
"""Obtain object store access key."""
pass
@staticmethod
def get_object_store_secret_key(requestedName: str = ""):
"""Obtain object store secret key."""
pass
@staticmethod
def get_object_store_bucket(requestedName: str = ""):
"""Obtain object store bucket."""
pass
@staticmethod
def get_database_name():
"""Obtain database name."""
pass
@staticmethod
def get_database_user():
"""Obtain database user."""
pass
@staticmethod
def get_database_password():
"""Obtain database password."""
pass
@staticmethod
def get_database_host():
"""Obtain database host."""
pass
@staticmethod
def get_database_port():
"""Obtain database port."""
pass
@staticmethod
def get_database_ca():
"""Obtain database ca."""
pass
@staticmethod
def get_database_ca_file():
"""Obtain database ca file."""
pass
@staticmethod
def get_metrics_port():
"""Obtain metrics port."""
pass
@staticmethod
def get_metrics_path():
"""Obtain metrics path."""
pass
@staticmethod
def get_endpoint_host(app, name, default):
"""Obtain endpoint hostname."""
pass
@staticmethod
def get_endpoint_port(app, name, default):
"""Obtain endpoint port."""
pass
class EnvConfigurator(Configurator):
"""Returns information based on the environment data"""
@staticmethod
def get_in_memory_db_host():
"""Obtain in memory (redis) db host."""
return ENVIRONMENT.get_value("REDIS_HOST", default="redis")
@staticmethod
def get_in_memory_db_port():
"""Obtain in memory (redis) db port."""
return ENVIRONMENT.get_value("REDIS_PORT", default="6379")
@staticmethod
def get_kafka_broker_host():
"""Obtain kafka broker host address."""
return ENVIRONMENT.get_value("INSIGHTS_KAFKA_HOST", default="localhost")
@staticmethod
def get_kafka_broker_port():
"""Obtain kafka broker port."""
return ENVIRONMENT.get_value("INSIGHTS_KAFKA_PORT", default="29092")
@staticmethod
def get_kafka_topic(requestedName: str):
"""Obtain kafka topic."""
return requestedName
@staticmethod
def get_cloudwatch_access_id():
"""Obtain cloudwatch access id."""
return ENVIRONMENT.get_value("CW_AWS_ACCESS_KEY_ID", default=None)
@staticmethod
def get_cloudwatch_access_key():
"""Obtain cloudwatch access key."""
return ENVIRONMENT.get_value("CW_AWS_SECRET_ACCESS_KEY", default=None)
@staticmethod
def get_cloudwatch_region():
"""Obtain cloudwatch region."""
return ENVIRONMENT.get_value("CW_AWS_REGION", default="us-east-1")
@staticmethod
def get_cloudwatch_log_group():
"""Obtain cloudwatch log group."""
return ENVIRONMENT.get_value("CW_LOG_GROUP", default="platform-dev")
@staticmethod
def get_object_store_endpoint():
"""Obtain object store endpoint."""
S3_ENDPOINT = ENVIRONMENT.get_value("S3_ENDPOINT", default="s3.us-east-1.amazonaws.com")
if not (S3_ENDPOINT.startswith("https://") or S3_ENDPOINT.startswith("http://")):
S3_ENDPOINT = "https://" + S3_ENDPOINT
return S3_ENDPOINT
@staticmethod
def get_object_store_host():
"""Obtain object store host."""
# return ENVIRONMENT.get_value("S3_HOST", default=None)
pass
@staticmethod
def get_object_store_port():
"""Obtain object store port."""
# return ENVIRONMENT.get_value("S3_PORT", default=443)
pass
@staticmethod
def get_object_store_tls():
"""Obtain object store secret key."""
# return ENVIRONMENT.bool("S3_SECURE", default=False)
pass
@staticmethod
def get_object_store_access_key(requestedName: str = ""):
"""Obtain object store access key."""
return ENVIRONMENT.get_value("S3_ACCESS_KEY", default=None)
@staticmethod
def get_object_store_secret_key(requestedName: str = ""):
"""Obtain object store secret key."""
return ENVIRONMENT.get_value("S3_SECRET", default=None)
@staticmethod
def get_object_store_bucket(requestedName: str = ""):
"""Obtain object store bucket."""
return ENVIRONMENT.get_value("S3_BUCKET_NAME", default=requestedName)
@staticmethod
def get_database_name():
"""Obtain database name."""
return ENVIRONMENT.get_value("DATABASE_NAME", default="postgres")
@staticmethod
def get_database_user():
"""Obtain database user."""
return ENVIRONMENT.get_value("DATABASE_USER", default="postgres")
@staticmethod
def get_database_password():
"""Obtain database password."""
return ENVIRONMENT.get_value("DATABASE_PASSWORD", default="postgres")
@staticmethod
def get_database_host():
"""Obtain database host."""
SERVICE_NAME = ENVIRONMENT.get_value("DATABASE_SERVICE_NAME", default="").upper().replace("-", "_")
return ENVIRONMENT.get_value(f"{SERVICE_NAME}_SERVICE_HOST", default="localhost")
@staticmethod
def get_database_port():
"""Obtain database port."""
SERVICE_NAME = ENVIRONMENT.get_value("DATABASE_SERVICE_NAME", default="").upper().replace("-", "_")
return ENVIRONMENT.get_value(f"{SERVICE_NAME}_SERVICE_PORT", default="15432")
@staticmethod
def get_database_ca():
"""Obtain database ca."""
return ENVIRONMENT.get_value("DATABASE_SERVICE_CERT", default=None)
@staticmethod
def get_database_ca_file():
"""Obtain database ca file."""
return ENVIRONMENT.get_value("DATABASE_SERVICE_CERTFILE", default="/etc/ssl/certs/server.pem")
@staticmethod
def get_metrics_port():
"""Obtain metrics port."""
return 8080
@staticmethod
def get_metrics_path():
"""Obtain metrics path."""
return "/metrics"
@staticmethod
def get_endpoint_host(app, name, default):
"""Obtain endpoint hostname."""
svc = "_".join((app, name, "HOST")).replace("-", "_").upper()
return ENVIRONMENT.get_value(svc, default=default)
@staticmethod
def get_endpoint_port(app, name, default):
"""Obtain endpoint port."""
svc = "_".join((app, name, "PORT")).replace("-", "_").upper()
return ENVIRONMENT.get_value(svc, default=default)
class ClowderConfigurator(Configurator):
"""Obtain configuration based on using Clowder and app-common."""
@staticmethod
def get_in_memory_db_host():
"""Obtain in memory (redis) db host."""
return LoadedConfig.inMemoryDb.hostname
# TODO: if we drop an elasticache instance or clowder supports more
# than 1 elasticache instance, we can switch to using the inMemoryDb
# return ENVIRONMENT.get_value("REDIS_HOST", default="redis")
@staticmethod
def get_in_memory_db_port():
"""Obtain in memory (redis) db port."""
return LoadedConfig.inMemoryDb.port
# return ENVIRONMENT.get_value("REDIS_PORT", default="6379")
@staticmethod
def get_kafka_broker_host():
"""Obtain kafka broker host address."""
return LoadedConfig.kafka.brokers[0].hostname
@staticmethod
def get_kafka_broker_port():
"""Obtain kafka broker port."""
return LoadedConfig.kafka.brokers[0].port
@staticmethod
def get_kafka_topic(requestedName: str):
"""Obtain kafka topic."""
return KafkaTopics.get(requestedName).name
@staticmethod
def get_cloudwatch_access_id():
"""Obtain cloudwatch access id."""
return LoadedConfig.logging.cloudwatch.accessKeyId
@staticmethod
def get_cloudwatch_access_key():
"""Obtain cloudwatch access key."""
return LoadedConfig.logging.cloudwatch.secretAccessKey
@staticmethod
def get_cloudwatch_region():
"""Obtain cloudwatch region."""
return LoadedConfig.logging.cloudwatch.region
@staticmethod
def get_cloudwatch_log_group():
"""Obtain cloudwatch log group."""
return LoadedConfig.logging.cloudwatch.logGroup
@staticmethod
def get_object_store_endpoint():
"""Obtain object store endpoint."""
S3_SECURE = CONFIGURATOR.get_object_store_tls()
S3_HOST = CONFIGURATOR.get_object_store_host()
S3_PORT = CONFIGURATOR.get_object_store_port()
S3_PREFIX = "https://" if S3_SECURE else "http://"
endpoint = f"{S3_PREFIX}{S3_HOST}"
if bool(S3_PORT):
endpoint += f":{S3_PORT}"
return endpoint
@staticmethod
def get_object_store_host():
"""Obtain object store host."""
return LoadedConfig.objectStore.hostname
@staticmethod
def get_object_store_port():
"""Obtain object store port."""
return LoadedConfig.objectStore.port
@staticmethod
def get_object_store_tls():
"""Obtain object store secret key."""
value = LoadedConfig.objectStore.tls
if isinstance(value, bool):
return value
if value and value.lower() in ["true", "false"]:
return value.lower() == "true"
else:
return False
@staticmethod
def get_object_store_access_key(requestedName: str = ""):
"""Obtain object store access key."""
if requestedName != "" and ObjectBuckets.get(requestedName):
return ObjectBuckets.get(requestedName).accessKey
if len(LoadedConfig.objectStore.buckets) > 0:
return LoadedConfig.objectStore.buckets[0].accessKey
if LoadedConfig.objectStore.accessKey:
return LoadedConfig.objectStore.accessKey
@staticmethod
def get_object_store_secret_key(requestedName: str = ""):
"""Obtain object store secret key."""
if requestedName != "" and ObjectBuckets.get(requestedName):
return ObjectBuckets.get(requestedName).secretKey
if len(LoadedConfig.objectStore.buckets) > 0:
return LoadedConfig.objectStore.buckets[0].secretKey
if LoadedConfig.objectStore.secretKey:
return LoadedConfig.objectStore.secretKey
@staticmethod
def get_object_store_bucket(requestedName: str = ""):
"""Obtain object store bucket."""
if ObjectBuckets.get(requestedName):
return ObjectBuckets.get(requestedName).name
return requestedName
@staticmethod
def get_database_name():
"""Obtain database name."""
return LoadedConfig.database.name
@staticmethod
def get_database_user():
"""Obtain database user."""
return LoadedConfig.database.username
@staticmethod
def get_database_password():
"""Obtain database password."""
return LoadedConfig.database.password
@staticmethod
def get_database_host():
"""Obtain database host."""
return LoadedConfig.database.hostname
@staticmethod
def get_database_port():
"""Obtain database port."""
return LoadedConfig.database.port
@staticmethod
def get_database_ca():
"""Obtain database ca."""
return LoadedConfig.database.rdsCa
@staticmethod
def get_database_ca_file():
"""Obtain database ca file."""
if LoadedConfig.database.rdsCa:
return LoadedConfig.rds_ca()
return None
@staticmethod
def get_metrics_port():
"""Obtain metrics port."""
return LoadedConfig.metricsPort
@staticmethod
def get_metrics_path():
"""Obtain metrics path."""
return LoadedConfig.metricsPath
@staticmethod
def get_endpoint_host(app, name, default):
"""Obtain endpoint hostname."""
endpoint = DependencyEndpoints.get(app, {}).get(name)
if endpoint:
return endpoint.hostname
# if the endpoint is not defined by clowder, fall back to env variable
svc = "_".join((app, name, "HOST")).replace("-", "_").upper()
return ENVIRONMENT.get_value(svc, default=default)
@staticmethod
def get_endpoint_port(app, name, default):
"""Obtain endpoint port."""
endpoint = DependencyEndpoints.get(app, {}).get(name)
if endpoint:
return endpoint.port
# if the endpoint is not defined by clowder, fall back to env variable
svc = "_".join((app, name, "PORT")).replace("-", "_").upper()
return ENVIRONMENT.get_value(svc, default=default)
class ConfigFactory:
"""Returns configurator based on mode."""
@staticmethod
def get_configurator():
"""Returns configurator based on mode from env variable."""
return ClowderConfigurator if CLOWDER_ENABLED else EnvConfigurator
CONFIGURATOR = ConfigFactory.get_configurator()
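# Usage sketch (illustrative; the importing module path is hypothetical):
# callers query CONFIGURATOR without caring whether Clowder or plain
# environment variables supplied the values.
#
#     from .config import CONFIGURATOR
#     db_host = CONFIGURATOR.get_database_host()
#     db_port = CONFIGURATOR.get_database_port()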
|
print('='*5 + '10 TERMS OF AN ARITHMETIC PROGRESSION' + '='*5)
primeiro = int(input('First term: '))
razao = int(input('Common difference: '))
# nth term of an AP: a_n = a_1 + (n - 1) * d, e.g. 2 + 9*3 = 29
decimo = primeiro + (10 - 1) * razao
# add razao to the stop value so the tenth term itself is printed
for c in range(primeiro, decimo + razao, razao):
print(f'{c}', end=' ¬ ')
print('Done')
|
# -*- coding: utf-8 -*-
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def buildTree(self, preorder, inorder):
if len(preorder) == 0 or len(inorder) == 0 or len(preorder) != len(inorder):
return None
res = self.buildTreeHelper(preorder, inorder, 0, 0, len(preorder) - 1)
return res
def buildTreeHelper(self, preorder, inorder, pre_st, in_st, in_end):
if pre_st >= len(preorder) or in_st > in_end:
return None
# first node in preorder is root
root = TreeNode(preorder[pre_st])
i = in_st
# find root in inorder, root is the first element in preorder
while(i <= in_end):
if inorder[i] == preorder[pre_st]:
break
i += 1
# left: pre start is the next element in preorder, i is curr root in inorder so in_end is at the left position of i
root.left = self.buildTreeHelper(preorder, inorder, pre_st + 1, in_st, i - 1)
# right: pre start is curr root (pre_st) + len(left child in inorder) + 1 (easy to see by drawing the tree)
root.right = self.buildTreeHelper(preorder, inorder, pre_st + (i - in_st + 1), i + 1, in_end)
return root
# My Accepted Solution
class Solution2(object):
def buildTree(self, preorder, inorder):
if len(preorder) == 0 or len(inorder) == 0:
return None
curr = TreeNode(preorder[0])
index = inorder.index(preorder[0])
curr.left = self.buildTree(preorder[1:len(inorder[0:index]) + 1], inorder[0:index])
curr.right = self.buildTree(preorder[len(inorder[0:index]) + 1:], inorder[index + 1:])
return curr
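# A small self-check (added for illustration) using the classic example:
# preorder [3,9,20,15,7] with inorder [9,3,15,20,7] yields a tree rooted at 3
# with left child 9 and right subtree 20 -> (15, 7).
if __name__ == "__main__":
    root = Solution2().buildTree([3, 9, 20, 15, 7], [9, 3, 15, 20, 7])
    assert root.val == 3 and root.left.val == 9 and root.right.val == 20
    assert root.right.left.val == 15 and root.right.right.val == 7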
|
import uuid
from abc import ABC
from django.core.exceptions import ImproperlyConfigured
from django.forms import BaseFormSet, modelformset_factory, HiddenInput
from django.shortcuts import redirect
from formtools.wizard.views import SessionWizardView
from django.utils.translation import gettext_lazy as _
from django.forms.formsets import DELETION_FIELD_NAME
APPEND_FORM_LOOKUP_KEY = "APPEND_FORM"
def get_class(kls):
parts = kls.split('.')
module = ".".join(parts[:-1])
m = __import__(module)
for comp in parts[1:]:
m = getattr(m, comp)
return m
class MrMapWizard(SessionWizardView, ABC):
template_name = "generic_views/base_extended/wizard.html"
ignore_uncomitted_forms = False
instance_id = None
title = _('Wizard')
id_wizard = "id_" + str(uuid.uuid4())
required_forms = None
action_url = ""
success_url = ""
def get_success_url(self):
if self.success_url:
return self.success_url
else:
raise ImproperlyConfigured(
"No URL to redirect to. Provide a success_url.")
def get_context_data(self, form, **kwargs):
context = super(MrMapWizard, self).get_context_data(form=form, **kwargs)
context.update({'id_modal': self.id_wizard,
'modal_title': self.title,
'action_url': self.action_url,
})
context['wizard'].update({'ignore_uncomitted_forms': self.ignore_uncomitted_forms})
return context
def is_form_update(self):
if 'is_form_update' in self.request.POST:
# it's an update of dropdown items or something else;
# refresh with the updated form
return True
return False
def is_append_formset(self, form):
if issubclass(form.__class__, BaseFormSet):
# formset is posted
append_form_lookup_key = f"{form.prefix}-{APPEND_FORM_LOOKUP_KEY}" if form.prefix else APPEND_FORM_LOOKUP_KEY
if append_form_lookup_key in form.data:
# to prevent data loss, we have to store the current form
self.storage.set_step_data(self.steps.current, self.process_step(form))
self.storage.set_step_files(self.steps.current, self.process_step_files(form))
# replicate the current initial_dict entry so the appended form gets initial data too
current_extra = len(form.extra_forms)
new_init_list = []
for i in range(current_extra + 1):
new_init_list.append(self.initial_dict[self.steps.current][0])
self.initial_dict[self.steps.current] = new_init_list
# overwrite the form list entry with the newly generated formset
self.form_list[self.steps.current] = modelformset_factory(form.form.Meta.model,
can_delete=True,
# be careful; there could also be other Form
# classes
form=form.forms[0].__class__,
extra=current_extra + 1)
return True
return False
def render_next_step(self, form, **kwargs):
if self.is_append_formset(form=form):
# fetch the form again with get_form(); it was updated and the current form instance does not hold the updates
return self.render(form=self.get_form(step=self.steps.current))
if self.is_form_update():
return self.render(form=form)
return super().render_next_step(form=form, **kwargs)
def render_done(self, form, **kwargs):
if self.is_append_formset(form=form):
# fetch the form again with get_form(); it was updated and the current form instance does not hold the updates
return self.render(form=self.get_form(step=self.steps.current))
if self.is_form_update():
return self.render(form=form)
return super().render_done(form=form, **kwargs)
def render_goto_step(self, goto_step, **kwargs):
current_form = self.get_form(data=self.request.POST, files=self.request.FILES)
if self.is_append_formset(form=current_form):
# fetch the form again with get_form(); it was updated and the current form instance does not hold the updates
return self.render(form=self.get_form(step=self.steps.current))
if self.is_form_update():
return self.render(current_form)
# 1. Save the current form; validation does not matter for now.
# When the wizard is done, it will validate each form.
self.storage.set_step_data(self.steps.current,
self.process_step(current_form))
self.storage.set_step_files(self.steps.current, self.process_step_files(current_form))
return super().render_goto_step(goto_step=goto_step)
def process_step(self, form):
# we implement custom logic to ignore uncomitted forms,
# but if the uncomitted form is required, then we don't drop it
if self.ignore_uncomitted_forms and 'wizard_save' in self.request.POST:
uncomitted_forms = []
for form_key in self.get_form_list():
form_obj = self.get_form(
step=form_key,
data=self.storage.get_step_data(form_key),
files=self.storage.get_step_files(form_key)
)
# x.1. self.get_form_list(): get the unbound forms
if not form_obj.is_bound and form_key != self.steps.current:
uncomitted_forms.append(form_key)
for uncomitted_form in uncomitted_forms:
if uncomitted_form in self.required_forms:
# not all required forms are posted. Render next form.
return self.get_form_step_data(form)
# x.4. if no unbound form has required fields then remove them from the form_list
temp_form_list = self.form_list
for uncomitted_form in uncomitted_forms:
# only pop non required forms
if self.required_forms is None or uncomitted_form not in self.required_forms:
self.form_list.pop(uncomitted_form)
else:
# there was an uncomitted required form. Go the default way
self.form_list = temp_form_list
return self.get_form_step_data(form)
# set the current committed form as the last form
self.form_list.move_to_end(self.steps.current)
return self.get_form_step_data(form)
def get_form(self, step=None, data=None, files=None):
form = super().get_form(step=step, data=data, files=files)
if issubclass(form.__class__, BaseFormSet) and form.can_delete:
for _form in form.forms:
_form.fields[DELETION_FIELD_NAME].widget = HiddenInput()
return form
def done(self, form_list, **kwargs):
return redirect(to=self.get_success_url())
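# A hedged subclassing sketch (illustrative; the form classes and URL are
# hypothetical). A concrete wizard only needs a form_list, a success_url and
# optionally the set of required step keys:
#
# class NewServiceWizard(MrMapWizard):
#     form_list = [ServiceUrlForm, ServiceMetadataForm]
#     required_forms = ['0']
#     success_url = '/services/'
#     def done(self, form_list, **kwargs):
#         for form in form_list:
#             form.save()
#         return super().done(form_list, **kwargs)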
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.stylegan2_generator import *
from models.stylegan2_discriminator import *
from models.pggan_generator import *
from models.pggan_discriminator import *
def stylegan2(path, res):
G = StyleGAN2Generator(resolution = res)
D = StyleGAN2Discriminator(resolution = res)
weight = torch.load(path)
G.load_state_dict(weight['generator_smooth'])
D.load_state_dict(weight['discriminator'])
G.eval()
D.eval()
return G, D
def pggan(path, res):
G = PGGANGenerator(resolution = res)
D = PGGANDiscriminator(resolution = res)
weight = torch.load(path)
G.load_state_dict(weight['generator_smooth'])
D.load_state_dict(weight['discriminator'])
G.eval()
D.eval()
return G, D
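# Usage sketch (illustrative; the checkpoint path, resolution and latent size
# are hypothetical, and CPU-only loading may need map_location in torch.load):
#
# G, D = stylegan2('checkpoints/stylegan2_256.pth', res=256)
# with torch.no_grad():
#     images = G(torch.randn(4, 512))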
|
try:
    from urllib.parse import quote  # Python 3
except ImportError:
    from urllib import quote  # Python 2
import re
"""
motionless is a library that takes the pain out of generating Google Static Map URLs.
For example code and documentation see:
http://github.com/ryancox/motionless
For details about the GoogleStatic Map API see:
http://code.google.com/apis/maps/documentation/staticmaps/
If you encounter problems, log an issue on github. If you have questions, drop me an
email at ryan.a.cox@gmail.com.
"""
"""
Copyright 2010 Ryan A Cox - ryan.a.cox@gmail.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = "Ryan Cox <ryan.a.cox@gmail.com>"
__version__ = "1.0"
class Color(object):
COLORS = ['black', 'brown', 'green', 'purple',
'yellow', 'blue', 'gray', 'orange', 'red', 'white']
pat = re.compile("0x[0-9A-Fa-f]{6}|[0-9A-Fa-f]{8}")
@staticmethod
def is_valid_color(color):
return Color.pat.match(color) or color in Color.COLORS
class Marker(object):
SIZES = ['tiny', 'mid', 'small']
LABELS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
def __init__(self, size, color, label):
if size and size not in Marker.SIZES:
raise ValueError(
"[%s] is not a valid marker size. Valid sizes include %s" %
(size, Marker.SIZES))
if label and (len(label) != 1 or label not in Marker.LABELS):
raise ValueError(
"[%s] is not a valid label. Valid labels are a single character 'A'..'Z' or '0'..'9'" % label)
if color and color not in Color.COLORS:
raise ValueError(
"[%s] is not a valid color. Valid colors include %s" %
(color, Color.COLORS))
self.size = size
self.color = color
self.label = label
class AddressMarker(Marker):
def __init__(self, address, size=None, color=None, label=None):
Marker.__init__(self, size, color, label)
self.address = address
class LatLonMarker(Marker):
def __init__(self, lat, lon, size=None, color=None, label=None):
Marker.__init__(self, size, color, label)
self.latitude = lat
self.longitude = lon
class Map(object):
MAX_URL_LEN = 2048
MAPTYPES = ['roadmap', 'satellite', 'hybrid', 'terrain']
FORMATS = ['png', 'png8', 'png32', 'gif', 'jpg', 'jpg-baseline']
MAX_X = 640
MAX_Y = 640
ZOOM_RANGE = range(1, 21)
def __init__(self, size_x, size_y, maptype, zoom=None):
self.base_url = 'http://maps.google.com/maps/api/staticmap?'
self.size_x = size_x
self.size_y = size_y
self.sensor = False
self.format = 'png'
self.maptype = maptype
self.zoom = zoom
def __str__(self):
return self.generate_url()
def check_parameters(self):
if self.format not in Map.FORMATS:
raise ValueError(
"[%s] is not a valid file format. Valid formats include %s" %
(self.format, Map.FORMATS))
if self.maptype not in Map.MAPTYPES:
raise ValueError(
"[%s] is not a valid map type. Valid types include %s" %
(self.maptype, Map.MAPTYPES))
if self.size_x > Map.MAX_X or self.size_x < 1:
raise ValueError(
"[%s] is not a valid x-dimension. Must be between 1 and %s" %
(self.size_x, Map.MAX_X))
if self.size_y > Map.MAX_Y or self.size_y < 1:
raise ValueError(
"[%s] is not a valid y-dimension. Must be between 1 and %s" %
(self.size_y, Map.MAX_Y))
if self.zoom is not None and self.zoom not in Map.ZOOM_RANGE:
raise ValueError(
"[%s] is not a zoom setting. Must be between %s and %s" %
(self.size_x, min(Map.ZOOM_RANGE), max(Map.ZOOM_RANGE)))
def _get_sensor(self):
if self.sensor:
return 'true'
else:
return 'false'
def _check_url(self, url):
if len(url) > Map.MAX_URL_LEN:
raise ValueError(
"Generated URL is %s characters in length. Maximum is %s" %
(len(url), Map.MAX_URL_LEN))
class CenterMap(Map):
def __init__(self, address=None, lat=None, lon=None, zoom=17, size_x=400,
size_y=400, maptype='roadmap'):
Map.__init__(self, size_x=size_x, size_y=size_y, maptype=maptype,
zoom=zoom)
if address:
self.center = quote(address)
elif lat and lon:
self.center = "%s,%s" % (lat, lon)
else:
self.center = "1600 Amphitheatre Parkway Mountain View, CA"
def check_parameters(self):
super(CenterMap, self).check_parameters()
def generate_url(self):
self.check_parameters()
url = "%smaptype=%s&format=%s¢er=%s&zoom=%s&size=%sx%s&sensor=%s" % (
self.base_url,
self.maptype,
self.format,
self.center,
self.zoom,
self.size_x,
self.size_y,
self._get_sensor())
self._check_url(url)
return url
class VisibleMap(Map):
def __init__(self, size_x=400, size_y=400, maptype='roadmap'):
Map.__init__(self, size_x=size_x, size_y=size_y, maptype=maptype)
self.locations = []
def add_address(self, address):
self.locations.append(quote(address))
def add_latlon(self, lat, lon):
        self.locations.append("%s,%s" % (quote(str(lat)), quote(str(lon))))
def generate_url(self):
self.check_parameters()
url = "%smaptype=%s&format=%s&size=%sx%s&sensor=%s&visible=%s" % (
self.base_url,
self.maptype,
self.format,
self.size_x,
self.size_y,
self._get_sensor(),
"|".join(self.locations))
self._check_url(url)
return url
class DecoratedMap(Map):
def __init__(self, lat=None, lon=None, zoom=None, size_x=400, size_y=400,
maptype='roadmap', region=False, fillcolor='green',
pathweight=None, pathcolor=None):
Map.__init__(self, size_x=size_x, size_y=size_y, maptype=maptype,
zoom=zoom)
self.markers = []
self.fillcolor = fillcolor
self.pathweight = pathweight
self.pathcolor = pathcolor
self.region = region
self.path = []
self.contains_addresses = False
if lat and lon:
self.center = "%s,%s" % (lat, lon)
else:
self.center = None
def check_parameters(self):
super(DecoratedMap, self).check_parameters()
if self.region and len(self.path) < 2:
raise ValueError(
"At least two path elements required if region is enabled")
        if self.region and self.path[0] != self.path[-1]:
raise ValueError(
"If region enabled, first and last path entry must be identical")
if len(self.path) == 0 and len(self.markers) == 0:
raise ValueError("Must specify points in path or markers")
if not Color.is_valid_color(self.fillcolor):
raise ValueError(
"%s is not a valid fill color. Must be 24 or 32 bit value or one of %s" %
(self.fillcolor, Color.COLORS))
if self.pathcolor and not Color.is_valid_color(self.pathcolor):
raise ValueError(
"%s is not a valid path color. Must be 24 or 32 bit value or one of %s" %
(self.pathcolor, Color.COLORS))
def _generate_markers(self):
styles = set()
data = {}
ret = []
# build list of unique styles
for marker in self.markers:
styles.add((marker.size, marker.color, marker.label))
# setup styles/location dict
for style in styles:
data[style] = []
# populate styles/location dict
for marker in self.markers:
if isinstance(marker, AddressMarker):
data[(marker.size, marker.color, marker.label)
].append(quote(marker.address))
if isinstance(marker, LatLonMarker):
location = "%s,%s" % (marker.latitude, marker.longitude)
data[(marker.size, marker.color, marker.label)
].append(location)
# build markers entries for URL
for style in data:
locations = data[style]
parts = []
parts.append("markers=")
if style[0]:
parts.append("size:%s" % style[0])
if style[1]:
parts.append("color:%s" % style[1])
if style[2]:
parts.append("label:%s" % style[2])
for location in locations:
parts.append(location)
ret.append("|".join(parts))
return "&".join(ret)
def _can_polyencode(self):
try:
import gpolyencode
        except ImportError:
return False
return not self.contains_addresses
def _polyencode(self):
import gpolyencode
encoder = gpolyencode.GPolyEncoder()
points = []
for point in self.path:
tokens = point.split(',')
points.append((float(tokens[1]), float(tokens[0])))
return encoder.encode(points)['points']
def add_marker(self, marker):
if not isinstance(marker, Marker):
raise ValueError("Must pass instance of Marker to add_marker")
self.markers.append(marker)
def add_path_address(self, address):
self.contains_addresses = True
self.path.append(quote(address))
def add_path_latlon(self, lat, lon):
self.path.append("%s,%s" % (quote(str(lat)), quote(str(lon))))
def generate_url(self):
self.check_parameters()
url = "%smaptype=%s&format=%s&size=%sx%s&sensor=%s" % (
self.base_url,
self.maptype,
self.format,
self.size_x,
self.size_y,
self._get_sensor())
if self.center:
url = "%s¢er=%s" % (url, self.center)
if self.zoom:
url = "%s&zoom=%s" % (url, self.zoom)
if len(self.markers) > 0:
url = "%s&%s" % (url, self._generate_markers())
if len(self.path) > 0:
url = "%s&path=" % url
if self.pathcolor:
url = "%scolor:%s|" % (url, self.pathcolor)
if self.pathweight:
url = "%sweight:%s|" % (url, self.pathweight)
if self.region:
url = "%sfillcolor:%s|" % (url, self.fillcolor)
if self._can_polyencode():
url = "%senc:%s" % (url, quote(self._polyencode()))
else:
url = "%s%s" % (url, "|".join(self.path))
self._check_url(url)
return url
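if __name__ == '__main__':
    # Hedged usage sketch (coordinates illustrative): build a static-map URL
    # centred on a point with one labelled marker.
    dmap = DecoratedMap(lat=37.422, lon=-122.084, zoom=14)
    dmap.add_marker(LatLonMarker(37.422, -122.084,
                                 size='mid', color='red', label='A'))
    print(dmap.generate_url())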
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time
# Time format: YYYY-MM-DD hh:mm:ss
def get_date_time():
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
# Time format: YYYY-MM-DD hh:mm:ss.xxx (with milliseconds)
def get_ms_date_time():
ct = time.time()
local_time = time.localtime(ct)
data_head = time.strftime("%Y-%m-%d %H:%M:%S", local_time)
data_secs = (ct - int(ct)) * 1000
time_stamp = "%s.%03d" % (data_head, data_secs)
return time_stamp
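if __name__ == '__main__':
    # Quick check of both formatters (output illustrative).
    print(get_date_time())      # e.g. 2020-08-28 11:01:52
    print(get_ms_date_time())   # e.g. 2020-08-28 11:01:52.042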
|
import copy
import itertools
import weakref
from mopidy.models.fields import Field
# Registered models for automatic deserialization
_models = {}
class ImmutableObject:
"""
Superclass for immutable objects whose fields can only be modified via the
constructor.
    This version of this class has been retained to avoid breaking any clients
    relying on its behavior. Internally in Mopidy we now use
    :class:`ValidatedImmutableObject` for type safety and its much smaller
    memory footprint.
:param kwargs: kwargs to set as fields on the object
:type kwargs: any
"""
    # Any sub-classes that don't set slots won't be affected by the base using
    # slots, as they will still get an instance dict.
__slots__ = ["__weakref__"]
def __init__(self, *args, **kwargs):
for key, value in kwargs.items():
if not self._is_valid_field(key):
raise TypeError(
f"__init__() got an unexpected keyword argument {key!r}"
)
self._set_field(key, value)
def __setattr__(self, name, value):
if name.startswith("_"):
object.__setattr__(self, name, value)
else:
raise AttributeError("Object is immutable.")
def __delattr__(self, name):
if name.startswith("_"):
object.__delattr__(self, name)
else:
raise AttributeError("Object is immutable.")
def _is_valid_field(self, name):
return hasattr(self, name) and not callable(getattr(self, name))
def _set_field(self, name, value):
if value == getattr(self.__class__, name):
self.__dict__.pop(name, None)
else:
self.__dict__[name] = value
def _items(self):
return self.__dict__.items()
def __repr__(self):
kwarg_pairs = []
for key, value in sorted(self._items()):
if isinstance(value, (frozenset, tuple)):
if not value:
continue
value = list(value)
kwarg_pairs.append(f"{key}={value!r}")
return f"{self.__class__.__name__}({', '.join(kwarg_pairs)})"
def __hash__(self):
hash_sum = 0
for key, value in self._items():
hash_sum += hash(key) + hash(value)
return hash_sum
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return all(
a == b
for a, b in itertools.zip_longest(
self._items(), other._items(), fillvalue=object()
)
)
def __ne__(self, other):
return not self.__eq__(other)
def replace(self, **kwargs):
"""
Replace the fields in the model and return a new instance
Examples::
# Returns a track with a new name
Track(name='foo').replace(name='bar')
# Return an album with a new number of tracks
Album(num_tracks=2).replace(num_tracks=5)
:param kwargs: kwargs to set as fields on the object
:type kwargs: any
:rtype: instance of the model with replaced fields
"""
other = copy.copy(self)
for key, value in kwargs.items():
if not self._is_valid_field(key):
raise TypeError(
f"replace() got an unexpected keyword argument {key!r}"
)
other._set_field(key, value)
return other
def serialize(self):
data = {}
data["__model__"] = self.__class__.__name__
for key, value in self._items():
if isinstance(value, (set, frozenset, list, tuple)):
value = [
v.serialize() if isinstance(v, ImmutableObject) else v
for v in value
]
elif isinstance(value, ImmutableObject):
value = value.serialize()
if not (isinstance(value, list) and len(value) == 0):
data[key] = value
return data
class _ValidatedImmutableObjectMeta(type):
"""Helper that initializes fields, slots and memoizes instance creation."""
def __new__(cls, name, bases, attrs):
fields = {}
for base in bases: # Copy parent fields over to our state
fields.update(getattr(base, "_fields", {}))
for key, value in attrs.items(): # Add our own fields
if isinstance(value, Field):
fields[key] = "_" + key
value._name = key
attrs["_fields"] = fields
attrs["_instances"] = weakref.WeakValueDictionary()
attrs["__slots__"] = list(attrs.get("__slots__", [])) + list(
fields.values()
)
clsc = super().__new__(cls, name, bases, attrs)
if clsc.__name__ != "ValidatedImmutableObject":
_models[clsc.__name__] = clsc
return clsc
def __call__(cls, *args, **kwargs): # noqa: N805
instance = super().__call__(*args, **kwargs)
return cls._instances.setdefault(weakref.ref(instance), instance)
class ValidatedImmutableObject(
ImmutableObject, metaclass=_ValidatedImmutableObjectMeta
):
"""
Superclass for immutable objects whose fields can only be modified via the
constructor. Fields should be :class:`Field` instances to ensure type
safety in our models.
Note that since these models can not be changed, we heavily memoize them
to save memory. So constructing a class with the same arguments twice will
give you the same instance twice.
"""
__slots__ = ["_hash"]
def __hash__(self):
if not hasattr(self, "_hash"):
hash_sum = super().__hash__()
object.__setattr__(self, "_hash", hash_sum)
return self._hash
def _is_valid_field(self, name):
return name in self._fields
def _set_field(self, name, value):
object.__setattr__(self, name, value)
def _items(self):
for field, key in self._fields.items():
if hasattr(self, key):
yield field, getattr(self, key)
def replace(self, **kwargs):
"""
Replace the fields in the model and return a new instance
Examples::
# Returns a track with a new name
Track(name='foo').replace(name='bar')
# Return an album with a new number of tracks
Album(num_tracks=2).replace(num_tracks=5)
Note that internally we memoize heavily to keep memory usage down given
our overly repetitive data structures. So you might get an existing
instance if it contains the same values.
:param kwargs: kwargs to set as fields on the object
:type kwargs: any
:rtype: instance of the model with replaced fields
"""
if not kwargs:
return self
other = super().replace(**kwargs)
if hasattr(self, "_hash"):
object.__delattr__(other, "_hash")
return self._instances.setdefault(weakref.ref(other), other)
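if __name__ == '__main__':
    # Minimal sketch of the legacy ImmutableObject contract: fields are plain
    # class-level defaults, set only through the constructor.
    class Point(ImmutableObject):
        x = 0
        y = 0
    p = Point(x=1, y=2)
    assert p == Point(x=1, y=2)
    assert p.replace(y=5).y == 5
    print(repr(p))  # Point(x=1, y=2)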
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# type: ignore
from functools import reduce
from io import StringIO
from random import sample
from unittest import TestCase
from unittest.mock import call, patch
from opentelemetry.instrumentation import bootstrap
def sample_packages(packages, rate):
    sampled = sample(list(packages), int(len(packages) * rate))
return {k: v for k, v in packages.items() if k in sampled}
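# e.g. sample_packages({"a": 1, "b": 2, "c": 3, "d": 4}, 0.5) keeps a random
# half of the entries, such as {"b": 2, "d": 4}.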
class TestBootstrap(TestCase):
installed_libraries = {}
installed_instrumentations = {}
@classmethod
def setUpClass(cls):
# select random 60% of instrumentations
cls.installed_libraries = sample_packages(
bootstrap.instrumentations, 0.6
)
# treat 50% of sampled packages as pre-installed
cls.installed_instrumentations = sample_packages(
cls.installed_libraries, 0.5
)
cls.pkg_patcher = patch(
"opentelemetry.instrumentation.bootstrap._find_installed_libraries",
return_value=cls.installed_libraries,
)
pip_freeze_output = []
for inst in cls.installed_instrumentations.values():
inst = inst.replace(">=", "==")
if "==" not in inst:
inst = "{}==x.y".format(inst)
pip_freeze_output.append(inst)
cls.pip_freeze_patcher = patch(
"opentelemetry.instrumentation.bootstrap._sys_pip_freeze",
return_value="\n".join(pip_freeze_output),
)
cls.pip_install_patcher = patch(
"opentelemetry.instrumentation.bootstrap._sys_pip_install",
)
cls.pip_uninstall_patcher = patch(
"opentelemetry.instrumentation.bootstrap._sys_pip_uninstall",
)
cls.pip_check_patcher = patch(
"opentelemetry.instrumentation.bootstrap._pip_check",
)
cls.pkg_patcher.start()
cls.mock_pip_freeze = cls.pip_freeze_patcher.start()
cls.mock_pip_install = cls.pip_install_patcher.start()
cls.mock_pip_uninstall = cls.pip_uninstall_patcher.start()
cls.mock_pip_check = cls.pip_check_patcher.start()
@classmethod
def tearDownClass(cls):
        cls.pip_check_patcher.stop()
        cls.pip_uninstall_patcher.stop()
        cls.pip_install_patcher.stop()
        cls.pip_freeze_patcher.stop()
cls.pkg_patcher.stop()
@patch("sys.argv", ["bootstrap", "-a", "pipenv"])
def test_run_unknown_cmd(self):
with self.assertRaises(SystemExit):
bootstrap.run()
@patch("sys.argv", ["bootstrap", "-a", "requirements"])
def test_run_cmd_print(self):
with patch("sys.stdout", new=StringIO()) as fake_out:
bootstrap.run()
self.assertEqual(
fake_out.getvalue(),
"\n".join(self.installed_libraries.values()),
)
@patch("sys.argv", ["bootstrap", "-a", "install"])
def test_run_cmd_install(self):
bootstrap.run()
self.assertEqual(
self.mock_pip_freeze.call_count, len(self.installed_libraries)
)
to_uninstall = reduce(
lambda x, y: x + y,
[
pkgs
for lib, pkgs in bootstrap.libraries.items()
if lib in self.installed_instrumentations
],
)
self.mock_pip_uninstall.assert_has_calls(
[call(i) for i in to_uninstall], any_order=True
)
self.mock_pip_install.assert_has_calls(
[call(i) for i in self.installed_libraries.values()],
any_order=True,
)
self.assertEqual(self.mock_pip_check.call_count, 1)
|
#!/usr/bin/env python
"""
Copyright 2018
author: <scott.wales@unimelb.edu.au>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import xarray
import matplotlib.pyplot as plt
import sys
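# Usage (assumed): python <this script> <ocean-dataset.nc>
# Plots the surface (st_ocean=0) temperature of the last time step.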
d = xarray.open_dataset(sys.argv[1], chunks={'time': 1, 'st_ocean': 1})
temp = d.isel(time=-1, st_ocean=0).temp
#d.isel(time=0, st_ocean=0).temp.plot.imshow()
temp.plot.imshow()
#plt.imshow(temp.values)
plt.show()
|
'''
@File    : main.py
@Time    : 2020/08/28 11:01:52
@Author  : Haowei
@Version : 1.0
@Contact : blackmhw@gmail.com
@Desc    :
input:  directory of sample shp files (each shp must contain a class attribute; EPSG:3857);
        multispectral imagery (EPSG:3857)
output: label rasters plus the matching image chips for each region, written to the output directory
'''
import argparse
import os.path as osp
import glob
import sys
# sys.path.append('./')
# import module.utils as geotool
import geotools as geotool
def collect_files(dir,suffix):
files = []
filen = osp.join(dir, '*.' + suffix)
filenames = glob.glob(filen)
for filename in filenames:
assert filename.endswith(suffix), filename
files.append(filename)
assert len(files), f'No images found in {dir}'
print(f'Loaded {len(files)} images from {dir}')
return files
def parse_args():
parser = argparse.ArgumentParser(
description='Convert shp to semantic segmentation datasets')
parser.add_argument('--shpdir', default=r'D:\gaofen_caigangfang\Sample', help='shp data path')
parser.add_argument('--image', default=r'D:\gaofen_caigangfang\Land_GF2.tif' ,help='raster data path' )
parser.add_argument('--output', default=r'D:\gaofen_caigangfang\shpimg', help='output path')
    parser.add_argument('--classtype', default='fasdfs', help='class type tag used when rasterizing and naming outputs')
parser.add_argument('--datatype', default='byte', help='support to byte')
args = parser.parse_args()
return args
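# Example invocation (paths illustrative):
#   python main.py --shpdir D:\samples --image multispectral.tif --output out --datatype byte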
if __name__ == "__main__":
args = parse_args()
# # shp to geoTiff
shpfiles = collect_files(args.shpdir,'shp')
for shpfile in shpfiles:
        # support multi-class (still needs changes)
geotool.shp2img(shpfile,args.image,args.output,args.classtype)
# # data type convert byte
# if args.datatype == 'byte':
# geotool.img2byte(args.image,args.output)
    # # clip image by image (gdal_translate does not resample, so the CRS stays consistent)
# for shpfile in shpfiles:
# shpname = osp.basename(shpfile).split('.')[0]+'_'+ args.classtype
# outtif = osp.join(args.output,shpname + '.tif')
# geotool.rasterClipraster(outtif,args.image,args.output)
# "所有函数需要明确输入输出,保持标准化"
# outtif = r'D:\EarthLearning\preprocessing\datasets\landcover\landcover1_byte.tif'
# geotool.rasterClipraster(outtif,args.image,args.output)
intimg = r'D:\EarthLearning\preprocessing\datasets\landcover\tile\mosaic_3857.tif'
targetimg = r'D:\EarthLearning\preprocessing\datasets\landcover\tile\landcover3_byte.tif'
outimg = r'D:\EarthLearning\preprocessing\datasets\landcover\tile\label30\landcover3_img10.tif'
    geotool.reSamplebyimg(intimg, targetimg, outimg)
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
import requests
import requests_mock
from pynso.connection import NSOConnection, _format_url
class TestConnection(unittest.TestCase):
test_error = {"errors": {"error": [{"error-message": "test"}]}}
def setUp(self):
self.connection = NSOConnection("test.com", "test", "test", False)
self.adapter = requests_mock.Adapter()
self.connection.session.mount("http://test.com/", self.adapter)
def test_url_format(self):
self.assertEqual(
_format_url(
host="test.com",
root="restconf",
data_store="data",
path="foo/bar",
),
"https://test.com/restconf/data/foo/bar",
)
def test_url_format_http(self):
self.assertEqual(
_format_url(
host="test.com",
root="restconf",
data_store="data",
path="foo/bar",
ssl=False,
),
"http://test.com/restconf/data/foo/bar",
)
def test_url_format_http_no_path(self):
self.assertEqual(
_format_url(
host="test.com",
root="restconf",
data_store="data",
path=None,
ssl=False,
),
"http://test.com/restconf/data",
)
def test_url_format_base(self):
self.assertEqual(
_format_url(host="test.com", root="restconf", data_store=None, path=None, ssl=True),
"https://test.com/restconf",
)
def test_url_format_path_(self):
self.assertEqual(
_format_url(host="test.com", root="restconf", data_store=None, path="foo/bar", ssl=True),
"https://test.com/restconf/foo/bar",
)
def test_get(self):
test = {"a": "b"}
self.adapter.register_uri("GET", "/restconf/data", json=test, status_code=200)
response = self.connection.get(
"data",
)
self.assertEqual(test["a"], response["a"])
def test_get_error(self):
self.adapter.register_uri("GET", "/restconf/data", json=self.test_error, status_code=404)
with self.assertRaises(requests.HTTPError):
self.connection.get(
"data",
)
def test_get_error_plain(self):
self.adapter.register_uri("GET", "/restconf/data", text="should be json", status_code=200)
with self.assertRaises(json.decoder.JSONDecodeError):
self.connection.get(
"data",
)
def test_head(self):
self.adapter.register_uri("HEAD", "/restconf/data", status_code=200)
response = self.connection.head(
"data",
)
self.assertIsNone(response)
def test_head_error(self):
self.adapter.register_uri("HEAD", "/restconf/data", json=self.test_error, status_code=500)
with self.assertRaises(requests.HTTPError):
self.connection.head(
"data",
)
def test_put(self):
test = {"a": "b"}
self.adapter.register_uri("PUT", "/restconf/data", json=test, status_code=200)
response = self.connection.put("data", data={"test": "data"})
self.assertEqual(test, response)
def test_put_created(self):
test = {"a": "b"}
self.adapter.register_uri("PUT", "/restconf/data", json=test, status_code=201)
response = self.connection.put("data", data={"test": "data"})
self.assertIsNone(response)
def test_put_no_response(self):
self.adapter.register_uri("PUT", "/restconf/data", status_code=204)
response = self.connection.put("data", data={"test": "data"})
self.assertIsNone(response)
def test_put_error(self):
self.adapter.register_uri("PUT", "/restconf/data", json=self.test_error, status_code=404)
with self.assertRaises(requests.HTTPError):
self.connection.put("data", data={"test": "data"})
def test_patch(self):
self.adapter.register_uri("PATCH", "/restconf/data", status_code=204)
response = self.connection.patch("data", data={"test": "data"})
self.assertIsNone(response)
def test_patch_error(self):
self.adapter.register_uri("PATCH", "/restconf/data", json=self.test_error, status_code=404)
with self.assertRaises(requests.HTTPError):
self.connection.patch("data", data={"test": "data"})
def test_delete(self):
self.adapter.register_uri("DELETE", "/restconf/data", status_code=204)
response = self.connection.delete("data", data={"test": "data"})
self.assertIsNone(response)
def test_delete_error(self):
self.adapter.register_uri("DELETE", "/restconf/data", json=self.test_error, status_code=404)
with self.assertRaises(requests.HTTPError):
self.connection.delete("data", data={"test": "data"})
def test_post(self):
test = {"a": "b"}
self.adapter.register_uri("POST", "/restconf/data", json=test, status_code=200)
response = self.connection.post("data", data={"test": "data"})
self.assertEqual(test, response)
def test_post_no_response(self):
self.adapter.register_uri("POST", "/restconf/data", status_code=204)
response = self.connection.post("data", data={"test": "data"})
self.assertIsNone(response)
def test_post_error(self):
self.adapter.register_uri("POST", "/restconf/data", json=self.test_error, status_code=404)
with self.assertRaises(requests.HTTPError):
self.connection.post("data", data={"test": "data"})
def test_options(self):
self.adapter.register_uri("OPTIONS", "/restconf/data", status_code=200, headers={"allow": "GET,PUT"})
response = self.connection.options("data", data={"test": "data"})
self.assertEqual(response, ["GET", "PUT"])
def test_options_error(self):
self.adapter.register_uri("OPTIONS", "/restconf/data", json=self.test_error, status_code=404)
with self.assertRaises(requests.HTTPError):
self.connection.options("data", data={"test": "data"})
if __name__ == "__main__":
import sys
sys.exit(unittest.main())
|
from .c_line_search import CLineSearch
from .c_line_search_bisect import CLineSearchBisect
|
from general_utils import path_checker
import configparser
class ConfigWriter:
"""Class for writing and updating config files.
Arguments:
config_fp {str} -- Path of config file to write/append to
Keyword Arguments:
config_details {dict} -- kwarg keywords will become sections; keys and values from
arguments (dictionaries) will populate the sections.
"""
def __init__(self,
config_fp: str,
**config_details):
# self.check_config_details(config_details)
self.config_fp = config_fp
self.config_details = config_details
@staticmethod
def check_config_details(config_details):
"""Method which checks the format of config details passed.
Arguments:
config_details {dict} -- dict of dicts containing config details
Returns:
is_valid {bool} -- Indication of whether the config details are in a valid format.
"""
# TODO: need to validate the dict of dict type
        raise NotImplementedError('This method has not been implemented yet.')
def write_config(self):
"""Method which writes or appends to the specified config_fp.
Arguments:
None
Returns:
out_string {string} -- Success message indicating save type and location
"""
exists = path_checker(path=self.config_fp,
check_type='file',
to_raise = False)
out_phrase = 'appended' if exists else 'written'
config = configparser.ConfigParser()
for key, value in self.config_details.items():
config[key] = value
with open(self.config_fp, 'a') as configfile:
config.write(configfile)
out_string = "Config file has been {} to '{}'".format(out_phrase,
self.config_fp)
return out_string
class ConfigReader:
"""Class for reading config files.
Arguments:
config_fp {str} -- Path of config file to read
"""
def __init__(self,
config_fp: str):
self.exists = path_checker(path=config_fp,
check_type='file',
to_raise = True)
self.config = configparser.ConfigParser()
self.config.read(config_fp)
def get_all(self) -> dict:
"""Returns all sections and options in the specified config file.
Returns:
all_sections {dict} -- All sections and options in the config file
e.g. {section1: {options1},
section2: {options2}}
"""
all_sections = {section: dict(self.config[section])
for section in self.config.sections()}
return all_sections
@staticmethod
def validate_section(section_dict: dict,
validation_options: set) -> bool:
"""Validates if all options expected are present in a section.
Arguments:
section_dict {dict} -- Dict of config options to validate
validation_options {set} -- Names of options to be present in the section_dict
Returns:
is_valid {bool} -- Whether all expected fields are present in the section_dict
"""
is_valid = set(section_dict.keys()) == validation_options
if is_valid:
return is_valid
else:
raise ValueError ("Config file is not valid. `section_dict` fields do not match\
`validation_options` specified.")
def get_section(self,
section_name: str,
validate: bool = False,
validation_options: set = None) -> dict:
"""Returns a section of the specified config file.
Arguments:
section_name {str} -- Name of section to return
validate {bool} -- Whether or not to validate the options
in the section (default: {False})
validation_options {set} -- Names of options to be present in the section_dict
Returns:
section_dict {dict} -- Dictionary of config options from the specified section
"""
section_dict = dict(self.config[section_name])
if not validate:
return section_dict
if not validation_options:
raise ValueError ("'{}' is an invalid input. Please specify a valid set of \
`expected_fields` for validation.".format(validation_options))
is_valid = self.validate_section(section_dict,
validation_options=validation_options)
if is_valid:
return section_dict
def get_option(self,
section_name: str,
option_name: str) -> str:
"""Returns an option from a section of the specified config file.
Arguments:
section_name {str} -- Name of section to return
validate {bool} -- Whether or not to validate the options
in the section (default: {False})
validation_options {set} -- Names of options to be present in the section_dict
Returns:
section_dict {dict} -- Dictionary of config options from the specified section
"""
return self.config.get(section_name, option_name)
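if __name__ == '__main__':
    # Hedged usage sketch (file name illustrative): write two sections,
    # then read one back with validation.
    writer = ConfigWriter('example.ini',
                          database={'host': 'localhost', 'port': '5432'},
                          logging={'level': 'INFO'})
    print(writer.write_config())
    reader = ConfigReader('example.ini')
    print(reader.get_section('database', validate=True,
                             validation_options={'host', 'port'}))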
|
# built-in
from math import isclose
# external
import hypothesis
import pytest
# project
import textdistance
ALGS = (
textdistance.bag,
textdistance.hamming,
textdistance.levenshtein,
textdistance.damerau_levenshtein,
textdistance.jaro,
textdistance.jaro_winkler,
textdistance.mlipns,
textdistance.lcsseq,
textdistance.lcsstr,
textdistance.ratcliff_obershelp,
textdistance.jaccard,
textdistance.sorensen,
textdistance.tversky,
textdistance.overlap,
textdistance.cosine,
textdistance.strcmp95,
textdistance.monge_elkan,
textdistance.mra,
textdistance.prefix,
textdistance.postfix,
textdistance.identity,
# textdistance.length,
# numpy-based:
# textdistance.gotoh,
textdistance.needleman_wunsch,
textdistance.smith_waterman,
textdistance.editex,
)
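# Each algorithm above exposes distance/similarity along with
# normalized_distance/normalized_similarity bounded to [0, 1],
# which is the contract the tests below exercise.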
@pytest.mark.parametrize('alg', ALGS)
@hypothesis.given(
left=hypothesis.strategies.text(),
right=hypothesis.strategies.text(),
)
def test_normalization_range(left, right, alg):
assert 0 <= alg.normalized_distance(left, right) <= 1
assert 0 <= alg.normalized_similarity(left, right) <= 1
@pytest.mark.parametrize('alg', ALGS)
@hypothesis.given(
left=hypothesis.strategies.text(),
right=hypothesis.strategies.text(),
)
def test_normalization_by_one(left, right, alg):
d = alg.normalized_distance(left, right)
s = alg.normalized_similarity(left, right)
assert isclose(s + d, 1)
@pytest.mark.parametrize('alg', ALGS)
@hypothesis.given(text=hypothesis.strategies.text())
def test_normalization_same(text, alg):
assert alg.normalized_distance(text, text) == 0
if alg is not textdistance.needleman_wunsch:
assert alg.distance(text, text) == 0
assert alg.normalized_similarity(text, text) == 1
@pytest.mark.parametrize('alg', ALGS)
@hypothesis.settings(deadline=None)
@hypothesis.given(
left=hypothesis.strategies.text(min_size=1),
right=hypothesis.strategies.text(min_size=1),
)
def test_normalization_monotonic(left, right, alg):
nd = alg.normalized_distance(left, right)
ns = alg.normalized_similarity(left, right)
d = alg.distance(left, right)
s = alg.similarity(left, right)
assert (nd < ns) == (d < s)
@pytest.mark.parametrize('alg', ALGS)
def test_no_common_chars(alg):
if alg is textdistance.editex:
return
assert alg.similarity('spam', 'qwer') == 0
@pytest.mark.parametrize('alg', ALGS)
def test_empty(alg):
assert alg.distance('', '') == 0
@pytest.mark.parametrize('alg', ALGS)
def test_unequal_distance(alg):
if alg.maximum('', 'qwertyui'):
assert alg.distance('', 'qwertyui') > 0
|
from pydantic import BaseModel, validator
class IRIncident(BaseModel):
title: str
description: str
location: int
user: int
@validator("title")
def title_length(cls, value):
        if len(value) < 10:
raise ValueError("title/minimum-10-characters")
return value
@validator("description")
def description_length(cls, value):
        if len(value) < 20:
raise ValueError("description/minimum-20-characters")
return value
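if __name__ == '__main__':
    # Hedged usage sketch: a payload that satisfies both length validators.
    incident = IRIncident(title='Server room flood',
                          description='Water damage on rack 3, level B2.',
                          location=12, user=7)
    print(incident)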
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Different data analysis steps."""
from abc import abstractmethod
from typing import Any, Dict, Optional, Tuple
import numpy as np
from qiskit_experiments.data_processing.data_action import DataAction
from qiskit_experiments.data_processing.exceptions import DataProcessorError
class IQPart(DataAction):
"""Abstract class for IQ data post-processing."""
def __init__(self, scale: Optional[float] = None, validate: bool = True):
"""
Args:
scale: Float with which to multiply the IQ data.
validate: If set to False the DataAction will not validate its input.
"""
self.scale = scale
super().__init__(validate)
@abstractmethod
    def _process(self, datum: np.ndarray) -> np.ndarray:
"""Defines how the IQ point will be processed.
Args:
datum: A 3D array of shots, qubits, and a complex IQ point as [real, imaginary].
Returns:
Processed IQ point.
"""
def _format_data(self, datum: Any) -> Any:
"""Check that the IQ data has the correct format and convert to numpy array.
Args:
datum: A single item of data which corresponds to single-shot IQ data. It should
have dimension three: shots, qubits, iq-point as [real, imaginary].
Returns:
datum as a numpy array.
Raises:
DataProcessorError: If the datum does not have the correct format.
"""
datum = np.asarray(datum, dtype=float)
if self._validate and len(datum.shape) != 3:
raise DataProcessorError(
f"Single-shot data given {self.__class__.__name__}"
f"must be a 3D array. Instead, a {len(datum.shape)}D "
f"array was given."
)
return datum
def __repr__(self):
"""String representation of the node."""
return f"{self.__class__.__name__}(validate: {self._validate}, scale: {self.scale})"
class ToReal(IQPart):
"""IQ data post-processing. Isolate the real part of the IQ data."""
    def _process(self, datum: np.ndarray) -> np.ndarray:
"""Take the real part of the IQ data.
Args:
datum: A 3D array of shots, qubits, and a complex IQ point as [real, imaginary].
Returns:
A 2D array of shots, qubits. Each entry is the real part of the given IQ data.
"""
if self.scale is None:
return datum[:, :, 0]
return datum[:, :, 0] * self.scale
class ToImag(IQPart):
"""IQ data post-processing. Isolate the imaginary part of the IQ data."""
    def _process(self, datum: np.ndarray) -> np.ndarray:
"""Take the imaginary part of the IQ data.
Args:
datum: A 3D array of shots, qubits, and a complex IQ point as [real, imaginary].
Returns:
A 2D array of shots, qubits. Each entry is the imaginary part of the given IQ data.
"""
if self.scale is None:
return datum[:, :, 1]
return datum[:, :, 1] * self.scale
class Probability(DataAction):
"""Count data post processing. This returns the probabilities of the outcome string
used to initialize an instance of Probability."""
def __init__(self, outcome: str, validate: bool = True):
"""Initialize a counts to probability data conversion.
Args:
outcome: The bitstring for which to compute the probability.
validate: If set to False the DataAction will not validate its input.
"""
self._outcome = outcome
super().__init__(validate)
def _format_data(self, datum: dict) -> dict:
"""
Checks that the given data has a counts format.
Args:
            datum: An instance of data that should be a dict with bit strings
                as keys and counts as values.
Returns:
The datum as given.
Raises:
DataProcessorError: if the data is not a counts dict.
"""
if self._validate:
if not isinstance(datum, dict):
raise DataProcessorError(
f"Given counts datum {datum} to "
f"{self.__class__.__name__} is not a valid count format."
)
for bit_str, count in datum.items():
if not isinstance(bit_str, str):
raise DataProcessorError(
f"Key {bit_str} is not a valid count key in{self.__class__.__name__}."
)
if not isinstance(count, (int, float)):
raise DataProcessorError(
f"Count {bit_str} is not a valid count value in {self.__class__.__name__}."
)
return datum
def _process(self, datum: Dict[str, Any]) -> Tuple[float, float]:
"""
Args:
datum: The data dictionary,taking the data under counts and
adding the corresponding probabilities.
Returns:
processed data: A dict with the populations.
"""
shots = sum(datum.values())
p_mean = datum.get(self._outcome, 0.0) / shots
p_var = p_mean * (1 - p_mean) / shots
return p_mean, p_var
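if __name__ == '__main__':
    # Hedged sketch: probability of outcome '1' from a raw counts dict.
    node = Probability('1')
    counts = node._format_data({'0': 40, '1': 60})
    print(node._process(counts))  # roughly (0.6, 0.0024)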
|
def summarize(filename):
    # samtools idxstats columns: ref name, ref length, # mapped, # unmapped
    total_reads = 0
    with open(filename) as f:
        for each_line in f:
            mapped, unmapped = map(int, each_line.split()[2:])
            total_reads += mapped + unmapped
    return total_reads
for i in range(1, 25):
    file_id = "%04d" % i
    filename_normal = "F://BRCA" + file_id + ".normal.idxstats"
    filename_tumor = "F://BRCA" + file_id + ".tumor.idxstats"
    print(file_id, "N:", summarize(filename_normal))
    print(file_id, "T:", summarize(filename_tumor))
|
################################################################################
# Authors: Brian Schott (Sir Alaran)
# Copyright: Brian Schott (Sir Alaran)
# Date: Oct 20 2009
# License:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
import os
import logging
import json
import mapio
import worldio
import backgroundio
log = logging.getLogger(__name__)
def write(fileName, tileMap, world, background):
if tileMap is not None:
mw = mapio.MapWriter(tileMap)
md = mw.writed()
else:
md = None
if world is not None:
ww = worldio.WorldWriter(world)
wd = ww.writed()
else:
wd = None
if background is not None:
bw = backgroundio.BackgroundWriter(background)
bd = bw.writed()
else:
bd = None
dictionary = {"background": bd, "tileMap": md, "blazeWorld": wd}
try:
f = open(fileName, "w")
except IOError as e:
log.error(e)
else:
json.dump(dictionary, f, indent=4)
f.close()
def read(fileName):
"""
@type fileName: str
@param fileName: the path of the file to load from
@rtype: (background, BlazeWorld, TileMap)
@return: the background (or None), the blaze world (or none), and the tile
map (or none)
This function will raise a LoadError on failure to signal the caller that
something went wrong.
"""
background = None
world = None
tilemap = None
try:
f = open(fileName, "r")
except IOError as e:
err = LoadError(str(e))
raise err
try:
dictionary = json.load(f)
except Exception as e:
err = LoadError(str(e))
raise err
if "background" in dictionary and dictionary["background"] is not None:
bgReader = backgroundio.BackgroundReader()
background = bgReader.readd(dictionary["background"])
if "tileMap" in dictionary and dictionary["tileMap"] is not None:
mapReader = mapio.MapReader()
try:
tilemap = mapReader.readd(dictionary["tileMap"])
except mapio.MapLoadException as e:
err = LoadError(str(e))
raise err
else:
raise LoadError("No tileMap specified in level")
if "blazeWorld" in dictionary and dictionary["blazeWorld"] is not None:
blazeReader = worldio.WorldReader()
world = blazeReader.readd(dictionary["blazeWorld"])
return background, world, tilemap
class LoadError(Exception):
def __init__(self, message):
self.msg = message
def __str__(self):
return self.msg
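if __name__ == '__main__':
    # Hedged sketch (path illustrative): load a level and report failures.
    try:
        background, world, tilemap = read('level1.blaze')
    except LoadError as e:
        print('Could not load level: %s' % e)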
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Defines Transform whose expansion is implemented elsewhere.
"""
from __future__ import absolute_import
from __future__ import print_function
import contextlib
import copy
import threading
from apache_beam import pvalue
from apache_beam.portability import common_urns
from apache_beam.portability.api import beam_expansion_api_pb2
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.runners import pipeline_context
from apache_beam.transforms import ptransform
# Protect against environments where grpc is not available.
# pylint: disable=wrong-import-order, wrong-import-position, ungrouped-imports
try:
import grpc
from apache_beam.portability.api import beam_expansion_api_pb2_grpc
except ImportError:
grpc = None
# pylint: enable=wrong-import-order, wrong-import-position, ungrouped-imports
class ExternalTransform(ptransform.PTransform):
_namespace_counter = 0
_namespace = threading.local()
_namespace.value = 'external'
_EXPANDED_TRANSFORM_UNIQUE_NAME = 'root'
_IMPULSE_PREFIX = 'impulse'
def __init__(self, urn, payload, endpoint):
if grpc is None and isinstance(endpoint, str):
raise NotImplementedError('Grpc required for external transforms.')
# TODO: Start an endpoint given an environment?
self._urn = urn
self._payload = payload
self._endpoint = endpoint
self._namespace = self._fresh_namespace()
def default_label(self):
return '%s(%s)' % (self.__class__.__name__, self._urn)
@classmethod
@contextlib.contextmanager
def outer_namespace(cls, namespace):
prev = cls._namespace.value
cls._namespace.value = namespace
yield
cls._namespace.value = prev
@classmethod
def _fresh_namespace(cls):
ExternalTransform._namespace_counter += 1
return '%s_%d' % (cls._namespace.value, cls._namespace_counter)
def expand(self, pvalueish):
if isinstance(pvalueish, pvalue.PBegin):
self._inputs = {}
elif isinstance(pvalueish, (list, tuple)):
            self._inputs = {str(ix): pcoll for ix, pcoll in enumerate(pvalueish)}
elif isinstance(pvalueish, dict):
self._inputs = pvalueish
else:
self._inputs = {'input': pvalueish}
pipeline = (
next(iter(self._inputs.values())).pipeline
if self._inputs
else pvalueish.pipeline)
context = pipeline_context.PipelineContext()
transform_proto = beam_runner_api_pb2.PTransform(
unique_name=self._EXPANDED_TRANSFORM_UNIQUE_NAME,
spec=beam_runner_api_pb2.FunctionSpec(
urn=self._urn, payload=self._payload))
for tag, pcoll in self._inputs.items():
transform_proto.inputs[tag] = context.pcollections.get_id(pcoll)
# Conversion to/from proto assumes producers.
# TODO: Possibly loosen this.
context.transforms.put_proto(
'%s_%s' % (self._IMPULSE_PREFIX, tag),
beam_runner_api_pb2.PTransform(
unique_name='%s_%s' % (self._IMPULSE_PREFIX, tag),
spec=beam_runner_api_pb2.FunctionSpec(
urn=common_urns.primitives.IMPULSE.urn),
outputs={'out': transform_proto.inputs[tag]}))
components = context.to_runner_api()
request = beam_expansion_api_pb2.ExpansionRequest(
components=components,
namespace=self._namespace,
transform=transform_proto)
if isinstance(self._endpoint, str):
with grpc.insecure_channel(self._endpoint) as channel:
response = beam_expansion_api_pb2_grpc.ExpansionServiceStub(
channel).Expand(request)
else:
response = self._endpoint.Expand(request)
if response.error:
raise RuntimeError(response.error)
self._expanded_components = response.components
self._expanded_transform = response.transform
result_context = pipeline_context.PipelineContext(response.components)
def fix_output(pcoll, tag):
pcoll.pipeline = pipeline
pcoll.tag = tag
return pcoll
self._outputs = {
tag: fix_output(result_context.pcollections.get_by_id(pcoll_id), tag)
for tag, pcoll_id in self._expanded_transform.outputs.items()
}
return self._output_to_pvalueish(self._outputs)
def _output_to_pvalueish(self, output_dict):
if len(output_dict) == 1:
return next(iter(output_dict.values()))
else:
return output_dict
def to_runner_api_transform(self, context, full_label):
pcoll_renames = {}
renamed_tag_seen = False
for tag, pcoll in self._inputs.items():
if tag not in self._expanded_transform.inputs:
if renamed_tag_seen:
raise RuntimeError(
'Ambiguity due to non-preserved tags: %s vs %s' % (
sorted(self._expanded_transform.inputs.keys()),
sorted(self._inputs.keys())))
else:
renamed_tag_seen = True
tag, = self._expanded_transform.inputs.keys()
pcoll_renames[self._expanded_transform.inputs[tag]] = (
context.pcollections.get_id(pcoll))
for tag, pcoll in self._outputs.items():
pcoll_renames[self._expanded_transform.outputs[tag]] = (
context.pcollections.get_id(pcoll))
def _equivalent(coder1, coder2):
return coder1 == coder2 or _normalize(coder1) == _normalize(coder2)
def _normalize(coder_proto):
normalized = copy.copy(coder_proto)
normalized.spec.environment_id = ''
# TODO(robertwb): Normalize components as well.
return normalized
for id, proto in self._expanded_components.coders.items():
if id.startswith(self._namespace):
context.coders.put_proto(id, proto)
elif id in context.coders:
if not _equivalent(context.coders._id_to_proto[id], proto):
raise RuntimeError('Re-used coder id: %s\n%s\n%s' % (
id, context.coders._id_to_proto[id], proto))
else:
context.coders.put_proto(id, proto)
for id, proto in self._expanded_components.windowing_strategies.items():
if id.startswith(self._namespace):
context.windowing_strategies.put_proto(id, proto)
for id, proto in self._expanded_components.environments.items():
if id.startswith(self._namespace):
context.environments.put_proto(id, proto)
for id, proto in self._expanded_components.pcollections.items():
if id not in pcoll_renames:
context.pcollections.put_proto(id, proto)
for id, proto in self._expanded_components.transforms.items():
if id.startswith(self._IMPULSE_PREFIX):
# Our fake inputs.
continue
assert id.startswith(self._namespace), (id, self._namespace)
new_proto = beam_runner_api_pb2.PTransform(
unique_name=full_label + proto.unique_name[
len(self._EXPANDED_TRANSFORM_UNIQUE_NAME):],
spec=proto.spec,
subtransforms=proto.subtransforms,
inputs={tag: pcoll_renames.get(pcoll, pcoll)
for tag, pcoll in proto.inputs.items()},
outputs={tag: pcoll_renames.get(pcoll, pcoll)
for tag, pcoll in proto.outputs.items()})
context.transforms.put_proto(id, new_proto)
return beam_runner_api_pb2.PTransform(
unique_name=full_label,
spec=self._expanded_transform.spec,
subtransforms=self._expanded_transform.subtransforms,
inputs=self._expanded_transform.inputs,
outputs={
tag: pcoll_renames.get(pcoll, pcoll)
for tag, pcoll in self._expanded_transform.outputs.items()})
def memoize(func):
cache = {}
def wrapper(*args):
if args not in cache:
cache[args] = func(*args)
return cache[args]
return wrapper
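if __name__ == '__main__':
    # Hedged sketch of the memoize helper above: each argument tuple is
    # computed once and served from the cache afterwards.
    @memoize
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)
    print(fib(30))  # 832040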
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2017 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import six  # assumed available; used below for integer-type checks
from ..analyzer import BaseAnalyzer
from ...expr.arithmetic import *
from ...expr.math import *
from ...expr.datetimes import *
from ...expr.strings import *
from ...expr.strings import Count as StrCount
from ...expr.element import *
from ...expr.reduction import *
from ...expr.collections import *
from ...expr.merge import *
from ...utils import output
from ..errors import CompileError
from ..utils import refresh_dynamic
from ... import types
from .... import compat
from ....utils import to_text
class Analyzer(BaseAnalyzer):
def _parents(self, expr):
return self._dag.successors(expr)
def visit_element_op(self, expr):
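        # e.g. x.between(1, 10) is rewritten to ((1 <= x) & (x <= 10)) so the
        # backend can compile it with plain comparisons.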
if isinstance(expr, Between):
if expr.inclusive:
sub = ((expr.left <= expr.input) & (expr.input.copy() <= expr.right))
else:
sub = ((expr.left < expr.input) & (expr.input.copy() < expr.right))
self._sub(expr, sub.rename(expr.name))
elif isinstance(expr, Cut):
sub = self._get_cut_sub_expr(expr)
self._sub(expr, sub)
else:
raise NotImplementedError
def visit_sample(self, expr):
if expr._parts is None:
            raise CompileError('ODPS SQL only supports sampling by specifying the `parts` arg')
idxes = [None, ] if expr._i is None else expr._i
condition = None
for idx in idxes:
inputs = [expr._parts]
if idx is not None:
new_val = idx.value + 1
inputs.append(Scalar(_value=new_val, _value_type=idx.value_type))
if expr._sampled_fields:
inputs.extend(expr._sampled_fields)
cond = MappedExpr(_inputs=inputs, _func='SAMPLE', _data_type=types.boolean)
if condition is None:
condition = cond
else:
condition |= cond
sub = FilterCollectionExpr(_input=expr.input, _predicate=condition,
_schema=expr.schema)
expr.input.optimize_banned = True
self._sub(expr, sub)
def _visit_pivot(self, expr):
sub = self._get_pivot_sub_expr(expr)
self._sub(expr, sub)
def _visit_pivot_table(self, expr):
sub = self._get_pivot_table_sub_expr(expr)
self._sub(expr, sub)
def visit_pivot(self, expr):
if isinstance(expr, PivotCollectionExpr):
self._visit_pivot(expr)
else:
self._visit_pivot_table(expr)
def visit_extract_kv(self, expr):
kv_delimiter = expr._kv_delimiter.value
item_delimiter = expr._item_delimiter.value
default = expr._default.value if expr._default else None
class KeyAgg(object):
def buffer(self):
return set()
def __call__(self, buf, val):
if not val:
return
def validate_kv(v):
parts = v.split(kv_delimiter)
if len(parts) != 2:
raise ValueError('Malformed KV pair: %s' % v)
return parts[0]
buf.update([validate_kv(item) for item in val.split(item_delimiter)])
def merge(self, buf, pbuffer):
buf.update(pbuffer)
def getvalue(self, buf):
return item_delimiter.join(sorted(buf))
columns_expr = expr.input.exclude(expr._intact).apply(KeyAgg, names=[c.name for c in expr._columns])
intact_names = [g.name for g in expr._intact]
intact_types = [g.dtype for g in expr._intact]
exprs = [expr]
def callback(result, new_expr):
expr = exprs[0]
names = list(intact_names)
tps = list(intact_types)
kv_slot_map = dict()
for col, key_str in compat.izip(result.columns, result[0]):
kv_slot_map[col.name] = dict()
for k in key_str.split(item_delimiter):
names.append('%s_%s' % (col.name, k))
tps.append(expr._column_type)
kv_slot_map[col.name][k] = len(names) - 1
kv_slot_names = list(kv_slot_map.keys())
type_adapter = None
if isinstance(expr._column_type, types.Float):
type_adapter = float
elif isinstance(expr._column_type, types.Integer):
type_adapter = int
@output(names, tps)
def mapper(row):
ret = [default, ] * len(names)
ret[:len(intact_names)] = [getattr(row, col) for col in intact_names]
for col in kv_slot_names:
kv_val = getattr(row, col)
if not kv_val:
continue
for kv_item in kv_val.split(item_delimiter):
k, v = kv_item.split(kv_delimiter)
if type_adapter:
v = type_adapter(v)
ret[kv_slot_map[col][k]] = v
return tuple(ret)
new_expr._schema = Schema.from_lists(names, tps)
extracted = expr.input.map_reduce(mapper)
self._sub(new_expr, extracted)
# trigger refresh of dynamic operations
refresh_dynamic(extracted, self._dag)
sub = CollectionExpr(_schema=DynamicSchema.from_lists(intact_names, intact_types),
_deps=[(columns_expr, callback)])
self._sub(expr, sub)
def visit_value_counts(self, expr):
self._sub(expr, self._get_value_counts_sub_expr(expr))
def _gen_mapped_expr(self, expr, inputs, func, name,
args=None, kwargs=None, multiple=False):
kwargs = dict(_inputs=inputs, _func=func, _name=name,
_func_args=args, _func_kwargs=kwargs,
_multiple=multiple)
if isinstance(expr, SequenceExpr):
kwargs['_data_type'] = expr.dtype
else:
kwargs['_value_type'] = expr.dtype
return MappedExpr(**kwargs)
def visit_binary_op(self, expr):
if not options.df.analyze:
raise NotImplementedError
if isinstance(expr, FloorDivide):
func = lambda l, r: l // r
# multiple False will pass *args instead of namedtuple
sub = self._gen_mapped_expr(expr, (expr.lhs, expr.rhs),
func, expr.name, multiple=False)
self._sub(expr, sub)
return
if isinstance(expr, Mod):
func = lambda l, r: l % r
sub = self._gen_mapped_expr(expr, (expr.lhs, expr.rhs),
func, expr.name, multiple=False)
self._sub(expr, sub)
return
if isinstance(expr, Add) and \
all(child.dtype == types.datetime for child in (expr.lhs, expr.rhs)):
return
elif isinstance(expr, (Add, Substract)):
if expr.lhs.dtype == types.datetime and expr.rhs.dtype == types.datetime:
pass
elif any(isinstance(child, MilliSecondScalar) for child in (expr.lhs, expr.rhs)):
pass
else:
return
if sys.version_info[:2] <= (2, 6):
def total_seconds(self):
return self.days * 86400.0 + self.seconds + self.microseconds * 1.0e-6
            else:
                # On Python >= 2.7 timedelta already provides total_seconds();
                # delegate to the built-in method.
                def total_seconds(self):
                    return self.total_seconds()
def func(l, r, method):
from datetime import datetime, timedelta
if not isinstance(l, datetime):
l = timedelta(milliseconds=l)
if not isinstance(r, datetime):
r = timedelta(milliseconds=r)
if method == '+':
res = l + r
else:
res = l - r
if isinstance(res, timedelta):
return int(total_seconds(res) * 1000)
return res
inputs = expr.lhs, expr.rhs, Scalar('+') if isinstance(expr, Add) else Scalar('-')
sub = self._gen_mapped_expr(expr, inputs, func, expr.name, multiple=False)
            self._sub(expr, sub)
            return
        raise NotImplementedError
def visit_unary_op(self, expr):
if not options.df.analyze:
raise NotImplementedError
if isinstance(expr, Invert) and isinstance(expr.input.dtype, types.Integer):
sub = expr.input.map(lambda x: ~x)
self._sub(expr, sub)
return
raise NotImplementedError
def visit_math(self, expr):
if not options.df.analyze:
raise NotImplementedError
if expr.dtype != types.decimal:
if isinstance(expr, Arccosh):
def func(x):
import numpy as np
return float(np.arccosh(x))
elif isinstance(expr, Arcsinh):
def func(x):
import numpy as np
return float(np.arcsinh(x))
elif isinstance(expr, Arctanh):
def func(x):
import numpy as np
return float(np.arctanh(x))
elif isinstance(expr, Radians):
def func(x):
import numpy as np
return float(np.radians(x))
elif isinstance(expr, Degrees):
def func(x):
import numpy as np
return float(np.degrees(x))
else:
raise NotImplementedError
sub = expr.input.map(func, expr.dtype)
self._sub(expr, sub)
return
raise NotImplementedError
def visit_datetime_op(self, expr):
if isinstance(expr, Strftime):
if not options.df.analyze:
raise NotImplementedError
date_format = expr.date_format
def func(x):
return x.strftime(date_format)
sub = expr.input.map(func, expr.dtype)
self._sub(expr, sub)
return
raise NotImplementedError
def visit_string_op(self, expr):
if isinstance(expr, Ljust):
rest = expr.width - expr.input.len()
sub = expr.input + \
(rest >= 0).ifelse(expr._fillchar.repeat(rest), '')
self._sub(expr, sub.rename(expr.name))
return
elif isinstance(expr, Rjust):
rest = expr.width - expr.input.len()
sub = (rest >= 0).ifelse(expr._fillchar.repeat(rest), '') + expr.input
self._sub(expr, sub.rename(expr.name))
return
elif isinstance(expr, Zfill):
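            # zfill pads on the left with '0', e.g. '42'.zfill(5) -> '00042'.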
fillchar = Scalar('0')
rest = expr.width - expr.input.len()
sub = (rest >= 0).ifelse(fillchar.repeat(rest), '') + expr.input
self._sub(expr, sub.rename(expr.name))
return
elif isinstance(expr, CatStr):
input = expr.input
others = expr._others if isinstance(expr._others, Iterable) else (expr._others, )
for other in others:
if expr.na_rep is not None:
for e in (input, ) + tuple(others):
self._sub(e, e.fillna(expr.na_rep), parents=(expr, ))
return
else:
if expr._sep is not None:
input = other.isnull().ifelse(input, input + expr._sep + other)
else:
input = other.isnull().ifelse(input, input + other)
self._sub(expr, input.rename(expr.name))
return
if not options.df.analyze:
raise NotImplementedError
func = None
if isinstance(expr, Contains) and expr.regex:
def func(x, pat, case, flags):
if x is None:
return False
flgs = 0
if not case:
flgs = re.I
if flags > 0:
flgs = flgs | flags
r = re.compile(pat, flgs)
return r.search(x) is not None
pat = expr._pat if not isinstance(expr._pat, StringScalar) or expr._pat._value is None \
else Scalar(re.escape(to_text(expr.pat)))
inputs = expr.input, pat, expr._case, expr._flags
sub = self._gen_mapped_expr(expr, inputs, func,
expr.name, multiple=False)
self._sub(expr, sub)
return
elif isinstance(expr, StrCount):
def func(x, pat, flags):
regex = re.compile(pat, flags=flags)
return len(regex.findall(x))
pat = expr._pat if not isinstance(expr._pat, StringScalar) or expr._pat._value is None \
else Scalar(re.escape(to_text(expr.pat)))
inputs = expr.input, pat, expr._flags
sub = self._gen_mapped_expr(expr, inputs, func,
expr.name, multiple=False)
self._sub(expr, sub)
return
elif isinstance(expr, Find) and expr.end is not None:
start = expr.start
end = expr.end
substr = expr.sub
def func(x):
return x.find(substr, start, end)
elif isinstance(expr, RFind):
start = expr.start
end = expr.end
substr = expr.sub
def func(x):
return x.rfind(substr, start, end)
elif isinstance(expr, Extract):
def func(x, pat, flags, group):
regex = re.compile(pat, flags=flags)
m = regex.search(x)
if m:
if group is None:
return m.group()
return m.group(group)
pat = expr._pat if not isinstance(expr._pat, StringScalar) or expr._pat._value is None \
else Scalar(re.escape(to_text(expr.pat)))
inputs = expr.input, pat, expr._flags, expr._group
sub = self._gen_mapped_expr(expr, inputs, func,
expr.name, multiple=False)
self._sub(expr, sub)
return
elif isinstance(expr, Replace):
use_regex = [expr.regex]
def func(x, pat, repl, n, case, flags):
use_re = use_regex[0] and (not case or len(pat) > 1 or flags)
if use_re:
if not case:
flags |= re.IGNORECASE
regex = re.compile(pat, flags=flags)
n = n if n >= 0 else 0
return regex.sub(repl, x, count=n)
else:
return x.replace(pat, repl, n)
            pat = expr._pat if not isinstance(expr._pat, StringScalar) or expr._pat._value is None \
                else Scalar(re.escape(to_text(expr.pat)))
inputs = expr.input, pat, expr._repl, expr._n, \
expr._case, expr._flags
sub = self._gen_mapped_expr(expr, inputs, func,
expr.name, multiple=False)
self._sub(expr, sub)
return
elif isinstance(expr, (Lstrip, Strip, Rstrip)) and expr.to_strip != ' ':
to_strip = expr.to_strip
if isinstance(expr, Lstrip):
def func(x):
return x.lstrip(to_strip)
elif isinstance(expr, Strip):
def func(x):
return x.strip(to_strip)
elif isinstance(expr, Rstrip):
def func(x):
return x.rstrip(to_strip)
elif isinstance(expr, Pad):
side = expr.side
fillchar = expr.fillchar
width = expr.width
if side == 'left':
func = lambda x: x.rjust(width, fillchar)
elif side == 'right':
func = lambda x: x.ljust(width, fillchar)
elif side == 'both':
func = lambda x: x.center(width, fillchar)
else:
raise NotImplementedError
elif isinstance(expr, Slice):
start, end, step = expr.start, expr.end, expr.step
if end is None and step is None:
raise NotImplementedError
if isinstance(start, six.integer_types) and \
isinstance(end, six.integer_types) and step is None:
if start >= 0 and end >= 0:
raise NotImplementedError
has_start = start is not None
has_end = end is not None
has_step = step is not None
def func(x, *args):
idx = 0
s, e, t = None, None, None
for i in range(3):
if i == 0 and has_start:
s = args[idx]
idx += 1
if i == 1 and has_end:
e = args[idx]
idx += 1
if i == 2 and has_step:
t = args[idx]
idx += 1
return x[s: e: t]
inputs = expr.input, expr._start, expr._end, expr._step
sub = self._gen_mapped_expr(expr, tuple(i for i in inputs if i is not None),
func, expr.name, multiple=False)
self._sub(expr, sub)
return
elif isinstance(expr, Swapcase):
func = lambda x: x.swapcase()
elif isinstance(expr, Title):
func = lambda x: x.title()
elif isinstance(expr, Strptime):
date_format = expr.date_format
def func(x):
from datetime import datetime
return datetime.strptime(x, date_format)
else:
if isinstance(expr, Isalnum):
func = lambda x: x.isalnum()
elif isinstance(expr, Isalpha):
func = lambda x: x.isalpha()
elif isinstance(expr, Isdigit):
func = lambda x: x.isdigit()
elif isinstance(expr, Isspace):
func = lambda x: x.isspace()
elif isinstance(expr, Islower):
func = lambda x: x.islower()
elif isinstance(expr, Isupper):
func = lambda x: x.isupper()
elif isinstance(expr, Istitle):
func = lambda x: x.istitle()
elif isinstance(expr, (Isnumeric, Isdecimal)):
def u_safe(s):
    try:
        # Python 2 strings need decoding to unicode for isnumeric/isdecimal
        return unicode(s, "unicode_escape")
    except Exception:
        return s
if isinstance(expr, Isnumeric):
func = lambda x: u_safe(x).isnumeric()
else:
func = lambda x: u_safe(x).isdecimal()
if func is not None:
sub = expr.input.map(func, expr.dtype)
self._sub(expr, sub)
return
raise NotImplementedError
def visit_reduction(self, expr):
if isinstance(expr, (Var, GroupedVar)):
std = expr.input.std(ddof=expr._ddof)
if isinstance(expr, GroupedVar):
std = std.to_grouped_reduction(expr._grouped)
sub = (std ** 2).rename(expr.name)
self._sub(expr, sub)
return
elif isinstance(expr, (Moment, GroupedMoment)):
order = expr._order
center = expr._center
sub = self._get_moment_sub_expr(expr, expr.input, order, center)
sub = sub.rename(expr.name)
self._sub(expr, sub)
return
elif isinstance(expr, (Skewness, GroupedSkewness)):
std = expr.input.std(ddof=1)
if isinstance(expr, GroupedSequenceReduction):
std = std.to_grouped_reduction(expr._grouped)
cnt = expr.input.count()
if isinstance(expr, GroupedSequenceReduction):
cnt = cnt.to_grouped_reduction(expr._grouped)
sub = self._get_moment_sub_expr(expr, expr.input, 3, True) / (std ** 3)
sub *= (cnt ** 2) / (cnt - 1) / (cnt - 2)
sub = sub.rename(expr.name)
self._sub(expr, sub)
return
elif isinstance(expr, (Kurtosis, GroupedKurtosis)):
std = expr.input.std(ddof=0)
if isinstance(expr, GroupedSequenceReduction):
std = std.to_grouped_reduction(expr._grouped)
cnt = expr.input.count()
if isinstance(expr, GroupedSequenceReduction):
cnt = cnt.to_grouped_reduction(expr._grouped)
m4 = self._get_moment_sub_expr(expr, expr.input, 4, True)
sub = 1.0 / (cnt - 2) / (cnt - 3) * ((cnt * cnt - 1) * m4 / (std ** 4) - 3 * (cnt - 1) ** 2)
sub = sub.rename(expr.name)
    self._sub(expr, sub)
    return
raise NotImplementedError
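# --- Editor's sketch (not part of the analyzer above): the sample-skewness
# algebra that visit_reduction builds symbolically, written as plain Python so
# the formula G1 = (m3 / s**3) * n**2 / ((n - 1) * (n - 2)) can be checked
# numerically; m3 is the third central moment (ddof=0) and s the ddof=1 std.
def sample_skewness(xs):
    n = len(xs)
    mean = sum(xs) / float(n)
    m3 = sum((x - mean) ** 3 for x in xs) / float(n)
    s = (sum((x - mean) ** 2 for x in xs) / float(n - 1)) ** 0.5
    return (m3 / s ** 3) * n ** 2 / ((n - 1) * (n - 2))
# e.g. sample_skewness([1, 2, 3, 10]) should agree with
# scipy.stats.skew([1, 2, 3, 10], bias=False)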
|
"""
Module: 'tpcalib' on esp32_LoBo
MCU: (sysname='esp32_LoBo', nodename='esp32_LoBo', release='3.2.24', version='ESP32_LoBo_v3.2.24 on 2018-09-06', machine='ESP32 board with ESP32')
Stubber: 1.0.0 - updated
"""
from typing import Any
class Calibrate:
""""""
def calibError(self, *args) -> Any:
pass
def calibrate(self, *args) -> Any:
pass
def drawCrossHair(self, *args) -> Any:
pass
def readCoordinates(self, *args) -> Any:
pass
def tpcalib(self, *args) -> Any:
pass
display = None
machine = None
math = None
utime = None
|
#!/usr/bin/env python3
# Copyright 2009-2017 BHG http://bw.org/
# An instance of a class is called an object. It's created by calling the
# class itself as if it were a function.
# There's a special method named __init__, with double underscores (two
# underscore characters) before and after the word init. That special name
# marks a class function which operates as an initializer, or a constructor.
class Animal:
    def __init__(self, type, name, sound):  # The first argument is always self
        # The others become object variables: they don't exist until the
        # object is constructed, so they belong to each instance rather than
        # to the class itself.
self._type = type
self._name = name
self._sound = sound
        # Notice that the object variables all have an underscore at the
        # beginning of the name. This is traditional, and it discourages
        # users of the object from accessing these variables directly.
        # Instead, you provide accessors, also called getters.
def type(self):
return self._type
def name(self):
return self._name
def sound(self):
return self._sound
def print_animal(o):
if not isinstance(o, Animal):
raise TypeError('print_animal(): requires an Animal')
print('The {} is named "{}" and says "{}".'.format(o.type(), o.name(), o.sound()))
# The object is created by using the class name as if it were a function
# name; this calls the constructor.
def main():
a0 = Animal('kitten', 'fluffy', 'rwar')
a1 = Animal('duck', 'donald', 'quack')
print_animal(a0)
print_animal(a1)
print_animal(Animal('velociraptor', 'veronica', 'hello'))
if __name__ == '__main__': main()
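# --- Editor's sketch (not part of the original lesson): the same idea using
# Python's @property decorator, the more idiomatic way to expose read-only
# attributes than explicit getter methods.
class AnimalWithProperties:
    def __init__(self, type, name, sound):
        self._type = type
        self._name = name
        self._sound = sound
    @property
    def type(self):
        return self._type
    @property
    def name(self):
        return self._name
    @property
    def sound(self):
        return self._sound
# Usage: attributes read like plain fields, e.g.
#   a = AnimalWithProperties('kitten', 'fluffy', 'rwar')
#   print(a.name)  # -> 'fluffy'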
|
import psycopg2
import math
import sys
from subprocess import call, Popen
import pandas as pd
import numpy as np
from mpmath import mp
import matplotlib.pyplot as plt
import matplotlib
from geopy import Point
from geopy.distance import distance, VincentyDistance
from sqlalchemy import create_engine
import itertools as it
df = pd.read_csv('katrina_test_3.dat.gz', compression = 'gzip', sep=',', error_bad_lines=False, header=None, skiprows = 5)
number_of_profile = 17
#define constants
# f = 2*(7.292*10**-5)*math.sin(np.deg2rad(lat_float))
rho = 1.15
e = 2.7182
w = 0
zeta = 0
n = .85
X1 = 288.5
X2 = 75
A = 0.1
pn = 101200
Vtrans = 5
# print x.loc[:, (df != 0)]
forecast_time = df.iloc[:,5:6]
date = df.iloc[:,2:3]
pc = df.fillna(0).iloc[:,9:10]
Vmax = df.fillna(0).iloc[:,8:9]
Rmax = df.fillna(0).iloc[:,19:20]
roci = df.fillna(0).iloc[:,18:19]
lat = df.fillna(0).iloc[:,6:7].replace(to_replace='N', value='',
regex=True).replace(to_replace='S', value='', regex=True)
lon = df.fillna(0).iloc[:,7:8].replace(to_replace='W', value='',
regex=True).replace(to_replace='E', value='', regex=True)
transition = df.fillna(0).iloc[:,26:27]
forecast_hr_slice = []
datetime_slice = []
pc_slice = []
vmax_slice = []
rmax_slice = []
roci_slice = []
roci_slice_miles = []
lat_slice = []
lon_slice = []
b_slice = []
transition_slice = []
for length in range(1,len(df), 1):
datetime_slice.append((np.float64(date.values[length].item())))
for length in range(1,len(df), 1):
forecast_hr_slice.append((np.float64(forecast_time.values[length].item())))
for length in range(1,len(df), 1):
pc_slice.append((np.float64(pc.values[length]).item()) * 100)
for length in range(1,len(df), 1):
vmax_slice.append((np.float64(Vmax.values[length]).item()) * .514)
for length in range(1,len(df), 1):
rmax_slice.append((np.float64(Rmax.values[length]).item()) * 1.852)
for length in range(1,len(df), 1):
roci_slice.append((np.float64(roci.values[length]).item()) * 1.852)
for length in range(1,len(df), 1):
roci_slice_miles.append((np.float64(roci.values[length]).item()) * 1.15)
for length in range(1, len(df), 1):
lat_slice.append((np.float64(lat.values[length]).item()) / 10)
#longitude for the united states
# #if else statement for west vs east and north vs south
for length in range(1,len(df), 1):
lon_slice.append((-1 * (np.float64(lon.values[length]).item()) / 10))
for length in range(1,len(df),1):
transition_slice.append((transition.values[length].item()))
# A = int(Rmax_float)**int(B)
hurricane_df = pd.DataFrame({'forecast_hr' : forecast_hr_slice,'datetime': datetime_slice, 'rmax': rmax_slice, 'vmax': vmax_slice, 'roci': roci_slice, 'roci_miles': roci_slice_miles, 'lat': lat_slice, 'lon' : lon_slice, 'pc' :pc_slice, 'vtrans' : transition_slice})
hurricane_df_clean = hurricane_df[hurricane_df.rmax != 0]
datetime_clean = []
rmax_clean = []
vmax_clean = []
roci_clean = []
roci_clean_miles = []
lat_clean = []
lon_clean = []
roci_clean = []
b_clean = []
vtrans_clean = []
for length in range(1,len(hurricane_df_clean),4):
datetime_clean.append(int(hurricane_df_clean['datetime'].values[length].item()))
for length in range(1,len(hurricane_df_clean),4):
rmax_clean.append(hurricane_df_clean['rmax'].values[length].item())
for length in range(1,len(hurricane_df_clean),4):
vmax_clean.append(hurricane_df_clean['vmax'].values[length].item())
for length in range(1,len(hurricane_df_clean),4):
roci_clean.append(hurricane_df_clean['roci'].values[length].item())
for length in range(1,len(hurricane_df_clean),4):
roci_clean_miles.append(hurricane_df_clean['roci_miles'].values[length].item())
for length in range(1,len(hurricane_df_clean),4):
lat_clean.append(hurricane_df_clean['lat'].values[length].item())
for length in range(1,len(hurricane_df_clean),4):
lon_clean.append(hurricane_df_clean['lon'].values[length].item())
for length in range(1,len(hurricane_df_clean),4):
vtrans_clean.append(int(hurricane_df_clean['vtrans'].values[length]) *.514)
for length in range(1,len(hurricane_df_clean),4):
b_clean.append(((((Vmax.values[length]).item())**2)*rho*e)/(pn - (np.float64(hurricane_df_clean['pc'].values[length]).item())))
windspeed = []
roci_new = []
lat_new = []
lon_new = []
bearing_angle = []
distance_from = []
latitude_halfi = []
longitude_halfi = []
latitude_halfii = []
longitude_halfii = []
motion_vector = []
datetime_new = []
motion_vector_halfi= []
motion_vector_halfii = []
def windprofile(vmax, rmax, b, roci_, datetime):
for r in xrange(1, int(roci_)):
windspeed.append(float(vmax*((((rmax * .001)/(r * .001)**1.5)*mp.exp(1-(((rmax * .001)/(r * .001)**1.5)))))))
roci_new.append(r)
datetime_new.append(str(datetime))
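# Editor's note (hedged): the expression above appears to follow a
# Holland/modified-Rankine style gradient-wind profile,
#     V(r) = Vmax * A * exp(1 - A),  A = (Rmax / 1000) / ((r / 1000) ** 1.5)
# so V peaks near the radius of maximum winds and decays outward; the exact
# exponent placement is kept exactly as in the original source.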
def windvector_halfi(roci_, vtrans):
for r in xrange(1,len(roci_)):
for angle in range(0, 180):
motion_vector_halfi.append(float(windspeed[r]+(float((vtrans*abs(math.sin(np.deg2rad(angle))))))))
def windvector_halfii(roci_, vtrans):
for r in xrange(1,len(roci_)):
for angle in range(180, 361):
motion_vector_halfii.append(float((windspeed[r] - float((2*vtrans*abs(math.sin(np.deg2rad(angle))))))))
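# Editor's note (hedged): the two half-plane vectors above superimpose the
# storm's forward motion on the symmetric wind field -- winds are boosted on
# one side of the track (0-180 degrees) and reduced on the other
# (180-360 degrees), which is why vtrans is added in windvector_halfi and
# subtracted in windvector_halfii.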
def hurricane_halfi(roci_, lat, lon):
for r in xrange(1,len(roci_)):
for bearing in range(0, 180):
bearing_angle.append(bearing)
distance_from.append(r)
destination = VincentyDistance(kilometers=r).destination(Point(lat, lon), bearing)
lat2,lon2 = destination.latitude, destination.longitude
latitude_halfi.append(lat2)
longitude_halfi.append(lon2)
def hurricane_halfii(roci_, lat, lon):
for r in xrange(1,len(roci_)):
for bearing in range(180, 361):
bearing_angle.append(bearing)
distance_from.append(r)
destination = VincentyDistance(kilometers=r).destination(Point(lat, lon), bearing)
lat2,lon2 = destination.latitude, destination.longitude
latitude_halfii.append(lat2)
longitude_halfii.append(lon2)
windprofile(vmax_clean[number_of_profile], rmax_clean[number_of_profile], b_clean[number_of_profile],
roci_clean[number_of_profile], datetime_clean[number_of_profile])
windvector_halfi(roci_new, vtrans_clean[number_of_profile])
hurricane_halfi(roci_new, lat_clean[number_of_profile], lon_clean[number_of_profile])
windvector_halfii(roci_new, vtrans_clean[number_of_profile])
hurricane_halfii(roci_new, lat_clean[number_of_profile], lon_clean[number_of_profile])
halfi_df = pd.DataFrame({'lat' : latitude_halfi, 'lon' : longitude_halfi, 'translation' : motion_vector_halfi})
halfii_df = pd.DataFrame({'lat' : latitude_halfii, 'lon' : longitude_halfii, 'translation' : motion_vector_halfii })
conn_string = "dbname='hamlethurricane' user=postgres port='5432' host='127.0.0.1' password='password'"
try:
conn = psycopg2.connect(conn_string)
except Exception as e:
print str(e)
sys.exit()
engine = create_engine('postgresql://postgres:password@127.0.0.1/hamlethurricane')
halfi_tb = 'katrina_halfi_{}'.format(number_of_profile)
halfii_tb = 'katrina_halfii_{}'.format(number_of_profile)
halfi_df.to_sql(halfi_tb, con = engine)
halfii_df.to_sql(halfii_tb, con = engine)
conn.commit()
profile_cur = conn.cursor()
halfi_sql = """create or replace view katrina_halfi_pnt_{} as select *, ST_SetSRID(ST_MakePoint("lon", "lat"), 4326) as geom from katrina_halfi_{}""".format(number_of_profile, number_of_profile)
halfii_sql = """create or replace view katrina_halfii_pnt_{} as select *, ST_SetSRID(ST_MakePoint("lon", "lat"), 4326) as geom from katrina_halfii_{}""".format(number_of_profile, number_of_profile)
profile_cur.execute(halfi_sql)
profile_cur.execute(halfii_sql)
conn.commit()
merge_sql = """create or replace view katrina_merge_{} as
select * from katrina_halfi_pnt_{}
union all
select * from katrina_halfii_pnt_{}""".format(number_of_profile, number_of_profile, number_of_profile)
profile_cur.execute(merge_sql)
conn.commit()
conn.close()
# #------------------------------------------------------#
# print len(roci_new), len(motion_vector_test)
# plot_dataframe = pd.DataFrame({'Radius (km)' : roci_new, 'Windspeed' : motion_vector_test})
# plot_dataframe = plot_dataframe.astype(float)
# plt.figure()
# wind_speed_plot = plot_dataframe.plot(x ='Radius (km)', y='Windspeed', label = 'Vg')
# plt.xlabel('Radius (km)')
# plt.ylabel('Radial Windspeed (m/s)')
# plt.title('0 Degree Vg')
# # plt.plot(x='Radius', y = 'Windspeed')
# plt.show()
# import matplotlib.pyplot as plt
# print "the values that went into this profile were" + ' ' + str(pc_float) + ' ' + str(Vmax_float) + ' ' + str(Rmax_float)+ ' ' + str(roci_float) + ' ' + str(Vtrans_float) + ' ' + str(theta_float)
|
"""
Example of classification
-------------------------
Figure 4.5.
An example of a simple classification problem between two Gaussian
distributions. Given a value of x, we need to assign that measurement to one
of the two distributions (background vs. source). The cut at xc = 120 leads
to very few Type II errors (i.e., false negatives: points from the distribution
hS with x < xc being classified as background), but this comes at the cost of
a significant number of Type I errors (i.e., false positives: points from the
distribution :math:`h_B` with x > xc being classified as sources).
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from scipy.stats import norm
from matplotlib import pyplot as plt
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)
#------------------------------------------------------------
# Generate and draw the curves
x = np.linspace(50, 200, 1000)
p1 = 0.9 * norm(100, 10).pdf(x)
p2 = 0.1 * norm(150, 12).pdf(x)
fig, ax = plt.subplots(figsize=(5, 3.75))
ax.fill(x, p1, ec='k', fc='#AAAAAA', alpha=0.5)
ax.fill(x, p2, '-k', fc='#AAAAAA', alpha=0.5)
ax.plot([120, 120], [0.0, 0.04], '--k')
ax.text(100, 0.036, r'$h_B(x)$', ha='center', va='bottom')
ax.text(150, 0.0035, r'$h_S(x)$', ha='center', va='bottom')
ax.text(122, 0.039, r'$x_c=120$', ha='left', va='top')
ax.text(125, 0.01, r'$(x > x_c\ {\rm classified\ as\ sources})$')
ax.set_xlim(50, 200)
ax.set_ylim(0, 0.04)
ax.set_xlabel('$x$')
ax.set_ylabel('$p(x)$')
plt.show()
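#------------------------------------------------------------
# Editor's sketch (not part of the published figure script): quantify the
# claim in the docstring. With the same weighted Gaussians, the Type I rate
# is the background mass above x_c and the Type II rate the source mass
# below x_c; the 0.9/0.1 priors make Type I errors dominate.
x_c = 120
type_I = 0.9 * norm(100, 10).sf(x_c)     # background with x > x_c (~0.020)
type_II = 0.1 * norm(150, 12).cdf(x_c)   # sources with x < x_c   (~0.0006)
print("Type I: %.4f   Type II: %.4f" % (type_I, type_II))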
|
# zipbackup copies an entire folder and its contents into a ZIP file whose
# name is incremented
import zipfile
import os
def backupzip(folder):
    """Back up every file in folder into an incrementally named zip.
    :param folder: path of the folder to back up
    :return: None
    """
    folder = os.path.abspath(folder)  # make sure folder is an absolute path
    # Determine the archive name based on the files that already exist
    number = 1
    while True:
        zip_filename = 'E:\\Backup\\Backup' + '_' + str(number) + '.zip'
        if not os.path.exists(zip_filename):
            break
        number += 1
    # Create the ZIP file (a separate name avoids shadowing the function)
    backup_file = zipfile.ZipFile(zip_filename, 'w')
    # Walk the whole file tree and compress the folder's files
    for foldername, subfolders, filenames in os.walk(folder):
        # add the current folder to the ZIP file
        backup_file.write(foldername)
        for filename in filenames:
            new_base = os.path.basename(folder) + '_'
            if filename.startswith(new_base) and filename.endswith('.zip'):
                continue  # skip previous backup archives
            backup_file.write(os.path.join(foldername, filename))
    backup_file.close()
    print('Done!')
backupzip('.')  # example argument: back up the current working directory
|
from django.conf.urls import url
from . import views
urlpatterns = [
    url(r'^objednavka/pridat/$', views.PegastudioOrderCreate.as_view(), name='objedn_list'),
]
|
import json
import logging
import multiprocessing
import unittest
from datetime import timedelta, datetime
from multiprocessing import Process
from unittest import mock
from unittest.mock import call
import pika
import pika.exceptions
from freezegun import freeze_time
from parameterized import parameterized
from src.abstract.publisher_subscriber import \
QueuingPublisherSubscriberComponent
from src.alerter.alerter_starters import (start_chainlink_node_alerter,
start_chainlink_contract_alerter)
from src.alerter.alerters.contract.chainlink import ChainlinkContractAlerter
from src.alerter.alerters.node.chainlink import ChainlinkNodeAlerter
from src.alerter.alerts.internal_alerts import ComponentResetAlert
from src.alerter.managers.chainlink import ChainlinkAlertersManager
from src.configs.alerts.contract.chainlink import (
ChainlinkContractAlertsConfig)
from src.configs.alerts.node.chainlink import ChainlinkNodeAlertsConfig
from src.configs.factory.node.chainlink_alerts import (
ChainlinkContractAlertsConfigsFactory, ChainlinkNodeAlertsConfigsFactory)
from src.message_broker.rabbitmq import RabbitMQApi
from src.utils import env
from src.utils.constants.names import (CHAINLINK_NODE_ALERTER_NAME,
CHAINLINK_CONTRACT_ALERTER_NAME)
from src.utils.constants.rabbitmq import (
HEALTH_CHECK_EXCHANGE, CONFIG_EXCHANGE, ALERT_EXCHANGE,
CL_ALERTERS_MAN_HB_QUEUE_NAME, CL_ALERTERS_MAN_CONFIGS_QUEUE_NAME,
PING_ROUTING_KEY, CL_ALERTS_CONFIGS_ROUTING_KEY,
HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY, CL_NODE_ALERT_ROUTING_KEY,
CL_CONTRACT_ALERT_ROUTING_KEY)
from src.utils.exceptions import PANICException, MessageWasNotDeliveredException
from test.utils.utils import (
delete_exchange_if_exists, delete_queue_if_exists, disconnect_from_rabbit,
connect_to_rabbit
)
from test.utils.utils import infinite_fn
class TestChainlinkAlertersManager(unittest.TestCase):
def setUp(self) -> None:
# Some dummy objects
self.dummy_logger = logging.getLogger('Dummy')
self.dummy_logger.disabled = True
self.manager_name = 'test_chainlink_alerters_manager'
self.test_queue_name = 'Test Queue'
self.test_data_str = 'test data'
self.test_heartbeat = {
'component_name': self.manager_name,
'is_alive': True,
'timestamp': datetime(2012, 1, 1).timestamp(),
}
self.dummy_process_1 = Process(target=infinite_fn, args=())
self.dummy_process_1.daemon = True
self.dummy_process_2 = Process(target=infinite_fn, args=())
self.dummy_process_2.daemon = True
self.test_exception = PANICException('test_exception', 1)
# RabbitMQ initialisation
self.connection_check_time_interval = timedelta(seconds=0)
self.rabbitmq = RabbitMQApi(
self.dummy_logger, env.RABBIT_IP,
connection_check_time_interval=self.connection_check_time_interval)
# Test routing key and parent_id
self.routing_key_1 = 'chains.chainlink.matic.alerts_config'
self.parent_id_1 = "chain_name_d21d780d-92cb-42de-a7c1-11b751654510"
self.config_1 = {
"1": {
"name": "head_tracker_current_head",
"parent_id": self.parent_id_1,
},
"2": {
"name": "head_tracker_heads_received_total",
"parent_id": self.parent_id_1,
},
"3": {
"name": "max_unconfirmed_blocks",
"parent_id": self.parent_id_1,
},
"4": {
"name": "process_start_time_seconds",
"parent_id": self.parent_id_1,
},
"5": {
"name": "tx_manager_gas_bump_exceeds_limit_total",
"parent_id": self.parent_id_1,
},
"6": {
"name": "unconfirmed_transactions",
"parent_id": self.parent_id_1,
},
"7": {
"name": "run_status_update_total",
"parent_id": self.parent_id_1,
},
"8": {
"name": "eth_balance_amount",
"parent_id": self.parent_id_1,
},
"9": {
"name": "eth_balance_amount_increase",
"parent_id": self.parent_id_1,
},
"10": {
"name": "node_is_down",
"parent_id": self.parent_id_1,
},
"11": {
"name": "price_feed_not_observed",
"parent_id": self.parent_id_1,
},
"12": {
"name": "price_feed_deviation",
"parent_id": self.parent_id_1,
},
"13": {
"name": "consensus_failure",
"parent_id": self.parent_id_1,
},
"14": {
"name": "error_retrieving_chainlink_contract_data",
"parent_id": self.parent_id_1,
},
}
self.test_manager = ChainlinkAlertersManager(
self.dummy_logger, self.manager_name, self.rabbitmq)
self.alerter_process_dict_example = {
CHAINLINK_NODE_ALERTER_NAME: self.dummy_process_1,
CHAINLINK_CONTRACT_ALERTER_NAME: self.dummy_process_2,
}
self.node_alerts_config_factory = ChainlinkNodeAlertsConfigsFactory()
self.contract_alerts_config_factory = \
ChainlinkContractAlertsConfigsFactory()
self.configs_processor_helper_example = {
CHAINLINK_NODE_ALERTER_NAME: {
'alerterClass': ChainlinkNodeAlerter,
'configsClass': ChainlinkNodeAlertsConfig,
'factory': self.node_alerts_config_factory,
'routing_key': CL_NODE_ALERT_ROUTING_KEY,
'starter': start_chainlink_node_alerter,
},
CHAINLINK_CONTRACT_ALERTER_NAME: {
'alerterClass': ChainlinkContractAlerter,
'configsClass': ChainlinkContractAlertsConfig,
'factory': self.contract_alerts_config_factory,
'routing_key': CL_CONTRACT_ALERT_ROUTING_KEY,
'starter': start_chainlink_contract_alerter,
},
}
def tearDown(self) -> None:
# Delete any queues and exchanges which are common across many tests
connect_to_rabbit(self.test_manager.rabbitmq)
delete_queue_if_exists(
self.test_manager.rabbitmq, self.test_queue_name)
delete_queue_if_exists(self.test_manager.rabbitmq,
CL_ALERTERS_MAN_HB_QUEUE_NAME)
delete_queue_if_exists(self.test_manager.rabbitmq,
CL_ALERTERS_MAN_CONFIGS_QUEUE_NAME)
delete_exchange_if_exists(self.test_manager.rabbitmq,
HEALTH_CHECK_EXCHANGE)
delete_exchange_if_exists(self.test_manager.rabbitmq, ALERT_EXCHANGE)
delete_exchange_if_exists(self.test_manager.rabbitmq, CONFIG_EXCHANGE)
disconnect_from_rabbit(self.test_manager.rabbitmq)
self.dummy_logger = None
self.dummy_process_1 = None
self.dummy_process_2 = None
self.connection_check_time_interval = None
self.rabbitmq = None
self.test_exception = None
self.node_alerts_config_factory = None
self.contract_alerts_config_factory = None
self.alerter_process_dict_example = None
self.test_manager = None
self.configs_processor_helper_example = None
def test_str_returns_manager_name(self) -> None:
self.assertEqual(self.manager_name, str(self.test_manager))
def test_name_returns_manager_name(self) -> None:
self.assertEqual(self.manager_name, self.test_manager.name)
def test_alerter_process_dict_returns_alerter_process_dict(self) -> None:
self.test_manager._alerter_process_dict = \
self.alerter_process_dict_example
self.assertEqual(self.alerter_process_dict_example,
self.test_manager.alerter_process_dict)
def test_config_factory_properties_return_correctly(self) -> None:
# Test for the contract alerts config factory
self.test_manager._contracts_alerts_config_factory = \
self.contract_alerts_config_factory
self.assertEqual(self.contract_alerts_config_factory,
self.test_manager.contracts_alerts_config_factory)
# Test for the node alerts config factory
self.test_manager._node_alerts_config_factory = \
self.node_alerts_config_factory
self.assertEqual(self.node_alerts_config_factory,
self.test_manager.node_alerts_config_factory)
def test_configs_processor_helper_return_correctly(self) -> None:
self.test_manager._configs_processor_helper = \
self.configs_processor_helper_example
self.assertEqual(self.configs_processor_helper_example,
self.test_manager.configs_processor_helper)
@mock.patch.object(RabbitMQApi, "start_consuming")
def test_listen_for_data_calls_start_consuming(
self, mock_start_consuming) -> None:
mock_start_consuming.return_value = None
self.test_manager._listen_for_data()
self.assertEqual(1, mock_start_consuming.call_count)
@mock.patch.object(RabbitMQApi, "basic_consume")
def test_initialise_rabbitmq_initialises_everything_as_expected(
self, mock_basic_consume) -> None:
mock_basic_consume.return_value = None
# To make sure that there is no connection/channel already established
self.assertIsNone(self.rabbitmq.connection)
self.assertIsNone(self.rabbitmq.channel)
# To make sure that the exchanges and queues have not already been
# declared
self.rabbitmq.connect()
self.test_manager.rabbitmq.queue_delete(CL_ALERTERS_MAN_HB_QUEUE_NAME)
self.test_manager.rabbitmq.queue_delete(
CL_ALERTERS_MAN_CONFIGS_QUEUE_NAME)
self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
self.test_manager.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
self.test_manager.rabbitmq.exchange_delete(ALERT_EXCHANGE)
self.rabbitmq.disconnect()
self.test_manager._initialise_rabbitmq()
# Perform checks that the connection has been opened, marked as open
# and that the delivery confirmation variable is set.
self.assertTrue(self.test_manager.rabbitmq.is_connected)
self.assertTrue(self.test_manager.rabbitmq.connection.is_open)
self.assertTrue(
self.test_manager.rabbitmq.channel._delivery_confirmation)
# Check whether the producing exchanges have been created by using
# passive=True. If this check fails an exception is raised
# automatically.
self.test_manager.rabbitmq.exchange_declare(ALERT_EXCHANGE,
passive=True)
        # Check whether the consuming exchanges and queues have been created by
        # sending messages with the same routing keys as for the bindings.
self.test_manager.rabbitmq.basic_publish_confirm(
exchange=HEALTH_CHECK_EXCHANGE, routing_key=PING_ROUTING_KEY,
body='test_str', is_body_dict=False,
properties=pika.BasicProperties(delivery_mode=2), mandatory=True)
self.test_manager.rabbitmq.basic_publish_confirm(
exchange=CONFIG_EXCHANGE, routing_key=CL_ALERTS_CONFIGS_ROUTING_KEY,
body='another_test_str', is_body_dict=False,
properties=pika.BasicProperties(delivery_mode=2), mandatory=True)
# Re-declare queue to get the number of messages, and check that the
# message received is the message sent
res = self.test_manager.rabbitmq.queue_declare(
CL_ALERTERS_MAN_HB_QUEUE_NAME, False, True, False, False)
self.assertEqual(1, res.method.message_count)
_, _, body = self.test_manager.rabbitmq.basic_get(
CL_ALERTERS_MAN_HB_QUEUE_NAME)
self.assertEqual('test_str', body.decode())
res = self.test_manager.rabbitmq.queue_declare(
CL_ALERTERS_MAN_CONFIGS_QUEUE_NAME, False, True, False, False)
self.assertEqual(1, res.method.message_count)
_, _, body = self.test_manager.rabbitmq.basic_get(
CL_ALERTERS_MAN_CONFIGS_QUEUE_NAME)
self.assertEqual('another_test_str', body.decode())
expected_calls = [
call(CL_ALERTERS_MAN_HB_QUEUE_NAME, self.test_manager._process_ping,
True, False, None),
call(CL_ALERTERS_MAN_CONFIGS_QUEUE_NAME,
self.test_manager._process_configs, False, False, None)
]
mock_basic_consume.assert_has_calls(expected_calls, True)
def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
# This test creates a queue which receives messages with the same
# routing key as the ones set by send_heartbeat, and checks that the
# heartbeat is received
self.test_manager._initialise_rabbitmq()
        # Delete the queue first to avoid leftover messages from earlier runs.
self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=False
)
self.assertEqual(0, res.method.message_count)
self.test_manager.rabbitmq.queue_bind(
queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
self.test_manager._send_heartbeat(self.test_heartbeat)
# By re-declaring the queue again we can get the number of messages
# in the queue.
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=True
)
self.assertEqual(1, res.method.message_count)
# Check that the message received is actually the HB
_, _, body = self.test_manager.rabbitmq.basic_get(self.test_queue_name)
self.assertEqual(self.test_heartbeat, json.loads(body))
@parameterized.expand([
([True, True], [],),
([True, False], [CHAINLINK_CONTRACT_ALERTER_NAME],),
([False, True], [CHAINLINK_NODE_ALERTER_NAME],),
([False, False], [CHAINLINK_NODE_ALERTER_NAME,
CHAINLINK_CONTRACT_ALERTER_NAME],),
])
@freeze_time("2012-01-01")
@mock.patch.object(multiprocessing.Process, "join")
@mock.patch.object(multiprocessing.Process, "is_alive")
@mock.patch.object(ChainlinkAlertersManager,
"_create_and_start_alerter_processes")
@mock.patch.object(ChainlinkAlertersManager, "_send_heartbeat")
def test_process_ping_sends_a_valid_hb(
self, is_alive_side_effect, dead_alerters, mock_send_hb,
mock_create_and_start, mock_is_alive, mock_join) -> None:
mock_send_hb.return_value = None
mock_join.return_value = None
mock_create_and_start.return_value = None
mock_is_alive.side_effect = is_alive_side_effect
self.test_manager._alerter_process_dict = \
self.alerter_process_dict_example
# Some of the variables below are needed as parameters for the
# process_ping function
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(routing_key=PING_ROUTING_KEY)
body = 'ping'
properties = pika.spec.BasicProperties()
self.test_manager._process_ping(blocking_channel, method, properties,
body)
expected_hb = {
'component_name': self.manager_name,
'running_processes': [
alerter
for alerter in self.alerter_process_dict_example.keys()
if alerter not in dead_alerters
],
'dead_processes': dead_alerters,
'timestamp': datetime.now().timestamp()
}
mock_send_hb.assert_called_once_with(expected_hb)
@parameterized.expand([
([True, True], False,),
([True, False], True,),
([False, True], True,),
([False, False], True,),
])
@freeze_time("2012-01-01")
@mock.patch.object(multiprocessing.Process, "join")
@mock.patch.object(multiprocessing.Process, "is_alive")
@mock.patch.object(ChainlinkAlertersManager,
"_create_and_start_alerter_processes")
@mock.patch.object(ChainlinkAlertersManager, "_send_heartbeat")
def test_process_ping_restarts_dead_processes_correctly(
self, is_alive_side_effect, expected_restart, mock_send_hb,
mock_create_and_start, mock_is_alive, mock_join) -> None:
mock_send_hb.return_value = None
mock_join.return_value = None
mock_create_and_start.return_value = None
mock_is_alive.side_effect = is_alive_side_effect
self.test_manager._alerter_process_dict = \
self.alerter_process_dict_example
# Some of the variables below are needed as parameters for the
# process_ping function
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(routing_key=PING_ROUTING_KEY)
body = 'ping'
properties = pika.spec.BasicProperties()
self.test_manager._process_ping(blocking_channel, method, properties,
body)
if expected_restart:
mock_create_and_start.assert_called_once()
else:
mock_create_and_start.assert_not_called()
@mock.patch.object(multiprocessing.Process, "is_alive")
@mock.patch.object(ChainlinkAlertersManager, "_send_heartbeat")
def test_process_ping_does_not_send_hb_if_processing_fails(
self, mock_send_hb, mock_is_alive) -> None:
mock_is_alive.side_effect = self.test_exception
mock_send_hb.return_value = None
self.test_manager._alerter_process_dict = \
self.alerter_process_dict_example
# Some of the variables below are needed as parameters for the
# process_ping function
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(routing_key=PING_ROUTING_KEY)
body = 'ping'
properties = pika.spec.BasicProperties()
self.test_manager._process_ping(blocking_channel, method,
properties, body)
mock_send_hb.assert_not_called()
def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
self) -> None:
"""
In this test we are assuming that no configs have been set, this is done
to keep the test as simple as possible. We are also assuming that a
MsgWasNotDeliveredException will be raised automatically because we are
deleting the HealthExchange after every test, and thus there are no
consumers of the heartbeat.
"""
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(routing_key=PING_ROUTING_KEY)
body = 'ping'
properties = pika.spec.BasicProperties()
try:
self.test_manager._process_ping(blocking_channel, method,
properties, body)
except MessageWasNotDeliveredException:
self.fail('A MessageWasNotDeliveredException should not have been '
'raised')
@parameterized.expand([
(pika.exceptions.AMQPConnectionError,
pika.exceptions.AMQPConnectionError('test'),),
(pika.exceptions.AMQPChannelError,
pika.exceptions.AMQPChannelError('test'),),
(Exception, Exception('test'),),
])
@mock.patch.object(ChainlinkAlertersManager, "_send_heartbeat")
def test_process_ping_raises_unrecognised_error_if_raised_by_send_heartbeat(
self, exception_class, exception_instance, mock_send_hb) -> None:
mock_send_hb.side_effect = exception_instance
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(routing_key=PING_ROUTING_KEY)
body = 'ping'
properties = pika.spec.BasicProperties()
self.assertRaises(exception_class, self.test_manager._process_ping,
blocking_channel, method, properties, body)
@parameterized.expand([
({}, False,),
('self.alerter_process_dict_example', True)
])
@freeze_time("2012-01-01")
@mock.patch.object(ChainlinkAlertersManager,
"_push_latest_data_to_queue_and_send")
@mock.patch.object(multiprocessing.Process, "start")
def test_create_and_start_alerter_processes_if_processes_not_running(
self, state, state_is_str, mock_start, mock_push_and_send) -> None:
"""
In this test we will check that the required processes are created and
started, and that reset alerts are sent. We will perform this test for
both when the function is executed for the first time (empty state), and
for when the process is dead (state non empty but dummy process not
running by default)
"""
self.test_manager._alerter_process_dict = eval(
state) if state_is_str else state
mock_start.return_value = None
mock_push_and_send.return_value = None
self.test_manager._create_and_start_alerter_processes()
node_alerter_process = self.test_manager.alerter_process_dict[
CHAINLINK_NODE_ALERTER_NAME]
contract_alerter_process = self.test_manager.alerter_process_dict[
CHAINLINK_CONTRACT_ALERTER_NAME]
# Check that the processes were created correctly
self.assertTrue(node_alerter_process.daemon)
self.assertEqual(1, len(node_alerter_process._args))
self.assertEqual(self.test_manager.node_alerts_config_factory,
node_alerter_process._args[0])
self.assertEqual(start_chainlink_node_alerter,
node_alerter_process._target)
self.assertTrue(contract_alerter_process.daemon)
self.assertEqual(1, len(contract_alerter_process._args))
self.assertEqual(self.test_manager.contracts_alerts_config_factory,
contract_alerter_process._args[0])
self.assertEqual(start_chainlink_contract_alerter,
contract_alerter_process._target)
# Check that the processes were started
self.assertEqual(2, mock_start.call_count)
# Check that 2 reset alerts were sent
expected_alert_1 = ComponentResetAlert(
CHAINLINK_NODE_ALERTER_NAME, datetime.now().timestamp(),
ChainlinkNodeAlerter.__name__)
expected_alert_2 = ComponentResetAlert(
CHAINLINK_CONTRACT_ALERTER_NAME, datetime.now().timestamp(),
ChainlinkContractAlerter.__name__)
expected_calls = [
call(expected_alert_1.alert_data, CL_NODE_ALERT_ROUTING_KEY),
call(expected_alert_2.alert_data, CL_CONTRACT_ALERT_ROUTING_KEY)
]
mock_push_and_send.assert_has_calls(expected_calls, True)
@mock.patch.object(ChainlinkAlertersManager,
"_push_latest_data_to_queue_and_send")
@mock.patch.object(multiprocessing, "Process")
@mock.patch.object(multiprocessing.Process, "is_alive")
@mock.patch.object(multiprocessing.Process, "start")
def test_create_and_start_alerter_proc_does_nothing_if_proc_running(
self, mock_start, mock_is_alive, mock_init_proc,
mock_push_and_send) -> None:
"""
In this test we will check that no process is created or started, and
that no reset alert is sent.
"""
self.test_manager._alerter_process_dict = \
self.alerter_process_dict_example
mock_start.return_value = None
mock_is_alive.return_value = True
mock_init_proc.return_value = None
mock_push_and_send.return_value = None
self.test_manager._create_and_start_alerter_processes()
mock_push_and_send.assert_not_called()
mock_init_proc.assert_not_called()
mock_start.assert_not_called()
@freeze_time("2012-01-01")
@mock.patch.object(RabbitMQApi, 'basic_ack')
@mock.patch.object(ChainlinkAlertersManager,
"_push_latest_data_to_queue_and_send")
def test_process_configs_if_non_empty_configs_received(
self, mock_push_and_send, mock_ack) -> None:
"""
In this test we will check that if non-empty configs are received,
the process_configs function stores the received configs and sends a
reset alert
"""
mock_ack.return_value = None
mock_push_and_send.return_value = None
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(routing_key=self.routing_key_1)
body = json.dumps(self.config_1)
properties = pika.spec.BasicProperties()
parsed_routing_key = self.routing_key_1.split('.')
chain_name = parsed_routing_key[1] + ' ' + parsed_routing_key[2]
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body)
expected_node_configs = {
chain_name: ChainlinkNodeAlertsConfig(
parent_id=self.parent_id_1,
head_tracker_current_head=self.config_1['1'],
head_tracker_heads_received_total=self.config_1['2'],
max_unconfirmed_blocks=self.config_1['3'],
process_start_time_seconds=self.config_1['4'],
tx_manager_gas_bump_exceeds_limit_total=self.config_1['5'],
unconfirmed_transactions=self.config_1['6'],
run_status_update_total=self.config_1['7'],
eth_balance_amount=self.config_1['8'],
eth_balance_amount_increase=self.config_1['9'],
node_is_down=self.config_1['10']
)
}
expected_contract_configs = {
chain_name: ChainlinkContractAlertsConfig(
parent_id=self.parent_id_1,
price_feed_not_observed=self.config_1['11'],
price_feed_deviation=self.config_1['12'],
consensus_failure=self.config_1['13'],
)
}
expected_alert_1 = ComponentResetAlert(
CHAINLINK_NODE_ALERTER_NAME, datetime.now().timestamp(),
ChainlinkNodeAlerter.__name__, self.parent_id_1, chain_name
)
expected_alert_2 = ComponentResetAlert(
CHAINLINK_CONTRACT_ALERTER_NAME, datetime.now().timestamp(),
ChainlinkContractAlerter.__name__, self.parent_id_1, chain_name
)
self.assertEqual(expected_node_configs,
self.test_manager.node_alerts_config_factory.configs)
self.assertEqual(
expected_contract_configs,
self.test_manager.contracts_alerts_config_factory.configs)
expected_calls = [
call(expected_alert_1.alert_data, CL_NODE_ALERT_ROUTING_KEY),
call(expected_alert_2.alert_data, CL_CONTRACT_ALERT_ROUTING_KEY)
]
mock_push_and_send.assert_has_calls(expected_calls, True)
mock_ack.assert_called_once()
@mock.patch.object(RabbitMQApi, 'basic_ack')
@mock.patch.object(ChainlinkAlertersManager,
"_push_latest_data_to_queue_and_send")
def test_process_configs_if_received_empty_configs(
self, mock_push_and_send, mock_ack) -> None:
"""
In this test we will check that if empty configs are received, the
process_configs function removes the already stored config and does not
send a reset alert.
"""
mock_ack.return_value = None
mock_push_and_send.return_value = None
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(routing_key=self.routing_key_1)
body = json.dumps({})
properties = pika.spec.BasicProperties()
parsed_routing_key = self.routing_key_1.split('.')
chain_name = parsed_routing_key[1] + ' ' + parsed_routing_key[2]
# Store configs directly since we need to test their removal
self.test_manager.node_alerts_config_factory.add_new_config(
chain_name, self.config_1)
self.test_manager.contracts_alerts_config_factory.add_new_config(
chain_name, self.config_1)
# Make sure that the configs were added
expected_node_configs = {
chain_name: ChainlinkNodeAlertsConfig(
parent_id=self.parent_id_1,
head_tracker_current_head=self.config_1['1'],
head_tracker_heads_received_total=self.config_1['2'],
max_unconfirmed_blocks=self.config_1['3'],
process_start_time_seconds=self.config_1['4'],
tx_manager_gas_bump_exceeds_limit_total=self.config_1['5'],
unconfirmed_transactions=self.config_1['6'],
run_status_update_total=self.config_1['7'],
eth_balance_amount=self.config_1['8'],
eth_balance_amount_increase=self.config_1['9'],
node_is_down=self.config_1['10']
)
}
expected_contract_configs = {
chain_name: ChainlinkContractAlertsConfig(
parent_id=self.parent_id_1,
price_feed_not_observed=self.config_1['11'],
price_feed_deviation=self.config_1['12'],
consensus_failure=self.config_1['13'],
)
}
self.assertEqual(expected_node_configs,
self.test_manager.node_alerts_config_factory.configs)
self.assertEqual(
expected_contract_configs,
self.test_manager.contracts_alerts_config_factory.configs)
# Send an empty config for the same chain
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body)
expected_configs = {}
self.assertEqual(expected_configs,
self.test_manager.node_alerts_config_factory.configs)
self.assertEqual(
expected_configs,
self.test_manager.contracts_alerts_config_factory.configs)
mock_push_and_send.assert_not_called()
mock_ack.assert_called_once()
@mock.patch.object(QueuingPublisherSubscriberComponent, "_push_to_queue")
@mock.patch.object(ChainlinkAlertersManager, "_send_data")
def test_push_latest_data_to_queue_and_send_pushes_correctly_and_sends(
self, mock_send_data, mock_push) -> None:
mock_send_data.return_value = None
mock_push.return_value = None
test_dict = {'test_key': 'test_val'}
self.test_manager._push_latest_data_to_queue_and_send(
test_dict, self.routing_key_1)
mock_push.assert_called_once_with(
data=test_dict, exchange=ALERT_EXCHANGE,
routing_key=self.routing_key_1,
properties=pika.BasicProperties(delivery_mode=2), mandatory=True
)
mock_send_data.assert_called_once()
|
# -*- coding: utf-8 -*-
'''
@Author : lance
@Email : wangyl306@163.com
'''
from sklearn import metrics
from keras.models import load_model
from keras_preprocessing.image import ImageDataGenerator
import numpy as np
import keras
import keras.backend as K
#%% Load models
K.clear_session()
K.set_learning_phase(0)
# model_cx and model_x23 are required by the prediction cells below
model_cx = load_model("myweights/model_cx_bin_255_224x224_0.7.h5")
model_x23 = load_model("myweights/model_x23_bin_255_224x224_0.8.h5")
model = load_model("weights/resnet_0014.h5")
#%% Single-image demo
# distinguish groups (binary)
test_gen=ImageDataGenerator(rescale=1/255).flow_from_directory("ceshi",
target_size=(224,224),
class_mode="binary",
batch_size=1,
shuffle=False)
pred = model_cx.predict_generator(test_gen, steps=110, verbose=1)
pred=pred.ravel()
pred[pred<0.7]=0
pred[pred>=0.7]=1
print("组别(cx)是:", pred)
#准确率
n=0
for i in pred:
if i==1: #类别标签
n+=1
print(n/110)
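# Editor's sketch (hypothetical helper, not in the original script): every
# accuracy block in this file just counts predictions equal to one label, so
# a single helper could replace the repeated counting loops.
def label_fraction(pred, label=1):
    """Fraction of entries in pred equal to label."""
    return float(np.sum(np.asarray(pred) == label)) / len(pred)
# e.g. label_fraction(pred, 1) reproduces the n / 110 computation above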
#%% Single-image demo
# distinguish groups (binary)
test_gen=ImageDataGenerator(rescale=1/255).flow_from_directory("ceshi",
target_size=(224,224),
class_mode="binary",
batch_size=1,
shuffle=False)
pred = model_x23.predict_generator(test_gen, steps=10, verbose=1)
pred=pred.ravel()
pred[pred<0.8]=0
pred[pred>=0.8]=1
print("级别是(x23):", pred)
#准确率
n=0
for i in pred:
if i==1: #类别标签
n+=1
print(n/110)
#%% 4-class grading
def salt(img, n=10000):
    # Originally added n salt-and-pepper points in a loop (kept commented out):
    # for k in range(n):
    #     # randomly pick the noise coordinates
    #     i = int(np.random.random() * img.shape[1])
    #     j = int(np.random.random() * img.shape[0])
    #     # if it is a grayscale image
    #     if img.ndim == 2:
    #         img[j, i] = 255
    #     # if it is an RGB image
    #     elif img.ndim == 3:
    #         img[j, i, 0] = 255
    #         img[j, i, 1] = 255
    #         img[j, i, 2] = 255
    # Current version: add small uniform noise instead
    noise = np.random.rand(448, 448, 3) * 0.05 - 0.025
    img = img + noise
    return img
test_gen=ImageDataGenerator(rescale=1/255,preprocessing_function=None).flow_from_directory("ceshi",
target_size=(448,448),
batch_size=1,
shuffle=False)
pred= model.predict_generator(test_gen, steps=120, verbose=1)
predicted_class_indices = np.argmax(pred, axis=1)
print("组别是(softmax):", predicted_class_indices)
#准确率
n=0
for i in predicted_class_indices:
if i==1: #类别标签
n+=1
print(n/120)
# plot: display after multiplying by 255
import matplotlib.pyplot as plt
def plots(ims,figsize=(10,5),rows=1,interp=False,titles=None):
if type(ims[0]) is np.ndarray:
ims=np.array(ims).astype(np.uint8)
if (ims.shape[-1] != 3):
ims=ims.transpose((0,2,3,1))
f=plt.figure(figsize=figsize)
cols=len(ims)//rows if len(ims)%2 ==0 else len(ims)//rows+1
for i in range(len(ims)):
sp=f.add_subplot(rows,cols,i+1)
sp.axis('off')
if titles is not None:
sp.set_title(titles[i],fontsize=9)
plt.imshow(ims[i],interpolation=None if interp else "none")
imgs,labels=next(test_gen)
plots(imgs)
plt.show()
#%% Image-reading test
# skimage prediction test
import os
from skimage import io,transform
path="新建文件夹/t1_c41"
files=os.listdir(path)
Tmp_Img=[]
for i in range(len(files)):
print(i)
tmp=io.imread(path+'/'+files[i])
tmp_img=transform.resize(tmp,[448,448])
Tmp_Img.append(tmp_img)
Tmp_Img=np.array(Tmp_Img)
pred=model.predict(Tmp_Img)
pred=np.argmax(pred, axis=1)
print(pred)
# accuracy
n = 0
for i in pred:
    if i == 1:  # class label
        n += 1
print(n / len(pred))  # divide by the number of images actually read
# plot
io.imshow(tmp_img)
tmp_img=tmp_img*255
# keras: same preprocessing as ImageDataGenerator
import os
from keras.preprocessing import image
path="ceshi/t1_x31"
file_names = os.listdir(path)
i=0
for file_name in file_names:
img_path=os.path.join(path, file_name)
img = image.load_img(img_path, target_size=(448,448))
x = image.img_to_array(img)
x = x*(1/255)
x = np.expand_dims(x, axis=0)
pred = model.predict(x)
predicted_class_indices=np.argmax(pred, axis=1)
print(predicted_class_indices)
if predicted_class_indices ==3:
i+=1
print(i / len(file_names))  # fraction of files predicted as class 3
plots(x)
plt.show()
#%% Load data
# train_gen=ImageDataGenerator(1/255).flow_from_directory("re_cx/train",
# target_size=(224,224),
#
# class_mode="binary",
# batch_size=10,
# shuffle=False)
#
#
# valid_gen=ImageDataGenerator(1/255).flow_from_directory("re_cx/valid",
# target_size=(224,224),
#
# class_mode="binary",
# batch_size=10,
# shuffle=False)
#test_gen=ImageDataGenerator(rescale=1/255).flow_from_directory("ceshi",
# target_size=(448,448),
# class_mode="binary",
# batch_size=50,
# shuffle=False)
#%% Test
steps=6
#test_class=np.array([])
#for i in range(steps):
# test_imgs, test_lables = next(test_gen)
# test_class=np.hstack((test_class,test_lables ))
#print("真实类别:",test_class)
pred = model_cx.predict_generator(test_gen, steps=steps, verbose=1)
pred=pred.ravel()
pred=list(pred)
for i in range(len(pred)):
if pred[i]<0.7:
pred[i]=0
else:
pred[i]=1
print("预测结果:", pred)
# 打印混淆矩阵
#cm = metrics.confusion_matrix(test_class, pred)
#
#
#print(cm)
#%% Feature model
model_feather = keras.Model(inputs=model_cx.input,
outputs=model_cx.layers[-2].output)
model_feather.summary()
#%% Feature extraction
# Labels=[]
# Predicts=[]
#
# for i in range(63):#111
# print(i)
# temp=next(train_gen)
# temp1=temp[0]
#
# Labels.append(temp[1])
# Predicts.append(model_feather.predict(temp1))
#
# train_features=np.array(Predicts).reshape([630,1024]) #[1110,1024]
# train_labels=np.array(Labels).reshape([630,1]) #[1110,1]
#
# Labels=[]
# Predicts=[]
#
# for i in range(27):#47
# print(i)
# temp=next(valid_gen)
# temp1=temp[0]
#
# Labels.append(temp[1])
# Predicts.append(model_feather.predict(temp1))
#
# valid_features=np.array(Predicts).reshape([270,1024]) #[470,1024]
# valid_labels=np.array(Labels).reshape([270,1]) #[470,1]
#
# Labels=[]
# Predicts=[]
#
# for i in range(3):
# print(i)
# temp=next(test_gen)
# temp1=temp[0]
#
# Labels.append(temp[1])
# Predicts.append(model_feather.predict(temp1))
#
# pred_features=np.array(Predicts).reshape([30,1024])
# real_labels=np.array(Labels).reshape([30,1])
#%% Error plot
import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator
valid_datagen = ImageDataGenerator(rescale=1./255)
#valid_datagen = ImageDataGenerator()
valid_gen = valid_datagen.flow_from_directory( 'ceshi/c1',
target_size=(224,224),batch_size=10,class_mode='binary')
from keras.preprocessing.image import ImageDataGenerator
valid_datagen = ImageDataGenerator()
#valid_datagen = ImageDataGenerator()
valid_gen2 = valid_datagen.flow_from_directory( 'ceshi/c1',
target_size=(224,224),batch_size=10,class_mode='binary')
temp1=next(valid_gen)
image1=temp1[0]
temp2=next(valid_gen2)
image2=temp2[0]
diff=image1*255-image2
print(np.mean(diff),np.max(diff),np.min(diff))
plt.imshow(diff[1])
plt.show()
# Hardware: Intel E5-series Gold processors, two CPUs totalling 44 cores / 88 threads, four 2080 Ti GPUs - 100,000 RMB
|
from PySide2.QtCore import Qt
from PySide2.QtWidgets import QGridLayout, QSlider, QSpinBox, QWidget
from hexrd.ui.hexrd_config import HexrdConfig
class ImageSeriesToolbar(QWidget):
def __init__(self, name, parent=None):
super(ImageSeriesToolbar, self).__init__(parent)
self.ims = HexrdConfig().imageseries(name)
self.slider = None
self.frame = None
self.layout = None
self.widget = None
self.show = False
self.name = name
self.create_widget()
self.set_range()
self.setup_connections()
def setup_connections(self):
self.slider.valueChanged.connect(self.val_changed)
self.slider.valueChanged.connect(self.frame.setValue)
self.frame.valueChanged.connect(
self.slider.setSliderPosition)
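        # Editor's note: the slider and spinbox mirror each other -- slider
        # moves update the spinbox, and edits to the spinbox reposition the
        # slider.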
def create_widget(self):
self.slider = QSlider(Qt.Horizontal, self.parent())
self.frame = QSpinBox(self.parent())
self.widget = QWidget(self.parent())
self.layout = QGridLayout(self.widget)
self.layout.addWidget(self.slider, 0, 0, 1, 9)
self.layout.addWidget(self.frame, 0, 9, 1, 1)
self.widget.setLayout(self.layout)
if self.ims and len(self.ims) > 1:
self.show = True
self.widget.setVisible(self.show)
def set_range(self):
if self.ims:
size = len(self.ims) - 1
if not size and self.show:
self.show = False
elif size and not self.show:
self.show = True
self.widget.setVisible(self.show)
self.slider.setMaximum(size)
            self.slider.setMinimumWidth(self.parent().width() // 2)  # Qt expects an int
self.frame.setMaximum(size)
self.slider.setValue(HexrdConfig().current_imageseries_idx)
else:
self.show = False
self.widget.setVisible(self.show)
def update_range(self):
self.ims = HexrdConfig().imageseries(self.name)
self.set_range()
def set_visible(self, b=False):
if self.show:
self.widget.setVisible(b)
def val_changed(self, pos):
self.parent().change_ims_image(pos)
|
import os
import pandas as pd
import itertools
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
class ConfusionMatrixGenerater(object):
def __init__(self, image_data, csvfile):
self.image_data = image_data
self.csvfile = csvfile
self.df = pd.read_csv(csvfile)
self.biradsmap = {0.0: "bi-rads0", 1.0:"bi-rads1", 2.0:"bi-rads2", 3.0:"bi-rads3", 4.0:"bi-rads4",
4.1: "bi-rads4a", 4.2:"bi-rads4b", 4.3:"bi-rads4c", 5.0:"bi-rads5"}
def gen_dict_from_csv(self):
"""
create a dictionary that stores ziwei's birads label
key: image name "pid-laterality", e.g. 111-L
value: birads-num as csv file showed
choose the highest birads to store
"""
ziwei_birads = {}
for i in range(len(self.df["index"])):
patient_idx = self.df["index"][i].split("-")[0]
# laterality_idx = df["index"][i].split("-")[1]
if self.df["index"][i].split("-")[1] == str(1) or self.df["index"][i].split("-")[1] == str(3):
laterality = "R"
elif self.df["index"][i].split("-")[1] == str(2) or self.df["index"][i].split("-")[1] == str(4):
laterality = "L"
else:
print("wrong laterality ", self.df["index"][i])
image_idx = patient_idx + "-" + laterality
if image_idx not in ziwei_birads.keys():
ziwei_birads[image_idx] = float(self.df["birads_num"][i])
else:
if ziwei_birads[image_idx] < float(self.df["birads_num"][i]):
ziwei_birads[image_idx] = float(self.df["birads_num"][i])
"""
convert the num to real birads class, like bi-rads2, etc
"""
ziwei_birads_new = {}
for patient in ziwei_birads:
ziwei_birads_new[patient] = self.biradsmap[ziwei_birads[patient]]
return ziwei_birads_new
def gen_dict_from_txt(self):
"""
create a dictionary that stores the original comments.txt file info
key: image name "pid-laterality", e.g. 111-L
value: birads
"""
image_comment = {}
for patient in os.listdir(self.image_data):
if os.path.isdir(os.path.join(self.image_data, patient)):
comment_path = os.path.join(self.image_data, patient, "comments.txt")
# print(comment_path)
with open(comment_path, "r", encoding='utf-8') as f:
info = f.readlines()
for i in range(len(info)):
if "left" in info[i].lower():
left_birads = info[i].split(":")[-1].lower().replace(" ", "").replace("\n","")
image_comment[patient+"-L"] = left_birads
if "right" in info[i].lower():
right_birads = info[i].split(":")[-1].lower().replace(" ", "").replace("\n","")
image_comment[patient+"-R"] = right_birads
return image_comment
def gen_confustion_matrix(self, class_names):
csv_dict = self.gen_dict_from_csv()
txt_dict = self.gen_dict_from_txt()
ziwei_list = []
img_comment_list = []
# wrong_list = []
count = 0
for img in csv_dict.keys():
if img in txt_dict.keys():
count += 1
ziwei_list.append(csv_dict[img])
img_comment_list.append(txt_dict[img])
cnf_matrix = confusion_matrix(img_comment_list, ziwei_list, labels=class_names)
np.set_printoptions(precision=2)
return cnf_matrix
@staticmethod
def plot_confusion_matrix(cm, classes, xlabel, ylabel,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
generate two lists that stores the bi-rads info
corresponding info stores at the same slot index
"""
plt.figure()
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel(ylabel)
plt.xlabel(xlabel)
plt.show()
if __name__ == "__main__":
img_data = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/ChinaLQ_Display_1"
csvfile = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/china_bbox.csv"
class_names = ["bi-rads0", "bi-rads1", "bi-rads2", "bi-rads3", "bi-rads4", "bi-rads4a", "bi-rads4b",
"bi-rads4c", "bi-rads5"]
cmg = ConfusionMatrixGenerater(img_data, csvfile)
cnf_matrix = cmg.gen_confustion_matrix(class_names)
cmg.plot_confusion_matrix(cnf_matrix, class_names, "ziwei", "hospital")
|
#!/usr/bin/env python
"""
Downloads files to temp locations. This script is invoked by the Transfer
Manager (galaxy.jobs.transfer_manager) and should not normally be invoked by
hand.
"""
import ConfigParser
import json
import logging
import optparse
import os
import random
import SocketServer
import sys
import tempfile
import threading
import time
import urllib2
try:
import pexpect
except ImportError:
pexpect = None
from daemon import DaemonContext
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.orm import scoped_session, sessionmaker
galaxy_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir ) )
sys.path.insert( 1, os.path.join( galaxy_root, 'lib' ) )
import galaxy.model
from galaxy.util import bunch
from galaxy.util.json import jsonrpc_response, validate_jsonrpc_request
PEXPECT_IMPORT_MESSAGE = ('The Python pexpect package is required to use this '
'feature, please install it')
log = logging.getLogger( __name__ )
log.setLevel( logging.DEBUG )
handler = logging.StreamHandler( sys.stdout )
log.addHandler( handler )
debug = False
slow = False
class ArgHandler( object ):
"""
Collect command line flags.
"""
def __init__( self ):
self.parser = optparse.OptionParser()
self.parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (config/galaxy.ini)',
default=os.path.abspath( os.path.join( galaxy_root, 'config/galaxy.ini' ) ) )
self.parser.add_option( '-d', '--debug', action='store_true', dest='debug', help="Debug (don't detach)" )
self.parser.add_option( '-s', '--slow', action='store_true', dest='slow', help="Transfer slowly (for debugging)" )
self.opts = None
def parse( self ):
self.opts, args = self.parser.parse_args()
if len( args ) != 1:
log.error( 'usage: transfer.py <transfer job id>' )
sys.exit( 1 )
try:
self.transfer_job_id = int( args[0] )
        except ValueError:
log.error( 'The provided transfer job ID is not an integer: %s' % args[0] )
sys.exit( 1 )
if self.opts.debug:
global debug
debug = True
log.setLevel( logging.DEBUG )
if self.opts.slow:
global slow
slow = True
class GalaxyApp( object ):
"""
A shell Galaxy App to provide access to the Galaxy configuration and
model/database.
"""
def __init__( self, config_file ):
self.config = ConfigParser.ConfigParser( dict( database_file='database/universe.sqlite',
file_path='database/files',
transfer_worker_port_range='12275-12675',
transfer_worker_log=None ) )
self.config.read( config_file )
self.model = bunch.Bunch()
self.connect_database()
def connect_database( self ):
# Avoid loading the entire model since doing so is exceptionally slow
default_dburl = 'sqlite:///%s?isolation_level=IMMEDIATE' % self.config.get( 'app:main', 'database_file' )
try:
dburl = self.config.get( 'app:main', 'database_connection' )
except ConfigParser.NoOptionError:
dburl = default_dburl
engine = create_engine( dburl )
metadata = MetaData( engine )
self.sa_session = scoped_session( sessionmaker( bind=engine, autoflush=False, autocommit=True ) )
self.model.TransferJob = galaxy.model.TransferJob
self.model.TransferJob.table = Table( "transfer_job", metadata, autoload=True )
def get_transfer_job( self, id ):
return self.sa_session.query( self.model.TransferJob ).get( int( id ) )
class ListenerServer( SocketServer.ThreadingTCPServer ):
"""
The listener will accept state requests and new transfers for as long as
the manager is running.
"""
def __init__( self, port_range, RequestHandlerClass, app, transfer_job, state_result ):
self.state_result = state_result
# Try random ports until a free one is found
while True:
random_port = random.choice( port_range )
try:
SocketServer.ThreadingTCPServer.__init__( self, ( 'localhost', random_port ), RequestHandlerClass )
log.info( 'Listening on port %s' % random_port )
break
except Exception as e:
log.warning( 'Tried binding port %s: %s' % ( random_port, str( e ) ) )
transfer_job.socket = random_port
app.sa_session.add( transfer_job )
app.sa_session.flush()
class ListenerRequestHandler( SocketServer.BaseRequestHandler ):
"""
Handle state or transfer requests received on the socket.
"""
def handle( self ):
request = self.request.recv( 8192 )
response = {}
valid, request, response = validate_jsonrpc_request( request, ( 'get_state', ), () )
if valid:
self.request.send( json.dumps( jsonrpc_response( request=request, result=self.server.state_result.result ) ) )
else:
error_msg = 'Unable to serve request: %s' % response['error']['message']
if 'data' in response['error']:
error_msg += ': %s' % response['error']['data']
log.error( error_msg )
log.debug( 'Original request was: %s' % request )
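# Example (illustrative; not called by the worker): poll this worker's state by
# sending a JSON-RPC 'get_state' request to the port recorded on
# transfer_job.socket. The exact request framing below is an assumption based
# on what validate_jsonrpc_request accepts.
def example_poll_state( port ):
    import socket
    sock = socket.create_connection( ( 'localhost', port ) )
    try:
        sock.send( json.dumps( dict( jsonrpc='2.0', method='get_state', id='1' ) ) )
        return json.loads( sock.recv( 8192 ) )
    finally:
        sock.close()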
class StateResult( object ):
"""
A mutable container for the 'result' portion of JSON-RPC responses to state requests.
"""
def __init__( self, result=None ):
self.result = result
def transfer( app, transfer_job_id ):
transfer_job = app.get_transfer_job( transfer_job_id )
if transfer_job is None:
log.error( 'Invalid transfer job ID: %s' % transfer_job_id )
return False
port_range = app.config.get( 'app:main', 'transfer_worker_port_range' )
try:
port_range = [ int( p ) for p in port_range.split( '-' ) ]
except Exception as e:
log.error( 'Invalid port range set in transfer_worker_port_range: %s: %s' % ( port_range, str( e ) ) )
return False
protocol = transfer_job.params[ 'protocol' ]
if protocol not in ( 'http', 'https', 'scp' ):
log.error( 'Unsupported protocol: %s' % protocol )
return False
state_result = StateResult( result=dict( state=transfer_job.states.RUNNING, info='Transfer process starting up.' ) )
listener_server = ListenerServer( range( port_range[0], port_range[1] + 1 ), ListenerRequestHandler, app, transfer_job, state_result )
# daemonize here (if desired)
if not debug:
daemon_context = DaemonContext( files_preserve=[ listener_server.fileno() ], working_directory=os.getcwd() )
daemon_context.open()
# If this fails, it'll never be detected. Hopefully it won't fail since it succeeded once.
app.connect_database() # daemon closed the database fd
transfer_job = app.get_transfer_job( transfer_job_id )
listener_thread = threading.Thread( target=listener_server.serve_forever )
listener_thread.setDaemon( True )
listener_thread.start()
# Store this process' pid so unhandled deaths can be handled by the restarter
transfer_job.pid = os.getpid()
app.sa_session.add( transfer_job )
app.sa_session.flush()
terminal_state = None
if protocol in [ 'http', 'https' ]:
for transfer_result_dict in http_transfer( transfer_job ):
state_result.result = transfer_result_dict
if transfer_result_dict[ 'state' ] in transfer_job.terminal_states:
terminal_state = transfer_result_dict
elif protocol in [ 'scp' ]:
# Transfer the file using scp
transfer_result_dict = scp_transfer( transfer_job )
# Handle the state of the transfer
state = transfer_result_dict[ 'state' ]
state_result.result = transfer_result_dict
if state in transfer_job.terminal_states:
terminal_state = transfer_result_dict
if terminal_state is not None:
transfer_job.state = terminal_state[ 'state' ]
for name in [ 'info', 'path' ]:
if name in terminal_state:
transfer_job.__setattr__( name, terminal_state[ name ] )
else:
transfer_job.state = transfer_job.states.ERROR
transfer_job.info = 'Unknown error encountered by transfer worker.'
app.sa_session.add( transfer_job )
app.sa_session.flush()
return True
def http_transfer( transfer_job ):
"""Plugin" for handling http(s) transfers."""
url = transfer_job.params['url']
assert url.startswith('http://') or url.startswith('https://')
try:
f = urllib2.urlopen( url )
except urllib2.URLError as e:
yield dict( state=transfer_job.states.ERROR, info='Unable to open URL: %s' % str( e ) )
return
size = f.info().getheader( 'Content-Length' )
if size is not None:
size = int( size )
chunksize = 1024 * 1024
if slow:
chunksize = 1024
read = 0
last = 0
try:
fh, fn = tempfile.mkstemp()
except Exception as e:
yield dict( state=transfer_job.states.ERROR, info='Unable to create temporary file for transfer: %s' % str( e ) )
return
log.debug( 'Writing %s to %s, size is %s' % ( url, fn, size or 'unknown' ) )
try:
while True:
chunk = f.read( chunksize )
if not chunk:
break
os.write( fh, chunk )
            read += len( chunk )  # count the bytes actually read, not the requested chunk size
if size is not None and read < size:
percent = int( float( read ) / size * 100 )
if percent != last:
yield dict( state=transfer_job.states.PROGRESS, read=read, percent='%s' % percent )
last = percent
elif size is None:
yield dict( state=transfer_job.states.PROGRESS, read=read )
if slow:
time.sleep( 1 )
os.close( fh )
yield dict( state=transfer_job.states.DONE, path=fn )
except Exception as e:
yield dict( state=transfer_job.states.ERROR, info='Error during file transfer: %s' % str( e ) )
return
return
def scp_transfer( transfer_job ):
"""Plugin" for handling scp transfers using pexpect"""
def print_ticks( d ):
pass
host = transfer_job.params[ 'host' ]
user_name = transfer_job.params[ 'user_name' ]
password = transfer_job.params[ 'password' ]
file_path = transfer_job.params[ 'file_path' ]
if pexpect is None:
return dict( state=transfer_job.states.ERROR, info=PEXPECT_IMPORT_MESSAGE )
try:
fh, fn = tempfile.mkstemp()
except Exception as e:
return dict( state=transfer_job.states.ERROR, info='Unable to create temporary file for transfer: %s' % str( e ) )
try:
# TODO: add the ability to determine progress of the copy here like we do in the http_transfer above.
cmd = "scp %s@%s:'%s' '%s'" % ( user_name,
host,
file_path.replace( ' ', '\ ' ),
fn )
pexpect.run( cmd, events={ '.ssword:*': password + '\r\n',
pexpect.TIMEOUT: print_ticks },
timeout=10 )
return dict( state=transfer_job.states.DONE, path=fn )
except Exception as e:
return dict( state=transfer_job.states.ERROR, info='Error during file transfer: %s' % str( e ) )
if __name__ == '__main__':
arg_handler = ArgHandler()
arg_handler.parse()
app = GalaxyApp( arg_handler.opts.config )
log.debug( 'Initiating transfer...' )
if transfer( app, arg_handler.transfer_job_id ):
log.debug( 'Finished' )
else:
log.error( 'Error in transfer process...' )
sys.exit( 1 )
sys.exit( 0 )
|
import sys
if sys.version_info[0] == 2:
from .society2 import core_value_decode, core_value_encode
else:
from .society3 import core_value_encode, core_value_decode
|
if __name__ == '__main__':
import os
import config
from components.send_backup import send_backup
from components.sort_backups import sort_backups
from utils import utils
from utils.run_system_cmd import run_system_cmd
from utils.send_mail import send_mail
utils.create_folders()
run_system_cmd(config.COMMAND_STOP, 'stopping the application and the DBMS')
run_system_cmd(f'cp -r {config.DATA_DIR} {config.TEMP_DIR}', 'copying the file')
run_system_cmd(config.COMMAND_START, 'starting the application and the DBMS')
archive_name = f'{utils.current_day}_{utils.current_month}_{utils.current_year}_{config.BACKUP_NAME}.tar.gz'
backup_path = os.path.join(config.BACKUP_DIR, 'daily', f'{archive_name}.gpg')
change_owner_cmd = f'chown -R {config.CLIENT_USER}:{config.CLIENT_USER} {config.TEMP_DIR}'
make_archive_cmd = f'su - {config.CLIENT_USER} -c "cd {config.TEMP_DIR} && tar -zcvf {archive_name} *"'
make_pgp_cmd = f'gpg -e -r {config.CLIENT_USER} {archive_name}'
copy_cmd = f'cp {config.TEMP_DIR}/{archive_name}.gpg {backup_path}'
clean_cmd = f'rm -rf {config.TEMP_DIR}/*'
create_gpg_cmd = f'su - {config.CLIENT_USER} -c "cd {config.TEMP_DIR} && {make_pgp_cmd} && {copy_cmd}" && {clean_cmd}'
run_system_cmd(change_owner_cmd, 'changing the owner of the temp directory')
run_system_cmd(make_archive_cmd, 'creating the archive')
run_system_cmd(create_gpg_cmd, 'encrypting the archive')
if config.SEND_BACKUP:
send_backup(backup_path, f'{archive_name}.gpg')
elif config.SEND_FINAL_EMAIL:
send_mail(config.MAIL_BODY, 'success')
sort_backups(backup_path)
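    # For reference, a minimal sketch of the config.py this script assumes,
    # based on the attributes accessed above (all values illustrative):
    #   COMMAND_STOP = 'systemctl stop myapp'
    #   COMMAND_START = 'systemctl start myapp'
    #   DATA_DIR = '/var/lib/myapp/data'
    #   TEMP_DIR = '/tmp/backup_staging'
    #   BACKUP_DIR = '/backups'
    #   BACKUP_NAME = 'myapp'
    #   CLIENT_USER = 'backup'
    #   SEND_BACKUP = True
    #   SEND_FINAL_EMAIL = True
    #   MAIL_BODY = 'Backup finished successfully.'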
|
import os
import re
from wok import logger
from wok.config.data import Data
from wok.core.flow.loader import FlowLoader
_LOGGER_NAME = "wok.projects"
class ProjectManager(object):
FLOW_URI_RE = re.compile(r"^(?:([^\:]+)\:)?([^\/]+)(?:\/(.+))?$")
def __init__(self, conf, base_path=None):
self.conf = conf
self.base_path = base_path or os.getcwd()
self._log = logger.get_logger(_LOGGER_NAME)
self._projects = {}
def _iter_dict(self, conf):
for name, pdesc in conf.items():
yield name, pdesc
def _iter_list(self, conf):
for pdesc in conf:
yield None, pdesc
def initialize(self):
self._log.info("Initializing projects ...")
if Data.is_element(self.conf):
iter_conf = self._iter_dict(self.conf)
elif Data.is_list(self.conf):
iter_conf = self._iter_list(self.conf)
else:
iter_conf = iter([])
for name, pdesc in iter_conf:
if isinstance(pdesc, basestring):
pdesc = self._load_project_desc(pdesc, self.base_path)
if name is None:
name = pdesc["name"]
self._projects[name] = Project(pdesc, name=name)
for name, project in sorted(self._projects.items(), key=lambda x: x[0]):
project.initialize()
def _load_project_desc(self, path, base_path=None):
if not os.path.isabs(path):
if base_path is not None:
path = os.path.join(base_path, path)
else:
path = os.path.abspath(path)
if not os.path.exists(path):
raise Exception("Project path not found: {}".format(path))
if os.path.isdir(path):
path = os.path.join(path, "project.conf")
if not os.path.isfile(path):
raise Exception("Project configuration not found: {}".format(path))
project = Data.element()
cfg = ConfigFile(path)
cfg.merge_into(project)
if "path" not in project:
project["path"] = os.path.dirname(path)
return project
def load_flow(self, uri):
m = self.FLOW_URI_RE.match(uri)
if m is None:
raise Exception("Malformed flow uri: {}".format(uri))
project_name, flow_name, version = m.groups()
project = self._projects.get(project_name)
if project is None:
raise Exception("Project not found: {}".format(project_name))
return project.load_flow(flow_name, version=version)
def __iter__(self):
return iter(self._projects.values())
class Project(object):
def __init__(self, conf, name=None, path=None):
self._log = logger.get_logger(_LOGGER_NAME)
self.name = name or conf.get("name")
self.path = path or conf.get("path")
self._source = conf.get("source")
self._setup = conf.get("setup")
self._update = conf.get("update")
self._clean = conf.get("clean")
self.flows = conf.get("flows")
self.platforms = conf.get("platforms") # + prefix = allowed, - prefix = not allowed
self._flow_loader = None
def initialize(self):
self._log.info("Initializing project {} ...".format(self.name))
if not os.path.exists(self.path):
self.download()
self.setup()
flow_paths = [os.path.join(self.path, flow) if not os.path.isabs(flow) else flow for flow in self.flows]
self._flow_loader = FlowLoader(flow_paths)
for uri, path in self._flow_loader.flow_files.items():
self._log.debug("{0} : {1}".format(uri, path))
def load_flow(self, flow_name, version=None):
flow = self._flow_loader.load_from_canonical(flow_name, version=version)
flow.project = self
return flow
def load_from_ref(self, ref):
flow = self._flow_loader.load_from_ref(ref)
flow.project = self
return flow
def download(self):
# TODO Download
if not os.path.exists(self.path):
raise Exception("Failed downloading of project {}".format(self.name))
def setup(self):
pass
def update(self):
pass
def clean(self):
pass
def load_flows(self):
pass
def __repr__(self):
sb = ["Project:"]
sb += [" name={0}".format(self.name)]
sb += [" path={0}".format(self.path)]
sb += [" flows={0}".format(self.flows)]
return "\n".join(sb)
|
import argparse
import logging
import os
import time
import numpy as np
import torch
import torch.nn as nn
from data_cleaning import ParkingDataLoader
from torch.utils.data import TensorDataset, DataLoader
parser = argparse.ArgumentParser()
parser.add_argument('--network', type=str, choices=['resnet', 'odenet'], default='odenet')
parser.add_argument('--tol', type=float, default=1e-3)
parser.add_argument('--adjoint', type=eval, default=False, choices=[True, False])
parser.add_argument('--downsampling-method', type=str, default='conv', choices=['conv', 'res'])
parser.add_argument('--nepochs', type=int, default=100)
parser.add_argument('--data_aug', type=eval, default=True, choices=[True, False])
parser.add_argument('--lr', type=float, default=0.01)
parser.add_argument('--batch_size', type=int, default=256)
parser.add_argument('--test_batch_size', type=int, default=256)
parser.add_argument('--save', type=str, default='./experiment1')
parser.add_argument('--debug', action='store_true')
parser.add_argument('--gpu', type=int, default=0)
args = parser.parse_args()
loader = ParkingDataLoader()
train_data, validation_data, test_data = loader.get_train_validation_test_datasets(validation_split=0.16,
test_split=0.2)
if args.adjoint:
from torchdiffeq import odeint_adjoint as odeint
else:
from torchdiffeq import odeint
def norm(dim):
return nn.GroupNorm(min(32, dim), dim)
class ODEfunc(nn.Module):
def __init__(self, dim):
super(ODEfunc, self).__init__()
self.relu = nn.ReLU(inplace=True)
self.lin1 = nn.Linear(dim, dim)
self.lin2 = nn.Linear(dim, dim)
self.lin3 = nn.Linear(dim, dim)
self.nfe = 0
def forward(self, t, x):
self.nfe += 1
out = self.relu(x)
out = self.lin1(out)
out = self.relu(out)
out = self.lin2(out)
out = self.relu(out)
out = self.lin3(out)
return out
class ODEBlock(nn.Module):
def __init__(self, odefunc):
super(ODEBlock, self).__init__()
self.odefunc = odefunc
self.integration_time = torch.tensor([0, 1]).float()
def forward(self, x):
self.integration_time = self.integration_time.type_as(x)
out = odeint(self.odefunc, x, self.integration_time, rtol=args.tol, atol=args.tol)
return out[1]
@property
def nfe(self):
return self.odefunc.nfe
@nfe.setter
def nfe(self, value):
self.odefunc.nfe = value
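# Note: ODEBlock integrates odefunc from t=0 to t=1 at the tolerances set by
# --tol and returns the state at t=1 (out[1]), acting as a continuous-depth
# replacement for a stack of same-width layers.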
class Flatten(nn.Module):
def __init__(self):
super(Flatten, self).__init__()
def forward(self, x):
shape = torch.prod(torch.tensor(x.shape[1:])).item()
return x.view(-1, shape)
class RunningAverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self, momentum=0.99):
self.momentum = momentum
self.reset()
def reset(self):
self.val = None
self.avg = 0
def update(self, val):
if self.val is None:
self.avg = val
else:
self.avg = self.avg * self.momentum + val * (1 - self.momentum)
self.val = val
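# Example: with the default momentum of 0.99, update(1.0) on a fresh meter sets
# avg to 1.0 (first value), and a following update(0.0) leaves
# avg = 1.0 * 0.99 + 0.0 * 0.01 = 0.99.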
def get_parking_loaders(batch_size=128, test_batch_size=256):
    # Build DataLoaders from the module-level parking train/validation/test splits
train_loader = DataLoader(
TensorDataset(
torch.tensor(train_data.drop('Occupied', axis=1).values.astype(np.float32)),
torch.tensor(train_data['Occupied'].values.astype(np.float32).reshape((len(train_data), 1)))
),
batch_size=batch_size, shuffle=True, num_workers=2, drop_last=True
)
train_eval_loader = DataLoader(
TensorDataset(
torch.tensor(validation_data.drop('Occupied', axis=1).values.astype(np.float32)),
torch.tensor(validation_data['Occupied'].values.astype(np.float32).reshape((len(validation_data), 1)))
),
batch_size=test_batch_size, shuffle=True, num_workers=2, drop_last=True
)
test_loader = DataLoader(
TensorDataset(
torch.tensor(test_data.drop('Occupied', axis=1).values.astype(np.float32)),
torch.tensor(test_data['Occupied'].values.astype(np.float32).reshape((len(test_data), 1)))
),
batch_size=test_batch_size, shuffle=True, num_workers=2, drop_last=True
)
return train_loader, test_loader, train_eval_loader
def inf_generator(iterable):
"""Allows training with DataLoaders in a single infinite loop:
for i, (x, y) in enumerate(inf_generator(train_loader)):
"""
iterator = iterable.__iter__()
while True:
try:
yield iterator.__next__()
except StopIteration:
iterator = iterable.__iter__()
def learning_rate_with_decay(batch_size, batch_denom, batches_per_epoch, boundary_epochs, decay_rates):
initial_learning_rate = args.lr * batch_size / batch_denom
boundaries = [int(batches_per_epoch * epoch) for epoch in boundary_epochs]
vals = [initial_learning_rate * decay for decay in decay_rates]
def learning_rate_fn(itr):
lt = [itr < b for b in boundaries] + [True]
i = np.argmax(lt)
return vals[i]
return learning_rate_fn
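# Example: batch_size=256 with batch_denom=128 doubles args.lr as the starting
# rate; the returned function then steps it down by the given decay_rates at
# the iteration boundaries derived from boundary_epochs.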
def one_hot(x, K):
return np.array(x[:, None] == np.arange(K)[None, :], dtype=int)
def accuracy(model, dataset_loader):
losses = []
for x, y in dataset_loader:
x = x.to(device)
y = y.to(device)
logits = model(x)
loss = criterion(logits, y)
losses.append(np.sqrt(loss.cpu().detach().numpy()))
return (1 - sum(losses) / len(losses)) * 100
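# Note: despite the name, accuracy() reports (1 - mean per-batch RMSE) * 100,
# a regression score derived from the MSE criterion, not classification accuracy.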
def count_parameters(model):
return sum(p.numel() for p in model.parameters() if p.requires_grad)
def makedirs(dirname):
if not os.path.exists(dirname):
os.makedirs(dirname)
def get_logger(logpath, filepath, package_files=[], displaying=True, saving=False, debug=False):
logger = logging.getLogger()
if debug:
level = logging.DEBUG
else:
level = logging.INFO
logger.setLevel(level)
if saving:
info_file_handler = logging.FileHandler(logpath, mode="a")
info_file_handler.setLevel(level)
logger.addHandler(info_file_handler)
if displaying:
console_handler = logging.StreamHandler()
console_handler.setLevel(level)
logger.addHandler(console_handler)
return logger
def getModel(size=64, layers=1):
global model
feature_layers = [ODEBlock(ODEfunc(size)) for _ in range(layers)]
fc_layers = [nn.Linear(size, 1)]
model = nn.Sequential(nn.Linear(16, size), *feature_layers, *fc_layers).to(device)
return model
if __name__ == '__main__':
logger = get_logger(logpath=os.path.join(args.save, 'logs'), filepath=os.path.abspath(__file__))
logger.info(args)
device = torch.device('cuda:' + str(args.gpu) if torch.cuda.is_available() else 'cpu')
criterion = nn.MSELoss().to(device)
    train_loader, test_loader, train_eval_loader = get_parking_loaders(
args.batch_size, args.test_batch_size
)
data_gen = inf_generator(train_loader)
batches_per_epoch = len(train_loader)
lr_fn = learning_rate_with_decay(
args.batch_size, batch_denom=128, batches_per_epoch=batches_per_epoch, boundary_epochs=[60, 100, 140],
decay_rates=[1, 0.1, 0.01, 0.001]
)
best_acc = 0
batch_time_meter = RunningAverageMeter()
f_nfe_meter = RunningAverageMeter()
b_nfe_meter = RunningAverageMeter()
end = time.time()
for dims in [768, 512, 256, 128, 64, 32]:
for layers in [12, 10, 8, 6, 4, 2]:
try:
model = getModel(dims, layers=layers)
optimizer = torch.optim.SGD(model.parameters(), lr=args.lr, momentum=0.9)
logger.info(model)
logger.info('Number of parameters: {}'.format(count_parameters(model)))
print(args.nepochs * batches_per_epoch)
with open("results2", mode="a") as f:
f.write("-------------------------------------------------")
f.write("layers: " + str(layers) + "\n")
f.write("dims: " + str(dims) + "\n")
for itr in range(args.nepochs * batches_per_epoch):
for param_group in optimizer.param_groups:
param_group['lr'] = lr_fn(itr)
optimizer.zero_grad()
x, y = data_gen.__next__()
x = x.to(device)
y = y.to(device)
logits = model(x)
loss = criterion(logits, y)
loss.backward()
optimizer.step()
batch_time_meter.update(time.time() - end)
end = time.time()
if itr % batches_per_epoch == 0:
with torch.no_grad():
train_acc = accuracy(model, train_eval_loader)
val_acc = accuracy(model, test_loader)
if val_acc > best_acc:
torch.save({'state_dict': model.state_dict(), 'args': args},
os.path.join(args.save, 'model.pth'))
best_acc = val_acc
print("------------------------")
print("loss:", 100 - np.sqrt(loss.cpu().detach().numpy()) * 100)
logger.info(
"Epoch {:04d} | Time {:.3f} ({:.3f}) | NFE-F {:.1f} | NFE-B {:.1f} | "
"Train Acc {:.10f} | Test Acc {:.10f}".format(
itr // batches_per_epoch, batch_time_meter.val, batch_time_meter.avg,
f_nfe_meter.avg,
b_nfe_meter.avg, train_acc, val_acc
)
)
with open("results2", mode="a") as f:
f.write("Epoch {:04d} | Time {:.3f} ({:.3f}) | NFE-F {:.1f} | NFE-B {:.1f} | "
"Train Acc {:.10f} | Test Acc {:.10f} \n".format(itr, batch_time_meter.val,
batch_time_meter.avg,
f_nfe_meter.avg,
b_nfe_meter.avg, train_acc,
val_acc))
except Exception as error:
print(error)
pass
|
from .interpolateopacity import InterpolatingOpacity
import pickle
import numpy as np
import pathlib
from taurex.mpi import allocate_as_shared
class HDF5Opacity(InterpolatingOpacity):
"""
    Reads and interpolates molecular opacities stored in HDF5 cross-section files
"""
@classmethod
def priority(cls):
return 5
@classmethod
def discover(cls):
import os
import glob
import pathlib
from taurex.cache import GlobalCache
from taurex.util.util import sanitize_molecule_string
path = GlobalCache()['xsec_path']
if path is None:
return []
path = [os.path.join(path, '*.h5'), os.path.join(path, '*.hdf5')]
file_list = [f for glist in path for f in glob.glob(glist)]
discovery = []
interp = GlobalCache()['xsec_interpolation'] or 'linear'
mem = GlobalCache()['xsec_in_memory'] or True
for f in file_list:
op = HDF5Opacity(f, interpolation_mode='linear', in_memory=False)
mol_name = op.moleculeName
discovery.append((mol_name, [f, interp, mem]))
#op._spec_dict.close()
del op
return discovery
def __init__(self, filename, interpolation_mode='exp', in_memory=False):
super().__init__('HDF5Opacity:{}'.format(pathlib.Path(filename).stem[0:10]),
interpolation_mode=interpolation_mode)
self._filename = filename
self._molecule_name = None
self._spec_dict = None
self.in_memory = in_memory
self._load_hdf_file(filename)
@property
def moleculeName(self):
return self._molecule_name
@property
def xsecGrid(self):
return self._xsec_grid
def _load_hdf_file(self, filename):
import h5py
import astropy.units as u
# Load the pickle file
self.debug('Loading opacity from {}'.format(filename))
self._spec_dict = h5py.File(filename, 'r')
self._wavenumber_grid = self._spec_dict['bin_edges'][:]
#temperature_units = self._spec_dict['t'].attrs['units']
#t_conversion = u.Unit(temperature_units).to(u.K).value
self._temperature_grid = self._spec_dict['t'][:] # *t_conversion
pressure_units = self._spec_dict['p'].attrs['units']
try:
p_conversion = u.Unit(pressure_units).to(u.Pa)
        except ValueError:
p_conversion = u.Unit(pressure_units, format="cds").to(u.Pa)
self._pressure_grid = self._spec_dict['p'][:]*p_conversion
if self.in_memory:
self._xsec_grid = allocate_as_shared(self._spec_dict['xsecarr'][...], logger=self)
else:
self._xsec_grid = self._spec_dict['xsecarr']
self._resolution = np.average(np.diff(self._wavenumber_grid))
self._molecule_name = self._spec_dict['mol_name'][()]
if isinstance(self._molecule_name, np.ndarray):
self._molecule_name = self._molecule_name[0]
try:
self._molecule_name = self._molecule_name.decode()
except (UnicodeDecodeError, AttributeError,):
pass
from taurex.util.util import ensure_string_utf8
self._molecule_name = ensure_string_utf8(self._molecule_name)
self._min_pressure = self._pressure_grid.min()
self._max_pressure = self._pressure_grid.max()
self._min_temperature = self._temperature_grid.min()
self._max_temperature = self._temperature_grid.max()
if self.in_memory:
self._spec_dict.close()
@property
def wavenumberGrid(self):
return self._wavenumber_grid
@property
def temperatureGrid(self):
return self._temperature_grid
@property
def pressureGrid(self):
return self._pressure_grid
@property
def resolution(self):
return self._resolution
    # Reference note: the bilinear interpolation in (P, T) takes the form
    # factor*(q_11*(Pmax-P)*(Tmax-T) + q_21*(P-Pmin)*(Tmax-T) + q_12*(Pmax-P)*(T-Tmin) + q_22*(P-Pmin)*(T-Tmin))
|
# -*- coding: utf-8 -*-
"""Set classifier function."""
__author__ = ["TonyBagnall"]
from sktime.clustering import TimeSeriesKMeans, TimeSeriesKMedoids
def set_clusterer(cls, resample_id=None):
"""Construct a clusterer.
Basic way of creating the clusterer to build using the default settings. This
set up is to help with batch jobs for multiple problems to facilitate easy
reproducability through run_clustering_experiment. You can set up bespoke
clusterers and pass them to run_clustering_experiment if you prefer. It also
serves to illustrate the base clusterer parameters
Parameters
----------
cls : str
indicating which clusterer you want
resample_id : int or None, default = None
clusterer random seed
Return
------
A clusterer.
"""
name = cls.lower()
# Distance based
if name == "kmeans" or name == "k-means":
return TimeSeriesKMeans(
n_clusters=5,
max_iter=50,
averaging_algorithm="mean",
random_state=resample_id,
)
if name == "kmedoids" or name == "k-medoids":
return TimeSeriesKMedoids(
n_clusters=5,
max_iter=50,
averaging_algorithm="mean",
random_state=resample_id,
)
else:
raise Exception("UNKNOWN CLUSTERER")
|
class MatrixFormatter:
def __init__(self, matrix):
self.matrix = matrix
    def pretty_print(self):
        """ Return a human-readable string representation of the matrix """
        out = ""
        rows, cols = self.matrix.shape
        for row in range(rows):
            out += "["
            for col in range(cols):
                out += "%+0.2f " % self.matrix[row][col]
            out += "]\n"
        return out
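# Usage sketch (assumes a 2-D numpy array):
#   import numpy as np
#   print(MatrixFormatter(np.eye(2)).pretty_print())
# which prints:
#   [+1.00 +0.00 ]
#   [+0.00 +1.00 ]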
|
import git
import skbio
import pandas as pd
import numpy as np
import os
# Find home directory for repo
repo = git.Repo("./", search_parent_directories=True)
homedir = repo.working_dir
# Define data directory
datadir = f"{homedir}/data/processed_sequencing/"
# List all fastq.gz files
fastq_file = datadir + "efflux_merged.fastq.gz"
# Use skbio to have a generator to iterate over fastq
print("Loading sequences...\n")
seqs = skbio.io.read(
fastq_file,
format="fastq",
verify="false",
variant="illumina1.8",
)
# Initialize list to save sequence objects
seq_list = list()
# Iterate over sequences
print("Extracting sequence information...\n")
for seq in seqs:
# Extract sequence information
seq_id = seq.metadata["id"]
    quality = seq.positional_metadata["quality"].values
sequence = str(skbio.DNA(sequence=seq, validate=True))
# Append to list
seq_list.append([sequence, quality, np.sum(quality)])
# Initialize dataframe to save sequences
names = ["sequence", "quality", "total_quality"]
df_seq = pd.DataFrame.from_records(seq_list, columns=names)
# Add index and sequence length to dataframe
df_seq["seq_len"] = df_seq.sequence.apply(len)
# Filter sequences and find barcode/promoter
print("Finding promoters and barcodes...\n")
df_filt = df_seq[df_seq.seq_len == 220].reset_index(drop=True)
df_filt.insert(4, "barcode", [seq[0:20] for seq in df_filt.sequence])
df_filt.insert(4, "promoter", [str(skbio.DNA(sequence=seq[-160:]).complement(reverse=True)) for seq in df_filt.sequence])
# Load ordered sequences
df_list = pd.read_csv(f"{homedir}/data/efflux_pumps_twist.csv")
df_list.insert(2, "promoter", [seq[20:180] for seq in df_list.sequence])
# Find promoters
print("Identifying promoters...\n")
name_list = df_list.name.values
list_promoters = df_list.promoter.values
sequenced_promoters = df_filt.promoter.values
ident_list = []
for promoter in sequenced_promoters:
index = np.where(promoter == list_promoters)
if len(index[0]) != 0:
ident_list.append(name_list[index[0][0]])
else:
ident_list.append("None")
df_filt.insert(5, "identified_promoter", ident_list)
# Return found sequences
print("Saving results...\n")
df_return = df_filt[["identified_promoter", "promoter", "barcode"]]
df_return = df_return[df_return.identified_promoter != 'None']
# Define output dir
outputdir = f"{homedir}/data/barcodes/"
if not os.path.exists(outputdir):
os.mkdir(outputdir)
df_return.to_csv(f"{outputdir}/barcode_mapping.csv", index=False)
|
#!/usr/bin/python3
from influxdb import InfluxDBClient
class Influxdb:
def __init__(self, host, port):
        self.host = host
        self.port = port  # typically 8086
def connect(self, db):
self.client = InfluxDBClient(host=self.host, port=self.port)
self.client.switch_database(db)
def write(self, json):
self.client.write_points(json)
def connect_write(self, db, json):
self.connect(db)
self.write(json)
self.client.close()
def jsonBubbler(bubblelist):
l = []
for b in bubblelist:
l.append(
{
"measurement": "bubble",
"tags": {
"name": b["name"],
},
"time": b["endtime"],
"fields": {
"number": b["bubbles"]
}
}
)
return l
def jsonTilt(tiltlist):
l = []
for b in tiltlist:
l.append(
{
"measurement": "tilt",
"tags": {
"name": b["name"],
},
"time": b["time"],
"fields": {
"gravity": b["gravity"],
"temp": b["temp"]
}
}
)
return l
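# Usage sketch (host, database and point values are illustrative):
#   db = Influxdb("localhost", 8086)
#   points = jsonTilt([{"name": "tilt1", "time": "2021-01-01T00:00:00Z",
#                       "gravity": 1.050, "temp": 68.0}])
#   db.connect_write("brewing", points)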
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class GetPollutionInfoData(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, lat: float=None, lng: float=None, range: float=None): # noqa: E501
"""GetPollutionInfoData - a model defined in Swagger
:param lat: The lat of this GetPollutionInfoData. # noqa: E501
:type lat: float
:param lng: The lng of this GetPollutionInfoData. # noqa: E501
:type lng: float
:param range: The range of this GetPollutionInfoData. # noqa: E501
:type range: float
"""
self.swagger_types = {
'lat': float,
'lng': float,
'range': float
}
self.attribute_map = {
'lat': 'lat',
'lng': 'lng',
'range': 'range'
}
self._lat = lat
self._lng = lng
self._range = range
@classmethod
def from_dict(cls, dikt) -> 'GetPollutionInfoData':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The GetPollutionInfoData of this GetPollutionInfoData. # noqa: E501
:rtype: GetPollutionInfoData
"""
return util.deserialize_model(dikt, cls)
@property
def lat(self) -> float:
"""Gets the lat of this GetPollutionInfoData.
Latitude # noqa: E501
:return: The lat of this GetPollutionInfoData.
:rtype: float
"""
return self._lat
@lat.setter
def lat(self, lat: float):
"""Sets the lat of this GetPollutionInfoData.
Latitude # noqa: E501
:param lat: The lat of this GetPollutionInfoData.
:type lat: float
"""
self._lat = lat
@property
def lng(self) -> float:
"""Gets the lng of this GetPollutionInfoData.
Longitude # noqa: E501
:return: The lng of this GetPollutionInfoData.
:rtype: float
"""
return self._lng
@lng.setter
def lng(self, lng: float):
"""Sets the lng of this GetPollutionInfoData.
Longitude # noqa: E501
:param lng: The lng of this GetPollutionInfoData.
:type lng: float
"""
self._lng = lng
@property
def range(self) -> float:
"""Gets the range of this GetPollutionInfoData.
Range in meters to get sensor data # noqa: E501
:return: The range of this GetPollutionInfoData.
:rtype: float
"""
return self._range
@range.setter
def range(self, range: float):
"""Sets the range of this GetPollutionInfoData.
Range in meters to get sensor data # noqa: E501
:param range: The range of this GetPollutionInfoData.
:type range: float
"""
self._range = range
|
from lxml import html
from typing import NamedTuple
import requests
class BookEntity(NamedTuple):
    """ Book information """
title: str
price: float
link: str
store: str
def __str__(self):
        return 'Price: {self.price}; Title: {self.title}; Link: {self.link}; Store: {self.store}'.format(self=self)
class MySpider(object):
def __init__(self, sn):
self.sn = sn
        # Stores all collected book entries
self.book_list = []
    def dangdang(self):
        """ Crawl book data from dangdang.com """
        url = 'http://search.dangdang.com/?key={sn}&act=input'.format(sn=self.sn)
        # Fetch the HTML content
        html_data = requests.get(url).text
        # Build an lxml document for xpath queries
        selector = html.fromstring(html_data)
        # Locate the list of book entries
        ul_list = selector.xpath('//div[@id="search_nature_rg"]/ul/li')
        print(len(ul_list))
        for li in ul_list:
            # Title
            title = li.xpath('a/@title')
            print(title[0])
            # Purchase link
            link = li.xpath('a/@href')
            print(link[0])
            # Price
            price = li.xpath('p[@class="price"]/span[@class="search_now_price"]/text()')
            print(price[0].replace('¥', ''))
            # Store (defaults to Dangdang's own store when none is listed)
            store = li.xpath('p[@class="search_shangjia"]/a/text()')
            store = 'Dangdang self-operated' if len(store) == 0 else store[0]
            print(store)
            print('-----------------------')
            book = BookEntity(
                title=title[0],
                price=price[0].replace('¥', ''),
                link=link[0],
                store=store
            )
            print(book)
            self.book_list.append(book)
    def jd(self):
        """ Crawl JD.com data (not implemented) """
        return []
    def taobao(self):
        """ Crawl Taobao data (not implemented) """
        return []
    def yhd(self):
        """ Crawl Yhd.com (Yihaodian) data (not implemented) """
        return []
    def spider(self):
        """ Crawl all sources and print the books sorted by price (descending) """
self.dangdang()
self.jd()
print('--------------------------------')
bk_list = sorted(self.book_list, key=lambda item: float(item.price), reverse=True)
for book in bk_list:
print(book)
if __name__ == '__main__':
client = MySpider('9787115428028')
client.spider()
|
#!/usr/bin/env python3
import http.client
import sys
import os
import string
import re
from urllib.request import urlopen
class Display () :
def read_input_file() :
print ("Please enter the path and name of the file you would like to open")
userInput = input("Path and file name: ")
        if Display.file_exists( userInput ) == False :
            print ("Invalid path and file name. Please Try again ")
            Display.read_input_file()
            return
f = open( userInput, "r" )
for line in f :
print (line)
def main_menu_display () :
validOption = True
while True :
print ('*' * 30, ' Menu ' , '*' * 30)
print (' ' * 30, ' ____ ' , '' * 30)
print(' ' * 10, "R - Read an input file")
print(' ' * 10, "E - Print every even character from a string")
print (' ' * 10, "W - Read in a web-based URL")
print (' ' * 10, "L - Load file into a list")
print (' ' * 10, "C - Capitalize each word in a string")
print (' ' * 10, "D - Read in an HTML file, filter on <DIV>")
print (' ' * 10, "Q - Quit")
userInput = input("\n\nEnter in an option: ")
if 123 > ord(userInput) and ord(userInput) > 96 :
userInput = chr( ord(userInput) - 32 )
if userInput == "R" :
os.system("cls")
Display.read_input_file()
#break
elif userInput == "E" :
os.system("cls")
Display.print_even_characters_from_string()
#break
elif userInput == "W" :
os.system("cls")
Display.read_in_url()
#break
elif userInput == "L" :
os.system("cls")
Display.load_file_into_list()
#break
elif userInput == "C" :
os.system("cls")
Display.capitalize_each_word_in_string()
#break
elif userInput == "D" :
os.system("cls")
Display.filter_div_from_html_file()
#break
elif userInput == "Q" :
os.system("cls")
break
else :
print ("Invalid input")
def read_in_url() :
url = input("Please enter the an URL")
if Display.url_exists( url ) :
print("URL exists and is accessible")
else :
print("URL does not exists or is inaccessible")
    def url_exists( urlName ) :
        # Check the URL that was actually supplied rather than a fixed host
        try :
            with urlopen( urlName ) as response :
                return response.getcode() == 200
        except Exception :
            return False
def file_exists( fileName ) :
return os.path.isfile( fileName )
    def string_has_length( string ) :
        return len( string ) > 0
def print_even_characters_from_string() :
print ("Please enter a string ")
userInput = input("Input string: ")
        if Display.string_has_length( userInput ) == False :
            print ("Invalid string!!!!!!")
            Display.print_even_characters_from_string()
            return
        index = 1
for everyCharacter in userInput :
if index % 2 == 0 :
print (everyCharacter)
            index = index + 1
def load_file_into_list () :
print ("Please enter the path and name of the file you would like to open")
userInput = input("Path and file name: ")
        if Display.file_exists( userInput ) == False :
            print( "Invalid path and file name. Please Try again ")
            Display.load_file_into_list()
            return
f = open( userInput, "r" )
lineList = []
for line in f :
lineList.append( line )
print (line)
print ("The contents of the ", userInput, " in a list format is : \n", lineList)
def capitalize_each_word_in_string() :
userInput = input("Please enter a string: ")
        if Display.string_has_length( userInput ) == False :
            print("Invalid string provided")
            Display.capitalize_each_word_in_string()
            return
stringParts = userInput.split()
newString = []
newWord = []
for eachString in stringParts :
index = 0
for charInWord in eachString :
if ord( charInWord ) > 96 and ord( charInWord ) < 123 and index == 0 :
newWord.append( chr( ord( charInWord ) - 32 ))
else :
newWord.append( charInWord )
index = index + 1
stringWord = ''.join( newWord )
newString.append( stringWord )
del newWord
newWord = []
newString = ' '.join( newString )
print( newString )
def filter_div_from_html_file() :
divStarts = []
divEnds = []
lineList = []
        data = open( 'foreverhomepage.html', "r" )
        lineNumber = 1
divStarted = False
divList = []
for line in data :
if re.search( r'(<div).*(>)', line, re.DOTALL) :
divStarts.append(lineNumber)
divStarted = True
lineList.append(line)
elif re.search( r'(</div>)', line ) :
divEnds.append(lineNumber)
elif divStarted == True :
lineList.append(line)
if len(divEnds) != 0 and len(divStarts) != 0 :
lineStartIndex = 1
divSet = []
compDiv = ''
rerun = open( 'foreverhomepage.html', "r" )
for subSection in rerun :
if lineStartIndex >= divStarts[ len( divStarts ) -1 ] and lineStartIndex <= divEnds[ len( divEnds ) -1 ] :
divSet.append(subSection)
lineStartIndex = lineStartIndex + 1
compDiv = ''.join( divSet )
divList.append( compDiv )
divStarts.remove( divStarts[ len( divStarts ) - 1 ] )
divEnds.remove( divEnds[ len( divEnds ) - 1 ] )
lineNumber = lineNumber + 1
index = 0
        for div in divList :
            print ("item in list is : " , index , " \t\t list: " , div)
            index = index + 1
Display.main_menu_display()
|
"""The ``resilient_exporters`` package provides data exporters designed to be
resilient to peak utilisation and Internet connection disruptions.
Install:
$ pip install resilient-exporters
"""
import logging
from .exporters import *
from .exceptions import *
# With NullHandler we emit logs only if the user sets up logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
pool = ExporterPool([], use_memory=False)
"""Default, ready to use pool of exporter, which uses a file for data storage"""
add_exporter = pool.add_exporter
send = pool.send
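# Usage sketch (the exporter class name is illustrative; see the `exporters`
# module for the classes this package actually provides):
#   from resilient_exporters import pool, send
#   pool.add_exporter(SomeExporter(...))
#   send({"field": "value"})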
|
from pyquilted.quilted.iconify import Iconify
class Detail:
def serialize(self):
name = self.__class__.__name__ + '-detail'
detail = dict()
detail[name.lower()] = vars(self)
return detail
class Location(Detail, Iconify):
def __init__(self, location=None, via=None, **kwargs):
self.location = location
self.via = self.iconify(value=via)
class Flair(Detail):
def __init__(self, flair=None, flair_icon=None, flair_link=None, **kwargs):
self.flair = flair
self.icon = flair_icon
self.link = flair_link
class Objective(Detail):
def __init__(self, objective=None, **kwargs):
self.objective = objective
class Title(Detail):
def __init__(self, title=None, **kwargs):
self.title = title
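# Example: serialize() keys the attribute dict by the lowercased class name
# plus '-detail'. Assuming Iconify.iconify maps 'train' to an icon value:
#   Location(location='NYC', via='train').serialize()
#   -> {'location-detail': {'location': 'NYC', 'via': <iconified value>}}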
|
from nornir import InitNornir
from nornir.plugins.tasks.networking import netmiko_send_command
from nornir.plugins.functions.text import print_result
from nornir.core.task import Result
import argparse
import sys
from pathlib import Path
from datetime import datetime, timedelta
import yaml
# Nornir Tool
#
# -c "command" or -x command-file
# device/s or -d device-file
#
# TODO:
# -n config.yaml
# -g group -p platform --all
# -w number of workers
# autoenable if enable secret in inventory
# DEBUG = True
# SESSION_LOG = True
# AUTOENABLE = True
#
# Unfinished. A work in progress...
# working examples:
# python nrtool.py -d devices.txt -x commands.txt
# python nrtool.py cisco3 cisco4 -x commands.txt
# python nrtool.py cisco3 cisco4 -c "[{'mode': 'config', 'set': ['int lo999', 'ip address 1.1.1.1'], 'delay-factor': 4}]"
#
# More complicated, but hopefully more reliable command format
# Example 1:
# ---
# - mode: config
# set:
# - int lo999
# - ip address 10.99.9.99 255.255.255.255
# - mode: enable
# set:
# - wr
# - show ip int bri
# - mode: interactive
# set:
# - copy running start
# - mode: interactive
# set:
# - \n
# delay_factor: 2
#
# Example 2:
# ---
# - mode: interactive
# set:
# - copy running start
# - mode: enable
# set:
# - '\n'
# delay_factor: 2
# expect_string: '#'
#
# The copy will likely finish faster in example 2, but it requires the expect_string to behave as expected.
NUM_WORKERS = 1
TIMEOUT = 60
#import ipdb; ipdb.set_trace()
#OSError: Search pattern never detected in send_command_expect: cisco3\#
def netmiko_deploy(task, commands):
net_connect = task.host.get_connection("netmiko", task.nornir.config)
output = net_connect.find_prompt()
if NUM_WORKERS==1: print(output,end='')
result = output
for group in commands:
group_mode = group.get('mode',None)
group_set = group.get('set', [])
group_delay_factor = group.get('delay_factor', 1)
if group_mode not in ('enable','config','interactive'):
continue
if not isinstance(group_set, list) or len(group_set)==0:
continue
if group_mode == "enable":
group_expect_string = group.get('expect_string', None)
for cmd_str in group_set:
if cmd_str=='\\n':
cmd_str='\n'
if group_expect_string:
output = net_connect.send_command(cmd_str,
strip_prompt=False,
strip_command=False,
delay_factor=group_delay_factor,
expect_string=rf'{group_expect_string}'
)
else:
output = net_connect.send_command(cmd_str,
strip_prompt=False,
strip_command=False,
delay_factor=group_delay_factor)
if NUM_WORKERS==1: print(output,end='')
result += output
elif group_mode == "config":
output = net_connect.send_config_set(config_commands=group_set,
delay_factor=group_delay_factor
)
if NUM_WORKERS==1: print(output,end='')
result += output
elif group_mode == "interactive":
for cmd_str in group_set:
if cmd_str=='\\n':
cmd_str='\n'
output = net_connect.send_command_timing(cmd_str,
strip_prompt=False,
strip_command=False,
delay_factor=group_delay_factor
)
if NUM_WORKERS==1: print(output,end='')
result += output
else:
pass
if NUM_WORKERS==1: print('\n\n')
return result
def main(args):
devices = []
commands = []
#get device list to work on
    if len(args.device)>0 and args.d is not None:
print('Cannot have positional device arguments and the -d argument')
sys.exit()
if len(args.device)>0:
devices = args.device
    if args.d is not None:
#check file exists
filename = f"{args.d}"
my_file = Path(filename)
if not my_file.is_file():
print(f"File {args.d} does not exist")
sys.exit()
#read in file
with open(filename, "r") as f:
devices = f.read()
devices = devices.split()
if len(devices) == 0:
print('No devices to work against')
sys.exit()
#filter device list
nr = InitNornir(config_file='config.yaml',
core={'num_workers': NUM_WORKERS},
)
    nr = nr.filter(filter_func=lambda h: h.name in devices)
if len(nr.inventory.hosts.keys()) == 0:
print("No matching devices found in inventory files.")
sys.exit()
#get command set to run
    if args.c is not None and args.x is not None:
        print('Cannot have command string and the -x argument')
        sys.exit()
    if args.c is not None:
        commands = yaml.safe_load(args.c)
    if args.x is not None:
        #check file exists
        filename = f"{args.x}"
        my_file = Path(filename)
        if not my_file.is_file():
            print(f"File {args.x} does not exist")
            sys.exit()
        #read in file
        with open(filename, "r") as f:
            commands = yaml.safe_load(f)
if not isinstance(commands, list):
print('Commands should be a list of dicts')
sys.exit()
#start_time = datetime.now()
results = nr.run(task=netmiko_deploy, commands=commands)
#elapsed_time = datetime.now() - start_time
#print(elapsed_time)
print_result(results)
def run():
parser = argparse.ArgumentParser(
description="Nornir Tool"
)
parser.add_argument(
"-c",
metavar="command",
help="command array in quotes"
)
parser.add_argument(
"-x",
metavar="command file",
help="yaml file containing list of commands"
)
parser.add_argument(
"-d",
metavar="device file",
help="file containing list of devices"
)
parser.add_argument(
'device',
metavar='device',
type=str,
nargs='*',
help='devices')
args = parser.parse_args()
main(args=args)
if __name__ == "__main__":
run()
|
"""Available Commands:
.moon
.smoon
.tmoon"""
from telethon import events
import asyncio
from collections import deque
@borg.on(events.NewMessage(pattern=r"\.tmoon", outgoing=True))
async def _(event):
if event.fwd_from:
return
deq = deque(list("🌗🌘🌑🌒🌓🌔🌕🌖"))
for _ in range(32):
await asyncio.sleep(0.1)
await event.edit("".join(deq))
deq.rotate(1)
@borg.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.1
animation_ttl = range(0, 101)
input_str = event.pattern_match.group(1)
if input_str == "smoon":
await event.edit(input_str)
animation_chars = [
"🌗🌗🌗🌗🌗\n🌓🌓🌓🌓🌓\n🌗🌗🌗🌗🌗\n🌓🌓🌓🌓🌓\n🌗🌗🌗🌗🌗",
"🌘🌘🌘🌘🌘\n🌔🌔🌔🌔🌔\n🌘🌘🌘🌘🌘\n🌔🌔🌔🌔🌔\n🌘🌘🌘🌘🌘",
"🌑🌑🌑🌑🌑\n🌕🌕🌕🌕🌕\n🌑🌑🌑🌑🌑\n🌕🌕🌕🌕🌕\n🌑🌑🌑🌑🌑",
"🌒🌒🌒🌒🌒\n🌖🌖🌖🌖🌖\n🌒🌒🌒🌒🌒\n🌖🌖🌖🌖🌖\n🌒🌒🌒🌒🌒",
"🌓🌓🌓🌓🌓\n🌓🌓🌓🌓🌓\n🌓🌓🌓🌓🌓\n🌗🌗🌗🌗🌗\n🌓🌓🌓🌓🌓",
"🌔🌔🌔🌔🌔\n🌘🌘🌘🌘🌘\n🌔🌔🌔🌔🌔\n🌘🌘🌘🌘🌘\n🌔🌔🌔🌔🌔",
"🌕🌕🌕🌕🌕\n🌑🌑🌑🌑🌑\n🌕🌕🌕🌕🌕\n🌑🌑🌑🌑🌑\n🌕🌕🌕🌕🌕",
"🌖🌖🌖🌖🌖\n🌒🌒🌒🌒🌒\n🌖🌖🌖🌖🌖\n🌒🌒🌒🌒🌒\n🌖🌖🌖🌖🌖"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 8])
@borg.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.1
animation_ttl = range(0, 117)
input_str = event.pattern_match.group(1)
if input_str == "moon":
await event.edit(input_str)
        animation_chars = list("🌗🌘🌑🌒🌓🌔🌕🌖") * 4
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 32])
|
import itertools
import collections.abc
from . import parser
from .utils import to_string
class FeatureFactory(object):
def __init__(self, feature_classes):
self.parser = parser.Parser()
self.feature_classes = feature_classes
self.feature_dict = {f.name: f for f in self.feature_classes}
self.feature_cache = dict()
def _parse_from(self, data):
assert "feat" in data and "kwargs" in data, \
f"incorrect data format: {data}"
name = data["feat"]
args, kwargs = data.get("args", tuple()), data.get("kwargs", dict())
assert name in self.feature_dict, \
(f"unrecognized feature name: {name} (did you update "
f"`create_factory` function in `features.py`?)")
cls = self.feature_dict[name]
for v in itertools.chain(kwargs.values(), args):
vtype = v["type"]
if vtype == "feature":
v["value"] = self.parse_from(v["value"])
args = [v["value"] for v in args]
kwargs = {k: v["value"] for k, v in kwargs.items()}
ret = cls(*args, **kwargs)
ret.factory = self
return ret
def parse_from(self, string):
        if isinstance(string, collections.abc.Mapping):
string = to_string(string)
p = self.parser.parse(string)
if p is None:
raise ValueError("not a valid feature serialization")
feat = self._parse_from(p)
if str(feat) not in self.feature_cache:
self.feature_cache[str(feat)] = feat
return self.feature_cache[str(feat)]
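# Expected parsed structure, inferred from the assertions in _parse_from
# (the feature name and arguments here are hypothetical):
#   {"feat": "ngram",
#    "args": ({"type": "int", "value": 2},),
#    "kwargs": {"lower": {"type": "bool", "value": True}}}
# Argument entries typed "feature" are recursively parsed into feature objects.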
|
from .GloVeModel import GloVeModel
from .Word2vecGoogleModel import Word2vecGoogleModel
|
from flask import Flask, request
from flask_restful import Resource
from .models import Order, orders
class OrderDetails(Resource):
def get(self, id):
order = Order().get_order_by_id(id)
if not order:
return {"message":"Order not found"}, 404
return {"order": order.collect_order_details()}, 200
def delete(self, id):
# global orders
# orders = list(filter(lambda x: x['id'] != id, orders))
# return {"messsage": "item deleted"}
order = Order().get_order_by_id(id)
if not order:
return {"message":"Order not found"}, 404
orders.remove(order)
return {"message":"order deleted successfully"},200
    def put(self, id):
        order = Order().get_order_by_id(id)
        if not order:
            return {"message":"Order not found"}, 404
        order.status="approved"
        return {"message":"status approved"}
class NewOrderPlacement(Resource):
def post(self):
data = request.get_json()
order = Order(data['name'], data["price"],data['no_of_items_ordered'])
orders.append(order)
return {"message":"Food order created"}, 201
class DisplayAllOrders(Resource):
def get(self):
return {"orders":[order.collect_order_details() for order in orders]}
|
"""Python command-line script for managing CKAN user accounts.
This script can only create or update a user account. It does not
manage memberships in groups or roles.
The base URL for the API to use (without the trailing "/api/action" text)
can be specified in an environment variable named 'CKAN_URL'. The value for the
URL will be prompted for input if the environment variable is not set.
The API key to use for authentication can be specified in an environment
variable named 'CKAN_KEY'. The value for the API key will be prompted for input
if the environment variable is not set.
The script expects a single command-line argument, specifying the name
of a file containing an array of user account data dictionaries, in JSON
format and matching the specification at
http://docs.ckan.org/en/2.8/api/#ckan.logic.action.create.user_create
with one addition - a key of 'apikey' to indicate the API Key for an
existing user account should be regenerated. Note the associated value for the
'apikey' label must be the literal 'reset' to trigger the API Key regeneration.
Sample content for the JSON file:
[
{
"name": "user1_name",
"email": "user1@exmple.com",
"fullname": "User One"
},
{
"name": "user2_name",
"email": "user2@example.com",
"fullname": "User Two"
},
{
"id": "d5bd2e1a-0d4b-4381-84e4-98da475679e3",
"name": "exiting_user1_name",
"email": "new_email@example.com",
"fullname": "New Full Name",
"password": "F0rc3dUpd*t3"
},
{
"id": "d5bd2e1a-0d4a-4380-83e4-98da465679e3",
"apikey": "reset"
}
]
"""
import getpass
import logging
import os
import random
import string
import sys
import ckanapi
import json
def create_user_account(connection, user_data_dict):
""" Create a user account using the passed dictionary."""
# If the user_data_dict does not contain a password element, add a random
# string as the password. Set the first 4 characters to satisfy the
# diversity requirements, then add some other random characters to satisfy
# the length requirement.
if 'password' not in user_data_dict:
user_data_dict['password'] = random.choice(string.ascii_uppercase)
user_data_dict['password'] += random.choice(string.ascii_lowercase)
user_data_dict['password'] += random.choice(string.digits)
user_data_dict['password'] += random.choice(string.punctuation)
    # Use letters and digits for the padding characters; string.printable
    # would include whitespace and control characters.
    for x in range(9):
        user_data_dict['password'] += random.choice(string.ascii_letters + string.digits)
try:
result = connection.call_action(action='user_create', data_dict=user_data_dict)
logging.info("Created user account for %s", result['name'])
    except Exception:
logging.exception('Exception creating user account for {}'.format(user_data_dict['name']))
def update_user_account(connection, user_data_dict):
"""Update an existing user account.
Explicitly check whether the update includes regenerating the API key.
"""
try:
if 'apikey' in user_data_dict:
if user_data_dict['apikey'] == 'reset':
api_dict = {'id': user_data_dict['id']}
result = connection.call_action(action='user_generate_apikey', data_dict=api_dict)
logging.info('Regenerated API Key for %s', user_data_dict['id'])
# Remove the API Key key pair from the user_data_dict before proceeding
user_data_dict.pop('apikey')
# Remove any keys for fields that cannot be changed.
user_data_dict.pop('name',None)
user_data_dict.pop('email',None)
if len(user_data_dict) > 1:
result = connection.call_action(action='user_update', data_dict=user_data_dict)
logging.info("Updated user account for %s", result['name'])
else:
logging.info("Nothing left to update for %s", user_data_dict['id'])
    except Exception:
logging.exception('Error attempting to update user account for {}'.format(user_data_dict['id']))
def manage_user_account(connection, user_data_dict):
# If the user_data_dict contains an identifier, assume the user entry
# needs to be updated.
if 'id' in user_data_dict:
update_user_account(connection, user_data_dict)
else:
create_user_account(connection, user_data_dict)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
# Retrieve the URL and API Key from environment variables, if set.
url = os.getenv('CKAN_URL', None)
api_key = os.getenv('CKAN_KEY', None)
# Prompt for the API connection details if missing.
if not url:
url = raw_input('Enter CKAN URL:')
if not api_key:
api_key = getpass.getpass('Enter CKAN API key:')
remote = ckanapi.RemoteCKAN(url, api_key)
if len(sys.argv) > 1:
input_file_name = sys.argv[1]
with open(input_file_name) as input_file:
try:
user_entries = json.load(input_file)
for user_entry in user_entries:
manage_user_account(remote, user_entry)
            except Exception:
logging.exception('Exception reading input file.')
else:
print('Provide a file name containing JSON user entries as the only command argument.')
|
# X10 is so slow...
# I ran a web server to let me control X10 lights through a CM11A gateway.
# there's nearly a second of latency per command! on top of that, it just
# wasn't very reliable (some commands would go, some wouldn't).
# I'm not using this code anymore, but left it here if it's helpful to you
import logging, requests, threading, queue
logger = logging.getLogger(__name__)
import os
if 'X10_HTTP_SERVER' in os.environ:
X10_HTTP_SERVER = os.environ['X10_HTTP_SERVER']
else:
logger.warning("X10_HTTP_SERVER wasn't specified in the environment... disabling!")
X10_HTTP_SERVER = None
if X10_HTTP_SERVER is not None:
x10threadqueue = queue.Queue()
def x10thread():
# We run all the X10 web requests on a separate thread because they are
# so slow we don't want to block the music playing
logger.debug("x10 thread started")
while True:
code,cmd = x10threadqueue.get()
            # use the configured gateway URL instead of a hard-coded address
            r = requests.get(X10_HTTP_SERVER+'/'+code+'/'+cmd)
logger.debug("x10 response = "+r.text)
x10threadqueue.task_done()
thread = threading.Thread(target=x10thread)
thread.daemon = True
thread.start()
X10_DELAY_MS = 850
# X10 (especially X10 via http request) is extremely inconsistent
# on timeliness... but this is about how much latency.
def raw_x10(code,cmd):
x10threadqueue.put((code,cmd))
def x10_macro(time,code,cmd):
""" this macro is to deal with the delay """
return [(time-X10_DELAY_MS,"raw_x10 %s %s"%(code,cmd))]
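    # Example: x10_macro(5000, 'A1', 'on') returns [(4150, 'raw_x10 A1 on')]:
    # the command is scheduled X10_DELAY_MS early so the light actually
    # changes at roughly the requested time.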
def register(commands,macros,commits):
macros['x10'] = x10_macro
commands['raw_x10'] = raw_x10
else:
def register(commands,macros,commits):
pass
|
"""Test haddock3-cfg client."""
import pytest
from haddock import config_expert_levels
from haddock.clis import cli_cfg
from haddock.modules import modules_category
@pytest.fixture(params=config_expert_levels + ("all",))
def config_level(request):
"""Haddock3 config levels."""
return request.param
@pytest.mark.parametrize(
"module",
list(modules_category.keys()),
)
def test_export_cfgs(module, config_level):
"""Test export all configs work."""
cli_cfg.main(module, config_level)
|
################################################################################
# #
# Copyright (C) 2011-2015, Armory Technologies, Inc. #
# Distributed under the GNU Affero General Public License (AGPL v3) #
# See LICENSE or http://www.gnu.org/licenses/agpl.html #
# #
################################################################################
from PyQt4.Qt import * #@UnusedWildImport
from PyQt4.QtGui import * #@UnusedWildImport
from armoryengine.BDM import TheBDM, BDM_BLOCKCHAIN_READY
from qtdefines import * #@UnusedWildImport
from armoryengine.Transaction import UnsignedTransaction, getTxOutScriptType
from armoryengine.Script import convertScriptToOpStrings
from armoryengine.CoinSelection import PySelectCoins, calcMinSuggestedFees,\
calcMinSuggestedFeesHackMS, PyUnspentTxOut, estimateTxSize
from ui.WalletFrames import SelectWalletFrame, LockboxSelectFrame
from armoryengine.MultiSigUtils import \
calcLockboxID, readLockboxEntryStr, createLockboxEntryStr, isBareLockbox,\
isP2SHLockbox
from armoryengine.ArmoryUtils import MAX_COMMENT_LENGTH, getAddrByte
from ui.FeeSelectUI import FeeSelectionDialog
from CppBlockUtils import TXOUT_SCRIPT_P2SH, TXOUT_SCRIPT_P2WPKH, TXOUT_SCRIPT_P2WSH, \
TransactionBatch, SecureBinaryData, RecipientReuseException
from armoryengine.SignerWrapper import SIGNER_DEFAULT
class SendBitcoinsFrame(ArmoryFrame):
def __init__(self, parent, main, initLabel='',
wlt=None, wltIDList=None,
selectWltCallback = None, onlyOfflineWallets=False,
sendCallback = None, createUnsignedTxCallback = None,
spendFromLockboxID=None):
super(SendBitcoinsFrame, self).__init__(parent, main)
self.maxHeight = tightSizeNChar(GETFONT('var'), 1)[1] + 8
self.customUtxoList = []
self.altBalance = None
self.useCustomListInFull = False
self.wlt = wlt
self.wltID = wlt.uniqueIDB58 if wlt else None
self.wltIDList = wltIDList
self.selectWltCallback = selectWltCallback
self.sendCallback = sendCallback
self.createUnsignedTxCallback = createUnsignedTxCallback
self.lbox = self.main.getLockboxByID(spendFromLockboxID)
self.onlyOfflineWallets = onlyOfflineWallets
self.widgetTable = []
self.isMax = False
self.scrollRecipArea = QScrollArea()
self.signerType = SIGNER_DEFAULT
lblRecip = QRichLabel('<b>Enter Recipients:</b>')
lblRecip.setAlignment(Qt.AlignLeft | Qt.AlignBottom)
self.shuffleEntries = True
self.freeOfErrors = True
def getWalletIdList(onlyOfflineWallets):
result = []
if onlyOfflineWallets:
result = self.main.getWatchingOnlyWallets()
else:
result = list(self.main.walletIDList)
return result
self.wltIDList = wltIDList
      if wltIDList is None:
self.wltIDList = getWalletIdList(onlyOfflineWallets)
feetip = self.main.createToolTipWidget(\
self.tr('Transaction fees go to users who contribute computing power to '
'keep the Bitcoin network secure, and in return they get your transaction '
'included in the blockchain faster.'))
self.feeDialog = FeeSelectionDialog(self, self.main, \
self.resolveCoinSelection, self.getCoinSelectionState)
self.feeLblButton = self.feeDialog.getLabelButton()
def feeDlg():
self.feeDialog.exec_()
self.connect(self.feeLblButton, SIGNAL('clicked()'), feeDlg)
# This used to be in the later, expert-only section, but some of these
# are actually getting referenced before being declared. So moved them
# up to here.
self.chkDefaultChangeAddr = QCheckBox(self.tr('Use an existing address for change'))
self.radioFeedback = QRadioButton(self.tr('Send change to first input address'))
self.radioSpecify = QRadioButton(self.tr('Specify a change address'))
self.lblChangeAddr = QRichLabel(self.tr('Change:'))
addrWidgets = self.main.createAddressEntryWidgets(self, maxDetectLen=36,
defaultWltID=self.wltID)
self.edtChangeAddr = addrWidgets['QLE_ADDR']
self.btnChangeAddr = addrWidgets['BTN_BOOK']
self.lblAutoDetect = addrWidgets['LBL_DETECT']
self.getUserChangeScript = addrWidgets['CALLBACK_GETSCRIPT']
self.chkRememberChng = QCheckBox(self.tr('Remember for future transactions'))
self.vertLine = VLINE()
self.ttipSendChange = self.main.createToolTipWidget(\
self.tr('Most transactions end up with oversized inputs and Armory will send '
'the change to the next address in this wallet. You may change this '
'behavior by checking this box.'))
self.ttipFeedback = self.main.createToolTipWidget(\
self.tr('Guarantees that no new addresses will be created to receive '
'change. This reduces anonymity, but is useful if you '
'created this wallet solely for managing imported addresses, '
'and want to keep all funds within existing addresses.'))
self.ttipSpecify = self.main.createToolTipWidget(\
self.tr('You can specify any valid Bitcoin address for the change. '
'<b>NOTE:</b> If the address you specify is not in this wallet, '
'Armory will not be able to distinguish the outputs when it shows '
'up in your ledger. The change will look like a second recipient, '
'and the total debit to your wallet will be equal to the amount '
'you sent to the recipient <b>plus</b> the change.'))
self.ttipUnsigned = self.main.createToolTipWidget(\
self.tr('Check this box to create an unsigned transaction to be signed'
' and/or broadcast later.'))
self.unsignedCheckbox = QCheckBox(self.tr('Create Unsigned'))
self.RBFcheckbox = QCheckBox(self.tr('enable RBF'))
self.RBFcheckbox.setChecked(True)
self.ttipRBF = self.main.createToolTipWidget(\
         self.tr('RBF flagged inputs allow respending the underlying outpoint for a '
            'higher fee as long as the original spending transaction remains '
            'unconfirmed. <br><br>'
            'Checking this box will RBF flag all inputs in this transaction.'))
self.btnSend = QPushButton(self.tr('Send!'))
self.btnCancel = QPushButton(self.tr('Cancel'))
self.connect(self.btnCancel, SIGNAL(CLICKED), parent.reject)
self.btnPreviewTx = QLabelButton("Preview Transaction")
self.connect(self.btnPreviewTx, SIGNAL('clicked()'), self.previewTx)
      # Create a standard wallet chooser frame. Pass the callback method
      # for when the user selects a wallet.
if self.lbox is None:
coinControlCallback = self.coinControlUpdate if self.main.usermode == USERMODE.Expert else None
RBFcallback = self.RBFupdate if self.main.usermode == USERMODE.Expert else None
self.frmSelectedWlt = SelectWalletFrame(parent, main,
VERTICAL,
self.wltID,
wltIDList=self.wltIDList,
selectWltCallback=self.setWallet, \
coinControlCallback=coinControlCallback,
onlyOfflineWallets=self.onlyOfflineWallets,
RBFcallback=RBFcallback)
else:
self.frmSelectedWlt = LockboxSelectFrame(parent, main,
VERTICAL,
self.lbox.uniqueIDB58)
self.setupCoinSelectionForLockbox(self.lbox)
      # Only show the Create Unsigned Transaction button if there is a callback for it.
      # Otherwise the containing dialog or wizard will provide the offline tx button.
metaButtonList = [self.btnPreviewTx, STRETCH, self.RBFcheckbox, self.ttipRBF]
buttonList = []
if self.createUnsignedTxCallback:
self.connect(self.unsignedCheckbox, SIGNAL(CLICKED), self.unsignedCheckBoxUpdate)
buttonList.append(self.unsignedCheckbox)
buttonList.append(self.ttipUnsigned)
buttonList.append(STRETCH)
buttonList.append(self.btnCancel)
# Only add the Send Button if there's a callback for it
# Otherwise the containing dialog or wizard will provide the send button
if self.sendCallback:
self.connect(self.btnSend, SIGNAL(CLICKED), self.createTxAndBroadcast)
buttonList.append(self.btnSend)
txFrm = makeHorizFrame([self.feeLblButton, feetip], STYLE_RAISED, condenseMargins=True)
metaFrm = makeHorizFrame(metaButtonList, STYLE_RAISED, condenseMargins=True)
buttonFrame = makeHorizFrame(buttonList, condenseMargins=True)
btnEnterURI = QPushButton(self.tr('Manually Enter "bitcoin:" Link'))
ttipEnterURI = self.main.createToolTipWidget( self.tr(
'Armory does not always succeed at registering itself to handle '
'URL links from webpages and email. '
'Click this button to copy a "bitcoin:" link directly into Armory.'))
self.connect(btnEnterURI, SIGNAL("clicked()"), self.clickEnterURI)
fromFrameList = [self.frmSelectedWlt]
if not self.main.usermode == USERMODE.Standard:
frmEnterURI = makeHorizFrame([btnEnterURI, ttipEnterURI], condenseMargins=True)
fromFrameList.append(frmEnterURI)
########################################################################
# In Expert usermode, allow the user to modify source addresses
if self.main.usermode == USERMODE.Expert:
sendChangeToFrame = QFrame()
sendChangeToLayout = QGridLayout()
sendChangeToLayout.addWidget(self.lblChangeAddr, 0,0)
sendChangeToLayout.addWidget(self.edtChangeAddr, 0,1)
sendChangeToLayout.addWidget(self.btnChangeAddr, 0,2)
sendChangeToLayout.addWidget(self.lblAutoDetect, 1,1, 1,2)
sendChangeToLayout.setColumnStretch(0,0)
sendChangeToLayout.setColumnStretch(1,1)
sendChangeToLayout.setColumnStretch(2,0)
sendChangeToFrame.setLayout(sendChangeToLayout)
btngrp = QButtonGroup(self)
btngrp.addButton(self.radioFeedback)
btngrp.addButton(self.radioSpecify)
btngrp.setExclusive(True)
self.connect(self.chkDefaultChangeAddr, SIGNAL('toggled(bool)'), self.toggleChngAddr)
self.connect(self.radioSpecify, SIGNAL('toggled(bool)'), self.toggleSpecify)
frmChngLayout = QGridLayout()
         i = 0
frmChngLayout.addWidget(self.chkDefaultChangeAddr, i, 0, 1, 6)
frmChngLayout.addWidget(self.ttipSendChange, i, 6, 1, 2)
i += 1
frmChngLayout.addWidget(self.radioFeedback, i, 1, 1, 5)
frmChngLayout.addWidget(self.ttipFeedback, i, 6, 1, 2)
i += 1
frmChngLayout.addWidget(self.radioSpecify, i, 1, 1, 5)
frmChngLayout.addWidget(self.ttipSpecify, i, 6, 1, 2)
i += 1
frmChngLayout.addWidget(sendChangeToFrame, i, 1, 1, 6)
i += 1
frmChngLayout.addWidget(self.chkRememberChng, i, 1, 1, 7)
frmChngLayout.addWidget(self.vertLine, 1, 0, i - 1, 1)
frmChngLayout.setColumnStretch(0,1)
frmChngLayout.setColumnStretch(1,1)
frmChngLayout.setColumnStretch(2,1)
frmChngLayout.setColumnStretch(3,1)
frmChngLayout.setColumnStretch(4,1)
frmChngLayout.setColumnStretch(5,1)
frmChngLayout.setColumnStretch(6,1)
frmChangeAddr = QFrame()
frmChangeAddr.setLayout(frmChngLayout)
frmChangeAddr.setFrameStyle(STYLE_SUNKEN)
fromFrameList.append('Stretch')
fromFrameList.append(frmChangeAddr)
else:
fromFrameList.append('Stretch')
frmBottomLeft = makeVertFrame(fromFrameList, STYLE_RAISED, condenseMargins=True)
lblSend = QRichLabel(self.tr('<b>Sending from Wallet:</b>'))
lblSend.setAlignment(Qt.AlignLeft | Qt.AlignBottom)
leftFrame = makeVertFrame([lblSend, frmBottomLeft], condenseMargins=True)
rightFrame = makeVertFrame(\
[lblRecip, self.scrollRecipArea, txFrm, metaFrm, buttonFrame], condenseMargins=True)
layout = QHBoxLayout()
layout.addWidget(leftFrame, 0)
layout.addWidget(rightFrame, 1)
layout.setContentsMargins(0,0,0,0)
layout.setSpacing(0)
self.setLayout(layout)
self.makeRecipFrame(1)
self.setWindowTitle(self.tr('Send Bitcoins'))
self.setMinimumHeight(self.maxHeight * 20)
if self.lbox:
self.toggleSpecify(False)
self.toggleChngAddr(False)
hexgeom = self.main.settings.get('SendBtcGeometry')
if len(hexgeom) > 0:
geom = QByteArray.fromHex(hexgeom)
self.restoreGeometry(geom)
# Use this to fire wallet change after the constructor is complete.
   # If it's called during construction, then self's container may not exist yet.
def fireWalletChange(self):
# Set the wallet in the wallet selector and let all of display components
# react to it. This is at the end so that we can be sure that all of the
# components that react to setting the wallet exist.
if self.lbox:
self.unsignedCheckbox.setChecked(True)
self.unsignedCheckbox.setEnabled(False)
else:
self.frmSelectedWlt.updateOnWalletChange()
self.unsignedCheckBoxUpdate()
#############################################################################
def unsignedCheckBoxUpdate(self):
if self.unsignedCheckbox.isChecked():
self.btnSend.setText(self.tr('Continue'))
self.btnSend.setToolTip(self.tr('Click to create an unsigned transaction!'))
else:
self.btnSend.setText(self.tr('Send!'))
self.btnSend.setToolTip(self.tr('Click to send bitcoins!'))
#############################################################################
def getRBFFlag(self):
return self.RBFcheckbox.checkState() == Qt.Checked
#############################################################################
def addOneRecipient(self, addr160, amt, msg, label=None, plainText=None):
"""
plainText arg can be used, and will override addr160. It is for
injecting either fancy script types, or special keywords into the
address field, such as a lockbox ID
"""
if label is not None and addr160:
self.wlt.setComment(addr160, label)
if len(self.widgetTable) > 0:
lastIsEmpty = True
for widg in ['QLE_ADDR', 'QLE_AMT', 'QLE_COMM']:
if len(str(self.widgetTable[-1][widg].text())) > 0:
lastIsEmpty = False
else:
lastIsEmpty = False
if not lastIsEmpty:
self.makeRecipFrame(len(self.widgetTable) + 1)
if amt:
amt = coin2str(amt, maxZeros=2).strip()
if plainText is None:
plainText = hash160_to_addrStr(addr160)
self.widgetTable[-1]['QLE_ADDR'].setText(plainText)
self.widgetTable[-1]['QLE_ADDR'].setCursorPosition(0)
self.widgetTable[-1]['QLE_AMT'].setText(amt)
self.widgetTable[-1]['QLE_AMT'].setCursorPosition(0)
self.widgetTable[-1]['QLE_COMM'].setText(msg)
self.widgetTable[-1]['QLE_COMM'].setCursorPosition(0)
self.addCoinSelectionRecipient(len(self.widgetTable) - 1)
self.resolveCoinSelection()
#############################################################################
# Now that the wallet can change in the context of the send dialog, this
# method is used as a callback for when the wallet changes
# isDoubleClick is unused - do not accept or close dialog on double click
def setWallet(self, wlt, isDoubleClick=False):
self.wlt = wlt
self.wltID = wlt.uniqueIDB58 if wlt else None
self.setupCoinSelectionInstance()
if not TheBDM.getState() == BDM_BLOCKCHAIN_READY:
self.lblSummaryBal.setText('(available when online)', color='DisableFG')
if self.main.usermode == USERMODE.Expert:
# Pre-set values based on settings
chngBehave = self.main.getWltSetting(self.wltID, 'ChangeBehavior')
chngAddr = self.main.getWltSetting(self.wltID, 'ChangeAddr')
if chngBehave == 'Feedback':
self.chkDefaultChangeAddr.setChecked(True)
self.radioFeedback.setChecked(True)
self.radioSpecify.setChecked(False)
self.toggleChngAddr(True)
self.chkRememberChng.setChecked(True)
elif chngBehave == 'Specify':
self.chkDefaultChangeAddr.setChecked(True)
self.radioFeedback.setChecked(False)
self.radioSpecify.setChecked(True)
self.toggleChngAddr(True)
if checkAddrStrValid(chngAddr):
self.edtChangeAddr.setText(chngAddr)
self.edtChangeAddr.setCursorPosition(0)
self.chkRememberChng.setChecked(True)
else:
            # The remaining option is "NewAddr", but it is also the safe default
            # to fall through to if the stored setting is missing or malformed.
self.chkDefaultChangeAddr.setChecked(False)
self.radioFeedback.setChecked(False)
self.radioSpecify.setChecked(False)
self.toggleChngAddr(False)
if (self.chkDefaultChangeAddr.isChecked() and \
not self.radioFeedback.isChecked() and \
not self.radioSpecify.isChecked()):
self.radioFeedback.setChecked(True)
      # If there is an unsigned-tx callback then we have a send button and unsigned checkbox to update
if self.createUnsignedTxCallback:
self.unsignedCheckbox.setChecked(wlt.watchingOnly)
self.unsignedCheckbox.setEnabled(not wlt.watchingOnly)
self.unsignedCheckBoxUpdate()
if self.selectWltCallback:
self.selectWltCallback(wlt)
#############################################################################
def setupCoinSelectionInstance(self):
if self.wlt is None:
self.coinSelection = None
return
try:
self.coinSelection = self.wlt.cppWallet.getCoinSelectionInstance()
except Cpp.DbErrorMsg as dbErr:
LOGERROR('DB error: %s', dbErr.what())
try:
self.resetCoinSelectionRecipients()
except:
pass
#############################################################################
def setupCoinSelectionForLockbox(self, lbox):
try:
lbCppWlt = self.main.cppLockboxWltMap[lbox.uniqueIDB58]
self.coinSelection = Cpp.CoinSelectionInstance(\
lbCppWlt, lbox.M, lbox.N, \
TheBDM.getTopBlockHeight(), lbCppWlt.getSpendableBalance())
except:
self.coinSelection = None
#############################################################################
def resetCoinSelectionRecipients(self):
if self.coinSelection is None:
return
self.coinSelection.resetRecipients()
for row in range(len(self.widgetTable)):
self.addCoinSelectionRecipient(row)
try:
self.resolveCoinSelection()
except:
pass
#############################################################################
def addCoinSelectionRecipient(self, id_):
try:
coinSelRow = self.widgetTable[id_]
scrAddr = str(coinSelRow['QLE_ADDR'].text()).strip()
if len(scrAddr) == 0:
raise BadAddressError('Empty address string')
try:
prefix, h160 = addrStr_to_hash160(scrAddr)
except:
h160 = Cpp.BtcUtils_bech32ToScript(scrAddr, BECH32_PREFIX)[2:]
if len(h160) == 20:
prefix = SCRADDR_P2WPKH_BYTE
elif len(h160) == 32:
prefix = SCRADDR_P2WSH_BYTE
scrAddr = prefix + h160
valueStr = str(coinSelRow['QLE_AMT'].text()).strip()
value = str2coin(valueStr, negAllowed=False)
self.coinSelection.addRecipient(scrAddr, value)
except:
self.resetCoinSelectionText()
#############################################################################
def updateCoinSelectionRecipient(self, uid):
try:
id_ = -1
for i in range(len(self.widgetTable)):
if self.widgetTable[i]['UID'] == uid:
id_ = i
if id_ == -1:
raise Exception()
coinSelRow = self.widgetTable[id_]
if 'OP_RETURN' not in coinSelRow:
addrStr = str(coinSelRow['QLE_ADDR'].text()).strip()
try:
prefix, h160 = addrStr_to_hash160(addrStr)
except:
#recipient input is not an address, is it a locator instead?
scriptDict = self.main.getScriptForUserString(addrStr)
if scriptDict['Script'] == None:
raise Exception("invalid addrStr in recipient")
if scriptDict['IsBech32'] == False:
scraddr = script_to_scrAddr(scriptDict['Script'])
prefix = scraddr[0]
h160 = scraddr[1:]
else:
h160 = Cpp.BtcUtils_bech32ToScript(addrStr, BECH32_PREFIX)[2:]
if len(h160) == 20:
prefix = SCRADDR_P2WPKH_BYTE
elif len(h160) == 32:
prefix = SCRADDR_P2WSH_BYTE
scrAddr = prefix + h160
valueStr = str(coinSelRow['QLE_AMT'].text()).strip()
try:
value = str2coin(valueStr, negAllowed=False)
except:
value = 0
self.coinSelection.updateRecipient(id_, scrAddr, value)
else:
opreturn_message = str(coinSelRow['QLE_ADDR'].text())
self.coinSelection.updateOpReturnRecipient(id_, opreturn_message)
self.resolveCoinSelection()
except:
self.resetCoinSelectionText()
#############################################################################
def serializeUtxoList(self, utxoList):
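      # Pack each utxo as: value (u64), tx height (u32), tx index (u16),
      # txout index (u16), tx hash (var_str), script (var_str), sequence (u32).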
serializedUtxoList = []
for utxo in utxoList:
bp = BinaryPacker()
bp.put(UINT64, utxo.getValue())
bp.put(UINT32, utxo.getTxHeight())
bp.put(UINT16, utxo.getTxIndex())
bp.put(UINT16, utxo.getTxOutIndex())
bp.put(VAR_STR, utxo.getTxHash())
bp.put(VAR_STR, utxo.getScript())
bp.put(UINT32, utxo.sequence)
serializedUtxoList.append(bp.getBinaryString())
return serializedUtxoList
#############################################################################
def resolveCoinSelection(self):
maxRecipientID = self.getMaxRecipientID()
if maxRecipientID != None:
self.setMaximum(maxRecipientID)
try:
fee, feePerByte, adjust_fee = self.feeDialog.getFeeData()
processFlag = 0
if self.useCustomListInFull:
processFlag += Cpp.USE_FULL_CUSTOM_LIST
if adjust_fee:
processFlag += Cpp.ADJUST_FEE
if self.shuffleEntries:
processFlag += Cpp.SHUFFLE_ENTRIES
if self.customUtxoList is None or len(self.customUtxoList) == 0:
self.coinSelection.selectUTXOs(fee, feePerByte, processFlag)
else:
serializedUtxoList = self.serializeUtxoList(self.customUtxoList)
self.coinSelection.processCustomUtxoList(\
serializedUtxoList, fee, feePerByte, processFlag)
self.feeDialog.updateLabelButton()
except RuntimeError as e:
self.resetCoinSelectionText()
raise e
#############################################################################
def getCoinSelectionState(self):
txSize = self.coinSelection.getSizeEstimate()
flatFee = self.coinSelection.getFlatFee()
feeByte = self.coinSelection.getFeeByte()
return txSize, flatFee, feeByte
#############################################################################
def resetCoinSelectionText(self):
self.feeDialog.resetLabel()
#############################################################################
# Update the available source address list and balance based on results from
# coin control. This callback is now necessary because coin control was moved
# to the Select Wallet Frame
def coinControlUpdate(self, customUtxoList, altBalance, useAll):
self.customUtxoList = customUtxoList
self.altBalance = altBalance
self.useCustomListInFull = useAll
try:
self.resolveCoinSelection()
except:
pass
#############################################################################
def RBFupdate(self, rbfList, altBalance, forceVerbose=False):
self.customUtxoList = rbfList
self.useCustomListInFull = True
self.altBalance = altBalance
try:
self.resolveCoinSelection()
except:
if forceVerbose == False:
return
         #failed to set up the RBF send dialog; maybe the selection cannot cover
         #the auto fee. Force the fee to 0 and warn the user.
self.feeDialog.setZeroFee()
try:
self.resolveCoinSelection()
MsgBoxCustom(MSGBOX.Warning, self.tr('RBF value error'), \
self.tr(
'You are trying to bump the fee of a broadcasted unconfirmed transaction. '
'Unfortunately, your transaction lacks the funds to cover the default fee. '
'Therefore, <b><u>the default fee has currently been set to 0</b></u>.<br><br>'
'You will have to set the appropriate fee and arrange the transaction spend '
'value manually to successfully double spend this transaction.'
), \
yesStr=self.tr('Ok'))
except:
MsgBoxCustom(MSGBOX.Error, self.tr('RBF failure'), \
self.tr(
'You are trying to bump the fee of a broadcasted unconfirmed transaction. '
'The process failed unexpectedly. To double spend your transaction, pick '
'the relevant output from the RBF Control dialog, found in the Send dialog '
'in Expert User Mode.') , \
yesStr=self.tr('Ok'))
#############################################################################
def handleCppCoinSelectionExceptions(self):
try:
self.coinSelection.rethrow()
except RecipientReuseException as e:
addrList = e.getAddresses()
addrParagraph = '<br>'
for addrEntry in addrList:
addrParagraph = addrParagraph + ' - ' + addrEntry + '<br>'
result = MsgBoxCustom(MSGBOX.Warning, self.tr('Recipient reuse'), \
self.tr(
'The transaction you crafted <b>reuses</b> the following recipient address(es):<br>'
'%1<br>'
' The sum of values for this leg of the transaction amounts to %2 BTC. There is only'
' a total of %3 BTC available in UTXOs to fund this leg of the'
' transaction without <b>damaging your privacy.</b>'
'<br><br>In order to meet the full payment, Armory has to make use of extra '
' UTXOs, <u>and this will result in privacy loss on chain.</u> <br><br>'
'To progress beyond this warning, choose Ignore. Otherwise'
' the operation will be cancelled.'
).arg(addrParagraph, \
coin2str(e.total(), 5, maxZeros=0), \
coin2str(e.value(), 5, maxZeros=0)), \
wCancel=True, yesStr=self.tr('Ignore'), noStr=self.tr('Cancel'))
if not result:
return False
      return True
#############################################################################
def validateInputsGetUSTX(self, peek=False):
self.freeOfErrors = True
scripts = []
addrList = []
self.comments = []
if self.handleCppCoinSelectionExceptions() == False:
return
for row in range(len(self.widgetTable)):
# Verify validity of address strings
widget_obj = self.widgetTable[row]
if 'OP_RETURN' in widget_obj:
continue
addrStr = str(widget_obj['QLE_ADDR'].text()).strip()
self.widgetTable[row]['QLE_ADDR'].setText(addrStr) # overwrite w/ stripped
addrIsValid = True
addrList.append(addrStr)
try:
enteredScript = widget_obj['FUNC_GETSCRIPT']()['Script']
if not enteredScript:
addrIsValid = False
else:
scripts.append(enteredScript)
except:
LOGEXCEPT('Failed to parse entered address: %s', addrStr)
addrIsValid = False
if not addrIsValid:
scripts.append('')
self.freeOfErrors = False
self.updateAddrColor(row, Colors.SlightRed)
numChkFail = sum([1 if len(b)==0 else 0 for b in scripts])
if not self.freeOfErrors:
QMessageBox.critical(self, self.tr('Invalid Address'),
self.tr("You have entered %1 invalid addresses. "
"The errors have been highlighted on the entry screen").arg(str(numChkFail)), QMessageBox.Ok)
for row in range(len(self.widgetTable)):
try:
atype, a160 = addrStr_to_hash160(addrList[row])
if atype == -1 or not atype in [ADDRBYTE,P2SHBYTE]:
net = 'Unknown Network'
if addrList[row][0] in NETWORKS:
net = NETWORKS[addrList[row][0]]
QMessageBox.warning(self, self.tr('Wrong Network!'), self.tr(
'Address %1 is for the wrong network! You are on the <b>%2</b> '
                  'and the address you supplied is for the <b>%3</b>!').arg(row+1, NETWORKS[ADDRBYTE], net), QMessageBox.Ok)
except:
pass
return False
# Construct recipValuePairs and check that all metrics check out
scriptValPairs = []
opreturn_list = []
totalSend = 0
for row in range(len(self.widgetTable)):
widget_obj = self.widgetTable[row]
if 'OP_RETURN' in widget_obj:
opreturn_msg = str(widget_obj['QLE_ADDR'].text())
if len(opreturn_msg) > 80:
self.updateAddrColor(row, Colors.SlightRed)
               QMessageBox.critical(self, self.tr('Invalid OP_RETURN'), \
                  self.tr('You have specified an OP_RETURN message over 80 bytes long in recipient %1!'
                  ).arg(row + 1), QMessageBox.Ok)
return False
opreturn_list.append(opreturn_msg)
continue
try:
valueStr = str(self.widgetTable[row]['QLE_AMT'].text()).strip()
value = str2coin(valueStr, negAllowed=False)
if value == 0:
QMessageBox.critical(self, self.tr('Zero Amount'), \
self.tr('You cannot send 0 BTC to any recipients. <br>Please enter '
'a positive amount for recipient %1.').arg(row+1), QMessageBox.Ok)
return False
except NegativeValueError:
QMessageBox.critical(self, self.tr('Negative Value'), \
self.tr('You have specified a negative amount for recipient %1. <br>Only '
               'positive values are allowed.').arg(row + 1), QMessageBox.Ok)
return False
except TooMuchPrecisionError:
QMessageBox.critical(self, self.tr('Too much precision'), \
self.tr('Bitcoins can only be specified down to 8 decimal places. '
'The smallest value that can be sent is 0.0000 0001 BTC. '
'Please enter a new amount for recipient %1.').arg(row + 1), QMessageBox.Ok)
return False
except ValueError:
QMessageBox.critical(self, self.tr('Missing recipient amount'), \
self.tr('You did not specify an amount to send!'), QMessageBox.Ok)
return False
except:
QMessageBox.critical(self, self.tr('Invalid Value String'), \
self.tr('The amount you specified '
'to send to address %1 is invalid (%2).').arg(row + 1, valueStr), QMessageBox.Ok)
LOGERROR('Invalid amount specified: "%s"', valueStr)
return False
totalSend += value
script = self.widgetTable[row]['FUNC_GETSCRIPT']()['Script']
scriptValPairs.append([script, value])
self.comments.append((str(self.widgetTable[row]['QLE_COMM'].text()), value))
try:
utxoSelect = self.getUsableTxOutList()
except RuntimeError as e:
QMessageBox.critical(self, self.tr('Coin Selection Failure'), \
            self.tr('Coin selection failed with error: <b>%1</b>').arg(e.message), \
QMessageBox.Ok)
return False
fee = self.coinSelection.getFlatFee()
fee_byte = self.coinSelection.getFeeByte()
# Warn user of excessive fee specified
if peek == False:
feebyteStr = "%.2f" % fee_byte
if fee_byte > 10 * MIN_FEE_BYTE:
reply = QMessageBox.warning(self, self.tr('Excessive Fee'), self.tr(
'Your transaction comes with a fee rate of <b>%1 satoshis per byte</b>. '
               '<br><br> '
'This is at least an order of magnitude higher than the minimum suggested fee rate of <b>%2 satoshi/Byte</b>. '
'<br><br>'
'Are you <i>absolutely sure</i> that you want to send with this '
'fee? If you do not want to proceed with this fee rate, click "No".').arg(\
feebyteStr, unicode(MIN_FEE_BYTE)), QMessageBox.Yes | QMessageBox.No)
if not reply==QMessageBox.Yes:
return False
elif fee_byte < MIN_FEE_BYTE:
reply = QMessageBox.warning(self, self.tr('Insufficient Fee'), self.tr(
'Your transaction comes with a fee rate of <b>%1 satoshi/Byte</b>. '
               '<br><br> '
'This is lower than the suggested minimum fee rate of <b>%2 satoshi/Byte</b>. '
'<br><br>'
'Are you <i>absolutely sure</i> that you want to send with this '
'fee? If you do not want to proceed with this fee rate, click "No".').arg(\
feebyteStr, unicode(MIN_FEE_BYTE)), QMessageBox.Yes | QMessageBox.No)
if not reply==QMessageBox.Yes:
return False
if len(utxoSelect) == 0:
QMessageBox.critical(self, self.tr('Coin Selection Error'), self.tr(
'There was an error constructing your transaction, due to a '
'quirk in the way Bitcoin transactions work. If you see this '
'error more than once, try sending your BTC in two or more '
'separate transactions.'), QMessageBox.Ok)
return False
# ## IF we got here, everything is good to go...
# Just need to get a change address and then construct the tx
totalTxSelect = sum([u.getValue() for u in utxoSelect])
totalChange = totalTxSelect - (totalSend + fee)
self.changeScript = ''
self.selectedBehavior = ''
if totalChange > 0:
script,behavior = self.determineChangeScript(\
utxoSelect, scriptValPairs, peek)
self.changeScript = script
self.selectedBehavior = behavior
scriptValPairs.append([self.changeScript, totalChange])
LOGINFO('Change address behavior: %s', self.selectedBehavior)
else:
self.selectedBehavior = NO_CHANGE
# Keep a copy of the originally-sorted list for display
origSVPairs = scriptValPairs[:]
# Anonymize the outputs
if self.shuffleEntries:
random.shuffle(scriptValPairs)
p2shMap = {}
pubKeyMap = {}
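      # Per BIP 125, a transaction signals replaceability when at least one
      # input has a sequence number below 0xfffffffe; drop final-sequence
      # inputs to 0xfffffffd so the whole tx is RBF-flagged.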
if self.getRBFFlag():
for utxo in utxoSelect:
if utxo.sequence == 2**32 - 1:
utxo.sequence = 2**32 - 3
# In order to create the USTXI objects, need to make sure we supply a
# map of public keys that can be included
if self.lbox:
p2shMap = self.lbox.getScriptDict()
ustx = UnsignedTransaction().createFromTxOutSelection( \
utxoSelect, scriptValPairs, \
p2shMap = p2shMap, \
lockTime=TheBDM.getTopBlockHeight())
for i in range(len(ustx.ustxInputs)):
ustx.ustxInputs[i].contribID = self.lbox.uniqueIDB58
for i in range(len(ustx.decorTxOuts)):
if ustx.decorTxOuts[i].binScript == self.lbox.binScript:
ustx.decorTxOuts[i].contribID = self.lbox.uniqueIDB58
else:
# If this has nothing to do with lockboxes, we need to make sure
# we're providing a key map for the inputs
for utxo in utxoSelect:
scrType = getTxOutScriptType(utxo.getScript())
scrAddr = utxo.getRecipientScrAddr()
if scrType in CPP_TXOUT_STDSINGLESIG:
a160 = scrAddr_to_hash160(scrAddr)[1]
addrObj = self.wlt.getAddrByHash160(a160)
if addrObj:
pubKeyMap[scrAddr] = addrObj.binPublicKey65.toBinStr()
elif scrType == CPP_TXOUT_P2SH:
p2shScript = self.wlt.cppWallet.getP2SHScriptForHash(utxo.getScript())
p2shKey = binary_to_hex(script_to_scrAddr(script_to_p2sh_script(
p2shScript)))
p2shMap[p2shKey] = p2shScript
addrIndex = self.wlt.cppWallet.getAssetIndexForAddr(utxo.getRecipientHash160())
try:
addrStr = self.wlt.chainIndexMap[addrIndex]
except:
if addrIndex < -2:
importIndex = self.wlt.cppWallet.convertToImportIndex(addrIndex)
addrStr = self.wlt.linearAddr160List[importIndex]
else:
raise Exception("invalid address index")
addrObj = self.wlt.addrMap[addrStr]
pubKeyMap[scrAddr] = addrObj.binPublicKey65.toBinStr()
'''
If we are consuming any number of SegWit utxos, pass the utxo selection
and outputs to the new signer for processing instead of creating the
unsigned tx in Python.
'''
# Now create the unsigned USTX
ustx = UnsignedTransaction().createFromTxOutSelection(\
utxoSelect, scriptValPairs, pubKeyMap, p2shMap=p2shMap, \
lockTime=TheBDM.getTopBlockHeight())
for msg in opreturn_list:
ustx.addOpReturnOutput(str(msg))
#ustx.pprint()
txValues = [totalSend, fee, totalChange]
if not peek:
if not self.unsignedCheckbox.isChecked():
dlg = DlgConfirmSend(self.wlt, origSVPairs, txValues[1], self, \
self.main, True, ustx)
if not dlg.exec_():
return False
self.signerType = dlg.getSignerType()
else:
self.main.warnNewUSTXFormat()
return ustx
def createTxAndBroadcast(self):
def unlockWallet():
if self.wlt.isLocked:
Passphrase = None
unlockdlg = DlgUnlockWallet(self.wlt, \
self, self.main, 'Send Transaction', returnPassphrase=True)
if unlockdlg.exec_():
if unlockdlg.Accepted == 1:
Passphrase = unlockdlg.securePassphrase.copy()
unlockdlg.securePassphrase.destroy()
if Passphrase is None or self.wlt.kdf is None:
QMessageBox.critical(self.parent(), self.tr('Wallet is Locked'), \
self.tr('Cannot sign transaction while your wallet is locked. '), \
QMessageBox.Ok)
return
else:
self.wlt.kdfKey = self.wlt.kdf.DeriveKey(Passphrase)
Passphrase.destroy()
      # The Send! button was clicked: validate and broadcast the tx
ustx = self.validateInputsGetUSTX()
if ustx:
self.updateUserComments()
if self.createUnsignedTxCallback and self.unsignedCheckbox.isChecked():
self.createUnsignedTxCallback(ustx)
else:
try:
unlockWallet()
self.wlt.mainWnd = self.main
self.wlt.parent = self
commentStr = ''
if len(self.comments) == 1:
commentStr = self.comments[0][0]
else:
for i in range(len(self.comments)):
amt = self.comments[i][1]
if len(self.comments[i][0].strip()) > 0:
commentStr += '%s (%s); ' % (self.comments[i][0], coin2str_approx(amt).strip())
ustxSigned = self.wlt.signUnsignedTx(ustx, signer=self.signerType)
finalTx = ustxSigned.getSignedPyTx(signer=ustxSigned.signerType)
if len(commentStr) > 0:
self.wlt.setComment(finalTx.getHash(), commentStr)
self.main.broadcastTransaction(finalTx)
except:
LOGEXCEPT('Problem sending transaction!')
# TODO: not sure what errors to catch here, yet...
raise
if self.sendCallback:
self.sendCallback()
#############################################################################
def getUsableBalance(self):
if self.lbox is None:
         if self.altBalance is None:
return self.wlt.getBalance('Spendable')
else:
return self.altBalance
else:
lbID = self.lbox.uniqueIDB58
cppWlt = self.main.cppLockboxWltMap.get(lbID)
if cppWlt is None:
LOGERROR('Somehow failed to get cppWlt for lockbox: %s', lbID)
return cppWlt.getSpendableBalance()
#############################################################################
def getUsableTxOutList(self):
self.resolveCoinSelection()
utxoVec = self.coinSelection.getUtxoSelection()
utxoSelect = []
for i in range(len(utxoVec)):
pyUtxo = PyUnspentTxOut().createFromCppUtxo(utxoVec[i])
utxoSelect.append(pyUtxo)
return utxoSelect
#############################################################################
def getDefaultChangeAddress(self, scriptValPairs, peek):
if len(scriptValPairs) == 0:
raise Exception("cannot calculate change without at least one recipient")
def getAddr(addrObj, typeStr):
if typeStr == 'P2PKH':
addrStr = self.wlt.getP2PKHAddrForIndex(addrObj.chainIndex)
elif typeStr == 'P2SH-P2WPKH':
addrStr = self.wlt.getNestedSWAddrForIndex(addrObj.chainIndex)
elif typeStr == 'P2SH-P2PK':
addrStr = self.wlt.getNestedP2PKAddrForIndex(addrObj.chainIndex)
return addrStr
if peek is True:
newAddr = self.wlt.peekNextUnusedAddr()
else:
newAddr = self.wlt.getNextUnusedAddress()
changeType = self.main.getSettingOrSetDefault('Default_ChangeType', DEFAULT_CHANGE_TYPE)
#check if there are any P2SH recipients
haveP2SH = False
haveP2PKH = False
haveBech32 = False
homogenousOutputs = True
for script, val in scriptValPairs:
scripttype = Cpp.BtcUtils.getTxOutScriptTypeInt(script)
if scripttype == TXOUT_SCRIPT_P2SH:
haveP2SH = True
if scripttype == TXOUT_SCRIPT_P2WSH or \
scripttype == TXOUT_SCRIPT_P2WPKH:
haveBech32 = True
else:
            haveP2PKH = True
count = 0
if haveP2SH:
count = count + 1
if haveP2PKH:
count = count + 1
if haveBech32:
count = count + 1
if count > 1:
homogenousOutputs = False
def changeTypeMismatch(changetype, rectype):
QMessageBox.warning(self, self.tr('Change address type mismatch'), self.tr(
"Armory is set to force the change address type to %1.<br>"
"All the recipients in this transaction are of the %2 type.<br><br>"
"If sent as such, this transaction will damage your privacy. It is recommended "
"you let Armory define the change script type automatically. You can do this "
"by going to File -> Settings and picking <u>'Auto change'</u> in the "
"Fee & Change settings tab.<br><br>"
"<b>Note</b>: When paying a P2SH script with the auto change setting on, the "
"change script type will be set to P2SH. Only Armory 0.96 and later can spend "
"from these scripts.<br>"
"If you use an offline signing setup, make sure your signer is up "
"to date.").arg(changetype, rectype), QMessageBox.Ok)
if changeType != 'Auto':
if homogenousOutputs:
scripttype = Cpp.BtcUtils.getTxOutScriptTypeInt(scriptValPairs[0][0])
if scripttype == TXOUT_SCRIPT_P2SH:
scripttype = 'P2SH'
else:
scripttype = 'P2PKH'
if scripttype[0:4] != str(changeType[0:4]):
changeTypeMismatch(changeType, scripttype)
return getAddr(newAddr, changeType)
if not haveP2SH and not haveBech32:
return getAddr(newAddr, 'P2PKH')
#is our Tx SW?
if TheBDM.isSegWitEnabled() == True and self.coinSelection.isSW():
return getAddr(newAddr, 'P2SH-P2WPKH')
else:
return getAddr(newAddr, 'P2SH-P2PK')
#############################################################################
def determineChangeScript(self, utxoList, scriptValPairs, peek=False):
changeScript = ''
changeAddrStr = ''
changeAddr160 = ''
selectedBehavior = 'NewAddr' if self.lbox is None else 'Feedback'
if not self.main.usermode == USERMODE.Expert or \
not self.chkDefaultChangeAddr.isChecked():
# Default behavior for regular wallets is 'NewAddr', but for lockboxes
# the default behavior is "Feedback" (send back to the original addr
if self.lbox is None:
changeAddrStr = self.getDefaultChangeAddress(scriptValPairs, peek)
changeAddr160 = addrStr_to_hash160(changeAddrStr)[1]
changeScript = scrAddr_to_script(addrStr_to_scrAddr(changeAddrStr))
self.wlt.setComment(changeAddr160, CHANGE_ADDR_DESCR_STRING)
else:
changeScript = self.lbox.getScript()
if self.main.usermode == USERMODE.Expert:
if not self.chkDefaultChangeAddr.isChecked():
self.main.setWltSetting(self.wltID, 'ChangeBehavior', selectedBehavior)
else:
if self.radioFeedback.isChecked():
selectedBehavior = 'Feedback'
changeScript = utxoList[0].getScript()
elif self.radioSpecify.isChecked():
selectedBehavior = 'Specify'
changeScript = self.getUserChangeScript()['Script']
if changeScript is None:
QMessageBox.warning(self, self.tr('Invalid Address'), self.tr(
                     'You specified an invalid change address for this transaction.'), QMessageBox.Ok)
return None
scrType = getTxOutScriptType(changeScript)
if scrType in CPP_TXOUT_HAS_ADDRSTR:
changeAddrStr = script_to_addrStr(changeScript)
elif scrType==CPP_TXOUT_MULTISIG:
scrP2SH = script_to_p2sh_script(changeScript)
changeAddrStr = script_to_addrStr(scrP2SH)
if self.main.usermode == USERMODE.Expert and self.chkRememberChng.isChecked():
self.main.setWltSetting(self.wltID, 'ChangeBehavior', selectedBehavior)
if selectedBehavior == 'Specify' and len(changeAddrStr) > 0:
self.main.setWltSetting(self.wltID, 'ChangeAddr', changeAddrStr)
else:
self.main.setWltSetting(self.wltID, 'ChangeBehavior', 'NewAddr')
return changeScript,selectedBehavior
#####################################################################
def getMaxRecipientID(self):
for widget_obj in self.widgetTable:
if 'OP_RETURN' in widget_obj:
continue
if widget_obj['BTN_MAX'].isChecked():
return widget_obj['UID']
return None
#####################################################################
def setMaximum(self, targWidgetID):
#is the box checked?
targetWidget = None
for widget_obj in self.widgetTable:
if widget_obj['UID'] == targWidgetID:
targetWidget = widget_obj
if targetWidget != None and targetWidget['BTN_MAX'].isChecked():
#disable all check boxes but this one
for widget_obj in self.widgetTable:
if 'BTN_MAX' in widget_obj:
widget_obj['BTN_MAX'].setEnabled(False)
targetWidget['BTN_MAX'].setEnabled(True)
targetWidget['QLE_AMT'].setEnabled(False)
else:
#enable all checkboxes and return
for widget_obj in self.widgetTable:
if 'BTN_MAX' in widget_obj:
widget_obj['BTN_MAX'].setEnabled(True)
widget_obj['QLE_AMT'].setEnabled(True)
return
nRecip = len(self.widgetTable)
totalOther = 0
r = 0
try:
bal = self.getUsableBalance()
txFee, fee_byte, adjust = self.feeDialog.getFeeData()
while r < nRecip:
# Use while loop so 'r' is still in scope in the except-clause
if targWidgetID == self.widgetTable[r]['UID']:
r += 1
continue
if 'OP_RETURN' in self.widgetTable[r]:
r += 1
continue
amtStr = str(self.widgetTable[r]['QLE_AMT'].text()).strip()
if len(amtStr) > 0:
totalOther += str2coin(amtStr)
r += 1
if txFee == 0 and fee_byte != 0:
if self.customUtxoList != None and len(self.customUtxoList) > 0:
serializedUtxoList = self.serializeUtxoList(self.customUtxoList)
txFee = self.coinSelection.getFeeForMaxValUtxoVector(serializedUtxoList, fee_byte)
else:
txFee = self.coinSelection.getFeeForMaxVal(fee_byte)
except:
QMessageBox.warning(self, self.tr('Invalid Input'), \
self.tr('Cannot compute the maximum amount '
'because there is an error in the amount '
'for recipient %1.').arg(r + 1,), QMessageBox.Ok)
return
maxStr = coin2str((bal - (txFee + totalOther)), maxZeros=0)
if bal < txFee + totalOther:
QMessageBox.warning(self, self.tr('Insufficient funds'), \
self.tr('You have specified more than your spendable balance to '
'the other recipients and the transaction fee. Therefore, the '
'maximum amount for this recipient would actually be negative.'), \
QMessageBox.Ok)
return
targetWidget['QLE_AMT'].setText(maxStr.strip())
self.isMax = True
#####################################################################
def createSetMaxButton(self, targWidgetID):
newBtn = QCheckBox('MAX')
#newBtn.setMaximumWidth(relaxedSizeStr(self, 'MAX')[0])
newBtn.setToolTip(self.tr('Fills in the maximum spendable amount minus '
'the amounts specified for other recipients '
                                'and the transaction fee.'))
funcSetMax = lambda: self.setMaximum(targWidgetID)
self.connect(newBtn, SIGNAL(CLICKED), funcSetMax)
return newBtn
#####################################################################
def makeRecipFrame(self, nRecip, is_opreturn=False):
frmRecip = QFrame()
frmRecip.setFrameStyle(QFrame.NoFrame)
frmRecipLayout = QVBoxLayout()
def recipientAddrChanged(widget_obj):
def callbk():
self.updateWidgetAddrColor(widget_obj, Colors.Background)
self.updateCoinSelectionRecipient(widget_obj['UID'])
return callbk
def recipientValueChanged(uid):
def callbk():
self.updateCoinSelectionRecipient(uid)
return callbk
def createAddrWidget(widget_obj, r):
widget_obj['LBL_ADDR'] = QLabel('Address %d:' % (r+1))
addrEntryWidgets = self.main.createAddressEntryWidgets(self, maxDetectLen=45, boldDetectParts=1)
widget_obj['FUNC_GETSCRIPT'] = addrEntryWidgets['CALLBACK_GETSCRIPT']
widget_obj['QLE_ADDR'] = addrEntryWidgets['QLE_ADDR']
widget_obj['QLE_ADDR'].setMinimumWidth(relaxedSizeNChar(GETFONT('var'), 20)[0])
widget_obj['QLE_ADDR'].setMaximumHeight(self.maxHeight)
widget_obj['QLE_ADDR'].setFont(GETFONT('var', 9))
self.connect(widget_obj['QLE_ADDR'], SIGNAL('textChanged(QString)'),
recipientAddrChanged(widget_obj))
widget_obj['BTN_BOOK'] = addrEntryWidgets['BTN_BOOK']
widget_obj['LBL_DETECT'] = addrEntryWidgets['LBL_DETECT']
widget_obj['LBL_AMT'] = QLabel('Amount:')
widget_obj['QLE_AMT'] = QLineEdit()
widget_obj['QLE_AMT'].setFont(GETFONT('Fixed'))
widget_obj['QLE_AMT'].setMinimumWidth(tightSizeNChar(GETFONT('Fixed'), 14)[0])
widget_obj['QLE_AMT'].setMaximumHeight(self.maxHeight)
widget_obj['QLE_AMT'].setAlignment(Qt.AlignLeft)
self.connect(widget_obj['QLE_AMT'], SIGNAL('textChanged(QString)'),
recipientValueChanged(widget_obj['UID']))
widget_obj['LBL_BTC'] = QLabel('BTC')
widget_obj['LBL_BTC'].setAlignment(Qt.AlignLeft | Qt.AlignVCenter)
widget_obj['BTN_MAX'] = \
self.createSetMaxButton(widget_obj['UID'])
widget_obj['LBL_COMM'] = QLabel('Comment:')
widget_obj['QLE_COMM'] = QLineEdit()
widget_obj['QLE_COMM'].setFont(GETFONT('var', 9))
widget_obj['QLE_COMM'].setMaximumHeight(self.maxHeight)
widget_obj['QLE_COMM'].setMaxLength(MAX_COMMENT_LENGTH)
def opReturnMessageChanged(widget_obj):
def callbk():
self.updateCoinSelectionRecipient(widget_obj['UID'])
return callbk
def createOpReturnWidget(widget_obj):
widget_obj['LBL_ADDR'] = QLabel('OP_RETURN Message:')
widget_obj['QLE_ADDR'] = QLineEdit()
widget_obj['OP_RETURN'] = ""
self.connect(widget_obj['QLE_ADDR'], SIGNAL('textChanged(QString)'),
recipientAddrChanged(widget_obj))
recip_diff = nRecip - len(self.widgetTable)
if recip_diff > 0:
for i in range(recip_diff):
r = len(self.widgetTable)
self.widgetTable.append({})
self.widgetTable[r]['UID'] = SecureBinaryData().GenerateRandom(8).toHexStr()
if not is_opreturn:
createAddrWidget(self.widgetTable[r], r)
else:
createOpReturnWidget(self.widgetTable[r])
else:
self.widgetTable = self.widgetTable[0:len(self.widgetTable) + recip_diff]
for widget_obj in self.widgetTable:
subfrm = QFrame()
subfrm.setFrameStyle(STYLE_RAISED)
subLayout = QGridLayout()
subLayout.addWidget(widget_obj['LBL_ADDR'], 0,0, 1,1)
subLayout.addWidget(widget_obj['QLE_ADDR'], 0,1, 1,5)
try:
subLayout.addWidget(widget_obj['BTN_BOOK'], 0,6, 1,1)
subLayout.addWidget(widget_obj['LBL_DETECT'], 1,1, 1,6)
subLayout.addWidget(widget_obj['LBL_AMT'], 2,0, 1,1)
subLayout.addWidget(widget_obj['QLE_AMT'], 2,1, 1,2)
subLayout.addWidget(widget_obj['LBL_BTC'], 2,3, 1,1)
subLayout.addWidget(widget_obj['BTN_MAX'], 2,4, 1,1)
subLayout.addWidget(QLabel(''), 2, 5, 1, 2)
subLayout.addWidget(widget_obj['LBL_COMM'], 3,0, 1,1)
subLayout.addWidget(widget_obj['QLE_COMM'], 3,1, 1,6)
except:
pass
subLayout.setContentsMargins(5, 5, 5, 5)
subLayout.setSpacing(3)
subfrm.setLayout(subLayout)
frmRecipLayout.addWidget(subfrm)
btnFrm = QFrame()
btnFrm.setFrameStyle(QFrame.NoFrame)
btnLayout = QHBoxLayout()
lbtnAddRecip = QLabelButton(self.tr('+ Recipient'))
lbtnAddRecip.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
lbtnRmRecip = QLabelButton(self.tr('- Recipient'))
lbtnRmRecip.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
self.connect(lbtnAddRecip, SIGNAL(CLICKED), lambda: self.makeRecipFrame(nRecip + 1))
self.connect(lbtnRmRecip, SIGNAL(CLICKED), lambda: self.makeRecipFrame(nRecip - 1))
if self.main.usermode == USERMODE.Expert:
lbtnAddOpReturn = QLabelButton('+ OP_RETURN')
lbtnAddOpReturn.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
self.connect(lbtnAddOpReturn, SIGNAL(CLICKED), lambda: self.makeRecipFrame(nRecip + 1, True))
btnLayout.addStretch()
btnLayout.addWidget(lbtnAddRecip)
if self.main.usermode == USERMODE.Expert:
btnLayout.addWidget(lbtnAddOpReturn)
btnLayout.addWidget(lbtnRmRecip)
btnFrm.setLayout(btnLayout)
frmRecipLayout.addWidget(btnFrm)
frmRecipLayout.addStretch()
frmRecip.setLayout(frmRecipLayout)
# return frmRecip
self.scrollRecipArea.setWidget(frmRecip)
self.scrollRecipArea.setWidgetResizable(True)
if recip_diff < 0:
self.resetCoinSelectionRecipients()
#############################################################################
def clickEnterURI(self):
dlg = DlgUriCopyAndPaste(self.parent(), self.main)
dlg.exec_()
if len(dlg.uriDict) > 0:
lastIsEmpty = True
for widg in ['QLE_ADDR', 'QLE_AMT', 'QLE_COMM']:
if len(str(self.widgetTable[-1][widg].text())) > 0:
lastIsEmpty = False
if not lastIsEmpty:
self.makeRecipFrame(len(self.widgetTable) + 1)
self.widgetTable[-1]['QLE_ADDR'].setText(dlg.uriDict['address'])
if 'amount' in dlg.uriDict:
amtStr = coin2str(dlg.uriDict['amount'], maxZeros=1).strip()
self.widgetTable[-1]['QLE_AMT'].setText(amtStr)
haveLbl = 'label' in dlg.uriDict
haveMsg = 'message' in dlg.uriDict
dispComment = ''
if haveLbl and haveMsg:
dispComment = dlg.uriDict['label'] + ': ' + dlg.uriDict['message']
elif not haveLbl and haveMsg:
dispComment = dlg.uriDict['message']
elif haveLbl and not haveMsg:
dispComment = dlg.uriDict['label']
self.widgetTable[-1]['QLE_COMM'].setText(dispComment)
#############################################################################
def toggleSpecify(self, b):
self.lblChangeAddr.setVisible(b)
self.edtChangeAddr.setVisible(b)
self.btnChangeAddr.setVisible(b)
self.lblAutoDetect.setVisible(b)
#############################################################################
def toggleChngAddr(self, b):
self.radioFeedback.setVisible(b)
self.radioSpecify.setVisible(b)
self.ttipFeedback.setVisible(b)
self.ttipSpecify.setVisible(b)
self.chkRememberChng.setVisible(b)
self.lblAutoDetect.setVisible(b)
self.vertLine.setVisible(b)
if not self.radioFeedback.isChecked() and not self.radioSpecify.isChecked():
self.radioFeedback.setChecked(True)
self.toggleSpecify(b and self.radioSpecify.isChecked())
#############################################################################
def updateWidgetAddrColor(self, widget, color):
palette = QPalette()
palette.setColor(QPalette.Base, color)
widget['QLE_ADDR'].setPalette(palette);
widget['QLE_ADDR'].setAutoFillBackground(True);
#############################################################################
def updateAddrColor(self, idx, color):
self.updateWidgetAddrColor(self.widgetTable[idx], color)
#############################################################################
def previewTx(self):
ustx = self.validateInputsGetUSTX(peek=True)
if not isinstance(ustx, UnsignedTransaction):
return
txDlg = DlgDispTxInfo(ustx, self.wlt, self.parent(), self.main)
txDlg.exec_()
#############################################################################
def resetRecipients(self):
self.widgetTable = []
#############################################################################
def prefillFromURI(self, prefill):
amount = prefill.get('amount','')
message = prefill.get('message','')
label = prefill.get('label','')
if prefill.get('lockbox',''):
plainStr = createLockboxEntryStr(prefill.get('lockbox',''))
self.addOneRecipient(None, amount, message, None, plainStr)
else:
addrStr = prefill.get('address','')
atype, addr160 = addrStr_to_hash160(addrStr)
if atype == getAddrByte():
self.addOneRecipient(addr160, amount, message, label)
else:
self.addOneRecipient(None, amount, message, label, plainText=addrStr)
#############################################################################
def prefillFromBatch(self, txBatchStr):
batch = TransactionBatch()
batch.processBatchStr(txBatchStr)
prefillData = {}
walletID = batch.getWalletID()
prefillData['walletID'] = walletID
prefillData['recipients'] = []
rcpDict = prefillData['recipients']
recipients = batch.getRecipients()
recCount = len(recipients)
for rcp in recipients:
rcpDict.append([rcp.address_, rcp.value_, rcp.comment_])
spenders = batch.getSpenders()
if len(spenders) > 0:
prefillData['spenders'] = []
spdDict = prefillData['spenders']
for spd in spenders:
spdDict.append([spd.txHash_, spd.index_, spd.sequence_])
changeAddr = batch.getChange().address_
if len(changeAddr) > 0:
prefillData['change'] = changeAddr
fee_rate = batch.getFeeRate()
if fee_rate != 0:
prefillData['fee_rate'] = fee_rate
flat_fee = batch.getFlatFee()
if flat_fee != 0:
prefillData['flat_fee'] = flat_fee
self.prefill(prefillData)
#############################################################################
def prefill(self, prefill):
'''
format:
{
         walletID:str,
recipients:[[b58addr, value, comment], ...],
spenders:[[txHashStr, txOutID, seq], ...],
change:b58addr,
fee_rate:integer,
flat_fee:float
}
'''
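      # A hypothetical example of the format above (all values illustrative):
      #   self.prefill({
      #      'walletID': '27TchCHx2',
      #      'recipients': [['1ExampleB58Addr...', 50000000, 'invoice #12']],
      #      'fee_rate': 150,
      #   })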
#reset recipients
self.resetRecipients()
#wallet
try:
wltid = prefill['walletID']
comboIndex = self.wltIDList.index(wltid)
self.frmSelectedWlt.walletComboBox.setCurrentIndex(comboIndex)
self.fireWalletChange()
except:
pass
#recipients
recipients = prefill['recipients']
for rpt in recipients:
addrStr = rpt[0]
value = rpt[1]
comment = ""
if len(rpt) == 3:
comment = rpt[2]
hash160 = None
try:
prefix, hash160 = addrStr_to_hash160(addrStr)
except:
pass
try:
self.addOneRecipient(hash160, value, comment, plainText=addrStr)
except:
pass
try:
self.resetCoinSelectionRecipients()
except:
pass
#do not shuffle outputs on batches
self.shuffleEntries = False
#change
try:
changeAddr = prefill['change']
self.chkDefaultChangeAddr.setChecked(True)
self.radioSpecify.setChecked(True)
self.edtChangeAddr.setText(changeAddr)
except:
pass
#fee
#spenders
spenders = prefill['spenders']
def findUtxo(utxoList):
utxoDict = {}
for utxo in utxoList:
txhashstr = utxo.getTxHashStr()
if not txhashstr in utxoDict:
utxoDict[txhashstr] = {}
hashDict = utxoDict[txhashstr]
txoutid = int(utxo.getTxOutIndex())
hashDict[txoutid] = utxo
customUtxoList = []
customBalance = 0
for spd in spenders:
txhashstr = spd[0]
txoutid = int(spd[1])
seq = spd[2]
hashDict = utxoDict[txhashstr]
utxo = hashDict[txoutid]
utxo.sequence = seq
customUtxoList.append(utxo)
customBalance += utxo.getValue()
return customUtxoList, customBalance
try:
utxolist, balance = findUtxo(self.wlt.getFullUTXOList())
self.frmSelectedWlt.customUtxoList = utxolist
self.frmSelectedWlt.altBalance = balance
self.frmSelectedWlt.useAllCCList = True
self.frmSelectedWlt.updateOnCoinControl()
except:
utxolist, balance = findUtxo(self.wlt.getRBFTxOutList())
self.frmSelectedWlt.customUtxoList = utxolist
self.frmSelectedWlt.altBalance = balance
self.frmSelectedWlt.updateOnRBF(True)
#############################################################################
def updateUserComments(self):
for row in range(len(self.widgetTable)):
widget_obj = self.widgetTable[row]
if 'OP_RETURN' in widget_obj:
continue
addr_comment = str(self.widgetTable[row]['QLE_COMM'].text())
addr_str = str(self.widgetTable[row]['QLE_ADDR'].text())
try:
addr160 = addrStr_to_hash160(addr_str)[1]
self.wlt.setComment(addr160, addr_comment)
except:
pass
################################################################################
class ReviewOfflineTxFrame(ArmoryDialog):
def __init__(self, parent=None, main=None, initLabel=''):
super(ReviewOfflineTxFrame, self).__init__(parent, main)
self.ustx = None
self.wlt = None
self.lblDescr = QRichLabel('')
ttipDataIsSafe = self.main.createToolTipWidget(\
self.tr('There is no security-sensitive information in this data below, so '
'it is perfectly safe to copy-and-paste it into an '
'email message, or save it to a borrowed USB key.'))
btnSave = QPushButton(self.tr('Save as file...'))
self.connect(btnSave, SIGNAL(CLICKED), self.doSaveFile)
ttipSave = self.main.createToolTipWidget(\
self.tr('Save this data to a USB key or other device, to be transferred to '
'a computer that contains the private keys for this wallet.'))
btnCopy = QPushButton(self.tr('Copy to clipboard'))
self.connect(btnCopy, SIGNAL(CLICKED), self.copyAsciiUSTX)
self.lblCopied = QRichLabel(' ')
self.lblCopied.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
ttipCopy = self.main.createToolTipWidget(\
self.tr('Copy the transaction data to the clipboard, so that it can be '
'pasted into an email or a text document.'))
lblInstruct = QRichLabel(self.tr('<b>Instructions for completing this transaction:</b>'))
self.lblUTX = QRichLabel('')
frmUTX = makeLayoutFrame(HORIZONTAL, [ttipDataIsSafe, self.lblUTX])
frmUpper = makeLayoutFrame(HORIZONTAL, [self.lblDescr], STYLE_SUNKEN)
# Wow, I just cannot get the txtEdits to be the right size without
# forcing them very explicitly
self.txtUSTX = QTextEdit()
self.txtUSTX.setFont(GETFONT('Fixed', 8))
w,h = relaxedSizeNChar(self.txtUSTX, 68)[0], int(12 * 8.2)
self.txtUSTX.setMinimumWidth(w)
self.txtUSTX.setMinimumHeight(h)
self.txtUSTX.setReadOnly(True)
frmLower = QFrame()
frmLower.setFrameStyle(STYLE_RAISED)
frmLowerLayout = QGridLayout()
frmLowerLayout.addWidget(frmUTX, 0, 0, 1, 3)
frmLowerLayout.addWidget(self.txtUSTX, 1, 0, 3, 1)
frmLowerLayout.addWidget(btnSave, 1, 1, 1, 1)
frmLowerLayout.addWidget(ttipSave, 1, 2, 1, 1)
frmLowerLayout.addWidget(btnCopy, 2, 1, 1, 1)
frmLowerLayout.addWidget(ttipCopy, 2, 2, 1, 1)
frmLowerLayout.addWidget(self.lblCopied, 3, 1, 1, 2)
frmLowerLayout.setColumnStretch(0, 1)
frmLowerLayout.setColumnStretch(1, 0)
frmLowerLayout.setColumnStretch(2, 0)
frmLowerLayout.setColumnStretch(3, 0)
frmLowerLayout.setRowStretch(0, 0)
frmLowerLayout.setRowStretch(1, 1)
frmLowerLayout.setRowStretch(2, 1)
frmLowerLayout.setRowStretch(3, 1)
frmLower.setLayout(frmLowerLayout)
frmAll = makeLayoutFrame(VERTICAL, [lblInstruct, \
frmUpper, \
'Space(5)', \
frmLower])
frmAll.layout().setStretch(0, 0)
frmAll.layout().setStretch(1, 0)
frmAll.layout().setStretch(2, 0)
frmAll.layout().setStretch(3, 2)
frmAll.layout().setStretch(4, 1)
frmAll.layout().setStretch(5, 0)
dlgLayout = QVBoxLayout()
dlgLayout.addWidget(frmAll)
self.setLayout(dlgLayout)
def setUSTX(self, ustx):
self.ustx = ustx
self.lblUTX.setText(self.tr('<b>Transaction Data</b> \t (Unsigned ID: %1)').arg(ustx.uniqueIDB58))
self.txtUSTX.setText(ustx.serializeAscii())
def setWallet(self, wlt):
self.wlt = wlt
if determineWalletType(wlt, self.main)[0] in \
[ WLTTYPES.Offline, WLTTYPES.WatchOnly ]:
self.lblDescr.setText(self.tr(
'The block of data shown below is the complete transaction you '
'just requested, but is invalid because it does not contain any '
'signatures. You must take this data to the computer with the '
'full wallet to get it signed, then bring it back here to be '
'broadcast to the Bitcoin network. '
'<br><br>'
'Use "Save as file..." to save an <i>*.unsigned.tx</i> '
'file to USB drive or other removable media. '
'On the offline computer, click "Offline Transactions" on the main '
'window. Load the transaction, <b>review it</b>, then sign it '
            '(the filename will now end with <i>*.signed.tx</i>). Click "Continue" '
'below when you have the signed transaction on this computer. '
'<br><br>'
'<b>NOTE:</b> The USB drive only ever holds public transaction '
'data that will be broadcast to the network. This data may be '
'considered privacy-sensitive, but does <u>not</u> compromise '
'the security of your wallet.'))
else:
self.lblDescr.setText(self.tr(
'You have chosen to create the previous transaction but not sign '
'it or broadcast it, yet. You can save the unsigned '
            'transaction to file, or copy & paste it from the text box. '
            'You can use the following window (after clicking "Continue") to '
            'sign and broadcast the transaction when you are ready.'))
def copyAsciiUSTX(self):
clipb = QApplication.clipboard()
clipb.clear()
clipb.setText(self.txtUSTX.toPlainText())
self.lblCopied.setText('<i>Copied!</i>')
def doSaveFile(self):
""" Save the Unsigned-Tx block of data """
dpid = self.ustx.uniqueIDB58
      suffix = ('' if OS_WINDOWS else '.unsigned.tx')  # PyQt on Windows appends the filter suffix itself
toSave = self.main.getFileSave(\
'Save Unsigned Transaction', \
['Armory Transactions (*.unsigned.tx)'], \
'armory_%s_%s' % (dpid, suffix))
LOGINFO('Saving unsigned tx file: %s', toSave)
try:
theFile = open(toSave, 'w')
theFile.write(self.txtUSTX.toPlainText())
theFile.close()
except IOError:
LOGEXCEPT('Failed to save file: %s', toSave)
pass
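      # For illustration (hypothetical ID): with uniqueIDB58 "27TchDzv" on
      # Linux, the suggested filename above is "armory_27TchDzv_.unsigned.tx";
      # on Windows the suffix is left empty because PyQt appends the filter's
      # extension automatically.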
################################################################################
class SignBroadcastOfflineTxFrame(ArmoryFrame):
"""
We will make the assumption that this Frame is used ONLY for outgoing
transactions from your wallet. This simplifies the logic if we don't
have to identify input senders/values, and handle the cases where those
may not be specified
"""
def __init__(self, parent=None, main=None, initLabel=''):
super(SignBroadcastOfflineTxFrame, self).__init__(parent, main)
self.wlt = None
self.sentToSelfWarn = False
self.fileLoaded = None
lblDescr = QRichLabel(self.tr(
'Copy or load a transaction from file into the text box below. '
'If the transaction is unsigned and you have the correct wallet, '
'you will have the opportunity to sign it. If it is already signed '
'you will have the opportunity to broadcast it to '
'the Bitcoin network to make it final.'))
self.txtUSTX = QTextEdit()
self.txtUSTX.setFont(GETFONT('Fixed', 8))
w,h = relaxedSizeNChar(self.txtUSTX, 68)
#self.txtUSTX.sizeHint = lambda: QSize(w, h)
self.txtUSTX.setMinimumWidth(w)
self.txtUSTX.setMinimumHeight(8*h)
self.txtUSTX.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.btnSign = QPushButton(self.tr('Sign'))
self.btnBroadcast = QPushButton(self.tr('Broadcast'))
self.btnSave = QPushButton(self.tr('Save file...'))
self.btnLoad = QPushButton(self.tr('Load file...'))
self.btnCopy = QPushButton(self.tr('Copy Text'))
self.btnCopyHex = QPushButton(self.tr('Copy Raw Tx (Hex)'))
self.lblCopied = QRichLabel('')
self.lblCopied.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
self.btnSign.setEnabled(False)
self.btnBroadcast.setEnabled(False)
self.connect(self.txtUSTX, SIGNAL('textChanged()'), self.processUSTX)
self.connect(self.btnSign, SIGNAL(CLICKED), self.signTx)
self.connect(self.btnBroadcast, SIGNAL(CLICKED), self.broadTx)
self.connect(self.btnSave, SIGNAL(CLICKED), self.saveTx)
self.connect(self.btnLoad, SIGNAL(CLICKED), self.loadTx)
self.connect(self.btnCopy, SIGNAL(CLICKED), self.copyTx)
self.connect(self.btnCopyHex, SIGNAL(CLICKED), self.copyTxHex)
self.lblStatus = QRichLabel('')
self.lblStatus.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
wStat, hStat = relaxedSizeStr(self.lblStatus, self.tr('Signature is Invalid!'))
self.lblStatus.setMinimumWidth(int(wStat * 1.2))
self.lblStatus.setMinimumHeight(int(hStat * 1.2))
frmDescr = makeLayoutFrame(HORIZONTAL, [lblDescr], STYLE_RAISED)
self.infoLbls = []
# ##
self.infoLbls.append([])
self.infoLbls[-1].append(self.main.createToolTipWidget(\
         self.tr('This is the wallet from which the offline transaction spends bitcoins')))
self.infoLbls[-1].append(QRichLabel('<b>Wallet:</b>'))
self.infoLbls[-1].append(QRichLabel(''))
# ##
self.infoLbls.append([])
self.infoLbls[-1].append(self.main.createToolTipWidget(self.tr('The name of the wallet')))
self.infoLbls[-1].append(QRichLabel(self.tr('<b>Wallet Label:</b>')))
self.infoLbls[-1].append(QRichLabel(''))
# ##
self.infoLbls.append([])
self.infoLbls[-1].append(self.main.createToolTipWidget(self.tr(
'A unique string that identifies an <i>unsigned</i> transaction. '
'This is different than the ID that the transaction will have when '
'it is finally broadcast, because the broadcast ID cannot be '
'calculated without all the signatures')))
self.infoLbls[-1].append(QRichLabel(self.tr('<b>Pre-Broadcast ID:</b>')))
self.infoLbls[-1].append(QRichLabel(''))
# ##
self.infoLbls.append([])
self.infoLbls[-1].append(self.main.createToolTipWidget(\
self.tr('Net effect on this wallet\'s balance')))
self.infoLbls[-1].append(QRichLabel(self.tr('<b>Transaction Amount:</b>')))
self.infoLbls[-1].append(QRichLabel(''))
self.moreInfo = QLabelButton(self.tr('Click here for more<br> information about <br>this transaction'))
self.connect(self.moreInfo, SIGNAL(CLICKED), self.execMoreTxInfo)
frmMoreInfo = makeLayoutFrame(HORIZONTAL, [self.moreInfo], STYLE_SUNKEN)
frmMoreInfo.setMinimumHeight(tightSizeStr(self.moreInfo, 'Any String')[1] * 5)
expert = (self.main.usermode == USERMODE.Expert)
frmBtn = makeLayoutFrame(VERTICAL, [ self.btnSign, \
self.btnBroadcast, \
self.btnSave, \
self.btnLoad, \
self.btnCopy, \
self.btnCopyHex if expert else QRichLabel(''), \
self.lblCopied, \
HLINE(), \
self.lblStatus, \
HLINE(), \
'Stretch', \
frmMoreInfo])
frmBtn.setMaximumWidth(tightSizeNChar(QPushButton(''), 30)[0])
frmInfoLayout = QGridLayout()
for r in range(len(self.infoLbls)):
for c in range(len(self.infoLbls[r])):
frmInfoLayout.addWidget(self.infoLbls[r][c], r, c, 1, 1)
frmInfo = QFrame()
frmInfo.setFrameStyle(STYLE_SUNKEN)
frmInfo.setLayout(frmInfoLayout)
frmBottom = QFrame()
frmBottom.setFrameStyle(STYLE_SUNKEN)
frmBottomLayout = QGridLayout()
frmBottomLayout.addWidget(self.txtUSTX, 0, 0, 1, 1)
frmBottomLayout.addWidget(frmBtn, 0, 1, 2, 1)
frmBottomLayout.addWidget(frmInfo, 1, 0, 1, 1)
# frmBottomLayout.addWidget(frmMoreInfo, 1,1, 1,1)
frmBottom.setLayout(frmBottomLayout)
layout = QVBoxLayout()
layout.addWidget(frmDescr)
layout.addWidget(frmBottom)
self.setLayout(layout)
self.processUSTX()
def processUSTX(self):
# TODO: it wouldn't be TOO hard to modify this dialog to take
# arbitrary hex-serialized transactions for broadcast...
# but it's not trivial either (for instance, I assume
# that we have inputs values, etc)
self.wlt = None
self.leValue = None
self.ustxObj = None
self.idxSelf = []
self.idxOther = []
self.lblStatus.setText('')
self.lblCopied.setText('')
self.enoughSigs = False
self.sigsValid = False
self.ustxReadable = False
ustxStr = str(self.txtUSTX.toPlainText())
if len(ustxStr) > 0:
try:
self.ustxObj = UnsignedTransaction().unserializeAscii(ustxStr)
self.signStat = self.ustxObj.evaluateSigningStatus()
self.enoughSigs = self.signStat.canBroadcast
self.sigsValid = self.ustxObj.verifySigsAllInputs(self.ustxObj.signerType)
self.ustxReadable = True
except BadAddressError:
QMessageBox.critical(self, self.tr('Inconsistent Data!'), \
self.tr('This transaction contains inconsistent information. This '
'is probably not your fault...'), QMessageBox.Ok)
self.ustxObj = None
self.ustxReadable = False
except NetworkIDError:
QMessageBox.critical(self, self.tr('Wrong Network!'), \
self.tr('This transaction is actually for a different network! '
'Did you load the correct transaction?'), QMessageBox.Ok)
self.ustxObj = None
self.ustxReadable = False
except (UnserializeError, IndexError, ValueError):
self.ustxObj = None
self.ustxReadable = False
if not self.enoughSigs or not self.sigsValid or not self.ustxReadable:
self.btnBroadcast.setEnabled(False)
else:
if self.main.netMode == NETWORKMODE.Full:
self.btnBroadcast.setEnabled(True)
else:
self.btnBroadcast.setEnabled(False)
self.btnBroadcast.setToolTip(self.tr('No connection to Bitcoin network!'))
else:
self.ustxObj = None
self.ustxReadable = False
self.btnBroadcast.setEnabled(False)
self.btnSave.setEnabled(True)
self.btnCopyHex.setEnabled(False)
if not self.ustxReadable:
if len(ustxStr) > 0:
self.lblStatus.setText(self.tr('<b><font color="red">Unrecognized!</font></b>'))
else:
self.lblStatus.setText('')
self.btnSign.setEnabled(False)
self.btnBroadcast.setEnabled(False)
self.btnSave.setEnabled(False)
self.makeReviewFrame()
return
elif not self.enoughSigs:
if not self.main.getSettingOrSetDefault('DNAA_ReviewOfflineTx', False):
result = MsgBoxWithDNAA(self, self.main, MSGBOX.Warning, title=self.tr('Offline Warning'), \
msg=self.tr('<b>Please review your transaction carefully before '
'signing and broadcasting it!</b> The extra security of '
'using offline wallets is lost if you do '
'not confirm the transaction is correct!'), dnaaMsg=None)
self.main.writeSetting('DNAA_ReviewOfflineTx', result[1])
self.lblStatus.setText(self.tr('<b><font color="red">Unsigned</font></b>'))
self.btnSign.setEnabled(True)
self.btnBroadcast.setEnabled(False)
elif not self.sigsValid:
self.lblStatus.setText(self.tr('<b><font color="red">Bad Signature!</font></b>'))
self.btnSign.setEnabled(True)
self.btnBroadcast.setEnabled(False)
else:
self.lblStatus.setText(self.tr('<b><font color="green">All Signatures Valid!</font></b>'))
self.btnSign.setEnabled(False)
self.btnCopyHex.setEnabled(True)
# NOTE: We assume this is an OUTGOING transaction. When I pull in the
# multi-sig code, I will have to either make a different dialog,
# or add some logic to this one
FIELDS = enum('Hash', 'OutList', 'SumOut', 'InList', 'SumIn', 'Time', 'Blk', 'Idx')
data = extractTxInfo(self.ustxObj, -1)
# Collect the input wallets (hopefully just one of them)
fromWlts = set()
for scrAddr, amt, a, b, c, script in data[FIELDS.InList]:
wltID = self.main.getWalletForAddr160(scrAddr[1:])
         if wltID != '':
fromWlts.add(wltID)
if len(fromWlts) > 1:
QMessageBox.warning(self, self.tr('Multiple Input Wallets'), \
self.tr('Somehow, you have obtained a transaction that actually pulls from more '
'than one wallet. The support for handling multi-wallet signatures is '
'not currently implemented (this also could have happened if you imported '
'the same private key into two different wallets).') , QMessageBox.Ok)
self.makeReviewFrame()
return
elif len(fromWlts) == 0:
QMessageBox.warning(self, self.tr('Unrelated Transaction'), \
self.tr('This transaction appears to have no relationship to any of the wallets '
'stored on this computer. Did you load the correct transaction?'), \
QMessageBox.Ok)
self.makeReviewFrame()
return
spendWltID = fromWlts.pop()
self.wlt = self.main.walletMap[spendWltID]
toWlts = set()
myOutSum = 0
theirOutSum = 0
rvPairs = []
idx = 0
for scrType, amt, binScript, multiSigList in data[FIELDS.OutList]:
recip = script_to_scrAddr(binScript)
try:
wltID = self.main.getWalletForAddr160(CheckHash160(recip))
except BadAddressError:
wltID = ''
if wltID == spendWltID:
toWlts.add(wltID)
myOutSum += amt
self.idxSelf.append(idx)
else:
rvPairs.append([recip, amt])
theirOutSum += amt
self.idxOther.append(idx)
idx += 1
myInSum = data[FIELDS.SumIn] # because we assume all are ours
      if myInSum is None:
fee = None
else:
fee = myInSum - data[FIELDS.SumOut]
self.leValue = theirOutSum
self.makeReviewFrame()
############################################################################
def makeReviewFrame(self):
# ##
      if self.ustxObj is None:
self.infoLbls[0][2].setText('')
self.infoLbls[1][2].setText('')
self.infoLbls[2][2].setText('')
self.infoLbls[3][2].setText('')
else:
##### 0
##### 1
if self.wlt:
self.infoLbls[0][2].setText(self.wlt.uniqueIDB58)
self.infoLbls[1][2].setText(self.wlt.labelName)
else:
self.infoLbls[0][2].setText(self.tr('[[ Unrelated ]]'))
self.infoLbls[1][2].setText('')
##### 2
self.infoLbls[2][2].setText(self.ustxObj.uniqueIDB58)
##### 3
if self.leValue:
self.infoLbls[3][2].setText(coin2strNZS(self.leValue) + ' BTC')
else:
self.infoLbls[3][2].setText('')
self.moreInfo.setVisible(True)
def execMoreTxInfo(self):
if not self.ustxObj:
self.processUSTX()
if not self.ustxObj:
QMessageBox.warning(self, self.tr('Invalid Transaction'), \
self.tr('Transaction data is invalid and cannot be shown!'), QMessageBox.Ok)
return
leVal = 0 if self.leValue is None else -self.leValue
dlgTxInfo = DlgDispTxInfo(self.ustxObj, self.wlt, self.parent(), self.main, \
precomputeIdxGray=self.idxSelf, precomputeAmt=leVal, txtime=-1)
dlgTxInfo.exec_()
   def signTx(self):
      if not self.wlt:
         QMessageBox.critical(self, self.tr('Cannot Sign'), \
            self.tr('This transaction is not relevant to any of your wallets. '
            'Did you load the correct transaction?'), QMessageBox.Ok)
         return
      if self.ustxObj is None:
         QMessageBox.warning(self, self.tr('Not Signable'), \
            self.tr('This is not a valid transaction, and thus it cannot '
            'be signed. '), QMessageBox.Ok)
         return
      elif self.enoughSigs and self.sigsValid:
         QMessageBox.warning(self, self.tr('Already Signed'), \
            self.tr('This transaction has already been signed!'), QMessageBox.Ok)
         return
if self.wlt and self.wlt.watchingOnly:
QMessageBox.warning(self, self.tr('No Private Keys!'), \
            self.tr('This transaction refers to one of your wallets, but that wallet '
'is a watching-only wallet. Therefore, private keys are '
'not available to sign this transaction.'), \
QMessageBox.Ok)
return
# We should provide the same confirmation dialog here, as we do when
# sending a regular (online) transaction. But the DlgConfirmSend was
# not really designed
ustx = self.ustxObj
svpairs = []
svpairsMine = []
theFee = ustx.calculateFee()
for scrType,value,script,msInfo in ustx.pytxObj.makeRecipientsList():
svpairs.append([script, value])
if scrType in CPP_TXOUT_STDSINGLESIG:
addrStr = script_to_addrStr(script)
if self.wlt.hasAddr(addrStr_to_hash160(addrStr)[1]):
svpairsMine.append([script, value])
elif scrType == CPP_TXOUT_P2SH:
addrStr = script_to_addrStr(script)
if self.wlt.hasScrAddr(addrStr_to_hash160(addrStr)[1]):
svpairsMine.append([script, value])
if len(svpairsMine) == 0 and len(svpairs) > 1:
QMessageBox.warning(self, self.tr('Missing Change'), self.tr(
'This transaction has %1 recipients, and none of them '
'are addresses in this wallet (for receiving change). '
'This can happen if you specified a custom change address '
'for this transaction, or sometimes happens solely by '
'chance with a multi-recipient transaction. It could also '
'be the result of someone tampering with the transaction. '
'<br><br>The transaction is valid and ready to be signed. '
'Please verify the recipient and amounts carefully before '
'confirming the transaction on the next screen.').arg(len(svpairs)), QMessageBox.Ok)
dlg = DlgConfirmSend(self.wlt, svpairs, theFee, self, self.main, pytxOrUstx=ustx)
if not dlg.exec_():
return
if self.wlt.useEncryption and self.wlt.isLocked:
Passphrase = None
unlockdlg = DlgUnlockWallet(self.wlt, self, self.main, self.tr('Send Transaction'), returnPassphrase=True)
if unlockdlg.exec_():
if unlockdlg.Accepted == 1:
Passphrase = unlockdlg.securePassphrase.copy()
unlockdlg.securePassphrase.destroy()
if Passphrase is None or self.wlt.kdf is None:
QMessageBox.critical(self.parent(), self.tr('Wallet is Locked'), \
self.tr('Cannot sign transaction while your wallet is locked. '), \
QMessageBox.Ok)
return
else:
self.wlt.kdfKey = self.wlt.kdf.DeriveKey(Passphrase)
Passphrase.destroy()
newUstx = self.wlt.signUnsignedTx(self.ustxObj, signer=dlg.getSignerType())
self.wlt.advanceHighestIndex(isNew=True)
self.txtUSTX.setText(newUstx.serializeAscii())
self.ustxObj = newUstx
      if self.fileLoaded is not None:
self.saveTxAuto()
def broadTx(self):
if self.main.netMode == NETWORKMODE.Disconnected:
QMessageBox.warning(self, self.tr('No Internet!'), \
self.tr('Armory lost its connection to Bitcoin Core, and cannot '
'broadcast any transactions until it is reconnected. '
'Please verify that Bitcoin Core (or bitcoind) is open '
'and synchronized with the network.'), QMessageBox.Ok)
return
elif self.main.netMode == NETWORKMODE.Offline:
QMessageBox.warning(self, self.tr('No Internet!'), \
self.tr('You do not currently have a connection to the Bitcoin network. '
            'If this does not seem correct, verify that Bitcoin Core (or bitcoind) is open '
'and synchronized with the network.'), QMessageBox.Ok)
return
try:
finalTx = self.ustxObj.getSignedPyTx(signer=self.ustxObj.signerType)
      except SignatureError:
         QMessageBox.warning(self, self.tr('Signature Error'), self.tr(
            'Not all signatures are valid. This transaction '
            'cannot be broadcast.'), QMessageBox.Ok)
         return
except:
QMessageBox.warning(self, self.tr('Error'), self.tr(
'There was an error processing this transaction, for reasons '
'that are probably not your fault...'), QMessageBox.Ok)
return
# We should provide the same confirmation dialog here, as we do when
# sending a regular (online) transaction. But the DlgConfirmSend was
# not really designed
ustx = self.ustxObj
svpairs = [[r[2],r[1]] for r in ustx.pytxObj.makeRecipientsList()]
theFee = ustx.calculateFee()
doIt = True
if self.wlt:
dlg = DlgConfirmSend(self.wlt, svpairs, theFee, self, self.main,
sendNow=True, pytxOrUstx=ustx)
doIt = dlg.exec_()
if doIt:
self.main.broadcastTransaction(finalTx)
if self.fileLoaded and os.path.exists(self.fileLoaded):
try:
# pcs = self.fileLoaded.split('.')
# newFileName = '.'.join(pcs[:-2]) + '.DONE.' + '.'.join(pcs[-2:])
shutil.move(self.fileLoaded, self.fileLoaded.replace('signed', 'SENT'))
except:
QMessageBox.critical(self, self.tr('File Remove Error'), \
self.tr('The file could not be deleted. If you want to delete '
'it, please do so manually. The file was loaded from: '
'<br><br>%1: ').arg(self.fileLoaded), QMessageBox.Ok)
try:
self.parent().accept()
except:
# This just attempts to close the OfflineReview&Sign window. If
# it fails, the user can close it themselves.
LOGEXCEPT('Could not close/accept parent dialog.')
def saveTxAuto(self):
if not self.ustxReadable:
QMessageBox.warning(self, self.tr('Formatting Error'), \
self.tr('The transaction data was not in a format recognized by '
'Armory.'))
return
      if self.fileLoaded is not None and self.enoughSigs and self.sigsValid:
newSaveFile = self.fileLoaded.replace('unsigned', 'signed')
LOGINFO('New save file: %s' % newSaveFile)
f = open(newSaveFile, 'w')
f.write(str(self.txtUSTX.toPlainText()))
f.close()
         if newSaveFile != self.fileLoaded:
os.remove(self.fileLoaded)
self.fileLoaded = newSaveFile
QMessageBox.information(self, self.tr('Transaction Saved!'), \
self.tr('Your transaction has been saved to the following location:'
'\n\n%1\n\nIt can now be broadcast from any computer running '
'Armory in online mode.').arg(newSaveFile), QMessageBox.Ok)
return
def saveTx(self):
if not self.ustxReadable:
QMessageBox.warning(self, self.tr('Formatting Error'), \
self.tr('The transaction data was not in a format recognized by '
'Armory.'))
return
      # The strange Windows branching is because PyQt on Windows automatically
      # adds the file-filter suffix to the default filename, whereas it needs to
      # be explicitly added in PyQt on Linux. Not sure why this behavior exists.
defaultFilename = ''
      if self.ustxObj is not None:
if self.enoughSigs and self.sigsValid:
suffix = '' if OS_WINDOWS else '.signed.tx'
defaultFilename = 'armory_%s_%s' % (self.ustxObj.uniqueIDB58, suffix)
ffilt = 'Transactions (*.signed.tx *.unsigned.tx)'
else:
suffix = '' if OS_WINDOWS else '.unsigned.tx'
defaultFilename = 'armory_%s_%s' % (self.ustxObj.uniqueIDB58, suffix)
ffilt = 'Transactions (*.unsigned.tx *.signed.tx)'
filename = self.main.getFileSave('Save Transaction', \
[ffilt], \
defaultFilename)
if len(str(filename)) > 0:
LOGINFO('Saving transaction file: %s', filename)
f = open(filename, 'w')
f.write(str(self.txtUSTX.toPlainText()))
f.close()
def loadTx(self):
filename = self.main.getFileLoad(self.tr('Load Transaction'), \
['Transactions (*.signed.tx *.unsigned.tx *.SENT.tx)'])
if len(str(filename)) > 0:
LOGINFO('Selected transaction file to load: %s', filename)
f = open(filename, 'r')
self.txtUSTX.setText(f.read())
f.close()
self.fileLoaded = filename
def copyTx(self):
clipb = QApplication.clipboard()
clipb.clear()
clipb.setText(str(self.txtUSTX.toPlainText()))
self.lblCopied.setText(self.tr('<i>Copied!</i>'))
def copyTxHex(self):
clipb = QApplication.clipboard()
clipb.clear()
clipb.setText(binary_to_hex(\
self.ustxObj.getSignedPyTx(signer=self.ustxObj.signerType).serialize()))
self.lblCopied.setText(self.tr('<i>Copied!</i>'))
# Need to put circular imports at the end of the script to avoid an import deadlock
from qtdialogs import CLICKED, DlgConfirmSend, DlgUriCopyAndPaste, \
DlgUnlockWallet, extractTxInfo, DlgDispTxInfo, NO_CHANGE, STRETCH
|
from base import Error
class Integrity(Error):
def __init__(self):
super().__init__(401, 'integrity isn\'t valid')
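
# Usage sketch: assuming base.Error maps an HTTP-style status code to a
# message (as the call above suggests), a failed integrity check can be
# reported with:
#
#   raise Integrity()   # -> Error(401, "integrity isn't valid")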
|
# -*- coding: utf-8 -*-
from selenium.webdriver.common.keys import Keys
from selenium import webdriver
from bs4 import BeautifulSoup
import time
class ETA:
def __init__(self):
self.path = r'D:\PythonProjects\Personal\python-sandbox\drivers' \
r'\chromedriver.exe'
self.options = webdriver.ChromeOptions()
self.driver = webdriver.Chrome(self.path, options=self.options)
self.url = 'https://web.telegram.org/'
self.driver.get(self.url)
# Used in __call__
def select_chat(self):
self.driver.find_element_by_xpath(
'//*[@id="LeftColumn-main"]/div[2]/div/div/div/div/div/div/div[2]/div/div[3]'
).click()
# Used in __call__
def get_last_message(self):
html = self.driver.page_source
soup = BeautifulSoup(html, 'html.parser')
return soup.find_all('div', {
'class': 'im_message_text'
})[-1].text.lower()
# Used in verify_eta
def send_message(self):
input_box = self.driver.find_element_by_xpath(
'//*[@id="ng-app"]/body/div[1]/div[2]/div/div[2]/div[3]/div/'
'div[3]/div[2]/div/div/div/form/div[2]/div[5]'
)
input_box.send_keys('This is the way')
input_box.send_keys(Keys.ENTER)
def __call__(self, *args, **kwargs):
        input('Log in to Telegram Web in the opened browser, then press Enter to continue...')
self.select_chat()
# msg = self.get_last_message()
# if msg == 'this is the way':
self.send_message()
time.sleep(0.7)
if __name__ == '__main__':
    ETA()()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declared_attr
from ziggurat_foundations.models.base import BaseModel
__all__ = ["GroupMixin"]
class GroupMixin(BaseModel):
""" Mixin for Group model """
__table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8"}
@declared_attr
def __tablename__(self):
return "groups"
# lists app wide permissions we might want to assign to groups
__possible_permissions__ = ()
@declared_attr
def id(self):
return sa.Column(sa.Integer(), primary_key=True)
@declared_attr
def group_name(self):
return sa.Column(sa.Unicode(128), nullable=False, unique=True)
@declared_attr
def description(self):
return sa.Column(sa.Text())
@declared_attr
def member_count(self):
return sa.Column(sa.Integer, nullable=False, default=0)
@declared_attr
def users(self):
""" relationship for users belonging to this group"""
return sa.orm.relationship(
"User",
secondary="users_groups",
order_by="User.user_name",
passive_deletes=True,
passive_updates=True,
backref="groups",
)
# dynamic property - useful
@declared_attr
def users_dynamic(self):
""" dynamic relationship for users belonging to this group
one can use filter """
return sa.orm.relationship(
"User", secondary="users_groups", order_by="User.user_name", lazy="dynamic"
)
@declared_attr
def permissions(self):
""" non-resource permissions assigned to this group"""
return sa.orm.relationship(
"GroupPermission",
backref="groups",
cascade="all, delete-orphan",
passive_deletes=True,
passive_updates=True,
)
@declared_attr
def resource_permissions(self):
""" permissions to specific resources this group has"""
return sa.orm.relationship(
"GroupResourcePermission",
backref="groups",
cascade="all, delete-orphan",
passive_deletes=True,
passive_updates=True,
)
@declared_attr
def resources(self):
""" Returns all resources directly owned by group, can be used to assign
ownership of new resources::
user.resources.append(resource) """
return sa.orm.relationship(
"Resource",
cascade="all",
passive_deletes=True,
passive_updates=True,
backref="owner_group",
)
@declared_attr
def resources_dynamic(self):
""" Returns all resources directly owned by group, can be used to assign
ownership of new resources::
user.resources.append(resource) """
return sa.orm.relationship(
"Resource",
cascade="all",
passive_deletes=True,
passive_updates=True,
lazy="dynamic",
)
@sa.orm.validates("permissions")
def validate_permission(self, key, permission):
""" validates if group can get assigned with permission"""
if permission.perm_name not in self.__possible_permissions__:
raise AssertionError(
"perm_name is not one of {}".format(self.__possible_permissions__)
)
return permission
def __repr__(self):
return "<Group: %s, %s>" % (self.group_name, self.id)
|
#!/usr/bin/env python
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import random
import click
import dimod
import dwave_networkx as dnx
def generate_random_chimera_problem(adjacency, h_range, j_range, offset=0, vartype=dimod.BINARY):
"""Generate a random chimera problem, with
h int chosen randomly from h_range, j int chosen randomly from j_range.
Typically: h_range = [0, 0] and j_range = [-k, +k].
Args:
adjacency (dict[/{node: {neighbor_node_1, ...}}): Adjacency dictionary
h_range (tuple/(upper,lower)): bounds for h
j_range (tuple/(upper,lower)): bounds for j
offset (float): energy offset
vartype (dimod.Vartype): BQM's vartype
Returns:
dimod.BinaryQuadraticModel
"""
h = {n: random.randint(*h_range) for n in adjacency.keys()}
J = {(n,e): random.randint(*j_range)
for n, edges in adjacency.items()
for e in edges
if e > n}
return dimod.BinaryQuadraticModel(h, J, offset, vartype)
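
# Minimal usage sketch (uses the dnx/dimod imports above): build a small
# 2x2x4 Chimera adjacency and draw one random Ising problem from it.
#
#   adj = dnx.chimera_graph(2, 2, 4).adj
#   bqm = generate_random_chimera_problem(adj, (0, 0), (-1, 1),
#                                         vartype=dimod.SPIN)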
@click.command()
@click.option('--size', type=(int, int, int), default=(16, 16, 4),
help='Size of generated problems. For Chimera, use three-tuple.')
@click.option('--vartype', type=click.Choice(['SPIN', 'BINARY']), default='SPIN',
help="Generated problems' type (Ising/QUBO).")
@click.option('--count', type=int, default=10,
help='Number of generated problems.')
@click.option('--format', 'fmt', type=click.Choice(['coo', 'json']), default='coo',
help='Output format.')
@click.option('--outdir', type=click.Path(exists=True, file_okay=False), required=False,
help='Output directory. Defaults to stdout.')
def generate_chimera(size, vartype, count, fmt, outdir):
"""Generate `count` of random Chimera-structured problems
with `size` topology, with zero biases and random J's in +/-k range
(where k goes from 1 to `count`).
"""
def store(bqm, fp):
if fmt == 'coo':
fp.write(bqm.to_coo(vartype_header=True))
elif fmt == 'json':
fp.write(bqm.to_json())
ext = {'SPIN': 'ising', 'BINARY': 'qubo'}
adj = dnx.chimera_graph(*size).adj
for k in range(1, count+1):
bqm = generate_random_chimera_problem(adj, (0, 0), (-k, k), vartype=vartype)
if outdir:
path = os.path.join(outdir, '{}.{:0>2}.{}'.format(len(bqm), k, ext[vartype]))
with open(path, 'w') as fp:
store(bqm, fp)
else:
store(bqm, sys.stdout)
if __name__ == '__main__':
generate_chimera()
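
# Example invocation from a shell (hypothetical script name "generate.py";
# --outdir must point to an existing directory):
#
#   python generate.py --size 2 2 4 --count 3 --format coo --outdir ./problems
#
# Without --outdir, the generated problems are written to stdout.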
|
# Package pyarmor
import logging
import sys
import os
sys.path.append(os.path.dirname(__file__))
def main():
from pyarmor2 import main as main_entry
logging.basicConfig(
level=logging.INFO,
format='%(levelname)-8s %(message)s',
)
main_entry(sys.argv[1:])
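
# Usage sketch (assumption: the package's setup exposes main() as a
# console-script entry point named "pyarmor"), e.g.:
#
#   pyarmor --help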
|
#!/usr/bin/python
# -*- coding: utf8 -*-
from datetime import datetime
from Crypto.Cipher import AES
import time
import random
import socket
import threading
import parser
import struct
version = "1.1.3"
def gendevice(devtype, host, mac, name=None, cloud=None, update_interval=0):
    # print(format(devtype, '02x'))
    ##We only care about 1 device type...
    if devtype == 0x4E2a:  # Dunham-Bush
        return ac_db(host=host, mac=mac, name=name, cloud=cloud, devtype=devtype, update_interval=update_interval)
    if devtype == 0xFFFFFFF:  # test
        return ac_db_debug(host=host, mac=mac, name=name, cloud=cloud, devtype=devtype, update_interval=update_interval)
    else:
        return device(host=host, mac=mac, devtype=devtype, update_interval=update_interval)
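
# Dispatch sketch (hypothetical host/mac): gendevice() keys off the 2-byte
# device type reported during discovery.
#
#   dev = gendevice(0x1234, ('192.168.1.50', 80), bytearray(6))
#   print(dev.get_type())   # -> "Unknown" for unrecognized device types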
def discover(timeout=None, bind_to_ip=None):
if bind_to_ip is None:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('8.8.8.8', 53)) # connecting to a UDP address doesn't send packets
bind_to_ip = s.getsockname()[0]
address = bind_to_ip.split('.')
cs = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
cs.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
cs.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
cs.bind((bind_to_ip,0))
port = cs.getsockname()[1]
starttime = time.time()
devices = []
timezone = int(time.timezone/-3600)
packet = bytearray(0x30)
year = datetime.now().year
if timezone < 0:
packet[0x08] = 0xff + timezone - 1
packet[0x09] = 0xff
packet[0x0a] = 0xff
packet[0x0b] = 0xff
else:
packet[0x08] = timezone
packet[0x09] = 0
packet[0x0a] = 0
packet[0x0b] = 0
packet[0x0c] = year & 0xff
packet[0x0d] = year >> 8
packet[0x0e] = datetime.now().minute
packet[0x0f] = datetime.now().hour
subyear = str(year)[2:]
packet[0x10] = int(subyear)
packet[0x11] = datetime.now().isoweekday()
packet[0x12] = datetime.now().day
packet[0x13] = datetime.now().month
packet[0x18] = int(address[0])
packet[0x19] = int(address[1])
packet[0x1a] = int(address[2])
packet[0x1b] = int(address[3])
packet[0x1c] = port & 0xff
packet[0x1d] = port >> 8
packet[0x26] = 6
checksum = 0xbeaf
for i in range(len(packet)):
checksum += packet[i]
checksum = checksum & 0xffff
packet[0x20] = checksum & 0xff
packet[0x21] = checksum >> 8
cs.sendto(packet, ('255.255.255.255', 80))
if timeout is None:
response = cs.recvfrom(1024)
responsepacket = bytearray(response[0])
host = response[1]
mac = responsepacket[0x3a:0x40]
mac = mac[::-1] ##Flip correct
devtype = responsepacket[0x34] | responsepacket[0x35] << 8
name = responsepacket[0x40:].split(b'\x00')[0].decode('utf-8')
if not name:
name = mac
cloud = bool(responsepacket[-1])
cs.close()
return gendevice(devtype, host, mac,name=name,cloud=cloud)
else:
while (time.time() - starttime) < timeout:
cs.settimeout(timeout - (time.time() - starttime))
try:
response = cs.recvfrom(1024)
except socket.timeout:
return devices
responsepacket = bytearray(response[0])
#print ":".join("{:02x}".format(c) for c in responsepacket)
#print ":".join("{:c}".format(c) for c in responsepacket)
host = response[1]
devtype = responsepacket[0x34] | responsepacket[0x35] << 8
mac = responsepacket[0x3a:0x40]
mac = mac[::-1] ##flip Correct
name = responsepacket[0x40:].split(b'\x00')[0].decode('utf-8')
##Make sure there is some name
if not name:
name = mac
cloud = bool(responsepacket[-1])
dev = gendevice(devtype, host, mac,name=name,cloud=cloud)
devices.append(dev)
cs.close()
return devices
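
# Usage sketch: broadcast a discovery packet and list responding units
# (passing a timeout makes discover() collect the responders into a list).
#
#   for dev in discover(timeout=5):
#       print(dev.devtype, dev.host, dev.name)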
class device:
def __init__(self, host, mac, timeout=10,name=None,cloud=None,devtype=None,update_interval=0,bind_to_ip=None):
self.host = host
self.mac = mac
self.name = name
self.cloud = cloud
self.timeout = timeout
self.devtype = devtype
self.count = random.randrange(0xffff)
self.key = bytearray([0x09, 0x76, 0x28, 0x34, 0x3f, 0xe9, 0x9e, 0x23, 0x76, 0x5c, 0x15, 0x13, 0xac, 0xcf, 0x8b, 0x02])
self.iv = bytearray([0x56, 0x2e, 0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58])
self.id = bytearray([0, 0, 0, 0])
self.cs = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.cs.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.cs.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
#self.cs.bind(('',0))
self.type = "Unknown"
self.lock = threading.Lock()
self.update_interval = update_interval
self.bind_to_ip = bind_to_ip
def auth(self):
payload = bytearray(0x50)
payload[0x04] = 0x31
payload[0x05] = 0x31
payload[0x06] = 0x31
payload[0x07] = 0x31
payload[0x08] = 0x31
payload[0x09] = 0x31
payload[0x0a] = 0x31
payload[0x0b] = 0x31
payload[0x0c] = 0x31
payload[0x0d] = 0x31
payload[0x0e] = 0x31
payload[0x0f] = 0x31
payload[0x10] = 0x31
payload[0x11] = 0x31
payload[0x12] = 0x31
payload[0x1e] = 0x01
payload[0x2d] = 0x01
payload[0x30] = ord('T')
payload[0x31] = ord('e')
payload[0x32] = ord('s')
payload[0x33] = ord('t')
payload[0x34] = ord(' ')
payload[0x35] = ord(' ')
payload[0x36] = ord('1')
response = self.send_packet(0x65, payload)
enc_payload = response[0x38:]
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
payload = aes.decrypt(bytes(enc_payload))
if not payload:
return False
key = payload[0x04:0x14]
if len(key) % 16 != 0:
return False
self.id = payload[0x00:0x04]
self.key = key
return True
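    # Typical call order (sketch): auth() must succeed before encrypted
    # payloads can be exchanged, since it replaces the default key with a
    # device-specific one returned by the unit:
    #
    #   dev = device(('192.168.1.50', 80), bytearray(6))      # hypothetical host/mac
    #   if dev.auth():
    #       response = dev.send_packet(0x6a, bytearray(16))   # hypothetical payload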
def get_type(self):
return self.type
def send_packet(self, command, payload):
self.count = (self.count + 1) & 0xffff
packet = bytearray(0x38)
packet[0x00] = 0x5a
packet[0x01] = 0xa5
packet[0x02] = 0xaa
packet[0x03] = 0x55
packet[0x04] = 0x5a
packet[0x05] = 0xa5
packet[0x06] = 0xaa
packet[0x07] = 0x55
packet[0x24] = 0x2a #==> Type
packet[0x25] = 0x4e #==> Type
packet[0x26] = command
packet[0x28] = self.count & 0xff
packet[0x29] = self.count >> 8
packet[0x2a] = self.mac[0]
packet[0x2b] = self.mac[1]
packet[0x2c] = self.mac[2]
packet[0x2d] = self.mac[3]
packet[0x2e] = self.mac[4]
packet[0x2f] = self.mac[5]
packet[0x30] = self.id[0]
packet[0x31] = self.id[1]
packet[0x32] = self.id[2]
packet[0x33] = self.id[3]
checksum = 0xbeaf
for i in range(len(payload)):
checksum += payload[i]
checksum = checksum & 0xffff
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
payload = aes.encrypt(bytes(payload))
packet[0x34] = checksum & 0xff
packet[0x35] = checksum >> 8
for i in range(len(payload)):
packet.append(payload[i])
checksum = 0xbeaf
for i in range(len(packet)):
checksum += packet[i]
checksum = checksum & 0xffff
packet[0x20] = checksum & 0xff
packet[0x21] = checksum >> 8
#print 'Sending Packet:\n'+''.join(format(x, '02x') for x in packet)+"\n"
starttime = time.time()
with self.lock:
while True:
try:
self.cs.sendto(packet, self.host)
self.cs.settimeout(1)
response = self.cs.recvfrom(1024)
break
                except socket.timeout:
                    # Keep retrying until the overall timeout budget is spent;
                    # only raise once it has been exceeded.
                    if (time.time() - starttime) < self.timeout:
                        continue
                    raise ConnectTimeout(200, self.host)
return bytearray(response[0])
#******************************************************** ac db debug class *****************************************
class ac_db(device):
import logging
type = "ac_db"
class STATIC:
##Static stuff
class FIXATION:
class VERTICAL:
#STOP= 0b00000000
TOP= 0b00000001
MIDDLE1= 0b00000010
MIDDLE2 = 0b00000011
MIDDLE3 = 0b00000100
BOTTOM= 0b00000101
SWING= 0b00000110
AUTO = 0b00000111
class HORIZONTAL: ##Don't think this really works for all devices.
LEFT_FIX = 2
LEFT_FLAP = 1
LEFT_RIGHT_FIX = 7
LEFT_RIGHT_FLAP = 0
RIGHT_FIX = 6
RIGHT_FLAP = 5
ON = 0
OFF = 1
class FAN:
LOW = 0b00000011
MEDIUM = 0b00000010
HIGH = 0b00000001
AUTO = 0b00000101
NONE = 0b00000000
class MODE:
COOLING = 0b00000001
DRY = 0b00000010
HEATING = 0b00000100
AUTO = 0b00000000
FAN = 0b00000110
class ONOFF:
OFF = 0
ON = 1
def __init__ (self, host, mac,name=None,cloud=None,debug = False,update_interval = 0,devtype=None,bind_to_ip=None):
device.__init__(self, host, mac,name=name,cloud=cloud,devtype=devtype,update_interval=update_interval)
devtype = devtype
self.status = {}
self.logger = self.logging.getLogger(__name__)
self.update_interval = update_interval
##Set default values
#mac = mac[::-1]
self.set_default_values()
self.status['macaddress'] = ''.join(format(x, '02x') for x in mac)
self.status['hostip'] = host
self.status['name'] = name
self.logging.basicConfig(level=(self.logging.DEBUG if debug else self.logging.INFO))
self.logger.debug("Debugging Enabled")
##Populate array with latest data
self.logger.debug("Authenticating")
        if self.auth() == False:
            self.logger.critical("Authentication Failed to AC")
            ##Returning a value from __init__ would raise a TypeError, so
            ##surface the failure as an exception instead.
            raise ConnectError("Authentication failed to AC")
self.logger.debug("Getting current details in init")
##Get the current details
self.get_ac_status(force_update = True)
def get_ac_status(self,force_update = False):
##Check if the status is up to date to reduce timeout issues. Can be overwritten by force_update
self.logger.debug("Last update was: %s"%self.status['lastupdate'] )
        if not force_update and self.status['lastupdate'] is not None and (self.status['lastupdate'] + self.update_interval) > time.time():
return self.make_nice_status(self.status)
##Get AC info(also populates the current temp)
self.logger.debug("Getting AC Info")
self.get_ac_info()
self.logger.debug("AC Info Retrieved")
##Get the current status ... get_ac_states does make_nice_status in return.
self.logger.debug("Getting AC States")
status = self.get_ac_states(True)
self.logger.debug("AC States retrieved")
return status
def set_default_values(self):
self.status['temp'] = float(19)
self.status['fixation_v'] = self.STATIC.FIXATION.VERTICAL.AUTO
self.status['power'] = self.STATIC.ONOFF.ON
self.status['mode'] = self.STATIC.MODE.AUTO
self.status['sleep'] = self.STATIC.ONOFF.OFF
self.status['display'] = self.STATIC.ONOFF.ON
self.status['health'] = self.STATIC.ONOFF.OFF
self.status['ifeel'] = self.STATIC.ONOFF.OFF
self.status['fixation_h'] = self.STATIC.FIXATION.HORIZONTAL.LEFT_RIGHT_FIX
self.status['fanspeed'] = self.STATIC.FAN.AUTO
self.status['turbo'] = self.STATIC.ONOFF.OFF
self.status['mute'] = self.STATIC.ONOFF.OFF
self.status['clean'] = self.STATIC.ONOFF.OFF
self.status['mildew'] = self.STATIC.ONOFF.OFF
self.status['macaddress'] = None
self.status['hostip'] = None
self.status['lastupdate'] = None
self.status['ambient_temp'] = None
self.status['devicename'] = None
def set_temperature(self,temperature):
self.logger.debug("Setting temprature to %s",temperature)
self.get_ac_states()
self.status['temp'] = float(temperature)
self.set_ac_status()
return self.make_nice_status(self.status)
def switch_off(self):
        ##Make sure we have the latest info, as we cannot update just one setting; the full state must be sent each time
self.get_ac_states()
self.status['power'] = self.STATIC.ONOFF.OFF
self.set_ac_status()
return self.make_nice_status(self.status)
def switch_on(self):
        ##Make sure we have the latest info, as we cannot update just one setting; the full state must be sent each time
self.get_ac_states()
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
def set_mode(self,mode_text):
        ##Make sure we have the latest info, as we cannot update just one setting; the full state must be sent each time
self.get_ac_states()
mode = self.STATIC.MODE.__dict__.get(mode_text.upper())
if mode != None:
self.status['mode'] = mode
self.set_ac_status()
return self.make_nice_status(self.status)
else:
self.logger.debug("Not found mode value %s" , str(mode_text))
return False
def set_fanspeed(self,mode_text):
        ##Make sure we have the latest info, as we cannot update just one setting; the full state must be sent each time
self.get_ac_states()
mode = self.STATIC.FAN.__dict__.get(mode_text.upper())
if mode != None:
self.status['fanspeed'] = mode
self.status['turbo'] = self.STATIC.ONOFF.OFF
self.status['mute'] = self.STATIC.ONOFF.OFF
self.set_ac_status()
return self.make_nice_status(self.status)
else:
self.logger.debug("Not found mode value %s" , str(mode_text))
return False
def set_mute(self,value):
        ##Make sure we have the latest info, as we cannot update just one setting; the full state must be sent each time
self.get_ac_states()
mode = self.STATIC.ONOFF.__dict__.get(value)
if mode != None:
self.status['mute'] = mode
self.status['turbo'] = self.STATIC.ONOFF.OFF
self.status['fanspeed'] = self.STATIC.FAN.NONE
self.set_ac_status()
return self.make_nice_status(self.status)
else:
self.logger.debug("Not found mute value %s" , str(value))
return False
def set_turbo(self,value):
        ##Make sure we have the latest info, as we cannot update just one setting; the full state must be sent each time
self.get_ac_states()
mode = self.STATIC.ONOFF.__dict__.get(value)
if mode != None:
self.status['turbo'] = mode
self.status['mute'] = self.STATIC.ONOFF.OFF
self.status['fanspeed'] = self.STATIC.FAN.NONE
self.set_ac_status()
return self.make_nice_status(self.status)
else:
self.logger.debug("Not found Turbo value %s" , str(value))
return False
def set_homekit_mode(self,status):
if type(status) is not str:
self.logger.debug('Status variable is not string %s',type(status))
return False
if status.lower() == 'coolon':
self.status['mode'] = self.STATIC.MODE.COOLING
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
elif status.lower() == 'heaton':
self.status['mode'] = self.STATIC.MODE.HEATING
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
elif status.lower() == 'auto':
self.status['mode'] = self.STATIC.MODE.AUTO
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
        elif status.lower() == 'dry':
self.status['mode'] = self.STATIC.MODE.DRY
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
        elif status.lower() == 'fan_only':
self.status['mode'] = self.STATIC.MODE.FAN
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
elif status.lower() == "off":
self.status['power'] = self.STATIC.ONOFF.OFF
self.set_ac_status()
return self.make_nice_status(self.status)
else:
self.logger.debug('Invalid status for homekit %s',status)
return False
def set_homeassistant_mode(self,status):
if type(status) is not str:
self.logger.debug('Status variable is not string %s',type(status))
return False
if status.lower() == 'cool':
self.status['mode'] = self.STATIC.MODE.COOLING
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
elif status.lower() == 'heat':
self.status['mode'] = self.STATIC.MODE.HEATING
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
elif status.lower() == 'auto':
self.status['mode'] = self.STATIC.MODE.AUTO
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
        elif status.lower() == 'dry':
self.status['mode'] = self.STATIC.MODE.DRY
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
        elif status.lower() == 'fan_only':
self.status['mode'] = self.STATIC.MODE.FAN
self.status['power'] = self.STATIC.ONOFF.ON
self.set_ac_status()
return self.make_nice_status(self.status)
elif status.lower() == "off":
self.status['power'] = self.STATIC.ONOFF.OFF
self.set_ac_status()
return self.make_nice_status(self.status)
else:
            self.logger.debug('Invalid status for homeassistant %s', status)
return False
def get_ac_info(self):
GET_AC_INFO = bytearray.fromhex("0C00BB0006800000020021011B7E0000")
response = self.send_packet(0x6a, GET_AC_INFO)
#print "Resposnse:" + ''.join(format(x, '02x') for x in response)
#print "Response:" + ' '.join(format(x, '08b') for x in response[9:])
err = response[0x22] | (response[0x23] << 8)
if err == 0:
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
# response = bytearray.fromhex("5aa5aa555aa5aa55000000000000000000000000000000000000000000000000c6d000002a4e6a0055b9af41a70d43b401000000b9c00000aeaac104468cf91b485f38c67f7bf57f");
#response = bytearray.fromhex("5aa5aa555aa5aa5547006f008d9904312c003e00000000003133a84d00400000d8d500002a4e6a0070a1b88c08b043a001000000b9c0000038821c66e3b38a5afe79dcb145e215d7")
response_payload = aes.decrypt(bytes(response[0x38:]))
response_payload = bytearray(response_payload)
self.logger.debug ("Acinfo Raw Response: " + ' '.join(format(x, '08b') for x in response_payload ) )
self.logger.debug ("Acinfo Raw Hex: " + ' '.join(format(x, '02x') for x in response_payload ) )
response_payload = response_payload[2:] ##Drop leading stuff as dont need
self.logger.debug ("AcInfo: " + ' '.join(format(x, '08b') for x in response_payload[9:] ) )
if len(response_payload) < 40: ##Hack for some invalid packets. should get proper length at some point. #54
self.logger.debug ("AcInfo: Invalid, seems to short?")
return 0
##Its only the last 5 bits?
ambient_temp = response_payload[15] & 0b00011111
self.logger.debug("Ambient Temp Decimal: %s" % float(response_payload[31] & 0b00011111) ) ## @Anonym-tsk
if ambient_temp:
self.status['ambient_temp'] = ambient_temp
return self.make_nice_status(self.status)
else:
self.logger.debug("Invalid packet received Errorcode %s" % err)
self.logger.debug ("Failed Raw Response: " + ' '.join(format(x, '08b') for x in response ) )
return 0
### Get AC Status
    ## Get the current status of the aircon and parse it into the status array.
    ## The full status has to be sent with each update; a single setting cannot
    ## be sent on its own.
def get_ac_states(self,force_update = False):
GET_STATES = bytearray.fromhex("0C00BB0006800000020011012B7E0000") ##From app queryAuxinfo:bb0006800000020011012b7e
##Check if the status is up to date to reduce timeout issues. Can be overwritten by force_update
self.logger.debug("Last update was: %s"%self.status['lastupdate'] )
        if not force_update and self.status['lastupdate'] is not None and (self.status['lastupdate'] + self.update_interval) > time.time():
return self.make_nice_status(self.status)
response = self.send_packet(0x6a, GET_STATES)
##Check response, the checksums should be 0
err = response[0x22] | (response[0x23] << 8)
if err == 0:
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
response_payload = bytes(aes.decrypt(bytes(response[0x38:])))
response_payload = bytearray(response_payload)
packet_type = response_payload[4]
if packet_type != 0x07: ##Should be result packet, otherwise something weird
return False
packet_len = response_payload[0]
if packet_len != 0x19: ##should be 25, if not, then wrong packet
return False
self.logger.debug ("Raw AC Status: " + ' '.join(format(x, '08b') for x in response_payload[9:] ) )
response_payload = response_payload[2:] ##Drop leading stuff as dont need
self.logger.debug ("Raw AC Status: " + ' '.join(format(x, '02x') for x in response_payload ) )
#self.logger.debug ("" + ' '.join(format(x, '08b') for x in response_payload[9:] ) )
#AuxInfo [tem=18, panMode=7, panType=1, nowTimeHour=5, setTem05=0, antoSenseYards=0, nowTimeMin=51, windSpeed=5, timerHour=0, voice=0, timerMin=0, mode=4, hasDew=0, hasSenseYards=0, hasSleep=0, isFollow=0, roomTem=0, roomHum=0, timeEnable=0, open=1, hasElectHeat=0, hasEco=0, hasClean=0, hasHealth=0, hasAir=0, weedSet=0, electronicLock=0, showDisplyBoard=1, mouldProof=0, controlMode=0, sleepMode=0]
            self.status['temp'] = 8 + (response_payload[10] >> 3) + (0.5 * float(response_payload[12] >> 7))
            self.status['power'] = response_payload[18] >> 5 & 0b00000001
            self.status['fixation_v'] = response_payload[10] & 0b00000111
            self.status['mode'] = response_payload[15] >> 5 & 0b00001111
            self.status['sleep'] = response_payload[15] >> 2 & 0b00000001
            self.status['display'] = response_payload[20] >> 4 & 0b00000001
            self.status['mildew'] = response_payload[20] >> 3 & 0b00000001
            self.status['health'] = response_payload[18] >> 1 & 0b00000001
            self.status['fixation_h'] = response_payload[10] & 0b00000111
            self.status['fanspeed'] = response_payload[13] >> 5 & 0b00000111
            self.status['ifeel'] = response_payload[15] >> 3 & 0b00000001
            self.status['mute'] = response_payload[14] >> 7 & 0b00000001
            self.status['turbo'] = response_payload[14] >> 6 & 0b00000001
            self.status['clean'] = response_payload[18] >> 2 & 0b00000001
            self.status['lastupdate'] = time.time()
return self.make_nice_status(self.status)
        else:
            return 0
def make_nice_status(self,status):
status_nice = {}
status_nice['temp'] = status['temp']
status_nice['ambient_temp'] = status['ambient_temp']
status_nice['power'] = self.get_key(self.STATIC.ONOFF.__dict__,status['power'])
status_nice['fixation_v'] = self.get_key(self.STATIC.FIXATION.VERTICAL.__dict__,status['fixation_v'])
status_nice['mode'] = self.get_key(self.STATIC.MODE.__dict__,status['mode'])
status_nice['sleep'] = self.get_key(self.STATIC.ONOFF.__dict__,status['sleep'])
status_nice['display'] = self.get_key(self.STATIC.ONOFF.__dict__,status['display'])
status_nice['mildew'] = self.get_key(self.STATIC.ONOFF.__dict__,status['mildew'])
status_nice['health'] = self.get_key(self.STATIC.ONOFF.__dict__,status['health'])
        status_nice['fixation_h'] = self.get_key(self.STATIC.FIXATION.HORIZONTAL.__dict__, status['fixation_h'])
status_nice['ifeel'] = self.get_key(self.STATIC.ONOFF.__dict__,status['ifeel'])
status_nice['mute'] = self.get_key(self.STATIC.ONOFF.__dict__,status['mute'])
status_nice['turbo'] = self.get_key(self.STATIC.ONOFF.__dict__,status['turbo'])
status_nice['clean'] = self.get_key(self.STATIC.ONOFF.__dict__,status['clean'])
status_nice['macaddress'] = status['macaddress']
status_nice['device_name'] = status['devicename']
##HomeKit topics
if self.status['power'] == self.STATIC.ONOFF.OFF:
status_nice['mode_homekit'] = "Off"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.AUTO :
status_nice['mode_homekit'] = "Auto"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.HEATING :
status_nice['mode_homekit'] = "HeatOn"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.COOLING :
status_nice['mode_homekit'] = "CoolOn"
else:
status_nice['mode_homekit'] = "Error"
##Home Assist topic
if self.status['power'] == self.STATIC.ONOFF.OFF:
status_nice['mode_homeassistant'] = "off"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.AUTO :
status_nice['mode_homeassistant'] = "auto"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.HEATING :
status_nice['mode_homeassistant'] = "heat"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.COOLING :
status_nice['mode_homeassistant'] = "cool"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.DRY :
status_nice['mode_homeassistant'] = "dry"
elif status['power'] == self.STATIC.ONOFF.ON and status['mode'] == self.STATIC.MODE.FAN :
status_nice['mode_homeassistant'] = "fan_only"
else:
status_nice['mode_homeassistant'] = "Error"
##Make fanspeed logic
status_nice['fanspeed'] = self.get_key(self.STATIC.FAN.__dict__,status['fanspeed'])
status_nice['fanspeed_homeassistant'] = self.get_key(self.STATIC.FAN.__dict__,status['fanspeed']).title()
if status_nice['mute'] == "ON":
status_nice['fanspeed_homeassistant'] = "Mute"
status_nice['fanspeed'] = "MUTE"
elif status_nice['turbo'] == "ON":
status_nice['fanspeed_homeassistant'] = "Turbo"
status_nice['fanspeed'] = "TURBO"
return status_nice
    def get_key(self, mapping, search_value):
        for key, value in mapping.items():
            if value == search_value:
                return key
        ##Not found, so return the raw value
        return search_value
### UDP checksum function
def checksum_func(self,data):
checksum = 0
data_len = len(data)
if (data_len%2) == 1:
data_len += 1
data += struct.pack('!B', 0)
for i in range(0, len(data), 2):
w = (data[i] << 8) + (data[i + 1])
checksum += w
checksum = (checksum >> 16) + (checksum & 0xFFFF)
checksum = ~checksum&0xFFFF
return checksum
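    # Worked example (hypothetical input): checksum_func(bytearray(b'\x01\x02\x03'))
    # pads the data to an even length, sums the 16-bit words 0x0102 and 0x0300
    # to 0x0402, folds any carry above 16 bits back in, and returns the one's
    # complement 0xFBFD.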
def set_ac_status(self):
self.logger.debug("Start set_ac_status")
#packet = bytearray(32)
#10111011 00000000 00000110 10000000 00000000 00000000 00001111 00000000 00000001 9 00000001 10 01000111 11 00101000 12 00100000 13 10100000 14 00000000 15 00100000 16 00000000 17 00000000 18 00100000 19 00000000 20 00010000 21 00000000 22 00000101 10010001 10010101
if self.status['temp'] < 16:
temperature = 16-8
temperature_05 = 0
##Make sure to fix the global status as well
self.status['temp'] = 16
elif self.status['temp'] > 32:
temperature = 32-8
temperature_05 = 0
##Make sure to fix the global status as well
self.status['temp'] = 32
else:
##if 0.5 then make true . Also offset with 8
if self.status['temp'].is_integer():
temperature = int( self.status['temp'] - 8 )
temperature_05 = 0
else:
temperature_05 = 1
temperature = int(self.status['temp'] -8)
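        # Worked example (hypothetical setpoint): 21.5 C gives
        # temperature = int(21.5 - 8) = 13 and temperature_05 = 1; get_ac_states()
        # reverses this as 8 + (payload[10] >> 3) + 0.5 * (payload[12] >> 7).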
payload = bytearray(23)
payload[0] = 0xbb
payload[1] = 0x00
payload[2] = 0x06 # Send command, seems like 07 is response
payload[3] = 0x80
payload[4] = 0x00
payload[5] = 0x00
payload[6] = 0x0f # Set status .. #02 -> get info?
payload[7] = 0x00
payload[8] = 0x01
payload[9] = 0x01
payload[10] = 0b00000000 | temperature << 3 | self.status['fixation_v']
payload[11] = 0b00000000 | self.status['fixation_h'] <<5
payload[12] = 0b00001111 | temperature_05 << 7 # bit 1: 0.5 #bit if 0b?1 then nothing done.... last 6 is some sort of packet_id
payload[13] = 0b00000000 | self.status['fanspeed'] << 5
payload[14] = 0b00000000 | self.status['turbo'] << 6 | self.status['mute'] << 7
payload[15] = 0b00000000 | self.status['mode'] << 5 | self.status['sleep'] << 2
payload[16] = 0b00000000
payload[17] = 0x00
payload[18] = 0b00000000 | self.status['power']<<5 | self.status['health'] << 1 | self.status['clean'] << 2
payload[19] = 0x00
payload[20] = 0b00000000 | self.status['display'] <<4 | self.status['mildew'] << 3
payload[21] = 0b00000000
payload[22] = 0b00000000
self.logger.debug ("Payload:"+ ''.join(format(x, '02x') for x in payload))
        # First byte is the length, then a placeholder byte, then the payload
        # plus 2 bytes for the CRC16.
        request_payload = bytearray(32)
        request_payload[0] = len(payload) + 2  ##Length of payload plus crc
request_payload[2:len(payload)+2] = payload ##Add the Payload
# append CRC
crc = self.checksum_func(payload)
self.logger.debug ("Checksum:"+format(crc,'02x'))
request_payload[len(payload)+1] = ((crc >> 8) & 0xFF)
request_payload[len(payload)+2] = crc & 0xFF
self.logger.debug ("Packet:"+ ''.join(format(x, '02x') for x in request_payload))
response = self.send_packet(0x6a, request_payload)
self.logger.debug ("Resposnse:" + ''.join(format(x, '02x') for x in response))
err = response[0x22] | (response[0x23] << 8)
if err == 0:
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
response_payload = aes.decrypt(bytes(response[0x38:]))
response_payload = bytearray(response_payload)
packet_type = response_payload[4]
if packet_type == 0x07: ##Should be result packet, otherwise something weird
return self.status
else:
return False
self.logger.debug ("Payload: Nice:" + ''.join(x.encode('hex') for x in response_payload ))
return "done"
class ConnectError(Exception):
"""Base error class"""
pass
class ConnectTimeout(ConnectError):
"""Connection Timeout"""
pass
class ac_db_debug(device):
import logging
type = "ac_db"
def __init__ (self, host, mac,name=None,cloud=None,debug = False,update_interval = 0,devtype=None,auth=False):
device.__init__(self, host, mac,name=name,cloud=cloud,devtype=devtype,update_interval=update_interval)
devtype = devtype
self.status = {}
self.logger = self.logging.getLogger(__name__)
self.update_interval = update_interval
##Set default values
#mac = mac[::-1]
self.set_default_values()
self.status['macaddress'] = ''.join(format(x, '02x') for x in mac)
self.status['hostip'] = host
self.status['name'] = name
self.logging.basicConfig(level=(self.logging.DEBUG if debug else self.logging.INFO))
self.logger.debug("Debugging Enabled")
self.logger.debug("Authenticating")
if self.auth() == False:
print ("Authentication Failed to AC")
self.logger.debug("Setting test temperature")
self.set_temperature(25)
##Get the current details
self.logger.debug("Getting current details in init")
#self.get_ac_states(force_update = True)
def get_ac_states(self,force_update = False):
GET_STATES = bytearray.fromhex("0C00BB0006800000020011012B7E0000") ##From app queryAuxinfo:bb0006800000020011012b7e
##Check if the status is up to date to reduce timeout issues. Can be overwritten by force_update
self.logger.debug("Last update was: %s"%self.status['lastupdate'] )
        if not force_update and self.status['lastupdate'] is not None and (self.status['lastupdate'] + self.update_interval) > time.time():
return self.make_nice_status(self.status)
response = self.send_packet(0x6a, GET_STATES)
##Check response, the checksums should be 0
err = response[0x22] | (response[0x23] << 8)
if err == 0:
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
response_payload = bytes(aes.decrypt(bytes(response[0x38:])))
response_payload = bytearray(response_payload)
packet_type = response_payload[4]
if packet_type != 0x07: ##Should be result packet, otherwise something weird
return False
packet_len = response_payload[0]
if packet_len != 0x19: ##should be 25, if not, then wrong packet
return False
self.logger.debug ("Raw AC Status: " + ' '.join(format(x, '08b') for x in response_payload[9:] ) )
response_payload = response_payload[2:] ##Drop leading stuff as dont need
self.logger.debug ("Raw AC Status: " + ' '.join(format(x, '02x') for x in response_payload ) )
#self.logger.debug ("" + ' '.join(format(x, '08b') for x in response_payload[9:] ) )
#AuxInfo [tem=18, panMode=7, panType=1, nowTimeHour=5, setTem05=0, antoSenseYards=0, nowTimeMin=51, windSpeed=5, timerHour=0, voice=0, timerMin=0, mode=4, hasDew=0, hasSenseYards=0, hasSleep=0, isFollow=0, roomTem=0, roomHum=0, timeEnable=0, open=1, hasElectHeat=0, hasEco=0, hasClean=0, hasHealth=0, hasAir=0, weedSet=0, electronicLock=0, showDisplyBoard=1, mouldProof=0, controlMode=0, sleepMode=0]
            self.status['temp'] = 8 + (response_payload[10] >> 3) + (0.5 * float(response_payload[12] >> 7))
            self.status['power'] = response_payload[18] >> 5 & 0b00000001
            self.status['fixation_v'] = response_payload[10] & 0b00000111
            self.status['mode'] = response_payload[15] >> 5 & 0b00001111
            self.status['sleep'] = response_payload[15] >> 2 & 0b00000001
            self.status['display'] = response_payload[20] >> 4 & 0b00000001
            self.status['mildew'] = response_payload[20] >> 3 & 0b00000001
            self.status['health'] = response_payload[18] >> 1 & 0b00000001
            self.status['fixation_h'] = response_payload[10] & 0b00000111
            self.status['fanspeed'] = response_payload[13] >> 5 & 0b00000111
            self.status['ifeel'] = response_payload[15] >> 3 & 0b00000001
            self.status['mute'] = response_payload[14] >> 7 & 0b00000001
            self.status['turbo'] = response_payload[14] >> 6 & 0b00000001
            self.status['clean'] = response_payload[18] >> 2 & 0b00000001
            self.status['lastupdate'] = time.time()
return self.make_nice_status(self.status)
        else:
            return False
def set_default_values(self):
self.status['temp'] = float(19)
self.status['fixation_v'] = ac_db.STATIC.FIXATION.VERTICAL.AUTO
self.status['power'] = ac_db.STATIC.ONOFF.ON
self.status['mode'] = ac_db.STATIC.MODE.AUTO
self.status['sleep'] = ac_db.STATIC.ONOFF.OFF
self.status['display'] = ac_db.STATIC.ONOFF.ON
self.status['health'] = ac_db.STATIC.ONOFF.OFF
self.status['ifeel'] = ac_db.STATIC.ONOFF.OFF
self.status['fixation_h'] = ac_db.STATIC.FIXATION.HORIZONTAL.LEFT_RIGHT_FIX
self.status['fanspeed'] = ac_db.STATIC.FAN.AUTO
self.status['turbo'] = ac_db.STATIC.ONOFF.OFF
self.status['mute'] = ac_db.STATIC.ONOFF.OFF
self.status['clean'] = ac_db.STATIC.ONOFF.OFF
self.status['mildew'] = ac_db.STATIC.ONOFF.OFF
self.status['macaddress'] = None
self.status['hostip'] = None
self.status['lastupdate'] = None
self.status['ambient_temp'] = None
self.status['devicename'] = None
def set_temperature(self,temperature):
self.logger.debug("Setting temprature to %s",temperature)
#self.get_ac_states()
self.status['temp'] = float(temperature)
self.set_ac_status()
#return self.make_nice_status(self.status)
def set_ac_status(self):
self.logger.debug("Start set_ac_status")
#packet = bytearray(32)
#10111011 00000000 00000110 10000000 00000000 00000000 00001111 00000000 00000001 9 00000001 10 01000111 11 00101000 12 00100000 13 10100000 14 00000000 15 00100000 16 00000000 17 00000000 18 00100000 19 00000000 20 00010000 21 00000000 22 00000101 10010001 10010101
#print "setting something"
if self.status['temp'] < 16:
temperature = 16-8
temperature_05 = 0
##Make sure to fix the global status as well
self.status['temp'] = 16
elif self.status['temp'] > 32:
temperature = 32-8
temperature_05 = 0
##Make sure to fix the global status as well
self.status['temp'] = 32
else:
##if 0.5 then make true . Also offset with 8
if self.status['temp'].is_integer():
temperature = int( self.status['temp'] - 8 )
temperature_05 = 0
else:
temperature_05 = 1
temperature = int(self.status['temp'] -8)
#print temperature
payload = bytearray(23)
payload[0] = 0xbb
payload[1] = 0x00
payload[2] = 0x06 # Send command, seems like 07 is response
payload[3] = 0x80
payload[4] = 0x00
payload[5] = 0x00
payload[6] = 0x0f # Set status .. #02 -> get info?
payload[7] = 0x00
payload[8] = 0x01
payload[9] = 0x01
payload[10] = 0b00000000 | temperature << 3 | self.status['fixation_v']
payload[11] = 0b00000000 | self.status['fixation_h'] <<5
payload[12] = 0b00001111 | temperature_05 << 7 # bit 1: 0.5 #bit if 0b?1 then nothing done.... last 6 is some sort of packet_id
payload[13] = 0b00000000 | self.status['fanspeed'] << 5
payload[14] = 0b00000000 | self.status['turbo'] << 6 | self.status['mute'] << 7
payload[15] = 0b00000000 | self.status['mode'] << 5 | self.status['sleep'] << 2
payload[16] = 0b00000000
payload[17] = 0x00
payload[18] = 0b00000000 | self.status['power']<<5 | self.status['health'] << 1 | self.status['clean'] << 2
payload[19] = 0x00
payload[20] = 0b00000000 | self.status['display'] <<4 | self.status['mildew'] << 3
payload[21] = 0b00000000
payload[22] = 0b00000000
#print ("Payload:"+ ''.join(format(x, '02x') for x in payload))
        # First byte is the total length (payload plus 2 CRC bytes), then a zero
        # placeholder, then the payload, then the CRC16 -- the same layout as the
        # GET_STATES constant in get_ac_states, where 2B 7E trails the payload.
        request_payload = bytearray(32)
        request_payload[0] = len(payload) + 2  ##Length of payload plus CRC
        request_payload[2:len(payload) + 2] = payload  ##Add the payload
        # Append the CRC directly after the payload
        crc = self.checksum_func(payload)
        request_payload[len(payload) + 2] = (crc >> 8) & 0xFF
        request_payload[len(payload) + 3] = crc & 0xFF
#print ("Packet:"+ ''.join(format(x, '02x') for x in request_payload))
response = self.send_packet(0x6a, request_payload)
self.logger.debug ("Resposnse:" + ''.join(format(x, '02x') for x in response))
err = response[0x22] | (response[0x23] << 8)
if err == 0:
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
response_payload = aes.decrypt(bytes(response[0x38:]))
response_payload = bytearray(response_payload)
packet_type = response_payload[4]
if packet_type == 0x07: ##Should be result packet, otherwise something weird
return self.status
else:
return False
self.logger.debug ("Payload: Nice:" + ''.join(x.encode('hex') for x in response_payload ))
return "done"
def checksum_func(self,data):
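        # Ones'-complement checksum over big-endian 16-bit words (RFC 1071
        # style): pad odd-length data with a zero byte, sum the word pairs,
        # fold the carry back in, then invert. The 0xbb command payloads
        # appear to carry this value as their trailing CRC16 field.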
checksum = 0
data_len = len(data)
if (data_len%2) == 1:
data_len += 1
data += struct.pack('!B', 0)
for i in range(0, len(data), 2):
w = (data[i] << 8) + (data[i + 1])
checksum += w
checksum = (checksum >> 16) + (checksum & 0xFFFF)
checksum = ~checksum&0xFFFF
return checksum
def send_packet(self, command, payload):
self.count = (self.count + 1) & 0xffff
packet = bytearray(0x38)
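        # 0x38-byte header, as laid out by the assignments below:
        #   0x00-0x07 magic, 0x24-0x25 device type, 0x26 command,
        #   0x28-0x29 packet counter, 0x2a-0x2f MAC, 0x30-0x33 device id,
        #   0x34-0x35 payload checksum, 0x20-0x21 whole-packet checksum.
        # The AES-encrypted payload is appended after the header.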
packet[0x00] = 0x5a
packet[0x01] = 0xa5
packet[0x02] = 0xaa
packet[0x03] = 0x55
packet[0x04] = 0x5a
packet[0x05] = 0xa5
packet[0x06] = 0xaa
packet[0x07] = 0x55
packet[0x24] = 0x2a #==> Type
packet[0x25] = 0x4e #==> Type
packet[0x26] = command
packet[0x28] = self.count & 0xff
packet[0x29] = self.count >> 8
packet[0x2a] = self.mac[0]
packet[0x2b] = self.mac[1]
packet[0x2c] = self.mac[2]
packet[0x2d] = self.mac[3]
packet[0x2e] = self.mac[4]
packet[0x2f] = self.mac[5]
packet[0x30] = self.id[0]
packet[0x31] = self.id[1]
packet[0x32] = self.id[2]
packet[0x33] = self.id[3]
checksum = 0xbeaf
for i in range(len(payload)):
checksum += payload[i]
checksum = checksum & 0xffff
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
payload = aes.encrypt(bytes(payload))
packet[0x34] = checksum & 0xff
packet[0x35] = checksum >> 8
for i in range(len(payload)):
packet.append(payload[i])
checksum = 0xbeaf
for i in range(len(packet)):
checksum += packet[i]
checksum = checksum & 0xffff
packet[0x20] = checksum & 0xff
packet[0x21] = checksum >> 8
#print 'Sending Packet:\n'+''.join(format(x, '02x') for x in packet)+"\n"
starttime = time.time()
with self.lock:
while True:
try:
self.cs.sendto(packet, self.host)
self.cs.settimeout(1)
response = self.cs.recvfrom(1024)
#print response
break
                except socket.timeout:
                    # Keep retrying until the overall timeout budget is spent.
                    if (time.time() - starttime) > self.timeout:
                        raise ConnectTimeout(200, self.host)
return bytearray(response[0])
def auth(self):
payload = bytearray(0x50)
payload[0x04] = 0x31
payload[0x05] = 0x31
payload[0x06] = 0x31
payload[0x07] = 0x31
payload[0x08] = 0x31
payload[0x09] = 0x31
payload[0x0a] = 0x31
payload[0x0b] = 0x31
payload[0x0c] = 0x31
payload[0x0d] = 0x31
payload[0x0e] = 0x31
payload[0x0f] = 0x31
payload[0x10] = 0x31
payload[0x11] = 0x31
payload[0x12] = 0x31
payload[0x1e] = 0x01
payload[0x2d] = 0x01
payload[0x30] = ord('T')
payload[0x31] = ord('e')
payload[0x32] = ord('s')
payload[0x33] = ord('t')
payload[0x34] = ord(' ')
payload[0x35] = ord(' ')
payload[0x36] = ord('1')
response = self.send_packet(0x65, payload)
enc_payload = response[0x38:]
aes = AES.new(bytes(self.key), AES.MODE_CBC, bytes(self.iv))
payload = aes.decrypt(bytes(enc_payload))
if not payload:
return False
key = payload[0x04:0x14]
if len(key) % 16 != 0:
return False
self.id = payload[0x00:0x04]
self.key = key
return True
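# Usage sketch (hypothetical host/MAC; assumes the surrounding module's
# defaults supply the initial key/iv):
#   ac = ac_db_debug(host=('192.168.1.50', 80), mac=bytearray.fromhex('34ea34abcdef'), debug=True)
#   ac.set_temperature(22)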
|
from setuptools import setup
long_description = '''
Python utilities built on top of JAX.
'''
setup(
name='jax_tools',
version='0.0.1',
    description='Library built on top of JAX for easier code reuse and faster implementation',
long_description=long_description,
author='Clement Jambou',
packages=["jax_tools"],
install_requires=[]
)
|
# Defines a class handling a tracking dataset.
class TrackingDataset:
def __init__(self, train_file, valid_file, folder_prefix):
        # self.train_list = open(train_file, 'r').read().splitlines()  # read each image's file name?
# self.valid_list = open(valid_file, 'r').read().splitlines()
# self.train_images = [folder_prefix + '/Ref/' + train_item + '.png' for train_item in self.train_list]
# self.train_labels = [folder_prefix + '/Ell/' + train_item + '.txt' for train_item in self.train_list]
# self.train_seg = [folder_prefix + '/Seg/' + train_item + '.png' for train_item in self.train_list]
# self.train_size = len(self.train_list)
# self.valid_images = [folder_prefix + '/Ref/' + valid_item + '.png' for valid_item in self.valid_list]
# self.valid_labels = [folder_prefix + '/Ell/' + valid_item + '.txt' for valid_item in self.valid_list]
# self.valid_seg = [folder_prefix + '/Seg/' + valid_item + '.png' for valid_item in self.valid_list]
# self.valid_size = len(self.valid_list)
        self.train_list = open(train_file, 'r').read().splitlines()  # read each image's file name?
self.valid_list = open(valid_file, 'r').read().splitlines()
self.train_images = [folder_prefix + '/training/' + train_item for train_item in self.train_list]
self.train_seg = [folder_prefix + '/label/' + train_item for train_item in self.train_list]
self.train_size = len(self.train_list)
self.valid_images = [folder_prefix + '/training/' + valid_item for valid_item in self.valid_list]
self.valid_seg = [folder_prefix + '/label/' + valid_item for valid_item in self.valid_list]
self.valid_size = len(self.valid_list)
self.train_labels = [folder_prefix + '/Ell/' + train_item + '.txt' for train_item in self.train_list]
self.valid_labels = [folder_prefix + '/Ell/' + valid_item + '.txt' for valid_item in self.valid_list]
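# Usage sketch (hypothetical paths; train.txt/valid.txt are assumed to list
# one image file name per line):
#   dataset = TrackingDataset('train.txt', 'valid.txt', '/data/tracking')
#   print(dataset.train_size, dataset.valid_size)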
|
### Data to pickle
import os, sys, pickle, logging
### DEF ###
def openMyJarOfPickles():
if os.path.exists( pickleFile ):
picklesFromMyJar = open( pickleFile, 'rb')
else:
out = chngCurrentUser()
picklesFromMyJar = open( pickleFile, 'rb')
return picklesFromMyJar
def readFromPickle():
    ### Returns DICT
    if not os.path.exists( pickleFile ):
        createNewX()
    with open( pickleFile, 'rb') as inputFile:
        try:
            out = pickle.loads( inputFile.read() ) #<< Main
            logging.info( out )
            return out
        except Exception:
            logging.exception(sys.exc_info())
            sys.exit()
def writeToPickle( usr_name, grp_name ):
    dictDetails = {}
    if os.path.exists( pickleFile ):
        logging.debug( os.path.exists(pickleFile) )
        with open( pickleFile, 'rb') as inputFile:
            try:
                dictDetails = pickle.load( inputFile ) #<< Main
                logging.debug("Current Pickle: \n" + str(dictDetails))
            except Exception:
                logging.exception(sys.exc_info())
                sys.exit()
    logging.debug("1:" + str(dictDetails) )
    with open( pickleFile, 'wb') as fileWritePickle:
        if usr_name is not None:
            dictDetails['USER_NAME'] = usr_name
        if grp_name is not None:
            dictDetails['GRP_NAME'] = grp_name
        pickle.dump( dictDetails, fileWritePickle, protocol=pickle.HIGHEST_PROTOCOL)
    logging.debug("2:" + str(dictDetails) )
|
#!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(12, GPIO.OUT)
class Led_publish(object):
def __init__(self):
self.pub_control = rospy.Publisher("~led_test_control", Int32, queue_size=1)
self.controlBox()
def controlBox(self):
p = GPIO.PWM(12,1)
p.start(100)
        key = int(input("light level: "))
override_msg = Int32()
override_msg.data = key
self.pub_control.publish(override_msg)
print("[led_publish_node] Publish successfully!")
if __name__ == "__main__":
rospy.init_node("led_publish_node", anonymous=False)
led_control_publish = Led_publish()
rospy.spin()
|
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class Complex(object):
''' Simple Complex Number Class for pytorch '''
def __init__(self, real, imag=None):
        ''' If imag is None, split `real` in half along the last dim --> [re, im]. '''
if imag is not None:
assert real.size() == imag.size(), "{}re != {}im".format(
real.size(), imag.size())
self._real = real
self._imag = imag
else:
assert real.size(-1) % 2 == 0, "need to be div by two"
assert real.dim() == 2, "only 2d supported"
half = real.size(-1) // 2
self._real = real[:, 0:half]
self._imag = real[:, half:]
def unstack(self):
return torch.cat([self._real, self._imag], dim=-1)
def __add__(self, other):
real = self._real + other._real
imag = self._imag + other._imag
return Complex(real, imag)
def __sub__(self, other):
real = self._real - other._real
imag = self._imag - other._imag
return Complex(real, imag)
def __mul__(self, other):
real = self._real * other._real + self._imag * other._imag
imag = self._real * other._imag + self._imag * other._real
return Complex(real, imag)
def __rmul__(self, other):
real = other._real * self._real + other._imag * self._imag
imag = other._imag * self._real + other._real * self._imag
return Complex(real, imag)
def abs(self):
return torch.sqrt(self._real * self._real + self._imag * self._imag)
def conj(self):
return Complex(self._real, -self._imag)
def size(self):
return self._real.size()
def real(self):
return self._real
def imag(self):
return self._imag
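# Usage sketch: a batch of complex vectors packed as [real | imag] halves.
#   z = Complex(torch.randn(4, 8))   # splits the last dim into 4 real + 4 imag
#   z.abs().size()                   # -> torch.Size([4, 4])
#   z.conj().unstack().size()        # -> torch.Size([4, 8])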
def long_type(use_cuda):
return torch.cuda.LongTensor if use_cuda else torch.LongTensor
def float_type(use_cuda):
return torch.cuda.FloatTensor if use_cuda else torch.FloatTensor
def one_hot(num_cols, indices, use_cuda=False):
""" Creates a matrix of one hot vectors.
- num_cols: int
- indices: FloatTensor array
"""
batch_size = indices.size(0)
mask = long_type(use_cuda)(batch_size, num_cols).fill_(0)
ones = 1
if isinstance(indices, Variable):
ones = Variable(long_type(use_cuda)(indices.size()).fill_(1))
mask = Variable(mask, volatile=indices.volatile)
return mask.scatter_(1, indices, ones)
def circular_convolution_conv(keys, values, cuda=False):
'''
For the circular convolution of x and y to be equivalent,
you must pad the vectors with zeros to length at least N + L - 1
before you take the DFT. After you invert the product of the
DFTs, retain only the first N + L - 1 elements.
'''
assert values.dim() == keys.dim() == 2, "only 2 dims supported"
batch_size = keys.size(0)
keys_feature_size = keys.size(1)
values_feature_size = values.size(1)
required_size = keys_feature_size + values_feature_size - 1
# zero pad upto N+L-1
zero_for_keys = Variable(float_type(cuda)(
batch_size, required_size - keys_feature_size).zero_())
zero_for_values = Variable(float_type(cuda)(
batch_size, required_size - values_feature_size).zero_())
keys = torch.cat([keys, zero_for_keys], -1)
values = torch.cat([values, zero_for_values], -1)
# do the conv and reshape and return
print('values = ', values.view(batch_size, 1, -1).size(), ' keys = ', keys.view(batch_size, 1, -1).size())
print('conv = ', F.conv1d(values.view(batch_size, 1, -1),
keys.view(batch_size, 1, -1)).size())
return F.conv1d(values.view(batch_size, 1, -1),
keys.view(batch_size, 1, -1)).squeeze()[:, 0:required_size]
def circular_convolution_fft(keys, values, normalized=True, conj=False, cuda=False):
'''
For the circular convolution of x and y to be equivalent,
you must pad the vectors with zeros to length at least N + L - 1
before you take the DFT. After you invert the product of the
DFTs, retain only the first N + L - 1 elements.
'''
assert values.dim() == keys.dim() == 2, "only 2 dims supported"
assert values.size(-1) % 2 == keys.size(-1) % 2 == 0, "need last dim to be divisible by 2"
batch_size, keys_feature_size = keys.size(0), keys.size(1)
values_feature_size = values.size(1)
required_size = keys_feature_size + values_feature_size - 1
required_size = required_size + 1 if required_size % 2 != 0 else required_size
# conj transpose
keys = Complex(keys).conj().unstack() if conj else keys
# reshape to [batch, [real, imag]]
half = keys.size(-1) // 2
keys = torch.cat([keys[:, 0:half].unsqueeze(2), keys[:, half:].unsqueeze(2)], -1)
values = torch.cat([values[:, 0:half].unsqueeze(2), values[:, half:].unsqueeze(2)], -1)
# do the fft, ifft and return num_required
kf = torch.fft(keys, signal_ndim=1, normalized=normalized)
vf = torch.fft(values, signal_ndim=1, normalized=normalized)
kvif = torch.ifft(kf*vf, signal_ndim=1, normalized=normalized)#[:, 0:required_size]
# if conj:
# return Complex(kvif[:, :, 1], kvif[:, :, 0]).unstack()
#return Complex(kvif[:, :, 0], kvif[:, :, 1]).abs() if not conj \
# return Complex(kvif[:, :, 0], kvif[:, :, 1]).unstack() # if not conj \
# else Complex(kvif[:, :, 1], kvif[:, :, 0]).abs()
return Complex(kvif[:, :, 0], kvif[:, :, 1]).unstack().view(batch_size, -1)
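# Sketch of the holographic bind/unbind round-trip these convolutions support
# (hypothetical sizes; unbinding via conj=True is only approximate):
#   k, v = torch.randn(2, 8), torch.randn(2, 8)
#   bound = circular_convolution_fft(k, v)
#   v_hat = circular_convolution_fft(k, bound, conj=True)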
class HolographicMemory(nn.Module):
def __init__(self, num_init_memories, normalization='complex', cuda=True):
super(HolographicMemory, self).__init__()
self.perms, self.inv_perms, self.memories = None, None, None
self.num_memories = num_init_memories
self.complex_normalize = normalization == 'complex'
self.l2_normalize = normalization == 'l2'
self.conv_fn = circular_convolution_fft
self.cuda = cuda
@staticmethod
def _generate_perms_and_inverses(feature_size, num_perms):
perms = [torch.randperm(feature_size)
for _ in range(num_perms)]
inv_perms = [torch.cat([(perm == i).nonzero()
for i in range(feature_size)], 0).squeeze()
for perm in perms]
return perms, inv_perms
def normalize(self, arr):
if self.complex_normalize:
return self._complex_normalize(arr)
return F.normalize(arr, dim=-1)
def _complex_normalize(self, arr):
assert arr.size(-1) % 2 == 0, "dim[-1] need to be divisible by 2"
half = arr.size(-1) // 2
cplx = Complex(arr[:, 0:half], arr[:, half:]).abs()
mag = torch.max(cplx, torch.ones_like(cplx))
return arr / torch.cat([mag, mag], -1)
def encode(self, keys, values):
'''
        Encodes keys and values together.
values: [batch_size, feature_size]
keys: [batch_size, feature_size]
sets memories: [num_memories, features]
'''
assert values.dim() == keys.dim() == 2, "only operate over 2 dims"
batch_size, feature_size = list(values.size())
if self.perms is None:
''' initial generation of random perms '''
self.perms, self.inv_perms = self._generate_perms_and_inverses(
feature_size, self.num_memories
)
keys = self.normalize(keys)
permed_keys = torch.cat([keys[:, perm] for perm in self.perms], 0)
conv_output = self.conv_fn(permed_keys,
values.repeat([self.num_memories, 1]),
cuda=self.cuda)
self.memories = self.memories + conv_output if self.memories is not None else conv_output
def extend_memory(self, batch_size, feature_size, num_to_extend):
if num_to_extend < 1:
return
new_perms, new_inv_perms = self._generate_perms_and_inverses(
feature_size, num_to_extend
)
self.perms.extend(new_perms)
self.inv_perms.extend(new_inv_perms)
if self.memories is not None:
zero_vectors = float_type(self.cuda)(batch_size*num_to_extend, feature_size).zero_()
self.memories = torch.cat([self.memories, zero_vectors], 0)
self.num_memories += num_to_extend
def decode(self, keys):
'''
        Decodes values out of memory.
keys: [batch_size, feature_size]
returns: [batch, features]
'''
keys = self.normalize(keys)
batch_size = keys.size(0)
# re-gather keys to avoid mixing between different keys.
permed_keys = torch.cat([keys[:, perm] for perm in self.perms], 0)
unsplit_conv = self.conv_fn(permed_keys, self.memories, conj=False, cuda=self.cuda)
indices = [[i for i in range(j, self.num_memories*batch_size, batch_size)]
for j in range(batch_size)]
return torch.cat([torch.sum(unsplit_conv[ind], 0) for ind in indices], 0)
if __name__ == "__main__":
# simple test on MNIST recovery
import argparse
import torchvision
from torchvision import datasets, transforms
parser = argparse.ArgumentParser(description='HolographicMemory MNIST Recovery')
# Task parameters
parser.add_argument('--key-type', type=str, default='gaussian',
help="type of key: gaussian or onehot (default: gaussian)")
parser.add_argument('--batch-size', type=int, default=10,
help="batch size (default: 10)")
parser.add_argument('--batches-to-encode', type=int, default=10,
help="how many minibatches to encode (default: 10)")
parser.add_argument('--num-memories', type=int, default=10,
help="number of memory traces (default: 10)")
parser.add_argument('--increment-memories-per-batch', type=int, default=0,
help="number of memory traces to increase per batch (default: 0)")
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()
feature_size = 784
mnist = torch.utils.data.DataLoader(
datasets.MNIST('.datasets', train=True, download=True, transform=transforms.ToTensor()),
batch_size=args.batch_size,
drop_last=True,
shuffle=True,
)
# build memory and some random keys
memory = HolographicMemory(num_init_memories=args.num_memories,
normalization='complex', cuda=args.cuda)
if args.key_type == 'gaussian':
keys = [torch.randn(args.batch_size, feature_size)
for _ in range(args.batches_to_encode)]
else:
rv = torch.distributions.OneHotCategorical(probs=torch.rand(args.batch_size, feature_size))
keys = [rv.sample() for _ in range(args.batches_to_encode)]
if args.cuda:
keys = [k.cuda() for k in keys]
# encode some images
img_container, key_container = [], []
for i, (img, lbl) in enumerate(mnist):
if i > args.batches_to_encode - 1:
break
img, lbl = img.cuda() if args.cuda else img, lbl.cuda() if args.cuda else lbl
img_container.append(img)
memory.encode(keys[i], img.view(args.batch_size, -1))
# lbl = lbl.unsqueeze(1) if lbl.dim() < 2 else lbl
# key_container.append(one_hot(feature_size, lbl, True).type(float_type(True)))
# print(img.size(), lbl.size(), key_container[-1].size())
# memory.encode(key_container[-1], img.view(args.batch_size, -1))
# expand_mem if requested
memory.extend_memory(args.batch_size, feature_size, args.increment_memories_per_batch)
img_container = torch.cat(img_container, 0)
# keys = torch.cat(key_container, 0)
# print("key container post = ", keys.size())
print("encoded {} samples x {} --> {}".format(
args.batch_size, list(img.size()), list(memory.memories.size())))
# try to decode
values = torch.cat([memory.decode(key) for key in keys], 0)
print("decoded {} keys --> {}".format(
list(torch.cat(keys, 0).size()), values.size()))
# save image for visualization
grid = torchvision.utils.make_grid(
torch.cat([img_container, values.view(-1, 1, 28, 28)], 0),
nrow=args.batch_size, normalize=True, scale_each=True
)
def show(img):
import matplotlib.pyplot as plt
npimg = img.cpu().numpy()
plt.imshow(np.transpose(npimg, (1,2,0)), interpolation='nearest')
plt.show()
show(grid)
|
# ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Diplomacy Research """
# Setting up root logger
import os
import logging
import sys
# Adding path to proto/ dir
sys.path.append(os.path.join(os.path.dirname(__file__), 'proto'))
LOGGING_LEVEL = {'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG}.get(os.environ.get('DIPLOMACY_LOGGING', 'INFO'), logging.INFO)
# Defining root logger
ROOT_LOGGER = logging.getLogger('diplomacy_research')
ROOT_LOGGER.setLevel(LOGGING_LEVEL)
ROOT_LOGGER.propagate = False
# Adding output to stdout by default
STREAM_HANDLER = logging.StreamHandler(sys.stdout)
STREAM_HANDLER.setLevel(logging.DEBUG)
FORMATTER = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
STREAM_HANDLER.setFormatter(FORMATTER)
ROOT_LOGGER.addHandler(STREAM_HANDLER)
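# Modules under the package pick this configuration up through the normal
# logger hierarchy, e.g.:
#   LOGGER = logging.getLogger('diplomacy_research.models.training')  # hypothetical child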
|
"""
Script to upload IOTile Device Streamer Reports (containing stream data)
"""
import argparse
import datetime
import getpass
import json
import logging
import os
import sys
from pprint import pprint
import pytz
from iotile_cloud.api.connection import Api
from iotile_cloud.api.exceptions import HttpClientError
PRODUCTION_DOMAIN_NAME = 'https://iotile.cloud'
STAGE_DOMAIN_NAME = 'https://cloud.corp.archsys.io'
TEST_DOMAIN_NAME = 'http://127.0.0.1:8000'
logger = logging.getLogger(__name__)
if __name__ == '__main__':
# Test
# Logger Format
from logging import Formatter, StreamHandler
FORMAT = '[%(asctime)-15s] %(levelname)-6s %(message)s'
DATE_FORMAT = '%d/%b/%Y %H:%M:%S'
formatter = Formatter(fmt=FORMAT, datefmt=DATE_FORMAT)
handler = StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-u', '--user', dest='email', type=str, help='Email used for login')
parser.add_argument('-s', '--server', dest='server', type=str,
default=TEST_DOMAIN_NAME, help='Server to upload to')
parser.add_argument('file', metavar='file', type=str, help='File Path')
    parser.add_argument('-t', '--timestamp', dest='ts', type=str, help='timestamp when the gateway received the report')
args = parser.parse_args()
logger.info('--------------')
if not args.email:
logger.error('User email is required: --user')
sys.exit(1)
# 1.- Check that file exists
if not os.path.exists(args.file):
logger.error('File not found: {}'.format(args.file))
sys.exit(1)
password = getpass.getpass()
domain = args.server
logger.info('------------------------------')
logger.info('Uploading to {}'.format(domain))
logger.info('------------------------------')
c = Api(domain)
ok = c.login(email=args.email, password=password)
if ok:
logger.info('Welcome {0}'.format(args.email))
logger.info('Uploading: {0}'.format(args.file))
if not args.ts:
ts_aware = pytz.utc.localize(datetime.datetime.utcnow())
ts = '{}'.format(ts_aware.isoformat())
else:
ts = args.ts
logger.info('timestamp {}'.format(ts))
try:
# resp = c.streamer().report.upload_file(filename=args.file, timestamp=ts)
with open(args.file, 'rb') as fp:
resp = c.streamer().report.upload_fp(fp=fp, timestamp=ts)
pprint(resp)
except HttpClientError as e:
logger.error(e)
for item in json.loads(e.content.decode()):
logger.error(item)
logger.info('Goodbye!!')
c.logout()
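    # Example invocation (hypothetical script name, email, and report file):
    #   python upload_report.py --user you@example.com --server https://iotile.cloud report.bin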
|
import sys
import asyncio
import logging
import grpc
import app_pb2
import app_pb2_grpc
PORT = 50052
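# Assumes a matching Greeter server (implementing SayHello from the project's
# app.proto) is already listening on this port.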
async def run() -> None:
async with grpc.aio.insecure_channel(f"localhost:{PORT}") as channel:
stub = app_pb2_grpc.GreeterStub(channel)
response = await stub.SayHello(
app_pb2.HelloRequest(name='World')
)
print("Greeter client received: " + response.message)
if __name__ == '__main__':
logging.basicConfig(
level=logging.DEBUG,
stream=sys.stdout,
)
asyncio.run(run())
|
"""
Description:
"""
__author__ = "James Banting, Darren Wiens, Jonathan Healy"
|
__author__ = "Sanju Sci"
__email__ = "sanju.sci9@gmail.com"
__copyright__ = "Copyright 2019"
from nltk import pos_tag, ne_chunk
from nltk.tokenize import word_tokenize
from nltk.corpus import state_union
from nltk.tokenize import PunktSentenceTokenizer
class NamedEntityRecognization(object):
def __init__(self, train_text, sample_text):
"""
:param train_text:
:param sample_text:
"""
self.train_text = state_union.raw(train_text)
self.sample_text = state_union.raw(sample_text)
self.custom_sent_tokenizer = PunktSentenceTokenizer(self.train_text)
self.tokenized = self.custom_sent_tokenizer.tokenize(self.sample_text)
def process_content(self):
"""
:return:
"""
try:
for i in self.tokenized:
words = word_tokenize(i)
tagged = pos_tag(words)
namedEnt = ne_chunk(tagged, binary=True)
namedEnt.draw()
except Exception as e:
print(str(e))
if __name__ == "__main__":
tk = NamedEntityRecognization("2005-GWBush.txt", "2006-GWBush.txt")
tk.process_content()
|
import typing as t
import sys
from .app import app, SECRET_KEY
from .config import APP_CONFIGS
def debug():
print(f"SECRET_KEY: {SECRET_KEY}")
app.run(**APP_CONFIGS)
COMMANDS: t.Dict[str, t.Callable] = {
"debug": debug,
"production": lambda: print("Not done yet")
}
def main(args: t.List[str]) -> int:
    if len(args) < 2:
        print("Usage: one of " + ", ".join(COMMANDS))
        return 1
    function = COMMANDS.get(args[1], None)
    if function is None:
        print(f"Could not find command named {args[1]}")
        return 1
    function()
    return 0
def run():
return main(sys.argv)
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
import logging
from tensorflow import keras
from tensorflow.keras import losses
from tensorflow.keras import regularizers
from tensorflow.keras import backend as K
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import GRU
from tensorflow.keras.constraints import Constraint
from tensorflow.keras.constraints import min_max_norm
logger = logging.getLogger('RNNoise')
def rnnCrossentropy(y_true, y_pred):
return K.mean(2 * K.abs(y_true - 0.5) * K.binary_crossentropy(y_pred, y_true), axis=-1)
def mask(y_true):
return K.minimum(y_true + 1., 1.)
def msse(y_true, y_pred):
return K.mean(mask(y_true) * K.square(K.sqrt(y_pred) - K.sqrt(y_true)), axis=-1)
def rnnCost(y_true, y_pred):
return K.mean(mask(y_true) * (10 * K.square(K.square(K.sqrt(y_pred) - K.sqrt(y_true))) + K.square(
K.sqrt(y_pred) - K.sqrt(y_true)) + 0.01 * K.binary_crossentropy(y_pred, y_true)), axis=-1)
def rnnAccuracy(y_true, y_pred):
return K.mean(2 * K.abs(y_true - 0.5) * K.equal(y_true, K.round(y_pred)), axis=-1)
class WeightClip(Constraint):
def __init__(self, c=2):
self.c = c
def __call__(self, p):
return K.clip(p, -self.c, self.c)
def get_config(self):
return {'name': self.__class__.__name__,
'c': self.c}
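# Clipping weights to a small range mirrors the original RNNoise training
# setup, where the trained network is later quantized for fixed-point
# inference in C.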
class RNNoise:
def __init__(self, checkpoint=None):
self.batch_size = 32
self.reg = 0.000001
self.constraint = WeightClip(0.499)
if checkpoint:
logger.info('Model checkpoint input obtained')
self.__model = keras.models.load_model(checkpoint)
else:
logger.info('Creating new model')
self.__createModel()
def __createModel(self):
mainInput = Input(shape=(None, 42), name='main_input')
tmp = Dense(24, activation='tanh', name='input_dense', kernel_constraint=self.constraint,
bias_constraint=self.constraint)(mainInput)
vadGRU = GRU(24, activation='tanh', recurrent_activation='sigmoid', return_sequences=True, name='vad_gru',
kernel_regularizer=regularizers.l2(self.reg), recurrent_regularizer=regularizers.l2(self.reg),
kernel_constraint=self.constraint, recurrent_constraint=self.constraint,
bias_constraint=self.constraint)(tmp)
vadOutput = Dense(1, activation='sigmoid', name='vad_output', kernel_constraint=self.constraint,
bias_constraint=self.constraint)(vadGRU)
noiseInput = keras.layers.concatenate([tmp, vadGRU, mainInput])
noiseGRU = GRU(48, activation='relu', recurrent_activation='sigmoid', return_sequences=True, name='noise_gru',
kernel_regularizer=regularizers.l2(self.reg), recurrent_regularizer=regularizers.l2(self.reg),
kernel_constraint=self.constraint, recurrent_constraint=self.constraint,
bias_constraint=self.constraint)(noiseInput)
denoiseInput = keras.layers.concatenate([vadGRU, noiseGRU, mainInput])
denoiseGRU = GRU(96, activation='tanh', recurrent_activation='sigmoid', return_sequences=True,
name='denoise_gru', kernel_regularizer=regularizers.l2(self.reg),
recurrent_regularizer=regularizers.l2(self.reg), kernel_constraint=self.constraint,
recurrent_constraint=self.constraint, bias_constraint=self.constraint)(denoiseInput)
denoiseOutput = Dense(22, activation='sigmoid', name='denoise_output', kernel_constraint=self.constraint,
bias_constraint=self.constraint)(denoiseGRU)
self.__model = Model(inputs=mainInput, outputs=[denoiseOutput, vadOutput])
self.__model.compile(loss=[rnnCost, rnnCrossentropy],
metrics=[msse],
optimizer='adam', loss_weights=[10, 0.5])
def __save(self, filename):
logger.info('Save model to file ' + filename)
self.__model.save(filename)
    def __prepareInput(self, input):
pass
def __prepareOutput(self, output):
pass
def train(self, xTrain, yTrain, voiceAvailable):
self.__model.fit(xTrain, [yTrain, voiceAvailable],
batch_size=self.batch_size,
epochs=1,
validation_split=0.1)
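# Training sketch (hypothetical arrays): 42 input features per frame,
# 22 band gains plus a VAD flag as targets.
#   model = RNNoise()
#   model.train(xTrain, yTrain, voiceAvailable)
#   # xTrain: (N, T, 42), yTrain: (N, T, 22), voiceAvailable: (N, T, 1)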
|
"""Tests for degredations."""
from prysm import degredations
def test_smear():
sm = degredations.Smear(1, 1)
assert sm.analytic_ft(0, 0) == 1 / sm.width
def test_jitter():
jt = degredations.Jitter(1)
assert jt.analytic_ft(0, 0) == 1
|
import pandas as pd
import tweepy
import time
import pickle
key = {
    "consumer_key": "",          # key (fill in)
    "consumer_secret": "",       # secret (fill in)
    "access_token": "",          # token (fill in)
    "access_token_secret": ""    # token secret (fill in)
}
#Twitter API credentials
consumer_key = key["consumer_key"]
consumer_secret = key["consumer_secret"]
access_key = key["access_token"]
access_secret = key["access_token_secret"]
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_key, access_secret)
api = tweepy.API(auth)
def collect(id_list):
batch_size = 100 # http://docs.tweepy.org/en/v3.5.0/api.html#API.statuses_lookup
batch_indices = range(0, len(id_list), batch_size)
batches = [id_list[i:i + batch_size] for i in batch_indices]
tweetbatchlist = []
for batch in batches:
tweetbatchlist.append(api.statuses_lookup(batch, tweet_mode='extended'))
time.sleep(1) # don't hit API rate limit
return tweetbatchlist
def dataframiseTweet(tweet):
tempdict = {}
tempdict['created_at'] = tweet['created_at']
tempdict['tweetid'] = tweet['id']
tempdict['text'] = tweet['full_text']
tempdict['userId'] = tweet['user']['id']
tempdict['userName'] = tweet['user']['name']
tempdict['screen_name'] = tweet['user']['screen_name']
tempdict['in_reply_to_status_id'] = tweet['in_reply_to_status_id']
tempdict['mentioneeId'] = tweet['in_reply_to_user_id']
tempdict['mentioneeScreenname'] = tweet['in_reply_to_screen_name']
tempdict['RT'] = tweet['retweet_count']
tempdict['Like'] = tweet['favorite_count']
if len(tweet['entities']['user_mentions'])>0:
tempdict['mentioneeName'] = tweet['entities']['user_mentions'][0]['name']
return pd.DataFrame(tempdict, index =[''])
# Collect tweets and save
idlist = [] # insert list of ID here
tweets = collect(idlist)
pickle.dump(tweets, open('tweets.pkl','wb'))
# Read Twitter result and convert to dataframe
dfs = []
count = 0
for resultset in tweets:
for tweet in resultset:
dfs.append(dataframiseTweet(tweet._json))
count+=1
tweetdf = pd.concat(dfs)
tweetdf.to_pickle('TweetsDF.pkl')
|
import example
# Create the Circle object
r = 2
print(" Creating circle (radius: %d):" % r)
c = example.Circle(r)
# Set the location of the object
c.x = 20
c.y = 30
print(" Here is its current position:")
print(" Circle = (%f, %f)" % (c.x, c.y))
# ----- Call some methods -----
print("\n Here are some properties of the Circle:")
print(" area = ", c.area())
print(" perimeter = ", c.perimeter())
dx = 1
dy = 1
print(" Moving with (%d, %d)..." % (dx, dy))
c.move(dx, dy)
del c
print("===================================")
# test move function
r = 2
print(" Creating circle (radius: %d):" % r)
c = example.Circle(r)
# Set the location of the object
c.x = 20
c.y = 30
print(" Here is its current position:")
print(" Circle = (%f, %f)" % (c.x, c.y))
# ----- Call some methods -----
print("\n Here are some properties of the Circle:")
print(" area = ", c.area())
print(" perimeter = ", c.perimeter())
# no error for Circle's pre-assertion
dx = 1
dy = -1
print(" Moving with (%d, %d)..." % (dx, dy))
c.move(dx, dy)
# error with Shape's pre-assertion
dx = -1
dy = 1
print(" Moving with (%d, %d)..." % (dx, dy))
c.move(dx, dy)
|
import concurrent.futures as cf
import subprocess
import os
def run_processes(commands, worker_count=4):
    with cf.ProcessPoolExecutor(max_workers=worker_count) as executor:
        for c in commands:
            executor.submit(run_batch, command=c)
def run_batch(command):
    # Silence the child's output and print one progress dot per finished command.
    with open(os.devnull, 'wb') as devnull:
        subprocess.call([command], stdout=devnull, stderr=devnull)
    print('.', end='', flush=True)
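# Usage sketch (hypothetical commands; each entry is a single executable path,
# since run_batch passes it as a one-element argv):
#   run_processes(['/bin/true', '/bin/true'], worker_count=2)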
|
from typing import List
from flask import jsonify, request
from flask_accepts import accepts, responds
from flask_cors import cross_origin
from flask_restx import Namespace, Resource, abort
from flask_jwt_extended import get_jwt_claims, jwt_required
from .interface import IPup
from .model import Pup
from .schema import PupSchema
from .service import PupService
api = Namespace(
'pups',
description='Ns with Pup entity',
decorators=[cross_origin()],
)
sub_api = Namespace(
'subscription',
description='Ns with User subscriptions.',
decorators=[cross_origin()],
)
@api.route('/')
class PupResource(Resource):
"""Pups."""
@responds(schema=PupSchema(many=True), api=api)
@api.doc(security='loggedIn')
@jwt_required
def get(self) -> List[Pup]:
"""Get all Pups."""
return PupService.get_all()
@accepts(schema=PupSchema, api=api)
@responds(schema=PupSchema, api=api)
@api.doc(security='loggedIn')
@jwt_required
def post(self) -> Pup:
"""Create Pup instance."""
claim = get_jwt_claims()
user_id = int(claim['id'])
new_pup = request.parsed_obj
new_pup['creator_id'] = user_id
        return PupService.create(new_pup)
@api.route('/owned')
class PupCreatorResource(Resource):
"""Pups."""
@responds(schema=PupSchema(many=True), api=api)
@api.doc(security='loggedIn')
@jwt_required
def get(self) -> List[Pup]:
"""Get all Pups created by current user."""
claim = get_jwt_claims()
user_id = int(claim['id'])
return PupService.get_authored_by(user_id)
@api.route('/search/<string:str_to_find>')
@api.param('str_to_find', 'Part of Pup name to search')
class PupSearchResource(Resource):
"""Providing Pup search."""
@api.doc(responses={
200: '{"status": "Match",\n "pups": [Pup Model object]}',
404: '{"status": "No match"}',
})
def get(self, str_to_find: str):
"""Get matching pups."""
pups: List[Pup] = PupService.search_by_name(str_to_find)
if pups:
serialized_pups = PupSchema().dump(pups, many=True)
return jsonify(
{'status': 'Match', 'pups': serialized_pups},
)
return jsonify({'status': 'No match'}), 404
@api.route('/<int:pup_id>')
@api.param('pup_id', 'Pup db ID')
class PupIdResource(Resource):
def get(self, pup_id: int):
"""Get count of downloads of Pup."""
        return jsonify({'count': PupService.get_downloads_count(pup_id)})
@api.doc(responses={
200: '{"status": "Success",\n "id": deleted_id}',
})
@api.doc(security='loggedIn')
@jwt_required
def delete(self, pup_id: int):
"""Delete single Pup."""
claim = get_jwt_claims()
user_id = int(claim['id'])
if PupService.has_permission(user_id, pup_id):
deleted_id = PupService.delete_by_id(pup_id)
return jsonify({'status': 'Success', 'id': deleted_id})
else:
abort(403)
@accepts(schema=PupSchema, api=api)
@responds(schema=PupSchema, api=api)
@api.doc(security='loggedIn')
@jwt_required
def put(self, pup_id: int):
"""Update single Pup."""
claim = get_jwt_claims()
user_id = int(claim['id'])
if PupService.has_permission(user_id, pup_id):
changes: IPup = request.parsed_obj
loc: Pup = PupService.get_by_id(pup_id)
return PupService.update(loc, changes)
else:
abort(403)
@sub_api.route('/')
class SubscriptionResource(Resource):
@responds(schema=PupSchema(many=True), api=sub_api)
@sub_api.doc(security='loggedIn')
@jwt_required
def get(self):
"""Get user saved Pups."""
claim = get_jwt_claims()
user_id = int(claim['id'])
return PupService.get_subscriptions(user_id)
@sub_api.route('/<int:pup_id>')
@sub_api.param('pup_id', 'Pup db ID')
class SubscriptionsIdResource(Resource):
@sub_api.doc(security='loggedIn')
@jwt_required
def post(self, pup_id: int):
"""Save Pup for current user."""
claim = get_jwt_claims()
user_id = int(claim['id'])
if PupService.add_subscription_by_id(user_id, pup_id):
return jsonify({'status': 'Success'})
else:
abort(404)
@sub_api.doc(security='loggedIn')
@jwt_required
def delete(self, pup_id: int):
"""Delete Pup for current user from saved."""
claim = get_jwt_claims()
user_id = int(claim['id'])
if PupService.delete_subscription(user_id, pup_id):
return jsonify({'status': 'Success'})
else:
abort(404)
@sub_api.route('/join/<string:join_code>')
@sub_api.param('join_code', 'Pup join code to add Sub.')
class JoinCodeResource(Resource):
@sub_api.doc(security='loggedIn')
@jwt_required
def post(self, join_code: str):
"""Save Pup for current user."""
claim = get_jwt_claims()
user_id = int(claim['id'])
if PupService.add_subscription_by_join_code(user_id, join_code):
return jsonify({'status': 'Success'})
else:
abort(404)
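# Wiring sketch (hypothetical application factory): both namespaces still need
# to be registered on the app's root Api, e.g.
#   api_root.add_namespace(api, path='/pups')
#   api_root.add_namespace(sub_api, path='/subscription')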
|
def test(x):
foo = None
if x is True:
def foo():
print('yes')
elif not x:
def foo():
print('no')
return foo
print(test(1))   # prints None: `1 is True` is False and `not 1` is also False
test(True)()     # prints 'yes'
test(False)()    # prints 'no'
|
def factorial(n):
fat = 1
while n > 0:
fat = fat * n
n = n - 1
return fat
def main():
    num = 1
    print("Welcome to your factorial calculator! To stop, enter a negative value.")
    while num >= 0:
        num = int(input("Enter the number whose factorial you want: "))
        if num < 0:
            break  # don't print a factorial for the sentinel value
        result = factorial(num)
        print(f'The factorial of {num} is {result}')
    print("***End of the factorial exercise***")
main()
|
"""
Tools for creating a DC/OS cluster.
"""
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
import click
from docker.models.networks import Network
from docker.types import Mount
from dcos_e2e.backends import Docker
from dcos_e2e.distributions import Distribution
from dcos_e2e.docker_storage_drivers import DockerStorageDriver
from dcos_e2e.docker_versions import DockerVersion
from dcos_e2e.node import Transport
from dcos_e2e_cli.common.arguments import installer_argument
from dcos_e2e_cli.common.create import CREATE_HELP, create_cluster, get_config
from dcos_e2e_cli.common.credentials import add_authorized_key
from dcos_e2e_cli.common.doctor import get_doctor_message
from dcos_e2e_cli.common.install import (
install_dcos_from_path,
run_post_install_steps,
)
from dcos_e2e_cli.common.options import (
cluster_id_option,
copy_to_master_option,
extra_config_option,
genconf_dir_option,
license_key_option,
security_mode_option,
variant_option,
verbosity_option,
)
from dcos_e2e_cli.common.options.cluster_size import (
agents_option,
masters_option,
public_agents_option,
)
from dcos_e2e_cli.common.utils import (
check_cluster_id_unique,
command_path,
write_key_pair,
)
from dcos_e2e_cli.common.variants import get_install_variant
from dcos_e2e_cli.common.workspaces import workspace_dir_option
from ._cgroup_mount_option import cgroup_mount_option
from ._common import (
CLUSTER_ID_LABEL_KEY,
NODE_TYPE_AGENT_LABEL_VALUE,
NODE_TYPE_LABEL_KEY,
NODE_TYPE_MASTER_LABEL_VALUE,
NODE_TYPE_PUBLIC_AGENT_LABEL_VALUE,
WORKSPACE_DIR_LABEL_KEY,
ClusterContainers,
existing_cluster_ids,
)
from ._docker_network import docker_network_option
from ._docker_storage_driver import docker_storage_driver_option
from ._docker_version import docker_version_option
from ._linux_distribution import linux_distribution_option
from ._options import node_transport_option, wait_for_dcos_option
from ._port_mapping import one_master_host_port_map_option
from ._volume_options import (
AGENT_VOLUME_OPTION,
MASTER_VOLUME_OPTION,
PUBLIC_AGENT_VOLUME_OPTION,
VOLUME_OPTION,
)
from .doctor import doctor
from .wait import wait
@click.command('create', help=CREATE_HELP)
@installer_argument
@docker_version_option
@linux_distribution_option
@docker_storage_driver_option
@cgroup_mount_option
@masters_option
@agents_option
@public_agents_option
@extra_config_option
@security_mode_option
@cluster_id_option
@license_key_option
@genconf_dir_option
@copy_to_master_option
@VOLUME_OPTION
@MASTER_VOLUME_OPTION
@AGENT_VOLUME_OPTION
@PUBLIC_AGENT_VOLUME_OPTION
@workspace_dir_option
@variant_option
@wait_for_dcos_option
@docker_network_option
@node_transport_option
@one_master_host_port_map_option
@verbosity_option
@click.pass_context
def create(
ctx: click.core.Context,
agents: int,
installer: Path,
cluster_id: str,
docker_storage_driver: Optional[DockerStorageDriver],
docker_version: DockerVersion,
extra_config: Dict[str, Any],
linux_distribution: Distribution,
masters: int,
public_agents: int,
license_key: Optional[Path],
security_mode: Optional[str],
copy_to_master: List[Tuple[Path, Path]],
genconf_dir: Optional[Path],
workspace_dir: Path,
custom_volume: List[Mount],
custom_master_volume: List[Mount],
custom_agent_volume: List[Mount],
custom_public_agent_volume: List[Mount],
variant: str,
transport: Transport,
wait_for_dcos: bool,
network: Network,
one_master_host_port_map: Dict[str, int],
mount_sys_fs_cgroup: bool,
) -> None:
"""
Create a DC/OS cluster.
"""
check_cluster_id_unique(
new_cluster_id=cluster_id,
existing_cluster_ids=existing_cluster_ids(),
)
http_checks = bool(transport == Transport.SSH)
wait_command_name = command_path(sibling_ctx=ctx, command=wait)
doctor_command_name = command_path(sibling_ctx=ctx, command=doctor)
doctor_message = get_doctor_message(
doctor_command_name=doctor_command_name,
)
public_key_path = workspace_dir / 'id_rsa.pub'
private_key_path = workspace_dir / 'id_rsa'
write_key_pair(
public_key_path=public_key_path,
private_key_path=private_key_path,
)
dcos_variant = get_install_variant(
given_variant=variant,
installer_path=installer,
workspace_dir=workspace_dir,
doctor_message=doctor_message,
)
# This is useful for some people to identify containers.
container_name_prefix = Docker().container_name_prefix + '-' + cluster_id
cluster_backend = Docker(
container_name_prefix=container_name_prefix,
custom_container_mounts=custom_volume,
custom_master_mounts=custom_master_volume,
custom_agent_mounts=custom_agent_volume,
custom_public_agent_mounts=custom_public_agent_volume,
linux_distribution=linux_distribution,
docker_version=docker_version,
storage_driver=docker_storage_driver,
docker_container_labels={
CLUSTER_ID_LABEL_KEY: cluster_id,
WORKSPACE_DIR_LABEL_KEY: str(workspace_dir),
},
docker_master_labels={
NODE_TYPE_LABEL_KEY: NODE_TYPE_MASTER_LABEL_VALUE,
},
docker_agent_labels={NODE_TYPE_LABEL_KEY: NODE_TYPE_AGENT_LABEL_VALUE},
docker_public_agent_labels={
NODE_TYPE_LABEL_KEY: NODE_TYPE_PUBLIC_AGENT_LABEL_VALUE,
},
workspace_dir=workspace_dir,
transport=transport,
network=network,
one_master_host_port_map=one_master_host_port_map,
mount_sys_fs_cgroup=mount_sys_fs_cgroup,
)
cluster = create_cluster(
cluster_backend=cluster_backend,
masters=masters,
agents=agents,
public_agents=public_agents,
doctor_message=doctor_message,
)
cluster_containers = ClusterContainers(
cluster_id=cluster_id,
transport=transport,
)
private_ssh_key_path = cluster_containers.ssh_key_path
private_ssh_key_path.parent.mkdir(parents=True)
private_key_path.replace(private_ssh_key_path)
add_authorized_key(cluster=cluster, public_key_path=public_key_path)
for node in cluster.masters:
for path_pair in copy_to_master:
local_path, remote_path = path_pair
node.send_file(
local_path=local_path,
remote_path=remote_path,
)
dcos_config = get_config(
cluster_representation=cluster_containers,
extra_config=extra_config,
dcos_variant=dcos_variant,
security_mode=security_mode,
license_key=license_key,
)
install_dcos_from_path(
cluster=cluster,
cluster_representation=cluster_containers,
dcos_config=dcos_config,
ip_detect_path=cluster_backend.ip_detect_path,
doctor_message=doctor_message,
dcos_installer=installer,
local_genconf_dir=genconf_dir,
)
run_post_install_steps(
cluster=cluster,
cluster_id=cluster_id,
dcos_config=dcos_config,
doctor_command_name=doctor_command_name,
http_checks=http_checks,
wait_command_name=wait_command_name,
wait_for_dcos=wait_for_dcos,
)
|
from pathlib import Path
from dogebuild import dependencies, doge, directory
from dogebuild_protobuf import proto_mode, ProtobufPlugin
proto_mode()
dependencies(doge(directory("../dependency"), tasks=["proto-sources"],))
ProtobufPlugin(src=Path("src").glob("**/*.proto"),)
|
import avalon.api
from maya import cmds
class SelectSubset(avalon.api.InventoryAction):
label = "Select Subset"
icon = "hand-o-down"
color = "#d8d8d8"
order = 99
def process(self, containers):
groups = list()
for container in containers:
if container.get("loader") == "LookLoader":
continue
groups.append(container["subsetGroup"])
cmds.select(groups)
|
import pymysql.cursors
import csv
import json
from datetime import date, datetime
import dateutil.parser
# JSON serializer for objects not serializable by default json code
def json_serial(obj):
if isinstance(obj, (datetime, date)):
return obj.isoformat()
raise TypeError ("Type %s not serializable" % type(obj))
### Define database connection ###
#
#connection = pymysql.connect(host='localhost',
# user='root',
# password='root',
# db='azure_person_db',
# charset='utf8mb4',
# cursorclass=pymysql.cursors.DictCursor)
# Get face recognition data between start_time and end_time
def get_data(start_time, end_time):
### Obtain data from SQL database ###
#
#with connection.cursor() as cursor:
# sql = "SELECT `id`, `person_first_id`, `person_first_similarity`, `person_second_id`, \
# `person_second_similarity`, `face_rectangle`, `right_person_id`, `monitor_time`, \
# `img_path` FROM `azure_person_identity_checkout` WHERE `checkout_status` != 0 AND `monitor_time` \
# BETWEEN %s AND %s ORDER BY `monitor_time`"
# cursor.execute(sql, (start_time, end_time))
# datas = cursor.fetchall()
### Data pre-processing ###
# This is used to counter a name difference
#
#for data in datas:
# data['final_id'] = data['right_person_id']
### Use to save data to JSON ###
#
#with open('data/azure_face_recognition_result_data.json', 'w') as f:
# for obj in datas:
# json.dump(obj, f, default=json_serial)
# f.write('\n')
# Initialize before query from JSON
datas = []
start_time = dateutil.parser.parse(start_time)
end_time = dateutil.parser.parse(end_time)
# Load demo data from local JSON file ###
with open('data/azure_face_recognition_result_data.json') as f:
for line in f:
this_data = json.loads(line)
if start_time < dateutil.parser.parse(this_data['monitor_time']) < end_time:
datas.append(this_data)
return datas
# Get all possible faces to match with using the similarity conversion table
def get_face_count(group_similarity_src):
    with open(group_similarity_src, 'r', encoding='utf-8') as csvfile:
        input_list = list(csv.reader(csvfile, delimiter=','))
        index_array = input_list.pop(0)
index_array.pop(0) # pop the 0th empty field
face_count = []
for face_id in index_array:
face_count.append(int(face_id))
face_count.append(0)
face_count = sorted(face_count)
print ('face_count', face_count)
return face_count
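# The similarity CSV is assumed to be a square matrix with face ids in both
# the header row and the first column (the leading header cell is empty), e.g.:
#   ,1,2,3
#   1,1.0,0.3,0.2
#   2,0.3,1.0,0.4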
|
"""
Created by yan on 2018/9/26 16:31
"""
import json
from flask import request, jsonify, flash
from app.models import db
from app.libs.helper import get_book_info
from app.models.book import Book
from app.models.user import User
from app.web import web
__author__ = 'yan'
@web.route('/addbook',methods = ["POST" ])
def add_book():
resp = {'code': 200, 'msg': '', 'data': {}}
args = json.loads(str(request.data,encoding='utf-8'))
isbn = args.get('isbn')
openid = args.get('openid')
    # both isbn and openid must be present
    if isbn and openid:
        # check whether this isbn is already in the library
        book = Book.query.filter_by(isbn = isbn).first()
        if not book:
            # the book does not exist yet
            book_info = get_book_info(isbn)
            if book_info:
                # pull the book fields from the lookup result
rate = book_info['rating'].get('average')
title, image, alt, publisher, summary, price = book_info['title'],book_info['image'],book_info['alt'],book_info['publisher'],book_info['summary'],book_info['price']
tags = ','.join([tag['title'] for tag in book_info['tags']])
author = ','.join( book_info['author'])
book = Book(isbn=isbn,rate=rate,title=title,image=image,alt=alt,publisher=publisher,summary=summary,price=price,tags=tags,author=author,openid=openid)
db.session.add(book)
db.session.commit()
book_obj = book.__dict__
book_obj.pop('_sa_instance_state')
                resp['msg'] = 'Book added successfully'
resp['data'] = book_obj
return jsonify(resp)
resp['code'] = -1
            resp['msg'] = 'Book not found'
return jsonify(resp)
else:
book_obj = book.__dict__
book_obj.pop('_sa_instance_state')
resp['code'] = -1
resp['data'] = book_obj
            resp['msg'] = 'Book already exists'
return jsonify(resp)
    flash('openid or isbn missing')
    resp['code'] = -1
    resp['msg'] = 'openid or isbn missing'
return jsonify(resp)
@web.route('/top',methods = [ "GET","POST" ])
def tops():
resp = {'code': 200, 'msg': '', 'data': {}}
books = Book.query.order_by(Book.count.desc())[0:9]
data = []
for item in books:
data.append({
'id':item.id,
'title':item.title,
'image':item.image,
'count':item.count
})
resp['data'] = data
return jsonify(resp)
@web.route('/booklist',methods = ["GET"])
def book_list():
resp = {'code': 200, 'msg': '', 'data': {}}
# if request.values.get('openid'):
# openid = request.values.get('openid')
# books = Book.query.filter_by(openid=openid).all()
#
# else:
books = Book.query.all()
data = []
for item in books:
        # get the info of the user who added the book
user = User.query.filter_by(openid = item.openid).first().user_info
user_obj = json.loads(user)
data.append({
'id':item.id,
'title':item.title,
'image':item.image,
'count':item.count,
'rate':item.rate,
'author':item.author,
'publisher':item.publisher,
'nickName':user_obj['nickName']
})
resp['data'] = data
    resp['msg'] = 'Fetched book list successfully'
return jsonify(resp)
@web.route('/bookdetail',methods = ["GET"])
def book_detail():
resp = {'code': 200, 'msg': '', 'data': {}}
book_id = request.values.get('id')
book_detail = Book.query.filter_by(id=book_id).first()
if book_detail:
book_obj = book_detail.__dict__
book_obj.pop('_sa_instance_state')
user = User.query.filter_by(openid=book_obj['openid']).first()
user_obj = json.loads(user.user_info)
book_obj['user_info'] = {
'name': user_obj['nickName'],
'image': user_obj['avatarUrl'],
}
        book_obj['summary'] = book_obj['summary'].split('\n')
book_obj['tags'] = book_obj['tags'].split(',')
resp['data'] = book_obj
return jsonify(resp)
    resp['msg'] = 'Book not found'
return jsonify(resp)
@web.route('/mybooks',methods = ["GET"])
def my_books():
resp = {'code': 200, 'msg': '', 'data': {}}
openid = request.values.get('openid')
books = Book.query.filter_by(openid=openid).all()
data = []
for item in books:
        # get the info of the user who added the book
user = User.query.filter_by(openid=item.openid).first().user_info
user_obj = json.loads(user)
data.append({
'id': item.id,
'title': item.title,
'image': item.image,
'count': item.count,
'rate': item.rate,
'author': item.author,
'publisher': item.publisher,
'nickName': user_obj['nickName']
})
resp['data'] = data
    resp['msg'] = 'Fetched my books successfully'
return jsonify(resp)
|
"""The Concat Implementation."""
from concat.transpile import transpile
import concat.astutils
import concat.level1.parse
import concat.level1.typecheck
import concat.level2.stdlib.repl
import concat.level2.execute
import argparse
import os.path
import sys
import io
from typing import Callable, IO, AnyStr, TextIO
filename = '<stdin>'
def file_type(mode: str) -> Callable[[str], IO[AnyStr]]:
"""Capture the filename and create a file object."""
def func(name: str) -> IO[AnyStr]:
global filename
filename = name
return open(name, mode=mode)
return func
def get_line_at(file: TextIO, location: concat.astutils.Location) -> str:
file.seek(0, io.SEEK_SET)
lines = [*file]
return lines[location[0] - 1]
arg_parser = argparse.ArgumentParser(description='Run a Concat program.')
arg_parser.add_argument(
'file',
nargs='?',
type=file_type('r'),
default=sys.stdin,
help='file to run',
)
arg_parser.add_argument(
'--debug',
action='store_true',
default=False,
help='turn stack debugging on',
)
# We should pass any unknown args onto the program we're about to run. There
# might be a better way to go about this, but I think this is fine for now.
args, rest = arg_parser.parse_known_args()
sys.argv = [sys.argv[0], *rest]
# interactive mode
if args.file.isatty():
concat.level2.stdlib.repl.repl([], [], args.debug)
else:
try:
python_ast = transpile(args.file.read(), os.path.dirname(filename))
except concat.level1.typecheck.StaticAnalysisError as e:
print('Static Analysis Error:\n')
print(e, 'in line:')
if e.location:
print(get_line_at(args.file, e.location), end='')
print(' ' * e.location[1] + '^')
except concat.level1.parse.ParseError as e:
print('Parse Error:')
print(e)
except Exception:
print('An internal error has occurred.')
print('This is a bug in Concat.')
raise
else:
concat.level2.execute.execute(filename, python_ast, {}, should_log_stacks=args.debug)
finally:
args.file.close()
|
from ..midinote import MidiNote
from ...constants import EVENT_NOTE, EVENT_AMPLITUDE, EVENT_DURATION, EVENT_GATE
from ...pattern import PSequence
import mido
import logging
log = logging.getLogger(__name__)
class MidiFileInputDevice:
""" Read events from a MIDI file.
Requires mido. """
def __init__(self, filename):
self.filename = filename
def read(self, quantize=None):
midi_reader = mido.MidiFile(self.filename)
log.info("Loading MIDI data from %s, ticks per beat = %d" % (self.filename, midi_reader.ticks_per_beat))
note_tracks = list(filter(lambda track: any(message.type == 'note_on' for message in track),
midi_reader.tracks))
if not note_tracks:
raise ValueError("Could not find any tracks with note data")
#------------------------------------------------------------------------
# TODO: Support for multiple tracks
#------------------------------------------------------------------------
track = note_tracks[0]
notes = []
offset = 0
for event in track:
if event.type == 'note_on' and event.velocity > 0:
#------------------------------------------------------------------------
# Found a note_on event.
#------------------------------------------------------------------------
#------------------------------------------------------------------------
# Sanitisation (some midifiles seem to give invalid input).
#------------------------------------------------------------------------
if event.velocity > 127:
event.velocity = 127
offset += event.time / midi_reader.ticks_per_beat
note = MidiNote(event.note, event.velocity, offset)
notes.append(note)
elif event.type == 'note_off' or (event.type == 'note_on' and event.velocity == 0):
#------------------------------------------------------------------------
# Found a note_off event.
#------------------------------------------------------------------------
offset += event.time / midi_reader.ticks_per_beat
for note in reversed(notes):
if note.pitch == event.note and note.duration is None:
note.duration = offset - note.location
break
        #------------------------------------------------------------------------
        # Discard unterminated notes: a note_on without a matching note_off
        # would leave duration as None and break the arithmetic below.
        #------------------------------------------------------------------------
        notes = [note for note in notes if note.duration is not None]
        #------------------------------------------------------------------------
        # Quantize
        #------------------------------------------------------------------------
        if quantize:
            for note in notes:
                note.location = round(note.location / quantize) * quantize
                note.duration = round(note.duration / quantize) * quantize
#------------------------------------------------------------------------
# Construct a sequence which honours chords and relative lengths.
# First, group all notes by their starting time.
#------------------------------------------------------------------------
notes_by_time = {}
for note in notes:
log.debug(" - MIDI event (t = %.2f): Note %d, velocity %d, duration %.3f" %
(note.location, note.pitch, note.velocity, note.duration))
location = note.location
if location in notes_by_time:
notes_by_time[location].append(note)
else:
notes_by_time[location] = [note]
note_dict = {
EVENT_NOTE: [],
EVENT_AMPLITUDE: [],
EVENT_GATE: [],
EVENT_DURATION: []
}
#------------------------------------------------------------------------
# Next, iterate through groups of notes chronologically, figuring out
# appropriate parameters for duration (eg, inter-note distance) and
# gate (eg, proportion of distance note extends across).
#------------------------------------------------------------------------
times = sorted(notes_by_time.keys())
        for i, t in enumerate(times):
notes = notes_by_time[t]
#------------------------------------------------------------------------
# Our duration is always determined by the time of the next note event.
# If a next note does not exist, this is the last note of the sequence;
# use the maximal length of note currently playing (assuming a chord)
#------------------------------------------------------------------------
if i < len(times) - 1:
next_time = times[i + 1]
else:
next_time = t + max([note.duration for note in notes])
time_until_next_note = next_time - t
            if not time_until_next_note:
                #------------------------------------------------------------------------
                # A zero inter-onset distance can occur for the final event
                # after quantization; skip it so the per-key lists stay
                # aligned and the gate division below cannot divide by zero.
                #------------------------------------------------------------------------
                continue
            note_dict[EVENT_DURATION].append(time_until_next_note)
            if len(notes) > 1:
                note_dict[EVENT_NOTE].append(tuple(note.pitch for note in notes))
                note_dict[EVENT_AMPLITUDE].append(tuple(note.velocity for note in notes))
                note_dict[EVENT_GATE].append(tuple(note.duration / time_until_next_note for note in notes))
            else:
                note = notes[0]
                note_dict[EVENT_NOTE].append(note.pitch)
                note_dict[EVENT_AMPLITUDE].append(note.velocity)
                note_dict[EVENT_GATE].append(note.duration / time_until_next_note)
for key, value in note_dict.items():
note_dict[key] = PSequence(value, 1)
return note_dict
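#------------------------------------------------------------------------
# A minimal usage sketch (hedged: 'example.mid' is a hypothetical file and
# the import path depends on where this module lives in the package):
#
#   device = MidiFileInputDevice("example.mid")
#   events = device.read(quantize=0.25)
#   # events maps EVENT_NOTE / EVENT_AMPLITUDE / EVENT_GATE / EVENT_DURATION
#   # to PSequence objects suitable for driving a pattern scheduler.
#------------------------------------------------------------------------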
|