import unittest
import pymysql.cursors
# Not a Python dev, this is probably pretty ugly
class TestDjConventions(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.conn = pymysql.connect(
host="localhost",
user="user",
password="password",
db="helium_from_dj"
)
@classmethod
def tearDownClass(cls):
cls.conn.close()
def test_class_names(self):
expected_names = [
"sample_manual",
"#sample_lookup",
"_sample_imported",
"__sample_computed",
"sample_master",
"sample_master__part1",
"sample_master__part2"
]
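        # These prefixes appear to follow DataJoint's table-tier naming convention
        # (an assumption based on the class name): no prefix for manual tables,
        # '#' for lookup, '_' for imported, '__' for computed, and
        # 'master__partN' for part tables attached to a master table.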
with TestDjConventions.conn.cursor() as cursor:
cursor.execute("SHOW TABLES;")
tables = [row[0] for row in cursor.fetchall()]
for expected_name in expected_names:
self.assertIn(expected_name, tables)
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
from saml2.saml import NAME_FORMAT_URI
__author__ = 'rolandh'
import json
BASE = "http://localhost:8088"
metadata = open("idp_test/idp.xml").read()
info = {
"entity_id": "%s/idp.xml" % BASE,
"interaction": [
{
"matches": {
"url": "%s/login" % BASE,
"title": 'IDP test login'
},
"page-type": "login",
"control": {
"type": "form",
"set": {"login": "roland", "password": "dianakra"}
}
},
{
"matches": {
"url": "%s/sso/redirect" % BASE,
"title": "SAML 2.0 POST"
},
"page-type": "other",
"control": {
"index": 0,
"type": "form",
"set": {}
}
},
{
"matches": {
"url": "%s/sso/post" % BASE,
"title": "SAML 2.0 POST"
},
"page-type": "other",
"control": {
"index": 0,
"type": "form",
"set": {}
}
},
{
"matches": {
"url": "%s/slo/post" % BASE,
"title": "SAML 2.0 POST"
},
"page-type": "other",
"control": {
"index": 0,
"type": "form",
"set": {}
}
}
],
"metadata": metadata,
"name_format": NAME_FORMAT_URI
}
print(json.dumps(info))
|
import jieba
class TabooChineseChecker:
def __init__(self):
pass
def check(self, sent, word):
        segmentation = list(jieba.cut(sent))
        return word in segmentation
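# Illustrative usage (made-up sentences, not part of the original module):
# jieba segments the sentence into tokens, so the check matches whole words
# rather than arbitrary substrings.
if __name__ == '__main__':
    checker = TabooChineseChecker()
    print(checker.check('我喜欢吃苹果', '苹果'))  # True if '苹果' is produced as one token
    print(checker.check('我喜欢吃苹果', '苹'))    # likely False: '苹' alone is not a segment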
|
import csv
import os
from util.singleton import Singleton
class ThrottleFrame:
def __init__(self, time: float, distance: float, speed: float):
self.time = float(time)
self.distance = float(distance)
self.speed = float(speed)
def copy(self):
return ThrottleFrame(self.time, self.distance, self.speed)
@Singleton
class ThrottleAnalysis():
# Read frames from csv
def __init__(self):
self.frames = []
filename = 'data/throttle.csv'
with open(os.path.join(os.path.dirname(__file__), filename), newline='') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.frames.append(ThrottleFrame(row['time'], row['distance'], row['speed']))
print(f'Read {len(self.frames)} frames from {filename}')
    # TODO could be improved with a binary search or gradient descent
    # (a bisect-based sketch is included at the end of this file)
def get_frame_by_speed(self, speed: float) -> ThrottleFrame:
closest = None
for frame in self.frames:
            if closest is None or abs(frame.speed - speed) < abs(closest.speed - speed):
closest = frame
return closest.copy()
    # TODO obvious repetition with above, could probably be generalized later
def get_frame_by_distance(self, distance: float) -> ThrottleFrame:
closest = None
for frame in self.frames:
            if closest is None or abs(frame.distance - distance) < abs(closest.distance - distance):
closest = frame
return closest.copy()
def travel_distance(self, distance: float, initial_speed: float = 0):
start = self.get_frame_by_speed(initial_speed)
end_dist = start.distance + distance
end = self.get_frame_by_distance(end_dist)
        # Handle speeds greater than max throttle speed (e.g. throttle, without boost, while supersonic)
if initial_speed > end.speed:
end.speed = initial_speed
# Interpolate any remaining distance using constant velocity
if end_dist > end.distance:
dist_left = end_dist - end.distance
end.time += dist_left / end.speed
end.distance = end_dist
return ThrottleFrame(end.time - start.time, end.distance - start.distance, end.speed)
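# A minimal sketch of the binary-search idea mentioned in the TODO above. It assumes
# the frames are already sorted by ascending speed (not something the CSV read above
# guarantees) and is illustrative rather than a drop-in replacement for
# ThrottleAnalysis.get_frame_by_speed.
import bisect

def find_frame_by_speed_sorted(frames, speed: float) -> ThrottleFrame:
    # Index of the first frame whose speed is >= the target
    i = bisect.bisect_left([f.speed for f in frames], speed)
    # The closest frame is one of the neighbours around the insertion point
    candidates = frames[max(i - 1, 0):i + 1]
    closest = min(candidates, key=lambda f: abs(f.speed - speed))
    return closest.copy()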
|
"""
LocalStorage
------------
Simple local storage that creates a directory per package and
puts release files in it.
"""
import os
import re
import io
import shutil
import pkginfo
from hashlib import md5
from .base import BaseStorage
class LocalStorage(BaseStorage):
NAME = 'LocalStorage'
def __init__(self, packages_root=None):
if packages_root is None:
raise RuntimeError("Cannot use LocalStorage without PACKAGES_ROOT set")
self.packages_root = packages_root
def _get_metadata(self, release):
try:
metadata = pkginfo.get_metadata(release).__dict__
except Exception: # bad archive
metadata = {}
md5_hash = md5()
with open(release, 'rb') as fp:
for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b''):
md5_hash.update(content)
metadata.update({'md5_digest': md5_hash.hexdigest()})
return metadata
def get_releases_metadata(self):
"""List all releases metadata from PACKAGES_ROOT
:return: generator
:rtype: list
"""
for root, dirs, files in os.walk(self.packages_root):
for f in files:
path = os.path.join(root, f)
yield (os.path.basename(path), self._get_metadata(path))
def delete_package(self, package):
"""Delete entire package directory
"""
path = os.path.join(
self.packages_root,
package.name
)
try:
shutil.rmtree(path)
return True
except Exception:
return False
def delete_release(self, package, version):
"""Delete all files matching specified version
"""
path = os.path.join(self.packages_root, package.name)
if not os.path.isdir(path):
return False
files = os.listdir(path)
        regex = r'.*-(?P<version>[0-9.]*)[.-].*'
r = re.compile(regex)
files = filter(
lambda f: r.match(f) and r.match(f).group('version') == version,
files
)
files = list(files)
for f in files:
os.remove(os.path.join(path, f))
return True
def create_package(self, package):
"""Create new directory for a given package
"""
path = os.path.join(
self.packages_root,
package.name
)
try:
os.mkdir(path)
return True
except OSError:
return False
def create_release(self, package, release_file):
"""Copy release file inside package directory
        If the package directory does not exist, it will be created first
"""
package_path = os.path.join(
self.packages_root,
package.name
)
if not os.path.isdir(package_path):
if not self.create_package(package):
return False
release_path = os.path.join(package_path, release_file.filename)
release_file.save(release_path)
return True
def get_files(self, package, release=None):
"""Get all files associated to a package
If release is not None, it will filter files on release version,
based on a regex
"""
path = os.path.join(self.packages_root, package.name)
if not os.path.isdir(path):
return None
files = os.listdir(path)
if release is not None:
            regex = r'.*-(?P<version>[0-9.]*)[.-].*'
r = re.compile(regex)
v = release.version
files = filter(
lambda f: r.match(f) and r.match(f).group('version') == v,
files
)
files = list(files)
return files
def get_file(self, package, file, release=None):
"""Get a single file from filesystem
"""
return os.path.join(self.packages_root, package.name, file)
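# A quick illustration (not part of the storage API) of what the version regex above
# extracts from typical release filenames; the package names here are made up.
if __name__ == '__main__':
    _r = re.compile(r'.*-(?P<version>[0-9.]*)[.-].*')
    for _name in ('mypkg-1.2.3.tar.gz', 'mypkg-1.2.3-py3-none-any.whl'):
        _m = _r.match(_name)
        print(_name, '->', _m.group('version') if _m else None)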
|
import abc
import time
class Question(abc.ABC):
def __init__(self, name: str):
self.name = name
self.result = None
def __enter__(self):
self.start_time = time.time()
return self
def __exit__(self, *args):
self.end_time = time.time()
self.total_time = self.end_time - self.start_time
print(f'[{self.name}] - Result: {self.result}')
print(f'[{self.name}] - Total time: {self.total_time:.6f}s')
@abc.abstractmethod
def solve(self):
pass
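# A minimal illustration (not part of the original module) of how a concrete
# Question is meant to be used: subclass it, implement solve(), and run it as a
# context manager so __exit__ prints the result and the elapsed time.
class ExampleSumQuestion(Question):
    def solve(self):
        # Toy workload; a real question would do its actual computation here
        self.result = sum(range(1_000_000))

if __name__ == '__main__':
    with ExampleSumQuestion('example-sum') as q:
        q.solve()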
|
#
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Author: Frederic Lepied <frederic.lepied@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from hardware import megacli
from hardware.tests.utils import sample
class TestMegacliTest(unittest.TestCase):
def setUp(self):
def my_run(*args, **arr):
return self.output
self.run = megacli.run_megacli
megacli.run_megacli = my_run
    def tearDown(self):
        megacli.run_megacli = self.run
def test_parse_output_empty(self):
self.assertEqual(megacli.parse_output(''), {})
def test_parse_output_simple(self):
self.assertEqual(megacli.parse_output(' a : b'), {'A': 'b'})
def test_parse_output_adpcount(self):
self.assertEqual(megacli.parse_output(sample('megacli_adpcount')),
{'ControllerCount': 1,
'ExitCode': '0x01'})
def test_adp_count(self):
self.output = sample('megacli_adpcount')
self.assertEqual(megacli.adp_count(), 1)
def test_adp_all_info(self):
self.output = sample('megacli_adp_all_info')
self.assertEqual(megacli.adp_all_info(0),
{'CriticalDisks': 0,
'Degraded': 0,
'Disks': 6,
'FwPackageBuild': '21.1.0-0007',
'FailedDisks': 0,
'Offline': 0,
'PhysicalDevices': 7,
'ProductName': 'PERC H710 Mini',
'SerialNo': '29F026R',
'VirtualDrives': 1})
def test_pd_get_num(self):
self.output = '''
Number of Physical Drives on Adapter 0: 6'''
self.assertEqual(megacli.pd_get_num(0), 6)
def test_split_parts(self):
self.assertEqual(len(megacli.split_parts(' +Enclosure [0-9]+:',
ENC_OUTPUT)),
2)
def test_enc_info(self):
self.output = '''
Number of enclosures on adapter 0 -- 1
Enclosure 0:
Device ID : 32
Number of Slots : 8'''
self.assertEqual(megacli.enc_info(0),
[{'Enclosure': 0,
'DeviceId': 32,
'NumberOfSlots': 8}])
def test_enc_info2(self):
self.output = ENC_OUTPUT
info = megacli.enc_info(0)
self.assertEqual(len(info), 2)
self.assertEqual(info[0]['Enclosure'], 0)
self.assertEqual(info[1]['Enclosure'], 1)
def test_pdinfo(self):
self.output = '''
Enclosure Device ID: 32
Slot Number: 5
Enclosure position: 1
Device Id: 5
WWN: 5000C50054C07E80
Sequence Number: 1
Media Error Count: 0
Other Error Count: 0
Predictive Failure Count: 0
Last Predictive Failure Event Seq Number: 0
PD Type: SAS'''
self.assertEqual(megacli.pdinfo(0, 32, 5),
{'DeviceId': 5,
'EnclosureDeviceId': 32,
'EnclosurePosition': 1,
'LastPredictiveFailureEventSeqNumber': 0,
'MediaErrorCount': 0,
'OtherErrorCount': 0,
'PdType': 'SAS',
'PredictiveFailureCount': 0,
'SequenceNumber': 1,
'SlotNumber': 5,
'Wwn': '5000C50054C07E80'}
)
def test_ld_get_num(self):
self.output = '''
Number of Virtual Drives Configured on Adapter 0: 1'''
self.assertEqual(megacli.ld_get_num(0), 1)
def test_ld_get_info(self):
self.output = sample('megacli_ld_get_info')
self.assertEqual(megacli.ld_get_info(0, 0),
{'Adapter0--VirtualDriveInformation': '',
'BadBlocksExist': 'No',
'CacheCadeType': 'Read Only',
'CanSpinUpIn1Minute': 'Yes',
'CurrentAccessPolicy': 'Read/Write',
'CurrentCachePolicy':
'WriteBack, ReadAdaptive, Direct, '
'No Write Cache if Bad BBU',
'CurrentPowerSavingsPolicy': 'None',
'DefaultAccessPolicy': 'Read/Write',
'DefaultCachePolicy': 'WriteBack, ReadAdaptive, '
'Direct, No Write Cache if Bad BBU',
'DefaultPowerSavingsPolicy': 'Controller Defined',
'DiskCachePolicy': "Disk's Default",
'EncryptionType': 'None',
'IsVdCached': 'Yes',
"Ld'SIoProfileSupportsMaxPowerSavings"
"WithCachedWrites": 'No',
'LdHasDrivesThatSupportT10PowerConditions': 'No',
'MirrorData': '465.25 GB',
'Name': '',
'NumberOfDrives': 2,
'RaidLevel': 'Primary-1, Secondary-0, RAID Level '
'Qualifier-0',
'SectorSize': 512,
'Size': '465.25 GB',
'SpanDepth': 1,
'State': 'Optimal',
'StripSize': '64 KB'})
ENC_OUTPUT = sample('megacli_enc')
if __name__ == "__main__":
unittest.main()
# test_megacli.py ends here
|
# -*- coding: utf-8 -*-
import PyWMOFiles.Error
import PyWMOFiles.BUFR
|
"""Minimal example
Implement a equivariant polynomial to fit the tetris dataset
Exact equivariance to :math:`E(3)`
This example is minimal:
* there is dependency on the distance to the neighbors (tetris pieces are made of edges of length 1)
* there is no non-linearities except that the tensor product, therefore this model is a polynomial
>>> test()
"""
import torch
from torch_cluster import radius_graph
from torch_geometric.data import Data, DataLoader
from torch_scatter import scatter
from e3nn import o3
from e3nn.o3 import FullyConnectedTensorProduct
def tetris():
pos = [
[(0, 0, 0), (0, 0, 1), (1, 0, 0), (1, 1, 0)], # chiral_shape_1
[(0, 0, 0), (0, 0, 1), (1, 0, 0), (1, -1, 0)], # chiral_shape_2
[(0, 0, 0), (1, 0, 0), (0, 1, 0), (1, 1, 0)], # square
[(0, 0, 0), (0, 0, 1), (0, 0, 2), (0, 0, 3)], # line
[(0, 0, 0), (0, 0, 1), (0, 1, 0), (1, 0, 0)], # corner
[(0, 0, 0), (0, 0, 1), (0, 0, 2), (0, 1, 0)], # L
[(0, 0, 0), (0, 0, 1), (0, 0, 2), (0, 1, 1)], # T
[(0, 0, 0), (1, 0, 0), (1, 1, 0), (2, 1, 0)], # zigzag
]
pos = torch.tensor(pos, dtype=torch.get_default_dtype())
# Since chiral shapes are the mirror of one another we need an *odd* scalar to distinguish them
labels = torch.tensor([
[+1, 0, 0, 0, 0, 0, 0], # chiral_shape_1
[-1, 0, 0, 0, 0, 0, 0], # chiral_shape_2
[0, 1, 0, 0, 0, 0, 0], # square
[0, 0, 1, 0, 0, 0, 0], # line
[0, 0, 0, 1, 0, 0, 0], # corner
[0, 0, 0, 0, 1, 0, 0], # L
[0, 0, 0, 0, 0, 1, 0], # T
[0, 0, 0, 0, 0, 0, 1], # zigzag
], dtype=torch.get_default_dtype())
# apply random rotation
pos = torch.einsum('zij,zaj->zai', o3.rand_matrix(len(pos)), pos)
# put in torch_geometric format
dataset = [Data(pos=pos) for pos in pos]
data = next(iter(DataLoader(dataset, batch_size=len(dataset))))
return data, labels
class InvariantPolynomial(torch.nn.Module):
def __init__(self) -> None:
super().__init__()
self.irreps_sh = o3.Irreps.spherical_harmonics(3)
irreps_mid = o3.Irreps("64x0e + 24x1e + 24x1o + 16x2e + 16x2o")
irreps_out = o3.Irreps("0o + 6x0e")
self.tp1 = FullyConnectedTensorProduct(
irreps_in1=self.irreps_sh,
irreps_in2=self.irreps_sh,
irreps_out=irreps_mid,
)
self.tp2 = FullyConnectedTensorProduct(
irreps_in1=irreps_mid,
irreps_in2=self.irreps_sh,
irreps_out=irreps_out,
)
def forward(self, data) -> torch.Tensor:
num_neighbors = 2 # typical number of neighbors
num_nodes = 4 # typical number of nodes
edge_src, edge_dst = radius_graph(data.pos, 1.1, data.batch) # tensors of indices representing the graph
edge_vec = data.pos[edge_src] - data.pos[edge_dst]
edge_sh = o3.spherical_harmonics(self.irreps_sh, edge_vec, False, normalization='component')
# For each node, the initial features are the sum of the spherical harmonics of the neighbors
node_features = scatter(edge_sh, edge_dst, dim=0).div(num_neighbors**0.5)
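        # Dividing by sqrt(num_neighbors) keeps the scale of the summed features roughly
        # independent of the (hard-coded, "typical") neighbor count; the same normalization
        # is applied after each scatter-sum below and over num_nodes at the end.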
# For each edge, tensor product the features on the source node with the spherical harmonics
edge_features = self.tp1(node_features[edge_src], edge_sh)
node_features = scatter(edge_features, edge_dst, dim=0).div(num_neighbors**0.5)
edge_features = self.tp2(node_features[edge_src], edge_sh)
node_features = scatter(edge_features, edge_dst, dim=0).div(num_neighbors**0.5)
# For each graph, all the node's features are summed
return scatter(node_features, data.batch, dim=0).div(num_nodes**0.5)
def main():
data, labels = tetris()
f = InvariantPolynomial()
optim = torch.optim.Adam(f.parameters(), lr=1e-2)
for step in range(200):
pred = f(data)
loss = (pred - labels).pow(2).sum()
optim.zero_grad()
loss.backward()
optim.step()
if step % 10 == 0:
accuracy = pred.round().eq(labels).double().mean().item()
print(f"{100 * accuracy:.1f}% accuracy")
# Check equivariance
rotated_data, _ = tetris()
error = f(rotated_data) - f(data)
print(f"Equivariance error = {error.abs().max().item():.1e}")
if __name__ == '__main__':
main()
def test():
data, labels = tetris()
f = InvariantPolynomial()
pred = f(data)
loss = (pred - labels).pow(2).sum()
loss.backward()
rotated_data, _ = tetris()
error = f(rotated_data) - f(data)
assert error.abs().max() < 1e-5
|
#!/usr/bin/env python3
import os
from typing import Optional
EON = os.path.isfile('/EON')
class Service:
def __init__(self, port: int, should_log: bool, frequency: float, decimation: Optional[int] = None):
self.port = port
self.should_log = should_log
self.frequency = frequency
self.decimation = decimation
service_list = {
"roadCameraState": Service(8002, True, 20., 1),
"sensorEvents": Service(8003, True, 100., 100),
"gpsNMEA": Service(8004, True, 9.),
"deviceState": Service(8005, True, 2., 1),
"can": Service(8006, True, 100.),
"controlsState": Service(8007, True, 100., 100),
"features": Service(8010, True, 0.),
"pandaState": Service(8011, True, 2., 1),
"radarState": Service(8012, True, 20., 5),
"roadEncodeIdx": Service(8015, True, 20., 1),
"liveTracks": Service(8016, True, 20.),
"sendcan": Service(8017, True, 100.),
"logMessage": Service(8018, True, 0.),
"liveCalibration": Service(8019, True, 4., 4),
"androidLog": Service(8020, True, 0., 1),
"carState": Service(8021, True, 100., 10),
"carControl": Service(8023, True, 100., 10),
"longitudinalPlan": Service(8024, True, 20., 2),
"liveLocation": Service(8025, True, 0., 1),
"procLog": Service(8031, True, 0.5),
"gpsLocationExternal": Service(8032, True, 10., 1),
"ubloxGnss": Service(8033, True, 10.),
"clocks": Service(8034, True, 1., 1),
"liveMpc": Service(8035, False, 20.),
"liveLongitudinalMpc": Service(8036, False, 20.),
"ubloxRaw": Service(8042, True, 20.),
"liveLocationKalman": Service(8054, True, 20., 2),
"uiLayoutState": Service(8060, True, 0.),
"liveParameters": Service(8064, True, 20., 2),
"cameraOdometry": Service(8066, True, 20., 5),
"lateralPlan": Service(8067, True, 20., 2),
"thumbnail": Service(8069, True, 0.2, 1),
"carEvents": Service(8070, True, 1., 1),
"carParams": Service(8071, True, 0.02, 1),
"driverCameraState": Service(8072, True, 10. if EON else 20., 1),
"driverEncodeIdx": Service(8061, True, 10. if EON else 20., 1),
"driverState": Service(8063, True, 10. if EON else 20., 1),
"driverMonitoringState": Service(8073, True, 10. if EON else 20., 1),
"offroadLayout": Service(8074, False, 0.),
"wideRoadEncodeIdx": Service(8075, True, 20., 1),
"wideRoadCameraState": Service(8076, True, 20., 1),
"modelV2": Service(8077, True, 20., 20),
"managerState": Service(8078, True, 2., 1),
"testModel": Service(8040, False, 0.),
"testLiveLocation": Service(8045, False, 0.),
"testJoystick": Service(8056, False, 0.),
}
def build_header():
h = ""
h += "/* THIS IS AN AUTOGENERATED FILE, PLEASE EDIT services.py */\n"
h += "#ifndef __SERVICES_H\n"
h += "#define __SERVICES_H\n"
h += "struct service { char name[0x100]; int port; bool should_log; int frequency; int decimation; };\n"
h += "static struct service services[] = {\n"
for k, v in service_list.items():
should_log = "true" if v.should_log else "false"
decimation = -1 if v.decimation is None else v.decimation
h += ' { .name = "%s", .port = %d, .should_log = %s, .frequency = %d, .decimation = %d },\n' % \
(k, v.port, should_log, v.frequency, decimation)
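        # Note: the struct declares frequency as int, so the %d above truncates
        # fractional rates (e.g. procLog's 0.5 Hz), and a decimation of None is
        # encoded as -1.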
h += "};\n"
h += "#endif\n"
return h
if __name__ == "__main__":
print(build_header())
|
import sys
import matplotlib.pyplot as plt
import torch
from sklearn.decomposition import PCA
def main(model_filepath, input_filepath):
model = torch.load(model_filepath)
images, labels = torch.load(input_filepath)
print([10], "load")
with torch.no_grad():
model.eval()
k = 100
perm = torch.randperm(images.shape[0])
idx = perm[:k]
inter_rep = model.conv(images[idx])
inter_lab = labels[idx]
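        # model.conv is assumed here to be the convolutional front-end of the saved
        # model; its output for the k sampled images is flattened and reduced to 2-D
        # with PCA below, then scattered with the label as the colour.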
print([19], "inter load")
inter_pca = PCA(n_components=2).fit_transform(inter_rep.view(k, -1))
        print([21], "inter pca")
plt.figure(figsize=(4, 4))
plt.scatter(inter_pca[:, 0], inter_pca[:, 1], c=inter_lab)
        plt.colorbar()
plt.savefig("reports/figures/report.png")
plt.show()
if __name__ == "__main__":
main(sys.argv[1], sys.argv[2])
|
def selfInductance():
    # Peak EMF of an inductor carrying a sinusoidal current: V = L * I_peak * omega,
    # so L = V / (I_peak * omega); multiplying by 1000 gives millihenries.
    firstAmp = float(input('First Amps: '))
    firstrads = float(input('First Rads/second: '))
    volt = float(input('Volts: '))
    peak = firstAmp * firstrads
    henry = (volt / peak) * 1000
    print(henry)
    print("mH")
selfInductance()
|
from . import data
from . import models
from . import metrics
|
# proxy module
from __future__ import absolute_import
from codetools.blocks.compiler_.ast.ast import *
|
"""
"""
from __future__ import absolute_import
from qgis.PyQt.QtWidgets import QMenu, QMessageBox
from .environment import BASE_DIR
from .version import VERSION
#Include new functions here to connect them with the menu
from .dikeline_export import DikelineExport
from .observationpoint_export import ObservationPointExport
from .polygon_export import PolygonExport
from .coastline_export import CoastlineExport
from .densify_linestring import DensifyLinestring
from .river_profile_export import RiverProfileExport
from .dem_export import DEMExport
from .database_export import DatabaseExport
from .hello_world import HelloWorld
from .crosssectioncreator import CrossSectionCreator
from .time_viewer import TimeViewer
from .rain_generator import RainGenerator
from .dam_raster import DAMRasterExport
from .cin_point import CINPointExport
from .cin_connector import CINConnectorExport
from .cin_polygon import CINPolygonExport
from .cin_connector_automatic import CINConnectorExportAuto
import os
class PromaidesToolbox(object):
def __init__(self, iface):
self.iface = iface
#init
self.plugin_menu = None
self.submenu_hyd = None
self.submenu_general = None
#HYD
self.dikeline_exprt = DikelineExport(self.iface)
self.observationpoint_exprt = ObservationPointExport(self.iface)
self.polygon_exprt = PolygonExport(self.iface)
self.coastline_exprt = CoastlineExport(self.iface)
self.crosssection = CrossSectionCreator(self.iface)
self.densify = DensifyLinestring(self.iface)
self.river_profile_exprt = RiverProfileExport(self.iface)
self.dem_export = DEMExport(self.iface)
self.time = TimeViewer(self.iface)
#HAZ
self.rain = RainGenerator(self.iface)
#DAM
self.dam_raster = DAMRasterExport(self.iface)
#CIN
self.cin_point = CINPointExport(self.iface)
self.cin_polygon = CINPolygonExport(self.iface)
self.cin_connector = CINConnectorExport(self.iface)
self.cin_connector_automatic = CINConnectorExportAuto(self.iface)
#General
self.hello_world = HelloWorld(self.iface)
self.db_exprt = DatabaseExport(self.iface)
def initGui(self):
"""
"""
self.plugin_menu = QMenu('ProMaIDes Toolbox', self.iface.mainWindow())
#Add a submenu
self.submenu_general = self.plugin_menu.addMenu('General')
self.submenu_haz = self.plugin_menu.addMenu('HAZ')
self.submenu_hyd = self.plugin_menu.addMenu('HYD')
self.submenu_dam = self.plugin_menu.addMenu('DAM')
self.submenu_cin = self.plugin_menu.addMenu('CIN')
        #Add and connect to functions in other .py-files
#HYD
self.observationpoint_exprt.initGui(self.submenu_hyd)
self.submenu_hyd.addSeparator()
self.crosssection.initGui(self.submenu_hyd)
self.densify.initGui(self.submenu_hyd)
self.river_profile_exprt.initGui(self.submenu_hyd)
self.submenu_hyd.addSeparator()
self.dem_export.initGui(self.submenu_hyd)
self.dikeline_exprt.initGui(self.submenu_hyd)
self.polygon_exprt.initGui(self.submenu_hyd)
self.submenu_hyd.addSeparator()
self.coastline_exprt.initGui(self.submenu_hyd)
self.submenu_hyd.addSeparator()
self.time.initGui(self.submenu_hyd)
#HAZ
self.rain.initGui(self.submenu_haz)
#DAM
self.dam_raster.initGui(self.submenu_dam)
#CIN
self.cin_point.initGui(self.submenu_cin)
self.cin_polygon.initGui(self.submenu_cin)
self.cin_connector.initGui(self.submenu_cin)
self.cin_connector_automatic.initGui(self.submenu_cin)
#General
self.hello_world.initGui(self.submenu_general)
self.db_exprt.initGui(self.submenu_general)
#Add about
self.plugin_menu.addAction('About', self.showAbout)
self.iface.pluginMenu().addMenu(self.plugin_menu)
def unload(self):
"""
"""
#HYD
self.dikeline_exprt.unload(self.submenu_hyd)
self.observationpoint_exprt.unload(self.submenu_hyd)
self.polygon_exprt.unload(self.submenu_hyd)
self.coastline_exprt.unload(self.submenu_hyd)
self.densify.unload(self.submenu_hyd)
self.crosssection.unload(self.submenu_hyd)
self.river_profile_exprt.unload(self.submenu_hyd)
self.dem_export.unload(self.submenu_hyd)
self.time.unload(self.submenu_hyd)
#HAZ
self.rain.unload(self.submenu_haz)
#DAM
self.dam_raster.unload(self.submenu_dam)
#CIN
self.cin_point.unload(self.submenu_cin)
self.cin_connector.unload(self.submenu_cin)
self.cin_polygon.unload(self.submenu_cin)
self.cin_connector_automatic.unload(self.submenu_cin)
#General
self.db_exprt.unload(self.submenu_general)
self.hello_world.unload(self.submenu_general)
self.iface.pluginMenu().removeAction(self.plugin_menu.menuAction())
def showAbout(self):
about = open(os.path.join(BASE_DIR, 'ABOUT.html')).read().format(version='.'.join(map(str, VERSION)))
QMessageBox.about(self.iface.mainWindow(), 'ProMaIDes Toolbox', about)
|
from app import app
from flask import request
from werkzeug.utils import secure_filename
from os import path, remove as remove_file
from random import randrange
from time import time
from openpyxl import load_workbook
def allowed_ext(filename, allowed=[".xlsx"]):
# Extension
ext = path.splitext(filename)[1]
# End
return ext in allowed
def stringify(value):
if value:
return bytes(str(value), 'utf-8').decode('utf-8-sig').strip()
else:
return ""
def remove_temp(filepath):
if path.isfile(filepath):
remove_file(filepath)
return True
def get_uploaded_import_wb_file():
# Default output
output = None
# Form name
form_name = "file_import"
# On post
if request.method == "POST":
# Check form
if form_name in request.files:
# Get file
file = request.files[form_name]
filename = file.filename.strip()
is_update = request.form.get("update") == "y"
# Check filename
if not filename == "":
# Check extension
if allowed_ext(filename):
# Path
filename_clean = secure_filename(f"import_{randrange(1000, 9999)}_{int(time())}.xlsx")
save_path = path.join(
app.config.get("PRIVATE_DIR"),
"temp",
filename_clean
)
# Save
file.save(save_path)
# Load file
try:
# Load workbook
wb = load_workbook(save_path)
# End
return (True, save_path, wb, is_update)
except Exception as e:
# Remove file
remove_file(save_path)
# End
return (False, "Terjadi kesalahan saat memuat file")
else:
# End
return (False, "Ekstensi tidak diizinkan, silahkan upload file berekstensi *.xlsx")
else:
# End
return (False, "Silahkan pilih file terlebih dahulu")
else:
# End
return (False, "Gagal menemukan file pada permintaan form")
# End
return output
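# Illustrative only: a hypothetical route showing how the tuple contract above is
# meant to be consumed. The endpoint name and response handling are assumptions,
# not part of the original application.
@app.route("/import-example", methods=["POST"])
def import_workbook_example():
    result = get_uploaded_import_wb_file()
    if result and result[0]:
        ok, save_path, wb, is_update = result
        rows = wb.active.max_row  # a real handler would iterate the sheet here
        remove_temp(save_path)
        return f"Loaded a sheet with {rows} rows (update={is_update})"
    message = result[1] if result else "No file submitted"
    return message, 400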
|
# -*- coding: utf-8 -*-
from urllib.parse import urlparse
from django.contrib.auth import get_user_model
from django.shortcuts import resolve_url
from django.test import TestCase
class SetupTest(TestCase):
def test_initial_setup_redirect(self):
resp = self.client.get(resolve_url('login'))
self.assertEqual(resp.status_code, 302)
url = urlparse(resp['Location'])
self.assertEqual(url.path, resolve_url('first_time_setup'))
# Don't redirect if there's already a user
get_user_model().objects.create_user(username='test')
resp = self.client.get(resolve_url('login'))
self.assertEqual(resp.status_code, 200)
def test_initial_setup_requires(self):
resp = self.client.post(resolve_url('first_time_setup'))
self.assertEqual(resp.status_code, 400)
def test_initial_setup_post(self):
resp = self.client.post(
resolve_url('first_time_setup'),
data={
'username': '',
'password': 'pass'
})
self.assertEqual(resp.status_code, 400)
resp = self.client.post(
resolve_url('first_time_setup'),
data={
'username': 'test',
'password': ''
})
self.assertEqual(resp.status_code, 400)
self.assertFalse(get_user_model().objects.exists())
resp = self.client.post(
resolve_url('first_time_setup'),
data={
'username': 'test',
'password': 'pass'
})
self.assertEqual(resp.status_code, 302)
self.assertTrue(get_user_model().objects.exists())
def test_initial_setup_post_with_email(self):
resp = self.client.post(
resolve_url('first_time_setup'),
data={
'username': 'test',
'email': 'fail',
'password': 'pass'
})
self.assertEqual(resp.status_code, 400)
self.assertFalse(get_user_model().objects.exists())
resp = self.client.post(
resolve_url('first_time_setup'),
data={
'username': 'test',
'email': 'real@email.com',
'password': 'pass'
})
self.assertEqual(resp.status_code, 302)
self.assertTrue(get_user_model().objects.exists())
def test_cant_setup_with_existing_user(self):
get_user_model().objects.create_user(username='test')
resp = self.client.post(
resolve_url('first_time_setup'),
data={
'username': 'test',
'email': 'real@email.com',
'password': 'pass'
})
self.assertEqual(get_user_model().objects.count(), 1)
|
import logs
import veri
def monitorStuff(Net):
Val = logs.peek(Net)
if Val!=0:
logs.log_error('PANIC activated on %s %s'%(Net,veri.peek(Net)))
return 1
return 0
def monitorStuffs():
    counts = 0  # number of monitored nets currently flagging a panic
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.a_rcount")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.b_rcount")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.c_rcount")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.d_rcount")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.a_ar_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.a_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.a_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.a_ids_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.b_ar_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.b_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.b_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.b_ids_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.c_ar_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.c_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.c_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.c_ids_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.d_ar_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.d_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.d_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_rd_4_merger.d_ids_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.panic_acount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.panic_bcount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.panic_ccount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.panic_dcount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.a_bcount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_bcount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.c_bcount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.d_bcount")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.a_aw_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.a_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.a_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.a_b_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.a_win_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.a_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_aw_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_b_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_out_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_out_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_win_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.b_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.c_aw_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.c_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.c_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.c_b_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.c_win_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.c_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.d_aw_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.d_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.d_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.d_b_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.d_win_fifo.count")
counts += monitorStuff("tb.dut.merger0.axi_wr_4_merger.d_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.a_rcount")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.b_rcount")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.c_rcount")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.d_rcount")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.a_ar_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.a_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.a_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.a_ids_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.b_ar_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.b_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.b_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.b_ids_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.c_ar_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.c_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.c_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.c_ids_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.d_ar_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.d_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.d_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_rd_4_merger.d_ids_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.panic_acount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.panic_bcount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.panic_ccount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.panic_dcount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.a_bcount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_bcount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.c_bcount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.d_bcount")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.a_aw_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.a_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.a_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.a_b_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.a_win_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.a_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_aw_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_b_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_out_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_out_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_win_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.b_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.c_aw_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.c_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.c_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.c_b_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.c_win_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.c_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.d_aw_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.d_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.d_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.d_b_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.d_win_fifo.count")
counts += monitorStuff("tb.dut.merger1.axi_wr_4_merger.d_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.a_rcount")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.b_rcount")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.c_rcount")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.d_rcount")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.a_ar_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.a_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.a_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.a_ids_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.b_ar_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.b_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.b_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.b_ids_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.c_ar_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.c_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.c_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.c_ids_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.d_ar_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.d_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.d_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_rd_4_merger.d_ids_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.panic_acount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.panic_bcount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.panic_ccount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.panic_dcount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.a_bcount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_bcount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.c_bcount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.d_bcount")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.a_aw_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.a_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.a_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.a_b_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.a_win_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.a_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_aw_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_b_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_out_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_out_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_win_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.b_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.c_aw_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.c_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.c_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.c_b_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.c_win_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.c_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.d_aw_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.d_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.d_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.d_b_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.d_win_fifo.count")
counts += monitorStuff("tb.dut.merger2.axi_wr_4_merger.d_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.a_rcount")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.b_rcount")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.c_rcount")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.d_rcount")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.a_ar_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.a_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.a_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.a_ids_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.b_ar_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.b_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.b_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.b_ids_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.c_ar_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.c_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.c_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.c_ids_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.d_ar_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.d_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.d_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_rd_4_merger.d_ids_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.panic_acount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.panic_bcount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.panic_ccount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.panic_dcount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.a_bcount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_bcount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.c_bcount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.d_bcount")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.a_aw_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.a_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.a_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.a_b_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.a_win_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.a_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_aw_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_b_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_out_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_out_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_win_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.b_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.c_aw_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.c_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.c_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.c_b_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.c_win_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.c_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.d_aw_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.d_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.d_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.d_b_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.d_win_fifo.count")
counts += monitorStuff("tb.dut.merger3.axi_wr_4_merger.d_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.a_rcount")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.b_rcount")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.c_rcount")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.d_rcount")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.a_ar_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.a_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.a_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.a_ids_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.b_ar_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.b_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.b_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.b_ids_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.c_ar_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.c_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.c_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.c_ids_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.d_ar_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.d_ar_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.d_ids_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_rd_4_merger.d_ids_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.panic_acount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.panic_bcount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.panic_ccount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.panic_dcount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.a_bcount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_bcount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.c_bcount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.d_bcount")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.a_aw_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.a_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.a_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.a_b_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.a_win_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.a_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_aw_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_b_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_out_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_out_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_win_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.b_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.c_aw_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.c_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.c_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.c_b_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.c_win_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.c_win_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.d_aw_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.d_aw_fifo.int_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.d_b_fifo.next_count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.d_b_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.d_win_fifo.count")
counts += monitorStuff("tb.dut.merger4.axi_wr_4_merger.d_win_fifo.int_count")
counts += monitorStuff("tb.dut.splitter0.axi_rd_4_splitter.ar_fifo.next_count")
counts += monitorStuff("tb.dut.splitter0.axi_rd_4_splitter.ar_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_rd_4_splitter.r_fifo.next_count")
counts += monitorStuff("tb.dut.splitter0.axi_rd_4_splitter.r_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.a_bcount")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.b_bcount")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.c_bcount")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.d_bcount")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.aw_fifo.next_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.aw_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.b_fifo.next_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.b_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_a_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_a_fifo.int_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_b_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_b_fifo.int_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_c_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_c_fifo.int_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_d_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.back_bid_d_fifo.int_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.order_fifo.next_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.order_fifo.count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.w_fifo.next_count")
counts += monitorStuff("tb.dut.splitter0.axi_wr_4_splitter.w_fifo.count")
    veri.force('tb.Panics', str(counts))
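# An illustrative refactor sketch (not a drop-in replacement): the nets polled above
# follow a regular naming pattern, so the list could be generated instead of written
# out by hand. The suffixes below only cover the read-side counters and would need to
# be checked against the real hierarchy before use,
# e.g. total = monitorMergerReadSide("tb.dut.merger0")
def monitorMergerReadSide(merger_path):
    total = 0
    for letter in 'abcd':
        for suffix in ('rcount', 'ar_fifo.count', 'ar_fifo.int_count',
                       'ids_fifo.next_count', 'ids_fifo.count'):
            total += monitorStuff('%s.axi_rd_4_merger.%s_%s' % (merger_path, letter, suffix))
    return total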
def snapshot():
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.a_rcount" % logs.peek("tb.dut.merger0.axi_rd_4_merger.a_rcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.b_rcount" % logs.peek("tb.dut.merger0.axi_rd_4_merger.b_rcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.c_rcount" % logs.peek("tb.dut.merger0.axi_rd_4_merger.c_rcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.d_rcount" % logs.peek("tb.dut.merger0.axi_rd_4_merger.d_rcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.a_ar_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.a_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.a_ar_fifo.int_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.a_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.a_ids_fifo.next_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.a_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.a_ids_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.a_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.b_ar_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.b_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.b_ar_fifo.int_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.b_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.b_ids_fifo.next_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.b_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.b_ids_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.b_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.c_ar_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.c_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.c_ar_fifo.int_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.c_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.c_ids_fifo.next_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.c_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.c_ids_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.c_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.d_ar_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.d_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.d_ar_fifo.int_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.d_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.d_ids_fifo.next_count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.d_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_rd_4_merger.d_ids_fifo.count" % logs.peek("tb.dut.merger0.axi_rd_4_merger.d_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.panic_acount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.panic_acount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.panic_bcount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.panic_bcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.panic_ccount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.panic_ccount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.panic_dcount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.panic_dcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.a_bcount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.a_bcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_bcount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_bcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.c_bcount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.c_bcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.d_bcount" % logs.peek("tb.dut.merger0.axi_wr_4_merger.d_bcount"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.a_aw_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.a_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.a_aw_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.a_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.a_b_fifo.next_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.a_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.a_b_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.a_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.a_win_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.a_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.a_win_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.a_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_aw_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_aw_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_b_fifo.next_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_b_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_out_fifo.next_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_out_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_out_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_out_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_win_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.b_win_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.b_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.c_aw_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.c_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.c_aw_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.c_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.c_b_fifo.next_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.c_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.c_b_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.c_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.c_win_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.c_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.c_win_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.c_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.d_aw_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.d_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.d_aw_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.d_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.d_b_fifo.next_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.d_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.d_b_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.d_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.d_win_fifo.count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.d_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger0.axi_wr_4_merger.d_win_fifo.int_count" % logs.peek("tb.dut.merger0.axi_wr_4_merger.d_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.a_rcount" % logs.peek("tb.dut.merger1.axi_rd_4_merger.a_rcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.b_rcount" % logs.peek("tb.dut.merger1.axi_rd_4_merger.b_rcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.c_rcount" % logs.peek("tb.dut.merger1.axi_rd_4_merger.c_rcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.d_rcount" % logs.peek("tb.dut.merger1.axi_rd_4_merger.d_rcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.a_ar_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.a_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.a_ar_fifo.int_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.a_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.a_ids_fifo.next_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.a_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.a_ids_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.a_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.b_ar_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.b_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.b_ar_fifo.int_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.b_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.b_ids_fifo.next_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.b_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.b_ids_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.b_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.c_ar_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.c_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.c_ar_fifo.int_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.c_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.c_ids_fifo.next_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.c_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.c_ids_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.c_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.d_ar_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.d_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.d_ar_fifo.int_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.d_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.d_ids_fifo.next_count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.d_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_rd_4_merger.d_ids_fifo.count" % logs.peek("tb.dut.merger1.axi_rd_4_merger.d_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.panic_acount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.panic_acount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.panic_bcount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.panic_bcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.panic_ccount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.panic_ccount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.panic_dcount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.panic_dcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.a_bcount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.a_bcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_bcount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_bcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.c_bcount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.c_bcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.d_bcount" % logs.peek("tb.dut.merger1.axi_wr_4_merger.d_bcount"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.a_aw_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.a_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.a_aw_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.a_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.a_b_fifo.next_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.a_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.a_b_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.a_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.a_win_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.a_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.a_win_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.a_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_aw_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_aw_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_b_fifo.next_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_b_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_out_fifo.next_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_out_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_out_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_out_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_win_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.b_win_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.b_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.c_aw_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.c_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.c_aw_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.c_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.c_b_fifo.next_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.c_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.c_b_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.c_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.c_win_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.c_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.c_win_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.c_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.d_aw_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.d_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.d_aw_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.d_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.d_b_fifo.next_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.d_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.d_b_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.d_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.d_win_fifo.count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.d_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger1.axi_wr_4_merger.d_win_fifo.int_count" % logs.peek("tb.dut.merger1.axi_wr_4_merger.d_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.a_rcount" % logs.peek("tb.dut.merger2.axi_rd_4_merger.a_rcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.b_rcount" % logs.peek("tb.dut.merger2.axi_rd_4_merger.b_rcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.c_rcount" % logs.peek("tb.dut.merger2.axi_rd_4_merger.c_rcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.d_rcount" % logs.peek("tb.dut.merger2.axi_rd_4_merger.d_rcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.a_ar_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.a_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.a_ar_fifo.int_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.a_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.a_ids_fifo.next_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.a_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.a_ids_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.a_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.b_ar_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.b_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.b_ar_fifo.int_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.b_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.b_ids_fifo.next_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.b_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.b_ids_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.b_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.c_ar_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.c_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.c_ar_fifo.int_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.c_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.c_ids_fifo.next_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.c_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.c_ids_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.c_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.d_ar_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.d_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.d_ar_fifo.int_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.d_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.d_ids_fifo.next_count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.d_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_rd_4_merger.d_ids_fifo.count" % logs.peek("tb.dut.merger2.axi_rd_4_merger.d_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.panic_acount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.panic_acount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.panic_bcount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.panic_bcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.panic_ccount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.panic_ccount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.panic_dcount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.panic_dcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.a_bcount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.a_bcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_bcount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_bcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.c_bcount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.c_bcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.d_bcount" % logs.peek("tb.dut.merger2.axi_wr_4_merger.d_bcount"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.a_aw_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.a_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.a_aw_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.a_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.a_b_fifo.next_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.a_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.a_b_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.a_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.a_win_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.a_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.a_win_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.a_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_aw_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_aw_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_b_fifo.next_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_b_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_out_fifo.next_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_out_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_out_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_out_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_win_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.b_win_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.b_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.c_aw_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.c_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.c_aw_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.c_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.c_b_fifo.next_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.c_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.c_b_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.c_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.c_win_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.c_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.c_win_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.c_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.d_aw_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.d_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.d_aw_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.d_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.d_b_fifo.next_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.d_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.d_b_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.d_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.d_win_fifo.count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.d_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger2.axi_wr_4_merger.d_win_fifo.int_count" % logs.peek("tb.dut.merger2.axi_wr_4_merger.d_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.a_rcount" % logs.peek("tb.dut.merger3.axi_rd_4_merger.a_rcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.b_rcount" % logs.peek("tb.dut.merger3.axi_rd_4_merger.b_rcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.c_rcount" % logs.peek("tb.dut.merger3.axi_rd_4_merger.c_rcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.d_rcount" % logs.peek("tb.dut.merger3.axi_rd_4_merger.d_rcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.a_ar_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.a_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.a_ar_fifo.int_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.a_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.a_ids_fifo.next_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.a_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.a_ids_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.a_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.b_ar_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.b_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.b_ar_fifo.int_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.b_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.b_ids_fifo.next_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.b_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.b_ids_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.b_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.c_ar_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.c_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.c_ar_fifo.int_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.c_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.c_ids_fifo.next_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.c_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.c_ids_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.c_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.d_ar_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.d_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.d_ar_fifo.int_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.d_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.d_ids_fifo.next_count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.d_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_rd_4_merger.d_ids_fifo.count" % logs.peek("tb.dut.merger3.axi_rd_4_merger.d_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.panic_acount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.panic_acount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.panic_bcount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.panic_bcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.panic_ccount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.panic_ccount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.panic_dcount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.panic_dcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.a_bcount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.a_bcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_bcount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_bcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.c_bcount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.c_bcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.d_bcount" % logs.peek("tb.dut.merger3.axi_wr_4_merger.d_bcount"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.a_aw_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.a_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.a_aw_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.a_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.a_b_fifo.next_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.a_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.a_b_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.a_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.a_win_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.a_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.a_win_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.a_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_aw_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_aw_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_b_fifo.next_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_b_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_out_fifo.next_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_out_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_out_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_out_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_win_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.b_win_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.b_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.c_aw_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.c_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.c_aw_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.c_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.c_b_fifo.next_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.c_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.c_b_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.c_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.c_win_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.c_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.c_win_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.c_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.d_aw_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.d_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.d_aw_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.d_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.d_b_fifo.next_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.d_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.d_b_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.d_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.d_win_fifo.count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.d_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger3.axi_wr_4_merger.d_win_fifo.int_count" % logs.peek("tb.dut.merger3.axi_wr_4_merger.d_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.a_rcount" % logs.peek("tb.dut.merger4.axi_rd_4_merger.a_rcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.b_rcount" % logs.peek("tb.dut.merger4.axi_rd_4_merger.b_rcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.c_rcount" % logs.peek("tb.dut.merger4.axi_rd_4_merger.c_rcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.d_rcount" % logs.peek("tb.dut.merger4.axi_rd_4_merger.d_rcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.a_ar_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.a_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.a_ar_fifo.int_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.a_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.a_ids_fifo.next_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.a_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.a_ids_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.a_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.b_ar_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.b_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.b_ar_fifo.int_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.b_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.b_ids_fifo.next_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.b_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.b_ids_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.b_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.c_ar_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.c_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.c_ar_fifo.int_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.c_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.c_ids_fifo.next_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.c_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.c_ids_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.c_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.d_ar_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.d_ar_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.d_ar_fifo.int_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.d_ar_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.d_ids_fifo.next_count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.d_ids_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_rd_4_merger.d_ids_fifo.count" % logs.peek("tb.dut.merger4.axi_rd_4_merger.d_ids_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.panic_acount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.panic_acount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.panic_bcount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.panic_bcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.panic_ccount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.panic_ccount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.panic_dcount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.panic_dcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.a_bcount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.a_bcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_bcount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_bcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.c_bcount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.c_bcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.d_bcount" % logs.peek("tb.dut.merger4.axi_wr_4_merger.d_bcount"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.a_aw_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.a_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.a_aw_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.a_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.a_b_fifo.next_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.a_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.a_b_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.a_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.a_win_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.a_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.a_win_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.a_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_aw_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_aw_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_b_fifo.next_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_b_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_out_fifo.next_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_out_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_out_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_out_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_win_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.b_win_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.b_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.c_aw_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.c_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.c_aw_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.c_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.c_b_fifo.next_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.c_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.c_b_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.c_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.c_win_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.c_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.c_win_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.c_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.d_aw_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.d_aw_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.d_aw_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.d_aw_fifo.int_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.d_b_fifo.next_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.d_b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.d_b_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.d_b_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.d_win_fifo.count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.d_win_fifo.count"))
logs.log_info("SNP %x tb.dut.merger4.axi_wr_4_merger.d_win_fifo.int_count" % logs.peek("tb.dut.merger4.axi_wr_4_merger.d_win_fifo.int_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_rd_4_splitter.ar_fifo.next_count" % logs.peek("tb.dut.splitter0.axi_rd_4_splitter.ar_fifo.next_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_rd_4_splitter.ar_fifo.count" % logs.peek("tb.dut.splitter0.axi_rd_4_splitter.ar_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_rd_4_splitter.r_fifo.next_count" % logs.peek("tb.dut.splitter0.axi_rd_4_splitter.r_fifo.next_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_rd_4_splitter.r_fifo.count" % logs.peek("tb.dut.splitter0.axi_rd_4_splitter.r_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.a_bcount" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.a_bcount"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.b_bcount" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.b_bcount"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.c_bcount" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.c_bcount"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.d_bcount" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.d_bcount"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.aw_fifo.next_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.aw_fifo.next_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.aw_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.aw_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.b_fifo.next_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.b_fifo.next_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.b_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.b_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_a_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_a_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_a_fifo.int_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_a_fifo.int_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_b_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_b_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_b_fifo.int_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_b_fifo.int_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_c_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_c_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_c_fifo.int_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_c_fifo.int_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_d_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_d_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.back_bid_d_fifo.int_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.back_bid_d_fifo.int_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.order_fifo.next_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.order_fifo.next_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.order_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.order_fifo.count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.w_fifo.next_count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.w_fifo.next_count"))
logs.log_info("SNP %x tb.dut.splitter0.axi_wr_4_splitter.w_fifo.count" % logs.peek("tb.dut.splitter0.axi_wr_4_splitter.w_fifo.count"))
|
#!/usr/bin/python3
from datetime import datetime
import sys
filename="shm-lolo-100-delay"
try:
if sys.argv[1]:
fileName = sys.argv[1]
except IndexError:
print("Using default file name.")
fileName = 'loglistener.txt'
f = open(fileName,"r")
total_count=0
ctr_count=0
RPL_count=0
data_count=0
for line in f:
if 'forwarding control' in line:
total_count+=1
ctr_count+=1
if 'DIO' in line or "DIS" in line or "DAO" in line:
total_count+=1
RPL_count+=1
if 'now sending hello' in line:
total_count+=1
data_count+=1
print("data")
print(data_count)
print("ctr:")
print(ctr_count)
print("RPL:")
print(RPL_count)
print("SUM:")
print(total_count)
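# Example invocation (the script file name is an assumption, not given above):
#   python3 count_messages.py loglistener.txt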
|
"""Top-level package for python_project_template."""
__author__ = """Mason Lin"""
__email__ = 'pizza0117@gmail.com'
__version__ = '0.1.0'
|
# Write a method/function DISPLAYWORDS() in Python to read lines from a text file STORY.TXT
# and display those words which are less than 4 characters long.
def displayWords():
    file = open("story.txt", "r")
    lines = file.readlines()
    for line in lines:
        words = line.split()
        for word in words:
            if len(word) < 4:
                print(word)
    file.close()
displayWords()
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.fluid.core import is_compiled_with_cuda, is_compiled_with_rocm, CUDAPlace
if is_compiled_with_cuda() and not is_compiled_with_rocm():
from paddle.fluid.core import CUDAGraph as CoreCUDAGraph
class CUDAGraph:
def __init__(self, place=None, mode="thread_local"):
ALL_MODES = ["global", "thread_local", "relaxed"]
self._graph = None
if place is None:
place = CUDAPlace(0)
self._place = place
assert mode in ALL_MODES
self._mode = ALL_MODES.index(mode)
def capture_begin(self):
CoreCUDAGraph.begin_capture(self._place, self._mode)
def capture_end(self):
self._graph = CoreCUDAGraph.end_capture()
def replay(self):
self._graph.replay()
def reset(self):
self._graph.reset()
else:
class CUDAGraph:
def __init__(self, place=None, mode="thread_local"):
raise NotImplementedError()
def capture_begin(self):
raise NotImplementedError()
def capture_end(self):
raise NotImplementedError()
def replay(self):
raise NotImplementedError()
def reset(self):
raise NotImplementedError()
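# Minimal usage sketch (illustrative only; requires a CUDA build of Paddle and
# ops that are capturable inside a CUDA graph):
#   graph = CUDAGraph()
#   graph.capture_begin()
#   ...  # launch the GPU work to be recorded
#   graph.capture_end()
#   graph.replay()  # re-run the captured work
#   graph.reset()   # release graph resources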
|
# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the NiBabel package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
""" Test for scaling / rounding in volumeutils module """
import numpy as np
import warnings
from io import BytesIO
from ..volumeutils import finite_range, apply_read_scaling, array_to_file, array_from_file
from ..casting import type_info
from ..testing import suppress_warnings
from .test_volumeutils import _calculate_scale
from numpy.testing import (assert_array_almost_equal, assert_array_equal)
import pytest
# Debug print statements
DEBUG = True
@pytest.mark.parametrize("in_arr, res", [
([[-1, 0, 1], [np.inf, np.nan, -np.inf]], (-1, 1)),
(np.array([[-1, 0, 1], [np.inf, np.nan, -np.inf]]), (-1, 1)),
([[np.nan], [np.nan]], (np.inf, -np.inf)), # all nans slices
(np.zeros((3, 4, 5)) + np.nan, (np.inf, -np.inf)),
([[-np.inf], [np.inf]], (np.inf, -np.inf)), # all infs slices
(np.zeros((3, 4, 5)) + np.inf, (np.inf, -np.inf)),
([[np.nan, -1, 2], [-2, np.nan, 1]], (-2, 2)),
([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)),
([[-np.inf, 2], [np.nan, 1]], (1, 2)), # good max case
([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)),
([np.nan], (np.inf, -np.inf)),
([np.inf], (np.inf, -np.inf)),
([-np.inf], (np.inf, -np.inf)),
([np.inf, 1], (1, 1)), # only look at finite values
([-np.inf, 1], (1, 1)),
([[], []], (np.inf, -np.inf)), # empty array
(np.array([[-3, 0, 1], [2, -1, 4]], dtype=int), (-3, 4)),
(np.array([[1, 0, 1], [2, 3, 4]], dtype=np.uint), (0, 4)),
([0., 1, 2, 3], (0, 3)),
# Complex comparison works as if they are floats
([[np.nan, -1 - 100j, 2], [-2, np.nan, 1 + 100j]], (-2, 2)),
([[np.nan, -1, 2 - 100j], [-2 + 100j, np.nan, 1]], (-2 + 100j, 2 - 100j)),
])
def test_finite_range(in_arr, res):
# Finite range utility function
assert finite_range(in_arr) == res
assert finite_range(in_arr, False) == res
assert finite_range(in_arr, check_nan=False) == res
has_nan = np.any(np.isnan(in_arr))
assert finite_range(in_arr, True) == res + (has_nan,)
assert finite_range(in_arr, check_nan=True) == res + (has_nan,)
in_arr = np.array(in_arr)
flat_arr = in_arr.ravel()
assert finite_range(flat_arr) == res
assert finite_range(flat_arr, True) == res + (has_nan,)
# Check float types work as complex
if in_arr.dtype.kind == 'f':
c_arr = in_arr.astype(np.complex128)
assert finite_range(c_arr) == res
assert finite_range(c_arr, True) == res + (has_nan,)
def test_finite_range_err():
# Test error cases
a = np.array([[1., 0, 1], [2, 3, 4]]).view([('f1', 'f')])
with pytest.raises(TypeError):
finite_range(a)
@pytest.mark.parametrize("out_type", [np.int16, np.float32])
def test_a2f_mn_mx(out_type):
# Test array to file mn, mx handling
str_io = BytesIO()
arr = np.arange(6, dtype=out_type)
arr_orig = arr.copy() # safe backup for testing against
# Basic round trip to warm up
array_to_file(arr, str_io)
data_back = array_from_file(arr.shape, out_type, str_io)
assert_array_equal(arr, data_back)
# Clip low
array_to_file(arr, str_io, mn=2)
data_back = array_from_file(arr.shape, out_type, str_io)
# arr unchanged
assert_array_equal(arr, arr_orig)
# returned value clipped low
assert_array_equal(data_back, [2, 2, 2, 3, 4, 5])
# Clip high
array_to_file(arr, str_io, mx=4)
data_back = array_from_file(arr.shape, out_type, str_io)
# arr unchanged
assert_array_equal(arr, arr_orig)
# returned value clipped high
assert_array_equal(data_back, [0, 1, 2, 3, 4, 4])
# Clip both
array_to_file(arr, str_io, mn=2, mx=4)
data_back = array_from_file(arr.shape, out_type, str_io)
# arr unchanged
assert_array_equal(arr, arr_orig)
# returned value clipped high
assert_array_equal(data_back, [2, 2, 2, 3, 4, 4])
def test_a2f_nan2zero():
# Test conditions under which nans written to zero
arr = np.array([np.nan, 99.], dtype=np.float32)
str_io = BytesIO()
array_to_file(arr, str_io)
data_back = array_from_file(arr.shape, np.float32, str_io)
assert_array_equal(np.isnan(data_back), [True, False])
# nan2zero ignored for floats
array_to_file(arr, str_io, nan2zero=True)
data_back = array_from_file(arr.shape, np.float32, str_io)
assert_array_equal(np.isnan(data_back), [True, False])
# Integer output with nan2zero gives zero
with np.errstate(invalid='ignore'):
array_to_file(arr, str_io, np.int32, nan2zero=True)
data_back = array_from_file(arr.shape, np.int32, str_io)
assert_array_equal(data_back, [0, 99])
# Integer output with nan2zero=False gives whatever astype gives
with np.errstate(invalid='ignore'):
array_to_file(arr, str_io, np.int32, nan2zero=False)
data_back = array_from_file(arr.shape, np.int32, str_io)
assert_array_equal(data_back, [np.array(np.nan).astype(np.int32), 99])
@pytest.mark.parametrize("in_type, out_type", [
(np.int16, np.int16),
(np.int16, np.int8),
(np.uint16, np.uint8),
(np.int32, np.int8),
(np.float32, np.uint8),
(np.float32, np.int16)
])
def test_array_file_scales(in_type, out_type):
# Test scaling works for max, min when going from larger to smaller type,
# and from float to integer.
bio = BytesIO()
out_dtype = np.dtype(out_type)
arr = np.zeros((3,), dtype=in_type)
info = type_info(in_type)
arr[0], arr[1] = info['min'], info['max']
slope, inter, mn, mx = _calculate_scale(arr, out_dtype, True)
array_to_file(arr, bio, out_type, 0, inter, slope, mn, mx)
bio.seek(0)
arr2 = array_from_file(arr.shape, out_dtype, bio)
arr3 = apply_read_scaling(arr2, slope, inter)
# Max rounding error for integer type
max_miss = slope / 2.
assert np.all(np.abs(arr - arr3) <= max_miss)
@pytest.mark.parametrize("category0, category1, overflow",[
# Confirm that, for all ints and uints as input, and all possible outputs,
# for any simple way of doing the calculation, the result is near enough
('int', 'int', False),
('uint', 'int', False),
# Converting floats to integer
('float', 'int', True),
('float', 'uint', True),
('complex', 'int', True),
('complex', 'uint', True),
])
def test_scaling_in_abstract(category0, category1, overflow):
for in_type in np.sctypes[category0]:
for out_type in np.sctypes[category1]:
if overflow:
with suppress_warnings():
check_int_a2f(in_type, out_type)
else:
check_int_a2f(in_type, out_type)
def check_int_a2f(in_type, out_type):
# Check that array to / from file returns roughly the same as input
big_floater = np.maximum_sctype(np.float64)
info = type_info(in_type)
this_min, this_max = info['min'], info['max']
    if in_type not in np.sctypes['complex']:
data = np.array([this_min, this_max], in_type)
# Bug in numpy 1.6.2 on PPC leading to infs - abort
if not np.all(np.isfinite(data)):
if DEBUG:
print(f'Hit PPC max -> inf bug; skip in_type {in_type}')
return
else: # Funny behavior with complex256
data = np.zeros((2,), in_type)
data[0] = this_min + 0j
data[1] = this_max + 0j
str_io = BytesIO()
try:
scale, inter, mn, mx = _calculate_scale(data, out_type, True)
except ValueError as e:
if DEBUG:
warnings.warn(str((in_type, out_type, e)))
return
array_to_file(data, str_io, out_type, 0, inter, scale, mn, mx)
data_back = array_from_file(data.shape, out_type, str_io)
data_back = apply_read_scaling(data_back, scale, inter)
assert np.allclose(big_floater(data), big_floater(data_back))
# Try with analyze-size scale and inter
scale32 = np.float32(scale)
inter32 = np.float32(inter)
if scale32 == np.inf or inter32 == np.inf:
return
data_back = array_from_file(data.shape, out_type, str_io)
data_back = apply_read_scaling(data_back, scale32, inter32)
# Clip at extremes to remove inf
info = type_info(in_type)
out_min, out_max = info['min'], info['max']
assert np.allclose(big_floater(data), big_floater(np.clip(data_back, out_min, out_max)))
|
from vvspy import get_trips
"""
Check connections between two stations and alarm on delay.
Note that there are destination.delay and origin.delay.
origin.delay => departure delay on first station
destination.delay => arrival delay on the final station
"""
station_1 = 5006118 # Stuttgart main station
station_2 = 5001303 # Weil der Stadt
result = get_trips(station_1, station_2)
for res in result:
if res.connections[0].destination.delay > 0:
print(f"{res.connections[0].transportation.number} is too late!"
f" Now arriving {res.connections[0].destination.arrival_time_estimated}")
|
import torch
import torchvision
import torch.nn as nn
import torch.nn.functional as F
class BnReluConv(nn.Module):
"""docstring for BnReluConv"""
def __init__(self, inChannels, outChannels, kernelSize = 1, stride = 1, padding = 0):
super(BnReluConv, self).__init__()
self.inChannels = inChannels
self.outChannels = outChannels
self.kernelSize = kernelSize
self.stride = stride
self.padding = padding
self.bn = nn.BatchNorm2d(self.inChannels)
self.relu = nn.ReLU()
self.conv = nn.Conv2d(self.inChannels, self.outChannels, self.kernelSize, self.stride, self.padding)
def forward(self, x):
x = self.bn(x)
x = self.relu(x)
x = self.conv(x)
return x
class Pyramid(nn.Module):
"""docstring for Pyramid"""
def __init__(self, D, cardinality, inputRes):
super(Pyramid, self).__init__()
self.D = D
self.cardinality = cardinality
self.inputRes = inputRes
self.scale = 2**(-1/self.cardinality)
_scales = []
for card in range(self.cardinality):
temp = nn.Sequential(
nn.FractionalMaxPool2d(2, output_ratio = self.scale**(card + 1)),
nn.Conv2d(self.D, self.D, 3, 1, 1),
nn.Upsample(size = self.inputRes)#, mode='bilinear')
)
_scales.append(temp)
self.scales = nn.ModuleList(_scales)
def forward(self, x):
#print(x.shape, self.inputRes)
out = torch.zeros_like(x)
for card in range(self.cardinality):
out += self.scales[card](x)
return out
class BnReluPyra(nn.Module):
"""docstring for BnReluPyra"""
def __init__(self, D, cardinality, inputRes):
super(BnReluPyra, self).__init__()
self.D = D
self.cardinality = cardinality
self.inputRes = inputRes
self.bn = nn.BatchNorm2d(self.D)
self.relu = nn.ReLU()
self.pyra = Pyramid(self.D, self.cardinality, self.inputRes)
def forward(self, x):
x = self.bn(x)
x = self.relu(x)
x = self.pyra(x)
return x
class ConvBlock(nn.Module):
"""docstring for ConvBlock"""
def __init__(self, inChannels, outChannels):
super(ConvBlock, self).__init__()
self.inChannels = inChannels
self.outChannels = outChannels
self.outChannelsby2 = outChannels//2
self.cbr1 = BnReluConv(self.inChannels, self.outChannelsby2, 1, 1, 0)
self.cbr2 = BnReluConv(self.outChannelsby2, self.outChannelsby2, 3, 1, 1)
self.cbr3 = BnReluConv(self.outChannelsby2, self.outChannels, 1, 1, 0)
def forward(self, x):
x = self.cbr1(x)
x = self.cbr2(x)
x = self.cbr3(x)
return x
class PyraConvBlock(nn.Module):
"""docstring for PyraConvBlock"""
def __init__(self, inChannels, outChannels, inputRes, baseWidth, cardinality, type = 1):
super(PyraConvBlock, self).__init__()
self.inChannels = inChannels
self.outChannels = outChannels
self.inputRes = inputRes
self.baseWidth = baseWidth
self.cardinality = cardinality
self.outChannelsby2 = outChannels//2
self.D = self.outChannels // self.baseWidth
self.branch1 = nn.Sequential(
BnReluConv(self.inChannels, self.outChannelsby2, 1, 1, 0),
BnReluConv(self.outChannelsby2, self.outChannelsby2, 3, 1, 1)
)
self.branch2 = nn.Sequential(
BnReluConv(self.inChannels, self.D, 1, 1, 0),
BnReluPyra(self.D, self.cardinality, self.inputRes),
BnReluConv(self.D, self.outChannelsby2, 1, 1, 0)
)
self.afteradd = BnReluConv(self.outChannelsby2, self.outChannels, 1, 1, 0)
def forward(self, x):
x = self.branch2(x) + self.branch1(x)
x = self.afteradd(x)
return x
class SkipLayer(nn.Module):
"""docstring for SkipLayer"""
def __init__(self, inChannels, outChannels):
super(SkipLayer, self).__init__()
self.inChannels = inChannels
self.outChannels = outChannels
if (self.inChannels == self.outChannels):
self.conv = None
else:
self.conv = nn.Conv2d(self.inChannels, self.outChannels, 1)
def forward(self, x):
if self.conv is not None:
x = self.conv(x)
return x
class Residual(nn.Module):
"""docstring for Residual"""
def __init__(self, inChannels, outChannels, inputRes=None, baseWidth=None, cardinality=None, type=None):
super(Residual, self).__init__()
self.inChannels = inChannels
self.outChannels = outChannels
self.cb = ConvBlock(self.inChannels, self.outChannels)
self.skip = SkipLayer(self.inChannels, self.outChannels)
def forward(self, x):
out = 0
out = out + self.cb(x)
out = out + self.skip(x)
return out
class ResidualPyramid(nn.Module):
"""docstring for ResidualPyramid"""
def __init__(self, inChannels, outChannels, inputRes, baseWidth, cardinality, type = 1):
super(ResidualPyramid, self).__init__()
self.inChannels = inChannels
self.outChannels = outChannels
self.inputRes = inputRes
self.baseWidth = baseWidth
self.cardinality = cardinality
self.type = type
self.cb = PyraConvBlock(self.inChannels, self.outChannels, self.inputRes, self.baseWidth, self.cardinality, self.type)
self.skip = SkipLayer(self.inChannels, self.outChannels)
def forward(self, x):
out = 0
out = out + self.cb(x)
out = out + self.skip(x)
return out
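# Illustrative shape check (all sizes here are assumptions, not from the original file):
#   block = ResidualPyramid(inChannels=64, outChannels=128, inputRes=(64, 64),
#                           baseWidth=8, cardinality=4)
#   y = block(torch.randn(1, 64, 64, 64))  # expected output shape: (1, 128, 64, 64)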
|
# coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Evaluation script for RL agents.
Example invocation:
python -m tensor2tensor.rl.evaluator \
--policy_dir=$HOME/t2t/rl_v1/policy \
--eval_metrics_dir=$HOME/t2t/rl_v1/full_eval_metrics \
--hparams_set=rlmb_base \
--hparams='batch_size=64'
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensor2tensor.models.research import rl # pylint: disable=unused-import
from tensor2tensor.rl import rl_utils
from tensor2tensor.rl import trainer_model_based_params # pylint: disable=unused-import
from tensor2tensor.utils import flags as t2t_flags # pylint: disable=unused-import
from tensor2tensor.utils import trainer_lib
import tensorflow as tf
flags = tf.flags
FLAGS = flags.FLAGS
flags.DEFINE_string("policy_dir", "", "Directory with policy checkpoints.")
flags.DEFINE_string(
"eval_metrics_dir", "", "Directory to output the eval metrics at."
)
flags.DEFINE_bool("full_eval", True, "Whether to ignore the timestep limit.")
flags.DEFINE_enum("agent", "policy", ["random", "policy"], "Agent type to use.")
flags.DEFINE_bool(
"eval_with_learner", True,
"Whether to use the PolicyLearner.evaluate function instead of an "
"out-of-graph one. Works only with --agent=policy."
)
def make_agent(
agent_type, env, policy_hparams, policy_dir, sampling_temp
):
"""Factory function for Agents."""
return {
"random": lambda: rl_utils.RandomAgent( # pylint: disable=g-long-lambda
env.batch_size, env.observation_space, env.action_space
),
"policy": lambda: rl_utils.PolicyAgent( # pylint: disable=g-long-lambda
env.batch_size, env.observation_space, env.action_space,
policy_hparams, policy_dir, sampling_temp
),
}[agent_type]()
def make_eval_fn_with_agent(agent_type):
"""Returns an out-of-graph eval_fn using the Agent API."""
def eval_fn(env, hparams, policy_hparams, policy_dir, sampling_temp):
"""Eval function."""
base_env = env
env = rl_utils.BatchStackWrapper(env, hparams.frame_stack_size)
agent = make_agent(
agent_type, env, policy_hparams, policy_dir, sampling_temp
)
num_dones = 0
first_dones = [False] * env.batch_size
observations = env.reset()
while num_dones < env.batch_size:
actions = agent.act(observations)
(observations, _, dones) = env.step(actions)
observations = list(observations)
now_done_indices = []
for (i, done) in enumerate(dones):
if done and not first_dones[i]:
now_done_indices.append(i)
first_dones[i] = True
num_dones += 1
if now_done_indices:
# Reset only envs done the first time in this timestep to ensure that
# we collect exactly 1 rollout from each env.
reset_observations = env.reset(now_done_indices)
for (i, observation) in zip(now_done_indices, reset_observations):
observations[i] = observation
observations = np.array(observations)
assert len(base_env.current_epoch_rollouts()) == env.batch_size
return eval_fn
def evaluate(
hparams, policy_dir, eval_metrics_dir, agent_type, eval_with_learner,
report_fn=None, report_metric=None
):
"""Evaluate."""
if eval_with_learner:
assert agent_type == "policy"
if report_fn:
assert report_metric is not None
eval_metrics_writer = tf.summary.FileWriter(eval_metrics_dir)
kwargs = {}
if not eval_with_learner:
kwargs["eval_fn"] = make_eval_fn_with_agent(agent_type)
eval_metrics = rl_utils.evaluate_all_configs(hparams, policy_dir, **kwargs)
rl_utils.summarize_metrics(eval_metrics_writer, eval_metrics, 0)
# Report metrics
if report_fn:
if report_metric == "mean_reward":
metric_name = rl_utils.get_metric_name(
sampling_temp=hparams.eval_sampling_temps[0],
max_num_noops=hparams.eval_max_num_noops,
clipped=False
)
report_fn(eval_metrics[metric_name], 0)
else:
report_fn(eval_metrics[report_metric], 0)
return eval_metrics
def main(_):
hparams = trainer_lib.create_hparams(FLAGS.hparams_set, FLAGS.hparams)
if FLAGS.full_eval:
hparams.eval_rl_env_max_episode_steps = -1
evaluate(
hparams, FLAGS.policy_dir, FLAGS.eval_metrics_dir, FLAGS.agent,
FLAGS.eval_with_learner
)
if __name__ == "__main__":
tf.app.run()
|
"""
Better brain parcellations for Region of Interest analysis
"""
import numbers
import numpy as np
from scipy.ndimage import label
from scipy.stats import scoreatpercentile
from sklearn.externals.joblib import Memory
from .. import masking
from ..input_data import NiftiMapsMasker
from .._utils import check_niimg, check_niimg_4d
from ..image import new_img_like, resample_img
from ..image.image import _smooth_array, threshold_img
from .._utils.niimg_conversions import concat_niimgs, _check_same_fov
from .._utils.niimg import _safe_get_data
from .._utils.compat import _basestring
from .._utils.ndimage import _peak_local_max
from .._utils.segmentation import _random_walker
def _threshold_maps_ratio(maps_img, threshold):
""" Automatic thresholding of atlas maps image.
Considers the given threshold as a ratio to the total number of voxels
    in the brain volume. From this ratio a cutoff value is computed, and only
    the nonzero voxels that fall above this cutoff are kept across all
    the maps.
Parameters
----------
maps_img: Niimg-like object
an image of brain atlas maps.
threshold: float
        If float, the value is used as a ratio to the number of voxels to derive
        the cutoff used to threshold the image. It should be positive and no
        larger than the number of maps (i.e. n_maps in the 4th dimension).
Returns
-------
threshold_maps_img: Nifti1Image
gives us thresholded image.
"""
maps = check_niimg(maps_img)
n_maps = maps.shape[-1]
if not isinstance(threshold, numbers.Real) or threshold <= 0 or threshold > n_maps:
raise ValueError("threshold given as ratio to the number of voxels must "
"be Real number and should be positive and between 0 and "
"total number of maps i.e. n_maps={0}. "
"You provided {1}".format(n_maps, threshold))
else:
ratio = threshold
maps_data = np.nan_to_num(maps.get_data())
abs_maps = np.abs(maps_data)
# thresholding
cutoff_threshold = scoreatpercentile(
abs_maps, 100. - (100. / n_maps) * ratio)
maps_data[abs_maps < cutoff_threshold] = 0.
threshold_maps_img = new_img_like(maps, maps_data)
return threshold_maps_img
def connected_regions(maps_img, min_region_size=1350,
extract_type='local_regions', smoothing_fwhm=6,
mask_img=None):
""" Extraction of brain connected regions into separate regions.
Note: the region size should be defined in mm^3. See the documentation for
more details.
.. versionadded:: 0.2
Parameters
----------
maps_img: Niimg-like object
an image of brain activation or atlas maps to be extracted into set of
separate brain regions.
min_region_size: int, default 1350 mm^3, optional
Minimum volume in mm3 for a region to be kept. For example, if the voxel
size is 3x3x3 mm then the volume of the voxel is 27mm^3. By default, it
is 1350mm^3 which means we take minimum size of 1350 / 27 = 50 voxels.
extract_type: str {'connected_components', 'local_regions'} \
default local_regions, optional
        If 'connected_components', each component/region in the image is
        extracted automatically by labelling the connected components of
        each map.
        If 'local_regions', each component/region is extracted based on its
        maximum peak value, which defines a seed marker; a random walker
        segmentation algorithm is then run on these markers to separate regions.
smoothing_fwhm: scalar, default 6mm, optional
        Smoothing (FWHM, in mm) applied to the image to help extract sparser
        regions. This parameter is passed to `_smooth_array` and is used only
        for extract_type 'local_regions'.
mask_img: Niimg-like object, default None
If given, mask image is applied to input data.
If None, no masking is applied.
Returns
-------
    regions_extracted_img: Nifti1Image
        4D image of the extracted brain regions. Each 3D volume contains
        exactly one separated region.
    index_of_each_map: numpy array
        an array of indices where each entry maps an extracted region back
        to the brain map it was extracted from.
"""
all_regions_imgs = []
index_of_each_map = []
maps_img = check_niimg(maps_img, atleast_4d=True)
maps = _safe_get_data(maps_img).copy()
affine = maps_img.get_affine()
min_region_size = min_region_size / np.prod(np.diag(abs(affine[:3])))
allowed_extract_types = ['connected_components', 'local_regions']
if extract_type not in allowed_extract_types:
message = ("'extract_type' should be given either of these {0} "
"You provided extract_type='{1}'").format(allowed_extract_types, extract_type)
raise ValueError(message)
if mask_img is not None:
if not _check_same_fov(maps_img, mask_img):
mask_img = resample_img(mask_img,
target_affine=maps_img.get_affine(),
target_shape=maps_img.shape[:3],
interpolation="nearest")
mask_data, _ = masking._load_mask_img(mask_img)
# Set as 0 to the values which are outside of the mask
maps[mask_data == 0.] = 0.
for index in range(maps.shape[-1]):
regions = []
map_3d = maps[..., index]
# Mark the seeds using random walker
if extract_type == 'local_regions':
smooth_map = _smooth_array(map_3d, affine=affine, fwhm=smoothing_fwhm)
seeds = _peak_local_max(smooth_map)
seeds_label, seeds_id = label(seeds)
# Assign -1 to values which are 0. to indicate to ignore
seeds_label[map_3d == 0.] = -1
rw_maps = _random_walker(map_3d, seeds_label)
# Now simply replace "-1" with "0" for regions separation
rw_maps[rw_maps == -1] = 0.
label_maps = rw_maps
else:
# Connected component extraction
label_maps, n_labels = label(map_3d)
# Takes the size of each labelized region data
labels_size = np.bincount(label_maps.ravel())
# set background labels sitting in zero index to zero
labels_size[0] = 0.
for label_id, label_size in enumerate(labels_size):
if label_size > min_region_size:
region_data = (label_maps == label_id) * map_3d
region_img = new_img_like(maps_img, region_data)
regions.append(region_img)
index_of_each_map.extend([index] * len(regions))
all_regions_imgs.extend(regions)
regions_extracted_img = concat_niimgs(all_regions_imgs)
return regions_extracted_img, index_of_each_map
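# Minimal usage sketch for connected_regions (hedged; the filename below is a
# hypothetical placeholder, not shipped with this module):
#
#     regions_img, index = connected_regions('maps.nii.gz',
#                                            min_region_size=1350,
#                                            extract_type='local_regions')
#     # regions_img is a 4D Nifti1Image with one region per 3D volume and
#     # index[i] gives the map each extracted region came from.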
class RegionExtractor(NiftiMapsMasker):
"""Class for brain region extraction.
    Region Extraction is a post-processing technique that automatically
    segments each brain atlas map into a set of separated, activated brain
    regions. In particular, each decomposed brain map can then be used to
    focus an analysis on specific Regions of Interest.
.. versionadded:: 0.2
Parameters
----------
maps_img: 4D Niimg-like object
Image containing a set of whole brain atlas maps or statistically
decomposed brain maps.
mask_img: Niimg-like object or None, default None, optional
Mask to be applied to input data, passed to NiftiMapsMasker.
If None, no masking is applied.
min_region_size: int, default 1350 mm^3, optional
Minimum volume in mm3 for a region to be kept. For example, if
the voxel size is 3x3x3 mm then the volume of the voxel is
27mm^3. By default, it is 1350mm^3 which means we take minimum
size of 1350 / 27 = 50 voxels.
    threshold: number, default 1., optional
        The value used by the selected `thresholding_strategy`
        ('ratio_n_voxels', 'img_value' or 'percentile').
    thresholding_strategy: str {'ratio_n_voxels', 'img_value', 'percentile'}, optional
        If 'ratio_n_voxels' (the default), thresholding keeps the most
        intense nonzero brain voxels across all maps, where n_voxels is
        the number of voxels in the brain volume. The float given in
        `threshold` is the ratio of voxels to keep (e.g. threshold=2.
        means the maps together keep 2. x n_voxels nonzero voxels). If
        'percentile', images are thresholded at the score of the given
        percentile of the data, and only voxel intensities above that
        score are kept. If 'img_value', thresholding is applied to the
        nonzero voxel intensities across all maps, and only voxels with
        intensities greater than the value given in `threshold` are kept.
    extractor: str {'connected_components', 'local_regions'} default 'local_regions', optional
        If 'connected_components', each connected component in the image
        is extracted automatically by labelling contiguous nonzero
        regions. If 'local_regions', each component/region is extracted
        based on its maximum peak value, which defines a seed marker, and
        the random walker segmentation algorithm is then run on these
        markers to separate the regions.
standardize: bool, True or False, default False, optional
If True, the time series signals are centered and normalized by
putting their mean to 0 and variance to 1. Recommended to
set as True if signals are not already standardized.
passed to class NiftiMapsMasker.
    detrend: bool, True or False, default False, optional
        Whether to detrend the timeseries signals. This parameter is
        passed to nilearn.signal.clean via NiftiMapsMasker.
low_pass: float, default None, optional
This value will be applied on the signals by passing to signal.clean
Please see the related documentation signal.clean for more details.
passed to class NiftiMapsMasker.
high_pass: float, default None, optional
This value will be applied on the signals by passing to signal.clean
Please see the related documentation signal.clean for more details.
passed to NiftiMapsMasker.
t_r: float, default None, optional
Repetition time in sec. This value is given to signal.clean
Please see the related documentation for details.
passed to NiftiMapsMasker.
    memory: instance of joblib.Memory, string, default None, optional
        Used to cache the masking process. If a string is given, it is
        used as the path to the caching directory.
        passed to NiftiMapsMasker.
    memory_level: int, default 0, optional
        Aggressiveness of memory caching. The higher the number, the more
        functions will be cached. Zero means no caching.
        passed to NiftiMapsMasker.
verbose: int, default 0, optional
Indicates the level of verbosity by printing the message. Zero
indicates nothing is printed.
Attributes
----------
    `index_` : numpy array
        array of indices where each entry assigns a separated region to
        the brain map it was extracted from.
    `regions_img_` : Nifti1Image
        4D image in which each 3D volume contains a single separated
        region extracted from the original maps.
References
----------
* Abraham et al. "Region segmentation for sparse decompositions:
better brain parcellations from rest fMRI", Sparsity Techniques in
Medical Imaging, Sep 2014, Boston, United States. pp.8
"""
def __init__(self, maps_img, mask_img=None, min_region_size=1350,
threshold=1., thresholding_strategy='ratio_n_voxels',
extractor='local_regions', standardize=False, detrend=False,
low_pass=None, high_pass=None, t_r=None,
memory=Memory(cachedir=None), memory_level=0, verbose=0):
super(RegionExtractor, self).__init__(
maps_img=maps_img, mask_img=mask_img,
standardize=standardize, detrend=detrend, low_pass=low_pass,
high_pass=high_pass, t_r=t_r, memory=memory,
memory_level=memory_level, verbose=verbose)
self.maps_img = maps_img
self.min_region_size = min_region_size
self.thresholding_strategy = thresholding_strategy
self.threshold = threshold
self.extractor = extractor
def fit(self, X=None, y=None):
""" Prepare the data and setup for the region extraction
"""
maps_img = check_niimg_4d(self.maps_img)
list_of_strategies = ['ratio_n_voxels', 'img_value', 'percentile']
if self.thresholding_strategy not in list_of_strategies:
message = ("'thresholding_strategy' should be "
"either of these {0}").format(list_of_strategies)
raise ValueError(message)
if self.threshold is None or isinstance(self.threshold, _basestring):
raise ValueError("The given input to threshold is not valid. "
"Please submit a valid number specific to either of "
"the strategy in {0}".format(list_of_strategies))
elif isinstance(self.threshold, numbers.Number):
# foreground extraction
if self.thresholding_strategy == 'ratio_n_voxels':
threshold_maps = _threshold_maps_ratio(maps_img, self.threshold)
else:
if self.thresholding_strategy == 'percentile':
self.threshold = "{0}%".format(self.threshold)
threshold_maps = threshold_img(maps_img, mask_img=self.mask_img,
threshold=self.threshold)
# connected component extraction
self.regions_img_, self.index_ = connected_regions(threshold_maps,
self.min_region_size,
self.extractor)
self.maps_img = self.regions_img_
super(RegionExtractor, self).fit()
return self
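# Example sketch for RegionExtractor (hedged; the file names below are
# hypothetical placeholders, not shipped with this module):
#
#     extractor = RegionExtractor('atlas_maps.nii.gz', threshold=0.5,
#                                 thresholding_strategy='ratio_n_voxels',
#                                 extractor='local_regions')
#     extractor.fit()
#     regions = extractor.regions_img_             # 4D image, one region per volume
#     signals = extractor.transform('run1.nii.gz') # region-wise time series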
|
from django import forms
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin, PermissionRequiredMixin
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse_lazy
from django.views.generic import ListView, DeleteView, DetailView
from django.views.generic.base import View
from product_manager_ices.forms import AddIceForm, AddFlavourForm, AddOrderItem
from product_manager_ices.models import Ices, Order, OrderItem
class Homepage(View):
"""
Welcome Page
"""
def get(self, request):
return render(request, 'Homepage.html')
class AddIce(LoginRequiredMixin, View):
"""
Class to add products by type and flavoures,
Two separate forms given
"""
def get(self, request):
form_type = AddIceForm()
form_flavour = AddFlavourForm()
return render(request, 'product_manager_ices/add_ices.html', context={"form_type": form_type,
"form_flavour": form_flavour, })
def post(self, request):
form_type = AddIceForm(request.POST)
form_flavour = AddFlavourForm(request.POST)
if form_type.is_valid():
form_type.save()
messages.success(request, "Type Added")
return redirect("add-ice")
if form_flavour.is_valid():
form_flavour.save()
messages.success(request, "Flavour Added")
return redirect("add-ice")
        else:
            form_type = AddIceForm()
            form_flavour = AddFlavourForm()
            messages.error(request, "Wrong Data")
return render(request, 'product_manager_ices/add_ices.html', context={"form_type": form_type,
"form_flavour": form_flavour, })
class CreateOrder(LoginRequiredMixin, View):
"""
Main Page to service the Ice sale:
Choosing type,quantity,flavoures
SideBar Shop Cart
Only one order can be open and being active / orders can be postpone or deleted
"""
def get(self, request):
add_order_form = AddOrderItem()
try:
order_in_cart = Order.objects.get(worker_owner=request.user, status=1)
sumarize = order_in_cart.get_total()
except ObjectDoesNotExist:
order_in_cart = None
sumarize = None
return render(request, 'product_manager_ices/order_form.html', context={"add_order_form": add_order_form,
"order_in_cart": order_in_cart,
"sumarize": sumarize,
})
def post(self, request):
add_order_form = AddOrderItem(request.POST)
if add_order_form.is_valid():
ice = add_order_form.cleaned_data.get('ice')
quantity = add_order_form.cleaned_data.get('quantity')
# Create order_items-ices
ice_in_order = OrderItem.objects.create(ice_id=ice, quantity=quantity)
            # Adding flavour to the order item (ice)
ice_in_order.flavour.set(request.POST.getlist('flavour'))
# Order add order_items-ices to cart
order = Order.objects.get(worker_owner=request.user, status=1)
ice_in_order.order.add(order.id)
ice_in_order.save()
messages.success(request, "OrderItem Added to cart")
return redirect("create-order")
else:
add_order_form = AddOrderItem()
messages.info(request, "OrdetItem must be made of type and flavoure")
return redirect("create-order")
@login_required
def open_order(request):
"""
OPEN NEW ORDER , one user can have only one order opened
"""
if request.method == "POST":
order_opened = Order.objects.filter(worker_owner=request.user, status=1).exists()
if not order_opened:
Order.objects.create(worker_owner=request.user, status=1)
return redirect("create-order")
else:
messages.info(request, "You have opened order")
return redirect("create-order")
@login_required
def delete_orderitem(request, id=None):
"""
Deleting orderitems in current order CART
"""
if request.method == "POST":
order_to_delete = OrderItem.objects.get(id=id)
order_to_delete.delete()
return redirect('create-order')
@login_required
def change_status_order_for_finish(request, id=None):
"""
Change status of order to finished
Boostrap Modal > buttons PAY>Finish
"""
if request.method == "POST":
order_to_change_status = Order.objects.get(id=id, worker_owner=request.user, status=1)
order_to_change_status.status = 3
order_to_change_status.save()
return redirect('create-order')
@login_required
def postpone_order(request, id):
"""
Postpone current order in CART
button> POSTPONE
"""
if request.method == "POST":
order_to_change_status = Order.objects.get(id=id, worker_owner=request.user, status="1")
order_to_change_status.status = 2
order_to_change_status.save()
return redirect('create-order')
@login_required
def return_order(request, id=None):
"""
Change status of order form postpone to started
List-of-orders button> return order to active
Only the same user can return the order to active
ONLY ONE ORDER CAN BE ACTIVE IN CART
"""
if Order.objects.filter(worker_owner=request.user, status=1).exists():
messages.info(request, "You have active order opened, Please postpone or delete it")
return redirect('create-order')
else:
order_to_change_status = Order.objects.get(worker_owner=request.user, id=id)
order_to_change_status.status = 1
order_to_change_status.save()
return redirect('create-order')
class OrderDelete(LoginRequiredMixin, DeleteView):
"""
Deleting whole current order in CART
"""
model = Order
success_url = reverse_lazy("create-order")
class ListOfOrders(LoginRequiredMixin, ListView):
"""
List of finished orders
Search of orders by USER/TIMESELL/FLAVOUR/TYPEOFICE
Only the same user can return the order to active
"""
model = Order
context_object_name = "orderlist"
paginate_by = 7
def get_queryset(self):
query = self.request.GET.get('q')
if query:
queryset = Order.objects.filter(Q(worker_owner__username__icontains=query) |
Q(time_sell__icontains=query) |
Q(orderitem__flavour__flavour__icontains=query) |
Q(orderitem__ice__type__contains=query)).order_by(
"-time_sell").distinct()
else:
queryset = Order.objects.filter(worker_owner=self.request.user).order_by("-time_sell")
return queryset
class OrderDetail(UserPassesTestMixin, LoginRequiredMixin, DetailView):
"""
Detail of every order
Only owner of order can see the details.
"""
model = Order
    def test_func(self):
        # The object fetched here is an Order, not a user; name it accordingly.
        order = Order.objects.get(id=self.kwargs.get("pk"))
        return self.request.user.id == order.worker_owner.id
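# urls.py sketch (hedged; the route names 'add-ice' and 'create-order' come
# from the redirect() calls in this module, the remaining names are
# hypothetical and the project's real URL configuration may differ):
#
#     from django.urls import path
#     from product_manager_ices import views
#
#     urlpatterns = [
#         path('', views.Homepage.as_view(), name='homepage'),
#         path('add-ice/', views.AddIce.as_view(), name='add-ice'),
#         path('create-order/', views.CreateOrder.as_view(), name='create-order'),
#         path('open-order/', views.open_order, name='open-order'),
#     ]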
|
import network
import time
import _thread
def connwifithread(ssid, clave):
    nic = network.WLAN(network.STA_IF)
    try:
        nic.active(True)
    except Exception as e:
        print("Could not enable WiFi: " + str(e))
    if nic.active():
        print("WiFi enabled")
        while True:
            if not nic.isconnected():
                try:
                    nic.connect(ssid, clave)
                except Exception as e:
                    print(e)
                time.sleep(3)
                if nic.isconnected():
                    print("Connected to the network")
                    print(nic.ifconfig())
            else:
                # Already connected; sleep to avoid busy-spinning the loop
                time.sleep(3)
    elif nic.isconnected():
        pass
    else:
        print("Try running the connectwifi method again")
def connectwifi(nombre, clave):
    # Pass the function and its arguments separately; calling it here would
    # run it in the current thread instead of starting a new one.
    _thread.start_new_thread(connwifithread, (nombre, clave))
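# Usage sketch (SSID and password below are placeholders, not real credentials):
#
#     connectwifi("MyNetwork", "MyPassword")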
|
import time
def wait(sec):
time.sleep(sec)
# Animate a simple ten-character text progress bar back and forth forever.
while True:
    for filled in list(range(0, 11)) + list(range(9, 0, -1)):
        wait(0.1)
        print("*" * filled + "-" * (10 - filled))
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import unittest
from pytext.config.doc_classification import ModelInput, ModelInputConfig
from pytext.config.field_config import FeatureConfig
from pytext.data import DocClassificationDataHandler, RawData
from pytext.data.featurizer import SimpleFeaturizer
from pytext.utils.test_utils import import_tests_module
tests_module = import_tests_module()
class DocClassificationDataHandlerTest(unittest.TestCase):
def setUp(self):
handler_config = DocClassificationDataHandler.Config()
handler_config.columns_to_read.append(ModelInput.DENSE_FEAT)
        # Use the config that has the extra dense-feature column appended above.
        self.data_handler = DocClassificationDataHandler.from_config(
            handler_config,
ModelInputConfig(),
[],
featurizer=SimpleFeaturizer.from_config(
SimpleFeaturizer.Config(), FeatureConfig()
),
)
def test_create_from_config(self):
expected_columns = [
RawData.DOC_LABEL,
RawData.TEXT,
RawData.DICT_FEAT,
ModelInput.DENSE_FEAT,
]
# check that the list of columns is as expected
self.assertTrue(self.data_handler.raw_columns == expected_columns)
def test_read_from_file(self):
file_name = tests_module.test_file("train_dense_features_tiny.tsv")
data = self.data_handler.read_from_file(
file_name, self.data_handler.raw_columns
)
        # Check that the data has 10 rows and 4 columns
self.assertEqual(len(data), 10)
self.assertEqual(len(data[0]), 4)
self.assertEqual(data[0][RawData.DOC_LABEL], "alarm/modify_alarm")
def test_tokenization(self):
file_name = tests_module.test_file("train_dense_features_tiny.tsv")
data = self.data_handler.read_from_file(
file_name, self.data_handler.raw_columns
)
data = list(self.data_handler.preprocess(data))
# test tokenization without language-specific tokenizers
self.assertEqual(
data[0][ModelInput.WORD_FEAT][0], "16:24:datetime,39:57:datetime"
)
self.assertIsNotNone(data[0][ModelInput.DENSE_FEAT])
|
from werkzeug.utils import cached_property
from xml.etree.ElementTree import parse
from homebank.libraries.banking.account import Account
from homebank.libraries.banking.category import Category
from homebank.libraries.banking.payee import Payee
from homebank.libraries.banking.transaction import Transaction
class Banking(object):
def __init__(self, filename):
self.tree = parse(filename)
self.root = self.tree.getroot()
def _get_list(self, predicate, cls):
data = []
for x in self.root.iter(predicate):
data.append(cls(self, x))
return data
@cached_property
def accounts(self):
return self._get_list("account", Account)
@property
def transactions(self):
data = self._get_list("ope", Transaction)
for i, t in enumerate(data):
t.id = i
return data
@property
def categories(self):
return self._get_list("cat", Category)
@property
def payees(self):
return self._get_list("pay", Payee)
|
"""
This example shows how the Fisher statistics can be computed and displayed.
*Based on example 5.21 and example 5.23 in* [Fisher1993]_.
============= =========== ==========
Data in:      Table B2    (page 279)
Mean Vector:  144.2/57.2  (page 130)
K-Value:      109         (page 130)
Fisher-Angle: 2.7 deg.    (page 132)
============= =========== ==========
Reference
---------
.. [Fisher1993] Fisher, N.I., Lewis, T., Embleton, B.J.J. (1993) "Statistical
Analysis of Spherical Data"
"""
import matplotlib.pyplot as plt
import mplstereonet as mpl
decl = [122.5, 130.5, 132.5, 148.5, 140.0, 133.0, 157.5, 153.0, 140.0, 147.5,
142.0, 163.5, 141.0, 156.0, 139.5, 153.5, 151.5, 147.5, 141.0, 143.5,
131.5, 147.5, 147.0, 149.0, 144.0, 139.5]
incl = [55.5, 58.0, 44.0, 56.0, 63.0, 64.5, 53.0, 44.5, 61.5, 54.5, 51.0, 56.0,
59.5, 56.5, 54.0, 47.5, 61.0, 58.5, 57.0, 67.5, 62.5, 63.5, 55.5, 62.0,
53.5, 58.0]
confidence = 95
fig = plt.figure()
ax = fig.add_subplot(111, projection='stereonet')
ax.line(incl, decl, color="black", markersize=2)
vector, stats = mpl.find_fisher_stats(incl, decl, conf=confidence)
template = (u"Mean Vector P/B: {plunge:0.0f}\u00B0/{bearing:0.0f}\u00B0\n"
"Confidence: {conf}%\n"
u"Fisher Angle: {fisher:0.2f}\u00B0\n"
u"R-Value {r:0.3f}\n"
"K-Value: {k:0.2f}")
label = template.format(plunge=vector[0], bearing=vector[1], conf=confidence,
r=stats[0], fisher=stats[1], k=stats[2])
ax.line(vector[0], vector[1], color="red", label=label)
ax.cone(vector[0], vector[1], stats[1], facecolor="None", edgecolor="red")
ax.legend(bbox_to_anchor=(1.1, 1.1), numpoints=1)
plt.show()
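# With the Table B2 data above, the legend should report values close to the
# published results quoted in the module docstring: a mean direction of about
# 144.2/57.2, K of roughly 109 and a 95% Fisher angle near 2.7 degrees.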
|
abc = 'With three words or four'
stuff = abc.split()
print(stuff)
print(len(stuff))
print(stuff[1])
print('\n')
for i in stuff:
print(i)
line = 'A lot'
etc = line.split()
print(etc)
line = 'first;second;third;fourth'
thing = line.split()
print(thing)
print(len(thing))
thing = line.split(';')
print(thing)
print(len(thing))
# parsing
fhand = open('mbox-short.txt')
for line in fhand:
line = line.rstrip()
if not line.startswith('From '):continue
words = line.split()
print(words[2])
# double split
words = line.split()
email = words[1]
pieces = email.split('@')
print(pieces[1])
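# Illustrative note: for a typical "From " line in mbox-short.txt such as
#   From stephen.marquard@uct.ac.za Sat Jan  5 09:14:16 2008
# words[2] in the loop above is the day of week ("Sat"), and splitting the
# address at '@' gives the mail host ("uct.ac.za"). (Example line only;
# actual file contents may vary.)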
|
import os
import sys
from unittest.mock import Mock
import pytest
from hg.gfx.sprite_renderer.sprite import Image
from hg.gfx.sprite_renderer.renderer import SpriteRenderer
from hg.res.loaders.image_loader import ImageLoader
from hg.res.loaders.loader import Loader
from hg.res.loaders.sprite_sheet_loader import SpriteSheetLoader
class SDL2(Mock):
@classmethod
def SDL_GetTicks(cls):
return 0
sys.modules['sdl2'] = SDL2()
class _ImageLoader(Loader):
def load(self, path):
return Image(path=os.path.basename(path), width=32, height=32)
@pytest.fixture
def inject_config(mocker):
return {
ImageLoader: _ImageLoader,
SpriteSheetLoader: SpriteSheetLoader,
SpriteRenderer: lambda: SpriteRenderer(sdl_renderer=mocker.Mock()),
}
@pytest.fixture
def inject(inject_config):
import inject
def bind(binder: inject.Binder):
for cls, constructor in inject_config.items():
binder.bind_to_constructor(cls, constructor)
inject.clear_and_configure(bind)
yield
inject.clear()
def pytest_configure(config):
config.addinivalue_line('markers', 'inject: configure the dependency injector for a given test')
def pytest_collection_modifyitems(session, config, items):
for item in items:
if item.get_closest_marker('inject'):
item.fixturenames.append('inject')
|
"""Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python.ops.sets import set_difference
from tensorflow.python.ops.sets import set_intersection
from tensorflow.python.ops.sets import set_size
from tensorflow.python.ops.sets import set_union
|
# Generated by Django 3.2.4 on 2021-07-03 20:43
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0063_auto_20210703_2039'),
]
operations = [
migrations.RenameField(
model_name='detail',
old_name='visibile',
new_name='visible',
),
]
|
#!/usr/local/bin/python3
"""Para_breaker.py"""
paragraph = """\
We hold these truths to be self-evident, that all men are created equal, that they are endowed by their Creator with certain unalienable Rights, that among these are Life, Liberty and the pursuit of Happiness. - That to secure these rights, Governments are instituted among Men, deriving their just powers from the consent of the governed, -
That whenever any Form of Government becomes destructive of these ends, it is the Right of the People
to alter or to abolish it, and to institute new Government, laying its foundation on such principles
and organizing its powers in such form, as to them shall seem most likely to effect their Safety and
Happiness. Prudence, indeed, will dictate that Governments long established should not be changed for
light and transient causes; and accordingly all experience hath shewn that mankind are more disposed
to suffer, while evils are sufferable than to right themselves by abolishing the forms to which they
are accustomed. But when a long train of abuses and usurpations, pursuing invariably the same Object
evinces a design to reduce them under absolute Despotism, it is their right, it is their duty, to
throw off such Government, and to provide new Guards for their future security. - Such has been the
patient sufferance of these Colonies; and such is now the necessity which constrains them to alter
their former Systems of Government. The history of the present King of Great Britain is a history
of repeated injuries and usurpations, all having in direct object the establishment of an absolute
Tyranny over these States. To prove this, let Facts be submitted to a candid world."""
for i, sentence in enumerate(paragraph.split(".")):
print("*" * 50)
print("Sentence #{0}".format(i+1))
    for j, phrase in enumerate(sentence.split(",")):
        # Strip leading spaces without shadowing the outer loop variable.
        phrase = phrase.lstrip(" ")
        print("Phrase {0}: {1:<60}".format(j+1, phrase))
|
#!/usr/bin/python
###########################################################################
# Copyright (c) 2014, Yahoo.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# * Neither the name of Yahoo. nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior
# written permission of Yahoo.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###########################################################################
import os
import sys
import threading
import readline
import ConfigParser
try:
import argparse
except:
print("'argparse' python library not found.\n If using Ubuntu, run 'sudo apt-get install python-argparse'")
sys.exit()
from Discovery import *
from ConnectionUtils import *
DEFAULT_APP_ID = "0xeTgF3c"
DEFAULT_CONSUMER_KEY = "dj0yJmk9T1Y0MmVIWWEzWVc3JmQ9WVdrOU1IaGxWR2RHTTJNbWNHbzlNVEUzTkRFM09ERTJNZy0tJnM9Y29uc3VtZXJzZWNyZXQmeD0yNA--"
DEFAULT_SECRET = "1b8f0feb4d8d468676293caa769e19958bf36843"
DEFAULT_APP_NAME = "Test Client (client.py)"
IS_RUNNING=False
class ReceiverThread(threading.Thread):
def __init__(self, connection):
threading.Thread.__init__(self)
self.connection=connection
self.isClosing=False
def run(self):
while not self.isClosing:
cmnd = getUserInput("SEND: ")
if cmnd == "" or cmnd == -1 : continue
if cmnd == "q": break
self.connection.handler.push(cmnd)
self.connection.handler.close_when_done()
userInputState = {"prompt":"", "isWaiting":False}
def printMessage(msg):
if(userInputState["isWaiting"]):
print "\n",msg
sys.stdout.write(userInputState["prompt"])
else:
print msg
def getUserInput(prompt):
userInputState["isWaiting"] = True
userInputState["prompt"] = prompt
data = raw_input(prompt)
userInputState["isWaiting"] = False
return data
def api(args):
def onMessageRecieved(msg):
print "RCVD:", msg
client = Connection(args.host, args.port, onMessageRecieved=onMessageRecieved)
if args.instanceId:
resetSession(client, args.instanceId)
elif args.manual_auth == False:
createSession(client, args.app_id, args.consumer_key, args.secret, args.app_name)
authSession(client, raw_input("Please enter code:"))
inputReader = ReceiverThread(client)
inputReader.start()
client.startLoop();
inputReader.isClosing=True
def setupReadlineHistory(historyFile):
try:
readline.read_history_file(historyFile)
readline.parse_and_bind("set set editing-mode vi")
readline.parse_and_bind("set horizontal-scroll-mode On")
except IOError, e:
print(e)
pass
import atexit
atexit.register(readline.write_history_file, historyFile)
def parse_args():
parser = argparse.ArgumentParser(description='Connect to a Device Communication-enabled TV and send messages')
parser.add_argument('host', nargs='?', help='hostname or IP to connect to, omit for automatic search')
parser.add_argument('port', type=int, nargs='?', default=8099, help='port of device, defaults to 8099')
parser.add_argument('-m', '--manual-auth', action='store_true', help='do not prompt for code, just connect')
parser.add_argument('-i', '--instanceId', help='use an instanceID to connect, will override --manual-auth')
parser.add_argument('-y', '--history', default=os.path.join(os.environ["HOME"], ".client.py.hist"), help='use non-default history file')
parser.add_argument('-c', '--config', default=os.path.join(os.environ["HOME"], ".client.py.config"), help='configuration file that stores authorization keys, leave blank to use default non-production keys. See config.sample for configuration file example. Default location: %s' % os.path.join(os.environ["HOME"], ".client.py.config"))
return parser.parse_args()
def load_config(args):
config = ConfigParser.RawConfigParser({"app_id": DEFAULT_APP_ID, "consumer_key":DEFAULT_CONSUMER_KEY, "secret": DEFAULT_SECRET, "app_name":DEFAULT_APP_NAME})
    configsRead = config.read(args.config)
    # ConfigParser.read() returns the list of files successfully parsed.
    if not configsRead:
        print("WARNING: Using default auth keys. Note these can only be used in a simulator environment. See --help for more information.")
    elif args.config not in configsRead:
        print("Unable to load config file %s, using default auth keys. Note these can only be used in a simulator environment." % args.config)
args.app_id = config.get("DEFAULT", "app_id")
args.consumer_key = config.get("DEFAULT", "consumer_key")
args.secret = config.get("DEFAULT", "secret")
args.app_name = config.get("DEFAULT", "app_name")
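# Sample ~/.client.py.config (hedged sketch; the key values below are
# placeholders, and config.sample in the repository is the authoritative
# reference for the format):
#
#     [DEFAULT]
#     app_id = YOUR_APP_ID
#     consumer_key = YOUR_CONSUMER_KEY
#     secret = YOUR_SECRET
#     app_name = My TV Client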
def main():
args = parse_args()
load_config(args)
if not args.host and PYBONJOUR_AVAILABLE:
print("Starting automatic discovery... For manual usage, see the -h option")
args.host, args.port = discover()
if args.host is None or args.port is None:
print("Unable to automatically resolve host and port. For manual usage, see the -h option")
elif not args.host and not PYBONJOUR_AVAILABLE:
print("Automatic search not available, please install pybonjour")
    if args.host is not None and args.port is not None:
setupReadlineHistory(args.history)
api(args)
if __name__ == "__main__":
main()
|
import math
def main():
people = int(input("How many people are eating? "))
slices_per_person = float(input("How many slices per person? "))
slices = slices_per_person * people
slices_per_pie = int(input("How many slices per pie? "))
pizzas = math.ceil(slices / slices_per_pie)
print("You need", pizzas, "pizzas to feed", people, "people.")
total_slices = slices_per_pie * pizzas
slices_left = total_slices - slices
print("There will be", slices_left, "leftover slices.")
main()
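# Worked example of the arithmetic above (illustrative numbers only): with
# 8 people eating 2.5 slices each, slices = 20; at 8 slices per pie,
# math.ceil(20 / 8) = 3 pizzas, leaving 3 * 8 - 20 = 4 slices over.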
|
#!/usr/bin/env python
"""
test_python_binding.py
Test that passing types to Python bindings works successfully.
mlpack is free software; you may redistribute it and/or modify it under the
terms of the 3-clause BSD license. You should have received a copy of the
3-clause BSD license along with mlpack. If not, see
http://www.opensource.org/licenses/BSD-3-Clause for more information.
"""
import unittest
import pandas as pd
import numpy as np
import copy
from mlpack.test_python_binding import test_python_binding
class TestPythonBinding(unittest.TestCase):
"""
This class tests the basic functionality of the Python bindings.
"""
def testRunBindingCorrectly(self):
"""
Test that when we run the binding correctly (with correct input parameters),
we get the expected output.
"""
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True)
self.assertEqual(output['string_out'], 'hello2')
self.assertEqual(output['int_out'], 13)
self.assertEqual(output['double_out'], 5.0)
def testRunBindingNoFlag(self):
"""
If we forget the mandatory flag, we should get wrong results.
"""
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0])
self.assertNotEqual(output['string_out'], 'hello2')
self.assertNotEqual(output['int_out'], 13)
self.assertNotEqual(output['double_out'], 5.0)
def testRunBindingWrongString(self):
"""
If we give the wrong string, we should get wrong results.
"""
output = test_python_binding(string_in='goodbye',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True)
self.assertNotEqual(output['string_out'], 'hello2')
def testRunBindingWrongInt(self):
"""
If we give the wrong int, we should get wrong results.
"""
output = test_python_binding(string_in='hello',
int_in=15,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True)
self.assertNotEqual(output['int_out'], 13)
def testRunBindingWrongDouble(self):
"""
If we give the wrong double, we should get wrong results.
"""
output = test_python_binding(string_in='hello',
int_in=12,
double_in=2.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True)
self.assertNotEqual(output['double_out'], 5.0)
def testRunBadFlag(self):
"""
If we give the second flag, this should fail.
"""
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
flag2=True)
self.assertNotEqual(output['string_out'], 'hello2')
self.assertNotEqual(output['int_out'], 13)
self.assertNotEqual(output['double_out'], 5.0)
def testNumpyMatrix(self):
"""
The matrix we pass in, we should get back with the third dimension doubled
and the fifth forgotten.
"""
x = np.random.rand(100, 5);
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=z)
self.assertEqual(output['matrix_out'].shape[0], 100)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(output['matrix_out'].dtype, np.double)
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x[j, i], output['matrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x[j, 2], output['matrix_out'][j, 2])
def testNumpyMatrixForceCopy(self):
"""
The matrix we pass in, we should get back with the third dimension doubled
and the fifth forgotten.
"""
x = np.random.rand(100, 5);
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['matrix_out'].shape[0], 100)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(output['matrix_out'].dtype, np.double)
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x[j, i], output['matrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x[j, 2], output['matrix_out'][j, 2])
def testNumpyFContiguousMatrix(self):
"""
The matrix with F_CONTIGUOUS set we pass in, we should get back with the third
dimension doubled and the fifth forgotten.
"""
x = np.array(np.random.rand(100, 5), order='F');
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=z)
self.assertEqual(output['matrix_out'].shape[0], 100)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(output['matrix_out'].dtype, np.double)
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x[j, i], output['matrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x[j, 2], output['matrix_out'][j, 2])
def testNumpyFContiguousMatrixForceCopy(self):
"""
The matrix with F_CONTIGUOUS set we pass in, we should get back with the third
dimension doubled and the fifth forgotten.
"""
x = np.array(np.random.rand(100, 5), order='F');
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['matrix_out'].shape[0], 100)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(output['matrix_out'].dtype, np.double)
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x[j, i], output['matrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x[j, 2], output['matrix_out'][j, 2])
def testPandasSeriesMatrix(self):
"""
Test that we can pass pandas.Series as input parameter.
"""
x = pd.Series(np.random.rand(100))
z = x.copy(deep=True)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
smatrix_in=z)
self.assertEqual(output['smatrix_out'].shape[0], 100)
self.assertEqual(output['smatrix_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['smatrix_out'][i,0], x.iloc[i] * 2)
def testPandasSeriesMatrixForceCopy(self):
"""
Test that we can pass pandas.Series as input parameter.
"""
x = pd.Series(np.random.rand(100))
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
smatrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['smatrix_out'].shape[0], 100)
self.assertEqual(output['smatrix_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['smatrix_out'][i,0], x.iloc[i] * 2)
def testPandasSeriesUMatrix(self):
"""
Test that we can pass pandas.Series as input parameter.
"""
x = pd.Series(np.random.randint(0, high=500, size=100))
z = x.copy(deep=True)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
s_umatrix_in=z)
self.assertEqual(output['s_umatrix_out'].shape[0], 100)
self.assertEqual(output['s_umatrix_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['s_umatrix_out'][i, 0], x.iloc[i] * 2)
def testPandasSeriesUMatrixForceCopy(self):
"""
Test that we can pass pandas.Series as input parameter.
"""
x = pd.Series(np.random.randint(0, high=500, size=100))
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
s_umatrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['s_umatrix_out'].shape[0], 100)
self.assertEqual(output['s_umatrix_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['s_umatrix_out'][i, 0], x.iloc[i] * 2)
def testPandasSeries(self):
"""
Test a Pandas Series input paramter
"""
x = pd.Series(np.random.rand(100))
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
col_in=z)
self.assertEqual(output['col_out'].shape[0], 100)
self.assertEqual(output['col_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['col_out'][i], z[i] * 2)
def testPandasSeriesForceCopy(self):
"""
Test a Pandas Series input paramter
"""
x = pd.Series(np.random.rand(100))
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
col_in=x,
copy_all_inputs=True)
self.assertEqual(output['col_out'].shape[0], 100)
self.assertEqual(output['col_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['col_out'][i], x[i] * 2)
def testPandasDataFrameMatrix(self):
"""
The matrix we pass in, we should get back with the third dimension doubled
and the fifth forgotten.
"""
x = pd.DataFrame(np.random.rand(100, 5))
z = x.copy(deep=True)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=z)
self.assertEqual(output['matrix_out'].shape[0], 100)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(output['matrix_out'].dtype, np.double)
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x.iloc[j, i], output['matrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x.iloc[j, 2], output['matrix_out'][j, 2])
def testPandasDataFrameMatrixForceCopy(self):
"""
The matrix we pass in, we should get back with the third dimension doubled
and the fifth forgotten.
"""
x = pd.DataFrame(np.random.rand(100, 5))
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['matrix_out'].shape[0], 100)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(output['matrix_out'].dtype, np.double)
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x.iloc[j, i], output['matrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x.iloc[j, 2], output['matrix_out'][j, 2])
def testArraylikeMatrix(self):
"""
Test that we can pass an arraylike matrix.
"""
x = [[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15]]
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=x)
self.assertEqual(output['matrix_out'].shape[0], 3)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(output['matrix_out'].dtype, np.double)
self.assertEqual(output['matrix_out'][0, 0], 1)
self.assertEqual(output['matrix_out'][0, 1], 2)
self.assertEqual(output['matrix_out'][0, 2], 6)
self.assertEqual(output['matrix_out'][0, 3], 4)
self.assertEqual(output['matrix_out'][1, 0], 6)
self.assertEqual(output['matrix_out'][1, 1], 7)
self.assertEqual(output['matrix_out'][1, 2], 16)
self.assertEqual(output['matrix_out'][1, 3], 9)
self.assertEqual(output['matrix_out'][2, 0], 11)
self.assertEqual(output['matrix_out'][2, 1], 12)
self.assertEqual(output['matrix_out'][2, 2], 26)
self.assertEqual(output['matrix_out'][2, 3], 14)
def testArraylikeMatrixForceCopy(self):
"""
Test that we can pass an arraylike matrix.
"""
x = [[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15]]
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['matrix_out'].shape[0], 3)
self.assertEqual(output['matrix_out'].shape[1], 4)
self.assertEqual(len(x), 3)
self.assertEqual(len(x[0]), 5)
self.assertEqual(output['matrix_out'].dtype, np.double)
self.assertEqual(output['matrix_out'][0, 0], 1)
self.assertEqual(output['matrix_out'][0, 1], 2)
self.assertEqual(output['matrix_out'][0, 2], 6)
self.assertEqual(output['matrix_out'][0, 3], 4)
self.assertEqual(output['matrix_out'][1, 0], 6)
self.assertEqual(output['matrix_out'][1, 1], 7)
self.assertEqual(output['matrix_out'][1, 2], 16)
self.assertEqual(output['matrix_out'][1, 3], 9)
self.assertEqual(output['matrix_out'][2, 0], 11)
self.assertEqual(output['matrix_out'][2, 1], 12)
self.assertEqual(output['matrix_out'][2, 2], 26)
self.assertEqual(output['matrix_out'][2, 3], 14)
def testNumpyUmatrix(self):
"""
Same as testNumpyMatrix() but with an unsigned matrix.
"""
x = np.random.randint(0, high=500, size=[100, 5])
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
umatrix_in=z)
self.assertEqual(output['umatrix_out'].shape[0], 100)
self.assertEqual(output['umatrix_out'].shape[1], 4)
self.assertEqual(output['umatrix_out'].dtype, np.dtype('intp'))
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x[j, i], output['umatrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x[j, 2], output['umatrix_out'][j, 2])
def testNumpyUmatrixForceCopy(self):
"""
Same as testNumpyMatrix() but with an unsigned matrix.
"""
x = np.random.randint(0, high=500, size=[100, 5])
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
umatrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['umatrix_out'].shape[0], 100)
self.assertEqual(output['umatrix_out'].shape[1], 4)
self.assertEqual(output['umatrix_out'].dtype, np.dtype('intp'))
for i in [0, 1, 3]:
for j in range(100):
self.assertEqual(x[j, i], output['umatrix_out'][j, i])
for j in range(100):
self.assertEqual(2 * x[j, 2], output['umatrix_out'][j, 2])
def testArraylikeUmatrix(self):
"""
Test that we can pass an arraylike unsigned matrix.
"""
x = [[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15]]
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
umatrix_in=x)
self.assertEqual(output['umatrix_out'].shape[0], 3)
self.assertEqual(output['umatrix_out'].shape[1], 4)
self.assertEqual(output['umatrix_out'].dtype, np.dtype('intp'))
self.assertEqual(output['umatrix_out'][0, 0], 1)
self.assertEqual(output['umatrix_out'][0, 1], 2)
self.assertEqual(output['umatrix_out'][0, 2], 6)
self.assertEqual(output['umatrix_out'][0, 3], 4)
self.assertEqual(output['umatrix_out'][1, 0], 6)
self.assertEqual(output['umatrix_out'][1, 1], 7)
self.assertEqual(output['umatrix_out'][1, 2], 16)
self.assertEqual(output['umatrix_out'][1, 3], 9)
self.assertEqual(output['umatrix_out'][2, 0], 11)
self.assertEqual(output['umatrix_out'][2, 1], 12)
self.assertEqual(output['umatrix_out'][2, 2], 26)
self.assertEqual(output['umatrix_out'][2, 3], 14)
def testArraylikeUmatrixForceCopy(self):
"""
Test that we can pass an arraylike unsigned matrix.
"""
x = [[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15]]
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
umatrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['umatrix_out'].shape[0], 3)
self.assertEqual(output['umatrix_out'].shape[1], 4)
self.assertEqual(len(x), 3)
self.assertEqual(len(x[0]), 5)
self.assertEqual(output['umatrix_out'].dtype, np.dtype('intp'))
self.assertEqual(output['umatrix_out'][0, 0], 1)
self.assertEqual(output['umatrix_out'][0, 1], 2)
self.assertEqual(output['umatrix_out'][0, 2], 6)
self.assertEqual(output['umatrix_out'][0, 3], 4)
self.assertEqual(output['umatrix_out'][1, 0], 6)
self.assertEqual(output['umatrix_out'][1, 1], 7)
self.assertEqual(output['umatrix_out'][1, 2], 16)
self.assertEqual(output['umatrix_out'][1, 3], 9)
self.assertEqual(output['umatrix_out'][2, 0], 11)
self.assertEqual(output['umatrix_out'][2, 1], 12)
self.assertEqual(output['umatrix_out'][2, 2], 26)
self.assertEqual(output['umatrix_out'][2, 3], 14)
def testCol(self):
"""
Test a column vector input parameter.
"""
x = np.random.rand(100)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
col_in=z)
self.assertEqual(output['col_out'].shape[0], 100)
self.assertEqual(output['col_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['col_out'][i], x[i] * 2)
def testColForceCopy(self):
"""
Test a column vector input parameter.
"""
x = np.random.rand(100)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
col_in=x,
copy_all_inputs=True)
self.assertEqual(output['col_out'].shape[0], 100)
self.assertEqual(output['col_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['col_out'][i], x[i] * 2)
def testUcol(self):
"""
Test an unsigned column vector input parameter.
"""
x = np.random.randint(0, high=500, size=100)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
ucol_in=z)
self.assertEqual(output['ucol_out'].shape[0], 100)
self.assertEqual(output['ucol_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['ucol_out'][i], x[i] * 2)
def testUcolForceCopy(self):
"""
Test an unsigned column vector input parameter.
"""
x = np.random.randint(0, high=500, size=100)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
ucol_in=x,
copy_all_inputs=True)
self.assertEqual(output['ucol_out'].shape[0], 100)
self.assertEqual(output['ucol_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['ucol_out'][i], x[i] * 2)
def testRow(self):
"""
Test a row vector input parameter.
"""
x = np.random.rand(100)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
row_in=z)
self.assertEqual(output['row_out'].shape[0], 100)
self.assertEqual(output['row_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['row_out'][i], x[i] * 2)
def testRowForceCopy(self):
"""
Test a row vector input parameter.
"""
x = np.random.rand(100)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
row_in=x,
copy_all_inputs=True)
self.assertEqual(output['row_out'].shape[0], 100)
self.assertEqual(output['row_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['row_out'][i], x[i] * 2)
def testUrow(self):
"""
Test an unsigned row vector input parameter.
"""
x = np.random.randint(0, high=500, size=100)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
urow_in=z)
self.assertEqual(output['urow_out'].shape[0], 100)
self.assertEqual(output['urow_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['urow_out'][i], x[i] * 2)
def testUrowForceCopy(self):
"""
Test an unsigned row vector input parameter.
"""
x = np.random.randint(0, high=500, size=100)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
urow_in=x,
copy_all_inputs=True)
self.assertEqual(output['urow_out'].shape[0], 100)
self.assertEqual(output['urow_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['urow_out'][i], x[i] * 2)
def testMatrixAndInfoNumpy(self):
"""
Test that we can pass a matrix with all numeric features.
"""
x = np.random.rand(100, 10)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_and_info_in=z)
self.assertEqual(output['matrix_and_info_out'].shape[0], 100)
self.assertEqual(output['matrix_and_info_out'].shape[1], 10)
for i in range(10):
for j in range(100):
self.assertEqual(output['matrix_and_info_out'][j, i], x[j, i] * 2.0)
def testMatrixAndInfoNumpyForceCopy(self):
"""
Test that we can pass a matrix with all numeric features.
"""
x = np.random.rand(100, 10)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_and_info_in=x,
copy_all_inputs=True)
self.assertEqual(output['matrix_and_info_out'].shape[0], 100)
self.assertEqual(output['matrix_and_info_out'].shape[1], 10)
for i in range(10):
for j in range(100):
self.assertEqual(output['matrix_and_info_out'][j, i], x[j, i] * 2.0)
def testMatrixAndInfoPandas(self):
"""
Test that we can pass a matrix with some categorical features.
"""
x = pd.DataFrame(np.random.rand(10, 4), columns=list('abcd'))
x['e'] = pd.Series(['a', 'b', 'c', 'd', 'a', 'b', 'e', 'c', 'a', 'b'],
dtype='category')
z = x.copy(deep=True)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_and_info_in=z)
self.assertEqual(output['matrix_and_info_out'].shape[0], 10)
self.assertEqual(output['matrix_and_info_out'].shape[1], 5)
cols = list('abcde')
for i in range(4):
for j in range(10):
self.assertEqual(output['matrix_and_info_out'][j, i], z[cols[i]][j] * 2)
for j in range(10):
self.assertEqual(output['matrix_and_info_out'][j, 4], z[cols[4]][j])
def testMatrixAndInfoPandasForceCopy(self):
"""
Test that we can pass a matrix with some categorical features.
"""
x = pd.DataFrame(np.random.rand(10, 4), columns=list('abcd'))
x['e'] = pd.Series(['a', 'b', 'c', 'd', 'a', 'b', 'e', 'c', 'a', 'b'],
dtype='category')
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_and_info_in=x,
copy_all_inputs=True)
self.assertEqual(output['matrix_and_info_out'].shape[0], 10)
self.assertEqual(output['matrix_and_info_out'].shape[1], 5)
cols = list('abcde')
for i in range(4):
for j in range(10):
self.assertEqual(output['matrix_and_info_out'][j, i], x[cols[i]][j] * 2)
for j in range(10):
self.assertEqual(output['matrix_and_info_out'][j, 4], x[cols[4]][j])
def testIntVector(self):
"""
Test that we can pass a vector of ints and get back that same vector but
with the last element removed.
"""
x = [1, 2, 3, 4, 5]
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
vector_in=x)
self.assertEqual(output['vector_out'], [1, 2, 3, 4])
def testStringVector(self):
"""
Test that we can pass a vector of strings and get back that same vector but
with the last element removed.
"""
x = ['one', 'two', 'three', 'four', 'five']
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
str_vector_in=x)
self.assertEqual(output['str_vector_out'],
['one', 'two', 'three', 'four'])
def testModel(self):
"""
First create a GaussianKernel object, then send it back and make sure we get
the right double value.
"""
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
build_model=True)
output2 = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
model_in=output['model_out'])
self.assertEqual(output2['model_bw_out'], 20.0)
def testOneDimensionNumpyMatrix(self):
"""
Test that we can pass one dimension matrix from matrix_in
"""
x = np.random.rand(100)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
smatrix_in=z)
self.assertEqual(output['smatrix_out'].shape[0], 100)
self.assertEqual(output['smatrix_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['smatrix_out'][i, 0], x[i] * 2)
def testOneDimensionNumpymatrixForceCopy(self):
"""
Test that we can pass one dimension matrix from matrix_in
"""
x = np.random.rand(100)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
smatrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['smatrix_out'].shape[0], 100)
self.assertEqual(output['smatrix_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['smatrix_out'][i, 0], x[i] * 2)
def testOneDimensionNumpyUmatrix(self):
"""
Same as testNumpyMatrix() but with an unsigned matrix and One Dimension Matrix.
"""
x = np.random.randint(0, high=500, size=100)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
s_umatrix_in=z)
self.assertEqual(output['s_umatrix_out'].shape[0], 100)
self.assertEqual(output['s_umatrix_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['s_umatrix_out'][i, 0], x[i] * 2)
def testOneDimensionNumpyUmatrixForceCopy(self):
"""
Same as testNumpyMatrix() but with an unsigned matrix and One Dimension Matrix.
"""
x = np.random.randint(0, high=500, size=100)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
s_umatrix_in=x,
copy_all_inputs=True)
self.assertEqual(output['s_umatrix_out'].shape[0], 100)
self.assertEqual(output['s_umatrix_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['s_umatrix_out'][i, 0], x[i] * 2)
def testTwoDimensionCol(self):
"""
Test that we pass Two Dimension column vetor as input paramter
"""
x = np.random.rand(100,1)
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
col_in=z)
self.assertEqual(output['col_out'].shape[0], 100)
self.assertEqual(output['col_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['col_out'][i], x[i] * 2)
def testTwoDimensionColForceCopy(self):
"""
Test that we pass Two Dimension column vetor as input paramter
"""
x = np.random.rand(100,1)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
col_in=x,
copy_all_inputs=True)
self.assertEqual(output['col_out'].shape[0], 100)
self.assertEqual(output['col_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['col_out'][i], x[i] * 2)
def testTwoDimensionUcol(self):
"""
Test that we pass Two Dimension unsigned column vector input parameter.
"""
x = np.random.randint(0, high=500, size=[100, 1])
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
ucol_in=z)
self.assertEqual(output['ucol_out'].shape[0], 100)
self.assertEqual(output['ucol_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['ucol_out'][i], x[i] * 2)
def testTwoDimensionUcolForceCopy(self):
"""
        Test that we can pass a two-dimensional unsigned column vector as an input parameter while forcing a copy.
"""
x = np.random.randint(0, high=500, size=[100, 1])
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
ucol_in=x,
copy_all_inputs=True)
self.assertEqual(output['ucol_out'].shape[0], 100)
self.assertEqual(output['ucol_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['ucol_out'][i], x[i] * 2)
def testTwoDimensionRow(self):
"""
Test a two dimensional row vector input parameter.
"""
x = np.random.rand(100,1)
z =copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
row_in=x)
self.assertEqual(output['row_out'].shape[0], 100)
self.assertEqual(output['row_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['row_out'][i], z[i] * 2)
def testTwoDimensionRowForceCopy(self):
"""
Test a two dimensional row vector input parameter.
"""
x = np.random.rand(100,1)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
row_in=x,
copy_all_inputs=True)
self.assertEqual(output['row_out'].shape[0], 100)
self.assertEqual(output['row_out'].dtype, np.double)
for i in range(100):
self.assertEqual(output['row_out'][i], x[i] * 2)
def testTwoDimensionUrow(self):
"""
Test an unsigned two dimensional row vector input parameter.
"""
x = np.random.randint(0, high=500, size=[100, 1])
z = copy.deepcopy(x)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
urow_in=z)
self.assertEqual(output['urow_out'].shape[0], 100)
self.assertEqual(output['urow_out'].dtype, np.dtype('intp'))
for i in range(100):
self.assertEqual(output['urow_out'][i], x[i] * 2)
def testTwoDimensionUrowForceCopy(self):
"""
Test an unsigned two dimensional row vector input parameter.
"""
x = np.random.randint(5, high=500, size=[1, 101])
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
urow_in=x,
copy_all_inputs=True)
self.assertEqual(output['urow_out'].shape[0], 101)
self.assertEqual(output['urow_out'].dtype, np.dtype('intp'))
for i in range(101):
self.assertEqual(output['urow_out'][i], x[0][i] * 2)
def testOneDimensionMatrixAndInfoPandas(self):
"""
        Test that we can pass a one-dimensional matrix with some categorical features.
"""
x = pd.DataFrame(np.random.rand(10))
z = x.copy(deep=True)
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_and_info_in=z[0])
self.assertEqual(output['matrix_and_info_out'].shape[0], 10)
for i in range(10):
self.assertEqual(output['matrix_and_info_out'][i, 0], x[0][i] * 2)
def testOneDimensionMatrixAndInfoPandasForceCopy(self):
"""
        Test that we can pass a one-dimensional matrix with some categorical features while forcing a copy.
"""
x = pd.DataFrame(np.random.rand(10))
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
matrix_and_info_in=x[0],
copy_all_inputs=True)
self.assertEqual(output['matrix_and_info_out'].shape[0], 10)
for j in range(10):
self.assertEqual(output['matrix_and_info_out'][j, 0], x[0][j]*2)
def testThrownException(self):
"""
        Test that passing arguments of the wrong type raises a TypeError.
"""
self.assertRaises(TypeError,
lambda : test_python_binding(string_in=10,
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=10.0,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in='bad',
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
flag2=10))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
matrix_in= 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
matrix_in= 1))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
matrix_and_info_in = 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
copy_all_inputs = 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
col_in = 10))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
row_in = 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
str_vector_in = 'bad'))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
urow_in = 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
ucol_in = 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
umatrix_in = 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
verbose = 10))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
flag1=True,
vector_in = 10.0))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=False,
col_req_in=[1.0],
flag1=True))
self.assertRaises(TypeError,
lambda : test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=False,
flag1=True))
def testModelForceCopy(self):
"""
First create a GaussianKernel object, then send it back and make sure we get
the right double value.
"""
output = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
build_model=True)
output2 = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
model_in=output['model_out'],
copy_all_inputs=True)
output3 = test_python_binding(string_in='hello',
int_in=12,
double_in=4.0,
mat_req_in=[[1.0]],
col_req_in=[1.0],
model_in=output['model_out'])
self.assertEqual(output2['model_bw_out'], 20.0)
self.assertEqual(output3['model_bw_out'], 20.0)
if __name__ == '__main__':
unittest.main()
|
# `from __future__` has to be the very first thing in a module
# otherwise a syntax error is raised
from __future__ import annotations # type: ignore # noqa # Python 3.6 linters complain
from dataclasses import dataclass, fields
from enum import Enum
import pytest
from omegaconf import OmegaConf, ValidationError
class Height(Enum):
SHORT = 0
TALL = 1
@dataclass
class SimpleTypes:
num: int = 10
pi: float = 3.1415
is_awesome: bool = True
height: "Height" = Height.SHORT # test forward ref
description: str = "text"
def simple_types_class() -> None:
# confirm that the type annotations are in fact stored as strings
    # i.e., that the `from __future__` import worked
num_field = fields(SimpleTypes)[0]
assert isinstance(num_field.type, str)
assert num_field.type == "int"
conf = OmegaConf.structured(SimpleTypes)
assert conf.num == 10
assert conf.pi == 3.1415
assert conf.is_awesome is True
assert conf.height == Height.SHORT
assert conf.description == "text"
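# Editorial sketch (not part of the original tests): because
# `from __future__ import annotations` stores annotations as strings, they can
# be resolved back to concrete types with typing.get_type_hints(), e.g.:
#
#     import typing
#     assert typing.get_type_hints(SimpleTypes)["num"] is int
#     assert typing.get_type_hints(SimpleTypes)["height"] is Height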
def conversions() -> None:
conf: SimpleTypes = OmegaConf.structured(SimpleTypes)
conf.num = 20
conf.num = "20" # type: ignore
assert conf.num == 20
with pytest.raises(ValidationError):
# ValidationError: "one" cannot be converted to an integer
conf.num = "one" # type: ignore
|
#!/usr/bin/env python
kingdoms = ['Bacteria', 'Protozoa', 'Chromista', 'Plantae', 'Fungi', 'Animalia']
print(kingdoms[0])
print(kingdoms[5])
print(kingdoms[0:3])
print(kingdoms[2:5])
print(kingdoms[4:])
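# Editorial note: negative indices count from the end of the list,
# e.g. kingdoms[-1] is 'Animalia' and kingdoms[-2:] is ['Fungi', 'Animalia'].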
|
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from .forms import *
from django.http import HttpResponse
from sep.settings import MEDIA_URL
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from django.core.mail import EmailMessage
from django.views import View
from .models import *
from dashboard.models import *
from authenticate_app.tokens import account_activation_token
from sep import settings
from dashboard import views
# Create your views here
def index(request):
books = Book.objects.all()
categories = Category.objects.all()
context = {
"books":books,
"categories":categories,
}
if request.user.is_authenticated:
pass
else:
pass
return render(request,'store/index.html',context)
@login_required(login_url="/auth/login")
def user_profile(request):
if request.user.is_authenticated:
if request.method == 'POST':
print(request.POST)
user_form = UserForm(data=request.POST, instance=request.user)
user = UserProfileInfo.objects.get(user=request.user)
print(user)
form = UserProfileInfoForm(data=request.POST, instance=request.user)
print(form)
type_form = None
if user:
if user.type == '1': # Student
type_form = StudentForm(data=request.POST,instance=request.user)
elif user.type == '2': # Faculty
type_form = FacultyForm(data=request.POST,instance=request.user)
else: # Alumni
type_form = AlumniForm(data=request.POST,instance=request.user)
print(type_form)
if user_form.is_valid() and form.is_valid() and type_form.is_valid():
u = user_form.save()
t = form.save(False)
form.user = u
form.save()
custom_form = type_form.save(commit=False)
#print(custom_form)
type_form.user = u
type_form.save()
args = {}
args['user_form'] = user_form
args['form'] = form
args['type_form'] = type_form
args['success'] = "Profile updated successfully!"
return render(request, 'store/my_profile.html', args)
else:
return render(request, 'store/my_profile.html', {"error":str(form.errors) + " " + str(type_form.errors)})
else:
user_form = UserForm(instance=request.user)
print(request.user)
user = UserProfileInfo.objects.get(user=request.user)
print(user)
form = UserProfileInfoForm(instance=user)
print(form)
type_form = None
if user:
if user.type == '1': # Student
user = Student.objects.get(user=request.user)
type_form = StudentForm(instance=user)
elif user.type == '2': # Faculty
user = Faculty.objects.get(user=request.user)
type_form = FacultyForm(instance=user)
else: # Alumni
user = Alumni.objects.get(user=request.user)
type_form = AlumniForm(instance=user)
print(type_form)
args = {}
args['user_form'] = user_form
args['form'] = form
args['type_form'] = type_form
return render(request, 'store/my_profile.html', args)
else:
return redirect('authenticate_app:user_login')
def about_us(request):
return render(request,'store/about_us.html')
def contact_us(request):
return render(request,'store/contact_us.html')
@login_required(login_url="/auth/login")
def send_request(request, book):
current_site = get_current_site(request)
mail_subject = 'Share-E-Pustak: A Request for your Book'
message = render_to_string('store/book_request_link.html', {
'book': book,
'request_user': request.user,
'domain': current_site.domain,
'uid': urlsafe_base64_encode(force_bytes(book.user.pk)),
'token':account_activation_token.make_token(book.user),
})
to_email = book.user.email
email = EmailMessage(mail_subject, message,settings.EMAIL_HOST_USER, to=[to_email])
email.send()
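# Editorial note (sketch; the URL pattern below is hypothetical, not taken from
# this project's urls.py): the link rendered by 'store/book_request_link.html'
# is expected to route back to request_accepted() with the encoded uid and the
# token, e.g. something like:
#
#     path('request_accepted/<uidb64>/<token>/<book>/', views.request_accepted)
#
# where account_activation_token.check_token() validates the pair below.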
def request_accepted(request, uidb64, token, book):
try:
uid = force_text(urlsafe_base64_decode(uidb64))
user = User.objects.get(pk=uid)
except(TypeError, ValueError, OverflowError, User.DoesNotExist):
user = None
if user is not None and account_activation_token.check_token(user, token):
views.degrade_stock(book.id, book.isbn)
# current_site = get_current_site(request)
# mail_subject = 'Share-E-Pustak: Your request has been accepted'
        # message = "Hi, your request for the book " + book.title + " has been accepted."
# to_email = book.user.email
# email = EmailMessage(mail_subject, message,settings.EMAIL_HOST_USER, to=[to_email])
# email.send()
return HttpResponse('Thank you! The donee is now notified.')
else:
return HttpResponse('Link is invalid!')
|
import radio  # import radio
from mpython import *  # import mpython
import music  # import music
CH = 1  # channel variable
radio.on()
radio.config(channel=CH)  # set the radio channel
btna_stat, btnb_stat, touch_stat = [0] * 3  # button state flags
def set_channel():  # radio channel setting function
    global CH, btna_stat, btnb_stat
    if button_a.value() == 0 and btna_stat == 0:  # button A: decrease channel
        CH -= 1
        if CH < 1:
            CH = 13
        radio.config(channel=CH)  # set the radio channel
        oled.DispChar("Channel: %02d" % CH, 25, 5)  # show the channel
        oled.show()
        btna_stat = 1
    elif button_a.value() == 1:
        btna_stat = 0
    if button_b.value() == 0 and btnb_stat == 0:  # button B: increase channel
        CH += 1
        if CH > 13:
            CH = 1
        radio.config(channel=CH)  # set the radio channel
        oled.DispChar("Channel: %02d" % CH, 25, 5)  # show the channel
        oled.show()
        btnb_stat = 1
    elif button_b.value() == 1:
        btnb_stat = 0
def ding():  # play the "ding" sound
    global touch_stat
    if touchPad_T.read() < 300 and touch_stat == 0:  # on touch, play "ding" and broadcast it
        music.pitch(500, 100, wait=False)  # play "ding"
        radio.send('ding')  # broadcast "ding" over the radio
        touch_stat = 1
    elif touchPad_T.read() >= 300:
        touch_stat = 0
oled.DispChar("Channel: %d" % CH, 25, 5)  # startup display
oled.DispChar("电报机:触摸T", 25, 25)  # displays "Telegraph: touch T"
oled.show()
while True:
    set_channel()  # adjust the channel
    ding()  # play/broadcast ding on touch
    temp = radio.receive()  # receive a radio broadcast
    if temp == 'ding':  # when a "ding" broadcast is received, play the ding sound
        rgb.fill((0, 10, 0))  # indicator LED
        rgb.write()
        music.pitch(500, 100, wait=False)
    else:
        rgb.fill((0, 0, 0))
        rgb.write()
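# Editorial note: two boards only hear each other's 'ding' broadcasts when they
# are set to the same radio channel (1-13, adjusted with buttons A/B above).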
|
"""
MyHDL utility functions
(c) 2019 The Bonfire Project
License: See LICENSE
"""
from myhdl import intbv
from math import log2
def signed_resize(v,size):
result = intbv(0)[size:]
result[len(v):]=v
sign = v[len(v)-1]
for i in range(len(v),size):
result[i] =sign
return result
def int_log2(v):
l = log2(v)
assert int(l)==l, "{} must be power of 2".format(v)
return int(l)
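# Usage sketch (editorial, not part of the original module): sign-extending a
# 4-bit two's-complement value 0b1110 (-2) to 8 bits gives 0b11111110 (254 when
# read as an unsigned intbv):
#
#     >>> from myhdl import intbv
#     >>> int(signed_resize(intbv(0b1110)[4:], 8))
#     254
#     >>> int_log2(8)
#     3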
|
import numpy as np
import glob
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import matplotlib.colors as color
import math
def ParamToInten(AoP, DoP, Inten, angle):
return ((Inten/2.0) * (1 + DoP*np.cos(math.radians(2*AoP) - 2*math.radians(angle))))
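# Sanity check (editorial, treating DoP as a fraction): for fully polarized
# light with AoP = 0 deg, an analyzer at angle = 0 deg passes the full
# intensity, while angle = 90 deg extinguishes it:
#
#     ParamToInten(0, 1.0, I, 0)   -> I
#     ParamToInten(0, 1.0, I, 90)  -> 0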
if __name__ == "__main__":
imagedir = "output/image/"
listimage = glob.glob(f"{imagedir}*.tiff")
for pth in listimage:
img = color.rgb_to_hsv(mpimg.imread(pth))
#array = np.zeros_like(img)
AoP = img[:, :, 0] * 360.0
DoP = img[:, :, 1] * 100.0
Inten = img[:, :, 2] / 255.0
print(np.amax(AoP))
# plt.imshow(img)
# plt.show()
|
import re
from typing import List
import requests
from youtube_series_downloader.core.channel import Channel
from youtube_series_downloader.core.video import Video
class YoutubeGateway:
__RSS_PREFX: str = "https://www.youtube.com/feeds/videos.xml?channel_id="
__REGEX = re.compile(
r"<entry>.*?<yt:videoId>(.*?)<\/yt:videoId>.*?<title>(.*?)<\/title>.*?<published>(.*?)<\/published>.*?<\/entry>",
re.DOTALL,
)
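    # The regex above expects each feed entry to contain, in this order, a
    # yt:videoId, a title, and a published timestamp (editorial sketch of the
    # matched shape; the date value is illustrative):
    #
    #     <entry>
    #       <yt:videoId>VIDEO_ID</yt:videoId>
    #       <title>VIDEO_TITLE</title>
    #       <published>2020-01-01T00:00:00+00:00</published>
    #     </entry>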
@staticmethod
def get_videos(channel: Channel) -> List[Video]:
url = YoutubeGateway.__RSS_PREFX + channel.id
xml = requests.get(url).text
matches = YoutubeGateway.__REGEX.findall(xml)
matches.reverse()
videos = []
for groups in matches:
id = groups[0]
title = groups[1]
date = groups[2]
video = Video(id, date, title)
videos.append(video)
return videos
|
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import namedtuple
import logging
# When a task is started, it will register a set of triggers
# which, for a specific kind of event (see below) and a further given
# filtering condition, it will call the specified event_handler function
Trigger = namedtuple('Trigger', ('type', 'condition', 'event_handler'))
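# Minimal usage sketch (editorial; ExampleEvent, observer and the lambdas are
# hypothetical, not part of this module):
#
#     class ExampleEvent:
#         pass
#
#     observer = object()
#     manager = EventManager()
#     manager.register(observer, Trigger(type=ExampleEvent,
#                                        condition=lambda event: True,
#                                        event_handler=lambda obs, event: None))
#     manager.raise_event(ExampleEvent())  # returns True (handled)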
class EventManager:
def __init__(self):
self.triggers = {}
self.logger = logging.getLogger(__name__)
def register(self, observer, trigger):
'''
        Register a trigger (a namedtuple containing an event type, an
        ActivationCondition - a function/functor - and an EventHandler
        - another function/functor -).
'''
        # initialize a list for each type of event (it's just an optimization)
if trigger.type not in self.triggers:
self.triggers[trigger.type] = []
self.logger.debug(
"Registering Trigger for {0} event with handler {1} of object of "
"type {2}".format(trigger.type.__name__, trigger.event_handler,
observer.__class__.__name__))
# save the trigger
self.triggers[trigger.type].append((observer, trigger))
def deregister(self, observer, trigger):
self.triggers[trigger.type].remove((observer, trigger))
def clear(self):
'''
Deregisters all triggers
'''
self.triggers.clear()
def raise_event(self, event):
handled = False
# check if we have any trigger at all of this type of event
if event.__class__ in self.triggers:
# for all the triggers registered for this type of event
for observer, trigger in self.triggers[event.__class__]:
# check if the filtering condition is a go
condition_outcome = trigger.condition(event)
if condition_outcome:
# save, if the event expects it, the outcome of the
# condition checking
try:
event.condition_outcome = condition_outcome
except AttributeError:
self.logger.debug("Couldn't save condition outcome for "
"event {0}".format(event))
self.logger.debug('{0} handled by {1}'.format(
event, trigger.event_handler))
# call the event handler
trigger.event_handler(observer, event)
# remember we handled the event and
# keep on processing other events
handled = True
return handled
|
from mesa import Agent, Model
from mesa.space import MultiGrid
import networkx as nx
class RunnerAgent(Agent):
''' This class will represent runners in this model '''
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.reset_runner_info()
self.day_to_run = []
def reset_runner_info(self):
# Personal attributes
self.type = 'runner'
self.preference = None
self.state = 'rest'
self.distance_gone = 0
self.estimate_distance_back_home = 0
self.estimate_distance_back_home_previous = 0
self.estimate_distance_cell_to_init_point = 0
self.check_closer_or_farer = 0
self.want_to_go_home = False
self.begin_going_home = False
        # Probability that the runner chooses an alternative road only once at an intersection on the way home, instead of following the shortest path
self.possibility_change_way_home = 0.2
        # Probability that the runner chooses an alternative road only once at an intersection on the way to the park, instead of following the shortest path
self.possibility_change_way_park = 0.1
# NODE GONE MEMORY
self.memory_node_osmid = []
self.memory_node_coord = []
# NODE DEAD END MEMORY
self.memory_deadend_osmid = []
self.memory_deadend_coord = []
# ADDITION MEMORY ON THE WAY GO HOME
self.memory_node_osmid_way_home = []
self.memory_node_coord_way_home = []
# EDGE/ROAD ATTRIBUTES
self.init_road_name = None
self.init_road_index = None
self.init_road_type = None
self.init_index = None # choose one index from init road index
self.current_road_name = None
self.current_road_index = None
self.current_road_type = None
self.current_road_cells = None
self.current_road_length = None
self.amount_added = None
# index of cell within the list cells of current edge
self.current_cell_index_on_roadset = None
self.num_cells_gone_on_road = None
# END POINTS OF A STRAIGHT ROAD
self.endpoint1 = None
self.endpoint2 = None
self.start_from_one_endpoint = False
self.finish_at_one_endpoint = False
# total length of this current straight road
self.straight_road_length = 0
# NODE ATTRIBUTES
self.current_node_index = None
self.current_node_position = None
self.next_node_index = None # osmid of next node
self.next_node_position = None # grid coord of next node/intersection.
self.previous_node_index = None
self.previous_node_position = None
self.to_node = None # 'u' or 'v'
# CONSIDER NEXT NODE AT INTERSECTION
self.possible_next_node_index = []
self._set_consider_list_node()
# type of roads around, not including previous road
self.type_roads_around = []
# ON THE PARK FLAG
self.on_the_park = False
self.target_node_coming_park = None
self.current_set_node_on_park = None
self.target_node_running_around_park = None
self.start_on_road = False
self.key = None
# POSSIBILITY RATE
# to make a turn at the traffic signals point
self.pr_turn_at_traffic_signals = None
# to go straight (get over) at the traffic signals point
self.pr_go_straight_at_traffic_signals = None
self.pr_turn_on_ter = None # to make a turn on tertiary roads
self.pr_turn_on_sec = None # to make a turn on secondary roads
self.pr_turn_on_pri = None # to make a turn on primary roads
self.pr_turn_on_res = None # to make a turn on residential roads
# to go to the park at first
self.pr_go_to_park = None
# to continue its running on the park when come across
self.pr_continue_on_park = None
        # distance the runner wants to run around the park
self.distance_run_around_park = None
        # the distance at which, once reached, the runner will either go home or leave the park and continue running
self.distance_will_get_out_park = None
# follow preference 1 when start on residential road
self.pr_start_pref1_on_res = None
# follow preference 2 when start on tertiary or secondary road
self.pr_start_pref2_on_ter_sec = None
# Preference while running
self.preference = None
def _set_consider_list_node(self):
self.list_node_on_same_road_name = []
self.list_node_on_same_road_type = []
self.list_node_on_other_road_type = []
def get_initial_road_info(self):
# Flag start on road for calculating the distance
self.start_on_road = True
self.num_cells_gone_on_road = 0
# Figure out what road the runner is standing now
for key, value in self.model.cells_of_edge.items():
if self.init_position in value:
u, v, k = key
# EDGE/ROAD
# Get road info
self.init_road_name = self.model.graph_edges.loc[key]['name']
self.current_road_name = self.model.graph_edges.loc[key]['name']
self.current_road_type = self.model.graph_edges.loc[key]['highway']
self.current_road_length = self.model.graph_edges.loc[key]['length']
# Find list of all cells in this current road
self.current_road_cells = self.model.cells_of_edge[key]
# store the tuple key of this initial road
self.init_road_index = key
self.current_road_index = key
self.init_road_type = self.model.graph_edges.loc[key]['highway']
# NODES
# Choose one of these nodes as next destination
self.next_node_index = self.random.choice([u, v])
# Find the pos of next node
self.next_node_position = self.model.cell_of_node[self.next_node_index]
# Find current index of cell start, in the list of the current road cells
self.current_cell_index_on_roadset = self.current_road_cells.index(
self.init_position)
# Store cell behind as memory so runner could avoid at first
if self.next_node_index != u:
self.memory_node_osmid.append(u)
self.memory_node_coord.append(self.model.cell_of_node[u])
elif self.next_node_index != v:
self.memory_node_osmid.append(v)
self.memory_node_coord.append(self.model.cell_of_node[v])
# check to see runner goes to u or v
if self.next_node_index == u:
self.to_node = 'u'
else:
self.to_node = 'v'
break
def reset_possibility_rate(self):
# POSSIBILITY RATE AT AN INTERSECTION
self.pr_turn_at_traffic_signals = 0.8
self.pr_go_straight_at_traffic_signals = 0.2
# POSSIBILITY RATE TO TURN
if self.init_road_type == 'primary':
self.pr_turn_on_pri = None
self.pr_turn_on_sec = 0.5
self.pr_turn_on_ter = 0.5
self.pr_turn_on_res = 0.2
elif self.init_road_type == 'tertiary' or self.init_road_type == 'secondary':
self.pr_turn_on_pri = 0.4
self.pr_turn_on_sec = 0.4
self.pr_turn_on_ter = 0.4
self.pr_turn_on_res = 0.1
elif self.init_road_type == 'residential':
if self.gender == 'male':
self.pr_turn_on_pri = 0.2
self.pr_turn_on_sec = 0.4
self.pr_turn_on_ter = 0.4
self.pr_turn_on_res = 0.3
elif self.gender == 'female':
self.pr_turn_on_pri = 0.2
self.pr_turn_on_sec = 0.4
self.pr_turn_on_ter = 0.4
self.pr_turn_on_res = 0.3
# POSSIBILITY TO GO TO PARK AT FIRST AND CONTINUE RUNNING ON THE PARK (when come across)
# POSSIBILITY TO START WITH PREFERENCE 1 AT FIRST
if self.gender == 'male':
if self.fitness_level == 'low':
self.pr_go_to_park = 0.4
self.pr_start_pref1_on_res = 0.3
self.pr_start_pref2_on_ter_sec = 0.9
elif self.fitness_level == 'moderate':
self.pr_go_to_park = 0.35
self.pr_start_pref1_on_res = 0.4
self.pr_start_pref2_on_ter_sec = 0.85
elif self.fitness_level == 'high':
self.pr_go_to_park = 0.2
self.pr_start_pref1_on_res = 0.6
self.pr_start_pref2_on_ter_sec = 0.8
elif self.fitness_level == 'very_high':
self.pr_go_to_park = 0.15
self.pr_start_pref1_on_res = 0.7
self.pr_start_pref2_on_ter_sec = 0.8
self.pr_continue_on_park = 0.12
if self.gender == 'female':
if self.fitness_level == 'low':
self.pr_go_to_park = 0.6
self.pr_start_pref1_on_res = 0.1
self.pr_start_pref2_on_ter_sec = 0.9
elif self.fitness_level == 'moderate':
self.pr_go_to_park = 0.45
self.pr_start_pref1_on_res = 0.2
self.pr_start_pref2_on_ter_sec = 0.85
elif self.fitness_level == 'high':
self.pr_go_to_park = 0.4
self.pr_start_pref1_on_res = 0.3
self.pr_start_pref2_on_ter_sec = 0.8
elif self.fitness_level == 'very_high':
self.pr_go_to_park = 0.3
self.pr_start_pref1_on_res = 0.4
self.pr_start_pref2_on_ter_sec = 0.8
self.pr_continue_on_park = 0.2
# distance run around park
self.distance_run_around_park = None
# the distance point to go home or get out park
self.distance_will_get_out_park = None
def _set_distance_run_around_park(self):
        # SET THE DISTANCE THAT THE RUNNER WILL RUN BEFORE BEING ASSIGNED preference 2 TO CONTINUE
if self.fitness_level == 'low' or self.fitness_level == 'moderate':
portion = self.random.randrange(60, 90) # 60% - 90%
elif self.fitness_level == 'high' or self.fitness_level == 'very_high':
portion = self.random.randrange(30, 60) # 30% - 60%
self.distance_run_around_park = (
portion / 100) * (self.distance_goal - self.distance_gone - self.estimate_distance_back_home)
# reach this distance, then go home or get out of park
self.distance_will_get_out_park = self.distance_run_around_park + self.distance_gone
def get_ready(self):
self.state = '_continue_forward'
# CHECK IF WANT TO GO TO PARK AT FIRST
# find the length of shortest path
length_shortest_path = None
for index in self.model.footway_nodes_access:
length = nx.shortest_path_length(
self.model.graph, self.next_node_index, index, weight='length')
if length_shortest_path == None or length < length_shortest_path:
length_shortest_path = length
# if len of shortest path < 480m and fall within rate
if length_shortest_path < 480 and self.random.random() < self.pr_go_to_park:
self.preference = 'preference3'
if self.preference == None:
if self.current_road_type == 'residential':
# check if want to start with pref 1
if self.random.random() < self.pr_start_pref1_on_res:
self.preference = 'preference1'
# otherwise, assign pref 2
else:
self.preference = 'preference2'
elif self.current_road_type == 'tertiary' or self.current_road_type == 'secondary':
# check if want to start with pref 2
if self.random.random() < self.pr_start_pref2_on_ter_sec:
self.preference = 'preference2'
else:
self.preference = 'preference1'
elif self.current_road_type == 'primary':
self.preference = 'preference2'
else:
# for a few agents in special area (footway but not footway in real life)
self.preference = 'preference1'
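    # Editorial summary of the preferences dispatched in step() below:
    #   preference1 - run as straight as possible while avoiding repeated roads
    #   preference2 - run back and forth along a single straight road
    #   preference3 - head for the park, run there a while, then switch or go home
    #   preference4 - return to the start point, mostly along the shortest path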
def step(self):
        # Reassign preference 1 in place of preference 2 if the runner is on a one-way road (avoids an error)
if self.preference == 'preference2' and self.model.graph_edges.loc[self.current_road_index]['oneway'] == True:
self.preference = 'preference1'
# FOLLOW STATE MACHINE BY ITS OWN TYPE AND PREFERENCE
if self.want_to_go_home and self.pos == self.init_position:
self.state = 'rest'
elif not self.want_to_go_home:
if self.preference == 'preference1':
self.preference1()
elif self.preference == 'preference2':
self.preference2()
elif self.preference == 'preference3':
self.preference3()
elif self.want_to_go_home:
self.preference4()
# RUNNING ON ROADS AS STRAIGHT AS POSSIBLE, AND AVOID REPEATING ROUTES
def preference1(self):
if self.state == '_continue_forward':
self.continue_forward()
elif self.state == '_intersection':
self.intersection1()
# RUNNING ON A STRAIGHT ROAD BACK AND FORTH
def preference2(self):
if self.state == '_continue_forward':
self.continue_forward()
elif self.state == '_intersection':
self.intersection2()
    # GET TO THE PARK AS SOON AS POSSIBLE, RUN THERE FOR A WHILE, THEN GO HOME OR SWITCH TO ANOTHER PREFERENCE AND KEEP RUNNING
def preference3(self):
if self.state == '_continue_forward':
self.continue_forward()
elif self.state == '_intersection':
self.intersection3()
    # GET BACK TO THE INITIAL POSITION AS SOON AS POSSIBLE BY FOLLOWING THE SHORTEST PATH
def preference4(self):
if self.state == '_continue_forward':
self.continue_forward()
elif self.state == '_intersection':
self.intersection4()
def continue_forward(self):
        # About to reach the destination if at an endpoint (the 'before' node, u) of the road
if self.to_node == 'u' and self.current_cell_index_on_roadset == 0:
# update previous node before current node
self.previous_node_index = self.current_node_index
self.previous_node_position = self.current_node_position
self.current_node_index = self.next_node_index
self.current_node_position = self.next_node_position
# get to u position
self.next_position = self.next_node_position
# update distance
self._adding_distance_goal()
# change state
self.state = '_intersection'
        # About to reach the destination if at an endpoint (the 'after' node, v) of the road
elif self.to_node == 'v' and self.current_cell_index_on_roadset == len(self.current_road_cells) - 1:
# update previous node before current node
self.previous_node_index = self.current_node_index
self.previous_node_position = self.current_node_position
self.current_node_index = self.next_node_index
self.current_node_position = self.next_node_position
# get to v position
self.next_position = self.next_node_position
# update distance
self._adding_distance_goal()
# change state
self.state = '_intersection'
# Move to next cell if still on road
elif self.to_node == 'u':
self.current_cell_index_on_roadset = self.current_cell_index_on_roadset - 1
self.next_position = self.current_road_cells[self.current_cell_index_on_roadset]
elif self.to_node == 'v':
self.current_cell_index_on_roadset = self.current_cell_index_on_roadset + 1
self.next_position = self.current_road_cells[self.current_cell_index_on_roadset]
# Make a move
self._move_agent_update_attributes()
def switch_previous_and_current_node_index(self):
# update previous node before current node
self.previous_node_index = self.current_node_index
        self.previous_node_position = self.current_node_position
self.current_node_index = self.next_node_index
self.current_node_position = self.next_node_position
pass
def intersection1(self):
# STORE INTERSECTION FOR MEMORY
# intersection memory
self.memory_node_osmid.append(self.current_node_index)
self.memory_node_coord.append(self.current_node_position)
# dead end memory
if len(self.model.graph_edges.loc[self.current_node_index]['osmid']) == 1 and self.current_node_index not in self.init_road_index:
self.memory_deadend_osmid.append(self.current_node_index)
self.memory_deadend_coord.append(self.current_node_position)
# CHOOSE NEXT NODE IN THE SAME ROAD OR SAME TYPE
# find all possible nodes around
self.possible_next_node_index = self.model.node_connections[self.current_node_index]
# ignore all known dead ends
self.possible_next_node_index_copy = [
index for index in self.possible_next_node_index if index not in self.memory_deadend_osmid]
# ignore previous road
self.possible_next_node_index_copy1 = [
index for index in self.possible_next_node_index if index != self.previous_node_index]
# ignore all roads have been gone
self.possible_next_node_index_copy2 = [
index for index in self.possible_next_node_index if index not in self.memory_node_osmid]
# if at dead end for first time --> u turn (intersection of 1)
if len(self.possible_next_node_index_copy1) == 0:
self.next_node_index = self.possible_next_node_index_copy[0]
# encounter traffic signals
elif self.model.graph_nodes.loc[self.current_node_index]['highway'] == 'traffic_signals':
self.list_straight_road = [index for index in self.possible_next_node_index_copy1[:] if self.model.graph_edges.loc[(
self.current_node_index, index, 0)]['name'] == self.current_road_name]
# make a turn if don't have straight road or fall within the possibility rate of turning
if len(self.list_straight_road) == 0 or self.random.random() < self.pr_turn_at_traffic_signals:
self.next_node_index = self.random.choice([index for index in self.possible_next_node_index_copy1[:] if self.model.graph_edges.loc[(
self.current_node_index, index, 0)]['name'] != self.current_road_name])
# or go straight
else:
self.next_node_index = self.list_straight_road[0]
        # choose a random road if all surrounding roads have already been visited (intersection of 3-4)
elif len(self.possible_next_node_index_copy2) == 0:
self.next_node_index = self.random.choice(
self.possible_next_node_index_copy1)
        # otherwise, take the road that has the same name;
        # if there is no same-name road, choose a road of the same type;
        # if there is no same-type road, prefer primary, secondary, tertiary, or residential
else:
self._set_consider_list_node()
for index in self.possible_next_node_index_copy2:
self.current_road_index = (self.current_node_index, index, 0)
# list node on current road name
if self.model.graph_edges.loc[self.current_road_index]['name'] == self.current_road_name:
self.list_node_on_same_road_name.append(index)
# list node on current road type
elif self.model.graph_edges.loc[self.current_road_index]['highway'] == self.current_road_type:
self.list_node_on_same_road_type.append(index)
# list node on other road types
else:
self.list_node_on_other_road_type.append(index)
# prefer to choose same road name first
if len(self.list_node_on_same_road_name) != 0:
self.next_node_index = self.list_node_on_same_road_name[0]
# if not, choose same road type
elif len(self.list_node_on_same_road_type) != 0:
self.next_node_index = self.random.choice(
self.list_node_on_same_road_type)
# if not, choose the other road type
elif len(self.list_node_on_other_road_type) != 0:
self.next_node_index = self.random.choice(
self.list_node_on_other_road_type)
self._update_road_info()
self._check_next_node_in_same_or_next_cell()
self._make_move()
def intersection2(self):
# STORE INTERSECTION FOR MEMORY
# intersection memory
self.memory_node_osmid.append(self.current_node_index)
self.memory_node_coord.append(self.current_node_position)
# dead end memory
if len(self.model.graph_edges.loc[self.current_node_index]['osmid']) == 1 and self.current_node_index not in self.init_road_index:
self.memory_deadend_osmid.append(self.current_node_index)
self.memory_deadend_coord.append(self.current_node_position)
# find all possible nodes around
self.possible_next_node_index = self.model.node_connections[self.current_node_index]
# ignore previous road
self.possible_next_node_index_copy = [
index for index in self.possible_next_node_index if index != self.previous_node_index]
        # GET THE LIST OF SURROUNDING ROADS OF A DIFFERENT TYPE TO TURN ONTO; IF THERE IS NO DIFFERENT TYPE, THE LIST INCLUDES SAME-TYPE ROADS THAT DO NOT HAVE THE SAME NAME (NOT STRAIGHT AHEAD), NOT INCLUDING THE PREVIOUS ROAD
self.type_roads_around = []
self.type_roads_around_nodes = []
if len(self.possible_next_node_index_copy) != 0:
# IGNORE STRAIGHT ROAD (ROAD HAS SAME NAME)
self.possible_next_node_index_copy3 = []
for index in self.possible_next_node_index_copy:
if self.model.graph_edges.loc[(self.current_node_index, index, 0)]['name'] != self.current_road_name:
self.possible_next_node_index_copy3.append(index)
for index in self.possible_next_node_index_copy3:
type_road = self.model.graph_edges.loc[(
self.current_node_index, index, 0)]['highway']
self.type_roads_around.append(type_road)
self.type_roads_around_nodes.append(index)
# GET OUT OF A STRAIGHT ROAD THAT HAS LENGTH < 300
if self.straight_road_length < 300 and self.endpoint1 != None and self.endpoint2 != None:
if len(self.possible_next_node_index_copy) > 0:
self.next_node_index = self.random.choice(
self.possible_next_node_index_copy)
self._update_endpoint_when_make_a_turn()
else:
self.next_node_index = self.random.choice(
self.possible_next_node_index)
# POSSIBILITY TO TURN WHILE RUNNING STRAIGHT
# turn to tertiary
# rate != None, have tertiary road to turn, fall within rate
elif self.pr_turn_on_ter != None and ('tertiary' in self.type_roads_around) and self.random.random() < self.pr_turn_on_ter:
index = self.type_roads_around.index('tertiary')
self.next_node_index = self.type_roads_around_nodes[index]
self._update_endpoint_when_make_a_turn()
# turn to secondary
# rate != None, have secondary road to turn, fall within rate
elif self.pr_turn_on_sec != None and ('secondary' in self.type_roads_around) and self.random.random() < self.pr_turn_on_sec:
index = self.type_roads_around.index('secondary')
self.next_node_index = self.type_roads_around_nodes[index]
self._update_endpoint_when_make_a_turn()
# turn to residential
# rate != None, have residential road to turn, fall within rate
elif self.pr_turn_on_res != None and ('residential' in self.type_roads_around) and self.random.random() < self.pr_turn_on_res:
index = self.type_roads_around.index('residential')
self.next_node_index = self.type_roads_around_nodes[index]
self._update_endpoint_when_make_a_turn()
# turn to primary
# rate != None, have primary road to turn, fall within rate
elif self.pr_turn_on_pri != None and ('primary' in self.type_roads_around) and self.random.random() < self.pr_turn_on_pri:
index = self.type_roads_around.index('primary')
self.next_node_index = self.type_roads_around_nodes[index]
self._update_endpoint_when_make_a_turn()
# ENCOUNTER TRAFFIC SIGNALS, MAKE A U TURN OR GO STRAIGHT
elif self.model.graph_nodes.loc[self.current_node_index]['highway'] == 'traffic_signals':
# go straight if fall within the possibility rate
self.list_straight_road = [index for index in self.possible_next_node_index_copy[:] if self.model.graph_edges.loc[(
self.current_node_index, index, 0)]['name'] == self.current_road_name]
if len(self.list_straight_road) != 0 and self.random.random() < self.pr_go_straight_at_traffic_signals:
self.next_node_index = self.list_straight_road[0]
# otherwise, make U-turn
else:
self._set_node_to_make_u_turn_on_straight_road()
# update endpoint
self._update_endpoint_when_make_u_turn()
# CHOOSE NEXT NODE IN THE SAME ROAD (straight road)
# if at dead end for first time --> u turn (intersection of 1)
elif len(self.possible_next_node_index_copy) == 0 and len(self.model.graph_edges.loc[self.current_node_index]['osmid']) == 1:
self.next_node_index = self.previous_node_index
# Store one endpoint if not set yet
self._update_endpoint_when_make_u_turn()
else:
# choose road forward (straight road)
have_forward_road = False
for index in self.possible_next_node_index_copy:
self.current_road_index = (self.current_node_index, index, 0)
# list node on current road name
if self.model.graph_edges.loc[self.current_road_index]['name'] == self.current_road_name:
self.next_node_index = index
have_forward_road = True
break
# at intersection of 3, previous road is same road, 2 other roads are different roads. Therefore, U-turn
if not have_forward_road:
self._set_node_to_make_u_turn_on_straight_road()
# Store one endpoint if not set yet
self._update_endpoint_when_make_u_turn()
self._update_road_info()
self._check_next_node_in_same_or_next_cell()
self._make_move()
def _update_endpoint_when_make_u_turn(self):
if self.endpoint1 == None:
self.endpoint1 = self.current_node_index
self.start_from_one_endpoint = True
elif self.endpoint2 == None:
self.endpoint2 = self.current_node_index
self.start_from_one_endpoint = False
def _update_endpoint_when_make_a_turn(self):
# reset endpoint for new road
self.straight_road_length = 0
self.endpoint1 = None
self.endpoint2 = None
# since runner could turn to the middle of road
self.start_from_one_endpoint = False
def _set_node_to_make_u_turn_on_straight_road(self):
# U-turn on any road
if self.previous_node_index != None:
self.next_node_index = self.previous_node_index
# U-turn on the init road
elif self.current_node_index == self.current_road_index[0]:
self.next_node_index = self.current_road_index[1]
elif self.current_node_index == self.current_road_index[1]:
self.next_node_index = self.current_road_index[0]
def intersection3(self):
# Change to mode running on park once runner stand on the node of park
if self.current_node_index == self.target_node_coming_park:
self.on_the_park = True
# Reset target node while running on park once runner's on the target node
if self.current_node_index == self.target_node_running_around_park:
self.target_node_running_around_park = None
# STORE INTERSECTION FOR MEMORY
# intersection memory
self.memory_node_osmid.append(self.current_node_index)
self.memory_node_coord.append(self.current_node_position)
# dead end memory
if len(self.model.graph_edges.loc[self.current_node_index]['osmid']) == 1 and self.current_node_index not in self.init_road_index:
self.memory_deadend_osmid.append(self.current_node_index)
self.memory_deadend_coord.append(self.current_node_position)
# FIND ALL POSSIBLE ROADS AROUND CURRENT POSITION
self.possible_next_node_index = self.model.node_connections[self.current_node_index]
# ignore previous road starting from the second intersection from the point want to go home
if len(self.memory_node_osmid_way_home) == 1:
self.possible_next_node_index_copy = self.possible_next_node_index[:]
else:
self.possible_next_node_index_copy = [
index for index in self.possible_next_node_index if index != self.previous_node_index]
# got to the node of park on the initial road
if self.current_node_index in self.model.footway_all_nodes:
self.on_the_park = True
# ON THE WAY TO PARK
if not self.on_the_park:
# if it just start, set the next node index as the current node index
if self.current_node_index == None:
self.current_node_index = self.next_node_index
# find the shortest path and the nodes of that
shortest_length = None
for index in self.model.footway_nodes_access:
length = nx.shortest_path_length(
self.model.graph, self.current_node_index, index, weight='length')
if shortest_length == None:
shortest_length = length
self.target_node_coming_park = index
elif length < shortest_length:
shortest_length = length
self.target_node_coming_park = index
# get shortest path to that node
self.shortest_path = nx.shortest_path(
self.model.graph, self.current_node_index, self.target_node_coming_park, weight='length')
# choose to follow the shortest path
if self.random.random() > self.possibility_change_way_park:
self.next_node_index = self.shortest_path[1]
# if not, choose one of the way around that current node
else:
if len(self.possible_next_node_index_copy) != 0:
self.next_node_index = self.random.choice(
self.possible_next_node_index_copy)
else:
self.next_node_index = self.random.choice(
self.possible_next_node_index)
# GET TO THE PARK ALREADY AND RUNNING AROUND
elif self.on_the_park:
# set the set node around the current node on park if don't have yet
if self.current_set_node_on_park == None:
if self.current_node_index in self.model.footway_nodes_set1:
self.current_set_node_on_park = self.model.footway_nodes_set1[:]
elif self.current_node_index in self.model.footway_nodes_set2:
self.current_set_node_on_park = self.model.footway_nodes_set2[:]
# set distance running on park if don't have yet
if self.distance_run_around_park == None:
self._set_distance_run_around_park()
# choose next cell from the set data and avoid previous road as much as it can
if self.target_node_running_around_park == None:
self.target_node_running_around_park = self.random.choice(
[index for index in self.current_set_node_on_park if index != self.current_node_index and index != self.previous_node_index])
self.possible_next_node_index = nx.shortest_path(
self.model.graph, self.current_node_index, self.target_node_running_around_park, weight='length')
self.next_node_index = self.possible_next_node_index[1]
self._update_road_info()
self._check_next_node_in_same_or_next_cell()
self._make_move()
# REACH THE DISTANCE SET TO RUN AROUND PARK, THEN GO HOME OR GET OUT OF PARK
if self.distance_will_get_out_park != None and self.distance_gone > self.distance_will_get_out_park and not self.want_to_go_home:
self.preference = 'preference2'
def intersection4(self):
# STORE INTERSECTION FOR MEMORY
# intersection memory
self.memory_node_osmid.append(self.current_node_index)
self.memory_node_coord.append(self.current_node_position)
# dead end memory
if len(self.model.graph_edges.loc[self.current_node_index]['osmid']) == 1 and self.current_node_index not in self.init_road_index:
self.memory_deadend_osmid.append(self.current_node_index)
self.memory_deadend_coord.append(self.current_node_position)
# on way home memory
if not self.begin_going_home: # ignore the first starting point going home
self.memory_node_osmid_way_home.append(self.current_node_index)
self.memory_node_coord_way_home.append(self.current_node_position)
self.begin_going_home = False
# CHOOSE AN INDEX FROM INITIAL ROAD INDEX AS A TARGET POINT TO GET BACK HOME
if self.init_index == None:
self.init_index = self.random.choice(
[self.init_road_index[0], self.init_road_index[1]])
# FIND ALL POSSIBLE ROADS AROUND CURRENT POSITION
self.possible_next_node_index = self.model.node_connections[self.current_node_index]
# ignore previous road starting from the second intersection from the point want to go home
if len(self.memory_node_osmid_way_home) == 1:
self.possible_next_node_index_copy = self.possible_next_node_index[:]
else:
self.possible_next_node_index_copy = [
index for index in self.possible_next_node_index if index != self.previous_node_index]
# stand on the init_index
if self.current_node_index == self.init_index:
u, v, k = self.init_road_index
if self.current_node_index == u:
self.next_node_index = v
else:
self.next_node_index = u
# choose to follow the shortest path
elif self.random.random() > self.possibility_change_way_home:
self.shortest_path = nx.shortest_path(
self.model.graph, self.current_node_index, self.init_index, weight='length')
self.next_node_index = self.shortest_path[1]
# change, want to explore/try a section of road
# select randomly a road for this step
else:
if len(self.possible_next_node_index_copy) > 0:
self.next_node_index = self.random.choice(
self.possible_next_node_index_copy)
else:
self.next_node_index = self.random.choice(
self.possible_next_node_index)
self._update_road_info()
self._check_next_node_in_same_or_next_cell()
self._make_move()
def _update_road_info(self):
# update current_road_index and next_node_position
self.current_road_index = (
self.current_node_index, self.next_node_index, 0)
self.next_node_position = self.model.cell_of_node[self.next_node_index]
# UPDATE NEW ROAD INFO
self.current_road_name = self.model.graph_edges.loc[self.current_road_index]['name']
self.current_road_type = self.model.graph_edges.loc[self.current_road_index]['highway']
self.current_road_cells = self.model.cells_of_edge[self.current_road_index]
self.current_road_length = self.model.graph_edges.loc[self.current_road_index]['length']
self.current_cell_index_on_roadset = 0
def _check_next_node_in_same_or_next_cell(self):
# WHAT IF THE NEXT NODE IS IN THE SAME OR NEXT CELL?
if len(self.current_road_cells) == 0:
self.next_position = self.next_node_position
self.switch_previous_and_current_node_index()
self._adding_distance_goal()
# keep state
else:
self.next_position = self.current_road_cells[self.current_cell_index_on_roadset]
# CHANGE STATE BACK TO _continue_forward
self.state = '_continue_forward'
def _make_move(self):
# MAKE A MOVE
self.to_node = 'v'
self._move_agent_update_attributes()
def _adding_distance_goal(self):
# ADDING DISTANCE GONE
if self.start_on_road:
if self.num_cells_gone_on_road == 0:
self.num_cells_gone_on_road = 1
self.amount_added = round(((self.num_cells_gone_on_road / len(
self.current_road_cells)) * self.current_road_length), 0)
self.distance_gone += self.amount_added
self.start_on_road = False
else:
self.amount_added = self.current_road_length
self.distance_gone += self.amount_added
        # CHECK WHETHER THE RUNNER GETS FARTHER FROM OR CLOSER TO THE INIT POSITION & UPDATE THE ESTIMATED DISTANCE BACK HOME
a, b = self.init_position
x, y = self.next_node_position
        # squared grid distance, used only as an approximate comparison
self.check_closer_or_farer = (a - x)**2 + (b - y)**2
# set previous distance back home
self.estimate_distance_back_home_previous = self.estimate_distance_back_home
# check to see add or subtract
if self.check_closer_or_farer > self.estimate_distance_cell_to_init_point:
self.estimate_distance_back_home += self.amount_added
elif self.check_closer_or_farer < self.estimate_distance_cell_to_init_point:
self.estimate_distance_back_home -= self.amount_added
# set new estimate distance cell to init point by the new number
self.estimate_distance_cell_to_init_point = self.check_closer_or_farer
# UPDATE EDGES' USING MEMORY
self.model.memory_edge_using[self.current_road_index] += 1
# ADD LENGTH FOR STRAIGHT ROAD
self._add_length_straight_road()
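    # Worked example (editorial, illustrative numbers): for a 120 m road mapped
    # to 12 grid cells, a runner that started mid-road and has covered 3 cells
    # gets amount_added = round((3 / 12) * 120, 0) = 30 m; after that first
    # road, the full road length is added for each completed road instead.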
def _add_length_straight_road(self):
# START ADDING DISTANCE FROM ENDPOINT 1 OF A STRAIGHT ROAD
if self.start_from_one_endpoint:
self.straight_road_length += self.amount_added
def _move_agent_update_attributes(self):
x, y = self.next_position
        # IF THE RUNNER HAS JUST STARTED ON A ROAD, COUNT THE CELLS GONE TO CALCULATE THE APPROXIMATE LENGTH
if self.start_on_road:
self.num_cells_gone_on_road += 1
# MOVE AGENT
self.model.grid.move_agent(self, self.next_position)
# UPDATE THE HEATMAP FOR EVERYTIME IT GOES TO A NEW LOCATION
self.model.heatmap_data[y][x] += 1
# SET WANT TO GO HOME MODE ONCE DISTANCE GONE + DISTANCE BACK HOME > DISTANCE GOAL
if (self.distance_gone + self.estimate_distance_back_home) >= self.distance_goal:
# self.state = 'rest'
self.want_to_go_home = True
self.begin_going_home = True
self.preference = 'preference4'
|
import pytest
from wikidict import gen_dict
@pytest.mark.parametrize(
"locale, words",
[
("fr", "logiciel"), # Single word
("fr", "base,logiciel"), # Multiple words
("fr", "cercle unité"), # Accentued word + space
],
)
@pytest.mark.parametrize("format", ["kobo", "stardict"])
def test_gen_dict(locale, words, format, tmp_path):
res = gen_dict.main(locale, words, tmp_path, format=format)
assert res == 0
|
#!/usr/bin/env python2.7
""" This module is a utility module for Windows.
The Win32 ThreeSpace Utils module is a collection of classes, functions,
structures, and static variables used exclusively for Windows. All functions
in this module are used to scan for available ThreeSpace devices on the host
system and gather information about them. This module can be used with a
system running Python 2.5 and newer (including Python 3.x).
"""
__authors__ = [
'"Chris George" <cgeorge@yeitechnology.com>',
'"Dan Morrison" <dmorrison@yeitechnology.com>',
]
from threespace_utils import *
import struct
# import serial
# from serial.win32 import ULONG_PTR, is_64bit
import re
import sys
import copy
import ctypes
from ctypes.wintypes import HANDLE
from ctypes.wintypes import BOOL
from ctypes.wintypes import HWND
from ctypes.wintypes import DWORD
from ctypes.wintypes import WORD
from ctypes.wintypes import LONG
from ctypes.wintypes import ULONG
from ctypes.wintypes import LPCSTR
from ctypes.wintypes import HKEY
from ctypes import c_ubyte as BYTE
from ctypes import c_longlong as ULONGLONG
from ctypes import c_wchar as WCHAR
from ctypes import c_ushort as USHORT
from serial.win32 import ULONG_PTR, is_64bit
### Globals ###
NULL = 0
HDEVINFO = ctypes.c_void_p
PCTSTR = ctypes.c_char_p
CHAR = ctypes.c_char
LPDWORD = PDWORD = ctypes.POINTER(DWORD)
LPBYTE = PBYTE = ctypes.c_void_p # XXX avoids error about types
PHKEY = ctypes.POINTER(HKEY)
ACCESS_MASK = DWORD
REGSAM = ACCESS_MASK
UCHAR = BYTE
BTH_ADDR = ULONGLONG
BLUETOOTH_MAX_NAME_SIZE = 248
HBLUETOOTH_DEVICE_FIND = HANDLE
# Common error enums
ERROR_NO_MORE_ITEMS = 259
ERROR_INVALID_PARAMETER = 87
ERROR_REVISION_MISMATCH = 1306
ERROR_OUTOFMEMORY = 14
ERROR_SUCCESS = 0
ERROR_INVALID_HANDLE = 6
ERROR_MORE_DATA = 234
# hardware enumeration flags
DIGCF_PRESENT = 2
DIGCF_DEVICEINTERFACE = 16
DIGCF_ALLCLASSES = 4
INVALID_HANDLE_VALUE = 0
ERROR_INSUFFICIENT_BUFFER = 122
SPDRP_HARDWAREID = 1
SPDRP_FRIENDLYNAME = 12
ERROR_NO_MORE_ITEMS = 259
DICS_FLAG_GLOBAL = 1
DIREG_DEV = 0x00000001
KEY_READ = 0x20019
REG_SZ = 1
SPDRP_DEVICEDESC = 0
SPDRP_DEVTYPE = 19
SPDRP_DRIVER = 9
SPDRP_ENUMERATOR_NAME = 0x16
SPDRP_LOCATION_INFORMATION = 0xD
SPDRP_PHYSICAL_DEVICE_OBJECT_NAME = 0xE
SPDRP_MFG = 0xB
SPDRP_SERVICE = 4
SPDRP_CLASS = 7
SPDRP_COMPATIBLEIDS = 2
SPDRP_CLASSGUID = 0x8
SPDRP_ADDRESS = 0x1C
# libraries we use
bthprops = ctypes.windll["bthprops.cpl"]
kernel32 = ctypes.windll["Kernel32.dll"]
setupapi = ctypes.windll.LoadLibrary("setupapi")
advapi32 = ctypes.windll.LoadLibrary("Advapi32")
PortName = b'PortName'
### Classes ###
# COM Port stuctures
class GUID(ctypes.Structure):
_fields_ = [
('Data1', DWORD),
('Data2', WORD),
('Data3', WORD),
('Data4', BYTE * 8),
]
def __str__(self):
return "{%08X-%04X-%04X-%s-%s}" % (
self.Data1,
self.Data2,
self.Data3,
''.join(["%02X" % d for d in self.Data4[:2]]),
''.join(["%02X" % d for d in self.Data4[2:]]),
)
class SP_DEVINFO_DATA(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('ClassGuid', GUID),
('DevInst', DWORD),
('Reserved', ULONG_PTR),
]
def __str__(self):
return "ClassGuid:%s DevInst:%s" % (self.ClassGuid, self.DevInst)
class SP_DEVICE_INTERFACE_DATA(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('InterfaceClassGuid', GUID),
('Flags', DWORD),
('Reserved', ULONG_PTR),
]
def __str__(self):
return "InterfaceClassGuid:%s Flags:%s" % (self.InterfaceClassGuid, self.Flags)
# Bluetooth structures
class BLUETOOTH_DEVICE_SEARCH_PARAMS(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('fReturnAuthenticated', BOOL),
('fReturnRemembered', BOOL),
('fReturnUnknown', BOOL),
('fReturnConnected', BOOL),
('fIssueInquiry', BOOL),
('cTimeoutMultiplier', UCHAR),
('hRadio', HANDLE),
]
class BLUETOOTH_ADDRESS(ctypes.Union):
_fields_ = [
('ullLong', BTH_ADDR),
('rgBytes', UCHAR * 6),
]
def __str__(self):
return self.__repr__()
def __repr__(self):
addr_str = ""
for i in range(len(self.rgBytes) - 1, -1, -1):
tmp_str = hex(self.rgBytes[i])[2:].upper()
if len(tmp_str) < 2:
tmp_str = "0" + tmp_str
if i != 0:
tmp_str += ":"
addr_str += tmp_str
return addr_str
def __eq__(self, other):
if str(self) == str(other):
return True
else:
return False
class SYSTEMTIME(ctypes.Structure):
_fields_ = [
('wYear', WORD),
('wMonth', WORD),
('wDayOfWeek', WORD),
('wDay', WORD),
('wHour', WORD),
('wMinute', WORD),
('wSecond', WORD),
('wMilliseconds', WORD),
]
def __str__(self):
month_map = {
0: "Month_Zero",
1: "January",
2: "February",
3: "March",
4: "April",
5: "May",
6: "June",
7: "July",
8: "August",
9: "September",
10: "October",
11: "November",
12: "December"
}
day_of_week_map = {
0: "Sunday",
1: "Monday",
2: "Tuesday",
3: "Wednesday",
4: "Thursday",
5: "Friday",
6: "Saturday"
}
return "%s, %s %d, %d\n%d:%d:%d.%d" % (
day_of_week_map[self.wDayOfWeek],
month_map[self.wMonth],
self.wDay,
self.wYear,
self.wHour,
self.wMinute,
self.wSecond,
self.wMilliseconds
)
class BLUETOOTH_DEVICE_INFO(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('Address', BLUETOOTH_ADDRESS),
('ulClassofDevice', ULONG),
('fConnected', BOOL),
('fRemembered', BOOL),
('fAuthenticated', BOOL),
('stLastSeen', SYSTEMTIME),
('stLastUsed', SYSTEMTIME),
('szName', WCHAR * BLUETOOTH_MAX_NAME_SIZE),
]
def __str__(self):
class_str = hex(self.ulClassofDevice)
if class_str[-1] == "L":
class_str = class_str[:-1]
while len(class_str) < 10:
class_str = "0x0" + class_str[2:]
if self.fConnected == 0:
connected_str = "False"
else:
connected_str = "True"
if self.fRemembered == 0:
remembered_str = "False"
else:
remembered_str = "True"
if self.fAuthenticated == 0:
authenticated_str = "False"
else:
authenticated_str = "True"
return (
"Size: %d\n" % self.cbSize +
"Address: %s\n" % str(self.Address) +
"Class Of Device: %s\n" % class_str +
"Connected: %s\n" % connected_str +
"Remembered: %s\n" % remembered_str +
"Authenticated: %s\n" % authenticated_str +
"Last Seen: %s\n" % str(self.stLastSeen) +
"Last Used: %s\n" % str(self.stLastUsed) +
"Name: %s" % str(self.szName)
)
### Helper Functions ###
if sys.version_info >= (3, 0):
def toLong(number, base=None):
if base:
return int(number, base)
return int(number)
else:
def toLong(number, base=None):
if base:
return long(number, base)
return long(number)
def _byteBuffer(length):
return (BYTE * length)()
def _string(buffer):
s = []
for c in buffer:
if c == 0: break
s.append(chr(c & 0xff)) # "& 0xff": hack to convert signed to unsigned
return ''.join(s)
def _validHandle(value, func, arguments):
if value == 0:
raise ctypes.WinError()
return value
def _stringToGUID(GUID_string):
""" Assuming GUID string is formatted as such:
'{XXXXXXXX-XXXX-XXXX-XXXXXXXXXXXX}'
"""
return GUID(
toLong(GUID_string[1:9], 16),
toLong(GUID_string[10:14], 16),
toLong(GUID_string[15:19], 16),
(BYTE * 8)(
int(GUID_string[20:22], 16),
int(GUID_string[22:24], 16),
int(GUID_string[25:27], 16),
int(GUID_string[27:29], 16),
int(GUID_string[29:31], 16),
int(GUID_string[31:33], 16),
int(GUID_string[33:35], 16),
int(GUID_string[35:37], 16)
)
)
def _stringToBluetoothAddress(address_string):
""" Assumming address string is formatted as such:
'XXXXXXXXXXXX'
"""
tmp_addr = BLUETOOTH_ADDRESS()
tmp_addr.ullLong = toLong(address_string, 16)
return tmp_addr
### Structures/Class Pointers ###
PSP_DEVINFO_DATA = ctypes.POINTER(SP_DEVINFO_DATA)
PSP_DEVICE_INTERFACE_DATA = ctypes.POINTER(SP_DEVICE_INTERFACE_DATA)
PSP_DEVICE_INTERFACE_DETAIL_DATA = ctypes.c_void_p
SetupDiDestroyDeviceInfoList = setupapi.SetupDiDestroyDeviceInfoList
SetupDiDestroyDeviceInfoList.argtypes = [HDEVINFO]
SetupDiDestroyDeviceInfoList.restype = BOOL
SetupDiGetClassDevs = setupapi.SetupDiGetClassDevsA
SetupDiGetClassDevs.argtypes = [ctypes.POINTER(GUID), PCTSTR, HWND, DWORD]
SetupDiGetClassDevs.restype = HDEVINFO
SetupDiGetClassDevs.errcheck = _validHandle
SetupDiEnumDeviceInterfaces = setupapi.SetupDiEnumDeviceInterfaces
SetupDiEnumDeviceInterfaces.argtypes = [HDEVINFO, PSP_DEVINFO_DATA, ctypes.POINTER(GUID), DWORD, PSP_DEVICE_INTERFACE_DATA]
SetupDiEnumDeviceInterfaces.restype = BOOL
SetupDiGetDeviceInterfaceDetail = setupapi.SetupDiGetDeviceInterfaceDetailA
SetupDiGetDeviceInterfaceDetail.argtypes = [HDEVINFO, PSP_DEVICE_INTERFACE_DATA, PSP_DEVICE_INTERFACE_DETAIL_DATA, DWORD, PDWORD, PSP_DEVINFO_DATA]
SetupDiGetDeviceInterfaceDetail.restype = BOOL
SetupDiGetDeviceRegistryProperty = setupapi.SetupDiGetDeviceRegistryPropertyA
SetupDiGetDeviceRegistryProperty.argtypes = [HDEVINFO, PSP_DEVINFO_DATA, DWORD, PDWORD, PBYTE, DWORD, PDWORD]
SetupDiGetDeviceRegistryProperty.restype = BOOL
SetupDiOpenDevRegKey = setupapi.SetupDiOpenDevRegKey
SetupDiOpenDevRegKey.argtypes = [HDEVINFO, PSP_DEVINFO_DATA, DWORD, DWORD, DWORD, REGSAM]
SetupDiOpenDevRegKey.restype = HKEY
RegCloseKey = advapi32.RegCloseKey
RegCloseKey.argtypes = [HKEY]
RegCloseKey.restype = LONG
RegQueryValueEx = advapi32.RegQueryValueExA
RegQueryValueEx.argtypes = [HKEY, LPCSTR, LPDWORD, LPDWORD, LPBYTE, LPDWORD]
RegQueryValueEx.restype = LONG
# Used to find 3-Space Sensor devices connected via USB
GUID_DEVINTERFACE_SERENUM_BUS_ENUMERATOR = GUID(toLong(0x4D36E978), 0xE325, 0x11CE, (BYTE * 8)(0xBF, 0xC1, 0x08, 0x00, 0x2B, 0xE1, 0x03, 0x18))
# Used to find Bluetooth and Unknown devices
GUID_DEVINTERFACE_COMPORT = GUID(toLong(0x86E0D1E0), 0x8089, 0x11D0, (BYTE * 8)(0x9C, 0xE4, 0x08, 0x00, 0x3E, 0x30, 0x1F, 0x73))
### Functions ###
def _getBluetoothDevices():
found_devices = []
## Create our needed structures
m_SearchParams = BLUETOOTH_DEVICE_SEARCH_PARAMS()
m_SearchParams.cbSize = ctypes.sizeof(m_SearchParams)
m_SearchParams.fReturnAuthenticated = 1 # true
m_SearchParams.fReturnRemembered = 0 # false
m_SearchParams.fReturnUnknown = 1 # true
m_SearchParams.fReturnConnected = 1 # true
m_SearchParams.fIssueInquiry = 1 # true
m_SearchParams.cTimeoutMultiplier = 1
m_SearchParams.hRadio = 0 # Search all available radios
m_DeviceInfo = BLUETOOTH_DEVICE_INFO()
m_DeviceInfo.cbSize = ctypes.sizeof(m_DeviceInfo)
device_find_handle = bthprops.BluetoothFindFirstDevice(ctypes.byref(m_SearchParams), ctypes.byref(m_DeviceInfo))
if device_find_handle == 0:
# We failed to find a device
error_code = kernel32.GetLastError()
        # This error code may never be returned by BluetoothFindFirstDevice, but handle it anyway
if error_code == ERROR_NO_MORE_ITEMS:
return found_devices
elif '-d' in sys.argv:
if error_code == ERROR_INVALID_PARAMETER:
raise Exception("FindFirstDevice: Either the search params or the device info structure is NULL.")
elif error_code == ERROR_REVISION_MISMATCH:
raise Exception("FindFirstDevice: Either the search params or the device info structure is the wrong size.")
else:
raise Exception("FindFirstDevice: Unknown function error: %d" % error_code)
else:
# We found an initial device
found_devices.append(copy.deepcopy(m_DeviceInfo))
while True:
# Now to find more devices
found_more_devices = bthprops.BluetoothFindNextDevice(device_find_handle, ctypes.byref(m_DeviceInfo))
if found_more_devices == 0:
# We failed to find a device
error_code = kernel32.GetLastError()
if error_code == ERROR_NO_MORE_ITEMS:
break
elif '-d' in sys.argv:
if error_code == ERROR_INVALID_HANDLE:
raise Exception("FindNextDevice: The find handle is NULL.")
elif error_code == ERROR_OUTOFMEMORY:
raise Exception("FindNextDevice: Out of memory.")
else:
raise Exception("FindNextDevice: Unknown function error: %d" % error_code)
else:
found_devices.append(copy.deepcopy(m_DeviceInfo))
return found_devices
def _yeiGrep(reg_exp):
for port, desc, hw_id, vid_pid in _yeiComPorts():
if (re.search(reg_exp, port, re.I) or re.search(reg_exp, desc) or re.search(reg_exp, hw_id)):
yield port, desc, hw_id, vid_pid
def _yeiComPorts():
""" This generator scans the device registry for com ports and yields port,
desc, hw_id
"""
GUID_list = [GUID_DEVINTERFACE_SERENUM_BUS_ENUMERATOR, GUID_DEVINTERFACE_COMPORT]
ports_yielded = []
bt_device_list = None
for device_GUID in GUID_list:
g_hdi = SetupDiGetClassDevs(ctypes.byref(device_GUID), None, NULL, DIGCF_PRESENT|DIGCF_DEVICEINTERFACE)
for dw_index in range(256):
friendly_name_string = ""
did = SP_DEVICE_INTERFACE_DATA()
did.cbSize = ctypes.sizeof(did)
if not SetupDiEnumDeviceInterfaces(g_hdi, None, ctypes.byref(device_GUID), dw_index, ctypes.byref(did)):
if ctypes.GetLastError() != ERROR_NO_MORE_ITEMS:
if '-d' in sys.argv:
raise ctypes.WinError()
break
dw_needed = DWORD()
# Get the size
if not SetupDiGetDeviceInterfaceDetail(g_hdi, ctypes.byref(did), None, 0, ctypes.byref(dw_needed), None):
# Ignore ERROR_INSUFFICIENT_BUFFER
if ctypes.GetLastError() != ERROR_INSUFFICIENT_BUFFER:
if '-d' in sys.argv:
raise ctypes.WinError()
# Allocate buffer
class SP_DEVICE_INTERFACE_DETAIL_DATA_A(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('DevicePath', CHAR * (dw_needed.value - ctypes.sizeof(DWORD))),
]
def __str__(self):
return "DevicePath: %s" % self.DevicePath
idd = SP_DEVICE_INTERFACE_DETAIL_DATA_A()
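            # cbSize must be set to the size of the fixed part of the ANSI
            # structure (not the size of the whole buffer); that fixed size
            # differs between 64-bit and 32-bit builds, hence the two values.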
if is_64bit():
idd.cbSize = 8
else:
idd.cbSize = 5
dev_info = SP_DEVINFO_DATA()
dev_info.cbSize = ctypes.sizeof(dev_info)
if not SetupDiGetDeviceInterfaceDetail(g_hdi, ctypes.byref(did), ctypes.byref(idd), dw_needed, None, ctypes.byref(dev_info)):
if '-d' in sys.argv:
raise ctypes.WinError()
# hardware ID
sz_hardware_id = _byteBuffer(1024)
if not SetupDiGetDeviceRegistryProperty(g_hdi, ctypes.byref(dev_info), SPDRP_HARDWAREID, None, ctypes.byref(sz_hardware_id), ctypes.sizeof(sz_hardware_id) - 1, None):
# Ignore ERROR_INSUFFICIENT_BUFFER
if ctypes.GetLastError() != ERROR_INSUFFICIENT_BUFFER:
if '-d' in sys.argv:
raise ctypes.WinError()
#Build VID/PID string
vid_pid_string = ""
hw_string = _string(sz_hardware_id)
hw_string = hw_string.upper()
vid_idx = hw_string.find("VID_")
pid_idx = hw_string.find("PID_")
if vid_idx != -1 and pid_idx != -1:
vid_end = hw_string.find("&", vid_idx + 1)
vid = hw_string[vid_idx:vid_end]
pid_end = hw_string.find("&", pid_idx + 1)
pid = hw_string[pid_idx:pid_end]
vid_pid_string = vid + "&" + pid
enum_name_buff = _byteBuffer(1024)
if SetupDiGetDeviceRegistryProperty(g_hdi, ctypes.byref(dev_info), SPDRP_ENUMERATOR_NAME, None, ctypes.byref(enum_name_buff), ctypes.sizeof(enum_name_buff) - 1, None):
if _string(enum_name_buff).upper() == "BTHENUM":
# This is a bluetooth enumerator, we should do further
# investigation
if bt_device_list is None:
bt_device_list = _getBluetoothDevices()
device_path_str = idd.DevicePath
if type(device_path_str) is bytes:
device_path_str = bytes.decode(device_path_str)
start_idx = device_path_str.rfind("&") + 1
end_idx = start_idx + 12
bt_addr_string = device_path_str[start_idx:end_idx]
bt_address = _stringToBluetoothAddress(bt_addr_string)
if bt_address == _stringToBluetoothAddress("0"):
continue
connected_dev = None
for bt_dev in bt_device_list:
if bt_dev.Address == bt_address:
connected_dev = bt_dev
break
if connected_dev is not None:
if (str(connected_dev.szName).find("YEI_3SpaceBT") != -1):
# The device is a 3-Space Sensor!
vid_pid_string = "VID_2476&PID_1060"
friendly_name_string = "3 Space Bluetooth over Bluetooth link "
sz_friendly_name = _byteBuffer(1024)
if not SetupDiGetDeviceRegistryProperty(g_hdi, ctypes.byref(dev_info), SPDRP_FRIENDLYNAME, None, ctypes.byref(sz_friendly_name), ctypes.sizeof(sz_friendly_name) - 1, None):
# Ignore ERROR_INSUFFICIENT_BUFFER
if ctypes.GetLastError() != ERROR_INSUFFICIENT_BUFFER:
if '-d' in sys.argv:
                        raise IOError("Failed to get details for %s (%s)" % (dev_info, _string(sz_hardware_id)))
port_name = None
else:
# The real com port name has to read differently...
h_key = SetupDiOpenDevRegKey(g_hdi, ctypes.byref(dev_info), DICS_FLAG_GLOBAL, 0, DIREG_DEV, KEY_READ)
port_name_buffer = _byteBuffer(1024)
port_name_length = ULONG(ctypes.sizeof(port_name_buffer))
RegQueryValueEx(h_key, PortName, None, None, ctypes.byref(port_name_buffer), ctypes.byref(port_name_length))
RegCloseKey(h_key)
# We either use the generated friendly name or our overridden
# one, with preference to the overridden one.
if friendly_name_string == "":
friendly_name_string = _string(sz_friendly_name)
else:
friendly_name_string += "(" + _string(port_name_buffer) + ")"
if _string(port_name_buffer) not in ports_yielded:
ports_yielded.append(_string(port_name_buffer))
yield (_string(port_name_buffer), friendly_name_string, _string(sz_hardware_id), vid_pid_string)
SetupDiDestroyDeviceInfoList(g_hdi)
def getComPorts(filter=TSS_FIND_ALL):
""" Queries the system for all available serial COM ports and returns a list
of them.
Args:
        filter: An integer denoting a flag of which 3-Space Sensor device
            types to find (default is TSS_FIND_ALL)
Returns:
A list of all known serial COM ports. Each element of the list is a
tuple formatted as such:
(COM_PORT_NAME, FRIENDLY_NAME, YEI_TECH_DEVICE_TYPE)
Note:
YEI_TECH_DEVICE_TYPE will be an empty string if the port's
        driver's vendor and product IDs do not match any known YEI
        Technology products.
Possible YEI_TECH_DEVICE_TYPE strings are:
'???' - Unknown
'BTL' - Bootloader (No Firmware)
'USB' - USB
'DNG' - Dongle
'WL' - Wireless
'EM' - Embedded
'DL' - Data-logging
'BT' - Bluetooth
"""
port_list = []
serial_port_list = _yeiComPorts()
pid_map = {
"PID_1000": ("BTL", TSS_FIND_BTL),
"PID_1010": ("USB", TSS_FIND_USB),
"PID_1020": ("DNG", TSS_FIND_DNG),
"PID_1030": ("WL", TSS_FIND_WL),
"PID_1040": ("EM", TSS_FIND_EM),
"PID_1050": ("DL", TSS_FIND_DL),
"PID_1060": ("BT", TSS_FIND_BT)
}
for cur_port in serial_port_list:
hw_string = cur_port[2]
if cur_port[3] != "":
vid, pid = cur_port[3].split("&")
if vid == "VID_2476" and pid in pid_map and pid_map[pid][1] & filter:
port_list.append(ComInfo(cur_port[0], cur_port[1], pid_map[pid][0]))
continue
elif TSS_FIND_UNKNOWN & filter:
port_list.append(ComInfo(cur_port[0], cur_port[1], "???"))
return port_list
def _getSoftwareVersionFromPort(serial_port):
# Figure out whether the current hardware is on "old" or "new" firmware
serial_port.write(bytearray((0xf7, 0xdf, 0xdf)))
response = convertString(serial_port.read(9))
if len(response) == 0:
# Check and see if in bootloader
return None
elif response[:3] == "TSS":
# Old firmware version remainder
serial_port.read(9)
raise Exception("Firmware for device on ( %s ) is out of date for this API. Recommend updating to latest firmware." % serial_port.name)
# Hour-minute remainder
serial_port.read(3)
return response
def getDeviceInfoFromComPort(port_name, poll_device=True):
""" Analyzes a serial COM port of a 3-Space Sensor and returns details about
the device.
Args:
port_name: A string representing the name of the serial COM port to
analyze.
poll_device: An optional boolean that controls whether the named COM
port is written to and queried for information about the device.
If this value is True, please take caution as the COM port's
device will be written to and may produce undesired effects if
the device is unknown or not a 3-Space Sensor (default is True)
Returns:
        A SensorInfo tuple of 6 values describing various details about the
        COM port's device:
Friendly name,
3-Space Type,
3-Space ID,
3-Space Firmware Version String,
3-Space Hardware Version String,
isInBootloader
    Raises:
        Exception: If the polled device responds but does not appear to be a
            3-Space Sensor or its firmware is too old for this API.
"""
friendly_name = ""
dev_type = "???"
dev_serial = 0
dev_fw_ver = ""
dev_hw_ver = ""
in_bootloader = False
pid_map = {
"PID_1000": "BTL",
"PID_1010": "USB",
"PID_1020": "DNG",
"PID_1030": "WL",
"PID_1040": "EM",
"PID_1050": "DL",
"PID_1060": "BT"
}
matched_ports = _yeiGrep(port_name)
for cur_port in matched_ports:
if cur_port[0] == port_name:
# friendly name
friendly_name = cur_port[1]
# device type
if cur_port[3] != "":
vid, pid = cur_port[3].split("&")
if vid == "VID_2476":
# The VID matches the YEI vendor ID
if pid in pid_map:
dev_type = pid_map[pid]
break
if poll_device:
tmp_port = None
try:
tmp_port = serial.Serial(port_name, timeout=0.1, baudrate=115200)
if dev_type == "BT":
tmp_port.timeout = 5.0
except:
tmp_port = None
if tmp_port is not None:
# Try to get the serial, if it fails try to see if in bootloader
tmp_port.write(bytearray((0xf7, 0xed, 0xed)))
response = tmp_port.read(4)
if len(response) == 4:
dev_serial = "{0:08X}".format(struct.unpack('>I', response)[0])
# Get the version strings (and device type if the
# previous method did not resolve it)
software_version = _getSoftwareVersionFromPort(tmp_port)
if software_version is not None:
# This is in fact a 3-Space sensor
dev_fw_ver = software_version
tmp_port.write(bytearray((0xf7, 0xe6, 0xe6)))
hardware_version = convertString(tmp_port.read(32))
dev_hw_ver = hardware_version
if dev_type == "???":
dev_type = hardware_version[4:-8].strip()
else:
tmp_port.write(bytearray((0x3f,))) # this is ascii '?'
response = convertString(tmp_port.read(2))
if response:
if response == "OK":
in_bootloader = True
dev_type = "BTL"
else:
raise Exception("Either device on( %s ) is not a 3-Space Sensor or the firmware is out of date for this API and recommend updating to latest firmware." % port_name)
tmp_port.close()
return SensorInfo(
friendly_name,
dev_type,
dev_serial,
dev_fw_ver,
dev_hw_ver,
in_bootloader
)
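# A minimal usage sketch (not part of the original module): run directly on a
# Windows machine with pyserial installed. ComInfo entries are assumed to be
# (port name, friendly name, device type) tuples, as described in getComPorts.
if __name__ == '__main__':
    # List every detected COM port without touching the hardware.
    for com_port in getComPorts():
        print(com_port)
    # Poll only USB-attached 3-Space Sensors for details. Note that this writes
    # to the port, as warned in the getDeviceInfoFromComPort docstring.
    for com_port in getComPorts(filter=TSS_FIND_USB):
        print(getDeviceInfoFromComPort(com_port[0], poll_device=True))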
|
from __future__ import (
absolute_import,
unicode_literals,
)
import os
import signal
import sys
import unittest
from pysoa.test.compatibility import mock
standalone = None
def setup_module(_):
"""
We want this setup to run before any of the tests in this module, to ensure that the `standalone` module gets
imported.
"""
global standalone
try:
from pysoa.server import standalone
assert False, 'Should not have been able to import standalone; should have received SystemExit'
except SystemExit as e:
# This first bit is actually a test; it confirms that the double-import trap is triggered
assert e.args[0] == 99
# Now we actually import the module, but we have to make sure the double-import trap isn't triggered before we do.
# Running `pytest` or `setup.py` looks to `standalone` like there is a problem, so we temporarily remove `pytest`
# or `setup.py` from the first path item...
prev_path_0 = sys.path[0]
sys.path[0] = ''
try:
from pysoa.server import standalone
except SystemExit as e:
assert False, 'Expected import to succeed, instead got SystemExit with code {}'.format(e.args[0])
finally:
# ...and then we put it back in so that we haven't caused any problems.
sys.path[0] = prev_path_0
class TestSimpleMain(unittest.TestCase):
def setUp(self):
self.assertIsNotNone(standalone, 'Something went wrong with setup_module or the import')
self.prev_argv = sys.argv
def tearDown(self):
sys.argv = self.prev_argv
def test_no_arguments(self):
server_getter = mock.MagicMock()
sys.argv = ['/path/to/example_service/standalone.py']
standalone.simple_main(server_getter)
server_getter.assert_called_once_with()
server_getter.return_value.main.assert_called_once_with()
@mock.patch('pysoa.server.autoreload.get_reloader')
def test_only_file_watcher_argument_no_values(self, mock_get_reloader):
server_getter = mock.MagicMock()
sys.argv = ['/path/to/example_service/standalone.py', '--use-file-watcher']
standalone.simple_main(server_getter)
server_getter.assert_called_once_with()
self.assertFalse(server_getter.return_value.main.called)
mock_get_reloader.assert_called_once_with('', None, signal_forks=False)
self.assertEqual(1, mock_get_reloader.return_value.main.call_count)
self.assertEqual(
server_getter.return_value,
mock_get_reloader.return_value.main.call_args_list[0][0][1][1],
)
@mock.patch('pysoa.server.autoreload.get_reloader')
def test_only_file_watcher_argument_some_values(self, mock_get_reloader):
server_getter = mock.MagicMock()
sys.argv = ['/path/to/example_service/standalone.py', '--use-file-watcher', 'example,pysoa,conformity']
standalone.simple_main(server_getter)
server_getter.assert_called_once_with()
self.assertFalse(server_getter.return_value.main.called)
mock_get_reloader.assert_called_once_with('', ['example', 'pysoa', 'conformity'], signal_forks=False)
self.assertEqual(1, mock_get_reloader.return_value.main.call_count)
self.assertEqual(0, mock_get_reloader.return_value.main.call_args_list[0][0][1][0].fork_processes)
self.assertEqual(
server_getter.return_value,
mock_get_reloader.return_value.main.call_args_list[0][0][1][1],
)
@mock.patch('pysoa.server.autoreload.get_reloader')
def test_file_watcher_argument_no_values_with_forking(self, mock_get_reloader):
server_getter = mock.MagicMock()
sys.argv = ['/path/to/example_service/standalone.py', '--use-file-watcher', '-f', '5']
standalone.simple_main(server_getter)
server_getter.assert_called_once_with()
self.assertFalse(server_getter.return_value.main.called)
mock_get_reloader.assert_called_once_with('', None, signal_forks=True)
self.assertEqual(1, mock_get_reloader.return_value.main.call_count)
self.assertEqual(5, mock_get_reloader.return_value.main.call_args_list[0][0][1][0].fork_processes)
self.assertEqual(
server_getter.return_value,
mock_get_reloader.return_value.main.call_args_list[0][0][1][1],
)
@mock.patch('pysoa.server.autoreload.get_reloader')
def test_file_watcher_argument_some_values_with_forking(self, mock_get_reloader):
server_getter = mock.MagicMock()
sys.argv = ['/path/to/example_service/standalone.py', '--use-file-watcher', 'pysoa', '-f', '5']
standalone.simple_main(server_getter)
server_getter.assert_called_once_with()
self.assertFalse(server_getter.return_value.main.called)
mock_get_reloader.assert_called_once_with('', ['pysoa'], signal_forks=True)
self.assertEqual(1, mock_get_reloader.return_value.main.call_count)
self.assertEqual(5, mock_get_reloader.return_value.main.call_args_list[0][0][1][0].fork_processes)
self.assertEqual(
server_getter.return_value,
mock_get_reloader.return_value.main.call_args_list[0][0][1][1],
)
@mock.patch('multiprocessing.Process')
@mock.patch('multiprocessing.cpu_count')
def test_only_forking_not_limited(self, mock_cpu_count, mock_process):
server_getter = mock.MagicMock()
mock_cpu_count.return_value = 2
sys.argv = ['/path/to/example_service/standalone.py', '-f', '10']
prev_sigint = prev_sigterm = prev_sighup = False
try:
prev_sigint = signal.signal(signal.SIGINT, signal.SIG_IGN)
prev_sigterm = signal.signal(signal.SIGTERM, signal.SIG_IGN)
prev_sighup = signal.signal(signal.SIGHUP, signal.SIG_IGN)
processes = [mock.MagicMock() for _ in range(0, 10)]
mock_process.side_effect = processes
standalone.simple_main(server_getter)
server_getter.assert_called_once_with()
self.assertFalse(server_getter.return_value.main.called)
self.assertEqual(10, mock_process.call_count)
i = 0
for i, call in enumerate(mock_process.call_args_list):
self.assertEqual(server_getter.return_value.main, call[1]['target'])
self.assertEqual('pysoa-worker-{}'.format(i), call[1]['name'])
i += 1
self.assertEqual(10, i)
for i, process in enumerate(processes):
self.assertTrue(process.start.called, 'Process {} was not started'.format(i))
self.assertTrue(process.join.called, 'Process {} was not joined'.format(i))
self.assertFalse(process.terminate.called, 'Process {} should not have been terminated'.format(i))
os.kill(os.getpid(), signal.SIGHUP)
for i, process in enumerate(processes):
                self.assertTrue(process.terminate.called, 'Process {} was not terminated'.format(i))
finally:
if prev_sigint is not False:
signal.signal(signal.SIGINT, prev_sigint or signal.SIG_IGN)
if prev_sigterm is not False:
signal.signal(signal.SIGTERM, prev_sigterm or signal.SIG_IGN)
if prev_sighup is not False:
signal.signal(signal.SIGHUP, prev_sighup or signal.SIG_IGN)
@mock.patch('multiprocessing.Process')
@mock.patch('multiprocessing.cpu_count')
def test_only_forking_limited(self, mock_cpu_count, mock_process):
server_getter = mock.MagicMock()
mock_cpu_count.return_value = 1
sys.argv = ['/path/to/example_service/standalone.py', '-f', '10']
prev_sigint = prev_sigterm = prev_sighup = False
try:
prev_sigint = signal.signal(signal.SIGINT, signal.SIG_IGN)
prev_sigterm = signal.signal(signal.SIGTERM, signal.SIG_IGN)
prev_sighup = signal.signal(signal.SIGHUP, signal.SIG_IGN)
processes = [mock.MagicMock() for _ in range(0, 5)]
mock_process.side_effect = processes
standalone.simple_main(server_getter)
server_getter.assert_called_once_with()
self.assertFalse(server_getter.return_value.main.called)
self.assertEqual(5, mock_process.call_count)
i = 0
for i, call in enumerate(mock_process.call_args_list):
self.assertEqual(server_getter.return_value.main, call[1]['target'])
self.assertEqual('pysoa-worker-{}'.format(i), call[1]['name'])
i += 1
self.assertEqual(5, i)
for i, process in enumerate(processes):
self.assertTrue(process.start.called, 'Process {} was not started'.format(i))
self.assertTrue(process.join.called, 'Process {} was not joined'.format(i))
self.assertFalse(process.terminate.called, 'Process {} should not have been terminated'.format(i))
os.kill(os.getpid(), signal.SIGHUP)
for i, process in enumerate(processes):
                self.assertTrue(process.terminate.called, 'Process {} was not terminated'.format(i))
finally:
if prev_sigint is not False:
signal.signal(signal.SIGINT, prev_sigint or signal.SIG_IGN)
if prev_sigterm is not False:
signal.signal(signal.SIGTERM, prev_sigterm or signal.SIG_IGN)
if prev_sighup is not False:
signal.signal(signal.SIGHUP, prev_sighup or signal.SIG_IGN)
|
from django.conf.urls import url
from rest_framework_jwt.views import obtain_jwt_token
from .views import statistical, users
urlpatterns = [
    # Login
    url(r'^authorizations/$', obtain_jwt_token),
    # -------------------- Statistics --------------------
    # Total user count
    url(r'^statistical/total_count/$', statistical.UserCountView.as_view()),
    # Daily new users
    url(r'^statistical/day_increment/$', statistical.UserDayCountView.as_view()),
    # Daily active users
    url(r'^statistical/day_active/$', statistical.UserDayActiveCountView.as_view()),
    # Users who placed orders that day
    url(r'^statistical/day_orders/$', statistical.UserDayOrdersCountView.as_view()),
    # Monthly new users
    url(r'^statistical/month_increment/$', statistical.UserMonthCountView.as_view()),
    # Daily views per goods category
    url(r'^statistical/goods_day_views/$', statistical.UserGoodsCountView.as_view()),
    # -------------------- User management routes --------------------
url(r'^users/$', users.UserView.as_view()),
]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. codeauthor:: Cédric Dumay <cedric.dumay@gmail.com>
Schemas specify a mapping for :class:`logging.LogRecord`. They are based on
:class:`marshmallow.Schema`. All schemas MUST inherit from
:class:`logging_gelf.schemas.GelfSchema`.
"""
import socket
import time
from marshmallow import Schema, fields
from logging_gelf import SYSLOG_LEVELS
from marshmallow import post_dump
GELF_1_1_FIELDS = [
'version', 'host', 'short_message', 'full_message', 'timestamp', 'level',
'line', 'file'
]
class GelfSchema(Schema):
version = fields.Constant("1.1")
host = fields.String(required=True, default=socket.gethostname)
short_message = fields.Method('to_message')
full_message = fields.String()
timestamp = fields.Method('to_timestamp')
level = fields.Method('to_syslog_level')
lineno = fields.Integer(dump_to="line")
pathname = fields.String(dump_to="file")
@classmethod
def to_syslog_level(cls, value):
"""description of to_syslog_level"""
return SYSLOG_LEVELS.get(value.levelno, 1)
@classmethod
def to_timestamp(cls, value):
"""to_timestamp"""
if value.created:
return value.created
else:
return time.time()
@classmethod
def to_message(cls, value):
"""description of to_message"""
return value.getMessage() % vars(value)
@staticmethod
def key_path(*args):
"""description of key_path"""
return "_".join(args)
@staticmethod
def to_flat_dict(prefix, data):
flat_result = dict()
for dkey, dvalue in data.items():
path = GelfSchema.key_path(prefix, dkey)
if isinstance(dvalue, dict):
flat_result.update(GelfSchema.to_flat_dict(path, dvalue))
else:
flat_result[path] = dvalue
return flat_result
@post_dump
def fix_additional_fields(self, data):
"""description of fix_additional_fields"""
result = dict()
for key, value in data.items():
rkey = key if key in GELF_1_1_FIELDS else '_{}'.format(key)
if isinstance(value, dict):
result.update(self.to_flat_dict(rkey, value))
else:
result[rkey] = value
return result
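# A minimal illustration (not part of the library API) of how nested extra
# fields are flattened before being prefixed for GELF output:
#
#   GelfSchema.to_flat_dict('_context', {'user': {'id': 42, 'name': 'bob'}})
#   # -> {'_context_user_id': 42, '_context_user_name': 'bob'}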
|
__version__ = "0.0.1"
import logging
from logging import NullHandler
from .vault import get_secret_or_env, get_vault_secret_keys, is_vault_initialised
__all__ = [
"get_secret_or_env",
"get_vault_secret_keys",
"is_vault_initialised"
]
logging.getLogger(__name__).addHandler(NullHandler())
|
# ref: https://www.youtube.com/watch?v=O20Y1XR6g0A&list=PLoVvAgF6geYMb029jpxqMuz5dRDtO0ydM&index=4
#import os
from influxdb import InfluxDBClient
from config import HOST, PORT, USERNAME, PASSWORD, DATABASE, TEMPERATURE, HUMIDITY, ROOM1
# following config moved to config.py file
# InfluxDB credentials
#HOST = os.environ.get('INFLUXDB_HOST', 'localhost')
#PORT = os.environ.get('INFLUXDB_PORT', 8086)
#USERNAME = os.environ.get('INFLUXDB_USER', 'influxDBuser')
#PASSWORD = os.environ.get('INFLUXDB_USER_PASSWORD', 'influxDBpass')
#DATABASE = os.environ.get('INFLUXDB_DB', 'strawberry_factory')
def client():
# InfluxDB client setup
client = InfluxDBClient(host=HOST, port=int(PORT), username=USERNAME, password=PASSWORD)
# databases
#client.get_list_database()
# create a database
client.create_database(DATABASE)
# use a database
client.switch_database(DATABASE)
# measurements/tables
#client.get_list_measurements()
return client
def save(db_client, measurement, fields, tags=None):
# json data
"""
json_body = {}
json_body['measurement'] = measurement
if tags != None:
json_body['tags'] = tags
json_body['fields'] = fields
# make list
json_body = [json_body]
"""
# alternatively
json_body = [{'measurement': measurement, 'tags': tags, 'fields': fields}]
# write / save into a row
db_client.write_points(json_body)
def send_influxdb(data, measurement='temperature'):
# InfluxDB server
db_client = client()
if measurement == 'temperature':
# json body for temperature
t_measurement = TEMPERATURE
t_tags = { "place": ROOM1 }
t_fields = { "value" : data}
# save @influxdb
save(db_client, t_measurement, t_fields, tags=t_tags)
elif measurement == 'humidity':
# json body for humidity
h_measurement = HUMIDITY
h_tags = { "place": ROOM1 }
h_fields = { "value" : data}
# save @influxdb
save(db_client, h_measurement, h_fields, tags=h_tags)
else:
print("Positional argument (measurement) required!")
|
# Code generated by `typeddictgen`. DO NOT EDIT.
"""V1beta1RunAsGroupStrategyOptionsDict generated type."""
from typing import TypedDict, List
from kubernetes_typed.client import V1beta1IDRangeDict
V1beta1RunAsGroupStrategyOptionsDict = TypedDict(
"V1beta1RunAsGroupStrategyOptionsDict",
{
"ranges": List[V1beta1IDRangeDict],
"rule": str,
},
total=False,
)
|
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import sys
import io
from azure.cli.testsdk import LiveScenarioTest
from contextlib import contextmanager
@contextmanager
def capture_output():
class stream_buffer_tee(object):
def __init__(self):
self.stdout = sys.stdout
self.buffer = io.StringIO()
def write(self, message):
self.stdout.write(message)
self.buffer.write(message)
def flush(self):
self.stdout.flush()
self.buffer.flush()
def get_output(self):
return self.buffer.getvalue()
def close(self):
self.buffer.close()
_stdout = sys.stdout
buffer_tee = stream_buffer_tee()
sys.stdout = buffer_tee
try:
yield buffer_tee
finally:
sys.stdout = _stdout
buffer_tee.close()
class CaptureOutputLiveScenarioTest(LiveScenarioTest):
def __init__(self, test_scenario):
super(CaptureOutputLiveScenarioTest, self).__init__(test_scenario)
# TODO: @digimaun - Maybe put a helper like this in the shared lib, when you create it?
def command_execute_assert(self, command, asserts):
with capture_output() as buffer:
self.cmd(command, checks=None)
output = buffer.get_output()
for a in asserts:
assert a in output
return output
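# Example usage inside a live scenario test (the command and expected substring
# below are hypothetical placeholders, not values taken from this repository):
#
#   output = self.command_execute_assert(
#       "iot hub device-identity list --hub-name {}".format(hub_name),
#       ["deviceId"],
#   )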
|
import re
import unittest
import ir_datasets
from ir_datasets.formats import GenericDoc, GenericQuery, TrecQrel
from .base import DatasetIntegrationTest
_logger = ir_datasets.log.easy()
# Note: there's > 100k combinations here, so we are only testing a few cases
class TestCLIRMatrix(DatasetIntegrationTest):
def test_docs(self):
self._test_docs('clirmatrix/af', count=87705, items={
0: GenericDoc('123393', 'Weeskindertjies (plant) weeskind'),
9: GenericDoc('14515', re.compile('^Die Groot Beer \\(Latyn: Ursa Major\\) is ’n sterrebeeld wat heeljaar in die Noordelike Halfrond sigbaar.{873}8\xa0mag\\. 47\xa0Ursae Majoris het twee bevestigde planete, wat 2,54 en 0,76 keer die massa van Jupiter is\\.$', flags=48)),
87704: GenericDoc('18801', re.compile('^Die Suid\\-Afrikaanse Leër is die landmagkomponent van die Suid\\-Afrikaanse Nasionale Weermag en van sy.{964}Amptelike webwerf Hierdie artikel is ’n saadjie\\. Voel vry om Wikipedia te help deur dit uit te brei\\.$', flags=48)),
})
self._test_docs('clirmatrix/en', count=5984197, items={
0: GenericDoc('4274592', re.compile('^Transtar was the model name given to the line of trucks produced by the Studebaker Corporation of So.{910}asons, the Transtar name was dropped for the 1959 4E series Studebaker trucks and changed to Deluxe\\.$', flags=48)),
9: GenericDoc('23065547', re.compile('^Standard sea\\-level conditions \\(SSL\\), also known as sea\\-level standard \\(SLS\\), defines a set of atmosp.{827}orda, Introduction to Aerospace Engineering with a Flight Test Perspective, John Wiley \\& Sons, 2017\\.$', flags=48)),
5984196: GenericDoc('2160901', re.compile('^Resentment \\(also called ranklement or bitterness\\) is a complex, multilayered emotion that has been d.{1021}of by others; and having achievements go unrecognized, while others succeed without working as hard\\.$', flags=48)),
})
self._test_docs('clirmatrix/simple', count=153408, items={
0: GenericDoc('12559', re.compile('^A superlative, in grammar, is an adjective describing a noun that is the best example of a given qua.{684}the adverb "most" before the adjective\\. For instance, you do not say "funnest," or "interestingest"\\.$', flags=48)),
9: GenericDoc('120355', re.compile('^Occult refers to an area of knowledge or thought that is hidden\\. The word occult has many uses in th.{1069}pretation of Hinduism within Theosophy or the various occult interpretations of the Jewish Kabbalah\\.$', flags=48)),
153407: GenericDoc('54463', re.compile('^The history of the Christian religion and the Christian church began with Jesus and his apostles\\. Ch.{934}t\\. Peter, was that they did not, and the matter was further addressed with the Council of Jerusalem\\.$', flags=48)),
})
self._test_docs('clirmatrix/zh', count=1089043, items={
0: GenericDoc('449241', '虿盆,商朝时酷刑之一。将作弊官人跣剥干净,送下坑中,餵毒蛇、毒蝎等物。相传商朝最后一任君主纣王曾在大将黄飞虎之妻与纣王之妃子苏妲己发生口角之后将其推下虿盆,令其惨死。此刑罚在历史上使用较少。'),
9: GenericDoc('664068', re.compile('^篡位是一個貶义詞,即不合法或有爭議地取得王位\\(皇位\\)。包括殺上任皇帝/太子/廢立/逼迫上現任皇帝或君主交出皇位 在非君主制语境下,亦可泛指非法谋夺更高权力的行为(例如違反憲法而推行独裁,或在權限以外越.{29}为在元武宗\\(1307年\\)至元寧宗\\(1332年\\)的25年間,竟然換了八個皇帝,当中有三位皇帝\\(元天順帝、元明宗、元寧宗\\)在位時間甚至不足一年。 在同一王朝中通过杀害或逼退合法继承人或在位者的篡位者 政变$', flags=48)),
1089042: GenericDoc('6844113', re.compile('^谷風隧道為台灣的一條公路隧道,屬「台9線蘇花公路山區路段改善計劃」\\(蘇花改\\)南澳\\~和平段的其中一座隧道,北起鼓音橋,南接漢本高架橋,它穿越中央山脈鼓音溪至花蓮縣漢本的山區。谷風隧道南下及北上線均為45.{425}作、避難聯絡通道襯砌、通風隔板施作、新建通風機房,此外還須在避難聯絡通道內安裝照明系統及通訊設備,主隧道亦須安裝隧道照明燈具結線,安裝水霧支管,安裝噴流風機,此外隧道的所有土建工程及機電工程同步施工。$', flags=48)),
})
def test_queries(self):
self._test_queries('clirmatrix/af/bi139-base/en/train', count=9999, items={
0: GenericQuery('690', 'Aruba'),
9: GenericQuery('5615', 'Cretaceous'),
9998: GenericQuery('62732112', 'Efrain Gusquiza'),
})
self._test_queries('clirmatrix/af/bi139-base/en/dev', count=1000, items={
0: GenericQuery('2038', 'August Horch'),
9: GenericQuery('77606', 'Charles VIII of France'),
999: GenericQuery('62708410', '2020 in Morocco'),
})
self._test_queries('clirmatrix/af/bi139-base/en/test1', count=1000, items={
0: GenericQuery('3649', 'Geography of the British Virgin Islands'),
9: GenericQuery('107443', 'Coalinga, California'),
999: GenericQuery('62716625', 'Kevin Hall (disambiguation)'),
})
self._test_queries('clirmatrix/af/bi139-base/en/test2', count=1000, items={
0: GenericQuery('6011', 'Chomsky hierarchy'),
9: GenericQuery('97597', 'Flag of San Marino'),
999: GenericQuery('62707449', 'Machiel Kiel'),
})
self._test_queries('clirmatrix/en/bi139-base/af/train', count=10000, items={
0: GenericQuery('3', 'Lys van Afrikaanse skrywers'),
9: GenericQuery('95', 'Geskiedenis'),
9999: GenericQuery('285953', 'Jean-Claude Casadesus'),
})
self._test_queries('clirmatrix/en/bi139-full/af/train', count=58745, items={
0: GenericQuery('3', 'Lys van Afrikaanse skrywers'),
9: GenericQuery('26', 'Benue-Kongo-tale'),
58744: GenericQuery('286010', 'Lugmag van die Volksbevrydingsleër'),
})
self._test_queries('clirmatrix/en/multi8/fr/train', count=10000, items={
0: GenericQuery('45187', 'Mort'),
9: GenericQuery('7740', 'Lituanie'),
9999: GenericQuery('28573', 'Chiffres arabes'),
})
self._test_queries('clirmatrix/fr/multi8/en/train', count=10000, items={
0: GenericQuery('8221', 'Death'),
9: GenericQuery('17675', 'Lithuania'),
9999: GenericQuery('1786', 'Arabic numerals'),
})
self._test_queries('clirmatrix/de/multi8/en/train', count=10000, items={
0: GenericQuery('8221', 'Death'),
9: GenericQuery('17675', 'Lithuania'),
9999: GenericQuery('1786', 'Arabic numerals'),
})
def test_qrels(self):
self._test_qrels('clirmatrix/af/bi139-base/en/train', count=999900, items={
0: TrecQrel('690', '14013', 6, '0'),
9: TrecQrel('690', '15050', 0, '0'),
999899: TrecQrel('62732112', '259879', 0, '0'),
})
self._test_qrels('clirmatrix/af/bi139-base/en/dev', count=100000, items={
0: TrecQrel('2038', '13762', 3, '0'),
9: TrecQrel('2038', '272786', 0, '0'),
99999: TrecQrel('62708410', '258719', 0, '0'),
})
self._test_qrels('clirmatrix/af/bi139-base/en/test1', count=100000, items={
0: TrecQrel('3649', '50129', 5, '0'),
9: TrecQrel('3649', '93300', 0, '0'),
99999: TrecQrel('62716625', '140128', 0, '0'),
})
self._test_qrels('clirmatrix/af/bi139-base/en/test2', count=100000, items={
0: TrecQrel('6011', '11475', 6, '0'),
9: TrecQrel('6011', '69338', 0, '0'),
99999: TrecQrel('62707449', '112726', 0, '0'),
})
self._test_qrels('clirmatrix/en/bi139-base/af/train', count=1000000, items={
0: TrecQrel('3', '1617690', 5, '0'),
9: TrecQrel('3', '3943287', 3, '0'),
999999: TrecQrel('285953', '43443609', 0, '0'),
})
self._test_qrels('clirmatrix/en/bi139-full/af/train', count=3011938, items={
0: TrecQrel('3', '1617690', 5, '0'),
9: TrecQrel('3', '3943287', 3, '0'),
3011937: TrecQrel('286010', '400853', 1, '0'),
})
self._test_qrels('clirmatrix/en/multi8/fr/train', count=1000000, items={
0: TrecQrel('45187', '49703357', 5, '0'),
9: TrecQrel('45187', '12161221', 3, '0'),
999999: TrecQrel('28573', '40255894', 0, '0'),
})
self._test_qrels('clirmatrix/fr/multi8/en/train', count=1000000, items={
0: TrecQrel('8221', '45187', 6, '0'),
9: TrecQrel('8221', '1331378', 4, '0'),
999999: TrecQrel('1786', '9567503', 0, '0'),
})
self._test_qrels('clirmatrix/de/multi8/en/train', count=1000000, items={
0: TrecQrel('8221', '5204', 6, '0'),
9: TrecQrel('8221', '1092811', 4, '0'),
999999: TrecQrel('1786', '10264293', 0, '0'),
})
if __name__ == '__main__':
unittest.main()
|
"""
Python 3.6
PyTorch 0.4
"""
from abc import ABC, abstractmethod
from functools import partialmethod
import logging
import os
import torch
import utils
import Networks.MNIST_model as MNIST_model
import Networks.DCGAN_64 as DCGAN_64
Models = {'mnist': MNIST_model,
'fashion_mnist': MNIST_model,
'small-mnist':MNIST_model,
'cifar10': DCGAN_64,
}
class AbstractModel(ABC):
""" the abstract class of each model """
def __init__(self, dataset, hidden_dim, tanh = True, gpu_mode = True, lr = 1e-4, **kwargs):
super(AbstractModel, self).__init__()
# parameter
self.dataset = dataset.lower()
self.hidden_dim = hidden_dim
self.tanh = tanh
self.lr = lr
self.model_name = None
self.traversal_code_limit = 6
# device
self.device = torch.device('cuda:0') if gpu_mode else torch.device('cpu')
self.num_visual_samples = 6**2
self.fixed_noise = self.get_noise( self.num_visual_samples )
logging.debug('AbstractModel initialized.')
@abstractmethod
def init_net_arch(self, specified_net_arch = None):
raise NotImplementedError
# models = Models[self.dataset] if specified_net_arch == None else specified_net_arch
# # your G E D R or something
# self.name_model_dict = {'Encoder': self.E, 'Decoder': self.G}
# self.init_net_component(**self.name_model_dict)
def init_net_component(self, **nets):
""" initialise a network component """
for name, net in nets.items():
# info
            logging.debug(f'Initializing {name} of {self.model_name}.')
# init weights
utils.initialize_weights_kaiming_normal(net)
# cuda() or cpu()
net.to(self.device)
# print net arch info
logging.debug( utils.network_num_parameters(net) )
logging.debug( str(net) )
@abstractmethod
def init_optimizer(self):
raise NotImplementedError
# beta1, beta2 = 0.5, 0.99
# self.G_optimizer = optim.Adam(self.G.parameters(), lr=self.lr, betas=(beta1, beta2), weight_decay = 0)
# self.D_optimizer = optim.Adam(self.D.parameters(), lr=self.lr, betas=(beta1, beta2), weight_decay = 0)
# utils.print_line()
# print('Use ADAM optimizers for G and D.')
def set_fixed_noise(self, num_visual_samples):
""" reset the numbe of fixed visual samples """
self.num_visual_samples = num_visual_samples
self.fixed_noise = self.get_noise( num_visual_samples )
# ================ training part ================
def get_noise(self, num_ = 1):
""" get the noise in the hidden space of the predefined distribution """
out_noise = torch.randn(num_, self.hidden_dim, device=self.device)
return out_noise
@abstractmethod
def stepTraining(self, batch_x):
""" training of each step, implemented by the model """
raise NotImplementedError
# ================ sampling part ================
def sample(self, in_noise):
""" sample num_visual_samples images from current G model. """
assert in_noise.size(1) == self.hidden_dim
self.G.eval()
in_noise = in_noise.to(self.device)
with torch.no_grad():
samples = self.G( in_noise )
return samples.cpu()
def sampleN(self, N):
""" N random samples from self.sample(). """
return self.sample( self.get_noise( max(N, 1) ) )
sampleOne = partialmethod(sampleN, N = 1)
def sample_fixed(self):
""" sample from the fixed noise """
return self.sample(self.fixed_noise)
def sample_yielder(self, num = 0):
""" a python generator of the generator. Gives a random new sample. """
if num == 0:
while True:
yield self.sampleOne()
else:
for _ in range(num):
yield self.sampleOne()
def encode(self, X):
""" encode and decode the samples X
with the encoder and the decoder of the model.
"""
if not hasattr(self, 'E'):
logging.warning(f'{self.__name__} does not have Encoder.')
return None
self.E.eval()
X = X.to(self.device)
with torch.no_grad():
z = self.E( X )
mu, log_var = torch.chunk(z, 2, dim=1) # mean and log variance.
return mu.cpu()
def reconstruct(self, X):
""" encode and decode the samples X
with the encoder and the decoder of the model.
"""
if not hasattr(self, 'E'):
            logging.warning(f'{self.model_name} does not have an Encoder.')
return None
self.E.eval()
self.G.eval()
X = X.to(self.device)
with torch.no_grad():
z = self.E( X )
mu, log_var = torch.chunk(z, 2, dim=1) # mean and log variance.
samples = self.G( mu )
return samples.cpu()
def latent_traversal_dim(self, dim, num_range = 61):
"""
        Generate the samples as the specified hidden code dimension varies.
        Return a list of torch tensors.
        num_range : the number of values the code takes along the traversal
"""
code_variant = torch.linspace(-self.traversal_code_limit, self.traversal_code_limit, num_range).to(self.device)
zeros = torch.zeros(1, self.hidden_dim, device = self.device)
images = []
# each sub picture
for varying_code in code_variant:
this_code = torch.cat( [ self.fixed_noise.clone(), zeros ], 0 )
this_code[:, dim] = varying_code
samples = self.sample(this_code)
images.append(samples)
return images
def latent_traversal_given_samples_dim(self, X, dim, num_range =61):
"""
        Reconstruct the sample sequence as the specified hidden code dimension varies.
        Return a list of torch tensors.
        num_range : the number of values the code takes along the traversal
"""
# encode the samples
codes = self.encode(X)
        logging.debug(f'{self.model_name}: traversal code limit {self.traversal_code_limit}')
code_variant = torch.linspace(-self.traversal_code_limit, self.traversal_code_limit, num_range).to(self.device)
# reconstruct
images = []
# each sub picture
for varying_code in code_variant:
this_code = codes.clone().to(self.device)
this_code[:, dim] = varying_code
samples = self.sample(this_code)
images.append(samples)
return images
# ================ Save / Load part ================
def save(self, save_path):
""" save models in the specific save path """
for name, model in self.name_model_dict.items():
torch.save(model.state_dict(), os.path.join(save_path, f"{name}.pkl") )
logging.info("Models saving completed!")
def load(self, save_path):
""" load models in the specific save path """
flag = True
for name, model in self.name_model_dict.items():
try:
model.load_state_dict( torch.load(os.path.join(save_path, f"{name}.pkl")) )
except FileNotFoundError as e:
logging.critical(f'The model {name} is not found!')
flag = False
if flag:
logging.info("Models loading completed!")
|
from datetime import datetime
from typing import List, Optional, Any
from sqlalchemy.orm import Session
from lib import aes
from lib.account.account import Account
from lib.account.account_entity import AccountEntity
from lib.account.account_entity_adapter import AccountEntityAdapter
from lib.db import session_scope
from lib.kms import Kms
class AccountRepository:
session: Session
kms: Kms
def __init__(self, session: Session, kms: Kms):
self.session = session
self.kms = kms
def get_all_active_accounts(self, alias: Optional[str] = None) -> List[Account]:
db: Session
accounts: List[AccountEntity]
with session_scope(self.session) as db:
start_of_day = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
            query = db.query(AccountEntity) \
                .filter(AccountEntity.enabled, AccountEntity.expired_at >= start_of_day)
            if alias:
                query = query.filter(AccountEntity.alias == alias)
accounts = query.all()
return [AccountEntityAdapter(self.__decrypt(account)) for account in accounts]
def get_active_account(self) -> Optional[Account]:
db: Session
account: AccountEntity
with session_scope(self.session) as db:
account = db.query(AccountEntity) \
.filter_by(enabled=True) \
.first()
return AccountEntityAdapter(self.__decrypt(account)) if account is not None else None
def add_account(self, vendor: str, access_key: str, secret_key: str, expired_at: datetime, alias: str):
db: Session
account: AccountEntity
data_key, plain_key = self.kms.create_data_key()
key = plain_key.decode("utf-8")
access_key_enc = aes.encrypt(plaintext=access_key, key=key)
secret_key_enc = aes.encrypt(plaintext=secret_key, key=key)
with session_scope(self.session) as db:
account = AccountEntity(
vendor=vendor,
enabled=True,
access_key=access_key_enc,
secret_key=secret_key_enc,
expired_at=expired_at,
alias=alias,
data_key=data_key
)
db.add(account)
def __decrypt(self, account: AccountEntity):
key = self.kms.decrypt_data_key(blob=account.data_key)
account.access_key = aes.decrypt(cipher_text=account.access_key, key=key)
account.secret_key = aes.decrypt(cipher_text=account.secret_key, key=key)
return account
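# A usage sketch (hypothetical wiring; the concrete Session and Kms instances
# come from the application's own setup, and the vendor/alias values below are
# placeholders):
#
#   repo = AccountRepository(session=session, kms=kms)
#   active_accounts = repo.get_all_active_accounts(alias="main")
#   repo.add_account(vendor="example", access_key="...", secret_key="...",
#                    expired_at=datetime(2030, 1, 1), alias="main")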
|
# ---------------------------------------------------------------------
# Report Discovery Link Summary
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Third-party modules
from collections import defaultdict
# NOC modules
from noc.lib.app.simplereport import SimpleReport, PredefinedReport, SectionRow, TableColumn
from noc.lib.app.reportdatasources.base import ReportModelFilter
from noc.main.models.pool import Pool
from noc.sa.models.managedobject import ManagedObject
from noc.core.translation import ugettext as _
class ReportFilterApplication(SimpleReport):
title = _("Discovery Links Summary")
predefined_reports = {"default": PredefinedReport(_("Discovery Links Summary"), {})}
save_perc = None
def calc_percent(self, column, val):
if column != _("All polling"):
if val == 0:
return "%.2f %%" % 100
else:
r = "%.2f %%" % ((val / float(self.save_perc)) * 100)
# self.save_perc = None
return r
elif column == _("All polling"):
self.save_perc = val
return ""
def get_data(self, request, **kwargs):
columns, columns_desr = [], []
r_map = [
(_("All polling"), "2is1.6is1.9a2"), # "Is Managed, object type defined"
(_("0"), "2is1.6is1.9a2.3hs0"), # "Has 0 Links w type defined"
(_("1"), "2is1.6is1.3hs2"), # "Has 1 links"
(_("2"), "2is1.6is1.3hs3"), # "Has 2 links"
(_("More 3"), "2is1.6is1.3hs4"), # "Has more 3 links"
]
for x, y in r_map:
columns += [y]
columns_desr += [x]
report = ReportModelFilter()
result = report.proccessed(",".join(columns))
summary = defaultdict(int)
data = []
# url = "/sa/reportstat/repstat_download/?report=%s"
url = "/sa/reportobjectdetail/download/?" + "&".join(
[
"o_format=xlsx",
"columns=object_name,object_address,object_profile,object_status,profile_name,admin_domain,segment",
"detail_stat=%s&pool=%s",
]
)
for p in Pool.objects.filter().order_by("name"):
m = []
moss = set(ManagedObject.objects.filter(pool=p).values_list("id", flat=True))
for col in columns:
m += [len(result[col.strip()].intersection(moss))]
summary[col] += m[-1]
data += [SectionRow(name=p.name)]
data += [
(x, y, self.calc_percent(x, y), url % (columns[columns_desr.index(x)], p.name))
for x, y in zip(columns_desr, m)
]
return self.from_dataset(
title=self.title,
columns=[
_("Links count"),
_("MO Count"),
_("Percent at All"),
TableColumn(_("Detail"), format="url"),
],
data=data,
)
|
# import argparse
import logging.config
import optparse
import os
from elastichq import create_app
from elastichq.globals import socketio
from elastichq.utils import find_config
default_host = '0.0.0.0'
default_port = 5000
default_debug = False
default_enable_ssl = False
default_ca_certs = None
default_verify_certs = True
default_client_key = None
default_client_cert = None
default_url = 'http://localhost:9200'
is_gunicorn = "gunicorn" in os.environ.get("SERVER_SOFTWARE", "")
application = create_app()
# set default url, override with env for docker
application.config['DEFAULT_URL'] = os.environ.get('HQ_DEFAULT_URL', default_url)
application.config['ENABLE_SSL'] = os.environ.get('HQ_ENABLE_SSL', default_enable_ssl)
application.config['CA_CERTS'] = os.environ.get('HQ_CA_CERTS', default_ca_certs)
application.config['HQ_VERIFY_CERTS'] = os.environ.get('HQ_VERIFY_CERTS', default_verify_certs)
application.config['DEBUG'] = os.environ.get('HQ_DEBUG', default_debug)
application.config['CLIENT_KEY'] = os.environ.get('CLIENT_KEY', default_client_key)
application.config['CLIENT_CERT'] = os.environ.get('CLIENT_CERT', default_client_cert)
if os.environ.get('HQ_DEBUG') == 'True':
config = find_config('logger_debug.json')
logging.config.dictConfig(config)
if __name__ == '__main__':
# Set up the command-line options
parser = optparse.OptionParser()
parser.add_option("-H", "--host",
help="Hostname of the Flask app " + \
"[default %s]" % default_host,
default=default_host)
parser.add_option("-P", "--port",
help="Port for the Flask app " + \
"[default %s]" % default_port,
default=default_port)
parser.add_option("-d", "--debug",
action="store_true", dest="debug", default=default_debug,
help=optparse.SUPPRESS_HELP)
parser.add_option("-u", "--url", default=default_url)
parser.add_option("-s", "--enable-ssl",
action="store_true", default=default_enable_ssl)
parser.add_option("-c", "--ca-certs", default=default_ca_certs,
                      help='Required when --enable-ssl is set. ' + \
'Path to CA file or directory [default %s]' % default_ca_certs)
parser.add_option("-v", "--verify_certs", default=default_verify_certs,
help='Set to False when using self-signed certs.')
parser.add_option("-x", "--client_cert", default=default_client_cert,
help='Set to path of the client cert file.')
parser.add_option("-X", "--client_key", default=default_client_key,
help='Set to path of the client key file.')
options, _ = parser.parse_args()
application.config['DEFAULT_URL'] = os.environ.get('HQ_DEFAULT_URL', options.url)
application.config['ENABLE_SSL'] = os.environ.get('HQ_ENABLE_SSL', options.enable_ssl)
application.config['CA_CERTS'] = os.environ.get('HQ_CA_CERTS', options.ca_certs)
application.config['VERIFY_CERTS'] = os.environ.get('HQ_VERIFY_CERTS', options.verify_certs)
application.config['CLIENT_KEY'] = os.environ.get('CLIENT_KEY', options.client_key)
application.config['CLIENT_CERT'] = os.environ.get('CLIENT_CERT', options.client_cert)
hq_host = os.environ.get('HQ_HOST', options.host)
hq_port = os.environ.get('HQ_PORT', options.port)
if is_gunicorn:
if options.debug:
config = find_config('logger_debug.json')
logging.config.dictConfig(config)
# we set reloader False so gunicorn doesn't call two instances of all the Flask init functions.
socketio.run(application, hq_host, hq_port, debug=options.debug, use_reloader=False)
else:
if options.debug:
config = find_config('logger_debug.json')
logging.config.dictConfig(config)
socketio.run(application, hq_host, hq_port, debug=options.debug)
|
'''99 Units of Disposable Asset'''
from itertools import chain
# main :: IO ()
def main():
'''Modalised asset dispersal procedure.'''
# localisation :: (String, String, String)
localisation = (
'on the wall',
'Take one down, pass it around',
'Better go to the store to buy some more'
)
print((unlines(list(map(
incantation(localisation),
enumFromThenTo(99)(98)(0)
)))))
# incantation :: (String, String, String) -> Int -> String
def incantation(localisation):
'''Versification of asset disposal
and inventory update.'''
location, distribution, solution = localisation
def inventory(n):
return unwords([asset(n), location])
return lambda n: solution if 0 == n else (
unlines([
inventory(n),
asset(n),
distribution,
inventory(pred(n))
])
)
# asset :: Int -> String
def asset(n):
'''Quantified asset.'''
def suffix(n):
return [] if 1 == n else 's'
return unwords([
str(n),
concat(reversed(concat(cons(suffix(n))(["elttob"]))))
])
# GENERIC -------------------------------------------------
# concat :: [[a]] -> [a]
# concat :: [String] -> String
def concat(xxs):
'''The concatenation of all the elements in a list.'''
xs = list(chain.from_iterable(xxs))
unit = '' if isinstance(xs, str) else []
return unit if not xs else (
''.join(xs) if isinstance(xs[0], str) else xs
)
# cons :: a -> [a] -> [a]
def cons(x):
'''Construction of a list from x as head,
and xs as tail.'''
return lambda xs: [x] + xs if (
isinstance(xs, list)
) else chain([x], xs)
# enumFromThenTo :: Int -> Int -> Int -> [Int]
def enumFromThenTo(m):
'''Integer values enumerated from m to n
with a step defined by nxt-m.'''
def go(nxt, n):
d = nxt - m
return list(range(m, d + n, d))
return lambda nxt: lambda n: (
go(nxt, n)
)
# pred :: Enum a => a -> a
def pred(x):
'''The predecessor of a value. For numeric types, (- 1).'''
return x - 1 if isinstance(x, int) else (
chr(ord(x) - 1)
)
# unlines :: [String] -> String
def unlines(xs):
'''A single string derived by the intercalation
of a list of strings with the newline character.'''
return '\n'.join(xs)
# unwords :: [String] -> String
def unwords(xs):
'''A space-separated string derived from
a list of words.'''
return ' '.join(xs)
if __name__ == '__main__':
main()
|
x = [ [5,2,3], [10,8,9] ]
students = [
{'first_name': 'Michael', 'last_name' : 'Jordan'},
{'first_name' : 'John', 'last_name' : 'Rosales'}
]
sports_directory = {
'basketball' : ['Kobe', 'Jordan', 'James', 'Curry'],
'soccer' : ['Messi', 'Ronaldo', 'Rooney']
}
z = [ {'x': 10, 'y': 20} ]
# Change the value 10 in x to 15. x will now be [ [5,2,3], [15,8,9] ].
x[1][0] = 15
print(x)
# Change the last_name of the first student from 'Jordan' to 'Bryant'
students[0]['last_name'] = "Bryant"
print(students)
# In the sports_directory, change 'Messi' to 'Andres'
sports_directory['soccer'][0] = 'Andres'
print( sports_directory['soccer'])
# Change the value 20 in z to 30
z[0]['y'] = 30
print(z)
# Iterating Through a List of Dictionaries
# Create a function iterateDictionary(some_list) that, given a list of dictionaries,
# the function loops through each dictionary in the list and prints each key and the
# associated value. For example, given the following list:
students = [
{'first_name': 'Michael', 'last_name' : 'Jordan'},
{'first_name' : 'John', 'last_name' : 'Rosales'},
{'first_name' : 'Mark', 'last_name' : 'Guillen'},
{'first_name' : 'KB', 'last_name' : 'Tonel'}
]
def iterate_dictionary(some_list):
    for i in range(0, len(some_list)):
        output = ""
        for key, val in some_list[i].items():
            output += f" {key} - {val},"
        print(output)
iterate_dictionary(students)
# this will output: (it's okay if each key-value pair
# ends up on 2 separate lines)
# bonus section: bonus to get them to appear exactly as below:
# first_name - Michael, last_name - Jordan
# first_name - John, last_name - Rosales
# first_name - Mark, last_name - Guillen
# first_name - KB, last_name - Tonel
# getting value from the list of dictionaries
# create a function iterateDictionary2(key_name, some_list)
# that given a list of dictionaries and a key name,
# the function prints the value stored in that key for each dictionary.
# For example, iterateDictionary2('first_name', students) should output:
# Michael
# John
# Mark
# KB
def iterate_dictionary2(key_name, some_list):
    for i in range(0, len(some_list)):
        for key, val in some_list[i].items():
            if key == key_name:
                print(val)
iterate_dictionary2('first_name',students)
iterate_dictionary2('last_name',students)
# And
# iterateDictionary2('last_name', students) should output:
# Jordan
# Rosales
# Guillen
# Tonel
# Iterating through the dictionary with list values
# Create a function printInfo(some_dict)
# that given a dictionary whose values are all lists,
# prints the name of each key along with the size of its list,
# and then prints the associated values within each key's list.
# For example:
dojo = {
'locations': ['San Jose', 'Seattle', 'Dallas', 'Chicago', 'Tulsa', 'DC', 'Burbank'],
'instructors': ['Michael', 'Amy', 'Eduardo', 'Josh', 'Graham', 'Patrick', 'Minh', 'Devon']
}
def print_info(some_dict):
    for key, val in some_dict.items():
        print("--------------") # separation of the print statements
        print(f"{len(val)} {key.upper()}")
        for i in range(0, len(val)):
            print(val[i])
print_info(dojo)
# output:
# 7 LOCATIONS
# San Jose
# Seattle
# Dallas
# Chicago
# Tulsa
# DC
# Burbank
# output after break:
# 8 INSTRUCTORS
# Michael
# Amy
# Eduardo
# Josh
# Graham
# Patrick
# Minh
# Devon
|
from types import MethodType
from PySide6.QtCore import QMimeData, QModelIndex, Slot
from PySide6.QtGui import QIcon
from PySide6.QtWidgets import QGridLayout, QHBoxLayout, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QTableWidget, QTextEdit, QWidget, QLabel
from models.datasheet import DatasheetCollection
from models.settings import Settings
from LocalLogging.logger import LoggerBase
from models.tags import Tag, TagManager
class TagView(QWidget):
#region Init
def __init__(
self,
logger: LoggerBase,
settings: Settings,
tagManager: TagManager,
datasheets: DatasheetCollection,
updateTagsCallback: MethodType,
parent=None
) -> None:
super(TagView, self).__init__(parent)
#region Parameters
self.tagManager: TagManager = tagManager
self.datasheets = datasheets
self.settings = settings
self.logger = logger
self.selectedTag: Tag = None
self.updateTagsCallback = updateTagsCallback
#endregion
#region Tag List
self.tagList = QListWidget()
self.tagList.itemClicked.connect(self.selectedTagChanged)
# self.updateTags()
#endregion
#region Controls
self.controlsLayout = QHBoxLayout()
self.addTagBtn = QPushButton(QIcon(), 'Add')
self.addTagBtn.clicked.connect(self.addTag)
self.controlsLayout.addWidget(self.addTagBtn)
self.addText = QLineEdit('')
self.addText.setPlaceholderText('New Tag')
self.addText.returnPressed.connect(self.addTag)
self.controlsLayout.addWidget(self.addText)
self.removeTagBtn = QPushButton(QIcon(), 'Remove')
self.removeTagBtn.clicked.connect(self.removeTag)
self.controlsLayout.addWidget(self.removeTagBtn)
#endregion
#region Layout
self.mainLayout = QGridLayout()
self.mainLayout.addLayout(self.controlsLayout, 0, 0)
self.mainLayout.addWidget(self.tagList, 1, 0)
self.setLayout(self.mainLayout)
#endregion
#endregion
#region Methods
def updateTags(self):
tags = self.tagManager.getTags()
self.tagList.clear()
for tag in tags:
newItem = QListWidgetItem(QIcon('./src/Resources/Icons/TagIcon.png'), tag.name)
self.tagList.addItem(newItem)
self.updateTagsCallback()
@Slot()
def addTag(self):
self.tagManager.add(self.addText.text())
self.updateTags()
@Slot()
def removeTag(self):
        if self.selectedTag is not None:
self.datasheets.deleteTag(self.selectedTag)
self.tagManager.remove(self.selectedTag)
self.updateTags()
@Slot()
def selectedTagChanged(self, args: QListWidgetItem):
self.selectedTag = self.tagManager.find(args.text())
#endregion
|
#!/usr/bin/python
# -*- coding:utf8 -*-
# Add a lock for thread safety
import time
import threading
class Singleton(object):
_instance_lock = threading.Lock()
def __init__(self):
time.sleep(1)
print(self)
def __new__(cls, *args, **kwargs):
with cls._instance_lock:
if not hasattr(cls, '_instance'):
cls._instance = super(Singleton, cls).__new__(cls)
return cls._instance
def task():
obj = Singleton()
print(id(obj))
for i in range(10):
t = threading.Thread(target=task)
t.start()
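# A possible refinement (illustrative sketch, not part of the original code):
# double-checked locking re-tests for the instance inside the lock, so threads
# that arrive after the singleton exists skip the lock acquisition entirely.
class SingletonDCL(object):
    _instance_lock = threading.Lock()
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, '_instance'):
            with cls._instance_lock:
                # re-check inside the lock in case another thread created it first
                if not hasattr(cls, '_instance'):
                    cls._instance = super(SingletonDCL, cls).__new__(cls)
        return cls._instance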
|
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2017, SLAC National Laboratory / Kisensum Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor SLAC / Kisensum,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# SLAC / Kisensum. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# }}}
import time
import pytest
import gevent
import requests
from volttron.platform import get_services_core
DRIVER_NAME = 'sep2'
DEVICE_ID = "097935300833"
TEST_CONFIG = {
"devices": [
{
"sfdi": "097935300833",
"lfdi": "247bd68e3378fe57ba604e3c8bdf9e3f78a3d743",
"load_shed_device_category": "0200",
"pin_code": "130178"
},
{
"sfdi": "111576577659",
"lfdi": "2990c58a59935a7d5838c952b1a453c967341a07",
"load_shed_device_category": "0200",
"pin_code": "130178"
}
],
"sep2_server_sfdi": "413707194130",
"sep2_server_lfdi": "29834592834729384728374562039847629",
"load_shed_device_category": "0020",
"timezone": "America/Los_Angeles"
}
REGISTRY_CONFIG_STRING = """Volttron Point Name,SEP2 Resource Name,SEP2 Field Name,Units,Writable,Default
b1_Md,DeviceInformation,mfModel,NA,FALSE,NA
b1_Opt,DeviceInformation,lFDI,NA,FALSE,NA
b1_SN,DeviceInformation,sFDI,NA,FALSE,NA
b1_Vr,DeviceInformation,mfHwVer,NA,FALSE,NA
b113_A,MirrorMeterReading,PhaseCurrentAvg,NA,FALSE,NA
b113_DCA,MirrorMeterReading,InstantPackCurrent,A,FALSE,NA
b113_DCV,MirrorMeterReading,LineVoltageAvg,V,FALSE,NA
b113_DCW,MirrorMeterReading,PhasePowerAvg,W,FALSE,NA
b113_PF,MirrorMeterReading,PhasePFA,%,FALSE,NA
b113_WH,MirrorMeterReading,EnergyIMP,Wh,FALSE,NA
b120_AhrRtg,DERCapability,rtgAh,Ah,FALSE,NA
b120_ARtg,DERCapability,rtgA,A,FALSE,NA
b120_MaxChaRte,DERCapability,rtgMaxChargeRate,W,FALSE,NA
b120_MaxDisChaRte,DERCapability,rtgMaxDischargeRate,W,FALSE,NA
b120_WHRtg,DERCapability,rtgWh,Wh,FALSE,NA
b120_WRtg,DERCapability,rtgW,W,FALSE,NA
b121_WMax,DERSettings,setMaxChargeRate,W,FALSE,NA
b122_ActWh,MirrorMeterReading,EnergyEXP,Wh,FALSE,NA
b122_StorConn,DERStatus,storConnectStatus,NA,FALSE,NA
b124_WChaMax,DERControl,DERControlBase.opModFixedFlow,W,TRUE,NA
b403_Tmp,MirrorMeterReading,InstantPackTemp,C,FALSE,NA
b404_DCW,PowerStatus,PEVInfo.chargingPowerNow,W,FALSE,NA
b404_DCWh,DERAvailability,SOC,Wh,FALSE,NA
b802_LocRemCtl,DERStatus,localControlModeStatus,NA,FALSE,NA
b802_SoC,DERStatus,inverterStatus,%,FALSE,NA
b802_State,DERStatus,stateOfChargeStatus,NA,FALSE,NA"""
ASSERTED_VALUES = {
'b1_Md': 'Mf Model',
'b1_SN': '097935300833',
'b1_Vr': 'MF-HW: 1.0.0',
'b113_A': '24.0',
'b113_DCA': '125.0',
'b113_DCV': '125.0',
'b113_DCW': '125.0',
'b113_PF': '126.0',
'b113_WH': '127.0',
'b120_AhrRtg': '350.0',
'b120_ARtg': '330.0',
'b120_MaxChaRte': '220.0',
'b120_MaxDisChaRte': '10.0',
'b120_WHRtg': '1230.0',
'b120_WRtg': '10.0',
'b121_WMax': '20.0',
'b122_ActWh': '128.0',
'b122_StorConn': '777',
'b124_WChaMax': '10.0',
'b403_Tmp': '128000.0',
'b404_DCW': '3000.0',
'b404_DCWh': '305.755555556',
'b802_LocRemCtl': '777',
'b802_SoC': '777',
'b802_State': '7.77'}
web_address = ""
@pytest.fixture(scope="module")
def agent(request, volttron_instance_module_web):
test_agent = volttron_instance_module_web.build_agent()
# Configure a SEP2 device in the Master Driver
test_agent.vip.rpc.call('config.store', 'manage_delete_store', 'platform.driver').get(timeout=10)
test_agent.vip.rpc.call('config.store', 'manage_store', 'platform.driver',
'devices/{}'.format(DRIVER_NAME),
"""{
"driver_config": {
"sfdi": "097935300833",
"sep2_agent_id": "test_sep2agent"
},
"campus": "campus",
"building": "building",
"unit": "sep2",
"driver_type": "sep2",
"registry_config": "config://sep2.csv",
"interval": 15,
"timezone": "US/Pacific",
"heart_beat_point": "Heartbeat"
}""",
'json').get(timeout=10)
test_agent.vip.rpc.call('config.store', 'manage_store', 'platform.driver',
'sep2.csv',
REGISTRY_CONFIG_STRING,
'csv').get(timeout=10)
# Install and start a MasterDriverAgent
md_id = volttron_instance_module_web.install_agent(agent_dir=get_services_core("MasterDriverAgent"),
config_file={},
start=True)
print('master driver agent id: ', md_id)
# Install and start a SEP2Agent
sep2_id = volttron_instance_module_web.install_agent(agent_dir=get_services_core("SEP2Agent"),
config_file=TEST_CONFIG,
vip_identity='test_sep2agent',
start=True)
print('sep2 agent id: ', sep2_id)
global web_address
web_address = volttron_instance_module_web.bind_web_address
def stop():
volttron_instance_module_web.stop_agent(md_id)
volttron_instance_module_web.stop_agent(sep2_id)
test_agent.core.stop()
gevent.sleep(10) # wait for agents and devices to start
request.addfinalizer(stop)
return test_agent
class TestSEP2Driver:
"""Regression tests for the SEP2 driver."""
def test_all_points(self, agent):
self.put_sep2_data('edev/0/di', 'edev.di') # device_information
self.put_sep2_data('edev/0/der/1/derg', 'der.derg') # der_settings
self.put_sep2_data('edev/0/der/1/ders', 'der.ders') # der_status
self.put_sep2_data('edev/0/der/1/dera', 'der.dera') # der_availability
        self.put_sep2_data('edev/0/der/1/dercap', 'der.dercap') # der_capability
self.put_sep2_data('edev/0/ps', 'edev.ps') # power_status
self.put_sep2_data('mup', 'mup.mup') # mup
self.put_sep2_data('mup/0', 'mup.mup2') # mup (update)
self.put_sep2_data('mup/0', 'mup.mmr') # mmr
# Wait a few seconds to allow the HTTP requests to be processed (asynchronously?)
time.sleep(5)
# Set the one settable point, the dispatched power value, and test that it comes back on a get_point
dispatch_point_name = 'b124_WChaMax'
dispatched_value = ASSERTED_VALUES[dispatch_point_name]
self.set_point(agent, dispatch_point_name, dispatched_value)
assert self.get_point(agent, dispatch_point_name) == dispatched_value
# Test that each point has the test value that was posted to it
        for point_name, expected_value in ASSERTED_VALUES.items():
assert self.get_point(agent, point_name) == expected_value
@staticmethod
def get_point(test_agent, point_name):
return test_agent.vip.rpc.call('platform.driver', 'get_point', DRIVER_NAME, point_name).get(timeout=10)
@staticmethod
def set_point(test_agent, point_name, value):
return test_agent.vip.rpc.call('platform.driver', 'set_point', DRIVER_NAME, point_name, value).get(timeout=10)
@staticmethod
def put_sep2_data(sep2_resource_name, sep2_filename):
"""
PUT data for a SEP2 resource, using the contents of an XML file in the current directory.
@param sep2_resource_name: The distinguishing part of the name of the SEP2 resource as it appears in the URL.
@param sep2_filename: The distinguishing part of the SEP2 sample data file name.
"""
url = '{}/dcap/{}'.format(web_address, sep2_resource_name)
headers = {'content-type': 'application/sep+xml'}
requests.post(url,
data=open(get_services_core("SEP2Agent/tests/{}.PUT.xml".format(sep2_filename)), 'rb'),
headers=headers)
|
import os
import unittest
from pathlib import Path
from pyutil import IOUtils
from .TestSupport import TestSupport
class test_IOUtils(unittest.TestCase):
def test_cd(self):
with TestSupport.get_playground_path():
oldpath = Path.cwd()
testpath = Path("./aaa").resolve()
testpath.mkdir()
with IOUtils.cd(testpath):
# Checks if changed directory successfully
self.assertEqual(testpath, Path.cwd())
# end with
# Checks if returned to old directory successfully
self.assertEqual(oldpath, Path.cwd())
# end with
return
if __name__ == '__main__':
unittest.main()
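# For reference, a directory-changing context manager like IOUtils.cd is often
# implemented roughly like this (an illustrative sketch, not pyutil's code):
#
#   import contextlib, os
#   @contextlib.contextmanager
#   def cd(path):
#       prev = os.getcwd()
#       os.chdir(path)
#       try:
#           yield
#       finally:
#           os.chdir(prev)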
|
"""
This app was created to specifically monitor the
OESS-FVD communication. It could be used to generate alarms
when a packetIn is received with current time sent by the FVD
too high compared with the time when the packet was
captured.
"""
from datetime import datetime, timedelta
from libs.core.debugging import debugclass
from libs.core.topo_reader import TopoReader
from libs.tcpiplib.process_data import get_protocol
OFP_PACKET_IN = 10
OFP_PACKET_OUT = 13
@debugclass
class OessFvdTracer:
"""
OessFvdTracer is an app to evaluate the OESS FVD app.
"""
WARN = 10
CRITICAL = 30
def __init__(self, options):
self.links = dict()
self.layout = '%-20s %-14s %-30s %-30s %s'
self.starting()
self.last_printed = None
self.get_params(options)
@staticmethod
def starting():
"""
Just print the app name
"""
print('OESS Forwarding Verification Monitoring')
def get_params(self, options):
"""
        Process params provided via CLI, e.g. "10:30" sets WARN to 10 seconds
        and CRITICAL to 30 seconds.
"""
self.WARN = int(options.split(":")[0])
self.CRITICAL = int(options.split(":")[1])
def process_packet(self, pkt):
"""
Method called by ofp_sniffer to process the IP+OF packet
We are only interested in Packet_Ins because these are
messages coming from the switch, which means, the end of
the OESS FV cycle:
(OESS -> packetOut -> dpid -> packetIn -> OESS)
Args:
pkt: Packet class
"""
for msg in pkt.ofmsgs:
if msg.ofp.header.message_type in [OFP_PACKET_IN]:
fvd = get_protocol(msg.ofp.data, oess=True)
if fvd is not False:
self.add_link(fvd, pkt.l1.time)
def add_link(self, fvd, capture_time):
"""
Add detected OESS link to self.links dictionary
Args:
fvd: OESS class
            capture_time: time when the packet was captured
by ofp_sniffer
"""
if fvd.side_a not in self.links:
self.links[fvd.side_a] = dict()
capture_time = datetime.strptime(capture_time, '%Y-%m-%d %H:%M:%S.%f')
time_diff = self.calculate_time_diff(capture_time, fvd.timestamp)
self.links[fvd.side_a][fvd.port_a] = {'remote': fvd.side_z,
'port': fvd.port_z,
'timestamp': fvd.timestamp,
'last_seen': capture_time,
'diff': time_diff}
self.print_link_status(fvd.side_a, fvd.port_a)
@staticmethod
def calculate_time_diff(capture_time, oess_time):
"""
Calculate the time difference between packet sent via PacketOut
and the packet received via PacketIn.
Args:
capture_time: PacketIn time
oess_time: PacketOut time
Returns:
difference
"""
return capture_time - datetime.fromtimestamp(oess_time)
def print_link_status(self, dpid, port, alert=False):
"""
        For now, just print the detected OESS link. The idea of this method
        is to generate alarms when the difference between the time the packet
        is seen by ofp_sniffer and the time it was sent exceeds the WARN or
        CRITICAL thresholds.
Args:
dpid: source DPID in the OESS message
port: source port in the OESS message
alert: print only warning and critical
"""
link = self.links[dpid][port]
timestamp = str(datetime.fromtimestamp(link['timestamp']))
topo_link = TopoReader().get_link_aliases(dpid, port, link['remote'],
link['port'], option="Full")
source_dpid = TopoReader().get_datapath_name(dpid)
if link['diff'] < timedelta(seconds=0):
print("Time Difference is < 0. Adjust NTP of the OF controller")
elif timedelta(seconds=self.CRITICAL) > link['diff'] >= timedelta(seconds=self.WARN):
link['diff'] = str(link['diff']) + ' <-- Warning!'
alert = True
elif link['diff'] >= timedelta(seconds=self.CRITICAL):
link['diff'] = str(link['diff']) + ' <-- Critical!'
alert = True
if alert:
if len(topo_link) > 0:
self.print_header(True)
print(self.layout % (topo_link, source_dpid, timestamp,
link['last_seen'], link['diff']))
else:
self.print_header()
print('%-24s %-4s %-24s %-4s %s\t %s\t %s' %
(dpid, port, link['remote'], link['port'], timestamp,
link['last_seen'], link['diff']))
def print_header(self, topo_link=False):
"""
Print headers just once. In case it keeps changing (because link
was not found in the topology.json), prints the header again.
Args:
topo_link: indicates if link was found in the topology.json
"""
if topo_link and self.last_printed in [None, 'not_topo_link']:
print(self.layout % ('Link', 'Source DPID', 'Sent by OESS-FVD',
'Received by OFP_Sniffer', 'Delay'))
self.last_printed = 'topo_link'
elif not topo_link and self.last_printed in [None, 'topo_link']:
print('%-24s %-4s %-24s %-4s %s\t\t\t\t\t %s\t\t\t\t\t\t %s' %
('DPID', 'Port', 'Neighbor', 'Port', 'Sent', 'Seen', 'Delay'))
self.last_printed = 'not_topo_link'
|
import gc
import time
import tensorflow as tf
import numpy as np
from hyperka.et_apps.util import embed_init, glorot, zeros
from hyperka.hyperbolic.poincare import PoincareManifold
from hyperka.et_funcs.test_funcs import eval_type_hyperbolic
class GCNLayer:
def __init__(self,
adj,
input_dim,
output_dim,
layer_id,
poincare,
bias=True,
act=None,
name=""):
self.poincare = poincare
self.bias = bias
self.act = act
self.adj = adj
with tf.compat.v1.variable_scope(name + "_gcn_layer_" + str(layer_id)):
self.weight_mat = tf.compat.v1.get_variable("gcn_weights" + str(layer_id),
shape=[input_dim, output_dim],
initializer=tf.glorot_uniform_initializer(),
dtype=tf.float64)
if bias:
self.bias_vec = tf.compat.v1.get_variable("gcn_bias" + str(layer_id),
shape=[1, output_dim],
initializer=tf.zeros_initializer(),
dtype=tf.float64)
def call(self, inputs, drop_rate=0.0):
pre_sup_tangent = self.poincare.log_map_zero(inputs)
if drop_rate > 0.0:
pre_sup_tangent = tf.nn.dropout(pre_sup_tangent, rate=drop_rate) * (1 - drop_rate) # not scaled up
output = tf.matmul(pre_sup_tangent, self.weight_mat)
output = tf.sparse.sparse_dense_matmul(self.adj, output)
output = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(output))
if self.bias:
bias_vec = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(self.bias_vec))
output = self.poincare.mobius_addition(output, bias_vec)
output = self.poincare.hyperbolic_projection(output)
if self.act is not None:
output = self.act(self.poincare.log_map_zero(output))
output = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(output))
return output
class HyperKA:
def __init__(self, ins_list, onto_list, cross, ins_adj, onto_adj, params):
self.ins_ent_num = ins_list[3]
self.ins_rel_num = ins_list[4]
self.onto_ent_num = onto_list[3]
self.onto_rel_num = onto_list[4]
self.ins_entities = ins_list[0].ent_list
self.onto_entities = onto_list[0].ent_list
self.ins_sup_ent1 = [item[0] for item in (ins_list[1])]
self.ins_sup_ent2 = [item[2] for item in (ins_list[1])]
self.onto_sup_ent1 = [item[0] for item in (onto_list[1])]
self.onto_sup_ent2 = [item[2] for item in (onto_list[1])]
self.ins_ref_ent1 = [item[0] for item in (ins_list[2])]
self.ins_ref_ent2 = [item[2] for item in (ins_list[2])]
self.onto_ref_ent1 = [item[0] for item in (onto_list[2])]
self.onto_ref_ent2 = [item[2] for item in (onto_list[2])]
self.seed_sup_ent1 = cross[0][0]
self.seed_sup_ent2 = cross[0][1]
self.seed_links = list()
for i in range(len(self.seed_sup_ent1)):
self.seed_links.append((self.seed_sup_ent1[i], self.seed_sup_ent2[i]))
print("# seed associations:", len(self.seed_links))
self.seed_link_set = set(self.seed_links)
self.ref_ent1 = cross[1][0]
self.ref_ent2 = cross[1][1]
self.ref_links = list()
for i in range(len(self.ref_ent1)):
self.ref_links.append((self.ref_ent1[i], self.ref_ent2[i]))
print("# ref associations:", len(self.ref_links))
self.all_ref_type = cross[1][2]
self.params = params
self.poincare = PoincareManifold()
self.ins_adj_mat = tf.SparseTensor(indices=ins_adj[0], values=ins_adj[1], dense_shape=ins_adj[2])
self.onto_adj_mat = tf.SparseTensor(indices=onto_adj[0], values=onto_adj[1], dense_shape=onto_adj[2])
self.activation = tf.tanh
self.ins_layers = list()
self.onto_layers = list()
self.ins_output = list()
self.onto_output = list()
self.ins_layer_num = params.ins_layer_num
self.onto_layer_num = params.onto_layer_num
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
self.session = tf.Session(config=config)
self._generate_variables()
self._generate_triple_graph()
self._generate_mapping_graph()
tf.global_variables_initializer().run(session=self.session)
def _graph_convolution(self):
self.ins_output = list() # reset
self.onto_output = list()
# ************************* instance gnn ***************************
# In this case, we assume that the initialized embeddings are in the hyperbolic space.
ins_output_embeddings = self.poincare.hyperbolic_projection(self.ins_ent_embeddings)
# ins_output_embeddings = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(self.ins_ent_embeddings))
self.ins_output.append(ins_output_embeddings)
for i in range(self.ins_layer_num):
activation = self.activation
if i == self.ins_layer_num - 1:
activation = None
gcn_layer = GCNLayer(self.ins_adj_mat, self.params.dim, self.params.dim, i, self.poincare,
act=activation, name="inst")
self.ins_layers.append(gcn_layer)
ins_output_embeddings = gcn_layer.call(ins_output_embeddings)
ins_output_embeddings = self.poincare.mobius_addition(ins_output_embeddings, self.ins_output[-1])
ins_output_embeddings = self.poincare.hyperbolic_projection(ins_output_embeddings)
self.ins_output.append(ins_output_embeddings)
# ************************* ontology gnn ***************************
# In this case, we assume that the initialized embeddings are in the hyperbolic space.
onto_output_embeddings = self.poincare.hyperbolic_projection(self.onto_ent_embeddings)
# onto_output_embeddings = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(self.onto_ent_embeddings))
self.onto_output.append(onto_output_embeddings)
for i in range(self.onto_layer_num):
activation = self.activation
if i == self.onto_layer_num - 1:
activation = None
gcn_layer = GCNLayer(self.onto_adj_mat, self.params.onto_dim, self.params.onto_dim, i, self.poincare,
act=activation, name="onto")
self.onto_layers.append(gcn_layer)
onto_output_embeddings = gcn_layer.call(onto_output_embeddings)
onto_output_embeddings = self.poincare.mobius_addition(onto_output_embeddings, self.onto_output[-1])
onto_output_embeddings = self.poincare.hyperbolic_projection(onto_output_embeddings)
self.onto_output.append(onto_output_embeddings)
def _generate_variables(self):
with tf.variable_scope('instance_entity' + 'embeddings'):
self.ins_ent_embeddings = embed_init(self.ins_ent_num, self.params.dim, "ins_ent_embeds",
method='glorot_uniform_initializer')
self.ins_ent_embeddings = self.poincare.hyperbolic_projection(
self.poincare.exp_map_zero(self.ins_ent_embeddings))
with tf.variable_scope('ontology_entity' + 'embeddings'):
self.onto_ent_embeddings = embed_init(self.onto_ent_num, self.params.onto_dim, "onto_ent_embeds",
method='glorot_uniform_initializer')
self.onto_ent_embeddings = self.poincare.hyperbolic_projection(
self.poincare.exp_map_zero(self.onto_ent_embeddings))
with tf.variable_scope('instance_relation' + 'embeddings'):
self.ins_rel_embeddings = embed_init(self.ins_rel_num, self.params.dim, "ins_rel_embeds",
method='glorot_uniform_initializer')
self.ins_rel_embeddings = self.poincare.hyperbolic_projection(
self.poincare.exp_map_zero(self.ins_rel_embeddings))
with tf.variable_scope('ontology_relation' + 'embeddings'):
self.onto_rel_embeddings = embed_init(self.onto_rel_num, self.params.onto_dim, "onto_rel_embeds",
method='glorot_uniform_initializer')
self.onto_rel_embeddings = self.poincare.hyperbolic_projection(
self.poincare.exp_map_zero(self.onto_rel_embeddings))
if self.params.mapping:
with tf.variable_scope('instance_mapping' + 'embeddings'):
print("init instance mapping matrix using", "orthogonal", "with dim of", self.params.dim)
self.ins_mapping_matrix = tf.get_variable('mapping_matrix',
dtype=tf.float64,
shape=[self.params.dim, self.params.onto_dim],
initializer=tf.initializers.orthogonal(dtype=tf.float64))
def _generate_riemannian_optimizer(self, loss):
opt = tf.train.AdamOptimizer(self.params.learning_rate)
trainable_grad_vars = opt.compute_gradients(loss)
grad_vars = [(g, v) for g, v in trainable_grad_vars if g is not None]
rescaled = [(g * (1. - tf.reshape(tf.norm(v, axis=1), (-1, 1)) ** 2) ** 2 / 4., v) for g, v in grad_vars]
train_op = opt.apply_gradients(rescaled)
return train_op
def _generate_triple_loss(self, phs, prs, pts, nhs, nrs, nts):
pos_distance = self.poincare.distance(self.poincare.mobius_addition(phs, prs), pts)
neg_distance = self.poincare.distance(self.poincare.mobius_addition(nhs, nrs), nts)
pos_score = tf.reduce_sum(pos_distance, 1)
neg_score = tf.reduce_sum(neg_distance, 1)
pos_loss = tf.reduce_sum(tf.nn.relu(pos_score))
neg_loss = tf.reduce_sum(tf.nn.relu(tf.constant(self.params.neg_triple_margin, dtype=tf.float64) - neg_score))
return pos_loss + neg_loss
def _generate_triple_graph(self):
self.ins_pos_h = tf.placeholder(tf.int32, shape=[None], name="ins_pos_h")
self.ins_pos_r = tf.placeholder(tf.int32, shape=[None], name="ins_pos_r")
self.ins_pos_t = tf.placeholder(tf.int32, shape=[None], name="ins_pos_t")
self.ins_neg_h = tf.placeholder(tf.int32, shape=[None], name="ins_neg_h")
self.ins_neg_r = tf.placeholder(tf.int32, shape=[None], name="ins_neg_r")
self.ins_neg_t = tf.placeholder(tf.int32, shape=[None], name="ins_neg_t")
self.onto_pos_h = tf.placeholder(tf.int32, shape=[None], name="onto_pos_h")
self.onto_pos_r = tf.placeholder(tf.int32, shape=[None], name="onto_pos_r")
self.onto_pos_t = tf.placeholder(tf.int32, shape=[None], name="onto_pos_t")
self.onto_neg_h = tf.placeholder(tf.int32, shape=[None], name="onto_neg_h")
        self.onto_neg_r = tf.placeholder(tf.int32, shape=[None], name="onto_neg_r")
        self.onto_neg_t = tf.placeholder(tf.int32, shape=[None], name="onto_neg_t")
# ***********************************************************************************
ins_ent_embeddings = self.poincare.hyperbolic_projection(self.ins_ent_embeddings)
ins_rel_embeddings = self.poincare.hyperbolic_projection(self.ins_rel_embeddings)
onto_ent_embeddings = self.poincare.hyperbolic_projection(self.onto_ent_embeddings)
onto_rel_embeddings = self.poincare.hyperbolic_projection(self.onto_rel_embeddings)
# ins_ent_embeddings = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(self.ins_ent_embeddings))
# ins_rel_embeddings = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(self.ins_rel_embeddings))
# onto_ent_embeddings = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(self.onto_ent_embeddings))
# onto_rel_embeddings = self.poincare.hyperbolic_projection(self.poincare.exp_map_zero(self.onto_rel_embeddings))
ins_phs_embeds = tf.nn.embedding_lookup(ins_ent_embeddings, self.ins_pos_h)
ins_prs_embeds = tf.nn.embedding_lookup(ins_rel_embeddings, self.ins_pos_r)
ins_pts_embeds = tf.nn.embedding_lookup(ins_ent_embeddings, self.ins_pos_t)
ins_nhs_embeds = tf.nn.embedding_lookup(ins_ent_embeddings, self.ins_neg_h)
ins_nrs_embeds = tf.nn.embedding_lookup(ins_rel_embeddings, self.ins_neg_r)
ins_nts_embeds = tf.nn.embedding_lookup(ins_ent_embeddings, self.ins_neg_t)
self.ins_triple_loss = self._generate_triple_loss(ins_phs_embeds, ins_prs_embeds, ins_pts_embeds,
ins_nhs_embeds, ins_nrs_embeds, ins_nts_embeds, )
onto_phs_embeds = tf.nn.embedding_lookup(onto_ent_embeddings, self.onto_pos_h)
onto_prs_embeds = tf.nn.embedding_lookup(onto_rel_embeddings, self.onto_pos_r)
onto_pts_embeds = tf.nn.embedding_lookup(onto_ent_embeddings, self.onto_pos_t)
onto_nhs_embeds = tf.nn.embedding_lookup(onto_ent_embeddings, self.onto_neg_h)
onto_nrs_embeds = tf.nn.embedding_lookup(onto_rel_embeddings, self.onto_neg_r)
onto_nts_embeds = tf.nn.embedding_lookup(onto_ent_embeddings, self.onto_neg_t)
self.onto_triple_loss = self._generate_triple_loss(onto_phs_embeds, onto_prs_embeds, onto_pts_embeds,
onto_nhs_embeds, onto_nrs_embeds, onto_nts_embeds, )
self.triple_loss = self.ins_triple_loss + self.onto_triple_loss
self.triple_optimizer = self._generate_riemannian_optimizer(self.triple_loss)
def _generate_mapping_graph(self):
self.cross_pos_left = tf.placeholder(tf.int32, shape=[None], name="cross_pos_left")
self.cross_pos_right = tf.placeholder(tf.int32, shape=[None], name="cross_pos_right")
self._graph_convolution()
ins_embeddings = self.ins_output[-1]
onto_embeddings = self.onto_output[-1]
if self.params.combine:
ins_embeddings = self.poincare.mobius_addition(ins_embeddings, self.ins_output[0])
onto_embeddings = self.poincare.mobius_addition(onto_embeddings, self.onto_output[0])
cross_left = tf.nn.embedding_lookup(ins_embeddings, self.cross_pos_left)
cross_left = self.poincare.hyperbolic_projection(cross_left)
cross_right = tf.nn.embedding_lookup(onto_embeddings, self.cross_pos_right)
cross_right = self.poincare.hyperbolic_projection(cross_right)
mapped_sup_embeds1 = tf.matmul(self.poincare.log_map_zero(cross_left), self.ins_mapping_matrix)
mapped_sup_embeds1 = self.poincare.exp_map_zero(mapped_sup_embeds1)
mapped_sup_embeds1 = self.poincare.hyperbolic_projection(mapped_sup_embeds1)
# mapped_sup_embeds1 = self.poincare.mobius_matmul(cross_left, self.ins_mapping_matrix)
sup_distance = self.poincare.distance(mapped_sup_embeds1, cross_right)
sup_distance = tf.reduce_sum(sup_distance, 1)
# *****************add neg sample***********************************************
self.cross_neg_left = tf.placeholder(tf.int32, shape=[None], name="cross_neg_left")
self.cross_neg_right = tf.placeholder(tf.int32, shape=[None], name="cross_neg_right")
neg_embeds1 = tf.nn.embedding_lookup(ins_embeddings, self.cross_neg_left)
neg_embeds2 = tf.nn.embedding_lookup(onto_embeddings, self.cross_neg_right)
neg_embeds1 = self.poincare.hyperbolic_projection(neg_embeds1)
neg_embeds2 = self.poincare.hyperbolic_projection(neg_embeds2)
mapped_neg_embeds1 = tf.matmul(self.poincare.log_map_zero(neg_embeds1), self.ins_mapping_matrix)
mapped_neg_embeds1 = self.poincare.exp_map_zero(mapped_neg_embeds1)
mapped_neg_embeds1 = self.poincare.hyperbolic_projection(mapped_neg_embeds1)
# mapped_neg_embeds1 = self.poincare.mobius_matmul(neg_embeds1, self.ins_mapping_matrix)
neg_distance = self.poincare.distance(mapped_neg_embeds1, neg_embeds2)
neg_distance = tf.reduce_sum(neg_distance, 1)
pos_loss = tf.reduce_sum(tf.nn.relu(sup_distance))
neg_loss = tf.reduce_sum(
tf.nn.relu(tf.constant(self.params.neg_typing_margin, dtype=tf.float64) - neg_distance))
self.mapping_loss = pos_loss + neg_loss
self.mapping_optimizer = self._generate_riemannian_optimizer(self.mapping_loss)
def test(self):
t = time.time()
ins_embeddings = self.ins_output[-1]
onto_embeddings = self.onto_output[-1]
if self.params.combine:
ins_embeddings = self.poincare.mobius_addition(ins_embeddings, self.ins_output[0])
onto_embeddings = self.poincare.mobius_addition(onto_embeddings, self.onto_output[0])
ref_ins_embed = tf.nn.embedding_lookup(ins_embeddings, self.ref_ent1)
ref_ins_embed = self.poincare.hyperbolic_projection(ref_ins_embed)
ref_ins_embed = tf.matmul(self.poincare.log_map_zero(ref_ins_embed), self.ins_mapping_matrix)
ref_ins_embed = self.poincare.exp_map_zero(ref_ins_embed)
ref_ins_embed = self.poincare.hyperbolic_projection(ref_ins_embed)
# ref_ins_embed = self.poincare.mobius_matmul(ref_ins_embed, self.ins_mapping_matrix)
ref_ins_embed = ref_ins_embed.eval(session=self.session)
onto_embed = onto_embeddings
onto_embed = self.poincare.hyperbolic_projection(onto_embed)
onto_embed = onto_embed.eval(session=self.session)
hits1 = eval_type_hyperbolic(ref_ins_embed, onto_embed, self.all_ref_type,
self.params.ent_top_k, self.params.nums_threads, greedy=True,
mess="greedy ent typing by hyperbolic")
eval_type_hyperbolic(ref_ins_embed, onto_embed, self.all_ref_type, self.params.ent_top_k,
self.params.nums_threads, greedy=False, mess="ent typing by hyperbolic")
print("test totally costs time = {:.3f} s ".format(time.time() - t))
return hits1
def eval_ins_input_embed(self, is_map=False):
embeds = tf.nn.embedding_lookup(self.ins_ent_embeddings, self.ins_entities)
if is_map:
embeds = self.poincare.mobius_matmul(embeds, self.ins_mapping_matrix)
return embeds.eval(session=self.session)
def eval_onto_input_embed(self):
return tf.nn.embedding_lookup(self.onto_ent_embeddings, self.onto_entities).eval(session=self.session)
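# Illustrative sketch (not part of the model above): the gradient rescaling in
# _generate_riemannian_optimizer converts Euclidean gradients to Riemannian
# gradients on the Poincare ball by the row-wise factor ((1 - ||x||^2)^2) / 4.
# In plain NumPy this corresponds to:
def poincare_riemannian_grad(euclidean_grad, points):
    # points: (n, d) embeddings inside the unit ball; euclidean_grad: same shape
    sq_norms = np.sum(points ** 2, axis=1, keepdims=True)
    return euclidean_grad * ((1.0 - sq_norms) ** 2) / 4.0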
|
import yaml
import os
# Check if running from inside jupyter
# From https://stackoverflow.com/questions/47211324/check-if-module-is-running-in-jupyter-or-not
def type_of_script():
    try:
        ipy_str = str(type(get_ipython()))
        if 'zmqshell' in ipy_str:
            return 'jupyter'
        if 'terminal' in ipy_str:
            return 'ipython'
        return 'terminal'
    except NameError:
        # get_ipython is not defined outside IPython/Jupyter
        return 'terminal'
def load_config(model_name):
path = os.path.dirname(__file__)
configs = yaml.safe_load(open(os.path.join(path, "model-config.yaml"),
encoding="utf8"))
try:
model_config = configs[model_name]
model_config = pack_tokenizer_config(model_config)
    except KeyError as e:
        raise ValueError(
            f"The model '{model_name}' is not defined in Ecco's 'model-config.yaml' file and"
            f" so is not explicitly supported yet. Supported models are:",
            list(configs.keys())) from e
return model_config
def pack_tokenizer_config(model_config):
"""
Convenience method to package tokenizer configs into one element to more easily pass it to
JavaScript rendering code.
Args:
model_config: dict of model configuration options used for model-config or in __init__.py
Returns:
model_config dict with 'tokenizer_config' elements
"""
tokenizer_config = {'token_prefix': model_config['token_prefix'],
'partial_token_prefix': model_config['partial_token_prefix']}
model_config['tokenizer_config'] = tokenizer_config
return model_config
def strip_tokenizer_prefix(model_config,
token,
ellipsis_partial_tokens=False):
token = token.lstrip(model_config['token_prefix'])
token = token.lstrip(model_config['partial_token_prefix'])
    token = token.lstrip(' ')
return token
def is_partial_token(model_config,
token):
if (token[0: len(model_config['partial_token_prefix'])] == model_config['partial_token_prefix']) and \
((len(model_config['token_prefix']) == 0) or \
token[0:len(model_config['token_prefix'])] != model_config['token_prefix']):
return True
else:
return False
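# Hypothetical usage sketch (these prefixes are illustrative only and are not
# taken from any entry in model-config.yaml): a config whose whole-word tokens
# start with '_' and whose partial tokens start with '##'.
if __name__ == '__main__':
    example_config = pack_tokenizer_config({'token_prefix': '_',
                                            'partial_token_prefix': '##'})
    print(is_partial_token(example_config, '##ing'))        # True
    print(is_partial_token(example_config, '_hello'))       # False
    print(strip_tokenizer_prefix(example_config, '_hello')) # 'hello'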
|
# encoding: utf-8
from datetime import date
import pytest
from mock import Mock
try:
from django.db.models import Value
except ImportError:
Value = Mock()
try:
from django.db.models.functions import Concat
except ImportError:
Concat = Mock(return_value=Mock(output_field=None))
from .app_management.models import (ApplicationWithClassBasedProperties, ApplicationWithDecoratorBasedProperties,
CategoryWithClassBasedProperties, CategoryWithDecoratorBasedProperties)
from .dummy_lib.models import ReleaseTypeModel
@pytest.fixture
def categories():
return [
CategoryWithClassBasedProperties.objects.create(name='Linux apps'),
CategoryWithClassBasedProperties.objects.create(name='Windows apps'),
CategoryWithDecoratorBasedProperties.objects.create(name='Linux apps'),
CategoryWithDecoratorBasedProperties.objects.create(name='Windows apps'),
]
@pytest.fixture
def applications(categories):
apps = [
ApplicationWithClassBasedProperties.objects.create(name='My cool App'),
ApplicationWithClassBasedProperties.objects.create(name='Another App'),
ApplicationWithDecoratorBasedProperties.objects.create(name='My cool App'),
ApplicationWithDecoratorBasedProperties.objects.create(name='Another App'),
]
apps[0].categories.add(categories[0])
apps[1].categories.add(categories[0])
apps[1].categories.add(categories[1])
apps[2].categories.add(categories[2])
apps[3].categories.add(categories[2])
apps[3].categories.add(categories[3])
return apps
@pytest.fixture
def versions(applications):
objs = []
for application in applications:
objs.extend([
application.versions.create(major=1, minor=2, patch=3, release_type=ReleaseTypeModel.BETA,
supported_until=date(2016, 12, 31)),
application.versions.create(major=1, minor=3, patch=0,
supported_from=date(2017, 1, 1), supported_until=date(2017, 12, 31)),
application.versions.create(major=1, minor=3, patch=1,
supported_from=date(2018, 1, 1), supported_until=date(2018, 12, 31)),
application.versions.create(major=2, minor=0, patch=0, changes='Amazing new features',
release_type=ReleaseTypeModel.ALPHA, supported_from=date(2018, 11, 1)),
])
return objs
|
import importlib
aws_iam = importlib.import_module('aws-iam')
def test_series_upgrade():
assert aws_iam.hookenv.status_set.call_count == 0
aws_iam.pre_series_upgrade()
assert aws_iam.hookenv.status_set.call_count == 1
|
from common_fixtures import * # NOQA
import time
def test_agent_create(super_client):
uri = "sim://" + str(time.time())
agent = super_client.create_agent(uri=uri)
assert agent.state == "registering"
assert agent.uri == uri
assert agent.transitioning == "yes"
agent = super_client.wait_success(agent)
assert agent.transitioning == "no"
assert agent.state == "active"
assert agent.account() is not None
count = len(agent.account().credentials())
assert count == 1
account = agent.account()
assert account.uuid.startswith("agentAccount")
assert account.state == "active"
assert account.kind == "agent"
    creds = list(filter(lambda x: x.kind == 'agentApiKey', account.credentials()))
assert len(creds) == 1
assert creds[0].state == "active"
assert creds[0].publicValue is not None
assert creds[0].secretValue is not None
def test_agent_create_for_container(context, super_client):
client = context.client
c = context.create_container(labels={
'io.rancher.container.create_agent': 'true'
})
c = super_client.reload(c)
agent = c.agent()
account_id = get_plain_id(c.account())
assert agent.state == 'active'
assert agent.data.agentResourcesAccountId == int(account_id)
client.delete(c)
c = client.wait_success(c)
assert c.state == 'removed'
agent = super_client.wait_success(super_client.reload(agent))
assert agent.state == 'removed'
def test_agent_create_for_env_role(context, super_client):
c = context.create_container(labels={
'io.rancher.container.create_agent': 'true',
'io.rancher.container.agent.role': 'environment'
})
c = super_client.reload(c)
agent = super_client.wait_success(c.agent())
assert agent.state == 'active'
cred = agent.account().credentials()[0]
assert cred.publicValue is not None
assert cred.secretValue is not None
agent_client = api_client(cred.publicValue, cred.secretValue)
assert 'POST' in agent_client.schema.types['container'].collectionMethods
def test_agent_create_for_not_env_role(context, super_client):
c = context.create_container(labels={
'io.rancher.container.create_agent': 'true',
'io.rancher.container.agent.role': 'user'
})
c = super_client.reload(c)
agent = super_client.wait_success(c.agent())
assert agent.state == 'active'
cred = agent.account().credentials()[0]
assert cred.publicValue is not None
assert cred.secretValue is not None
agent_client = api_client(cred.publicValue, cred.secretValue)
assert 'container' not in agent_client.schema.types
|
# Copyright (c) 2012-2022, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
from . import AWSObject, AWSProperty, PropsDictType
from .validators import boolean, double, integer
from .validators.batch import (
validate_allocation_strategy,
validate_environment_state,
validate_launch_template_specification,
validate_queue_state,
)
class Ec2ConfigurationObject(AWSProperty):
"""
`Ec2ConfigurationObject <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html>`__
"""
props: PropsDictType = {
"ImageIdOverride": (str, False),
"ImageType": (str, True),
}
class LaunchTemplateSpecification(AWSProperty):
"""
`LaunchTemplateSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html>`__
"""
props: PropsDictType = {
"LaunchTemplateId": (str, False),
"LaunchTemplateName": (str, False),
"Version": (str, False),
}
def validate(self):
validate_launch_template_specification(self)
class ComputeResources(AWSProperty):
"""
`ComputeResources <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html>`__
"""
props: PropsDictType = {
"AllocationStrategy": (validate_allocation_strategy, False),
"BidPercentage": (integer, False),
"DesiredvCpus": (integer, False),
"Ec2Configuration": ([Ec2ConfigurationObject], False),
"Ec2KeyPair": (str, False),
"ImageId": (str, False),
"InstanceRole": (str, False),
"InstanceTypes": ([str], False),
"LaunchTemplate": (LaunchTemplateSpecification, False),
"MaxvCpus": (integer, True),
"MinvCpus": (integer, False),
"PlacementGroup": (str, False),
"SecurityGroupIds": ([str], False),
"SpotIamFleetRole": (str, False),
"Subnets": ([str], True),
"Tags": (dict, False),
"Type": (str, True),
}
class ComputeEnvironment(AWSObject):
"""
`ComputeEnvironment <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html>`__
"""
resource_type = "AWS::Batch::ComputeEnvironment"
props: PropsDictType = {
"ComputeEnvironmentName": (str, False),
"ComputeResources": (ComputeResources, False),
"ServiceRole": (str, False),
"State": (validate_environment_state, False),
"Tags": (dict, False),
"Type": (str, True),
"UnmanagedvCpus": (integer, False),
}
class Environment(AWSProperty):
"""
`Environment <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html>`__
"""
props: PropsDictType = {
"Name": (str, False),
"Value": (str, False),
}
class FargatePlatformConfiguration(AWSProperty):
"""
`FargatePlatformConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html>`__
"""
props: PropsDictType = {
"PlatformVersion": (str, False),
}
class Device(AWSProperty):
"""
`Device <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html>`__
"""
props: PropsDictType = {
"ContainerPath": (str, False),
"HostPath": (str, False),
"Permissions": ([str], False),
}
class Tmpfs(AWSProperty):
"""
`Tmpfs <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html>`__
"""
props: PropsDictType = {
"ContainerPath": (str, True),
"MountOptions": ([str], False),
"Size": (integer, True),
}
class LinuxParameters(AWSProperty):
"""
`LinuxParameters <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html>`__
"""
props: PropsDictType = {
"Devices": ([Device], False),
"InitProcessEnabled": (boolean, False),
"MaxSwap": (integer, False),
"SharedMemorySize": (integer, False),
"Swappiness": (integer, False),
"Tmpfs": ([Tmpfs], False),
}
class Secret(AWSProperty):
"""
`Secret <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html>`__
"""
props: PropsDictType = {
"Name": (str, True),
"ValueFrom": (str, True),
}
class LogConfiguration(AWSProperty):
"""
`LogConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html>`__
"""
props: PropsDictType = {
"LogDriver": (str, True),
"Options": (dict, False),
"SecretOptions": ([Secret], False),
}
class MountPoints(AWSProperty):
"""
`MountPoints <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html>`__
"""
props: PropsDictType = {
"ContainerPath": (str, False),
"ReadOnly": (boolean, False),
"SourceVolume": (str, False),
}
class NetworkConfiguration(AWSProperty):
"""
`NetworkConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html>`__
"""
props: PropsDictType = {
"AssignPublicIp": (str, False),
}
class ResourceRequirement(AWSProperty):
"""
`ResourceRequirement <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html>`__
"""
props: PropsDictType = {
"Type": (str, False),
"Value": (str, False),
}
class Ulimit(AWSProperty):
"""
`Ulimit <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html>`__
"""
props: PropsDictType = {
"HardLimit": (integer, True),
"Name": (str, True),
"SoftLimit": (integer, True),
}
class AuthorizationConfig(AWSProperty):
"""
`AuthorizationConfig <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html>`__
"""
props: PropsDictType = {
"AccessPointId": (str, False),
"Iam": (str, False),
}
class EfsVolumeConfiguration(AWSProperty):
"""
`EfsVolumeConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html>`__
"""
props: PropsDictType = {
"AuthorizationConfig": (AuthorizationConfig, False),
"FileSystemId": (str, True),
"RootDirectory": (str, False),
"TransitEncryption": (str, False),
"TransitEncryptionPort": (integer, False),
}
class VolumesHost(AWSProperty):
"""
`VolumesHost <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html>`__
"""
props: PropsDictType = {
"SourcePath": (str, False),
}
class Volumes(AWSProperty):
"""
`Volumes <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html>`__
"""
props: PropsDictType = {
"EfsVolumeConfiguration": (EfsVolumeConfiguration, False),
"Host": (VolumesHost, False),
"Name": (str, False),
}
class ContainerProperties(AWSProperty):
"""
`ContainerProperties <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html>`__
"""
props: PropsDictType = {
"Command": ([str], False),
"Environment": ([Environment], False),
"ExecutionRoleArn": (str, False),
"FargatePlatformConfiguration": (FargatePlatformConfiguration, False),
"Image": (str, True),
"InstanceType": (str, False),
"JobRoleArn": (str, False),
"LinuxParameters": (LinuxParameters, False),
"LogConfiguration": (LogConfiguration, False),
"Memory": (integer, False),
"MountPoints": ([MountPoints], False),
"NetworkConfiguration": (NetworkConfiguration, False),
"Privileged": (boolean, False),
"ReadonlyRootFilesystem": (boolean, False),
"ResourceRequirements": ([ResourceRequirement], False),
"Secrets": ([Secret], False),
"Ulimits": ([Ulimit], False),
"User": (str, False),
"Vcpus": (integer, False),
"Volumes": ([Volumes], False),
}
class NodeRangeProperty(AWSProperty):
"""
`NodeRangeProperty <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html>`__
"""
props: PropsDictType = {
"Container": (ContainerProperties, False),
"TargetNodes": (str, True),
}
class NodeProperties(AWSProperty):
"""
`NodeProperties <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html>`__
"""
props: PropsDictType = {
"MainNode": (integer, True),
"NodeRangeProperties": ([NodeRangeProperty], True),
"NumNodes": (integer, True),
}
class EvaluateOnExit(AWSProperty):
"""
`EvaluateOnExit <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html>`__
"""
props: PropsDictType = {
"Action": (str, True),
"OnExitCode": (str, False),
"OnReason": (str, False),
"OnStatusReason": (str, False),
}
class RetryStrategy(AWSProperty):
"""
`RetryStrategy <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html>`__
"""
props: PropsDictType = {
"Attempts": (integer, False),
"EvaluateOnExit": ([EvaluateOnExit], False),
}
class Timeout(AWSProperty):
"""
`Timeout <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html>`__
"""
props: PropsDictType = {
"AttemptDurationSeconds": (integer, False),
}
class JobDefinition(AWSObject):
"""
`JobDefinition <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html>`__
"""
resource_type = "AWS::Batch::JobDefinition"
props: PropsDictType = {
"ContainerProperties": (ContainerProperties, False),
"JobDefinitionName": (str, False),
"NodeProperties": (NodeProperties, False),
"Parameters": (dict, False),
"PlatformCapabilities": ([str], False),
"PropagateTags": (boolean, False),
"RetryStrategy": (RetryStrategy, False),
"SchedulingPriority": (integer, False),
"Tags": (dict, False),
"Timeout": (Timeout, False),
"Type": (str, True),
}
class ComputeEnvironmentOrder(AWSProperty):
"""
`ComputeEnvironmentOrder <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html>`__
"""
props: PropsDictType = {
"ComputeEnvironment": (str, True),
"Order": (integer, True),
}
class JobQueue(AWSObject):
"""
`JobQueue <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html>`__
"""
resource_type = "AWS::Batch::JobQueue"
props: PropsDictType = {
"ComputeEnvironmentOrder": ([ComputeEnvironmentOrder], True),
"JobQueueName": (str, False),
"Priority": (integer, True),
"SchedulingPolicyArn": (str, False),
"State": (validate_queue_state, False),
"Tags": (dict, False),
}
class ShareAttributes(AWSProperty):
"""
`ShareAttributes <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html>`__
"""
props: PropsDictType = {
"ShareIdentifier": (str, False),
"WeightFactor": (double, False),
}
class FairsharePolicy(AWSProperty):
"""
`FairsharePolicy <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html>`__
"""
props: PropsDictType = {
"ComputeReservation": (double, False),
"ShareDecaySeconds": (double, False),
"ShareDistribution": ([ShareAttributes], False),
}
class SchedulingPolicy(AWSObject):
"""
`SchedulingPolicy <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html>`__
"""
resource_type = "AWS::Batch::SchedulingPolicy"
props: PropsDictType = {
"FairsharePolicy": (FairsharePolicy, False),
"Name": (str, False),
"Tags": (dict, False),
}
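# Illustrative usage sketch (comments only, since this module is autogenerated;
# it assumes the module is importable as troposphere.batch alongside
# troposphere.Template): wiring a managed compute environment into a job queue
# using the properties defined above.
#
#   from troposphere import Ref, Template
#   from troposphere.batch import (ComputeEnvironment, ComputeEnvironmentOrder,
#                                  ComputeResources, JobQueue)
#   template = Template()
#   env = template.add_resource(ComputeEnvironment(
#       "DemoComputeEnv",
#       Type="MANAGED",
#       ComputeResources=ComputeResources(
#           Type="FARGATE",
#           MaxvCpus=16,
#           Subnets=["subnet-12345"],  # placeholder subnet id
#       ),
#   ))
#   template.add_resource(JobQueue(
#       "DemoJobQueue",
#       Priority=1,
#       ComputeEnvironmentOrder=[
#           ComputeEnvironmentOrder(ComputeEnvironment=Ref(env), Order=1),
#       ],
#   ))
#   print(template.to_json())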
|
# -*- coding: utf-8 -*-
"""
NEUROscience Tool for Interactive Characterization
Curate, visualize, annotate, and share your behavioral ephys data using Python
"""
import os
import sys
import shutil
import copy
import pkg_resources
import collections.abc
import logging
import logging.handlers
import toml
from .version import version as __version__
from .version import git_revision as __git_revision__
# set the user's directory for global settings file, logs, and more
neurotic_dir = os.path.join(os.path.expanduser('~'), '.neurotic')
if not os.path.exists(neurotic_dir):
os.mkdir(neurotic_dir)
class FileLoggingFormatter(logging.Formatter):
"""
A custom formatter for file logging
"""
default_msec_format = '%s.%03d' # use period radix point instead of comma in decimal seconds
def format(self, record):
if logging.getLogger(__name__).level <= logging.DEBUG:
# include more detail if the logger level (not the record level) is
# debug or lower
self._style._fmt = '[%(asctime)s] [%(levelname)-8s] [%(threadName)-10s] [%(name)s:%(lineno)d (%(funcName)s)] %(message)s'
else:
self._style._fmt = '[%(asctime)s] [%(levelname)-8s] %(message)s'
return super().format(record)
class StreamLoggingFormatter(logging.Formatter):
"""
A custom formatter for stream logging
"""
def format(self, record):
if record.levelno == logging.INFO:
# exclude the level name ("INFO") from common log records
self._style._fmt = '[neurotic] %(message)s'
else:
self._style._fmt = '[neurotic] %(levelname)s: %(message)s'
return super().format(record)
# set the file path for logging
log_file = os.path.join(neurotic_dir, 'neurotic-log.txt')
# set the default level for logging to INFO unless it was set to a custom level
# before importing the package
logger = logging.getLogger(__name__)
if logger.level == logging.NOTSET:
default_log_level = logging.INFO
logger.setLevel(default_log_level)
else:
default_log_level = logger.level
# write log records to a file, rotating files if it exceeds 10 MB
logger_filehandler = logging.handlers.RotatingFileHandler(filename=log_file, maxBytes=10000000, backupCount=2)
logger_filehandler.setFormatter(FileLoggingFormatter())
logger.addHandler(logger_filehandler)
logger.info('===========================') # file logger only
logger.info(f'Importing neurotic {__version__}') # file logger only
# stream log records to stderr
logger_streamhandler = logging.StreamHandler(stream=sys.stderr)
logger_streamhandler.setFormatter(StreamLoggingFormatter())
logger.addHandler(logger_streamhandler)
global_config = {
'defaults': {
# defaults used by the command line interface
'file': False,
'dataset': False,
'debug': False,
'lazy': True,
'thick_traces': False,
'show_datetime': False,
'ui_scale': 'medium',
'theme': 'light',
},
'gdrive': {
# parameters for Google Drive access
'client_secret_file': os.path.join(neurotic_dir, 'gdrive-creds', 'client_secret.json'),
'tokens_file': os.path.join(neurotic_dir, 'gdrive-creds', 'tokens.json'),
'save_tokens': False,
},
'app': {
'auto_check_for_updates': True,
},
}
# keep a copy of the original config before it is modified
_global_config_factory_defaults = copy.deepcopy(global_config)
# the global config file is a text file in TOML format owned by the user that
# allows alternate defaults to be specified to replace those in global_config
global_config_file = os.path.join(neurotic_dir, 'neurotic-config.txt')
if not os.path.exists(global_config_file):
# copy a template global config file containing commented-out defaults
shutil.copy(
pkg_resources.resource_filename(
'neurotic', 'global_config_template.txt'),
global_config_file)
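# As a purely illustrative example (keys mirror the global_config dict above),
# a user's neurotic-config.txt could override a few defaults with TOML such as:
#
#   [defaults]
#   lazy = false
#   theme = "dark"
#
#   [app]
#   auto_check_for_updates = false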
def update_dict(d, d_new):
"""
Recursively update the contents of a dictionary. Unlike dict.update(), this
function preserves items in inner dictionaries that are absent from d_new.
For example, if given
>>> d = {'x': 0, 'inner': {'a': 1, 'b': 2}}
>>> d_new = {'inner': {'c': 3}}
then using d.update(d_new) will entirely replace d['inner'] with
d_new['inner']:
>>> d.update(d_new)
>>> d == {'x': 0, 'inner': {'c': 3}}
In contrast, update_dict(d, d_new) will preserve items found in d['inner']
but not in d_new['inner']:
>>> update_dict(d, d_new)
>>> d == {'x': 0, 'inner': {'a': 1, 'b': 2, 'c': 3}}
"""
for k_new, v_new in d_new.items():
if isinstance(v_new, collections.abc.Mapping):
d[k_new] = update_dict(d.get(k_new, {}), v_new)
else:
d[k_new] = v_new
return d
def update_global_config_from_file(file=global_config_file):
"""
Update the global_config dictionary with data from the global config file,
using recursion to traverse nested dictionaries.
"""
with open(file, 'r') as f:
update_dict(global_config, toml.loads(f.read()))
try:
update_global_config_from_file()
except Exception as e:
logger.error(f'Ignoring global config file due to parsing error ({global_config_file}): {e}')
# create directories for storing Google Drive credentials if necessary
for file in [global_config['gdrive']['client_secret_file'],
global_config['gdrive']['tokens_file']]:
if file and not os.path.exists(os.path.dirname(file)):
os.mkdir(os.path.dirname(file))
from .datasets import *
from .gui import *
from .scripts import *
|
# Generated by Django 3.0.5 on 2021-06-26 00:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myapp', '0011_form_assigned_supervisor_id'),
]
operations = [
migrations.AddField(
model_name='form',
name='action',
field=models.CharField(blank=True, default='Save', max_length=55, null=True),
),
]
|
import torch
import numpy as np
import pandas as pd
import sacred
from datetime import datetime
import h5py
import feature.util as util
dataset_ex = sacred.Experiment("dataset")
@dataset_ex.config
def config():
# Path to reference genome FASTA
reference_fasta = "/users/amtseng/genomes/hg38.fasta"
# Path to chromosome sizes
chrom_sizes_tsv = "/users/amtseng/genomes/hg38.canon.chrom.sizes"
# The size of DNA sequences to fetch as input sequences
input_length = 1346
# The size of profiles to fetch for each coordinate
profile_length = 1000
# One-hot encoding has this depth
input_depth = 4
# Whether or not to perform reverse complement augmentation
revcomp = True
# Maximum size of jitter to the input for augmentation; set to 0 to disable
jitter_size = 128
# Batch size; will be multiplied by two if reverse complementation is done
batch_size = 64
# Sample X negatives randomly from the genome for every positive example
negative_ratio = 1
# Use this stride when tiling coordinates across a peak
peak_tiling_stride = 25
# Amount of dataset for each task to keep; can be a set number of peaks, or
# a fraction (if < 1); set to None to keep everything
peak_retention = None
# Number of workers for the data loader
num_workers = 10
# Negative seed (for selecting negatives)
negative_seed = None
# Jitter seed (for applying random jitter to peaks)
jitter_seed = None
# Shuffle seed (for shuffling data points)
shuffle_seed = None
class GenomeIntervalSampler:
"""
Samples a random interval from the genome. The sampling is performed
uniformly at random (i.e. longer chromosomes are more likely to be sampled
from).
Arguments:
`chrom_sizes_tsv`: path to 2-column TSV listing sizes of each chromosome
`sample_length`: length of sampled sequence
`chroms_keep`: an iterable of chromosomes that specifies which
chromosomes to keep from the sizes; sampling will only occur from
these chromosomes
"""
def __init__(
self, chrom_sizes_tsv, sample_length, chroms_keep=None, seed=None
):
self.sample_length = sample_length
# Create DataFrame of chromosome sizes
chrom_table = self._import_chrom_sizes(chrom_sizes_tsv)
if chroms_keep:
chrom_table = chrom_table[chrom_table["chrom"].isin(chroms_keep)]
# Cut off sizes to avoid overrunning ends of chromosome
chrom_table["max_size"] -= sample_length
chrom_table["weight"] = \
chrom_table["max_size"] / chrom_table["max_size"].sum()
self.chrom_table = chrom_table
self.rng = np.random.RandomState(seed)
def _import_chrom_sizes(self, chrom_sizes_tsv):
"""
Imports a TSV of chromosome sizes, mapping chromosome to maximum size.
Arguments:
`chrom_sizes_tsv`: a 2-column TSV mapping chromosome name to size
Returns a Pandas DataFrame
"""
return pd.read_csv(
chrom_sizes_tsv, sep="\t", header=None, names=["chrom", "max_size"]
)
def sample_intervals(self, num_intervals):
"""
Returns a 2D NumPy array of randomly sampled coordinates. Returns
`num_intervals` intervals, uniformly randomly sampled from the genome.
"""
chrom_sample = self.chrom_table.sample(
n=num_intervals,
replace=True,
weights=self.chrom_table["weight"],
random_state=self.rng
)
chrom_sample["start"] = (
self.rng.rand(num_intervals) * chrom_sample["max_size"]
).astype(int)
chrom_sample["end"] = chrom_sample["start"] + self.sample_length
return chrom_sample[["chrom", "start", "end"]].values.astype(object)
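# Rough usage sketch (path and numbers are hypothetical, for illustration only):
#     sampler = GenomeIntervalSampler("hg38.canon.chrom.sizes", 1346, seed=0)
#     intervals = sampler.sample_intervals(4)  # 4 x 3 array of (chrom, start, end)
# Longer chromosomes receive proportionally more samples via the "weight" column.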
class CoordsToVals:
"""
From an HDF5 file that maps genomic coordinates to profiles, this creates an
object that maps a list of coordinates to a NumPy array of profiles.
Arguments:
`hdf5_path`: path to HDF5 containing profiles; this HDF5 must have a
separate dataset for each chromosome, and it is expected to return
profiles of shape O x P x S, where O is the profile size, P is the
number of profile tracks, and S is for the strands in each track
`profile_size`: for each genomic coordinate, center it and pad it on
both sides to this length to get the final profile; if this is
smaller than the coordinate interval given, then the interval will
be cut to this size by centering
"""
def __init__(self, hdf5_path, profile_size):
self.hdf5_path = hdf5_path
self.profile_size = profile_size
    @staticmethod
    def _resize_interval(start, end, size):
"""
Resizes the interval by centering and trimming/padding to the given
size.
"""
center = int(0.5 * (start + end))
half_size = int(0.5 * size)
left = center - half_size
right = left + size
return left, right
def _get_profile(self, chrom, start, end, hdf5_reader):
"""
Fetches the profile for the given coordinates, with an instantiated
HDF5 reader. Returns the profile as a NumPy array of numbers. This may
pad or cut from the center to a specified length.
"""
if self.profile_size:
start, end = CoordsToVals._resize_interval(
start, end, self.profile_size
)
return hdf5_reader[chrom][start:end]
def _get_ndarray(self, coords):
"""
        From an iterable of coordinates, retrieves the values for those coordinates.
This will be a 4D NumPy array of corresponding profile values.
Note that all coordinate intervals need to be of the same length (after
padding).
"""
with h5py.File(self.hdf5_path, "r") as reader:
profiles = np.stack([
self._get_profile(chrom, start, end, reader) \
for chrom, start, end in coords
])
return profiles
def __call__(self, coords):
return self._get_ndarray(coords)
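# Rough usage sketch (HDF5 path is hypothetical, for illustration only):
#     c2v = CoordsToVals("profiles.h5", profile_length)
#     profs = c2v([("chr1", 1000, 2346), ("chr2", 5000, 6346)])
# Each interval is first re-centered/trimmed to `profile_length`, so `profs` has
# shape B x O x P x S (B intervals, O = profile_length).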
class SamplingCoordsBatcher(torch.utils.data.sampler.Sampler):
"""
Creates a batch producer that batches positive coordinates and samples
negative coordinates. Each batch will have some positives and negatives
according to `neg_ratio`. When multiple sets of positive coordinates are
given, the coordinates are all pooled together and drawn from uniformly.
Arguments:
`pos_coords_beds`: list of paths to gzipped BED files containing the
sets of positive coordinates for various tasks; these BED files
should be in ENCODE NarrowPeak format
`batch_size`: number of samples per batch
`neg_ratio`: number of negatives to select for each positive example
`jitter`: maximum random amount to jitter each positive coordinate
example by
`chrom_sizes_tsv`: path to 2-column TSV listing sizes of each chromosome
`sample_length`: length of sampled sequence
`genome_sampler`: a GenomeIntervalSampler instance, which samples
intervals randomly from the genome
`chroms_keep`: if specified, only considers this set of chromosomes from
the coordinate BEDs
`peak_retention`: if specified, keeps only this amount of peaks (taking
most confident peaks preferentially); can be a fraction of the
original BED file (if value is < 1), or a number of peaks (if value
is >= 1)
`return_peaks`: if True, returns the peaks and summits sampled from the
peak set as a B x 3 array
`shuffle_before_epoch`: Whether or not to shuffle all examples before
each epoch
"""
def __init__(
self, pos_coords_beds, batch_size, neg_ratio, jitter, chrom_sizes_tsv,
sample_length, genome_sampler, chroms_keep=None, peak_retention=None,
return_peaks=False, shuffle_before_epoch=False, jitter_seed=None,
shuffle_seed=None
):
self.batch_size = batch_size
self.neg_ratio = neg_ratio
self.jitter = jitter
self.genome_sampler = genome_sampler
self.return_peaks = return_peaks
self.shuffle_before_epoch = shuffle_before_epoch
chrom_sizes_table = self._import_chrom_sizes(chrom_sizes_tsv)
all_pos_table = []
for i, pos_coords_bed in enumerate(pos_coords_beds):
peaks_table = self._import_peaks(pos_coords_bed)
coords = self._format_peaks_table(
peaks_table, chroms_keep, peak_retention, sample_length, jitter,
chrom_sizes_table
)
# Add in the status column
coords = np.concatenate(
[coords, np.tile(i + 1, (len(coords), 1))], axis=1
) # Shape: _ x 7
all_pos_table.append(coords)
self.all_pos_table = np.concatenate(all_pos_table) # Shape: N x 7
self.num_total_pos = len(self.all_pos_table)
# Number of positives and negatives per batch
self.neg_per_batch = int(batch_size * neg_ratio / (neg_ratio + 1))
self.pos_per_batch = batch_size - self.neg_per_batch
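        # For example (illustrative numbers only), batch_size=64 with neg_ratio=1
        # gives 32 sampled negatives and 32 positives per batch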
if shuffle_before_epoch:
self.shuffle_rng = np.random.RandomState(shuffle_seed)
if self.jitter:
self.jitter_rng = np.random.RandomState(jitter_seed)
def _import_peaks(self, peaks_bed):
"""
Imports a peaks BED file in NarrowPeak format as a Pandas DataFrame.
Arguments:
`peaks_bed`: a BED file (gzipped or not) containing peaks in
ENCODE NarrowPeak format
Returns a Pandas DataFrame
"""
return pd.read_csv(
peaks_bed, sep="\t", header=None, # Infer compression
names=[
"chrom", "peak_start", "peak_end", "name", "score",
"strand", "signal", "pval", "qval", "summit_offset"
]
)
def _import_chrom_sizes(self, chrom_sizes_tsv):
"""
Imports a TSV of chromosome sizes, mapping chromosome to maximum size.
Arguments:
`chrom_sizes_tsv`: a 2-column TSV mapping chromosome name to size
Returns a Pandas DataFrame
"""
return pd.read_csv(
chrom_sizes_tsv, sep="\t", header=None, names=["chrom", "max_size"]
)
def _format_peaks_table(
self, peaks_table, chroms_keep, peak_retention, sample_length, jitter,
chrom_sizes_table
):
"""
Takes a table of imported peaks and formats it. This function performs
the following tasks:
1. Optionally filters peaks to only retain a subset of chromosomes
2. Optionally cuts down the set of peaks to a subset of high-confidence
peaks
3. Computes the intervals for inputs being centered around the summits
4. Drops any intervals that would overrun chromosome boundaries
Arguments:
`peaks_table`: a table imported by `_import_peaks`
`chrom_sizes_table`: a table imported by `_import_chrom_sizes`
Returns an N x 6 array, where columns 1-3 are the coordinate of the
input samples centered at summits, columns 4-5 are the original peak
location, and column 6 is the summit location.
"""
if chroms_keep:
# Keep only chromosomes specified
peaks_table = peaks_table[peaks_table["chrom"].isin(chroms_keep)]
if peak_retention is not None:
# Sort coordinates by confidence first
peaks_table = peaks_table.sort_values(by="signal", ascending=False)
# Keep only a subset of the peaks in the table
keep_num = int(len(peaks_table) * peak_retention) if \
peak_retention < 1 else peak_retention
peaks_table = peaks_table.head(keep_num)
# Expand the coordinates to be of size `sample_length`, centered
# around the summits
peaks_table["start"] = peaks_table["peak_start"] + \
peaks_table["summit_offset"] - (sample_length // 2)
peaks_table["end"] = peaks_table["start"] + sample_length
# Toss out any coordinates that (with jittering) may go past
# chromosome boundaries
# Add in the maximum size column
peaks_table = peaks_table.merge(chrom_sizes_table, on="chrom")
# Keep only coordinates that won't overrun the ends
left_mask = peaks_table["start"] - jitter >= 0
right_mask = peaks_table["end"] + jitter < peaks_table["max_size"]
peaks_table = peaks_table.loc[left_mask & right_mask]
# Compute the summit location from offset and peak start
peaks_table["summit"] = peaks_table["peak_start"] + \
peaks_table["summit_offset"]
# Extract the columns desired
coords = peaks_table[[
"chrom", "start", "end", "peak_start", "peak_end", "summit"
]].values.astype(object)
return coords
def __getitem__(self, index):
"""
Fetches a full batch of positive and negative coordinates by filling the
batch with some positive coordinates, and sampling randomly from the
rest of the genome for the negatives. Returns a B x 3 2D NumPy array of
coordinates, along with a parallel B-array of status. Status is 0
for negatives, and [1, n] for positives, where the status is 1 plus the
index of the coordinate BED file it came from.
This method may also perform jittering for dataset augmentation.
If `return_peaks` was specified at object creation-time, also return a
B x 3 2D NumPy array containing the peak information for the original
peaks, consisting of the peak boundaries and the summit location
(respectively); for negative samples drawn from the
GenomeIntervalSampler, these values are all -1.
"""
pos_table = self.all_pos_table[
index * self.pos_per_batch : (index + 1) * self.pos_per_batch
]
pos_coords, pos_peaks, pos_statuses = \
pos_table[:, :3], pos_table[:, 3:6], pos_table[:, 6]
# If specified, apply random jitter to each positive coordinate
if self.jitter:
jitter_vals = self.jitter_rng.randint(
-self.jitter, self.jitter + 1, size=len(pos_coords)
)
pos_coords[:, 1] += jitter_vals
pos_coords[:, 2] += jitter_vals
# Fetch the negatives to fill out the batch
if self.neg_per_batch:
neg_coords = self.genome_sampler.sample_intervals(
self.neg_per_batch
)
else:
neg_coords = np.empty(shape=(0, 3), dtype=object)
# At this point, `pos_coords` and `neg_coords` are both _ x 3
# Concatenate the coordinates together
coords = np.concatenate([pos_coords, neg_coords], axis=0)
# Concatenate the statuses together; status for negatives is just 0
status = np.concatenate(
[pos_statuses, np.zeros(len(neg_coords))] # Col 7
).astype(int)
if self.return_peaks:
            # Concatenate the peaks together; peaks for negatives are all -1
neg_peaks = np.full((len(neg_coords), 3), -1)
peaks = np.concatenate([pos_peaks, neg_peaks])
return coords, status, peaks
else:
return coords, status
def __len__(self):
return int(np.ceil(self.num_total_pos / float(self.pos_per_batch)))
def on_epoch_start(self):
if self.shuffle_before_epoch:
perm = self.shuffle_rng.permutation(self.num_total_pos)
self.all_pos_table = self.all_pos_table[perm]
class SummitCenteringCoordsBatcher(SamplingCoordsBatcher):
"""
Creates a batch producer that batches positive coordinates only, each one
centered at a summit.
Arguments:
`pos_coords_beds`: list of paths to gzipped BED files containing the
sets of positive coordinates for various tasks
`batch_size`: number of samples per batch
`chroms_keep`: if specified, only considers this set of chromosomes from
the coordinate BEDs
`chrom_sizes_tsv`: path to 2-column TSV listing sizes of each chromosome
`sample_length`: length of sampled sequence
`return_peaks`: if True, returns the peaks and summits sampled from the
peak set as a B x 3 array
`shuffle_before_epoch`: Whether or not to shuffle all examples before
each epoch
"""
def __init__(
self, pos_coords_beds, batch_size, chrom_sizes_tsv, sample_length,
chroms_keep=None, return_peaks=False, shuffle_before_epoch=False,
shuffle_seed=None
):
# Same as a normal SamplingCoordsBatcher, but with no negatives and no
# jitter, since the coordinates in the positive coordinate BEDs are
# already centered at the summits
super().__init__(
pos_coords_beds=pos_coords_beds,
batch_size=batch_size,
neg_ratio=0,
jitter=0,
chrom_sizes_tsv=chrom_sizes_tsv,
sample_length=sample_length,
genome_sampler=None,
chroms_keep=chroms_keep,
return_peaks=return_peaks,
shuffle_before_epoch=shuffle_before_epoch,
shuffle_seed=shuffle_seed
)
class PeakTilingCoordsBatcher(SamplingCoordsBatcher):
"""
Creates a batch producer that batches positive coordinates only, where the
coordinates are tiled such that all coordinate centers overlap with a peak.
Arguments:
`pos_coords_beds`: list of paths to gzipped BED files containing the
sets of positive coordinates for various tasks
`stride`: amount of stride when tiling the coordinates
`batch_size`: number of samples per batch
`chrom_sizes_tsv`: path to 2-column TSV listing sizes of each chromosome
`sample_length`: length of sampled sequence
`chroms_keep`: if specified, only considers this set of chromosomes from
the coordinate BEDs
`return_peaks`: if True, returns the peaks and summits sampled from the
peak set as a B x 3 array
`shuffle_before_epoch`: Whether or not to shuffle all examples before
each epoch
"""
def __init__(
self, pos_coords_beds, stride, batch_size, chrom_sizes_tsv,
sample_length, chroms_keep=None, return_peaks=False,
shuffle_before_epoch=False, shuffle_seed=None
):
# Similar to normal SamplingCoordsBatcher, but with no negatives and no
# jitter; initialization is similar, but replicate the peaks so that
# there are many summits tiled across each peak
self.batch_size = batch_size
self.jitter = 0
self.chroms_keep = chroms_keep
self.return_peaks = return_peaks
self.shuffle_before_epoch = shuffle_before_epoch
chrom_sizes_table = self._import_chrom_sizes(chrom_sizes_tsv)
def tile_peaks(row):
peak_start, peak_end = row[1], row[2]
summit_offsets = np.arange(0, peak_end - peak_start, stride)
num_expand = len(summit_offsets)
row_expand = np.tile(row, (num_expand, 1))
row_expand[:, -1] = summit_offsets
return row_expand
all_pos_table = []
for i, pos_coords_bed in enumerate(pos_coords_beds):
peaks_table = self._import_peaks(pos_coords_bed)
# Formatting peaks table will expand the peaks to the right sample
# length and filter for anything that overruns the chromosome
# boundaries, so perform replication before
peaks_values = peaks_table.values
expanded_peaks_values = np.concatenate([
tile_peaks(row) for row in peaks_values
], axis=0)
expanded_peaks_table = pd.DataFrame.from_records(
expanded_peaks_values, columns=list(peaks_table)
)
# Now format peaks table into N x 6 array
coords = self._format_peaks_table(
expanded_peaks_table, chroms_keep, None, sample_length, 0,
chrom_sizes_table
)
# Add in the status column
coords = np.concatenate(
[coords, np.tile(i + 1, (len(coords), 1))], axis=1
) # Shape: _ x 7
all_pos_table.append(coords)
self.all_pos_table = np.concatenate(all_pos_table) # Shape: N x 7
self.num_total_pos = len(self.all_pos_table)
# Number of positives and negatives per batch
self.num_coords = len(self.all_pos_table)
self.neg_per_batch = 0
self.pos_per_batch = self.batch_size
if shuffle_before_epoch:
self.shuffle_rng = np.random.RandomState(shuffle_seed)
class CoordDataset(torch.utils.data.IterableDataset):
"""
Generates single samples of a one-hot encoded sequence and value.
Arguments:
        `coords_batcher (SamplingCoordsBatcher)`: maps indices to batches of
coordinates (split into positive and negative binding)
`coords_to_seq (CoordsToSeq)`: maps coordinates to 1-hot encoded
sequences
`coords_to_vals (CoordsToVals)`: instantiated CoordsToVals object,
mapping coordinates to profiles
`revcomp`: whether or not to perform revcomp to the batch; this will
double the batch size implicitly
`return_coords`: if True, along with the 1-hot encoded sequences and
values, the batch also returns the set of coordinates used for the
batch, and the peak/summit locations for the positive examples
"""
def __init__(
self, coords_batcher, coords_to_seq, coords_to_vals, revcomp=False,
return_coords=False
):
self.coords_batcher = coords_batcher
self.coords_to_seq = coords_to_seq
self.coords_to_vals = coords_to_vals
self.revcomp = revcomp
self.return_coords = return_coords
# The dataset returns coordinates iff the batcher returns peak info
assert coords_batcher.return_peaks == return_coords
def get_batch(self, index):
"""
        Returns a batch, which consists of a B x I x 4 NumPy array of 1-hot
encoded sequence (I is the length of the input sequence), the associated
profiles, and a 1D length-B NumPy array of statuses. The profiles will
be a B x P x O x S array of profiles. O is the profile length, P is the
number of tracks returned, and S is the number of strands per track (1
or 2). Coordinates and peaks may also be returned as a B x 3 array.
"""
# Get batch of coordinates for this index
if self.return_coords:
coords, status, peaks = self.coords_batcher[index]
else:
coords, status = self.coords_batcher[index]
# Map this batch of coordinates to 1-hot encoded sequences
seqs = self.coords_to_seq(coords, revcomp=self.revcomp)
# Map this batch of coordinates to the associated values
profiles = self.coords_to_vals(coords)
# Profiles are returned as B x O x P x S, so transpose to get
# B x P x O x S
profiles = np.swapaxes(profiles, 1, 2)
# If reverse complementation was done, double sizes of everything else
if self.revcomp:
profiles = np.concatenate(
# To reverse complement, we must swap the strands AND the
# directionality of each strand (i.e. we are assigning the other
# strand to be the plus strand, but still 5' to 3'). If the
# profiles are unstranded, then flipping the last axis will do
# nothing
[profiles, np.flip(profiles, axis=(2, 3))]
)
status = np.concatenate([status, status])
if self.return_coords:
if self.revcomp:
coords_ret = np.concatenate([coords, coords])
peaks_ret = np.concatenate([peaks, peaks])
else:
coords_ret, peaks_ret = coords, peaks
return seqs, profiles, status, coords_ret, peaks_ret
else:
return seqs, profiles, status
def __iter__(self):
"""
Returns an iterator over the batches. If the dataset iterator is called
        from multiple workers, each worker will be given a shard of the full
range.
"""
worker_info = torch.utils.data.get_worker_info()
num_batches = len(self.coords_batcher)
if worker_info is None:
# In single-processing mode
start, end = 0, num_batches
else:
worker_id = worker_info.id
num_workers = worker_info.num_workers
shard_size = int(np.ceil(num_batches / num_workers))
start = shard_size * worker_id
end = min(start + shard_size, num_batches)
return (self.get_batch(i) for i in range(start, end))
def __len__(self):
return len(self.coords_batcher)
def on_epoch_start(self):
"""
This should be called manually before the beginning of every epoch (i.e.
before the iteration begins).
"""
self.coords_batcher.on_epoch_start()
@dataset_ex.capture
def create_data_loader(
peaks_bed_paths, profile_hdf5_path, sampling_type, batch_size,
reference_fasta, chrom_sizes_tsv, input_length, profile_length,
negative_ratio, peak_tiling_stride, peak_retention, num_workers, revcomp,
jitter_size, negative_seed, shuffle_seed, jitter_seed, chrom_set=None,
shuffle=True, return_coords=False
):
"""
Creates an IterableDataset object, which iterates through batches of
coordinates and returns profiles for the coordinates.
Arguments:
`peaks_bed_paths`: a list of paths to gzipped 6-column BED files
containing coordinates of positive-binding coordinates
`profile_hdf5_path`: path to HDF5 containing reads mapped to each
coordinate; this HDF5 must be organized by chromosome, with each
dataset being L x S x 2, where L is the length of the chromosome,
S is the number of tracks stored, and 2 is for each strand
`sampling_type`: one of ("SamplingCoordsBatcher",
"SummitCenteringCoordsBatcher", or "PeakTilingCoordsBatcher"), which
corresponds to sampling positive and negative regions, taking only
positive regions centered around summits, and taking only positive
regions tiled across peaks
`chrom_set`: a list of chromosomes to restrict to for the positives and
sampled negatives; defaults to all coordinates in the given BEDs and
sampling over the entire genome
`shuffle`: if specified, shuffle the coordinates before each epoch
`return_coords`: if specified, also return the underlying coordinates
and peak data along with the profiles in each batch
"""
assert sampling_type in (
"SamplingCoordsBatcher", "SummitCenteringCoordsBatcher",
"PeakTilingCoordsBatcher"
)
# Maps set of coordinates to profiles
coords_to_vals = CoordsToVals(profile_hdf5_path, profile_length)
if sampling_type == "SamplingCoordsBatcher":
# Randomly samples from genome
genome_sampler = GenomeIntervalSampler(
chrom_sizes_tsv, input_length, chroms_keep=chrom_set,
seed=negative_seed
)
# Yields batches of positive and negative coordinates
coords_batcher = SamplingCoordsBatcher(
peaks_bed_paths, batch_size, negative_ratio, jitter_size,
chrom_sizes_tsv, input_length, genome_sampler,
chroms_keep=chrom_set, peak_retention=peak_retention,
return_peaks=return_coords, shuffle_before_epoch=shuffle,
jitter_seed=jitter_seed, shuffle_seed=shuffle_seed
)
elif sampling_type == "SummitCenteringCoordsBatcher":
# Yields batches of positive coordinates, centered at summits
coords_batcher = SummitCenteringCoordsBatcher(
peaks_bed_paths, batch_size, chrom_sizes_tsv, input_length,
chroms_keep=chrom_set, return_peaks=return_coords,
shuffle_before_epoch=shuffle, shuffle_seed=shuffle_seed
)
else:
# Yields batches of positive coordinates, tiled across peaks
coords_batcher = PeakTilingCoordsBatcher(
peaks_bed_paths, peak_tiling_stride, batch_size, chrom_sizes_tsv,
input_length, chroms_keep=chrom_set, return_peaks=return_coords,
shuffle_before_epoch=shuffle, shuffle_seed=shuffle_seed
)
# Maps set of coordinates to 1-hot encoding, padded
coords_to_seq = util.CoordsToSeq(
reference_fasta, center_size_to_use=input_length
)
# Dataset
dataset = CoordDataset(
coords_batcher, coords_to_seq, coords_to_vals, revcomp=revcomp,
return_coords=return_coords
)
# Dataset loader: dataset is iterable and already returns batches
loader = torch.utils.data.DataLoader(
dataset, batch_size=None, num_workers=num_workers,
collate_fn=lambda x: x
)
return loader
data = None
loader = None
@dataset_ex.automain
def main():
global data, loader
import os
import tqdm
import json
import matplotlib.pyplot as plt
paths_json_path = "/users/amtseng/att_priors/data/processed/ENCODE_TFChIP/profile/config/SPI1/SPI1_training_paths.json"
with open(paths_json_path, "r") as f:
paths_json = json.load(f)
peak_beds = paths_json["peak_beds"]
profile_hdf5 = paths_json["profile_hdf5"]
splits_json_path = "/users/amtseng/att_priors/data/processed/chrom_splits.json"
with open(splits_json_path, "r") as f:
splits_json = json.load(f)
train_chroms, val_chroms, test_chroms = \
splits_json["1"]["train"], splits_json["1"]["val"], \
splits_json["1"]["test"]
loader = create_data_loader(
peak_beds, profile_hdf5, "SamplingCoordsBatcher",
return_coords=True, chrom_set=val_chroms,
jitter_size=128, jitter_seed=123, peak_retention=0.1,
shuffle_seed=123, negative_seed=123
)
loader.dataset.on_epoch_start()
start_time = datetime.now()
for batch in tqdm.tqdm(loader, total=len(loader.dataset)):
data = batch
end_time = datetime.now()
print("Time: %ds" % (end_time - start_time).seconds)
k = 2
rc_k = int(len(data[0]) / 2) + k
seqs, profiles, statuses, coords, peaks = data
seq, prof, status = seqs[k], profiles[k], statuses[k]
rc_seq, rc_prof, rc_status = \
seqs[rc_k], profiles[rc_k], statuses[rc_k]
coord, peak = coords[k], peaks[k]
rc_coord, rc_peak = coords[rc_k], peaks[rc_k]
def print_one_hot_seq(one_hot):
s = util.one_hot_to_seq(one_hot)
print(s[:20] + "..." + s[-20:])
print_one_hot_seq(seq)
print_one_hot_seq(rc_seq)
print(status, rc_status)
print(coord, rc_coord)
print(peak, rc_peak)
task_ind = 0
num_strands = prof.shape[2]
fig, ax = plt.subplots(2, 1)
for i in range(num_strands):
ax[0].plot(prof[task_ind][:, i])
ax[1].plot(rc_prof[task_ind][:, i])
plt.show()
|
from aws_cdk import core
from aws_cdk import aws_s3 as s3
from aws_cdk import aws_ssm as ssm
from aws_cdk import aws_logs as logs
class CdkMinecraftS3Stack(core.Stack):
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
        # This stack defines the S3 buckets that we want to reference in the main stack, but not re-create when rebuilding minecraft itself
minecraft_files = s3.Bucket(self, "MinecraftFiles",
block_public_access = s3.BlockPublicAccess.BLOCK_ALL )
ssm.StringParameter(self, "FileBucketURL", parameter_name = "s3_bucket_files",
string_value = minecraft_files.bucket_arn)
if self.node.try_get_context("useS3Backup"):
minecraft_backups = s3.Bucket(self, "MinecraftBackups",
block_public_access = s3.BlockPublicAccess.BLOCK_ALL,
lifecycle_rules = [s3.LifecycleRule(expiration = core.Duration.days(365))])
ssm.StringParameter(self, "BackupBucketName", parameter_name = "s3_bucket_backups",
string_value = minecraft_backups.bucket_arn)
minecraft_log = logs.LogGroup(self, "MinecraftLog", log_group_name = "minecraft.log", retention = logs.RetentionDays.ONE_MONTH)
messages_log = logs.LogGroup(self, "MessagesLog", log_group_name = "/var/log/messages", retention = logs.RetentionDays.ONE_MONTH)
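# Sketch of how a consuming stack could resolve these parameters (assuming the
# same parameter names; note the parameters above store the bucket ARNs):
#     bucket_arn = ssm.StringParameter.value_for_string_parameter(self, "s3_bucket_files")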
|
from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
from checkov.common.models.enums import CheckCategories, CheckResult
class GoogleCloudDNSSECEnabled(BaseResourceValueCheck):
"""
Looks for DNSSEC state at dns_managed_zone:
https://www.terraform.io/docs/providers/google/r/dns_managed_zone.html#state
"""
def __init__(self):
name = "Ensure that DNSSEC is enabled for Cloud DNS"
id = "CKV_GCP_16"
supported_resources = ["google_dns_managed_zone"]
categories = [CheckCategories.ENCRYPTION]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf):
if 'visibility' in conf:
if conf['visibility'][0] == 'private':
return CheckResult.UNKNOWN # check is irrelevant (cannot set DNSSEC to anything else)
# default visibility is public; just use base class implementation
return super().scan_resource_conf(conf)
def get_inspected_key(self):
return "dnssec_config/[0]/state"
def get_expected_value(self):
return "on"
def get_expected_values(self):
return [self.get_expected_value(), "transfer"]
check = GoogleCloudDNSSECEnabled()
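# Rough sketch of how the check behaves on parsed resource configs (the dict
# shapes below are illustrative of checkov's parser output, not taken verbatim):
#     check.scan_resource_conf({'dnssec_config': [{'state': ['on']}]})    # PASSED
#     check.scan_resource_conf({'dnssec_config': [{'state': ['off']}]})   # FAILED
#     check.scan_resource_conf({'visibility': ['private']})               # UNKNOWN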
|
{
'targets': [
{
'target_name': 'huffin',
'include_dirs' : [
"<!(node -e \"require('nan')\")",
"<(nodedir)/deps/openssl/openssl/include",
'deps/libsodium/src/libsodium/include',
'deps/ed25519-donna',
],
'defines': [
'ED25519_SSE2',
],
'sources': [
'deps/ed25519-donna/ed25519.c',
'src/binding.cc',
],
'xcode_settings': {
'OTHER_CFLAGS': [
'-g',
'-O3',
]
},
'cflags': [
'-g',
'-O3',
],
'libraries': [
'<!(node preinstall.js --print-lib)'
],
'conditions': [
['OS=="linux"', {
'link_settings': {
'libraries': [ "-Wl,-rpath=\\$$ORIGIN/"]
}
}],
],
}
]
}
|
token = "NjAzODc5NDE2NDE3ODc4MDQ3.XTl0iA.PTZPdcnhpiV0Zm3iNmaMUgpphPg"
prefix = "~"
amqp_url = "amqp://guest:guest@127.0.0.1:5672/%2f"
redis_url = "redis://127.0.0.1:6379/0"
owner = 446290930723717120
cogs = [
"events",
"general",
]
|
import json
from . import fields
from . import validate
try:
import urllib2 as request
except ImportError:
from urllib import request
ENABLED_HUMAN_VALUE_MAP = {
0 : 'Disabled',
1 : 'Enabled'
}
class Thermostat(object):
"""
This class implements the most basic functionality of communicating with
an actual thermostat.
"""
MODEL = ''
# The current API doesn't require this header, but it also doesn't hurt,
# and it's the right thing to do.
JSON_HEADER = {'Content-Type' : 'application/json'}
def __init__(self, host, timeout=4):
self.host = host
self.timeout = timeout
def get(self, relative_url):
"""
:param relative_url: The relative URL from the root of the website.
:returns: file-like object as returned by urllib[2,.request].urlopen
"""
url = self._construct_url(relative_url)
return request.urlopen(url, timeout=self.timeout)
def post(self, relative_url, value):
"""
:param relative_url: The relative URL from the root of the website.
:param value: Value to set this attribute to
:returns: file-like object as returned by urllib[2,.request].urlopen
"""
url = self._construct_url(relative_url)
request_instance = request.Request(url, value, self.JSON_HEADER)
return request.urlopen(request_instance, timeout=self.timeout)
def _construct_url(self, relative_url):
"""
:param relative_url: The relative URL from the root of the website
:returns: Full URL, for example 'http://192.168.0.2/tstat'
"""
return 'http://%s/%s' % (self.host, relative_url.lstrip('/'))
class CommonThermostat(Thermostat):
"""
This class implements the common API features that are available and work
across all models of thermostat.
"""
def reboot(self):
"""reboots the thermostat"""
response = self.post('/sys/command', json.dumps({'command' : 'reboot'}).encode('utf-8'))
validate.validate_response(response)
### tstat subsystem ###
tstat = fields.ReadOnlyField('/tstat', None, validate_response=validate.validate_tstat_response)
model = fields.ReadOnlyField('/tstat/model', 'model')
version = fields.Field('/tstat/version', 'version')
temp = fields.ReadOnlyField('/tstat', 'temp')
tmode = fields.Field('/tstat', 'tmode',
human_value_map={
0 : 'Off',
1 : 'Heat',
2 : 'Cool',
3 : 'Auto'
})
fmode = fields.Field('/tstat', 'fmode',
human_value_map={
0 : 'Auto',
1 : 'Auto/Circulate',
2 : 'On'
})
override = fields.ReadOnlyField('/tstat', 'override',
human_value_map=ENABLED_HUMAN_VALUE_MAP)
hold = fields.Field('/tstat', 'hold', human_value_map=ENABLED_HUMAN_VALUE_MAP)
led = fields.Field('/tstat/led', 'energy_led')
t_heat = fields.Field('/tstat/ttemp', 't_heat', post_url='/tstat')
t_cool = fields.Field('/tstat/ttemp', 't_cool', post_url='/tstat')
it_heat = fields.Field('/tstat/ttemp', 't_heat', post_url='/tstat', post_name='it_heat')
it_cool = fields.Field('/tstat/ttemp', 't_cool', post_url='/tstat', post_name='it_cool')
tstate = fields.ReadOnlyField('/tstat', 'tstate',
human_value_map={
0 : 'Off',
1 : 'Heat',
2 : 'Cool'
})
fstate = fields.ReadOnlyField('/tstat', 'fstate',
human_value_map={
0 : 'Off',
1 : 'On'
})
time = fields.Field('/tstat', 'time')
pump = fields.ReadOnlyField('/tstat/hvac_settings', 'pump',
human_value_map={
1 : 'Normal',
2 : 'Heat Pump'
})
aux_type = fields.ReadOnlyField('/tstat/hvac_settings', 'aux_type',
human_value_map={
1 : 'Gas',
2 : 'Electric'
})
# LED status values: 1 = green, 2 = yellow, 4 = red
energy_led = fields.WriteOnlyField('/tstat/led', 'energy_led', None)
# This isn't documented. It might be postable, but I'm not going to try.
power = fields.ReadOnlyField('/tstat/power', 'power')
program_cool = fields.ReadOnlyField('/tstat/program/cool', None)
program_heat = fields.ReadOnlyField('/tstat/program/heat', None)
datalog = fields.ReadOnlyField('/tstat/datalog', None)
# Remote temperature control; posting to rem_temp sets rem_mode to 1
rem_mode = fields.Field('/tstat/remote_temp', 'rem_mode',
human_value_map=ENABLED_HUMAN_VALUE_MAP)
rem_temp = fields.WriteOnlyField('/tstat/remote_temp', 'rem_temp', None)
### sys subsystem ###
sys = fields.ReadOnlyField('/sys', None)
name = fields.Field('/sys/name', 'name')
services = fields.ReadOnlyField('/sys/services', None)
mode = fields.Field('/sys/mode', 'mode', human_value_map={
0 : 'Provisioning',
1: 'Normal'
})
network = fields.ReadOnlyField('/sys/network', None)
security = fields.ReadOnlyField('/sys/network', 'security',
human_value_map = {
1 : 'WEP',
3 : 'WPA',
4 : 'WPA2 Personal'
})
### cloud subsystem ###
cloud = fields.ReadOnlyField('/cloud', None)
### methods ###
def set_day_program(self, heat_cool, day, program):
"""
Sets the program for a particular day. See the API docs for details,
as it is a bit complicated.
        :param heat_cool: Either the string 'heat' or 'cool'
:param day: One of 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'
:param program: See thermostat API docs
:type program: dict
"""
self.post('/tstat/program/%s/%s' % (heat_cool, day), json.dumps(program).encode('utf-8'))
class CT30(CommonThermostat):
"""
Base model for CT30-based thermostats (including the 3M-50)
"""
MODEL = 'CT30'
hvac_code = fields.ReadOnlyField('/tstat/hvac_settings', 'hvac_code',
human_value_map={
1 : '1 stage heat, 1 stage cool',
2 : '2 stage heat, 1 stage cool',
3 : '2 stage heat, 2 stage cool',
4 : '2 stage heat, 1 stage cool',
5 : '2 stage heat, 2 stage cool',
10 : '1 stage pump, 1 stage aux',
11 : '1 stage pump, 1 stage aux',
12 : '1 stage pump, no aux',
})
class CT80(CommonThermostat):
"""
Base model for CT80-based thermostats
"""
MODEL = 'CT80'
### Program Mode (extended tstat subsystem)
program_mode = fields.Field('/tstat', 'program_mode',
human_value_map={
0 : 'Program A',
1 : 'Program B',
2 : 'Vacation',
3 : 'Holiday'
})
    # These three stage attributes take the place of the CT30's hvac_code
heat_stages = fields.ReadOnlyField('/tstat/hvac_settings', 'heat_stages')
cool_stages = fields.ReadOnlyField('/tstat/hvac_settings', 'cool_stages')
aux_stages = fields.ReadOnlyField('/tstat/hvac_settings', 'aux_stages')
### (De)humidifier system ###
humidity = fields.ReadOnlyField('/tstat/humidity', 'humidity')
humidifier_mode = fields.Field('/tstat/humidifier', 'humidifier_mode',
human_value_map = {
0: 'Off',
1: 'Run only with heat',
2: 'Run any time (runs fan)',
})
humidifier_setpoint = fields.Field('/tstat/thumidity', 'thumidity')
class CT80RevB(CT80):
"""
Base model for all Revision B versions of the CT80
"""
MODEL = 'CT80 RevB'
swing = fields.Field('/tstat/tswing', 'tswing')
# Dehumidifier attributes
dehumidifier_mode = fields.Field('/tstat/dehumidifier', 'mode',
human_value_map = {
0: 'Off',
1: 'On with fan',
2: 'On without fan',
})
dehumidifier_setpoint = fields.Field('/tstat/dehumidifier', 'setpoint')
# External dehumidifier
external_dehumidifier_mode = fields.Field('/tstat/ext_dehumidifier', 'mode',
human_value_map = {
0: 'Off',
1: 'On with fan',
2: 'On without fan',
})
external_dehumidifier_setpoint = fields.Field('/tstat/ext_dehumidifier', 'setpoint')
# Note: the night light is tricky and will return the last-set intensity even if it's off!
night_light = fields.Field('/tstat/night_light', 'intensity',
human_value_map = {
0: 'Off',
1: '25%',
2: '50%',
3: '75%',
4: '100%',
})
# Note: lock_mode 3 can only be changed remotely
lock_mode = fields.Field('/tstat/lock', 'lock_mode',
human_value_map={
0 : 'Lock disabled',
1 : 'Partial lock',
2 : 'Full lock',
3 : 'Utility lock'
})
simple_mode = fields.Field('/tstat/simple_mode', 'simple_mode',
human_value_map={
1 : 'Normal mode',
2 : 'Simple mode'
})
# Specific model classes
class CT50(CT30):
MODEL = 'CT50'
class CT30v175(CT30):
"""
Defines API features that differ for this specific model from
CommonThermostat
"""
MODEL = 'CT30 V1.75'
class CT30v192(CT30):
"""
Defines API features that differ for this specific model from
CommonThermostat
"""
MODEL = 'CT30 V1.92'
class CT30v194(CT30):
"""
Defines API features that differ for this specific model from
    CommonThermostat
"""
MODEL = 'CT30 V1.94'
class CT30v199(CT30):
"""
Defines API features that differ for this specific model from
CommonThermostat
"""
MODEL = 'CT30 V1.99'
class CT50v109(CT50):
"""
Defines API features that differ for this specific model from
CommonThermostat
"""
MODEL = 'CT50 V1.09'
class CT50v188(CT50):
"""
Defines API features that differ for this specific model from
CommonThermostat
"""
MODEL = 'CT50 V1.88'
class CT50v192(CT30):
"""
Defines API features that differ for this specific model from
CommonThermostat
"""
MODEL = 'CT50 V1.92'
class CT50v194(CT50):
"""
Defines API features that differ for this specific model from
CommonThermostat
"""
MODEL = 'CT50 V1.94'
class CT80RevB1v100(CT80RevB):
MODEL = 'CT80 Rev B1 V1.00'
class CT80RevB2v100(CT80RevB):
MODEL = 'CT80 Rev B2 V1.00'
class CT80RevB2v103(CT80RevB):
MODEL = 'CT80 Rev B2 V1.03'
class CT80RevB2v109(CT80RevB):
MODEL = 'CT80 Rev B2 V1.09'
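# Rough usage sketch (the IP address is hypothetical; get/set behaviour depends
# on the descriptors defined in the fields module imported above):
#     tstat = CT50v194('192.168.0.2')
#     print(tstat.temp, tstat.tmode)   # read current temperature and mode
#     tstat.t_heat = 68                # set the heat target via POST to /tstat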
|
# Silvio Dunst
# Create a tuple that stores the months of the year,
# from that tuple create another tuple with just the summer months (May, June, July),
# print out the summer months one at a time.
# Tuples
months = ("January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
)
summer = months[4:7]  # Create a new tuple (summer) by slicing months from index 4 (May)
                      # up to, but not including, index 7, i.e. May, June and July
for month in summer:  # Loop over each month in the summer tuple
    print(month)      # print the current month
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Plot the results from the classification of lab membership by loading in the .pkl files generated by
figure3f_decoding_lab_membership_basic and figure3f_decoding_lab_membership_full
Guido Meijer
18 Jun 2020
"""
import pandas as pd
import numpy as np
import seaborn as sns
from os.path import join
import matplotlib.pyplot as plt
from paper_behavior_functions import seaborn_style, figpath, load_csv, FIGURE_WIDTH, FIGURE_HEIGHT
# Settings
FIG_PATH = figpath()
colors = [[1, 1, 1], [1, 1, 1], [0.6, 0.6, 0.6]]
seaborn_style()
# Load in results from the saved classification file
decoding_result = load_csv('classification_results', 'classification_results_full_bayes.pkl')
# Calculate if decoder performs above chance
chance_level = decoding_result['original_shuffled'].mean()
significance = np.percentile(decoding_result['original'], 2.5)
sig_control = np.percentile(decoding_result['control'], 0.001)
if chance_level > significance:
    print('Classification performance not significantly above chance')
else:
print('Above chance classification performance!')
# %%
f, ax1 = plt.subplots(1, 1, figsize=(FIGURE_WIDTH/5, FIGURE_HEIGHT))
sns.violinplot(data=pd.concat([decoding_result['control'],
decoding_result['original_shuffled'],
decoding_result['original']], axis=1),
palette=colors, ax=ax1)
ax1.plot([-1, 3.5], [chance_level, chance_level], '--', color='k', zorder=-10)
ax1.set(ylabel='Decoding accuracy', xlim=[-0.8, 2.4], ylim=[-0.1, 0.62])
ax1.set_xticklabels(['Positive\ncontrol', 'Shuffle', 'Mouse\nbehavior'],
rotation=90, ha='center')
plt.tight_layout()
sns.despine(trim=True)
plt.savefig(join(FIG_PATH, 'suppfig_decoding_first_biased.pdf'))
plt.savefig(join(FIG_PATH, 'suppfig_decoding_first_biased.png'), dpi=300)
# %%
f, ax1 = plt.subplots(1, 1, figsize=(FIGURE_WIDTH/4, FIGURE_HEIGHT))
n_labs = decoding_result['confusion_matrix'][0].shape[0]
sns.heatmap(data=decoding_result['confusion_matrix'].mean(), vmin=0, vmax=0.4)
ax1.plot([0, 7], [0, 7], '--w')
ax1.set(xticklabels=np.arange(1, n_labs + 1), yticklabels=np.arange(1, n_labs + 1),
ylim=[0, n_labs], xlim=[0, n_labs],
title='', ylabel='Actual lab', xlabel='Predicted lab')
plt.setp(ax1.xaxis.get_majorticklabels(), rotation=40)
plt.setp(ax1.yaxis.get_majorticklabels(), rotation=40)
plt.gca().invert_yaxis()
plt.tight_layout()
plt.savefig(join(FIG_PATH, 'suppfig_confusion_matrix_first_biased.pdf'))
plt.savefig(join(FIG_PATH, 'suppfig_confusion_matrix_first_biased.png'), dpi=300)
|
from qasrl.nn.initializers import PretrainedModelInitializer_Prefixing
|
import csv
class Player:
    # Initialize the player: everyone starts with 20000 game coins and a starting position of 0
def __init__(self,money = 20000, po = 0):
self.__money = money
self.__po = po
self.__status = 0
    # Set the player's name
def setName(self,name,id):
self.__name = name
self.__id = id
with open('players.csv','a',newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow([self.__id,self.__name,self.__money,self.__po,self.__status])
    # Get the player's name
def getName(self):
return self.__name
    # Update the player's game coins
def setMoney(self,money,id):
self.__money += money
table = [['id','name','money','po','status']]
with open('players.csv','r',newline='') as csvfile:
rows = csv.DictReader(csvfile)
for row in rows:
if (row.get('id') == str(id)):
row['money'] = str(int(row['money'])+money)
table.append([row['id'],row['name'],row['money'],row['po'],row['status']])
with open('players.csv','w',newline='') as csvfile:
writer = csv.writer(csvfile)
writer.writerows(table)
    # Get the player's game coins
def getMoney(self):
return self.__money
    # Update the player's position
def setPo(self,move):
self.__po += move
    # Get the player's position
def getPo(self):
return self.__po
if __name__ == "__main__":
myplayer = Player()
myplayer.setMoney(1000,0)
|
import spidev
from .abstract_transport import AbstractTransport
from .gpio_interrupt import GPIOInterrupt
class SPITransport(AbstractTransport):
__READ_FLAG = 0x80
__MAGNETOMETER_READ_FLAG = 0xC0
__DUMMY = 0xFF
data_ready_interrupt: GPIOInterrupt
def __init__(self, spi_device: int, magnetometer: bool, data_ready_pin: int = None):
super().__init__()
self.magnetometer = magnetometer
self.spi = spidev.SpiDev()
self._init_spi(spi_device)
self.data_ready_interrupt = None
if data_ready_pin:
self.data_ready_interrupt = GPIOInterrupt(data_ready_pin)
def _init_spi(self, spi_device: int):
self.spi.open(0, spi_device)
self.spi.mode = 0b00
self.spi.max_speed_hz = 8_000_000
def close(self):
self.spi.close()
if self.data_ready_interrupt:
self.data_ready_interrupt.close()
def write_byte(self, address: int, value: int):
self.spi.writebytes([address, value])
def read_byte(self, address: int) -> int:
return self.spi.xfer([address | self.__READ_FLAG, self.__DUMMY])[1]
def read_bytes(self, reg_address, length):
request = [self.__DUMMY] * (length + 1)
if self.magnetometer:
# Need to set bit 1 for multi-byte reads by the magnetometer or we
# just keep reading the same byte
request[0] = reg_address | self.__MAGNETOMETER_READ_FLAG
else:
request[0] = reg_address | self.__READ_FLAG
response = self.spi.xfer(request)
return response[1:]
def data_ready(self, timeout: int) -> bool:
if self.data_ready_interrupt:
return self.data_ready_interrupt.wait_for(timeout)
else:
raise RuntimeError('SPITransport needs a GPIO pin to support data_ready().')
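# Rough usage sketch (register addresses are hypothetical; requires SPI hardware):
#     transport = SPITransport(spi_device=0, magnetometer=False, data_ready_pin=17)
#     who_am_i = transport.read_byte(0x0F)
#     samples = transport.read_bytes(0x28, 6)
#     transport.close()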
|
from django.dispatch import Signal
# This signal is sent when we receive a subscription update
subscription_update = Signal(providing_args=["user"])
gift_accepted = Signal()
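# A receiver can hook into these signals in the usual Django way, e.g. (sketch):
#     from django.dispatch import receiver
#
#     @receiver(subscription_update)
#     def handle_subscription_update(sender, user=None, **kwargs):
#         ...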
|
import _plotly_utils.basevalidators
class DimensionsValidator(_plotly_utils.basevalidators.CompoundArrayValidator):
def __init__(
self, plotly_name='dimensions', parent_name='parcoords', **kwargs
):
super(DimensionsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str='Dimension',
data_docs="""
constraintrange
The domain range to which the filter on the
dimension is constrained. Must be an array of
`[fromValue, toValue]` with `fromValue <=
toValue`, or if `multiselect` is not disabled,
you may give an array of arrays, where each
inner array is `[fromValue, toValue]`.
label
The shown name of the dimension.
multiselect
Do we allow multiple selection ranges or just a
single range?
range
The domain range that represents the full,
shown axis extent. Defaults to the `values`
extent. Must be an array of `[fromValue,
toValue]` with finite numbers as elements.
tickformat
Sets the tick label formatting rule using d3
formatting mini-language which is similar to
those of Python. See https://github.com/d3/d3-f
ormat/blob/master/README.md#locale_format
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to *array*. Used with
`tickvals`.
ticktextsrc
Sets the source reference on plot.ly for
ticktext .
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to *array*. Used with `ticktext`.
tickvalssrc
Sets the source reference on plot.ly for
tickvals .
values
Dimension values. `values[n]` represents the
value of the `n`th point in the dataset,
therefore the `values` vector for all
dimensions must be the same (longer vectors
will be truncated). Each value must be a finite
number.
valuessrc
Sets the source reference on plot.ly for
values .
visible
Shows the dimension when set to `true` (the
default). Hides the dimension for `false`.""",
**kwargs
)
|
#!/usr/bin/env python
#
# Can we calculate the flux density of the moon and an antenna G/T based off
# that?
#
# 1/2019 - Kyle Eberhart
#
# Based on - Darko Sekuljica - "Using the Moon as a calibrated noise source
# to measure the G/T figure-of-merit of an X-band satellite receiving station
# with a large antenna 200...400 wavelengths in diameter"
# http://antena.fe.uni-lj.si/literatura/Razno/Diplome/Sekuljica/Master%20Thesis%20-%20popravki%2017-01-2017.pdf
#
# William C. Daywitt - "An Error Analysis for the Use of Presently Available
# Lunar Radio Flux Data in Broadbeam Antenna-System Measurements"
#
# This project made use of Skyfield, http://rhodesmill.org/skyfield/
#
# This project made use of Astropy, http://www.astropy.org a
# community-developed core Python package for Astronomy astropy:2013,
# astropy:2018
#
#------------------------------------------------------------------
import math
import sys
from skyfield.api import load
from skyfield.api import Topos
from skyfield.almanac import phase_angle
from astropy import units as u
from datetime import timedelta
import itur
class MoonGoT:
'''Wrap up the Moon Flux and G/T in a class'''
def __init__(self, test=None):
# some constants and globals to use
self.press = None
self.temp = None
self.humid = None
self.k1 = None
self.gt_lin = None
self.gt_db = None
# Boltzmanns constant (m^2 kg s^-2 K^-1)
self.k_bolt = 1.38064852e-23
# Plancks constant (m^2 kg s^-1)
self.h_planck = 6.62607004e-34
# the speed of light in (m s^-1)
self.c = 299792458
self.moon_dia_km = 3476
# Configure and pre-calculate a bit
self.ts = load.timescale(builtin=True)
self.e = load('de421.bsp')
self.earth, self.luna = self.e['earth'], self.e['moon']
if test is None:
# The 13M antennas are close enough together to make no real difference.
# We will use 13M-2 as the location of reference.
#-------------------------------------------------------------
# Change this value to the correct date and time of measurement
self.t = self.ts.utc(2019, 1, 27, 14, 10)
#
#------------------------------------------------------------
#self.t = self.ts.now()
self.diam = 13.0
self.lat = 64.97381
self.lon = -147.50575
self.alt = 385.0
self.observer = self.earth + Topos(latitude_degrees=self.lat,
longitude_degrees=self.lon,
elevation_m=self.alt)
self.phi()
self.theta_moon()
else:
# use the values from the paper to check stuff
self.t = self.ts.utc(2016, 10, 13, 19, 23)
self.diam = 11.28
self.frequency = 8177500000
self.y = 2.226
self.k2est = 5.71
self.lat = 40.6664
self.lon = 16.6043
self.alt = 100.0
self.observer = self.earth + Topos(latitude_degrees=self.lat,
longitude_degrees=self.lon,
elevation_m=self.alt)
self.phi()
self.theta_moon()
self.calc_g_over_t()
def set_dtg(self, date=None):
if date is None:
self.t = self.ts.now()
else:
self.t = self.ts.from_datetime(date)
def phi(self, time=None):
'''Calculate the lunar phase angle in degrees'''
# uses the skyfield library which seems to calculate the phase
        # angle of the moon oddly. It's 180 degrees out from the regular convention that
# I can find.
if time is None:
time = self.t
pa_ = phase_angle(self.e, "moon", time)
pa = 180 - (pa_.to(u.deg)).value
return pa
def theta_moon(self, time=None):
# Moons angular diameter in degrees
if time is None:
time = self.t
astrometric = self.observer.at(time).observe(self.luna)
el, az, d = astrometric.apparent().altaz()
self.elevation = el.to(u.deg)
self.azimuth = az.to(u.deg)
self.distance = d.to(u.km)
arcsec = ((206265 * self.moon_dia_km*u.km) / self.distance)*u.arcsec
self.theta = (arcsec.to(u.deg)).value
return self.theta
def calc_t_moon(self, deg, f=None):
# The average brightness temperature of the Moon in (K)
# f is frequency in Hz
# deg is phi, the lunar phase angle in degrees
        # - if phase angle is decreasing use 360 - phi
#
# from equation [4.14 - 4.16]
#
if f is None:
f = self.frequency
self.deg = deg
five_min = timedelta(minutes=5)
now_five = self.ts.utc(self.t.utc_datetime() + five_min)
p1 = self.phi(now_five)
self.t0 = 207.7 + (24.43/(f*10**-9))
# from the paper
self.t1overt0 = 0.004212 * math.pow((f*10**-9),1.224)
# From our moon spreadsheet
# self.t1overt0 = 0.004212 * (f*10**-9) * math.pow(math.e, 1.224)
self.psi = 43.83 / (1 + 0.0109 * (f*10**-9))
if deg > p1:
self.t_moon = self.t0 * ( 1 - self.t1overt0 * math.cos(math.radians((360-deg) - self.psi)))
# print(360-deg, "phi -lunar phase angle in degrees, descending value calc.")
return self.t_moon
else:
self.t_moon = self.t0 * ( 1 - self.t1overt0 * math.cos(math.radians(deg - self.psi)))
# print(deg, "phi- lunar phase angle in degrees, ascending value calc.")
return self.t_moon
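    # Worked example (values rounded, for illustration only): at f = 8.1775 GHz,
    # T0 = 207.7 + 24.43/8.1775 ~= 210.7 K, T1/T0 = 0.004212 * 8.1775**1.224 ~= 0.055,
    # and psi = 43.83 / (1 + 0.0109*8.1775) ~= 40.2 deg.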
def calc_moon_flux(self, f=None):
# this is all from a paper by Darko Sekuljica, "Using the Moon as a calibrated
# noise source to measure the G/T ...." it goes on. At any rate equation [4.13]
#
# s_moon = (2*k_bolt*pi^3)/(4*180^2*c^2) * f^2 * T_moon * Omega_moon
#
# Flux density of the moon in (W m^-2 Hz-1)
#
if f is None:
f = self.frequency
# The first term? is all constants...
term_1 = (2*self.k_bolt*math.pow(math.pi, 3))/(4*math.pow(180,2)*math.pow(self.c,2))
term_2 = math.pow(f, 2)
term_3 = self.calc_t_moon(self.phi())
term_4 = math.pow(self.theta, 2)
self.s_moon = term_1 * term_2 * term_3 * term_4
return self.s_moon
def calc_wavelength(self, f=None):
# convert frequency to wavelength
if f is None:
f = self.frequency
self.wavelength = float(self.c)/float(f)
return self.wavelength
def calc_hpbw(self):
'''calculate the Half Power Beam Width of our antenna.'''
# Our 13M antennas have the S-Band feed at the prime focus and the
# X-band feed is double-shaped, with two reflectors.
#
        # also I have learned that this is not necessarily the best formula for
        # calculating HPBW, but we need to know the edge taper of the dish for
# better results.
if self.frequency > 3*10**9:
self.hpbw = 59.0 * self.wavelength / self.diam
else:
self.hpbw = 67.5 * self.wavelength / self.diam
return self.hpbw
def calc_k2(self, diam=None):
# Calculate the correction factor for angular source size and antennas HPBW
        # This seems to work for X-Band frequencies, but it throws an error
        # when the source is smaller than the beamwidth, such as at S-Band frequencies.
if diam is None:
diam = self.diam
moonbw = self.theta
self.calc_hpbw()
if self.hpbw/moonbw > 1:
# When the source is narrower than the HPBW from the DOC study
x2 = 0.6441*math.pow(moonbw/self.hpbw, 2)
self.k2est = x2 / (1 - math.pow(math.e, -x2))
# from our sun worksheet not sure of its origin, I wouldn't expect
# it to work in this case...
# self.k2est = math.pow(1 + .18 * math.pow(self.theta/self.hpbw, 2), 2)
# from our cas-a worksheet, from Datron we think
# x2 = (self.hpbw*60)
# self.k2est = (1-0.041025/x2
# +8.00685/math.pow(x2, 2)
# -10.673775/math.pow(x2, 3)
# +41.662351/math.pow(x2, 4))
else:
# when the source is wider than the HPBW
common_bit = math.log(2) * math.pow(moonbw/self.hpbw, 2)
            self.k2est = common_bit / (1 - math.exp(-common_bit))
return self.k2est
def lin_to_db(self, val):
'''turn a amp factor to a dB value.'''
result = 10*math.log10(val)
return result
def db_to_lin(self, val):
'''turn a dB to amp factor.'''
result = math.pow(10, (val/10))
return result
def calc_g_over_t(self, y=None, f=None, k1=None, k2=None):
'''calculate the G/T using this fun pile of a class.'''
if y is None:
y = self.y
else:
self.y = y
if f is None:
f = self.frequency
else:
self.frequency = f
self.calc_wavelength()
if k1 is None:
k1 = self.calc_k1()
else:
self.k1 = k1
if k2 is None:
k2 = self.calc_k2()
else:
k2 = self.k2est
S = self.calc_moon_flux()
self.gt_lin = ((8*math.pi*self.k_bolt*(y-1))/(math.pow(self.wavelength,2)*S)*k1*k2)
try:
self.gt_db = 10*math.log10(self.gt_lin)
        except ValueError as e:
            self.gt_db = 0
            print("Something's wrong with taking the log of a value less than or equal to 0.")
            print(str(e))
def calc_k1(self):
'''Calculate the k1 atmospheric attenuation value.'''
if self.temp is None:
# estimate the k1 based on our spreadsheet and my school notes
if (7.0 < (self.frequency*10**-9) < 9.0):
A_t = 0.060
elif (1.0 < (self.frequency*10**-9) < 3.0):
A_t = 0.032
else:
A_t = 10*math.log(1 + (1/self.elevation.value))
k1 = math.pow(math.e, (+A_t/(
4.343 * math.sin(+(self.elevation.value*math.pi/180)))))
self.k1 = self.db_to_lin(k1)
else:
# this uses the cool itur equations, but it is really slow...
T = self.temp * itur.u.deg_C
P = self.press * itur.u.hPa
H = self.humid
f = (self.frequency*10**-9) * itur.u.GHz
el = self.elevation
hs = (self.alt*10**-3) * itur.u.km
D = self.diam * itur.u.m
p = 0.1
# calculated atmospheric parameters
rho_p = itur.surface_water_vapour_density(self.lat, self.lon, p, hs)
# compute attenuation values
# A_g = itur.gaseous_attenuation_slant_path(f, el, rho_p, P, T)
# A_r = itur.rain_attenuation(lat, lon, f, el, hs=hs, p=p)
# A_c = itur.cloud_attenuation(lat, lon, el, f, p)
# A_s = itur.scintillation_attenuation(lat, lon, f, el, p, D)
A_t = itur.atmospheric_attenuation_slant_path(self.lat, self.lon, f, el,
p, D, hs=hs, rho=rho_p, T=T, H=H, P=P)
# print("\n")
# print("- Rain attenuation ", A_r, " dB")
# print("- Gaseous attenuation ", A_g, " dB")
# print("- Cloud attenuation ", A_c, " dB")
# print("- Scintillation attenuation ", A_s, " dB")
# print("- Total attenuation ", A_t, " dB")
            print(A_t)
            print(A_t.value)
self.k1 = self.db_to_lin(A_t.value)
return self.k1
def __str__(self):
'''Lets print what we have done.'''
if self.k1 is None:
self.k1 = 0.0
if self.gt_lin is None:
self.gt_lin = 0.0
if self.gt_db is None:
self.gt_db = 0.0
compose = [
"{:16} {:>11.4f} {:<13}".format('Distance', self.distance, ''),
"{:16} {:>11.4f} {:<13}".format('Wavelength', self.wavelength, 'm'),
"{:16} {:>11.4f} {:<13}".format('HPBW', self.hpbw, 'deg'),
"{:16} {:>11.4f} {:<13}".format('T0', self.t0, 'K'),
"{:16} {:>11.4f} {:<13}".format('T1/T0', self.t1overt0, ""),
"{:16} {:>11.4f} {:<13} (apparent temperature of the moon)".format('Tmoon', self.t_moon, "K"),
"{:16} {:>10g} {:<13} (Moon flux density)".format('Smoon', self.s_moon, "W m^-2 Hz^-1"),
"{:16} {:>11.4f} {:<13} (lunar phase lag)".format('psi', self.psi, "deg"),
"{:16} {:>11.4f} {:<13} (lunar phase angle)".format('phi', self.deg, "deg"),
"{:16} {:>11.4f} {:<13} (lunar angular diameter)".format('Theta_moon', self.theta, "deg"),
"{:16} {:>11.4f} {:<13} (atmospheric correction factor [linear])".format('K1', self.k1, ""),
"{:16} {:>11.4f} {:<13} (extended source size correction factor [linear])".format('K2', self.k2est, ""),
"\n Measurement Data",
"{:16} {:>11.4f} {:<13}".format('Frequency', self.frequency, "Hz"),
"{:16} {:>11.4f} {:<13}".format('Y-factor', self.y, ""),
"{:16} {:>11.4f} {:<13}".format('Elevation', self.elevation, ''),
"{:16} {:>11.4f} {:<13}".format('Azimuth', self.azimuth, ''),
"{:16} {:>11.4f} {:<13} ([linear])".format('G/T', self.gt_lin, ""),
"{:16} {:>11.4f} {:<13} ".format('G/T', self.gt_db, "dB/K"),
"\n",
]
output = "\n".join(compose)
return output
def freq_input(self, freq):
'''ingest the frequency fed in by the user. Convert whatever to Hz.'''
if 'MHZ' in freq.upper():
f_ = (freq.upper()).split('MHZ')
f = float(f_[0])*10**6
self.frequency = f
elif 'GHZ' in freq.upper():
f_ = (freq.upper()).split('GHZ')
f = float(f_[0])*10**9
self.frequency = f
elif 'HZ' in freq.upper():
f_ = (freq.upper()).split('HZ')
f = float(f_[0])
self.frequency = f
        elif freq.find('.') == 1:
            f = float(freq)*10**9
            self.frequency = f
            print("Labels are nice. I think you entered a frequency in GHz.")
        elif freq.find('.') == 4:
            f = float(freq)*10**6
            self.frequency = f
            print("Labels are nice. I think you entered a frequency in MHz.")
        elif freq.isdigit():
            f = float(freq)
            self.frequency = f
            print("Labels are nice. I think you entered a frequency in Hz.")
        if self.frequency > 10*10**9:
            print(str(self.frequency*10**-9)+" GHz is out of range. Try between 10 and 1 GHz.")
            sys.exit()
        elif self.frequency < 1*10**9:
            print(str(self.frequency*10**-9)+" GHz is out of range. Try between 10 and 1 GHz.")
            sys.exit()
        print(str(f*10**-9)+" GHz is what I'll use.")
    def y_input(self, y):
        '''Ingest the Y-factor for later use. Convert to linear; accepts a plain
        number (e.g. 2.266) or a value ending in dB.'''
        if 'DB' in y.upper():
            y_ = (y.upper()).split('DB')
            self.y = self.db_to_lin(float(y_[0]))
        else:
            # Treat anything without a dB label (including decimals) as a linear value.
            self.y = float(y)
            print "No Y-factor label, so I assume a linear value."
def temp_input(self, temp):
'''store the temperature.'''
self.temp = float(temp)
def press_input(self, press):
'''Store the pressure.'''
self.press = float(press)
def humid_input(self, humid):
'''Store the humidity'''
self.humid = float(humid)
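# Illustrative command-line usage. The Y-factor and frequency are taken from the
# commented-out example at the bottom of this file; the temperature, pressure and
# humidity values here are made-up placeholders:
#   python figure_of_merit.py 2.266 8177500000 20 1013 50
# i.e. Y-factor [linear], frequency [Hz], temperature [C], pressure [hPa], humidity [%].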
if __name__ == "__main__":
import sys
arguments = len(sys.argv) - 1
    if arguments == 0:
        print "\n"
        print "This command requires a Y-factor [linear or dB], a frequency [Hz], a temperature [C], a pressure [hPa] and humidity [%]."
        print "For example 'figure_of_merit.py Y.YY FFFFFFFFFF TT PPPP HH'"
        sys.exit()
    if arguments == 1:
merit = MoonGoT(test='yes')
print merit
    if arguments == 2:
y_fac = sys.argv[1]
freq = sys.argv[2]
merit = MoonGoT()
merit.freq_input(freq)
merit.y_input(y_fac)
merit.calc_g_over_t()
print merit
    if arguments == 5:
merit = MoonGoT()
print "\n"
merit.y_input(sys.argv[1])
merit.freq_input(sys.argv[2])
merit.temp_input(sys.argv[3])
merit.press_input(sys.argv[4])
merit.humid_input(sys.argv[5])
print "\n"
merit.calc_g_over_t()
print merit
# got = MoonGoT()
# got.calc_g_over_t(2.266, 8177500000)
# print got
# iot = MoonGoT(test='yes')
# print iot
#g_over_t(2.266, 8177500000, 11.28)
|
from .data import SHARD_DATA_MAP
from .events import Event
class Telemetry:
def __init__(self, data, url):
self.shard = 'xbox' if 'xbox-' in url else 'pc'
self.events = [
Event.instance(event_data)
for event_data in self.generate_events_data(data)
]
def generate_events_data(self, data):
data_class = SHARD_DATA_MAP[self.shard]
for event in data:
yield data_class(event)
def events_from_type(self, _type):
return [ev for ev in self.events if type(ev).__name__ == _type]
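# A minimal usage sketch (the URL and file name are hypothetical; real telemetry
# JSON comes from a match's telemetry asset URL, and the event-type name below is
# only an example):
#
#   import json
#   with open('telemetry.json') as fh:
#       data = json.load(fh)
#   telemetry = Telemetry(data, url='https://example.net/xbox-na/match/telemetry.json')
#   kill_events = telemetry.events_from_type('LogPlayerKill')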
|
# Copyright 2021 Alibaba Group Holding Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import signal
from typing import Optional
import click
from .common import global_mgr
@click.group(name='process')
def process_group():
pass
def _send_signal_to_process(pid: Optional[int], sig: int):
if not pid:
return
    print('kill process %d with signal %d' % (pid, sig))
os.kill(pid, sig)
@click.command(name='kill')
def kill_engine_process():
_send_signal_to_process(global_mgr.engine().engine_process_id(), signal.SIGKILL)
process_group.add_command(kill_engine_process)
@click.command(name='stop')
def stop_engine_process():
_send_signal_to_process(global_mgr.engine().engine_process_id(), signal.SIGSTOP)
process_group.add_command(stop_engine_process)
@click.command(name='continue')
def continue_engine_process():
_send_signal_to_process(global_mgr.engine().engine_process_id(), signal.SIGCONT)
process_group.add_command(continue_engine_process)
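# A minimal sketch (the parent CLI name is hypothetical) of how this group is
# typically mounted and invoked with click:
#
#   @click.group()
#   def cli():
#       pass
#
#   cli.add_command(process_group)
#
# shell: <cli> process kill | stop | continue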
|
from rsm import test1, test2, test3
import numpy as np
from time import perf_counter
N = 40
M = 20
V = 20
mem = np.zeros((N,M), dtype=np.int32)
neg = np.zeros((N,V), dtype=np.int32)
out = np.zeros(N, dtype=np.int32)
input = np.zeros(M, dtype=np.int32)
input[0]=1
for i in range(1,M):
input[i] = i*10
test1(mem,input)
# test 2
a=test2(mem,input,out)
print(out)
print(a)
R=10000
t0=perf_counter()
for i in range(R):
test2(mem,input,out)
dt=perf_counter()-t0
print(1.0*R/dt)  # calls per second
# test3 -> about 5x faster for N=40
out.fill(0)
test3(mem,input,out)
print(out)
R=10000
t0=perf_counter()
for i in range(R):
test3(mem,input,out)
dt=perf_counter()-t0
print(1.0*R/dt)  # calls per second
|
from flask import Flask, request, session, g, current_app
from subprocess import Popen
import pprint, time, threading, os, sys, sqlite3, shutil
# Borg pattern, all processManager objects will have the same states
# meaning they all reference the same processes, lock and thread.
class Borg:
_shared_state = {}
def __init__(self):
self.__dict__ = self._shared_state
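# Illustrative behaviour of the Borg pattern above (not executed here): every new
# instance rebinds its __dict__ to the shared class-level dict, so state set
# through one instance is visible through all of them:
#
#   a = processManager()
#   b = processManager()
#   a.lock is b.lock            # True
#   a.processes is b.processes  # True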
# Pokes the processes to see their exit status. Stops when there is no running thread left.
class processManager(Borg):
class pollProcesses (threading.Thread):
def __init__(self, pManager):
threading.Thread.__init__(self)
self.pManager = pManager
def run(self):
while True:
#poll exit codes:
#None Running
#0 Finished nicely
#-n terminated by signal n
time.sleep(30)
pprint.pprint("-----------")
stop = True
self.pManager.lock.acquire()
updatedProcesses = []
for p in self.pManager.processes:
keep = True
status = p.poll()
pprint.pprint(str(p.jobid)+" "+str(p.pid)+" "+str(status))
# If the job is running
if status is None:
stop = False # keep the thread alive
job = self.pManager.getJob(p.jobid)
if job["status"] == 2:
self.pManager.stopProcess(p.jobid, True)
continue;
else:
if status != 0:
status = 2
try:
self.pManager.updateJob(p.jobid, status)
keep = False #once we have the new status, keeping track of the process is useless
                        except sqlite3.OperationalError:
#If the database is locked, we force looping again
stop = False
if keep:
updatedProcesses.append(p)
self.pManager.processes = updatedProcesses
self.pManager.lock.release()
if stop:
pprint.pprint("-----------")
pprint.pprint("No process running, stopping poll.")
break;
def __init__(self):
Borg.__init__(self)
if not hasattr(self, "processes"):
self.processes = []
if not hasattr(self, "lock"):
self.lock = threading.Lock()
if not hasattr(self, "thread"):
self.thread = None
def addProcess(self, job, projectdir, zoomLevels, metatile, extent):
pprint.pprint("-----------")
path = os.path.realpath(__file__)
# Create mapcache folder if not exist
if not os.path.exists(projectdir+"/mapcache"):
os.makedirs(projectdir+"/mapcache")
#If there is a gitignore file, add the mapcache directory
if os.path.exists(projectdir+"/.gitignore"):
with open(projectdir+"/.gitignore", "r+") as gitignore:
lines = gitignore.readlines()
found = False
for line in lines:
if "mapcache" in line:
found = True
if not found:
                gitignore.write("mapcache/*\n")
jobdir = projectdir+"/mapcache/job-"+job['title']+str(job['id'])
os.makedirs(jobdir)
inFile = open(path.replace("processManager.py","mapcacheConfig.xml.default"))
outFile = open(jobdir+"/mapcacheConfig.xml","w")
replacements = {'<!--SCRIBEUIPATH-->':jobdir, '<!--SCRIBEUITITLE-->':"job-"+job['title']+str(job['id']), '<!--SCRIBEUIMAPFILEPATH-->':projectdir+'/map/'+job['map_name']+'.map'}
for line in inFile:
for src, target in replacements.iteritems():
line = line.replace(src, target)
outFile.write(line)
inFile.close()
outFile.close()
#start mapcache
pprint.pprint("Adding new process")
p = Popen(["mapcache_seed", "-c", jobdir+"/mapcacheConfig.xml", "-t", "default", "-z", zoomLevels, "-M", metatile, "-e",extent], shell=False)
p.jobid = job['id']
# Lock the processes list before adding data
self.lock.acquire()
self.processes.append(p)
#If thread is finished, start it up
if self.thread is None or not self.thread.isAlive():
self.thread = None
self.thread = self.pollProcesses(self)
self.thread.start()
self.lock.release()
return
#Stops the process.
# If locked is True, the process array cannot be changed as it is already locked
# by the caller, so accessing it is safe
def stopProcess(self, jobid, locked):
pprint.pprint("STOP job"+str(jobid))
if not locked:
self.lock.acquire()
for p in self.processes:
if p.jobid == jobid:
p.terminate()
if not locked:
self.lock.release()
return
def updateJob(self, jobid, status):
con = None
try:
con = sqlite3.connect('db/database.db')
cur = con.cursor()
cur.execute('UPDATE jobs SET status = ? WHERE id=?', [status, jobid])
con.commit()
except sqlite3.Error, e:
print "Error %s:" % e.args[0]
sys.exit(1)
finally:
if con:
con.close()
def getJob(self, jobid):
con = None
try:
con = sqlite3.connect('db/database.db')
cur = con.cursor()
cur.execute('select * from jobs WHERE id=?', [jobid])
rv = [dict((cur.description[idx][0], value)
for idx, value in enumerate(row)) for row in cur.fetchall()]
return rv[0];
except sqlite3.Error, e:
print "Error %s:" % e.args[0]
sys.exit(1)
finally:
if con:
con.close()
|
import matplotlib.pyplot as plt
import numpy as np
#%matplotlib inline
import random
dados1 = random.sample(range(100), k=20)
dados2 = random.sample(range(100), k=20)
print(dados1)
print(dados2)
lines = plt.plot(dados1, dados2)
print(lines)  # plt.plot returns a list of Line2D objects
fig, ax = plt.subplots(1,2, figsize=(12, 5))
x = range(5)
print(np.array(x))
print(fig)
x = np.array(x)
print(x**3)
print("Tipo de ax = ", type(ax))
print("Conteúdo de ax[0] = ", ax[0])
print("Conteúdo de ax[1] = ", ax[1])
ax[0].plot(x, x, label='eq_1') # cria gráfico sobre eixo 0
ax[0].plot(x, x**2, label='eq_2') # cria gráfico sobre eixo 0
ax[0].plot(x, x**3, label='eq_3') # cria gráfico sobre eixo 0
ax[0].set_xlabel('Eixo x') #nomeia o gráfico para o eixo x
ax[0].set_ylabel('Eixo y') # nomeia o gráfico para o eixo y
ax[0].set_title("Gráfico 1") # titulo para o gráfico
ax[0].legend()
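# Optional extension (an assumption, not part of the original script): use the
# second axis created above as well and render the figure.
ax[1].scatter(dados1, dados2, label='random samples')
ax[1].set_xlabel('x axis')
ax[1].set_ylabel('y axis')
ax[1].set_title("Plot 2")
ax[1].legend()
plt.tight_layout()
plt.show()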
|
__author__ = 'casper'
|
# Time: O(|V| + |E|)
# Space: O(|V| + |E|)
# graph, dfs, bfs
class Solution(object):
def distanceToCycle(self, n, edges):
"""
:type n: int
:type edges: List[List[int]]
:rtype: List[int]
"""
def cycle(parent, v, u):
result = [parent[v], v]
while u != parent[v]:
result.append(u)
u = parent[u]
return result
def iter_dfs(adj):
stk = [0]
parent = [-2]*len(adj)
parent[0] = -1
while stk:
u = stk.pop()
for v in reversed(adj[u]):
if parent[v] != -2:
if v == parent[u]:
continue
return cycle(parent, v, u)
parent[v] = u
stk.append(v)
def bfs(adj, q):
result = [-1]*n
for x in q:
result[x] = 0
d = 1
while q:
new_q = []
for u in q:
for v in adj[u]:
if result[v] != -1:
continue
result[v] = d
new_q.append(v)
q = new_q
d += 1
return result
adj = [[] for _ in xrange(n)]
for u, v in edges:
adj[u].append(v)
adj[v].append(u)
return bfs(adj, iter_dfs(adj))
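# Illustrative example (hand-checked, not part of the original file):
#   n = 7, edges = [[1, 2], [2, 4], [4, 3], [3, 1], [0, 1], [5, 2], [6, 5]]
#   The unique cycle is 1-2-4-3, so Solution().distanceToCycle(n, edges)
#   returns [1, 0, 0, 0, 0, 1, 2].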
|