content stringlengths 5 1.05M |
|---|
from django.test import TestCase
from rest_framework.test import APITestCase
from django.test import Client
from django.contrib.auth import get_user_model
import unittest
User = get_user_model()
# Create your tests here.
class UserAPITestCase(APITestCase):
    """Smoke tests for the role flags on the project's custom User model."""

    def setUp(self):
        # A secretary account that is also a superuser.
        self.usr = User(
            phonenumber="+254741997729",
            username="TralahTek",
            is_secretary=True,
            is_superuser=True,
        )
        self.usr.set_password("password")
        self.usr.save()
        # A plain producer account.
        self.usr1 = User(
            phonenumber="+252724032913",
            username="Mohan56",
            is_producer=True,
        )
        self.usr1.set_password("password")
        self.usr1.save()

    def test_user_secretary(self):
        self.assertEqual(True, self.usr.is_secretary)

    def test_user_producer(self):
        self.assertEqual(True, self.usr1.is_producer)

    def test_user_superuser(self):
        self.assertEqual(True, self.usr.is_superuser)
|
# Python supports boolean logic with the usual comparison operators.
x = 2
print(x == 2) # True
y = False
print(y == True) # False
print(y == False) # True
print(x < 5) # True
# if statement. Their code block is delimited by indentation
# and gets started from the ":" character.
# Tabs or spaces work
if x == 2 :
    print(f"x = {x}")
# "and" "or" operations
name = "Carlos"
hairStyle = "curly"
if name == "Carlos" and hairStyle == "curly":
    print(f'Your name is "{name}" and you have "{hairStyle}" hair')
if name == "Carlos" or name == "Roger":
    # Bug fix: the message previously said "Eduardo" although the
    # condition tests for "Roger".
    print(f'Your name is either "Carlos" or "Roger"')
# membership test with "in"
generalNames = ["Carlos", "Roger", "Smith", "Aaron"]
if name in generalNames:
    print(f'"{name}" is an expected name')
# explicit "not"
bannedNames = ["Roger", "Smith", "Aaron"]
if name not in bannedNames:
    print(f'"{name}" is not a banned name')
# if / else
if x == 2:
    print("X equals 2!")
else:
    print("x is not equal to 2")
# if / else if / else -
# Use the "pass" keyword for an empty block where nothing is needed to be done
if x == 3.1416:
    print("I love pie!")
elif x == 2:
    print("No pie, huh!")
else:
    print("nada...")
# use "pass" not to break proper indentation when
# commenting a line
if x == 3.1416:
    pass # if this is not here, code gets broken
    # print("I love pie!")
elif x == 2:
    pass
    # print("No pie, huh!")
else:
    pass
# "is" operator (identity test, not equality)
# Bug fix: the branch previously ran when x IS y yet printed "X is not Y";
# the condition is inverted so the message matches what is being tested.
if x is not y:
    print("X is not Y")
someNoneVar = None
if someNoneVar is None:
    print("That var doesn't have a value")
import os
import errno
from Crypto.Cipher import AES
from ifstools import IFS, GenericFile, GenericFolder
from kbinxml import KBinXML
from tqdm import tqdm
enc_base = r'D:\infinitas\KONAMI\eacloud\beatmania IIDX INFINITAS'
dec_base = r'D:\infinitas\beatmania IIDX INFINITAS (Decrypted game files)'
# monkeypatching
class CryptFile(GenericFile):
    """GenericFile subclass whose payload is decrypted when loaded.

    Instances are never constructed directly: main() reassigns ``__class__``
    on existing GenericFile objects inside the IFS tree (monkeypatching),
    so no ``__init__`` is defined here.
    """

    def _load_from_ifs(self, convert_kbin = True, **kwargs):
        # Raw (still encrypted) bytes for this entry inside the IFS archive.
        data = self.ifs_data.get(self.start, self.size)
        # Key derivation uses the original on-disk name, which ended in 'r'
        # before main() stripped it -- hence name + 'r' here.
        data = decrypt(self.name + 'r', data)
        return data
def main():
    """Decrypt the three INFINITAS components (launcher, updater, game).

    For each component: decrypt the mount table and config, dump them as
    readable XML, then extract every IFS archive the mount table references,
    transparently decrypting the encrypted ('...r') files inside.
    """
    for subdir in ['launcher','updater','game']:
        enc_path = os.path.join(enc_base, subdir)
        dec_path = os.path.join(dec_base, subdir)
        # '.binr' files are encrypted kbinxml blobs.
        mountfile = os.path.join(enc_path, 'conf', 'mounttable.binr')
        configfile = os.path.join(enc_path, 'conf', 'config.binr')
        # whatever
        with open(mountfile,'rb') as f:
            mounts = decrypt(mountfile, f.read())
        mounts = KBinXML(mounts)
        with open(configfile,'rb') as f:
            config = decrypt(configfile, f.read())
        config = KBinXML(config)
        # Dump both decrypted configs as plain-text XML.
        cfg_dec = os.path.join(dec_path, 'config')
        mkdir_p(cfg_dec)
        with open(os.path.join(cfg_dec, 'config.xml'), 'w') as f:
            f.write(config.to_text())
        with open(os.path.join(cfg_dec, 'mounttable.xml'), 'w') as f:
            f.write(mounts.to_text())
        # Extract every IFS archive listed in the mount table.
        for vfs in tqdm(mounts.xml_doc):
            dst = vfs.attrib['dst']
            src = vfs.attrib['src']
            src = src.lstrip('/')
            dst = dst.lstrip('/')
            src = os.path.join(enc_path, src)
            dst = os.path.join(dec_path, dst)
            # Destinations may carry a trailing digit -- presumably alternate
            # mounts of the same folder; dropped here (TODO confirm).
            if dst[-1].isdigit():
                dst = dst[:-1]
            if not os.path.isfile(src):
                tqdm.write("WARNING: mounttable file '{}' doesn't exist, skipping".format(src))
                continue
            ifs = IFS(src)
            # Swap in the decrypting loader for encrypted leaf files
            # (plain files whose name ends in 'r').
            for f in ifs.tree.all_files:
                if f.name.endswith('r') and type(f) == GenericFile and type(f.parent) == GenericFolder:
                    f.__class__ = CryptFile
                    f.name = f.name[:-1] # strip r
            mkdir_p(dst)
            ifs.extract(use_cache = False, recurse = False, path = dst)
def decrypt(filename, data = None):
    '''Decrypt one Konami '...r' blob (AES-128-CBC with ciphertext stealing).

    filename is used for key generation and is always required.
    When ``data`` is None the file is read from and written back to disk
    (output name = filename minus the trailing 'r') and nothing is returned;
    otherwise ``data`` is decrypted in memory and the plaintext is returned.
    '''
    # Fixed salt and XOR mask used for per-file key derivation.
    salt = b'sg_TlTNF80vAUgGLafxkT3YgvKpyh_e2'
    name_xor = b'\x117\xd2sc\xe5Ov\x84\x8c)\xf1\x162Tu\xbf\xd8~\xf9#\xa1\xddy\x8c&\xf72\xf7\xe6\xe3e'
    decname = filename[:-1] # strip trailing r
    # Key = first 32 bytes of (basename + salt), XORed with the mask.
    mangled = os.path.basename(decname).encode('utf8') + salt
    mangled = mangled[:32]
    key = bytes(bytearray([x^y for x, y in zip(mangled, name_xor)]))
    if data is None:
        with open(filename,'rb') as f:
            enc = f.read()
    else:
        enc = data
    # First 16 bytes are the CBC IV.
    iv = enc[:16]
    aes = AES.new(key, AES.MODE_CBC, iv)
    enc = enc[16:]
    extra_len = len(enc) % 16
    pad_len = 16 - extra_len
    # ciphertext stealing, bleurgh
    if extra_len:
        # The final partial block borrows its missing tail from the raw
        # decryption of the last full block, then both are re-ordered and
        # run through the main CBC stream; the stolen pad is trimmed off.
        extra = enc[-extra_len:]
        enc = enc[:-extra_len]
        last_full = enc[-16:]
        enc = enc[:-16]
        last_full_dec = AES.new(key, AES.MODE_CBC, b'\0'*16).decrypt(last_full)
        extra += last_full_dec[-pad_len:]
        dec = aes.decrypt(enc + extra + last_full)[:-pad_len]
    else:
        dec = aes.decrypt(enc)
    # File mode: write plaintext next to the source; data mode: return it.
    if data is None:
        with open(decname,'wb') as f:
            f.write(dec)
    else:
        return dec
def mkdir_p(path):
    """Create *path* like ``mkdir -p``: make intermediate directories as
    needed and succeed silently if the directory already exists.

    :param path: directory path to create
    :raises OSError: for any failure other than "directory already exists"
        (e.g. permissions, or the path exists but is a regular file)
    """
    # exist_ok=True replaces the old errno.EEXIST/isdir dance and is
    # race-free; it still raises if the path exists as a non-directory.
    os.makedirs(path, exist_ok=True)
# Script entry point: decrypt all three game components.
if __name__ == '__main__':
    main()
|
"""
Login Module
"""
from selenium.webdriver.support import expected_conditions as ec
from fixtures.mainteny.app.ui.pages.page_base import PageBase
from pylenium.element import Element
class LoginPage(PageBase):
    """
    This class contains the elements of login page.
    """

    def __init__(self, py):
        super().__init__(py)
        # Pylenium driver used for all element lookups.
        self.py = py

    @property
    def username_field(self) -> Element:
        """
        :return: element -- the e-mail/username input
        """
        locator = "[type='email']"
        element = self.py.get(locator)
        return element

    @property
    def password_field(self) -> Element:
        """
        :return: element -- the password input
        """
        locator = "[type='password']"
        element = self.py.get(locator)
        return element

    @property
    def login_button(self) -> Element:
        """
        :return: element -- the submit button of the login form
        """
        locator = ".item>button"
        element = self.py.get(locator)
        return element

    def login(self, username, password):
        """
        This function used to login.
        :param username: String
        :param password: String
        """
        self.username_field.type(username)
        self.password_field.type(password)
        # The button is asserted enabled before clicking.
        self.login_button.should().be_enabled()
        self.login_button.click()
        current_wait = 10
        # Still on the login URL => wait for the redirect to the dashboard.
        # NOTE(review): self.artifacts presumably comes from PageBase --
        # confirm it is always initialised before login() is called.
        if "login" in self.py.url():
            self.artifacts.logger.info(f"Current wait time to login is set to {current_wait} seconds")
            self.py.wait(current_wait).until(ec.url_contains("dashboard"))
|
import math
import random
from typing import List
import numpy as np
import shapely.geometry as geom
import shapely.ops
from numba import jit
nautical_miles_to_feet = 6076 # ft/nm
class Airplane:
    def __init__(self, sim_parameters, name, x, y, h, phi, v, h_min=0, h_max=38000, v_min=100, v_max=300):
        """
        State of one aircraft simulated in the environment
        :param sim_parameters: Definition of the simulation, timestep and more
        :param name: Name of the flight/airplane
        :param x: Position in cartesian world coordinates
        :param y: Position in cartesian world coordinates
        :param h: Height [feet]
        :param phi: Angle of direction, between 1 and 360 degrees
        :param v: Speed [knots]
        :param v_min: Min. speed [knots]
        :param v_max: Max. speed [knots]
        :param h_min: Min. altitude [feet]
        :param h_max: Max. altitude [feet]
        """
        self.sim_parameters = sim_parameters
        self.name = name
        self.x = x
        self.y = y
        self.h = h
        # Reject aircraft spawned outside the permitted altitude/speed bands.
        if (h < h_min) or (h > h_max):
            raise ValueError("invalid altitude")
        self.v = v
        if (v < v_min) or (v > v_max):
            raise ValueError("invalid velocity")
        self.phi = phi
        self.h_min = h_min
        self.h_max = h_max
        self.v_min = v_min
        self.v_max = v_max
        # Performance limits used to bound actions: climb/descend rate,
        # acceleration and turn rate, all applied per timestep below.
        self.h_dot_min = -41
        self.h_dot_max = 15
        self.a_max = 5
        self.a_min = -5
        self.phi_dot_max = 3
        self.phi_dot_min = -3
        # Trail of past (x, y) positions, appended on every step().
        self.position_history = []
        # Random identifier; not guaranteed unique across airplanes.
        self.id = random.randint(0, 32767)

    def above_mva(self, mvas):
        # True iff the airplane is at or above the minimum vectoring altitude
        # of the MVA polygon containing its current position.
        for mva in mvas:
            if mva.area.contains(geom.Point(self.x, self.y)):
                return self.h >= mva.height
        raise ValueError('Outside of airspace')

    def action_v(self, action_v):
        """
        Updates the aircrafts state to a new target speed.
        The target speed will be bound by [v_min, v_max] and the rate of change by [a_min, a_max]
        :param action_v: New target speed of the aircraft
        :return: Change has been made to the self speed
        """
        if action_v < self.v_min:
            raise ValueError("invalid speed")
        if action_v > self.v_max:
            raise ValueError("invalid speed")
        delta_v = action_v - self.v
        # restrict to max acceleration, upper bound
        delta_v = min(delta_v, self.a_max * self.sim_parameters.timestep)
        # restrict to min acceleration, lower bound
        delta_v = max(delta_v, self.a_min * self.sim_parameters.timestep)
        self.v = self.v + delta_v

    def action_h(self, action_h):
        """
        Updates the aircrafts state to a new target height.
        The target height will be bound by [h_min, h_max] and the climb/descend rate by [h_dot_min, h_dot_max]
        :param action_h: New target height of the aircraft
        :return: Change has been made to the height
        """
        if action_h < self.h_min:
            raise ValueError("invalid altitude")
        if action_h > self.h_max:
            raise ValueError("invalid altitude")
        delta_h = action_h - self.h
        # restrict to max climb speed, upper bound
        delta_h = min(delta_h, self.h_dot_max * self.sim_parameters.timestep)
        # restrict to max decend speed, lower bound
        delta_h = max(delta_h, self.h_dot_min * self.sim_parameters.timestep)
        self.h = self.h + delta_h

    def action_phi(self, action_phi):
        """
        Updates the aircrafts state to a new course.
        The target course will be bound by [phi_dot_min, phi_dot_max]
        :param action_phi: New target course of the aircraft
        :return: Change has been made to the target heading
        """
        delta_phi = action_phi - self.phi
        # restrict to max turn rate, upper bound
        delta_phi = min(delta_phi, self.phi_dot_max * self.sim_parameters.timestep)
        # restrict to max turn rate, lower bound
        delta_phi = max(delta_phi, self.phi_dot_min * self.sim_parameters.timestep)
        self.phi = self.phi + delta_phi

    def step(self):
        """Advance the position by one timestep along the current heading."""
        self.position_history.append((self.x, self.y))
        # convert speed vector to nautical miles per second
        v_unrotated = np.array([[0], [(self.v / 3600) * self.sim_parameters.timestep]])
        # Rotate the displacement into world coordinates by the heading.
        delta_x_y = np.dot(rot_matrix(self.phi), v_unrotated)
        self.x += delta_x_y[0][0]
        self.y += delta_x_y[1][0]
class SimParameters:
    """Global settings shared by all objects in one simulation run.

    :param timestep: step size of the simulation [seconds]
    :param precision: epsilon used for comparisons against zero
    :param reward_shaping: whether shaped rewards are enabled
    :param normalize_state: whether state observations are normalized
    :param discrete_action_space: whether actions are discrete
    """

    def __init__(self, timestep: float, precision: float = 0.5, reward_shaping: bool = True,
                 normalize_state: bool = True, discrete_action_space: bool = False):
        # Plain value-object: store every setting verbatim.
        self.timestep, self.precision = timestep, precision
        self.reward_shaping = reward_shaping
        self.normalize_state = normalize_state
        self.discrete_action_space = discrete_action_space
class Corridor:
    # Runway threshold position/orientation (values assigned in __init__).
    x: int
    y: int
    h: int
    phi_from_runway: int
    phi_to_runway: int

    def __init__(self, x: int, y: int, h: int, phi_from_runway: int):
        """
        Defines the corridor that belongs to a runway

        Precomputes the approach geometry: the final approach fix (FAF), two
        corner points and the IAF span a triangular horizontal corridor,
        which is also split into two halves along the FAF-IAF axis for the
        direction-dependent heading check.
        """
        self.x = x
        self.y = y
        self.h = h
        self.phi_from_runway = phi_from_runway
        self.phi_to_runway = (phi_from_runway + 180) % 360
        # Unit vector pointing from the runway towards the FAF/IAF.
        self._faf_iaf_normal = np.dot(rot_matrix(self.phi_from_runway), np.array([[0], [1]]))
        faf_threshold_distance = 7.4
        faf_angle = 45
        self.faf_angle = faf_angle
        faf_iaf_distance = 3
        # Corner distance measured along the corridor's slanted edges.
        faf_iaf_distance_corner = faf_iaf_distance / math.cos(math.radians(faf_angle))
        self.faf = np.array([[x], [y]]) + np.dot(rot_matrix(phi_from_runway),
                                                 np.array([[0], [faf_threshold_distance]]))
        self.corner1 = np.dot(rot_matrix(faf_angle),
                              np.dot(rot_matrix(phi_from_runway), [[0], [faf_iaf_distance_corner]])) + self.faf
        self.corner2 = np.dot(rot_matrix(-faf_angle),
                              np.dot(rot_matrix(phi_from_runway), [[0], [faf_iaf_distance_corner]])) + self.faf
        self.corridor_horizontal = geom.Polygon([self.faf, self.corner1, self.corner2])
        self.iaf = np.array([[x], [y]]) + np.dot(rot_matrix(phi_from_runway),
                                                 np.array([[0], [faf_threshold_distance + faf_iaf_distance]]))
        # Two halves of the corridor used by _inside_corridor_angle.
        self.corridor1 = geom.Polygon([self.faf, self.corner1, self.iaf])
        self.corridor2 = geom.Polygon([self.faf, self.corner2, self.iaf])
        # Vertex arrays for the numba-compiled ray_tracing tests.
        self.corridor_horizontal_list = np.array(list(self.corridor_horizontal.exterior.coords))
        self.corridor1_list = np.array(list(self.corridor1.exterior.coords))
        self.corridor2_list = np.array(list(self.corridor2.exterior.coords))

    def inside_corridor(self, x, y, h, phi):
        """
        Performance optimized inside corridor check.
        :param x: x-position of the airplane [nm]
        :param y: y-position of the airplane [nm]
        :param h: Altitude of the airplane [ft]
        :param phi: Heading [deg]
        :return: Whether the airplane is inside the approach corridor
        """
        # Horizontal containment first -- cheapest rejection.
        if not ray_tracing(x, y, self.corridor_horizontal_list):
            return False
        # Project the position onto the FAF-IAF axis and compare the altitude
        # against a 3-degree glideslope at that distance from the runway.
        p = np.array([[x, y]])
        t = np.dot(p - np.transpose(self.faf), self._faf_iaf_normal)
        projection_on_faf_iaf = self.faf + t * self._faf_iaf_normal
        h_max_on_projection = np.linalg.norm(projection_on_faf_iaf - np.array([[self.x], [self.y]])) * \
            math.tan(3 * math.pi / 180) * nautical_miles_to_feet + self.h
        if not h <= h_max_on_projection:
            return False
        return self._inside_corridor_angle(x, y, phi)

    def _inside_corridor_angle(self, x, y, phi):
        # Heading check: the permitted relative angle to the runway heading
        # depends on which half of the corridor the airplane occupies.
        # NOTE(review): np.arccos returns radians while faf_angle is in
        # degrees -- verify that mixing the two in `beta` is intended.
        direction_correct = False
        to_runway = self.phi_to_runway
        beta = self.faf_angle - np.arccos(
            np.dot(
                np.transpose(np.dot(rot_matrix(to_runway), np.array([[0], [1]]))),
                np.dot(rot_matrix(phi), np.array([[0], [1]]))
            )
        )[0][0]
        min_angle = self.faf_angle - beta
        if ray_tracing(x, y, self.corridor1_list) and min_angle <= relative_angle(to_runway,
                                                                                 phi) <= self.faf_angle:
            direction_correct = True
        elif ray_tracing(x, y, self.corridor2_list) and min_angle <= relative_angle(phi,
                                                                                   to_runway) <= self.faf_angle:
            direction_correct = True
        return direction_correct
class Runway:
    # Approach corridor belonging to this runway (built in __init__).
    corridor: Corridor

    def __init__(self, x, y, h, phi):
        """
        Defines position and orientation of the runway
        """
        self.x = x
        self.y = y
        self.h = h
        self.phi_from_runway = phi
        # Opposite direction, i.e. the heading an arriving aircraft flies.
        self.phi_to_runway = (phi + 180) % 360
        self.corridor = Corridor(x, y, h, phi)

    def inside_corridor(self, x: int, y: int, h: int, phi: int):
        """
        Checks if an airplane at a specific 3D point and heading is inside the approach corridor
        :param x: X position of the airplane
        :param y: Y position of the airplane
        :param h: Altitude of the airplane
        :param phi: Heading of the airplane [degrees]
        """
        # Delegates entirely to the precomputed Corridor geometry.
        return self.corridor.inside_corridor(x, y, h, phi)
class MinimumVectoringAltitude:
    """A polygonal airspace sector together with its minimum vectoring altitude."""

    area: geom.Polygon
    height: int

    def __init__(self, area: geom.Polygon, height: int):
        self.area = area
        self.height = height
        # Cache the exterior ring as an ndarray for the numba ray tracer,
        # plus the bounding box for a cheap pre-check in Airspace.find_mva.
        self.area_as_list = np.array(list(area.exterior.coords))
        self.outer_bounds = area.bounds
class Airspace:
    mvas: List[MinimumVectoringAltitude]

    def __init__(self, mvas: List[MinimumVectoringAltitude], runway: Runway):
        """
        Defines the airspace. Each area is a polygon entered as a list of tuples, Pass several areas as a list or tuple
        MVA is defined by a number (height in feet), pass as a list or tuple equal to the number of
        """
        self.mvas = mvas
        self.runway = runway

    def find_mva(self, x, y):
        # Locate the MVA sector containing (x, y); the cheap bounding-box
        # test filters candidates before the exact point-in-polygon check.
        for mva in self.mvas:
            bounds = mva.outer_bounds
            # tuple with minx, miny, maxx, maxy
            if bounds[0] <= x <= bounds[2] and bounds[1] <= y <= bounds[3]:
                if ray_tracing(x, y, mva.area_as_list):
                    return mva
        raise ValueError('Outside of airspace')

    def get_mva_height(self, x, y):
        # Minimum vectoring altitude [ft] at the given position.
        return self.find_mva(x, y).height

    def get_bounding_box(self):
        """
        Returns the bounding box of the airspace
        :return Tuple with minx, miny, maxx, maxy
        """
        combined_poly = self.get_outline_polygon()
        return combined_poly.bounds

    def get_outline_polygon(self):
        # The union of all MVA polygons forms the airspace outline.
        polys: List[geom.polygon] = [mva.area for mva in self.mvas]
        combined_poly = shapely.ops.unary_union(polys)
        return combined_poly
class EntryPoint:
    """Location, heading and admissible flight levels of an airspace entry fix."""

    def __init__(self, x: float, y: float, phi: int, levels: List[int]):
        # Position in world coordinates.
        self.x, self.y = x, y
        # Entry heading [degrees].
        self.phi = phi
        # Flight levels at which aircraft may enter here.
        self.levels = levels
@jit(nopython=True)
def ray_tracing(x, y, poly):
    # Even-odd rule point-in-polygon test (horizontal ray cast towards +x),
    # numba-compiled for speed. `poly` is a sequence of (x, y) vertices.
    n = len(poly)
    inside = False
    p2x = 0.0
    p2y = 0.0
    xints = 0.0
    p1x, p1y = poly[0]
    for i in range(n + 1):
        # i % n wraps around so the polygon is implicitly closed.
        p2x, p2y = poly[i % n]
        if y > min(p1y, p2y):
            if y <= max(p1y, p2y):
                if x <= max(p1x, p2x):
                    if p1y != p2y:
                        # x-coordinate where the edge crosses the ray's height.
                        xints = (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
                    if p1x == p2x or x <= xints:
                        # Each crossing toggles the inside/outside state.
                        inside = not inside
        p1x, p1y = p2x, p2y
    return inside
@jit(nopython=True)
def relative_angle(angle1, angle2):
    """Signed smallest rotation from angle1 to angle2, wrapped into [-180, 180)."""
    wrapped = (angle2 - angle1 + 180) % 360
    return wrapped - 180
@jit(nopython=True)
def rot_matrix(phi):
    """2x2 rotation matrix for an angle given in degrees."""
    rad = math.radians(phi)
    c = math.cos(rad)
    s = math.sin(rad)
    return np.array([[c, s], [-s, c]])
|
from rest_framework import serializers
from product.models import ProductVariation
class ProductVariationSerializer(serializers.ModelSerializer):
    """Serializes ProductVariation with every model field plus the parent
    product's description as a read-only convenience field."""

    # Pulled from the related product; read_only so writes ignore it.
    product_description = serializers.CharField(source='product.description', read_only=True)

    class Meta:
        model = ProductVariation
        fields = '__all__'
|
from custodian.vasp.validators import VasprunXMLValidator
import os
import unittest
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')
cwd = os.getcwd()
class VasprunXMLValidatorTest(unittest.TestCase):
    """Tests for VasprunXMLValidator against the vasprun fixtures in test_files."""

    def test_check_and_correct(self):
        # A deliberately broken vasprun.xml must be flagged by check().
        os.chdir(os.path.join(test_dir, "bad_vasprun"))
        h = VasprunXMLValidator()
        self.assertTrue(h.check())
        #Unconverged still has a valid vasprun.
        os.chdir(os.path.join(test_dir, "unconverged"))
        self.assertFalse(h.check())

    def test_as_dict(self):
        # The validator must round-trip through its dict serialisation.
        h = VasprunXMLValidator()
        d = h.as_dict()
        h2 = VasprunXMLValidator.from_dict(d)
        self.assertIsInstance(h2, VasprunXMLValidator)

    @classmethod
    def tearDownClass(cls):
        # Restore the working directory changed by test_check_and_correct.
        os.chdir(cwd)


if __name__ == "__main__":
    unittest.main()
|
# Generates a random password of Max_Len characters that is guaranteed to
# contain at least one digit, one uppercase letter, one lowercase letter and
# one symbol, shuffles it, and prints it to the console.
# NOTE(review): the original header also promised hashing/encrypting the
# generated password; that step is not implemented here.
import random
import array
from string import digits

Max_Len = 12

# Character pools. 'l' and 'L' are omitted -- presumably to avoid
# look-alike characters, TODO confirm.
Digits = ['0','1','2','3','4','5','6','7','8','9']
Lower_Case_Characters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'm', 'n', 'o', 'p', 'q','r', 's', 't', 'u', 'v', 'w', 'x', 'y','z']
# Bug fix: this pool previously contained a lowercase 'p' instead of 'P'.
Uppercase_Characters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H','I', 'J', 'K', 'M', 'N', 'O', 'P', 'Q','R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y','Z']
Symbols = ['@', '#', '$', '%', '=', ':', '?', '.', '/', '|', '~', '>', '*', '(', ')', '<']

# Combined pool for the remaining free positions.
Combined_List = Digits + Uppercase_Characters + Lower_Case_Characters + Symbols

# One guaranteed character from each class.
Random_Digits = random.choice(Digits)
Random_Upper = random.choice(Uppercase_Characters)
Random_Lower = random.choice(Lower_Case_Characters)
Random_Symbols = random.choice(Symbols)
temp_pass = Random_Digits + Random_Upper + Random_Lower + Random_Symbols

# Fill the remaining positions from the combined pool.
for x in range(Max_Len - 4):
    temp_pass = temp_pass + random.choice(Combined_List)

# Shuffle so the four guaranteed characters are not always at the front.
# (A plain list replaces the deprecated array.array("u", ...) buffer.)
temp_pass_list = list(temp_pass)
random.shuffle(temp_pass_list)
password = "".join(temp_pass_list)
print(password)
# Demonstrates exception handling (German prompts):
# int() raises ValueError for non-integer input, which is caught here.
try:
    x = int(input("Ganzahl eingeben: "))
    print("Gut gemacht")
except ValueError as e:
    print("Falsche Eingabe")
    print(e)
#--
# Second part: a manually raised ValueError that is NOT caught, so
# "Programmende" is only reached for non-negative input.
x = int(input("Ganzahl eingeben: "))
if x < 0:
    raise ValueError("Falsch gemacht")
print("Programmende")
|
import unittest
import strokes_gained_calculations as sgc
class SimpleStrokesGainedTestCase(unittest.TestCase):
    """Tests for the simple numerical calc of SG putting"""

    def test_single_simple_strokes_gained(self):
        # 1.78 expected strokes minus 1 actual putt = 0.78 strokes gained.
        strokes_gained = sgc.calculate_strokes_gained(1.78, 1)
        self.assertEqual(strokes_gained, 0.78)
class StrokesGainedTestCase(unittest.TestCase):
    """Tests for the full SG Putting method"""

    def setUp(self):
        """Create some Strokes Gained input data for use in test methods"""
        # Baseline table: expected number of putts per distance.
        self.sg_putting_test_ref_data = [
            {"distance": 2, "putts": 1.01}, {"distance": 3, "putts": 1.04}, {"distance": 4, "putts": 1.13}, {"distance": 5, "putts": 1.23}, {"distance": 6, "putts": 1.34}, {"distance": 7, "putts": 1.42}, {"distance": 8, "putts": 1.5}, {"distance": 9, "putts": 1.56}, {"distance": 10, "putts": 1.61}, {"distance": 15, "putts": 1.78}, {"distance": 20, "putts": 1.87}, {"distance": 30, "putts": 1.98}, {"distance": 40, "putts": 2.06}, {"distance": 50, "putts": 2.14}, {"distance": 60, "putts": 2.21}
        ]
        # Inputs chosen to hit: below-minimum distance, exact match,
        # in-between distance, and a two-putt case.
        self.sg_putting_test_input_data = [
            {'distance': 1, 'putts': 1},
            {'distance': 5, 'putts': 1},
            {'distance': 27, 'putts': 1},
            {'distance': 10, 'putts': 2}
        ]

    def test_min_putt_distance(self):
        """Does the function handle a distance less than the min ref value"""
        strokes_gained = sgc.calculate_strokes_gained_putting(
            self.sg_putting_test_ref_data, self.sg_putting_test_input_data[0]
        )
        self.assertEqual(strokes_gained, 0.01)

    def test_exact_distance_match(self):
        """Does a simple exact match of distance return the expected value"""
        strokes_gained = sgc.calculate_strokes_gained_putting(
            self.sg_putting_test_ref_data, self.sg_putting_test_input_data[1]
        )
        self.assertEqual(strokes_gained, 0.23)

    def test_distance_in_between(self):
        """Does the calc work for a distance between reference numbers"""
        strokes_gained = sgc.calculate_strokes_gained_putting(
            self.sg_putting_test_ref_data, self.sg_putting_test_input_data[2]
        )
        self.assertEqual(strokes_gained, 0.95)


if __name__ == '__main__':
    unittest.main()
|
from __future__ import absolute_import, print_function
import os
import re
from setuptools import find_packages, setup
try:
from myhelpers.setup_helpers import find_location, find_version, read
except ImportError as err:
import sys
print('{0}\nYou may download setup_helpers from'.format(err),
'https://github.com/cristobal-sifon/myhelpers')
sys.exit()
def read_requirements(reqfile):
    """Return the non-empty lines of *reqfile* as a list of requirement strings.

    :param reqfile: path to a pip-style requirements file
    :raises OSError: if the file cannot be opened
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original leaked the handle returned by open()).
    with open(reqfile) as fh:
        return [line for line in fh.read().split('\n') if line]
# Package metadata; find_version/find_location/read come from the external
# `myhelpers` package imported above.
setup(
    name='readfile',
    version=find_version('src/readfile/__init__.py'),
    description='A flexible module to read ascii files',
    author='Cristobal Sifon',
    author_email='cristobal.sifon@pucv.cl',
    long_description=read(os.path.join(find_location(__file__), 'README.rst')),
    url='https://github.com/cristobal-sifon/readfile',
    # Sources live under src/ (src-layout).
    package_dir={'': 'src'},
    packages=find_packages(where='src'),
    install_requires=read_requirements('requirements.txt'),
)
|
import requests
import time
import json
from cython_npm.cythoncompile import require
# from Interservices import Friend
# from ../
Interservices = require('../../microservices_connector/Interservices')
Friend = Interservices.Friend
# test sanic app
timeit = Interservices.timeit
# post is the fastest way
@timeit
def doget():
    """GET /hello with a JSON body against the local test app; prints the reply."""
    r = requests.get('http://0.0.0.0:5000/hello', json={"key": "value"})
    # NOTE(review): these two bare expressions are evaluated and discarded.
    r.status_code
    r.json()
    print('port', r.json())
@timeit
def dopost():
    """POST /hello with args/kwargs-shaped JSON; prints the reply."""
    r = requests.post('http://localhost:5000/hello',
                      json={"args": ["value", ], 'kwargs': {'onekey': 'value of key'}})
    # NOTE(review): these two bare expressions are evaluated and discarded.
    r.status_code
    r.json()
    print('get', r.json())
@timeit
def doput():
    """PUT /hello with a JSON body; prints the reply."""
    r = requests.put('http://localhost:5000/hello', json={"key": "value"})
    # NOTE(review): these two bare expressions are evaluated and discarded.
    r.status_code
    r.json()
    print('put', r.json())
@timeit
def dodelete():
    """DELETE /hello with a JSON body; prints the reply."""
    r = requests.delete('http://localhost:5000/hello', json={"key": "value"})
    # NOTE(review): these two bare expressions are evaluated and discarded.
    r.status_code
    r.json()
    print('delete', r.json())
# doget()
# dopost()
# doput()
# dodelete()
# @timeit
# def test():
# aFriend= Friend('app1', 'http://localhost:5000')
# aFriend.setRule('/hello')
# r = aFriend.send('/hello', 'A variable value', onekey='A keyword variable value')
# return r
# python run.py
# test return a string
@timeit
def testStr():
    """Exercise endpoints that return one or several strings."""
    print(
        """##############################
Test return string
""")
    aFriend= Friend('app1', 'localhost:5000')
    print('Test: return a simple string')
    x = aFriend.send('/str', 'A variable value', key='A keyword variable value')
    print('x=', x, type(x))
    print('==========================')
    print('Test: return multiple string')
    x, y, z = aFriend.send('/str2', 'A variable value','second Variable',
                           key='A keyword variable value')
    print('x=' ,x, type(x))
    print('y=', y, type(y))
    print('z=', z, type(z))


testStr()
# Captured output from a previous run:
"""[Result]
Test: return a simple string
x= A variable value-A keyword variable value <class 'str'>
==========================
Test: return multiple string
x= A variable value <class 'str'>
y= A keyword variable value <class 'str'>
z= A variable value-A keyword variable value <class 'str'>
'testStr' 23.17 ms
"""
@timeit
def testInt():
    """Exercise endpoints that return int and float values."""
    print(
        """##############################
Test return a int, float
""")
    aFriend= Friend('app1', 'localhost:5000')
    print('Test: return a simple Value')
    x = aFriend.send('/int', 2018, key=312)
    print('x=', x, type(x))
    print('==========================')
    print('Test: return a simple Value')
    x = aFriend.send('/float', 2.018, key=3.12)
    print('x=', x, type(x))
    print('==========================')
    print('Test: return multiple Value')
    x, y, z = aFriend.send('/int3', 3.1427,
                           key=1000000000, key2=2.71230)
    print('x=', x, type(x))
    print('y=', y, type(y))
    print('z=', z, type(z))


testInt()
# Captured output from a previous run:
"""[result]
Test: return a simple Value
x= 2330 <class 'int'>
==========================
Test: return a simple Value
x= 5.138 <class 'float'>
==========================
Test: return multiple Value
x= 1000000003.1427 <class 'float'>
y= 1000000000000000000 <class 'int'>
z= 9.87656329 <class 'float'>
"""
@timeit
def testListDict():
    """Exercise endpoints that return list and dict values."""
    print(
        """##############################
Test return a list, dict
""")
    aFriend= Friend('app1', 'localhost:5000')
    print('Test: return a simple Value')
    x = aFriend.send('/list', [12,34,45], key=['abc','zyz'])
    print('x=', x, type(x))
    print('==========================')
    print('Test: return a simple Value')
    x = aFriend.send('/dict', {'keyword':['anything']}, key={'int':20,'str':'adfafsa','float':0.2323})
    print('x=', x, type(x))
    print('==========================')
    print('Test: return multiple Value')
    x, y, z = aFriend.send('/list3', {'keyword': ['anything']},
                           key=['abc', 'zyz'])
    print('x=', x, type(x))
    print('y=', y, type(y))
    print('z=', z, type(z))


testListDict()
# Captured output from a previous run:
"""[Result]
Test: return a simple Value
x= [12, 34, 45, 'abc', 'zyz'] <class 'list'>
==========================
Test: return a simple Value
x= {'dict': {'keyword': ['anything']}, 'float': 0.2323, 'int': 20, 'str': 'adfafsa'} <class 'dict'>
==========================
Test: return multiple Value
x= {'keyword': ['anything']} <class 'dict'>
y= ['abc', 'zyz', 'other value'] <class 'list'>
z= None <class 'NoneType'>
'testListDict' 22.19 ms
"""
class testservice(object):
    """Tiny sample class used to exercise sending class instances over the wire."""

    # Class-level metadata that shows up in the serialized payloads.
    name = 'test'
    Purpose = 'For test only'
    empty = None

    def __init__(self, value):
        # Arbitrary payload attached to the instance.
        self.value = value

    def onemethod(self):
        # Present only so the class has a method; output is irrelevant.
        print('This is test class')
@timeit
def testClassType():
    """Exercise None/class-instance returns, token-protected rules and the json API."""
    print(
        """##############################
Test return NoneType, Class, use of Token
""")
    aFriend= Friend('app1', 'localhost:5000')
    print('Test: return a simple Value')
    x = aFriend.send('/None', [12, 34, 45], key=['abc', 'zyz'])
    print('x=', x, type(x))
    print('==========================')
    print('Test: return a simple Value with token')
    # Rules registered with a token require that token for access.
    aFriend.setRule('/class', token='123456')
    x = aFriend.send('/class', {'keyword': ['anything']},
                     key={'int': 20, 'str': 'adfafsa', 'float': 0.2323})
    print('x=', x, type(x))
    print('==========================')
    print('Test: return multiple Value')
    aFriend.setRule('/class2', token='123456')
    x,y,z = aFriend.send('/class2', {'keyword': ['anything']},
                         key={'int': 20, 'str': 'adfafsa', 'float': 0.2323})
    print('x=', x, type(x))
    print('y=', y, type(y))
    print('z=', z, type(z))
    # Test send class and list of class object
    print('Test: send class and list of class object')
    aFriend.setRule('/class3', token='123456')
    t1 = testservice('value1')
    t2 = testservice('value2')
    x, y, z = aFriend.send('/class3', [t1,t2],
                           key={'t1': t1, 't2': t2, 'list': [t1, t2]})
    print('x=', x, type(x))
    print('y=', y, type(y))
    print('z=', z, type(z))
    print('=================Response json===============')
    # Raw JSON round-trips with explicit HTTP verbs.
    x = aFriend.json('/json', a=12,b='This is a text',c={'dict':'a dict'})
    print('Synchonous POST:', x)
    y = aFriend.json('/json1', method='GET' , a={'dict': 'a only dict'})
    print('Asynchonous GET:', y)
    z = aFriend.json('/json1', a={'dict': 'a only dict'})
    print('Asynchonous POST:', z)
    t = aFriend.json('/get/none', method='GET')
    print('Synchonous GET:', t)
    q = aFriend.json('/post/none', method='POST')
    print('Synchonous POST:', q)


testClassType()
# Captured output from a previous run:
"""[Results]
##############################
Test return NoneType, Class, use of Token
Test: return a simple Value
x= None <class 'NoneType'>
==========================
Test: return a simple Value with token
x= {'Purpose': 'For test only', 'empty': None, 'name': 'test', 'value': {'keyword': ['anything']}} <class 'dict'>
==========================
Test: return multiple Value
x= {'Purpose': 'For test only', 'empty': None, 'name': 'test', 'value': {'float': 0.2323, 'int': 20, 'str': 'adfafsa'}} <class 'dict'>
y= {'keyword': ['anything']} <class 'dict'>
z= None <class 'NoneType'>
'testClassType' 19.20 ms
"""
|
#!/usr/bin/env python
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from logging import (
getLogger, StreamHandler, FileHandler, Formatter, DEBUG, INFO
)
from logging.handlers import (
RotatingFileHandler, TimedRotatingFileHandler
)
import platform
import sys
import time
def do_log(args, logger):
    """Emit ``args.num_iterations`` numbered INFO records through *logger*.

    A 10 ms pause between records makes time-based rotation observable.
    """
    for count in range(args.num_iterations):
        logger.info('{}'.format(count))
        time.sleep(0.01)
def main():
    """Benchmark/demo of the different logging rotation handlers.

    Builds a logger according to the CLI flags, emits the requested number
    of records via do_log and returns 0.
    """
    parser = ArgumentParser(description=(__doc__),
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('-n', '--num-iterations', type=int, default=1000,
                        help='Number of logs')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='Show debug log')
    parser.add_argument('-t', '--rotation-type', default='none',
                        choices=['none', 'file', 'size', 'time'],
                        help='Rotation type')
    parser.add_argument('-f', '--log-file-path', default='result.log',
                        help='Log file path')
    # Bug fix: these two options previously lacked type=int, so values given
    # on the command line arrived as strings and broke the rotating handlers
    # (maxBytes/backupCount are compared numerically).
    parser.add_argument('-c', '--rotation-backup-count', type=int, default=10,
                        help='Log backup count')
    parser.add_argument('-s', '--rotation-backup-size', type=int, default=10*1024,
                        help='Size of each log (size-based log only)')
    parser.add_argument('-w', '--rotation-when', default='S',
                        help='When log rotates (time-based log only)')
    args = parser.parse_args()
    logger = getLogger(__name__)
    # Pick the handler matching the requested rotation strategy.
    if args.rotation_type == 'file':
        handler = FileHandler(args.log_file_path)
    elif args.rotation_type == 'size':
        handler = RotatingFileHandler(
            args.log_file_path,
            maxBytes=args.rotation_backup_size,
            backupCount=args.rotation_backup_count)
    elif args.rotation_type == 'time':
        handler = TimedRotatingFileHandler(
            args.log_file_path,
            when=args.rotation_when,
            backupCount=args.rotation_backup_count)
    else:
        handler = StreamHandler()
    logger.addHandler(handler)
    # Both logger and handler must agree on the level for records to pass.
    if args.debug:
        logger.setLevel(DEBUG)
        handler.setLevel(DEBUG)
    else:
        logger.setLevel(INFO)
        handler.setLevel(INFO)
    handler.setFormatter(Formatter(
        '%(asctime)s %(process)5d %(levelname)7s %(message)s'))
    logger.debug('Start Running (Python {})'
                 .format(platform.python_version()))
    do_log(args, logger)
    logger.debug('Finished Running')
    return 0


if __name__ == '__main__':
    sys.exit(main())
|
import numpy as np
def expandLP_truelabels(
        Data_t, curLP_t, tmpModel, curSS_nott,
        **Plan):
    ''' Create new states for target data from its ground-truth labels.

    One fresh state is created per unique value in Data_t.TrueParams['Z'],
    and every target atom is hard-assigned (resp = 1.0) to the new state
    matching its true label.

    Returns
    -------
    propLP_t : dict of local params, with K + (num unique labels) states
    xcurSS_nott : SuffStatBag
        first K states are equal to curSS_nott
        final few states are empty
    '''
    assert 'Z' in Data_t.TrueParams
    Z = Data_t.TrueParams['Z']
    uLabels = np.unique(Z)
    origK = curSS_nott.K
    propK = origK + len(uLabels)
    # Hard-assign each atom to the fresh state for its true label.
    propResp = np.zeros((curLP_t['resp'].shape[0], propK))
    for uid, uval in enumerate(uLabels):
        mask_uid = Z == uval
        propResp[mask_uid, origK + uid] = 1.0
    propLP_t = dict(resp=propResp)
    if hasattr(tmpModel.allocModel, 'initLPFromResp'):
        propLP_t = tmpModel.allocModel.initLPFromResp(Data_t, propLP_t)
    # Make expanded xcurSS to match
    xcurSS_nott = curSS_nott.copy(includeELBOTerms=1, includeMergeTerms=0)
    xcurSS_nott.insertEmptyComps(propK - origK)
    return propLP_t, xcurSS_nott
def expandLP_singleNewState(
        Data_t, curLP_t, tmpModel, curSS_nott,
        **Plan):
    ''' Create single new state for all target data.

    Every target atom is hard-assigned (resp = 1.0) to the one new state.

    Returns
    -------
    propLP_t : dict of local params, with K + 1 states
    xcurSS_nott : SuffStatBag
        first K states are equal to curSS_nott
        final state is empty
    '''
    xcurSS_nott = curSS_nott.copy(includeELBOTerms=1, includeMergeTerms=0)
    xcurSS_nott.insertEmptyComps(1)
    propK = curSS_nott.K + 1
    # All responsibility mass goes to the appended state.
    propResp = np.zeros((curLP_t['resp'].shape[0], propK))
    propResp[:, -1] = 1.0
    propLP_t = dict(resp=propResp)
    if hasattr(tmpModel.allocModel, 'initLPFromResp'):
        propLP_t = tmpModel.allocModel.initLPFromResp(Data_t, propLP_t)
    return propLP_t, xcurSS_nott
def expandLP_randomSplit(
        Data_t, curLP_t, tmpModel, curSS_nott,
        PRNG=np.random, **Plan):
    ''' Divide target data into two new states, completely at random.

    Parameters
    ----------
    PRNG : random state used to shuffle which atoms land in which state.
    Plan : may contain 'btargetCompID'; when present, only atoms with
        responsibility > 0.01 for that comp are reassigned.

    Returns
    -------
    propLP_t : dict of local params, with K + 2 states
    xcurSS_nott : SuffStatBag
        first K states are equal to curSS_nott
        final few states are empty
    '''
    Kfresh = 2
    xcurSS_nott = curSS_nott.copy(includeELBOTerms=1, includeMergeTerms=0)
    xcurSS_nott.insertEmptyComps(Kfresh)
    origK = curSS_nott.K
    propK = curSS_nott.K + Kfresh
    propResp = np.zeros((curLP_t['resp'].shape[0], propK))
    propResp[:, :origK] = curLP_t['resp']
    if 'btargetCompID' in Plan:
        atomids = np.flatnonzero(
            curLP_t['resp'][:, Plan['btargetCompID']] > 0.01)
    else:
        atomids = np.arange(propResp.shape[0])
    # randomly permute atomids
    PRNG.shuffle(atomids)
    if atomids.size > 20:
        # NOTE(review): only 20 atoms get assigned to the new comps here,
        # yet ALL atomids are zeroed below — confirm the leftover all-zero
        # rows are renormalized by initLPFromResp / calc_local_params.
        Aids = atomids[:10]
        Bids = atomids[10:20]
    else:
        # BUGFIX: use integer division; `size / 2` is a float in Python 3
        # and raises TypeError when used as a slice bound.
        half = atomids.size // 2
        Aids = atomids[:half]
        Bids = atomids[half:]
    # Force all atomids to only be explained by new comps
    propResp[atomids, :] = 0.0
    propResp[Aids, -2] = 1.0
    propResp[Bids, -1] = 1.0
    propLP_t = dict(resp=propResp)
    if hasattr(tmpModel.allocModel, 'initLPFromResp'):
        propLP_t = tmpModel.allocModel.initLPFromResp(Data_t, propLP_t)
    # Refine the proposal with one global/local update round.
    propSS = tmpModel.get_global_suff_stats(Data_t, propLP_t)
    propSS += xcurSS_nott
    tmpModel.update_global_params(propSS)
    propLP_t = tmpModel.calc_local_params(Data_t, propLP_t)
    return propLP_t, xcurSS_nott
|
#encoding=utf-8
class MyBaseClass(object):
    """Root of the demo hierarchy; simply records ``value``."""

    def __init__(self, value):
        self.value = value
class MyChildClass(MyBaseClass):
    """Child that always initializes its parent with the constant 5."""

    def __init__(self):
        MyBaseClass.__init__(self, 5)
class TimesTwo(object):
    """Mixin-style step: doubles an already-present ``value`` attribute."""

    def __init__(self):
        self.value = self.value * 2
class PlusFive(object):
    """Mixin-style step: adds 5 to an already-present ``value`` attribute."""

    def __init__(self):
        self.value = self.value + 5
class OneWay(MyBaseClass, TimesTwo, PlusFive):
    """Chains the mixins by explicit parent calls: double first, then +5."""

    def __init__(self, value):
        MyBaseClass.__init__(self, value)
        TimesTwo.__init__(self)
        PlusFive.__init__(self)
# (5 * 2) + 5 = 15: TimesTwo runs before PlusFive.
foo = OneWay(5)
print('first ordering is (5*2)+5=',foo.value)
class AnotherWay(MyBaseClass,PlusFive,TimesTwo):
    # NOTE: the base-class order differs from OneWay, but because the
    # parents are initialized with explicit calls (not super()), the call
    # order below — not the inheritance order — determines the result.
    def __init__(self,value):
        MyBaseClass.__init__(self,value)
        TimesTwo.__init__(self)
        PlusFive.__init__(self)
# Still 15: same explicit call order as OneWay despite different bases.
bar = AnotherWay(5)
print('second ordering is still is ', bar.value)
from bbcode import *
from rhizomedotorg.settings import MEDIA_URL
import re
class Smilies(SelfClosingTagNode):
    # Matches named smilies written as ``:name:`` (letters and dashes).
    open_pattern = re.compile(':(?P<name>[a-zA-Z-]+):')

    def parse(self):
        # Render the named smiley as an <img> served from MEDIA_URL; the
        # alt text repeats the smiley name.
        name = self.match.groupdict()['name']
        return '<img src="%smedia/smilies/%s.gif" alt="%s" />' % (MEDIA_URL, name, name)
class AlternativeSmilie(SelfClosingTagNode):
    # Base class for punctuation smilies (``:)``, ``:D``, ...). Subclasses
    # only define ``open_pattern``; the image file name defaults to the
    # lowercased subclass name unless the subclass sets ``alias``.
    def __init__(self, *args, **kwargs):
        if not hasattr(self, 'alias'):
            self.alias = self.__class__.__name__.lower()
        SelfClosingTagNode.__init__(self, *args, **kwargs)

    def parse(self):
        # alt text shows the exact characters the user typed
        alias = self.match.group()
        return '<img src="%smedia/smilies/%s.gif" alt="%s" />' % (MEDIA_URL,self.alias, alias)
class LOL(AlternativeSmilie):
    # Laughing smiley, rendered as lol.gif.
    # :D, :-D, :-d, :d
    open_pattern = re.compile(':-?(D|d)')
class Smilie(AlternativeSmilie):
    # :), :-)
    # Raw string so the escaped ')' is not an invalid str escape sequence
    # (SyntaxWarning on modern Python).
    open_pattern = re.compile(r':-?\)')
class Wink(AlternativeSmilie):
    # ;), ;-), ;-D, ;D, ;d, ;-d
    # Raw string: '\)' in a normal string is an invalid escape sequence.
    open_pattern = re.compile(r';-?(\)|d|D)')
class Razz(AlternativeSmilie):
    # Tongue-out smiley, rendered as razz.gif.
    # :P, :-P, :p, :-p
    open_pattern = re.compile(':-?(P|p)')
class Eek(AlternativeSmilie):
    # Wide-eyed smiley; accepts any mix of o/O/0 around the underscore.
    # o_O....
    open_pattern = re.compile('(o|O|0)_(o|O|0)')
class Sad(AlternativeSmilie):
    # :-(, :(
    # Raw string: '\(' in a normal string is an invalid escape sequence.
    open_pattern = re.compile(r':-?\(')
class Crying(AlternativeSmilie):
    # ;_;, :'(, :'-(
    # Raw string: '\(' in a normal string is an invalid escape sequence.
    open_pattern = re.compile(r"(;_;|:'-?\()")
class Yell(AlternativeSmilie):
    # ^.^
    # BUGFIX: the old pattern '^\.^' treated both carets as anchors, so it
    # could never match the literal text "^.^"; escape them instead.
    open_pattern = re.compile(r'\^\.\^')
class Grin(AlternativeSmilie):
    # xD, XD, *g*
    # Raw string: '\*' in a normal string is an invalid escape sequence.
    open_pattern = re.compile(r'(xD|XD|\*g\*)')
class Neutral(AlternativeSmilie):
    # :-|, :|
    # Raw string: '\|' in a normal string is an invalid escape sequence.
    open_pattern = re.compile(r'(:-?\|)')
# register(Smilies)
# register(LOL)
# register(Smilie)
# register(Wink)
# register(Razz)
# register(Eek)
# register(Sad)
# register(Crying)
# register(Yell)
# register(Grin)
# register(Neutral) |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
# Auto-generated initial schema for the permissions app: generic
# object-level permissions, permission inheritance blocks, Permission/Role
# definitions, and the principal<->role relation.
# NOTE(review): applied migrations should not be hand-edited.
class Migration(migrations.Migration):

    # Requires contenttypes/auth migrations and the swappable user model.
    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('auth', '0006_require_contenttypes_0002'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='ObjectPermission',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('content_id', models.PositiveIntegerField(verbose_name='Content id')),
                ('content_type', models.ForeignKey(verbose_name='Content type', to='contenttypes.ContentType')),
            ],
        ),
        migrations.CreateModel(
            name='ObjectPermissionInheritanceBlock',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('content_id', models.PositiveIntegerField(verbose_name='Content id')),
                ('content_type', models.ForeignKey(verbose_name='Content type', to='contenttypes.ContentType')),
            ],
        ),
        migrations.CreateModel(
            name='Permission',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=100, verbose_name='Name')),
                ('codename', models.CharField(unique=True, max_length=100, verbose_name='Codename')),
                ('content_types', models.ManyToManyField(related_name='content_types', verbose_name='Content Types', to='contenttypes.ContentType', blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='PrincipalRoleRelation',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('content_id', models.PositiveIntegerField(null=True, verbose_name='Content id', blank=True)),
                ('content_type', models.ForeignKey(verbose_name='Content type', blank=True, to='contenttypes.ContentType', null=True)),
                ('group', models.ForeignKey(verbose_name='Group', blank=True, to='auth.Group', null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Role',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=100)),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        # Foreign keys added after model creation to break circular deps.
        migrations.AddField(
            model_name='principalrolerelation',
            name='role',
            field=models.ForeignKey(verbose_name='Role', to='permissions.Role'),
        ),
        migrations.AddField(
            model_name='principalrolerelation',
            name='user',
            field=models.ForeignKey(verbose_name='User', blank=True, to=settings.AUTH_USER_MODEL, null=True),
        ),
        migrations.AddField(
            model_name='objectpermissioninheritanceblock',
            name='permission',
            field=models.ForeignKey(verbose_name='Permission', to='permissions.Permission'),
        ),
        migrations.AddField(
            model_name='objectpermission',
            name='permission',
            field=models.ForeignKey(verbose_name='Permission', to='permissions.Permission'),
        ),
        migrations.AddField(
            model_name='objectpermission',
            name='role',
            field=models.ForeignKey(verbose_name='Role', blank=True, to='permissions.Role', null=True),
        ),
    ]
|
import os
import subprocess
import re
def write_file(filename, a_list):
    """Write each string in ``a_list`` to ``filename``, one per line."""
    # Context manager guarantees the handle is closed even on error.
    with open(filename, 'w') as fw:
        for line in a_list:
            fw.write(line + '\n')
def read_file(filename):
    """Return the full contents of ``filename`` as one string.

    ``unicode_escape`` decoding is kept from the original code: the inputs
    are scraped C++ sources that may contain arbitrary byte escapes.
    """
    with open(filename, encoding='unicode_escape') as f:
        return f.read()
def correct_define(cpp_name, error_char):
    """Insert ``#define <error_char> 1000`` after ``using namespace std;``.

    Used to patch C++ sources that reference an undeclared identifier.
    Prints a notice when the anchor line is absent.
    """
    label = "using namespace std;"
    doc_str = read_file(cpp_name)
    index = doc_str.find(label)
    if index != -1:
        cut = index + len(label)
        new_str = doc_str[:cut] + "\n" + "#define" + " " + error_char + " " + str(1000) + doc_str[cut:]
        # `with` replaces the unclosed open/близ close pair of the original.
        with open(cpp_name, 'w') as fw:
            fw.write(new_str)
    else:
        print(cpp_name, "correct_define exception")
#correct_define("13.cpp","MAX")
def correct_gets(cpp_name):
    # Replace every ``gets(X)`` call with ``cin>>X`` — gets() was removed
    # from the C++ standard and newer g++ rejects it.
    label = "gets("
    doc_str = read_file(cpp_name)
    index = doc_str.find(label)
    while(index != -1):
        index_r = doc_str.find(')',index)
        # doc_str[:index_r][index+5:] == the argument between '(' and ')'
        content = doc_str[:index_r][index+5:]
        doc_str = doc_str[:index] + "\n" + "cin>>" + content + doc_str[index_r+1:]
        index = doc_str.find(label)
    #print(doc_str)
    fw = open(cpp_name, 'w')
    fw.write(doc_str)
    fw.close()
def correct_int_main(cpp_name):
    """Give ``main`` an explicit ``int`` return type.

    Handles both the ``main()`` and ``main ()`` spellings. Intended for
    sources where g++ warned 'ISO C++ forbids declaration of main with no
    type', so ``main`` is assumed to lack a return type here.
    """
    doc_str = read_file(cpp_name)
    label = "main()"
    index = doc_str.find(label)
    if index != -1:
        # BUGFIX: the original opened the file in 'w' mode (truncating it)
        # but left the write commented out, erasing the whole source file.
        new_str = doc_str[:index] + "int " + doc_str[index:]
        with open(cpp_name, 'w') as fw:
            fw.write(new_str)
    else:
        label = "main ()"
        index = doc_str.find(label)
        if index == -1:
            # BUGFIX: without this guard, index == -1 silently corrupted
            # the file through negative slicing.
            print(cpp_name, "correct_int_main exception")
            return
        new_str = doc_str[:index] + "int main()" + doc_str[index+len(label):]
        with open(cpp_name, 'w') as fw:
            fw.write(new_str)
def correct_none_return(cpp_name):
    """Rewrite the first bare ``return ;`` as ``return 0;``.

    Fixes 'return-statement with no value in function returning int'.
    Prints a notice when no bare return is found.
    """
    label = "return ;"
    doc_str = read_file(cpp_name)
    index = doc_str.find(label)
    if index != -1:
        new_str = doc_str[:index] + "return 0;" + doc_str[index+len(label):]
        # `with` replaces the unclosed-on-error open/close of the original.
        with open(cpp_name, 'w') as fw:
            fw.write(new_str)
    else:
        print(cpp_name, "correct_none_return exception")
def correct_ambiguous(cpp_name, error_char):
    """Rename every occurrence of ``error_char`` to ``error_char + "_1"``.

    Used when g++ reports an ambiguous reference (a user symbol colliding
    with one pulled in by ``using namespace std``).
    """
    doc_str = read_file(cpp_name)
    # BUGFIX: escape the identifier so any regex metacharacter in it is
    # matched literally instead of changing the pattern's meaning.
    new_str = re.sub(re.escape(error_char), error_char + "_1", doc_str)
    with open(cpp_name, 'w') as fw:
        fw.write(new_str)
def correct_missing(cpp_name, line_num_list):
    """Join lines flagged by g++ for an unterminated string/char literal.

    ``line_num_list`` holds 1-based line numbers; it is walked pairwise
    and only the first number of each pair is used. The trailing newline
    of each flagged line is replaced with a space so the literal closes
    on a single line.
    """
    with open(cpp_name) as f:
        lines = f.readlines()
    for i in range(0, len(line_num_list), 2):
        num = line_num_list[i] - 1
        lines[num] = lines[num].replace("\n", " ")
    with open(cpp_name, 'w') as fw:
        fw.write("".join(lines))
def excute_cmd(cmd):
    """Run ``cmd`` (space-separated command line) and return its stderr text."""
    argv = cmd.split(" ")
    proc = subprocess.run(argv, stderr=subprocess.PIPE, shell=False, timeout=145)
    # Decode leniently: compiler diagnostics may contain non-UTF-8 bytes.
    return str(proc.stderr, encoding='utf-8', errors='ignore')
# --- Driver: walk every C++ source under cppdir, try compiling it with
# g++, auto-repair known error classes, then retry; anything still failing
# is recorded in error_file.txt / error.txt.
error_file = []
error_list = []
cppdir = "/home/CodeXGLUE-main/Code-Code/Clone-detection-POJ-104/dataset/ProgramData"
num = 0
for filepath, dirnames, filenames in os.walk(cppdir):
    num += 1
    print(num)
    for filename in filenames:
        source_file = os.path.join(filepath, filename)
        cmd = "g++ " + source_file
        err = excute_cmd(cmd)
        # Unterminated double-quoted literal: join the flagged lines.
        if err.find("error: missing terminating \" character") != -1:
            to_find = re.compile(r':(\d+):\d+: error: missing terminating " character')
            find_list = re.findall(to_find, err)
            line_num_list = list(map(int, find_list))
            # print("1", line_num_list)
            correct_missing(source_file, line_num_list)
            err = excute_cmd(cmd)
        # Unterminated single-quoted literal: same repair.
        elif err.find("error: missing terminating \' character") != -1:
            to_find = re.compile(r":(\d+):\d+: error: missing terminating ' character")
            find_list = re.findall(to_find, err)
            line_num_list = list(map(int, find_list))
            correct_missing(source_file, line_num_list)
            err = excute_cmd(cmd)
        # `main` declared without a return type: prepend `int`.
        elif err.find("warning: ISO C++ forbids declaration of ‘main’ with no type") != -1:
            # print(source_file)
            correct_int_main(source_file)
            err = excute_cmd(cmd)
        # Whatever remains after repair attempts is logged.
        if err.find("error") != -1:
            error_file.append(source_file)
            error_list.append(err)
    '''
    elif err.find("error: ‘gets’ was not declared in this scope;") != -1:
        #print("error 0: gets\n")
        correct_gets(source_file)
    elif err.find("error: use of undeclared identifier") != -1:
        #print("error 1: undeclared identifier\n")
        index = err.find("error: use of undeclared identifier")
        # print(err)
        index_begin = err.find('\'', index)
        index_end = err.find('\'', index_begin + 1)
        to_define_char = err[:index_end][index_begin + 1:]
        correct_define(source_file, to_define_char)
    elif err.find(" was not declared in this scope") != -1: #error: ‘MAX’
        #print("error 2: not declared scope\n")
        index = err.find(" was not declared in this scope")
        err_part = err[:index-1]
        index_begin = err_part.rfind('‘')
        #print(err_part)
        index_end = index - 1
        #print(index_begin,index_end)
        to_define_char = err[:index_end][index_begin + 1:]
        #print(to_define_char)
        correct_define(source_file, to_define_char)
    elif err.find("error: C++ requires a type specifier for all declarations") != -1:
        #print("error 3: none main()\n")
        correct_int_main(source_file)
    elif err.find("error: return-statement with no value, in function returning ‘int’")!=-1:
        #print("error 4: none return\n")
        correct_none_return(source_file)
    elif err.find("error: reference to ")!=-1: # re.search(r"error: reference to ‘.*’ is ambiguous",err)
        #print("error 5: ambiguous reference\n")
        label = "error: reference to "
        index = err.find(label)
        index_begin = index + len(label)
        index_end =err.find("’",index_begin + 1)
        to_define_char = err[:index_end][index_begin + 1:]
        #print(to_define_char)
        correct_ambiguous(source_file,to_define_char)
    #elif err != "":
    # print(err)
    #error_file.append(source_file)
    #error_file.append(err)
    '''
write_file("error_file.txt", error_file)
write_file("error.txt", error_list)
|
import celestial
import logging
# Verbose logging for the celestial client library.
logging.basicConfig(level=logging.DEBUG, format='%(name)s %(levelname)s %(message)s')
# Run 10 trials against study 3: fetch suggested parameters, then report a
# synthetic loss/accuracy back to the service and print its JSON response.
for _ in range(10):
    trial = celestial.Trial(study_id=3)
    print(trial.parameters)
    print(trial.submit_result(loss=1-trial.parameters['param1'], accuracy=0.9+trial.parameters['param2']/200).json())
## sigtools requires numpy; it consumes numpy arrays and emits numpy vectors
import numpy as np
from numpy import genfromtxt, array
from math import pow
import os
import math
## sigtools has a subpackage sigtools.tosig that analyses time series data
import esig
import esig.tosig as ts
## fractional brownian motion package
from fbm import FBM,fbm
import matplotlib.pyplot as plt
stream2logsig = ts.stream2logsig
stream2sig = ts.stream2sig
logsigdim = ts.logsigdim
sigdim = ts.sigdim
from numpy.random import standard_normal
from numpy import array, zeros, sqrt, shape, convolve
# Milstein's method
def ComputeY(Y_last, dt, dB, step):
    """One Milstein update for dY = (-pi*Y + sin(pi*t)) dt + Y dB."""
    drift = (-np.pi * Y_last + np.sin(np.pi * step * dt)) * dt
    diffusion = Y_last * dB
    milstein_term = 0.5 * Y_last * (dB * dB - dt)
    return Y_last + drift + diffusion + milstein_term
# Monte-Carlo configuration: 2200 paths, 1001 time steps on [0, maturity].
number_of_independent_simulations = 2200
total_no_steps = 1001
maturity = 1
# One (time, value) path; column 0 is the time grid, column 1 the BM value.
BM = np.zeros([total_no_steps, 2], dtype=float)
# BM_paths - number_of_independent_simulations of one dimensional path
BM_paths = np.zeros([number_of_independent_simulations, total_no_steps], dtype=float)
# Terminal SDE value of each simulated path.
output = np.zeros([number_of_independent_simulations], dtype = float)
# Simulate SDE with Brownian motion driving path
def SimulteSDEdatabm(number_of_timstep, T, sigma = 1):
    """Simulate the SDE with a Brownian driving path.

    Returns a dict with the terminal value ('output') and the driving
    (time, BM) path ('BM') of shape (number_of_timstep, 2).
    """
    Y = 0
    dT = T/(number_of_timstep - 1)
    path = np.zeros([number_of_timstep, 2], dtype=float)
    print(dT)
    for i in range(1, number_of_timstep):
        # column 0: running time grid
        path[i, 0] = path[i - 1, 0] + dT
        # column 1: Brownian path with volatility sigma
        dB = standard_normal() * sqrt(dT)
        path[i, 1] = path[i - 1, 1] + sigma * dB
        Y = ComputeY(Y, dT, dB, i)
    return {'output': Y, 'BM': path}
# Simulate SDE with fractional Brownian motion driving path
def SimulteSDEdatafbm(number_of_timstep, T, H=0.75):
    """Simulate the SDE with a fractional Brownian driving path (Hurst H)."""
    Y = 0
    dT = T/(number_of_timstep-1)
    path = np.zeros([number_of_timstep, 2], dtype=float)
    # One fBM sample path on [0, T] with number_of_timstep points.
    temp_fbm = fbm(n=number_of_timstep-1, hurst=H, length=T, method='daviesharte')
    for i in range(1, number_of_timstep):
        # column 0: running time grid; column 1: the fBM value itself.
        path[i, 0] = path[i - 1, 0] + dT
        dfB = temp_fbm[i] - temp_fbm[i - 1]
        path[i, 1] = temp_fbm[i]
        Y = ComputeY(Y, dT, dfB, i)
    return {'output': Y, 'BM': path}
# Run the Monte-Carlo simulations and persist the terminal values and
# driving Brownian paths for later signature analysis.
for j in range(0, number_of_independent_simulations, 1):
    print(j)
    result1 = SimulteSDEdatabm(total_no_steps, maturity)
    output[j] = result1['output']
    BM = result1['BM']
    # keep only the BM values (column 1), one row per simulation
    BM_paths[j] = np.transpose(BM[:, 1])
np.save('output', output)
np.save('BM_paths', BM_paths)
|
from typing import Container, Optional
import aioredis
from aioredis import Redis
class AsyncRedisUtil:
    """
    Async redis helper backed by a shared aioredis pool.

    Configuration:
        On application startup:
            await AsyncRedisUtil.init(host, port, ...)
        On shutdown:
            await AsyncRedisUtil.close()
    Note:
        ``exp`` / ``exp_of_none`` are expiry times in seconds.
    """

    # Shared connection pool; populated by ``init``.
    r: Optional[Redis] = None

    @classmethod
    async def init(cls, host="localhost", port=6379, password=None, db=0, **kwargs):
        """Create the shared connection pool and return it."""
        cls.r = await aioredis.create_redis_pool(
            f"redis://{host}:{port}", password=password, db=db, **kwargs
        )
        return cls.r

    @classmethod
    async def _exp_of_none(cls, *args, exp_of_none, callback):
        """Run ``callback(*args)``; if the key did not already exist and
        ``exp_of_none`` is set, also attach that expiry via MULTI/EXEC."""
        if not exp_of_none:
            return await getattr(cls.r, callback)(*args)
        key = args[0]
        tr = cls.r.multi_exec()
        fun = getattr(tr, callback)
        exists = await cls.r.exists(key)
        if not exists:
            fun(*args)
            tr.expire(key, exp_of_none)
            ret, _ = await tr.execute()
        else:
            fun(*args)
            ret = (await tr.execute())[0]
        return ret

    @classmethod
    async def set(cls, key, value, exp=None):
        """Set ``key`` to ``value`` with an optional expiry in seconds."""
        assert cls.r, "must call init first"
        await cls.r.set(key, value, expire=exp)

    @classmethod
    async def get(cls, key, default=None, encoding="utf-8"):
        """Get ``key``, returning ``default`` when it is missing."""
        assert cls.r, "must call init first"
        value = await cls.r.get(key, encoding=encoding)
        if value is None:
            return default
        return value

    @classmethod
    async def hget(cls, name, key, default=0, encoding="utf-8"):
        """Get field ``key`` of hash ``name``, or ``default`` when missing."""
        assert cls.r, "must call init first"
        v = await cls.r.hget(name, key, encoding=encoding)
        if v is None:
            return default
        return v

    @classmethod
    async def get_or_set(cls, key, default=None, value_fun=None, encoding="utf-8"):
        """Get a cached value; on a miss return ``default`` if given, else
        compute ``(value, exp)`` via ``value_fun`` and cache it."""
        assert cls.r, "must call init first"
        value = await cls.r.get(key, encoding=encoding)
        if value is None and default:
            return default
        if value is not None:
            return value
        if value_fun:
            value, exp = await value_fun()
            await cls.r.set(key, value, expire=exp)
        return value

    @classmethod
    async def delete(cls, key):
        """Delete a key (str) or several keys (list)."""
        assert cls.r, "must call init first"
        return await cls.r.delete(key)

    @classmethod
    async def sadd(cls, name, values, exp_of_none=None):
        assert cls.r, "must call init first"
        return await cls._exp_of_none(name, values, exp_of_none=exp_of_none, callback="sadd")

    @classmethod
    async def hset(cls, name, key, value, exp_of_none=None):
        assert cls.r, "must call init first"
        return await cls._exp_of_none(name, key, value, exp_of_none=exp_of_none, callback="hset")

    @classmethod
    async def hdel(cls, name, key, exp_of_none=None):
        assert cls.r, "must call init first"
        # BUGFIX: the caller's exp_of_none was previously discarded
        # (hard-coded exp_of_none=None).
        return await cls._exp_of_none(name, key, exp_of_none=exp_of_none, callback="hdel")

    @classmethod
    async def hincrby(cls, name, key, value=1, exp_of_none=None):
        assert cls.r, "must call init first"
        return await cls._exp_of_none(name, key, value, exp_of_none=exp_of_none, callback="hincrby")

    @classmethod
    async def hincrbyfloat(cls, name, key, value, exp_of_none=None):
        assert cls.r, "must call init first"
        return await cls._exp_of_none(
            name, key, value, exp_of_none=exp_of_none, callback="hincrbyfloat"
        )

    @classmethod
    async def incrby(cls, name, value=1, exp_of_none=None):
        assert cls.r, "must call init first"
        return await cls._exp_of_none(name, value, exp_of_none=exp_of_none, callback="incrby")

    @classmethod
    def multi_exec(cls):
        """
        Return a MULTI/EXEC pipeline for batched commands.
        eg:
            async def main():
                redis = await aioredis.create_redis_pool('redis://localhost')
                tr = redis.multi_exec()
                tr.set('key1', 'value1')
                tr.set('key2', 'value2')
                ok1, ok2 = await tr.execute()
                assert ok1
                assert ok2
        """
        assert cls.r, "must call init first"
        return cls.r.multi_exec()

    @classmethod
    async def subscribe(cls, channel: str, *channels: Container[str]):
        """
        Pub/sub: subscribe to one or more channels.
        :param channel: first channel name
        :param channels: additional channel names
        """
        return await cls.r.subscribe(channel, *channels)

    @classmethod
    async def publish(cls, channel: str, message):
        """
        Publish ``message`` on ``channel``.
        :param channel: channel name
        :param message: payload
        :return:
        """
        await cls.r.publish(channel, message)

    @classmethod
    async def close(cls):
        """Close the pool and wait until all connections are released."""
        cls.r.close()
        await cls.r.wait_closed()
|
"""This program will encrypt and decrypt messages using a Caesar cipher.
By Ted Silbernagel
"""
import enum
import string
from typing import List
class Operation(enum.Enum):
    # The three modes the program supports.
    ENCRYPT = enum.auto()
    DECRYPT = enum.auto()
    CRACK = enum.auto()
class CaesarCipher:
    """Encrypt, decrypt, and brute-force Caesar-shifted messages.

    Messages are normalised to uppercase A-Z words; punctuation is
    stripped before processing. Letters outside A-Z (e.g. digits) raise
    ValueError, matching the original behavior.
    """

    crypt_base: List[str] = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')

    def parse_input(self, message: str) -> List[str]:
        """Strip punctuation, uppercase, and split the message into words."""
        return message.translate(str.maketrans('', '', string.punctuation)).upper().split(' ')

    def _shift_words(self, key: int, message: str) -> str:
        """Shift every letter of every word by ``key`` positions (mod 26).

        Single implementation shared by encrypt/decrypt/crack; the
        original's ``index+1 ... -1`` dance reduces to ``(i + key) % 26``.
        """
        shifted_words: List[str] = []
        for word in self.parse_input(message):
            shifted_words.append(''.join(
                self.crypt_base[(self.crypt_base.index(letter) + key) % 26]
                for letter in word))
        return ' '.join(shifted_words)

    def encrypt(self, key: int, message: str) -> str:
        """Return ``message`` shifted forward by ``key`` positions."""
        return self._shift_words(key, message)

    def decrypt(self, key: int, message: str) -> str:
        """Return ``message`` shifted backward by ``key`` positions."""
        return self._shift_words(-key, message)

    def crack(self, message: str) -> str:
        """Interactively try all 26 shifts until the user confirms one."""
        print('Crack starting. Press enter to try again or any other key to exit.')
        # Try each combination
        for i in range(1, 27):
            cracked_string = self._shift_words(-i, message)
            if input(f'Attempt {i}/26: "{cracked_string}": Correct? ').lower() not in ['', 'n']:
                return cracked_string
        return 'Crack unsuccessful.'
def _gather_operation() -> Operation:
    """Ask the user which mode to run; raise on unrecognised input."""
    choice = input('Would you like to (e)ncrypt, (d)ecrypt, or (c)rack? ').lower()
    # Accept either the single-letter shortcut or the full word.
    for op in Operation:
        if choice in (op.name[0].lower(), op.name.lower()):
            return op
    raise Exception('Invalid operation entered.')
def _gather_message(operation: Operation) -> str:
    """Prompt for the message, naming the selected operation in the prompt."""
    return input(f'Please enter the message to {operation.name.lower()}: ')
def _gather_encryption_key() -> int:
    """Prompt for the shift key.

    NOTE(review): values outside 1-25 are not rejected here; they wrap
    via the mod-26 arithmetic downstream. Non-numeric input raises
    ValueError from int().
    """
    return int(input('Please enter the key to use (1 to 25): '))
# Interactive driver: pick a mode, gather inputs, print the result.
if __name__ == '__main__':
    operation = _gather_operation()
    message = _gather_message(operation)
    # Only encrypt/decrypt need a key; crack tries all 26 shifts.
    if operation is Operation.ENCRYPT or operation is Operation.DECRYPT:
        encryption_key = _gather_encryption_key()
    cipher = CaesarCipher()
    if operation is Operation.ENCRYPT:
        response = cipher.encrypt(encryption_key, message)
    elif operation is Operation.DECRYPT:
        response = cipher.decrypt(encryption_key, message)
    elif operation is Operation.CRACK:
        response = cipher.crack(message)
    print(response)
|
#!/usr/bin/env python3
"""
Created by
https://github.com/phoemur
"""
import re
import urllib.request
import urllib.parse
import http.cookiejar
from lxml.html import fragment_fromstring
from collections import OrderedDict
from tqdm import tqdm
def get_data(*args, **kwargs):
    """Scrape fundamentus.com.br's stock screener.

    Returns an OrderedDict mapping each ticker to a dict of its
    fundamental indicators; all values are the raw strings from the
    result table.
    """
    url = 'http://www.fundamentus.com.br/resultado.php'
    cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
    opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows; U; Windows NT 6.1; rv:2.2) Gecko/20110201'),
                         ('Accept', 'text/html, text/plain, text/css, text/sgml, */*;q=0.01')]
    # Aqui estão os parâmetros de busca das ações
    # Estão em branco para que retorne todas as disponíveis
    data = {'pl_min':'',
            'pl_max':'',
            'pvp_min':'',
            'pvp_max' :'',
            'psr_min':'',
            'psr_max':'',
            'divy_min':'',
            'divy_max':'',
            'pativos_min':'',
            'pativos_max':'',
            'pcapgiro_min':'',
            'pcapgiro_max':'',
            'pebit_min':'',
            'pebit_max':'',
            'fgrah_min':'',
            'fgrah_max':'',
            'firma_ebit_min':'',
            'firma_ebit_max':'',
            'margemebit_min':'',
            'margemebit_max':'',
            'margemliq_min':'',
            'margemliq_max':'',
            'liqcorr_min':'',
            'liqcorr_max':'',
            'roic_min':'',
            'roic_max':'',
            'roe_min':'',
            'roe_max':'',
            'liq_min':'',
            'liq_max':'',
            'patrim_min':'',
            'patrim_max':'',
            'divbruta_min':'',
            'divbruta_max':'',
            'tx_cresc_rec_min':'',
            'tx_cresc_rec_max':'',
            'setor':'',
            'negociada':'',
            'ordem':'',
            'x':'28',
            'y':'16'}
    with opener.open(url, urllib.parse.urlencode(data).encode('UTF-8')) as link:
        content = link.read().decode('ISO-8859-1')
    pattern = re.compile('<table id="resultado".*</table>', re.DOTALL)
    reg = re.findall(pattern, content)[0]
    page = fragment_fromstring(reg)
    # Column labels in the exact order of the result table's cells 1..19
    # (cell 0 holds the ticker link). Replaces 19 hand-written
    # ``rows.getchildren()[k].text`` lookups.
    columns = ('cotacao', 'P/L', 'P/VP', 'PSR', 'DY', 'P/Ativo',
               'P/Cap.Giro', 'P/EBIT', 'P/Ativ.Circ.Liq.', 'EV/EBIT',
               'EBITDA', 'Mrg.Liq.', 'Liq.Corr.', 'ROIC', 'ROE',
               'Liq.2m.', 'Pat.Liq', 'Div.Brut/Pat.', 'Cresc.5a')
    lista = OrderedDict()
    for row in page.xpath('tbody')[0].findall("tr"):
        cells = row.getchildren()
        ticker = cells[0][0].getchildren()[0].text
        lista[ticker] = {key: cells[idx].text
                         for idx, key in enumerate(columns, start=1)}
    return lista
|
from .base import *
# Development defaults only: the hard-coded SECRET_KEY fallback and
# DEBUG=True must be overridden via environment variables in production.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='ew4peb%u0qi2r*igg^5$m#wh4l+e7^-9h7codkl)uilvopyqfa')
DEBUG = env.bool('DJANGO_DEBUG', default=True)
"""
Copyright (c) Facebook, Inc. and its affiliates.
"""
import os
import sys
# python/ dir, for agent.so
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import faulthandler
import itertools
import logging
import numpy as np
import random
import re
import sentry_sdk
import signal
import time
from multiprocessing import set_start_method
from agent import Agent
from agent_connection import default_agent_name
from voxel_models.subcomponent_classifier import SubComponentClassifier
from voxel_models.geoscorer import Geoscorer
import memory
import perception
import shapes
from util import to_block_pos, pos_to_np, TimingWarn, hash_user
import default_behaviors
from ttad_model_dialogue_manager import TtadModelDialogueManager
# Dump Python tracebacks on SIGUSR1 to debug a stuck agent process.
faulthandler.register(signal.SIGUSR1)
# Deterministic behavior across runs.
random.seed(0)
log_formatter = logging.Formatter(
    "%(asctime)s [%(filename)s:%(lineno)s - %(funcName)s() %(levelname)s]: %(message)s"
)
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger().handlers.clear()
sentry_sdk.init()  # enabled if SENTRY_DSN set in env
# Seconds of chat inactivity before visible default behaviors may run.
DEFAULT_BEHAVIOUR_TIMEOUT = 20
class CraftAssistAgent(Agent):
    def __init__(
        self,
        host="localhost",
        port=25565,
        name=None,
        ttad_model_path=None,
        ttad_embeddings_path=None,
        ttad_grammar_path=None,
        semseg_model_path=None,
        voxel_model_gpu_id=-1,
        get_perception_interval=20,
        draw_fn=None,
        no_default_behavior=False,
        geoscorer_model_path=None,
    ):
        """Set up models, memory, dialogue manager, logging, and the
        server connection. Model path arguments default to files under
        models/ttad/ next to this module."""
        logging.info("CraftAssistAgent.__init__ started")
        self.name = name or default_agent_name()
        self.no_default_behavior = no_default_behavior
        # files needed to set up ttad model
        if ttad_model_path is None:
            ttad_model_path = os.path.join(os.path.dirname(__file__), "models/ttad/ttad.pth")
        if ttad_embeddings_path is None:
            ttad_embeddings_path = os.path.join(
                os.path.dirname(__file__), "models/ttad/ttad_ft_embeds.pth"
            )
        if ttad_grammar_path is None:
            ttad_grammar_path = os.path.join(
                os.path.dirname(__file__), "models/ttad/dialogue_grammar.json"
            )
        # set up the SubComponentClassifier model (optional voxel labeler)
        if semseg_model_path is not None:
            self.subcomponent_classifier = SubComponentClassifier(
                voxel_model_path=semseg_model_path
            )
        else:
            self.subcomponent_classifier = None
        # set up the Geoscorer model (optional placement scorer)
        if geoscorer_model_path is not None:
            self.geoscorer = Geoscorer(merger_model_path=geoscorer_model_path)
        else:
            self.geoscorer = None
        # Agent memory: in-memory sqlite unless DB_FILE is set.
        self.memory = memory.AgentMemory(
            db_file=os.environ.get("DB_FILE", ":memory:"),
            db_log_path="agent_memory.{}.log".format(self.name),
        )
        logging.info("Initialized AgentMemory")
        self.dialogue_manager = TtadModelDialogueManager(
            self, ttad_model_path, ttad_embeddings_path, ttad_grammar_path
        )
        logging.info("Initialized DialogueManager")
        # Log to file
        fh = logging.FileHandler("agent.{}.log".format(self.name))
        fh.setFormatter(log_formatter)
        fh.setLevel(logging.DEBUG)
        logging.getLogger().addHandler(fh)
        # Login to server
        super().__init__(host, port, self.name)
        logging.info("Logged in to server")
        # Wrap C++ agent methods: route send_chat through _send_chat so
        # outgoing chats can be recorded before hitting the C++ layer.
        self._cpp_send_chat = self.send_chat
        self.send_chat = self._send_chat
        self.last_chat_time = 0
        self.get_perception_interval = get_perception_interval
        self.uncaught_error_count = 0
        self.last_task_memid = None
        # Regions currently being pointed at; see safe_get_changed_blocks.
        self.point_targets = []
    def start(self):
        """Main loop: step forever, surviving uncaught exceptions.

        On an exception the task and dialogue stacks are cleared and the
        error reported to sentry; after 100 uncaught errors the process
        exits.
        """
        logging.info("CraftAssistAgent.start() called")
        # start the subcomponent classification model
        if self.subcomponent_classifier:
            self.subcomponent_classifier.start()
        for self.count in itertools.count():  # count forever
            try:
                if self.count == 0:
                    logging.info("First top-level step()")
                self.step()
            except Exception as e:
                logging.exception(
                    "Default handler caught exception, db_log_idx={}".format(
                        self.memory.get_db_log_idx()
                    )
                )
                self.send_chat("Oops! I got confused and wasn't able to complete my last task :(")
                sentry_sdk.capture_exception(e)
                # Reset to a clean state so the next step can proceed.
                self.memory.task_stack_clear()
                self.dialogue_manager.dialogue_stack.clear()
                self.uncaught_error_count += 1
                if self.uncaught_error_count >= 100:
                    sys.exit(1)
    def step(self):
        """One agent tick: refresh position/memory, handle chat, step tasks."""
        self.pos = to_block_pos(pos_to_np(self.get_player().pos))
        # Update memory with current world state
        # Removed get_perception call due to very slow updates on non-flatworlds
        with TimingWarn(2):  # warn if the memory update takes > 2s
            self.memory.update(self)
        # Process incoming chats
        self.dialogue_step()
        # Step topmost task on stack
        self.task_step()
    def task_step(self, sleep_time=0.25):
        """Pop finished tasks, maybe queue a default behavior, and step
        the task at the top of the stack (or sleep when idle)."""
        # Clean finished tasks
        while (
            self.memory.task_stack_peek() and self.memory.task_stack_peek().task.check_finished()
        ):
            self.memory.task_stack_pop()
        # Maybe add default task
        if not self.no_default_behavior:
            self.maybe_run_slow_defaults()
        # If nothing to do, wait a moment
        if self.memory.task_stack_peek() is None:
            time.sleep(sleep_time)
            return
        # If something to do, step the topmost task
        task_mem = self.memory.task_stack_peek()
        if task_mem.memid != self.last_task_memid:
            logging.info("Starting task {}".format(task_mem.task))
            self.last_task_memid = task_mem.memid
        task_mem.task.step(self)
        # Persist the (possibly mutated) task object back into memory.
        self.memory.task_stack_update_task(task_mem.memid, task_mem.task)
    def get_perception(self, force=False):
        """
        Get both block objects and component objects and put them
        in memory.

        Runs only every ``get_perception_interval`` steps while idle,
        unless ``force`` is True. Unlabeled block objects are queued to
        the subcomponent classifier; any finished classification results
        are drained and stored as component objects.
        """
        if not force and (
            self.count % self.get_perception_interval != 0
            or self.memory.task_stack_peek() is not None
        ):
            return
        block_objs_for_vision = []
        for obj in perception.all_nearby_objects(self.get_blocks, self.pos):
            memory.BlockObjectNode.create(self.memory, obj)
            # If any xyz of obj is has not been labeled
            if any([(not self.memory.get_component_object_ids_by_xyz(xyz)) for xyz, _ in obj]):
                block_objs_for_vision.append(obj)
        # TODO formalize this, make a list of all perception calls to make, etc.
        # note this directly adds the memories
        perception.get_all_nearby_holes(self, self.pos, radius=15)
        perception.get_nearby_airtouching_blocks(self, self.pos, radius=15)
        if self.subcomponent_classifier is None:
            return
        for obj in block_objs_for_vision:
            self.subcomponent_classifier.block_objs_q.put(obj)
        # everytime we try to retrieve as many recognition results as possible
        while not self.subcomponent_classifier.loc2labels_q.empty():
            loc2labels, obj = self.subcomponent_classifier.loc2labels_q.get()
            loc2ids = dict(obj)
            label2blocks = {}

            def contaminated(blocks):
                """
                Check if blocks are still consistent with the current world
                (i.e. none of them changed since classification started).
                """
                mx, Mx, my, My, mz, Mz = shapes.get_bounds(blocks)
                yzxb = self.get_blocks(mx, Mx, my, My, mz, Mz)
                for b, _ in blocks:
                    x, y, z = b
                    if loc2ids[b][0] != yzxb[y - my, z - mz, x - mx, 0]:
                        return True
                return False

            # Group the classified locations by predicted label.
            for loc, labels in loc2labels.items():
                b = (loc, loc2ids[loc])
                for l in labels:
                    if l in label2blocks:
                        label2blocks[l].append(b)
                    else:
                        label2blocks[l] = [b]
            for l, blocks in label2blocks.items():
                ## if the blocks are contaminated we just ignore
                if not contaminated(blocks):
                    memory.ComponentObjectNode.create(self.memory, blocks, [l])
    def maybe_run_slow_defaults(self):
        """Pick a default task to run with a low probability.

        Only runs when the agent is idle (no task, no dialogue). Visible
        behaviors are allowed only after DEFAULT_BEHAVIOUR_TIMEOUT seconds
        of chat silence; banned behaviors are filtered out.
        """
        if self.memory.task_stack_peek() or len(self.dialogue_manager.dialogue_stack) > 0:
            return
        # list of (prob, default function) pairs
        visible_defaults = [
            (0.001, default_behaviors.build_random_shape),
            (0.005, default_behaviors.come_to_player),
        ]
        # default behaviors of the agent not visible in the game
        invisible_defaults = []
        defaults = (
            visible_defaults + invisible_defaults
            if time.time() - self.last_chat_time > DEFAULT_BEHAVIOUR_TIMEOUT
            else invisible_defaults
        )
        defaults = [(p, f) for (p, f) in defaults if f not in self.memory.banned_default_behaviors]

        def noop(*args):
            pass

        defaults.append((1 - sum(p for p, _ in defaults), noop))  # noop with remaining prob
        # weighted random choice of functions
        p, fns = zip(*defaults)
        fn = np.random.choice(fns, p=p)
        if fn != noop:
            logging.info("Default behavior: {}".format(fn))
        fn(self)
def dialogue_step(self):
    """Parse incoming chats and forward the first one to the dialogue manager.

    Chats are expected in the form "<speaker> message"; malformed chats and
    slash-commands are skipped. Accepted chats are recorded in memory. When
    no chat arrived, the manager is stepped with an empty (None, "") pair.
    """
    raw_incoming_chats = self.get_incoming_chats()
    if raw_incoming_chats:
        # refresh perception so chat interpretation sees current objects
        self.get_perception(force=True)
    parsed_chats = []
    for raw_chat in raw_incoming_chats:
        match = re.search("^<([^>]+)> (.*)", raw_chat)
        if match is None:
            logging.info("Ignoring chat: {}".format(raw_chat))
            continue
        speaker, chat = match.group(1), match.group(2)
        logging.info("Incoming chat: ['{}' -> {}]".format(hash_user(speaker), chat))
        # slash-commands are game commands, not dialogue
        if chat.startswith("/"):
            continue
        parsed_chats.append((speaker, chat))
        self.memory.add_chat(self.memory.get_player_by_name(speaker).memid, chat)
    if parsed_chats:
        self.last_chat_time = time.time()
        # for now just process the first incoming chat
        self.dialogue_manager.step(parsed_chats[0])
    else:
        self.dialogue_manager.step((None, ""))
# TODO reset all blocks in point area to what they
# were before the point action no matter what
# so e.g. player construction in pointing area during point
# is reverted
def safe_get_changed_blocks(self):
    """Return changed blocks, excluding any inside a pending point target.

    Each point target is a box [x1, y1, z1, x2, y2, z2] (mins then maxes, see
    point_at); blocks changed inside any such box are dropped because the
    flashing point animation makes them unreliable. Clears point_targets.

    Returns:
        list of ((x, y, z), idm) block tuples safe to report.
    """
    blocks = self.get_changed_blocks()
    safe_blocks = []
    if len(self.point_targets) > 0:
        # BUG FIX: the original looped over targets on the outside and
        # appended per-target, duplicating blocks when there were multiple
        # targets; it also required a block to be outside the box on ALL
        # three axes to count as safe. A block is inside a box only when it
        # is within range on every axis, so test that directly.
        for b in blocks:
            x, y, z = b[0]
            inside_any = any(
                pt[0] <= x <= pt[3] and pt[1] <= y <= pt[4] and pt[2] <= z <= pt[5]
                for pt in self.point_targets
            )
            if not inside_any:
                safe_blocks.append(b)
    else:
        safe_blocks = blocks
    self.point_targets = []
    return safe_blocks
def point_at(self, target):
    """Bot pointing.

    Sends the in-game "/point" chat command for the given box and records
    the box so safe_get_changed_blocks can ignore changes inside it.

    Args:
        target: list of x1 y1 z1 x2 y2 z2, where:
                x1 <= x2,
                y1 <= y2,
                z1 <= z2.
    """
    assert len(target) == 6
    self.send_chat("/point {} {} {} {} {} {}".format(*target))
    # remember the pointed region; consumed/cleared by safe_get_changed_blocks
    self.point_targets.append(target)
    # sleep before the bot can take any actions
    # otherwise there might be bugs since the object is flashing
    time.sleep(4)
def _send_chat(self, chat: str):
    """Log the outgoing chat, store it in memory as the agent's own utterance,
    and send it through the C++ client binding."""
    logging.info("Sending chat: {}".format(chat))
    self.memory.add_chat(self.memory.self_memid, chat)
    return self._cpp_send_chat(chat)
if __name__ == "__main__":
    import argparse

    # Command-line options for launching the agent.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--semseg_model_path", type=str, help="path to semantic segmentation model"
    )
    parser.add_argument("--gpu_id", type=int, default=-1, help="GPU id (-1 for cpu)")
    parser.add_argument("--ttad_model_path", help="path to listener model")
    parser.add_argument("--geoscorer_model_path", help="path to geoscorer model")
    parser.add_argument("--draw_vis", action="store_true", help="use visdom to draw agent vision")
    parser.add_argument(
        "--no_default_behavior",
        action="store_true",
        help="do not perform default behaviors when idle",
    )
    parser.add_argument("--name", help="Agent login name")
    parser.add_argument("--verbose", "-v", action="store_true", help="Debug logging")
    args = parser.parse_args()

    # mirror log records to stdout at the requested level
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG if args.verbose else logging.INFO)
    stream_handler.setFormatter(log_formatter)
    logging.getLogger().addHandler(stream_handler)
    logging.info("Info logging")
    logging.debug("Debug logging")

    vis_draw_fn = None
    if args.draw_vis:
        import train_cnn

        vis_draw_fn = train_cnn.draw_img

    set_start_method("spawn", force=True)
    agent = CraftAssistAgent(
        ttad_model_path=args.ttad_model_path,
        semseg_model_path=args.semseg_model_path,
        voxel_model_gpu_id=args.gpu_id,
        draw_fn=vis_draw_fn,
        no_default_behavior=args.no_default_behavior,
        name=args.name,
        geoscorer_model_path=args.geoscorer_model_path,
    )
    agent.start()
|
from typing import Any
def Main(a: Any, b: Any) -> bool:
    """Return True when *a* is an instance of the class (or class tuple) *b*."""
    result = isinstance(a, b)
    return result
|
import sys
from django.core.management.base import BaseCommand
from github import GithubException
from utils.gh import get_github_organization, get_github_repo
class Command(BaseCommand):
    """Management command that prints the GitHub projects of an org or a repo."""

    help = "List projects associated with an organization"

    def add_arguments(self, parser):
        # Single positional argument: either "org" or "org/repo".
        parser.add_argument(
            "org_or_repo",
            type=str,
            help="Use the org or org/repo format for fetching related projects",
        )

    def handle(self, *args, **options):
        org_or_repo = options["org_or_repo"]
        is_repo = "/" in org_or_repo
        try:
            fetch = get_github_repo if is_repo else get_github_organization
            gh_obj = fetch(org_or_repo)
            projects = list(gh_obj.get_projects())
            if not projects:
                self.stdout.write(
                    self.style.WARNING(f"No projects found for {org_or_repo}")
                )
                sys.exit(0)
            self.stdout.write(self.style.SUCCESS("Found github projects data"))
            for project in projects:
                self.stdout.write(f"{project.id} -- {project.name}")
        except GithubException as err:
            self.stdout.write(
                self.style.ERROR(f"Error fetching data from github - {err}")
            )
|
import numpy as np
from keras.models import Sequential, load_model
from keras.layers import Input, Dropout, Flatten, Conv2D, MaxPooling2D, Dense, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import TensorBoard
import itertools
# all images will be converted to this size
ROWS = 256
COLS = 256
CHANNELS = 3
# Augment only the training set (horizontal flips + up-to-45-degree rotations);
# both sets are rescaled to [0, 1].
train_image_generator = ImageDataGenerator(horizontal_flip=True, rescale=1./255, rotation_range=45)
test_image_generator = ImageDataGenerator(horizontal_flip=False, rescale=1./255, rotation_range=0)
# assumes ./train and ./test each contain one sub-directory per class -- TODO confirm
train_generator = train_image_generator.flow_from_directory('train', target_size=(ROWS, COLS), class_mode='categorical')
test_generator = test_image_generator.flow_from_directory('test', target_size=(ROWS, COLS), class_mode='categorical')
train_generator.reset()
test_generator.reset()
# Small VGG-style network: two conv blocks (2x Conv-ReLU + 4x4 max-pool each),
# then two fully-connected layers with dropout.
model = Sequential()
model.add(Conv2D(64, (3,3), input_shape=(ROWS, COLS, CHANNELS)))
model.add(Activation('relu'))
model.add(Conv2D(64, (3,3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(4,4)))
model.add(Conv2D(64, (3,3)))
model.add(Activation('relu'))
model.add(Conv2D(64, (3,3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(4,4)))
model.add(Flatten())
model.add(Dropout(0.5))
model.add(Dense(400))
model.add(Activation('relu'))
model.add(Dropout(0.5))
# NOTE(review): the final Dense(200) + softmax implies exactly 200 classes;
# confirm this matches the number of class sub-directories in ./train.
model.add(Dense(200))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adamax', metrics=['accuracy'])
model.summary()
# Training curves are written for TensorBoard under ./logs/custom.
tensorboard = TensorBoard(log_dir='./logs/custom')
model.fit_generator(train_generator, steps_per_epoch=512, epochs=10, callbacks=[tensorboard], verbose=2)
print(model.evaluate_generator(test_generator, steps=1000))
|
import botfw
# Smoke-test of trading through the Binance futures wrapper on the BTC/USDT pair.
# NOTE(review): test_trade presumably talks to the live exchange -- confirm
# credentials/sandbox configuration before running.
botfw.test_trade(botfw.BinanceFuture.Trade('BTC/USDT'))
|
import os
import numpy as np
import toml
from .workspace import get_dirs, create_series_dir
from .load_dicom import load_scan_from_dicom
def import_dicom(
    path: str, root_dir: str, id: str, num: int, ww: int, wc: int, crop_x, crop_y, replace=False
):
    """Imports dicom as a numpy matrix in the project folder

    Arguments
    ---------
    path : str
        path to the dicom file
    root_dir : str
        path to the root of the project
    id : str
        name of the case (should be a number followed by an underscore)
    replace : bool
        if True, then even if the file exists, if is replaced

    Returns
    -------
    True if the saved successfully, False if the file already exists
    """
    series_dirs = create_series_dir(root_dir, id)
    out_path = os.path.join(series_dirs[0], str(num))
    # Skip work when the compressed archive is already present (unless replacing).
    if os.path.isfile(out_path + ".npz") and not replace:
        return False
    pixels, spacing, thickness, location = load_scan_from_dicom(path)
    np.savez_compressed(
        out_path,
        matrix=pixels,
        spacing=np.array(spacing),
        windowing=np.array([ww, wc]),
        crop_x=crop_x,
        crop_y=crop_y,
        slice_info=np.array([thickness, location]),
    )
    return True
def add_to_dataset(paths: list, group_name: str, root_dir: str):
    """Add the basenames of *paths* to the dataset TOML index.

    Maintains root_dir/dataset.toml with a global "files" list and per-group
    lists under "groups". Entries are deduplicated via sets and written back
    as sorted lists.

    Args:
        paths: file paths; only their basenames are recorded.
        group_name: group to register the files under (created if missing).
        root_dir: project root containing dataset.toml.
    """
    filename = os.path.join(root_dir, "dataset.toml")
    data = {"files": set(), "groups": {group_name: set()}}
    if os.path.isfile(filename):
        data = toml.load(filename)
        if "files" not in data:
            data["files"] = set()
        if "groups" not in data:
            data["groups"] = {}
        if group_name not in data["groups"]:
            data["groups"][group_name] = set()
    # Sets give de-duplication while accumulating.
    data["groups"][group_name] = set(data["groups"][group_name])
    data["files"] = set(data["files"])
    for path in paths:
        base = os.path.basename(path)
        data["files"].add(base)
        data["groups"][group_name].add(base)
    # BUG FIX: TOML has no set type, so dumping the sets directly fails;
    # serialize as sorted lists (also makes output deterministic).
    data["files"] = sorted(data["files"])
    data["groups"][group_name] = sorted(data["groups"][group_name])
    # BUG FIX: use a context manager so the file handle is closed/flushed.
    with open(filename, "w") as fh:
        toml.dump(data, fh)
|
import pytest
@pytest.fixture(scope="session")
def test_fixture1():
    """Session-scoped fixture: the body executes once per test session and
    every requesting test receives the cached value 1."""
    print("Run once")
    return 1
|
import sys
def find_rightmost_char(line):
    """Return the index of the rightmost occurrence of the target char.

    *line* has the form "phrase,target". Splitting on the LAST comma lets
    the phrase itself contain commas (the original split(',') raised
    ValueError on such lines).

    Returns:
        int index of the rightmost occurrence in phrase, or None when the
        target does not occur (preserves the original implicit-None contract).
    """
    phrase, target = line.strip().rsplit(',', 1)
    if not target:
        # original char-by-char comparison could never match an empty target
        return None
    index = phrase.rfind(target)
    return index if index >= 0 else None
# Read "phrase,target" lines from the file named on the command line and
# print the rightmost index of the target character for each line.
with open(sys.argv[1]) as input_file:
    for line in input_file.readlines():
        print(find_rightmost_char(line))
|
from typing import Any, Dict, List
import hashlib
import re
def extract_fragments(items: List[Dict[str, Any]], res: List[Any]) -> None:
    """Append [repo_full_name, object_url, sha1(fragment)] rows to *res*.

    Args:
        items: GitHub code-search result items; each item carries a
            "repository" dict (with "full_name") and a "text_matches" list
            whose entries have "object_url" and "fragment" keys.
        res: output list, mutated in place.

    Note: the original annotated *items* as Dict[Any, Any], but the body
    iterates it as a sequence of dicts — annotation corrected to List.
    """
    for item in items:
        repository_full_name = item.get("repository").get("full_name")
        text_matches = item.get("text_matches")
        for text_match in text_matches:
            object_url = text_match["object_url"]
            fragment = text_match["fragment"]
            # hash the fragment so equal snippets dedupe/compare cheaply
            fragment_hash = hashlib.sha1(fragment.encode("utf-8")).hexdigest()
            res.append([repository_full_name, object_url, fragment_hash])
def has_next_link(response_headers: Dict[Any, Any]) -> bool:
    """Return True when the "Link" header advertises a rel="next" page.

    Args:
        response_headers: HTTP response headers (GitHub pagination style).

    Returns:
        True if a '<url>; rel="next"' entry is present, else False (also
        when the "Link" header is absent).
    """
    link = response_headers.get("Link")
    if link is None:
        return False
    return re.search(r'<(.*)>; rel="next"', link) is not None
|
import pytest
from pytest import raises
from artemis.general.mymath import (softmax, cummean, cumvar, sigm, expected_sigm_of_norm, mode, cummode, normalize,
is_parallel,
align_curves, angle_between, fixed_diff, decaying_cumsum, geosum, selective_sum,
conv_fanout, conv2_fanout_map, proportional_random_assignment, clip_to_sum)
import numpy as np
from six.moves import xrange
__author__ = 'peter'
def test_softmax():
    """softmax along an axis yields values in (0, 1) that sum to 1."""
    x = np.random.randn(3, 4, 5)
    s = softmax(x, axis=1)
    assert s.shape==(3, 4, 5) and (s>0).all() and (s<1).all() and np.allclose(np.sum(s, axis=1), 1)
def test_cummean():
    """cummean[..., k] equals the mean of the first k+1 elements."""
    arr = np.random.randn(3, 4)
    cum_arr = cummean(arr, axis = 1)
    assert np.allclose(cum_arr[:, 0], arr[:, 0])
    assert np.allclose(cum_arr[:, 1], np.mean(arr[:, :2], axis = 1))
    assert np.allclose(cum_arr[:, 2], np.mean(arr[:, :3], axis = 1))
def test_cumvar():
    """cumvar with sample=False matches np.var over growing prefixes."""
    arr = np.random.randn(3, 4)
    cum_arr = cumvar(arr, axis = 1, sample = False)
    assert np.allclose(cum_arr[:, 0], 0)
    assert np.allclose(cum_arr[:, 1], np.var(arr[:, :2], axis = 1))
    assert np.allclose(cum_arr[:, 2], np.var(arr[:, :3], axis = 1))
def test_exp_sig_of_norm():
    """Analytic approximations of E[sigm(N(mean, std))] stay within 2% of a sample mean."""
    mean = 1
    std = 0.8
    n_points = 1000
    seed = 1234
    inputs = np.random.RandomState(seed).normal(mean, std, size = n_points)
    vals = sigm(inputs)
    sample_mean = np.mean(vals)
    for method in ('maclauren-2', 'maclauren-3', 'probit'):
        approx_true_mean = expected_sigm_of_norm(mean, std, method = method)
        approx_sample_mean = expected_sigm_of_norm(np.mean(inputs), np.std(inputs), method = method)
        true_error = np.abs(approx_true_mean-sample_mean)/sample_mean
        sample_error = np.abs(approx_sample_mean-sample_mean)/sample_mean
        print('Error for %s: %.4f True, %.4f Sample.' % (method, true_error, sample_error))
        assert true_error < 0.02, 'Method %s did pretty bad' % (method, )
def test_mode():
    """mode() returns the majority element along the requested axis."""
    arr = np.random.RandomState(0).randint(low=0, high=2, size=(3, 5, 7))
    m0 = mode(arr, axis = 0)
    assert m0.shape == (5, 7)
    assert np.all(np.sum(m0[None, :, :] == arr, axis = 0) > np.sum(m0[None, :, :] != arr, axis = 0))
    m1 = mode(arr, axis = 1)
    assert m1.shape == (3, 7)
    assert np.all(np.sum(m1[:, None, :] == arr, axis = 1) > np.sum(m1[:, None, :] != arr, axis = 1))
    m2 = mode(arr, axis = 2)
    assert m2.shape == (3, 5)
@pytest.mark.skipif(True, reason='Requires scipy weave, which does not install reliably.')
def test_cummode():
    """cummode: at every step the running mode's count beats each other class's count."""
    arr = np.random.RandomState(0).randint(low=0, high=3, size=(5, 7))
    m = cummode(arr, axis = 1)  # (n_samples, n_events)
    assert m.shape == arr.shape
    uniques = np.unique(arr)
    for j in xrange(arr.shape[1]):
        n_elements_of_mode_class = np.sum(arr[:, :j+1] == m[:, j][:, None], axis = 1)  # (n_samples, )
        for k, u in enumerate(uniques):
            n_elements_of_this_class = np.sum(arr[:, :j+1] == u, axis = 1)  # (n_samples, )
            assert np.all(n_elements_of_mode_class >= n_elements_of_this_class)
@pytest.mark.skipif(True, reason='Requires scipy weave, which does not install reliably.')
def test_cummode_weighted():
    """Weighted cummode: the running mode carries the largest cumulative weight."""
    arr = np.random.RandomState(0).randint(low=0, high=3, size=(5, 7))
    w = np.random.rand(5, 7)
    m = cummode(arr, weights=w, axis = 1)  # (n_samples, n_events)
    assert m.shape == arr.shape
    uniques = np.unique(arr)
    for j in xrange(arr.shape[1]):
        bool_ixs_of_mode_class = arr[:, :j+1] == m[:, j][:, None]  # (n_samples, j+1)
        weights_of_mode_class = np.sum(w[:, :j+1]*bool_ixs_of_mode_class, axis = 1)  # (n_samples, )
        for k, u in enumerate(uniques):
            bool_ixs_of_this_class = arr[:, :j+1] == u  # (n_samples, j+1)
            weights_of_this_class = np.sum(w[:, :j+1]*bool_ixs_of_this_class, axis = 1)  # (n_samples, )
            assert np.all(weights_of_mode_class >= weights_of_this_class)
def test_normalize():
    """normalize() produces unit L1/L2 rows; avoid_nans handles all-zero rows."""
    # L1 - positive values
    arr = np.random.rand(5, 4)
    norm_arr = normalize(arr, degree=1, axis = 1)
    assert np.allclose(norm_arr.sum(axis=1), 1)
    # L1 - with negative values
    arr = np.random.randn(5, 4)
    norm_arr = normalize(arr, degree=1, axis = 1)
    assert np.allclose(np.abs(norm_arr).sum(axis=1), 1)
    # L2
    arr = np.random.randn(5, 4)
    norm_arr = normalize(arr, degree=2, axis = 1)
    assert np.allclose(np.sqrt((norm_arr**2).sum(axis=1)), 1)
    # L1 - zero row with nan handling
    arr = np.random.rand(5, 4)
    arr[2, :] = 0
    norm_arr = normalize(arr, degree=1, axis = 1)
    assert np.all(np.isnan(norm_arr[2, :]))
    # L1 - zero row with nan handling
    arr = np.random.rand(5, 4)
    arr[2, :] = 0
    norm_arr = normalize(arr, degree=1, axis = 1, avoid_nans=True)
    assert np.allclose(np.abs(norm_arr).sum(axis=1), 1)
    assert np.allclose(norm_arr[2, :], 1./arr.shape[1])
    # L2 - zero row with nan handling
    arr = np.random.rand(5, 4)
    arr[2, :] = 0
    norm_arr = normalize(arr, degree=2, axis = 1, avoid_nans=True)
    assert np.allclose(np.sqrt((norm_arr**2).sum(axis=1)), 1)
    assert np.allclose(norm_arr[2, :], np.sqrt(1./arr.shape[1]))
def test_is_parallel():
    """is_parallel detects same-direction vectors within an angular tolerance."""
    assert is_parallel([1, 2], [2, 4])
    assert not is_parallel([1, 2], [2, 5])
    assert is_parallel([1, 2], [2, 5], angular_tolerance=0.5)
    # anti-parallel vectors do not count as parallel
    assert not is_parallel([1, 2], [-2, -4])
def test_align_curves():
    """align_curves resamples ragged (x, y) curves onto a shared n_bins grid."""
    n_curves = 30
    n_points = [np.random.randint(20) for _ in xrange(n_curves)]
    xs = [np.sort(np.random.rand(n)) for n in n_points]
    ys = [np.random.randn(n) for n in n_points]
    new_xs, new_ys = align_curves(xs=xs, ys=ys, n_bins=25, spacing='lin')
    assert new_xs.shape == (25, )
    assert new_ys.shape == (n_curves, 25)
    new_xs, new_ys = align_curves(xs=xs, ys=ys, n_bins=25, spacing='log')
    assert new_xs.shape == (25, )
    assert new_ys.shape == (n_curves, 25)
def test_angle_between():
    """angle_between returns pairwise angles (optionally in degrees)."""
    a = np.array([[0, 1], [1, 1], [1, 0]])
    b = np.array([[1, 0], [1, 0], [1, 0]])
    angles = angle_between(a, b, in_degrees=True, axis=-1)
    assert np.allclose(angles, [90, 45, 0])
    assert np.allclose(angle_between([2, 1], [1, 0], in_degrees=True), np.arctan(1/2.)*180/np.pi)
def test_fixed_diff():
    """fixed_diff is np.diff with a pinned initial value, so cumsum inverts it."""
    a = np.random.randn(2, 3, 4)
    da = fixed_diff(a, axis=1, initial_value=0)
    assert np.array_equal(da[:, 1:, :], np.diff(a, axis=1))
    assert np.array_equal(da[:, 0, :], a[:, 0, :])
    da = fixed_diff(a, axis=1, initial_value='first')
    assert np.array_equal(da[:, 1:, :], np.diff(a, axis=1))
    assert np.array_equal(da[:, 0, :], np.zeros_like(da[:, 0, :]))
    assert np.allclose(a, np.cumsum(fixed_diff(a, axis=1), axis=1))
def test_decaying_cumsum():
    """decaying_cumsum with memory=0 is identity; otherwise exponentially weighted."""
    a = np.random.randn(2, 3, 4)
    ca = decaying_cumsum(a, axis=1, memory=0)
    assert np.array_equal(ca, a)
    ca = decaying_cumsum(a, axis=1, memory=.6)
    assert np.allclose(ca[:, 2, :], 0.4*(0.6**2*a[:, 0, :] + 0.6**1*a[:, 1, :] + a[:, 2, :]))
def test_geosum():
    """geosum sums rate**t for t in [t_start, t_end]."""
    assert geosum(0.5, t_end=4, t_start=2) == 0.5**2 + 0.5**3 + 0.5**4 == 0.4375
    assert geosum(1, t_end=4, t_start=2) == 1**2+1**3+1**4 == 3
def test_selective_sum():
    """selective_sum adds the cross of selected rows/columns (empty = all)."""
    a = np.arange(16).reshape(4, 4)
    assert selective_sum(a, [(1, 3), 2]) == 4 + 5 + 6 + 7 + 12 + 13 + 14 + 15 + 2 + 10 == 88
    assert selective_sum(a, [(1, 3), (2, )]) == 4 + 5 + 6 + 7 + 12 + 13 + 14 + 15 + 2 + 10 == 88
    assert selective_sum(a, [(1, 3), ()]) == a[[1,3], :].sum()
def test_fanout_map():
    """conv_fanout counts how many kernel positions touch each input cell."""
    m = conv_fanout(input_len=5, kernel_len=3, conv_mode='same')
    assert np.array_equal(m, [2, 3, 3, 3, 2])
    m = conv_fanout(input_len=5, kernel_len=3, conv_mode=1)
    assert np.array_equal(m, [2, 3, 3, 3, 2])
    m = conv_fanout(input_len=5, kernel_len=2, conv_mode='same')
    assert np.array_equal(m, [2, 2, 2, 2, 1])
    m = conv_fanout(input_len=5, kernel_len=1, conv_mode='same')
    assert np.array_equal(m, [1, 1, 1, 1, 1])
    m = conv_fanout(input_len=5, kernel_len=3, conv_mode='valid')
    assert np.array_equal(m, [1, 2, 3, 2, 1])
    m = conv_fanout(input_len=5, kernel_len=2, conv_mode='valid')
    assert np.array_equal(m, [1, 2, 2, 2, 1])
    m = conv_fanout(input_len=5, kernel_len=1, conv_mode='valid')
    assert np.array_equal(m, [1, 1, 1, 1, 1])
    m = conv_fanout(input_len=6, kernel_len=4, conv_mode=1)
    assert np.array_equal(m, [2, 3, 4, 4, 3, 2])
def test_conv2_fanout_map():
    """2-D fanout is the outer product of the 1-D fanouts."""
    m = conv2_fanout_map(input_shape=(5, 5), kernel_shape=(3, 3), conv_mode='same')
    assert np.array_equal(m, np.array([
        [4, 6, 6, 6, 4],
        [6, 9, 9, 9, 6],
        [6, 9, 9, 9, 6],
        [6, 9, 9, 9, 6],
        [4, 6, 6, 6, 4],
        ]))
def test_proportional_random_assignment():
    """Assignments respect the requested proportions; invalid splits raise."""
    ass = proportional_random_assignment(10, split=.7, rng=1234)
    assert len(ass)==10
    assert sum(ass==0)==7
    assert sum(ass==1)==3
    ass = proportional_random_assignment(33, split=[.7, .2], rng=1234)
    assert len(ass)==33
    assert sum(ass==0)==23
    assert sum(ass==1)==7
    assert sum(ass==2)==3
    ass = proportional_random_assignment(33, split=[.7, .2, .1], rng=1234)
    assert len(ass)==33
    assert sum(ass==0)==23
    assert sum(ass==1)==7
    assert sum(ass==2)==3
    with raises(AssertionError):
        # a split > 1 is rejected
        ass = proportional_random_assignment(33, split=20., rng=1234)
def test_clip_to_sum():
    """clip_to_sum caps elements so the total does not exceed the budget."""
    # Test snatched from Divakar: https://stackoverflow.com/a/47043362/851699
    assert np.array_equal(clip_to_sum([0, 10, 20, 0], 25), [0, 10, 15, 0])
    assert np.array_equal(clip_to_sum([1,4,8,3], 10), [1,3,3,3])
    assert np.array_equal(clip_to_sum([1,4,8,3], 11), [1,3,4,3])
    assert np.array_equal(clip_to_sum([1,4,8,3], 12), [1,4,4,3])
    assert np.array_equal(clip_to_sum([1,4,8,3], 14), [1,4,6,3])
    assert np.array_equal(clip_to_sum([1,4,8,3], 16), [1,4,8,3])
    assert np.array_equal(clip_to_sum([1,4,8,3], 20), [1,4,8,3])
if __name__ == '__main__':
    # NOTE(review): running this file directly also calls the two cummode
    # tests that pytest skips (scipy weave dependency) -- confirm intended.
    test_decaying_cumsum()
    test_fixed_diff()
    test_angle_between()
    test_align_curves()
    test_is_parallel()
    test_normalize()
    test_cummode_weighted()
    test_cummode()
    test_mode()
    test_exp_sig_of_norm()
    test_cumvar()
    test_cummean()
    test_softmax()
    test_geosum()
    test_selective_sum()
    test_fanout_map()
    test_conv2_fanout_map()
    test_proportional_random_assignment()
    test_clip_to_sum()
from dotenv import load_dotenv
# Load variables from a ".env" file found by searching upward from the cwd.
load_dotenv()
# Same, but verbose=True logs a warning when no .env file is found.
load_dotenv(verbose=True)
from pathlib import Path
env_path = Path('.') / '.env'
# Load from an explicitly given .env path instead of searching.
load_dotenv(dotenv_path=env_path)
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os, sys
from git import Repo
import json
import requests
import base64
if sys.version_info[0] == 2:
from Tkinter import *
from tkFont import Font
from ttk import *
#Usage:showinfo/warning/error,askquestion/okcancel/yesno/retrycancel
from tkMessageBox import *
#Usage:f=tkFileDialog.askopenfilename(initialdir='E:/Python')
#import tkFileDialog
#import tkSimpleDialog
else: #Python 3.x
from tkinter import *
from tkinter.font import Font
from tkinter.ttk import *
from tkinter.messagebox import *
#import tkinter.filedialog as tkFileDialog
#import tkinter.simpledialog as tkSimpleDialog #askstring()
class Application_ui(Frame):
    """Tk frame that builds all widgets for the automation-tools window.

    This class only generates the UI; the concrete event-handler code lives
    in the subclass Application (Command*_Cmd methods are defined there).
    """
    def __init__(self, master=None):
        Frame.__init__(self, master)
        self.master.title('自动化程序集合')
        self.master.geometry('1369x804')
        self.createWidgets()
    def createWidgets(self):
        """Create all widgets. Each widget gets lambda-based setText/text
        accessors that proxy its backing StringVar; positions use relative
        place() geometry."""
        self.top = self.winfo_toplevel()
        self.style = Style()
        # Window-control style buttons along the top edge: '|||', '-', 'O', 'X'.
        self.Command8Var = StringVar(value='|||')
        self.style.configure('TCommand8.TButton', font=('宋体',9))
        self.Command8 = Button(self.top, text='|||', textvariable=self.Command8Var, command=self.Command8_Cmd, style='TCommand8.TButton')
        self.Command8.setText = lambda x: self.Command8Var.set(x)
        self.Command8.text = lambda : self.Command8Var.get()
        self.Command8.place(relx=0.941, rely=0.03, relwidth=0.03, relheight=0.051)
        self.Command7Var = StringVar(value='-')
        self.style.configure('TCommand7.TButton', font=('宋体',9))
        self.Command7 = Button(self.top, text='-', textvariable=self.Command7Var, command=self.Command7_Cmd, style='TCommand7.TButton')
        self.Command7.setText = lambda x: self.Command7Var.set(x)
        self.Command7.text = lambda : self.Command7Var.get()
        self.Command7.place(relx=0.906, rely=0.03, relwidth=0.03, relheight=0.051)
        self.Command6Var = StringVar(value='O')
        self.style.configure('TCommand6.TButton', font=('宋体',9))
        self.Command6 = Button(self.top, text='O', textvariable=self.Command6Var, command=self.Command6_Cmd, style='TCommand6.TButton')
        self.Command6.setText = lambda x: self.Command6Var.set(x)
        self.Command6.text = lambda : self.Command6Var.get()
        self.Command6.place(relx=0.871, rely=0.03, relwidth=0.03, relheight=0.051)
        self.Command5Var = StringVar(value='X')
        self.style.configure('TCommand5.TButton', font=('宋体',9))
        self.Command5 = Button(self.top, text='X', textvariable=self.Command5Var, command=self.Command5_Cmd, style='TCommand5.TButton')
        self.Command5.setText = lambda x: self.Command5Var.set(x)
        self.Command5.text = lambda : self.Command5Var.get()
        self.Command5.place(relx=0.836, rely=0.03, relwidth=0.03, relheight=0.051)
        # Command-entry row at the bottom of the window.
        self.CommandTextVar = StringVar(value='')
        self.CommandText = Entry(self.top, textvariable=self.CommandTextVar, font=('宋体',9))
        self.CommandText.setText = lambda x: self.CommandTextVar.set(x)
        self.CommandText.text = lambda : self.CommandTextVar.get()
        self.CommandText.place(relx=0.596, rely=0.945, relwidth=0.398, relheight=0.041)
        self.Command4Var = StringVar(value='Command4')
        self.style.configure('TCommand4.TButton', font=('宋体',9))
        self.Command4 = Button(self.top, text='Command4', textvariable=self.Command4Var, command=self.Command4_Cmd, style='TCommand4.TButton')
        self.Command4.setText = lambda x: self.Command4Var.set(x)
        self.Command4.text = lambda : self.Command4Var.get()
        self.Command4.place(relx=0.503, rely=0.945, relwidth=0.088, relheight=0.041)
        # Four labeled frames partition the window: Info (log output),
        # TaskList, Control (action buttons) and User (credentials).
        self.style.configure('TInfoFrame.TLabelframe', font=('宋体',9))
        self.style.configure('TInfoFrame.TLabelframe.Label', font=('宋体',9))
        self.InfoFrame = LabelFrame(self.top, text='Info', style='TInfoFrame.TLabelframe')
        self.InfoFrame.place(relx=0.596, rely=0.1, relwidth=0.398, relheight=0.827)
        self.style.configure('TTaskFrame.TLabelframe', font=('宋体',9))
        self.style.configure('TTaskFrame.TLabelframe.Label', font=('宋体',9))
        self.TaskFrame = LabelFrame(self.top, text='TaskList', style='TTaskFrame.TLabelframe')
        self.TaskFrame.place(relx=0.216, rely=0.109, relwidth=0.375, relheight=0.817)
        self.style.configure('TControlPannelFrame.TLabelframe', font=('宋体',9))
        self.style.configure('TControlPannelFrame.TLabelframe.Label', font=('宋体',9))
        self.ControlPannelFrame = LabelFrame(self.top, text='Control', style='TControlPannelFrame.TLabelframe')
        self.ControlPannelFrame.place(relx=0.012, rely=0.259, relwidth=0.194, relheight=0.668)
        self.style.configure('TUserFrame.TLabelframe', font=('宋体',9))
        self.style.configure('TUserFrame.TLabelframe.Label', font=('宋体',9))
        self.UserFrame = LabelFrame(self.top, text='User', style='TUserFrame.TLabelframe')
        self.UserFrame.place(relx=0.012, rely=0.01, relwidth=0.194, relheight=0.24)
        # Log listbox inside the Info frame (colored rows, see Log* methods).
        self.LogListVar = StringVar(value='')
        self.LogListFont = Font(font=('宋体',9))
        self.LogList = Listbox(self.InfoFrame, listvariable=self.LogListVar, font=self.LogListFont)
        self.LogList.place(relx=0.029, rely=0.036, relwidth=0.941, relheight=0.944)
        # Control-panel buttons (handlers wired to Command10/9/3_Cmd).
        self.Command10Var = StringVar(value='Command1')
        self.style.configure('TCommand10.TButton', font=('宋体',9))
        self.Command10 = Button(self.ControlPannelFrame, text='Command1', textvariable=self.Command10Var, command=self.Command10_Cmd, style='TCommand10.TButton')
        self.Command10.setText = lambda x: self.Command10Var.set(x)
        self.Command10.text = lambda : self.Command10Var.get()
        self.Command10.place(relx=0.06, rely=0.194, relwidth=0.819, relheight=0.061)
        self.Command9Var = StringVar(value='Command1')
        self.style.configure('TCommand9.TButton', font=('宋体',9))
        self.Command9 = Button(self.ControlPannelFrame, text='Command1', textvariable=self.Command9Var, command=self.Command9_Cmd, style='TCommand9.TButton')
        self.Command9.setText = lambda x: self.Command9Var.set(x)
        self.Command9.text = lambda : self.Command9Var.get()
        self.Command9.place(relx=0.06, rely=0.119, relwidth=0.819, relheight=0.061)
        self.Command3Var = StringVar(value='Command1')
        self.style.configure('TCommand3.TButton', font=('宋体',9))
        self.Command3 = Button(self.ControlPannelFrame, text='Command1', textvariable=self.Command3Var, command=self.Command3_Cmd, style='TCommand3.TButton')
        self.Command3.setText = lambda x: self.Command3Var.set(x)
        self.Command3.text = lambda : self.Command3Var.get()
        self.Command3.place(relx=0.06, rely=0.045, relwidth=0.819, relheight=0.061)
        # Task listbox.
        self.TaskListVar = StringVar(value='')
        self.TaskListFont = Font(font=('宋体',9))
        self.TaskList = Listbox(self.TaskFrame, listvariable=self.TaskListVar, font=self.TaskListFont)
        self.TaskList.place(relx=0.031, rely=0.037, relwidth=0.938, relheight=0.773)
        # User frame: two labeled entry rows plus two buttons.
        self.Text3Var = StringVar(value='Text2')
        self.Text3 = Entry(self.UserFrame, textvariable=self.Text3Var, font=('宋体',9))
        self.Text3.setText = lambda x: self.Text3Var.set(x)
        self.Text3.text = lambda : self.Text3Var.get()
        self.Text3.place(relx=0.211, rely=0.332, relwidth=0.668, relheight=0.13)
        self.Text2Var = StringVar(value='Text2')
        self.Text2 = Entry(self.UserFrame, textvariable=self.Text2Var, font=('宋体',9))
        self.Text2.setText = lambda x: self.Text2Var.set(x)
        self.Text2.text = lambda : self.Text2Var.get()
        self.Text2.place(relx=0.211, rely=0.166, relwidth=0.668, relheight=0.13)
        self.Command2Var = StringVar(value='Command1')
        self.style.configure('TCommand2.TButton', font=('宋体',9))
        self.Command2 = Button(self.UserFrame, text='Command1', textvariable=self.Command2Var, command=self.Command2_Cmd, style='TCommand2.TButton')
        self.Command2.setText = lambda x: self.Command2Var.set(x)
        self.Command2.text = lambda : self.Command2Var.get()
        self.Command2.place(relx=0.543, rely=0.746, relwidth=0.366, relheight=0.171)
        self.Command1Var = StringVar(value='Command1')
        self.style.configure('TCommand1.TButton', font=('宋体',9))
        self.Command1 = Button(self.UserFrame, text='Command1', textvariable=self.Command1Var, command=self.Command1_Cmd, style='TCommand1.TButton')
        self.Command1.setText = lambda x: self.Command1Var.set(x)
        self.Command1.text = lambda : self.Command1Var.get()
        self.Command1.place(relx=0.06, rely=0.746, relwidth=0.366, relheight=0.171)
        self.Label2Var = StringVar(value='Label1')
        self.style.configure('TLabel2.TLabel', anchor='w', font=('宋体',9))
        self.Label2 = Label(self.UserFrame, text='Label1', textvariable=self.Label2Var, style='TLabel2.TLabel')
        self.Label2.setText = lambda x: self.Label2Var.set(x)
        self.Label2.text = lambda : self.Label2Var.get()
        self.Label2.place(relx=0.03, rely=0.332, relwidth=0.155, relheight=0.13)
        self.Label1Var = StringVar(value='Label1')
        self.style.configure('TLabel1.TLabel', anchor='w', font=('宋体',9))
        self.Label1 = Label(self.UserFrame, text='Label1', textvariable=self.Label1Var, style='TLabel1.TLabel')
        self.Label1.setText = lambda x: self.Label1Var.set(x)
        self.Label1.text = lambda : self.Label1Var.get()
        self.Label1.place(relx=0.03, rely=0.166, relwidth=0.155, relheight=0.13)
class Application(Application_ui):
#This class implements the concrete event-handler callbacks; the UI-building code lives in Application_ui.
def __init__(self, master=None):
    """Build the UI (via Application_ui) then initialize runtime data."""
    Application_ui.__init__(self, master)
    self.initData()
def Command8_Cmd(self, event=None):
    """'|||' button callback (placeholder)."""
    #TODO, Please finish the function here!
    pass
def Command7_Cmd(self, event=None):
    """'-' button callback (placeholder)."""
    #TODO, Please finish the function here!
    pass
def Command6_Cmd(self, event=None):
    """'O' button callback (placeholder)."""
    #TODO, Please finish the function here!
    pass
def Command5_Cmd(self, event=None):
    """'X' button callback (placeholder)."""
    #TODO, Please finish the function here!
    pass
def Command4_Cmd(self, event=None):
    """Bottom command-row button callback (placeholder)."""
    #TODO, Please finish the function here!
    pass
def Command10_Cmd(self, event=None):
    """Control-panel button: fetch SSR links."""
    #TODO, Please finish the function here!
    # NOTE(review): getSSRData is not defined anywhere in the visible code -- confirm.
    self.getSSRData()
    pass
def Command9_Cmd(self, event=None):
    """Control-panel button: fetch SS links."""
    #TODO, Please finish the function here!
    self.getSSData()
    pass
def Command3_Cmd(self, event=None):
    """Control-panel button: fetch V2ray links."""
    #TODO, Please finish the function here!
    self.getV2rayData()
    pass
def Command2_Cmd(self, event=None):
    """User-frame button callback (placeholder)."""
    #TODO, Please finish the function here!
    pass
def Command1_Cmd(self, event=None):
    """User-frame button callback (placeholder)."""
    #TODO, Please finish the function here!
    pass
def handler_adaptor(self, fun, **kwds):
    """Wrap *fun* so extra keyword arguments are bound into a Tk event callback."""
    return lambda event, fun=fun, kwds=kwds: fun(event, **kwds)
def handler(self, event, name, item, option, item_type, data, log):
    """Log a UI interaction both to the in-window log list and to stdout.

    The message text is user-facing and intentionally left unchanged.
    """
    self.Log("UI交互:控件["+name+"]类型["+item_type+"]操作["+
    option+"]数据["+str(data)+"]提示信息["+str(log)+"]")
    print("UI交互:控件["+name+"]类型["+item_type+"]操作["+
    option+"]数据["+str(data)+"]提示信息["+str(log)+"]")
def initData(self):
    """Record the directory the app was launched from and log it."""
    self.app_dir=os.path.dirname(os.path.realpath(sys.argv[0]))
    self.Log("App Run at:%s"%self.app_dir)
def Log(self,info):
    """Append a plain (grey) entry to the log listbox and scroll to it."""
    # if self.LoggerLevel<1999:
    # return
    _info = str(info)
    # if len(info) > 110:
    # _info = str(info)[0:50]+"..."+str(info)[len(info)-50:len(info)]
    self.LogList.insert(END, " "+str(_info))
    self.LogList.itemconfig(END, bg="#C3C3C3")
    self.LogList.see(END)
    # self.Log2File(info,None)
    pass
def LogSuccessful(self, info, event=None):
    """Append a success (green) entry to the log listbox."""
    # if self.LoggerLevel<9:
    # return
    _info = str(info)
    # if len(info) > 110:
    # _info = str(info)[0:50]+"..."+str(info)[len(info)-50:len(info)]
    self.LogList.insert(END, str(_info))
    self.LogList.itemconfig(END, bg="#00FF00")
    self.LogList.itemconfig(END, fg="#000000")
    self.LogList.see(END)
def LogWarning(self, info, event=None):
    """Append a warning (yellow) entry to the log listbox."""
    # if self.LoggerLevel<199:
    # return
    _info = str(info)
    # if len(info) > 110:
    # _info = str(info)[0:50]+"..."+str(info)[len(info)-50:len(info)]
    self.LogList.insert(END, " "+str(_info))
    self.LogList.itemconfig(END, bg="#FFFF00")
    self.LogList.itemconfig(END, fg="#000000")
    self.LogList.see(END)
    # self.Log2File(info,None)
def LogError(self, info, event=None):
    """Append an error (red) entry to the log listbox."""
    # if self.LoggerLevel<19:
    # return
    _info = str(info)
    # if len(info) > 100:
    # _info = str(info)[0:100]+"\n"+str(info)[len(info)-100:len(info)]
    # if len(info) > 200:
    # _info = str(info)[0:100]+"\n"+str(info)[100:200]+"\n"+str(info)[len(info)-200:len(info)]
    # if len(info) > 300:
    # _info = str(info)[0:100]+"\n"+str(info)[100:200]+"\n"+str(info)[200:300]+"\n"+str(info)[len(info)-300:len(info)]
    self.LogList.insert(END, str(_info))
    self.LogList.itemconfig(END, bg="#FF0000")
    self.LogList.itemconfig(END, fg="#FFFFFF")
    self.LogList.see(END)
    # self.Log2File(info,None)
    pass
def CheckPath(self, filename):
    """Return True when *filename* names an existing directory-like path.

    Rejects None, the empty string, paths shorter than 3 characters,
    non-existent paths, and paths that are regular files. Any unexpected
    exception is printed and treated as failure.
    """
    try:
        print(" check file>>> %s "% filename)
        _file = filename
        # BUG FIX (idiom): compare to None with "is", not "==".
        if _file is None:
            print(" file>>> %s >>>None"% filename)
            return False
        if _file == "":
            print(" file>>> %s >>>Empty"% filename)
            return False
        if len(str(_file)) < 3:
            print(" file>>> %s >>>Not a File"% filename)
            return False
        if not os.path.exists(_file):
            print(" file>>> %s >>>Not Exists"% filename)
            return False
        # a regular file is NOT acceptable here: only directory paths pass
        if os.path.isfile(filename):
            print(" file>>> %s >>>Is File"% filename)
            return False
        print(" path>>> %s >>>Success"% filename)
        return True
    except Exception as e:
        print(e)
        return False
def commit(self, gitPath):
    """Stage all changed and untracked files in the repo at *gitPath*,
    commit them, and push when the branch is ahead of its remote.

    Args:
        gitPath: path inside a git working tree (parents are searched).
    """
    self.Log("Commit Request:%s"%str(gitPath))
    print("Commit Request:%s"%str(gitPath))
    repo = Repo(gitPath, search_parent_directories=True)
    # BUG FIX: the original used self.repo / self.index, which are never
    # assigned anywhere (initData only sets app_dir) and would raise
    # AttributeError; use the local repo/index objects instead.
    self.Log("RepoStatus:%s"%repo.git.status())
    print("RepoStatus:%s"%repo.git.status())
    remote = repo.remote()
    index = repo.index
    changedFiles = [ item.a_path for item in repo.index.diff(None) ]
    self.Log("ChangedFiles:%s"%changedFiles)
    untracked_files = repo.untracked_files
    self.Log("UntrackedFiles:%s"%untracked_files)
    index.add(changedFiles)
    print("add files 2 master : %s"%changedFiles)
    self.LogSuccessful("modify added file:%s"%changedFiles)
    index.add(untracked_files)
    print("add untracked_files 2 master : %s"%untracked_files)
    self.LogSuccessful("added new file:%s"%untracked_files)
    if len(changedFiles)+len(untracked_files)>0:
        # NOTE(review): self.fileindex is not defined in visible code;
        # guarded with getattr so the commit does not crash -- confirm.
        fileindex = getattr(self, "fileindex", None)
        print("new commit:%s"%str(fileindex))
        commit_msg="a new version"
        if len(untracked_files)>0:
            commit_msg="new:%s,video current id:%s"%(str(untracked_files),str(fileindex))
        index.commit(commit_msg)
        self.LogSuccessful("commit:%s"%commit_msg)
    if "Your branch is ahead of" in repo.git.status():
        self.LogWarning("Push:%s"%repo.git.status())
        remote.push()
    if "Your branch is up to date" in repo.git.status():
        self.LogSuccessful("Your branch is up to date!:%s"%repo.git.status())
def getV2rayData(self):
self.Log("Start to check V2ray Links:")
c="CN,HK,TW"
speedlow="1"
speedhigh="300"
linktype="vmess"
defaulturl = "http://proxypoolsstest.herokuapp.com/clash/proxies?c=%s&speed=%s,%s&type=%s"%(c,speedlow,speedhigh,linktype)
defaultHeader = {
'upgrade-insecure-requests': "1",
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
'dnt': "1",
'accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
'accept-encoding': "gzip, deflate",
'accept-language': "zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7,ja;q=0.6",
'cache-control': "no-cache"
}
ajaxheaders = {
'upgrade-insecure-requests': "1",
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
'dnt': "1",
'accept-encoding': "gzip, deflate",
'x-requested-with': "XMLHttpRequest",
'accept-language': "zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7,ja;q=0.6",
'cache-control': "no-cache",
'accept': "application/json, text/javascript, */*; q=0.01",
}
webSession = requests.session()
responseRes = webSession.get(defaulturl, headers=ajaxheaders, verify=False)
print(responseRes.text)
datas =str(responseRes.text).split("- {")
datajson={}
datastr="{"
i=0
for data in datas:
if "}" in data:
datastr= datastr+"\""+str(i) +"\":{"+data+","
i+=1
continue
datastr=datastr[0:len(datastr)-1]
datastr+="}"
print(datastr)
datajson=json.loads(datastr)
print(datajson)
if responseRes.status_code != 200:
return []
def getSSData(self):
self.Log("Start to check V2ray Links:")
c="HK"
speedlow="15"
speedhigh="300"
linktype="ss"
defaulturl = "http://proxypoolsstest.herokuapp.com/clash/proxies?c=%s&speed=%s,%s&type=%s"%(c,speedlow,speedhigh,linktype)
defaultHeader = {
'upgrade-insecure-requests': "1",
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
'dnt': "1",
'accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
'accept-encoding': "gzip, deflate",
'accept-language': "zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7,ja;q=0.6",
'cache-control': "no-cache"
}
ajaxheaders = {
'upgrade-insecure-requests': "1",
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
'dnt': "1",
'accept-encoding': "gzip, deflate",
'x-requested-with': "XMLHttpRequest",
'accept-language': "zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7,ja;q=0.6",
'cache-control': "no-cache",
'accept': "application/json, text/javascript, */*; q=0.01",
}
webSession = requests.session()
responseRes = webSession.get(defaulturl, headers=ajaxheaders, verify=False)
print(responseRes.text)
datas =str(responseRes.text).split("- {")
datajson={}
datastr="{"
i=0
for data in datas:
if "}" in data:
datastr= datastr+"\""+str(i) +"\":{"+data+","
i+=1
continue
datastr=datastr[0:len(datastr)-1]
datastr+="}"
print(datastr)
datajson=json.loads(datastr)
print(datajson)
for ss in datajson:
resault='ss://'+encodeBase64(str(datajson[str(ss)]['cipher'])+':'+ str(datajson[str(ss)]['password']) +'@'+str(datajson[str(ss)]['server']) +':'+str(datajson[str(ss)]['port']) )
print(resault)
# data_=data
# data_['protocol']='origin'
# data_['obfs']='plain'
# data_['password_base64']=encodeBase64(data['password'])
# # obfsparam=obfsparam_base64&protoparam=protoparam_base64&remarks=remarks_base64&group=group_base64
# tmpdata=data_['server']+':'+data_['port']+':'+data_['protocol']+':'+data_['method']+':'+data_['obfs']+':'+data_['password_base64']+'/?'+encodeBase64("obfsparam=&protoparam=&remarks=&group="+encodeBase64("AAAA"))
# resault='ssr://'+encodeBase64(tmpdata)
if responseRes.status_code != 200:
return []
def getSSRData(self):
self.Log("Start to check V2ray Links:")
c="HK"
speedlow="15"
speedhigh="300"
linktype="ssr"
defaulturl = "http://proxypoolsstest.herokuapp.com/clash/proxies?c=%s&speed=%s,%s&type=%s"%(c,speedlow,speedhigh,linktype)
defaultHeader = {
'upgrade-insecure-requests': "1",
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
'dnt': "1",
'accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
'accept-encoding': "gzip, deflate",
'accept-language': "zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7,ja;q=0.6",
'cache-control': "no-cache"
}
ajaxheaders = {
'upgrade-insecure-requests': "1",
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
'dnt': "1",
'accept-encoding': "gzip, deflate",
'x-requested-with': "XMLHttpRequest",
'accept-language': "zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7,ja;q=0.6",
'cache-control': "no-cache",
'accept': "application/json, text/javascript, */*; q=0.01",
}
webSession = requests.session()
responseRes = webSession.get(defaulturl, headers=ajaxheaders, verify=False)
print(responseRes.text)
datas =str(responseRes.text).split("- {")
datajson={}
datastr="{"
i=0
for data in datas:
if "}" in data:
datastr= datastr+"\""+str(i) +"\":{"+data+","
i+=1
continue
datastr=datastr[0:len(datastr)-1]
datastr+="}"
print(datastr)
datastr=datastr.replace(" ","")
datajson=json.loads(datastr)
print(datajson)
for ssr in datajson:
# resault='ss://'+encodeBase64(str(datajson[str(ss)]['cipher'])+':'+ str(datajson[str(ss)]['password']) +'@'+str(datajson[str(ss)]['server']) +':'+str(datajson[str(ss)]['port']) )
data_=datajson[str(ssr)]
data_['password_base64']=encodeBase64(str(data_['password']))
# obfsparam=obfsparam_base64&protoparam=protoparam_base64&remarks=remarks_base64&group=group_base64
tmpdata=str(data_['server'])+':'+str(data_['port'])+':'+str(data_['protocol'])+':'+str(data_['cipher'])+':'+str(data_['obfs'])+':'+str(data_['password_base64'])+'/?'+encodeBase64("obfsparam=myazuressr&protoparam=myazure&remarks=&group="+encodeBase64("AAAA"))
resault='ssr://'+encodeBase64(tmpdata)
print(resault)
# ssr://server:port:protocol:method:obfs:password_base64/?params_base64
if responseRes.status_code != 200:
return []
def decodeBase64(data):
    """Decode a URL-safe ('-'/'_') base64 string back to a UTF-8 string."""
    normalized = data.replace('-', '+').replace('_', '/')
    return base64.b64decode(normalized.encode('utf-8')).decode('utf-8')
def encodeBase64(data):
    """Encode a UTF-8 string as base64, substituting URL-safe '-'/'_' chars."""
    encoded = base64.b64encode(data.encode('utf-8')).decode('utf-8')
    return encoded.replace('+', '-').replace('/', '_')
if __name__ == "__main__":
    # Build the Tk root window, attach the application and enter the event loop.
    root = Tk()
    app = Application(root)
    app.mainloop()
|
"""Handles incoming route53resolver requests/responses."""
import json
from moto.core.exceptions import InvalidToken
from moto.core.responses import BaseResponse
from moto.route53resolver.exceptions import InvalidNextTokenException
from moto.route53resolver.models import route53resolver_backends
from moto.route53resolver.validations import validate_args
class Route53ResolverResponse(BaseResponse):
    """Handler for Route53Resolver requests and responses."""

    @property
    def route53resolver_backend(self):
        """Return the backend instance for the request's region."""
        return route53resolver_backends[self.region]

    def create_resolver_endpoint(self):
        """Create an inbound or outbound Resolver endpoint."""
        endpoint = self.route53resolver_backend.create_resolver_endpoint(
            region=self.region,
            creator_request_id=self._get_param("CreatorRequestId"),
            name=self._get_param("Name"),
            security_group_ids=self._get_param("SecurityGroupIds"),
            direction=self._get_param("Direction"),
            ip_addresses=self._get_param("IpAddresses"),
            tags=self._get_param("Tags", []),
        )
        return json.dumps({"ResolverEndpoint": endpoint.description()})

    def create_resolver_rule(self):
        """Specify which Resolver endpoint the queries will pass through."""
        rule = self.route53resolver_backend.create_resolver_rule(
            region=self.region,
            creator_request_id=self._get_param("CreatorRequestId"),
            name=self._get_param("Name"),
            rule_type=self._get_param("RuleType"),
            domain_name=self._get_param("DomainName"),
            target_ips=self._get_param("TargetIps", []),
            resolver_endpoint_id=self._get_param("ResolverEndpointId"),
            tags=self._get_param("Tags", []),
        )
        return json.dumps({"ResolverRule": rule.description()})

    def delete_resolver_endpoint(self):
        """Delete a Resolver endpoint."""
        endpoint = self.route53resolver_backend.delete_resolver_endpoint(
            resolver_endpoint_id=self._get_param("ResolverEndpointId"),
        )
        return json.dumps({"ResolverEndpoint": endpoint.description()})

    def delete_resolver_rule(self):
        """Delete a Resolver rule."""
        rule = self.route53resolver_backend.delete_resolver_rule(
            resolver_rule_id=self._get_param("ResolverRuleId"),
        )
        return json.dumps({"ResolverRule": rule.description()})

    def get_resolver_endpoint(self):
        """Return info about a specific Resolver endpoint."""
        endpoint = self.route53resolver_backend.get_resolver_endpoint(
            resolver_endpoint_id=self._get_param("ResolverEndpointId"),
        )
        return json.dumps({"ResolverEndpoint": endpoint.description()})

    def get_resolver_rule(self):
        """Return info about a specific Resolver rule."""
        rule = self.route53resolver_backend.get_resolver_rule(
            resolver_rule_id=self._get_param("ResolverRuleId"),
        )
        return json.dumps({"ResolverRule": rule.description()})

    def list_resolver_endpoint_ip_addresses(self):
        """Return the list of IP addresses for a specific Resolver endpoint."""
        endpoint_id = self._get_param("ResolverEndpointId")
        token = self._get_param("NextToken")
        page_size = self._get_param("MaxResults", 10)
        validate_args([("maxResults", page_size)])
        try:
            backend = self.route53resolver_backend
            ip_addresses, token = backend.list_resolver_endpoint_ip_addresses(
                resolver_endpoint_id=endpoint_id,
                next_token=token,
                max_results=page_size,
            )
        except InvalidToken as exc:
            # Translate the generic token error into the service-specific one.
            raise InvalidNextTokenException() from exc
        body = {"IpAddresses": ip_addresses, "MaxResults": page_size}
        if token:
            body["NextToken"] = token
        return json.dumps(body)

    def list_resolver_endpoints(self):
        """Return all Resolver endpoints, filtered if specified."""
        token = self._get_param("NextToken")
        page_size = self._get_param("MaxResults", 10)
        validate_args([("maxResults", page_size)])
        try:
            endpoints, token = self.route53resolver_backend.list_resolver_endpoints(
                self._get_param("Filters"), next_token=token, max_results=page_size
            )
        except InvalidToken as exc:
            raise InvalidNextTokenException() from exc
        body = {
            "ResolverEndpoints": [endpoint.description() for endpoint in endpoints],
            "MaxResults": page_size,
        }
        if token:
            body["NextToken"] = token
        return json.dumps(body)

    def list_resolver_rules(self):
        """Return all Resolver rules, filtered if specified."""
        token = self._get_param("NextToken")
        page_size = self._get_param("MaxResults", 10)
        validate_args([("maxResults", page_size)])
        try:
            rules, token = self.route53resolver_backend.list_resolver_rules(
                self._get_param("Filters"), next_token=token, max_results=page_size
            )
        except InvalidToken as exc:
            raise InvalidNextTokenException() from exc
        body = {
            "ResolverRules": [rule.description() for rule in rules],
            "MaxResults": page_size,
        }
        if token:
            body["NextToken"] = token
        return json.dumps(body)

    def list_tags_for_resource(self):
        """List all tags for the given resource."""
        token = self._get_param("NextToken")
        try:
            tags, token = self.route53resolver_backend.list_tags_for_resource(
                resource_arn=self._get_param("ResourceArn"),
                next_token=token,
                max_results=self._get_param("MaxResults"),
            )
        except InvalidToken as exc:
            raise InvalidNextTokenException() from exc
        body = {"Tags": tags}
        if token:
            body["NextToken"] = token
        return json.dumps(body)

    def tag_resource(self):
        """Add one or more tags to a specified resource."""
        self.route53resolver_backend.tag_resource(
            resource_arn=self._get_param("ResourceArn"),
            tags=self._get_param("Tags"),
        )
        return ""

    def untag_resource(self):
        """Remove one or more tags from the specified resource."""
        self.route53resolver_backend.untag_resource(
            resource_arn=self._get_param("ResourceArn"),
            tag_keys=self._get_param("TagKeys"),
        )
        return ""

    def update_resolver_endpoint(self):
        """Update the name of a Resolver endpoint."""
        endpoint = self.route53resolver_backend.update_resolver_endpoint(
            resolver_endpoint_id=self._get_param("ResolverEndpointId"),
            name=self._get_param("Name"),
        )
        return json.dumps({"ResolverEndpoint": endpoint.description()})
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2012-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import re
import pwd
import grp
import Gaffer
class FileSystemPath( Gaffer.Path ) :
	"""A Gaffer.Path subclass representing locations on the local filesystem."""

	def __init__( self, path=None, root="/", filter=None ) :

		Gaffer.Path.__init__( self, path, root, filter )

	def isValid( self ) :
		"""Valid if the base class accepts the path and it exists (symlinks included)."""
		return Gaffer.Path.isValid( self ) and os.path.lexists( str( self ) )

	def isLeaf( self ) :
		"""Anything that exists and is not a directory is a leaf."""
		return self.isValid() and not os.path.isdir( str( self ) )

	def info( self ) :
		"""Return the base info dict augmented with filesystem metadata,
		or None if the base class returns None."""
		result = Gaffer.Path.info( self )
		if result is None :
			return None

		pathString = str( self )
		try :
			# if s is a symlink, this gets the information from
			# the pointed-to file, failing if it doesn't exist.
			s = os.stat( pathString )
		except OSError :
			# if a symlink was broken then we fall back to
			# getting information from the link itself.
			s = os.lstat( pathString )

		# bug fix: bare "except" replaced with KeyError, the specific
		# error pwd/grp raise for unknown uids/gids.
		try :
			p = pwd.getpwuid( s.st_uid )
		except KeyError :
			p = None
		try :
			g = grp.getgrgid( s.st_gid )
		except KeyError :
			g = None

		result["fileSystem:owner"] = p.pw_name if p is not None else ""
		result["fileSystem:group"] = g.gr_name if g is not None else ""
		result["fileSystem:modificationTime"] = s.st_mtime
		result["fileSystem:accessTime"] = s.st_atime
		result["fileSystem:size"] = s.st_size

		return result

	def _children( self ) :
		"""Return child paths; unreadable/missing directories yield []."""
		try :
			c = os.listdir( str( self ) )
		except OSError :
			# bug fix: bare "except" narrowed to the OSError that
			# os.listdir raises for missing/unreadable directories.
			return []
		return [ FileSystemPath( self[:] + [ x ], self.root() ) for x in c ]

	@staticmethod
	def createStandardFilter( extensions=(), extensionsLabel=None ) :
		"""Return a CompoundPathFilter combining an extension filter, a
		hidden-file filter and a name-info filter.

		Bug fix: the mutable default argument ``extensions=[]`` was
		replaced by an immutable empty tuple; behaviour is unchanged.
		"""
		result = Gaffer.CompoundPathFilter()

		if extensions :
			extensions = [ e.lower() for e in extensions ]
			if extensionsLabel is None :
				extensionsLabel = "Show only " + ", ".join( [ "." + e for e in extensions ] ) + " files"
			extensions += [ e.upper() for e in extensions ]
			extensions = [ "*." + e for e in extensions ]
			# the form below is for file sequences, where the frame range will
			# come after the extension
			# NOTE(review): "*.%s *" applied to "*.ext" yields "*.*.ext *";
			# kept as-is to preserve behaviour — confirm the pattern intent.
			extensions += [ "*.%s *" % e for e in extensions ]

			result.addFilter(
				Gaffer.FileNamePathFilter(
					extensions,
					userData = {
						"UI" : {
							"label" : extensionsLabel,
						}
					}
				)
			)

		result.addFilter(
			Gaffer.FileNamePathFilter(
				[ re.compile( "^[^.].*" ) ],
				leafOnly=False,
				userData = {
					"UI" : {
						"label" : "Show hidden files",
						"invertEnabled" : True,
					}
				}
			)
		)

		result.addFilter(
			Gaffer.InfoPathFilter(
				infoKey = "name",
				matcher = None, # the ui will fill this in
				leafOnly = False,
			)
		)

		return result
|
from typing import Any, Generator, Tuple
import pytest
from dbcat.catalog import Catalog, CatColumn, CatSchema, CatSource, CatTable
from sqlalchemy import create_engine
from piicatcher.dbinfo import get_dbinfo
from piicatcher.generators import (
_get_query,
_get_table_count,
_row_generator,
column_generator,
data_generator,
)
@pytest.fixture(scope="module")
def sqlalchemy_engine(
    load_data_and_pull,
) -> Generator[Tuple[Catalog, CatSource, Any], None, None]:
    """Yield (catalog, source, live connection) for the pulled test database."""
    catalog, src_id = load_data_and_pull
    with catalog.managed_session:
        src = catalog.get_source_by_id(src_id)
        engine = create_engine(src.conn_string)
        with engine.connect() as connection:
            yield catalog, src, connection
@pytest.fixture(scope="module")
def load_source(load_data_and_pull) -> Generator[Tuple[Catalog, CatSource], None, None]:
    """Yield the catalog together with the source loaded by the pull fixture."""
    catalog, src_id = load_data_and_pull
    with catalog.managed_session:
        yield catalog, catalog.get_source_by_id(src_id)
def test_column_generator(load_source):
    """All 8 columns of the source are yielded."""
    catalog, source = load_source
    seen = sum(1 for _ in column_generator(catalog=catalog, source=source))
    assert seen == 8
def test_column_generator_include_schema(load_source):
    """Including the only schema by name keeps all 8 columns."""
    catalog, source = load_source
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    seen = sum(
        1
        for _ in column_generator(
            catalog=catalog, source=source, include_schema_regex_str=[schemata[0].name]
        )
    )
    assert seen == 8
def test_column_generator_exclude_schema(load_source):
    """Excluding the only schema yields no columns at all."""
    catalog, source = load_source
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    seen = sum(
        1
        for _ in column_generator(
            catalog=catalog, source=source, exclude_schema_regex_str=[schemata[0].name]
        )
    )
    assert seen == 0
def test_column_generator_include_table(load_source):
    """Restricting to tables matching 'full.*' yields 2 columns."""
    catalog, source = load_source
    seen = sum(
        1
        for _ in column_generator(
            catalog=catalog, source=source, include_table_regex_str=["full.*"]
        )
    )
    assert seen == 2
def test_column_generator_exclude_table(load_source):
    """Excluding tables matching 'full.*' leaves the remaining 6 columns."""
    catalog, source = load_source
    seen = sum(
        1
        for _ in column_generator(
            catalog=catalog, source=source, exclude_table_regex_str=["full.*"]
        )
    )
    assert seen == 6
def test_get_table_count(sqlalchemy_engine):
    """The full_pii table holds exactly two rows."""
    catalog, source, conn = sqlalchemy_engine
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    pii_table = catalog.get_table(
        source_name=source.name, schema_name=schemata[0].name, table_name="full_pii"
    )
    row_count = _get_table_count(
        schema=pii_table.schema,
        table=pii_table,
        dbinfo=get_dbinfo(source.source_type),
        connection=conn,
    )
    assert row_count == 2
def test_get_query(sqlalchemy_engine):
    """The generated full-scan query uses the dialect's quoting rules."""
    catalog, source, conn = sqlalchemy_engine
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    pii_table = catalog.get_table(
        source_name=source.name, schema_name=schemata[0].name, table_name="full_pii"
    )
    query = _get_query(
        schema=schemata[0],
        table=pii_table,
        column_list=catalog.get_columns_for_table(pii_table),
        dbinfo=get_dbinfo(source.source_type),
        connection=conn,
    )
    expected = {
        "mysql": """select `name`,`state` from piidb.full_pii""",
        "postgresql": """select "name","state" from public.full_pii""",
        "sqlite": """select "name","state" from full_pii""",
    }
    if source.source_type in expected:
        assert query == expected[source.source_type]
def test_get_sample_query(sqlalchemy_engine):
    """With sample_size set, dialects that support sampling add TABLESAMPLE/LIMIT."""
    catalog, source, conn = sqlalchemy_engine
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    pii_table = catalog.get_table(
        source_name=source.name, schema_name=schemata[0].name, table_name="full_pii"
    )
    query = _get_query(
        schema=schemata[0],
        table=pii_table,
        column_list=catalog.get_columns_for_table(pii_table),
        dbinfo=get_dbinfo(source.source_type),
        connection=conn,
        sample_size=1,
    )
    expected = {
        "mysql": """select `name`,`state` from piidb.full_pii limit 1""",
        "postgresql": """SELECT "name","state" FROM public.full_pii TABLESAMPLE BERNOULLI (10) LIMIT 1""",
        # sqlite has no sampling support; the plain query is returned
        "sqlite": """select "name","state" from full_pii""",
    }
    if source.source_type in expected:
        assert query == expected[source.source_type]
@pytest.mark.parametrize(
    ("source_type", "expected_query"),
    [
        (
            "redshift",
            'SELECT "column" FROM public.table TABLESAMPLE BERNOULLI (10) LIMIT 1',
        ),
        ("snowflake", "SELECT column FROM public.table TABLESAMPLE BERNOULLI (1 ROWS)"),
        (
            "athena",
            'SELECT "column" FROM public.table TABLESAMPLE BERNOULLI (10) LIMIT 1',
        ),
    ],
)
def test_get_sample_query_redshift(mocker, source_type, expected_query):
    """Warehouse dialects build their own sampling clause (no live connection)."""
    src = CatSource(name="src", source_type=source_type)
    public_schema = CatSchema(source=src, name="public")
    tbl = CatTable(schema=public_schema, name="table")
    col = CatColumn(table=tbl, name="column")
    # avoid a real COUNT(*) round-trip
    mocker.patch("piicatcher.generators._get_table_count", return_value=100)
    built = _get_query(
        schema=public_schema,
        table=tbl,
        column_list=[col],
        dbinfo=get_dbinfo(source_type=src.source_type),
        connection=None,
        sample_size=1,
    )
    assert built == expected_query
def test_row_generator(sqlalchemy_engine):
    """Two non-null rows stream out of the full_pii table."""
    catalog, source, conn = sqlalchemy_engine
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    pii_table = catalog.get_table(
        source_name=source.name, schema_name=schemata[0].name, table_name="full_pii"
    )
    rows = 0
    for record in _row_generator(
        source=source,
        schema=schemata[0],
        table=pii_table,
        column_list=catalog.get_columns_for_table(pii_table),
    ):
        rows += 1
        assert record[0] is not None
        assert record[1] is not None
    assert rows == 2
def test_data_generator(sqlalchemy_engine):
    """The unfiltered data generator yields 14 tuples."""
    catalog, source, conn = sqlalchemy_engine
    seen = sum(1 for _ in data_generator(catalog=catalog, source=source))
    assert seen == 14
def test_data_generator_include_schema(load_source):
    """Including the only schema keeps all 14 data tuples."""
    catalog, source = load_source
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    seen = sum(
        1
        for _ in data_generator(
            catalog=catalog, source=source, include_schema_regex_str=[schemata[0].name]
        )
    )
    assert seen == 14
def test_data_generator_exclude_schema(load_source):
    """Excluding the only schema yields no data at all."""
    catalog, source = load_source
    schemata = catalog.search_schema(source_like=source.name, schema_like="%")
    seen = sum(
        1
        for _ in data_generator(
            catalog=catalog, source=source, exclude_schema_regex_str=[schemata[0].name]
        )
    )
    assert seen == 0
def test_data_generator_include_table(load_source):
    """Restricting to 'full.*' tables yields 4 data tuples."""
    catalog, source = load_source
    seen = sum(
        1
        for _ in data_generator(
            catalog=catalog, source=source, include_table_regex_str=["full.*"]
        )
    )
    assert seen == 4
def test_data_generator_exclude_table(load_source):
    """Excluding 'full.*' tables leaves 10 data tuples."""
    catalog, source = load_source
    seen = sum(
        1
        for _ in data_generator(
            catalog=catalog, source=source, exclude_table_regex_str=["full.*"]
        )
    )
    assert seen == 10
def test_data_generator_include_int_table(load_source):
    """The partial_data_type table contributes 2 data tuples."""
    catalog, source = load_source
    seen = sum(
        1
        for _ in data_generator(
            catalog=catalog, source=source, include_table_regex_str=["partial_data_type"]
        )
    )
    assert seen == 2
|
#!/usr/bin/env python
"""Hadoop-streaming reducer: sum viewer counts per show, emitting only
shows that also appeared on channel ABC.

Input (sorted by key): "<show>\t<value>" lines, where value is either a
viewer count or a channel name. Output: "<show> <total>" per ABC show.
"""
import sys

prev_show = " "
abc_found = False
viewers_total = 0
line_cnt = 0  # count input lines

for line in sys.stdin:
    line = line.strip()  # strip out carriage return
    key_value = line.split('\t')  # split line, into key and value, returns a list
    line_cnt += 1
    curr_show = key_value[0]
    value_in = key_value[1]
    if curr_show != prev_show:
        # key changed: flush the previous group if it aired on ABC
        if line_cnt > 1 and abc_found:
            print('{0} {1}'.format(prev_show, viewers_total))
        # now reset accumulators
        viewers_total = 0
        abc_found = False
        prev_show = curr_show
    if value_in[0:3] == 'ABC':
        abc_found = True
    else:
        viewers_total += int(value_in)  # value is a plain viewer count

# Flush the final group. Bug fixes: the original printed unconditionally
# (emitting non-ABC shows) and raised NameError on empty input.
if line_cnt > 0 and abc_found:
    print('{0} {1}'.format(curr_show, viewers_total))
|
from frontend import db, login_manager
from datetime import datetime
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: look up a User row by primary key."""
    uid = int(user_id)
    return User.query.get(uid)
class Project(db.Model):
    """Database model for a project: belongs to a User and owns Devices."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False, unique=True)
    description = db.Column(db.String(200))
    # set by Python at insert time (datetime.utcnow, i.e. naive UTC)
    date_created = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
    # full file contents stored inline, not paths
    config_file = db.Column(db.Text, nullable=False)
    inventory_file = db.Column(db.Text, nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
    # one-to-many: devices reach back via the "project" backref
    devices = db.relationship("Device", backref="project", lazy=True)
    def __repr__(self):
        return f"Project('{self.name}', '{self.date_created}', '{self.config_file}')"
class User(db.Model, UserMixin):
    """Application user; UserMixin supplies the Flask-Login interface."""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20), nullable=False, unique=True)
    email = db.Column(db.String(120), nullable=False, unique=True)
    # profile picture filename, not the image data itself
    image_file = db.Column(db.String(20), nullable=False, default="default.jpg")
    # length 60 suggests a bcrypt hash is stored here — confirm against the auth code
    password = db.Column(db.String(60), nullable=False)
    # one-to-many: projects reach back via the "author" backref
    projects = db.relationship("Project", backref="author", lazy=True)
    def __repr__(self):
        return f"User('{self.username}', '{self.email}', '{self.image_file}')"
def add_device(device_name, project_id, management_ip):
    """Create a Device row for *project_id* and persist it.

    Bug fix: the original called ``db.commit()``; Flask-SQLAlchemy exposes
    commit on the session object, so it must be ``db.session.commit()``.
    """
    device = Device(
        name=device_name, project_id=project_id, management_ip=management_ip
    )
    db.session.add(device)
    db.session.commit()
class Device(db.Model):
    """Network device row belonging to a Project."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    # "-" placeholders until the device facts are collected
    version = db.Column(db.String(100), nullable=False, default="-")
    platform = db.Column(db.String(100), nullable=False, default="-")
    image_id = db.Column(db.String(100), nullable=False, default="-")
    project_id = db.Column(db.Integer, db.ForeignKey("project.id"), nullable=False)
    management_ip = db.Column(db.String(50), nullable=False)
    def __repr__(self):
        return f"Device('{self.name}', Project ID: '{self.project_id}', Management IP: '{self.management_ip}')"
|
"""Provide a model for the Z-Wave JS node's health checks and power tests."""
from dataclasses import dataclass
from typing import List, Optional
from ...const import TYPING_EXTENSION_FOR_TYPEDDICT_REQUIRED, PowerLevel
if TYPING_EXTENSION_FOR_TYPEDDICT_REQUIRED:
from typing_extensions import TypedDict
else:
from typing import TypedDict
class LifelineHealthCheckResultDataType(TypedDict, total=False):
    """Represent a lifeline health check result data dict type."""

    # https://github.com/zwave-js/node-zwave-js/blob/master/packages/zwave-js/src/lib/node/Types.ts#L171
    # total=False: only keys marked "required" below are guaranteed by
    # the upstream schema; the others may be absent.
    latency: int  # required
    numNeighbors: int  # required
    failedPingsNode: int  # required
    routeChanges: int
    minPowerlevel: int
    failedPingsController: int
    snrMargin: int
class LifelineHealthCheckSummaryDataType(TypedDict):
    """Represent a lifeline health check summary data dict type."""

    # https://github.com/zwave-js/node-zwave-js/blob/master/packages/zwave-js/src/lib/node/Types.ts#L211
    # per-round results plus an overall rating for the check
    results: List[LifelineHealthCheckResultDataType]
    rating: int
class LifelineHealthCheckResult:
    """Thin read-only wrapper over one lifeline health check result dict."""

    def __init__(self, data: LifelineHealthCheckResultDataType) -> None:
        """Store the raw result dict for property access."""
        self.data = data

    @property
    def latency(self) -> int:
        """Return latency."""
        return self.data["latency"]

    @property
    def num_neighbors(self) -> int:
        """Return number of neighbors."""
        return self.data["numNeighbors"]

    @property
    def failed_pings_node(self) -> int:
        """Return number of failed pings to node."""
        return self.data["failedPingsNode"]

    @property
    def route_changes(self) -> Optional[int]:
        """Return number of route changes, if reported."""
        return self.data.get("routeChanges")

    @property
    def min_power_level(self) -> Optional[PowerLevel]:
        """Return minimum power level, if reported."""
        raw = self.data.get("minPowerlevel")
        return None if raw is None else PowerLevel(raw)

    @property
    def failed_pings_controller(self) -> Optional[int]:
        """Return number of failed pings to controller, if reported."""
        return self.data.get("failedPingsController")

    @property
    def snr_margin(self) -> Optional[int]:
        """Return SNR margin, if reported."""
        return self.data.get("snrMargin")
class LifelineHealthCheckSummary:
    """Represent a lifeline health check summary update."""

    def __init__(self, data: LifelineHealthCheckSummaryDataType) -> None:
        """Unpack the rating and wrap each raw result entry."""
        self._rating = data["rating"]
        raw_results = data.get("results", [])
        self._results = [LifelineHealthCheckResult(entry) for entry in raw_results]

    @property
    def rating(self) -> int:
        """Return rating."""
        return self._rating

    @property
    def results(self) -> List[LifelineHealthCheckResult]:
        """Return lifeline health check results."""
        return self._results
class RouteHealthCheckResultDataType(TypedDict, total=False):
    """Represent a route health check result data dict type."""

    # https://github.com/zwave-js/node-zwave-js/blob/master/packages/zwave-js/src/lib/node/Types.ts#L242
    # total=False: only keys marked "required" below are guaranteed by
    # the upstream schema; the others may be absent.
    numNeighbors: int  # required
    rating: int  # required
    failedPingsToTarget: int
    failedPingsToSource: int
    minPowerlevelSource: int
    minPowerlevelTarget: int
class RouteHealthCheckSummaryDataType(TypedDict):
    """Represent a route health check summary data dict type."""

    # https://github.com/zwave-js/node-zwave-js/blob/master/packages/zwave-js/src/lib/node/Types.ts#L274
    # per-round results plus an overall rating for the check
    results: List[RouteHealthCheckResultDataType]
    rating: int
class RouteHealthCheckResult:
    """Thin read-only wrapper over one route health check result dict."""

    def __init__(self, data: RouteHealthCheckResultDataType) -> None:
        """Store the raw result dict for property access."""
        self.data = data

    @property
    def num_neighbors(self) -> int:
        """Return number of neighbors."""
        return self.data["numNeighbors"]

    @property
    def rating(self) -> int:
        """Return rating."""
        return self.data["rating"]

    @property
    def failed_pings_to_target(self) -> Optional[int]:
        """Return number of failed pings to target, if reported."""
        return self.data.get("failedPingsToTarget")

    @property
    def failed_pings_to_source(self) -> Optional[int]:
        """Return number of failed pings to source, if reported."""
        return self.data.get("failedPingsToSource")

    @property
    def min_power_level_source(self) -> Optional[PowerLevel]:
        """Return minimum power level source, if reported."""
        raw = self.data.get("minPowerlevelSource")
        return None if raw is None else PowerLevel(raw)

    @property
    def min_power_level_target(self) -> Optional[PowerLevel]:
        """Return minimum power level target, if reported."""
        raw = self.data.get("minPowerlevelTarget")
        return None if raw is None else PowerLevel(raw)
class RouteHealthCheckSummary:
    """Represent a route health check summary update."""

    def __init__(self, data: RouteHealthCheckSummaryDataType) -> None:
        """Unpack the rating and wrap each raw result entry."""
        self._rating = data["rating"]
        raw_results = data.get("results", [])
        self._results = [RouteHealthCheckResult(entry) for entry in raw_results]

    @property
    def rating(self) -> int:
        """Return rating."""
        return self._rating

    @property
    def results(self) -> List[RouteHealthCheckResult]:
        """Return route health check results."""
        return self._results
@dataclass
class TestPowerLevelProgress:
    """Class to represent a test power level progress update."""

    # frames acknowledged so far out of the total sent
    acknowledged: int
    total: int
@dataclass
class CheckHealthProgress:
    """Represent a check lifeline/route health progress update."""

    # rounds completed so far out of total_rounds
    rounds: int
    total_rounds: int
    # rating of the most recently completed round
    last_rating: int
|
"""module for fobj modifier."""
from __future__ import annotations
from typing import Any, Callable, cast, TYPE_CHECKING
from ..jobject import JObject
from .modifier import Modifier
if TYPE_CHECKING:
from ..ctx import Ctx
from ..types import Types
class FObjModifier(Modifier):
    """Fetch the object stored at a field of a JSONClass object.

    If the field has not been loaded yet, include it first and then
    return it.
    """

    def __init__(self, field_name: str | Callable | Types) -> None:
        self.field_name = field_name

    def transform(self, ctx: Ctx) -> Any:
        if ctx.val is None:
            return None
        target = self.resolve_param(self.field_name, ctx)
        obj = cast(JObject, ctx.val)
        if getattr(obj, target) is None:
            obj.include(target)
        return getattr(obj, target)
|
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import base64
import dns.exception
import dns.rdtypes.util
class Gateway(dns.rdtypes.util.Gateway):
    # Only overrides the display name used in the base class's error messages.
    name = 'IPSECKEY gateway'
class IPSECKEY(dns.rdata.Rdata):
    """IPSECKEY record"""
    # see: RFC 4025
    __slots__ = ['precedence', 'gateway_type', 'algorithm', 'gateway', 'key']
    def __init__(self, rdclass, rdtype, precedence, gateway_type, algorithm,
                 gateway, key):
        super().__init__(rdclass, rdtype)
        # Validate that the gateway value is consistent with gateway_type
        # before storing anything.
        Gateway(gateway_type, gateway).check()
        # Rdata instances are immutable; object.__setattr__ bypasses the
        # base class's write protection during construction.
        object.__setattr__(self, 'precedence', precedence)
        object.__setattr__(self, 'gateway_type', gateway_type)
        object.__setattr__(self, 'algorithm', algorithm)
        object.__setattr__(self, 'gateway', gateway)
        object.__setattr__(self, 'key', key)
    def to_text(self, origin=None, relativize=True, **kw):
        """Render the record in presentation format (key base64-encoded)."""
        gateway = Gateway(self.gateway_type, self.gateway).to_text(origin,
                                                                   relativize)
        return '%d %d %d %s %s' % (self.precedence, self.gateway_type,
                                   self.algorithm, gateway,
                                   dns.rdata._base64ify(self.key))
    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True,
                  relativize_to=None):
        """Parse an IPSECKEY record from zone-file text."""
        precedence = tok.get_uint8()
        gateway_type = tok.get_uint8()
        algorithm = tok.get_uint8()
        # Gateway parsing depends on gateway_type (none / IPv4 / IPv6 / name).
        gateway = Gateway(gateway_type).from_text(tok, origin, relativize,
                                                  relativize_to)
        # The key may span several whitespace-separated base64 chunks.
        b64 = tok.concatenate_remaining_identifiers().encode()
        key = base64.b64decode(b64)
        return cls(rdclass, rdtype, precedence, gateway_type, algorithm,
                   gateway, key)
    def _to_wire(self, file, compress=None, origin=None, canonicalize=False):
        """Write the record in RFC 4025 wire format."""
        header = struct.pack("!BBB", self.precedence, self.gateway_type,
                             self.algorithm)
        file.write(header)
        Gateway(self.gateway_type, self.gateway).to_wire(file, compress,
                                                         origin, canonicalize)
        # Key bytes trail the gateway with no length prefix.
        file.write(self.key)
    @classmethod
    def from_wire_parser(cls, rdclass, rdtype, parser, origin=None):
        """Parse an IPSECKEY record from wire format."""
        header = parser.get_struct('!BBB')
        gateway_type = header[1]
        gateway = Gateway(gateway_type).from_wire_parser(parser, origin)
        # Everything after the gateway is the public key.
        key = parser.get_remaining()
        return cls(rdclass, rdtype, header[0], gateway_type, header[2],
                   gateway, key)
|
from gensim.models import word2vec
import numpy as np
from scipy import linalg, stats
import sys
# Directory containing the saved word2vec models loaded in __main__ below.
savePath = "/fs/clip-scratch/shing/output/"
def cos(vec1, vec2):
    """Cosine similarity between two vectors."""
    denom = linalg.norm(vec1) * linalg.norm(vec2)
    return vec1.dot(vec2) / denom
def rho(vec1, vec2):
    """Spearman rank correlation of two score sequences.

    Returns the (correlation, p-value) pair from scipy.
    """
    result = stats.stats.spearmanr(vec1, vec2)
    return result
if __name__ == '__main__':
    # NOTE(review): Python 2 print statements — this script predates Python 3.
    # Load two pre-trained word2vec models; CLI args are file names under savePath.
    modelWord = word2vec.Word2Vec.load(savePath + sys.argv[1])
    modelPhrase = word2vec.Word2Vec.load(savePath + sys.argv[2])
    # Restrict the comparison to words present in both vocabularies.
    phraseVocab = set([word for word in modelPhrase.vocab])
    wordVocab = set([word for word in modelWord.vocab])
    mutualVocab = phraseVocab & wordVocab
    print sys.argv[1] + ' num: ' + str(len(wordVocab))
    print sys.argv[2] + ' num: ' + str(len(phraseVocab))
    print 'mutual num: ' + str(len(mutualVocab))
    mutualVocab = list(mutualVocab)
    # Sample 3000 random word pairs and compare the two models' cosine
    # similarities via Spearman correlation.
    pairsToCompare = [np.random.choice(mutualVocab, 2, replace=False) for i in range(3000)]
    scoreWord = [cos(modelWord[pair[0]], modelWord[pair[1]]) for pair in pairsToCompare]
    scorePhrase = [cos(modelPhrase[pair[0]], modelPhrase[pair[1]]) for pair in pairsToCompare]
    rhoScore = rho(scoreWord, scorePhrase)
    print 'rho is: ', rhoScore[0]
    print 'p value is: ', rhoScore[1]
|
from pcc.AST.statement import Statement
class FunctionArgument(Statement):
    """AST node for a single argument of a function declaration."""
    def __init__(self, type_name, type_decl, identifier, depth):
        # depth is the node's nesting level; the Statement base presumably
        # stores it as self._depth (used below) — confirm in Statement.
        super(FunctionArgument, self).__init__(depth)
        self.type_name = type_name
        self.type_decl = type_decl
        self.identifier = identifier
    def __str__(self):
        # Render in a pycparser-like dump format, indented by _depth spaces.
        string = ''
        string += self._depth * ' ' + 'Typename: ' \
                  + str(self.type_name) + ', []\n'
        string += self._depth * ' ' + '  TypeDecl: ' \
                  + str(self.type_decl) + ', []\n'
        string += self._depth * ' ' + '    IdentifierType: [\'' \
                  + str(self.identifier) + '\']\n'
        return string
|
# Copyright 2021 MosaicML. All Rights Reserved.
"""The CPU device used for training."""
from __future__ import annotations
import logging
from typing import Any, Dict, TypeVar
import torch
from composer.trainer.devices.device import Device
# Module-level logger.
logger = logging.getLogger(__name__)
__all__ = ["DeviceCPU"]
# Generic module type, preserved across module_to_device().
T_nnModule = TypeVar("T_nnModule", bound=torch.nn.Module)
class DeviceCPU(Device):
    """An extension of :class:`~composer.trainer.devices.device.Device` for CPUs.

    This class takes no arguments.
    """

    dist_backend = "gloo"
    _device = torch.device('cpu')

    def module_to_device(self, module: T_nnModule) -> T_nnModule:
        """Move ``module`` onto the CPU device and return it."""
        return module.to(self._device)

    def tensor_to_device(self, tensor: torch.Tensor) -> torch.Tensor:
        """Move ``tensor`` onto the CPU device and return it."""
        return tensor.to(self._device)

    def state_dict(self) -> Dict[str, Any]:
        """Return the device state; the CPU device keeps no RNG state."""
        return {}

    def load_state_dict(self, state: Dict[str, Any]) -> None:
        """Reject any non-empty state dict, since the CPU device stores none."""
        if state:
            raise ValueError("CPU device has no state.")
|
from abc import ABC, abstractmethod
from copy import copy
from enum import Enum
from typing import List, Tuple, Optional, Sequence, Any, Union
from cardbuilder.exceptions import CardBuilderUsageException
class Value(ABC):
    """Abstract base class for all values.

    Subclasses store their payload in ``self._data`` and expose it via
    :meth:`get_data`.
    """

    def __init__(self):
        # Subclass __init__ methods are responsible for filling this in.
        self._data = None

    @abstractmethod
    def get_data(self) -> Sequence:
        raise NotImplementedError()

    def is_empty(self) -> bool:
        """Return True when the stored payload contains no items."""
        return len(self._data) == 0

    def __eq__(self, other):
        return isinstance(other, type(self)) and self._data == other._data

    def __hash__(self):
        # Bug fix: list-backed subclasses (ListValue, MultiValue, ...) keep
        # their payload in a list, which is unhashable — hash(self._data)
        # raised TypeError for them. Hash an equivalent tuple instead.
        # Nested Value elements hash recursively through this same method,
        # so arbitrarily nested payloads terminate at hashable leaves.
        if isinstance(self._data, list):
            return hash(tuple(self._data))
        return hash(self._data)

    def __repr__(self):
        return type(self).__name__ + ':' + repr(self.get_data())
def to_value(input_data: Any) -> Value:
    """Coerce raw input into a Value, passing existing Values through."""
    if isinstance(input_data, Value):
        return input_data
    if not isinstance(input_data, SingleValue.input_type):
        raise CardBuilderUsageException(f'Cannot convert given input data {input_data} into a Value class')
    return SingleValue(input_data)
#TODO: extend to other value types and in effect have an AutoValue method?
class SingleValue(Value):
    """Represents a single value, such as a part of speech, IPA for a word, or a word itself."""

    input_type = str

    def __init__(self, val: input_type):
        super().__init__()
        if not isinstance(val, self.input_type):
            bad_type = type(val).__name__
            raise CardBuilderUsageException('SingleValue received input of incorrect type ({})'.format(bad_type))
        self._data = val

    def get_data(self) -> str:
        # Return a copy so callers cannot mutate the stored payload.
        return copy(self._data)
# Types accepted wherever a single value unit may be given.
# NOTE(review): both union members are currently str, so this collapses to str;
# the Union is presumably future-proofing for additional input types.
value_unit_type = Union[SingleValue.input_type, str]
class PitchAccentValue(SingleValue):
    """Represents pitch value for an associated word.

    The stored data is a string of per-character accent codes (one of
    'h'/'l'/'d'), one code per character of ``word``.
    """

    class PitchAccent(Enum):
        HIGH = 'h'
        LOW = 'l'
        DROP = 'd'

    def __init__(self, pitch_accent: Union[str, List[PitchAccent]], word: str):
        # Accept either a ready-made code string or a list of PitchAccent
        # members, which is flattened to its code string.
        if isinstance(pitch_accent, list):
            super(PitchAccentValue, self).__init__(''.join(p.value for p in pitch_accent))
        else:
            super(PitchAccentValue, self).__init__(pitch_accent)
        self.word = word
        if not isinstance(word, str) or len(pitch_accent) != len(word):
            # Bug fix: the two message fragments previously concatenated as
            # "...same length as thepitch accent list" (missing space).
            raise CardBuilderUsageException('PitchAccentValue word value must be a string of the same length '
                                            'as the pitch accent list')
class MultiValue(Value):
    """Represents multiple values, each optionally paired with a header value. Useful for capturing pairs or mappings
    of values, such as words where pronunciation is different based on the part of speech. For straightforward lists
    of values, use ListValue"""

    def __init__(self, list_header_tuples: List[Tuple[value_unit_type, Optional[value_unit_type]]]):
        super(MultiValue, self).__init__()
        # Convert every pair first (so bad headers still raise), then drop
        # pairs whose content is empty.
        pairs = []
        for data, header_data in list_header_tuples:
            content = to_value(data)
            header = None if header_data is None else to_value(header_data)
            pairs.append((content, header))
        self._data = [pair for pair in pairs if not pair[0].is_empty()]

    def get_data(self) -> List[Tuple[SingleValue, Optional[SingleValue]]]:
        return copy(self._data)
class ListValue(Value):
    """
    Represents a list of values, such as multiple possible parts of speech or multiple definitions.
    """

    input_type = List[value_unit_type]

    def __init__(self, value_list: input_type):
        super(ListValue, self).__init__()
        # Coerce every item to a Value and discard the empty ones.
        converted = (to_value(item) for item in value_list)
        self._data = [value for value in converted if not value.is_empty()]

    def get_data(self) -> List[SingleValue]:
        return copy(self._data)
class MultiListValue(Value):
    """
    Represents multiple lists of values, each optionally paired with a header value. Most commonly used in cases where
    a word has multiple possible parts of speech, and there is a list of values (such as definitions) associated with
    each part of speech.
    """

    def __init__(self, list_header_tuples: List[Tuple[ListValue.input_type, Optional[value_unit_type]]]):
        super(MultiListValue, self).__init__()
        # Convert every (list, header) pair first, then drop pairs whose
        # list ended up empty.
        pairs = []
        for list_data, header_data in list_header_tuples:
            values = ListValue(list_data)
            header = None if header_data is None else to_value(header_data)
            pairs.append((values, header))
        self._data = [(content, header) for content, header in pairs if not content.is_empty()]

    def get_data(self) -> List[Tuple[ListValue, Optional[SingleValue]]]:
        return copy(self._data)
class LinksValue(Value):
    """
    Represents a link in a dictionary to another word. Useful only in very specific cases.
    """

    def __init__(self, link_data: List['LookupData']):
        # Bug fix: run the base-class initializer like every sibling Value
        # subclass does (it was previously skipped).
        super().__init__()
        self._data = link_data

    def get_data(self) -> List['LookupData']:
        # Unlike other Value subclasses, returns the stored list directly
        # (no defensive copy) — preserved from the original behavior.
        return self._data
|
"""Server-side views for items."""
from typing import Any
from flask import render_template
from inventorymgr.views.blueprint import views_blueprint
@views_blueprint.route("/items")
def items() -> Any:
"""List view for items."""
return render_template("items.html.j2")
@views_blueprint.route("/items/<item_id>/edit")
def edit_item(item_id: str) -> Any: # pylint: disable=unused-argument
"""Edit view for items."""
return render_template("item_edit.html.j2")
@views_blueprint.route("/items/new")
def item_new() -> Any:
"""View for creating new items."""
return render_template("item_new.html.j2")
@views_blueprint.route("/items/<item_id>")
def item_detail(item_id: str) -> Any: # pylint: disable="unused-argument"
"""Detail view for an item."""
return render_template("item_detail.html.j2")
|
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from ._enums import Browsers as BROWSERS
from ._custom_exceptions import DriverException
class _Browser:
def __init__(self):
self.headless = False
self.selenium_host = ''
self.fullscreen = True
def run_and_return_driver(self):
if self.selenium_host and self.selenium_host != 'localhost':
return webdriver.Remote(
command_executor=self.selenium_host,
desired_capabilities=self.capabilities)
if self.headless:
return self.run_headless_mode()
return self.run_normal_mode()
def run_normal_mode(self):
self.driver = self.driver()
if self.fullscreen:
self.driver.maximize_window()
return self.driver
def run_headless_mode(self):
raise DriverException('Headless mode for browser ' + browser_name + ' is not supported yet\n')
def get_browser(browser_name):
    """Return a configured launcher instance for *browser_name*.

    :param browser_name: case-insensitive browser name resolved via the
        BROWSERS enum.
    :raises DriverException: when the name is not a supported browser.
    """
    class _Browsers:
        class Chrome(_Browser):
            def __init__(self):
                # Bug fix (all subclasses): initialize the base flags
                # (headless/selenium_host/fullscreen); the originals skipped
                # super().__init__(), leaving those attributes unset.
                super().__init__()
                self.driver = webdriver.Chrome
                self.capabilities = DesiredCapabilities.CHROME

            def run_headless_mode(self):
                options = webdriver.chrome.options.Options()
                options.add_argument('--headless')
                options.add_argument('--no-sandbox')
                options.add_argument('--disable-dev-shm-usage')
                return webdriver.Chrome(chrome_options=options)

        class Edge(_Browser):
            def __init__(self):
                super().__init__()
                self.driver = webdriver.Edge
                self.capabilities = DesiredCapabilities.EDGE

        class Firefox(_Browser):
            def __init__(self):
                super().__init__()
                self.driver = webdriver.Firefox
                self.capabilities = DesiredCapabilities.FIREFOX

            def run_headless_mode(self):
                options = webdriver.firefox.options.Options()
                options.headless = True
                # Bug fix: originally passed the undefined name 'option'.
                return webdriver.Firefox(options=options)

        class IE(_Browser):
            def __init__(self):
                super().__init__()
                self.driver = webdriver.Ie
                self.capabilities = DesiredCapabilities.INTERNETEXPLORER

        class Opera(_Browser):
            def __init__(self):
                super().__init__()
                self.driver = webdriver.Opera
                self.capabilities = DesiredCapabilities.OPERA

        class PhantomJS(_Browser):
            def __init__(self):
                super().__init__()
                self.driver = webdriver.PhantomJS
                self.capabilities = DesiredCapabilities.PHANTOMJS

        class Safari(_Browser):
            def __init__(self):
                super().__init__()
                self.driver = webdriver.Safari
                self.capabilities = DesiredCapabilities.SAFARI

    try:
        browser_class_name = getattr(BROWSERS, browser_name.upper())
        browser_class = getattr(_Browsers, browser_class_name)
        browser_obj = browser_class()
        return browser_obj
    except AttributeError:
        # Either the enum lookup or the class lookup failed.
        raise DriverException('Browser ' + browser_name + ' not supported\n')
|
from airflow.plugins_manager import AirflowPlugin
from idea_plugin import BigQueryToFeatherOperator
class IdeaPlugin(AirflowPlugin):
    """Airflow plugin that registers BigQueryToFeatherOperator."""
    name = "idea_plugin"
    operators = [BigQueryToFeatherOperator]
|
__all__ = ['sub_policies']
ops_names = [
'ShearX',
'ShearY',
'TranslateX',
'TranslateY',
'Rotate',
'AutoContrast',
'Invert',
'Equalize',
'Solarize',
'Posterize',
'Contrast',
'Color',
'Brightness',
'Sharpness',
'Cutout',
]
K = 2
sub_policies = []
def dfs(index=0, sub_policy=[], depth=0, sub_policies=[]):
if depth == K:
sub_policies += [tuple(sub_policy)]
return
for i, ops_name in enumerate(ops_names):
if i < index:
continue
dfs(i+1, sub_policy + [ops_name], depth+1, sub_policies)
dfs(index=0, sub_policy=[], depth=0, sub_policies=sub_policies)
|
import unittest
from katas.kyu_8.is_this_my_tail import correct_tail
class CorrectTailTestCase(unittest.TestCase):
    """Tests for correct_tail: does the body end with the given tail?"""
    def test_true(self):
        self.assertTrue(correct_tail('Fox', 'x'))
    def test_true_2(self):
        self.assertTrue(correct_tail('Rhino', 'o'))
    def test_true_3(self):
        self.assertTrue(correct_tail('Meerkat', 't'))
    def test_false(self):
        self.assertFalse(correct_tail('Emu', 't'))
    def test_false_2(self):
        self.assertFalse(correct_tail('Badger', 's'))
    def test_false_3(self):
        self.assertFalse(correct_tail('Giraffe', 'd'))
|
import json
import logging
import utc
import pika
import isodate
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from mettle.settings import get_settings
from mettle.models import Service, Pipeline, PipelineRun, PipelineRunNack, Job, Checkin
from mettle.lock import lock_and_announce_job
from mettle.db import make_session_cls
from mettle.notify import notify_failed_run
import mettle_protocol as mp
# Dispatcher-wide logger; INFO so pipeline/job lifecycle events are recorded.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def on_announce_service(settings, db, data):
    """Record a service announcement, creating the Service row if needed."""
    logger.info("Service announced: {service}".format(**data))
    try:
        service = db.query(Service).filter_by(name=data['service']).one()
    except NoResultFound:
        service = Service(
            name=data['service'],
            updated_by='dispatcher',
        )
        db.add(service)
    # Union the already-known pipeline names with the announced ones, so
    # multiple workers attending different pipelines can share one service.
    known = set(service.pipeline_names)
    announced = set(data['pipeline_names'])
    service.pipeline_names = list(known | announced)
def on_pipeline_run_ack(settings, rabbit, db, data):
    """Handle a worker's ack of a pipeline run: record targets and either
    close the run or kick off its ready jobs."""
    logger.info("Pipeline run ack {service}:{pipeline}:{run_id}".format(**data))
    run = db.query(PipelineRun).filter_by(id=data['run_id']).one()
    # Only the first ack sets ack_time; later acks leave it untouched.
    if run.ack_time is None:
        run.ack_time = utc.now()
    run.targets = data['targets']
    run.target_parameters = data.get('target_parameters', {})
    if run.is_ended(db):
        # Run already finished (e.g. no work to do): close it out.
        if run.end_time is None:
            run.end_time = utc.now()
        if run.all_targets_succeeded(db):
            run.succeeded = True
    else:
        # Launch a job for every target whose dependencies are satisfied.
        for target in run.get_ready_targets(db):
            job = run.make_job(db, target)
            if job:
                lock_and_announce_job(db, rabbit, job)
def on_pipeline_run_nack(settings, rabbit, db, data):
    """Record a pipeline run nack, optionally scheduling a re-announce."""
    logger.info("Pipeline run nack {service}:{pipeline}:{run_id}".format(**data))
    run = db.query(PipelineRun).filter_by(id=data['run_id']).one()
    reannounce = data['reannounce_time']
    if reannounce is None:
        # No re-announce requested: give up on this pipeline run.
        rtime = None
        run.ack_time = utc.now()
        run.end_time = utc.now()
    else:
        rtime = isodate.parse_datetime(reannounce)
    # Create a new nack record for this run.
    nack = PipelineRunNack(
        pipeline_run=run,
        message=data['message'],
        reannounce_time=rtime,
    )
    db.add(nack)
def on_job_claim(settings, rabbit, db, data, corr_id):
    """Grant or deny a worker's claim on a job.

    The filter on start_time=None plus the commit acts as the claim lock:
    if another worker already claimed (or the commit conflicts), we roll
    back and deny the grant.
    """
    try:
        job = db.query(Job).filter_by(
            id=data['job_id'],
            start_time=None,
        ).one()
        job.start_time = isodate.parse_datetime(data['start_time'])
        job.expires = isodate.parse_datetime(data['expires'])
        job.assigned_worker = data['worker_name']
        logger.info("Job claim %s:%s:%s:%s" % (
            job.pipeline_run_id,
            job.target,
            job.id,
            job.assigned_worker,
        ))
        # Commit BEFORE granting, so the worker never starts on a job whose
        # claim was not durably recorded.
        db.commit()
        mp.grant_job(rabbit, data['worker_name'], corr_id, True)
    except (OperationalError, NoResultFound):
        db.rollback()
        logger.info(("Claim of job {job_id} by worker {worker_name} failed. "
                     "Job already claimed").format(**data))
        mp.grant_job(rabbit, data['worker_name'], corr_id, False)
def on_job_end(settings, rabbit, db, data):
    """Record a job's completion, chain dependent jobs, and close the run
    when everything has finished.

    The explicit db.commit() calls below are ordering-critical: dependent
    jobs must see the completed job's state, and the job update must be
    persisted before the run is mutated.
    """
    logger.info("Job end {service}:{pipeline}:{job_id}".format(**data))
    end_time = isodate.parse_datetime(data['end_time'])
    job = db.query(Job).filter_by(id=data['job_id']).one()
    job.end_time = end_time
    # Missing 'succeeded' key is treated as failure.
    job.succeeded = data.get('succeeded') or False
    run = job.pipeline_run
    if job.succeeded:
        # See if this job was a dependency for any other targets. If so, check
        # if they're ready to be run now. If they are, kick them off.
        depending_targets = [t for t, deps in run.targets.items() if job.target
                             in deps]
        if depending_targets:
            # Make sure just-completed job state is saved before we query
            db.commit()
            for target in depending_targets:
                if run.target_is_ready(db, target):
                    new_job = run.make_job(db, target)
                    if new_job:
                        logger.info('Job %s chained from %s' % (new_job.id,
                                                                job.id))
                        lock_and_announce_job(db, rabbit, new_job)
    # Force the job update to be committed/published before we start making any
    # changes to the run.
    db.commit()
    if run.end_time is None:
        if run.target_is_failed(db, job.target):
            # A target has exhausted its retries: notify and close the run.
            notify_failed_run(db, run)
            run.end_time = end_time
        elif run.is_ended(db):
            if run.all_targets_succeeded(db):
                run.succeeded = True
            run.end_time = end_time
def main():
    """Dispatcher entry point: bind one queue to every relevant exchange and
    route each consumed message to its handler, acking only after commit."""
    settings = get_settings()
    rabbit_conn = pika.BlockingConnection(pika.URLParameters(settings.rabbit_url))
    rabbit = rabbit_conn.channel()
    mp.declare_exchanges(rabbit)
    queue_name = 'mettle_dispatcher'
    # Durable, non-exclusive queue so messages survive dispatcher restarts.
    rabbit.queue_declare(queue=queue_name, exclusive=False,
                         durable=True)
    rabbit.queue_bind(exchange=mp.ANNOUNCE_SERVICE_EXCHANGE,
                      queue=queue_name, routing_key='#')
    rabbit.queue_bind(exchange=mp.ACK_PIPELINE_RUN_EXCHANGE,
                      queue=queue_name, routing_key='#')
    rabbit.queue_bind(exchange=mp.NACK_PIPELINE_RUN_EXCHANGE,
                      queue=queue_name, routing_key='#')
    rabbit.queue_bind(exchange=mp.CLAIM_JOB_EXCHANGE,
                      queue=queue_name, routing_key='#')
    rabbit.queue_bind(exchange=mp.END_JOB_EXCHANGE,
                      queue=queue_name, routing_key='#')
    rabbit.queue_bind(exchange=settings.dispatcher_ping_exchange,
                      queue=queue_name,
                      routing_key='timer')
    Session = make_session_cls(settings.db_url)
    for method, properties, body in rabbit.consume(queue=queue_name):
        # Fresh session per message; handlers share the commit below.
        db = Session()
        if method.exchange == mp.ANNOUNCE_SERVICE_EXCHANGE:
            on_announce_service(settings, db, json.loads(body))
        elif method.exchange == mp.ACK_PIPELINE_RUN_EXCHANGE:
            on_pipeline_run_ack(settings, rabbit, db, json.loads(body))
        elif method.exchange == mp.NACK_PIPELINE_RUN_EXCHANGE:
            on_pipeline_run_nack(settings, rabbit, db, json.loads(body))
        elif method.exchange == mp.CLAIM_JOB_EXCHANGE:
            on_job_claim(settings, rabbit, db, json.loads(body),
                         properties.correlation_id)
        elif method.exchange == mp.END_JOB_EXCHANGE:
            on_job_end(settings, rabbit, db, json.loads(body))
        # get messages from process timer restart queue
        elif method.exchange == settings.dispatcher_ping_exchange:
            db.merge(Checkin(proc_name='dispatcher', time=utc.now()))
        # Commit before acking so a crash between the two re-delivers rather
        # than loses the message.
        db.commit()
        rabbit.basic_ack(method.delivery_tag)
|
# Exercise: award points by alien color — green=5, yellow=10, red=15.
# Bug fix: the yellow branch in the first and third snippets printed
# "5 point", inconsistent with the second snippet's "10 point".

# Green alien: the first branch matches.
alien_color = 'green'
if alien_color == 'green':
    print("player get 5 point")
elif alien_color == 'yellow':
    print("player get 10 point")
else:
    print("player get 15 point")

# Yellow alien: the elif branch matches.
alien_color = 'yellow'
if alien_color == 'green':
    print("player get 5 point")
elif alien_color == 'yellow':
    print("player get 10 point")
else:
    print("player get 15 point")

# Red alien: the else branch matches.
alien_color = 'red'
if alien_color == 'green':
    print("player get 5 point")
elif alien_color == 'yellow':
    print("player get 10 point")
else:
    print("player get 15 point")
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-21 13:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alter the startTime/endTime fields of
    rfidimagecompareaction to DateField with English verbose names."""
    dependencies = [
        ('goods', '0024_auto_20171221_1346'),
    ]
    operations = [
        migrations.AlterField(
            model_name='rfidimagecompareaction',
            name='endTime',
            field=models.DateField(verbose_name='end time'),
        ),
        migrations.AlterField(
            model_name='rfidimagecompareaction',
            name='startTime',
            field=models.DateField(verbose_name='start time'),
        ),
    ]
|
from typing import Dict
from streamlink.exceptions import StreamError
from streamlink.stream.stream import Stream
from streamlink.stream.wrappers import StreamIOIterWrapper, StreamIOThreadWrapper
class HTTPStream(Stream):
    """
    An HTTP stream using the :mod:`requests` library.
    """

    __shortname__ = "http"

    args: Dict
    """A dict of keyword arguments passed to :meth:`requests.Session.request`, such as method, headers, cookies, etc."""

    def __init__(
        self,
        session_,
        url: str,
        buffered: bool = True,
        **args
    ):
        """
        :param streamlink.Streamlink session_: Streamlink session instance
        :param url: The URL of the HTTP stream
        :param buffered: Wrap stream output in an additional reader-thread
        :param args: Additional keyword arguments passed to :meth:`requests.Session.request`
        """
        super().__init__(session_)
        self.args = {"url": url, **args}
        self.buffered = buffered

    def __json__(self):
        prepared = self.session.http.prepare_new_request(**self.args)
        return {
            "type": self.shortname(),
            "method": prepared.method,
            "url": prepared.url,
            "headers": dict(prepared.headers),
            "body": prepared.body,
        }

    def to_url(self):
        return self.url

    @property
    def url(self) -> str:
        """
        The URL to the stream, prepared by :mod:`requests` with parameters read from :attr:`args`.
        """
        return self.session.http.prepare_new_request(**self.args).url

    def open(self):
        request_args = self.session.http.valid_request_args(**self.args)
        request_args.setdefault("method", "GET")
        timeout = self.session.options.get("stream-timeout")
        response = self.session.http.request(
            stream=True,
            exception=StreamError,
            timeout=timeout,
            **request_args,
        )

        # Wrap the response iterator in a file-like reader; optionally add
        # a buffering thread on top.
        stream_fd = StreamIOIterWrapper(response.iter_content(8192))
        if not self.buffered:
            return stream_fd
        return StreamIOThreadWrapper(self.session, stream_fd, timeout=timeout)
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_api
from recipe_engine.types import freeze
# Recipe module dependencies resolved by the recipe engine.
DEPS = [
  'adb',
  'archive',
  'chromedriver',
  'chromium',
  'chromium_android',
  'commit_position',
  'depot_tools/bot_update',
  'depot_tools/gclient',
  'recipe_engine/platform',
  'recipe_engine/properties',
  'recipe_engine/step',
]
# Per-master/per-builder configuration consumed by RunSteps.
BUILDERS = freeze({
  'chromium.fyi': {
    'Android ChromeDriver Tests (dbg)': {
      'chromedriver_platform': 'android',
      'config': 'main_builder',
      'target': 'Debug',
      'update_test_log': True,
      # Browser packages the chromedriver tests run against.
      'android_packages': [
        'chrome_beta',
        'chrome_stable',
        'chromedriver_webview_shell',
        'chromium',
      ],
      # APKs installed on the device before testing.
      'install_apks': [
        'ChromeDriverWebViewShell.apk',
        'ChromePublic.apk',
      ],
    },
  },
})
REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
def RunSteps(api):
  """Check out, configure, and run the Android ChromeDriver test suite for
  the builder selected by the mastername/buildername properties.

  Step ordering matters: checkout -> hooks -> extract build -> device setup
  -> install APKs -> run tests -> final device steps.
  """
  mastername = api.properties['mastername']
  buildername = api.properties['buildername']
  builder = BUILDERS[mastername][buildername]
  api.chromium_android.configure_from_properties(
      builder['config'],
      REPO_NAME='src',
      REPO_URL=REPO_URL,
      INTERNAL=False,
      BUILD_CONFIG=builder['target'])
  api.chromium_android.apply_config('use_devil_provision')
  android_packages = builder.get('android_packages')
  update_test_log = builder.get('update_test_log')
  platform = builder.get('chromedriver_platform')
  api.gclient.set_config('chromium')
  api.gclient.apply_config('android')
  api.gclient.apply_config('chromedriver')
  api.bot_update.ensure_checkout()
  api.chromium.ensure_goma()
  api.chromium_android.clean_local_files()
  api.chromium.runhooks()
  api.chromium_android.run_tree_truth()
  # The build itself is produced by a parent builder; download and unzip it.
  api.archive.download_and_unzip_build(
      step_name='extract build',
      target=api.chromium.c.BUILD_CONFIG,
      build_url=None,
      build_archive_url=api.properties.get('parent_build_archive_url'))
  revision_cp = api.bot_update.last_returned_properties['got_revision_cp']
  commit_position = api.commit_position.parse_revision(revision_cp)
  api.chromium_android.common_tests_setup_steps(skip_wipe=True)
  if builder['install_apks']:
    for apk in builder['install_apks']:
      api.chromium_android.adb_install_apk(apk)
  api.chromedriver.download_prebuilts()
  # Track failure instead of aborting so the results log and device
  # teardown still run before the recipe fails.
  passed = True
  try:
    api.chromedriver.run_all_tests(
        android_packages=android_packages,
        archive_server_logs=True)
  except api.step.StepFailure:
    passed = False
  if update_test_log:
    api.chromedriver.update_test_results_log(platform, commit_position, passed)
  api.chromium_android.common_tests_final_steps()
  if not passed:
    raise api.step.StepFailure('Test failures')
def GenTests(api):
  """Yield recipe simulation cases: one passing run and one test failure."""
  # Builder names contain spaces/parens; make them safe for test names.
  sanitize = lambda s: ''.join(c if c.isalnum() else '_' for c in s)
  yield (
      api.test('%s_basic' % sanitize('Android ChromeDriver Tests (dbg)')) +
      api.properties.generic(
          buildername='Android ChromeDriver Tests (dbg)',
          bot_id='bot_id',
          mastername='chromium.fyi') +
      api.properties(
          parent_build_archive_url='gs://test-domain/test-archive.zip',
          got_revision='4f4b02f6b7fa20a3a25682c457bbc8ad589c8a00',
          got_revision_cp='refs/heads/master@{#333333}'))
  # Same setup, but force a java_tests step failure to exercise the
  # passed=False path in RunSteps.
  yield (
      api.test(
          '%s_test_failure' % sanitize('Android ChromeDriver Tests (dbg)')) +
      api.properties.generic(
          buildername='Android ChromeDriver Tests (dbg)',
          bot_id='bot_id',
          mastername='chromium.fyi') +
      api.properties(
          parent_build_archive_url='gs://test-domain/test-archive.zip',
          got_revision='4f4b02f6b7fa20a3a25682c457bbc8ad589c8a00',
          got_revision_cp='refs/heads/master@{#333333}') +
      api.step_data('java_tests chrome_stable.Run Tests', retcode=1))
|
import csv
import scipy.misc
from random import shuffle
import cv2
from skimage.util import random_noise
from numpy.random import uniform as random
import numpy as np
class data_handler(object):
    def __init__(self, validation_split = 0.2, batch_size = 128, left_and_right_images = False, root_path = '', left_right_offset = 0.2, test_root_path = '', test_left_and_right_images = False):
        """Load driving-log CSV metadata, split it into train/validation,
        and set up batch-generation state.

        Reads 'driving_log.csv' and 'test_driving_log.csv' from the current
        directory; rows are presumably (center, left, right, steering, ...)
        as produced by the Udacity simulator — confirm against the CSVs.
        """
        # Name of file where metadata is present
        filename = 'driving_log.csv'
        test_filename = 'test_driving_log.csv'
        self.left_and_right_images = left_and_right_images
        self.left_right_offset = left_right_offset
        self.metadata = []
        # loading metadata
        with open(filename, 'r') as f:
            reader = csv.reader(f)
            i = 0
            for row in reader:
                self.metadata.append(row)
        # removing first row if it has column names
        # (reverse/pop/reverse is equivalent to dropping the first element)
        if(self.metadata[0][0]=='center'):
            self.metadata.reverse()
            self.metadata.pop()
            self.metadata.reverse()
        # shuffle the training data
        shuffle(self.metadata)
        self.test_metadata = []
        # loading metadata
        with open(test_filename, 'r') as f:
            reader = csv.reader(f)
            i = 0
            for row in reader:
                self.test_metadata.append(row)
        # removing first row if it has column names
        if(self.test_metadata[0][0]=='center'):
            self.test_metadata.reverse()
            self.test_metadata.pop()
            self.test_metadata.reverse()
        # splitting into training and validation set
        if(validation_split<1.0):
            self.metadata_train = self.metadata[0:int((1-validation_split)*len(self.metadata))]
            if(not validation_split==0):
                self.metadata_val = self.metadata[int((1-validation_split)*len(self.metadata)):]
        else:
            print("Validation split can't be 1.")
            raise Exception("Validation split not valid.")
        # setting batch size
        self.batch_size = batch_size
        # setting current training step (in the beginning we are at the 0th step)
        self.step_train = 0
        # setting current validation step (in the beginning we are at the 0th step)
        self.step_val = 0
        # setting current validation step (in the beginning we are at the 0th test step)
        self.step_test = 0
        # root path of images
        self.root_path = root_path
        # root path of test images
        self.test_root_path = test_root_path
        # left and right images for
        self.test_left_and_right_images = test_left_and_right_images
    def generate_train_batch(self):
        """Infinite generator yielding (X_train, y_train) training batches.

        Each metadata row contributes the center image, plus the left/right
        images (with +/- steering offset) when enabled. The branching on a
        leading space or 'C' normalizes the image paths stored in the CSV
        (some are absolute 'C:'-style, some relative, some space-prefixed).
        Relies on self.get_image_and_steering, defined elsewhere in the class.
        """
        while 1:
            X_train = []
            y_train = []
            # start and end of current batch
            start = self.step_train*self.batch_size
            end = (self.step_train+1)*self.batch_size
            # if number of training samples are not a multiple of batch size
            if(end>=len(self.metadata_train)):
                end = len(self.metadata_train)
                # restart from the beginning
                self.step_train = 0
                shuffle(self.metadata_train)
            # load images and steering angles for current batch
            for j in range(start,end,1):
                # Column 0 is the center-image path; prefix root_path unless
                # the path is already absolute (starts with 'C').
                if(not self.metadata_train[j][0][0] == 'C'):
                    center_path = self.root_path+self.metadata_train[j][0]
                else:
                    center_path = self.metadata_train[j][0]
                center_steer = [float(self.metadata_train[j][3])]
                # X_train.append(self.get_image(self.root_path+self.metadata_train[j][0]))
                # y_train.append([float(self.metadata_train[j][3])])
                center_image, center_steer[0] = self.get_image_and_steering(center_path,center_steer[0])
                X_train.append(center_image)
                y_train.append(center_steer)
                if(self.left_and_right_images):
                    # Column 1 is the left-image path; strip a leading space
                    # and prefix root_path for relative paths.
                    if(self.metadata_train[j][1][0] == ' ' and not self.metadata_train[j][1][1]=='C'):
                        left_path = self.root_path+self.metadata_train[j][1][1:]
                    elif(self.metadata_train[j][1][0] == ' ' and self.metadata_train[j][1][1]=='C'):
                        left_path = self.metadata_train[j][1][1:]
                    elif(self.metadata_train[j][1][0] == 'C'):
                        left_path = self.metadata_train[j][1]
                    else:
                        left_path = self.root_path + self.metadata_train[j][1]
                    # Left camera: steer more to the right (positive offset).
                    left_steer = [float(self.metadata_train[j][3])+self.left_right_offset]
                    # Column 2 is the right-image path; same normalization.
                    if(self.metadata_train[j][2][0] == ' ' and not self.metadata_train[j][2][1]=='C'):
                        right_path = self.root_path+self.metadata_train[j][2][1:]
                    elif(self.metadata_train[j][2][0] == ' ' and self.metadata_train[j][2][1]=='C'):
                        right_path = self.metadata_train[j][2][1:]
                    elif(self.metadata_train[j][2][0] == 'C'):
                        right_path = self.metadata_train[j][2]
                    else:
                        right_path = self.root_path + self.metadata_train[j][2]
                    # Right camera: steer more to the left (negative offset).
                    right_steer = [float(self.metadata_train[j][3])-self.left_right_offset]
                    left_image, left_steer[0] = self.get_image_and_steering(left_path, left_steer[0])
                    right_image, right_steer[0] = self.get_image_and_steering(right_path, right_steer[0])
                    X_train.append(left_image)
                    y_train.append(left_steer)
                    X_train.append(right_image)
                    y_train.append(right_steer)
                    # X_train.append(self.get_image(self.root_path+self.metadata_train[j][1][1:]))
                    # y_train.append([float(self.metadata_train[j][3])+self.left_right_offset])
                    # X_train.append(self.get_image(self.root_path+self.metadata_train[j][2][1:]))
                    # y_train.append([float(self.metadata_train[j][3])-self.left_right_offset])
            # incrementing step
            self.step_train = self.step_train + 1
            yield (X_train, y_train)
def generate_validation_batch(self):
    """Yield successive validation batches of (images, steering_angles).

    Infinite generator: when the end of the validation metadata is
    reached, the index wraps to 0 and the metadata is reshuffled.
    Each metadata row is [center_path, left_path, right_path, steering, ...].

    Fix: the original duplicated the same 4-branch path normalization for
    the left and the right camera; it is factored into one local helper
    (also applied to the center path, which previously did not strip a
    stray leading space -- NOTE(review): CSV rows appear to carry a space
    after each comma; confirm center paths never start with one).
    """
    def resolve_path(raw):
        # Strip the leading space left by "comma-space" CSV formatting,
        # then prefix with root_path unless the entry is already a full
        # path (the original code treats a leading 'C' as that marker).
        if raw[0] == ' ':
            raw = raw[1:]
        return raw if raw[0] == 'C' else self.root_path + raw

    while 1:
        X_val = []
        y_val = []
        # start and end of current batch
        start = self.step_val * self.batch_size
        end = (self.step_val + 1) * self.batch_size
        # if number of validation samples is not a multiple of batch size
        if end >= len(self.metadata_val):
            end = len(self.metadata_val)
            # restart from the beginning
            self.step_val = 0
            shuffle(self.metadata_val)
        # load images and steering angles for current batch
        for j in range(start, end):
            center_steer = [float(self.metadata_val[j][3])]
            center_image, center_steer[0] = self.get_image_and_steering(
                resolve_path(self.metadata_val[j][0]), center_steer[0])
            X_val.append(center_image)
            y_val.append(center_steer)
            if self.left_and_right_images:
                # side cameras get a steering offset so the model learns
                # to recover toward the lane center
                steer_left = [float(self.metadata_val[j][3]) + self.left_right_offset]
                steer_right = [float(self.metadata_val[j][3]) - self.left_right_offset]
                image_left, steer_left[0] = self.get_image_and_steering(
                    resolve_path(self.metadata_val[j][1]), steer_left[0])
                image_right, steer_right[0] = self.get_image_and_steering(
                    resolve_path(self.metadata_val[j][2]), steer_right[0])
                X_val.append(image_left)
                y_val.append(steer_left)
                X_val.append(image_right)
                y_val.append(steer_right)
        # advance to the next batch
        self.step_val = self.step_val + 1
        yield (X_val, y_val)
def generate_test_batch(self):
    """Yield successive test batches as (X_test, y_test, total_batches).

    Infinite generator over ``self.test_metadata``; wraps around and
    reshuffles when the end is reached. Unlike the train/val generators,
    left/right test images are always prefixed with
    ``self.test_root_path`` and have one leading character (the CSV's
    stray space) stripped.
    """
    while 1:
        X_test = []
        y_test = []
        # start and end of the current batch
        start = self.step_test*self.batch_size
        end = (self.step_test+1)*self.batch_size
        # wrap around when the metadata is exhausted
        if(end >= len(self.test_metadata)):
            end = len(self.test_metadata)
            self.step_test = 0
            shuffle(self.test_metadata)
        for j in range(start,end):
            center_path = self.root_path +self.test_metadata[j][0]
            center_steer = [float(self.test_metadata[j][3])]
            # X_val.append(self.get_image(self.root_path+self.metadata_val[j][0]))
            # y_val.append([float(self.metadata_val[j][3])])
            center_image, center_steer[0] = self.get_image_and_steering(center_path, center_steer[0])
            X_test.append(center_image)
            y_test.append(center_steer)
            if(self.test_left_and_right_images):
                # side cameras: offset steering toward the lane center
                path_left = self.test_root_path + self.test_metadata[j][1][1:]
                steer_left = [float(self.test_metadata[j][3])+self.left_right_offset]
                path_right = self.test_root_path + self.test_metadata[j][2][1:]
                steer_right = [float(self.test_metadata[j][3])-self.left_right_offset]
                image_left, steer_left[0] = self.get_image_and_steering(path_left,steer_left[0])
                image_right, steer_right[0] = self.get_image_and_steering(path_right, steer_right[0])
                X_test.append(image_left)
                y_test.append(steer_left)
                X_test.append(image_right)
                y_test.append(steer_right)
        # advance to the next batch
        self.step_test = self.step_test + 1
        yield X_test, y_test, int(len(self.test_metadata)/self.batch_size)
def set_root_image_path(self, path):
    """Point the generator at a new base directory for image files."""
    self.root_path = path
def move_to_start_train(self):
    """Rewind the training generator to its first batch."""
    self.step_train = 0
def move_to_start_val(self):
    """Rewind the validation generator to its first batch."""
    self.step_val = 0
def num_train_batches(self):
    """Number of full training batches available per epoch."""
    return len(self.metadata_train) // self.batch_size
def num_val_batches(self):
    """Number of full validation batches available per epoch."""
    return len(self.metadata_val) // self.batch_size
def add_noise(self,x):
    """Apply additive Gaussian noise via skimage's random_noise.

    NOTE(review): random_noise returns a float image scaled to [0, 1] --
    confirm callers expect that range rather than uint8.
    """
    return random_noise(x, mode='gaussian')
def get_image_and_steering(self,path,steering):
    """Load one frame, randomly augment it, and return (image, steering).

    The frame is cropped to rows 25:135, resized to 66x200, randomly
    augmented (saturation/lightness/inversion/shadow/translation/flip),
    converted RGB->YUV and scaled to [-0.5, 0.5]. Translation and flip
    also adjust the steering label.
    NOTE(review): scipy.misc.imread/imresize were removed in SciPy >= 1.3;
    this code requires an old SciPy (with Pillow) to run.
    """
    image = scipy.misc.imresize(scipy.misc.imread(path)[25:135], [66, 200])
    if(self.coin_flip()):
        image = self.random_saturation_change(image)
    if(self.coin_flip()):
        image = self.random_lightness_change(image)
    if(self.coin_flip()):
        # photographic negative half the time
        image = self.invert_image(image)
    image = self.random_shadow(image)
    # translation perturbs the steering label as well
    image, steering = self.random_translation(image,steering)
    if(self.coin_flip()):
        # mirroring negates the steering angle
        image, steering = self.horizontal_flip_image(image,steering)
    image = cv2.cvtColor(image, cv2.COLOR_RGB2YUV)
    return (image/255.0)-0.5, steering
def coin_flip(self):
    """Return True with probability 0.5 (a fair coin toss)."""
    draw = random()
    return draw < 0.5
def make_yuv_grey_scale(self, x):
    """Zero the U and V planes of a YUV image, keeping only luma."""
    x = np.array(x)
    x[:, :, 1:3] = 0
    return x
def random_gamma_correction_rgb(self, x):
    """Apply gamma correction with gamma drawn uniformly from [0.4, 1.6).

    Lookup-table approach partially taken from
    http://www.pyimagesearch.com/2015/10/05/opencv-gamma-correction/
    """
    gamma = 0.4 + random() * 1.2
    inv_gamma = 1.0 / gamma
    # map every possible 8-bit level through the gamma curve
    lut = np.array(
        [((level / 255.0) ** inv_gamma) * 255 for level in np.arange(0, 256)]
    ).astype("uint8")
    return cv2.LUT(x, lut)
def random_brightness_change_rgb(self, x):
    """Scale the HSV value (brightness) channel by a random factor in [0.4, 1.6).

    Fix: the scaled channel is clipped to [0, 255] before being written
    back; previously values above 255 wrapped around when cast back to
    uint8, turning the brightest pixels dark.
    """
    brightness_change = 0.4 + random()*1.2
    x = np.array(x)
    x = cv2.cvtColor(x,cv2.COLOR_RGB2HSV)
    x[:,:,2] = np.clip(x[:,:,2]*brightness_change, 0, 255)
    return cv2.cvtColor(x,cv2.COLOR_HSV2RGB)
def random_saturation_change(self, x):
    """Scale the HSV saturation channel by a random factor in [0, 1.5).

    Fix: the scaled channel is clipped to [0, 255] before being written
    back; previously values above 255 wrapped around when cast back to
    uint8, producing corrupted colors.
    """
    saturation_change = 1.5*random()
    x = np.array(x)
    x = cv2.cvtColor(x,cv2.COLOR_RGB2HSV)
    x[:,:,1] = np.clip(x[:,:,1]*saturation_change, 0, 255)
    return cv2.cvtColor(x,cv2.COLOR_HSV2RGB)
def invert_image(self, x):
    """Return the photographic negative (255 - pixel) of an 8-bit image."""
    return 255 - x
def random_lightness_change(self, x):
    """Scale the HLS lightness channel by a random factor in [0.2, 1.6).

    Fix: the scaled channel is clipped to [0, 255] before being written
    back; previously values above 255 wrapped around when cast back to
    uint8, turning the lightest pixels dark.
    """
    lightness_change = 0.2 + 1.4*random()
    x = np.array(x)
    x = cv2.cvtColor(x,cv2.COLOR_RGB2HLS)
    x[:,:,1] = np.clip(x[:,:,1]*lightness_change, 0, 255)
    return cv2.cvtColor(x,cv2.COLOR_HLS2RGB)
def random_translation(self,x,steer):
    """Randomly shift the image (+/-30 px in x, +/-10 px in y) and
    compensate the steering label for the horizontal shift.

    The same random draw drives both the x-shift and the steering
    correction: a full +30 px shift adds +0.2 to the steering angle.
    """
    x = np.array(x)
    rows,cols,rgb = x.shape
    rand_for_x = random()
    translate_y = -10 + random()*20
    translate_x = -30 + rand_for_x*60
    # affine matrix for a pure translation
    M = np.float32([[1,0,translate_x],[0,1,translate_y]])
    return cv2.warpAffine(x,M,(cols,rows)), (steer+(rand_for_x-0.5)*0.4)
# def random_translation(self,x,steer):
# x = np.array(x)
# rows,cols,rgb = x.shape
#
# rand_for_x = random()
# rand_for_y = random()
#
# translate_y = -15 + rand_for_y*30
# translate_x = -30 + rand_for_x*60
#
# M = np.float32([[1,0,translate_x],[0,1,translate_y]])
#
# return cv2.warpAffine(x,M,(cols,rows)), ((steer+(rand_for_x-0.5)*0.27))
def random_rotation_image(self, x):
    """Rotate the image about its center by a random angle in [-1.5, 1.5) degrees."""
    x = np.array(x)
    rows, cols, _ = x.shape
    angle = 3 * (random() - 0.5)
    rotation = cv2.getRotationMatrix2D((cols / 2, rows / 2), angle, 1)
    return cv2.warpAffine(x, rotation, (cols, rows))
def horizontal_flip_image(self, x, steer):
    """Mirror the image left-right and negate the steering angle."""
    mirrored = cv2.flip(np.array(x), 1)
    return mirrored, -steer
def random_shadow(self,x):
    """Darken a random quadrilateral of the frame to simulate a shadow.

    Works in HSV: a random polygon anchored to either the left or the
    right edge has its V channel scaled down by region_of_interest, then
    the image is converted back to RGB.
    """
    x = cv2.cvtColor(x,cv2.COLOR_RGB2HSV)
    # dimensions are hard-coded to the 200x66 resize done upstream
    max_x = 200
    max_y = 66
    if(self.coin_flip()):
        # polygon hugging the left edge
        i_1 = (0,0)
        i_2 = (0,max_y)
        i_3 = (random()*max_x,max_y)
        i_4 = (random()*max_x,0)
    else:
        # polygon hugging the right edge
        i_1 = (random()*max_x,0)
        i_2 = (random()*max_x,max_y)
        i_3 = (max_x,max_y)
        i_4 = (max_x,0)
    vertices = np.array([[i_1,i_2,i_3,i_4]], dtype = np.int32)
    x = self.region_of_interest(x,vertices)
    x = cv2.cvtColor(x,cv2.COLOR_HSV2RGB)
    return x
def random_blur(self, x):
    """Gaussian-blur the image with a random odd kernel size between 1 and 9."""
    size = 1 + int(random() * 9)
    if size % 2 == 0:
        # GaussianBlur requires an odd kernel size
        size += 1
    return cv2.GaussianBlur(x, (size, size), 0)
def region_of_interest(self,x, vertices):
    """Scale the channel-2 (HSV value) intensity inside ``vertices`` by 0.13.

    Expects ``x`` in HSV. Despite the names, this darkens the polygon
    (used by random_shadow to cast a fake shadow) and the factor is
    fixed, not random.
    """
    random_brightness = 0.13
    mask = np.zeros_like(x)
    # paint the polygon into channel 2 of the mask
    ignore_mask_color = [0,0,255]
    cv2.fillPoly(mask, vertices, ignore_mask_color)
    indices = mask[:,:,2] == 255
    # NOTE(review): the float product is cast back to the array dtype
    # (uint8) on assignment -- values truncate toward zero
    x[:,:,2][indices] = x[:,:,2][indices]*random_brightness
    return x
def cut_top(self,x):
    """Black out the top 33 rows of a 200-wide RGB frame.

    Same polygon-masking trick as region_of_interest, but with a
    brightness factor of 0: the V channel of the masked region is
    zeroed in HSV space, then the image is converted back to RGB.
    """
    x = cv2.cvtColor(x,cv2.COLOR_RGB2HSV)
    # rectangle covering rows 0..33 across the full 200-px width
    vertices = np.array([[(0,0),(200,0),(200,33),(0,33)]],np.int32)
    random_brightness = 0
    mask = np.zeros_like(x)
    ignore_mask_color = [0,0,255]
    cv2.fillPoly(mask, vertices, ignore_mask_color)
    indices = mask[:,:,2] == 255
    x[:,:,2][indices] = x[:,:,2][indices]*random_brightness
    x = cv2.cvtColor(x,cv2.COLOR_HSV2RGB)
    return x
|
from django.conf import settings
from social_core.backends.azuread_tenant import AzureADTenantOAuth2
from social_core.backends.google import GoogleOAuth2
def social_uid(backend, details, response, *args, **kwargs):
    """Social-auth pipeline step: extract uid and user fields for a login.

    Dispatches on the concrete backend: Azure AD tenant and Google logins
    get full user details pulled from the OAuth response; every other
    backend falls back to the backend-provided user id.
    """
    if settings.AZUREAD_TENANT_OAUTH2_ENABLED and isinstance(backend, AzureADTenantOAuth2):
        # Azure AD account: the UPN doubles as both username and email.
        upn = response.get('upn')
        return {
            'username': upn,
            'email': upn,
            'fullname': response.get('name', ''),
            'first_name': response.get('given_name', ''),
            'last_name': response.get('family_name', ''),
            'uid': backend.get_user_id(details, response),
        }
    elif settings.GOOGLE_OAUTH_ENABLED and isinstance(backend, GoogleOAuth2):
        # Google account: prefer the OIDC subject, then email, then legacy id.
        if 'sub' in response:
            google_uid = response['sub']
        elif 'email' in response:
            google_uid = response['email']
        else:
            google_uid = response['id']
        email = response.get('email')
        return {
            'username': email,
            'email': email,
            'fullname': response.get('fullname', ''),
            'first_name': response.get('first_name', ''),
            'last_name': response.get('last_name', ''),
            'uid': google_uid,
        }
    else:
        uid = backend.get_user_id(details, response)
        # Used for most backends
        if uid:
            return {'uid': uid}
        # Until OKTA PR in social-core is merged
        # This modified way needs to work
        else:
            return {'uid': response.get('preferred_username')}
def modify_permissions(backend, uid, user=None, social=None, *args, **kwargs):
    """Social-auth pipeline step: strip staff status from newly created users.

    Fix: guard against ``user`` being None (pipeline steps can run before a
    user object exists), which previously raised AttributeError.
    NOTE(review): the mutation is not saved here -- it relies on a later
    pipeline step persisting the user; confirm the pipeline ordering.
    """
    if kwargs.get('is_new') and user is not None:
        user.is_staff = False
|
from argparse import Namespace
from datetime import datetime
from os import listdir
from os.path import isfile, join
from gras.base_miner import BaseMiner
from gras.pipermail.downloader import Downloader
BUGS_URL = "bugs.python.org"
class CPythonMiner(BaseMiner):
    """Parses CPython new-bug announcements out of pipermail archive dumps.

    Files under ``path`` are expected to be named ``YYYY-Month.<ext>`` and
    are processed in chronological order by ``process()``.
    """

    def __init__(self, args, url, path):
        # BaseMiner may raise for the bare Namespace used by the __main__
        # stanza below; tolerate it and continue with local setup.
        try:
            super().__init__(args=args)
        except AttributeError:
            pass
        self.url = url    # pipermail base URL (only used by download_files)
        self.path = path  # directory holding the downloaded archive dumps
        self.files = [f for f in listdir(self.path) if isfile(join(self.path, f))]
        # chronological order via the "YYYY-Month" prefix of each file name
        self.files.sort(key=lambda x: datetime.strptime(x.split('.')[0], "%Y-%B"))
        self.process()

    def load_from_file(self, file):
        # BaseMiner interface hook; unused by this miner.
        pass

    def dump_to_file(self, path):
        # BaseMiner interface hook; unused by this miner.
        pass

    def download_files(self):
        """Fetch the mailing-list archives from self.url into self.path."""
        downloader = Downloader(url=self.url, path=self.path)
        downloader.process()

    @staticmethod
    def _parse(path):
        """Parse one archive dump into a {issue_url: issue_dict} mapping.

        Line-oriented state machine: ``toggle`` is False while reading
        "Key: value" header lines and True while accumulating the free-text
        message body, which ends at a "----------" rule. An issue record is
        closed (and stored) when its ``<http://bugs.python.org/issueNNNN>``
        URL line is seen.
        """
        issues = {}
        with open(path, "r") as fp:
            obj = {}          # fields of the issue currently being read
            toggle = False    # True => inside the message body
            message = []      # accumulated body lines
            for line in fp.readlines():
                if not toggle:
                    if BUGS_URL in line and "http://" not in line:
                        continue
                    elif "http://" + BUGS_URL in line:
                        # the "<http://bugs.python.org/issueNNNN>" line
                        # closes and stores the current record
                        value = line[line.find("<") + 1:line.find(">")]
                        obj["number"] = int(value.split("issue")[-1])
                        issues[value.strip()] = obj.copy()
                        obj.clear()
                        message.clear()
                    elif "New submission from" in line:
                        # start of the message body; capture the author
                        # email if it is present in angle brackets
                        value = line.split("New submission from")[1]
                        if '<' in line:
                            obj["author_email"] = value[value.find("<") + 1:value.find(">")].strip()
                        else:
                            obj["author_email"] = None
                        toggle = True
                    elif ":" in line:
                        temp = line.split(":", maxsplit=1)
                        key, value = temp[0].strip().lower(), temp[1].strip()
                        if key == "from":
                            # "From: email (Real Name)" -> keep the name
                            key = "author"
                            value = value[value.find("(") + 1:value.find(")")].strip()
                        elif key == "date":
                            try:
                                value = datetime.strptime(value, "%a, %d %b %Y %H:%M:%S %z")
                            except ValueError:
                                # some archives omit the timezone offset
                                value = datetime.strptime(value, "%a, %d %b %Y %H:%M:%S")
                        elif key == "subject":
                            value = value.split("[New-bugs-announce]")[1].strip()
                        elif key in ["nosy", "files", "components", "versions", "keywords"]:
                            # comma-separated list headers
                            value = value.split(",")
                        elif key in ["severity", "priority", "status", "title", "type", "assignee", "message-id"]:
                            value = value.strip()
                        else:
                            # NOTE(review): looks like leftover debug output
                            # for unrecognized headers; the header is dropped.
                            print(key)
                            continue
                        obj[key] = value
                    else:
                        continue
                else:
                    if "----------" in line:
                        # end of message body
                        obj["message"] = "\n".join(message)
                        toggle = False
                    else:
                        message.append(line.strip())
        return issues

    def process(self):
        """Parse the archives in order; pretty-print the first file's issues.

        NOTE(review): the ``break`` stops after the first file -- looks like
        a debugging harness rather than a full mining pass; confirm intent.
        """
        from pprint import pprint
        for f in self.files:
            issues = self._parse(join(self.path, f))
            for iss in issues:
                pprint(issues[iss])
            break
# Ad-hoc smoke test. NOTE(review): the data path is hard-coded to a
# developer machine, and url=None means download_files cannot be used here.
if __name__ == '__main__':
    CPythonMiner(args=Namespace(), url=None, path="/home/mahen/PycharmProjects/GRAS/custom_miners/cpython/data")
|
import os
import csv
import json
import pickle
import logging
from random import choice, randint, shuffle
from django.core.exceptions import ObjectDoesNotExist
import python_football
from settings.base import SITE_ROOT
from .models import Playbook, City, Nickname, Team
from people import names
from people.models import Coach, Player
from teams.models import get_draft_position_order
CSV_SOURCE_DIR = os.path.join(SITE_ROOT, 'teams', 'csv_source_files')
## Initialization Functions
def create_playbook():
    """Create and persist the single default 'Basic' playbook row.

    NOTE(review): ``json.dumps(pickle.dumps(...))`` only works on Python 2,
    where pickle.dumps returns ``str``; on Python 3 it returns ``bytes`` and
    json.dumps raises TypeError. The use of ``xrange`` elsewhere in this
    module suggests this is Python 2 code -- confirm before porting.
    """
    playbook = Playbook(name='Basic',
                        plays=json.dumps(pickle.dumps(python_football.new_playbook())))
    playbook.save()
def initialize_cities():
    """Bulk-load City rows from metroareas.csv.

    Column layout: name, state, pro, semipro, amateur, region, division;
    the three level flags are stored as 0/1 integers in the CSV.
    """
    with open(os.path.join(CSV_SOURCE_DIR, 'metroareas.csv'), 'r') as cities_file:
        reader = csv.reader(cities_file, delimiter=',')
        cities = [
            City(
                name=row[0],
                state=row[1],
                pro=bool(int(row[2])),
                semipro=bool(int(row[3])),
                amateur=bool(int(row[4])),
                region=row[5],
                division=row[6],
            )
            for row in reader
        ]
    City.objects.bulk_create(cities)
def initialize_nicknames():
    """Bulk-load Nickname rows from nicknames.csv (name, pro flag, semipro flag)."""
    with open(os.path.join(CSV_SOURCE_DIR, 'nicknames.csv'), 'r') as nicknames_file:
        reader = csv.reader(nicknames_file, delimiter=',')
        nicknames = [
            Nickname(
                name=row[0],
                pro=bool(int(row[1])),
                semipro=bool(int(row[2])),
            )
            for row in reader
        ]
    Nickname.objects.bulk_create(nicknames)
# TODO investigate better way of testing presence of data
def initialize_team_source_data():
    """Seed the playbook, city, and nickname tables if they look empty.

    Presence is tested by trying to fetch pk=1 for each model -- crude,
    as the TODO above notes, but cheap.
    """
    seeders = (
        (Playbook, create_playbook),
        (City, initialize_cities),
        (Nickname, initialize_nicknames),
    )
    for model, seed in seeders:
        try:
            model.objects.get(id=1)
        except ObjectDoesNotExist:
            seed()
## Universe Creation Functions
def determine_number_pro_teams(universe):
    """Average, across positions, of players aged 23+ with rating 70+.

    Used to size league creation. NOTE: on Python 2 (this module uses
    ``xrange``) the division truncates to an int.
    """
    positions = ['qb', 'rb', 'wr', 'og', 'c', 'ot', 'dt', 'de', 'lb', 'cb', 's', 'k', 'p']
    position_counts = [
        Player.objects.filter(universe=universe,
                              position=pos.upper(),
                              age__gte=23,
                              ratings__gte=70).count()
        for pos in positions
    ]
    return sum(position_counts) / len(position_counts)
def create_initial_universe_teams(universe, level):
    """Create coaches and teams for a new universe at the given level.

    Args:
        universe: the universe the teams belong to.
        level: 'any', 'pro', 'semipro', or 'amateur'.
    Raises:
        ValueError: if ``level`` is not one of the accepted values.

    Fix: the invalid-level branch referenced ``HttpResponse``, which is not
    imported in this module (NameError at runtime); a non-view helper
    should raise instead so callers can handle the error.
    NOTE(review): the while-extend loops never terminate if the relevant
    City/Nickname querysets are empty -- confirm the tables are seeded
    (initialize_team_source_data) before this is called.
    """
    logger = logging.getLogger('django.request')
    number_teams = determine_number_pro_teams(universe)
    cities = []
    nicknames = []
    # repeat the available cities/nicknames until there are enough to pop from
    if level == 'any':
        while len(cities) < number_teams:
            cities.extend(City.objects.all())
        while len(nicknames) < number_teams:
            nicknames.extend(Nickname.objects.all())
    elif level in ['pro', 'semipro', 'amateur']:
        level_filter = {level: True}
        while len(cities) < number_teams:
            cities.extend(City.objects.filter(**level_filter))
        while len(nicknames) < number_teams:
            nicknames.extend(Nickname.objects.filter(**level_filter))
    else:
        raise ValueError("Invalid level for team creation.")
    shuffle(cities)
    shuffle(nicknames)
    coaches = [Coach(universe=universe,
                     first_name=names.first_name(),
                     last_name=names.last_name(),
                     skill=randint(60, 90),
                     play_probabilities=json.dumps({}),
                     fg_dist_probabilities=json.dumps({})
                     ) for x in xrange(int(number_teams))]
    for coach in coaches:
        coach.save()
    teams = [Team(universe=universe,
                  city=cities.pop(),
                  nickname=nicknames.pop(),
                  human_control=False,
                  home_field_advantage=randint(1, 3),
                  draft_position_order=get_draft_position_order(),
                  coach=coaches.pop(),
                  playbook=Playbook.objects.get(id=1)) for x in xrange(int(number_teams))]
    Team.objects.bulk_create(teams)
    logger.info('{0} teams created in universe {1}'.format(number_teams, universe.name))
from functools import reduce
from itertools import permutations
from typing import Dict
from typing import Optional
from typing import Tuple
import torch
from torch_complex.tensor import ComplexTensor
from typeguard import check_argument_types
from espnet2.enh.decoder.abs_decoder import AbsDecoder
from espnet2.enh.encoder.abs_encoder import AbsEncoder
from espnet2.enh.encoder.conv_encoder import ConvEncoder
from espnet2.enh.separator.abs_separator import AbsSeparator
from espnet2.torch_utils.device_funcs import force_gatherable
from espnet2.train.abs_espnet_model import AbsESPnetModel
import pdb
ALL_LOSS_TYPES = (
# mse_loss(predicted_mask, target_label)
"mask_mse",
# mse_loss(enhanced_magnitude_spectrum, target_magnitude_spectrum)
"magnitude",
# mse_loss(enhanced_complex_spectrum, target_complex_spectrum)
"spectrum",
# log_mse_loss(enhanced_complex_spectrum, target_complex_spectrum)
"spectrum_log",
# si_snr(enhanced_waveform, target_waveform)
"si_snr",
)
class ESPnetEnhancementTIModel(AbsESPnetModel):
"""Speech enhancement or separation Frontend model"""
def __init__(
    self,
    enh_model: Optional["AbsEnhancement"],
    stft_consistency: bool = False,
    component_loss: bool = False
):
    """Wrap an enhancement model and configure its training losses.

    Args:
        enh_model: the underlying enhancement/separation model.
        stft_consistency: compute the TF-domain loss while enforcing STFT
            consistency (incompatible with mask_mse / si_snr losses).
        component_loss: additionally penalize per-component (speech/noise)
            reconstruction in _compute_loss.

    Fixes: (1) ``AbsEnhancement`` is never imported, so the bare annotation
    raised NameError at class-definition time -- it is now a string forward
    reference; (2) ``stft_consistency`` is stored, since _compute_loss reads
    ``self.stft_consistency``; (3) the consistency check referenced the
    undefined local ``loss_type``; (4) ``ref_channel`` is read off
    ``self.enh_model`` -- ``self.separator`` is never assigned on this class.
    """
    assert check_argument_types()
    super().__init__()
    self.enh_model = enh_model
    self.num_spk = enh_model.num_spk
    self.num_noise_type = getattr(self.enh_model, "num_noise_type", 1)
    self.component_loss = component_loss
    # remember the flag; _compute_loss dispatches on self.stft_consistency
    self.stft_consistency = stft_consistency
    # get mask type for TF-domain models (only used when loss_type="mask_*")
    self.mask_type = getattr(self.enh_model, "mask_type", None)
    # get loss type for model training
    self.loss_type = getattr(self.enh_model, "loss_type", None)
    # whether to compute the TF-domain loss while enforcing STFT consistency
    if stft_consistency and self.loss_type in ["mask_mse", "si_snr"]:
        raise ValueError(
            f"stft_consistency will not work when '{self.loss_type}' loss is used"
        )
    assert self.loss_type in ALL_LOSS_TYPES, self.loss_type
    # for multi-channel signal
    self.ref_channel = getattr(self.enh_model, "ref_channel", -1)
def load(self, *custom_pretrain_paths):
    """Forward any pretrained-weight paths to the wrapped enhancement model."""
    if not custom_pretrain_paths:
        return
    self.enh_model.load(*custom_pretrain_paths)
@staticmethod
def _create_mask_label(mix_spec, ref_spec, mask_type="IAM"):
    """Create mask label.

    Args:
        mix_spec: ComplexTensor(B, T, F)
        ref_spec: List[ComplexTensor(B, T, F), ...]
        mask_type: str
    Returns:
        labels: List[Tensor(B, T, F), ...] or List[ComplexTensor(B, T, F), ...]
    """
    assert mask_type in [
        "IBM",
        "IRM",
        "IAM",
        "PSM",
        "NPSM",
        "PSM^2",
    ], f"mask type {mask_type} not supported"
    eps = 10e-8
    mask_label = []
    for r in ref_spec:
        mask = None
        if mask_type == "IBM":
            # ideal binary mask: 1 only where this source's magnitude
            # dominates every reference
            flags = [abs(r) >= abs(n) for n in ref_spec]
            mask = reduce(lambda x, y: x * y, flags)
            mask = mask.int()
        elif mask_type == "IRM":
            # TODO(Wangyou): need to fix this,
            # as noise references are provided separately
            mask = abs(r) / (sum(([abs(n) for n in ref_spec])) + eps)
        elif mask_type == "IAM":
            # ideal amplitude mask, clipped to [0, 1]
            mask = abs(r) / (abs(mix_spec) + eps)
            mask = mask.clamp(min=0, max=1)
        elif mask_type == "PSM" or mask_type == "NPSM":
            # phase-sensitive mask: amplitude ratio weighted by the cosine
            # of the phase difference between reference and mixture
            phase_r = r / (abs(r) + eps)
            phase_mix = mix_spec / (abs(mix_spec) + eps)
            # cos(a - b) = cos(a)*cos(b) + sin(a)*sin(b)
            cos_theta = (
                phase_r.real * phase_mix.real + phase_r.imag * phase_mix.imag
            )
            mask = (abs(r) / (abs(mix_spec) + eps)) * cos_theta
            # NPSM restricts to non-negative values
            mask = (
                mask.clamp(min=0, max=1)
                if mask_type == "NPSM"
                else mask.clamp(min=-1, max=1)
            )
        elif mask_type == "PSM^2":
            # This is for training beamforming masks
            phase_r = r / (abs(r) + eps)
            phase_mix = mix_spec / (abs(mix_spec) + eps)
            # cos(a - b) = cos(a)*cos(b) + sin(a)*sin(b)
            cos_theta = (
                phase_r.real * phase_mix.real + phase_r.imag * phase_mix.imag
            )
            mask = (abs(r).pow(2) / (abs(mix_spec).pow(2) + eps)) * cos_theta
            mask = mask.clamp(min=-1, max=1)
        assert mask is not None, f"mask type {mask_type} not supported"
        mask_label.append(mask)
    return mask_label
def forward(
    self,
    speech_mix: torch.Tensor,
    text_ref1: torch.Tensor,
    speech_mix_lengths: torch.Tensor = None,
    text_ref1_lengths: torch.Tensor = None,
    mode = -1,
    **kwargs,
) -> Tuple[torch.Tensor, Dict[str, torch.Tensor], torch.Tensor]:
    """Frontend + Encoder + Decoder + Calc loss

    Args:
        speech_mix: (Batch, samples) or (Batch, samples, channels)
        speech_ref: (Batch, num_speaker, samples)
                    or (Batch, num_speaker, samples, channels)
        speech_mix_lengths: (Batch,), default None for chunk interator,
                            because the chunk-iterator does not have the
                            speech_lengths returned. see in
                            espnet2/iterators/chunk_iter_factory.py
        mode: -1 for normal training; other values enable the GAN branch
              and require "gan_ref" in kwargs.
    """
    # pdb.set_trace()
    # clean speech signal of each speaker
    speech_ref = [
        kwargs["speech_ref{}".format(spk + 1)] for spk in range(self.num_spk)
    ]
    # (Batch, num_speaker, samples) or (Batch, num_speaker, samples, channels)
    speech_ref = torch.stack(speech_ref, dim=1)
    if "noise_ref1" in kwargs:
        # noise signal (optional, required when using
        # frontend models with beamformering)
        noise_ref = [
            kwargs["noise_ref{}".format(n + 1)] for n in range(self.num_noise_type)
        ]
        # (Batch, num_noise_type, samples) or
        # (Batch, num_noise_type, samples, channels)
        noise_ref = torch.stack(noise_ref, dim=1)
    else:
        noise_ref = None
    # GAN real/simulated labels, only used when mode != -1
    if mode != -1:
        gan_ref = kwargs["gan_ref"]
    else:
        gan_ref = None
    # dereverberated (noisy) signal
    # (optional, only used for frontend models with WPE)
    if "dereverb_ref1" in kwargs:
        # noise signal (optional, required when using
        # frontend models with beamformering)
        dereverb_speech_ref = [
            kwargs["dereverb_ref{}".format(n + 1)]
            for n in range(self.num_spk)
            if "dereverb_ref{}".format(n + 1) in kwargs
        ]
        assert len(dereverb_speech_ref) in (1, self.num_spk), len(
            dereverb_speech_ref
        )
        # (Batch, N, samples) or (Batch, N, samples, channels)
        dereverb_speech_ref = torch.stack(dereverb_speech_ref, dim=1)
    else:
        dereverb_speech_ref = None
    batch_size = speech_mix.shape[0]
    # chunk iterator supplies no lengths: assume full-length utterances
    speech_lengths = (
        speech_mix_lengths
        if speech_mix_lengths is not None
        else torch.ones(batch_size).int() * speech_mix.shape[1]
    )
    assert speech_lengths.dim() == 1, speech_lengths.shape
    # Check that batch_size is unified
    assert speech_mix.shape[0] == speech_ref.shape[0] == speech_lengths.shape[0], (
        speech_mix.shape,
        speech_ref.shape,
        speech_lengths.shape,
    )
    # for data-parallel
    speech_ref = speech_ref[:, :, : speech_lengths.max()]
    speech_mix = speech_mix[:, : speech_lengths.max()]
    # speech_mix, speech_lengths = self.ti_frontend(speech_mix, text_ref_1, speech_lengths, text_lengths)
    # NOTE(review): _compute_loss only returns this 8-tuple on its si_snr
    # path; the TF-loss paths return 6 values, which would fail to unpack
    # here -- confirm loss_type is always "si_snr" for this wrapper.
    loss, speech_pre, mask_pre, out_lengths, perm, si_snr_loss, disc_loss, stats_ = self._compute_loss(
        speech_mix,
        text_ref1,
        speech_lengths,
        text_ref1_lengths,
        speech_ref,
        dereverb_speech_ref=dereverb_speech_ref,
        noise_ref=noise_ref,
        gan_ref=gan_ref,
        mode=mode
    )
    # pdb.set_trace()
    # add stats for logging
    if self.loss_type != "si_snr":
        if self.training:
            # convert predicted spectra back to waveforms for SI-SNR logging
            speech_pre = [
                self.enh_model.stft.inverse(ps, speech_lengths)[0]
                for ps in speech_pre
            ]
            speech_ref = torch.unbind(speech_ref, dim=1)
            if speech_ref[0].dim() == 3:
                # For si_snr loss, only select one channel as the reference
                speech_ref = [sr[..., self.ref_channel] for sr in speech_ref]
            # compute si-snr loss
            si_snr_loss, perm = self._permutation_loss(
                speech_ref, speech_pre, self.si_snr_loss, perm=perm
            )
            si_snr = -si_snr_loss.detach()
            # compute si_snr(clean_mag*mix_phase, clean_mag*clean_phase)
            # List[ComplexTensor(Batch, T, F)] or List[ComplexTensor(Batch, T, C, F)]
            spectrum_ref = [
                ComplexTensor(*torch.unbind(self.enh_model.stft(sr)[0], dim=-1))
                for sr in speech_ref
            ]
            spectrum_mix = self.enh_model.stft(speech_mix)[0]
            spectrum_mix = ComplexTensor(spectrum_mix[..., 0], spectrum_mix[..., 1])
            # NOTE(review): leftover debug print
            print('type of spectrum_ref: {}, type of spectrum_mix {}'.format(type(spectrum_ref),type(spectrum_mix)))
            # NOTE(review): magnitude_mix is computed but never used; the
            # list comprehensions iterate the ComplexTensor over its first
            # (batch) dimension -- confirm that is intended.
            magnitude_mix = [abs(ps) for ps in spectrum_mix]
            phase_mix = [ps/abs(ps + 1e-15) for ps in spectrum_mix]
            if spectrum_ref[0].dim() > magnitude_mix[0].dim():
                # only select one channel as the reference
                magnitude_ref = [
                    abs(sr[..., self.ref_channel, :]) for sr in spectrum_ref
                ]
            else:
                magnitude_ref = [abs(sr) for sr in spectrum_ref]
            assert len(magnitude_ref) == len(phase_mix), 'len(magnitude_ref) != len(phase_mix) {} != {}'.format(len(magnitude_ref), len(phase_mix))
            # oracle-magnitude signal: clean magnitude with mixture phase
            spectrum_h = [magnitude_ref[i] * phase_mix[i] for i in range(len(magnitude_ref))]
            speech_h = [
                self.enh_model.stft.inverse(ps, speech_lengths)[0]
                for ps in spectrum_h
            ]
            si_snr_loss1, perm = self._permutation_loss(
                speech_ref, speech_h, self.si_snr_loss, perm=perm
            )
            si_snr1 = -si_snr_loss1.detach()
        else:
            speech_pre = [
                self.enh_model.stft.inverse(ps, speech_lengths)[0]
                for ps in speech_pre
            ]
            speech_ref = torch.unbind(speech_ref, dim=1)
            if speech_ref[0].dim() == 3:
                # For si_snr loss, only select one channel as the reference
                speech_ref = [sr[..., self.ref_channel] for sr in speech_ref]
            # compute si-snr loss
            si_snr_loss, perm = self._permutation_loss(
                speech_ref, speech_pre, self.si_snr_loss, perm=perm
            )
            si_snr = -si_snr_loss.detach()
            si_snr1 = None
        stats = dict(
            si_snr=si_snr,
            si_snr1=si_snr1,
            loss=loss.detach(),
        )
    else:
        stats = dict(si_snr=-si_snr_loss.detach(), loss=loss.detach(), disc_loss=disc_loss.detach())
    stats.update(stats_)
    # force_gatherable: to-device and to-tensor if scalar for DataParallel
    loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
    return loss, stats, weight
def _compute_loss(
    self,
    speech_mix,
    text,
    speech_lengths,
    text_lengths,
    speech_ref,
    dereverb_speech_ref=None,
    noise_ref=None,
    gan_ref=None,
    cal_loss=True,
    mode=-1
):
    # mode: 0:G, 1:D, -1:normal
    # pdb.set_trace()
    """Compute loss according to self.loss_type.

    Args:
        speech_mix: (Batch, samples) or (Batch, samples, channels)
        speech_lengths: (Batch,), default None for chunk interator,
                        because the chunk-iterator does not have the
                        speech_lengths returned. see in
                        espnet2/iterators/chunk_iter_factory.py
        speech_ref: (Batch, num_speaker, samples)
                    or (Batch, num_speaker, samples, channels)
        dereverb_speech_ref: (Batch, num_speaker, samples)
                             or (Batch, num_speaker, samples, channels)
        noise_ref: (Batch, num_speaker, samples)
                   or (Batch, num_speaker, samples, channels)
        cal_loss: whether to calculate enh loss, defualt is True
    Returns:
        loss: (torch.Tensor) speech enhancement loss
        speech_pre: (List[torch.Tensor] or List[ComplexTensor])
                    enhanced speech or spectrum(s)
        mask_pre: (OrderedDict) estimated masks or None
        output_lengths: (Batch,)
        perm: () best permutation

    NOTE(review): the si_snr branch returns an 8-tuple (adding
    si_snr_loss and disc_loss) while the TF branches return 6 values;
    forward() unpacks 8 -- confirm only the si_snr path is exercised.
    """
    stats = dict()
    if self.component_loss:
        # NOTE(review): `assert False` disables this whole branch --
        # looks intentional (feature off); confirm.
        assert False
        assert self.num_spk == 1, self.num_spk
        speech_hat = self.enh_model.forward_rawwav(speech_ref[:, 0], text, speech_lengths, text_lengths)[0]
        loss_speech, perm_sp = self._permutation_loss(
            speech_ref.unbind(dim=1), speech_hat, self.si_snr_loss_zeromean
        )
        stats["component_speech"] = loss_speech.detach()
        if noise_ref is not None:
            noise_hat = self.enh_model.forward_rawwav(noise_ref[:, 0], text, speech_lengths, text_lengths)[0]
            loss_noise_distortion, perm_n = self._permutation_loss(
                noise_ref.unbind(dim=1), noise_hat, self.si_snr_loss_zeromean
            )
            # penalize residual noise energy in addition to distortion
            loss_noise_scale = (
                torch.stack([n.pow(2).sum() for n in noise_hat], dim=0).sum()
                / noise_hat[0].shape[0]
            )
            loss_noise = loss_noise_distortion + loss_noise_scale
            stats["component_noise"] = loss_noise.detach()
        else:
            loss_noise = 0
    else:
        loss_speech, loss_noise = 0, 0
    disc_loss = torch.tensor(0.)
    if self.loss_type != "si_snr":
        # --- TF-domain losses (magnitude / spectrum / mask) ---
        spectrum_mix = self.enh_model.stft(speech_mix)[0]
        spectrum_mix = ComplexTensor(spectrum_mix[..., 0], spectrum_mix[..., 1])
        # predict separated speech and masks
        if self.stft_consistency:
            # pseudo STFT -> time-domain -> STFT (compute loss)
            speech_pre, speech_lengths, mask_pre = self.enh_model.forward_rawwav(
                speech_mix, speech_lengths
            )
            if speech_pre is not None:
                spectrum_pre = []
                for sp in speech_pre:
                    spec_pre, tf_length = self.enh_model.stft(sp, speech_lengths)
                    spectrum_pre.append(spec_pre)
            else:
                spectrum_pre = None
                _, tf_length = self.enh_model.stft(speech_mix, speech_lengths)
        else:
            # compute loss on pseudo STFT directly
            spectrum_pre, tf_length, mask_pre = self.enh_model(
                speech_mix, speech_lengths
            )
        # normalize real/imag stacked output into ComplexTensor form
        if spectrum_pre is not None and not isinstance(
            spectrum_pre[0], ComplexTensor
        ):
            spectrum_pre = [
                ComplexTensor(*torch.unbind(sp, dim=-1)) for sp in spectrum_pre
            ]
        if not cal_loss:
            loss, perm = None, None
            return loss, spectrum_pre, mask_pre, tf_length, perm, stats
        # prepare reference speech and reference spectrum
        speech_ref = torch.unbind(speech_ref, dim=1)
        # List[ComplexTensor(Batch, T, F)] or List[ComplexTensor(Batch, T, C, F)]
        spectrum_ref = [
            ComplexTensor(*torch.unbind(self.enh_model.stft(sr)[0], dim=-1))
            for sr in speech_ref
        ]
        # compute TF masking loss
        if self.loss_type == "magnitude":
            # compute loss on magnitude spectrum
            assert spectrum_pre is not None
            magnitude_pre = [abs(ps + 1e-15) for ps in spectrum_pre]
            if spectrum_ref[0].dim() > magnitude_pre[0].dim():
                # only select one channel as the reference
                magnitude_ref = [
                    abs(sr[..., self.ref_channel, :]) for sr in spectrum_ref
                ]
            else:
                magnitude_ref = [abs(sr) for sr in spectrum_ref]
            tf_loss, perm = self._permutation_loss(
                magnitude_ref, magnitude_pre, self.tf_mse_loss
            )
        elif self.loss_type.startswith("spectrum"):
            # compute loss on complex spectrum
            if self.loss_type == "spectrum":
                loss_func = self.tf_mse_loss
            elif self.loss_type == "spectrum_log":
                loss_func = self.tf_log_mse_loss
            else:
                raise ValueError("Unsupported loss type: %s" % self.loss_type)
            assert spectrum_pre is not None
            if spectrum_ref[0].dim() > spectrum_pre[0].dim():
                # only select one channel as the reference
                spectrum_ref = [sr[..., self.ref_channel, :] for sr in spectrum_ref]
            tf_loss, perm = self._permutation_loss(
                spectrum_ref, spectrum_pre, loss_func
            )
        elif self.loss_type.startswith("mask"):
            if self.loss_type == "mask_mse":
                loss_func = self.tf_mse_loss
            else:
                raise ValueError("Unsupported loss type: %s" % self.loss_type)
            assert mask_pre is not None
            mask_pre_ = [
                mask_pre["spk{}".format(spk + 1)] for spk in range(self.num_spk)
            ]
            # prepare ideal masks
            mask_ref = self._create_mask_label(
                spectrum_mix, spectrum_ref, mask_type=self.mask_type
            )
            # compute TF masking loss
            tf_loss, perm = self._permutation_loss(mask_ref, mask_pre_, loss_func)
            if "dereverb1" in mask_pre:
                # additional loss on the WPE dereverberation masks
                if dereverb_speech_ref is None:
                    raise ValueError(
                        "No dereverberated reference for training!\n"
                        'Please specify "--use_dereverb_ref true" in run.sh'
                    )
                mask_wpe_pre = [
                    mask_pre["dereverb{}".format(spk + 1)]
                    for spk in range(self.num_spk)
                    if "dereverb{}".format(spk + 1) in mask_pre
                ]
                assert len(mask_wpe_pre) == dereverb_speech_ref.size(1), (
                    len(mask_wpe_pre),
                    dereverb_speech_ref.size(1),
                )
                dereverb_speech_ref = torch.unbind(dereverb_speech_ref, dim=1)
                dereverb_spectrum_ref = [
                    ComplexTensor(*torch.unbind(self.enh_model.stft(dr)[0], dim=-1))
                    for dr in dereverb_speech_ref
                ]
                dereverb_mask_ref = self._create_mask_label(
                    spectrum_mix, dereverb_spectrum_ref, mask_type=self.mask_type
                )
                tf_dereverb_loss, perm_d = self._permutation_loss(
                    dereverb_mask_ref, mask_wpe_pre, loss_func
                )
                tf_loss = tf_loss + tf_dereverb_loss
            if "noise1" in mask_pre:
                # additional loss on the noise estimation masks
                if noise_ref is None:
                    raise ValueError(
                        "No noise reference for training!\n"
                        'Please specify "--use_noise_ref true" in run.sh'
                    )
                noise_ref = torch.unbind(noise_ref, dim=1)
                noise_spectrum_ref = [
                    ComplexTensor(*torch.unbind(self.enh_model.stft(nr)[0], dim=-1))
                    for nr in noise_ref
                ]
                noise_mask_ref = self._create_mask_label(
                    spectrum_mix, noise_spectrum_ref, mask_type=self.mask_type
                )
                mask_noise_pre = [
                    mask_pre["noise{}".format(n + 1)]
                    for n in range(self.num_noise_type)
                ]
                tf_noise_loss, perm_n = self._permutation_loss(
                    noise_mask_ref, mask_noise_pre, loss_func
                )
                tf_loss = tf_loss + tf_noise_loss
        else:
            raise ValueError("Unsupported loss type: %s" % self.loss_type)
        loss = tf_loss
        loss = loss + loss_speech + loss_noise
        return loss, spectrum_pre, mask_pre, tf_length, perm, stats
    else:
        # --- time-domain SI-SNR loss (optionally with GAN terms) ---
        if speech_ref is not None and speech_ref.dim() == 4:
            # For si_snr loss of multi-channel input,
            # only select one channel as the reference
            speech_ref = speech_ref[..., self.ref_channel]
        if mode == -1:
            speech_pre, speech_lengths, *__ = self.enh_model.forward_rawwav(
                speech_mix, text, speech_lengths, text_lengths
            )
        else:
            # GAN modes also need the latent representation for the
            # discriminator. NOTE(review): self.disc_model is not assigned
            # in __init__ here -- presumably set externally; confirm.
            speech_pre, speech_lengths, _, latent, zlens = self.enh_model.forward_rawwav(
                speech_mix, text, speech_lengths, text_lengths, return_latent=True
            )
            if mode == 1:
                # discriminator update: do not backprop into the generator
                latent = latent.detach()
            category_pre = self.disc_model(latent, zlens)
        if not cal_loss:
            loss, perm = None, None
            return loss, speech_pre, None, speech_lengths, perm, stats
        # speech_pre: list[(batch, sample)]
        assert speech_pre[0].dim() == 2, speech_pre[0].dim()
        speech_ref = torch.unbind(speech_ref, dim=1)
        si_snr_loss, perm = self._permutation_loss(
            speech_ref, speech_pre, self.si_snr_loss_zeromean
        )
        # pdb.set_trace()
        if mode == 0:
            # generator update: LSGAN-style loss plus SI-SNR
            loss_real = ((category_pre[gan_ref == 1] - 1) ** 2).sum()
            loss_simu = ((category_pre[gan_ref == 0]) ** 2).sum()
            disc_loss = (loss_real + loss_simu) / category_pre.shape[0]
            loss = disc_loss + si_snr_loss
        elif mode == 1:
            # discriminator update: LSGAN loss only
            loss_real = ((category_pre[gan_ref == 1] - 1) ** 2).sum()
            loss_simu = ((category_pre[gan_ref == 0]) ** 2).sum()
            disc_loss = (loss_real + loss_simu) / category_pre.shape[0]
            loss = disc_loss
        elif mode == -1:
            loss = si_snr_loss
        else:
            raise Exception(f"no supported mode with gan_ref: {mode}")
        # pdb.set_trace()
        loss = loss + loss_speech + loss_noise
        return loss, speech_pre, None, speech_lengths, perm, si_snr_loss, disc_loss, stats
@staticmethod
def tf_mse_loss(ref, inf):
    """time-frequency MSE loss.

    Squared error between reference and estimated spectra, averaged over
    every non-batch dimension.

    Args:
        ref: (Batch, T, F) or (Batch, T, C, F)
        inf: (Batch, T, F) or (Batch, T, C, F)
    Returns:
        loss: (Batch,)
    """
    assert ref.shape == inf.shape, (ref.shape, inf.shape)
    err = ref - inf
    # Complex spectra: |err|^2 = re^2 + im^2; real spectra: plain square.
    if isinstance(err, ComplexTensor):
        sq_err = err.real ** 2 + err.imag ** 2
    else:
        sq_err = err ** 2
    ndim = ref.dim()
    if ndim not in (3, 4):
        raise ValueError(
            "Invalid input shape: ref={}, inf={}".format(ref.shape, inf.shape)
        )
    # Mean over all dims except batch: [1, 2] or [1, 2, 3].
    return sq_err.mean(dim=list(range(1, ndim)))
@staticmethod
def tf_log_mse_loss(ref, inf):
    """time-frequency log-MSE loss.

    Total error power per batch element, expressed in dB (10 * log10).

    Args:
        ref: (Batch, T, F) or (Batch, T, C, F)
        inf: (Batch, T, F) or (Batch, T, C, F)
    Returns:
        loss: (Batch,)
    """
    assert ref.shape == inf.shape, (ref.shape, inf.shape)
    err = ref - inf
    if isinstance(err, ComplexTensor):
        power = err.real ** 2 + err.imag ** 2
    else:
        power = err ** 2
    ndim = ref.dim()
    if ndim not in (3, 4):
        raise ValueError(
            "Invalid input shape: ref={}, inf={}".format(ref.shape, inf.shape)
        )
    # Sum (not mean) over non-batch dims, then convert to decibels.
    return 10 * torch.log10(power.sum(dim=list(range(1, ndim))))
@staticmethod
def tf_l1_loss(ref, inf):
    """time-frequency L1 loss.

    Mean absolute error between spectra, averaged over non-batch dims.

    Args:
        ref: (Batch, T, F) or (Batch, T, C, F)
        inf: (Batch, T, F) or (Batch, T, C, F)
    Returns:
        loss: (Batch,)
    """
    assert ref.shape == inf.shape, (ref.shape, inf.shape)
    # Complex case adds a tiny offset before the magnitude, as in the
    # original implementation (keeps the magnitude strictly positive).
    if isinstance(inf, ComplexTensor):
        abs_err = abs(ref - inf + 1e-15)
    else:
        abs_err = abs(ref - inf)
    ndim = ref.dim()
    if ndim not in (3, 4):
        raise ValueError(
            "Invalid input shape: ref={}, inf={}".format(ref.shape, inf.shape)
        )
    return abs_err.mean(dim=list(range(1, ndim)))
@staticmethod
def si_snr_loss(ref, inf):
"""SI-SNR loss
Args:
ref: (Batch, samples)
inf: (Batch, samples)
Returns:
loss: (Batch,)
"""
ref = ref / torch.norm(ref, p=2, dim=1, keepdim=True)
inf = inf / torch.norm(inf, p=2, dim=1, keepdim=True)
s_target = (ref * inf).sum(dim=1, keepdims=True) * ref
e_noise = inf - s_target
si_snr = 20 * torch.log10(
torch.norm(s_target, p=2, dim=1) / torch.norm(e_noise, p=2, dim=1)
)
return -si_snr
@staticmethod
def si_snr_loss_zeromean(ref, inf):
"""SI-SNR loss with zero-mean in pre-processing.
Args:
ref: (Batch, samples)
inf: (Batch, samples)
Returns:
loss: (Batch,)
"""
eps = 1e-8
assert ref.size() == inf.size()
B, T = ref.size()
# mask padding position along T
# Step 1. Zero-mean norm
mean_target = torch.sum(ref, dim=1, keepdim=True) / T
mean_estimate = torch.sum(inf, dim=1, keepdim=True) / T
zero_mean_target = ref - mean_target
zero_mean_estimate = inf - mean_estimate
# Step 2. SI-SNR with order
# reshape to use broadcast
s_target = zero_mean_target # [B, T]
s_estimate = zero_mean_estimate # [B, T]
# s_target = <s', s>s / ||s||^2
pair_wise_dot = torch.sum(s_estimate * s_target, dim=1, keepdim=True) # [B, 1]
s_target_energy = torch.sum(s_target ** 2, dim=1, keepdim=True) + eps # [B, 1]
pair_wise_proj = pair_wise_dot * s_target / s_target_energy # [B, T]
# e_noise = s' - s_target
e_noise = s_estimate - pair_wise_proj # [B, T]
# SI-SNR = 10 * log_10(||s_target||^2 / ||e_noise||^2)
pair_wise_si_snr = torch.sum(pair_wise_proj ** 2, dim=1) / (
torch.sum(e_noise ** 2, dim=1) + eps
)
# print('pair_si_snr',pair_wise_si_snr[0,:])
pair_wise_si_snr = 10 * torch.log10(pair_wise_si_snr + eps) # [B]
# print(pair_wise_si_snr)
return -1 * pair_wise_si_snr
@staticmethod
def _permutation_loss(ref, inf, criterion, perm=None):
"""The basic permutation loss function.
Args:
ref (List[torch.Tensor]): [(batch, ...), ...] x n_spk
inf (List[torch.Tensor]): [(batch, ...), ...]
criterion (function): Loss function
perm (torch.Tensor): specified permutation (batch, num_spk)
Returns:
loss (torch.Tensor): (batch)
perm (torch.Tensor): (batch, num_spk)
e.g. tensor([[1, 0, 2], [0, 1, 2]])
"""
assert len(ref) == len(inf), (len(ref), len(inf))
num_spk = len(ref)
def pair_loss(permutation):
return sum(
[criterion(ref[s], inf[t]) for s, t in enumerate(permutation)]
) / len(permutation)
if perm is None:
device = ref[0].device
all_permutations = list(permutations(range(num_spk)))
losses = torch.stack([pair_loss(p) for p in all_permutations], dim=1)
loss, perm = torch.min(losses, dim=1)
perm = torch.index_select(
torch.tensor(all_permutations, device=device, dtype=torch.long),
0,
perm,
)
else:
loss = torch.tensor(
[
torch.tensor(
[
criterion(
ref[s][batch].unsqueeze(0), inf[t][batch].unsqueeze(0)
)
for s, t in enumerate(p)
]
).mean()
for batch, p in enumerate(perm)
]
)
return loss.mean(), perm
def collect_feats(
    self, speech_mix: torch.Tensor, speech_mix_lengths: torch.Tensor, **kwargs
) -> Dict[str, torch.Tensor]:
    """Expose the raw mixture as features for statistics collection.

    Data-parallel replication may pad the batch beyond the longest
    utterance in this shard, so trim back to the actual maximum length.
    """
    trimmed = speech_mix[:, : speech_mix_lengths.max()]
    return {"feats": trimmed, "feats_lengths": speech_mix_lengths}
|
# -*- coding: utf-8 -*-
import bpy
def hRH(pos): #height RIGHT HAND
    """Pose the right arm of armature '3' at one of six preset heights.

    pos: 0=idle, 1=stomach, 2=chest, 3=neck, 4=face, 5=above head.
    Any other value falls back to idle and prints a warning.
    """
    hand = bpy.data.objects['3'].pose.bones['hand.ik.R']
    elbow = bpy.data.objects['3'].pose.bones['elbow.pt.ik.R']
    clavicle = bpy.data.objects['3'].pose.bones['clavicle.R']
    # Elbow / clavicle keep their initial pose unless a preset overrides them.
    crh = (0, -0.2, 0)  # elbow initial position
    clp = (1, 0, 0.1, 0.05)  # clavicle initial position
    idle = ((0.18, -0.04, 0.32), (0.834, 0.38, -0.2, -0.3))
    if pos == 0:  # IDLE
        hrh, rrh = idle
    elif pos == 1:  # STOMACH
        hrh, rrh = (0.25, -0.067, 0.22), (0.12, -0.1, -0.02, -0.2)
    elif pos == 2:  # CHEST
        hrh, rrh = (0.23, -0.16, 0.19), (0.595, -0.5, -0.013, -0.5)
    elif pos == 3:  # NECK
        hrh, rrh = (0.39, -0.2, 0.08), (0.5, -0.62, -0.055, -0.6)
    elif pos == 4:  # FACE
        hrh, rrh = (0.51, -0.26, 0.04), (0.476, -0.621, -0, -0.6)
        clp = (0.96, 0.25, 0, 0.1)
        crh = (0.16, -0.11, 0.44)
    elif pos == 5:  # HEAD+
        hrh, rrh = (0.59, -0.4, -0.02), (0.38, -0.71, -0.226, -0.54)
        crh = (0.045, 0.06, 0.7)
        clp = (0.986, 0.16, 0.1, 0.03)
    else:
        hrh, rrh = idle
        print('Only numbers from 0 to 5')
    hand.location = hrh
    elbow.location = crh
    hand.rotation_quaternion = rrh
    clavicle.rotation_quaternion = clp
#hRH(0) #example, uncomment and run script to test
def hLH(pos): #height LEFT HAND
    """Pose the left arm of armature '3' at one of six preset heights.

    pos: 0=idle, 1=stomach, 2=chest, 3=neck, 4=face, 5=above head.
    Any other value falls back to idle and prints a warning.
    """
    hand = bpy.data.objects['3'].pose.bones['hand.ik.L']
    elbow = bpy.data.objects['3'].pose.bones['elbow.pt.ik.L']
    clavicle = bpy.data.objects['3'].pose.bones['clavicle.L']
    # Elbow / clavicle keep their initial pose unless a preset overrides them.
    clh = (0.02, -0.1, 0)  # elbow initial position
    clp = (1, 0, 0, 0)  # clavicle initial position
    idle = ((-0.12, -0.01, 0.33), (0.72, 0.46, 0.227, 0.45))
    if pos == 0:  # IDLE
        hlh, rlh = idle
    elif pos == 1:  # STOMACH
        hlh, rlh = (-0.218, -0.08, 0.2), (1.2, -0.636, 0, 1.19)
    elif pos == 2:  # CHEST
        hlh, rlh = (-0.26, -0.077, 0.16), (0.5, -0.5, 0.034, 0.56)
    elif pos == 3:  # NECK
        hlh, rlh = (-0.4, -0.13, 0.095), (0.412, -0.547, 0.4, 0.6)
    elif pos == 4:  # FACE
        hlh, rlh = (-0.47, -0.2, 0.01), (0.4, -0.77, 0.1, 0.47)
        clh = (-0.218, -0.15, -0.043)
        clp = (0.9, 0.2, -0.265, -0.1)
    elif pos == 5:  # HEAD+
        hlh, rlh = (-0.52, -0.4, -0.01), (0.48, -0.676, 0.13, 0.54)
        clh = (-0.31, -0.25, 0.2)
        clp = (0.965, 0.1, -0.85, 0.2)
    else:
        hlh, rlh = idle
        print ('Only numbers from 0 to 5')
    hand.location = hlh
    elbow.location = clh
    hand.rotation_quaternion = rlh
    clavicle.rotation_quaternion = clp
#hLH(3) #example, uncomment and run script to test
################################################################################
def dLH(pos): #distance from the body LEFT HAND
    """Shift the left hand sideways relative to the body.

    pos: 0 = centred, 1 = away from the body, -1 = across to the opposite
    side (which also repositions the elbow target).
    """
    hand = bpy.data.objects['3'].pose.bones['hand.ik.L']
    elbow = bpy.data.objects['3'].pose.bones['elbow.pt.ik.L']
    if pos == 0:  # CENTER
        hand.location[2] += -0
    elif pos == 1:  # away from body
        hand.location[2] += -0.15
        hand.location[0] -= -0.15
    elif pos == -1:  # opposite side
        hand.location[2] += 0.15
        hand.location[0] -= 0.15
        elbow.location = (-0.15, 0.15, 0.42)
    else:
        print ('inputs are -1, 0 and 1')
#dLH(0) #example, uncomment and run script to test
def dRH(pos): #distance from body RIGHT HAND
    """Shift the right hand sideways relative to the body.

    pos: 0 = centred, 1 = away from the body, -1 = across to the
    opposite side.
    """
    hand = bpy.data.objects['3'].pose.bones['hand.ik.R']
    if pos == 0:  # CENTER
        hand.location[2] += 0
    elif pos == 1:  # away from body
        hand.location[2] += -0.15
        hand.location[0] += -0.15
    elif pos == -1:  # opposite side
        hand.location[2] += 0.15
        hand.location[0] += 0.15
    else:
        print ('inputs are -1, 0 and 1')
#dRH(0) #example, uncomment and run script to test
#############################################################################
def rhF(x1,x2,x3,x4,x5): #finger extension control RIGHT HAND (thumb,index,middle,ring,pinky)
    """Set per-finger curl on the right hand of armature '3'.

    Each argument is an extension value in tenths (intended range 0..6);
    it is divided by 10 and written to rotation_quaternion[1] of the
    corresponding finger bone.
    NOTE(review): the clamping rules differ per finger (thumb clamps for
    any x1>=6, middle/ring only for ==6, index/pinky never) -- presumably
    hand-tuned; confirm before normalizing.
    """
    a1=x1/10
    if x1>=6:
        # Thumb saturates at 0.8.
        a1=0.8
    bpy.data.objects['3'].pose.bones["thumb.R"].rotation_quaternion[1]=a1
    a2=x2/10
    bpy.data.objects['3'].pose.bones["index.R"].rotation_quaternion[1]=a2
    a3=x3/10
    if x3==6:
        a3=0.8
    bpy.data.objects['3'].pose.bones["middle.R"].rotation_quaternion[1]=a3
    a4=x4/10
    if x4==6:
        a4=0.7
    bpy.data.objects['3'].pose.bones["ring.R"].rotation_quaternion[1]=a4
    a5=x5/10
    bpy.data.objects['3'].pose.bones["pinky.R"].rotation_quaternion[1]=a5
#rhF(0,0,0,0,0) #example, uncomment and run script to test
def lhF(x1,x2,x3,x4,x5): #finger extension control LEFT HAND (thumb,index,middle,ring,pink)
    """Set per-finger curl on the left hand of armature '3'.

    Each argument is an extension value in tenths (intended range 0..6);
    it is divided by 10 and written to rotation_quaternion[1] of the
    corresponding finger bone.
    NOTE(review): unlike rhF, the thumb is NOT clamped here (only warned),
    and the middle finger clamps to 0.7 instead of 0.8 -- confirm whether
    this asymmetry with the right hand is intentional.
    """
    a1=x1/10
    bpy.data.objects['3'].pose.bones["thumb.L"].rotation_quaternion[1]=a1
    if x1>6:
        print('only numbers between 0 and 6')
    a2=x2/10
    bpy.data.objects['3'].pose.bones["index.L"].rotation_quaternion[1]=a2
    a3=x3/10
    if x3==6:
        a3=0.7
    bpy.data.objects['3'].pose.bones["middle.L"].rotation_quaternion[1]=a3
    a4=x4/10
    if x4==6:
        a4=0.7
    bpy.data.objects['3'].pose.bones["ring.L"].rotation_quaternion[1]=a4
    a5=x5/10
    bpy.data.objects['3'].pose.bones["pinky.L"].rotation_quaternion[1]=a5
#lhF(0,0,0,0,0) #example, uncomment and run script to test
##################################################################
def detRF(f,m,r): #especific finger position RIGHT HAND (finger=[1=thumb...,5=pinky],separation from fingers=[-1,0,1], perpendicular to pal=[0,1])
    """Pose a single right-hand finger of armature '3'.

    Args:
        f: finger index (1=thumb ... 5=pinky); 0 or anything else is rejected.
        m: lateral separation: -1 = closer to the other fingers, 0 = neutral,
           1 = away from them.
        r: 1 = raise the finger perpendicular to the palm, 0 = flat.
    """
    # which finger
    fingers = {
        1: 'thumb.01.R',
        2: 'f_index.01.R',
        3: 'f_middle.01.R',
        4: 'f_ring.01.R',
        5: 'f_pinky.01.R',
    }
    # BUGFIX: the original fell through for f==0/invalid with the bone
    # variable undefined and crashed with NameError further down.
    if f not in fingers:
        print ('error')
        return
    d = bpy.data.objects['3'].pose.bones[fingers[f]]
    # separation from other fingers
    if m == 0:
        mov = 0
    elif m == 1:  # moves away
        mov = -0.2
    elif m == -1:  # moves closer
        mov = 0.2
        if f == 1 or f == 5:
            mov = 0.2
        elif f == 4:
            mov = 0.1
    else:
        # BUGFIX: an invalid m left `mov` undefined; bail out instead.
        print('error')
        return
    # Fingers 3..5 separate in the opposite rotation direction.
    if f <= 2:
        d.rotation_quaternion[3] = mov
    else:
        d.rotation_quaternion[3] = -mov
    # perpendicular from palm (thumb uses a smaller lift)
    if r == 1:
        d.rotation_quaternion[1] = 0.2 if f == 1 else 0.5
    elif r == 0:
        d.rotation_quaternion[1] = 0
#detRF(5,0,0) #example, uncomment and run script to test
def detLF(f,m,r): #especific finger position RIGHT HAND (finger=[1=thumb...,5=pinky],separation from fingers=[-1,0,1], perpendicular to pal=[0,1])
    """Pose a single left-hand finger of armature '3' (mirror of detRF).

    Args:
        f: finger index (1=thumb ... 5=pinky); 0 or anything else is rejected.
        m: lateral separation: -1 = closer, 0 = neutral, 1 = away.
        r: 1 = raise the finger perpendicular to the palm, 0 = flat.
    """
    # which finger
    fingers = {
        1: 'thumb.01.L',
        2: 'f_index.01.L',
        3: 'f_middle.01.L',
        4: 'f_ring.01.L',
        5: 'f_pinky.01.L',
    }
    if f not in fingers:
        print('no finger was specified')
        return
    d = bpy.data.objects['3'].pose.bones[fingers[f]]
    # separation from fingers
    if m == 0:
        mov = 0
    elif m == -1:  # moves closer
        mov = -0.2
        if f == 4:
            mov = -0.12
    elif m == 1:  # moves away
        mov = 0.1
        if f == 1:
            mov = 0.2
    else:
        # BUGFIX: an invalid m left `mov` undefined and crashed below.
        print('error')
        return
    # BUGFIX: the original used `f > 3` here, so the middle finger (f==3)
    # never received its separation and printed 'error'; detRF uses f > 2.
    if f <= 2:
        d.rotation_quaternion[3] = mov
    else:
        d.rotation_quaternion[3] = -mov
    # perpendicular from palm (thumb uses a smaller lift)
    if r == 0:
        d.rotation_quaternion[1] = 0
    elif r == 1:
        d.rotation_quaternion[1] = 0.2 if f == 1 else 0.5
# BUGFIX: this call was live (unlike every sibling example) and ran on import.
#detLF(1,0,0) #example, uncomment and run script to test
####################################################
def rotLH(t,r):
    """Rotate the left wrist of armature '3'.

    Args:
        t: wrist tilt: 0 = none, 1 = one direction, -1 = the other.
        r: wrist roll: 0 = leave unchanged, 1 = roll (amount depends on
           current hand height), 2 = fixed large roll.

    Pose edits are done in FK space between mhx2 snap_fk_ik/snap_ik_fk
    operator calls so the IK targets stay consistent afterwards.
    """
    wik=bpy.data.objects['3'].pose.bones["hand.ik.L"]
    wfk=bpy.data.objects['3'].pose.bones["hand.fk.L"]
    if r==0: # wrist roll inward/outward: intentionally a no-op
        wik.rotation_quaternion[0]=wik.rotation_quaternion[0]
        wik.rotation_quaternion[1]=wik.rotation_quaternion[1]
        wik.rotation_quaternion[2]=wik.rotation_quaternion[2]
        wik.rotation_quaternion[3]=wik.rotation_quaternion[3]
    elif r==1:
        a=2
        b=0.05
        # The lower the hand sits, the stronger the roll correction applied.
        if wik.location[2]<0.1 and wik.location[2]>=0.05:
            a=5
            b=-0.3
        if wik.location[2]<0.05:
            a=6
            b=-2
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_L 2 3 12")
        wfk.rotation_quaternion[0]=a
        wfk.rotation_quaternion[1]=b
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_L 2 3 12")
    elif r==2:
        a=30
        b=-25
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_L 2 3 12")
        wfk.rotation_quaternion[0]=a
        wfk.rotation_quaternion[2]=b
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_L 2 3 12")
    else:
        print ('error')
    if t==0:
        r=0
    elif t==1:
        a=0.4
        b=-0.7
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_L 2 3 12")
        wfk.rotation_quaternion[1]=a
        wfk.rotation_quaternion[3]=b
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_L 2 3 12")
    elif t==-1:
        a=-1
        b=-0.1
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_L 2 3 12")
        wfk.rotation_quaternion[1]=a
        wfk.rotation_quaternion[3]=b
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_L 2 3 12")
#rotLH(0,0)
def rotRH(t,r): # right hand rotation
    """Rotate the right wrist of armature '3'.

    Args:
        t: wrist tilt: 0 = none, 1 / -1 = the two tilt directions.
        r: wrist roll: 0 = leave unchanged, 1 / -1 = roll variants whose
           amount depends on current hand height, 2 = fixed roll.

    Pose edits are done in FK space between mhx2 snap_fk_ik/snap_ik_fk
    operator calls so the IK targets stay consistent afterwards.
    """
    wik=bpy.data.objects['3'].pose.bones["hand.ik.R"]
    wfk=bpy.data.objects['3'].pose.bones["hand.fk.R"]
    if r==0: # wrist roll: intentionally a no-op
        wik.rotation_quaternion[0]=wik.rotation_quaternion[0]
        wik.rotation_quaternion[1]=wik.rotation_quaternion[1]
        wik.rotation_quaternion[2]=wik.rotation_quaternion[2]
        wik.rotation_quaternion[3]=wik.rotation_quaternion[3]
    elif r==-1:
        a=2
        b=0.05
        # The lower the hand sits, the stronger the roll correction applied.
        if wik.location[2]<0.1 and wik.location[2]>=0.05:
            a=9
            b=-4
        if wik.location[2]<0.05:
            a=9
            b=-2
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_R 18 19 18")
        wfk.rotation_quaternion[0]=a
        wfk.rotation_quaternion[1]=b
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_R 18 19 18")
    elif r==1:
        a=2
        b=0.05
        if wik.location[2]<0.1 and wik.location[2]>=0.05:
            a=4
            b=1
        if wik.location[2]<0.05:
            a=9
            b=-2
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_R 18 19 18")
        wfk.rotation_quaternion[0]=a
        wfk.rotation_quaternion[1]=b
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_R 18 19 18")
    elif r==2:
        a=0
        b=1
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_R 18 19 18")
        wfk.rotation_quaternion[1]=a
        wfk.rotation_quaternion[2]=b
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_R 18 19 18")
    else:
        print('error')
    if t==0:
        r=0
    elif t==-1:
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_R 18 19 18")
        wfk.rotation_quaternion[1]=0
        wfk.rotation_quaternion[3]=0.9
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_R 18 19 18")
    elif t==1:
        bpy.ops.mhx2.snap_fk_ik(data="MhaArmIk_R 18 19 18")
        wfk.rotation_quaternion[1]=1
        wfk.rotation_quaternion[3]=0.5
        bpy.ops.mhx2.snap_ik_fk(data="MhaArmIk_R 18 19 18")
#rotRH(0,2)
def default():
    """Reset both arms and all fingers to the idle pose."""
    hRH(0)
    rhF(0,0,0,0,0)
    hLH(0)
    lhF(0,0,0,0,0)
|
from collections import deque
def knight(p1, p2):
    """Return the minimum number of knight moves between two chess squares.

    Args:
        p1: start square in algebraic notation, e.g. 'a1'.
        p2: target square in algebraic notation, e.g. 'c3'.
    Returns:
        Minimum move count (0 when start equals target).
    """
    files = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h': 8}
    start = (files[p1[0]], int(p1[1]))
    target = (files[p2[0]], int(p2[1]))
    # BUGFIX: the original returned 2 for identical squares (it only
    # detected the target when a move landed on it).
    if start == target:
        return 0
    moves = [(2, 1), (2, -1), (-2, 1), (-2, -1),
             (1, 2), (1, -2), (-1, 2), (-1, -2)]
    visited = {start}
    frontier = deque([start])
    dist = 0
    # Level-order BFS: `dist` counts completed expansion levels.
    while frontier:
        dist += 1
        for _ in range(len(frontier)):
            a, b = frontier.popleft()
            for dx, dy in moves:
                nxt = (a + dx, b + dy)
                if not (1 <= nxt[0] <= 8 and 1 <= nxt[1] <= 8):
                    continue
                if nxt == target:
                    return dist
                if nxt not in visited:
                    visited.add(nxt)
                    frontier.append(nxt)
    return 0
|
import os
from espider.dbs.easy_csv import OpenCsv
from espider.spider import Spider
class MySpider(Spider):
    """Crawl zol.com.cn landscape-wallpaper listings and download images."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Retry each failed request up to 3 times.
        self.request_setting.update(max_retry=3)
    def start_requests(self):
        """Seed the crawl with the first 9 listing pages."""
        url = 'https://desk.zol.com.cn/fengjing/{}.html'
        for i in range(1, 10):
            self.request(url=url.format(i), args={'page': i})
    def parse(self, resp, *args, **kwargs):
        """Extract (title, detail-url) pairs from one listing page."""
        print(f'{resp}:{resp.url} {resp.retry_count}')
        title = resp.css('a.pic img::attr(alt)').extract()
        urls = resp.css('a.pic::attr(href)').extract()
        ut = dict(zip(title, urls))
        # An empty mapping usually means the page layout changed; log raw urls.
        if len(ut) == 0:
            print(urls)
        for t, u in ut.items():
            if u:
                self.request(url=u, callback=self.download_image, args={'title': t, 'page': kwargs.get('page')},
                             priority=1)
    def download_image(self, resp, title=None, page=None):
        """Record image metadata to CSV and queue the actual image download."""
        path = f'./images/{title}'
        if not os.path.exists(path):
            os.mkdir(path)
        name = f'{resp.css("h3 span span::text").extract_first()}.jpg'
        save_path = f'{path}/{name}'
        imgurl = resp.css('#bigImg::attr(src)').extract_first()
        with OpenCsv(f'{path}/images.csv') as f:
            f.write_dict_to_csv({'page': page, 'index': name, 'title': title, 'url': imgurl})
        if imgurl:
            # `save_img` is the module-level callback defined below.
            self.request(url=imgurl, callback=save_img, args={'path': save_path}, priority=4)
        # Follow the in-gallery "next" link to walk every image of the set.
        next = resp.css('#pageNext::attr(href)').extract_first()
        if next and 'http' in next and next != 'None':
            self.request(url=next, callback=self.download_image, args={'title': title, 'page': page}, priority=3)
def save_img(resp, path):
    """Write a downloaded image response body to `path`."""
    print(f'Save: {path}')
    with open(path, 'wb') as f:
        f.write(resp.content)
def test(resq):
    """Debug helper: print a request's URL and pass the request through."""
    print(resq.url)
    return resq
if __name__ == '__main__':
    # Run the spider when executed as a script.
    s = MySpider()
    s.run()
|
# Classify a triangle by its three side lengths.
a = float(input('Digite um valor: '))
b = float(input('Digite outro valor: '))
c = float(input('Digite mais um valor: '))
# BUGFIX: the original used the Python 2 `<>` operator (a SyntaxError in
# Python 3) and mis-classified: any single equal pair was reported as
# equilateral, and the isosceles branch required `b <> c and b == c`
# simultaneously, which is impossible. Equilateral = all three sides
# equal; isosceles = exactly two equal; scalene = none equal.
# (Note: the triangle inequality is not checked, matching the original.)
if a == b == c:
    print('Com os números digitados, formam um triângulo EQUILATERO.')
elif a == b or b == c or c == a:
    print('Com os números digitados, formam um triângulo ISOSELES.')
else:
    print('Com os número digitados, formam triângulo ESCALENO.')
from typing import Dict, Optional
import cv2
import numpy
from ._params import Params as _Params
from ._types import Image, TemplateMatchResult
from ._utils import convert_to_hls, crop_rect, match_template
from .exceptions import DialsNotFoundError, ImageLoadingError
class ImageFile:
    """A meter photograph plus the processing needed to locate its dials.

    Wraps either a file on disk (`filename`) or an already-loaded BGR
    image, and locates the dial area via template matching in HLS space.
    """
    def __init__(
        self,
        filename: str,
        params: _Params,
        bgr_image: Optional[Image] = None,
    ) -> None:
        self.filename = filename    # source path (also used in error reports)
        self.params = params        # tunable constants: rects, thresholds, ...
        self.bgr_image = bgr_image  # optional pre-loaded image; skips imread
    def get_dials_hls(self) -> Image:
        """Return the dial region of the image, in HLS color space."""
        hls_image = self.get_hls_image()
        match_result = self._find_dials(hls_image)
        dials_hls = crop_rect(hls_image, match_result.rect)
        return dials_hls
    def get_hls_image(self) -> Image:
        """Return the meter image converted to (hue-shifted) HLS."""
        bgr_image = self.get_bgr_image()
        hls_image = convert_to_hls(bgr_image, self.params.hue_shift)
        return hls_image
    def get_bgr_image_t(self) -> Image:
        """Return the BGR image translated so the dials sit at (30, 116).

        NOTE(review): (30, 116) appears to be a calibrated anchor position
        expected downstream -- confirm against consumers before changing.
        """
        bgr_image = self.get_bgr_image()
        hls_image = convert_to_hls(bgr_image, self.params.hue_shift)
        dials = self._find_dials(hls_image)
        tl = dials.rect.top_left
        # Pure translation matrix moving the detected top-left to the anchor.
        m = numpy.array([
            [1, 0, 30 - tl[0]],
            [0, 1, 116 - tl[1]]
        ], dtype=numpy.float32)
        (h, w) = bgr_image.shape[0:2]
        return cv2.warpAffine(bgr_image, m, (w, h))
    def get_bgr_image(self) -> Image:
        """Return the cropped meter image in BGR, loading from disk if needed.

        Raises:
            ImageLoadingError: if the file cannot be read.
        """
        if self.bgr_image is not None:
            return self.bgr_image
        img = cv2.imread(self.filename)
        if img is None:
            raise ImageLoadingError(self.filename)
        return self._crop_meter(img)
    def _crop_meter(self, img: Image) -> Image:
        # Restrict processing to the configured meter area of the photo.
        return crop_rect(img, self.params.meter_rect)
    def _find_dials(self, img_hls: Image) -> TemplateMatchResult:
        """Template-match the dial area in the lightness channel.

        Raises:
            DialsNotFoundError: if the best match falls below the threshold.
        """
        template = _get_dials_template(self.params)
        lightness = cv2.split(img_hls)[1]
        match_result = match_template(lightness, template)
        if match_result.max_val < self.params.dials_match_threshold:
            raise DialsNotFoundError(
                self.filename, extra_info={'match val': match_result.max_val})
        return match_result
# Cache of loaded dial templates, keyed by the id() of the Params object.
_dials_template_map: Dict[int, Image] = {}
def _get_dials_template(params: _Params) -> Image:
    """Load (and cache) the grayscale dials template for `params`.

    NOTE(review): keying by id(params) assumes each Params object lives for
    the whole process; if a Params is garbage-collected, an unrelated object
    could reuse its id and hit a stale cache entry -- confirm lifetimes.
    """
    dials_template = _dials_template_map.get(id(params))
    if dials_template is None:
        dials_template = cv2.imread(params.dials_file, cv2.IMREAD_GRAYSCALE)
        if dials_template is None:
            raise IOError(
                "Cannot read dials template: {}".format(params.dials_file))
        _dials_template_map[id(params)] = dials_template
    # Guard against a template file that does not match the configured size.
    assert dials_template.shape == params.dials_template_size
    return dials_template
|
import matplotlib.pyplot as plt
import numpy as np
import pytest
import torch
from src import datamodules
from src.augmentations.seg_aug import get_composed_augmentations
from src.datamodules.cityscapes import Cityscapes
from src.datamodules.dadatamodule import DADataModule
from src.datamodules.gta5 import GTA5
from src.datamodules.synthia import Synthia
# Shared augmentation configuration for every dataset test below:
# random crop to 768x768, colour jitter (brightness/contrast/saturation 0.5),
# and horizontal flip with probability 0.5.
aug = {
    "rcrop": [768, 768],
    "brightness": 0.5,
    "contrast": 0.5,
    "saturation": 0.5,
    "hflip": 0.5,
}
def template_test(data_class, name, **kwargs):
    """Load one batch from `data_class` and render a visual smoke test.

    Saves `<name>_test.png`, a grid of (image, decoded segmentation) pairs,
    so the dataset/augmentation pipeline can be inspected by eye.
    """
    augmentations = get_composed_augmentations(aug)
    dst = data_class(augmentations=augmentations, **kwargs)
    bs = 4
    trainloader = torch.utils.data.DataLoader(dst, batch_size=bs, num_workers=0)
    imgs, labels, ind = next(iter(trainloader))
    # Reverse the channel order (BGR -> RGB), then NCHW -> NHWC for matplotlib.
    imgs = imgs.numpy()[:, ::-1, :, :]
    imgs = np.transpose(imgs, [0, 2, 3, 1])
    f, axarr = plt.subplots(bs, 2)
    for j in range(bs):
        axarr[j][0].set_axis_off()
        axarr[j][1].set_axis_off()
        axarr[j][0].imshow(imgs[j])
        axarr[j][1].imshow(dst.decode_segmap(labels.numpy()[j]))
    plt.savefig(f"{name}_test.png")
def test_Cityscapes():
    """Smoke-test the Cityscapes pipeline (requires ./data/cityscapes)."""
    return template_test(
        Cityscapes,
        "cityscapes",
        rootpath="./data/cityscapes",
        split="train",
        n_classes=19,
        img_cols=2048,
        img_rows=1024,
        norm=False,
    )
def test_GTA5():
    """Smoke-test the GTA5 pipeline (requires ./data/GTA5)."""
    return template_test(
        GTA5,
        "gta5",
        rootpath="./data/GTA5",
        n_classes=19,
        img_cols=1914,
        img_rows=1052,
        norm=False,
    )
@pytest.mark.skip(reason="SYNTHIA dataset not available locally")
def test_synthia():
    """Smoke-test the Synthia pipeline (currently disabled).

    The original disabled this test with a bare `return`, which made pytest
    report it as passing; the skip marker makes the disabled state explicit
    in the test report. The early return is kept so a direct call remains a
    no-op as before.
    """
    return
    # Re-enable by removing the return (and the skip marker) once
    # ./data/SYNTHIA exists.
    return template_test(
        Synthia,
        "synthia",
        rootpath="./data/SYNTHIA",
        img_cols=1914,
        img_rows=1052,
        n_classes=13,
        is_transform=True,
        norm=False,
    )
def test_dadatamodule():
    """Exercise DADataModule setup and pull one batch from each loader."""
    loader = DADataModule(augmentations=aug)
    loader.setup()
    # Default configuration is single-domain (no domain-adaptation task).
    assert not loader.is_da_task
    it = loader.train_dataloader()["src"]
    ret = next(iter(it))
    _, _, _ = ret  # each batch unpacks to (images, labels, indices)
    it = loader.val_dataloader()
    ret = next(iter(it))
    _, _, _ = ret
|
from django.db import models
from django.utils import timezone
from datetime import timedelta
class Post(models.Model):
    """Abstract base for user-authored content (questions and answers)."""
    # TODO: Add rating
    # TODO: Link author with a user
    content = models.CharField(max_length=500)
    author = models.CharField(max_length=20)
    created = models.DateTimeField('Created')
    edited = models.DateTimeField('Edited', null=True)  # null until first edit
    class Meta:
        # No table of its own; fields are inherited by concrete subclasses.
        abstract = True
    def __str__(self):
        return self.content
class Tag(models.Model):
    """A short label that can be attached to questions."""
    # TODO: Sort out tag creation, author, etc.
    name = models.CharField(max_length=20)

    def __str__(self):
        # Consistency with Post.__str__: readable values in admin/shell.
        return self.name
class Question(Post):
    """A question post; taggable, answerable via Answer."""
    # TODO: Question rating works the same as answer rating?
    tags = models.ManyToManyField(Tag, verbose_name='List of tags')
    def is_recent(self):
        """Return True if created within the last 24 hours (not future-dated)."""
        now = timezone.now()
        return now - timedelta(days=1) <= self.created <= now
class Answer(Post):
    """An answer to a question; deleted when its question is deleted."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
|
from flask import Blueprint

# Blueprint for the "response" feature; views attach their routes to it.
response = Blueprint('response', __name__)

# Imported at the bottom on purpose: views needs `response` defined above,
# so a top-of-file import would create a circular import.
from . import views
|
#!/opt/cloudera/parcels/Anaconda/bin/python
# -*- coding: utf-8 -*-
__author__ = 'Robert (Bob) L. Jones'
__coauthor__ = 'N/A'
__copyright__ = 'Copyright 2018, File'
__credits__ = ['Robert (Bob) L. Jones']
__license__ = 'GPL'
__version__ = '0.0.1'
__maintainer__ = 'Robert (Bob) L. Jones'
__status__ = 'Development'
__created_date__= 'Dec 5, 2018'
__modified_date__= 'Dec 5, 2018'
'''
FILE
‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
file.py
SYNOPSIS
‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
File(subpath1[, subpath2, ..., extension=extension, dir=dir])
DESCRIPTION
‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
The class "File":
* Encapulates file metadata;
* Handles file operations.
EXAMPLES
‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
from file import File
file1 = File('file1.csv', dir='~')
print("TEST 1a: file1.name=%s" % file1.name)
print("TEST 1b: file1.stem=%s" % file1.stem)
print("TEST 1c: file1.parent=%s" % file1.parent)
print("TEST 1d: file1.suffix=%s" % file1.suffix)
print("TEST 1e: file1=%s" % file1)
print("")
file2 = File('file2', extension='.csv', dir='.')
print("TEST 2a: file2.name=%s" % file2.name)
print("TEST 2b: file2.stem=%s" % file2.stem)
print("TEST 2c: file2.parent=%s" % file2.parent)
print("TEST 2d: file2.suffix=%s" % file2.suffix)
print("TEST 2e: file2=%s" % file2)
print("")
file3 = File('file3', extension='.json.jinja', dir='.')
print("TEST 3a: file3.name=%s" % file3.name)
print("TEST 3b: file3.stem=%s" % file3.stem)
print("TEST 3c: file3.parent=%s" % file3.parent)
print("TEST 3d: file3.suffix=%s" % file3.suffix)
print("TEST 3e: file3=%s" % file3)
file4 = File('file4.json.jinja', extension='.json.jinja', dir='.')
print("TEST 4a: file4.name=%s" % file4.name)
print("TEST 4b: file4.stem=%s" % file4.stem)
print("TEST 4c: file4.parent=%s" % file4.parent)
print("TEST 4d: file4.suffix=%s" % file4.suffix)
print("TEST 4e: file4=%s" % file4)
file5 = File('../etc/file5.json.jinja', extension='.json.jinja', dir='.')
print("TEST 5a: file5.name=%s" % file5.name)
print("TEST 5b: file5.stem=%s" % file5.stem)
print("TEST 5c: file5.parent=%s" % file5.parent)
print("TEST 5d: file5.suffix=%s" % file5.suffix)
print("TEST 5e: file5=%s" % file5)
file6 = File('../etc/file6.json.jinja', dir='.')
print("TEST 6a: file6.name=%s" % file6.name)
print("TEST 6b: file6.stem=%s" % file6.stem)
print("TEST 6c: file6.parent=%s" % file6.parent)
print("TEST 6d: file6.suffix=%s" % file6.suffix)
print("TEST 6e: file6=%s" % file6)
file7a = File('../etc/file7.json', dir='.')
file7b = File(file7a.name, extension='.jinja', dir=file7a.parent)
print("TEST 7a: file7b.name=%s" % file7b.name)
print("TEST 7b: file7b.stem=%s" % file7b.stem)
print("TEST 7c: file7b.parent=%s" % file7b.parent)
print("TEST 7d: file7b.suffix=%s" % file7b.suffix)
print("TEST 7e: file7b=%s" % file7b)
REFERENCES
‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
1. https://codereview.stackexchange.com/questions/162426/subclassing-pathlib-path
2. https://cpython-test-docs.readthedocs.io/en/latest/library/pathlib.html
3. https://github.com/bc-python/mistool/blob/master/mistool/os_use.py
4. https://github.com/chris1610/pbpython/blob/master/extras/Pathlib-Cheatsheet.pdf
5. https://www.pythoncentral.io/how-to-slice-listsarrays-and-tuples-in-python/
6. https://realpython.com/python-pathlib/
7. https://spyhce.com/blog/understanding-new-and-init
8. https://stackoverflow.com/questions/576169/understanding-python-super-with-init-methods
'''
### Libraries ###
# 3rd-party
#from pathlib import Path, _windows_flavour, _posix_flavour
from pathlib import Path
# Custom
### Class Declaration ###
class File(type(Path())):
'''
This class:
* Encapulates file metadata;
* Handles file operations.
Properties:
All properties are inherited as is from the parent class "Path".
Return (object):
An uninitialized 'File' class instance.
'''
# '__new__' is the constructor and exists solely for creating the object.
def __new__(cls, *args, **kwargs):
'''
The initializer for the class 'File' that exists solely for creating the object.
Args:
args: A comma-separated list of arguments representing parts of path
kwargs: An optional comma-separated list of key/value pairs
Return (object):
An initialized 'File' class instance.
'''
extension = kwargs.get('extension', '') # The file's extension (e.g., ".csv")
dir = kwargs.get('dir', '') # The path (absolute or relative) of the file's parent directory
name = args[-1]
name_part = name.split('/')[-1] or name.split('\\')[-1]
name_parts = name_part.split('.')
name_extension = '.' + '.'.join(name_parts[1:len(name_parts)])
new_name = name if extension == '' or extension == name_extension else name + extension
return super(File, cls).__new__(cls, dir, new_name, **kwargs)
if __name__ == '__main__':
    # Manual unit tests: exercise each constructor variant and print the
    # derived name/stem/parent/suffix for visual inspection.
    #pass
    print('UNIT TESTS:')
    print('‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾')
    print('')
    file1 = File('file1.csv', dir='~')
    print("file1 = File('file1.csv', dir='~')")
    print("TEST 1a: file1.name=%s" % file1.name)
    print("TEST 1b: file1.stem=%s" % file1.stem)
    print("TEST 1c: file1.parent=%s" % file1.parent)
    print("TEST 1d: file1.suffix=%s" % file1.suffix)
    print("TEST 1e: file1=%s" % file1)
    print("")
    file2 = File('file2', extension='.csv', dir='.')
    print("file2 = File('file2', extension='.csv', dir='.')")
    print("TEST 2a: file2.name=%s" % file2.name)
    print("TEST 2b: file2.stem=%s" % file2.stem)
    print("TEST 2c: file2.parent=%s" % file2.parent)
    print("TEST 2d: file2.suffix=%s" % file2.suffix)
    print("TEST 2e: file2=%s" % file2)
    print("")
    file3 = File('file3', extension='.json.jinja', dir='.')
    print("file3 = File('file3', extension='.json.jinja', dir='.')")
    print("TEST 3a: file3.name=%s" % file3.name)
    print("TEST 3b: file3.stem=%s" % file3.stem)
    print("TEST 3c: file3.parent=%s" % file3.parent)
    print("TEST 3d: file3.suffix=%s" % file3.suffix)
    print("TEST 3e: file3=%s" % file3)
    print("")
    file4 = File('file4.json.jinja', extension='.json.jinja', dir='.')
    print("file4 = File('file4.json.jinja', extension='.json.jinja', dir='.')")
    print("TEST 4a: file4.name=%s" % file4.name)
    print("TEST 4b: file4.stem=%s" % file4.stem)
    print("TEST 4c: file4.parent=%s" % file4.parent)
    print("TEST 4d: file4.suffix=%s" % file4.suffix)
    print("TEST 4e: file4=%s" % file4)
    print("")
    file5 = File('../etc/file5.json.jinja', extension='.json.jinja', dir='.')
    print("file5 = File('../etc/file5.json.jinja', extension='.json.jinja', dir='.')")
    print("TEST 5a: file5.name=%s" % file5.name)
    print("TEST 5b: file5.stem=%s" % file5.stem)
    print("TEST 5c: file5.parent=%s" % file5.parent)
    print("TEST 5d: file5.suffix=%s" % file5.suffix)
    print("TEST 5e: file5=%s" % file5)
    print("")
    file6 = File('../etc/file6.json.jinja', dir='.')
    print("file6 = File('../etc/file6.json.jinja', dir='.')")
    print("TEST 6a: file6.name=%s" % file6.name)
    print("TEST 6b: file6.stem=%s" % file6.stem)
    print("TEST 6c: file6.parent=%s" % file6.parent)
    print("TEST 6d: file6.suffix=%s" % file6.suffix)
    print("TEST 6e: file6=%s" % file6)
    print("")
    file7a = File('../etc/file7.json', dir='.')
    file7b = File(file7a.name, extension='.jinja', dir=file7a.parent)
    print("file7a = File('../etc/file7.json', dir='.')")
    print("file7b = File(file7a.name, extension='.jinja', dir=file7a.parent)")
    print("TEST 7a: file7b.name=%s" % file7b.name)
    print("TEST 7b: file7b.stem=%s" % file7b.stem)
    print("TEST 7c: file7b.parent=%s" % file7b.parent)
    print("TEST 7d: file7b.suffix=%s" % file7b.suffix)
    print("TEST 7e: file7b=%s" % file7b)
    print("")
|
# DynaMine
# /Memoire/dynamine
# coding=utf-8
from numpy import *
import petl as etl
from re import *
import operator
import glob
import pandas as pd
import re
import csv
import matplotlib.pyplot as plt
import numpy as np
from scipy import stats
#dsysmap
# Load dsysmap structural annotations and build a compact "protein change"
# key (e.g. A123T) used to join against the variant tables below.
b = pd.read_csv('all_mutations.dat', '\t')  # 28'695 rows x 11 columns
b['Protein_change'] = b.RES_ORIG + b.RES_NUM.map(str) + b.RES_MUT
b = b.rename(columns={'UNIPROT_AC': 'Uniprot_ACC'})
#DIDA
a = pd.read_csv('didavariantsgenes.csv', ',')
d = a.merge(b, on=['Protein_change', 'Uniprot_ACC'])
d = d.drop(columns=['Protein_change', 'Uniprot_ACC', 'GENE_NAME', 'RES_NUM', 'RES_ORIG', 'RES_MUT'])
d['ID'].value_counts().to_csv('nb_DIDA.csv')  # 118 variants
d.to_csv('dsysmap_results_DIDA.csv', index=False)  # 132 rows x 9 columns
# BUGFIX: drop_duplicates(take_last=True) was removed in pandas 0.17;
# keep='last' is the modern equivalent.
d_fil = d.drop_duplicates('ID', keep='last')
d_fil.to_csv('dsysmap_results_DIDA_filtered.csv', index=False)
#Neutral
c = pd.read_csv('neutral_variants_final.csv', '\t')
c['Protein_change'] = c.Protein_ref_allele + c.Protein_position_Biomart.map(str) + c.Protein_alt_allele
c = c.rename(columns={'transcript_uniprot_id': 'Uniprot_ACC', 'gene_symbol': 'Gene_name', 'dbsnp_id': 'ID', 'snpeff_effect': 'Variant_effect'})
e = c.merge(b, on=['Protein_change', 'Uniprot_ACC'])
e = e.drop(columns=['Protein_position_Biomart', 'Protein_ref_allele', 'Protein_alt_allele', 'GENE_NAME', 'RES_NUM', 'RES_ORIG', 'RES_MUT', 'id', 'hgvs_protein', 'protein_pos', 'protein_length', 'gene_ensembl', 'transcript_ensembl', 'Uniprot_ACC', 'Protein_change'])
e = e[['ID', 'Gene_name', 'Variant_effect', 'DISEASE_GROUP', 'DISEASE', 'MIM', 'SWISSVAR_ID', 'STRUCT_CLASS', 'INTERFACE']]
e['ID'].value_counts().to_csv('nb_1kgp.csv')  # 241 variants
e.to_csv('dsysmap_results_neutral.csv', index=False)  # 286 rows x 9 columns
e_fil = e.drop_duplicates('ID', keep='last')
e_fil.to_csv('dsysmap_results_neutral_filtered.csv', index=False)
####################################################################################
# BARCHARTS en fonction de SURF/BURIED/NA=Not_classified/INTERFACE
####################################################################################
#DIDA -> 241
# Tally DIDA variants per structural class. CSV columns used:
# [7]=STRUCT_CLASS ('BURIED'/'SURF'/''), [8]=INTERFACE ('YES'/'NO').
# An empty STRUCT_CLASS means the residue was not classified.
buried1, surface1, interface1, notclassified1 = 0, 0, 0, 0
# Use a context manager so the file is closed even if a row is malformed.
with open('dsysmap_results_DIDA_filtered.csv', 'r') as fh:
    for line in csv.reader(fh, delimiter=','):
        if line[7] == 'BURIED':
            buried1 += 1
        elif line[7] == 'SURF' and line[8] == 'NO':
            surface1 += 1
        elif line[7] == 'SURF' and line[8] == 'YES':
            interface1 += 1
        elif line[7] == '':  # Not classified
            notclassified1 += 1
#1KGP -> 5'851
N = open ('dsysmap_results_neutral_filtered.csv','r')
c2=csv.reader(N, delimiter=',')
buried2,surface2,interface2,notclassified2 = 0,0,0,0
for line in c2:
if line[7] == 'BURIED':
buried2 = buried2 + 1
elif line[7]== 'SURF' and line[8]=='NO':
surface2 = surface2 + 1
elif line[7]== 'SURF' and line[8]=='YES':
interface2 = interface2 + 1
elif line[7]=='': # Not classified
notclassified2 = notclassified2 + 1
N.close()
# BAR PLOT
N = 5
ind = np.arange(N) # the x locations for the groups
width = 0.30 # the width of the bars
fig, ax = plt.subplots()
DIDA = (float(buried1)/241,float(surface1)/241,float(interface1)/241,float(notclassified1)/241,float(123)/241)
rects1 = ax.bar(ind, DIDA, width, color='red')
neutral = (float(buried2)/5851,float(surface2)/5851,float(interface2)/5851,float(notclassified2)/5851,float(5610)/5851)
rects2 = ax.bar(ind + width, neutral, width, color='blue')
ax.set_ylabel('Frequency')
ax.set_xticks(ind + width)
ax.set_xticklabels(('Buried', 'Surface','Interface','Not classified','NA'))
ax.legend((rects1[0], rects2[0]), ('Deleterious DIDA mutants', 'Neutral 1KGP mutants'),loc='upper left')
#plt.ylim(0,0.35)
fig.savefig('barplot_dsysmap.png')
stats.chi2_contingency(np.column_stack((DIDA,neutral))) #(0.51988356450737627, 0.97153666825577745, 4, array([[ 0.03246648, 0.03233785],[ 0.06887172, 0.06859884],[ 0.02025051, 0.02017028],[ 0.14651059, 0.14593011],[ 0.73605008, 0.73313383]]))
|
import hmac
import os
from base64 import b64decode, b64encode
from hashlib import scrypt
def format_password_hash(password):
    """Hash *password* with scrypt and pack it as 'sc$N-r-p$<salt>$<digest>'.

    Salt (16 random bytes) and digest are base64-encoded.  Raises
    ValueError for passwords over 1024 characters (DoS guard).
    """
    if len(password) > 1024:
        raise ValueError("Password is too long")
    # scrypt cost parameters: CPU/memory cost N, block size r, parallelism p.
    cost_n, block_r, parallel_p = 2 ** 14, 9, 1
    salt = os.urandom(16)
    digest = scrypt(password.encode(), salt=salt, n=cost_n, r=block_r, p=parallel_p)
    salt_b64 = b64encode(salt).decode()
    digest_b64 = b64encode(digest).decode()
    return f"sc${cost_n}-{block_r}-{parallel_p}${salt_b64}${digest_b64}"
def verify(password, password_hash):
    """Check *password* against a hash produced by format_password_hash.

    Expected format: 'sc$N-r-p$<b64 salt>$<b64 digest>'.  Returns True/False;
    raises ValueError for any other hash format.
    """
    if not password_hash.startswith("sc$"):
        raise ValueError("Unknown hash format")
    _, params, salt, pw_h = password_hash.split("$")
    salt = b64decode(salt)
    pw_h = b64decode(pw_h)
    n, r, p = params.split("-")
    # Derive with the same output length as the stored digest.
    dklen = len(pw_h)
    h = scrypt(password.encode(), salt=salt, n=int(n), r=int(r), p=int(p), dklen=dklen)
    # Fix: use a constant-time comparison instead of ==, so an attacker
    # cannot learn digest bytes through response-timing differences.
    return hmac.compare_digest(h, pw_h)
|
from django.contrib import admin
from django_blog_example import models
# Register your models here.
# Expose the blog models in the Django admin with default ModelAdmin options.
for blog_model in (models.Post, models.Comment):
    admin.site.register(blog_model)
|
from yapypy.extended_python.pybc_emit import *
from bytecode import dump_bytecode
def emit_function(node: typing.Union[ast.AsyncFunctionDef, ast.FunctionDef, ast.Lambda],
                  new_ctx: Context, is_async: bool):
    """
    https://docs.python.org/3/library/dis.html#opcode-MAKE_FUNCTION
    MAKE_FUNCTION flags:
    0x01 a tuple of default values for positional-only and positional-or-keyword parameters in positional order
    0x02 a dictionary of keyword-only parameters’ default values
    0x04 an annotation dictionary
    0x08 a tuple containing cells for free variables, making a closure
    the code associated with the function (at TOS1)
    the qualified name of the function (at TOS)
    """
    # Shared emitter for FunctionDef / AsyncFunctionDef / Lambda: compiles the
    # body into new_ctx's bytecode, then emits MAKE_FUNCTION in the parent.
    parent_ctx: Context = new_ctx.parent
    # Lambdas have no .name attribute; use the conventional '<lambda>'.
    name = getattr(node, 'name', '<lambda>')
    new_ctx.bc.name = f'{parent_ctx.bc.name}.{name}' if parent_ctx.bc.name else name
    # Decorators evaluate in the enclosing scope, outermost first; they are
    # applied via the CALL_FUNCTION instructions emitted at the bottom.
    for decorator in getattr(node, 'decorator_list', ()):
        py_emit(decorator, parent_ctx)
    if is_async:
        new_ctx.bc.flags |= CompilerFlags.COROUTINE
    if isinstance(node, ast.Lambda):
        # A lambda body is a single expression; its value is the return value.
        py_emit(node.body, new_ctx)
        new_ctx.bc.append(RETURN_VALUE(lineno=node.lineno))
    else:
        # NOTE(review): node.body is a *list* of statements, so this
        # isinstance check can never match — the docstring is never captured.
        # Likely intended: head = node.body[0].  TODO confirm.
        head = node.body
        if isinstance(head, ast.Expr) and isinstance(head.value, ast.Str):
            new_ctx.bc.docstring = head.value.s
        for each in node.body:
            py_emit(each, new_ctx)
    args = node.args
    new_ctx.bc.argcount = len(args.args)
    new_ctx.bc.kwonlyargcount = len(args.kwonlyargs)
    make_function_flags = 0
    # Free variables force a closure (flag 0x08).
    if new_ctx.sym_tb.freevars:
        make_function_flags |= 0x08
    if args.defaults:
        make_function_flags |= 0x01
    if args.kw_defaults:
        make_function_flags |= 0x02
    # Collect argument names and any annotations in declaration order:
    # positional, keyword-only, *vararg, **kwarg.
    annotations = []
    argnames = []
    for arg in args.args:
        argnames.append(arg.arg)
        if arg.annotation:
            annotations.append((arg.arg, arg.annotation))
    for arg in args.kwonlyargs:
        argnames.append(arg.arg)
        if arg.annotation:
            annotations.append((arg.arg, arg.annotation))
    arg = args.vararg
    if arg:
        new_ctx.bc.flags |= CompilerFlags.VARARGS
        argnames.append(arg.arg)
        if arg.annotation:
            annotations.append((arg.arg, arg.annotation))
    arg = args.kwarg
    if arg:
        new_ctx.bc.flags |= CompilerFlags.VARKEYWORDS
        argnames.append(arg.arg)
        if arg.annotation:
            annotations.append((arg.arg, arg.annotation))
    if any(annotations):
        make_function_flags |= 0x04
    new_ctx.bc.argnames.extend(argnames)
    # Stack items for MAKE_FUNCTION are pushed in the parent, in flag order.
    if make_function_flags & 0x01:
        for each in args.defaults:
            py_emit(each, parent_ctx)
        parent_ctx.bc.append(Instr('BUILD_TUPLE', len(args.defaults), lineno=node.lineno))
    if make_function_flags & 0x02:
        # NOTE(review): CPython expects a *dict* of keyword-only defaults for
        # flag 0x02; a BUILD_TUPLE here looks suspicious — TODO confirm
        # against the MAKE_FUNCTION specification.
        for each in args.kw_defaults:
            py_emit(each, parent_ctx)
        parent_ctx.bc.append(
            Instr('BUILD_TUPLE', len(args.kw_defaults), lineno=node.lineno))
    if make_function_flags & 0x04:
        # Annotations become a const-key map: values pushed, then the key tuple.
        keys, annotation_values = zip(*annotations)
        for each in annotation_values:
            py_emit(each, parent_ctx)
        parent_ctx.bc.append(Instr('LOAD_CONST', tuple(keys), lineno=node.lineno))
        parent_ctx.bc.append(
            Instr("BUILD_CONST_KEY_MAP", len(annotation_values), lineno=node.lineno))
    if make_function_flags & 0x08:
        new_ctx.load_closure(lineno=node.lineno)
    # Implicit 'return None' terminating the compiled body.
    new_ctx.bc.append(Instr('LOAD_CONST', None))
    new_ctx.bc.append(Instr('RETURN_VALUE'))
    inner_code = new_ctx.bc.to_code()
    parent_ctx.bc.append(Instr('LOAD_CONST', inner_code, lineno=node.lineno))
    # when it comes to nested, the name is not generated correctly now.
    parent_ctx.bc.append(Instr('LOAD_CONST', new_ctx.bc.name, lineno=node.lineno))
    parent_ctx.bc.append(Instr("MAKE_FUNCTION", make_function_flags, lineno=node.lineno))
    # Apply decorators: one CALL_FUNCTION per decorator previously pushed.
    parent_ctx.bc.extend(
        [CALL_FUNCTION(1, lineno=node.lineno)] * len(getattr(node, 'decorator_list', ())))
    if isinstance(node, ast.Lambda):
        # Lambdas are expressions: leave the function object on the stack.
        pass
    else:
        parent_ctx.store_name(node.name, lineno=node.lineno)
@py_emit.case(ast.FunctionDef)
def py_emit(node: ast.FunctionDef, new_ctx: Context):
    """
    title: function def
    prepare:
    >>> import unittest
    >>> self: unittest.TestCase
    test:
    >>> def call(f):
    >>>     return f()
    >>> @call
    >>> def f():
    >>>     return 42
    >>> self.assertEqual(f, 42)
    """
    # Plain (synchronous) function definition: delegate to the shared emitter.
    # The docstring above is a test spec consumed by the project's harness.
    emit_function(node, new_ctx, is_async=False)
@py_emit.case(ast.AsyncFunctionDef)
def py_emit(node: ast.AsyncFunctionDef, new_ctx: Context):
    # 'async def': same emission path, but the COROUTINE flag is set.
    emit_function(node, new_ctx, is_async=True)
@py_emit.case(ast.Lambda)
def py_emit(node: ast.Lambda, new_ctx: Context):
    """
    title: lambda
    test:
    >>> print(lambda x: x + 1)
    >>> assert (lambda x: x + 1)(1) == 2
    >>> assert (lambda x: x * 10)(20) == 200
    """
    # Lambdas share the function emitter; emit_function leaves the function
    # object on the stack instead of binding a name.
    emit_function(node, new_ctx, is_async=False)
@py_emit.case(ast.ClassDef)
def py_emit(node: ast.ClassDef, ctx: Context):
    """
    title: class
    test:
    >>> class S:
    >>>     pass
    >>> print(S)
    >>> class T(type):
    >>>
    >>>     def __new__(mcs, name, bases, ns):
    >>>         assert name == 'S' and bases == (list, ) and '__module__' in ns and '__qualname__' in ns
    >>>         return type(name, bases, ns)
    >>>
    >>> class S(list, metaclass=T):
    >>>     def get2(self):
    >>>         return self[2]
    >>>
    >>> s = S([1, 2, 3])
    >>> assert s.get2() == 3
    >>> def call(f): return f()
    >>> @call
    >>> class S:
    >>>     def p(self): return 42
    >>> assert S.p == 42
    """
    # Emits: decorators, LOAD_BUILD_CLASS, the class-body function, the name,
    # base tuple and keyword dict, then CALL_FUNCTION_EX to build the class.
    lineno = node.lineno
    col_offset = node.col_offset
    name = node.name
    parent_ctx: Context = ctx.parent
    # Decorators evaluate in the enclosing scope, outermost first.
    for decorator in getattr(node, 'decorator_list', ()):
        py_emit(decorator, parent_ctx)
    parent_ctx.bc.append(Instr('LOAD_BUILD_CLASS'))
    ctx.bc.name = f'{parent_ctx.bc.name}.{name}' if parent_ctx.bc.name else name
    # NOTE(review): node.body is a *list*, so this isinstance check never
    # matches and the docstring is never captured (likely node.body[0]).
    head = node.body
    if isinstance(head, ast.Expr) and isinstance(head.value, ast.Str):
        ctx.bc.docstring = head.value.s
    ctx.bc.argcount = 0
    # Fix: this attribute was misspelled 'kwonlyarbgcount', so the real
    # kwonlyargcount was never set on the class-body code object.
    ctx.bc.kwonlyargcount = 0
    ctx.load_closure(lineno=node.lineno)
    # Implicit class-body namespace setup performed by the compiler.
    ctx.bc.extend([
        LOAD_GLOBAL('__name__'),
        STORE_NAME('__module__'),
        LOAD_CONST(ctx.bc.name),
        STORE_NAME('__qualname__'),
    ])
    for each in node.body:
        py_emit(each, ctx)
    # https://docs.python.org/3/reference/datamodel.html#creating-the-class-object
    ctx.bc.extend([
        Instr('LOAD_CLOSURE', CellVar('__class__')),
        STORE_NAME('__classcell__'),
        LOAD_CONST(None),
        RETURN_VALUE()
    ])
    inner_code = ctx.bc.to_code()
    parent_ctx.bc.append(LOAD_CONST(inner_code, lineno=lineno))
    # when it comes to nested, the name is not generated correctly now.
    parent_ctx.bc.append(LOAD_CONST(name, lineno=lineno))
    # 0x08: the class-body function always closes over __class__.
    parent_ctx.bc.append(MAKE_FUNCTION(0x08, lineno=lineno))
    parent_ctx.bc.extend([LOAD_CONST(name), BUILD_TUPLE(2)])
    # *args: (body_function, name) + the tuple of base classes.
    if node.bases:
        vararg = ast.Tuple(node.bases, ast.Load(), lineno=lineno, col_offset=col_offset)
        ast.fix_missing_locations(vararg)
        py_emit(vararg, parent_ctx)
    else:
        parent_ctx.bc.append(LOAD_CONST(()))
    parent_ctx.bc.append(Instr('BUILD_TUPLE_UNPACK_WITH_CALL', 2))
    # **kwargs: class keywords such as metaclass=...
    if node.keywords:
        keys, values = zip(*[(ast.Str(
            keyword.arg, lineno=keyword.value.lineno, col_offset=keyword.value.
            col_offset) if keyword.arg else None, keyword.value)
                             for keyword in node.keywords])
        ex_dict = ex_ast.ExDict(keys, values, ast.Load())
        ast.fix_missing_locations(ex_dict)
        py_emit(ex_dict, parent_ctx)
    else:
        parent_ctx.bc.append(BUILD_MAP(0))
    parent_ctx.bc.append(CALL_FUNCTION_EX(1))
    # Apply decorators, then bind the resulting class object to its name.
    parent_ctx.bc.extend(
        [CALL_FUNCTION(1, lineno=lineno)] * len(getattr(node, 'decorator_list', ())))
    parent_ctx.store_name(node.name, lineno=lineno)
|
# Boom!!!
def conteo(segundos):
    """Count down from *segundos* to 1 and print BOOM!!! at the end.

    Values below one second are rejected with a message (program output is
    intentionally in Spanish).
    """
    if segundos < 1:
        print('No puedo contar menos de un segundo.')
        return
    for restante in range(segundos, 0, -1):
        print(restante)
    print('BOOM!!!')


conteo(10)
|
import unittest
import flask_schema.types
import flask_schema.errors
class StringTest(unittest.TestCase):
    """Behavioural tests for flask_schema.types.String validation."""

    # ---- length-range tests ----

    def test_min_only(self):
        validator = flask_schema.types.String(min_length=0)
        self.assertEqual(validator("12345"), "12345")

    def test_min_only_out_of_range(self):
        validator = flask_schema.types.String(min_length=1)
        with self.assertRaises(flask_schema.errors.SchemaValidationError):
            validator("")

    def test_max_only(self):
        validator = flask_schema.types.String(max_length=10)
        self.assertEqual(validator("12345"), "12345")

    def test_max_only_out_of_range(self):
        validator = flask_schema.types.String(max_length=10)
        with self.assertRaises(flask_schema.errors.SchemaValidationError):
            validator("12345123451234512345")

    def test_min_and_max(self):
        validator = flask_schema.types.String(min_length=0, max_length=10)
        self.assertEqual(validator("12345"), "12345")

    def test_min_and_max_out_of_range(self):
        validator = flask_schema.types.String(min_length=0, max_length=10)
        with self.assertRaises(flask_schema.errors.SchemaValidationError):
            validator("12345123451234512345")

    def test_no_range(self):
        validator = flask_schema.types.String()
        self.assertEqual(validator("12345123451234512345"), "12345123451234512345")

    # ---- generic property tests (nullability, defaults, callbacks) ----

    def test_nullable_by_default(self):
        validator = flask_schema.types.String()
        self.assertIsNone(validator(None))

    def test_nullable_allows_null(self):
        validator = flask_schema.types.String(nullable=True)
        self.assertIsNone(validator(None))

    def test_nullable_raises_error(self):
        validator = flask_schema.types.String(nullable=False)
        with self.assertRaises(flask_schema.errors.SchemaValidationError):
            validator(None)

    def test_default_is_none(self):
        validator = flask_schema.types.String(default=None)
        self.assertIsNone(validator(None))

    def test_default_value(self):
        validator = flask_schema.types.String(default="yep")
        self.assertEqual(validator(None), "yep")

    def test_default_passive_when_value_not_none(self):
        validator = flask_schema.types.String(default="pey")
        self.assertEqual(validator("yep"), "yep")

    def test_default_callable(self):
        validator = flask_schema.types.String(default=lambda: "yep")
        self.assertEqual(validator(None), "yep")

    def test_wrong_type(self):
        validator = flask_schema.types.String(callback=None)
        with self.assertRaises(flask_schema.errors.SchemaValidationError):
            validator(12)

    def test_callback(self):
        validator = flask_schema.types.String(callback=lambda v: f"{v}{v}")
        self.assertEqual(validator("yep"), "yepyep")

    def test_no_callback(self):
        validator = flask_schema.types.String(callback=None)
        self.assertEqual(validator("yep"), "yep")
|
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.views.generic import DetailView
from . import models
class UserProfileDetail(DetailView):
    """Display a single UserProfile, looked up by the related user's slug."""
    model = models.UserProfile
    # Template context variable name for the profile object.
    context_object_name = 'profile'
    # Resolve the URL slug against the related User model's 'slug' field.
    slug_field = 'user__slug'
def profile_redirect(request):
    """Redirect the logged-in user to their own profile page.

    Fix: the original returned None (an invalid Django response, raising
    "view didn't return an HttpResponse") for anonymous users; those are now
    sent to the login page instead.
    """
    if request.user.is_authenticated:
        url = reverse('profile-detail', args=[str(request.user.slug)])
    else:
        # Assumes a URL named 'login' exists (django.contrib.auth.urls
        # convention) — TODO confirm against the project's urlconf.
        url = reverse('login')
    return HttpResponseRedirect(url)
|
import pyodbc, re
import WordParse
def main():
    """Parse 'jesus.tsv' with WordParse and bulk-load it into the global
    temp table ##CSV over ODBC (DSN 'mynewdsn')."""
    doc = 'jesus.tsv'
    # WordParse regenerates/normalises the TSV before loading.
    WordParse.main(file_name=doc)
    with open(doc, 'r') as f:
        s = re.split('\n', f.read())
    cnxn = pyodbc.connect(r'DSN=mynewdsn;')
    cursor = cnxn.cursor()
    cursor.execute("""IF OBJECT_ID('tempdb.dbo.##CSV') IS NOT NULL
    DROP TABLE dbo.##CSV;""")
    cursor.execute(r"""CREATE TABLE ##CSV
    (
    Name VARCHAR(50) NULL
    , [ProdCode] VARCHAR(10) NOT NULL
    , SKU VARCHAR(10) NOT NULL
    , Density VARCHAR(10) NOT NULL
    , SKU_Letter VARCHAR(10) NOT NULL
    , form_factor VARCHAR(200)
    , PBA VARCHAR(500) NULL
    , [Date] DATE NULL
    , Link VARCHAR(2000) NULL
    );
    """)
    cursor.commit()
    # Fix: use qmark parameter binding instead of %-interpolating values into
    # the SQL string — the old form broke on quotes/apostrophes in the data
    # and was an SQL-injection vector.
    insert_sql = ("insert into ##csv(Name, ProdCode, SKU, Density, SKU_Letter, "
                  "form_factor, PBA, [Date], Link) "
                  "values (?, ?, ?, ?, ?, ?, ?, CONVERT(DATE, ?, 102), ?)")
    for x in s:
        y = re.split('\t', x)
        if len(y) < 9:
            # Skip blank/short lines (e.g. the trailing newline).
            continue
        cursor.execute(insert_sql, y[0], y[1], y[2], y[3], y[4], y[5], y[6], y[7], y[8])
    cnxn.commit()
    # Smoke-check: the table is readable after the load.
    cursor.execute("""SELECT * FROM ##CSV""")
    row = cursor.fetchone()
    print(2+2)
# Run the loader only when executed as a script (not on import).
if __name__ == '__main__':
    main()
|
#There are n particles numbered from 0 to n − 1 lined up from smallest to largest ID along the x-axis.
#The particles are all released simultaneously. Once released, each particle travels indefinitely in a
#straight line along the positive x-axis at a speed. When two particles collide, the faster particle moves through
#the slower particle and they both continue moving without changing speed or direction. Given a list of particle
#speeds for particles arranged left to right by position, determine the number of collisions that occur with the
#particle at index pos.For example, assume there are n=2 particles, p[0] and p[1], located at positions 0 and 1 at
#time t = 0. The particle p[0] is traveling to the right at speed[0] = 2 units velocity and particle pos[1] is
#traveling at speed[1] = 1 unit velocity per unit of time. At time t = 1, p[0] has moved to position 0 + 2 = 2, and
#[1] is at position 1 + 1 = 2 on the x-axis. Since they both occupy the same position, they have collided at time t =
#1. At time t = 2, the particle p[0] is at position 2 + 2 = 4, and p[1] is at 2 + 1 = 3 at time t = 2. Since p[0] is
#moving faster than p[1], and is now ahead of p[1] on the x-axis, they will never collide again. In this case, there
#is 1 collision.
speed = [6, 6, 1, 6, 3, 4, 6, 8]
pos = 2


def collision(speed, pos):
    """Return the number of collisions involving the particle at *pos*.

    Particle i starts at x = i with velocity speed[i]; all move in +x.
    A particle behind (j < pos) eventually collides with pos iff it is
    strictly faster; one ahead (j > pos) collides iff it is strictly slower.
    Particles with equal speeds keep their initial gap forever.

    Fix: the original compared positions at the t = 1 snapshot only, missing
    collisions that happen later (e.g. speed=[3,1,2], pos=2 collides at
    t = 2), and its scan index only advanced when a collision was counted.
    """
    target = speed[pos]
    # Faster particles behind will catch up regardless of when.
    behind = sum(1 for j in range(pos) if speed[j] > target)
    # Slower particles ahead will be caught by the target particle.
    ahead = sum(1 for j in range(pos + 1, len(speed)) if speed[j] < target)
    return behind + ahead


print(collision(speed, pos))
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-27 14:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-declare the created/modified timestamps of every reporting model
    as indexed fields with explicit UTC verbose names."""

    dependencies = [
        ('reporting', '0013_auto_20171014_1526'),
    ]

    # Each model gets the identical pair of AlterField operations, so the
    # operations list is generated instead of spelling out 22 copies.
    # Order matters for migration reproducibility: models in this sequence,
    # 'created' before 'modified' for each.
    _MODELS = (
        'communication',
        'daily',
        'incident',
        'medical',
        'mood',
        'repair',
        'sleep',
        'sleepquality',
        'sociodynamicreport',
        'sociodynamicreportentry',
        'waste',
    )

    operations = [
        op
        for model in _MODELS
        for op in (
            migrations.AlterField(
                model_name=model,
                name='created',
                field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='Add Datetime [UTC]'),
            ),
            migrations.AlterField(
                model_name=model,
                name='modified',
                field=models.DateTimeField(auto_now=True, db_index=True, verbose_name='Modified Datetime [UTC]'),
            ),
        )
    ]
|
from functools import lru_cache
from time import gmtime, strftime
from flask import Flask, render_template
from flask import url_for
from werkzeug.utils import redirect
from superhub.pages import DeviceConnectionStatusPage, DhcpReservationPage, IpFilteringPage, MacFilteringPage, PortBlockingPage, PortForwardingPage, PortTriggeringPage
from superhub.router import Router
from superhub.utils.password_vault import PasswordVault
# Module-level WSGI application object served by the __main__ block below.
app = Flask(__name__)
@lru_cache()
def get_router():
    """Create, log in, and memoise the single Router session.

    lru_cache with no arguments means every call after the first returns the
    same connected Router instance.
    """
    vault = PasswordVault()
    router = Router("192.168.0.1", vault.get())
    router.login()
    print("Connected to router!")
    return router
def get_timestamp():
    """Return the current UTC time as 'YYYY-MM-DD HH:MM:SS TZ'."""
    now_utc = gmtime()
    return strftime("%Y-%m-%d %H:%M:%S %Z", now_utc)
@app.route("/")
def status():
return redirect(url_for('devices'))
@app.route("/devices")
def devices():
page = DeviceConnectionStatusPage(get_router())
tables = [page.wired_devices, page.wireless_devices]
return render_template("status.html", tables=tables, timestamp=get_timestamp(), active="devices")
@app.route("/dhcp")
def dhcp():
page = DhcpReservationPage(get_router())
tables = [page.ip_lease_table]
return render_template("status.html", tables=tables, timestamp=get_timestamp(), active="dhcp")
@app.route("/ip")
def ip():
page = IpFilteringPage(get_router())
tables = [page.ip_filter_list]
return render_template("status.html", tables=tables, timestamp=get_timestamp(), active="ip")
@app.route("/mac")
def mac():
page = MacFilteringPage(get_router())
tables = [page.mac_filter_list]
return render_template("status.html", tables=tables, timestamp=get_timestamp(), active="mac")
@app.route("/ports")
def ports():
tables = [
PortBlockingPage(get_router()).port_blocking_rules,
PortForwardingPage(get_router()).port_forwarding_rules,
PortTriggeringPage(get_router()).port_triggering_rules,
]
return render_template("status.html", tables=tables, timestamp=get_timestamp(), active="ports")
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=True)
|
<code object <module> at #0, file "testcorpus/95_annotation_global_simple.py", line 1>
1 0 LOAD_CONST 0 ('<code object f at #1, file "testcorpus/95_annotation_global_simple.py", line 1>')
2 LOAD_CONST 1 ('f')
4 MAKE_FUNCTION 0
6 STORE_NAME 0 (f)
8 LOAD_CONST 2 (None)
10 RETURN_VALUE
co_argcount: 0
co_kwonlyargcount: 0
co_consts: ('<code object f at #1, file "testcorpus/95_annotation_global_simple.py", line 1>', 'f', None)
co_firstlineno: 1
co_names: ('f',)
co_varnames: ()
co_cellvars: ()
co_freevars: ()
co_lnotab: b''
<code object f at #1, file "testcorpus/95_annotation_global_simple.py", line 1>
3 0 LOAD_GLOBAL 0 (print)
2 LOAD_FAST 0 (some_global)
4 CALL_FUNCTION 1
6 POP_TOP
8 LOAD_CONST 0 (None)
10 RETURN_VALUE
co_argcount: 0
co_kwonlyargcount: 0
co_consts: (None,)
co_firstlineno: 1
co_names: ('print',)
co_varnames: ('some_global',)
co_cellvars: ()
co_freevars: ()
co_lnotab: b'\x00\x02'
|
#!/usr/bin/env python
import re

from setuptools import setup, find_packages

# Fix: 'pip.req' was removed in pip 10 (and parse_requirements requires a
# real session object in late pip 9.x), so importing it at module level
# crashed the build.  Try the legacy path first for old environments, then
# fall back to parsing requirements.txt directly.
try:
    from pip.req import parse_requirements
    install_reqs = parse_requirements('requirements.txt', session=False)
    requirements = [str(ir.req) for ir in install_reqs]
except Exception:
    try:
        with open('requirements.txt') as req_file:
            requirements = [
                line.strip() for line in req_file
                if line.strip() and not line.lstrip().startswith('#')
            ]
    except OSError:
        requirements = []

with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('CHANGELOG.rst') as history_file:
    history = history_file.read()

# Single-source the version from expan/core/version.py.
with open('expan/core/version.py', 'r') as fd:
    version = re.search(
        r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
        fd.read(),
        re.MULTILINE).group(1)

if not version:
    raise RuntimeError('Cannot find version information')

test_requirements = [
    'pytest==3.0.7'  # the latest version 3.1.0 will lead to fail tests with python 2.7
]

setup(
    name='expan',
    version=version,
    description="Experiment Analysis Library",
    long_description=readme + '\n\n' + history,
    author="Zalando SE",
    author_email='octopus@zalando.de',
    url='https://github.com/zalando/expan',
    packages=find_packages(),
    package_dir={'expan': 'expan'},
    include_package_data=True,
    install_requires=requirements,
    license="MIT",
    zip_safe=False,
    keywords='expan',
    entry_points={
        'console_scripts': [
            'expan = expan.cli.cli:main'
        ]
    },
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        # 'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        # 'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
|
# Aula 15 - Desafio 70: Estatisticas em produtos
# Ler o nome e o preço de varios produtos. O programa deve perguntar se o usuario vai continuar.
# No final, mostre:
# - qual o total gasto
# - quantos produtos custam mais de R$1000
# - qual eh o nome do produto mais barato
# Read product names/prices until the user stops, then report totals.
total = contMil = menor = 0
barato = ''
primeiro = True  # fix: track the first item explicitly (see below)
while True:
    produto = str(input('Informe o produto: ')).strip()
    preco = float(input('Informe o preço R$: '))
    # Fix: the original used 'total == 0' to detect the first product, which
    # misidentifies the cheapest item whenever the first price is 0.
    if primeiro or preco < menor:
        menor = preco
        barato = produto
        primeiro = False
    if preco >= 1000:
        contMil += 1
    total += preco
    # Fix: guard against empty input — the original indexed [0] on the
    # stripped answer and crashed with IndexError when the user just hit
    # Enter ('' is also a substring of 'sn', so 'in' alone was unsafe).
    opcao = ''
    while opcao not in ('s', 'n'):
        resposta = str(input('Deseja continuar [S/N]: ')).lower().strip()
        opcao = resposta[0] if resposta else ''
    if opcao == 'n':
        break
print('======== FECHAMENTO DOS PEDIDOS ========')
print(f'Valor total dos produtos R$ {total:.2f}')
print(f'{contMil} produtos estao acima dos R$ 1000,00')
print(f'O produto mais barato foi {barato}, custando R$ {menor:.2f}')
|
def yes_no_bool(val):
    """Map a case-insensitive 'yes'/'no' string to True/False.

    Raises ValueError for any other value.
    """
    normalized = val.lower()
    if normalized == 'yes':
        return True
    if normalized == 'no':
        return False
    raise ValueError(
        'Cannot translate "{}" to bool'.format(val))
|
"""
MobileNet & FD-MobileNet for CUB-200-2011, implemented in torch.
Original papers:
- 'MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications,'
https://arxiv.org/abs/1704.04861.
- 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,' https://arxiv.org/abs/1802.03750.
"""
# Public factory functions exported by this module (one per width multiplier).
__all__ = ['mobilenet_w1_cub', 'mobilenet_w3d4_cub', 'mobilenet_wd2_cub', 'mobilenet_wd4_cub', 'fdmobilenet_w1_cub',
           'fdmobilenet_w3d4_cub', 'fdmobilenet_wd2_cub', 'fdmobilenet_wd4_cub']
from .mobilenet import get_mobilenet
from .fdmobilenet import get_fdmobilenet
def mobilenet_w1_cub(num_classes=200, **kwargs):
    """
    1.0 MobileNet-224 model for CUB-200-2011 from 'MobileNets: Efficient Convolutional Neural Networks for Mobile
    Vision Applications,' https://arxiv.org/abs/1704.04861.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # width_scale=1.0 selects the full-width variant; extra kwargs
    # (pretrained, root, ...) pass straight through to get_mobilenet.
    return get_mobilenet(num_classes=num_classes, width_scale=1.0, model_name="mobilenet_w1_cub", **kwargs)
def mobilenet_w3d4_cub(num_classes=200, **kwargs):
    """
    0.75 MobileNet-224 model for CUB-200-2011 from 'MobileNets: Efficient Convolutional Neural Networks for Mobile
    Vision Applications,' https://arxiv.org/abs/1704.04861.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # 0.75x width multiplier ('w3d4' = width 3/4); kwargs pass through.
    return get_mobilenet(num_classes=num_classes, width_scale=0.75, model_name="mobilenet_w3d4_cub", **kwargs)
def mobilenet_wd2_cub(num_classes=200, **kwargs):
    """
    0.5 MobileNet-224 model for CUB-200-2011 from 'MobileNets: Efficient Convolutional Neural Networks for Mobile
    Vision Applications,' https://arxiv.org/abs/1704.04861.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # 0.5x width multiplier ('wd2' = width divided by 2); kwargs pass through.
    return get_mobilenet(num_classes=num_classes, width_scale=0.5, model_name="mobilenet_wd2_cub", **kwargs)
def mobilenet_wd4_cub(num_classes=200, **kwargs):
    """
    0.25 MobileNet-224 model for CUB-200-2011 from 'MobileNets: Efficient Convolutional Neural Networks for Mobile
    Vision Applications,' https://arxiv.org/abs/1704.04861.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # 0.25x width multiplier ('wd4' = width divided by 4); kwargs pass through.
    return get_mobilenet(num_classes=num_classes, width_scale=0.25, model_name="mobilenet_wd4_cub", **kwargs)
def fdmobilenet_w1_cub(num_classes=200, **kwargs):
    """
    FD-MobileNet 1.0x model for CUB-200-2011 from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,'
    https://arxiv.org/abs/1802.03750.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # Full-width FD-MobileNet variant; kwargs pass through to get_fdmobilenet.
    return get_fdmobilenet(num_classes=num_classes, width_scale=1.0, model_name="fdmobilenet_w1_cub", **kwargs)
def fdmobilenet_w3d4_cub(num_classes=200, **kwargs):
    """
    FD-MobileNet 0.75x model for CUB-200-2011 from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,'
    https://arxiv.org/abs/1802.03750.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # 0.75x width multiplier; kwargs pass through to get_fdmobilenet.
    return get_fdmobilenet(num_classes=num_classes, width_scale=0.75, model_name="fdmobilenet_w3d4_cub", **kwargs)
def fdmobilenet_wd2_cub(num_classes=200, **kwargs):
    """
    FD-MobileNet 0.5x model for CUB-200-2011 from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,'
    https://arxiv.org/abs/1802.03750.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # 0.5x width multiplier; kwargs pass through to get_fdmobilenet.
    return get_fdmobilenet(num_classes=num_classes, width_scale=0.5, model_name="fdmobilenet_wd2_cub", **kwargs)
def fdmobilenet_wd4_cub(num_classes=200, **kwargs):
    """
    FD-MobileNet 0.25x model for CUB-200-2011 from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,'
    https://arxiv.org/abs/1802.03750.

    Parameters:
    ----------
    num_classes : int, default 200
        Number of classification classes.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.torch/models'
        Location for keeping the model parameters.
    """
    # 0.25x width multiplier; kwargs pass through to get_fdmobilenet.
    return get_fdmobilenet(num_classes=num_classes, width_scale=0.25, model_name="fdmobilenet_wd4_cub", **kwargs)
def _calc_width(net):
import numpy as np
net_params = filter(lambda p: p.requires_grad, net.parameters())
weight_count = 0
for param in net_params:
weight_count += np.prod(param.size())
return weight_count
def _test():
    """Smoke-test every CUB factory: build, count parameters, forward+backward."""
    import torch

    # Pretrained weights are never downloaded in this self-test.
    pretrained = False

    models = [
        mobilenet_w1_cub,
        mobilenet_w3d4_cub,
        mobilenet_wd2_cub,
        mobilenet_wd4_cub,
        fdmobilenet_w1_cub,
        fdmobilenet_w3d4_cub,
        fdmobilenet_wd2_cub,
        fdmobilenet_wd4_cub,
    ]

    for model in models:
        net = model(pretrained=pretrained)

        # net.train()
        net.eval()
        weight_count = _calc_width(net)
        print("m={}, {}".format(model.__name__, weight_count))
        # Expected trainable-parameter count for each width multiplier.
        assert (model != mobilenet_w1_cub or weight_count == 3411976)
        assert (model != mobilenet_w3d4_cub or weight_count == 1970360)
        assert (model != mobilenet_wd2_cub or weight_count == 921192)
        assert (model != mobilenet_wd4_cub or weight_count == 264472)
        assert (model != fdmobilenet_w1_cub or weight_count == 2081288)
        assert (model != fdmobilenet_w3d4_cub or weight_count == 1218104)
        assert (model != fdmobilenet_wd2_cub or weight_count == 583528)
        assert (model != fdmobilenet_wd4_cub or weight_count == 177560)

        # One forward/backward pass on a single 224x224 RGB input; the head
        # must produce 200 CUB class logits.
        x = torch.randn(1, 3, 224, 224)
        y = net(x)
        y.sum().backward()
        assert (tuple(y.size()) == (1, 200))
|
# This script defines the Header class, whose functionality is to
# create and maintain the header of each steganograph.
# Builtin modules
from re import compile, Pattern
# Internal modules
from StegLibrary.core import SteganographyConfig as Config
from StegLibrary.core.errors import UnrecognisedHeaderError
class Header:
    """Provides for the preparation of the creation of steganographs."""

    # Padding character, used when header is too short
    # after writing all the required metadata
    padding_character: str = "-"

    # Separator is used to make regex easier
    separator: str = "?"

    # Various types of length for the header
    maximum_data_length: int = 8
    maximum_flag_length: int = 3
    salt_length: int = 24
    separator_length: int = 2
    # Total fixed header length: data + flag + salt + the two separators.
    header_length: int = maximum_data_length + \
        maximum_flag_length + salt_length + separator_length

    # Regex pattern of the header
    # data_length?flag?salt
    pattern: str = r"(\d{1,8})\?(\d{1,3})\?"
    # Base64 string (4-char groups with optional '='/'==' padding tail).
    hash_pattern: str = r"((?:[A-Za-z0-9+/]{4})+(?:[A-Za-z0-9+/]{2}==" + \
        r"|[A-Za-z0-9+/]{3}=)?)"
    # NOTE: 'pattern' is deliberately rebound from the str fragment above to
    # the final compiled Pattern used for header validation.
    pattern: Pattern = compile(f"^{pattern + hash_pattern}$")
def __str__(self) -> str:
"""Returns the header."""
return self.header
def __repr__(self) -> str:
"""Same as __str__, returns the header."""
return str(self)
def __init__(self, data_length: int, compression: int, density: int,
salt: str) -> None:
self.data_length: int = data_length
self.compression: int = compression
self.density: int = density
self.salt: str = salt
self.generate()
def generate(self) -> None:
"""
Generates a header created from data given during
Header initialisation.
There is no need to call this method, unless any metadata has been
modified after initialisation.
"""
# Create a flag from compression level and density level.
# Bit 6 - 2: Compression level (0 (no compression) - 9)
# Bit 1 - 0: Density level (1 - 3)
flag = (self.compression << 2) + self.density
result_header = Header.separator.join(
(str(self.data_length), str(flag), self.salt))
assert Header.pattern.match(result_header)
# Assign as a class attribute
self.header = result_header
def build_header(
    *,
    data_length: int,
    compression: int = Config.default_compression,
    density: int = Config.default_density,
    salt: str,
) -> Header:
    """Builds the steganograph header with given data.

    ### Keyword-only arguments

    - data_length (int)
        - The length of the steganograph (excluding the header)
    - compression (int) (default = Config.default_compression)
        - The compression level
    - density (int) (default = Config.default_density)
        - The data density
    - salt (str)
        - The 24-character salt string

    ### Returns

    A Header object containing all the data given
    """
    # BUG FIX: the previous implementation returned header.header (the raw
    # string), contradicting both the return annotation and the docstring.
    # Callers such as parse_header expect a Header instance; str(header)
    # still yields the raw header string for anyone who needs it.
    return Header(
        data_length=data_length,
        compression=compression,
        density=density,
        salt=salt,
    )
def validate_header(b: bytes) -> bool:
    """Check whether a bytes string contains a valid Header.

    ### Positional arguments

    - b (bytes)
        - The bytes string to check

    ### Returns

    True if a Header is present, otherwise False

    ### Raises

    - TypeError
        - Raised when the parametres given are in incorrect types
    """
    # Type checking
    if not isinstance(b, bytes):
        raise TypeError(f"Must be a bytes string (given {type(b)})")
    # A header is textual; anything that is not valid UTF-8 cannot match.
    try:
        decoded = str(b, "utf-8")
    except UnicodeDecodeError:
        return False
    return bool(Header.pattern.match(decoded))
def parse_header(b: bytes) -> Header:
    """Parse a bytes string into a Header object.

    ### Positional arguments

    - b (bytes)
        - The bytes string to parse

    ### Returns

    A Header object from the bytes string

    ### Raises

    - TypeError
        - Raised when the parametres given are in incorrect types
    - UnrecognisedHeaderError
        - Raised when failing to parse a header.
    """
    # Type checking
    if not isinstance(b, bytes):
        raise TypeError(f"Must be a bytes string (given {type(b)})")
    # Validate header first
    if not validate_header(b):
        raise UnrecognisedHeaderError("Invalid header!")
    # Decode and re-match to extract the capturing groups.
    header_match = Header.pattern.match(str(b, "utf-8"))
    # 1. Data length
    hdr_data_length = int(header_match[1])
    # 2. Setting flag
    hdr_flag = int(header_match[2])
    # 3. Salt
    hdr_salt = header_match[3]
    # Flag layout (see Header.generate):
    # bits 1-0 hold the density, bits 6-2 the compression level.
    hdr_density = hdr_flag & 0b11
    hdr_compression = hdr_flag >> 2
    # BUG FIX: construct the Header directly rather than going through
    # build_header, which historically returned the raw header string and
    # therefore violated this function's annotated return type.
    return Header(
        data_length=hdr_data_length,
        compression=hdr_compression,
        density=hdr_density,
        salt=hdr_salt,
    )
|
"""
In this file we are going to create
classes and methods that need to
run tasks in the background asynchronously
"""
from logging import getLogger
from django.conf import settings # noqa
from background_task import background
from .models import Feed
logger = getLogger(__name__)
# Execute this task
# 20 seconds after call it
@background(schedule=20)
def update_feeds_posts():
    """
    Iterate over all Feeds in order to
    update their posts and make sure
    users have the latest news from
    their Feeds
    """
    logger.debug("Background Task 'update_feeds_posts' started")
    for feed in Feed.objects.all():
        try:
            feed.fetch_latest_posts()
        except Exception as error:
            # BUG FIX: the original passed two comma-separated f-strings to
            # logger.debug(); the second was treated as an unused lazy
            # %-format argument (the template has no placeholder), so the
            # error text never appeared in the log. Use lazy %-formatting
            # with a single template instead.
            logger.debug(
                'Fail to update posts. Feed ID: %s Error: %s',
                feed.id, error)
    logger.debug("Background Task 'update_feeds_posts' finished")
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for compression."""
import unittest
from apitools.base.py import compression
from apitools.base.py import gzip
import six
class CompressionTest(unittest.TestCase):
    """Tests for compression.CompressStream on highly compressible input."""

    def setUp(self):
        # Sample highly compressible data (~50MB).
        self.sample_data = b'abc' * 16777216
        # Stream of the sample data.
        self.stream = six.BytesIO()
        self.stream.write(self.sample_data)
        # Record the total length, then rewind so every test reads from
        # the beginning of the stream.
        self.length = self.stream.tell()
        self.stream.seek(0)

    def testCompressionExhausted(self):
        """Test full compression.

        Test that highly compressible data is actually compressed in entirety.
        """
        # Compression level 9 = maximum compression.
        output, read, exhausted = compression.CompressStream(
            self.stream,
            self.length,
            9)
        # Ensure the compressed buffer is smaller than the input buffer.
        self.assertLess(output.length, self.length)
        # Ensure we read the entire input stream.
        self.assertEqual(read, self.length)
        # Ensure the input stream was exhausted.
        self.assertTrue(exhausted)

    def testCompressionUnbounded(self):
        """Test unbounded compression.

        Test that the input stream is exhausted when length is none.
        """
        output, read, exhausted = compression.CompressStream(
            self.stream,
            None,
            9)
        # Ensure the compressed buffer is smaller than the input buffer.
        self.assertLess(output.length, self.length)
        # Ensure we read the entire input stream.
        self.assertEqual(read, self.length)
        # Ensure the input stream was exhausted.
        self.assertTrue(exhausted)

    def testCompressionPartial(self):
        """Test partial compression.

        Test that the length parameter works correctly. The amount of data
        that's compressed can be greater than or equal to the requested length.
        """
        output_length = 40
        output, _, exhausted = compression.CompressStream(
            self.stream,
            output_length,
            9)
        # Ensure the requested read size is <= the compressed buffer size.
        self.assertLessEqual(output_length, output.length)
        # Ensure the input stream was not exhausted.
        self.assertFalse(exhausted)

    def testCompressionIntegrity(self):
        """Test that compressed data can be decompressed."""
        output, read, exhausted = compression.CompressStream(
            self.stream,
            self.length,
            9)
        # Ensure uncompressed data matches the sample data.
        with gzip.GzipFile(fileobj=output) as f:
            original = f.read()
            self.assertEqual(original, self.sample_data)
        # Ensure we read the entire input stream.
        self.assertEqual(read, self.length)
        # Ensure the input stream was exhausted.
        self.assertTrue(exhausted)
class StreamingBufferTest(unittest.TestCase):
    """Exercises the consume-on-read semantics of compression.StreamingBuffer."""

    def setUp(self):
        self.stream = compression.StreamingBuffer()

    def testSimpleStream(self):
        """Write then read back the entire buffer.

        The buffer must report its length correctly and discard bytes as
        they are read.
        """
        # A fresh buffer starts out empty.
        self.assertEqual(0, self.stream.length)
        # Writing grows the reported length.
        self.stream.write(b'Sample data')
        self.assertEqual(11, self.stream.length)
        # Reading returns the bytes and purges them from the buffer.
        chunk = self.stream.read(11)
        self.assertEqual(b'Sample data', chunk)
        self.assertEqual(0, self.stream.length)

    def testPartialReads(self):
        """Read the buffer in two chunks.

        Each read consumes exactly the requested bytes and leaves the
        remainder available.
        """
        self.stream.write(b'Sample data')
        # First chunk: the consumed bytes disappear from the buffer.
        chunk = self.stream.read(6)
        self.assertEqual(b'Sample', chunk)
        self.assertEqual(5, self.stream.length)
        # Second chunk: the leftover bytes are still readable.
        chunk = self.stream.read(5)
        self.assertEqual(b' data', chunk)
        self.assertEqual(0, self.stream.length)

    def testTooShort(self):
        """Request more bytes than the buffer holds.

        An oversized read must not raise; it returns whatever is available
        and empties the buffer.
        """
        self.stream.write(b'Sample')
        chunk = self.stream.read(100)
        self.assertEqual(b'Sample', chunk)
        self.assertEqual(0, self.stream.length)
|
from typing import Dict, Union
import gym
import numpy as np
from stable_baselines3.common.type_aliases import GymStepReturn
class SimpleMultiObsEnv(gym.Env):
    """Grid-world gym environment with Dict (vector + image) observations.

    States form a num_col x num_row grid. Each state's observation is a
    dict holding a vector ("vec", shared per column index) and an image
    ("img", shared per row index). The agent moves left/down/right/up;
    reaching the last state in the mapping ends the episode with reward 1.
    """

    def __init__(
        self,
        num_col: int = 4,
        num_row: int = 4,
        random_start: bool = True,
        discrete_actions: bool = True,
        channel_last: bool = True,
    ):
        super(SimpleMultiObsEnv, self).__init__()
        self.vector_size = 5
        # Image layout: HWC when channel_last, otherwise CHW.
        if channel_last:
            self.img_size = [64, 64, 1]
        else:
            self.img_size = [1, 64, 64]
        self.random_start = random_start
        self.discrete_actions = discrete_actions
        if discrete_actions:
            self.action_space = gym.spaces.Discrete(4)
        else:
            # Continuous variant: argmax over the 4 components picks the move.
            self.action_space = gym.spaces.Box(0, 1, (4,))
        self.observation_space = gym.spaces.Dict(
            spaces={
                "vec": gym.spaces.Box(0, 1, (self.vector_size,)),
                "img": gym.spaces.Box(0, 255, self.img_size, dtype=np.uint8),
            }
        )
        self.count = 0
        # Episode step limit; step() marks done once count exceeds it.
        self.max_count = 100
        self.log = ""
        self.state = 0
        # Human-readable names indexed by the discrete action id.
        self.action2str = ["left", "down", "right", "up"]
        self.init_possible_transitions()
        self.num_col = num_col
        self.state_mapping = []
        self.init_state_mapping(num_col, num_row)
        # Goal state: the last entry of the state mapping.
        self.max_state = len(self.state_mapping) - 1

    def init_state_mapping(self, num_col: int, num_row: int) -> None:
        """Create one observation dict per grid state.

        Column i contributes a random vector and row j a random image, so
        state (i, j) observes that (vector, image) pair.
        """
        col_vecs = np.random.random((num_col, self.vector_size))
        # NOTE(review): images are generated as int32 although the declared
        # observation space is uint8, and randint's high=255 is exclusive
        # (values 0..254) -- confirm whether this mismatch is intended.
        row_imgs = np.random.randint(0, 255, (num_row, 64, 64), dtype=np.int32)
        for i in range(num_col):
            for j in range(num_row):
                self.state_mapping.append(
                    {"vec": col_vecs[i], "img": row_imgs[j].reshape(self.img_size)}
                )

    def get_state_mapping(self) -> Dict[str, np.ndarray]:
        """Return the observation dict for the current state."""
        return self.state_mapping[self.state]

    def init_possible_transitions(self) -> None:
        """Hard-coded transition tables for the default 4x4 grid.

        A move is only applied when the current state appears in the
        matching list; otherwise the agent stays in place.
        """
        self.left_possible = [1, 2, 3, 13, 14, 15]
        self.down_possible = [0, 4, 8, 3, 7, 11]
        self.right_possible = [0, 1, 2, 12, 13, 14]
        self.up_possible = [4, 8, 12, 7, 11, 15]

    def step(self, action: Union[int, float, np.ndarray]) -> GymStepReturn:
        """Advance one step; return (observation, reward, done, info)."""
        if not self.discrete_actions:
            # Continuous action: the largest component selects the move.
            action = np.argmax(action)
        else:
            action = int(action)
        self.count += 1
        prev_state = self.state
        # Small per-step penalty; replaced with +1 on reaching the goal.
        reward = -0.1
        if self.state in self.left_possible and action == 0:
            self.state -= 1
        elif self.state in self.down_possible and action == 1:
            self.state += self.num_col
        elif self.state in self.right_possible and action == 2:
            self.state += 1
        elif self.state in self.up_possible and action == 3:
            self.state -= self.num_col
        got_to_end = self.state == self.max_state
        reward = 1 if got_to_end else reward
        done = self.count > self.max_count or got_to_end
        self.log = f"Went {self.action2str[action]} in state {prev_state}, got to state {self.state}"
        return self.get_state_mapping(), reward, done, {"got_to_end": got_to_end}

    def render(self, mode: str = "human") -> None:
        """Print the log line describing the last transition."""
        print(self.log)

    def reset(self) -> Dict[str, np.ndarray]:
        """Reset the step counter and position; return the first observation."""
        self.count = 0
        if not self.random_start:
            self.state = 0
        else:
            # Random start state; randint's high is exclusive, so the goal
            # state is never chosen as the starting position.
            self.state = np.random.randint(0, self.max_state)
        return self.state_mapping[self.state]
|
import csv
from pathlib import Path
def parse_from_csv(csv_path, outfile, fieldname=None, delimiter=','):
    """Extract one column from a CSV file and write it to a text file.

    :param csv_path: Path to the input CSV file (must have a header row).
    :param outfile: Path of the output file; one field value per line.
    :param fieldname: Name of the CSV column whose values are written out.
    :param delimiter: Field delimiter of the input CSV (default ``,``).
    :raises ValueError: If ``fieldname`` is not given.
    :raises KeyError: If ``fieldname`` is not a column of the CSV.
    """
    if not fieldname:
        raise ValueError('The CSV fieldname for text content cannot be None')
    in_csv = Path(csv_path)
    with open(outfile, 'w') as f_out:
        # newline='' is the csv-module-recommended mode for CSV files.
        with open(in_csv, mode='r', newline='') as f_in:
            # BUG FIX: the delimiter argument was accepted but never
            # forwarded to DictReader, so non-comma CSVs failed to parse.
            reader = csv.DictReader(f_in, delimiter=delimiter)
            for row in reader:
                f_out.write(row[fieldname] + '\n')
|
# coding: utf-8
"""
Copyright (c) 2021 Aspose.Cells Cloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from pprint import pformat
from six import iteritems
import re
class Style(object):
    """Swagger model for a spreadsheet cell style.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """

    swagger_types = {
        'link': 'Link',
        'pattern': 'str',
        'text_direction': 'str',
        'custom': 'str',
        'shrink_to_fit': 'bool',
        'is_date_time': 'bool',
        'culture_custom': 'str',
        'rotation_angle': 'int',
        'indent_level': 'int',
        'is_percent': 'bool',
        'foreground_color': 'Color',
        'name': 'str',
        'foreground_theme_color': 'ThemeColor',
        'border_collection': 'list[Border]',
        'is_locked': 'bool',
        'vertical_alignment': 'str',
        'background_color': 'Color',
        'background_theme_color': 'ThemeColor',
        'is_formula_hidden': 'bool',
        'is_gradient': 'bool',
        'number': 'int',
        'horizontal_alignment': 'str',
        'is_text_wrapped': 'bool',
        'font': 'Font'
    }

    attribute_map = {
        'link': 'link',
        'pattern': 'Pattern',
        'text_direction': 'TextDirection',
        'custom': 'Custom',
        'shrink_to_fit': 'ShrinkToFit',
        'is_date_time': 'IsDateTime',
        'culture_custom': 'CultureCustom',
        'rotation_angle': 'RotationAngle',
        'indent_level': 'IndentLevel',
        'is_percent': 'IsPercent',
        'foreground_color': 'ForegroundColor',
        'name': 'Name',
        'foreground_theme_color': 'ForegroundThemeColor',
        'border_collection': 'BorderCollection',
        'is_locked': 'IsLocked',
        'vertical_alignment': 'VerticalAlignment',
        'background_color': 'BackgroundColor',
        'background_theme_color': 'BackgroundThemeColor',
        'is_formula_hidden': 'IsFormulaHidden',
        'is_gradient': 'IsGradient',
        'number': 'Number',
        'horizontal_alignment': 'HorizontalAlignment',
        'is_text_wrapped': 'IsTextWrapped',
        'font': 'Font'
    }

    @staticmethod
    def get_swagger_types():
        """Return the attribute-name -> type mapping."""
        return Style.swagger_types

    @staticmethod
    def get_attribute_map():
        """Return the attribute-name -> JSON-key mapping."""
        return Style.attribute_map

    def get_from_container(self, attr):
        """Return the stored value for *attr*, or None when it was never set."""
        if attr in self.container:
            return self.container[attr]
        return None

    def __init__(self, link=None, pattern=None, text_direction=None,
                 custom=None, shrink_to_fit=None, is_date_time=None,
                 culture_custom=None, rotation_angle=None, indent_level=None,
                 is_percent=None, foreground_color=None, name=None,
                 foreground_theme_color=None, border_collection=None,
                 is_locked=None, vertical_alignment=None, background_color=None,
                 background_theme_color=None, is_formula_hidden=None,
                 is_gradient=None, number=None, horizontal_alignment=None,
                 is_text_wrapped=None, font=None, **kw):
        """Style - a model defined in Swagger."""
        # Associative dict for storing property values.
        self.container = {}
        # Snapshot of the constructor arguments by parameter name.
        received = locals()
        # Seed every known attribute with None, then route each non-None
        # constructor argument through the corresponding property setter.
        for attr in Style.swagger_types:
            self.container[attr] = None
        for attr in Style.swagger_types:
            if received[attr] is not None:
                setattr(self, attr, received[attr])

    def to_dict(self):
        """Return the model properties as a plain dict, recursively."""
        result = {}
        for attr in self.get_swagger_types():
            value = self.get_from_container(attr)
            if isinstance(value, list):
                result[attr] = [item.to_dict() if hasattr(item, "to_dict")
                                else item for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: (val.to_dict()
                                      if hasattr(val, "to_dict") else val)
                                for key, val in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two Style objects are equal when all their properties match."""
        return isinstance(other, Style) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other


def _style_property(attr):
    """Build a property that proxies Style.container[attr]."""
    def _get(self):
        return self.container[attr]

    def _set(self, value):
        self.container[attr] = value

    return property(_get, _set,
                    doc="Gets or sets the {0} of this Style.".format(attr))


# Attach one getter/setter property per swagger attribute instead of the
# generated per-attribute boilerplate; behavior is identical.
for _attr in Style.swagger_types:
    setattr(Style, _attr, _style_property(_attr))
del _attr
del _style_property
|
from flask import Flask, request, jsonify
from flasgger import Swagger
from flasgger.utils import swag_from
from datetime import datetime
from argparse import ArgumentParser
import json
app = Flask(__name__)
# Metadata shown on the generated Swagger UI page.
app.config["SWAGGER"]={"title": "PAN Telemetry Software", "uiversion": 2}
# Set up the SwaggerUI API
swagger_config={
    "headers":[],
    "specs":[
        {
            "endpoint":"apispec_1",
            "route":"/apispec_1.json",
            # Include every route and model in this spec.
            "rule_filter":lambda rule:True,
            "model_filter":lambda tag:True
        }
    ],
    # NOTE(review): "flassger_static" looks like a typo for
    # "flasgger_static" -- confirm before changing; clients may rely on it.
    "static_url_path": "/flassger_static",
    "swagger_ui":True,
    "specs_route":"/swagger/"
}
swagger=Swagger(app, config=swagger_config)
# Endpoint for indexing data in elasticsearch
# Mostly for testing purposes. We don't use this to actually index data in elasticsearch
@app.route("/telemetry", methods=["POST"])
@swag_from("endpoint_configs/telemetry_config.yml")
def index_sf_report():
    """Index a single statefield report in Elasticsearch.

    Expects a JSON body with "imei", "field" and "value" keys; stores the
    value together with an ISO timestamp in the statefield_report_<imei>
    index. Mostly for testing purposes.
    """
    report = request.get_json()
    index_name = 'statefield_report_' + str(report["imei"])
    payload = json.dumps({
        report["field"]: report["value"],
        "time": str(datetime.now().isoformat())
    })
    # Index the statefield report in Elasticsearch.
    outcome = app.config["es"].index(
        index=index_name, doc_type='report', body=payload)
    return {
        "Report Status": outcome['result'],
        "Report": json.loads(payload),
        "Index": index_name
    }
# Endpoint for getting data from ElasticSearch
@app.route("/search-es", methods=["GET"])
@swag_from("endpoint_configs/search_es_config.yml")
def search_es():
    """Return the most recent value of a statefield from Elasticsearch.

    Query parameters: "index" (the ES index to search) and "field" (the
    statefield whose latest value is wanted).
    """
    index = request.args.get('index')
    field = str(request.args.get('field'))
    # Guard clause: bail out early when the index does not exist.
    if not app.config["es"].indices.exists(index=index):
        return f"Unable to find index: {index}"
    # Most recent document in the index that contains the requested field.
    query = {
        'query': {
            'exists': {
                'field': field
            }
        },
        "sort": [
            {
                "time": {
                    "order": "desc"
                }
            }
        ],
        "size": 1
    }
    res = app.config["es"].search(index=index, body=json.dumps(query))
    hits = res["hits"]["hits"]
    if len(hits) != 0:
        # Extract the field's value from the newest matching document.
        most_recent_field = hits[0]["_source"][field]
        return str(most_recent_field)
    return f"Unable to find field: {field} in index: {index}"
|
# Copyright (C) 2018 Garth N. Wells
#
# SPDX-License-Identifier: MIT
"""Unit test for the station module"""
from floodsystem.station import MonitoringStation, inconsistent_typical_range_stations
def test_create_monitoring_station():
    """MonitoringStation must store every constructor argument unchanged."""
    station = MonitoringStation(
        "test-s-id", "test-m-id", "some station", (-2.0, 4.0),
        (-2.3, 3.4445), "River X", "My Town")
    assert station.station_id == "test-s-id"
    assert station.measure_id == "test-m-id"
    assert station.name == "some station"
    assert station.coord == (-2.0, 4.0)
    assert station.typical_range == (-2.3, 3.4445)
    assert station.river == "River X"
    assert station.town == "My Town"
def test_typical_range_consistent_method():
    """typical_range_consistent() is True only for a valid (low, high) range."""
    station = MonitoringStation(
        "test-s-id", "test-m-id", "some station", (-2.0, 4.0),
        (-2.3, 3.4445), "River X", "My Town")
    # A well-ordered (low, high) range is consistent.
    assert station.typical_range_consistent()
    # Missing range data is inconsistent.
    station.typical_range = None
    assert not station.typical_range_consistent()
    # A reversed (low > high) range is inconsistent.
    station.typical_range = (76.004, -12.3)
    assert not station.typical_range_consistent()
def test_inconsistent_typical_range_stations():
    """Stations with missing or reversed typical ranges are flagged."""
    def make(i, coord, trange):
        return MonitoringStation(f"s_id_{i}", f"m_id_{i}", "some station",
                                 coord, trange, "River X", "My Town")

    valid_a = make(0, (-2.0, 4.0), (-2.3, 3.4445))
    missing = make(1, (2.0, 4.0), None)       # no typical range
    reverse = make(2, (-2.0, -4.0), (76.004, -12.3))  # high before low
    valid_b = make(3, (2.0, -4.0), (0, 1))
    invalid = inconsistent_typical_range_stations([valid_a, missing, reverse, valid_b])
    assert missing in invalid and reverse in invalid
def test_relative_water_level():
    """relative_water_level: None when the level or range is missing/inconsistent,
    otherwise the latest level as a fraction of the typical range."""
    s0 = MonitoringStation("s_id_0", "m_id_0", "some station", (-2.0, 4.0), (0.1, 3), "River X", "My Town")
    s0.latest_level = None  # invalid: no reading
    s1 = MonitoringStation("s_id_1", "m_id_1", "some station", (2.0, 4.0), None, "River X", "My Town")
    s1.latest_level = 3  # invalid: no typical range
    s2 = MonitoringStation("s_id_2", "m_id_2", "some station", (-2.0, -4.0), (3, 0.1), "River X", "My Town")
    s2.latest_level = 3  # invalid: reversed typical range
    s3 = MonitoringStation("s_id_3", "m_id_3", "some station", (2.0, -4.0), (0.1, 3), "River X", "My Town")
    s3.latest_level = 3  # valid: at the top of the range -> 1.0
    s4 = MonitoringStation("s_id_3", "m_id_3", "some station", (2.0, -4.0), (0.1, 3), "River X", "My Town")
    s4.latest_level = 0.1  # valid: at the bottom of the range -> 0.0
    stations = [s0, s1, s2, s3, s4]
    # Use a comprehension instead of a manual append loop, and `is None`
    # (identity) instead of `== None` for the None checks.
    results = [station.relative_water_level() for station in stations]
    assert results[0] is None
    assert results[1] is None
    assert results[2] is None
    assert results[3] == 1.0
    assert results[4] == 0.0
|
import re, random
from typing import Optional
import ast
from fastapi import (
FastAPI,
Request,
Response,
Cookie,
WebSocket,
Form,
WebSocketDisconnect,
)
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi.responses import RedirectResponse
from utils.ConnectionManager import ConnectionManager
from utils.Message import Message
from utils.Ratelimits import RatelimitManager
# Accepts avatar URLs: simple path/host characters ending in an image extension.
VALIDURL = re.compile(r"^([a-z\-_0-9\/\:\.]*\.(jpg|jpeg|png|gif|webp))", re.IGNORECASE)
# Matches general http(s)/www URLs anywhere in a message.
URL = re.compile(
    "(https?:\/\/(?:www\.|(?!www))[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\.[^\s]{2,}|www\.[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\.[^\s]{2,}|https?:\/\/(?:www\.|(?!www))[a-zA-Z0-9]+\.[^\s]{2,}|www\.[a-zA-Z0-9]+\.[^\s]{2,})",
    re.IGNORECASE,
)
# Fallback avatar shown when the user does not supply one.
DEFAULT_PFP = "https://cdn.discordapp.com/attachments/830269354649452564/863169775646670888/unknown.png"
# Allowed username colors; anything else is replaced with a random pick.
COLORS = ("purple", "pink", "red", "yellow", "green", "blue", "indigo")
# Ephemeral warning shown to a rate-limited chat client.
SLOWDOWN = "Slow Down! 5s (Only you can see this message)"
# Client UUIDs that have already been warned about the rate limit.
ignore = []
app = FastAPI()
app.mount("/src/static", StaticFiles(directory="src/static"), name="static")
templates = Jinja2Templates(directory="src/templates")
manager = ConnectionManager()
# Chat messages: 5 per 5 seconds; signups: 1 per 5 seconds per host.
chatlimiter = RatelimitManager(rate=5, per=5.0)
ratelimiter = RatelimitManager(rate=1, per=5.0)
def get_response(
    template: str,
    request: Request,
    response: Response,
    data: Optional[dict] = None,
) -> Response:
    """Render *template* with *data* plus the current request injected.

    Fixed: the default was a mutable ``{}`` that the function then mutated,
    so every default-argument call shared (and polluted) one dict.  The
    caller's dict is also copied before the "request" key is added.
    """
    context = dict(data) if data else {}
    context["request"] = request
    return templates.TemplateResponse(template, context)
@app.get("/")
async def read_root(
    request: Request, response: Response, group: Optional[str] = "", error: str = ""
):
    """Serve the landing/signup page.

    NOTE(review): the dict key is the *value* of ``error`` (e.g. "username",
    "avatar"), mapped to the CSS class "border-red-500" — presumably so the
    template highlights the offending form field; confirm index.html relies
    on this before changing it.
    """
    return get_response(
        "index.html", request, response, {error: "border-red-500", "group_id": group}
    )
@app.get("/newuser")
async def new_user(
    request: Request,
    response: Response,
    username: str = "",
    avatar: str = "",
    color: str = "",
    group: str = None,
):
    """Validate the signup form, register the user as waiting, redirect to /chat.

    Redirects back to "/" with an ?error=... query on any validation failure.
    The rate-limit check must stay first: check_ratelimit records the attempt
    as a side effect.
    """
    if ratelimiter.check_ratelimit(str(request.client.host)):
        # Preserve the requested group across error redirects.
        r = f"&group={group}" if group else ""
        # Username: non-empty, alphanumeric, at most 24 characters.
        if (username.strip() == "") or (not username.isalnum()) or (len(username) > 24):
            return RedirectResponse(f"/?error=username{r}")
        # Avatar is optional; when given it must look like an image URL.
        if avatar and not VALIDURL.match(avatar):
            return RedirectResponse(f"/?error=avatar{r}")
        # Unknown colors are silently replaced with a random valid one.
        if color not in COLORS:
            color = random.choice(COLORS)
        # Joining an explicit group requires that group to exist.
        if group and not manager.group_exists(group):
            return RedirectResponse(f"/?error=group")
        client_uuid, group_id = manager.wait_user(
            username=username,
            avatar=avatar or DEFAULT_PFP,
            color=color,
            group_id=group,
        )
        return RedirectResponse(f"/chat?user={client_uuid}&group={group_id}")
    else:
        return RedirectResponse("/?error=limited")
@app.get("/chat")
async def chat(request: Request, response: Response, user: str = ""):
    """Render the chat page for a user who completed /newuser."""
    # Only users currently parked in the waiting list may enter.
    if not manager.is_waiting(user):
        return RedirectResponse("/?error=failed")
    waiting = manager.waiting_users.get(user)
    context = {
        "username": waiting.username,
        "avatar": waiting.avatar,
        "color": waiting.color,
        "default_avatar": DEFAULT_PFP,
    }
    return get_response("chat.html", request, response, context)
@app.websocket("/ws/{group_id}/{client_uuid}")
async def websocket_endpoint(websocket: WebSocket, group_id: str, client_uuid: str):
    """Chat websocket: relay messages from client_uuid to its group.

    Only clients that completed /newuser (i.e. are "waiting") and target an
    existing group are accepted; anything else is silently dropped.
    """
    if (not manager.group_exists(group_id)) or (not manager.is_waiting(client_uuid)):
        return
    await manager.connect(client_uuid, websocket)
    try:
        await manager.broadcast(
            client_uuid, group_id, f"just joined the chat!", Message.EVENT
        )
        while True:
            data = await websocket.receive_text()
            # Whitespace-only messages are ignored entirely.
            if data.strip() != "":
                if chatlimiter.check_ratelimit(client_uuid):
                    await manager.broadcast(
                        client_uuid, group_id, data, Message.MESSAGE
                    )
                    # Back under the limit: clear the "already warned" flag.
                    if client_uuid in ignore:
                        ignore.remove(client_uuid)
                else:
                    # Rate-limited: warn once (ephemeral), then stay silent
                    # until the client is allowed to send again.
                    if client_uuid not in ignore:
                        ignore.append(client_uuid)
                        await manager.send_ephemeral_event(client_uuid, SLOWDOWN)
    except WebSocketDisconnect:
        user = await manager.disconnect(client_uuid, group_id, websocket)
        # The group may have been dissolved by the disconnect.
        if manager.group_exists(group_id):
            await manager.broadcast(
                "_", group_id, f"{user.username} has left the chat", Message.EVENT
            )
|
# -*- coding:UTF-8 -*-
import tensorflow as tf
from deepctr.estimator.inputs import input_fn_tfrecord
from deepctr.estimator.models import DeepFMEstimator
# Criteo-style features: 26 categorical columns (C1..C26), 13 numeric (I1..I13).
sparse_features = ['C' + str(i) for i in range(1, 27)]
dense_features = ['I' + str(i) for i in range(1, 14)]
dnn_feature_columns = []
linear_feature_columns = []
# Categorical features: hashed into 1000 ids; the DNN side embeds them into
# 4 dimensions, the linear side uses the raw identity column.
for i, feat in enumerate(sparse_features):
    dnn_feature_columns.append(
        tf.feature_column.embedding_column(tf.feature_column.categorical_column_with_identity(feat, 1000), 4)
    )
    linear_feature_columns.append(tf.feature_column.categorical_column_with_identity(feat, 1000))
# Numeric features go to both sides unchanged.
for feat in dense_features:
    dnn_feature_columns.append(tf.feature_column.numeric_column(feat))
    linear_feature_columns.append(tf.feature_column.numeric_column(feat))
# TFRecord schema: int64 for categorical, float32 for numeric and the label.
# NOTE(review): tf.FixedLenFeature is the TF1-style alias (tf.io.FixedLenFeature
# in TF2) — confirm the targeted TensorFlow version.
feature_description = {k: tf.FixedLenFeature(dtype=tf.int64, shape=1) for k in sparse_features}
feature_description.update(
    {k: tf.FixedLenFeature(dtype=tf.float32, shape=1) for k in dense_features}
)
feature_description['label'] = tf.FixedLenFeature(dtype=tf.float32, shape=1)
# Train input: shuffled, small batches; test input: one big unshuffled batch.
train_model_input = input_fn_tfrecord('./criteo_sample.tr.tfrecords', feature_description, 'label', batch_size=256,
                                      num_epochs=1, shuffle_factor=10)
test_model_input = input_fn_tfrecord('./criteo_sample.te.tfrecords', feature_description, 'label',
                                     batch_size=2 ** 14, num_epochs=1, shuffle_factor=0)
model = DeepFMEstimator(linear_feature_columns, dnn_feature_columns, task='binary')
model.train(train_model_input)
eval_result = model.evaluate(test_model_input)
print(eval_result)
import boto3
from django.conf import settings
class Bucket:
    """Thin wrapper around the S3-compatible CDN bucket.

    Credentials, endpoint and bucket name come from Django settings.
    """

    def __init__(self):
        """Open a boto3 S3 client against the configured endpoint."""
        session = boto3.session.Session()
        self.conn = session.client(
            service_name=settings.AWS_SERVICE_NAME,
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
        )

    def get_objects(self):
        """Return the bucket's object listing, or None when the bucket is empty."""
        result = self.conn.list_objects_v2(Bucket=settings.AWS_STORAGE_BUCKET_NAME)
        if result['KeyCount']:
            return result['Contents']
        else:
            return None

    def delete_object(self, key):
        """Delete *key* from the bucket.  Always returns True."""
        self.conn.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=key)
        return True

    def download_object(self, key):
        """Download *key* into AWS_LOCAL_STORAGE.  Always returns True.

        Fixed: the boto3 method is ``download_fileobj``; the previous
        misspelling ``donwload_fileobj`` raised AttributeError at call time.
        """
        with open(settings.AWS_LOCAL_STORAGE + key, 'wb') as f:
            self.conn.download_fileobj(settings.AWS_STORAGE_BUCKET_NAME, key, f)
        return True


# Module-level singleton used by the rest of the app.
bucket = Bucket()
|
import os
import struct
import numpy as np
import pygltflib as gltf
from transformations import quaternion_from_matrix
from PIL import Image
import io
# Size in bytes of one component of each supported glTF component type.
BYTE_LENGTHS = {
    gltf.UNSIGNED_SHORT: 2,
    gltf.FLOAT: 4,
}
# struct format character for each supported component type.
FORMAT_MAP = {
    gltf.UNSIGNED_SHORT: "H",
    gltf.FLOAT: "f",
}
# Number of components per element for each glTF accessor type.
TYPE_N_COMPONENT_MAP = {
    "SCALAR": 1,
    "VEC2": 2,
    "VEC3": 3,
    "VEC4": 4,
    "MAT4": 16,
}
# Uniform scale applied to positions/translations on import.
SCALE = 1
def extract_values(data, a_idx):
    """Decode accessor *a_idx* from the glb binary buffer.

    Returns one entry per accessor element: a scalar for "SCALAR"
    accessors, otherwise a list of components.  Unsupported
    component/element types are reported and yield an empty list.
    """
    values = []
    buffer = data._glb_data
    accessor = data.accessors[a_idx]
    if accessor.componentType not in FORMAT_MAP or accessor.type not in TYPE_N_COMPONENT_MAP:
        print("unhandled component type", accessor.type, accessor.componentType)
    else:
        n_components = TYPE_N_COMPONENT_MAP[accessor.type]
        b_view = data.bufferViews[accessor.bufferView]
        # Absolute start = accessor offset within the view + view offset in buffer.
        o = accessor.byteOffset + b_view.byteOffset
        l = b_view.byteLength
        # Size of one tightly packed element; byteStride may be larger when
        # vertex attributes are interleaved in the same bufferView.
        item_size = n_components * BYTE_LENGTHS[accessor.componentType]
        stride = b_view.byteStride
        if stride is None:
            stride = item_size
        buffer_slice = buffer[o:o + l]
        format_str = "<" + FORMAT_MAP[accessor.componentType] * n_components
        for idx in range(accessor.count):
            _idx = idx * stride
            # Fixed: unpack exactly one element (item_size bytes).  Slicing
            # `stride` bytes broke interleaved views where stride > item_size,
            # because struct.unpack requires an exact-length buffer.
            v = struct.unpack(format_str, buffer_slice[_idx:_idx + item_size])
            values.append(v[0] if n_components == 1 else list(v))
    return values
def extract_image(data, image):
    """Load a PIL image either from the glb buffer or from an external URI.

    Based on https://stackoverflow.com/questions/32908639/open-pil-image-from-byte-file
    Returns None when the image has neither a usable bufferView nor a uri.
    """
    if hasattr(image, "bufferView") and image.bufferView is not None and 0 <= image.bufferView < len(data.bufferViews):
        # Fixed: bufferView index 0 is valid; the old `> 0` check skipped it.
        buffer = data._glb_data
        b_view = data.bufferViews[image.bufferView]
        start = b_view.byteOffset
        end = start + b_view.byteLength
        return Image.open(io.BytesIO(buffer[start:end]))
    elif hasattr(image, "uri") and image.uri is not None:
        # External image file, resolved relative to the glTF file's directory.
        input_file = str(data._path) + os.sep + image.uri
        return Image.open(input_file)
    return None
def extract_inv_bind_matrices(data, a_idx):
    """Decode accessor *a_idx* as a list of 4x4 numpy inverse-bind matrices.

    glTF stores matrices column-major; each 16-float element is rearranged
    into row-major layout here.
    """
    matrices = []
    buffer = data._glb_data
    accessor = data.accessors[a_idx]
    if accessor.componentType == gltf.FLOAT:
        b_view = data.bufferViews[accessor.bufferView]
        # Absolute start = accessor offset within the view + view offset.
        o = accessor.byteOffset + b_view.byteOffset
        l = b_view.byteLength
        item_size = 16 * BYTE_LENGTHS[accessor.componentType]
        stride = b_view.byteStride
        if stride is None:
            stride = item_size
        buffer_slice = buffer[o:o + l]
        format_str = "<" + FORMAT_MAP[accessor.componentType] * 16
        for idx in range(accessor.count):
            _idx = idx * stride
            # Fixed: unpack exactly item_size bytes so a byteStride larger
            # than one matrix does not break struct.unpack's exact-length
            # requirement.
            m = struct.unpack(format_str, buffer_slice[_idx:_idx + item_size])
            m = np.array(m)
            # Column-major -> row-major.
            matrix = np.array([[m[0], m[4], m[8],  m[12]],
                               [m[1], m[5], m[9],  m[13]],
                               [m[2], m[6], m[10], m[14]],
                               [m[3], m[7], m[11], m[15]]])
            matrices.append(matrix)
    return matrices
def extract_mesh(data, p):
    """Extract one glTF primitive *p* into a plain dict.

    Keys present when the corresponding data exists: "type", "indices",
    "vertices", "normals", "joint_indices", "weights",
    "texture_coordinates", "texture_coordinates2".
    Only TRIANGLES primitives are supported; others return {"type"-less} dict.
    """
    mesh = dict()
    start_idx = p.indices
    if p.mode == gltf.TRIANGLES:
        mesh["type"] = "triangles"
    else:
        print("unsupported primitive type", p.mode)
        return mesh
    if start_idx is not None and data.accessors[start_idx].bufferView is not None:
        mesh["indices"] = extract_values(data, start_idx)
    if hasattr(p.attributes, gltf.POSITION) and p.attributes.POSITION is not None:
        a_idx = p.attributes.POSITION
        if a_idx < len(data.accessors):
            v = extract_values(data, a_idx)
            # Apply the global import scale to positions only.
            mesh["vertices"] = np.array(v) * SCALE
            print("loaded", len(mesh["vertices"]), "vertices")
    if hasattr(p.attributes, gltf.NORMAL) and p.attributes.NORMAL is not None:
        a_idx = p.attributes.NORMAL
        if a_idx < len(data.accessors):
            mesh["normals"] = extract_values(data, a_idx)
            print("loaded", len(mesh["normals"]), "normals")
    if hasattr(p.attributes, gltf.JOINTS_0) and p.attributes.JOINTS_0 is not None:
        a_idx = p.attributes.JOINTS_0
        if a_idx < len(data.accessors):
            mesh["joint_indices"] = extract_values(data, a_idx)
            print("loaded", len(mesh["joint_indices"]), "joint indices")
    if hasattr(p.attributes, gltf.WEIGHTS_0) and p.attributes.WEIGHTS_0 is not None:
        a_idx = p.attributes.WEIGHTS_0
        if a_idx < len(data.accessors):
            mesh["weights"] = extract_values(data, a_idx)
            print("loaded", len(mesh["weights"]), "joint weights")
    if hasattr(p.attributes, gltf.TEXCOORD_0) and p.attributes.TEXCOORD_0 is not None:
        a_idx = p.attributes.TEXCOORD_0
        if a_idx < len(data.accessors):
            uvs = extract_values(data, a_idx)
            # Flip V to match the renderer's bottom-left UV origin
            # (glTF uses top-left).
            for uv in uvs:
                uv[1] = -uv[1]
            mesh["texture_coordinates"] = uvs
            print("loaded", len(mesh["texture_coordinates"]), "joint uv")
    if hasattr(p.attributes, gltf.TEXCOORD_1) and p.attributes.TEXCOORD_1 is not None:
        a_idx = p.attributes.TEXCOORD_1
        if a_idx < len(data.accessors):
            # Fixed: the second UV set was stored under "texture_coordinates",
            # clobbering set 0, and the log line then raised KeyError on the
            # never-written "texture_coordinates2" key.
            mesh["texture_coordinates2"] = extract_values(data, a_idx)
            print("loaded", len(mesh["texture_coordinates2"]), "joint uv2")
    return mesh
def extract_material(data, m_idx):
    """Extract the base-color (albedo) texture of material *m_idx*.

    Returns a dict with "text_coord" (UV set index) and "albedo_texture"
    (a PIL image), or None when the index is out of range.
    """
    # Fixed: `m_idx > len(...)` was off by one; index == len is already
    # out of range.
    if m_idx < 0 or m_idx >= len(data.materials):
        return None
    mat_dict = dict()
    pbr_mat = data.materials[m_idx].pbrMetallicRoughness
    if pbr_mat is not None and pbr_mat.baseColorTexture is not None:
        tex_idx = pbr_mat.baseColorTexture.index
        image_idx = data.textures[tex_idx].source
        # Fixed: the UV-set index lives on the TextureInfo as `texCoord`
        # (the old code checked a misspelled `textCoord` attribute and
        # assigned a misspelled `text_coord` local, so the value was
        # always 0).
        tex_coord = getattr(pbr_mat.baseColorTexture, "texCoord", None) or 0
        mat_dict["text_coord"] = tex_coord
        mat_dict["albedo_texture"] = extract_image(data, data.images[image_idx])
        print("albedo", mat_dict["albedo_texture"])
    return mat_dict
def create_end_site(name):
    """Build a leaf ("end site") skeleton node with an identity transform."""
    return {
        "name": name,
        "children": [],
        "channels": [],
        "offset": [0, 0, 0],
        "rotation": [1, 0, 0, 0],  # identity quaternion (w, x, y, z)
        "fixed": True,
        "node_type": 2,  # 2 = end site (0 = root, 1 = joint)
        "index": -1,
    }
def transform_quat(r):
    """Reorder a glTF (x, y, z, w) quaternion to (w, x, y, z), normalized."""
    q = np.array([r[3], r[0], r[1], r[2]])
    # Normalize in place; glTF rotations should already be unit quaternions.
    q /= np.linalg.norm(q)
    return q
def transform_pos(t):
    """Convert a glTF translation to a numpy vector, applying the import SCALE."""
    return np.array([t[0], t[1], t[2]]) * SCALE
def get_local_bind_pose(joint_name, joints, parents):
    """Return the joint's bind-pose matrix relative to its parent.

    Falls back to identity when no inverse bind pose is stored.  The root
    (no parent) returns its global bind pose unchanged; other joints are
    pre-multiplied by the parent's inverse bind pose.
    """
    joint = joints[joint_name]
    if "inv_bind_pose" not in joint:
        return np.eye(4)
    global_pose = np.linalg.inv(joint["inv_bind_pose"])
    parent = parents[joint_name]
    if parent is None:
        return global_pose
    return np.dot(joints[parent]["inv_bind_pose"], global_pose)
def set_pose_from_bind_pose(joints, parents):
    """Derive each joint's local offset/rotation from its bind-pose matrix.

    Currently unused (the call site in extract_skeleton is commented out).
    NOTE(review): `[3, :3]` reads the translation from the bottom *row*,
    i.e. assumes a row-vector (transposed) layout — consistent with the
    `.T` passed to quaternion_from_matrix below, but confirm against how
    get_local_bind_pose lays out its matrices before re-enabling.
    """
    for j in joints:
        joints[j]["local_inv_bind_pose"] = get_local_bind_pose(j, joints, parents)
        joints[j]["offset"] = joints[j]["local_inv_bind_pose"][3,:3]
        joints[j]["rotation"] = quaternion_from_matrix(joints[j]["local_inv_bind_pose"].T)
        print(j, joints[j]["offset"])
def extract_skeleton(data, skin_idx):
    """Build a skeleton dict (nodes, root, animated joints) from a glTF skin.

    Adds synthetic "EndSite" leaf nodes so every joint chain is terminated,
    then walks parent links upward from the first joint to find the root.
    """
    skeleton = dict()
    skin = data.skins[skin_idx]
    a_idx = skin.inverseBindMatrices
    if a_idx < len(data.accessors):
        skeleton["inv_bind_matrices"] = extract_inv_bind_matrices(data, a_idx)
        print("loaded", len(skeleton["inv_bind_matrices"]), "matrices")
    joints = dict()
    joint_count = 0
    animated_joints = []
    # First pass: names of all skinned joints, in skin order (matches the
    # inverse-bind-matrix order).
    for node_idx in skin.joints:
        node = data.nodes[node_idx]
        animated_joints.append(node.name)
    parent_map = dict()
    # Second pass: one joint entry per skinned node.
    for node_idx in skin.joints:
        node = data.nodes[node_idx]
        # NOTE(review): this unconditionally resets the parent link, so a
        # joint listed *after* its parent loses the parent recorded below —
        # root detection then relies on animated_joints[0] being the root.
        # Confirm whether this should be a setdefault instead.
        parent_map[node.name] = None
        joint = dict()
        joint["index"] = joint_count
        joint["name"] = node.name
        children = []
        for c_idx in node.children:
            c_name = data.nodes[c_idx].name
            if c_name != node.name:
                children.append(c_name)
                parent_map[c_name] = node.name
                # Children that are not part of the skin become fixed end sites.
                if c_name not in animated_joints:
                    joints[c_name] = create_end_site(c_name)
        joint["children"] = children
        # Local rest transform from the node itself.
        joint["offset"] = transform_pos(node.translation)
        joint["rotation"] = transform_quat(node.rotation)
        joint["scale"] = node.scale
        joint["fixed"] = False
        joint["inv_bind_pose"] = skeleton["inv_bind_matrices"][joint_count]
        #print("bind matrices", node.name, joint["inv_bind_pose"])
        #joint["offset"] = offset#joint["inv_bind_pose"][:3,3]
        #joint["rotation"] = quaternion_from_matrix(np.linalg.inv(joint["inv_bind_pose"]))
        joint["channels"] = ["Xrotation", "Yrotation", "Zrotation"]
        joint["node_type"] = 1
        joints[node.name] = joint
        # Leaf joints get a synthetic end-site child appended.
        if len(children) == 0:
            end_site_name = node.name + "EndSite"
            end_site = create_end_site(end_site_name)
            joints[end_site_name] = end_site
            joints[node.name]["children"].append(end_site_name)
        joint_count+=1
    # Walk up from the first skinned joint to find the hierarchy root.
    root_joint = animated_joints[0]
    while parent_map[root_joint] is not None:
        root_joint = parent_map[root_joint]
    joints[root_joint]["node_type"] = 0
    # NOTE(review): "ZPosition" casing differs from the "Xposition"/"Yposition"
    # convention used alongside it — confirm the downstream channel parser.
    joints[root_joint]["channels"] = ["Xposition", "Yposition", "ZPosition", "Xrotation", "Yrotation", "Zrotation"]
    #set_pose_from_bind_pose(joints, parent_map)
    skeleton["nodes"] = joints
    skeleton["root"] = root_joint
    skeleton["animated_joints"] = animated_joints
    skeleton["frame_time"] = 1.0/30
    assert len(animated_joints)==len(skeleton["inv_bind_matrices"])
    print("loaded",len(animated_joints), "joints")
    return skeleton
def extract_anim_func(data, sampler):
    """Return the sampler's keyframes as a list of (time, value) pairs.

    Note: the sampler's interpolation mode is not used; it was read into an
    unused local before (removed).
    """
    times = extract_values(data, sampler.input)
    values = extract_values(data, sampler.output)
    return list(zip(times, values))
def extract_animations(data):
    """Collect animation curves keyed by node name, then by target path.

    Returns {node_name: {path: [(time, value), ...]}}.
    """
    animations = dict()
    if hasattr(data, "animations") and data.animations is not None:
        for a in data.animations:
            for c in a.channels:
                sampler_idx = c.sampler
                # Fixed: index == len(samplers) is already out of range, so
                # the guard must be >=, not >.  (Also dropped an unused
                # `channel_desc` local and debug prints.)
                if sampler_idx >= len(a.samplers):
                    continue
                node_name = data.nodes[c.target.node].name
                if node_name not in animations:
                    animations[node_name] = dict()
                animations[node_name][c.target.path] = extract_anim_func(data, a.samplers[sampler_idx])
    return animations
def load_model_from_gltf_file(filename):
    """Load a glTF/glb file into a plain dict with meshes, skeleton, animations.

    Returns {"mesh_list": [...], "skeleton": dict or None, "animations": dict}.
    If several nodes carry skins, only the last one's skeleton is kept.
    """
    data = gltf.GLTF2().load(filename)
    # Fold all buffers into one binary blob so accessors index a single buffer.
    data.convert_buffers(gltf.BufferFormat.BINARYBLOB)
    #print(data.nodes[0])
    model_data = dict()
    meshes = list()
    skeleton = None
    for node in data.nodes:
        if node.mesh is not None:
            for p in data.meshes[node.mesh].primitives:
                mesh = extract_mesh(data, p)
                # Primitives without positions are useless downstream.
                if "vertices" not in mesh:
                    continue
                if p.material is not None:
                    mesh["material"] = extract_material(data, p.material)
                meshes.append(mesh)
        if node.skin is not None:
            skeleton = extract_skeleton(data, node.skin)
            print(skeleton)
    #skeleton = None
    animations = extract_animations(data)
    print("found",len(animations), "animations")
    model_data["mesh_list"] = meshes
    model_data["skeleton"] = skeleton
    model_data["animations"] = animations
    return model_data
if __name__ == "__main__":
    # Ad-hoc manual test entry point; point this at a local glTF sample file.
    filename = r"C:\Users\herrmann\Downloads\Box.gltf"
    load_model_from_gltf_file(filename)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.