import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection
import InterruptionAnalysis as ia
data = pd.read_csv('./data/timeseries.csv', index_col = 0)
votedata = pd.read_csv('./data/vote-data.csv')
a = 0.99
gID = 'YMY'
pIDs = pd.unique(votedata[votedata['gID'] == gID]['pID'])
votecols = ['voteFor_p1', 'voteFor_p2', 'voteFor_p3', 'voteFor_p4', 'voteFor_p5']
numeric_cols = ['begin', 'end', 'dur', 'lat']
data[numeric_cols] = data[numeric_cols].apply(lambda x: x/1000)
mc = "#00688b"
fc = "#cdad00"
colors = [mc, mc, mc, mc, fc, mc, fc, fc, fc, mc]
plt.rcParams.update({'font.size': 14})
fig, ax = plt.subplots(figsize = [12, 4])
b, c = .25, 0
yticks = []
for pID, color in zip(pIDs, colors):
dat = data[data['pID'] == pID]
dat = dat.sort_values(['begin'])
lines = [[(i, b), (j, b)] for i, j in zip(dat.begin, dat.end)]
lc = LineCollection(lines, linewidths = 10, colors = color, label = pID)
ax.add_collection(lc)
yticks.append(b)
b += .25
if dat.empty:
continue
c += 1
ax.set_xlim([0, max(data['end']) + 5])
ax.set_ylim([0, b])
ax.set_yticks(yticks)
ax.set_yticklabels([pid[3:] for pid in pIDs])
ax.set_xlabel('t (sec)')
ax.set_ylabel('Participant')
fig.tight_layout()
#fig.savefig('./img/ymy-speechactivity.svg', transparent = True)
fig.show()
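# Note on the approach above: each participant's speaking bouts are drawn as horizontal
# segments via a LineCollection, one row (y = b) per participant, so the figure reads as a
# speech-activity timeline. The colors list is assumed to encode a per-participant grouping
# (mc vs. fc) and must stay in the same order as pIDs.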
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
import importlib
from collections import OrderedDict
from typing import Any, Optional, TYPE_CHECKING
from cdm.enums import CdmObjectType
from cdm.utilities import AttributeResolutionDirectiveSet, CdmError, logger, ResolveOptions
if TYPE_CHECKING:
from cdm.objectmodel import CdmCorpusContext, CdmCorpusDefinition, CdmObject
from cdm.utilities import CopyOptions, JObject
class PersistenceLayer:
CDM_EXTENSION = '.cdm.json'
FOLIO_EXTENSION = '.folio.cdm.json'
MANIFEST_EXTENSION = '.manifest.cdm.json'
MODEL_JSON_EXTENSION = 'model.json'
ODI_EXTENSION = 'odi.json'
CDM_FOLDER = 'CdmFolder'
MODEL_JSON = 'ModelJson'
ODI = 'Odi'
def __init__(self, corpus: 'CdmCorpusDefinition'):
self._corpus = corpus
self._registered_persistence_formats = OrderedDict() # type: Dict[str, object]
self._is_registered_persistence_async = OrderedDict() # type: Dict[object, bool]
# Register known persistence classes.
self.register_format('cdm.persistence.cdmfolder.ManifestPersistence')
self.register_format('cdm.persistence.modeljson.ManifestPersistence')
self.register_format('cdm.persistence.cdmfolder.DocumentPersistence')
# TODO: re-add with ODI
# self.register_format('cdm.persistence.odi.ManifestPersistence')
self._TAG = PersistenceLayer.__name__
@property
def _ctx(self) -> 'CdmCorpusContext':
return self._corpus.ctx
@classmethod
def from_data(cls, *args) -> 'CdmObject':
"""
* @param args arguments passed to the persistence class.
* @param objectType any of cdmObjectType.
* @param persistence_type a type supported by the persistence layer. Can be any of PersistenceTypes.
"""
arglist = list(args)
persistence_type = arglist.pop()
object_type = arglist.pop()
return cls.fetch_persistence_class(object_type, persistence_type).from_data(*arglist)
@classmethod
def to_data(cls, instance: 'CdmObject', res_opt: 'ResolveOptions', copy_options: 'CopyOptions', persistence_type: str) -> 'JObject':
"""
* @param instance the instance that is going to be serialized.
* @param res_opt information about how to resolve the instance.
* @param copy_options set of options to specify the output format.
* @param persistence_type a type supported by the persistence layer. Can be any of PersistenceTypes.
"""
return cls.fetch_persistence_class(instance.object_type, persistence_type).to_data(instance, res_opt, copy_options)
async def _load_document_from_path_async(self, folder: 'CdmFolderDefinition', doc_name: str, doc_container: 'CdmDocumentDefinition', res_opt: Optional[ResolveOptions] = None) \
-> 'CdmDocumentDefinition':
# go get the doc
doc_content = None # type: Optional[CdmDocumentDefinition]
json_data = None
fs_modified_time = None
doc_path = folder.folder_path + doc_name
adapter = self._ctx.corpus.storage.fetch_adapter(folder.namespace) # type: StorageAdapter
try:
if adapter.can_read():
# log message used by navigator, do not change or remove
logger.debug(self._TAG, self._ctx, 'request file: {}'.format(doc_path), self._load_document_from_path_async.__name__)
json_data = await adapter.read_async(doc_path)
# log message used by navigator, do not change or remove
logger.debug(self._TAG, self._ctx, 'received file: {}'.format(doc_path), self._load_document_from_path_async.__name__)
else:
raise Exception('Storage Adapter is not enabled to read.')
except Exception as e:
# log message used by navigator, do not change or remove
logger.debug(self._TAG, self._ctx, 'fail file: {}'.format(doc_path), self._load_document_from_path_async.__name__)
message = 'Could not read {} from the \'{}\' namespace.\n Reason: {}'.format(doc_path, folder.namespace, e)
# when shallow validation is enabled, log messages about being unable to find referenced documents as warnings instead of errors.
if res_opt and res_opt.shallow_validation:
logger.warning(self._TAG, self._ctx, message, self._load_document_from_path_async.__name__)
else:
logger.error(self._TAG, self._ctx, message, self._load_document_from_path_async.__name__)
return None
try:
fs_modified_time = await adapter.compute_last_modified_time_async(doc_path)
except Exception as e:
logger.warning(self._TAG, self._ctx, 'Failed to compute file last modified time. Reason {}'.format(e), self._load_document_from_path_async.__name__)
if not doc_name:
logger.error(self._TAG, self._ctx, 'Document name cannot be null or empty.', self._load_document_from_path_async.__name__)
return None
# If loading an odi.json/model.json file, check that it is named correctly.
if doc_name.lower().endswith(self.ODI_EXTENSION) and not doc_name.lower() == self.ODI_EXTENSION:
logger.error(self._TAG, self._ctx, 'Failed to load \'{}\', as it\'s not an acceptable file name. It must be {}.'.format(
doc_name, self.ODI_EXTENSION), self._load_document_from_path_async.__name__)
return None
if doc_name.lower().endswith(self.MODEL_JSON_EXTENSION) and not doc_name.lower() == self.MODEL_JSON_EXTENSION:
logger.error(self._TAG, self._ctx, 'Failed to load \'{}\', as it\'s not an acceptable file name. It must be {}.'.format(
doc_name, self.MODEL_JSON_EXTENSION), self._load_document_from_path_async.__name__)
return None
# Fetch the correct persistence class to use.
persistence_class = self._fetch_registered_persistence_format(doc_name)
if persistence_class:
try:
method = persistence_class.from_data
parameters = [self._ctx, doc_name, json_data, folder]
# check if from_data() is asynchronous for this persistence class.
if persistence_class not in self._is_registered_persistence_async:
# Cache whether this persistence class has async methods.
self._is_registered_persistence_async[persistence_class] = persistence_class.is_persistence_async
if self._is_registered_persistence_async[persistence_class]:
doc_content = await method(*parameters)
else:
doc_content = method(*parameters)
except Exception as e:
logger.error(self._TAG, self._ctx, 'Could not convert \'{}\'. Reason \'{}\''.format(doc_name, e), self._load_document_from_path_async.__name__)
return None
else:
# could not find a registered persistence class to handle this document type.
logger.error(self._TAG, self._ctx, 'Could not find a persistence class to handle the file \'{}\''.format(
doc_name), self._load_document_from_path_async.__name__)
return None
# add document to the folder, this sets all the folder/path things, caches name to content association and may trigger indexing on content
if doc_content is not None:
if doc_container:
# there are situations where a previously loaded document must be re-loaded.
# the end of that chain of work is here where the old version of the document has been removed from
# the corpus and we have created a new document and loaded it from storage and after this call we will probably
# add it to the corpus and index it, etc.
# it would be really rude to just kill that old object and replace it with this replicant, especially because
# the caller has no idea this happened. so... sigh ... instead of returning the new object return the one that
# was just killed off but make it contain everything the new document loaded.
doc_content = doc_content.copy(ResolveOptions(wrt_doc=doc_container), doc_container)
folder.documents.append(doc_content, doc_name)
doc_content._file_system_modified_time = fs_modified_time
doc_content._is_dirty = False
return doc_content
@classmethod
def fetch_persistence_class(cls, object_type: CdmObjectType, persistence_type: str) -> 'object':
object_name = object_type.name.lower() # CdmObjectType[object_type]
if object_name.endswith('def'):
object_name = object_name[0:-4]
elif object_name.endswith('ref'):
object_name += 'erence'
persistence_module_name = '{}_persistence'.format(object_name)
persistence_class_name = ''.join([x.title() for x in persistence_module_name.split('_')])
persistence_module = importlib.import_module('cdm.persistence.{}.{}'.format(persistence_type.lower(), persistence_module_name))
PersistenceClass = getattr(persistence_module, persistence_class_name, None)
if not PersistenceClass:
raise CdmError('Persistence class for {} is not implemented in type {}.'.format(persistence_class_name, persistence_type))
instance = PersistenceClass()
return instance
def _fetch_registered_persistence_format(self, doc_name: str) -> 'object':
for registered_persistence_format in self._registered_persistence_formats:
# find the persistence class to use for this document.
if doc_name.lower().endswith(registered_persistence_format):
return self._registered_persistence_formats[registered_persistence_format]
return None
async def _save_document_as_async(self, doc: 'CdmDocumentDefinition', options: 'CopyOptions', new_name: str, save_referenced: bool) -> bool:
"""a manifest or document can be saved with a new or exisitng name. This function on the corpus does all the actual work
because the corpus knows about persistence types and about the storage adapters
if saved with the same name, then consider this document 'clean' from changes. if saved with a back compat model or
to a different name, then the source object is still 'dirty'
an option will cause us to also save any linked documents."""
# find out if the storage adapter is able to write.
namespace = doc.namespace
if namespace is None:
namespace = self._corpus.storage.default_namespace
adapter = self._corpus.storage.fetch_adapter(namespace)
if adapter is None:
logger.error(self._TAG, self._ctx, 'Couldn\'t find a storage adapter registered for the namespace \'{}\''.format(
namespace), self._save_document_as_async.__name__)
return False
if not adapter.can_write():
logger.error(self._TAG, self._ctx, 'The storage adapter \'{}\' claims it is unable to write files.'.format(
namespace), self._save_document_as_async.__name__)
return False
if not new_name:
logger.error(self._TAG, self._ctx, 'Document name cannot be null or empty.', self._save_document_as_async.__name__)
return False
# what kind of document is requested?
# check file extensions using a case-insensitive ordinal string comparison.
persistence_type = self.MODEL_JSON if new_name.lower().endswith(self.MODEL_JSON_EXTENSION) else \
(self.ODI if new_name.lower().endswith(self.ODI_EXTENSION) else self.CDM_FOLDER)
if persistence_type == self.ODI and new_name.lower() != self.ODI_EXTENSION:
logger.error(self._TAG, self._ctx, 'Failed to persist \'{}\', as it\'s not an acceptable filename. It must be {}'.format(
new_name, self.ODI_EXTENSION), self._save_document_as_async.__name__)
return False
if persistence_type == self.MODEL_JSON and new_name.lower() != self.MODEL_JSON_EXTENSION:
logger.error(self._TAG, self._ctx, 'Failed to persist \'{}\', as it\'s not an acceptable filename. It must be {}'.format(
new_name, self.MODEL_JSON_EXTENSION), self._save_document_as_async.__name__)
return False
# save the object into a json blob
res_opt = {'wrt_doc': doc, 'directives': AttributeResolutionDirectiveSet()}
persisted_doc = None
persistence_class = self._fetch_registered_persistence_format(new_name)
if persistence_class:
try:
method = persistence_class.to_data
parameters = [doc, res_opt, options]
# Check if to_data() is asynchronous for this persistence class.
if persistence_class not in self._is_registered_persistence_async:
# Cache whether this persistence class has async methods.
self._is_registered_persistence_async[persistence_class] = persistence_class.is_persistence_async
if self._is_registered_persistence_async[persistence_class]:
# We don't know what the return type of ToData() is going to be and Task<T> is not covariant,
# so we can't use Task<dynamic> here. Instead, we just await on a Task object without a return value
# and fetch the Result property from it, which will have the result of ToData().
persisted_doc = await method(*parameters)
else:
persisted_doc = method(*parameters)
except Exception as e:
logger.error(self._TAG, self._ctx, 'Could not persist file \'{}\'. Reason \'{}\'.'.format(new_name, e), self._save_document_as_async.__name__)
return False
else:
# Could not find a registered persistence class to handle this document type.
logger.error(self._TAG, self._ctx, 'Could not find a persistence class to handle the file \'{}\'.'.format(
new_name), self._save_document_as_async.__name__)
return False
if not persisted_doc:
logger.error(self._TAG, self._ctx, 'Failed to persist \'{}\''.format(new_name), self._save_document_as_async.__name__)
return False
if persistence_type == self.ODI:
await self._save_odi_documents(persisted_doc, adapter, new_name)
return True
# turn the name into a path
new_path = '{}{}'.format(doc.folder_path, new_name)
new_path = self._ctx.corpus.storage.create_absolute_corpus_path(new_path, doc)
if new_path.startswith(namespace + ':'):
new_path = new_path[len(namespace)+1:]
# ask the adapter to make it happen
try:
content = persisted_doc.encode()
await adapter.write_async(new_path, content)
# Write the adapter's config.
if options._is_top_level_document:
await self._corpus.storage.save_adapters_config_async('/config.json', adapter)
# The next document won't be top level, so reset the flag.
options._is_top_level_document = False
except Exception as e:
logger.error(self._TAG, self._ctx, 'Failed to write to the file \'{}\' for reason \'{}\''.format(new_name, e),
self._save_document_as_async.__name__)
return False
# if we also want to save referenced docs, then it depends on what kind of thing just got saved
# if a model.json there are none. If a manifest or definition doc then ask the docs to do the right things
# definition will save imports, manifests will save imports, schemas, sub manifests
if save_referenced and persistence_type == self.CDM_FOLDER:
saved_linked_docs = await doc._save_linked_documents_async(options)
if not saved_linked_docs:
logger.error(self._TAG, self._ctx, 'Failed to save linked documents for file \'{}\''.format(new_name), self._save_document_as_async.__name__)
return False
return True
def register_format(self, persistence_class_name: str, assembly_name: Optional[str] = None) -> None:
try:
path_split = persistence_class_name.split('.')
class_path = '.'.join(path_split[:-1])
class_name = path_split[-1]
persistence_module = importlib.import_module(class_path)
persistence_class = getattr(persistence_module, class_name)
formats = persistence_class.formats # type: List[str]
for form in formats:
self._registered_persistence_formats[form] = persistence_class
except Exception as e:
logger.info(self._TAG, self._ctx, 'Unable to register persistence class {}. Reason: {}.'.format(persistence_class_name, e))
async def _save_odi_documents(self, doc: Any, adapter: 'StorageAdapter', new_name: str) -> None:
if doc is None:
raise Exception('Failed to persist document because doc is null.')
# ask the adapter to make it happen.
try:
old_document_path = doc.documentPath
new_document_path = old_document_path[0: len(old_document_path) - len(self.ODI_EXTENSION)] + new_name
content = doc.encode()
await adapter.write_async(new_document_path, content)
except Exception as e:
logger.error(self._TAG, self._ctx, 'Failed to write to the file \'{}\' for reason {}.'.format(
doc.documentPath, e), self._save_odi_documents.__name__)
# Save linked documents.
if doc.get('linkedDocuments') is not None:
for linked_doc in doc.linkedDocuments:
await self._save_odi_documents(linked_doc, adapter, new_name)
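# Illustrative note (not part of the SDK source): fetch_persistence_class derives module and
# class names purely by convention. For example, assuming an object type member named
# ENTITY_DEF and persistence_type 'CdmFolder', the lookup proceeds as:
#   object_name            -> 'entity_def' -> 'entity'        (the '_def' suffix is dropped)
#   persistence module     -> cdm.persistence.cdmfolder.entity_persistence
#   persistence class name -> EntityPersistence
# A missing class raises CdmError, so new persistence formats must follow the same naming.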
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved
#
# _new_bias_add_1_14() derived from source in
# https://github.com/tensorflow/tensorflow and therefore
# Copyright 2019 The TensorFlow Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import os
import tensorflow as tf
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class BiasAddTest(test.TestCase):
def _npBias(self, inputs, bias):
assert len(bias.shape) == 1
assert inputs.shape[-1] == bias.shape[0]
return inputs + bias.reshape(([1] * (len(inputs.shape) - 1)) +
[bias.shape[0]])
def testNpBias(self):
self.assertAllClose(
np.array([[11, 22, 33], [41, 52, 63]]),
self._npBias(
np.array([[10, 20, 30], [40, 50, 60]]), np.array([1, 2, 3])))
def _testBias(self, np_inputs, np_bias, use_gpu=False):
np_val = self._npBias(np_inputs, np_bias)
with self.cached_session(use_gpu=use_gpu):
tf_val = nn_ops.bias_add(np_inputs, np_bias).eval()
self.assertAllCloseAccordingToType(np_val, tf_val)
def _AtLeast3d(self, np_value):
# pad the input to at least 3 dimensions
if np_value.ndim < 3:
return np.reshape(np_value, (1,) * (3 - np_value.ndim) + np_value.shape)
return np_value
def _NHWCToNCHW(self, np_value):
# pad the input to at least 3 dimensions
np_value = self._AtLeast3d(np_value)
# move the last dimension to second
np_dim = list(range(np_value.ndim))
np_dim_new = list(np_dim[0:1]) + list(np_dim[-1:]) + list(np_dim[1:-1])
return np.transpose(np_value, np_dim_new)
def _NCHWToNHWC(self, np_value):
assert len(np_value.shape) >= 3
np_dim = list(range(np_value.ndim))
# move the second dimension to the last
np_dim_new = list(np_dim[0:1]) + list(np_dim[2:]) + list(np_dim[1:2])
return np.transpose(np_value, np_dim_new)
def _testBiasNCHW(self, np_inputs, np_bias, use_gpu):
np_val = self._npBias(np_inputs, np_bias)
np_inputs = self._NHWCToNCHW(np_inputs)
with self.cached_session(use_gpu=use_gpu):
tf_val = nn_ops.bias_add(np_inputs, np_bias, data_format="NCHW").eval()
tf_val = self._NCHWToNHWC(tf_val)
self.assertAllCloseAccordingToType(self._AtLeast3d(np_val), tf_val)
def _testAll(self, np_inputs, np_bias):
self._testBias(np_inputs, np_bias, use_gpu=False)
self._testBiasNCHW(np_inputs, np_bias, use_gpu=False)
if np_inputs.dtype in [np.float16, np.float32, np.float64]:
self._testBias(np_inputs, np_bias, use_gpu=True)
self._testBiasNCHW(np_inputs, np_bias, use_gpu=True)
@test_util.run_deprecated_v1
def testIntTypes(self):
for t in [np.int8, np.int16, np.int32, np.int64]:
self._testAll(
np.array([[10, 20, 30], [40, 50, 60]]).astype(t),
np.array([1, 2, 3]).astype(t))
@test_util.run_deprecated_v1
def testFloatTypes(self):
for t in [np.float16, np.float32, np.float64]:
self._testAll(
np.random.rand(4, 3, 3).astype(t), np.random.rand(3).astype(t))
@test_util.run_deprecated_v1
def test4DFloatTypes(self):
for t in [np.float16, np.float32, np.float64]:
self._testAll(
np.random.rand(4, 3, 2, 3).astype(t),
np.random.rand(3).astype(t))
self._testAll(
np.random.rand(2048, 4, 4, 4).astype(t),
np.random.rand(4).astype(t))
self._testAll(
np.random.rand(4, 4, 4, 2048).astype(t),
np.random.rand(2048).astype(t))
@test_util.run_deprecated_v1
def test5DFloatTypes(self):
for t in [np.float16, np.float32, np.float64]:
self._testAll(
np.random.rand(4, 3, 2, 3, 4).astype(t),
np.random.rand(4).astype(t))
def _testGradient(self, np_input, bias, dtype, data_format, use_gpu):
with self.cached_session(use_gpu=use_gpu):
if data_format == "NCHW":
np_input = self._NHWCToNCHW(np_input)
input_tensor = constant_op.constant(
np_input, shape=np_input.shape, dtype=dtype)
bias_tensor = constant_op.constant(bias, shape=bias.shape, dtype=dtype)
output_tensor = nn_ops.bias_add(
input_tensor, bias_tensor, data_format=data_format)
tensor_jacob_t, tensor_jacob_n = gradient_checker.compute_gradient(
input_tensor, np_input.shape, output_tensor, np_input.shape)
bias_jacob_t, bias_jacob_n = gradient_checker.compute_gradient(
bias_tensor, bias.shape, output_tensor, np_input.shape)
# Test gradient of BiasAddGrad
bias_add_grad = gradients_impl.gradients(
nn_ops.l2_loss(output_tensor), bias_tensor)[0]
grad_jacob_t, grad_jacob_n = gradient_checker.compute_gradient(
output_tensor, np_input.shape, bias_add_grad, bias.shape)
if dtype == np.float16:
# Compare fp16 analytical gradients to fp32 numerical gradients,
# since fp16 numerical gradients are too imprecise unless great
# care is taken with choosing the inputs and the delta. This is
# a weaker, but pragmatic, check (in particular, it does not test
# the op itself, only its gradient).
input_tensor = constant_op.constant(
np_input, shape=np_input.shape, dtype=np.float32)
bias_tensor = constant_op.constant(
bias, shape=bias.shape, dtype=np.float32)
output_tensor = nn_ops.bias_add(
input_tensor, bias_tensor, data_format=data_format)
_, tensor_jacob_n = gradient_checker.compute_gradient(input_tensor,
np_input.shape,
output_tensor,
np_input.shape)
_, bias_jacob_n = gradient_checker.compute_gradient(bias_tensor,
bias.shape,
output_tensor,
np_input.shape)
bias_add_grad = gradients_impl.gradients(
nn_ops.l2_loss(output_tensor), bias_tensor)[0]
_, grad_jacob_n = gradient_checker.compute_gradient(output_tensor,
np_input.shape,
bias_add_grad,
bias.shape)
threshold = 5e-3
if dtype == dtypes.float64:
threshold = 1e-10
self.assertAllClose(tensor_jacob_t, tensor_jacob_n, threshold, threshold)
self.assertAllClose(bias_jacob_t, bias_jacob_n, threshold, threshold)
self.assertAllClose(grad_jacob_t, grad_jacob_n, threshold, threshold)
@test_util.run_deprecated_v1
def testGradientTensor2D(self):
for (data_format, use_gpu) in ("NHWC", False), ("NHWC", True):
for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
np_input = np.array(
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
dtype=dtype.as_numpy_dtype).reshape(3, 2)
bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
self._testGradient(np_input, bias, dtype, data_format, use_gpu)
@test_util.run_deprecated_v1
def testGradientTensor3D(self):
for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True),
("NCHW", False), ("NCHW", True)]:
for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
np_input = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
dtype=dtype.as_numpy_dtype).reshape(1, 3, 2)
bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
self._testGradient(np_input, bias, dtype, data_format, use_gpu)
@test_util.run_deprecated_v1
def testGradientTensor4D(self):
for (data_format, use_gpu) in [("NHWC", False)]:
for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
np_input = np.arange(
1.0, 49.0, dtype=dtype.as_numpy_dtype).reshape(
[2, 3, 4, 2]).astype(np.float32)
bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
self._testGradient(np_input, bias, dtype, data_format, use_gpu)
np_input = np.arange(
1.0, 513.0, dtype=dtype.as_numpy_dtype).reshape(
[64, 2, 2, 2]).astype(np.float32)
self._testGradient(np_input, bias, dtype, data_format, use_gpu)
np_input = np.arange(
1.0, 513.0, dtype=dtype.as_numpy_dtype).reshape(
[2, 2, 2, 64]).astype(np.float32)
self._testGradient(np_input,
np.random.rand(64).astype(dtype.as_numpy_dtype),
dtype, data_format, use_gpu)
@test_util.run_deprecated_v1
def testGradientTensor5D(self):
for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True),
("NCHW", False), ("NCHW", True)]:
for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
np_input = np.arange(
1.0, 49.0, dtype=dtype.as_numpy_dtype).reshape(
[1, 2, 3, 4, 2]).astype(np.float32)
bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
self._testGradient(np_input, bias, dtype, data_format, use_gpu)
@test_util.run_deprecated_v1
def testEmpty(self):
np.random.seed(7)
for shape in (0, 0), (2, 0), (0, 2), (4, 3, 0), (4, 0, 3), (0, 4, 3):
self._testAll(np.random.randn(*shape), np.random.randn(shape[-1]))
@test_util.run_deprecated_v1
def testEmptyGradient(self):
for (data_format, use_gpu) in ("NHWC", False), ("NHWC", True):
for shape in (0, 0), (2, 0), (0, 2):
self._testGradient(
np.random.randn(*shape), np.random.randn(shape[-1]), dtypes.float64,
data_format, use_gpu)
for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True),
("NCHW", False), ("NCHW", True)]:
for shape in (4, 3, 0), (4, 0, 3), (0, 4, 3):
self._testGradient(
np.random.randn(*shape),
np.random.randn(shape[-1]), dtypes.float64, data_format, use_gpu)
# Deterministic testing starts here
def _make_shape_tuple(self, batch_size, channel_count, data_rank, data_dim,
data_layout):
data_dims = data_rank * (data_dim,)
if data_layout == 'channels_first':
shape = (batch_size,) + (channel_count,) + data_dims
elif data_layout == 'channels_last':
shape = (batch_size,) + data_dims + (channel_count,)
else:
raise ValueError("Unknown data format")
return shape
def _data_format_from_data_layout(self, data_layout=None):
if data_layout == 'channels_first':
return 'NCHW'
elif data_layout == 'channels_last':
return 'NHWC'
else:
raise ValueError("Unknown data_layout")
def _random_data_op(self, shape, data_type):
return constant_op.constant(
2 * np.random.random_sample(shape) - 1, dtype=data_type)
def _random_ndarray(self, shape):
return 2 * np.random.random_sample(shape) - 1
def _assert_reproducible(self, operation, feed_dict={}):
with self.cached_session(force_gpu=True):
result_a = operation[0].eval(feed_dict=feed_dict)
result_b = operation[0].eval(feed_dict=feed_dict)
self.assertAllEqual(result_a, result_b)
def _testDeterministicGradientsCase(self, op_binding, data_layout, data_rank,
data_type):
seed = (hash(data_layout) % 256 +
hash(data_rank) % 256 +
hash(data_type) % 256)
np.random.seed(seed)
batch_size = 10
channel_count = 8
data_dim = 14
in_shape = self._make_shape_tuple(batch_size, channel_count, data_rank,
data_dim, data_layout)
bias_shape = (channel_count,)
out_shape = in_shape
in_op = self._random_data_op(in_shape, data_type)
bias_op = self._random_data_op(bias_shape, data_type)
data_format = self._data_format_from_data_layout(data_layout)
bias_add_op = op_binding(in_op, bias_op, data_format=data_format)
upstream_gradients = array_ops.placeholder(data_type, shape=out_shape,
name='upstream_gradients')
gradient_injector_op = bias_add_op * upstream_gradients
# The gradient function behaves as if grad_ys is multiplied by the op
# gradient result, not passing the upstream gradients through the op's
# gradient generation graph. This is the reason for using the
# gradient_injector_op.
grad_ys = None
bias_gradients_op = gradients_impl.gradients(
gradient_injector_op, bias_op, grad_ys=grad_ys,
colocate_gradients_with_ops=True)
for i in range(5):
feed_dict = {upstream_gradients: self._random_ndarray(out_shape)}
self._assert_reproducible(bias_gradients_op, feed_dict=feed_dict)
@test_util.run_cuda_only
def testDeterministicGradients(self):
for op_binding in (tf.nn.bias_add, nn.bias_add, nn_ops.bias_add):
for data_layout in ('channels_first', 'channels_last'):
for data_rank in (1, 2, 3):
for data_type in (dtypes.float16, dtypes.float32, dtypes.float64):
self._testDeterministicGradientsCase(op_binding, data_layout,
data_rank, data_type)
if __name__ == "__main__":
import sys
sys.path.append('..')
from tfdeterminism import patch
patch()
test.main()
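# Worked example of the layout helpers above (values chosen for illustration): _NHWCToNCHW
# builds the permutation [0, ndim-1, 1, ..., ndim-2], so a 4-D NHWC tensor of shape
# (4, 3, 2, 3) is transposed with axes (0, 3, 1, 2) into the NCHW shape (4, 3, 3, 2), and
# _NCHWToNHWC applies the inverse permutation (0, 2, 3, 1) to recover the original layout.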
|
from itertools import combinations
s = {1, 2, 3, 4}
print(list(combinations(s, 2)))
print(list(combinations(s, 3)))
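# Expected output (pair and triple order follows the set's iteration order, which for these
# small integers is ascending in CPython):
# [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
# [(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]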
|
## Generate the TPs files
#[print(2**i) for i in range(32)]
fstr_x = '''#> raycasting:block/pos_error/tps/x_{0}
# @within function raycasting:block/pos_error/**
tp @s ~-{0} ~ ~
scoreboard players remove $raycasting:block/pos_error/_.pos[0] temporary {0}
'''
fstr_y = '''#> raycasting:block/pos_error/tps/y_{0}
# @within function raycasting:block/pos_error/**
tp @s ~ ~-{0} ~
scoreboard players remove $raycasting:block/pos_error/_.pos[1] temporary {0}
'''
fstr_z = '''#> raycasting:block/pos_error/tps/z_{0}
# @within function raycasting:block/pos_error/**
tp @s ~ ~ ~-{0}
scoreboard players remove $raycasting:block/pos_error/_.pos[2] temporary {0}
'''
fstr_mx = '''#> raycasting:block/pos_error/tps/x_m{0}
# @within function raycasting:block/pos_error/**
tp @s ~{0} ~ ~
scoreboard players add $raycasting:block/pos_error/_.pos[0] temporary {0}
'''
fstr_my = '''#> raycasting:block/pos_error/tps/y_m{0}
# @within function raycasting:block/pos_error/**
tp @s ~ ~{0} ~
scoreboard players add $raycasting:block/pos_error/_.pos[1] temporary {0}
'''
fstr_mz = '''#> raycasting:block/pos_error/tps/z_m{0}
# @within function raycasting:block/pos_error/**
tp @s ~ ~ ~{0}
scoreboard players add $raycasting:block/pos_error/_.pos[2] temporary {0}
'''
import os
os.makedirs('tps',exist_ok=True)
for i in range(8):
with open(f'tps/x_{2**i}.mcfunction','w') as f:
f.write(fstr_x.format(2**i))
with open(f'tps/y_{2**i}.mcfunction','w') as f:
f.write(fstr_y.format(2**i))
with open(f'tps/z_{2**i}.mcfunction','w') as f:
f.write(fstr_z.format(2**i))
with open(f'tps/x_m{2**i}.mcfunction','w') as f:
f.write(fstr_mx.format(2**i))
with open(f'tps/y_m{2**i}.mcfunction','w') as f:
f.write(fstr_my.format(2**i))
with open(f'tps/z_m{2**i}.mcfunction','w') as f:
f.write(fstr_mz.format(2**i))
fstr1_x = '''#> raycasting:block/pos_error/tps/x
# @within function raycasting:block/pos_error/**
'''
fstr1_y = '''#> raycasting:block/pos_error/tps/y
# @within function raycasting:block/pos_error/**
'''
fstr1_z = '''#> raycasting:block/pos_error/tps/z
# @within function raycasting:block/pos_error/**
'''
with open(f'tps/x.mcfunction','w') as f:
f.write(fstr1_x+'\n')
for i in range(8):
f.write(f'execute if score $raycasting:block/pos_error/_.pos[0] temporary matches {2**i}.. at @s run function raycasting:block/pos_error/tps/x_{2**i}\n')
for i in range(8):
f.write(f'execute if score $raycasting:block/pos_error/_.pos[0] temporary matches ..-{2**i} at @s run function raycasting:block/pos_error/tps/x_m{2**i}\n')
f.write(f'execute unless score $raycasting:block/pos_error/_.pos[0] temporary matches 0 run function raycasting:block/pos_error/tps/x\n')
with open(f'tps/y.mcfunction','w') as f:
f.write(fstr1_y+'\n')
for i in range(8):
f.write(f'execute if score $raycasting:block/pos_error/_.pos[1] temporary matches {2**i}.. at @s run function raycasting:block/pos_error/tps/y_{2**i}\n')
for i in range(8):
f.write(f'execute if score $raycasting:block/pos_error/_.pos[1] temporary matches ..-{2**i} at @s run function raycasting:block/pos_error/tps/y_m{2**i}\n')
f.write(f'execute unless score $raycasting:block/pos_error/_.pos[1] temporary matches 0 run function raycasting:block/pos_error/tps/y\n')
with open(f'tps/z.mcfunction','w') as f:
f.write(fstr1_z+'\n')
for i in range(8):
f.write(f'execute if score $raycasting:block/pos_error/_.pos[2] temporary matches {2**i}.. at @s run function raycasting:block/pos_error/tps/z_{2**i}\n')
for i in range(8):
f.write(f'execute if score $raycasting:block/pos_error/_.pos[2] temporary matches ..-{2**i} at @s run function raycasting:block/pos_error/tps/z_m{2**i}\n')
f.write(f'execute unless score $raycasting:block/pos_error/_.pos[2] temporary matches 0 run function raycasting:block/pos_error/tps/z\n')
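# For reference, the loop above produces files such as tps/x_1.mcfunction containing:
#
#   #> raycasting:block/pos_error/tps/x_1
#   # @within function raycasting:block/pos_error/**
#   tp @s ~-1 ~ ~
#   scoreboard players remove $raycasting:block/pos_error/_.pos[0] temporary 1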
|
"""
* Copyright (c) 2018 Anthony Beaucamp.
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from
* the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software in a
* product, an acknowledgment in the product documentation would be
* appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and must not
* be misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any distribution.
*
* 4. The names of this software and/or its copyright holders may not be
* used to endorse or promote products derived from this software without
* specific prior written permission.
"""
import io,struct, sys
from PIL import Image
input = sys.argv[1]
output = sys.argv[2]
#################################
# Read source bitmap and palette
img1 = Image.open(input)
rawdata = list(img1.getdata())
colors = max(rawdata)
print "Sprite sheet size: {%i,%i}; Number of colors: %i" % (img1.size[0], img1.size[1], colors)
################################
# Rearrange into 12 * 21 blocks
griddata = []
for row in range(0, img1.size[1], 21):
for col in range(0, img1.size[0], 12):
for j in range(0, 21):
for i in range(0, 12):
griddata.append(rawdata[(row+j)*img1.size[0]+col+i])
#############################
# Split into 4 colors layers
layers = 0
layerdata = []
while (layers+3) <= max(griddata):
pixdata = []
# Transcribe unique colors to different layers
for i in range(len(griddata)):
# Transparent or shared color?
if (griddata[i] in [0,1]):
color = griddata[i]
elif (griddata[i] == 2):
color = 3
# Is it the unique color of layer?
elif (griddata[i] == layers+3):
color = 2
# Otherwise use transparent
else:
color = 0
pixdata.append(color)
# Add to layer data
layerdata.append(pixdata)
layers += 1
####################################
# Convert 4bit pixel data to buffers
block = 12*21
frames = len(griddata)/block
layersize = frames*64
sprdata = [chr(0)] * (layers*layersize)
for layer in range(layers):
for frame in range(frames):
for i in range(0, block, 4):
sprdata[layer*layersize+frame*64+i/4] = \
chr((layerdata[layer][frame*block+i+3]<<0) +
(layerdata[layer][frame*block+i+2]<<2) +
(layerdata[layer][frame*block+i+1]<<4) +
(layerdata[layer][frame*block+i] <<6))
###########################
# Write output binary file
f2 = io.open(output, 'wb')
f2.write(''.join([chr(0),chr(0xc4)]))
f2.write(''.join(sprdata))
f2.close()
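# Packing note: each layer stores 2-bit pixels, four per byte. For a group of pixels
# p0..p3 the byte written above is (p3 << 0) + (p2 << 2) + (p1 << 4) + (p0 << 6), i.e. the
# first pixel of the group lands in the two most significant bits of the byte.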
|
from django.urls import include, path
from rest_framework import routers
from rest_framework_simplejwt import views as jwt_views
from .views import LogoutView, LogoutAllView, MyTokenObtainPairView
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
path('api/token/', MyTokenObtainPairView.as_view(), name='token_obtain_pair'),
path('api/token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'),
path('api/token/logout/', LogoutView.as_view(), name='auth_logout'),
path('api/token/logout_all/', LogoutAllView.as_view(), name='auth_logout_all'),
]
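# A minimal sketch of the expected token flow, assuming these urlpatterns are mounted at the
# project root, MyTokenObtainPairView keeps SimpleJWT's default response shape, and the
# logout views invalidate tokens as implemented in .views (credentials are placeholders):
#
#   POST /api/token/            {"username": "...", "password": "..."} -> {"refresh": "...", "access": "..."}
#   POST /api/token/refresh/    {"refresh": "..."}                     -> {"access": "..."}
#   POST /api/token/logout/     invalidates the caller's refresh token
#   POST /api/token/logout_all/ invalidates all of the caller's tokens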
|
# -*- coding: utf-8-*-
import os
import sys
import time
import socket
import subprocess
import pkgutil
import logging
try:
from pip.req import parse_requirements
except ImportError:
from pip._internal.req import parse_requirements
import jasperpath
if sys.version_info < (3, 3):
from distutils.spawn import find_executable
else:
from shutil import which as find_executable
logger = logging.getLogger(__name__)
def check_network_connection(server="www.google.com"):
"""
Checks if jasper can connect to a network server.
Arguments:
server -- (optional) the server to connect with (Default:
"www.google.com")
Returns:
True or False
"""
logger = logging.getLogger(__name__)
logger.debug("Checking network connection to server '%s'...", server)
try:
# see if we can resolve the host name -- tells us if there is
# a DNS listening
host = socket.gethostbyname(server)
# connect to the host -- tells us if the host is actually
# reachable
socket.create_connection((host, 80), 2)
except Exception:
logger.debug("Network connection not working")
return False
else:
logger.debug("Network connection working")
return True
def check_executable(executable):
"""
Checks if an executable exists in $PATH.
Arguments:
executable -- the name of the executable (e.g. "echo")
Returns:
True or False
"""
logger = logging.getLogger(__name__)
logger.debug("Checking executable '%s'...", executable)
executable_path = find_executable(executable)
found = executable_path is not None
if found:
logger.debug("Executable '%s' found: '%s'", executable,
executable_path)
else:
logger.debug("Executable '%s' not found", executable)
return found
def check_python_import(package_or_module):
"""
Checks if a python package or module is importable.
Arguments:
package_or_module -- the package or module name to check
Returns:
True or False
"""
logger = logging.getLogger(__name__)
logger.debug("Checking python import '%s'...", package_or_module)
loader = pkgutil.get_loader(package_or_module)
found = loader is not None
if found:
logger.debug("Python %s '%s' found: %r",
"package" if loader.is_package(package_or_module)
else "module", package_or_module, loader.get_filename())
else:
logger.debug("Python import '%s' not found", package_or_module)
return found
def get_pip_requirements(fname=os.path.join(jasperpath.LIB_PATH,
'requirements.txt')):
"""
Gets the PIP requirements from a text file. If the file does not exist
or is not readable, it returns None.
Arguments:
fname -- (optional) the requirement text file (Default:
"client/requirements.txt")
Returns:
A list of pip requirement objects or None
"""
logger = logging.getLogger(__name__)
if os.access(fname, os.R_OK):
reqs = list(parse_requirements(fname))
logger.debug("Found %d PIP requirements in file '%s'", len(reqs),
fname)
return reqs
else:
logger.debug("PIP requirements file '%s' not found or not readable",
fname)
def get_git_revision():
"""
Gets the current git revision hash as a hex string. If the git executable is
missing or git is unable to get the revision, None is returned
Returns:
A hex string or None
"""
logger = logging.getLogger(__name__)
if not check_executable('git'):
logger.warning("'git' command not found, git revision not detectable")
return None
output = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
if not output:
logger.warning("Couldn't detect git revision (not a git repository?)")
return None
return output
def run():
"""
Performs a series of checks against the system and writes the results to
the logging system.
Returns:
The number of failed checks as integer
"""
logger = logging.getLogger(__name__)
# Set the log level of this module to at least INFO
loglvl = logger.getEffectiveLevel()
if loglvl == logging.NOTSET or loglvl > logging.INFO:
logger.setLevel(logging.INFO)
logger.info("Starting jasper diagnostic at %s" % time.strftime("%c"))
logger.info("Git revision: %r", get_git_revision())
failed_checks = 0
if not check_network_connection():
failed_checks += 1
for executable in ['phonetisaurus-g2p', 'espeak', 'say']:
if not check_executable(executable):
logger.warning("Executable '%s' is missing in $PATH", executable)
failed_checks += 1
# get_pip_requirements() may return None if the requirements file is missing
for req in get_pip_requirements() or []:
logger.debug("Checking PIP package '%s'...", req.name)
if not req.check_if_exists():
logger.warning("PIP package '%s' is missing", req.name)
failed_checks += 1
else:
logger.debug("PIP package '%s' found", req.name)
for fname in [os.path.join(jasperpath.APP_PATH, os.pardir, "phonetisaurus",
"g014b2b.fst")]:
logger.debug("Checking file '%s'...", fname)
if not os.access(fname, os.R_OK):
logger.warning("File '%s' is missing", fname)
failed_checks += 1
else:
logger.debug("File '%s' found", fname)
if not failed_checks:
logger.info("All checks passed")
else:
logger.info("%d checks failed" % failed_checks)
return failed_checks
if __name__ == '__main__':
logging.basicConfig(stream=sys.stdout)
logger = logging.getLogger()
if '--debug' in sys.argv:
logger.setLevel(logging.DEBUG)
run()
|
# from __future__ import division
import inspect
import sys
import os
import time
from distutils import sysconfig
# from collections import defaultdict
from threading import Thread
from six.moves.queue import Queue, Empty
import json
# from .util import Util
class AsynchronousTracer(object):
def __init__(self):
self.processor = TraceProcessor()
def tracer(self, frame, event, arg):
self.processor.queue(frame, event, arg)
return self.tracer
def done(self):
self.processor.done()
self.processor.join()
def start(self):
self.processor.start()
sys.settrace(self.tracer)
def stop(self):
sys.settrace(None)
def get_output(self):
return self.processor.output()
class InfoObject(object):
def __init__(self, prev_info_ob=None, funtion_file_path="", funtion_name="", args=None, lineno=0):
self.prev_info_ob = prev_info_ob
self.funtion_file_path = funtion_file_path
self.funtion_name = funtion_name
self.args = args
self.lineno = lineno
self.childs = []
self.return_value = None
# self.start_time = None
# self.end_time =None
# self.call_time = None
def __repr__(self):
return str(self.to_dict())
def to_dict(self):
return {
"funtion_file_path": self.funtion_file_path,
"funtion_name": self.funtion_name,
"args": self.args,
"lineno": self.lineno,
"childs": self.childs,
"return_value": self.return_value
}
class TraceProcessor(Thread):
"""
使用 sys.settrace 收集函数调用信息
"""
def __init__(self):
Thread.__init__(self)
self.trace_queue = Queue()
self.keep_going = True
self.root_info_object = None
self.now_info_object = None
self.init_libpath()
def init_libpath(self):
self.lib_path = sysconfig.get_python_lib()
path = os.path.split(self.lib_path)
if path[1] == 'site-packages':
self.lib_path = path[0]
self.lib_path = self.lib_path.lower()
def queue(self, frame, event, arg):
data = {
'frame': frame,
'event': event,
'arg': arg
}
self.trace_queue.put(data)
def run(self):
while self.keep_going:
try:
data = self.trace_queue.get(timeout=0.1)
except Empty:
continue
self.process(**data)
def done(self):
while not self.trace_queue.empty():
time.sleep(0.1)
self.keep_going = False
def push_info_ob(self, frame,full_name):
'''
Handle pushing a call onto the stack.
'''
if full_name.startswith("visualcoderun."):
return
if full_name.startswith("json."):
return
if self.now_info_object is None:
info = InfoObject(
prev_info_ob=None,
funtion_file_path=(frame.f_code.co_filename),
funtion_name="__main__",
# (module_name +"." + frame.f_code.co_name),
# frame.f_code.co_filename,
args={},
lineno=0)
self.now_info_object = info
self.root_info_object = info
info2 = InfoObject(
prev_info_ob=self.now_info_object,
funtion_file_path=(frame.f_code.co_filename),
funtion_name=full_name,
# (module_name +"." + frame.f_code.co_name),
# frame.f_code.co_filename,
args=(frame.f_locals),
lineno=(frame.f_code.co_firstlineno))
self.now_info_object.childs.append(info2)
self.now_info_object = info2
def pop_info_ob(self, full_name, arg):
'''
Handle popping a call off the stack.
'''
if full_name.startswith("visualcoderun."):
return
if full_name.startswith("json."):
return
if not self.now_info_object:
print(full_name, arg)
return
self.now_info_object.return_value = arg
self.now_info_object = self.now_info_object.prev_info_ob
def get_full_name(self, frame):
'''
Build the fully qualified name of the current call.
'''
code = frame.f_code
# Stores all the parts of a human readable name of the current call
full_name_list = []
# Work out the module name
module = inspect.getmodule(code)
module_name = ''
if module:
module_name = module.__name__
module_path = module.__file__
# if not self.config.include_stdlib \
# and self.is_module_stdlib(module_path):
# keep = False
if module_name == '__main__':
module_name = ''
if module_name:
full_name_list.append(module_name)
# Work out the class name
try:
class_name = frame.f_locals['self'].__class__.__name__
full_name_list.append(class_name)
except (KeyError, AttributeError):
pass
# Work out the current function or method
func_name = code.co_name
if func_name == '?':
func_name = '__main__'
full_name_list.append(func_name)
# Create a readable representation of the current call
full_name = '.'.join(full_name_list)
return full_name
def process(self, frame, event, arg=None):
"""This function processes a trace result. Keeps track of
relationships between calls.
"""
full_name = self.get_full_name(frame)
if event == 'call':
self.push_info_ob(frame, full_name)
if event == 'return':
self.pop_info_ob(full_name, arg)
def output(self):
'''
Generate the output. This method is not thread-safe.
'''
def serialize(obj):
"""JSON serializer for objects not serializable by default json code"""
ret = obj.to_dict()
def change_args(a_dict):
if "args" in a_dict:
args = a_dict["args"]
for each_key in args.keys():
if not isinstance( args[each_key], (str, int, float)):
args[each_key] = str( args[each_key] )
if a_dict["childs"]:
for each_child in a_dict["childs"]:
change_args(each_child.to_dict())
change_args(ret)
return ret
return json.dumps(
self.root_info_object,
default=serialize)
def simple_memoize(callable_object):
"""Simple memoization for functions without keyword arguments.
This is useful for mapping code objects to module in this context.
inspect.getmodule() requires a number of system calls, which may slow down
the tracing considerably. Caching the mapping from code objects (there is
*one* code object for each function, regardless of how many simultaneous
activations records there are).
In this context we can ignore keyword arguments, but a generic memoizer
ought to take care of that as well.
"""
cache = dict()
def wrapper(*rest):
if rest not in cache:
cache[rest] = callable_object(*rest)
return cache[rest]
return wrapper
inspect.getmodule = simple_memoize(inspect.getmodule)
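# Brief illustration of simple_memoize (a sketch; slow_lookup is a hypothetical function):
#
#   @simple_memoize
#   def slow_lookup(code_object):
#       return inspect.getmodule(code_object)  # expensive call, now performed once per code object
#
# The cache key is the tuple of positional arguments, so the wrapped callable must only
# receive hashable positional arguments, which holds for inspect.getmodule(code) above.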
|
from raptiformica.shell.ssh import copy_id_from_remote
from tests.testcase import TestCase
class TestCopyIdFromRemote(TestCase):
def setUp(self):
self.log = self.set_up_patch(
'raptiformica.shell.ssh.log'
)
self.run_critical_command_print_ready = self.set_up_patch(
'raptiformica.shell.ssh.run_critical_command_print_ready'
)
def test_copy_id_from_remote_copies_the_public_key_from_the_remote_server(self):
copy_id_from_remote('1.2.3.4')
expected_command = 'ssh root@1.2.3.4 -p 22 ' \
'-oStrictHostKeyChecking=no ' \
'-oUserKnownHostsFile=/dev/null ' \
'-oPasswordAuthentication=no ' \
'cat .ssh/id_rsa.pub >> $HOME/.ssh/authorized_keys'
expected_failure_message = 'Failed to copy the ID from 1.2.3.4:22 ' \
'to the local authorized keys file'
self.run_critical_command_print_ready.assert_called_once_with(
expected_command, buffered=False, shell=True,
failure_message=expected_failure_message
)
def test_copy_id_from_remote_uses_specified_port(self):
copy_id_from_remote('1.2.3.4', ssh_port=1234)
expected_command = 'ssh root@1.2.3.4 -p 1234 ' \
'-oStrictHostKeyChecking=no ' \
'-oUserKnownHostsFile=/dev/null ' \
'-oPasswordAuthentication=no ' \
'cat .ssh/id_rsa.pub >> $HOME/.ssh/authorized_keys'
expected_failure_message = 'Failed to copy the ID from 1.2.3.4:1234 ' \
'to the local authorized keys file'
self.run_critical_command_print_ready.assert_called_once_with(
expected_command, buffered=False, shell=True,
failure_message=expected_failure_message
)
|
from .inventory import *
from .gridworld import *
|
from rest_framework import status
from rest_framework import authentication, permissions
from rest_framework.response import Response
from django.contrib.auth.models import User
from miragecore.core.types import *
def create(request: HttpRequest, serializer_class: callable) -> Response:
"""
Return a status message.
"""
serializer = serializer_class(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def read(request: HttpRequest, serializer_class: callable) -> Response:
pass
def update(request: HttpRequest, serializer_class: callable) -> Response:
pass
def delete(request: HttpRequest) -> Response:
pass
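# A minimal usage sketch (hypothetical names; ThingSerializer and ThingView are not part of
# this module) showing how the create() helper above could be wired into a DRF view:
#
#   from rest_framework.views import APIView
#
#   class ThingView(APIView):
#       def post(self, request):
#           return create(request, ThingSerializer)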
|
# -*- coding: utf-8 -*-
#
# django-ldapdb
# Copyright (c) 2009-2011, Bolloré telecom
# Copyright (c) 2013, Jeremy Lainé
# All rights reserved.
#
# See AUTHORS file for a full list of contributors.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import ldap
import logging
import django.db.models
from django.db import connections, router
from django.db.models import signals
import ldapdb # noqa
logger = logging.getLogger('ldapdb')
class Model(django.db.models.base.Model):
"""
Base class for all LDAP models.
"""
dn = django.db.models.fields.CharField(max_length=200)
# meta-data
base_dn = None
search_scope = ldap.SCOPE_SUBTREE
object_classes = ['top']
def __init__(self, *args, **kwargs):
super(Model, self).__init__(*args, **kwargs)
self.saved_pk = self.pk
def build_rdn(self):
"""
Build the Relative Distinguished Name for this entry.
"""
bits = []
for field in self._meta.fields:
if field.db_column and field.primary_key:
bits.append("%s=%s" % (field.db_column,
getattr(self, field.name)))
if not len(bits):
raise Exception("Could not build Distinguished Name")
return '+'.join(bits)
def build_dn(self):
"""
Build the Distinguished Name for this entry.
"""
return "%s,%s" % (self.build_rdn(), self.base_dn)
raise Exception("Could not build Distinguished Name")
def delete(self, using=None):
"""
Delete this entry.
"""
using = using or router.db_for_write(self.__class__, instance=self)
connection = connections[using]
logger.debug("Deleting LDAP entry %s" % self.dn)
connection.delete_s(self.dn)
signals.post_delete.send(sender=self.__class__, instance=self)
def save(self, using=None):
"""
Saves the current instance.
"""
signals.pre_save.send(sender=self.__class__, instance=self)
using = using or router.db_for_write(self.__class__, instance=self)
connection = connections[using]
if not self.dn:
# create a new entry
record_exists = False
entry = [('objectClass', self.object_classes)]
new_dn = self.build_dn()
for field in self._meta.fields:
if not field.db_column:
continue
value = getattr(self, field.name)
value = field.get_db_prep_save(value, connection=connection)
if value:
entry.append((field.db_column, value))
logger.debug("Creating new LDAP entry %s" % new_dn)
connection.add_s(new_dn, entry)
# update object
self.dn = new_dn
else:
# update an existing entry
record_exists = True
modlist = []
orig = self.__class__.objects.using(using).get(pk=self.saved_pk)
for field in self._meta.fields:
if not field.db_column:
continue
old_value = getattr(orig, field.name, None)
new_value = getattr(self, field.name, None)
if old_value != new_value:
new_value = field.get_db_prep_save(new_value,
connection=connection)
if new_value:
modlist.append((ldap.MOD_REPLACE, field.db_column,
new_value))
elif old_value:
modlist.append((ldap.MOD_DELETE, field.db_column,
None))
if len(modlist):
# handle renaming
new_dn = self.build_dn()
if new_dn != self.dn:
logger.debug("Renaming LDAP entry %s to %s" % (self.dn,
new_dn))
connection.rename_s(self.dn, self.build_rdn())
self.dn = new_dn
logger.debug("Modifying existing LDAP entry %s" % self.dn)
connection.modify_s(self.dn, modlist)
else:
logger.debug("No changes to be saved to LDAP entry %s" %
self.dn)
# done
self.saved_pk = self.pk
signals.post_save.send(sender=self.__class__, instance=self,
created=(not record_exists))
@classmethod
def scoped(base_class, base_dn):
"""
Returns a copy of the current class with a different base_dn.
"""
class Meta:
proxy = True
verbose_name = base_class._meta.verbose_name
verbose_name_plural = base_class._meta.verbose_name_plural
import re
suffix = re.sub('[=,]', '_', base_dn)
name = "%s_%s" % (base_class.__name__, str(suffix))
new_class = type(name, (base_class,), {
'base_dn': base_dn, '__module__': base_class.__module__,
'Meta': Meta})
return new_class
class Meta:
abstract = True
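# Usage sketch for Model.scoped() (LdapUser is a hypothetical concrete subclass): given
#
#   class LdapUser(Model):
#       base_dn = "ou=people,dc=example,dc=org"
#       object_classes = ['inetOrgPerson']
#       ...
#
# LdapUser.scoped("ou=admins,dc=example,dc=org") returns a proxy subclass bound to the
# alternate branch while reusing the parent's fields and verbose names.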
|
import errno
import functools
import logging
import socket
import struct
import sys
import weakref
from ssl import SSLError
from tornado import gen
try:
import ssl
except ImportError:
ssl = None
from tornado import netutil
from tornado.iostream import StreamClosedError
from tornado.tcpclient import TCPClient
from tornado.tcpserver import TCPServer
import dask
from dask.utils import parse_timedelta
from ..protocol.utils import pack_frames_prelude, unpack_frames
from ..system import MEMORY_LIMIT
from ..threadpoolexecutor import ThreadPoolExecutor
from ..utils import ensure_ip, get_ip, get_ipv6, nbytes
from .addressing import parse_host_port, unparse_host_port
from .core import Comm, CommClosedError, Connector, FatalCommClosedError, Listener
from .registry import Backend, backends
from .utils import ensure_concrete_host, from_frames, get_tcp_server_address, to_frames
logger = logging.getLogger(__name__)
MAX_BUFFER_SIZE = MEMORY_LIMIT / 2
def set_tcp_timeout(comm):
"""
Set kernel-level TCP timeout on the stream.
"""
if comm.closed():
return
timeout = dask.config.get("distributed.comm.timeouts.tcp")
timeout = int(parse_timedelta(timeout, default="seconds"))
sock = comm.socket
# Default (unsettable) value on Windows
# https://msdn.microsoft.com/en-us/library/windows/desktop/dd877220(v=vs.85).aspx
nprobes = 10
assert timeout >= nprobes + 1, "Timeout too low"
idle = max(2, timeout // 4)
interval = max(1, (timeout - idle) // nprobes)
idle = timeout - interval * nprobes
assert idle > 0
try:
if sys.platform.startswith("win"):
logger.debug("Setting TCP keepalive: idle=%d, interval=%d", idle, interval)
sock.ioctl(socket.SIO_KEEPALIVE_VALS, (1, idle * 1000, interval * 1000))
else:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
try:
TCP_KEEPIDLE = socket.TCP_KEEPIDLE
TCP_KEEPINTVL = socket.TCP_KEEPINTVL
TCP_KEEPCNT = socket.TCP_KEEPCNT
except AttributeError:
if sys.platform == "darwin":
TCP_KEEPIDLE = 0x10 # (named "TCP_KEEPALIVE" in C)
TCP_KEEPINTVL = 0x101
TCP_KEEPCNT = 0x102
else:
TCP_KEEPIDLE = None
if TCP_KEEPIDLE is not None:
logger.debug(
"Setting TCP keepalive: nprobes=%d, idle=%d, interval=%d",
nprobes,
idle,
interval,
)
sock.setsockopt(socket.SOL_TCP, TCP_KEEPCNT, nprobes)
sock.setsockopt(socket.SOL_TCP, TCP_KEEPIDLE, idle)
sock.setsockopt(socket.SOL_TCP, TCP_KEEPINTVL, interval)
if sys.platform.startswith("linux"):
logger.debug("Setting TCP user timeout: %d ms", timeout * 1000)
TCP_USER_TIMEOUT = 18 # since Linux 2.6.37
sock.setsockopt(socket.SOL_TCP, TCP_USER_TIMEOUT, timeout * 1000)
except OSError as e:
logger.warning("Could not set timeout on TCP stream: %s", e)
def get_stream_address(comm):
"""
Get a stream's local address.
"""
if comm.closed():
return "<closed>"
try:
return unparse_host_port(*comm.socket.getsockname()[:2])
except OSError:
# Probably EBADF
return "<closed>"
def convert_stream_closed_error(obj, exc):
"""
Re-raise StreamClosedError as CommClosedError.
"""
if exc.real_error is not None:
# The stream was closed because of an underlying OS error
exc = exc.real_error
if ssl and isinstance(exc, ssl.SSLError):
if "UNKNOWN_CA" in exc.reason:
raise FatalCommClosedError(f"in {obj}: {exc.__class__.__name__}: {exc}")
raise CommClosedError(f"in {obj}: {exc.__class__.__name__}: {exc}") from exc
else:
raise CommClosedError(f"in {obj}: {exc}") from exc
def _close_comm(ref):
"""Callback to close Dask Comm when Tornado Stream closes
Parameters
----------
ref: weak reference to a Dask comm
"""
comm = ref()
if comm:
comm._closed = True
class TCP(Comm):
"""
An established communication based on an underlying Tornado IOStream.
"""
def __init__(self, stream, local_addr, peer_addr, deserialize=True):
self._closed = False
Comm.__init__(self)
self._local_addr = local_addr
self._peer_addr = peer_addr
self.stream = stream
self.deserialize = deserialize
self._finalizer = weakref.finalize(self, self._get_finalizer())
self._finalizer.atexit = False
self._extra = {}
ref = weakref.ref(self)
stream.set_close_callback(functools.partial(_close_comm, ref))
stream.set_nodelay(True)
set_tcp_timeout(stream)
self._read_extra()
def _read_extra(self):
pass
def _get_finalizer(self):
def finalize(stream=self.stream, r=repr(self)):
# stream is None if a StreamClosedError is raised during interpreter shutdown
if stream is not None and not stream.closed():
logger.warning(f"Closing dangling stream in {r}")
stream.close()
return finalize
@property
def local_address(self):
return self._local_addr
@property
def peer_address(self):
return self._peer_addr
async def read(self, deserializers=None):
stream = self.stream
if stream is None:
raise CommClosedError()
fmt = "Q"
fmt_size = struct.calcsize(fmt)
try:
frames_nbytes = await stream.read_bytes(fmt_size)
(frames_nbytes,) = struct.unpack(fmt, frames_nbytes)
frames = bytearray(frames_nbytes)
n = await stream.read_into(frames)
assert n == frames_nbytes, (n, frames_nbytes)
except StreamClosedError as e:
self.stream = None
self._closed = True
if not sys.is_finalizing():
convert_stream_closed_error(self, e)
except Exception:
            # Some OSError or another "low-level" exception. We do not really know what
# was already read from the underlying socket, so it is not even safe to retry
# here using the same stream. The only safe thing to do is to abort.
# (See also GitHub #4133).
self.abort()
raise
else:
try:
frames = unpack_frames(frames)
msg = await from_frames(
frames,
deserialize=self.deserialize,
deserializers=deserializers,
allow_offload=self.allow_offload,
)
except EOFError:
# Frames possibly garbled or truncated by communication error
self.abort()
raise CommClosedError("aborted stream on truncated data")
return msg
async def write(self, msg, serializers=None, on_error="message"):
stream = self.stream
if stream is None:
raise CommClosedError()
frames = await to_frames(
msg,
allow_offload=self.allow_offload,
serializers=serializers,
on_error=on_error,
context={
"sender": self.local_info,
"recipient": self.remote_info,
**self.handshake_options,
},
)
frames_nbytes = [nbytes(f) for f in frames]
frames_nbytes_total = sum(frames_nbytes)
header = pack_frames_prelude(frames)
header = struct.pack("Q", nbytes(header) + frames_nbytes_total) + header
frames = [header, *frames]
frames_nbytes = [nbytes(header), *frames_nbytes]
frames_nbytes_total += frames_nbytes[0]
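        # Wire format: an 8-byte total-length prefix, then the frame-lengths prelude,
        # then the frames themselves; read() above mirrors this by reading the prefix
        # first and calling unpack_frames() on the rest.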
if frames_nbytes_total < 2 ** 17: # 128kiB
# small enough, send in one go
frames = [b"".join(frames)]
frames_nbytes = [frames_nbytes_total]
try:
            # trick to enqueue all frames for writing beforehand
for each_frame_nbytes, each_frame in zip(frames_nbytes, frames):
if each_frame_nbytes:
if stream._write_buffer is None:
raise StreamClosedError()
if isinstance(each_frame, memoryview):
# Make sure that `len(data) == data.nbytes`
# See <https://github.com/tornadoweb/tornado/pull/2996>
each_frame = memoryview(each_frame).cast("B")
stream._write_buffer.append(each_frame)
stream._total_write_index += each_frame_nbytes
# start writing frames
stream.write(b"")
except StreamClosedError as e:
self.stream = None
self._closed = True
if not sys.is_finalizing():
convert_stream_closed_error(self, e)
except Exception:
            # Some OSError or another "low-level" exception. We do not really know
# what was already written to the underlying socket, so it is not even safe
# to retry here using the same stream. The only safe thing to do is to
# abort. (See also GitHub #4133).
self.abort()
raise
return frames_nbytes_total
@gen.coroutine
def close(self):
# We use gen.coroutine here rather than async def to avoid errors like
# Task was destroyed but it is pending!
# Triggered by distributed.deploy.tests.test_local::test_silent_startup
stream, self.stream = self.stream, None
self._closed = True
if stream is not None and not stream.closed():
try:
# Flush the stream's write buffer by waiting for a last write.
if stream.writing():
yield stream.write(b"")
stream.socket.shutdown(socket.SHUT_RDWR)
except OSError:
pass
finally:
self._finalizer.detach()
stream.close()
def abort(self):
stream, self.stream = self.stream, None
self._closed = True
if stream is not None and not stream.closed():
self._finalizer.detach()
stream.close()
def closed(self):
return self._closed
@property
def extra_info(self):
return self._extra
class TLS(TCP):
"""
A TLS-specific version of TCP.
"""
def _read_extra(self):
TCP._read_extra(self)
sock = self.stream.socket
if sock is not None:
self._extra.update(peercert=sock.getpeercert(), cipher=sock.cipher())
cipher, proto, bits = self._extra["cipher"]
logger.debug(
"TLS connection with %r: protocol=%s, cipher=%s, bits=%d",
self._peer_addr,
proto,
cipher,
bits,
)
def _expect_tls_context(connection_args):
ctx = connection_args.get("ssl_context")
if not isinstance(ctx, ssl.SSLContext):
raise TypeError(
"TLS expects a `ssl_context` argument of type "
"ssl.SSLContext (perhaps check your TLS configuration?)"
" Instead got %s" % str(ctx)
)
return ctx
class RequireEncryptionMixin:
def _check_encryption(self, address, connection_args):
if not self.encrypted and connection_args.get("require_encryption"):
# XXX Should we have a dedicated SecurityError class?
raise RuntimeError(
"encryption required by Dask configuration, "
"refusing communication from/to %r" % (self.prefix + address,)
)
class BaseTCPConnector(Connector, RequireEncryptionMixin):
_executor = ThreadPoolExecutor(2, thread_name_prefix="TCP-Executor")
_resolver = netutil.ExecutorResolver(close_executor=False, executor=_executor)
client = TCPClient(resolver=_resolver)
async def connect(self, address, deserialize=True, **connection_args):
self._check_encryption(address, connection_args)
ip, port = parse_host_port(address)
kwargs = self._get_connect_args(**connection_args)
try:
stream = await self.client.connect(
ip, port, max_buffer_size=MAX_BUFFER_SIZE, **kwargs
)
            # Under certain circumstances tornado will have a closed connection with an error and not raise
# a StreamClosedError.
#
# This occurs with tornado 5.x and openssl 1.1+
if stream.closed() and stream.error:
raise StreamClosedError(stream.error)
except StreamClosedError as e:
# The socket connect() call failed
convert_stream_closed_error(self, e)
except SSLError as err:
raise FatalCommClosedError() from err
local_address = self.prefix + get_stream_address(stream)
comm = self.comm_class(
stream, local_address, self.prefix + address, deserialize
)
return comm
class TCPConnector(BaseTCPConnector):
prefix = "tcp://"
comm_class = TCP
encrypted = False
def _get_connect_args(self, **connection_args):
return {}
class TLSConnector(BaseTCPConnector):
prefix = "tls://"
comm_class = TLS
encrypted = True
def _get_connect_args(self, **connection_args):
ctx = _expect_tls_context(connection_args)
return {"ssl_options": ctx}
class BaseTCPListener(Listener, RequireEncryptionMixin):
def __init__(
self,
address,
comm_handler,
deserialize=True,
allow_offload=True,
default_port=0,
**connection_args,
):
self._check_encryption(address, connection_args)
self.ip, self.port = parse_host_port(address, default_port)
self.comm_handler = comm_handler
self.deserialize = deserialize
self.allow_offload = allow_offload
self.server_args = self._get_server_args(**connection_args)
self.tcp_server = None
self.bound_address = None
async def start(self):
self.tcp_server = TCPServer(max_buffer_size=MAX_BUFFER_SIZE, **self.server_args)
self.tcp_server.handle_stream = self._handle_stream
backlog = int(dask.config.get("distributed.comm.socket-backlog"))
for i in range(5):
try:
                # When shuffling data between workers, there can
                # really be O(cluster size) connection requests
                # on a single worker socket; make sure the backlog
                # is large enough not to lose any.
sockets = netutil.bind_sockets(
self.port, address=self.ip, backlog=backlog
)
except OSError as e:
# EADDRINUSE can happen sporadically when trying to bind
# to an ephemeral port
if self.port != 0 or e.errno != errno.EADDRINUSE:
raise
exc = e
else:
self.tcp_server.add_sockets(sockets)
break
else:
raise exc
self.get_host_port() # trigger assignment to self.bound_address
def stop(self):
tcp_server, self.tcp_server = self.tcp_server, None
if tcp_server is not None:
tcp_server.stop()
def _check_started(self):
if self.tcp_server is None:
raise ValueError("invalid operation on non-started TCPListener")
async def _handle_stream(self, stream, address):
address = self.prefix + unparse_host_port(*address[:2])
stream = await self._prepare_stream(stream, address)
if stream is None:
# Preparation failed
return
logger.debug("Incoming connection from %r to %r", address, self.contact_address)
local_address = self.prefix + get_stream_address(stream)
comm = self.comm_class(stream, local_address, address, self.deserialize)
comm.allow_offload = self.allow_offload
try:
await self.on_connection(comm)
except CommClosedError:
logger.info("Connection closed before handshake completed")
return
await self.comm_handler(comm)
def get_host_port(self):
"""
The listening address as a (host, port) tuple.
"""
self._check_started()
if self.bound_address is None:
self.bound_address = get_tcp_server_address(self.tcp_server)
        # IPv6 getsockname() can return a 4-element tuple; keep only (host, port)
return self.bound_address[:2]
@property
def listen_address(self):
"""
The listening address as a string.
"""
return self.prefix + unparse_host_port(*self.get_host_port())
@property
def contact_address(self):
"""
The contact address as a string.
"""
host, port = self.get_host_port()
host = ensure_concrete_host(host)
return self.prefix + unparse_host_port(host, port)
class TCPListener(BaseTCPListener):
prefix = "tcp://"
comm_class = TCP
encrypted = False
def _get_server_args(self, **connection_args):
return {}
async def _prepare_stream(self, stream, address):
return stream
class TLSListener(BaseTCPListener):
prefix = "tls://"
comm_class = TLS
encrypted = True
def _get_server_args(self, **connection_args):
ctx = _expect_tls_context(connection_args)
return {"ssl_options": ctx}
async def _prepare_stream(self, stream, address):
try:
await stream.wait_for_handshake()
except OSError as e:
# The handshake went wrong, log and ignore
logger.warning(
"Listener on %r: TLS handshake failed with remote %r: %s",
self.listen_address,
address,
getattr(e, "real_error", None) or e,
)
else:
return stream
class BaseTCPBackend(Backend):
# I/O
def get_connector(self):
return self._connector_class()
def get_listener(self, loc, handle_comm, deserialize, **connection_args):
return self._listener_class(loc, handle_comm, deserialize, **connection_args)
# Address handling
def get_address_host(self, loc):
return parse_host_port(loc)[0]
def get_address_host_port(self, loc):
return parse_host_port(loc)
def resolve_address(self, loc):
host, port = parse_host_port(loc)
return unparse_host_port(ensure_ip(host), port)
def get_local_address_for(self, loc):
host, port = parse_host_port(loc)
host = ensure_ip(host)
if ":" in host:
local_host = get_ipv6(host)
else:
local_host = get_ip(host)
return unparse_host_port(local_host, None)
class TCPBackend(BaseTCPBackend):
_connector_class = TCPConnector
_listener_class = TCPListener
class TLSBackend(BaseTCPBackend):
_connector_class = TLSConnector
_listener_class = TLSListener
backends["tcp"] = TCPBackend()
backends["tls"] = TLSBackend()
|
import sys
import time
from socket import create_connection
SERVER = ('35.157.111.68', 10199)
RECV_SIZE = 256
def main():
conn = create_connection(SERVER)
while True:
        response = conn.recv(RECV_SIZE).decode().splitlines()
for line in response:
print(line)
if 'Welcome!' in line:
continue
if 'flag' in line:
return
num = line.split()[-1]
            conn.send(('%s\n' % num).encode())
if __name__ == '__main__':
main()
|
from flask import Flask, request, jsonify
import joblib
import pandas as pd
from urllib.parse import urlparse
from bs4 import BeautifulSoup
import requests
from flask_cors import CORS
from newspaper import Article
from tinydb import TinyDB
db = TinyDB("feedback.json")
app = Flask(__name__)
CORS(app)
classifier = joblib.load('./model/pipeline.pkl')
newsitesdata = pd.read_csv('./datasets/newsites.tsv', sep=r'\s+', header=0)
# .\venv\Scripts\activate
# make predict route a post route
# add route to fetch newsmedia ranking
# add route to get related news articles
@app.route("/<name>")
def home(name):
return f'{name}'
@app.route("/feedback",methods=['POST'])
def feedback():
json_ = request.get_json()
url = json_['url']
    label = round(json_['pred']) if json_['feedback'] == 1 else abs(round(json_['pred']) - 1)
    db.insert({'url': url, 'label': label})
    return jsonify({'success': 1})
@app.route('/predict', methods=['POST'])
def predict():
    json_ = request.get_json()
    domain = urlparse(json_['url']).netloc
    domain_normalised = '.'.join(domain.split('.')[-2:])
    site_row = newsitesdata.loc[newsitesdata['source_url_normalized'] == domain_normalised]
    fact = site_row['fact'].values[0] if not site_row.empty else 0
    bias = site_row['bias'].values[0] if not site_row.empty else 0
    summary = 0
    newsdata = 0
    if 'text' in json_:
        pred = classifier.predict_proba([json_['text']])
        article = Article(json_['url'])
        article.download()
        article.parse()
        article.nlp()
        keywords = article.keywords[:3]
        query_string = " OR ".join(keywords)
        news = requests.get("https://newsapi.org/v2/everything?apiKey=65756de0b9cc48b99bb5bcf64ceea474&sortBy=relevancy&sources=the-hindu,the-times-of-india,the-washington-post&pageSize=3&qInTitle=" + query_string)
        newsdata = news.json()
        if newsdata["totalResults"] == 0:
            newsdata = 0
        print(newsdata)
    else:
        req = requests.get(json_['url'])
        text = BeautifulSoup(req.text, "html.parser").title.string.split("|")[0]
        print(text)
        pred = classifier.predict_proba([text])
        article = Article(json_['url'])
        article.download()
        article.parse()
        article.nlp()
        summary = article.summary
    return jsonify({'pred': pred[0][1], 'fact': fact, 'bias': bias, 'summary': summary, 'newsdata': newsdata})
if __name__ == "__main__":
classifier = joblib.load('./model/pipeline.pkl')
app.run(debug=True)
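# --- Hedged usage examples (illustrative payloads, not from the original project) ---
# With the server running locally on Flask's default port:
#   POST /predict  with JSON {"url": "<article url>", "text": "<article body>"}
#     -> {"pred": <probability>, "fact": ..., "bias": ..., "summary": ..., "newsdata": ...}
#     (omit "text" and the server scrapes the page title and returns a summary instead)
#   POST /feedback with JSON {"url": "<article url>", "pred": <probability>, "feedback": 1 or 0}
#     -> {"success": 1}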
|
import dbkrpy
import discord
from discord.ext import commands
class GuildCount(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.token = ''
dbkrpy.UpdateGuilds(self.bot, self.token)
def setup(bot):
bot.add_cog(GuildCount(bot))
|
import numpy as np
import h5py as h5
import hepfile
import time
#import sys
# sys.path.append('./scripts')
#from write_h5hep_file_for_unit_tests import write_h5hep_file_for_unit_tests
from write_file_for_unit_tests import write_file_for_unit_tests
def isEmpty(dictionary):
test = True
print(dictionary.keys())
for key in dictionary.keys():
print(key)
print(dictionary[key])
print(type(dictionary[key]))
if dictionary[key] is None:
test = True
elif type(dictionary[key]) == list or type(dictionary[key]) == np.ndarray:
if len(dictionary[key]) > 0:
test = False
return test
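# Illustrative behaviour of isEmpty (the values below are made up for the example):
#   isEmpty({'jet/e': np.array([1.0, 2.0])}) -> False  (a non-empty container is found)
#   isEmpty({'jet/e': np.array([])})         -> True   (only empty containers or None)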
def test_load():
write_file_for_unit_tests()
# This assumes you run nosetests from the h5hep directory and not
# the tests directory.
filename = "FOR_TESTS.hdf5"
desired_datasets = ['jet', 'muon']
subset = 5
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, subset)
assert isinstance(test_data, dict)
assert isinstance(test_bucket, dict)
assert isEmpty(test_bucket) == True
assert isEmpty(test_data) == False
# Testing desired_datasets
assert "jet/e" in test_data.keys()
assert "jet/e" in test_data.keys()
assert "METpx" not in test_data.keys()
assert "METpx" not in test_data.keys()
# Testing subsets
assert len(test_data["jet/njet"]) == 5
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, 1000)
assert len(test_data["jet/njet"]) == 10
# Passing in a range of subsets
subset = (0,4)
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, subset=subset)
assert len(test_data["jet/njet"]) == 4
subset = (1,5)
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, subset=subset)
assert len(test_data["jet/njet"]) == 4
subset = [1,5]
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, subset=subset)
assert len(test_data["jet/njet"]) == 4
# Test for poor uses of subset
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, [0,0])
assert len(test_data['_LIST_OF_DATASETS_']) == 0
assert len(test_bucket.keys()) == 0
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, [10,0])
assert len(test_data['_LIST_OF_DATASETS_']) == 0
assert len(test_bucket.keys()) == 0
test_data, test_bucket = hepfile.load(
filename, False, desired_datasets, subset=0)
assert len(test_data['_LIST_OF_DATASETS_']) == 0
assert len(test_bucket.keys()) == 0
def test_unpack():
# This assumes you run nosetests from the h5hep directory and not
# the tests directory.
filename = "FOR_TESTS.hdf5"
desired_datasets = ['jet', 'muon']
subset = 10
bucket, data = hepfile.load(filename, False, desired_datasets, subset)
hepfile.unpack(data, bucket)
assert isEmpty(bucket) == False
def test_get_nbuckets_in_file():
# This assumes you run nosetests from the h5hep directory and not
# the tests directory.
filename = "FOR_TESTS.hdf5"
nbuckets = hepfile.get_nbuckets_in_file(filename)
assert nbuckets == 10
def test_get_file_metadata():
filename = "FOR_TESTS.hdf5"
metadata = hepfile.get_file_metadata(filename)
assert 'date' in metadata
assert 'hepfile_version' in metadata
assert 'h5py_version' in metadata
assert 'numpy_version' in metadata
assert 'python_version' in metadata
# Check default attributes are strings
assert isinstance(metadata['date'], str)
assert isinstance(metadata['hepfile_version'], str)
assert isinstance(metadata['h5py_version'], str)
assert isinstance(metadata['numpy_version'], str)
assert isinstance(metadata['python_version'], str)
|
# Copyright 2014 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load(
"@io_bazel_rules_go//go/private:context.bzl",
"go_context",
)
load(
"@io_bazel_rules_go//go/private:providers.bzl",
"GoPath",
)
load(
"@io_bazel_rules_go//go/private:rules/rule.bzl",
"go_rule",
)
def _go_vet_generate_impl(ctx):
print("""
EXPERIMENTAL: the go_vet_test rule is still very experimental
Please do not rely on it for production use, but feel free to use it and file issues
""")
go = go_context(ctx)
script_file = go.declare_file(go, ext=".bash")
    files = ctx.files.data + go.stdlib.files
    gopath = []
    packages = []
for data in ctx.attr.data:
entry = data[GoPath]
gopath += [entry.gopath]
packages += [package.dir for package in entry.packages]
ctx.actions.write(output=script_file, is_executable=True, content="""
export GOPATH="{gopath}"
{go} tool vet {packages}
""".format(
go=go.go.short_path,
gopath=":".join(['$(pwd)/{})'.format(entry) for entry in gopath]),
packages=" ".join(packages),
))
return struct(
files = depset([script_file]),
runfiles = ctx.runfiles(files, collect_data = True),
)
_go_vet_generate = go_rule(
_go_vet_generate_impl,
attrs = {
"data": attr.label_list(
providers = [GoPath],
cfg = "data",
),
},
)
def go_vet_test(name, data, **kwargs):
script_name = "generate_"+name
_go_vet_generate(
name=script_name,
data=data,
tags = ["manual"],
)
native.sh_test(
name=name,
srcs=[script_name],
data=data,
**kwargs
)
|
# -*- coding: utf-8 -*-
"""
This script saves the input file for the feasible horseshoe problem.
"""
__version__ = '1.0'
__author__ = 'Noemie Fedon'
import sys
import pandas as pd
import numpy as np
import numpy.matlib
sys.path.append(r'C:\BELLA')
from src.CLA.lampam_functions import calc_lampam
from src.BELLA.panels import Panel
from src.BELLA.multipanels import MultiPanel
from src.BELLA.constraints import Constraints
from src.BELLA.obj_function import ObjFunction
from src.BELLA.materials import Material
from src.BELLA.save_set_up import save_constraints_BELLA
from src.BELLA.save_set_up import save_multipanel
from src.BELLA.save_set_up import save_objective_function_BELLA
from src.BELLA.save_set_up import save_materials
from src.guidelines.one_stack import check_lay_up_rules
from src.guidelines.one_stack import check_ply_drop_rules
from src.divers.excel import delete_file, autofit_column_widths
sheet = 'horseshoe2'
filename_input = '/BELLA/input-files/SST.xlsx'
filename_res = 'input_file_' + sheet + '.xlsx'
# check for authorisation before overwriting
delete_file(filename_res)
# number of panels
n_panels = 18
### Design guidelines ---------------------------------------------------------
constraints_set = 'C0'
constraints_set = 'C1'
## lay-up rules
# set of admissible fibre orientations
set_of_angles = np.array([-45, 0, 45, 90], dtype=int)
set_of_angles = np.array([
-45, 0, 45, 90, +30, -30, +60, -60, 15, -15, 75, -75], dtype=int)
sym = True # symmetry rule
oopo = False # out-of-plane orthotropy requirements
if constraints_set == 'C0':
bal = False # balance rule
rule_10_percent = False # 10% rule
diso = False # disorientation rule
contig = False # contiguity rule
dam_tol = False # damage-tolerance rule
else:
bal = True
rule_10_percent = True
diso = True
contig = True
dam_tol = True
rule_10_Abdalla = True # 10% rule restricting LPs instead of ply percentages
percent_Abdalla = 10 # percentage limit for the 10% rule applied on LPs
combine_45_135 = True # True if restriction on +-45 plies combined for 10% rule
percent_0 = 10 # percentage used in the 10% rule for 0 deg plies
percent_45 = 0 # percentage used in the 10% rule for +45 deg plies
percent_90 = 10 # percentage used in the 10% rule for 90 deg plies
percent_135 = 0 # percentage used in the 10% rule for -45 deg plies
percent_45_135 = 10 # percentage used in the 10% rule for +-45 deg plies
delta_angle = 45 # maximum angle difference for adjacent plies
n_contig = 5 # maximum number of adjacent plies with the same fibre orientation
dam_tol_rule = 1 # type of damage tolerance rule
## ply-drop rules
covering = True # covering rule
n_covering = 1 # number of plies ruled by covering rule at laminate surfaces
pdl_spacing = True # ply drop spacing rule
min_drop = 2 # Minimum number of continuous plies between ply drops
constraints = Constraints(
sym=sym,
bal=bal,
oopo=oopo,
dam_tol=dam_tol,
dam_tol_rule=dam_tol_rule,
covering=covering,
n_covering=n_covering,
rule_10_percent=rule_10_percent,
rule_10_Abdalla=rule_10_Abdalla,
percent_Abdalla=percent_Abdalla,
percent_0=percent_0,
percent_45=percent_45,
percent_90=percent_90,
percent_135=percent_135,
percent_45_135=percent_45_135,
combine_45_135=combine_45_135,
diso=diso,
contig=contig,
n_contig=n_contig,
delta_angle=delta_angle,
set_of_angles=set_of_angles,
min_drop=min_drop,
pdl_spacing=pdl_spacing)
### Objective function parameters ---------------------------------------------
# Coefficient for the 10% rule penalty
coeff_10 = 1
# Coefficient for the contiguity constraint penalty
coeff_contig = 1
# Coefficient for the disorientation constraint penalty
coeff_diso = 10
# Coefficient for the out-of-plane orthotropy penalty
coeff_oopo = 1
# Coefficient for the ply drop spacing guideline penalty
coeff_spacing = 1
# Lamination-parameter weightings in panel objective functions
# (In practice these weightings can be different for each panel)
optimisation_type = 'AD'
if optimisation_type == 'A':
if all(elem in {0, +45, -45, 90} for elem in constraints.set_of_angles):
lampam_weightings = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0])
else:
lampam_weightings = np.array([1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0])
elif optimisation_type == 'D':
if all(elem in {0, +45, -45, 90} for elem in constraints.set_of_angles):
lampam_weightings = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0])
else:
lampam_weightings = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1])
elif optimisation_type == 'AD':
if all(elem in {0, +45, -45, 90} for elem in constraints.set_of_angles):
lampam_weightings = np.array([1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0])
else:
lampam_weightings = np.array([1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1])
## Multi-panel objective function
# Weightings of the panels in the multi-panel objective function
panel_weightings = np.ones((n_panels,), float)
obj_func_param = ObjFunction(
constraints=constraints,
coeff_contig=coeff_contig,
coeff_diso=coeff_diso,
coeff_10=coeff_10,
coeff_oopo=coeff_oopo,
coeff_spacing=coeff_spacing)
### Material properties -------------------------------------------------------
# Elastic modulus in the fibre direction in Pa
E11 = 20.5/1.45038e-10 # 141 GPa
# Elastic modulus in the transverse direction in Pa
E22 = 1.31/1.45038e-10 # 9.03 GPa
# Poisson's ratio relating transverse deformation and axial loading (-)
nu12 = 0.32
# In-plane shear modulus in Pa
G12 = 0.62/1.45038e-10 # 4.27 GPa
# Density in g/m2
density_area = 300.5
# Ply thickness in m
ply_t = (25.40/1000)*0.0075 # 0.191 mm
materials = Material(E11=E11, E22=E22, G12=G12, nu12=nu12,
density_area=density_area, ply_t=ply_t)
### Multi-panel composite laminate layout -------------------------------------
# panel IDs
ID = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18]
# panel number of plies
n_plies_per_panel = [32, 28, 20, 18, 16, 22, 18, 24, 38,
34, 30, 28, 22, 18, 24, 30, 18, 22]
# panels adjacency
neighbour_panels = {
1 : [2, 9],
2 : [1, 3, 6, 10],
3 : [2, 4, 6],
4 : [3, 5, 7],
5 : [4, 8],
6 : [2, 3, 7],
7 : [4, 6, 8],
8 : [5, 7],
9 : [1, 10, 11],
10 : [2, 9, 12],
11 : [9, 12],
12 : [10, 11, 13, 16],
13 : [12, 14, 16],
14 : [13, 15, 17],
15 : [14, 18],
16 : [12, 13, 17],
17 : [14, 16, 18],
18 : [15, 17]}
# boundary weights
boundary_weights = {(1, 2) : 0.610,
(1, 9) : 0.457,
(2, 3) : 0.305,
(2, 6) : 0.305,
(2, 10) : 0.457,
(3, 4) : 0.305,
(3, 6) : 0.508,
(4, 5) : 0.305,
(4, 7) : 0.508,
(5, 8) : 0.508,
(6, 7) : 0.305,
(7, 8) : 0.305,
(9, 10) : 0.610,
(9, 11) : 0.457,
(10, 12) : 0.457,
(11, 12) : 0.610,
(12, 13) : 0.305,
(12, 16) : 0.305,
(13, 14) : 0.305,
(13, 16) : 0.508,
(14, 15) : 0.305,
(14, 17) : 0.508,
(15, 18) : 0.508,
(16, 17) : 0.305,
(17, 18) : 0.305}
# panel length in the x-direction (m)
length_x = (25.40/1000)*np.array([18, 18, 20, 20, 20, 20, 20, 20,
18, 18, 18, 18, 20, 20, 20, 20, 20, 20])
# panel length in the y-direction (m)
length_y = (25.40/1000)*np.array([24, 24, 12, 12, 12, 12, 12, 12,
24, 24, 24, 24, 12, 12, 12, 12, 12, 12])
# 1 lbf/in = 0.175127 N/mm
# panel loading per unit width in the x-direction in N/m
N_x = 175.127*np.array([700, 375, 270, 250, 210, 305, 290, 600,
1100, 900, 375, 400, 330, 190, 300, 815, 320, 300])
# panel loading per unit width in the y-direction in N/m
N_y = 175.127*np.array([400, 360, 325, 200, 100, 360, 195, 480,
600, 400, 525, 320, 330, 205, 610, 1000, 180, 410])
sst = pd.read_excel(filename_input, sheet_name=sheet).fillna(-1)
sst = np.array(sst, int).T
sst = np.hstack((sst, np.flip(sst, axis=1)))
n_plies_2_ss = dict()
n_plies_2_lampam = dict()
n_plies_2_sst = dict()
for stack_sst in sst:
stack = np.copy(stack_sst)
for ind_ply in range(38)[::-1]:
if stack[ind_ply] == -1:
stack = np.delete(stack, ind_ply)
n_plies_2_sst[len(stack)] = stack_sst
n_plies_2_ss[len(stack)] = stack
n_plies_2_lampam[len(stack)] = calc_lampam(stack)
sst = np.array([n_plies_2_sst[n] for n in n_plies_per_panel])
panels = []
for ind_panel in range(n_panels):
panels.append(Panel(
ID=ID[ind_panel],
lampam_target=n_plies_2_lampam[n_plies_per_panel[ind_panel]],
lampam_weightings=lampam_weightings,
n_plies=n_plies_per_panel[ind_panel],
length_x=length_x[ind_panel],
length_y=length_y[ind_panel],
N_x=N_x[ind_panel],
N_y=N_y[ind_panel],
weighting=panel_weightings[ind_panel],
neighbour_panels=neighbour_panels[ID[ind_panel]],
constraints=constraints))
multipanel = MultiPanel(panels)
multipanel.filter_target_lampams(constraints, obj_func_param)
multipanel.filter_lampam_weightings(constraints, obj_func_param)
### Checks for feasibility of the multi-panel composite layout ----------------
#check_ply_drop_rules(sst, multipanel, constraints, reduced=False)
for stack in n_plies_2_ss.values():
check_lay_up_rules(stack, constraints,
no_ipo_check=True, no_bal_check=True)
### Save data -----------------------------------------------------------------
save_multipanel(filename_res, multipanel, obj_func_param, calc_penalties=True,
constraints=constraints, sst=sst)
save_constraints_BELLA(filename_res, constraints)
save_objective_function_BELLA(filename_res, obj_func_param)
save_materials(filename_res, materials)
autofit_column_widths(filename_res)
|
import mne
import mne_features
import numpy as np
from mne_features.feature_extraction import extract_features
from moabb.datasets import physionet_mi
if __name__ == '__main__':
# get dataset
ds = physionet_mi.PhysionetMI()
raw = ds.get_data([2])[2]['session_0']['run_4'].pick_channels(['C3', 'C4'])
events = mne.events_from_annotations(raw)
s_freq = raw.info['sfreq']
# get x and save to file
X = mne.Epochs(raw, events[0]).get_data()
# extract features
params = {
'pow_freq_bands__freq_bands': np.arange(1, int(s_freq / 2), 1),
}
selected_funcs = {'mean', 'ptp_amp', 'std', 'pow_freq_bands'}
    features_array = extract_features(X, s_freq, selected_funcs, params)
# save features to file
np.savetxt('../data/mne/1/features.csv', features_array, delimiter=',')
# get y and save to file
y = np.asarray([e[2] for e in events[0]])
np.savetxt('../data/mne/1/stimulus_vectors.csv', y)
|
# Generated by Django 3.0.3 on 2020-05-31 15:41
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main_site', '0004_auto_20200526_1908'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='video_link',
),
migrations.AddField(
model_name='resource',
name='video_link_id',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AlterField(
model_name='resource',
name='belt_level',
field=models.CharField(choices=[('White', 'White'), ('White/Yellow', 'White/Yellow'), ('White/Black', 'White/Black'), ('Yellow', 'Yellow'), ('Yellow/Black', 'Yellow/Black'), ('Orange', 'Orange'), ('Orange/Black', 'Orange/Black'), ('Green', 'Green'), ('Green/Black', 'Green/Black'), ('Purple', 'Purple'), ('Purple/Black', 'Purple/Black'), ('Blue', 'Blue'), ('Blue/Black', 'Blue/Black'), ('Brown', 'Brown'), ('Brown/Black', 'Brown/Black'), ('Red', 'Red'), ('Red/Black', 'Red/Black')], default='White', max_length=25),
),
migrations.AlterField(
model_name='resource',
name='difficulty',
field=models.CharField(choices=[('Low', 'Low'), ('Medium', 'Medium'), ('High', 'High')], default='Easy', max_length=25),
),
]
|
import CONSTANT
from Menu.Menu import Menu
import telebot
class General_menu(Menu):
def __init__(self, message, userdata, bot, regular_id=None):
super().__init__(message=message, userdata=userdata, bot=bot, state=CONSTANT.NAME_GENERAL_MENU, regular_id = regular_id)
def update(self, message):
keyboard = telebot.types.ReplyKeyboardMarkup(True)
        keyboard.row('Профіль')  # 'Profile'
        lottery_ticket = self.userdata.find_user_lottery_ticket(self.regular_id)
        if lottery_ticket > 0:
            keyboard.row('Казино (' + CONSTANT.SYMBOL_LOTTERY_TICKET + str(lottery_ticket) + ")")  # 'Casino (<tickets>)'
        else:
            keyboard.row('Казино')  # 'Casino'
        self.bot.send_message(self.regular_id, 'Головне меню', reply_markup=keyboard)  # 'Main menu'
        if self.userdata.check_new_lottery_ticket(self.regular_id):
            self.bot.send_message(self.regular_id, 'Ви отримали лотерейний квиток!! ' + CONSTANT.SYMBOL_LOTTERY_TICKET, reply_markup=keyboard)  # 'You received a lottery ticket!!'
def press(self, message):
        if 'Профіль' in message.text:  # 'Profile'
            from Menu.Home.Home_menu import Home_menu
            menu = Home_menu(message, self.userdata, self.bot)
            return menu
        if 'Казино' in message.text:  # 'Casino'
from Menu.Casino.Casino_menu import Casino_menu
menu = Casino_menu(message, self.userdata, self.bot)
return menu
return self
|
import tkinter as tk
class ControlView(tk.Frame):
"""View for Movement Controls."""
def __init__(self, user_control, parent, *args, **kwargs):
"""Contruct instance.
Input: Injection of UserController instance
Parent window instance.
"""
super().__init__(parent, *args, **kwargs)
self.user_control = user_control
self.south_button = tk.Button(self,
text="S",
font=('arial', 20, 'bold'),
bg='LightGrey',
height = 1,
width = 2)
self.south_button.grid(row=0,column=1)
self.west_button = tk.Button(self,
text="W",
font=('arial', 20, 'bold'),
bg='LightGrey',
height = 1,
width = 2)
self.west_button.grid(row=1,column=0)
self.east_button = tk.Button(self,
text="E",
font=('arial', 20, 'bold'),
bg='LightGrey',
height = 1,
width = 2)
self.east_button.grid(row=1,column=2)
self.north_button = tk.Button(self,
text="N",
font=('arial', 20, 'bold'),
bg='LightGrey',
height = 1,
width = 2)
self.north_button.grid(row=2,column=1)
self.stop_button = tk.Button(self,
text="STOP",
font=('arial',20, 'bold'),
fg='White',
bg='Red',
activeforeground='Red',
activebackground='White')
self.stop_button.grid(row=3,column=0, columnspan=3, pady=10)
#Add Bindings
self.enable_all_buttons()
def enable_all_buttons(self):
"""Enables and binds all buttons."""
for child in self.winfo_children():
child.config(state='normal')
self.north_button.bind("<ButtonPress-1>",
self.start_move_north_buttonclick)
self.north_button.bind("<ButtonRelease-1>",
self.stop_move_north_buttonclick)
self.west_button.bind("<ButtonPress-1>",
self.start_move_west_buttonclick)
self.west_button.bind("<ButtonRelease-1>",
self.stop_move_west_buttonclick)
self.south_button.bind("<ButtonPress-1>",
self.start_move_south_buttonclick)
self.south_button.bind("<ButtonRelease-1>",
self.stop_move_south_buttonclick)
self.east_button.bind("<ButtonPress-1>",
self.start_move_east_buttonclick)
self.east_button.bind("<ButtonRelease-1>",
self.stop_move_east_buttonclick)
def start_move_west_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.move_west()
def stop_move_west_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.stop_west()
def start_move_north_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.move_north()
def stop_move_north_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.stop_north()
def start_move_south_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.move_south()
def stop_move_south_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.stop_south()
def start_move_east_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.move_east()
def stop_move_east_buttonclick(self, event):
""" Calls corresponding method of user_control.
"""
self.user_control.stop_east()
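# --- Hedged usage sketch (illustrative; the real UserController implementation is not shown here) ---
if __name__ == '__main__':
    class _EchoControl:
        """Stand-in controller that prints every movement call it receives."""
        def __getattr__(self, name):
            return lambda: print(name)

    root = tk.Tk()
    ControlView(_EchoControl(), root).pack(padx=10, pady=10)
    root.mainloop()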
|
from unittest import mock
import json
import re
import pytest
import kombu
from django.conf import settings
from share import models
from share import util
from share.search import fetchers
from share.search import indexing
from tests import factories
@pytest.mark.skip
class TestIndexableMessage:
VERSION_MAP = {
0: indexing.V0Message,
1: indexing.V1Message,
}
@pytest.fixture(params=[
(0, models.CreativeWork, [], {'version': 0, 'CreativeWork': []}),
(0, models.CreativeWork, [], {'CreativeWork': []}),
(0, models.CreativeWork, [1, 2, 3, 4], {'CreativeWork': [1, 2, 3, 4]}),
(0, models.Agent, [], {'Agent': []}),
(0, models.Agent, [], {'Person': []}),
(0, models.Agent, [], {'share.AbstractAgent': []}),
(0, models.Tag, [1], {'tag': [1]}),
(1, models.Tag, [1, 2], {'model': 'Tag', 'ids': [1, 2], 'version': 1}),
])
def message(self, request):
version, model, ids, payload = request.param
self.EXPECTED_IDS = ids
self.EXPECTED_MODEL = model
self.EXPECTED_VERSION = version
return kombu.Message(
content_type='application/json',
body=json.dumps(payload),
)
def test_wrap(self, message):
message = indexing.IndexableMessage.wrap(message)
assert list(message) == self.EXPECTED_IDS
assert message.model == self.EXPECTED_MODEL
assert message.protocol_version == self.EXPECTED_VERSION
assert isinstance(message, self.VERSION_MAP[self.EXPECTED_VERSION])
@pytest.mark.parametrize('version', [30, 10, None, 'Foo', '1', '0', {}, []])
def test_invalid_version(self, version):
with pytest.raises(ValueError) as e:
indexing.IndexableMessage.wrap(kombu.Message(
content_type='application/json',
body=json.dumps({'version': version}),
))
assert e.value.args == ('Invalid version "{}"'.format(version), )
@pytest.mark.skip
class TestMessageFlattener:
def message_factory(self, model='CreativeWork', ids=None):
if ids is None:
ids = [1, 2, 3]
msg = indexing.IndexableMessage.wrap(kombu.Message(
content_type='application/json',
body=json.dumps({model: ids}),
))
msg.message.ack = mock.Mock()
msg.message.requeue = mock.Mock()
return msg
@pytest.mark.parametrize('number', [0, 1, 5])
def test_len(self, number):
assert len(indexing.MessageFlattener([self.message_factory() for _ in range(number)])) == number * 3
def test_list(self):
assert list(indexing.MessageFlattener([])) == []
assert list(indexing.MessageFlattener([
self.message_factory(ids=[1, 2, 3]),
])) == [1, 2, 3]
assert list(indexing.MessageFlattener([
self.message_factory(ids=[1, 2, 3]),
self.message_factory(ids=[4]),
self.message_factory(ids=[]),
self.message_factory(ids=[]),
self.message_factory(ids=[]),
self.message_factory(ids=[5, 6]),
])) == [1, 2, 3, 4, 5, 6]
def test_moves_to_pending(self):
messages = [
self.message_factory(ids=[]),
self.message_factory(ids=[]),
self.message_factory(ids=[0]),
self.message_factory(ids=[1, 2]),
self.message_factory(ids=[3, 4, 5]),
self.message_factory(ids=[6, 7, 8, 9]),
]
flattener = indexing.MessageFlattener(messages)
assert list(flattener.pending) == []
lengths = [3, 3, 4, 4, 4, 5, 5, 5, 5, 6]
# Forces the initial buffer load.
# Will always be called in a for loop.
iter(flattener)
for i in range(10):
assert i == next(flattener)
assert list(flattener.pending) == messages[:lengths[i]]
def test_ack(self):
messages = [
self.message_factory(ids=[]),
self.message_factory(ids=[]),
self.message_factory(ids=[0]),
self.message_factory(ids=[1, 2]),
self.message_factory(ids=[3, 4, 5]),
self.message_factory(ids=[6, 7, 8, 9]),
]
flattener = indexing.MessageFlattener(messages)
assert list(flattener.pending) == []
lengths = [3, 3, 4, 4, 4, 5, 5, 5, 5, 6]
# Forces the initial buffer load.
# Will always be called in a for loop.
iter(flattener)
for i in range(10):
assert i == next(flattener)
flattener.ack_pending()
assert list(flattener.pending) == []
assert list(flattener.requeued) == []
assert flattener.acked == messages[:lengths[i]]
for message in messages:
assert message.message.ack.called
def test_requeue(self):
messages = [
self.message_factory(ids=[]),
self.message_factory(ids=[]),
self.message_factory(ids=[0]),
self.message_factory(ids=[1, 2]),
self.message_factory(ids=[3, 4, 5]),
self.message_factory(ids=[6, 7, 8, 9]),
]
flattener = indexing.MessageFlattener(messages)
assert list(flattener.pending) == []
lengths = [3, 3, 4, 4, 4, 5, 5, 5, 5, 6]
# Forces the initial buffer load.
# Will always be called in a for loop.
iter(flattener)
for i in range(10):
assert i == next(flattener)
flattener.requeue_pending()
assert list(flattener.acked) == []
assert list(flattener.pending) == []
assert flattener.requeued == messages[:lengths[i]]
for message in messages:
assert message.message.requeue.called
class TestFetchers:
@pytest.mark.parametrize('model, fetcher', [
(models.Agent, fetchers.AgentFetcher),
(models.Person, fetchers.AgentFetcher),
(models.AbstractAgent, fetchers.AgentFetcher),
(models.Article, fetchers.CreativeWorkFetcher),
(models.Preprint, fetchers.CreativeWorkFetcher),
(models.CreativeWork, fetchers.CreativeWorkFetcher),
(models.AbstractCreativeWork, fetchers.CreativeWorkFetcher),
])
def test_fetcher_for(self, model, fetcher):
assert isinstance(fetchers.fetcher_for(model), fetcher)
def test_fetcher_not_found(self):
with pytest.raises(ValueError) as e:
fetchers.fetcher_for(models.AgentIdentifier)
assert e.value.args == ('No fetcher exists for <class \'share.models.identifiers.AgentIdentifier\'>', )
@pytest.mark.django_db
@pytest.mark.parametrize('id, type, final_type, types', [
(12, 'share.agent', 'agent', ['agent']),
(1850, 'share.Person', 'person', ['agent', 'person']),
(1850, 'share.Institution', 'institution', ['agent', 'organization', 'institution']),
(85, 'share.preprint', 'preprint', ['creative work', 'publication', 'preprint']),
(85, 'share.Software', 'software', ['creative work', 'software']),
])
def test_populate_types(self, id, type, final_type, types):
fetcher = fetchers.Fetcher()
populated = fetcher.populate_types({'id': id, 'type': type})
assert populated['type'] == final_type
assert populated['types'] == types[::-1]
assert util.IDObfuscator.decode_id(populated['id']) == id
@pytest.mark.django_db
def test_creativework_fetcher(self):
works = [
factories.AbstractCreativeWorkFactory(),
factories.AbstractCreativeWorkFactory(is_deleted=True),
factories.AbstractCreativeWorkFactory(),
factories.AbstractCreativeWorkFactory(),
]
for work in works[:-1]:
factories.WorkIdentifierFactory(creative_work=work)
factories.WorkIdentifierFactory.create_batch(5, creative_work=works[0])
source = factories.SourceFactory()
works[1].sources.add(source.user)
# Trim trailing zeros
def iso(x):
return re.sub(r'(\.\d+?)0*\+', r'\1+', x.isoformat())
fetched = list(fetchers.CreativeWorkFetcher()(work.id for work in works))
# TODO add more variance
assert fetched == [{
'id': util.IDObfuscator.encode(work),
'type': work._meta.verbose_name,
'types': [cls._meta.verbose_name for cls in type(work).mro() if hasattr(cls, '_meta') and cls._meta.proxy],
'title': work.title,
'description': work.description,
'date': iso(work.date_published),
'date_created': iso(work.date_created),
'date_modified': iso(work.date_modified),
'date_published': iso(work.date_published),
'date_updated': iso(work.date_updated),
'is_deleted': work.is_deleted or not work.identifiers.exists(),
'justification': getattr(work, 'justification', None),
'language': work.language,
'registration_type': getattr(work, 'registration_type', None),
'retracted': work.outgoing_creative_work_relations.filter(type='share.retracted').exists(),
'withdrawn': getattr(work, 'withdrawn', None),
'identifiers': list(work.identifiers.values_list('uri', flat=True)),
'sources': [user.source.long_title for user in work.sources.all()],
'subjects': [],
'subject_synonyms': [],
'tags': [],
'lists': {},
} for work in works]
@pytest.mark.django_db
@pytest.mark.parametrize('bepresses, customs, expected', [
([], [-1], {
'subjects': ['mergik|Magic|Cool Magic|SUPER COOL MAGIC'],
'subject_synonyms': ['bepress|Engineering|Computer Engineering|Data Storage Systems'],
}),
([-1], [], {
'subjects': ['bepress|Engineering|Computer Engineering|Data Storage Systems'],
'subject_synonyms': [],
}),
([-1], [-1], {
'subjects': ['bepress|Engineering|Computer Engineering|Data Storage Systems', 'mergik|Magic|Cool Magic|SUPER COOL MAGIC'],
'subject_synonyms': ['bepress|Engineering|Computer Engineering|Data Storage Systems'],
}),
([0, 1], [], {
'subjects': ['bepress|Engineering', 'bepress|Engineering|Computer Engineering'],
'subject_synonyms': [],
}),
([], [0, 1], {
'subjects': ['mergik|Magic', 'mergik|Magic|Cool Magic'],
'subject_synonyms': ['bepress|Engineering', 'bepress|Engineering|Computer Engineering'],
}),
])
def test_subject_indexing(self, bepresses, customs, expected):
custom_tax = factories.SubjectTaxonomyFactory(source__long_title='mergik')
system_tax = models.SubjectTaxonomy.objects.get(source__user__username=settings.APPLICATION_USERNAME)
custom = ['Magic', 'Cool Magic', 'SUPER COOL MAGIC']
bepress = ['Engineering', 'Computer Engineering', 'Data Storage Systems']
for i, name in enumerate(tuple(bepress)):
bepress[i] = factories.SubjectFactory(
name=name,
taxonomy=system_tax,
parent=bepress[i - 1] if i > 0 else None,
)
for i, name in enumerate(tuple(custom)):
custom[i] = factories.SubjectFactory(
name=name,
taxonomy=custom_tax,
central_synonym=bepress[i],
parent=custom[i - 1] if i > 0 else None,
)
work = factories.AbstractCreativeWorkFactory()
for i in bepresses:
factories.ThroughSubjectsFactory(subject=bepress[i], creative_work=work)
for i in customs:
factories.ThroughSubjectsFactory(subject=custom[i], creative_work=work)
fetched = next(fetchers.CreativeWorkFetcher()([work.id]))
assert {k: v for k, v in fetched.items() if k.startswith('subject')} == expected
@pytest.mark.django_db
def test_lineage_indexing(self):
child = factories.AbstractCreativeWorkFactory()
fetched = next(fetchers.CreativeWorkFetcher()([child.id]))
assert fetched['lists'].get('lineage') is None
actual_lineage = [child]
for _ in range(5):
new_parent = factories.AbstractCreativeWorkFactory()
factories.AbstractWorkRelationFactory(
type='share.ispartof',
subject=actual_lineage[0],
related=new_parent
)
actual_lineage.insert(0, new_parent)
for i, work in enumerate(actual_lineage):
expected_lineage = actual_lineage[:i][-3:]
fetched = next(fetchers.CreativeWorkFetcher()([work.id]))
fetched_lineage = fetched['lists'].get('lineage', [])
assert len(fetched_lineage) == len(expected_lineage)
for indexed, ancestor in zip(fetched_lineage, expected_lineage):
assert indexed['id'] == util.IDObfuscator.encode(ancestor)
assert indexed['title'] == ancestor.title
assert set(indexed['identifiers']) == set(ancestor.identifiers.values_list('uri'))
@pytest.mark.django_db
def test_agent_fetcher(self):
agents = [
factories.AbstractAgentFactory(),
factories.AbstractAgentFactory(),
factories.AbstractAgentFactory(),
factories.AbstractAgentFactory(),
]
list(fetchers.AgentFetcher()(agent.id for agent in agents))
@pytest.mark.skip
@pytest.mark.django_db
class TestESIndexer:
@pytest.fixture
def es_client(self):
es_c = mock.Mock()
es_c.cluster.health.return_value = {'status': 'green'}
return es_c
@pytest.fixture(autouse=True)
def ack(self, monkeypatch):
m = mock.Mock()
monkeypatch.setattr('kombu.Message.ack', m)
return m
@pytest.fixture(autouse=True)
def sleep(self, monkeypatch):
m = mock.Mock()
monkeypatch.setattr('time.sleep', m)
return m
def message_factory(self, model='CreativeWork', ids=None):
if ids is None:
ids = [1, 2, 3]
return kombu.Message(
content_type='application/json',
body=json.dumps({model: ids}),
)
def test_empty(self, es_client):
indexer = indexing.ESIndexer(es_client, 'share_v1')
indexer.index()
indexer = indexing.ESIndexer(es_client, 'share_v1', self.message_factory(ids=[]), self.message_factory('Agent', ids=[]))
indexer.index()
def test_indexes(self, elastic):
messages = [
self.message_factory(ids=[x.id for x in factories.AbstractCreativeWorkFactory.create_batch(5)]),
self.message_factory(ids=[x.id for x in factories.AbstractCreativeWorkFactory.create_batch(3)]),
self.message_factory(ids=[x.id for x in factories.AbstractCreativeWorkFactory.create_batch(8)]),
]
indexer = indexing.ESIndexer(elastic.es_client, settings.ELASTICSEARCH['INDEX'], *messages)
indexer.index()
elastic.es_client.indices.refresh(elastic.es_index)
assert elastic.es_client.count(index=elastic.es_index, doc_type='creativeworks')['count'] == 16
assert set(
doc['_id'] for doc
in elastic.es_client.search(doc_type='creativeworks', index=elastic.es_index, _source=['_id'], size=20)['hits']['hits']
) == set(util.IDObfuscator.encode(work) for work in models.AbstractCreativeWork.objects.all())
def test_retries(self, sleep, es_client):
indexer = indexing.ESIndexer(es_client, settings.ELASTICSEARCH['INDEX'])
indexer._index = mock.Mock()
indexer._index.side_effect = Exception('Testing')
with pytest.raises(SystemExit):
indexer.index()
assert len(indexer._index.call_args_list) == indexer.MAX_RETRIES
assert sleep.call_args_list == [mock.call(2 ** (i + 1)) for i in range(indexer.MAX_RETRIES - 1)]
def test_checks_health(self, es_client):
indexer = indexing.ESIndexer(es_client, settings.ELASTICSEARCH['INDEX'])
indexer.index()
assert es_client.cluster.health.called is True
def test_red_fails(self, es_client):
es_client.cluster.health.return_value = {'status': 'red'}
indexer = indexing.ESIndexer(es_client, settings.ELASTICSEARCH['INDEX'])
with pytest.raises(ValueError) as e:
indexer._index()
assert e.value.args == ('ES cluster health is red, Refusing to index', )
def test_gentle_mode(self, es_client):
es_client.cluster.health.return_value = {'status': 'yellow'}
indexer = indexing.ESIndexer(es_client, settings.ELASTICSEARCH['INDEX'], self.message_factory())
indexer.bulk_stream = mock.MagicMock()
indexer._index()
assert indexer.bulk_stream.assert_called_once_with(models.CreativeWork, mock.ANY, mock.ANY, gentle=True) is None
@pytest.mark.django_db
def test_acks(self, elastic):
messages = [
self.message_factory(ids=[x.id for x in factories.AbstractCreativeWorkFactory.create_batch(6)]),
self.message_factory(ids=[x.id for x in factories.AbstractCreativeWorkFactory.create_batch(4)]),
self.message_factory(ids=[x.id for x in factories.AbstractCreativeWorkFactory.create_batch(8)]),
]
indexer = indexing.ESIndexer(elastic.es_client, settings.ELASTICSEARCH['INDEX'], *messages)
indexer.index()
for message in messages:
assert message.ack.called is True
|
import os, sys
class common:
def __init__(self):
""
|
# Advent of Code 2015 - Day 3 - Perfectly Spherical Houses in a Vacuum
# http://adventofcode.com/2015/day/3
import sys
# Return a list of visited houses given a moves list
# The starting position counts as visited
def visited_positions(directions):
position = (0, 0)
visited = [(0, 0)]
for step in directions:
x, y = position
if step == '<':
x -= 1
elif step == '>':
x += 1
elif step == 'v':
y -= 1
elif step == '^':
y += 1
position = (x, y)
if position not in visited:
visited.append(position)
return visited
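# For example, the hand-checked route '^>v<' visits four houses:
#   visited_positions('^>v<') == [(0, 0), (0, 1), (1, 1), (1, 0)]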
# Main
directions = sys.stdin.readline().strip()
print(len(visited_positions(directions)))
|
import numpy as np
from fast_bfmatcher.matchers import Matcher, MatchResult
class NumpyL2CCBFMatcher(Matcher):
"""Reference numpy implementation"""
@classmethod
def l2_distance_matrix(cls, X: np.ndarray, Y: np.ndarray):
sqnorm1 = np.sum(np.square(X), 1, keepdims=True)
sqnorm2 = np.sum(np.square(Y), 1, keepdims=True)
innerprod = np.dot(X, Y.T)
return sqnorm1 + np.transpose(sqnorm2) - 2.0 * innerprod
def match(self, X: np.ndarray, Y: np.ndarray) -> MatchResult:
dist_mat = self.l2_distance_matrix(X, Y)
row_matches = np.argmin(dist_mat, 1)
col_matches = np.argmin(dist_mat, 0)
num_rows = row_matches.shape[0]
inverse_row_indices = col_matches[row_matches]
row_indices = np.arange(0, num_rows, dtype=row_matches.dtype)
cross_checked = row_indices == inverse_row_indices
rows = row_indices[cross_checked]
cols = row_matches[cross_checked]
indices = np.transpose(np.stack([rows, cols]))
distances = dist_mat[rows, cols]
distances = np.sqrt(np.maximum(0, distances))
return MatchResult(indices, distances)
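# --- Hedged usage sketch (illustrative only; descriptor values are random and the
# MatchResult field names below are assumed from the constructor call above) ---
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.normal(size=(100, 128)).astype(np.float32)
    Y = rng.normal(size=(80, 128)).astype(np.float32)
    # l2_distance_matrix uses ||x - y||^2 = ||x||^2 + ||y||^2 - 2 x.y, and match()
    # keeps only mutually nearest (cross-checked) row/column pairs.
    result = NumpyL2CCBFMatcher().match(X, Y)
    print(result.indices.shape, result.distances.shape)  # roughly (n_matches, 2) and (n_matches,)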
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import os
from abc import ABCMeta, abstractmethod
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
raise ImportError('Could not import json or simplejson')
class Collector(object):
'''Collect and return schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def collect(self):
'''Yield collected schemas.
Each schema should be a Python dictionary.
'''
class FilesystemCollector(Collector):
def __init__(self, paths=None, recursive=True):
'''Initialise with *paths* to search.
If *recursive* is True then all subdirectories of *paths* will also be
searched.
'''
self.paths = paths
self.recursive = recursive
if self.paths is None:
self.paths = []
super(FilesystemCollector, self).__init__()
def collect(self):
'''Yield collected schemas.'''
for path in self.paths:
for base, directories, filenames in os.walk(path):
for filename in filenames:
_, extension = os.path.splitext(filename)
if extension != '.json':
continue
filepath = os.path.join(base, filename)
with open(filepath, 'r') as file_handler:
schema = json.load(file_handler)
yield schema
if not self.recursive:
del directories[:]
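# --- Hedged usage sketch (illustrative; './schemas' is a hypothetical directory) ---
if __name__ == '__main__':
    collector = FilesystemCollector(paths=['./schemas'], recursive=True)
    for collected_schema in collector.collect():
        print(collected_schema.get('id', '<no id>'))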
|
#!/usr/bin/python3
# FLYCOP
# Author: Beatriz García-Jiménez
# April 2018
# See SMAC documentation for more details about *wrapper_vX.py, wrapper_scenario_vX.txt and *wrapper_params_vX.pcs
src='ecoliLongTerm_TemplateOptimizeConsortiumV0/'
dst='ecoliLongTerm_TestTempV12'
dirPlots='../smac-output/ecoliLongTerm_PlotsScenario12/'
fitFunc='Yield'
import cobra
import pandas as pd
import tabulate
import re
import sys
import getopt
import os.path
import copy
import csv
import math
import cobra.flux_analysis.variability
import massedit
import subprocess
import shutil, errno
import statistics
import importlib
# Load code of individual run
sys.path.append('../Scripts')
import ecoliLongTermFLYCOP
# Parsing parameters:
# Reading the first 5 arguments in SMAC
instance = sys.argv[1]
specifics = sys.argv[2]
cutoff = int(float(sys.argv[3]) + 1)
runlength = int(sys.argv[4])
seed = int(sys.argv[5])
# Reading this case study parameters to optimize by SMAC
glu1 = float(sys.argv[7])
ac1 = float(sys.argv[9])
o21 = float(sys.argv[11])
glu2 = float(sys.argv[13])
ac2 = float(sys.argv[15])
o22 = float(sys.argv[17])
# Copy the template directory
if (os.path.exists(dst)):
shutil.rmtree(dst)
try:
shutil.copytree(src, dst)
except OSError as exc: # python >2.5
if exc.errno == errno.ENOTDIR:
shutil.copy(src, dst)
else: raise
os.chdir(dst)
if not os.path.exists(dirPlots):
os.makedirs(dirPlots)
# At a higher level: Running the wrapper-script in SMAC:
avgfitness,sdfitness=ecoliLongTermFLYCOP.ecoliLongTermFLYCOP_oneConf(glu1,ac1,o21,glu2,ac2,o22,fitFunc,dirPlots,3)
# Print wrapper Output:
print('Result of algorithm run: SAT, 0, 0, '+str(1-avgfitness)+', 0, '+str(seed)+', '+str(sdfitness)) # fitness maximize
#print('Result of algorithm run: SAT, 0, 0, '+str(avgfitness)+', 0, '+str(seed)+', '+str(sdfitness)) # fitness minimize
# Remove the temporal dir for this run result
os.chdir('..')
shutil.rmtree(dst)
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botbuilder.dialogs import (
ComponentDialog,
WaterfallDialog,
WaterfallStepContext,
DialogTurnResult,
)
from botbuilder.dialogs.prompts import TextPrompt, PromptOptions
from botbuilder.core import MessageFactory, TurnContext
from botbuilder.schema import InputHints
from booking_details import BookingDetails
from flight_booking_recognizer import FlightBookingRecognizer
from helpers import LuisHelper, Intent
from .booking_dialog import BookingDialog
class MainDialog(ComponentDialog):
def __init__(
self, luis_recognizer: FlightBookingRecognizer, booking_dialog: BookingDialog
):
super(MainDialog, self).__init__(MainDialog.__name__)
self._luis_recognizer = luis_recognizer
self._booking_dialog_id = booking_dialog.id
self.add_dialog(TextPrompt(TextPrompt.__name__))
self.add_dialog(booking_dialog)
self.add_dialog(
WaterfallDialog(
"WFDialog", [self.intro_step, self.act_step, self.final_step]
)
)
self.initial_dialog_id = "WFDialog"
async def intro_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
if not self._luis_recognizer.is_configured:
await step_context.context.send_activity(
MessageFactory.text(
"NOTE: LUIS is not configured. To enable all capabilities, add 'LuisAppId', 'LuisAPIKey' and "
"'LuisAPIHostName' to the appsettings.json file.",
input_hint=InputHints.ignoring_input,
)
)
return await step_context.next(None)
message_text = (
str(step_context.options)
if step_context.options
else "What can I help you with today?"
)
prompt_message = MessageFactory.text(
message_text, message_text, InputHints.expecting_input
)
return await step_context.prompt(
TextPrompt.__name__, PromptOptions(prompt=prompt_message)
)
async def act_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
if not self._luis_recognizer.is_configured:
            # LUIS is not configured, so we just run the BookingDialog path with an empty BookingDetails instance.
return await step_context.begin_dialog(
self._booking_dialog_id, BookingDetails()
)
# Call LUIS and gather any potential booking details. (Note the TurnContext has the response to the prompt.)
intent, luis_result = await LuisHelper.execute_luis_query(
self._luis_recognizer, step_context.context
)
if intent == Intent.BOOK_FLIGHT.value and luis_result:
# Show a warning for Origin and Destination if we can't resolve them.
await MainDialog._show_warning_for_unsupported_cities(
step_context.context, luis_result
)
# Run the BookingDialog giving it whatever details we have from the LUIS call.
return await step_context.begin_dialog(self._booking_dialog_id, luis_result)
if intent == Intent.GET_WEATHER.value:
get_weather_text = "TODO: get weather flow here"
get_weather_message = MessageFactory.text(
get_weather_text, get_weather_text, InputHints.ignoring_input
)
await step_context.context.send_activity(get_weather_message)
else:
didnt_understand_text = (
"Sorry, I didn't get that. Please try asking in a different way"
)
didnt_understand_message = MessageFactory.text(
didnt_understand_text, didnt_understand_text, InputHints.ignoring_input
)
await step_context.context.send_activity(didnt_understand_message)
return await step_context.next(None)
async def final_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
        # If the child dialog ("BookingDialog") was cancelled or the user failed to confirm,
        # the result here will be None.
if step_context.result is not None:
result = step_context.result
# Now we have all the booking details call the booking service.
# If the call to the booking service was successful tell the user.
# time_property = Timex(result.travel_date)
# travel_date_msg = time_property.to_natural_language(datetime.now())
msg_txt = f"I have you booked to {result.destination} from {result.origin} on {result.travel_date}"
message = MessageFactory.text(msg_txt, msg_txt, InputHints.ignoring_input)
await step_context.context.send_activity(message)
prompt_message = "What else can I do for you?"
return await step_context.replace_dialog(self.id, prompt_message)
@staticmethod
async def _show_warning_for_unsupported_cities(
context: TurnContext, luis_result: BookingDetails
) -> None:
if luis_result.unsupported_airports:
message_text = (
f"Sorry but the following airports are not supported:"
f" {', '.join(luis_result.unsupported_airports)}"
)
message = MessageFactory.text(
message_text, message_text, InputHints.ignoring_input
)
await context.send_activity(message)
|
#!/usr/bin/env python3
from PIL import Image
import numpy
import argparse
from os import scandir, path, remove
from hashlib import md5
def get_image(image_path, ignore_size=False, target_size=None):
"""Get a numpy array of an image so that one can access values[x][y]."""
"""Credits to https://stackoverflow.com/a/27960627"""
try:
image = Image.open(image_path, 'r')
    except Exception:
print(f"Invalid file or path at : {image_path}")
return None
    if target_size is not None:
image = image.resize(target_size)
width, height = image.size
    if not ignore_size and (width != 48 or height != 48):
print(f"Invalid image size (should be 48x48) : width {width} height {height} ({image_path})")
return None
    if image.mode not in ['RGB']:
print(f"Unknown mode: {image.mode} ({image_path})")
return None
try:
pixel_values = numpy.array(image.getdata()).reshape((width, height, 3))
    except Exception:
        print("Error while converting pixel values into a numpy array")
return None
return pixel_values
def get_image_hash(image_path):
image_hash = md5()
with open(image_path, "rb") as image:
for chunk in iter(lambda: image.read(4096), b""):
image_hash.update(chunk)
return image_hash.hexdigest()
def parse_mosaic_palette(mosaic_palette):
total_elements = len(mosaic_palette) // 2
parsed_mosaic_palette = [numpy.empty(total_elements, dtype=object), numpy.empty((total_elements, 3), dtype=int)]
for i in range(total_elements):
parsed_mosaic_palette[0][i] = mosaic_palette[i * 2]
parsed_mosaic_palette[1][i] = numpy.fromstring(mosaic_palette[i * 2 + 1], dtype=float, sep=' ')
return parsed_mosaic_palette
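# The "./cache" file written by get_mosaic_palette() stores two consecutive lines per
# mosaic tile: the tile's MD5 hash, then its average colour as three space-separated
# floats ("R G B"). parse_mosaic_palette() turns that flat list back into parallel
# arrays of hashes and average RGB values.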
def get_mosaic_palette(delete_flag):
print("Retrieving mosaic palette...")
if path.exists("./cache"):
with open("./cache", "r") as cache_file:
saved_values = [line.rstrip('\n') for line in cache_file]
else:
saved_values = []
cache = list()
with scandir("mosaics") as mosaics:
for entry in mosaics:
if not entry.is_file() or entry.name == ".gitkeep":
continue
try:
image_hash = get_image_hash(entry.path)
                if image_hash not in saved_values:
pixel_values = get_image(entry.path)
if pixel_values is None:
                        if delete_flag: remove(entry.path)
continue
pixel_values = numpy.average(pixel_values.transpose(2, 0, 1).reshape(3, -1), axis=1)
cache.append(image_hash)
cache.append(f"{pixel_values[0]} {pixel_values[1]} {pixel_values[2]}")
else:
cache.append(image_hash)
cache.append(saved_values[saved_values.index(image_hash) + 1])
            except Exception:
print(f"Invalid image at mosaics/{entry.name}")
if saved_values != cache:
with open("./cache", "w") as cache_file:
for element in cache:
cache_file.write(f"{element}\n")
print("Finished retrieving mosaic palette")
return parse_mosaic_palette(cache)
def get_image_mask(pixel_values, mosaic_palette):
shape = pixel_values.shape[:2]
image_mask = numpy.empty(shape, dtype=object)
print("Retrieving image mask...")
for x in range(shape[0]):
for y in range(shape[1]):
pixel_md5_index = numpy.average(numpy.abs(mosaic_palette[1] - pixel_values[x, y]), axis=1).argmin()
image_mask[x, y] = mosaic_palette[0][pixel_md5_index]
print("Finished retrieving of image mask")
return image_mask
def load_used_images(image_mask):
images_data = {}
print("Loading used images...")
with scandir("mosaics") as mosaics:
for entry in mosaics:
if not entry.is_file():
continue
try:
image_hash = get_image_hash(entry.path)
if image_hash in image_mask:
images_data[image_hash] = Image.open(entry.path, 'r')
            except Exception:
print(f"Error while loading image at : mosaics/{entry.name}")
return None
print("Finished loading used images")
return images_data
def generate_image(image_mask, loaded_data):
shape = image_mask.shape[:2]
print("Generating final image...")
new_image = Image.new("RGB", (48 * shape[0], 48 * shape[1]), (255, 255, 255))
for x in range(shape[0]):
for y in range(shape[1]):
image_data = loaded_data.get(image_mask[x, y])
new_image.paste(image_data, (x * 48, y * 48))
print("Finished generating final image")
return new_image
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Transform images into mosaic filled images')
parser.add_argument('image_path', type=str, help='path where is stored the image you want to process')
parser.add_argument('-o', dest='save_path', type=str, action='store', default=None, help='destination where you want to save the result (default only preview the image)')
parser.add_argument('-r', dest='resize', type=str, action='store', default=None, help='size in format \'width,height\' which the first image will be resized')
parser.add_argument('-d', dest='delete', action='store_const', const=True, default=False, help='delete invalid images in mosaics/')
args = parser.parse_args()
pixel_values = get_image(args.image_path, True, None if args.resize is None else tuple(map(int, args.resize.split(',')[:2])))
if pixel_values is None:
quit(1)
mosaic_palette = get_mosaic_palette(args.delete)
if len(mosaic_palette[0]) == 0:
print("Please supply more than one mosaic in the mosaics folder to perform the operation")
quit(1)
image_mask = get_image_mask(pixel_values, mosaic_palette)
loaded_images = load_used_images(image_mask)
if loaded_images is None:
quit(1)
new_image = generate_image(image_mask, loaded_images)
if args.save_path is None:
new_image.show()
else:
new_image.save(args.save_path)
quit(0)
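# Example invocation (hypothetical file names; assumes a populated mosaics/ directory
# next to this script):
#   python3 mosaic.py photo.jpg -r 64,64 -o result.png -d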
|
# -*- coding: utf-8 -*-
###
# (C) Copyright [2019] Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from pprint import pprint
from hpOneView.oneview_client import OneViewClient
from config_loader import try_load_from_file
# This resource is only available on C7000 enclosures
config = {
"ip": "<oneview_ip>",
"credentials": {
"userName": "<username>",
"password": "<password>"
}
}
# A Logical Switch Group, the Switches IP address/host name, and credentials must be set to run this example
logical_switch_group_name = '<lsg_name>'
switch_ip_1 = '<switch_ip_or_hostname>'
switch_ip_2 = '<switch_ip_or_hostname>'
ssh_username = '<user_name_for_switches>'
ssh_password = '<password_for_switches>'
# To run the scope patch operations in this example, a scope name is required.
scope_name = "<scope_name>"
# Try to load the config from a file (if a config file exists)
config = try_load_from_file(config)
oneview_client = OneViewClient(config)
# Check for existence of the specified Logical Switch Group; stop execution otherwise
logical_switch_groups = oneview_client.logical_switch_groups.get_by('name', logical_switch_group_name)
if logical_switch_groups:
    logical_switch_group = logical_switch_groups[0]
    print('Found logical switch group "%s" with uri: %s' % (logical_switch_group['name'], logical_switch_group['uri']))
else:
    raise Exception('Logical switch group "%s" was not found on the appliance.' % logical_switch_group_name)
switch_connection_properties = {
"connectionProperties": [{
"propertyName": "SshBasicAuthCredentialUser",
"value": ssh_username,
"valueFormat": "Unknown",
"valueType": "String"
}, {
"propertyName": "SshBasicAuthCredentialPassword",
"value": ssh_password,
"valueFormat": "SecuritySensitive",
"valueType": "String"
}]
}
options = {
"logicalSwitch": {
"name": "Test Logical Switch",
"logicalSwitchGroupUri": logical_switch_group['uri'],
"switchCredentialConfiguration": [
{
"snmpV1Configuration": {
"communityString": "public"
},
"logicalSwitchManagementHost": switch_ip_1,
"snmpVersion": "SNMPv1",
"snmpPort": 161
}, {
"snmpV1Configuration": {
"communityString": "public"
},
"logicalSwitchManagementHost": switch_ip_2,
"snmpVersion": "SNMPv1",
"snmpPort": 161
}
]
},
"logicalSwitchCredentials": [switch_connection_properties, switch_connection_properties]
}
# Create a Logical Switch
logical_switch = oneview_client.logical_switches.create(options)
print("\nCreated Logical Switch '{name}' successfully.\n uri = '{uri}'".format(**logical_switch))
# Find the recently created Logical Switch by name
logical_switch = oneview_client.logical_switches.get_by('name', 'Test Logical Switch')[0]
print("\nFound Logical Switch by name: '{name}'.\n uri = '{uri}'".format(**logical_switch))
# Update the name of the Logical Switch
options_update = {
"logicalSwitch": {
"name": "Renamed Logical Switch",
"uri": logical_switch['uri'],
"switchCredentialConfiguration": logical_switch['switchCredentialConfiguration'],
"logicalSwitchGroupUri": logical_switch_group['uri'],
"consistencyStatus": "CONSISTENT"
},
"logicalSwitchCredentials": [switch_connection_properties, switch_connection_properties]
}
logical_switch = oneview_client.logical_switches.update(options_update)
print("\nUpdated Logical Switch successfully.\n uri = '{uri}'".format(**logical_switch))
print(" with attribute name = {name}".format(**logical_switch))
# Get scope to be added
print("\nGet the scope named '%s'." % scope_name)
scope = oneview_client.scopes.get_by_name(scope_name)
# Performs a patch operation on the Logical Switch
if scope:
print("\nPatches the logical switch assigning the '%s' scope to it." % scope_name)
logical_switch = oneview_client.logical_switches.patch(logical_switch['uri'],
'replace',
'/scopeUris',
[scope['uri']])
pprint(logical_switch)
# Get all, with defaults
print("\nGet all Logical Switches")
logical_switches = oneview_client.logical_switches.get_all()
for logical_switch in logical_switches:
print(' Name: %s' % logical_switch['name'])
# Get by URI
print("\nGet a Logical Switch by URI")
logical_switch = oneview_client.logical_switches.get(logical_switch['uri'])
pprint(logical_switch)
# Reclaim the top-of-rack switches in the logical switch
print("\nReclaim the top-of-rack switches in the logical switch")
logical_switch = oneview_client.logical_switches.refresh(logical_switch['uri'])
print(" Done.")
# Delete the Logical Switch
oneview_client.logical_switches.delete(logical_switch)
print("\nLogical switch deleted successfully.")
|
class Space:
    def __init__(self, space=None, dtile=None):
self.space = space
self.dtile = dtile
def get_space(self):
return self.space
def change_space(self, new_space):
self.space = new_space
def get_dtile(self):
return self.dtile
def change_dtile(self, new_dtile):
self.dtile = new_dtile
|
"""
algovision - a library for differentiable algorithms and integrating algorithms into neural networks.
Copyright (c) Felix Petersen.
This source code is licensed under the MIT license found in the LICENSE file.
"""
__author__ = "Felix Petersen"
__email__ = "ads0399@felix-petersen.de"
import setuptools
with open('README.md', 'r', encoding='utf-8') as fh:
long_description = fh.read()
setuptools.setup(
name='algovision',
version='v0.1.1',
description='AlgoVision - A Framework for Differentiable Algorithms and Algorithmic Supervision',
author='Felix Petersen',
author_email=__email__,
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/Felix-Petersen/algovision',
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='MIT License',
package_dir={'algovision': 'algovision'},
packages=['algovision'],
python_requires='>=3.6',
install_requires=[
'numpy',
'torch>=1.9.0'
]
)
|
import itertools
import pytest
import torch
from common_utils import assert_equal
from test_prototype_transforms_functional import make_images, make_bounding_boxes, make_one_hot_labels
from torchvision.prototype import transforms, features
from torchvision.transforms.functional import to_pil_image, pil_to_tensor
def make_vanilla_tensor_images(*args, **kwargs):
for image in make_images(*args, **kwargs):
if image.ndim > 3:
continue
yield image.data
def make_pil_images(*args, **kwargs):
for image in make_vanilla_tensor_images(*args, **kwargs):
yield to_pil_image(image)
def make_vanilla_tensor_bounding_boxes(*args, **kwargs):
for bounding_box in make_bounding_boxes(*args, **kwargs):
yield bounding_box.data
def parametrize(transforms_with_inputs):
return pytest.mark.parametrize(
("transform", "input"),
[
pytest.param(
transform,
input,
id=f"{type(transform).__name__}-{type(input).__module__}.{type(input).__name__}-{idx}",
)
for transform, inputs in transforms_with_inputs
for idx, input in enumerate(inputs)
],
)
def parametrize_from_transforms(*transforms):
transforms_with_inputs = []
for transform in transforms:
for creation_fn in [
make_images,
make_bounding_boxes,
make_one_hot_labels,
make_vanilla_tensor_images,
make_pil_images,
]:
inputs = list(creation_fn())
try:
output = transform(inputs[0])
except Exception:
continue
else:
if output is inputs[0]:
continue
transforms_with_inputs.append((transform, inputs))
return parametrize(transforms_with_inputs)
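# parametrize_from_transforms() pairs each transform with every input family it actually
# supports: input kinds that raise, or that the transform returns unchanged (a no-op
# pass-through), are skipped when building the parametrization.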
class TestSmoke:
@parametrize_from_transforms(
transforms.RandomErasing(p=1.0),
transforms.Resize([16, 16]),
transforms.CenterCrop([16, 16]),
transforms.ConvertImageDtype(),
transforms.RandomHorizontalFlip(),
)
def test_common(self, transform, input):
transform(input)
@parametrize(
[
(
transform,
[
dict(
image=features.Image.new_like(image, image.unsqueeze(0), dtype=torch.float),
one_hot_label=features.OneHotLabel.new_like(
one_hot_label, one_hot_label.unsqueeze(0), dtype=torch.float
),
)
for image, one_hot_label in itertools.product(make_images(), make_one_hot_labels())
],
)
for transform in [
transforms.RandomMixup(alpha=1.0),
transforms.RandomCutmix(alpha=1.0),
]
]
)
def test_mixup_cutmix(self, transform, input):
transform(input)
@parametrize(
[
(
transform,
itertools.chain.from_iterable(
fn(
color_spaces=[
features.ColorSpace.GRAY,
features.ColorSpace.RGB,
],
dtypes=[torch.uint8],
extra_dims=[(4,)],
)
for fn in [
make_images,
make_vanilla_tensor_images,
make_pil_images,
]
),
)
for transform in (
transforms.RandAugment(),
transforms.TrivialAugmentWide(),
transforms.AutoAugment(),
transforms.AugMix(),
)
]
)
def test_auto_augment(self, transform, input):
transform(input)
@parametrize(
[
(
transforms.Normalize(mean=[0.0, 0.0, 0.0], std=[1.0, 1.0, 1.0]),
itertools.chain.from_iterable(
fn(color_spaces=[features.ColorSpace.RGB], dtypes=[torch.float32])
for fn in [
make_images,
make_vanilla_tensor_images,
]
),
),
]
)
def test_normalize(self, transform, input):
transform(input)
@parametrize(
[
(
transforms.RandomResizedCrop([16, 16]),
itertools.chain(
make_images(extra_dims=[(4,)]),
make_vanilla_tensor_images(),
make_pil_images(),
),
)
]
)
def test_random_resized_crop(self, transform, input):
transform(input)
@parametrize(
[
(
transforms.ConvertImageColorSpace(color_space=new_color_space, old_color_space=old_color_space),
itertools.chain.from_iterable(
[
fn(color_spaces=[old_color_space])
for fn in (
make_images,
make_vanilla_tensor_images,
make_pil_images,
)
]
),
)
for old_color_space, new_color_space in itertools.product(
[
features.ColorSpace.GRAY,
features.ColorSpace.GRAY_ALPHA,
features.ColorSpace.RGB,
features.ColorSpace.RGB_ALPHA,
],
repeat=2,
)
]
)
def test_convert_image_color_space(self, transform, input):
transform(input)
@pytest.mark.parametrize("p", [0.0, 1.0])
class TestRandomHorizontalFlip:
def input_expected_image_tensor(self, p, dtype=torch.float32):
input = torch.tensor([[[0, 1], [0, 1]], [[1, 0], [1, 0]]], dtype=dtype)
expected = torch.tensor([[[1, 0], [1, 0]], [[0, 1], [0, 1]]], dtype=dtype)
        return input, expected if p == 1 else input  # the conditional applies only to the second tuple element
def test_simple_tensor(self, p):
input, expected = self.input_expected_image_tensor(p)
transform = transforms.RandomHorizontalFlip(p=p)
actual = transform(input)
assert_equal(expected, actual)
def test_pil_image(self, p):
input, expected = self.input_expected_image_tensor(p, dtype=torch.uint8)
transform = transforms.RandomHorizontalFlip(p=p)
actual = transform(to_pil_image(input))
assert_equal(expected, pil_to_tensor(actual))
def test_features_image(self, p):
input, expected = self.input_expected_image_tensor(p)
transform = transforms.RandomHorizontalFlip(p=p)
actual = transform(features.Image(input))
assert_equal(features.Image(expected), actual)
def test_features_segmentation_mask(self, p):
input, expected = self.input_expected_image_tensor(p)
transform = transforms.RandomHorizontalFlip(p=p)
actual = transform(features.SegmentationMask(input))
assert_equal(features.SegmentationMask(expected), actual)
def test_features_bounding_box(self, p):
input = features.BoundingBox([0, 0, 5, 5], format=features.BoundingBoxFormat.XYXY, image_size=(10, 10))
transform = transforms.RandomHorizontalFlip(p=p)
actual = transform(input)
expected_image_tensor = torch.tensor([5, 0, 10, 5]) if p == 1.0 else input
expected = features.BoundingBox.new_like(input, data=expected_image_tensor)
assert_equal(expected, actual)
assert actual.format == expected.format
assert actual.image_size == expected.image_size
|
# import pandas as pd
#
#
# def read_data(file_path):
# data = pd.read_json(file_path, lines=True, orient='records')
# return data
#
# file_path = 'okef_eth_usd_q.json'
# data = read_data(file_path)
# print(data)
# Converted by hand instead:
# 1. Open the data in EmEditor and replace every "][" with ","
# 2. Convert the JSON to CSV online at https://www.bejson.com/json/json2excel/
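# A programmatic alternative (sketch only; assumes the raw file really is a series of
# JSON arrays written back-to-back, so that the "][" -> "," replacement above yields a
# single valid JSON array):
# import io
# import pandas as pd
# with open('okef_eth_usd_q.json') as f:
#     raw = f.read()
# df = pd.read_json(io.StringIO(raw.replace('][', ',')))  # merge the concatenated arrays
# df.to_csv('okef_eth_usd_q.csv', index=False)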
|
import os
import sys
from time import sleep
# Ambari includes
from resource_management.core.exceptions import ComponentIsNotRunning
from resource_management.libraries.script import Script
# Custom service test classes includes
from integrated_base_test import IntegratedBaseTestCase
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PACKAGE_DIR = os.path.join(SCRIPT_DIR, '../scripts/')
SERVICE_DIR = os.path.join(SCRIPT_DIR, '../')
sys.path.append(PACKAGE_DIR)
sys.path.append(SERVICE_DIR)
# Custom service scripts includes
import params
from mongo_db import MongoDBServer
from mongo_base import InstanceConfig
from mongo_base import InstanceStatus
class IntegratedReplicaMongodTestCase(IntegratedBaseTestCase):
def setUp(self):
self.as_super = super(IntegratedReplicaMongodTestCase, self)
self.as_super.setUp()
params.try_interval = 4
params.times_to_try = 10
# Configuring and Installing mongod dependencies
server = MongoDBServer()
server.my_hostname = 'node1.test.com'
server.configure(self.env)
server.install(self.env)
def several_hosts_setup(self):
Script.config['clusterHostInfo'] = {
'mongos_hosts': [],
'mongodb_hosts': ['node1.test.com','node2.test.com','node3.test.com'],
'mongodc_hosts': []
}
params.mongod_cluster_definition = 'node1.test.com,node2.test.com/arbiter,node3.test.com,node2.test.com'
expected_cluster_status_for_several_hosts_stopped = [
('shard0',['node1.test.com','node2.test.com/arbiter','node3.test.com','node2.test.com'], [
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_0.pid',
final_db_path='/var/lib/mongodb/node1_shard0_0',
log_file='/var/log/mongodb/node1_shard0_0.log',
db_port='27025',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_0.pid',
final_db_path='/var/lib/mongodb/node2_shard0_0',
log_file='/var/log/mongodb/node2_shard0_0.log',
db_port='27025',
host_name='node2.test.com',
is_arbiter=True,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node3_shard0_0.pid',
final_db_path='/var/lib/mongodb/node3_shard0_0',
log_file='/var/log/mongodb/node3_shard0_0.log',
db_port='27025',
host_name='node3.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_1.pid',
final_db_path='/var/lib/mongodb/node2_shard0_1',
log_file='/var/log/mongodb/node2_shard0_1.log',
db_port='27030',
host_name='node2.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None)])]
def one_host_setup(self):
Script.config['clusterHostInfo'] = {
'mongos_hosts': [],
'mongodb_hosts': ['node1.test.com'],
'mongodc_hosts': []
}
params.mongod_cluster_definition = 'node1.test.com,node1.test.com/arbiter,node1.test.com'
expected_cluster_status_for_one_host_stopped = [
('shard0',['node1.test.com','node1.test.com/arbiter','node1.test.com'], [
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_0.pid',
final_db_path='/var/lib/mongodb/node1_shard0_0',
log_file='/var/log/mongodb/node1_shard0_0.log',
db_port='27025',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_1.pid',
final_db_path='/var/lib/mongodb/node1_shard0_1',
log_file='/var/log/mongodb/node1_shard0_1.log',
db_port='27030',
host_name='node1.test.com',
is_arbiter=True,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_2.pid',
final_db_path='/var/lib/mongodb/node1_shard0_2',
log_file='/var/log/mongodb/node1_shard0_2.log',
db_port='27031',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None)])]
def test_get_cluster_data_with_one_host(self):
self.one_host_setup()
server = MongoDBServer()
server.my_hostname = 'node1.test.com'
expectedClusterData = [('shard0', ['node1.test.com', 'node1.test.com/arbiter', 'node1.test.com'],
[InstanceConfig(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_0.pid',
final_db_path='/var/lib/mongodb/node1_shard0_0',
log_file='/var/log/mongodb/node1_shard0_0.log',
db_port='27025',
host_name='node1.test.com',
is_arbiter=False),
InstanceConfig(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_1.pid',
final_db_path='/var/lib/mongodb/node1_shard0_1',
log_file='/var/log/mongodb/node1_shard0_1.log',
db_port='27030',
host_name='node1.test.com',
is_arbiter=True),
InstanceConfig(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_2.pid',
final_db_path='/var/lib/mongodb/node1_shard0_2',
log_file='/var/log/mongodb/node1_shard0_2.log',
db_port='27031',
host_name='node1.test.com',
is_arbiter=False)])]
clusterData = server.getClusterData()
self.assertEqual(clusterData,expectedClusterData,"The cluster data for the replicaset is not right")
def test_get_cluster_status_with_one_host(self):
self.one_host_setup()
server = MongoDBServer()
server.my_hostname = 'node1.test.com'
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus,self.expected_cluster_status_for_one_host_stopped,
"The cluster status result before stating the replicaset is not right")
def test_stopping_an_already_stopped_cluster(self):
self.one_host_setup()
server = MongoDBServer()
server.my_hostname = 'node1.test.com'
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus,self.expected_cluster_status_for_one_host_stopped,
"The cluster status result before stating the replicaset is not right")
server.stop(self.env)
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_one_host_stopped,
"The cluster status result after stopping the replicaset is not right")
def test_replicaset_in_one_host(self):
self.one_host_setup()
server = MongoDBServer()
server.my_hostname = 'node1.test.com'
with self.assertRaises(ComponentIsNotRunning):
server.status(self.env)
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_one_host_stopped,
"The cluster status result before stating the replicaset is not right")
server.start(self.env)
sleep(self.SLEEP_INTERVAL_AFTER_START_A_INSTANCE)
server.status(self.env)
expectedClusterStatus = [('shard0', ['node1.test.com', 'node1.test.com/arbiter', 'node1.test.com'], [
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_0.pid',
final_db_path='/var/lib/mongodb/node1_shard0_0',
log_file='/var/log/mongodb/node1_shard0_0.log',
db_port='27025',
host_name='node1.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=True,
repl_role="PRIMARY"),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_1.pid',
final_db_path='/var/lib/mongodb/node1_shard0_1',
log_file='/var/log/mongodb/node1_shard0_1.log',
db_port='27030',
host_name='node1.test.com',
is_arbiter=True,
is_started=True,
is_repl_configurated=True,
repl_role="SECONDARY"),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_2.pid',
final_db_path='/var/lib/mongodb/node1_shard0_2',
log_file='/var/log/mongodb/node1_shard0_2.log',
db_port='27031',
host_name='node1.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=True,
repl_role="SECONDARY")])]
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, expectedClusterStatus,"The cluster status result for a started replicaset is "
"not right")
server.stop(self.env)
with self.assertRaises(ComponentIsNotRunning):
server.status(self.env)
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_one_host_stopped,
"The cluster status result after stopping the replicaset is not right")
def test_get_cluster_status_with_several_hosts(self):
self.several_hosts_setup()
server = MongoDBServer()
server.my_hostname = 'node1.test.com'
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
"The cluster status result before stating the replicaset is not right")
def test_replicaset_with_several_hosts(self):
self.several_hosts_setup()
server3 = MongoDBServer()
server3.my_hostname = 'node3.test.com'
server2 = MongoDBServer()
server2.my_hostname = 'node2.test.com'
server1 = MongoDBServer()
server1.my_hostname = 'node1.test.com'
clusterStatus = server3.getClusterStatus(server3.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
"The cluster status result before stating the replicaset is not right")
server3.start(self.env)
sleep(self.SLEEP_INTERVAL_AFTER_START_A_INSTANCE)
server3.status(self.env)
expectedClusterStatusServer3On = [
('shard0',['node1.test.com','node2.test.com/arbiter','node3.test.com','node2.test.com'], [
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_0.pid',
final_db_path='/var/lib/mongodb/node1_shard0_0',
log_file='/var/log/mongodb/node1_shard0_0.log',
db_port='27025',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_0.pid',
final_db_path='/var/lib/mongodb/node2_shard0_0',
log_file='/var/log/mongodb/node2_shard0_0.log',
db_port='27025',
host_name='node2.test.com',
is_arbiter=True,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node3_shard0_0.pid',
final_db_path='/var/lib/mongodb/node3_shard0_0',
log_file='/var/log/mongodb/node3_shard0_0.log',
db_port='27025',
host_name='node3.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=False,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_1.pid',
final_db_path='/var/lib/mongodb/node2_shard0_1',
log_file='/var/log/mongodb/node2_shard0_1.log',
db_port='27030',
host_name='node2.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None)])]
clusterStatus = server3.getClusterStatus(server3.getClusterData())
self.assertEqual(clusterStatus, expectedClusterStatusServer3On, "The cluster status result for a started node3 "
"in the replicaset is not right")
server2.start(self.env)
sleep(self.SLEEP_INTERVAL_AFTER_START_A_INSTANCE)
server2.status(self.env)
expectedClusterStatusServer2On = [
('shard0',['node1.test.com','node2.test.com/arbiter','node3.test.com','node2.test.com'], [
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_0.pid',
final_db_path='/var/lib/mongodb/node1_shard0_0',
log_file='/var/log/mongodb/node1_shard0_0.log',
db_port='27025',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_0.pid',
final_db_path='/var/lib/mongodb/node2_shard0_0',
log_file='/var/log/mongodb/node2_shard0_0.log',
db_port='27025',
host_name='node2.test.com',
is_arbiter=True,
is_started=True,
is_repl_configurated=False,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node3_shard0_0.pid',
final_db_path='/var/lib/mongodb/node3_shard0_0',
log_file='/var/log/mongodb/node3_shard0_0.log',
db_port='27025',
host_name='node3.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=False,
repl_role=None),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_1.pid',
final_db_path='/var/lib/mongodb/node2_shard0_1',
log_file='/var/log/mongodb/node2_shard0_1.log',
db_port='27030',
host_name='node2.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=False,
repl_role=None)])]
clusterStatus = server2.getClusterStatus(server2.getClusterData())
self.assertEqual(clusterStatus, expectedClusterStatusServer2On, "The cluster status result for a started node2"
" in the replicaset is not right")
server1.start(self.env)
sleep(self.SLEEP_INTERVAL_AFTER_START_A_INSTANCE)
server1.status(self.env)
expectedClusterStatusServer1On = [
('shard0',['node1.test.com','node2.test.com/arbiter','node3.test.com','node2.test.com'], [
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node1_shard0_0.pid',
final_db_path='/var/lib/mongodb/node1_shard0_0',
log_file='/var/log/mongodb/node1_shard0_0.log',
db_port='27025',
host_name='node1.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=True,
repl_role="PRIMARY"),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_0.pid',
final_db_path='/var/lib/mongodb/node2_shard0_0',
log_file='/var/log/mongodb/node2_shard0_0.log',
db_port='27025',
host_name='node2.test.com',
is_arbiter=True,
is_started=True,
is_repl_configurated=True,
repl_role="SECONDARY"),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node3_shard0_0.pid',
final_db_path='/var/lib/mongodb/node3_shard0_0',
log_file='/var/log/mongodb/node3_shard0_0.log',
db_port='27025',
host_name='node3.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=True,
repl_role="SECONDARY"),
InstanceStatus(shard_name='shard0',
pid_file_name='/var/run/mongodb/node2_shard0_1.pid',
final_db_path='/var/lib/mongodb/node2_shard0_1',
log_file='/var/log/mongodb/node2_shard0_1.log',
db_port='27030',
host_name='node2.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=True,
repl_role="SECONDARY")])]
clusterStatus = server1.getClusterStatus(server1.getClusterData())
print "\n\n\n\n\n clusterStatus: " + str(clusterStatus) + '\n\n\n\n\n\n'
print "\n\n\n\n\n expectedClusterStatusServer1On: " + str(expectedClusterStatusServer1On) + '\n\n\n\n\n\n'
self.assertEqual(clusterStatus, expectedClusterStatusServer1On, "The cluster status result for a started node1"
" in the replicaset is not right")
server2.stop(self.env)
with self.assertRaises(ComponentIsNotRunning):
server2.status(self.env)
server1.stop(self.env)
with self.assertRaises(ComponentIsNotRunning):
server1.status(self.env)
server3.stop(self.env)
with self.assertRaises(ComponentIsNotRunning):
server3.status(self.env)
clusterStatus = server3.getClusterStatus(server3.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
"The cluster status result after stopping the replicaset is not right")
|
import datetime
import time
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.crypto import get_random_string
from Native_Service.models import NativePost
class STAGES:
IN_QUEUE = "W KOLEJCE"
WAITING_FOR_ACCEPT = "OCZEKIWANIE NA AKCEPTACJĘ"
ACCEPTED = "ZAAKCEPTOWANE"
PAYMENT_DONE = "OPŁACONE"
IN_PROGRESS = "W TRAKCIE REALIZACJI"
DONE = "ZAKOŃCZONE"
REJECTED = "ODRZUCONE"
class NSMethods:
""" NativeService necessary methods. """
@staticmethod
def create_secret_key():
""" Method generates secret keys. """
return get_random_string(12)
@staticmethod
def get_nativepost_data(secret_key):
""" Function returns all data from native post filtered with secret_key."""
data = NativePost.objects.filter(secret_key=secret_key)
data_dict = {}
for i in data.values():
data_dict.update(i)
return data_dict
@staticmethod
def date_today():
""" Returns tuple with actual values: YEAR, MONTH, DAY"""
time = datetime.datetime.now()
return time.strftime("%Y"), time.strftime("%m"), time.strftime("%d")
class ProgressStages:
""" The basic logic of the email alert system and orders support. """
@staticmethod
def in_queue(data=None, url=None):
EmailGenerator().customer_queue_alert_html(data)
EmailGenerator().performer_queue_alert_html(data, url)
@staticmethod
def waiting_for_accept(data=None, url=None, secret_key=None):
post = NativePost.objects.get(secret_key=secret_key)
if data["stage"] == STAGES.IN_QUEUE:
post.stage = STAGES.WAITING_FOR_ACCEPT
post.save()
EmailGenerator().customer_price_to_accept_html(data, url)
else:
raise PermissionError("The valuation is not in queue.")
@staticmethod
def accepted(data=None, secret_key=None):
post = NativePost.objects.get(secret_key=secret_key)
if data["stage"] == STAGES.WAITING_FOR_ACCEPT:
post.stage = STAGES.ACCEPTED
post.save()
EmailGenerator().performer_order_accepted_html(data)
else:
raise PermissionError("The valuation is not waiting for accept.")
@staticmethod
def payment_done(data=None, secret_key=None, url=None):
ALLOWED_STAGES = [STAGES.ACCEPTED, STAGES.REJECTED]
post = NativePost.objects.get(secret_key=secret_key)
if data["stage"] in ALLOWED_STAGES:
post.stage = STAGES.PAYMENT_DONE
post.save()
EmailGenerator.customer_payment_done_html(data)
EmailGenerator.performer_payment_done_html(data, url)
else:
raise PermissionError("The order stage is not 'ACCEPTED' at the moment.")
@staticmethod
def in_progress(data=None, secret_key=None, url=None):
post = NativePost.objects.get(secret_key=secret_key)
if data["stage"] == STAGES.PAYMENT_DONE:
post.stage = STAGES.IN_PROGRESS
post.save()
EmailGenerator.customer_order_in_progress_html(data)
EmailGenerator.performer_order_in_progress_html(data, url)
else:
raise PermissionError(
"The order stage is not 'PAYMENT_DONE' at the moment."
)
@staticmethod
def done(data=None, secret_key=None, attachment=None):
# todo new field with end datetime
post = NativePost.objects.get(secret_key=secret_key)
if data["stage"] != STAGES.DONE:
post.stage = STAGES.DONE
post.save()
EmailGenerator().customer_order_done_with_files(data, attachment)
else:
raise PermissionError("The order stage is already 'DONE' at the moment.")
@staticmethod
def order_rejected(data=None, secret_key=None):
post = NativePost.objects.get(secret_key=secret_key)
if data["stage"] != STAGES.REJECTED:
post.stage = STAGES.REJECTED
post.save()
EmailGenerator.customer_order_rejected(data)
else:
raise PermissionError(
"The order stage is already 'REJECTED' at the moment."
)
@staticmethod
def correct_payment(secret_key=None):
payment_done = False
        while not payment_done:
stage = NSMethods.get_nativepost_data(secret_key)["stage"]
if stage == STAGES.PAYMENT_DONE:
payment_done = True
else:
time.sleep(2)
if stage == STAGES.REJECTED:
break
return payment_done
@staticmethod
def payment_rejected(data=None, secret_key=None):
# todo email for rejected payment
ALLOWED_STAGES = [STAGES.REJECTED, STAGES.ACCEPTED]
post = NativePost.objects.get(secret_key=secret_key)
if data["stage"] in ALLOWED_STAGES:
post.stage = STAGES.REJECTED
post.save()
else:
            # todo: when PayU reports the previous operation as 'canceled' after the current transfer succeeds, the system raises an error
PermissionError(
"The order stage is not 'ACCEPTED' or 'REJECTED' at the moment."
)
class UrlsGenerator:
""" The class contains all methods generating URL's"""
@staticmethod
def view_final_pricing(secret_key):
""" Method generates url for performer to make some price. """
return f"{settings.HOST_URL}final_pricing/{secret_key}/"
@staticmethod
def view_price_for_customer(secret_key):
""" Method generates url for customer to see price. """
return f"{settings.HOST_URL}price_for_you/{secret_key}/"
@staticmethod
def view_price_accepted_payu(secret_key):
""" Method generates url for customer for price accept. """
return f"{settings.HOST_URL}price_accepted/{secret_key}/"
@staticmethod
def view_file_list_view(secret_key):
""" Method generates url for performer to see list of files. """
return f"{settings.HOST_URL}file_list/{secret_key}/"
@staticmethod
def view_notify(secret_key):
""" Method generates url for PayU to sent request. """
return f"{settings.HOST_URL}notify/"
@staticmethod
def view_successful_payment(secret_key):
""" Method generates url for performer to see successful_payment view. """
return f"{settings.HOST_URL}successful_payment/{secret_key}/"
@staticmethod
def view_order_in_progress(secret_key):
""" Method generates url for performer to set stage on 'in_progress'. """
return f"{settings.HOST_URL}in_progress/{secret_key}/"
@staticmethod
def view_reject_order(secret_key):
""" Method generates url for performer to reject the order. """
return f"{settings.HOST_URL}reject_order/{secret_key}/"
@staticmethod
def view_done(secret_key):
""" Method generates url for performer to set stage on 'done'. """
return f"{settings.HOST_URL}order_done/{secret_key}/"
@staticmethod
def list_order_files_for_file_list_view(secret_key, coded_files_list, url_date):
""" Method generates url for performer to take look at file list. """
return [
f"{settings.HOST_URL}media/uploads/{url_date}/{secret_key}/{f}"
for f in coded_files_list
]
class EmailGenerator:
""" The class contains all email sending methods. """
@staticmethod
def performer_queue_alert_html(data=None, url=None):
now = datetime.datetime.now().strftime("%d-%m-%Y %H:%M")
data.update({"url": url, "now": now})
subject, from_email = "Nowe zlecenie!", settings.SENDER
text_message = f"Nowe zlecenie! {url}"
msg_html = render_to_string("emails/performer_queue_alert.html", data)
msg = EmailMultiAlternatives(
subject, text_message, from_email, settings.PERFORMERS_LIST
)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def customer_queue_alert_html(data=None):
recipients_list = [data["email"]]
subject, from_email = "Wycena zlecenia w NativeService!", settings.SENDER
text_message = f"Wyceniamy Twoje zlecenie, czekaj na kontakt!"
msg_html = render_to_string("emails/customer_queue_alert.html", data)
msg = EmailMultiAlternatives(subject, text_message, from_email, recipients_list)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def customer_price_to_accept_html(data=None, url=None):
data["url"] = url
recipients_list = [data["email"]]
subject, from_email = "Wycena zlecenia gotowa!", settings.SENDER
text_message = f"Oto Twoja wycena, sprawdź link. {url}"
msg_html = render_to_string("emails/customer_price_to_accept.html", data)
msg = EmailMultiAlternatives(subject, text_message, from_email, recipients_list)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def performer_order_accepted_html(data=None):
subject, from_email = "Warunki zaakceptowane!", settings.SENDER
text_message = f"Użytkownik zaakceptował warunki! Oczekiwanie na płatność."
msg_html = render_to_string(
"emails/performer_price_accepted_waiting_for_payment.html", data
)
msg = EmailMultiAlternatives(
subject, text_message, from_email, settings.PERFORMERS_LIST
)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def customer_payment_done_html(data=None):
recipients_list = [data["email"]]
subject, from_email = "Płatność zrealizowana.", settings.SENDER
text_message = f"Płatność zrealizowana, czekaj na kolejne wiadomości."
msg_html = render_to_string("emails/customer_payment_done.html", data)
msg = EmailMultiAlternatives(subject, text_message, from_email, recipients_list)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def performer_payment_done_html(data=None, url=None):
data["url"] = url
subject, from_email = "Zlecenie opłacone!", settings.SENDER
text_message = f"Użytkownik opłacił zlecenie! Zmień status na 'in_progress'."
msg_html = render_to_string("emails/performer_payment_done.html", data)
msg = EmailMultiAlternatives(
subject, text_message, from_email, settings.PERFORMERS_LIST
)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def customer_order_in_progress_html(data=None):
recipients_list = [data["email"]]
subject, from_email = "Zlecenie w trakcie realizacji.", settings.SENDER
text_message = f"Realizujemy zlecenie, czekaj na kolejne wiadomości."
msg_html = render_to_string("emails/customer_order_in_progress.html", data)
msg = EmailMultiAlternatives(subject, text_message, from_email, recipients_list)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def performer_order_in_progress_html(data=None, url=None):
data["url"] = url
subject, from_email = "Zmieniono status zlecenia.", settings.SENDER
text_message = f"Zmieniłeś status na 'W TRAKCIE REALIZACJI'."
msg_html = render_to_string("emails/performer_order_in_progress.html", data)
msg = EmailMultiAlternatives(
subject, text_message, from_email, settings.PERFORMERS_LIST
)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def customer_order_rejected(data=None):
recipients_list = [data["email"]]
subject, from_email = "Zlecenie odrzucone.", settings.SENDER
text_message = f"Niestety Twoje zlecenie zostało odrzucone."
msg_html = render_to_string("emails/customer_order_rejected.html", data)
msg = EmailMultiAlternatives(subject, text_message, from_email, recipients_list)
msg.attach_alternative(msg_html, "text/html")
msg.send()
@staticmethod
def customer_order_done_with_files(data=None, attachment=None):
recipients_list = [data["email"]]
subject, from_email = "Oto Twoje zrealizowane zlecenie.", settings.SENDER
text_message = f"Zrealizowane zlecenie z plikami."
msg_html = render_to_string("emails/customer_order_done.html", data)
msg = EmailMultiAlternatives(subject, text_message, from_email, recipients_list)
msg.attach_alternative(msg_html, "text/html")
for i in attachment:
msg.attach_file(i)
msg.send()
|
from django.contrib import admin
from imageapp.models import Image
# Register your models here.
admin.site.register(Image)
|
from uam.settings import UamBaseException, ErrorTypes
class TapAddError(UamBaseException):
pass
class TapAddConflict(TapAddError):
help_text = "{} is conflicted with existed taps."
def __init__(self, tap):
self.tap = tap
self.help_text = self.help_text.format(tap)
super(TapAddConflict, self).__init__()
class TapRemoveError(UamBaseException):
pass
class TapRemoveBuiltin(TapRemoveError):
help_text = "{} is builtin tap, can not be removed."
def __init__(self, alias):
self.alias = alias
self.help_text = self.help_text.format(alias)
super(TapRemoveBuiltin, self).__init__()
class TapRemoveNotFound(UamBaseException):
help_text = "{} not found in database."
def __init__(self, alias):
self.alias = alias
self.help_text = self.help_text.format(alias)
super(TapRemoveNotFound, self).__init__()
class TapUpdateError(UamBaseException):
pass
|
from Simulations.Environments import *
from Simulations.GameFeatures import GameFeatures as GF
class SetupFiles:
def simple_features():
map_setup = { "X": {"Color": GF.BlockColor.Black, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.White, "Effect": GF.BlockEffect.Nothing}}
features = GF()
features.add_map_layout(GF.MapLayout.SimpleMap, map_setup)
features.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
features.add_start_position((2, 7))
features.add_controls(GF.Controls.LeftRight)
features.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(2, 2), (2, 8), (2, 13)], effect_strength=1, respawn_time=20)
features.add_goals([GF.GoalSystem.Resources])
return [features]
def simple_group_features():
map_setup = { "X": {"Color": GF.BlockColor.Black, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.White, "Effect": GF.BlockEffect.Nothing}}
f0 = GF()
f0.add_map_layout(GF.MapLayout.SimpleMap, map_setup)
f0.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f0.add_start_position((2, 7))
f0.add_controls(GF.Controls.LeftRight)
f0.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(2, 2), (2, 8), (2, 13)], effect_strength=1, respawn_time=20)
f0.add_goals([GF.GoalSystem.Resources])
f1 = GF()
f1.add_map_layout(GF.MapLayout.SimpleMap, map_setup)
f1.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f1.add_start_position((2, 7))
f1.add_controls(GF.Controls.LeftRight)
f1.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(2, 2)], effect_strength=1, respawn_time=20)
f1.add_goals([GF.GoalSystem.Resources])
f2 = GF()
f2.add_map_layout(GF.MapLayout.SimpleMap, map_setup)
f2.add_block(GF.BlockColor.Blue, GF.BlockEffect.Block, [(1, 1)])
f2.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f2.add_start_position((2, 7))
f2.add_controls(GF.Controls.LeftRight)
f2.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(2, 2), (2, 8), (2, 13)], effect_strength=1, respawn_time=20)
f2.add_goals([GF.GoalSystem.Resources])
f3 = GF()
f3.add_map_layout(GF.MapLayout.SimpleMap, map_setup)
f3.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f3.add_start_position((2, 7))
f3.add_controls(GF.Controls.LeftRight)
f3.add_item(GF.ItemColor.Blue, GF.ItemEffect.TakePoints, GF.ItemBehaviour.Respawn, [(2, 8), (2, 13)], effect_strength=1, respawn_time=20)
f3.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(2, 2)], effect_strength=1, respawn_time=20)
f3.add_goals([GF.GoalSystem.Resources, GF.GoalSystem.Time])
return [f0, f1, f2, f3]
def simple_features2():
map_setup = { "X": {"Color": GF.BlockColor.Black, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.White, "Effect": GF.BlockEffect.Nothing}}
features = GF()
features.add_map_layout(GF.MapLayout.SimpleMap2, map_setup)
features.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
features.add_start_position((6, 7))
features.add_controls(GF.Controls.LeftRight)
features.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(6, 2), (6, 8), (6, 13)], effect_strength=1, respawn_time=20)
features.add_goals([GF.GoalSystem.Resources])
return [features]
def simple_group_features2():
map_setup = { "X": {"Color": GF.BlockColor.Black, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.White, "Effect": GF.BlockEffect.Nothing}}
f0 = GF()
f0.add_map_layout(GF.MapLayout.SimpleMap2, map_setup)
f0.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f0.add_start_position((6, 7))
f0.add_controls(GF.Controls.LeftRight)
f0.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(6, 2), (6, 8), (6, 13)], effect_strength=1, respawn_time=20)
f0.add_goals([GF.GoalSystem.Resources])
f1 = GF()
f1.add_map_layout(GF.MapLayout.SimpleMap2, map_setup)
f1.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f1.add_start_position((6, 7))
f1.add_controls(GF.Controls.LeftRight)
f1.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(6, 2)], effect_strength=1, respawn_time=20)
f1.add_goals([GF.GoalSystem.Resources])
f2 = GF()
f2.add_map_layout(GF.MapLayout.SimpleMap2, map_setup)
f2.add_block(GF.BlockColor.Blue, GF.BlockEffect.Block, [(1, 1)])
f2.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f2.add_start_position((6, 7))
f2.add_controls(GF.Controls.LeftRight)
f2.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(6, 2), (6, 8), (6, 13)], effect_strength=1, respawn_time=20)
f2.add_goals([GF.GoalSystem.Resources])
f3 = GF()
f3.add_map_layout(GF.MapLayout.SimpleMap2, map_setup)
f3.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f3.add_start_position((6, 7))
f3.add_controls(GF.Controls.LeftRight)
f3.add_item(GF.ItemColor.Blue, GF.ItemEffect.TakePoints, GF.ItemBehaviour.Respawn, [(6, 8), (6, 13)], effect_strength=1, respawn_time=20)
f3.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(6, 2)], effect_strength=1, respawn_time=20)
f3.add_goals([GF.GoalSystem.Resources, GF.GoalSystem.Time])
return [f0, f1, f2, f3]
def first_features():
map_setup = { "X": {"Color": GF.BlockColor.Black, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.White, "Effect": GF.BlockEffect.Nothing} }
f1 = GF()
f1.add_map_layout(GF.MapLayout.DefaultMap1, map_setup)
f1.add_start_position((3, 3))
f1.add_controls(GF.Controls.UpDown)
f1.add_block(GF.BlockColor.Red, GF.BlockEffect.TakeLife, [(4, 5), (4, 10), (6, 7), (6, 8), (7, 5), (7, 10), (8, 6), (8, 7), (8, 8), (8, 9), (9, 3)], effect_strength=5)
f2 = GF()
f2.add_map_layout(GF.MapLayout.DefaultMap2, map_setup)
f2.add_start_position((3, 3))
f2.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(12, 5), (3, 6)], effect_strength=10, respawn_time=4)
f2.add_controls(GF.Controls.LeftRight)
f3 = GF()
f3.add_map_layout(GF.MapLayout.DefaultMap2, map_setup)
f3.add_start_position((3, 3))
f3.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.Kill, GF.MonsterBehaviour.LeftRight, [(7, 6)], speed=2)
f3.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.Kill, GF.MonsterBehaviour.UpDown, [(12, 9)])
f3.add_controls(GF.Controls.AllArrows)
f4 = GF()
f4.add_map_layout(GF.MapLayout.Empty14x14, map_setup)
f4.add_start_position((7, 7))
f4.add_controls(GF.Controls.AllArrows)
f4.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Blue, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.DiagonalLeft, [(8, 8)], effect_strength=10)
f4.add_survival_property(GF.SurvivalProperty.Starve, strength=2, interval=3)
f4.add_survival_property(GF.SurvivalProperty.Slowdown, strength=2)
return [f1, f2, f3, f4]
def project_features():
SS1_map_setup = { "X": {"Color": GF.BlockColor.Black, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.White, "Effect": GF.BlockEffect.Nothing} }
SS12_map_setup = { "X": {"Color": GF.BlockColor.Red, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.Blue, "Effect": GF.BlockEffect.Nothing} }
SS13_map_setup = { "X": {"Color": GF.BlockColor.Blue, "Effect": GF.BlockEffect.Block},
"O": {"Color": GF.BlockColor.Red, "Effect": GF.BlockEffect.Nothing} }
goals = [GF.GoalSystem.Time, GF.GoalSystem.Resources] #, GF.GoalSystem.Safety
f0 = GF()
f0.add_map_layout(GF.MapLayout.DefaultMap2, SS1_map_setup)
f0.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f0.add_item(GF.ItemColor.Yellow, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(3, 12), (5, 5), (7, 9), (10, 13)], respawn_time=30, effect_strength=1)
f0.add_item(GF.ItemColor.Green, GF.ItemEffect.GiveLife, GF.ItemBehaviour.Respawn, [(2, 2), (4, 12), (11, 4), (13, 9)], respawn_time=30, effect_strength=2)
f0.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.UpDown, [(12, 6)], speed=1, effect_strength=5)
f0.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.LeftRight, [(7, 6)], speed=1, effect_strength=1)
f0.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.LeftRight, [(13, 8)], speed=1, effect_strength=1)
f0.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.UpDown, [(12, 3)], speed=1, effect_strength=1)
f0.add_block(GF.BlockColor.Green, GF.BlockEffect.GiveLife, [(11, 8)], effect_strength=1)
f0.add_start_position((3, 3))
f0.add_goals(goals)
f1 = GF()
f1.add_map_layout(GF.MapLayout.DefaultMap2, SS1_map_setup)
f1.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f1.add_item(GF.ItemColor.Yellow, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(3, 12), (5, 5), (7, 9), (10, 13)], respawn_time=30, effect_strength=1)
f1.add_item(GF.ItemColor.Green, GF.ItemEffect.GiveLife, GF.ItemBehaviour.Respawn, [(2, 2), (4, 12), (11, 4), (13, 9)], respawn_time=30, effect_strength=2)
f1.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.LeftRight, [(13, 8)], speed=1, effect_strength=1)
f1.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.UpDown, [(12, 3)], speed=1, effect_strength=1)
f1.add_start_position((3, 3))
f1.add_goals(goals)
f2 = GF()
f2.add_map_layout(GF.MapLayout.DefaultMap2, SS1_map_setup)
f2.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f2.add_item(GF.ItemColor.Yellow, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(3, 12), (5, 5), (7, 9), (10, 13)], respawn_time=30, effect_strength=1)
f2.add_item(GF.ItemColor.Green, GF.ItemEffect.GiveLife, GF.ItemBehaviour.Respawn, [(2, 2), (4, 12), (11, 4), (13, 9)], respawn_time=30, effect_strength=2)
f2.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.UpDown, [(12, 6)], speed=1, effect_strength=5)
f2.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.LeftRight, [(7, 6)], speed=1, effect_strength=1)
f2.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.LeftRight, [(13, 8)], speed=1, effect_strength=1)
f2.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.UpDown, [(12, 3)], speed=1, effect_strength=1)
f2.add_block(GF.BlockColor.Green, GF.BlockEffect.GiveLife, [(11, 8)], effect_strength=1)
f2.add_start_position((3, 3))
f2.add_goals(goals)
f3 = GF()
f3.add_map_layout(GF.MapLayout.DefaultMap3, SS1_map_setup)
f3.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f3.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(6, 2), (13, 2), (2, 13), (8, 13)], respawn_time=40, effect_strength=1)
f3.add_item(GF.ItemColor.Green, GF.ItemEffect.GiveLife, GF.ItemBehaviour.Respawn, [(4, 6), (5, 7), (11, 7), (10, 9), (9, 2)], respawn_time=40, effect_strength=2)
f3.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.UpDown, [(11, 13)], speed=1, effect_strength=1)
f3.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.LeftRight, [(12, 10)], speed=1, effect_strength=1)
f3.add_start_position((3, 3))
f3.add_goals(goals)
f4 = GF()
f4.add_map_layout(GF.MapLayout.DefaultMap3, SS1_map_setup)
f4.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f4.add_item(GF.ItemColor.Blue, GF.ItemEffect.GivePoints, GF.ItemBehaviour.Respawn, [(6, 2), (13, 2), (2, 13), (8, 13)], respawn_time=40, effect_strength=1)
f4.add_item(GF.ItemColor.Green, GF.ItemEffect.GiveLife, GF.ItemBehaviour.Respawn, [(4, 6), (5, 7), (11, 7), (10, 9), (9, 2)], respawn_time=40, effect_strength=2)
f4.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.LeftRight, [(2, 9), (10, 10)], speed=1, effect_strength=1)
f4.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.TakeLife, GF.MonsterBehaviour.UpDown, [(11, 5)], speed=1, effect_strength=1)
f4.add_block(GF.BlockColor.Red, GF.BlockEffect.TakeLife, [(3, 11), (9, 3), (12, 6), (13, 8), (13, 9)], effect_strength=1)
f4.add_start_position((3, 3))
f4.add_goals(goals)
f5 = GF()
f5.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f5.add_map_layout(GF.MapLayout.DefaultMap1, SS12_map_setup)
f5.add_item(GF.ItemColor.Blue, GF.ItemEffect.TakeLife, GF.ItemBehaviour.Respawn, [(9, 5), (6, 9), (8, 13), (11, 12), (13, 9)], 1, 5)
f5.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Red, GF.MonsterEffect.GiveLife, GF.MonsterBehaviour.DiagonalRight, [(11, 5), (11, 11)], 2, 1)
f5.add_block(GF.BlockColor.Green, GF.BlockEffect.TakeLife, [(5, 6), (3, 12), (12, 3), (9, 9)], 1)
f5.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.White, GF.MonsterEffect.GivePoints, GF.MonsterBehaviour.DiagonalRight, [(11, 7), (4, 11)], 2, 1)
f5.add_start_position((3, 4))
f5.add_goals(goals)
f6 = GF()
f6.add_survival_property(GF.SurvivalProperty.Starve, interval=10, strength=1)
f6.add_map_layout(GF.MapLayout.DefaultMap4, SS13_map_setup)
f6.add_start_position((4, 10))
f6.add_item(GF.ItemColor.Yellow, GF.ItemEffect.TakeLife, GF.ItemBehaviour.Nothing, [(2, 5), (2, 9), (2, 10), (3, 6), (5, 6), (7, 5), (7, 8), (8, 10), (9, 5), (9, 12), (10, 7), (10, 12), (11, 12), (12, 9), (14, 8)], 1, 25)
f6.add_monster(GF.MonsterType.Invincible, GF.MonsterColor.Black, GF.MonsterEffect.Nothing, GF.MonsterBehaviour.Nothing, [(6, 4), (6, 11), (7, 7), (8, 8), (9, 4)], 0, 0)
f6.add_block(GF.BlockColor.Green, GF.BlockEffect.Nothing, [(3, 7), (3, 8), (12, 7), (12, 8), (7, 3), (8, 3), (7, 12), (8, 12)], 0)
f6.add_goals(goals)
return [f0, f1, f2, f3, f4, f5, f6]
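# Illustrative sketch (assumption, not part of the original file): the feature
# lists built above are presumably consumed by a game runner elsewhere in the
# project; ``run_level`` below is a hypothetical name for that entry point.
#
#   for level, feature in enumerate(project_features()):
#       run_level(feature, level=level)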
|
# coding: utf8
from six import unichr
from six.moves import html_entities
from zeit.wysiwyg.util import contains_element
import copy
import datetime
import lxml.builder
import lxml.etree
import lxml.html.soupparser
import lxml.objectify
import pendulum
import pytz
import six
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zeit.cms.interfaces
import zeit.cms.relation.interfaces
import zeit.cms.repository.interfaces
import zeit.wysiwyg.interfaces
import zope.cachedescriptors.property
import zope.component
import zope.interface
import zope.interface.common.idatetime
import zope.security.management
import zope.security.proxy
import zope.traversing.interfaces
@zope.component.adapter(zope.interface.Interface)
@zope.interface.implementer(zeit.wysiwyg.interfaces.IHTMLConverter)
class HTMLConverter(object):
"""General XML to HTML converter.
This html converter doesn't operate on `context` and is registered for all
objects. If a content object requires a more specialised adapter it can be
registered easily.
The actual conversion work is delegated to `ConversionStep`s which are
registered as named adapters, and sorted by their order specific to the
conversion direction.
"""
def __init__(self, context):
self.context = context
self.storage = {}
def to_html(self, tree):
"""converts XML `tree` to HTML."""
tree = zope.security.proxy.removeSecurityProxy(tree)
# copy so the conversion steps can work in-place
tree = self._copy(tree)
self._apply_steps(tree, 'xpath_xml', 'to_html')
result = []
for child in tree.iterchildren():
result.append(lxml.etree.tostring(
child, pretty_print=True, encoding=six.text_type))
return ''.join(result)
def from_html(self, tree, value):
"""converts the HTML `value` to XML and sets it on `tree`."""
tree = zope.security.proxy.removeSecurityProxy(tree)
self._clear(tree)
if not value:
# We're done. Just don't do anything.
return
value = '<div>' + self._replace_entities(value) + '</div>'
__traceback_info__ = (value,)
html = lxml.etree.fromstring(value)
self._apply_steps(html, 'xpath_html', 'to_xml')
for node in html.iterchildren():
# support tails at the toplevel by faking a wrapper node
xml = '<foo>%s</foo>' % lxml.etree.tostring(
node, encoding=six.text_type)
objectified = lxml.objectify.fromstring(xml)
for child in objectified.iterchildren():
tree.append(child)
zope.security.proxy.removeSecurityProxy(self.context)._p_changed = 1
def _steps(self, direction):
steps = [adapter for name, adapter in zope.component.getAdapters(
(self.context, self), zeit.wysiwyg.interfaces.IConversionStep)]
steps = sorted(steps, key=lambda x: getattr(x, 'order_' + direction))
return steps
def _apply_steps(self, tree, xpath, method):
for adapter in self._steps(direction=method):
xp = getattr(adapter, xpath)
if xp is SKIP:
continue
convert = getattr(adapter, method)
for node in tree.xpath(xp):
filtered = convert(node)
if filtered is not None:
node.getparent().replace(node, filtered)
filtered.tail = node.tail
def references(self, tree):
return self._retrieve_content(self._extract_referenced_ids(tree))
def _extract_referenced_ids(self, tree):
result = []
tree = zope.security.proxy.removeSecurityProxy(tree)
for adapter in self._steps('to_html'):
xp = getattr(adapter, 'xpath_xml')
if xp is SKIP:
continue
for node in tree.xpath(xp):
result.extend(adapter.references(node))
return result
def _retrieve_content(self, ids):
result = []
for id in ids:
if not id:
continue
if not id.startswith(zeit.cms.interfaces.ID_NAMESPACE):
continue
obj = zeit.cms.interfaces.ICMSContent(id, None)
if obj is not None:
result.append(obj)
return result
def covered_xpath(self):
"""return an xpath query that matches all nodes for which there is a
ConversionStep registered."""
xpath = [s.xpath_xml for s in self._steps('to_html')
if s.xpath_xml is not SKIP and s.xpath_xml != '.']
return '|'.join(xpath)
def _copy(self, tree):
"""return a copy of `tree` that contains only those nodes we know how
to deal with."""
root = lxml.builder.E.body()
xpath = self.covered_xpath()
if not xpath:
return root
covered = tree.xpath(xpath)
for child in tree.iterchildren():
if contains_element(covered, child):
root.append(copy.copy(child))
return root
def _clear(self, tree):
"""removes all nodes we want to edit from `tree`"""
for node in tree.xpath(self.covered_xpath()):
parent = node.getparent()
parent.remove(node)
@staticmethod
def _replace_entities(value):
# XXX is this efficient enough?
for entity_name, codepoint in html_entities.name2codepoint.items():
if entity_name in ('gt', 'lt', 'quot', 'amp', 'apos'):
# don't replace XML built-in entities
continue
value = value.replace('&' + entity_name + ';', unichr(codepoint))
return value
SKIP = object()
@zope.component.adapter(
zope.interface.Interface, zeit.wysiwyg.interfaces.IHTMLConverter)
@zope.interface.implementer(zeit.wysiwyg.interfaces.IConversionStep)
class ConversionStep(object):
"""Encapsulates one step of XML<-->HTML conversion.
to_html() is called with each XML node matching xpath_xml, while
to_xml() is called with each HTML node matching xpath_html.
They should work in-place, but can optionally return a value which will be
used to replace the original node.
The XPath is applied against the root node, so if you want to do special
processing (such as dropping or rearranging nodes), use '.' to be called
just once, with the root node.
To specify the order in which the steps are applied, set their orders
specific to the conversion direction.
The adapter context can be used to fine-tune for which objects a conversion
step applies, but probably should not be touched for anything else.
Remember to register steps as named adapters (the name itself doesn't
matter), so the HTMLConverter can pick them all up using getAdapters().
"""
order_to_html = 0.0
order_to_xml = 0.0
def __init__(self, context, converter):
self.context = context
self.converter = converter
self.request = (
zope.security.management.getInteraction().participations[0])
# override in subclass
xpath_xml = SKIP
xpath_html = SKIP
def to_html(self, node):
raise NotImplementedError(
"when specifiyng xpath_xml, to_html() must be implemented")
def to_xml(self, node):
raise NotImplementedError(
"when specifiyng xpath_html, to_xml() must be implemented")
def references(self, node):
return []
@zope.cachedescriptors.property.Lazy
def repository(self):
return zope.component.getUtility(
zeit.cms.repository.interfaces.IRepository)
def url(self, obj):
return zope.component.getMultiAdapter(
(obj, self.request), name='absolute_url')()
def datetime_to_html(self, dt_string):
dt = ''
if dt_string:
try:
dt = pendulum.parse(dt_string)
except ValueError:
pass
else:
tz = zope.interface.common.idatetime.ITZInfo(self.request)
dt = dt.astimezone(tz).strftime('%Y-%m-%d %H:%M')
return dt
def datetime_to_xml(self, dt_string):
dt = ''
if dt_string:
try:
dt = datetime.datetime.strptime(dt_string, '%Y-%m-%d %H:%M')
except ValueError:
dt = ''
else:
tz = zope.interface.common.idatetime.ITZInfo(self.request)
dt = tz.localize(dt).isoformat()
return dt
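# Illustrative sketch (not part of the original module): a minimal custom
# conversion step.  The <quote>/<blockquote> tag names and the
# provideAdapter() call are assumptions for illustration only; in practice
# steps are registered as named adapters via ZCML so that
# HTMLConverter._steps() can collect them with getAdapters().
#
#   class QuoteStep(ConversionStep):
#       xpath_xml = './/quote'
#       xpath_html = './/blockquote'
#
#       def to_html(self, node):
#           node.tag = 'blockquote'
#
#       def to_xml(self, node):
#           node.tag = 'quote'
#
#   zope.component.provideAdapter(QuoteStep, name='quote')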
class DropEmptyStep(ConversionStep):
"""Drop empty HTML toplevel nodes."""
order_to_xml = -1.0
xpath_html = 'child::*'
def to_xml(self, node):
if node.get('keep'):
del node.attrib['keep']
return
if len(node):
return
if node.text and node.text.strip():
return
if node.attrib:
return
node.getparent().remove(node)
class TagReplaceStep(ConversionStep):
"""Convert between XML tag names and HTML ones."""
order_to_html = 0.9
order_to_xml = -0.9
xpath_html = './/*'
xml_html_tags = {
'intertitle': 'h3',
}
html_xml_tags = dict((value, key) for key, value in xml_html_tags.items())
assert len(html_xml_tags) == len(xml_html_tags)
def __init__(self, context, converter):
super(TagReplaceStep, self).__init__(context, converter)
self.xpath_xml = '|'.join(['.//%s' % tag for tag
in self.xml_html_tags.keys()])
def to_html(self, node):
new_tag = self.xml_html_tags.get(node.tag)
if new_tag is not None:
node.tag = new_tag
def to_xml(self, node):
new_tag = self.html_xml_tags.get(node.tag)
if new_tag is not None:
node.tag = new_tag
class PassThroughStep(ConversionStep):
"""Allow some XML tags to pass through to HTML.
This adapter exists only for the side effect of making its xpath_xml
known to HTMLConverter.covered_xpath()
"""
allow_tags = ['p', 'ul', 'ol']
def __init__(self, context, converter):
super(PassThroughStep, self).__init__(context, converter)
self.xpath_xml = '|'.join(['.//%s' % tag for tag in self.allow_tags])
def to_html(self, node):
pass
class TableStep(ConversionStep):
"""Clean up <table>s.
"""
xpath_xml = './/table'
xpath_html = './/table'
def to_html(self, node):
pass
def to_xml(self, node):
for name in node.attrib:
del node.attrib[name]
ContentOnlyElements = (
lxml.etree._Comment, lxml.etree._ProcessingInstruction, lxml.etree._Entity)
class NormalizeToplevelStep(ConversionStep):
"""Normalize any toplevel tag we don't have a ConversionStep for to <p>."""
order_to_xml = 1.0
xpath_html = '.'
def to_xml(self, node):
# XXX having to go back to the HTMLConverter is a little kludgy
xpath = self.converter.covered_xpath()
covered = node.xpath(xpath)
for child in node.iterchildren():
if isinstance(child, ContentOnlyElements):
continue
if child not in covered:
child.tag = 'p'
class NestedParagraphsStep(ConversionStep):
"""Un-nest nested <p>s."""
order_to_xml = 1.0
xpath_html = 'child::p'
def to_xml(self, node):
p_nodes = node.xpath('p')
parent_index = node.getparent().index(node)
for i, insert in enumerate(p_nodes):
node.getparent().insert(parent_index + i + 1, insert)
if p_nodes:
node.getparent().remove(node)
class ImageStructureStep(ConversionStep):
def _convert(self, node):
# Hacky support for linked images (#10033)
if node.getparent().tag == 'a':
return
if node.tail:
if node.getprevious() is not None:
self.append_or_set(node.getprevious(), 'tail', node.tail)
else:
self.append_or_set(node.getparent(), 'text', node.tail)
node.tail = None
body = node
insert_before = None
while not self._is_root(body, node):
insert_before = body
body = body.getparent()
insert_before.addprevious(node)
def append_or_set(self, node, property, value):
current_value = getattr(node, property)
if current_value is not None:
value = current_value + value
setattr(node, property, value)
def _is_root(self, element, orig_node):
raise NotImplementedError('override in subclass')
class HTMLImageStructureStep(ImageStructureStep):
xpath_html = './*//img'
order_to_xml = -2
def to_xml(self, node):
return self._convert(node)
def _is_root(self, element, orig_node):
return element == orig_node.getroottree().getroot()
class XMLImageStructureStep(ImageStructureStep):
xpath_xml = './*//image'
order_to_html = -2
def to_html(self, node):
return self._convert(node)
def _is_root(self, element, orig_node):
return element.tag == 'body'
class ImageStep(ConversionStep):
"""Replace XML <image/> by HTML <img/> and vice versa."""
xpath_xml = './/image'
xpath_html = './/img'
def to_html(self, node):
unique_id = node.get('src', '')
if unique_id.startswith('/cms/work/'):
unique_id = unique_id.replace(
'/cms/work/', zeit.cms.interfaces.ID_NAMESPACE)
url = unique_id
if unique_id.startswith(zeit.cms.interfaces.ID_NAMESPACE):
try:
image = self.repository.getContent(unique_id)
except KeyError:
pass
else:
url = self.url(image) + '/@@raw'
img = lxml.builder.E.img()
if url:
img.set('src', url)
layout = node.get('layout')
if layout:
img.set('title', layout)
new_node = img
if node.getparent().tag != 'a':
# wrap in a <p> so html is happy
new_node = lxml.builder.E.p(img)
return new_node
def to_xml(self, node):
repository_url = self.url(self.repository) + '/'
url = node.get('src')
new_node = None
if url and url.startswith(repository_url):
unique_id = url.replace(
repository_url, zeit.cms.interfaces.ID_NAMESPACE, 1).replace(
'/@@raw', '')
try:
image = self.repository.getContent(unique_id)
except KeyError:
# Not known to the cms.
pass
else:
new_node = zope.component.queryAdapter(
image, zeit.cms.content.interfaces.IXMLReference,
name='image')
if new_node is None:
# An XML reference could not be created because the object could
# not be found. Instead of just removing the image we create an
# image tag with the url we've got. This way it is also possible to
# create images to other servers.
new_node = lxml.builder.E.image()
if url:
new_node.set('src', url)
layout = node.get('title')
if layout:
new_node.set('layout', layout)
return new_node
def references(self, node):
unique_id = node.get('src', '')
return [unique_id]
class URLStep(ConversionStep):
"""Convert uniqueIds to clickable CMS-URLs and vice versa."""
xpath_xml = './/a'
xpath_html = './/a'
def _id_to_url(self, id):
try:
obj = self.repository.getContent(id)
except (KeyError, ValueError):
return id
return self.url(obj)
def _url_to_id(self, url):
"""Produce unique id from url if possible."""
repository_url = self.url(self.repository)
if not url.startswith(repository_url):
return url
path = url[len(repository_url) + 1:]
obj = zope.traversing.interfaces.ITraverser(self.repository).traverse(
path, None)
if not zeit.cms.interfaces.ICMSContent.providedBy(obj):
return url
return obj.uniqueId
def to_html(self, node):
id = node.get('href')
if id:
node.set('href', self._id_to_url(id))
def to_xml(self, node):
url = node.get('href')
if url:
node.set('href', self._url_to_id(url))
class AudioStep(ConversionStep):
"""Make editable."""
xpath_xml = './/audio'
xpath_html = ('.//*[contains(@class, "inline-element") and '
'contains(@class, "audio")]')
def to_html(self, node):
id_ = node.get('audioID')
expires = self.datetime_to_html(node.get('expires'))
new_node = lxml.builder.E.div(
lxml.builder.E.div(id_, **{'class': 'audioId'}),
lxml.builder.E.div(expires, **{'class': 'expires'}),
**{'class': 'inline-element audio'})
return new_node
def to_xml(self, node):
id_nodes = node.xpath('div[@class="audioId"]')
id_ = expires = ''
if id_nodes:
id_ = id_nodes[0].text
nodes = node.xpath('div[@class="expires"]')
if nodes:
expires = self.datetime_to_xml(nodes[0].text)
new_node = lxml.builder.E.audio(audioID=id_, expires=expires)
return new_node
class VideoStep(ConversionStep):
"""Make <video> editable."""
xpath_xml = './/video'
xpath_html = ('.//*[contains(@class, "inline-element") and '
'contains(@class, "video")]')
def to_html(self, node):
id1 = node.get('href') or ''
id2 = node.get('href2') or ''
expires = self.datetime_to_html(node.get('expires'))
format = node.get('format') or ''
new_node = lxml.builder.E.div(
lxml.builder.E.div(id1, **{'class': 'videoId'}),
lxml.builder.E.div(id2, **{'class': 'videoId2'}),
lxml.builder.E.div(expires, **{'class': 'expires'}),
lxml.builder.E.div(format, **{'class': 'format'}),
**{'class': 'inline-element video'})
return new_node
def to_xml(self, node):
id_nodes = node.xpath('div[contains(@class, "videoId")]')
id1 = id2 = expires = format = ''
if id_nodes:
id1 = id_nodes[0].text
if len(id_nodes) > 1:
id2 = id_nodes[1].text
nodes = node.xpath('div[@class="expires"]')
if nodes:
user_expires = self.datetime_to_xml(nodes[0].text)
else:
user_expires = None
expires = self._expires(id1, id2, user_expires)
def get_id_player(video_id):
if video_id and video_id.startswith('http://video.zeit.de/'):
video_id = video_id.replace('http://video.zeit.de/', '', 1)
if '/' in video_id:
type_, id_ = video_id.split('/', 1)
type_ = 'pls' if type_ == 'playlist' else 'vid'
return id_, type_
return '', ''
old_id, player1 = get_id_player(id1)
old_id2, player2 = get_id_player(id2)
nodes = node.xpath('div[@class="format"]')
if nodes:
format = nodes[0].text or ''
new_node = lxml.builder.E.video(
href=id1 or '', href2=id2 or '',
expires=expires, format=format)
return new_node
# XXX duplicated code in zeit.brightcove.asset
def _expires(self, video1, video2, user_entered):
"""returns the earliest expire date of the two objects (the
user-entered value takes precedence)"""
if user_entered:
return user_entered
# an expires value might
# - not exist on the object (if it's a Playlist)
# - exist but be None (if a Video doesn't expire)
all_expires = []
maximum = datetime.datetime(datetime.MAXYEAR, 12, 31, tzinfo=pytz.UTC)
for id in [video1, video2]:
video = zeit.cms.interfaces.ICMSContent(id, None)
expires = getattr(video, 'expires', None)
if expires is None:
expires = maximum
all_expires.append(expires)
expires = min(all_expires)
if expires == maximum:
return ''
return expires.isoformat()
class RawXMLProtectStep(ConversionStep):
"""Contents of <raw> must not be subjected to any other conversion steps.
"""
order_to_html = -50
order_to_xml = -50
xpath_xml = './/raw'
xpath_html = './/*[contains(@class, "raw")]'
def to_html(self, node):
self.store(node)
return node
def to_xml(self, node):
self.store(node)
return node
def store(self, node):
storage = self.converter.storage.setdefault('raw', {})
children = storage[node] = list(node.iterchildren())
for child in children:
node.remove(child)
class RawXMLUnprotectStep(ConversionStep):
"""Contents of <raw> must not be subjected to any other conversion steps.
"""
order_to_html = 50
order_to_xml = 50
xpath_xml = './/raw'
xpath_html = './/*[contains(@class, "raw")]'
def to_html(self, node):
self.restore(node)
return node
def to_xml(self, node):
self.restore(node)
return node
def restore(self, node):
storage = self.converter.storage.setdefault('raw', {})
for child in storage[node]:
node.append(child)
class RawXMLStep(ConversionStep):
"""Make <raw> editable."""
order_to_html = 51
order_to_xml = 51
xpath_xml = './/raw'
xpath_html = './/*[contains(@class, "raw")]'
def to_html(self, node):
result = []
for child in node.iterchildren():
result.append(lxml.etree.tostring(
child, pretty_print=True, encoding=six.text_type))
text = '\n'.join(result)
new_node = lxml.builder.E.div(
text, **{'class': 'inline-element raw'})
return new_node
def to_xml(self, node):
new_node = lxml.html.soupparser.fromstring(node.text)
new_node.tag = 'raw'
return new_node
class ReferenceStep(ConversionStep):
content_type = None # override in subclass
def __init__(self, context, converter):
super(ReferenceStep, self).__init__(context, converter)
self.xpath_xml = './/%s' % self.content_type
self.xpath_html = './/*[contains(@class, "%s")]' % self.content_type
def to_html(self, node):
href = node.get('href') or ''
new_node = lxml.builder.E.div(
lxml.builder.E.div(href, **{'class': 'href'}),
**{'class': 'inline-element %s' % self.content_type})
return new_node
def to_xml(self, node):
unique_id = node.xpath('*[contains(@class, "href")]')[0].text or ''
# Some metadata may be None, which objectify accepts for creating child
# nodes, but etree doesn't. Fortunately, etree doesn't mind smuggling
# in an objectify node here.
factory = getattr(lxml.objectify.E, self.content_type)
new_node = factory(href=unique_id)
content = zeit.cms.interfaces.ICMSContent(unique_id, None)
if content is not None:
updater = zeit.cms.content.interfaces.IXMLReferenceUpdater(content)
updater.update(new_node)
return new_node
def references(self, node):
unique_id = node.get('href')
return [unique_id]
class PortraitboxStep(ReferenceStep):
content_type = 'portraitbox'
def to_html(self, node):
new_node = super(PortraitboxStep, self).to_html(node)
layout = lxml.builder.E.div(
node.get('layout') or '', **{'class': 'layout'})
new_node.append(layout)
return new_node
def to_xml(self, node):
new_node = super(PortraitboxStep, self).to_xml(node)
layout = node.xpath('*[@class="layout"]')[0].text or ''
new_node.set('layout', layout)
return new_node
class InfoboxStep(ReferenceStep):
content_type = 'infobox'
class TimelineStep(ReferenceStep):
content_type = 'timeline'
class GalleryStep(ReferenceStep):
content_type = 'gallery'
class CitationStep(ConversionStep):
attributes = ['text', 'text2',
'attribution', 'attribution2',
'url', 'url2',
'layout']
xpath_xml = './/citation'
xpath_html = './/*[contains(@class, "citation")]'
def to_html(self, node):
children = []
for name in self.attributes:
children.append(lxml.builder.E.div(
node.get(name) or ' ', **{'class': name}))
new_node = lxml.builder.E.div(
*children, **{'class': 'inline-element citation'})
return new_node
def to_xml(self, node):
values = {}
for name in self.attributes:
value = node.xpath('*[@class="%s"]' % name)[0].text
if value and value.strip():
values[name] = value.strip()
new_node = lxml.builder.E.citation(**values)
return new_node
class InlineElementAppendParagraph(ConversionStep):
"""Add an empty paragraph after each inline element.
    This is necessary to allow positioning the cursor between two blocks.
The empty paragraph stripper will remove those paragraphs when converting
to XML.
"""
order_to_html = 100
xpath_xml = './/*[contains(@class, "inline-element")]'
def to_html(self, node):
index = node.getparent().index(node)
        # Note: the paragraph content is a single non-breaking space (U+00A0).
        p = lxml.builder.E.p(u'\xa0')
node.getparent().insert(index + 1, p)
return node
@zope.interface.implementer(zeit.wysiwyg.interfaces.IHTMLContent)
class HTMLContentBase(object):
"""Base class for html content."""
def __init__(self, context):
self.context = context
@property
def html(self):
return self.convert.to_html(self.get_tree())
@html.setter
def html(self, value):
return self.convert.from_html(self.get_tree(), value)
@property
def references(self):
return self.convert.references(self.get_tree())
@property
def convert(self):
return zeit.wysiwyg.interfaces.IHTMLConverter(self.context)
def get_tree(self):
raise NotImplementedError("Implemented in subclass.")
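# Illustrative sketch (not part of the original module): a concrete
# IHTMLContent adapter only needs to say which XML subtree it edits.
# ``self.context.xml.body`` is a hypothetical example of such a subtree.
#
#   class ExampleHTMLContent(HTMLContentBase):
#       def get_tree(self):
#           return self.context.xml.body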
|
"""
@Project : DuReader
@Module : construct_iterable.py
@Author : Deco [deco@cubee.com]
@Created : 5/18/18 3:36 PM
@Desc :
"""
import os
import gensim
class MySentences(object):
def __init__(self, dirname):
self.dirname = dirname
    def __iter__(self):
        for fname in os.listdir(self.dirname):
            # open each file lazily so the whole corpus never has to fit in memory
            with open(os.path.join(self.dirname, fname)) as fh:
                for line in fh:
                    yield line.split()
sentences = MySentences('/some/directory') # a memory-friendly iterator
model = gensim.models.Word2Vec(sentences)
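# Usage sketch: once trained, the learned vectors can be queried, e.g. for
# nearest neighbours.  'computer' is an arbitrary example token and is assumed
# to occur in the corpus read from /some/directory.
print(model.wv.most_similar('computer', topn=5))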
|
import datetime
import pytz as pytz
date_time_str = '2018-06-29 08:15:27.243860'
date_time_obj = datetime.datetime.strptime(date_time_str, '%Y-%m-%d %H:%M:%S.%f')
print(date_time_obj)
data = "2019-11-11T06:00:00-05:00"
# result2 = datetime.datetime.strptime(data[0:23], "%Y-%m-%dT%H:%M:%S")
org_dt = datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)
start_dt = (datetime.datetime.fromisoformat(data) - org_dt).total_seconds()
def should_system_be_armed(event_start_str, event_end_str):
    """
    Convert the start and end ISO time strings to datetimes, then check whether
    the current time falls between them. Returns True if it does, otherwise
    False.
    :param event_start_str:
    :param event_end_str:
    :return:
    """
    org_dt = datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)
    start_time_in_sec = (datetime.datetime.fromisoformat(event_start_str) - org_dt).total_seconds()
    end_time_in_sec = (datetime.datetime.fromisoformat(event_end_str) - org_dt).total_seconds()
    current_time_in_sec = (datetime.datetime.now(pytz.utc) - org_dt).total_seconds()
    return start_time_in_sec <= current_time_in_sec <= end_time_in_sec
print(start_dt)
# print(result2)
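# Usage sketch for should_system_be_armed(): the event window below is an
# arbitrary example; with the window entirely in the past this prints False.
print(should_system_be_armed("2019-11-11T06:00:00-05:00",
                             "2019-11-11T07:00:00-05:00"))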
|
#
# This module is structured somewhat unusually in order to allow the
# code to be shared between two different modules that run these
# tests against the internally implemented bus and the OS native
# bus if it's available
#
# Those modules dynamically create subclasses of all classes
# deriving from ServerObjectTester, unittest.TestCase, and
# an optional mixin class for setting up the internal bus.
#
import os
import tempfile
from unittest import SkipTest
import twisted
from twisted.internet import defer, reactor
from txdbus import client, endpoints, error, introspection, objects
from txdbus.interface import DBusInterface, Method, Property, Signal
from txdbus.objects import dbusMethod, DBusProperty
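# Illustrative sketch (not part of the original module): the runner modules
# described in the header combine these testers with unittest.TestCase roughly
# as below.  ``InternalBusMixin`` is a hypothetical name for the mixin that
# provides the _setup()/_teardown() hooks used by setUp()/tearDown().
#
#   class SimpleTestCase(SimpleTest, InternalBusMixin, unittest.TestCase):
#       pass
#
# or, generated dynamically:
#
#   SimpleTestCase = type('SimpleTestCase',
#                         (SimpleTest, InternalBusMixin, unittest.TestCase), {})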
def delay(arg):
d = defer.Deferred()
reactor.callLater(0.05, lambda: d.callback(arg))
return d
class TestException (Exception):
dbusErrorName = 'org.txdbus.trial.TestException'
class ServerObjectTester(object):
tst_path = '/test/TestObj'
tst_bus = 'org.txdbus.trial.bus%d' % os.getpid()
TestClass = None
@defer.inlineCallbacks
def setUp(self):
yield self._setup()
yield self._client_connect()
def _client_connect(self):
self.t = self.TestClass(self.tst_path)
self.server_conn = None
self.client_conn = None
f = client.DBusClientFactory()
point = endpoints.getDBusEnvEndpoints(reactor)[0]
point.connect(f)
d = f.getConnection()
d.addCallback(self._connected)
d.addCallback(lambda _: self.get_client_connection())
return d
def _connected(self, conn):
self.server_conn = conn
conn.exportObject(self.t)
return conn.requestBusName(self.tst_bus)
def tearDown(self):
if self.client_conn:
self.client_conn.disconnect()
if self.server_conn:
self.server_conn.disconnect()
return self._teardown()
def get_client_connection(self):
if self.client_conn:
return defer.succeed(self.client_conn)
else:
f = client.DBusClientFactory()
point = endpoints.getDBusEnvEndpoints(reactor)[0]
point.connect(f)
d = f.getConnection()
def got_connection(c):
self.client_conn = c
return c
d.addCallback(got_connection)
return d
def get_proxy(self, interfaces=None):
d = self.get_client_connection()
def gotit(conn):
return conn.getRemoteObject(
self.tst_bus, self.tst_path, interfaces=interfaces)
d.addCallback(gotit)
return d
def proxy_chain(self, *args):
d = self.get_proxy()
for a in args:
d.addCallback(a)
return d
class SimpleObjectTester(ServerObjectTester):
class TestClass (objects.DBusObject):
tif = DBusInterface('org.txdbus.trial.Simple',
Method('testMethod', arguments='s', returns='s'),
Signal('tsig', 's')
)
dbusInterfaces = [tif]
def __init__(self, object_path):
objects.DBusObject.__init__(self, object_path)
def dbus_testMethod(self, arg):
return arg + 'bar'
class ConnectionTest(SimpleObjectTester):
def test_client_connect(self):
d = client.connect(reactor)
def ok(conn):
conn.disconnect()
self.assertTrue(True)
d.addCallback(ok)
return d
def test_fallback_connect(self):
bad1 = 'unix:abstract=/tmp/FOOBARBAZBLICK,guid=5'
bad2 = 'tcp:host=127.0.0.99,port=0,family="ipv4",guid=5'
good = os.environ['DBUS_SESSION_BUS_ADDRESS']
d = client.connect(reactor, '%s;%s;%s' % (bad1, bad2, good))
def ok(conn):
conn.disconnect()
self.assertTrue(True)
d.addCallback(ok)
return d
def test_failed_connect(self):
bad1 = 'unix:abstract=/tmp/FOOBARBAZBLICK,guid=5'
bad2 = 'tcp:host=127.0.0.99,port=0,family="ipv4",guid=5'
d = client.connect(reactor, '%s;%s' % (bad1, bad2))
def ok(conn):
conn.disconnect()
self.assertTrue(False, 'Connect should not have succeeded')
d.addCallback(ok)
d.addErrback(lambda _: self.assertTrue(True))
return d
def test_no_valid_endpoints(self):
d = client.connect(reactor, '')
def ok(conn):
conn.disconnect()
self.assertTrue(False, 'Connect should not have succeeded')
d.addCallback(ok)
d.addErrback(lambda _: self.assertTrue(True))
return d
class InheritedInterfaceTest(SimpleObjectTester):
class TestClass (SimpleObjectTester.TestClass):
tif = DBusInterface('org.txdbus.trial.SimpleSub',
Method(
'testMethodSub', arguments='s', returns='s'),
Signal('tsig', 's')
)
dbusInterfaces = [tif]
def __init__(self, object_path):
objects.DBusObject.__init__(self, object_path)
def dbus_testMethodSub(self, arg):
return arg + 'barSub'
def test_subclass_method(self):
def got_object(ro):
return ro.callRemote('testMethodSub', 'foo')
def got_reply(reply):
self.assertEquals(reply, 'foobarSub')
return self.proxy_chain(got_object, got_reply)
def test_superclass_method(self):
def got_object(ro):
return ro.callRemote('testMethod', 'foo')
def got_reply(reply):
self.assertEquals(reply, 'foobar')
return self.proxy_chain(got_object, got_reply)
class ObjectManagerTest(SimpleObjectTester):
class TestClass (SimpleObjectTester.TestClass):
tif = DBusInterface('org.txdbus.trial.SimpleSub',
Method(
'testMethodSub', arguments='s', returns='s'),
Method('foo', returns='s'),
Signal('tsig', 's'),
Property('prop', 'i')
)
dbusInterfaces = [tif]
prop = DBusProperty('prop')
def __init__(self, object_path, arg=-1):
objects.DBusObject.__init__(self, object_path)
self.prop = arg
def dbus_foo(self):
return 'foo'
def test_get_managed_objects(self):
t1 = self.TestClass('/org/test/Foo', 0)
t2 = self.TestClass('/org/test/Foo/Bar', 1)
t3 = self.TestClass('/org/test/Foo/Baz', 2)
self.server_conn.exportObject(t1)
self.server_conn.exportObject(t2)
self.server_conn.exportObject(t3)
def get_proxy(path):
d = self.get_client_connection()
def gotit(conn):
return conn.getRemoteObject(self.tst_bus, path)
d.addCallback(gotit)
return d
def got_object(ro):
return ro.callRemote('GetManagedObjects')
def got_reply(reply):
self.assertEquals(reply, {
'/org/test/Foo/Bar': {
'org.freedesktop.DBus.Properties': {},
'org.txdbus.trial.Simple': {},
'org.txdbus.trial.SimpleSub': {'prop': 1},
},
'/org/test/Foo/Baz': {
'org.freedesktop.DBus.Properties': {},
'org.txdbus.trial.Simple': {},
'org.txdbus.trial.SimpleSub': {'prop': 2},
},
})
dp = get_proxy('/org/test/Foo')
dp.addCallback(got_object)
dp.addCallback(got_reply)
return dp
@defer.inlineCallbacks
def test_unexport_objects(self):
t1 = self.TestClass('/org/test/Foo', 0)
t2 = self.TestClass('/org/test/Foo/Bar', 1)
t3 = self.TestClass('/org/test/Foo/Baz', 2)
self.server_conn.exportObject(t1)
self.server_conn.exportObject(t2)
self.server_conn.exportObject(t3)
conn = yield self.get_client_connection()
ro1 = yield conn.getRemoteObject(self.tst_bus, '/org/test/Foo')
ro2 = yield conn.getRemoteObject(self.tst_bus, '/org/test/Foo/Bar')
f1 = yield ro1.callRemote('foo')
f2 = yield ro2.callRemote('foo')
self.assertEquals(f1, 'foo')
self.assertEquals(f2, 'foo')
self.server_conn.unexportObject('/org/test/Foo')
f2 = yield ro2.callRemote('foo')
self.assertEquals(f2, 'foo')
try:
f1 = yield ro1.callRemote('foo')
            self.fail('failed to throw exception')
except error.RemoteError as e:
self.assertEquals(
e.message,
'/org/test/Foo is not an object provided by this process.')
except Exception:
self.fail('Threw wrong exception')
def test_interface_added_signal(self):
dsig = defer.Deferred()
def on_signal(m):
dsig.callback(m)
def check_results(m):
self.assertEquals(m.interface,
'org.freedesktop.DBus.ObjectManager')
self.assertEquals(m.member, 'InterfacesAdded')
self.assertEquals(m.body, [
'/org/test/Foo',
{
'org.txdbus.trial.SimpleSub': {'prop': 0},
'org.txdbus.trial.Simple': {},
'org.freedesktop.DBus.Properties': {}
}
])
def on_proxy(ro):
return self.client_conn.addMatch(on_signal,
mtype='signal',
sender=self.tst_bus)
# path_namespace is available in dbus 1.5+
# path_namespace = '/org/test' )
def sendit(ro):
t = self.TestClass('/org/test/Foo', 0)
self.server_conn.exportObject(t)
d = self.get_proxy()
d.addCallback(on_proxy)
d.addCallback(sendit)
d.addCallback(lambda _: dsig)
d.addCallback(check_results)
return d
def test_interface_removed_signal(self):
dsig = defer.Deferred()
t = self.TestClass('/org/test/Foo', 0)
def on_signal(m):
dsig.callback(m)
def check_results(m):
self.assertEquals(m.interface,
'org.freedesktop.DBus.ObjectManager')
self.assertEquals(m.member, 'InterfacesRemoved')
self.assertEquals(m.body, [
'/org/test/Foo', [
'org.txdbus.trial.SimpleSub',
'org.txdbus.trial.Simple',
'org.freedesktop.DBus.Properties'
]
])
def on_proxy(ro):
self.server_conn.exportObject(t)
return self.client_conn.addMatch(on_signal,
mtype='signal',
sender=self.tst_bus,
member='InterfacesRemoved')
# path_namespace is available in dbus 1.5+
# path_namespace = '/org/test' )
def sendit(ro):
self.server_conn.unexportObject('/org/test/Foo')
d = self.get_proxy()
d.addCallback(on_proxy)
d.addCallback(sendit)
d.addCallback(lambda _: dsig)
d.addCallback(check_results)
return d
class SimpleTest(SimpleObjectTester):
def test_bad_remote_method_call(self):
d = self.client_conn.callRemote(self.tst_path, '0badmember',
interface='org.freedesktop.DBus.Peer',
destination=self.tst_bus,
body=1)
d.addCallback(
lambda _: self.assertTrue(
False,
'Remote call should have errbacked'))
d.addErrback(lambda _: self.assertTrue(True))
return d
def test_simple(self):
def got_object(ro):
return ro.callRemote('testMethod', 'foo')
def got_reply(reply):
self.assertEquals(reply, 'foobar')
return self.proxy_chain(got_object, got_reply)
def test_get_connection(self):
def got_object(ro):
self.assertTrue(self.t.getConnection() is not None)
return self.proxy_chain(got_object)
def test_get_name_owner(self):
d = self.server_conn.getNameOwner(self.tst_bus)
def got_reply(reply):
self.assertEquals(reply, self.server_conn.busName)
d.addCallback(got_reply)
return d
def test_manual_interface(self):
def on_proxy(ro):
return ro.callRemote('testMethod', 'foo')
def on_reply(reply):
self.assertEquals(reply, 'foobar')
d = self.get_proxy(SimpleObjectTester.TestClass.tif)
d.addCallback(on_proxy)
d.addCallback(on_reply)
return d
def test_extra_arguments(self):
def on_proxy(ro):
return ro.callRemote('testMethod', 'foo',
expectReply=False,
autoStart=False,
timeout=5)
def on_reply(reply):
self.assertTrue(reply is None)
def on_error(err):
print('***** GOT TIMEOUT ******', err.getErrorMessage())
print(' ', err.value)
self.assertTrue(
isinstance(
err.value,
error.TimeOut),
'Did not receive a timeout')
d = self.get_proxy()
d.addCallback(on_proxy)
d.addCallbacks(on_reply, on_error)
return d
def test_ping(self):
d = self.client_conn.callRemote(self.tst_path, 'Ping',
interface='org.freedesktop.DBus.Peer',
destination=self.tst_bus)
d.addCallback(lambda _: self.assertTrue(True))
return d
def test_hello_hello(self):
def on_proxy(ro):
return self.client_conn.callRemote(
'/org/freedesktop/DBus',
'Hello',
interface='org.freedesktop.DBus',
destination='org.freedesktop.DBus',
)
def on_err(e):
self.assertEquals(
'org.freedesktop.DBus.Error.Failed: Already handled an Hello '
'message',
str(e.value),
)
d = self.get_proxy()
d.addCallback(on_proxy)
d.addCallbacks(lambda _: self.fail('Call should have failed'),
on_err)
return d
class DisconnectTest(SimpleObjectTester):
def test_notify_on_disconnect(self):
dresult = defer.Deferred()
def fail():
dresult.callback(None)
self.fail(
'Failed to receive disconnect notification on remote object')
fail_cb = reactor.callLater(5, fail)
def on_disconnect(robj, reason):
fail_cb.cancel()
self.assertTrue(True)
dresult.callback(None)
def on_proxy(ro):
self.proxy = ro # ensure proxy object doesn't get GC-ed
ro.notifyOnDisconnect(on_disconnect)
self.client_conn.disconnect()
self.client_conn = None
d = self.get_proxy()
d.addCallback(on_proxy)
return dresult
def test_cancel_notify_on_disconnect(self):
dresult = defer.Deferred()
def ok():
self.assertTrue(True)
dresult.callback(None)
ok_cb = reactor.callLater(0.1, ok)
def on_disconnect(robj, reason):
ok_cb.cancel()
self.fail(
'Should not have received disconnect notification on remote '
'object'
)
dresult.callback(None)
def on_proxy(ro):
self.proxy = ro # ensure proxy object doesn't get GC-ed
ro.notifyOnDisconnect(on_disconnect)
ro.cancelNotifyOnDisconnect(on_disconnect)
self.client_conn.disconnect()
self.client_conn = None
d = self.get_proxy()
d.addCallback(on_proxy)
return dresult
class IntrospectionTest(SimpleObjectTester):
class TestClass (objects.DBusObject):
tif = DBusInterface('org.txdbus.trial.Simple',
Method('testMethod', arguments='s', returns='s'),
Signal('tsig', 's'),
Property('foo', 's', emitsOnChange=False),
Property('bar', 'i', True, True, True),
Property('baz', 's', False, True, 'invalidates')
)
dbusInterfaces = [tif]
def __init__(self, object_path):
objects.DBusObject.__init__(self, object_path)
introspection_golden_xml = """<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"
"http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
<node name="/test/TestObj">
<interface name="org.txdbus.trial.Simple">
<method name="testMethod">
<arg direction="in" type="s"/>
<arg direction="out" type="s"/>
</method>
<signal name="tsig">
<arg type="s"/>
</signal>
<property name="bar" type="i" access="readwrite">
<annotation name="org.freedesktop.DBus.Property.EmitsChangedSignal" value="true"/>
</property>
<property name="baz" type="s" access="write">
<annotation name="org.freedesktop.DBus.Property.EmitsChangedSignal" value="invalidates"/>
</property>
<property name="foo" type="s" access="read">
<annotation name="org.freedesktop.DBus.Property.EmitsChangedSignal" value="false"/>
</property>
</interface>
<interface name="org.freedesktop.DBus.Properties">
<method name="Get">
<arg direction="in" type="s"/>
<arg direction="in" type="s"/>
<arg direction="out" type="v"/>
</method>
<method name="GetAll">
<arg direction="in" type="s"/>
<arg direction="out" type="a{sv}"/>
</method>
<method name="Set">
<arg direction="in" type="s"/>
<arg direction="in" type="s"/>
<arg direction="in" type="v"/>
</method>
<signal name="PropertiesChanged">
<arg type="s"/>
<arg type="a{sv}"/>
<arg type="as"/>
</signal>
</interface>
<interface name="org.freedesktop.DBus.Introspectable">
<method name="Introspect">
<arg direction="out" type="s" />
</method>
</interface>
<interface name="org.freedesktop.DBus.Peer">
<method name="Ping">
</method>
</interface>
<interface name="org.freedesktop.DBus.ObjectManager">
<method name="GetManagedObjects">
<arg direction="out" type="a{oa{sa{sv}}}" />
</method>
</interface>
</node>""" # noqa E501: It's okay for this to be over 80-chars
def test_introspection(self):
d = self.get_client_connection()
def cb(c):
return c.callRemote(
self.tst_path,
'Introspect',
interface='org.freedesktop.DBus.Introspectable',
destination=self.tst_bus,
)
def gotxml(xml):
# with open('/tmp/tout', 'w') as f:
# f.write(xml)
self.assertEquals(self.introspection_golden_xml, xml)
d.addCallback(cb)
d.addCallback(gotxml)
return d
def test_introspection_parsing(self):
ifaces = introspection.getInterfacesFromXML(
self.introspection_golden_xml, True)
for iface in ifaces:
if iface.name == 'org.txdbus.trial.Simple':
break
else:
self.assertTrue(False)
self.assertTrue('testMethod' in iface.methods)
self.assertTrue('tsig' in iface.signals)
self.assertTrue('foo' in iface.properties)
class SignalTester(ServerObjectTester):
class TestClass (objects.DBusObject):
tif = DBusInterface(
'org.txdbus.trial.Signal',
Method('sendSignal', arguments='s'),
Method('sendShared1'),
Method('sendEmpty'),
Method('sendRaw', arguments='s'),
Signal('testSignal', 's'),
Signal('sharedSignal', 's'),
Signal('emptySig')
)
tif2 = DBusInterface(
'org.txdbus.trial.Signal2',
Method('sendShared2'),
Signal('sharedSignal', 's')
)
dbusInterfaces = [tif, tif2]
def __init__(self, object_path):
objects.DBusObject.__init__(self, object_path)
def dbus_sendSignal(self, arg):
self.emitSignal('testSignal', 'Signal arg: ' + arg)
def dbus_sendRaw(self, arg):
self.emitSignal('testSignal', arg)
def dbus_sendEmpty(self):
self.emitSignal('emptySig')
def dbus_sendShared1(self):
self.emitSignal(
'sharedSignal',
'iface1',
interface='org.txdbus.trial.Signal',
)
def dbus_sendShared2(self):
self.emitSignal(
'sharedSignal',
'iface2',
interface='org.txdbus.trial.Signal2',
)
def test_signal(self):
dsig = defer.Deferred()
def on_signal(arg):
dsig.callback(arg)
def on_proxy(ro):
ro.notifyOnSignal('testSignal', on_signal)
return ro.callRemote('sendSignal', 'foo')
d = self.get_proxy()
d.addCallback(on_proxy)
def check_result(result):
self.assertEquals(result, 'Signal arg: foo')
dsig.addCallback(check_result)
return defer.DeferredList([d, dsig])
def test_signal_no_parameters(self):
dsig = defer.Deferred()
def on_signal():
dsig.callback(None)
def on_proxy(ro):
ro.notifyOnSignal('emptySig', on_signal)
return ro.callRemote('sendEmpty')
d = self.get_proxy()
d.addCallback(on_proxy)
return defer.DeferredList([d, dsig])
def test_bad_signal_interface(self):
def on_signal(arg):
pass
def on_proxy(ro):
self.assertRaises(
AttributeError,
ro.notifyOnSignal,
'testSignal',
on_signal,
'foo_iface'
)
d = self.get_proxy()
d.addCallback(on_proxy)
return d
def test_signal_cancel(self):
counts = {'signal_count': 0}
def on_signal(_):
counts['signal_count'] += 1
def on_proxy(ro):
dnotify = ro.notifyOnSignal('testSignal', on_signal)
def send_signal(rule_id):
dx = ro.callRemote('sendSignal', 'foo')
dx.addCallback(lambda _: rule_id)
return dx
def cancel_reg(rule_id):
return ro.cancelSignalNotification(rule_id)
def delay(arg):
d = defer.Deferred()
reactor.callLater(0.1, lambda: d.callback(arg))
return d
def check_result(y):
self.assertEqual(counts['signal_count'], 1)
dnotify.addCallback(send_signal)
dnotify.addCallback(cancel_reg)
dnotify.addCallback(send_signal)
dnotify.addCallback(check_result)
return dnotify
d = self.get_proxy()
d.addCallback(on_proxy)
return d
def test_shared_signal(self):
d1 = defer.Deferred()
d2 = defer.Deferred()
def on_proxy(ro):
ro.notifyOnSignal('sharedSignal', d1.callback,
interface='org.txdbus.trial.Signal')
ro.notifyOnSignal('sharedSignal', d2.callback,
interface='org.txdbus.trial.Signal2')
return defer.DeferredList([
ro.callRemote('sendShared1'),
ro.callRemote('sendShared2'),
])
def check_signals_sent(result):
# both callRemotes successful and returning None
self.assertEquals(len(result), 2)
for success, returnValue in result:
self.assertTrue(success)
self.assertIsNone(returnValue)
return result
signalsSent = self.get_proxy().addCallback(on_proxy)
signalsSent.addCallback(check_signals_sent)
def check_signals_received(result):
# d1 and d2 calledback with the correct signal data
self.assertEquals(result[0], (True, 'iface1'))
self.assertEquals(result[1], (True, 'iface2'))
return result
signalsReceived = defer.DeferredList([d1, d2])
signalsReceived.addCallback(check_signals_received)
# success is both:
# - signals sent, callRemotes checked, connections closed
# - signals received, callbacks checked
return defer.DeferredList([signalsSent, signalsReceived])
def test_arg_rule_match(self):
dsig = defer.Deferred()
def on_signal(result):
dsig.callback(result)
d = self.client_conn.addMatch(
on_signal,
mtype='signal',
sender=self.tst_bus,
arg=[(0, 'Signal arg: MATCH')],
)
def on_proxy(ro):
return ro.callRemote('sendSignal', 'MATCH')
d.addCallback(lambda _: self.get_proxy())
d.addCallback(on_proxy)
def check_result(result):
self.assertEquals(result.body[0], 'Signal arg: MATCH')
dsig.addCallback(check_result)
return dsig
def test_arg_path_rule_match(self):
dsig = defer.Deferred()
def on_signal(result):
dsig.callback(result)
d = self.client_conn.addMatch(
on_signal,
mtype='signal',
sender=self.tst_bus,
arg_path=[(0, '/aa/bb/')],
)
def on_proxy(ro):
return ro.callRemote('sendRaw', '/aa/bb/cc')
d.addCallback(lambda _: self.get_proxy())
d.addCallback(on_proxy)
def check_result(result):
self.assertEquals(result.body[0], '/aa/bb/cc')
dsig.addCallback(check_result)
return dsig
def test_arg_rule_remove_match(self):
dsig = defer.Deferred()
x = {'rule_id': None}
def on_signal(result):
dsig.callback(result)
d = self.client_conn.addMatch(
on_signal,
mtype='signal',
sender=self.tst_bus,
arg=[(0, 'Signal arg: MATCH')],
)
def added(rule_id):
x['rule_id'] = rule_id
return self.get_proxy()
d.addCallback(added)
def on_proxy(ro):
return ro.callRemote('sendSignal', 'MATCH')
d.addCallback(on_proxy)
def check_result(result):
self.assertEquals(result.body[0], 'Signal arg: MATCH')
dsig.addCallback(check_result)
def remove(_):
return self.client_conn.delMatch(x['rule_id'])
dsig.addCallback(remove)
return dsig
def test_path_namespace_rule_match(self):
dsig = defer.Deferred()
def on_signal(result):
dsig.callback(result)
d = self.client_conn.addMatch(
on_signal,
mtype='signal',
sender=self.tst_bus,
path_namespace='/test',
)
def on_proxy(ro):
return ro.callRemote('sendRaw', 'string payload')
d.addCallback(lambda _: self.get_proxy())
d.addCallback(on_proxy)
def check_result(result):
self.assertEquals(result.body[0], 'string payload')
dsig.addCallback(check_result)
return dsig
class ErrorTester(ServerObjectTester):
class TestException (Exception):
dbusErrorName = 'org.txdbus.trial.TestException'
pass
class InvalidErrorName (Exception):
dbusErrorName = 'oops'
class TestClass (objects.DBusObject):
tif = DBusInterface(
'org.txdbus.trial.Oops',
Method('errCall', arguments='s'),
Method('raiseExpected'),
Method('raisePython'),
Method('raiseInvalid'),
Signal('testSignal', 's')
)
dbusInterfaces = [tif]
def __init__(self, object_path):
objects.DBusObject.__init__(self, object_path)
def dbus_errCall(self, arg):
self.fail('Should not see this')
def dbus_raiseExpected(self):
raise ErrorTester.TestException('ExpectedError')
def dbus_raisePython(self):
d = {}
d['Uh oh!']
def dbus_raiseInvalid(self):
raise ErrorTester.InvalidErrorName()
def test_err_no_method(self):
def on_err(e):
self.assertEquals(
'org.freedesktop.DBus.Error.UnknownMethod: Method "FooBarBaz" '
'with signature "" on interface "(null)" doesn\'t exist',
str(e.value),
)
d = self.client_conn.callRemote(self.tst_path, 'FooBarBaz',
destination=self.tst_bus)
d.addCallbacks(lambda _: self.fail('Call should have failed'),
on_err)
return d
def test_err_no_object(self):
def on_err(e):
self.assertEquals(
'org.freedesktop.DBus.Error.UnknownObject: '
'/test/TestObjINVALID is not an object provided '
'by this process.',
str(e.value),
)
d = self.client_conn.callRemote(self.tst_path + "INVALID", 'FooBarBaz',
destination=self.tst_bus)
d.addCallbacks(lambda _: self.fail('Call should have failed'),
on_err)
return d
def test_err_call_no_arguments(self):
def on_err(e):
self.assertEquals(
str(e.value),
'org.freedesktop.DBus.Error.InvalidArgs: Call to errCall has '
'wrong args (, expected s)',
)
d = self.client_conn.callRemote(self.tst_path, 'errCall',
destination=self.tst_bus)
d.addCallbacks(lambda _: self.fail('Call should have failed'),
on_err)
return d
def test_err_call_bad_arguments(self):
def on_err(e):
self.assertEquals(
str(e.value),
'org.freedesktop.DBus.Error.InvalidArgs: Call to errCall has '
'wrong args (i, expected s)',
)
self.assertTrue(True)
d = self.client_conn.callRemote(
self.tst_path,
'errCall',
signature='i', body=[5],
destination=self.tst_bus
)
d.addCallbacks(lambda _: self.fail('Call should have failed'),
on_err)
return d
def test_raise_expected(self):
def on_err(e):
self.assertEquals(
str(e.value),
'org.txdbus.trial.TestException: ExpectedError',
)
d = self.client_conn.callRemote(
self.tst_path,
'raiseExpected',
destination=self.tst_bus,
)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
return d
def test_raise_python(self):
def on_err(e):
self.assertEquals(
str(e.value),
"org.txdbus.PythonException.KeyError: 'Uh oh!'",
)
d = self.client_conn.callRemote(
self.tst_path, 'raisePython',
destination=self.tst_bus,
)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
return d
def test_raise_invalid(self):
def on_err(e):
self.assertEquals(
str(e.value),
'org.txdbus.InvalidErrorName: !!(Invalid error name "oops")!! '
)
d = self.client_conn.callRemote(
self.tst_path,
'raiseInvalid',
destination=self.tst_bus,
)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
return d
def _test_bad(self):
def on_err(e):
self.assertTrue(True)
d = self.client_conn.callRemote(
'/org/freedesktop/DBus',
'RequestName',
interface='org.freedesktop.DBus',
signature='i',
body=[5],
destination='org.freedesktop.DBus',
)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
return d
class ComplexObjectTester(ServerObjectTester):
class TestClass (objects.DBusObject):
tif = DBusInterface(
'org.txdbus.trial.Complex',
Method(
'testComplexArgs',
arguments='s(ii(si)i)',
returns='s',
),
Method(
'testDictToTuples',
arguments='a{ss}',
returns='a(ss)',
),
Method(
'testDictToTuples2',
arguments='a{ss}',
returns='a(ss)y',
),
Method(
'testDictToTuples3',
arguments='a{ss}',
returns='ia(ss)y',
),
Method(
'testDictToTuples4',
arguments='(a{ss})',
returns='(ia(ss)y)',
),
Method('testCaller'),
Method('testTimeOut'),
Method('testTimeOutNotNeeded', arguments='s'),
Method('notImplemented'),
Signal('tsig', 's')
)
dbusInterfaces = [tif]
def __init__(self, object_path):
objects.DBusObject.__init__(self, object_path)
def dbus_testComplexArgs(self, arg1, arg2):
return repr(arg1) + ' # ' + repr(arg2)
def dbus_testDictToTuples(self, d):
k = sorted(d.keys())
r = [(x, d[x]) for x in k]
return r
def dbus_testDictToTuples2(self, d):
k = sorted(d.keys())
r = [(x, d[x]) for x in k]
return (r, 6)
def dbus_testDictToTuples3(self, d):
k = sorted(d.keys())
r = [(x, d[x]) for x in k]
return (2, r, 6)
def dbus_testDictToTuples4(self, tpl):
d = tpl[0]
k = sorted(d.keys())
r = [(x, d[x]) for x in k]
return (2, r, 6)
def dbus_testCaller(self, dbusCaller=None):
if dbusCaller is None:
raise Exception('dbusCaller should not be none')
def dbus_testTimeOut(self):
d = defer.Deferred()
reactor.callLater(0.02, lambda: d.callback(None))
return d
def dbus_testTimeOutNotNeeded(self, arg):
if arg == 'err':
raise Exception('err')
return 'foo'
    def test_complex_args(self):
class Sub:
dbusOrder = ['substr', 'subint']
def __init__(self):
self.substr = 'substring'
self.subint = 10
class Foo:
dbusOrder = ['one', 'two', 'sub', 'four']
def __init__(self):
self.one = 1
self.two = 2
self.sub = Sub()
self.four = 4
def got_object(ro):
return ro.callRemote('testComplexArgs', 'foo', Foo())
def got_reply(reply):
expected = repr(u'foo') + ' # ' + \
repr([1, 2, [u'substring', 10], 4])
self.assertEquals(reply, expected)
return self.proxy_chain(got_object, got_reply)
def test_dict_to_tuples(self):
d = {'foo': 'bar', 'baz': 'quux', 'william': 'wallace'}
def got_object(ro):
return ro.callRemote('testDictToTuples', d)
def got_reply(reply):
self.assertEquals(
reply,
[['baz', 'quux'], ['foo', 'bar'], ['william', 'wallace']],
)
return self.proxy_chain(got_object, got_reply)
def test_dict_to_tuples2(self):
d = {'foo': 'bar', 'baz': 'quux', 'william': 'wallace'}
def got_object(ro):
return ro.callRemote('testDictToTuples2', d)
def got_reply(reply):
self.assertEquals(
reply[0],
[['baz', 'quux'], ['foo', 'bar'], ['william', 'wallace']],
)
self.assertEquals(reply[1], 6)
return self.proxy_chain(got_object, got_reply)
def test_dict_to_tuples3(self):
d = {'foo': 'bar', 'baz': 'quux', 'william': 'wallace'}
def got_object(ro):
return ro.callRemote('testDictToTuples3', d)
def got_reply(reply):
self.assertEquals(reply[0], 2)
self.assertEquals(
reply[1],
[['baz', 'quux'], ['foo', 'bar'], ['william', 'wallace']],
)
self.assertEquals(reply[2], 6)
return self.proxy_chain(got_object, got_reply)
def test_dict_to_tuples4(self):
d = {'foo': 'bar', 'baz': 'quux', 'william': 'wallace'}
def got_object(ro):
return ro.callRemote('testDictToTuples4', [d])
def got_reply(reply_obj):
reply = reply_obj[0]
self.assertEquals(reply[0], 2)
self.assertEquals(
reply[1],
[['baz', 'quux'], ['foo', 'bar'], ['william', 'wallace']],
)
self.assertEquals(reply[2], 6)
return self.proxy_chain(got_object, got_reply)
def test_caller(self):
def got_object(ro):
return ro.callRemote('testCaller')
def got_reply(reply):
self.assertTrue(True)
return self.proxy_chain(got_object, got_reply)
def test_time_out(self):
def on_proxy(ro):
return ro.callRemote('testTimeOut', timeout=0.01)
def on_err(err):
self.assertTrue(
isinstance(err.value, error.TimeOut),
'Did not receive a timeout',
)
d = self.get_proxy()
d.addCallback(on_proxy)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
d.addBoth(delay)
return d
def test_time_out_not_needed(self):
def on_proxy(ro):
return ro.callRemote('testTimeOutNotNeeded', '', timeout=5)
def on_err(err):
self.assertTrue(
not isinstance(err.value, error.TimeOut),
'Should not have received a timeout',
)
d = self.get_proxy()
d.addCallback(on_proxy)
d.addErrback(on_err)
return d
def test_time_out_not_needed_on_error(self):
def on_proxy(ro):
return ro.callRemote('testTimeOutNotNeeded', 'err', timeout=5)
def on_err(err):
self.assertTrue(
not isinstance(err.value, error.TimeOut),
'Should not have received a timeout',
)
d = self.get_proxy()
d.addCallback(on_proxy)
d.addErrback(on_err)
return d
def test_not_implemented_dbus_method(self):
def on_proxy(ro):
return ro.callRemote('notImplemented')
def on_err(err):
self.assertEquals(
err.getErrorMessage(),
'org.txdbus.PythonException.NotImplementedError',
)
d = self.get_proxy()
d.addCallback(on_proxy)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
d.addBoth(delay)
return d
def test_connection_lost_callbacks(self):
x = {'hit': False}
def cb(conn, reason):
x['hit'] = True
self.client_conn.notifyOnDisconnect(cb)
self.client_conn.disconnect()
self.client_conn = None
d = delay(0.01)
d.addCallback(lambda _: self.assertTrue(x['hit']))
return d
def test_connection_lost_callbacks_canceled(self):
x = {'hit1': False, 'hit2': False}
def cb1(conn, reason):
x['hit1'] = True
def cb2(conn, reason):
x['hit2'] = True
self.client_conn.notifyOnDisconnect(cb2)
self.client_conn.notifyOnDisconnect(cb1)
self.client_conn.cancelNotifyOnDisconnect(cb2)
self.client_conn.disconnect()
self.client_conn = None
d = delay(0.01)
d.addCallback(lambda _: self.assertTrue(x['hit1']))
d.addCallback(lambda _: self.assertTrue(not x['hit2']))
return d
def test_connection_lost_with_pending_calls(self):
x = {'hit': False}
def cb(conn, reason):
x['hit'] = True
pending = reactor.callLater(0.1, lambda: self.assertTrue(
False,
'Failed to cancel pending call'
))
dcancel = defer.Deferred()
dcancel.addCallback(lambda _: self.assertTrue(False))
dcancel.addErrback(lambda _: self.assertTrue(True))
# Directly inject
self.client_conn._pendingCalls['foo'] = (dcancel, pending)
self.client_conn.notifyOnDisconnect(cb)
self.client_conn.disconnect()
self.client_conn = None
d = delay(0.01)
d.addCallback(lambda _: dcancel)
return d
class InterfaceTester(ServerObjectTester):
class TestClass (objects.DBusObject):
tif1 = DBusInterface(
'org.txdbus.trial.IFace1',
Method('testMethod', arguments='s', returns='s'),
Method('foo'),
Method('baz', returns='s'),
Method('blah', returns='vv'),
Method('onlyOneImpl'),
Property('prop_if1', 's', writeable=True),
Property('common', 's')
)
tif2 = DBusInterface(
'org.txdbus.trial.IFace2',
Method('testMethod', arguments='s', returns='s'),
Method('bar'),
Method('baz', returns='s'),
Method('renamedMethod', returns='s'),
Method('onlyOneImpl'),
Property('prop1', 's'),
Property('prop2', 'i'),
Property('pwrite', 's', writeable=True),
Property('wronly', 's', readable=False, writeable=True),
Property('common', 's')
)
dbusInterfaces = [tif1, tif2]
pif1 = DBusProperty('prop_if1')
prop_attr = DBusProperty('prop1', 'org.txdbus.trial.IFace2')
prop2_attr = DBusProperty('prop2')
propw = DBusProperty('pwrite')
pwr = DBusProperty('wronly')
common1 = DBusProperty('common', 'org.txdbus.trial.IFace1')
common2 = DBusProperty('common', 'org.txdbus.trial.IFace2')
def __init__(self, object_path):
self.pif1 # test property access prior to object construction
objects.DBusObject.__init__(self, object_path)
self.prop_attr = 'foobar'
self.prop2_attr = 5
self.propw = 'orig'
self.pwr = 'blah'
self.pif1 = 'pif1'
self.common1 = 'common1'
self.common2 = 'common2'
def dbus_testMethod(self, arg):
return arg + 'bar'
def dbus_foo(self):
return None
def dbus_bar(self):
return None
def dbus_blah(self):
return ('foo', 'bar')
@dbusMethod('org.txdbus.trial.IFace1', 'baz')
def dbus_baz1(self):
return 'iface1'
@dbusMethod('org.txdbus.trial.IFace2', 'baz')
def dbus_baz2(self):
return 'iface2'
@dbusMethod('org.txdbus.trial.IFace2', 'renamedMethod')
def dbus_sneaky(self):
return 'sneaky'
@dbusMethod('org.txdbus.trial.IFace1', 'onlyOneImpl')
def dbus_onlyOneImpl(self):
pass
def test_property_emitsOnChange_validity(self):
def c():
Property('foo', 's', emitsOnChange='foo')
self.assertRaises(TypeError, c)
def test_interface_invalid_constructor_argument(self):
def c():
DBusInterface('foo', 1)
self.assertRaises(TypeError, c)
    def test_delete_interface_method(self):
i = DBusInterface('foo', Method('methA'), Method('methB'))
self.assertTrue('methA' in i.introspectionXml)
i.delMethod('methA')
self.assertTrue('methA' not in i.introspectionXml)
    def test_delete_interface_signal(self):
i = DBusInterface('foo', Signal('sigA'), Signal('sigB'))
self.assertTrue('sigA' in i.introspectionXml)
i.delSignal('sigA')
self.assertTrue('sigA' not in i.introspectionXml)
    def test_delete_interface_property(self):
i = DBusInterface(
'foo', Property(
'propA', 'i'), Property(
'propB', 'i'))
self.assertTrue('propA' in i.introspectionXml)
i.delProperty('propA')
self.assertTrue('propA' not in i.introspectionXml)
def test_renamed_server_method(self):
def got_object(ro):
return ro.callRemote('renamedMethod')
def got_reply(reply):
self.assertEquals(reply, 'sneaky')
return self.proxy_chain(got_object, got_reply)
def test_get_proxy_with_string_interface_name(self):
def got_object(ro):
return self.client_conn.getRemoteObject(
self.tst_bus,
self.tst_path,
interfaces='org.txdbus.trial.IFace1',
)
def got_object2(ro2):
return ro2.callRemote('baz')
def got_reply(reply):
self.assertEquals(reply, 'iface1')
return self.proxy_chain(got_object, got_object2, got_reply)
def test_get_proxy_with_bad_interface_name(self):
def got_object(ro):
return self.client_conn.getRemoteObject(
self.tst_bus,
self.tst_path,
interfaces='org.txdbus.INVALID_INTERFACE',
)
def on_err(err):
self.assertEquals(
err.getErrorMessage(),
'Introspection failed to find interfaces: org.txdbus.'
'INVALID_INTERFACE',
)
def got(v):
print('GOT: ', v)
d = self.get_proxy()
d.addCallback(got_object)
d.addCallback(got)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
return d
def test_missing_implementation(self):
def got_object(ro):
return ro.callRemote(
'onlyOneImpl',
interface='org.txdbus.trial.IFace2',
)
def on_err(err):
self.assertEquals(
err.getErrorMessage(),
'org.txdbus.PythonException.NotImplementedError',
)
d = self.get_proxy()
d.addCallback(got_object)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
return d
def test_emit_invalid_signal(self):
def got_object(ro):
try:
self.t.emitSignal('InvalidSignalName')
self.assertTrue(False, 'Should have raised an exception')
except AttributeError as e:
self.assertEquals(
str(e),
'Signal "InvalidSignalName" not found in any supported '
'interface.',
)
return self.proxy_chain(got_object)
def test_baz(self):
def got_object(ro):
return ro.callRemote('baz')
def got_reply(reply):
self.assertEquals(reply, 'iface1')
return self.proxy_chain(got_object, got_reply)
def test_baz2(self):
def got_object(ro):
return ro.callRemote('baz', interface='org.txdbus.trial.IFace2')
def got_reply(reply):
self.assertEquals(reply, 'iface2')
return self.proxy_chain(got_object, got_reply)
def test_foo(self):
def got_object(ro):
return ro.callRemote('foo')
def got_reply(reply):
self.assertTrue(True)
return self.proxy_chain(got_object, got_reply)
def test_bad_remote_method(self):
def got_object(ro):
self.assertRaises(
AttributeError,
ro.callRemote,
'INVALID_METHOD_NAME')
return self.proxy_chain(got_object)
def test_bad_remote_method_argument_number(self):
def got_object(ro):
self.assertRaises(TypeError, ro.callRemote, 'foo', 'bar')
return self.proxy_chain(got_object)
def test_bar(self):
def got_object(ro):
return ro.callRemote('bar')
def got_reply(reply):
self.assertTrue(True)
return self.proxy_chain(got_object, got_reply)
def test_foo2(self):
def got_object(ro):
return ro.callRemote('testMethod', 'foo')
def got_reply(reply):
self.assertEquals(reply, 'foobar')
return self.proxy_chain(got_object, got_reply)
    def test_multi_variant_return(self):
def got_object(ro):
return ro.callRemote('blah')
def got_reply(reply):
self.assertEquals(reply, ['foo', 'bar'])
return self.proxy_chain(got_object, got_reply)
def test_get_property_with_interface(self):
def got_object(ro):
return ro.callRemote('Get', 'org.txdbus.trial.IFace2', 'prop1')
def got_reply(reply):
self.assertEquals(reply, 'foobar')
return self.proxy_chain(got_object, got_reply)
def test_get_common_property1_with_interface(self):
def got_object(ro):
return ro.callRemote('Get', 'org.txdbus.trial.IFace1', 'common')
def got_reply(reply):
self.assertEquals(reply, 'common1')
return self.proxy_chain(got_object, got_reply)
def test_get_common_property2_with_interface(self):
def got_object(ro):
return ro.callRemote('Get', 'org.txdbus.trial.IFace2', 'common')
def got_reply(reply):
self.assertEquals(reply, 'common2')
return self.proxy_chain(got_object, got_reply)
def test_get_property_without_interface(self):
def got_object(ro):
return ro.callRemote('Get', '', 'prop1')
def got_reply(reply):
self.assertEquals(reply, 'foobar')
return self.proxy_chain(got_object, got_reply)
def test_get_integer_property(self):
def got_object(ro):
return ro.callRemote('Get', '', 'prop2')
def got_reply(reply):
self.assertEquals(reply, 5)
return self.proxy_chain(got_object, got_reply)
def test_get_invalid_property(self):
def got_object(ro):
return ro.callRemote('Get', '', 'INVALID_PROPERTY')
def got_err(err):
self.assertEquals(
err.getErrorMessage(),
'org.txdbus.PythonException.Exception: Invalid Property')
d = self.get_proxy()
d.addCallback(got_object)
d.addErrback(got_err)
return d
def test_set_invalid_property(self):
def got_object(ro):
return ro.callRemote('Set', '', 'INVALID_PROPERTY', 'Whoopsie')
def got_err(err):
self.assertEquals(
err.getErrorMessage(),
'org.txdbus.PythonException.Exception: Invalid Property')
d = self.get_proxy()
d.addCallback(got_object)
d.addErrback(got_err)
return d
def test_set_read_only_property(self):
def got_object(ro):
return ro.callRemote('Set', '', 'prop1', 'Whoopsie')
def got_err(err):
self.assertEquals(
err.getErrorMessage(),
'org.txdbus.PythonException.Exception: Property is not '
'Writeable',
)
d = self.get_proxy()
d.addCallback(got_object)
d.addErrback(got_err)
return d
def test_get_write_only_property(self):
def got_object(ro):
return ro.callRemote('Get', '', 'wronly')
def got_err(err):
self.assertEquals(
err.getErrorMessage(),
'org.txdbus.PythonException.Exception: Property is not '
'readable',
)
d = self.get_proxy()
d.addCallback(got_object)
d.addErrback(got_err)
return d
def test_set_string_property(self):
def got_object(ro):
self.assertEquals(self.t.propw, 'orig')
return ro.callRemote('Set', '', 'pwrite', 'changed')
def got_reply(reply):
self.assertEquals(self.t.propw, 'changed')
return self.proxy_chain(got_object, got_reply)
def test_get_all_properties(self):
def got_object(ro):
return ro.callRemote('GetAll', '')
def got_reply(reply):
# GetAll called with no specific interface.
# Remote object implements two interfaces with a 'common'
# property. The result may include the value of either
# interface's property, in this case:
self.assertIn(reply['common'], ('common1', 'common2'))
# The remaining properties have no possible ambiguity.
del reply['common']
self.assertEquals(reply, {
'prop1': 'foobar',
'prop2': 5,
'prop_if1': 'pif1',
'pwrite': 'orig'
})
return self.proxy_chain(got_object, got_reply)
def test_property_emit_changed(self):
dsig = defer.Deferred()
def on_signal(*args):
dsig.callback(args)
def check_results(arg):
interface_name, changed, invalidated = arg
self.assertEquals(interface_name, 'org.txdbus.trial.IFace2')
self.assertEquals(changed, {'pwrite': 'should emit'})
self.assertEquals(invalidated, [])
def on_proxy(ro):
dnot = ro.notifyOnSignal('PropertiesChanged', on_signal)
dnot.addCallback(lambda _: ro)
return dnot
def setit(ro):
ro.callRemote('Set', '', 'pwrite', 'should emit')
d = self.get_proxy()
d.addCallback(on_proxy)
d.addCallback(setit)
d.addCallback(lambda _: dsig)
d.addCallback(check_results)
return d
def test_property_delete(self):
def d():
del self.pif1
self.assertRaises(AttributeError, d)
def test_invalid_property_name(self):
class T(objects.DBusObject):
prop = DBusProperty('no_interface_property')
t = T('/foo')
def ii():
for x in t._iterIFaceCaches():
pass
self.assertRaises(AttributeError, ii)
class BusNameTest(SimpleObjectTester):
def test_get_bus_name(self):
d = self.client_conn.requestBusName('org.test.foobar')
def cb(i):
self.assertEquals(i, client.NAME_ACQUIRED)
d.addCallback(cb)
return d
def test_bad_bus_name(self):
def on_err(e):
self.assertEquals(
'org.freedesktop.DBus.Error.InvalidArgs: Cannot acquire a '
'service starting with \':\' such as ":1.234"',
str(e.value)
)
d = self.client_conn.requestBusName(':1.234')
d.addCallbacks(lambda _: self.fail('Call should have failed'),
on_err)
return d
def test_already_owner(self):
d = self.client_conn.requestBusName('org.test.foobar')
def cb(i):
self.assertEquals(i, client.NAME_ALREADY_OWNER)
d.addCallback(
lambda _: self.client_conn.requestBusName('org.test.foobar'))
d.addCallback(cb)
return d
def cli2(self):
f = client.DBusClientFactory()
point = endpoints.getDBusEnvEndpoints(reactor)[0]
point.connect(f)
d = f.getConnection()
def got_connection(c):
self.client_conn2 = c
return c
d.addCallback(got_connection)
return d
def test_name_in_use(self):
d = self.cli2()
d.addCallback(
lambda _: self.client_conn.requestBusName('org.test.foobar'))
d.addCallback(
lambda _: self.client_conn2.requestBusName('org.test.foobar'))
def on_err(e):
self.assertEquals(
'Failed to acquire bus name "org.test.foobar": Name in use',
str(e.value),
)
d.addCallbacks(lambda _: self.fail('Call should have failed'),
on_err)
def cleanup(x):
self.client_conn2.disconnect()
return x
d.addBoth(cleanup)
return d
def test_name_replacement(self):
d = self.cli2()
flags = {'sig': False}
def on_name_lost(sig):
self.assertEquals(sig.body[0], 'org.test.foobar')
flags['sig'] = True
d.addCallback(
lambda _: self.client_conn.addMatch(
on_name_lost,
member='NameLost'))
d.addCallback(lambda _: self.client_conn.requestBusName(
'org.test.foobar',
allowReplacement=True,
))
d.addCallback(lambda _: self.client_conn2.requestBusName(
'org.test.foobar',
replaceExisting=True,
))
d.addCallback(delay)
def cb(i):
self.assertTrue(flags['sig'])
self.assertEquals(i, client.NAME_ACQUIRED)
d.addCallback(cb)
def cleanup(x):
self.client_conn2.disconnect()
return x
d.addBoth(cleanup)
return d
def test_queued_name_replacement(self):
d = self.cli2()
flags = {'sig': False}
def on_name_acq(sig):
self.assertEquals(sig.body[0], 'org.test.foobar')
flags['sig'] = True
d.addCallback(
lambda _: self.client_conn2.addMatch(
on_name_acq,
member='NameAcquired'))
d.addCallback(lambda _: self.client_conn.requestBusName(
'org.test.foobar',
allowReplacement=False,
))
d.addCallback(lambda _: self.client_conn2.requestBusName(
'org.test.foobar',
replaceExisting=True,
doNotQueue=False,
errbackUnlessAcquired=False,
))
d.addCallback(lambda r: self.assertEquals(
r,
client.NAME_IN_QUEUE,
'Queue error'
))
d.addCallback(lambda _: self.client_conn.releaseBusName(
'org.test.foobar'
))
d.addCallback(lambda r: self.assertEquals(
r,
client.NAME_RELEASED,
'Failed to release name',
))
d.addCallback(delay)
d.addCallback(lambda _: self.assertTrue(
flags['sig'],
'Failed to acquire name after release',
))
def cleanup(x):
self.client_conn2.disconnect()
return x
d.addBoth(cleanup)
return d
def test_queued_name_replacement_with_errback(self):
d = self.cli2()
flags = {'sig': False}
def on_name_acq(sig):
self.assertEquals(sig.body[0], 'org.test.foobar')
flags['sig'] = True
d.addCallback(lambda _: self.client_conn2.addMatch(
on_name_acq,
member='NameAcquired',
))
d.addCallback(lambda _: self.client_conn.requestBusName(
'org.test.foobar',
allowReplacement=False,
))
d.addCallback(lambda _: self.client_conn2.requestBusName(
'org.test.foobar',
replaceExisting=True,
doNotQueue=False,
))
d.addErrback(lambda e: self.assertEquals(
e.getErrorMessage(),
'Failed to acquire bus name "org.test.foobar": Queued for name '
'acquisition'
))
d.addCallback(lambda _: self.client_conn.releaseBusName(
'org.test.foobar'
))
d.addCallback(lambda r: self.assertEquals(
r,
client.NAME_RELEASED,
'Failed to release name',
))
d.addCallback(delay)
d.addCallback(
lambda _: self.assertTrue(
flags['sig'],
'Failed to acquire name after release'))
def cleanup(x):
self.client_conn2.disconnect()
return x
d.addBoth(cleanup)
return d
def test_list_queued_owners(self):
d = self.cli2()
d.addCallback(lambda _: self.client_conn.requestBusName(
'org.test.foobar',
allowReplacement=False,
))
d.addCallback(lambda _: self.client_conn2.requestBusName(
'org.test.foobar',
replaceExisting=True,
doNotQueue=False,
errbackUnlessAcquired=False,
))
d.addCallback(lambda r: self.assertEquals(
r,
client.NAME_IN_QUEUE,
'Queue error',
))
d.addCallback(lambda _: self.client_conn2.listQueuedBusNameOwners(
'org.test.foobar',
))
d.addCallback(lambda r: self.assertEquals(
r,
[self.client_conn.busName, self.client_conn2.busName],
            'Bus Name Queue differs from expected value'
))
def cleanup(x):
self.client_conn2.disconnect()
return x
d.addBoth(cleanup)
return d
def test_empty_list_queued_owners(self):
d = self.client_conn.listQueuedBusNameOwners('org.test.foobar')
def on_err(e):
self.assertEquals(
'org.freedesktop.DBus.Error.NameHasNoOwner: Could not get '
'owners of name \'org.test.foobar\': no such name',
str(e.value),
)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
return d
# GetConnectionUnixUser
def test_get_connection_user(self):
try:
import pwd # noqa: This is acceptable, since it's kind of a hack.
except ImportError:
            raise SkipTest('This test is for Unix-like systems only')
d = self.cli2()
d.addCallback(lambda _: self.client_conn.requestBusName(
'org.test.foobar'
))
d.addCallback(lambda _: self.client_conn2.getConnectionUnixUser(
'org.test.foobar',
))
d.addCallback(lambda r: self.assertEquals(
r,
os.getuid(),
'Failed to get connection user id',
))
def cleanup(x):
self.client_conn2.disconnect()
return x
d.addBoth(cleanup)
return d
def test_bad_get_connection_user1(self):
try:
import pwd # noqa: This is acceptable, since it's kind of a hack.
except ImportError:
            raise SkipTest('This test is for Unix-like systems only')
d = self.cli2()
d.addCallback(lambda _: self.client_conn.requestBusName(
'org.test.foobar',
))
d.addCallback(lambda _: self.client_conn2.getConnectionUnixUser(
'org.MISSING_BUS_NAME',
))
def on_err(e):
self.assertEquals(
'org.freedesktop.DBus.Error.NameHasNoOwner: Could not get UID '
'of name \'org.MISSING_BUS_NAME\': no such name',
str(e.value),
)
d.addCallbacks(lambda _: self.fail('Call should have failed'), on_err)
def cleanup(x):
self.client_conn2.disconnect()
return x
d.addBoth(cleanup)
return d
# Multiple UNIX_FD arguments require Twisted 17.1.0 or later.
_multiFD = twisted.version >= type(twisted.version)('twisted', 17, 1, 0)
_multiFDmsg = 'Multi FD arg support requires Twisted 17.1.0 or later.'
class UnixFDArgumentsTester(ServerObjectTester):
class TestClass(objects.DBusObject):
def dbus_readFD(self, fd):
f = os.fdopen(fd, 'rb')
result = f.read()
f.close()
return bytearray(result)
def dbus_concatReadFDs(self, fd1, fd2):
f1 = os.fdopen(fd1, 'rb')
f2 = os.fdopen(fd2, 'rb')
results = f1.read(), f2.read()
f1.close()
f2.close()
return bytearray(b''.join(results))
_dbusInterfaceArgs = [
'org.txdbus.trial.UnixFDArgumentsTester',
Method('readFD', arguments='h', returns='ay'),
]
if _multiFD:
_dbusInterfaceArgs.append(
Method('concatReadFDs', arguments='hh', returns='ay'),
)
dbusInterfaces = [DBusInterface(*_dbusInterfaceArgs)]
def _create_temp_file(self, payload):
fd, filename = tempfile.mkstemp(prefix='txdbus-test-')
self.addCleanup(os.unlink, filename)
os.write(fd, payload)
os.close(fd)
return filename
@defer.inlineCallbacks
def test_call_with_one_UNIX_FD_arg(self):
PAYLOAD = b'file contents'
filename = self._create_temp_file(PAYLOAD)
ro = yield self.get_proxy()
with open(filename, 'rb') as f:
result = yield ro.callRemote('readFD', f.fileno())
self.assertEqual(PAYLOAD, bytearray(result))
@defer.inlineCallbacks
def test_call_with_two_UNIX_FD_args(self):
PAYLOAD_1 = b'0123456789'
PAYLOAD_2 = b'abcdefghij'
filename1 = self._create_temp_file(PAYLOAD_1)
filename2 = self._create_temp_file(PAYLOAD_2)
ro = yield self.get_proxy()
with open(filename1, 'rb') as f1, open(filename2, 'rb') as f2:
result = yield ro.callRemote(
'concatReadFDs',
f1.fileno(),
f2.fileno(),
)
self.assertEqual(PAYLOAD_1 + PAYLOAD_2, bytearray(result))
if not _multiFD:
test_call_with_two_UNIX_FD_args.skip = _multiFDmsg
|
# Generated by Django 2.0 on 2018-01-19 00:56
from django.db import migrations, models
import django.db.models.deletion
import review.models
class Migration(migrations.Migration):
dependencies = [
('review', '0002_auto_20171228_2218'),
]
operations = [
migrations.CreateModel(
name='Rating',
fields=[
('rating_id', models.AutoField(primary_key=True, serialize=False)),
('label', review.models.EnumCharField([('programming', 'Programming Ability'), ('machine_learning', 'Stats/Machine Learning Ability'), ('data_handling', 'Data Handling/Manipulation Skills'), ('interest_in_good', 'Interest in Social Good'), ('communication', 'Communication Ability'), ('teamwork', 'Would this person work well in a team?'), ('overall', 'Overall Recommendation')])),
('score', models.IntegerField()),
],
options={
'db_table': 'review_rating',
},
),
migrations.AddField(
model_name='review',
name='interview_suggestions',
field=models.TextField(blank=True, help_text="In an interview with this candidate, what should we focus on? Any red flags? Anything you would ask them about that's hard to judge from the application and references?"),
),
migrations.AddField(
model_name='review',
name='would_interview',
field=models.BooleanField(default=None, help_text='Would you like to interview this applicant?'),
preserve_default=False,
),
migrations.AlterField(
model_name='application',
name='applicant',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='applications', to='review.Applicant'),
),
migrations.AlterField(
model_name='review',
name='comments',
field=models.TextField(blank=True, help_text='Any comments on your recommendation?'),
),
migrations.AddField(
model_name='rating',
name='review',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='review.Review'),
),
migrations.AlterUniqueTogether(
name='rating',
unique_together={('review', 'label')},
),
]
|
# One of the easiest ways to back up files is with an incremental backup. This method only backs up files that have changed since the last backup.
# You are given a list of changes that were made to the files in your database, where for each valid i, changes[i][0] is the timestamp of the time the change was made, and changes[i][1] is the file id.
# Knowing the timestamp of the last backup lastBackupTime, your task is to find the files which should be included in the next backup. Return the ids of the files that should be backed up as an array sorted in ascending order.
# Example
# For lastBackupTime = 461620205 and
# changes = [[461620203, 1],
# [461620204, 2],
# [461620205, 6],
# [461620206, 5],
# [461620207, 3],
# [461620207, 5],
# [461620208, 1]
# the output should be
# incrementalBackups(lastBackupTime, changes) = [1, 3, 5].
# Here's how the answer is calculated:
# File with id = 1 was changed at 461620203 and 461620208, and since the last backup was at 461620205, it should be included in the next backup.
# File with id = 2 was changed only at 461620204, so there's no need to back it up.
# File with id = 3 was changed at 461620207, so it should be backed up next time.
# File with id = 5 was changed at 461620206 and 461620207, so it should be included in the new backup as well.
# File with id = 6 was changed at 461620205, so it can be ignored.
# [input] integer lastBackupTime
# A non-negative integer, the timestamp of the previous backup.
# [input] array.array.integer changes
# Array of changes sorted lexicographically, where each change is given in the format as described above.
# It is guaranteed that for each valid i, 0 ≤ changes[i][0] ≤ 10^9 and 0 ≤ changes[i][1] ≤ 100.
# If for some i changes[i][0] = lastBackupTime, assume that the ith file was successfully backed up during the last backup.
# [output] array.integer
# Array of ids of the changes that should be backed up next time, sorted in ascending order.
def incrementalBackups(lastBackupTime, changes):
if changes == []:
return []
sorted_list = sorted(changes, key=lambda data:data[1])
result_list = []
for data in sorted_list:
if lastBackupTime < data[0]:
if data[1] not in result_list:
result_list.append(data[1])
return result_list
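# A quick self-check against the worked example from the problem statement above
# (illustrative only; not part of the required solution).
if __name__ == "__main__":
    example_changes = [
        [461620203, 1], [461620204, 2], [461620205, 6], [461620206, 5],
        [461620207, 3], [461620207, 5], [461620208, 1],
    ]
    assert incrementalBackups(461620205, example_changes) == [1, 3, 5]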
|
'''
Change according to your local database set up.
'''
DATABASE_NAME = 'tpch'
DATABASE_USER = 'postgres'
DATABASE_PASSWORD = 'root'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A usdview plugin that can auto-reload the stage whenever there are layer changes."""
# IMPORT FUTURE LIBRARIES
from __future__ import print_function
# IMPORT STANDARD LIBRARIES
import datetime
import functools
import logging
import sys
# IMPORT THIRD-PARTY LIBRARIES
from pxr import Tf
from pxr.Usdviewq import plugin
LOGGER = logging.getLogger('auto_reloader')
_HANDLER = logging.StreamHandler(sys.stdout)
_FORMATTER = logging.Formatter(
"%(asctime)s [%(levelname)s] %(module)s: %(message)s", datefmt="%m/%d/%Y %H:%M:%S"
)
_HANDLER.setFormatter(_FORMATTER)
LOGGER.addHandler(_HANDLER)
LOGGER.setLevel(logging.INFO)
WAS_INITIALIZED = False
IS_ENABLED = False
class AutoReloaderContainer(plugin.PluginContainer):
"""The main registry class that initializes and runs the Auto-Reloader plugin."""
def _toggle_reload_and_setup_reload(self, viewer):
"""Create the Auto-Reloader and set it to enabled.
If the Auto-Reloader already exists then disable it.
Args:
viewer (`pxr.Usdviewq.usdviewApi.UsdviewApi`):
The USD API object that is used to communicate with usdview.
"""
global WAS_INITIALIZED
global IS_ENABLED
try:
from PySide2 import QtCore
except ImportError:
from PySide import QtCore
IS_ENABLED = not IS_ENABLED
if WAS_INITIALIZED:
LOGGER.debug("The timer was already created. Nothing left to do here.")
return
# Add `timer` to this instance to keep it from going out of scope
self.timer = QtCore.QTimer()
self.timer.timeout.connect(functools.partial(reload_layers, viewer))
self.timer.start(500)
WAS_INITIALIZED = True
def registerPlugins(self, registry, _):
"""Add this Auto-Reloader plugin to usdview on-startup.
Args:
registry (`pxr.Usdviewq.plugin.PluginRegistry`):
The USD-provided object that this plugin will be added to.
"""
self._toggle_auto_reload_command = registry.registerCommandPlugin(
"AutoReloaderContainer.printMessage",
"Toggle Auto-Reload USD Stage",
self._toggle_reload_and_setup_reload,
)
def configureView(self, _, builder):
"""Add a new menu item for the Auto-Reload function."""
menu = builder.findOrCreateMenu("Reloader")
menu.addItem(self._toggle_auto_reload_command)
def reload_layers(viewer):
"""Reload every layer for the given viewer/Stage.
Reference:
https://groups.google.com/d/msg/usd-interest/w3-KivsOuTE/psDcH9p-AgAJ
Args:
viewer (`pxr.Usdviewq.usdviewApi.UsdviewApi`):
The USD API object that is used to communicate with usdview.
"""
if not IS_ENABLED:
LOGGER.debug("Layer auto-reload is disabled.")
return
    # Anonymous layers have no backing file, so skip them; filtering before the
    # list comprehension also keeps the zip() below aligned with the results.
    layers = [layer for layer in viewer.stage.GetUsedLayers() if not layer.anonymous]
    reloaded = [layer.Reload() for layer in layers]
if not any(reloaded):
return
LOGGER.info('Layer reloaded at "%s"', datetime.datetime.now())
for layer, reloaded in zip(layers, reloaded):
if reloaded:
LOGGER.info(' "%s"', layer.identifier)
Tf.Type.Define(AutoReloaderContainer)
|
import os
from metaappscriptsdk import MetaApp
META = MetaApp(meta_url="http://localhost:8080")
log = META.log
os.chdir(os.path.dirname(os.path.abspath(__file__)))
__DIR__ = os.getcwd() + "/"
configuration = {
"download": {
"dbQuery": {
"command": "SELECT * FROM products WHERE productname ilike '%Gula%' LIMIT 1000"
}
}
}
db = META.db("nw")
schema_data = db.schema_data(configuration)
print(u"schema_data = %s" % str(schema_data))
|
import sys
import requests
import re
# Parses all urls and returns a sorted list of all found email addresses.
def find_addresses(urls):
addresses = []
for url in urls:
response = requests.get(url)
txt = response.text
found_addresses = re.findall(r'\b[a-zA-Z0-9.-]+@[a-zA-Z0-9.-]+\b', txt)
for address in found_addresses:
if address not in addresses:
addresses.append(address)
return sorted(addresses)
if __name__ == "__main__":
urls = sys.argv[1:]
email_addresses = find_addresses(urls)
for address in email_addresses:
sys.stdout.write(address + '\n')
print "Found {0} unique email addresses!".format(len(email_addresses))
|
from abc import ABCMeta, abstractmethod
from atexit import register
from os.path import isfile
from pickle import dump, load
import googlemaps
class AddressGeocoder(metaclass=ABCMeta):
def __call__(self, string):
"""
Geocode address given as string.
"""
return self.geocode(string)
@abstractmethod
def geocode(self, string):
"""
Geocode given street address string.
"""
class GoogleMapsAddressGeocoder(AddressGeocoder):
def __init__(self, api_key, cache_path=None):
"""
Instantiate a new google maps geocoder
:param api_key: {str} geocoding approved Google API key
:param cache_path: {str} optional path to specify if caching query responses
is desired. Make sure to account for the Maps TOS before using this local
data. Note that caching is not process safe, so if parallelization
is desired, use multi-threading instead of multi-processing.
"""
self.api_key = api_key
self.cache_path = cache_path
self.cache = self.load_cache(cache_path)
if self.cache_path is not None:
register(self.save_cache)
@property
def client(self):
if not getattr(self, "_client", None):
self._client = googlemaps.Client(key=self.api_key)
return self._client
def geocode(self, string):
if string in self.cache:
return self.cache[string]
self.cache[string] = self.client.geocode(string)
return self.cache[string]
def load_cache(self, path):
"""
Loads the pickle file that contains our cache
"""
if path is None or not isfile(path):
return {}
with open(path, "rb") as file:
return load(file)
def save_cache(self):
"""
Save the cache to disk
Clients should explicitly call this method if they want to make sure the cache
is saved, since there are some catches with relying on registering application
exit functions
"""
if self.cache_path is None:
raise ValueError("No cache specified during initialization")
with open(self.cache_path, "wb") as file:
dump(self.cache, file)
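# Minimal usage sketch. "YOUR_API_KEY" and the cache filename are placeholders,
# not values taken from this project; a real geocoding-enabled key is required.
if __name__ == "__main__":
    geocoder = GoogleMapsAddressGeocoder("YOUR_API_KEY", cache_path="geocode_cache.pkl")
    print(geocoder("1600 Amphitheatre Parkway, Mountain View, CA"))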
|
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/ExplanationOfBenefit
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import explanationofbenefit
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class ExplanationOfBenefitTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
js = json.load(handle)
self.assertEqual("ExplanationOfBenefit", js["resourceType"])
return explanationofbenefit.ExplanationOfBenefit(js)
def testExplanationOfBenefit1(self):
inst = self.instantiate_from("explanationofbenefit-example.json")
self.assertIsNotNone(
inst, "Must have instantiated a ExplanationOfBenefit instance"
)
self.implExplanationOfBenefit1(inst)
js = inst.as_json()
self.assertEqual("ExplanationOfBenefit", js["resourceType"])
inst2 = explanationofbenefit.ExplanationOfBenefit(js)
self.implExplanationOfBenefit1(inst2)
def implExplanationOfBenefit1(self, inst):
self.assertEqual(inst.careTeam[0].sequence, 1)
self.assertEqual(inst.created.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.created.as_json(), "2014-08-16")
self.assertEqual(
force_bytes(inst.disposition), force_bytes("Claim settled as per contract.")
)
self.assertEqual(force_bytes(inst.id), force_bytes("EB3500"))
self.assertEqual(
force_bytes(inst.identifier[0].system),
force_bytes("http://www.BenefitsInc.com/fhir/explanationofbenefit"),
)
self.assertEqual(
force_bytes(inst.identifier[0].value), force_bytes("987654321")
)
self.assertEqual(
force_bytes(inst.item[0].adjudication[0].amount.code), force_bytes("USD")
)
self.assertEqual(
force_bytes(inst.item[0].adjudication[0].amount.system),
force_bytes("urn:iso:std:iso:4217"),
)
self.assertEqual(inst.item[0].adjudication[0].amount.value, 120.0)
self.assertEqual(
force_bytes(inst.item[0].adjudication[0].category.coding[0].code),
force_bytes("eligible"),
)
self.assertEqual(
force_bytes(inst.item[0].adjudication[1].category.coding[0].code),
force_bytes("eligpercent"),
)
self.assertEqual(inst.item[0].adjudication[1].value, 0.8)
self.assertEqual(
force_bytes(inst.item[0].adjudication[2].amount.code), force_bytes("USD")
)
self.assertEqual(
force_bytes(inst.item[0].adjudication[2].amount.system),
force_bytes("urn:iso:std:iso:4217"),
)
self.assertEqual(inst.item[0].adjudication[2].amount.value, 96.0)
self.assertEqual(
force_bytes(inst.item[0].adjudication[2].category.coding[0].code),
force_bytes("benefit"),
)
self.assertEqual(inst.item[0].careTeamLinkId[0], 1)
self.assertEqual(force_bytes(inst.item[0].net.code), force_bytes("USD"))
self.assertEqual(
force_bytes(inst.item[0].net.system), force_bytes("urn:iso:std:iso:4217")
)
self.assertEqual(inst.item[0].net.value, 135.57)
self.assertEqual(inst.item[0].sequence, 1)
self.assertEqual(
force_bytes(inst.item[0].service.coding[0].code), force_bytes("1200")
)
self.assertEqual(
force_bytes(inst.item[0].service.coding[0].system),
force_bytes("http://hl7.org/fhir/service-uscls"),
)
self.assertEqual(inst.item[0].servicedDate.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.item[0].servicedDate.as_json(), "2014-08-16")
self.assertEqual(force_bytes(inst.item[0].unitPrice.code), force_bytes("USD"))
self.assertEqual(
force_bytes(inst.item[0].unitPrice.system),
force_bytes("urn:iso:std:iso:4217"),
)
self.assertEqual(inst.item[0].unitPrice.value, 135.57)
self.assertEqual(
force_bytes(inst.outcome.coding[0].code), force_bytes("complete")
)
self.assertEqual(
force_bytes(inst.outcome.coding[0].system),
force_bytes("http://hl7.org/fhir/remittance-outcome"),
)
self.assertEqual(
force_bytes(inst.payee.type.coding[0].code), force_bytes("provider")
)
self.assertEqual(
force_bytes(inst.payee.type.coding[0].system),
force_bytes("http://hl7.org/fhir/payeetype"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("active"))
self.assertEqual(
force_bytes(inst.text.div),
force_bytes(
'<div xmlns="http://www.w3.org/1999/xhtml">A human-readable rendering of the ExplanationOfBenefit</div>'
),
)
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.totalBenefit.code), force_bytes("USD"))
self.assertEqual(
force_bytes(inst.totalBenefit.system), force_bytes("urn:iso:std:iso:4217")
)
self.assertEqual(inst.totalBenefit.value, 96.0)
self.assertEqual(force_bytes(inst.totalCost.code), force_bytes("USD"))
self.assertEqual(
force_bytes(inst.totalCost.system), force_bytes("urn:iso:std:iso:4217")
)
self.assertEqual(inst.totalCost.value, 135.57)
self.assertEqual(force_bytes(inst.type.coding[0].code), force_bytes("oral"))
self.assertEqual(
force_bytes(inst.type.coding[0].system),
force_bytes("http://hl7.org/fhir/ex-claimtype"),
)
|
################################################################################
# MIT License
#
# Copyright (c) 2017 Jean-Charles Fosse & Johann Bigler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
################################################################################
from twisted.internet.defer import inlineCallbacks, returnValue
from base import Query
class UpdateQuery(Query):
"""
Object representing an update query
"""
def __init__(self, model_class, values):
super(UpdateQuery, self).__init__(model_class)
# Values to update
self.values = values
self.return_id = self.model_class._meta.primary_key
@inlineCallbacks
def execute(self):
query, values = self.database.generate_update(self)
# If return id. Use runQuery else use runOperation
if self.return_id:
result = yield self.database.runQuery(query, values)
if result and self.model_class._meta.primary_key:
returnValue(result[0][0])
else:
yield self.database.runOperation(query, values)
returnValue(None)
|
def fragment_1(expr, var):
retn = []
for items in expr.struct:
if var in str(items.power_list()):
cf, va = items.__extract__
for k, v in va.items():
if var in str(v):
retn.append(k)
    # Parenthesize the tuple so the conditional guards the whole return value;
    # the unparenthesized form evaluated cf * retn[0] even when retn was empty,
    # raising an IndexError.
    return (cf * retn[0], v) if retn else None
|
#!/usr/bin/env python
# coding=utf-8
"""
Ingest data from the command-line.
"""
from __future__ import absolute_import
import os
from datetime import datetime
from pathlib import Path
from dateutil.tz import tzutc
from dateutil.relativedelta import relativedelta
import click
from sqlalchemy.exc import DBAPIError
from datacube.ui import click as ui
from datacube.ui.click import CLICK_SETTINGS
from datacube.model import Range
from datacube.ingest import index_datasets, store_datasets
from datacube.storage.storage import stack_storage_units
from datacube.storage import tile_datasets_with_storage_type
PASS_INDEX = ui.pass_index(app_name='datacube-ingest')
@click.group(help="Data Management Tool", context_settings=CLICK_SETTINGS)
@ui.global_cli_options
def cli():
pass
@cli.command('stack', help='Stack storage units')
@ui.executor_cli_options
@click.argument('types', nargs=-1)
@PASS_INDEX
def stack(index, executor, types):
if not types:
storage_types = index.storage.types.get_all()
else:
storage_types = [index.storage.types.get_by_name(name) for name in types]
tasks = []
for storage_type in storage_types:
# TODO: figure out start date - search ordered by time, get first, round down
start_date = datetime(1986, 1, 1, tzinfo=tzutc())
# TODO: figure out end date - now, round down
end_date = datetime(2016, 1, 1, tzinfo=tzutc())
tasks += list(_stack_storage_type(storage_type, start_date, end_date, index))
stacked = executor.map(_do_stack, tasks)
for (storage_units, filename), stacked in zip(tasks, (executor.result(s) for s in stacked)):
index.storage.replace(storage_units, stacked)
for storage_unit in storage_units:
os.unlink(str(storage_unit.local_path))
def _stack_storage_type(storage_type, start_date, end_date, index):
period, date_format = {
'year': (relativedelta(years=1), '%Y'),
'month': (relativedelta(months=1), '%Y%m'),
}[storage_type.aggregation_period]
# TODO: order by time will remove the need to run multiple searches
while start_date < end_date:
storage_units_by_tile_index = {}
for storage_unit in index.storage.search(type=storage_type.id, time=Range(start_date, start_date + period)):
storage_units_by_tile_index.setdefault(storage_unit.tile_index, []).append(storage_unit)
for tile_index, storage_units in storage_units_by_tile_index.items():
if len(storage_units) < 2:
continue
storage_units.sort(key=lambda su: su.coordinates['time'].begin)
filename = storage_type.generate_uri(tile_index=tile_index,
start_time=start_date.strftime(date_format),
end_time=(start_date + period).strftime(date_format))
yield (storage_units, filename)
start_date += period
def _do_stack(task):
storage_units, filename = task
return [stack_storage_units(storage_units, filename)]
@cli.command('check', help='Check database consistency')
@click.option('--check-index', is_flag=True, default=True, help="check that datasets have all possible storage unit")
@click.option('--check-storage', is_flag=True, default=True, help="check that storage units have valid filepaths")
@click.option('--check-overlaps', is_flag=True, default=False, help="check that storage units don't overlap (long)")
@click.argument('types', nargs=-1)
@PASS_INDEX
def check(index, check_index, check_storage, check_overlaps, types):
if not types:
storage_types = index.storage.types.get_all()
else:
storage_types = [index.storage.types.get_by_name(name) for name in types]
for storage_type in storage_types:
click.echo('Checking %s' % storage_type.name)
if check_overlaps:
            click.echo('Overlaps. Might take a REALLY long time...')
try:
overlaps = list(index.storage.get_overlaps(storage_type))
                click.echo('%s overlapping storage units' % len(overlaps))
except DBAPIError:
click.echo('Failed to get overlaps! cube extension might not be loaded')
if check_index:
missing_units = 0
datasets = index.datasets.search_by_metadata(storage_type.document['match']['metadata'])
for dataset in datasets:
tiles = tile_datasets_with_storage_type([dataset], storage_type)
storage_units = index.storage.search(index.datasets.get_field('id') == dataset.id)
for storage_unit in storage_units:
tiles.pop(storage_unit.tile_index)
# if tiles:
# click.echo('%s missing units %s' % (dataset, tiles.keys()))
missing_units += len(tiles)
click.echo('%s missing storage units' % missing_units)
if check_storage:
missing_files = 0
for storage_unit in index.storage.search(type=storage_type.id):
if not storage_unit.local_path.exists():
missing_files += 1
click.echo('%s missing storage unit files' % missing_files)
@cli.command('ingest', help="Ingest datasets into the Data Cube.")
@ui.executor_cli_options
@click.option('--no-storage', is_flag=True, help="Don't create storage units")
@click.argument('datasets',
type=click.Path(exists=True, readable=True, writable=False),
nargs=-1)
@PASS_INDEX
def ingest(index, executor, datasets, no_storage):
indexed_datasets = []
for dataset_path in datasets:
indexed_datasets += index_datasets(Path(dataset_path), index=index)
if not no_storage:
store_datasets(indexed_datasets, index=index, executor=executor)
if __name__ == '__main__':
cli()
|
#!/usr/bin/python
# LICENSE UPL 1.0
#
# Copyright (c) 2020,2021 Oracle and/or its affiliates.
#
# Since: January, 2020
# Author: sanjay.singh@oracle.com, paramdeep.saini@oracle.com
from oralogger import *
from oraenv import *
from oracommon import *
from oramachine import *
import os
import sys
class OraMachine:
"""
    This class sets up the compute before starting the installation.
"""
def __init__(self,oralogger,orahandler,oraenv,oracommon):
"""
        Constructor of the OraMachine class; sets up the compute.
Attributes:
oralogger (object): object of OraLogger Class.
ohandler (object): object of Handler class.
oenv (object): object of singleton OraEnv class.
ocommon(object): object of OraCommon class.
ora_env_dict(dict): Dict of env variable populated based on env variable for the setup.
file_name(string): Filename from where logging message is populated.
"""
self.ologger = oralogger
self.ohandler = orahandler
self.oenv = oraenv.get_instance()
self.ocommon = oracommon
self.ora_env_dict = oraenv.get_env_vars()
self.file_name = os.path.basename(__file__)
def setup(self):
"""
        This function sets up the compute before starting the installation
"""
msg="Machine setup completed sucessfully!"
self.ocommon.log_info_message(msg,self.file_name)
|
from .apply import *
from .listing import Listing
from .profile import Profile
from .profile import EditProfile
|
"""Contains tools that approximate various functions involving limits."""
from math import log, factorial
from pyutils.math import EULER_MASC
def derivative(func, x, delta_x):
"""Approximates the derivative of a function at 'x' using its limit
definition."""
dividend = func(x + delta_x) - func(x)
deriv = dividend / delta_x
return deriv
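# Example (illustrative): the derivative of x**2 at x = 3 is 6, so
# derivative(lambda x: x ** 2, 3.0, 1e-6) returns approximately 6.000001.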
def def_integral(func, a, b, n):
"""Approximates the definite integral of a function over the interval
[a, b] via Riemann sums."""
delta_x = (b - a) / n
c = a
    total = 0
    # Left Riemann sum: n rectangles of width delta_x starting at a
    for _ in range(n):
        total += func(c) * delta_x
        c += delta_x
return total
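# Example (illustrative): the integral of x**2 over [0, 1] is 1/3, so
# def_integral(lambda x: x ** 2, 0.0, 1.0, 1000) returns roughly 0.333.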
# Figure out how to pass in initial terms. (Perhaps lambda closures?)
def taylor_series(func, start):
"""Generates a function that finds the partial sum of a Taylor series
with a given number of terms.
'func' should have three parameters: the first being the center of
the series, the second being the main argument, and the third being
the number of terms to iterate over (the variable at the top of the
summation symbol). 'start' is the initial index (the variable at the
bottom of the summation symbol).
"""
def partial_sum(c, x, n):
"""Finds the partial sum of 'n' terms of this Taylor series,
which is centered at 'c' and evaluated at 'x'."""
total = 0
for i in range(start, start + n + 1):
total += func(c, x, i)
return total
return partial_sum
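# Example (illustrative): the Maclaurin series for e**x has terms x**i / i!,
# so the per-term function can ignore the center 'c'.
# exp_series = taylor_series(lambda c, x, i: x ** i / factorial(i), 0)
# exp_series(0, 1.0, 10) returns approximately 2.7182818 (e).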
def li(x, n):
"""Approximates the logarithmic integral of 'x' using an infinite
series approximation discovered by Ramanujan."""
u = log(x)
result = EULER_MASC + log(u)
for i in range(1, n+1):
result += u**i / (i * factorial(i))
return result
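# Example (illustrative): li(10, 20) returns approximately 6.1656, the classical
# value of the logarithmic integral at x = 10.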
|
__version__ = "0.0.1"
from ._writer import write_single_image, write_multiple
from ._widget import zarr_writer_widget
|
from modulos.passwordgeneretor import PasswordGenerator as Pass_Generetor
ask_user = input("""
Opção MENU:
a. fraca
b. media
c. forte
d. insana
>> """)
Pass_Generetor = Pass_Generetor(ask_user, input("Numero: "))
print(Pass_Generetor.pass_all())
|
from sqlalchemy import Column, Integer, String
from Agent import Base, Session
class APIVersion(Base):
"""description of class"""
__tablename__ = 'agent_apiversions'
Id = Column(Integer, primary_key = True)
DeploymentName = Column(String)
ProductName = Column(String)
VersionName = Column(String)
RealTimePredictAPI = Column(String)
TrainModelAPI = Column(String)
BatchInferenceAPI = Column(String)
DeployModelAPI = Column(String)
AuthenticationType = Column(String)
CreatedTime = Column(String)
LastUpdatedTime = Column(String)
VersionSourceType = Column(String)
ProjectFileUrl = Column(String)
AMLWorkspaceId = Column(Integer)
AuthenticationKeySecretName = Column(String)
PublisherId = Column(String)
ConfigFile = Column(String)
@staticmethod
def Get(productName, deploymentName, versionName, publisherId):
session = Session()
version = session.query(APIVersion).filter_by(ProductName = productName, DeploymentName = deploymentName, VersionName = versionName, PublisherId = publisherId).first()
session.close()
return version
@staticmethod
def MergeWithDelete(apiVersions, publisherId):
session = Session()
try:
dbAPIVersions = session.query(APIVersion).all()
for dbAPIVersion in dbAPIVersions:
if dbAPIVersion.PublisherId.lower() != publisherId.lower():
                    continue
# If the subscription is removed in the control plane, remove it from the agent
try:
next(item for item in apiVersions if
item["DeploymentName"] == dbAPIVersion.DeploymentName
and item["ProductName"] == dbAPIVersion.ProductName
and item["VersionName"] == dbAPIVersion.VersionName
and item["PublisherId"].lower() == dbAPIVersion.PublisherId.lower())
except StopIteration:
session.delete(dbAPIVersion)
for apiVersion in apiVersions:
dbAPIVersion = session.query(APIVersion).filter_by(ProductName = apiVersion["ProductName"],
DeploymentName = apiVersion["DeploymentName"],
VersionName = apiVersion["VersionName"],
PublisherId = apiVersion["PublisherId"]).first()
if dbAPIVersion:
dbAPIVersion.LastUpdatedTime = apiVersion["LastUpdatedTime"]
dbAPIVersion.ConfigFile = apiVersion["ConfigFile"]
else:
dbAPIVersion = APIVersion(**apiVersion)
session.add(dbAPIVersion)
session.commit()
except Exception as e:
session.rollback()
raise
finally:
session.close()
|
def test_checkout(app):
app.cart.add_and_remove_items(3)
|
"""This is the Bokeh charts interface. It gives you a high level API to build
complex plot is a simple way.
This is the Bar class which lets you build your Bar charts just passing
the arguments to the Chart class and calling the proper functions.
It also add a new chained stacked method.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2014, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import, print_function, division
from .._builder import Builder, create_and_build
from ...models import FactorRange, Range1d
from ..glyphs import BarGlyph
from ...properties import Float, Enum
from .._properties import Dimension
from .._attributes import ColorAttr, GroupAttr
from ..operations import Stack, Dodge
from ...enums import Aggregation
from ..stats import stats
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def Bar(data, label=None, values=None, color=None, stack=None, group=None, agg="sum", xscale="categorical", yscale="linear",
xgrid=False, ygrid=True, continuous_range=None, **kw):
""" Create a Bar chart using :class:`BarBuilder <bokeh.charts.builder.bar_builder.BarBuilder>`
render the geometry from values, cat and stacked.
Args:
values (iterable): iterable 2d representing the data series
values matrix.
cat (list or bool, optional): list of string representing the categories.
(Defaults to None)
stacked (bool, optional): to see the bars stacked or grouped.
(Defaults to False, so grouping is assumed)
continuous_range(Range1d, optional): Custom continuous_range to be
used. (Defaults to None)
    In addition to the parameters specific to this chart,
:ref:`userguide_charts_generic_arguments` are also accepted as keyword parameters.
Returns:
a new :class:`Chart <bokeh.charts.Chart>`
Examples:
.. bokeh-plot::
:source-position: above
from collections import OrderedDict
from bokeh.charts import Bar, output_file, show
# (dict, OrderedDict, lists, arrays and DataFrames are valid inputs)
xyvalues = OrderedDict()
xyvalues['python']=[-2, 5]
xyvalues['pypy']=[12, 40]
xyvalues['jython']=[22, 30]
cat = ['1st', '2nd']
bar = Bar(xyvalues, cat, title="Stacked bars",
xlabel="category", ylabel="language")
output_file("stacked_bar.html")
show(bar)
"""
if continuous_range and not isinstance(continuous_range, Range1d):
raise ValueError(
"continuous_range must be an instance of bokeh.models.ranges.Range1d"
)
# The continuous_range is the y_range (until we implement HBar charts)
y_range = continuous_range
kw['label'] = label
kw['values'] = values
kw['color'] = color
kw['stack'] = stack
kw['group'] = group
kw['agg'] = agg
kw['xscale'] = xscale
kw['yscale'] = yscale
kw['xgrid'] = xgrid
kw['ygrid'] = ygrid
kw['y_range'] = y_range
return create_and_build(BarBuilder, data, **kw)
class BarBuilder(Builder):
"""This is the Bar class and it is in charge of plotting
Bar chart (grouped and stacked) in an easy and intuitive way.
Essentially, it provides a way to ingest the data, make the proper
calculations and push the references into a source object.
We additionally make calculations for the ranges.
And finally add the needed glyphs (rects) taking the references
from the source.
The x_range is categorical, and is made either from the cat argument
or from the indexes of the passed values if no cat is supplied. The
y_range can be supplied as the parameter continuous_range,
or will be calculated as a linear range (Range1d) based on the supplied
values using the following rules:
* with all positive data: start = 0, end = 1.1 * max
* with all negative data: start = 1.1 * min, end = 0
* with mixed sign data: start = 1.1 * min, end = 1.1 * max
"""
# ToDo: add label back as a discrete dimension
values = Dimension('values')
dimensions = ['values']
#req_dimensions = [['values']]
default_attributes = {'label': GroupAttr(),
'color': ColorAttr(),
'stack': GroupAttr(),
'group': GroupAttr()}
agg = Enum(Aggregation, default='sum')
max_height = Float(1.0)
min_height = Float(0.0)
bar_width = Float(default=0.8)
fill_alpha = Float(default=0.8)
glyph = BarGlyph
label_attributes = ['stack', 'group']
def _setup(self):
if self.attributes['color'].columns is None:
if self.attributes['stack'].columns is not None:
self.attributes['color'].set_columns(self.attributes['stack'].columns)
if self.attributes['group'].columns is not None:
self.attributes['color'].set_columns(self.attributes['group'].columns)
# ToDo: perform aggregation validation
# Not given values kw, so using only categorical data
if self.values.dtype.name == 'object' and len(self.attribute_columns) == 0:
# agg must be count
self.agg = 'count'
self.attributes['label'].set_columns(self.values.selection)
else:
pass
if self.xlabel is None:
if self.attributes['label'].columns is not None:
self.xlabel = str(', '.join(self.attributes['label'].columns).title()).title()
elif self.values.selection is not None:
selected_value_cols = self.values.selection
if not isinstance(selected_value_cols, list):
selected_value_cols = [selected_value_cols]
self.xlabel = str(', '.join(selected_value_cols).title()).title()
if self.ylabel is None:
if not self.values.computed:
self.ylabel = '%s( %s )' % (self.agg.title(), str(self.values.selection).title())
else:
self.ylabel = '%s( %s )' % (self.agg.title(), ', '.join(self.attributes['label'].columns).title())
def _set_ranges(self):
"""Push the Bar data into the ColumnDataSource and calculate
the proper ranges.
"""
x_items = self.attributes['label']._items
x_labels = []
# Items are identified by tuples. If the tuple has a single value, we unpack it
for item in x_items:
item = self.get_label(item)
x_labels.append(str(item))
self.x_range = FactorRange(factors=x_labels)
y_shift = 0.1 * ((self.max_height + self.max_height) / 2)
if self.glyph == BarGlyph:
start = 0.0
else:
start = self.min_height - y_shift
self.y_range = Range1d(start=start, end=self.max_height + y_shift)
def get_extra_args(self):
return {}
def _yield_renderers(self):
"""Use the rect glyphs to display the bars.
Takes reference points from data loaded at the ColumnDataSource.
"""
kwargs = self.get_extra_args()
for group in self._data.groupby(**self.attributes):
bg = self.glyph(label=self.get_label(group['label']),
values=group.data[self.values.selection].values,
agg=stats[self.agg](),
width=self.bar_width,
color=group['color'],
fill_alpha=self.fill_alpha,
stack_label=self.get_label(group['stack']),
dodge_label=self.get_label(group['group']),
**kwargs)
self.add_glyph(group, bg)
Stack().apply(self.comp_glyphs)
Dodge().apply(self.comp_glyphs)
        # The bar chart also keeps track of the max and min heights across all bars
self.max_height = max([renderer.y_max for renderer in self.comp_glyphs])
self.min_height = min([renderer.y_min for renderer in self.comp_glyphs])
for renderer in self.comp_glyphs:
for sub_renderer in renderer.renderers:
yield sub_renderer
|
from __future__ import absolute_import
import numpy as np
from sklearn.metrics import roc_auc_score
from sklearn.metrics import average_precision_score
def auc(heatmap, onehot_im, is_im=True):
if is_im:
auc_score = roc_auc_score(np.reshape(onehot_im,onehot_im.size), np.reshape(heatmap,heatmap.size))
else:
auc_score = roc_auc_score(onehot_im, heatmap)
return auc_score
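# Example (illustrative): perfectly separated scores yield an AUC of 1.0, e.g.
# auc(np.array([0.9, 0.8, 0.2, 0.1]), np.array([1, 1, 0, 0]), is_im=False) -> 1.0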
def ap(label, pred):
return average_precision_score(label, pred)
def argmax_pts(heatmap):
idx = np.unravel_index(heatmap.argmax(), heatmap.shape)
pred_y, pred_x = map(float,idx)
return pred_x, pred_y
def L2_dist(p1, p2):
return np.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2)
|
import sys
from pyroll.core import RollPass
@RollPass.hookspec
def wusatowski_temperature_coefficient(roll_pass):
"""Gets the Wusatowski spreading model temperature coefficient a."""
@RollPass.hookspec
def wusatowski_velocity_coefficient(roll_pass):
"""Gets the Wusatowski spreading model velocity coefficient c."""
@RollPass.hookspec
def wusatowski_material_coefficient(roll_pass):
"""Gets the Wusatowski spreading model material coefficient d."""
@RollPass.hookspec
def wusatowski_friction_coefficient(roll_pass):
"""Gets the Wusatowski spreading model friction coefficient f."""
@RollPass.hookspec
def wusatowski_exponent(roll_pass):
"""Gets the Wusatowski spreading model exponent w."""
RollPass.plugin_manager.add_hookspecs(sys.modules[__name__])
|
import pytest
def test_agent_interface_has_bindings():
"""
Make sure that all methods that we want to expose are actually callable.
"""
import jpscore
from jpscore.geometry import Coordinate, LengthUnit, Units
agent = jpscore.Agent(
Coordinate(
LengthUnit(94.45, Units.m),
LengthUnit(90941, Units.m),
)
)
assert hasattr(agent, "id")
assert hasattr(agent, "pos")
|
import os
import signal
from shakenfist.config import config
from shakenfist.daemons import daemon
from shakenfist import logutil
from shakenfist.util import libvirt as util_libvirt
from shakenfist.util import process as util_process
LOG, _ = logutil.setup(__name__)
class Monitor(daemon.Daemon):
def run(self):
LOG.info('Starting')
libvirt = util_libvirt.get_libvirt()
conn = libvirt.open('qemu:///system')
present_cpus, _, _ = conn.getCPUMap()
os.makedirs('/var/run/sf', exist_ok=True)
util_process.execute(None, (config.API_COMMAND_LINE
% {
'port': config.API_PORT,
'timeout': config.API_TIMEOUT,
'name': daemon.process_name('api'),
'workers': present_cpus * 4
}),
env_variables=os.environ,
check_exit_code=[0, 1, -15])
def exit_gracefully(self, sig, _frame):
if sig == signal.SIGTERM:
self.running = False
if os.path.exists('/var/run/sf/gunicorn.pid'):
with open('/var/run/sf/gunicorn.pid') as f:
pid = int(f.read())
os.kill(pid, signal.SIGTERM)
self.log.info(
'Caught SIGTERM, requested shutdown of gunicorn pid %d' % pid)
else:
self.log.info('No recorded gunicorn pid, could not terminate')
|
# Edit the version number here and only here
__version__ = "1.161"
|
from django.urls import path
from .views import SearchView, index
urlpatterns = [
path('', index),
path('search/', SearchView.as_view(), name="search"),
]
|
"""
Read text files using numpy genfromtxt.
Da Pan, 02112016
"""
import numpy as np
def read(filename, hlines, delimiter='\t'):
"""Read text files and return headlines and data
:param filename: str
:param hlines: number of headlines
:param delimiter: str
:param fTarget: if set, data will be save to fTarget
:return hstr: list of str
:return narray: 1d or 2d data
"""
# Open file to read headlines
f = open(filename)
hstr = []
for i in range(hlines):
hstr.append(f.readline())
f.close()
# Use numpy genfromtxt to read data
data = np.genfromtxt(filename, delimiter=delimiter, skip_header=hlines)
return hstr, data
def main():
hstr, data = read(r'D:\Biochar_NH3_data\NH3_3.lvm', 22)
if __name__ == '__main__':
main()
|
from light_habit import Habit
client = Habit()
client.welcome()
while True:
client.is_it_ended()
client.user_input()
|
import datetime
import logging
import pandas as pd
from sqlalchemy import create_engine
import urllib
import numpy as np
from scipy.optimize import curve_fit
import sqlalchemy
import azure.functions as func
def main(mytimer: func.TimerRequest) -> None:
utc_timestamp = datetime.datetime.utcnow().replace(
tzinfo=datetime.timezone.utc).isoformat()
if mytimer.past_due:
logging.info('The timer is past due!')
params = urllib.parse.quote_plus(
r'Driver={ODBC Driver 17 for SQL Server};Server=tcp:covid19dbserver.database.windows.net,1433;Database=covid19db;Uid=serveradmin@covid19dbserver;Pwd=pzaGuPujnkUnDqZFbWt5;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;')
conn_str = 'mssql+pyodbc:///?odbc_connect={}'.format(params)
engine = create_engine(conn_str, echo=False)
key_cols_candidates = ['Country/Region',
'Province/State', 'District', 'federalstate', 'date']
training_period = 21
forecasting_days = 3
forecast_col = 'infections'
for table_name in ['Hopkins', 'ECDC', 'HopkinsTS', 'RKI']:
logging.info(f"Processing table {table_name}...")
# for table_name in ['RKI']:
df = pd.read_sql_table(table_name, engine)
df = df.drop(['Province/State', 'District', 'FIPS', 'Lat',
'Long', 'deaths', 'recovered', 'ID'], axis=1, errors='ignore')
string_cols = df.dtypes[df.dtypes == 'object'].index
string_col_replacement = {key: "None" for key in string_cols}
df = df.fillna(string_col_replacement)
key_cols = [col for col in key_cols_candidates if col in df.columns]
df = df.groupby(by=key_cols).sum().reset_index()
for col in string_cols:
if col in df.columns:
df.loc[df[col].str.contains("None"), col] = pd.NA
country_col = 'Country/Region'
if table_name == 'RKI':
country_col = 'federalstate'
for country in df[country_col].unique():
logging.debug(f"Computing forecasts for country {country}")
df_country = df[df[country_col] == country]
y = df_country.sort_values(
by='date')[forecast_col].values[-training_period:]
if (y < 10).all():
# All values < 10. No good forecast possible
continue
if len(y) < training_period:
# Did not find a lot of datapoints
continue
x = range(len(y))
x_forecast = range(len(y), len(y)+forecasting_days)
try:
(a_scipy, b_scipy), _ = curve_fit(
lambda t, a, b: a*np.exp(b*t), x, y)
except Exception as e:
                logging.info("curve_fit failed for %s / %s, y=%s", table_name, country, y)
logging.error(e)
continue
def exp_scipy(x): return a_scipy * np.exp(b_scipy*x)
y_forecast = exp_scipy(x_forecast)
df_result = pd.DataFrame()
df_result['forecast_infections'] = y_forecast
one_day_delta = pd.Timedelta(value=1, unit='d')
df_result['date'] = pd.date_range(
start=df_country.date.max()+one_day_delta, periods=forecasting_days, freq='d')
df_result[country_col] = country
today = datetime.datetime.now()
df_result['forecasting_date'] = today
dtype_dict = {}
for str_col in string_cols:
if (str_col in df_result.columns and df_result[str_col].notnull().sum() > 0):
# print(col)
df_result.loc[df_result[str_col].notnull(
), str_col] = df_result.loc[df_result[str_col].notnull(), str_col].str.slice(start=0, stop=99)
dtype_dict[str_col] = sqlalchemy.types.NVARCHAR(length=100)
logging.debug("Computed forecasts.")
logging.debug("Writing forecast to database...")
df_result.to_sql(f"{table_name}_forecast", engine,
if_exists='append', index=False, dtype=dtype_dict)
logging.debug("Wrote forecast to database.")
# TODO: Write merge statement to update into f{table_name}_forecast or just run once a day
logging.info('Python timer trigger function ran at %s', utc_timestamp)
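# A minimal standalone sketch of the exponential fit used above (synthetic data and an
# illustrative starting guess; not wired to the database or the Azure timer trigger):
if __name__ == '__main__':
    t_demo = np.arange(21)
    y_demo = 100 * np.exp(0.1 * t_demo)                       # synthetic exponential history
    (a_fit, b_fit), _ = curve_fit(lambda t, a, b: a * np.exp(b * t),
                                  t_demo, y_demo, p0=(1.0, 0.01))
    t_future = np.arange(len(t_demo), len(t_demo) + 3)        # 3-day horizon, as in main()
    print(a_fit * np.exp(b_fit * t_future))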
|
"""
Canny Edge Detection is a popular edge detection algorithm.\
It was developed by John F. Canny in 1986.
The Canny edge detection algorithm is composed of 5 steps:
1. Noise Reduction
2. Gradient Calculation
3. Non-maximum Supperession
4. Double Threshold
5. Edge Tracking by Hysteresis
"""
import cv2
import numpy as np
import matplotlib.pyplot as plt
import keyboard
def nothing(nothing):
"""Pass nothing for trackbars."""
pass
img = cv2.imread('Medias/messi5.jpg', 0)
lap = cv2.Laplacian(img, cv2.CV_64F, ksize=3)
lap = np.uint8(np.absolute(lap))
sobelX = cv2.Sobel(img, cv2.CV_64F, 1, 0)
sobelX = np.uint8(np.absolute(sobelX))
sobelY = cv2.Sobel(img, cv2.CV_64F, 0, 1)
sobelY = np.uint8(np.absolute(sobelY))
sobelCombined = cv2.bitwise_or(sobelX, sobelY)
"""Creating a trackbar for setting thresholds"""
cv2.namedWindow('Trackbars')
cv2.createTrackbar('Threshold1', 'Trackbars', 100, 255, nothing)
cv2.createTrackbar('Threshold2', 'Trackbars', 200, 255, nothing)
trackbars = np.zeros((1, 512, 3), np.uint8)
cv2.imshow('Trackbars', trackbars)
titles = ['Image', 'Laplacian', 'Sobel X',
'Sobel Y', 'Sobel Combined', 'Canny']
while(1):
canny = cv2.Canny(img, cv2.getTrackbarPos('Threshold1', 'Trackbars'),
cv2.getTrackbarPos('Threshold2', 'Trackbars'))
images = [img, lap, sobelX, sobelY, sobelCombined, canny]
for i in range(len(images)):
plt.subplot(2, 3, i+1)
plt.imshow(images[i], "gray")
plt.title(titles[i])
plt.xticks([])
plt.yticks([])
plt.show(block=False)
k = plt.waitforbuttonpress(1)
if keyboard.is_pressed('esc'):
plt.close()
break
|
import os
import pytest
@pytest.fixture
def fake_bundle_file():
cwd = os.path.dirname(os.path.abspath(__file__))
return os.path.join(cwd, 'test_bundle.json')
@pytest.fixture
def ccoe_bundle():
cwd = os.path.dirname(os.path.abspath(__file__))
return os.path.join(cwd, 'ccoe_investigator_demo.json')
@pytest.fixture
def fake_csv_file():
cwd = os.path.dirname(os.path.abspath(__file__))
return os.path.join(cwd, 'test_procs.csv')
|
from sqlalchemy.sql import func
from datetime import datetime
from flask_sqlalchemy import SQLAlchemy
from flask.app import Flask
import appsettings
#app = Flask(__name__)
#app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///tasks.db'
#db = SQLAlchemy(app)
app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///forum.db'
app.config['UPLOAD_FOLDER'] = appsettings.UPLOAD_FOLDER
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
app.secret_key = appsettings.secret_key
db = SQLAlchemy(app)
from routes import register, login, index, home, topic, thread, profile, process, search, reply, view_thread, process_image, get_upload
|
from django import forms
from Photos.models.photos_model import Photos
class PhotosForm(forms.ModelForm):
class Meta:
model = Photos
fields = ['original_photo', 'name', 'category', 'price']
|
# Extracting the integer part of a number
import math
num = float(input("Enter any value: "))
print("Its integer part is {}".format(math.trunc(num)))
|
cities = [
'Advance',
'Akron',
'Alamo',
'Albany',
'Albion',
'Alexandria',
'Ambia',
'Amboy',
'Amo',
'Anderson',
'Andrews',
'Angola',
'Arcadia',
'Arcola',
'Argos',
'Arlington',
'Ashley',
'Athens',
'Atlanta',
'Attica',
'Atwood',
'Auburn',
'Aurora',
'Austin',
'Avilla',
'Avoca',
'Avon',
'Bainbridge',
'Bargersville',
'Batesville',
'Bath',
'Battle Ground',
'Bedford',
'Beech Grove',
'Bellmore',
'Bennington',
'Bentonville',
'Berne',
'Bethlehem',
'Beverly Shores',
'Bicknell',
'Bippus',
'Birdseye',
'Blanford',
'Bloomfield',
'Bloomingdale',
'Bloomington',
'Bluffton',
'Boggstown',
'Boone Grove',
'Boonville',
'Borden',
'Boston',
'Boswell',
'Bourbon',
'Bowling Green',
'Bradford',
'Branchville',
'Brazil',
'Bremen',
'Bridgeton',
'Brimfield',
'Bringhurst',
'Bristol',
'Bristow',
'Brook',
'Brooklyn',
'Brookston',
'Brookville',
'Brownsburg',
'Brownstown',
'Brownsville',
'Bruceville',
'Bryant',
'Buck Creek',
'Buckskin',
'Buffalo',
'Bunker Hill',
'Burket',
'Burlington',
'Burnettsville',
'Burrows',
'Butler',
'Butlerville',
'Cambridge City',
'Camby',
'Camden',
'Campbellsburg',
'Canaan',
'Cannelburg',
'Cannelton',
'Carbon',
'Carlisle',
'Carmel',
'Cartersburg',
'Carthage',
'Cayuga',
'Cedar Grove',
'Cedar Lake',
'Celestine',
'Centerpoint',
'Centerville',
'Central',
'Chalmers',
'Chandler',
'Charlestown',
'Charlottesville',
'Chesterton',
'Chrisney',
'Churubusco',
'Cicero',
'Clarks Hill',
'Clarksburg',
'Clarksville',
'Clay City',
'Claypool',
'Clayton',
'Clear Creek',
'Clifford',
'Clinton',
'Cloverdale',
'Coal City',
'Coalmont',
'Coatesville',
'Colfax',
'Columbia City',
'Columbus',
'Commiskey',
'Connersville',
'Converse',
'Cortland',
'Corunna',
'Cory',
'Corydon',
'Covington',
'Craigville',
'Crandall',
'Crane',
'Crawfordsville',
'Cromwell',
'Cross Plains',
'Crothersville',
'Crown Point',
'Culver',
'Cutler',
'Cynthiana',
'Dale',
'Daleville',
'Dana',
'Danville',
'Darlington',
'Dayton',
'Decatur',
'Decker',
'Deedsville',
'Delong',
'Delphi',
'Demotte',
'Denver',
'Depauw',
'Deputy',
'Derby',
'Dillsboro',
'Donaldson',
'Dublin',
'Dubois',
'Dugger',
'Dunkirk',
'Dunreith',
'Dupont',
'Dyer',
'Earl Park',
'East Chicago',
'East Enterprise',
'Eaton',
'Eckerty',
'Economy',
'Edinburgh',
'Edwardsport',
'Elberfeld',
'Elizabeth',
'Elizabethtown',
'Elkhart',
'Ellettsville',
'Elnora',
'Elwood',
'Eminence',
'English',
'Etna Green',
'Evanston',
'Evansville',
'Fair Oaks',
'Fairbanks',
'Fairland',
'Fairmount',
'Falmouth',
'Farmersburg',
'Farmland',
'Ferdinand',
'Fillmore',
'Finly',
'Fishers',
'Flat Rock',
'Flora',
'Florence',
'Floyds Knobs',
'Folsomville',
'Fontanet',
'Forest',
'Fort Branch',
'Fort Ritner',
'Fort Wayne',
'Fortville',
'Fountain City',
'Fountaintown',
'Fowler',
'Fowlerton',
'Francesville',
'Francisco',
'Frankfort',
'Franklin',
'Frankton',
'Fredericksburg',
'Freedom',
'Freelandville',
'Freetown',
'Fremont',
'French Lick',
'Friendship',
'Fulda',
'Fulton',
'Galveston',
'Garrett',
'Gary',
'Gas City',
'Gaston',
'Geneva',
'Gentryville',
'Georgetown',
'Glenwood',
'Goldsmith',
'Goodland',
'Goshen',
'Gosport',
'Grabill',
'Grammer',
'Grandview',
'Granger',
'Grantsburg',
'Grass Creek',
'Graysville',
'Greencastle',
'Greenfield',
'Greens Fork',
'Greensboro',
'Greensburg',
'Greentown',
'Greenville',
'Greenwood',
'Griffin',
'Griffith',
'Grissom AFB',
'Grovertown',
'Guilford',
'Gwynneville',
'Hagerstown',
'Hamblen',
'Hamilton',
'Hamlet',
'Hammond',
'Hanna',
'Hanover',
'Hardinsburg',
'Harlan',
'Harmony',
'Harrison',
'Harrodsburg',
'Hartford City',
'Hartsville',
'Hatfield',
'Haubstadt',
'Hayden',
'Hazleton',
'Hebron',
'Helmsburg',
'Helt',
'Heltonville',
'Hemlock',
'Henryville',
'Highland',
'Hillisburg',
'Hillsboro',
'Hillsdale',
'Hoagland',
'Hobart',
'Hobbs',
'Holland',
'Holton',
'Homer',
'Hope',
'Howe',
'Hudson',
'Huntertown',
'Huntingburg',
'Huntington',
'Huron',
'Hymera',
'Idaville',
'Indianapolis',
'Ingalls',
'Inglefield',
'Ireland',
'Jackson',
'Jamestown',
'Jasonville',
'Jasper',
'Jefferson',
'Jeffersonville',
'Jonesboro',
'Jonesville',
'Judson',
'Kempton',
'Kendallville',
'Kennard',
'Kentland',
'Kewanna',
'Keystone',
'Kimmell',
'Kingman',
'Kingsbury',
'Kingsford Heights',
'Kirklin',
'Knightstown',
'Knightsville',
'Knox',
'Kokomo',
'Koleen',
'Kouts',
'Kurtz',
'La Crosse',
'La Fontaine',
'La Porte',
'Laconia',
'Ladoga',
'Lafayette',
'Lagrange',
'Lagro',
'Lake Cicott',
'Lake Station',
'Lake Village',
'Laketon',
'Lakeville',
'Lamar',
'Lanesville',
'Laotto',
'Lapaz',
'Lapel',
'Larwill',
'Laurel',
'Lawrenceburg',
'Leavenworth',
'Lebanon',
'Leesburg',
'Leiters Ford',
'Leo',
'Leopold',
'Leroy',
'Lewis',
'Lewisville',
'Lexington',
'Liberty',
'Liberty Center',
'Liberty Mills',
'Ligonier',
'Lincoln City',
'Linden',
'Linn Grove',
'Linton',
'Little York',
'Lizton',
'Logansport',
'Loogootee',
'Losantville',
'Lovett',
'Lowell',
'Lucerne',
'Lynn',
'Lynnville',
'Lyons',
'Mackey',
'Macy',
'Madison',
'Manilla',
'Marengo',
'Mariah Hill',
'Marion',
'Markle',
'Markleville',
'Marshall',
'Martinsville',
'Marysville',
'Matthews',
'Mauckport',
'Maxwell',
'Mays',
'McCordsville',
'Mecca',
'Medaryville',
'Medora',
'Mellott',
'Memphis',
'Mentone',
'Merom',
'Merrillville',
'Metamora',
'Mexico',
'Miami',
'Michigan City',
'Michigantown',
'Middlebury',
'Middletown',
'Midland',
'Milan',
'Milford',
'Mill Creek',
'Millersburg',
'Millhousen',
'Milltown',
'Milroy',
'Milton',
'Mishawaka',
'Mitchell',
'Modoc',
'Mongo',
'Monon',
'Monroe',
'Monroe City',
'Monroeville',
'Monrovia',
'Monterey',
'Montezuma',
'Montgomery',
'Monticello',
'Montmorenci',
'Montpelier',
'Mooreland',
'Moores Hill',
'Mooresville',
'Morgantown',
'Morocco',
'Morris',
'Morristown',
'Mount Ayr',
'Mount Saint Francis',
'Mount Summit',
'Mount Vernon',
'Mulberry',
'Muncie',
'Munster',
'Nabb',
'Napoleon',
'Nappanee',
'Nashville',
'Nebraska',
'Needham',
'New Albany',
'New Carlisle',
'New Castle',
'New Goshen',
'New Harmony',
'New Haven',
'New Lebanon',
'New Lisbon',
'New Market',
'New Middletown',
'New Palestine',
'New Paris',
'New Point',
'New Richmond',
'New Ross',
'New Salisbury',
'New Trenton',
'New Washington',
'New Waverly',
'Newberry',
'Newburgh',
'Newport',
'Newtown',
'Nineveh',
'Noble',
'Noblesville',
'Norman',
'North Judson',
'North Liberty',
'North Manchester',
'North Salem',
'North Vernon',
'North Webster',
'Notre Dame',
'Oakford',
'Oakland City',
'Oaktown',
'Oakville',
'Odon',
'Oldenburg',
'Onward',
'Oolitic',
'Ora',
'Orestes',
'Orland',
'Orleans',
'Osceola',
'Osgood',
'Ossian',
'Otisco',
'Otterbein',
'Otwell',
'Owensburg',
'Owensville',
'Oxford',
'Palmyra',
'Paoli',
'Paragon',
'Paris Crossing',
'Parker City',
'Patoka',
'Patricksburg',
'Patriot',
'Paxton',
'Pekin',
'Pendleton',
'Pennville',
'Perrysville',
'Pershing',
'Peru',
'Petersburg',
'Petroleum',
'Pierceton',
'Pierceville',
'Pimento',
'Pine Village',
'Pittsboro',
'Plainfield',
'Plainville',
'Pleasant Lake',
'Pleasant Mills',
'Plymouth',
'Poland',
'Poneto',
'Portage',
'Porter',
'Portland',
'Poseyville',
'Prairie Creek',
'Prairieton',
'Preble',
'Princeton',
'Putnamville',
'Quincy',
'Ragsdale',
'Ramsey',
'Redkey',
'Reelsville',
'Remington',
'Rensselaer',
'Reynolds',
'Richland',
'Richmond',
'Ridgeville',
'Riley',
'Rising Sun',
'Roachdale',
'Roann',
'Roanoke',
'Rochester',
'Rockfield',
'Rockport',
'Rockville',
'Rolling Prairie',
'Rome',
'Rome City',
'Romney',
'Rosedale',
'Roselawn',
'Rossville',
'Royal Center',
'Rushville',
'Russellville',
'Russiaville',
'Saint Anthony',
'Saint Bernice',
'Saint Croix',
'Saint Joe',
'Saint John',
'Saint Mary Of The Woods',
'Saint Meinrad',
'Saint Paul',
'Salamonia',
'Salem',
'San Pierre',
'Sandborn',
'Santa Claus',
'Saratoga',
'Schererville',
'Schneider',
'Schnellville',
'Scipio',
'Scotland',
'Scott',
'Scottsburg',
'Sedalia',
'Seelyville',
'Sellersburg',
'Selma',
'Servia',
'Seymour',
'Sharpsville',
'Shelburn',
'Shelby',
'Shelbyville',
'Shepardsville',
'Sheridan',
'Shipshewana',
'Shirley',
'Shoals',
'Sidney',
'Silver Lake',
'Smithville',
'Solsberry',
'Somerset',
'Somerville',
'South Bend',
'South Milford',
'South Whitley',
'Spencer',
'Spencerville',
'Spiceland',
'Springport',
'Springville',
'Spurgeon',
'Stanford',
'Star City',
'State Line',
'Staunton',
'Stendal',
'Stilesville',
'Stinesville',
'Stockwell',
'Straughn',
'Stroh',
'Sullivan',
'Sulphur',
'Sulphur Springs',
'Sumava Resorts',
'Summitville',
'Sunman',
'Swayzee',
'Sweetser',
'Switz City',
'Syracuse',
'Talbot',
'Taswell',
'Taylorsville',
'Tefft',
'Tell City',
'Templeton',
'Tennyson',
'Terre Haute',
'Thayer',
'Thorntown',
'Tippecanoe',
'Tipton',
'Topeka',
'Trafalgar',
'Troy',
'Tunnelton',
'Twelve Mile',
'Tyner',
'Underwood',
'Union City',
'Union Mills',
'Uniondale',
'Unionville',
'Universal',
'Upland',
'Urbana',
'Vallonia',
'Valparaiso',
'Van Buren',
'Veedersburg',
'Velpen',
'Vernon',
'Versailles',
'Vevay',
'Vincennes',
'Wabash',
'Wadesville',
'Wakarusa',
'Waldron',
'Walkerton',
'Wallace',
'Walton',
'Wanatah',
'Warren',
'Warsaw',
'Washington',
'Waterloo',
'Waveland',
'Wawaka',
'Waynetown',
'Webster',
'West Baden Springs',
'West College Corner',
'West Harrison',
'West Lafayette',
'West Lebanon',
'West Middleton',
'West Newton',
'West Terre Haute',
'Westfield',
'Westphalia',
'Westpoint',
'Westport',
'Westville',
'Wheatfield',
'Wheatland',
'Wheeler',
'Whiteland',
'Whitestown',
'Whiting',
'Wilkinson',
'Williams',
'Williamsburg',
'Williamsport',
'Winamac',
'Winchester',
'Windfall',
'Wingate',
'Winona Lake',
'Winslow',
'Wolcott',
'Wolcottville',
'Wolflake',
'Woodburn',
'Worthington',
'Wyatt',
'Yeoman',
'Yoder',
'Yorktown',
'Young America',
'Zanesville',
'Zionsville'
]
|
#! /usr/bin/env python3
# -*- Python -*-
"""
Prints number of common AAs in two AA sequences
"""
import sys, os, traceback, pprint
if sys.version_info.major != 3: raise RuntimeError("Run script with python3")
from pathlib import Path
sys.path[:0] = [str(Path(os.environ["ACMACSD_ROOT"]).resolve().joinpath("py"))]
import logging; module_logger = logging.getLogger(__name__)
from acmacs_base import timeit
#import seqdb
# ----------------------------------------------------------------------
def main(args):
s1, s2 = args.input
num_common = 0
for pos in range(min(len(s1), len(s2))):
if s1[pos] == s2[pos]:
num_common += 1
print("common:", num_common)
# ----------------------------------------------------------------------
with timeit(sys.argv[0]):
try:
import argparse
# print(sys.argv)
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('input', nargs=2, help='Sequences to compare.')
parser.add_argument('-d', '--debug', action='store_const', dest='loglevel', const=logging.DEBUG, default=logging.INFO, help='Enable debugging output.')
args = parser.parse_args()
logging.basicConfig(level=args.loglevel, format="%(levelname)s %(asctime)s: %(message)s")
exit_code = main(args)
except Exception as err:
logging.error('{}\n{}'.format(err, traceback.format_exc()))
exit_code = 1
exit(exit_code)
# ======================================================================
### Local Variables:
### eval: (if (fboundp 'eu-rename-buffer) (eu-rename-buffer))
### End:
|
"""
tests the solution in the named file
"""
import os
import sys
from argparse import ArgumentParser
from importlib import import_module
from itertools import chain
from pathlib import Path
from pprint import pprint
from random import randint, shuffle
from typing import List, Tuple, TypedDict
class Parameters(TypedDict):
nums: List[int]
target: int
CorrectAnswer = Tuple[int, int]
TEST_CASES: List[Tuple[CorrectAnswer, Parameters]] = [
((6, 4), {"nums": [78, 72, 77, 95, -72, 96, 54], "target": -18})
]
LIMIT = 109
def random_test_case():
# visual
# https://www.desmos.com/calculator/mowk6elxin
target = randint(-LIMIT, LIMIT)
x_lower_bound = max(-LIMIT, target - LIMIT)
x_upper_bound = min(LIMIT, target + LIMIT)
x = randint(x_lower_bound, x_upper_bound)
y = target - x
array: List[int] = []
length = randint(0, 5)
while len(array) < length:
other = randint(-LIMIT, LIMIT)
if other not in [x, y] and target - other not in array:
array.append(other)
array.append(x)
array.append(y)
shuffle(array)
x_index = array.index(x)
y_index = array.index(y)
if y_index == x_index:
y_index = array.index(y, y_index + 1)
return ((x_index, y_index), Parameters(nums=array, target=target))
def test(function) -> bool:
for case_index, (correct_answer, case) in enumerate(
chain(TEST_CASES, (random_test_case() for _ in range(100_000)))
):
answer = function(**case)
if not answer or set(answer) != set(correct_answer):
print(f"failure for case #{case_index + 1}:")
print("case:")
pprint(case)
print("correct answer:")
pprint(correct_answer)
print("answer:")
pprint(answer)
sys.exit(1)
print("passed")
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("file", help="filename of python file to test")
args = parser.parse_args()
path = Path(args.file)
if not path.is_file():
print(f"{path} is not a file")
sys.exit(1)
sys.path.append(str(path.parent))
module = import_module(path.stem)
test(module.Solution().twoSum)
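# A minimal hash-map solution that this harness would accept (a hypothetical example for
# reference; the real solution comes from the file passed on the command line):
#
#     class Solution:
#         def twoSum(self, nums, target):
#             seen = {}
#             for i, n in enumerate(nums):
#                 if target - n in seen:
#                     return seen[target - n], i
#                 seen[n] = i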
|
import argparse
from utils.model_vc import Generator
import torch
from utils.audioUtils import audio
from utils.audioUtils.hparams import hparams
import librosa
from pathlib import Path
import numpy as np
from math import ceil
import glob
import tempfile
import pdb
import os
import datetime
device = "cuda:0"
def pad_seq(x, base=32):
len_out = int(base * ceil(float(x.shape[0]) / base))
len_pad = len_out - x.shape[0]
assert len_pad >= 0
return np.pad(x, ((0, len_pad), (0, 0)), 'constant'), len_pad
mel_basis = librosa.filters.mel(hparams.sample_rate, hparams.n_fft, n_mels=80)
def voice_conversion(G, input_wavfile, model, parallel=True):
source_path = input_wavfile
wav, sr = librosa.load(source_path, hparams.sample_rate)
linear_spec = np.abs(
librosa.stft(wav, n_fft=hparams.n_fft, hop_length=hparams.hop_size, win_length=hparams.win_size))
mel_spec = mel_basis.dot(linear_spec)
mel_db = 20 * np.log10(mel_spec)
source_spec = np.clip((mel_db + 120) / 125, 0, 1)
source_embed = torch.from_numpy(np.array([0, 1])).float()
source_spec, _ = pad_seq(source_spec.T, hparams.freq)
with torch.no_grad():
s2t_spec = G.conversion(torch.Tensor(source_embed).unsqueeze(0), torch.Tensor(source_embed).unsqueeze(0),
torch.Tensor(source_spec).unsqueeze(0), device).cpu()
if parallel:
s2t_wav = G.vocoder.generate(s2t_spec.transpose(1, 2), True, 8000, 800, mu_law=True)
else:
s2t_wav = G.vocoder.generate(s2t_spec.transpose(1, 2), False, None, None, mu_law=True)
root = '/home/apushkar/audio-synthesis-webapp'
name = datetime.datetime.now().strftime("%Y%m%d%H%M%S%f")+str(np.random.randint(0,100))
# with open(datetime.datetime.now().strftime("%Y%m%d%H%M%S%f")+str(np.random.randint(0,100))+'.wav', 'w+') as fp:
# pdb.set_trace()
librosa.output.write_wav(os.path.join(root, 'media/wav', name+'.wav'), s2t_wav.astype(np.float32), hparams.sample_rate)
# librosa.output.write_wav(fp.name, s2t_wav.astype(np.float32), hparams.sample_rate)
# returnFile, _ = librosa.load("result1.wav", hparams.sample_rate)
# pdb.set_trace()
# print("here")
os.system('ffmpeg -i {} -i {} {}'.format(os.path.join('/home/apushkar/audio-synthesis-webapp/micApp/static/micApp/images/',str(model)+'.jpg'),
os.path.join(root, 'media/wav', name+'.wav'), os.path.join(root, 'media/webm', name+'.webm')))
with open(os.path.join(root, 'media/webm', name+'.webm'), 'rb') as wp:
# File1 = open('a.webm','rb')
Axel = wp.read()
return Axel
def main(wav_path, model, parallel=True):
# parser = argparse.ArgumentParser()
# parser.add_argument('--wav_path')
# parser.add_argument('--model')
# parser.add_argument('--parallel', dest='parallel', default=False, action='store_true')
# args = parser.parse_args()
#
# device = "cuda:0"
model_path = "/home/apushkar/audio-synthesis-webapp/utils/models/" # please change it to the trained models' path
G = Generator(hparams.dim_neck, hparams.speaker_embedding_size, 512, hparams.freq, is_train=False,
encoder_type="single",
discriminator=True,
use_lsgan=True,
train_wavenet=True).to(device)
model_list = glob.glob(model_path + "*.pkl")
name_list = [x.split('/')[-1].split('.')[0] for x in model_list]
# print(name_list)
# pdb.set_trace()
if model in name_list:
print("Loading autovc model...", end='\t')
load_model = "/home/apushkar/audio-synthesis-webapp/utils/models/%s.pkl" % model
d = torch.load(load_model)
newdict = d.copy()
for key, value in d.items():
newkey = key
if 'wavenet' in key:
newdict[key.replace('wavenet', 'vocoder')] = newdict.pop(key)
newkey = key.replace('wavenet', 'vocoder')
if 'module' in key:
newdict[newkey.replace('module.','',1)] = newdict.pop(newkey)
newkey = newkey.replace('module.', '', 1)
if newkey not in G.state_dict():
#print(newkey)
newdict.pop(newkey)
print("Load " + str(len(newdict)) + " parameters!")
G.load_state_dict(newdict, strict=False)
print("Done.")
return voice_conversion(G, wav_path, model, parallel)
else:
print("Unknown Examplar!")
|
from django.utils.encoding import force_text
from wagtail.core.blocks import CharBlock, IntegerBlock, RawHTMLBlock, RichTextBlock, StructBlock
from wagtail.embeds.blocks import EmbedBlock
from wagtail_blocks.blocks import (
ChartBlock,
CroppedImagesWithTextBlock,
HeaderBlock,
ImageSliderBlock,
ImageTextOverlayBlock,
ListBlock,
ListWithImagesBlock,
MapBlock,
ThumbnailGalleryBlock,
)
class VideoBlock(StructBlock):
title = CharBlock()
embed = EmbedBlock(help_text='Insert full URL of video here. Ex. https://youtu.be/sdfk343244dfef5')
max_width = IntegerBlock(default=1024, help_text='Set maximum width of the video frame in pixels.')
def get_searchable_content(self, value):
return [force_text(value)]
class Meta:
template = 'wagtail_blocks/video.html'
icon = 'media'
label = "Embed youtube or vimeo video"
STREAMFIELD_BLOCK_LIST = [
('header', HeaderBlock()),
('rich_text', RichTextBlock()),
('raw_HTML', RawHTMLBlock()),
('image_text_overlay', ImageTextOverlayBlock()),
('list', ListBlock()),
('embed', VideoBlock()),
('chart', ChartBlock()),
('map', MapBlock()),
('cropped_images_with_text', CroppedImagesWithTextBlock()),
('list_with_images', ListWithImagesBlock()),
('thumbnail_gallery', ThumbnailGalleryBlock()),
('image_slider', ImageSliderBlock()),
]
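# A hypothetical page model wiring this block list into a StreamField (illustrative only;
# the model name and field options are assumptions, not part of this module):
#
#     from wagtail.core.fields import StreamField
#     from wagtail.core.models import Page
#
#     class FlexiblePage(Page):
#         body = StreamField(STREAMFIELD_BLOCK_LIST, blank=True)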
|
"""Open a shell prompt inside an ansible-test environment."""
from __future__ import annotations
import os
import typing as t
from ...util import (
ApplicationError,
display,
)
from ...config import (
ShellConfig,
)
from ...executor import (
Delegate,
)
from ...connections import (
LocalConnection,
SshConnection,
)
from ...host_profiles import (
ControllerProfile,
PosixProfile,
SshTargetHostProfile,
)
from ...provisioning import (
prepare_profiles,
)
from ...host_configs import (
ControllerConfig,
OriginConfig,
)
def command_shell(args): # type: (ShellConfig) -> None
"""Entry point for the `shell` command."""
if args.raw and isinstance(args.targets[0], ControllerConfig):
raise ApplicationError('The --raw option has no effect on the controller.')
host_state = prepare_profiles(args, skip_setup=args.raw) # shell
if args.delegate:
raise Delegate(host_state=host_state)
if args.raw and not isinstance(args.controller, OriginConfig):
display.warning('The --raw option will only be applied to the target.')
target_profile = t.cast(SshTargetHostProfile, host_state.target_profiles[0])
if isinstance(target_profile, ControllerProfile):
# run the shell locally unless a target was requested
con = LocalConnection(args)
else:
# a target was requested, connect to it over SSH
con = target_profile.get_controller_target_connections()[0]
if isinstance(con, SshConnection) and args.raw:
cmd = []
elif isinstance(target_profile, PosixProfile):
cmd = []
if args.raw:
shell = 'sh' # shell required for non-ssh connection
else:
shell = 'bash'
python = target_profile.python # make sure the python interpreter has been initialized before opening a shell
display.info(f'Target Python {python.version} is at: {python.path}')
optional_vars = (
'TERM', # keep backspace working
)
env = {name: os.environ[name] for name in optional_vars if name in os.environ}
if env:
cmd = ['/usr/bin/env'] + [f'{name}={value}' for name, value in env.items()]
cmd += [shell, '-i']
else:
cmd = []
con.run(cmd)
|
################################# LICENSE ##################################
# Copyright (c) 2009, South African Astronomical Observatory (SAAO) #
# All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# * Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer #
# in the documentation and/or other materials provided with the #
# distribution. #
# * Neither the name of the South African Astronomical Observatory #
# (SAAO) nor the names of its contributors may be used to endorse #
# or promote products derived from this software without specific #
# prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE SAAO ''AS IS'' AND ANY EXPRESS OR #
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED #
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE #
# DISCLAIMED. IN NO EVENT SHALL THE SAAO BE LIABLE FOR ANY #
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL #
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS #
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN #
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
############################################################################
#!/usr/bin/env python
# Author Version Date Comment
# -----------------------------------------------------------------------
# S M Crawford (SAAO) 0.3 11 Oct 2013
# hrsstack converts the MEF HRS files and stacks them together
# into a single image
import os, glob
import pyfits
import numpy as np
from pyraf import iraf
from pyraf.iraf import pysalt
import saltsafeio as saltio
import saltsafekey as saltkey
from saltsafelog import logging, history
from salterror import SaltError
debug=True
# -----------------------------------------------------------
# core routine
def hrsstack(images, outimages, outpref, clobber=True, logfile='salt.log',verbose=True):
"""Convert MEF HRS data into a single image. If variance frames and BPMs, then
convert them to the same format as well. Returns an MEF image but that is
combined into a single frame
"""
with logging(logfile,debug) as log:
# Check the input images
infiles = saltio.argunpack ('Input',images)
# create list of output files
outfiles=saltio.listparse('Outfile', outimages, outpref,infiles,'')
#verify that the input and output lists are the same length
saltio.comparelists(infiles,outfiles,'Input','output')
#open the file and write out as a fits files in the output directory
for img,oimg in zip(infiles, outfiles):
hdu=saltio.openfits(img)
hdu=stack(hdu)
log.message('Stacking HRS %s to %s' % (img, oimg), with_header=False)
saltio.writefits(hdu, oimg, clobber=clobber)
return
def stack(hdu):
"""Convert MEF HRS data into a single extension.
"""
#set the detector
detname=saltkey.get('DETNAM', hdu[0])
    print(detname)
if detname=='08443-03-01' or detname=='HRDET':
detector='hrdet'
elif detname=='04434-23-02' or detname=='HBDET':
detector='hbdet'
else:
        raise SaltError('%s is not an HRS detector' % detname)
    print(detector)
#get key parameters
nccd=saltkey.get('CCDAMPS', hdu[0])
#determine the shape of the CCD
if detector=='hbdet':
toty=hdu[1].shape[0]
totx=hdu[1].shape[1]+hdu[2].shape[1]
elif detector=='hrdet':
toty=hdu[1].shape[0]+hdu[3].shape[0]
totx=hdu[1].shape[1]+hdu[2].shape[1]
data=np.zeros((toty,totx),np.float32)
#embed the ccds
for j in range(nccd):
i=j+1
y2,x2=hdu[i].data.shape
if detector=='hbdet':
if i==1:
y1=0
x1=0
y2=y2
x2=x2
else:
y1=0
x1=x2
x2=x1+x2
elif detector=='hrdet':
y1=0
if i%2==1: x1=0
if i%2==0:
x1=x2
x2=x1+x2
if i>2:
y1=y2
y2=y1+y2
data[y1:y2,x1:x2]=hdu[i].data
ihdu = pyfits.ImageHDU(data)
nhdu = pyfits.HDUList([hdu[0], ihdu])
return nhdu
if not iraf.deftask('hrsstack'):
parfile = iraf.osfn("salthrs$hrsstack.par")
t = iraf.IrafTaskFactory(taskname="hrsstack",value=parfile,function=hrsstack, pkgname='salthrs')
|
# -*- coding: utf-8 -*-
name = 'tbb'
version = '4.4.6'
variants = [['platform-windows', 'arch==AMD64']]
build_command = "python {root}/rezbuild.py {install}"
build_requires = ['python']
def commands():
env.PATH.append("{root}/bin")
env.PATH.append("{root}/lib")
env.TBB_LIBRARIES = "{root}"
|
"""
This is the backtracking search solver that uses a couple of heurestics in order to solve a constraint problem tree
Written by Jesse Cai
2017-05-05
"""
from igraph import *
class ConstraintSolver(object):
def __init__(self, variables, consistency_type = 'arc'):
self.csp = Graph()
for var in variables:
self.add_var(var, variables[var])
def add_var(self, variable, domain):
self.csp.add_vertex(name=variable, domain=domain)
def add_constraint(self, constraint_func, *args):
# if the number of args
if len(args) > 2:
raise NotImplementedError('Have yet to deal with global constraints')
self.csp.add_edge(*args, c_func=constraint_func)
    def add_constraints(self, list_of_constraints):
        for constraint in list_of_constraints:
            self.add_constraint(*constraint)
#csp is a graph
@classmethod
def backtracking_search(cls, csp, assignment={}):
if len(assignment.keys()) == len(csp.vs):
return assignment
var = cls.select_unassigned_var(assignment, csp)
for value in cls.order_domain_values(var, assignment, csp):
#add it into a possible assignment
possible_assignment = assignment
possible_assignment[var['name']] = value
#check if assignment is possible
if cls.consistency_checker(possible_assignment, csp):
#if so commit,
assignment[var['name']] = value
new_csp = cls.inference(assignment, csp.copy())
if new_csp:
#add inferences to assignment
result = cls.backtracking_search(new_csp, assignment)
if result:
return result
assignment.pop(var['name'])
return False
    # use the minimum remaining values heuristic
    # use the degree heuristic
@staticmethod
def select_unassigned_var(assignment, csp):
current_best = None
for vertex in csp.vs:
if vertex['name'] not in assignment:
if current_best:
                    # MRV: prefer the variable with the fewest remaining values
                    if len(vertex['domain']) < len(current_best['domain']):
current_best = vertex
                    # if min remaining values are the same, then use the degree heuristic
elif len(vertex['domain']) == len(current_best['domain']):
if csp.degree(vertex) > csp.degree(current_best):
current_best = vertex
else:
current_best = vertex
return current_best
@classmethod
def order_domain_values(cls, var, assignment, csp):
value_range = []
for value in var['domain']:
assignment[var['name']] = value
new_csp = cls.inference(assignment, csp.copy())
if new_csp:
vals_remaining = sum([len(d) for d in new_csp.vs['domain']])
value_range.append((value, vals_remaining))
del assignment[var['name']]
value_range.sort(key=lambda x:x[1])
return [x[0] for x in value_range]
#this is just local consistency
@staticmethod
def consistency_checker(assignment, csp):
for edge in csp.es:
src_name = csp.vs[edge.source]['name']
target_name = csp.vs[edge.target]['name']
if src_name in assignment and target_name in assignment:
if not edge['c_func'](assignment[src_name], assignment[target_name]):
return False
return True
    # inference using forward checking
@classmethod
def inference(cls, assignment, csp):
        # collect the neighbours of all assigned variables (use a union here;
        # intersecting with the initial empty set would always stay empty and
        # forward checking would never prune anything)
        neighboring_set = set()
        for set_val in assignment:
            neighboring_set.update(set(csp.neighbors(set_val)))
for neighbor in neighboring_set:
neighbor_name = csp.vs[neighbor]['name']
if neighbor_name not in assignment:
                # iterate over a copy so values can be removed from the domain safely
                for possible_value in list(csp.vs[neighbor]['domain']):
assignment[neighbor_name] = possible_value
if cls.consistency_checker(assignment, csp) == False:
csp.vs[neighbor]['domain'].remove(possible_value)
del assignment[neighbor_name]
                # the domain was wiped out by forward checking -> dead end
                if not csp.vs[neighbor]['domain']:
return False
return csp
test = {
'washington': ['r','g','b'],
'oregon': ['r','g','b'],
'california': ['r','g','b'],
'idaho': ['r','g','b'],
'nevada': ['r','g','b'],
'utah': ['r','g','b'],
'arizona': ['r','g','b']
}
# a constraint returns True if valid, False otherwise
def not_equal_constraint(a, b):
return a != b
mysolver = ConstraintSolver(test)
mysolver.add_constraint(not_equal_constraint, 'california', 'oregon')
mysolver.add_constraint(not_equal_constraint, 'california', 'nevada')
mysolver.add_constraint(not_equal_constraint, 'oregon', 'washington')
mysolver.add_constraint(not_equal_constraint, 'nevada', 'arizona')
mysolver.add_constraint(not_equal_constraint, 'california', 'arizona')
result =(mysolver.backtracking_search(mysolver.csp))
print(result)
|
"""Rezolv icao """
ICAO = {
'a': 'alfa', 'b': 'bravo', 'c': 'charlie', 'd': 'delta', 'e': 'echo',
'f': 'foxtrot', 'g': 'golf', 'h': 'hotel', 'i': 'india', 'j': 'juliett',
'k': 'kilo', 'l': 'lima', 'm': 'mike', 'n': 'november', 'o': 'oscar',
'p': 'papa', 'q': 'quebec', 'r': 'romeo', 's': 'sierra', 't': 'tango',
'u': 'uniform', 'v': 'victor', 'w': 'whiskey', 'x': 'x-ray', 'y': 'yankee',
'z': 'zulu'
}
def icao(mesaj):
    """Receive the message to be transmitted and generate a file named
    mesaj.icao_intrare containing the message spelled out with the ICAO alphabet.
    """
    with open('mesaj.icao_intrare', 'w+') as file_to_write:
        for i in mesaj.lower():
            if i.isalpha():
                file_to_write.write(ICAO[i] + ' ')
            elif i.isspace():
                file_to_write.write(' ')
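# A quick usage sketch (illustrative; writes mesaj.icao_intrare in the working directory):
#
#     icao("ab c")   # file contents: "alfa bravo  charlie "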
|
from django.db.models import Aggregate
from django.contrib.gis.db.backend import SpatialBackend
from django.contrib.gis.db.models.sql import GeomField
class GeoAggregate(Aggregate):
def add_to_query(self, query, alias, col, source, is_summary):
if hasattr(source, 'geom_type'):
# Doing additional setup on the Query object for spatial aggregates.
aggregate = getattr(query.aggregates_module, self.name)
# Adding a conversion class instance and any selection wrapping
# SQL (e.g., needed by Oracle).
if aggregate.conversion_class is GeomField:
query.extra_select_fields[alias] = GeomField()
if SpatialBackend.select:
query.custom_select[alias] = SpatialBackend.select
super(GeoAggregate, self).add_to_query(query, alias, col, source, is_summary)
class Collect(GeoAggregate):
name = 'Collect'
class Extent(GeoAggregate):
name = 'Extent'
class MakeLine(GeoAggregate):
name = 'MakeLine'
class Union(GeoAggregate):
name = 'Union'
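# Depending on the Django version these aggregates are reached either through GeoQuerySet
# helpers such as ``.extent()`` / ``.unionagg()`` or via ``aggregate()`` with the classes
# above; the queries below are an illustrative sketch with a hypothetical ``City`` model:
#
#     City.objects.extent()       # bounding box of all geometries
#     City.objects.unionagg()     # geometric union of all geometries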
|
# python setup.py build_ext -i
from distutils.core import setup, Extension
import mpi4py
swig = mpi4py.get_include()
include = [swig, "/usr/lib/openmpi/include"]
sources = ["testSwig.c","testSwig.i"]
setup( ext_modules = [
Extension("_testSwig", sources = sources,
swig_opts = ["-I"+swig ],
include_dirs = include
) ]
)
|
from jsinclude.templatetags.pkg.StringValue import StringValue
class TestStringValue:
def test_string_arg(self):
arg = StringValue('abc123')
assert arg == "'abc123'"
def test_boolean_arg(self):
arg = StringValue('true')
assert arg == 'true'
arg = StringValue('false')
assert arg == 'false'
def test_python_boolean_arg(self):
arg = StringValue('True')
assert arg == "'True'"
arg = StringValue('False')
assert arg == "'False'"
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Update the URL reference for a proxy object.
A single URL can be modified by passing the PID for the object to update and the new URL
on the command line. A bulk update can be performed by passing in a JSON or CSV file.
By default, this command verifies proxy objects by fully downloading the object bytes,
recalculating the checksum and comparing it with the checksum that was originally
supplied by the client that created the object.
See `audit-proxy-sciobj`_ for more information about proxy object URL references.
set-url2
"""
import d1_gmn.app.did
import d1_gmn.app.mgmt_base
import d1_gmn.app.models
class Command(d1_gmn.app.mgmt_base.GMNCommandBase):
def __init__(self, *args, **kwargs):
super().__init__(__doc__, __name__, *args, **kwargs)
def add_arguments(self, parser):
# self.add_arg_force(parser)
pass
def handle_serial(self):
# TODO.
pass
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
MYSQL_HOST = "127.0.0.1"
MYSQL_PORT = 33096
MYSQL_USER = ""
MYSQL_PASS = ""
MYSQL_DB = "crontab"
SOCKET_HOST = '127.0.0.1'
SOCKET_PORT = 21110
SOCKET_BUFFER = 8192
|
from __future__ import print_function
import web
import time
import logging
logger = logging.getLogger(__file__)
logging.basicConfig(
format="[%(asctime)s - %(filename)s:line %(lineno)s] %(message)s",
datefmt='%d %b %H:%M:%S',
level=logging.INFO)
logger.setLevel(logging.INFO)
import constant
import userControl
import sentence
import image
from imagedemo import imagedemo
import sys
reload(sys)
sys.setdefaultencoding('utf8')
web.config.debug = False
render = web.template.render('templates/')
urls = (
'/', 'index',
'/image', 'image',
'/history', 'history',
'/imagedemo/(.*)', 'imagedemo'
)
app = web.application(urls, globals())
session = web.session.Session(app, web.session.DiskStore('sessions'), initializer={'login': 0, 'user': 0})
user_control = userControl.user(constant.DATABASE_FILE)
sen = sentence.sentence(constant.DATABASE_FILE)
image_pool = image.image(constant.DATABASE_FILE)
def check():
if session.login == 0:
judge = 4
resp = {'judge': judge}
return True, render.index(resp)
else:
return False, None
class index():
def GET(self):
judge = 4
resp = {'judge': judge}
return render.index(resp)
def POST(self):
i = web.input()
username = i.get('user_id')
password = i.get('password')
judge = user_control.checkUser(username, password)
if judge == 1:
session.user = user_control.getUserId(username)
session.login = 1
raise web.redirect('/image')
else:
resp = {'judge': judge}
return render.index(resp)
class image:
def GET(self):
j, r = check()
if j:
return r
i = web.input()
s_sen = ''
s_label = ''
rank = 0
img_id = i.get('image_id')
is_edit = 0
if img_id is not None:
is_edit = 1
image_id = int(img_id)
else:
# get a img id from img pool
image_id = image_pool.getimageid(session.user)
j, imageid, image_id = user_control.getimageid(session.user, image_id)
if j:
is_edit = 1
rank, s_sen, s_label = sen.get_sentence_by_imageid(session.user, image_id)
image_name = image_pool.getimagename(imageid)
label = ', '.join(filter(lambda x: x, image_pool.getlabel(imageid).split(' ')))
logger.info(label)
image_url = constant.IMAGE_ROOT + image_name
sentences = sen.getSentence(imageid)
logger.info("s_sen: %s", s_sen.decode('gbk'))
number = sen.getNumber(session.user)
logger.info(s_label)
resp = {'image_id': image_id, 'image': image_url, 'sentence': sentences, 'label': label,
'submitted_sentence': s_sen, 'submitted_label': s_label, 'rank': rank, 'number': number,
'is_edit': is_edit, 'judge': j}
return render.image(resp)
def POST(self):
i = web.input()
image_id = int(i.get('image_id'))
submitted_sentence = i.get('sentence').encode('gbk')
selected = int(i.get('selected'))
original_sentence = i.get('selectedSentence').encode('gbk')
labels = i.get('label')
edit = i.get('edit')
j, real_id, image_id = user_control.getimageid(session.user, image_id)
sen.save_sentence(session.user, image_id, time.time(), original_sentence, selected, submitted_sentence, labels, real_id)
image_pool.save(image_id)
logger.info(edit)
if int(edit) == 0:
user_control.update_cursor(session.user)
return True
class history:
def GET(self):
j, r = check()
if j:
return r
i = web.input()
page = i.get("page")
if page is None:
page = 1
judge, data, page_number = sen.get_sentence(session.user, int(page))
resp = {'judge': judge, 'data': data, 'page': page_number, 'current': page}
return render.history(resp)
if __name__ == "__main__":
app.run()
|
#!/usr/bin/env python3
from __future__ import print_function
import subprocess as sp
from multiprocessing.pool import ThreadPool
from sys import stdin, stdout, stderr, argv
import sys
import os
import yaml
def parse_cfg():
with open("./config.yml") as fh:
        d = yaml.safe_load(fh)
return d["lanes"]
def runaxe(lane):
lanedata = parse_cfg()[lane]
keyfile = "metadata/keyfiles/" + lane + ".axe"
statsfile = "data/stats/demux/" + lane + ".tsv"
r1fq = "rawdata/" + lane + "/" + lane + "_R1.fastq.gz"
r2fq = "rawdata/" + lane + "/" + lane + "_R2.fastq.gz"
outprefix = "data/reads/raw/" + lane + "/"
logfile = "data/log/demux/" + lane + ".log"
os.makedirs("data/stats/demux", exist_ok=True)
os.makedirs("data/log/demux", exist_ok=True)
os.makedirs(outprefix.rstrip("/"), exist_ok=True)
cmd = ["axe-demux",
"-m", "0",
"-t", statsfile,
"-b", keyfile,
"-f", r1fq,
"-r", r2fq,
"-I", outprefix]
if lanedata["combo"]:
cmd.append("-c")
print("Running lane", lane, file=stderr)
proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.STDOUT)
with open(logfile, "wb", buffering=1000) as fh:
fh.write((" ".join(cmd) + "\n").encode("utf8"))
while True:
fh.write(proc.stdout.read(100))
if proc.poll() is not None:
break
print("Finished lane", lane, "(returned", proc.returncode, ")", file=stderr)
return proc.returncode
def main():
lanes = parse_cfg()
if len(argv) > 1:
# Filter on whitelist of plates
lanes = {k: v for k, v in lanes.items() if k in argv[1:]}
pool = ThreadPool(16)
rets = pool.map(runaxe, lanes)
sys.exit(max(rets))
if __name__ == "__main__":
main()
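# A config.yml matching what parse_cfg() expects could look like this (illustrative lane
# names; only the "combo" flag is read per lane):
#
#     lanes:
#       lane1:
#         combo: true
#       lane2:
#         combo: false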
|
import numpy as np
import matplotlib.pyplot as plt # For plotting
import pdb
import pandas as pd
import seaborn as sns
from scipy import stats
data1 = np.load("train_nu.npz")
#data1 = np.load('pipe_test_1dnu.npz')
nu = data1['nu_1d']
#nu = np.sort(nu)
print('nu is',nu)
############################
#profile viscosity
#ss
data = np.load('pred_poiseuille_para.npz')
mesh = data['mesh']
print('shape of mesh is',mesh.shape)
u = data['u']
v = data['v']
p = data['p']
ut = data['ut']
uMaxP=data['uMaxP']
uMaxA=data['uMaxA']
print("shape of uMaxA",uMaxA.shape)
Ny, Nx, Np = u.shape
print ("mesh shape = ", mesh.shape)
print ("u shape", u.shape)
idxP = 28
# plt.figure()
# plt.contourf(mesh[0,:,:, idxP], mesh[1,:,:,idxP], u[:, :, idxP])
# plt.axis('equal')
# plt.colorbar()
#plt.savefig('pipe1.png')
#idxP = np.array([0,28,49])
idxP = [3]
plot_x = 0.8
plot_y = 0.07
fontsize = 16
# y = np.linspace(-0.05,0.05,50)
# ii = 0
# for idxPi in idxP:
# plt.figure()
# for i in range(Nx):
# pP, = plt.plot(y,u[:, i, idxPi])
# pT, = plt.plot(y,ut[:, i, idxPi], 'r--')
# ii = ii+1
#plt.close('all')
# plt.legend([pP, pT], ['NN Surrogate,nu = 2.1e-4', 'Truth'],fontsize = fontsize)
#print ('max u = ', np.max(u[:, :, idxP]))
#print ('max ut = ', np.max(ut[:, :, idxP]))
#plt.savefig('pipe2.png')
#plt.text(0, 0.1, r'$\delta$',
#{'color': 'k', 'fontsize': 24, 'ha': 'center', 'va': 'center',
#'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)})
d = 0.1
#plot spanwise u profile along y, looping from nu_small to nu_large
#u = data['u']
idx_X = int(round(Nx/2))
y = np.linspace(-0.05,0.05,50)
can = [3,6,14,49]
#for idxP in range(len(nu)):
xtext= [0,0.5,1]
ytext = [0.45,0.28,0.1,0.01]
plt.figure(1)
Re = []
plt.figure(1)
plt.clf()
for idxP in range(len(can)):
#plt.figure(1)
#plt.clf()
ax1 = plt.subplot(111)
pT, = plt.plot(y,ut[:, idx_X, can[idxP]], color='darkblue', linestyle='-', lw=3.0, alpha=1.0)
pP, = plt.plot(y,u[:, idx_X, can[idxP]], color='red', linestyle='--', dashes=(5, 5), lw=2.0, alpha=1.0)
tmpRe = np.max(u[:, idx_X, can[idxP]])*d/nu[can[idxP]]
Re.append(tmpRe)
#print("Re is",Re)
nu_current = float("{0:.5f}".format(nu[can[idxP]]))
#plt.title(r'$\nu = $' + str(nu_current))
plt.text(-0.012,ytext[idxP],r'$\nu = $' + str(nu_current),{'color': 'k', 'fontsize': 16})
#plt.legend([pT, pP], ['Analytical', 'NN surrogate'], fontsize = 16,loc = 10)
plt.ylabel(r'$u(y)$', fontsize=16)
plt.xlabel(r'$y$', fontsize=16)
ax1.tick_params(axis='x', labelsize=16)
ax1.tick_params(axis='y', labelsize=16)
ax1.set_xlim([-0.05, 0.05])
ax1.set_ylim([0.0, 0.62])
figureName = 'pipe_uProfiles_nuIdx_.png'
plt.savefig(figureName, bbox_inches='tight')
print('Re is',Re)
np.savez('test_Re',Re = Re)
plt.figure(2)
plt.clf()
ax1 = plt.subplot(111)
sns.kdeplot(uMaxA[0, :], shade=True, label='Analytical', linestyle="-", linewidth=3)
sns.kdeplot(uMaxP[0, :], shade=False, label='DNN', linestyle="--", linewidth=3.5, color='darkred')
plt.legend(prop={'size': 16})
plt.xlabel(r'$u_c$', fontsize=16)
plt.ylabel(r'PDF', fontsize=16)
ax1.tick_params(axis='x', labelsize=16)
ax1.tick_params(axis='y', labelsize=16)
figureName = 'pipe_unformUQ.png'
plt.savefig(figureName, bbox_inches='tight')
plt.show()
|
from default_settings import * # noqa
SQLALCHEMY_DATABASE_URI = 'sqlite://'
SECRET_KEY = 'CHANGE_ME'
|
#!/usr/bin/env python
#
from .KernelNode import KernelNode as base, debug
class Phonon_IncoherentInelastic_EnergyFocusing_Kernel(base):
tag = "Phonon_IncoherentInelastic_EnergyFocusing_Kernel"
def createKernel( self, **kwds ):
def getval(key):
v = kwds.get(key)
if v: return self._parse(v)
return v
kargs = dict(
Ef = getval('Ef'),
dEf = getval('dEf'),
average_mass = getval('average_mass'),
scattering_xs = getval('scattering_xs'),
absorption_xs = getval('absorption_xs'),
)
from mccomponents.sample.phonon \
import incoherentinelastic_energyfocusing_kernel as f
return f(None, **kargs)
def onDOS(self, dos):
self.element.dos = dos
return
onLinearlyInterpolatedDOS = onDOS
pass # end of Phonon_IncoherentInelastic_EnergyFocusing_Kernel
from .HomogeneousScatterer import HomogeneousScatterer
HomogeneousScatterer.onPhonon_IncoherentInelastic_EnergyFocusing_Kernel = HomogeneousScatterer.onKernel
# End of file
|
from random import randint
from time import sleep

from selenium import webdriver
from Hush import usuarioEmail, usuarioSenha
browser = webdriver.Firefox()
browser.get("https://www.skoob.com.br/login/")
email = browser.find_element_by_name("data[Usuario][email]")
email.send_keys(usuarioEmail)
pwd = browser.find_element_by_name("data[Usuario][senha]")
pwd.send_keys(usuarioSenha)
entrar = browser.find_element_by_xpath('//*[@id="login-box-col02"]/form/div/div/input')
entrar.click()
link_indice_Skoob = browser.find_element_by_xpath('//*[@id="topoInterno"]/a/img')
link_indice_Skoob.click()
link_cortesia = browser.find_element_by_xpath('//*[@id="home-conteudo"]/div[1]/div[1]/div[2]/a')
link_cortesia.click()
#//*[@id="card3475"]/div[2]/div[4]
for elem in browser.find_elements_by_css_selector('[class *= "participe"]'):  # each "participate" giveaway link
    idLivro = elem.get_attribute("rel")
    print(elem.get_attribute("rel"))
    linkParticipe = browser.find_element_by_xpath('//*[@id="card'+idLivro+'"]/div[2]/div[4]/a[1]')
    if linkParticipe.is_displayed():  # click only if the link is visible
        sleep(randint(10, 100))
        linkParticipe.click()
browser.quit()
|