# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Contains the legacy base Layer class.
This is intended to be used by legacy TF v1 layers that are deprecated but still
use the Keras Layer as their base class. This copy of the Layer will stay unchanged,
which ensures that the stability of v1 functionality is not affected by the active
development of the Keras Layer.
"""
# pylint: disable=g-classes-have-attributes
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import itertools
import threading
import weakref
import numpy as np
import six
from six.moves import zip # pylint: disable=redefined-builtin
from google.protobuf import json_format
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.autograph.impl import api as autograph
from tensorflow.python.distribute import distribution_strategy_context as ds_context
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import function
from tensorflow.python.framework import auto_control_deps
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import tensor_util
from tensorflow.python.frozen_keras import backend
from tensorflow.python.frozen_keras import constraints
from tensorflow.python.frozen_keras import initializers
from tensorflow.python.frozen_keras import regularizers
from tensorflow.python.frozen_keras.engine import base_layer_utils
from tensorflow.python.frozen_keras.engine import input_spec
from tensorflow.python.frozen_keras.engine import node as node_module
from tensorflow.python.frozen_keras.utils import generic_utils
from tensorflow.python.frozen_keras.utils import layer_utils
from tensorflow.python.frozen_keras.utils import tf_utils
# Modules that only depend on `keras.layers` import these from here.
from tensorflow.python.frozen_keras.utils.generic_utils import to_snake_case # pylint: disable=unused-import
from tensorflow.python.frozen_keras.utils.tf_utils import is_tensor_or_tensor_list # pylint: disable=unused-import
from tensorflow.python.module import module
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.training.tracking import base as trackable
from tensorflow.python.training.tracking import data_structures
from tensorflow.python.training.tracking import layer_utils as trackable_layer_utils
from tensorflow.python.training.tracking import tracking
from tensorflow.python.util import compat
from tensorflow.python.util import deprecation
from tensorflow.python.util import nest
from tensorflow.python.util import object_identity
from tensorflow.python.util import tf_inspect
from tensorflow.tools.docs import doc_controls
# Prefix that is added to the TF op layer names.
_TF_OP_LAYER_NAME_PREFIX = 'tf_op_layer_'
class LegacyBaseLayer(module.Module):
"""This is the class from which all layers inherit.
A layer is a callable object that takes as input one or more tensors and
that outputs one or more tensors. It involves *computation*, defined
in the `call()` method, and a *state* (weight variables), defined
either in the constructor `__init__()` or in the `build()` method.
Users will just instantiate a layer and then treat it as a callable.
We recommend that descendants of `Layer` implement the following methods:
* `__init__()`: Defines custom layer attributes, and creates layer state
variables that do not depend on input shapes, using `add_weight()`.
* `build(self, input_shape)`: This method can be used to create weights that
depend on the shape(s) of the input(s), using `add_weight()`. `__call__()`
will automatically build the layer (if it has not been built yet) by
calling `build()`.
* `call(self, *args, **kwargs)`: Called in `__call__` after making sure
`build()` has been called. `call()` performs the logic of applying the
layer to the input tensors (which should be passed in as arguments).
Two reserved keyword arguments you can optionally use in `call()` are:
- `training` (boolean, whether the call is in
inference mode or training mode)
- `mask` (boolean tensor encoding masked timesteps in the input, used
in RNN layers)
* `get_config(self)`: Returns a dictionary containing the configuration used
to initialize this layer. If the keys differ from the arguments
in `__init__`, then override `from_config(self)` as well.
This method is used when saving
the layer or a model that contains this layer.
Examples:
Here's a basic example: a layer with two variables, `w` and `b`,
that returns `y = w . x + b`.
It shows how to implement `build()` and `call()`.
Variables set as attributes of a layer are tracked as weights
of the layers (in `layer.weights`).
```python
class SimpleDense(Layer):
def __init__(self, units=32):
super(SimpleDense, self).__init__()
self.units = units
def build(self, input_shape): # Create the state of the layer (weights)
w_init = tf.random_normal_initializer()
self.w = tf.Variable(
initial_value=w_init(shape=(input_shape[-1], self.units),
dtype='float32'),
trainable=True)
b_init = tf.zeros_initializer()
self.b = tf.Variable(
initial_value=b_init(shape=(self.units,), dtype='float32'),
trainable=True)
def call(self, inputs): # Defines the computation from inputs to outputs
return tf.matmul(inputs, self.w) + self.b
# Instantiates the layer.
linear_layer = SimpleDense(4)
# This will also call `build(input_shape)` and create the weights.
y = linear_layer(tf.ones((2, 2)))
assert len(linear_layer.weights) == 2
# These weights are trainable, so they're listed in `trainable_weights`:
assert len(linear_layer.trainable_weights) == 2
```
Note that the method `add_weight()` offers a shortcut to create weights:
```python
class SimpleDense(Layer):
def __init__(self, units=32):
super(SimpleDense, self).__init__()
self.units = units
def build(self, input_shape):
self.w = self.add_weight(shape=(input_shape[-1], self.units),
initializer='random_normal',
trainable=True)
self.b = self.add_weight(shape=(self.units,),
initializer='random_normal',
trainable=True)
def call(self, inputs):
return tf.matmul(inputs, self.w) + self.b
```
Besides trainable weights, updated via backpropagation during training,
layers can also have non-trainable weights. These weights are meant to
be updated manually during `call()`. Here's an example layer that computes
the running sum of its inputs:
```python
class ComputeSum(Layer):
def __init__(self, input_dim):
super(ComputeSum, self).__init__()
# Create a non-trainable weight.
self.total = tf.Variable(initial_value=tf.zeros((input_dim,)),
trainable=False)
def call(self, inputs):
self.total.assign_add(tf.reduce_sum(inputs, axis=0))
return self.total
my_sum = ComputeSum(2)
x = tf.ones((2, 2))
y = my_sum(x)
print(y.numpy()) # [2. 2.]
y = my_sum(x)
print(y.numpy()) # [4. 4.]
assert my_sum.weights == [my_sum.total]
assert my_sum.non_trainable_weights == [my_sum.total]
assert my_sum.trainable_weights == []
```
For more information about creating layers, see the guide
[Writing custom layers and models with Keras](
https://www.tensorflow.org/guide/keras/custom_layers_and_models)
Arguments:
trainable: Boolean, whether the layer's variables should be trainable.
name: String name of the layer.
dtype: The dtype of the layer's computations and weights (default of
`None` means use `tf.keras.backend.floatx` in TensorFlow 2, or the type
of the first input in TensorFlow 1).
dynamic: Set this to `True` if your layer should only be run eagerly, and
should not be used to generate a static computation graph.
This would be the case for a Tree-RNN or a recursive network,
for example, or generally for any layer that manipulates tensors
using Python control flow. If `False`, we assume that the layer can
safely be used to generate a static computation graph.
Attributes:
name: The name of the layer (string).
dtype: The dtype of the layer's computations and weights.
updates: List of update ops of this layer.
losses: List of losses added by this layer.
trainable_weights: List of variables to be included in backprop.
non_trainable_weights: List of variables that should not be
included in backprop.
weights: The concatenation of the lists trainable_weights and
non_trainable_weights (in this order).
trainable: Whether the layer should be trained (boolean).
input_spec: Optional (list of) `InputSpec` object(s) specifying the
constraints on inputs that can be accepted by the layer.
Each layer has a dtype, which is typically the dtype of the layer's
computations and variables. A layer's dtype can be queried via the
`Layer.dtype` property. The dtype is specified with the `dtype` constructor
argument. In TensorFlow 2, the dtype defaults to `tf.keras.backend.floatx()`
if no dtype is passed. `floatx()` itself defaults to "float32".
"""
# See tf.Module for the usage of this property.
# The key for _obj_reference_counts_dict is a Trackable, which could be a
# variable or layer etc. tf.Module._flatten will fail to flatten the key
# since it is trying to convert Trackable to a string. This attribute can be
# ignored even after the fix of the nest lib, since the trackable objects should
# already be available as individual attributes. _obj_reference_counts_dict
# just contains a copy of them.
_TF_MODULE_IGNORED_PROPERTIES = frozenset(itertools.chain(
('_obj_reference_counts_dict',),
module.Module._TF_MODULE_IGNORED_PROPERTIES
))
@trackable.no_automatic_dependency_tracking
def __init__(self, trainable=True, name=None, dtype=None, dynamic=False,
**kwargs):
# These properties should be set by the user via keyword arguments.
# note that 'dtype', 'input_shape' and 'batch_input_shape'
# are only applicable to input layers: do not pass these keywords
# to non-input layers.
allowed_kwargs = {
'input_shape',
'batch_input_shape',
'batch_size',
'weights',
'activity_regularizer',
}
# Validate optional keyword arguments.
generic_utils.validate_kwargs(kwargs, allowed_kwargs)
# Mutable properties
# Indicates whether the layer's weights are updated during training
# and whether the layer's updates are run during training.
self._trainable = trainable
# A stateful layer is a layer whose updates are run during inference too,
# for instance stateful RNNs.
self._stateful = False
# Indicates whether `build` needs to be called upon layer call, to create
# the layer's weights.
self.built = False
# Record the build input shape for loading purposes.
# TODO(kathywu): Move this to Layer._set_save_spec once cl/290121460 is
# submitted.
self._build_input_shape = None
# Provides information about which inputs are compatible with the layer.
self._input_spec = None
self.supports_masking = False
self._supports_ragged_inputs = False
self._init_set_name(name)
self._activity_regularizer = kwargs.pop('activity_regularizer', None)
self._maybe_create_attribute('_trainable_weights', [])
self._maybe_create_attribute('_non_trainable_weights', [])
self._updates = []
# Object to store all thread local layer properties.
self._thread_local = threading.local()
# A list of zero-argument lambdas which return Tensors, used for variable
# regularizers.
self._callable_losses = []
# A list of symbolic Tensors containing activity regularizers and losses
# manually added through `add_loss` in graph-building mode.
self._losses = []
# A list of metric instances corresponding to the symbolic metric tensors
# added using the `add_metric` API.
self._metrics = []
# Ensures the same metric is not added multiple times in `MirroredStrategy`.
self._metrics_lock = threading.Lock()
self._dtype = None if dtype is None else dtypes.as_dtype(dtype).name
# Dependencies tracked via attribute assignment.
# All layers in order of horizontal graph traversal.
# Entries are unique. For models, this includes input and output layers.
self._maybe_create_attribute('_layers', [])
# These lists will be filled via successive calls
# to self._add_inbound_node().
# Used in symbolic mode only, only in conjunction with graph-networks
self._inbound_nodes = []
self._outbound_nodes = []
self._init_call_fn_args()
# Whether the `call` method can be used to build a TF graph without issues.
# This attribute has no effect if the model is created using the Functional
# API. Instead, `model.dynamic` is determined based on the internal layers.
self._dynamic = dynamic
# Manage input shape information if passed.
if 'input_shape' in kwargs or 'batch_input_shape' in kwargs:
# In this case we will later create an input layer
# to insert before the current layer
if 'batch_input_shape' in kwargs:
batch_input_shape = tuple(kwargs['batch_input_shape'])
elif 'input_shape' in kwargs:
if 'batch_size' in kwargs:
batch_size = kwargs['batch_size']
else:
batch_size = None
batch_input_shape = (batch_size,) + tuple(kwargs['input_shape'])
self._batch_input_shape = batch_input_shape
# Manage initial weight values if passed.
self._initial_weights = kwargs.get('weights', None)
# Whether the layer will track any layers that are set as attributes on itself
# as sub-layers; the weights of those sub-layers will be included in the
# parent layer's variables() as well.
# Defaults to True, which means auto-tracking is turned on. Certain subclasses,
# like the Sequential model, might want to turn it off.
self._auto_track_sub_layers = True
@trackable.no_automatic_dependency_tracking
@generic_utils.default
def build(self, input_shape):
"""Creates the variables of the layer (optional, for subclass implementers).
This is a method that implementers of subclasses of `Layer` or `Model`
can override if they need a state-creation step in-between
layer instantiation and layer call.
This is typically used to create the weights of `Layer` subclasses.
Arguments:
input_shape: Instance of `TensorShape`, or list of instances of
`TensorShape` if the layer expects a list of inputs
(one instance per input).
"""
# Only record the build input shape for overridden build methods.
if not hasattr(self.build, '_is_default'):
self._build_input_shape = input_shape
self.built = True
@doc_controls.for_subclass_implementers
def call(self, inputs, **kwargs): # pylint: disable=unused-argument
"""This is where the layer's logic lives.
Arguments:
inputs: Input tensor, or list/tuple of input tensors.
**kwargs: Additional keyword arguments.
Returns:
A tensor or list/tuple of tensors.
"""
return inputs
@doc_controls.for_subclass_implementers
def _add_trackable(self, trackable_object, trainable):
"""Adds a Trackable object to this layer's state.
Arguments:
trackable_object: The tf.tracking.Trackable object to add.
trainable: Boolean, whether the variable should be part of the layer's
"trainable_variables" (e.g. variables, biases) or
"non_trainable_variables" (e.g. BatchNorm mean and variance).
Returns:
The TrackableWeightHandler used to track this object.
"""
handler = base_layer_utils.TrackableWeightHandler(trackable_object)
if trainable:
self._trainable_weights.append(handler)
else:
self._non_trainable_weights.append(handler)
return handler
@doc_controls.for_subclass_implementers
def add_weight(self,
name=None,
shape=None,
dtype=None,
initializer=None,
regularizer=None,
trainable=None,
constraint=None,
partitioner=None,
use_resource=None,
synchronization=tf_variables.VariableSynchronization.AUTO,
aggregation=tf_variables.VariableAggregation.NONE,
**kwargs):
"""Adds a new variable to the layer.
Arguments:
name: Variable name.
shape: Variable shape. Defaults to scalar if unspecified.
dtype: The type of the variable. Defaults to `self.dtype` or `float32`.
initializer: Initializer instance (callable).
regularizer: Regularizer instance (callable).
trainable: Boolean, whether the variable should be part of the layer's
"trainable_variables" (e.g. variables, biases)
or "non_trainable_variables" (e.g. BatchNorm mean and variance).
Note that `trainable` cannot be `True` if `synchronization`
is set to `ON_READ`.
constraint: Constraint instance (callable).
partitioner: Partitioner to be passed to the `Trackable` API.
use_resource: Whether to use `ResourceVariable`.
synchronization: Indicates when a distributed variable will be
aggregated. Accepted values are constants defined in the class
`tf.VariableSynchronization`. By default the synchronization is set to
`AUTO` and the current `DistributionStrategy` chooses
when to synchronize. If `synchronization` is set to `ON_READ`,
`trainable` must not be set to `True`.
aggregation: Indicates how a distributed variable will be aggregated.
Accepted values are constants defined in the class
`tf.VariableAggregation`.
**kwargs: Additional keyword arguments. Accepted values are `getter`,
`collections` and `caching_device`.
Returns:
The created variable. Usually either a `Variable` or `ResourceVariable`
instance. If `partitioner` is not `None`, a `PartitionedVariable`
instance is returned.
Raises:
RuntimeError: If called with partitioned variable regularization and
eager execution is enabled.
ValueError: When giving unsupported dtype and no initializer or when
trainable has been set to True with synchronization set as `ON_READ`.
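A minimal usage sketch (illustrative only; it assumes a subclass with a
`self.units` attribute and that `tf` is the imported TensorFlow module):
```python
def build(self, input_shape):
  # Trainable kernel whose L2 penalty is collected into `self.losses`.
  self.kernel = self.add_weight(
      name='kernel',
      shape=(int(input_shape[-1]), self.units),
      initializer='glorot_uniform',
      regularizer='l2',
      trainable=True)
  # Non-trainable scalar counter, meant to be updated manually in `call()`.
  self.call_count = self.add_weight(
      name='call_count', shape=(), dtype='int64', initializer='zeros',
      trainable=False)
```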
"""
if shape is None:
shape = ()
# Validate optional keyword arguments.
for kwarg in kwargs:
if kwarg not in ['getter', 'collections', 'caching_device']:
raise TypeError('Unknown keyword argument:', kwarg)
getter = kwargs.pop('getter', base_layer_utils.make_variable)
collections_arg = kwargs.pop('collections', None)
# See the docstring for tf.Variable about the details for caching_device.
caching_device = kwargs.pop('caching_device', None)
if dtype is None:
dtype = self.dtype or backend.floatx()
dtype = dtypes.as_dtype(dtype)
initializer = initializers.get(initializer)
regularizer = regularizers.get(regularizer)
constraint = constraints.get(constraint)
if synchronization == tf_variables.VariableSynchronization.ON_READ:
if trainable:
raise ValueError(
'Synchronization value can be set to '
'VariableSynchronization.ON_READ only for non-trainable variables. '
'You have specified trainable=True and '
'synchronization=VariableSynchronization.ON_READ.')
else:
# Set trainable to be false when variable is to be synced on read.
trainable = False
elif trainable is None:
trainable = True
# Initialize variable when no initializer provided
if initializer is None:
# If dtype is DT_FLOAT, provide a uniform unit scaling initializer
if dtype.is_floating:
initializer = initializers.glorot_uniform()
# If dtype is DT_INT/DT_UINT, provide a default value `zero`
# If dtype is DT_BOOL, provide a default value `FALSE`
elif dtype.is_integer or dtype.is_unsigned or dtype.is_bool:
initializer = initializers.zeros()
# NOTE: Do we need to support handling DT_STRING and DT_COMPLEX here?
else:
raise ValueError('An initializer for variable %s of type %s is required'
' for layer %s' % (name, dtype.base_dtype, self.name))
variable = self._add_variable_with_custom_getter(
name=name,
shape=shape,
# TODO(allenl): a `make_variable` equivalent should be added as a
# `Trackable` method.
getter=getter,
# Manage errors in Layer rather than Trackable.
overwrite=True,
initializer=initializer,
dtype=dtype,
constraint=constraint,
trainable=trainable,
partitioner=partitioner,
use_resource=use_resource,
collections=collections_arg,
synchronization=synchronization,
aggregation=aggregation,
caching_device=caching_device)
if regularizer is not None:
# TODO(fchollet): in the future, this should be handled at the
# level of variable creation, and weight regularization losses
# should be variable attributes.
name_in_scope = variable.name[:variable.name.find(':')]
self._handle_weight_regularization(name_in_scope,
variable,
regularizer)
if isinstance(variable, tf_variables.PartitionedVariable):
for v in variable:
backend.track_variable(v)
if trainable:
self._trainable_weights.append(v)
else:
self._non_trainable_weights.append(v)
else:
backend.track_variable(variable)
if trainable:
self._trainable_weights.append(variable)
else:
self._non_trainable_weights.append(variable)
return variable
@generic_utils.default
def get_config(self):
"""Returns the config of the layer.
A layer config is a Python dictionary (serializable)
containing the configuration of a layer.
The same layer can be reinstantiated later
(without its trained weights) from this configuration.
The config of a layer does not include connectivity
information, nor the layer class name. These are handled
by `Network` (one layer of abstraction above).
Returns:
Python dictionary.
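Example of a hypothetical layer that overrides `get_config` to record its
constructor arguments (a sketch; `Linear` and `units` are illustrative names):
```python
class Linear(tf.keras.layers.Layer):
  def __init__(self, units=32, **kwargs):
    super(Linear, self).__init__(**kwargs)
    self.units = units
  def get_config(self):
    config = super(Linear, self).get_config()
    config.update({'units': self.units})
    return config
```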
"""
all_args = tf_inspect.getfullargspec(self.__init__).args
config = {'name': self.name, 'trainable': self.trainable}
if hasattr(self, '_batch_input_shape'):
config['batch_input_shape'] = self._batch_input_shape
config['dtype'] = self.dtype
if hasattr(self, 'dynamic'):
# Only include `dynamic` in the `config` if it is `True`
if self.dynamic:
config['dynamic'] = self.dynamic
elif 'dynamic' in all_args:
all_args.remove('dynamic')
expected_args = config.keys()
# Finds all arguments in the `__init__` that are not in the config:
extra_args = [arg for arg in all_args if arg not in expected_args]
# Check that either the only argument in the `__init__` is `self`,
# or that `get_config` has been overridden:
if len(extra_args) > 1 and hasattr(self.get_config, '_is_default'):
raise NotImplementedError('Layer %s has arguments in `__init__` and '
'therefore must override `get_config`.' %
self.__class__.__name__)
return config
@classmethod
def from_config(cls, config):
"""Creates a layer from its config.
This method is the reverse of `get_config`,
capable of instantiating the same layer from the config
dictionary. It does not handle layer connectivity
(handled by Network), nor weights (handled by `set_weights`).
Arguments:
config: A Python dictionary, typically the
output of get_config.
Returns:
A layer instance.
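For example, a layer can typically be round-tripped through its config
(a sketch; the trained weights are not carried over):
```python
config = layer.get_config()
clone = layer.__class__.from_config(config)
```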
"""
return cls(**config)
def compute_output_shape(self, input_shape):
"""Computes the output shape of the layer.
If the layer has not been built, this method will call `build` on the
layer. This assumes that the layer will later be used with inputs that
match the input shape provided here.
Arguments:
input_shape: Shape tuple (tuple of integers)
or list of shape tuples (one per output tensor of the layer).
Shape tuples can include None for free dimensions,
instead of an integer.
Returns:
An output shape tuple.
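A sketch of a possible override for a hypothetical dense-like layer with a
`self.units` attribute (illustrative only):
```python
def compute_output_shape(self, input_shape):
  input_shape = tf.TensorShape(input_shape).as_list()
  return tf.TensorShape(input_shape[:-1] + [self.units])
```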
"""
if context.executing_eagerly():
# In this case we build the model first in order to do shape inference.
# This is acceptable because the framework only calls
# `compute_output_shape` on shape values that the layer would later be
# built for. It would however cause issues in case a user attempts to
# use `compute_output_shape` manually with shapes that are incompatible
# with the shape the Layer will be called on (these users will have to
# implement `compute_output_shape` themselves).
self._maybe_build(input_shape)
with func_graph.FuncGraph('graph').as_default():
input_shape = tf_utils.convert_shapes(input_shape, to_tuples=False)
def _make_placeholder_like(shape):
ph = backend.placeholder(shape=shape, dtype=self.dtype)
ph._keras_mask = None
return ph
inputs = nest.map_structure(_make_placeholder_like, input_shape)
try:
outputs = self(inputs, training=False)
except TypeError as e:
six.raise_from(
NotImplementedError(
'We could not automatically infer the static shape of the '
'layer\'s output. Please implement the '
'`compute_output_shape` method on your layer (%s).' %
self.__class__.__name__), e)
return nest.map_structure(lambda t: t.shape, outputs)
raise NotImplementedError
@doc_controls.for_subclass_implementers
def compute_output_signature(self, input_signature):
"""Compute the output tensor signature of the layer based on the inputs.
Unlike a TensorShape object, a TensorSpec object contains both shape
and dtype information for a tensor. This method allows layers to provide
output dtype information if it is different from the input dtype.
For any layer that doesn't implement this function,
the framework will fall back to use `compute_output_shape`, and will
assume that the output dtype matches the input dtype.
Args:
input_signature: Single TensorSpec or nested structure of TensorSpec
objects, describing a candidate input for the layer.
Returns:
Single TensorSpec or nested structure of TensorSpec objects, describing
how the layer would transform the provided input.
Raises:
TypeError: If input_signature contains a non-TensorSpec object.
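A sketch of a possible override for a hypothetical cast-like layer whose
outputs keep the input shapes but are always float16 (illustrative only):
```python
def compute_output_signature(self, input_signature):
  return tf.nest.map_structure(
      lambda s: tf.TensorSpec(shape=s.shape, dtype=tf.float16),
      input_signature)
```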
"""
def check_type_return_shape(s):
if not isinstance(s, tensor_spec.TensorSpec):
raise TypeError(
'Only TensorSpec signature types are supported, '
'but saw signature entry: {}.'.format(s))
return s.shape
input_shape = nest.map_structure(check_type_return_shape, input_signature)
output_shape = self.compute_output_shape(input_shape)
dtype = self._compute_dtype
if dtype is None:
input_dtypes = [s.dtype for s in nest.flatten(input_signature)]
# Default behavior when self.dtype is None, is to use the first input's
# dtype.
dtype = input_dtypes[0]
return nest.map_structure(
lambda s: tensor_spec.TensorSpec(dtype=dtype, shape=s),
output_shape)
@generic_utils.default
def compute_mask(self, inputs, mask=None): # pylint: disable=unused-argument
"""Computes an output mask tensor.
Arguments:
inputs: Tensor or list of tensors.
mask: Tensor or list of tensors.
Returns:
None or a tensor (or list of tensors,
one per output tensor of the layer).
"""
if not self.supports_masking:
if any(m is not None for m in nest.flatten(mask)):
raise TypeError('Layer ' + self.name + ' does not support masking, '
'but was passed an input_mask: ' + str(mask))
# masking not explicitly supported: return None as mask.
return None
# if masking is explicitly supported, by default
# carry over the input mask
return mask
def __call__(self, *args, **kwargs):
"""Wraps `call`, applying pre- and post-processing steps.
Arguments:
*args: Positional arguments to be passed to `self.call`.
**kwargs: Keyword arguments to be passed to `self.call`.
Returns:
Output tensor(s).
Note:
- The following optional keyword arguments are reserved for specific uses:
* `training`: Boolean scalar tensor of Python boolean indicating
whether the `call` is meant for training or inference.
* `mask`: Boolean input mask.
- If the layer's `call` method takes a `mask` argument (as some Keras
layers do), its default value will be set to the mask generated
for `inputs` by the previous layer (if `inputs` did come from
a layer that generated a corresponding mask, i.e. if it came from
a Keras layer with masking support).
Raises:
ValueError: if the layer's `call` method returns None (an invalid value).
RuntimeError: if `super().__init__()` was not called in the constructor.
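A minimal calling sketch, reusing the `SimpleDense` layer from the class
docstring (illustrative; the reserved `training` keyword is simply dropped if
`call()` does not accept it):
```python
layer = SimpleDense(4)
x = tf.ones((2, 2))
y_train = layer(x, training=True)   # reserved `training` keyword argument
y_infer = layer(x, training=False)
```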
"""
if not hasattr(self, '_thread_local'):
raise RuntimeError(
'You must call `super().__init__()` in the layer constructor.')
# Grab the first positional or keyword argument.
if args:
inputs = args[0]
args = args[1:]
elif self._call_fn_args[0] in kwargs:
inputs = kwargs.pop(self._call_fn_args[0])
else:
raise ValueError(
'The first argument to `Layer.call` must always be passed.')
call_context = base_layer_utils.call_context()
input_list = nest.flatten(inputs)
# We will attempt to build a TF graph if & only if all inputs are symbolic.
# This is always the case in graph mode. It can also be the case in eager
# mode when all inputs can be traced back to `keras.Input()` (when building
# models using the functional API).
build_graph = tf_utils.are_all_symbolic_tensors(input_list)
# Accept NumPy and scalar inputs by converting to Tensors.
if any(isinstance(x, (np.ndarray, float, int)) for x in input_list):
def _convert_non_tensor(x):
# Don't call `ops.convert_to_tensor_v2` on all `inputs` because
# `SparseTensors` can't be converted to `Tensor`.
if isinstance(x, (np.ndarray, float, int)):
return ops.convert_to_tensor_v2(x)
return x
inputs = nest.map_structure(_convert_non_tensor, inputs)
input_list = nest.flatten(inputs)
# Handle `mask` propagation from previous layer to current layer. Masks can
# be propagated explicitly via the `mask` argument, or implicitly via
# setting the `_keras_mask` attribute on the inputs to a Layer. Masks passed
# explicitly take priority.
mask_arg_passed_by_framework = False
input_masks = self._collect_input_masks(inputs, args, kwargs)
if (self._expects_mask_arg and input_masks is not None and
not self._call_arg_was_passed('mask', args, kwargs)):
mask_arg_passed_by_framework = True
kwargs['mask'] = input_masks
# If `training` argument was not explicitly passed, propagate `training`
# value from this layer's calling layer.
training_arg_passed_by_framework = False
# Priority 1: `training` was explicitly passed.
if self._call_arg_was_passed('training', args, kwargs):
training_value = self._get_call_arg_value('training', args, kwargs)
if not self._expects_training_arg:
kwargs.pop('training')
else:
training_value = None
# Priority 2: `training` was passed to a parent layer.
if call_context.training is not None:
training_value = call_context.training
# Priority 3a: `learning_phase()` has been set.
elif backend.global_learning_phase_is_set():
training_value = backend.learning_phase()
# Priority 3b: Pass the `learning_phase()` if in the Keras FuncGraph.
elif build_graph:
with backend.get_graph().as_default():
if base_layer_utils.is_in_keras_graph():
training_value = backend.learning_phase()
if self._expects_training_arg and training_value is not None:
# Force the training_value to be bool type which matches to the contract
# for layer/model call args.
if tensor_util.is_tensor(training_value):
training_value = math_ops.cast(training_value, dtypes.bool)
else:
training_value = bool(training_value)
kwargs['training'] = training_value
training_arg_passed_by_framework = True
# Only create Keras history if at least one tensor originates from a
# `keras.Input`. Otherwise this Layer may be being used outside the Keras
# framework.
if build_graph and base_layer_utils.needs_keras_history(inputs):
base_layer_utils.create_keras_history(inputs)
# Clear eager losses on top level model call.
# We are clearing the losses only on the top level model call and not on
# every layer/model call because layer/model may be reused.
if (base_layer_utils.is_in_eager_or_tf_function() and
not call_context.in_call):
self._clear_losses()
with call_context.enter(self, inputs, build_graph, training_value):
# Check input assumptions set after layer building, e.g. input shape.
if build_graph:
# Symbolic execution on symbolic tensors. We will attempt to build
# the corresponding TF subgraph inside `backend.get_graph()`
# TODO(reedwm): We should assert input compatibility after the inputs
# are casted, not before.
input_spec.assert_input_compatibility(self.input_spec, inputs,
self.name)
if (any(isinstance(x, ragged_tensor.RaggedTensor) for x in input_list)
and self._supports_ragged_inputs is False): # pylint: disable=g-bool-id-comparison
raise ValueError('Layer %s does not support RaggedTensors as input. '
'Inputs received: %s. You can try converting your '
'input to a uniform tensor.' % (self.name, inputs))
graph = backend.get_graph()
with graph.as_default(), backend.name_scope(self._name_scope()):
# Build layer if applicable (if the `build` method has been
# overridden).
self._maybe_build(inputs)
if not self.dynamic:
# Wrapping `call` function in autograph to allow for dynamic control
# flow and control dependencies in call. We are limiting this to
# subclassed layers as autograph is strictly needed only for
# subclassed layers and models.
# tf_convert will respect the value of autograph setting in the
# enclosing tf.function, if any.
if (base_layer_utils.is_subclassed(self) and
not base_layer_utils.from_saved_model(self)):
call_fn = autograph.tf_convert(
self.call, ag_ctx.control_status_ctx())
else:
call_fn = self.call
try:
# Add auto_control_deps in V2 when they are not already added by
# a `tf.function`.
if (ops.executing_eagerly_outside_functions() and
not base_layer_utils.is_in_eager_or_tf_function()):
with auto_control_deps.AutomaticControlDependencies() as acd:
outputs = call_fn(inputs, *args, **kwargs)
# Wrap Tensors in `outputs` in `tf.identity` to avoid
# circular dependencies.
outputs = base_layer_utils.mark_as_return(outputs, acd)
else:
outputs = call_fn(inputs, *args, **kwargs)
except errors.OperatorNotAllowedInGraphError as e:
raise TypeError('You are attempting to use Python control '
'flow in a layer that was not declared to be '
'dynamic. Pass `dynamic=True` to the class '
'constructor.\nEncountered error:\n"""\n' +
str(e) + '\n"""')
else:
# We will use static shape inference to return symbolic tensors
# matching the specifications of the layer outputs.
# Since `self.dynamic` is True, we will never attempt to
# run the underlying TF graph (which is disconnected).
# TODO(fchollet): consider py_func as an alternative, which
# would enable us to run the underlying graph if needed.
outputs = self._symbolic_call(inputs)
if outputs is None:
raise ValueError('A layer\'s `call` method should return a '
'Tensor or a list of Tensors, not None '
'(layer: ' + self.name + ').')
if base_layer_utils.have_all_keras_metadata(inputs):
if training_arg_passed_by_framework:
kwargs.pop('training')
if mask_arg_passed_by_framework:
kwargs.pop('mask')
inputs, outputs = self._set_connectivity_metadata_(
inputs, outputs, args, kwargs)
self._handle_activity_regularization(inputs, outputs)
self._set_mask_metadata(inputs, outputs, input_masks)
if hasattr(self, '_set_inputs') and not self.inputs:
# Subclassed network: explicitly set metadata normally set by
# a call to self._set_inputs().
self._set_inputs(inputs, outputs)
else:
# Eager execution on data tensors.
with backend.name_scope(self._name_scope()):
self._maybe_build(inputs)
outputs = self.call(inputs, *args, **kwargs)
self._handle_activity_regularization(inputs, outputs)
self._set_mask_metadata(inputs, outputs, input_masks)
if hasattr(self, '_set_save_spec'):
self._set_save_spec(inputs)
return outputs
@property
def dtype(self):
"""Dtype used by the weights of the layer, set in the constructor."""
return self._dtype
@property
def name(self):
"""Name of the layer (string), set in the constructor."""
return self._name
@property
@trackable_layer_utils.cache_recursive_attribute('dynamic')
def dynamic(self):
"""Whether the layer is dynamic (eager-only); set in the constructor."""
# NOTE(taylorrobie): Currently self._dynamic is read-only. If that changes
# then this cache logic must be updated.
return self._dynamic
@property
@doc_controls.do_not_doc_inheritable
@trackable_layer_utils.cache_recursive_attribute('stateful')
def stateful(self):
return self._stateful
@stateful.setter
@trackable_layer_utils.invalidate_recursive_cache('stateful')
def stateful(self, value):
self._stateful = value
@property
def trainable(self):
return self._trainable
@trainable.setter
def trainable(self, value):
self._trainable = value
for layer in getattr(self, '_layers', []):
layer.trainable = value
@property
def activity_regularizer(self):
"""Optional regularizer function for the output of this layer."""
return self._activity_regularizer
@activity_regularizer.setter
def activity_regularizer(self, regularizer):
"""Optional regularizer function for the output of this layer."""
self._activity_regularizer = regularizer
@property
def input_spec(self):
"""`InputSpec` instance(s) describing the input format for this layer.
When you create a layer subclass, you can set `self.input_spec` to enable
the layer to run input compatibility checks when it is called.
Consider a `Conv2D` layer: it can only be called on a single input tensor
of rank 4. As such, you can set, in `__init__()`:
```python
self.input_spec = tf.keras.layers.InputSpec(ndim=4)
```
Now, if you try to call the layer on an input that isn't rank 4
(for instance, an input of shape `(2,)`), it will raise a nicely-formatted
error:
```
ValueError: Input 0 of layer conv2d is incompatible with the layer:
expected ndim=4, found ndim=1. Full shape received: [2]
```
Input checks that can be specified via `input_spec` include:
- Structure (e.g. a single input, a list of 2 inputs, etc)
- Shape
- Rank (ndim)
- Dtype
For more information, see `tf.keras.layers.InputSpec`.
Returns:
A `tf.keras.layers.InputSpec` instance, or nested structure thereof.
"""
return self._input_spec
@input_spec.setter
# Must be decorated to prevent tracking, since the input_spec can be nested
# InputSpec objects.
@trackable.no_automatic_dependency_tracking
def input_spec(self, value):
for v in nest.flatten(value):
if v is not None and not isinstance(v, input_spec.InputSpec):
raise TypeError('Layer input_spec must be an instance of InputSpec. '
'Got: {}'.format(v))
self._input_spec = value
@property
def trainable_weights(self):
"""List of all trainable weights tracked by this layer.
Trainable weights are updated via gradient descent during training.
Returns:
A list of trainable variables.
"""
if self.trainable:
children_weights = self._gather_children_attribute('trainable_weights')
return self._dedup_weights(self._trainable_weights + children_weights)
else:
return []
@property
def non_trainable_weights(self):
"""List of all non-trainable weights tracked by this layer.
Non-trainable weights are *not* updated during training. They are expected
to be updated manually in `call()`.
Returns:
A list of non-trainable variables.
"""
if self.trainable:
children_weights = self._gather_children_attribute(
'non_trainable_weights')
non_trainable_weights = self._non_trainable_weights + children_weights
else:
children_weights = self._gather_children_attribute('weights')
non_trainable_weights = (
self._trainable_weights + self._non_trainable_weights +
children_weights)
return self._dedup_weights(non_trainable_weights)
@property
def weights(self):
"""Returns the list of all layer variables/weights.
Returns:
A list of variables.
"""
return self.trainable_weights + self.non_trainable_weights
@property
@doc_controls.do_not_doc_inheritable
def updates(self):
collected_updates = []
all_layers = self._gather_unique_layers()
with backend.get_graph().as_default():
for layer in all_layers:
if not layer.trainable and not layer.stateful:
continue
for u in layer._updates:
if callable(u):
try:
u = u()
except errors.InaccessibleTensorError:
base_layer_utils.check_graph_consistency(
method='add_update', force_raise=True)
raise # check_graph_consistency may not always raise.
base_layer_utils.check_graph_consistency(u, method='add_update')
collected_updates.append(u)
return collected_updates
@property
def losses(self):
"""Losses which are associated with this `Layer`.
Variable regularization tensors are created when this property is accessed,
so it is eager safe: accessing `losses` under a `tf.GradientTape` will
propagate gradients back to the corresponding variables.
Returns:
A list of tensors.
"""
collected_losses = []
all_layers = self._gather_unique_layers()
for layer in all_layers:
# If any eager losses are present, we assume the model to be part of an
# eager training loop (either a custom one or the one used when
# `run_eagerly=True`) and so we always return just the eager losses.
if layer._eager_losses:
# Filter placeholder losses that may have been added by revived layers.
# (see base_layer_utils for details).
if (layer._eager_losses[0] is
not base_layer_utils.REVIVED_LOSS_PLACEHOLDER):
collected_losses.extend(layer._eager_losses)
else:
collected_losses.extend(layer._losses)
for regularizer in layer._callable_losses:
loss_tensor = regularizer()
if loss_tensor is not None:
collected_losses.append(loss_tensor)
return collected_losses
def add_loss(self, losses, inputs=None):
"""Add loss tensor(s), potentially dependent on layer inputs.
Some losses (for instance, activity regularization losses) may be dependent
on the inputs passed when calling a layer. Hence, when reusing the same
layer on different inputs `a` and `b`, some entries in `layer.losses` may
be dependent on `a` and some on `b`. This method automatically keeps track
of dependencies.
This method can be used inside a subclassed layer or model's `call`
function, in which case `losses` should be a Tensor or list of Tensors.
Example:
```python
class MyLayer(tf.keras.layers.Layer):
def call(self, inputs):
self.add_loss(tf.abs(tf.reduce_mean(inputs)), inputs=True)
return inputs
```
This method can also be called directly on a Functional Model during
construction. In this case, any loss Tensors passed to this Model must
be symbolic and be able to be traced back to the model's `Input`s. These
losses become part of the model's topology and are tracked in `get_config`.
Example:
```python
inputs = tf.keras.Input(shape=(10,))
x = tf.keras.layers.Dense(10)(inputs)
outputs = tf.keras.layers.Dense(1)(x)
model = tf.keras.Model(inputs, outputs)
# Activity regularization.
model.add_loss(tf.abs(tf.reduce_mean(x)))
```
If this is not the case for your loss (if, for example, your loss references
a `Variable` of one of the model's layers), you can wrap your loss in a
zero-argument lambda. These losses are not tracked as part of the model's
topology since they can't be serialized.
Example:
```python
inputs = tf.keras.Input(shape=(10,))
d = tf.keras.layers.Dense(10)
x = d(inputs)
outputs = tf.keras.layers.Dense(1)(x)
model = tf.keras.Model(inputs, outputs)
# Weight regularization.
model.add_loss(lambda: tf.reduce_mean(d.kernel))
```
The `get_losses_for` method allows you to retrieve the losses relevant to a
specific set of inputs.
Arguments:
losses: Loss tensor, or list/tuple of tensors. Rather than tensors, losses
may also be zero-argument callables which create a loss tensor.
inputs: Ignored when executing eagerly. If anything other than None is
passed, it signals the losses are conditional on some of the layer's
inputs, and thus they should only be run where these inputs are
available. This is the case for activity regularization losses, for
instance. If `None` is passed, the losses are assumed
to be unconditional, and will apply across all dataflows of the layer
(e.g. weight regularization losses).
"""
def _tag_unconditional(loss):
"""Process the loss and tag it by setting loss._unconditional_loss."""
if callable(loss):
# We run the loss without autocasting, as regularizers are often
# numerically unstable in float16.
with base_layer_utils.autocast_context_manager(None):
loss = loss()
if loss is None:
return None # Will be filtered out when computing the .losses property
if not tensor_util.is_tensor(loss):
loss = ops.convert_to_tensor_v2(loss, dtype=backend.floatx())
loss._unconditional_loss = (inputs is None) # pylint: disable=protected-access
return loss
losses = nest.flatten(losses)
callable_losses = []
eager_losses = []
symbolic_losses = []
for loss in losses:
if callable(loss):
callable_losses.append(functools.partial(_tag_unconditional, loss))
continue
if loss is None:
continue
if not tensor_util.is_tensor(loss):
loss = ops.convert_to_tensor_v2(loss, dtype=backend.floatx())
# TF Functions should take the eager path.
if (tf_utils.is_symbolic_tensor(loss) and
not base_layer_utils.is_in_tf_function()):
symbolic_losses.append(_tag_unconditional(loss))
base_layer_utils.check_graph_consistency(loss, method='add_loss')
elif tensor_util.is_tensor(loss):
eager_losses.append(_tag_unconditional(loss))
self._callable_losses.extend(callable_losses)
in_call_context = base_layer_utils.call_context().in_call
if eager_losses and not in_call_context:
raise ValueError(
'Expected a symbolic Tensor or a callable for the loss value. '
'Please wrap your loss computation in a zero argument `lambda`.')
self._eager_losses.extend(eager_losses)
if in_call_context:
for symbolic_loss in symbolic_losses:
self._losses.append(symbolic_loss)
else:
for symbolic_loss in symbolic_losses:
if getattr(self, '_is_graph_network', False):
self._graph_network_add_loss(symbolic_loss)
else:
# Possibly a loss was added in a Layer's `build`.
self._losses.append(symbolic_loss)
@trackable.no_automatic_dependency_tracking
def _clear_losses(self):
"""Used every step in eager to reset losses."""
self._eager_losses = []
if hasattr(self, '_layers'):
for layer in trackable_layer_utils.filter_empty_layer_containers(
self._layers):
layer._clear_losses()
@property
def metrics(self):
"""List of `tf.keras.metrics.Metric` instances tracked by the layer."""
collected_metrics = []
all_layers = self._gather_unique_layers()
for layer in all_layers:
with layer._metrics_lock:
collected_metrics.extend(layer._metrics)
return collected_metrics
def add_metric(self, value, aggregation=None, name=None):
"""Adds metric tensor to the layer.
Args:
value: Metric tensor.
aggregation: Sample-wise metric reduction function. If `aggregation=None`,
it indicates that the metric tensor provided has already been aggregated,
e.g. `bin_acc = BinaryAccuracy(name='acc')` followed by
`model.add_metric(bin_acc(y_true, y_pred))`. If `aggregation='mean'`, the
given metric tensor will be sample-wise reduced using the `mean` function,
e.g. `model.add_metric(tf.reduce_sum(outputs), name='output_mean',
aggregation='mean')`.
name: String metric name.
Raises:
ValueError: If `aggregation` is anything other than None or `mean`.
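A minimal sketch of adding an unaggregated metric tensor from inside `call()`
(the layer and metric names are illustrative):
```python
class MyLayer(tf.keras.layers.Layer):
  def call(self, inputs):
    # The tensor is unaggregated, so request sample-wise `mean` aggregation.
    self.add_metric(tf.reduce_sum(inputs), name='activation_sum',
                    aggregation='mean')
    return inputs
```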
"""
if aggregation is not None and aggregation != 'mean':
raise ValueError(
'We currently support only `mean` sample-wise metric aggregation. '
'You provided aggregation=`%s`' % aggregation)
from_metric_obj = hasattr(value, '_metric_obj')
is_symbolic = tf_utils.is_symbolic_tensor(value)
in_call_context = base_layer_utils.call_context().in_call
if name is None and not from_metric_obj:
# Eg. `self.add_metric(math_ops.reduce_sum(x), aggregation='mean')`
# In eager mode, we use metric name to lookup a metric. Without a name,
# a new Mean metric wrapper will be created on every model/layer call.
# So, we raise an error when no name is provided.
# We will do the same for symbolic mode for consistency although a name
# will be generated if no name is provided.
# We will not raise this error in the following use case for the sake of
# consistency, as the name is provided in the metric constructor.
# mean = metrics.Mean(name='my_metric')
# model.add_metric(mean(outputs))
raise ValueError('Please provide a name for your metric like '
'`self.add_metric(tf.reduce_sum(inputs), '
'name=\'mean_activation\', aggregation=\'mean\')`')
elif from_metric_obj:
name = value._metric_obj.name
if in_call_context:
# TF Function path should take the eager path.
if is_symbolic and not base_layer_utils.is_in_tf_function():
self._symbolic_add_metric(value, aggregation, name)
else:
self._eager_add_metric(value, aggregation, name)
else:
if not is_symbolic:
raise ValueError('Expected a symbolic Tensor for the metric value, '
'received: ' + str(value))
# Possibly a metric was added in a Layer's `build`.
if not getattr(self, '_is_graph_network', False):
with backend.get_graph().as_default():
self._symbolic_add_metric(value, aggregation, name)
return
if from_metric_obj:
raise ValueError('Using the result of calling a `Metric` object '
'when calling `add_metric` on a Functional '
'Model is not supported. Please pass the '
'Tensor to monitor directly.')
# Insert layers into the Keras Graph Network.
self._graph_network_add_metric(value, aggregation, name)
@deprecation.deprecated_args(None, '`inputs` is now automatically inferred',
'inputs')
@doc_controls.do_not_doc_inheritable
def add_update(self, updates, inputs=None):
"""Add update op(s), potentially dependent on layer inputs.
Weight updates (for instance, the updates of the moving mean and variance
in a BatchNormalization layer) may be dependent on the inputs passed
when calling a layer. Hence, when reusing the same layer on
different inputs `a` and `b`, some entries in `layer.updates` may be
dependent on `a` and some on `b`. This method automatically keeps track
of dependencies.
The `get_updates_for` method allows you to retrieve the updates relevant to a
specific set of inputs.
This call is ignored when eager execution is enabled (in that case, variable
updates are run on the fly and thus do not need to be tracked for later
execution).
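A minimal sketch of a layer that records a manual update (names are
illustrative; passing a zero-arg callable lets the update be skipped when
`trainable=False` in eager mode):
```python
class RunningMean(tf.keras.layers.Layer):
  def build(self, input_shape):
    self.moving_mean = self.add_weight(
        'moving_mean', shape=(int(input_shape[-1]),),
        initializer='zeros', trainable=False)
  def call(self, inputs):
    batch_mean = tf.reduce_mean(inputs, axis=0)
    self.add_update(lambda: self.moving_mean.assign(batch_mean))
    return inputs
```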
Arguments:
updates: Update op, or list/tuple of update ops, or zero-arg callable
that returns an update op. A zero-arg callable should be passed in
order to disable running the updates by setting `trainable=False`
on this Layer, when executing in Eager mode.
inputs: Deprecated, will be automatically inferred.
"""
call_context = base_layer_utils.call_context()
if (ds_context.has_strategy() and
ds_context.in_cross_replica_context() and
# When saving the model, the distribution strategy context should be
# ignored, following the default path for adding updates.
not call_context.saving):
# Updates don't need to be run in a cross-replica context.
return
updates = generic_utils.to_list(updates)
# All updates can be run immediately in Eager or in a tf.function.
if base_layer_utils.is_in_eager_or_tf_function():
if not call_context.frozen:
for update in updates:
if callable(update):
update()
return
if call_context.in_call:
relevant_inputs = call_context.inputs
else:
inbound_nodes = getattr(self, '_inbound_nodes', [])
relevant_inputs = [node.input_tensors for node in inbound_nodes]
def process_update(x):
"""Standardize update ops.
Arguments:
x: Tensor, op, or callable.
Returns:
An update op.
"""
if callable(x):
update = lambda: process_update(x())
if not ops.executing_eagerly_outside_functions():
# In V1 mode, call the callable right away and process. This is needed
# for TPU strategy.
return update()
elif isinstance(x, ops.Operation):
update = x
elif hasattr(x, 'op'):
update = x.op
else:
update = ops.convert_to_tensor_v2(x)
reachable = tf_utils.get_reachable_from_inputs(relevant_inputs, [update])
update._unconditional_update = update not in reachable
return update
updates = [process_update(x) for x in updates]
# Non-callable Updates are run automatically inside `call` in V2, so
# they do not need to be tracked later.
if ops.executing_eagerly_outside_functions() and call_context.in_call:
updates = [u for u in updates if callable(u)]
self._updates.extend(updates)
def set_weights(self, weights):
"""Sets the weights of the layer, from Numpy arrays.
The weights of a layer represent the state of the layer. This function
sets the weight values from numpy arrays. The weight values should be
passed in the order they are created by the layer. Note that the layer's
weights must be instantiated (by calling the layer on some input) before
calling this function.
For example, a Dense layer returns a list of two values: the per-output
weights and the bias value. These can be used to set the weights of another
Dense layer:
>>> a = tf.keras.layers.Dense(1,
... kernel_initializer=tf.constant_initializer(1.))
>>> a_out = a(tf.convert_to_tensor([[1., 2., 3.]]))
>>> a.get_weights()
[array([[1.],
[1.],
[1.]], dtype=float32), array([0.], dtype=float32)]
>>> b = tf.keras.layers.Dense(1,
... kernel_initializer=tf.constant_initializer(2.))
>>> b_out = b(tf.convert_to_tensor([[10., 20., 30.]]))
>>> b.get_weights()
[array([[2.],
[2.],
[2.]], dtype=float32), array([0.], dtype=float32)]
>>> b.set_weights(a.get_weights())
>>> b.get_weights()
[array([[1.],
[1.],
[1.]], dtype=float32), array([0.], dtype=float32)]
Arguments:
weights: a list of Numpy arrays. The number
of arrays and their shapes must match the
number and shapes of the weights
of the layer (i.e. it should match the
output of `get_weights`).
Raises:
ValueError: If the provided weights list does not match the
layer's specifications.
"""
params = self.weights
expected_num_weights = 0
for param in params:
if isinstance(param, base_layer_utils.TrackableWeightHandler):
expected_num_weights += param.num_tensors
else:
expected_num_weights += 1
if expected_num_weights != len(weights):
raise ValueError(
'You called `set_weights(weights)` on layer "%s" '
'with a weight list of length %s, but the layer was '
'expecting %s weights. Provided weights: %s...' %
(self.name, len(weights), expected_num_weights, str(weights)[:50]))
weight_index = 0
weight_value_tuples = []
for param in params:
if isinstance(param, base_layer_utils.TrackableWeightHandler):
num_tensors = param.num_tensors
tensors = weights[weight_index:weight_index + num_tensors]
param.set_weights(tensors)
weight_index += num_tensors
else:
weight = weights[weight_index]
ref_shape = param.shape
if not ref_shape.is_compatible_with(weight.shape):
raise ValueError(
'Layer weight shape %s not compatible with provided weight '
'shape %s' % (ref_shape, weight.shape))
weight_value_tuples.append((param, weight))
weight_index += 1
backend.batch_set_value(weight_value_tuples)
def get_weights(self):
"""Returns the current weights of the layer.
The weights of a layer represent the state of the layer. This function
returns both trainable and non-trainable weight values associated with this
layer as a list of Numpy arrays, which can in turn be used to load state
into similarly parameterized layers.
For example, a Dense layer returns a list of two values: the per-output
weights and the bias value. These can be used to set the weights of another
Dense layer:
>>> a = tf.keras.layers.Dense(1,
... kernel_initializer=tf.constant_initializer(1.))
>>> a_out = a(tf.convert_to_tensor([[1., 2., 3.]]))
>>> a.get_weights()
[array([[1.],
[1.],
[1.]], dtype=float32), array([0.], dtype=float32)]
>>> b = tf.keras.layers.Dense(1,
... kernel_initializer=tf.constant_initializer(2.))
>>> b_out = b(tf.convert_to_tensor([[10., 20., 30.]]))
>>> b.get_weights()
[array([[2.],
[2.],
[2.]], dtype=float32), array([0.], dtype=float32)]
>>> b.set_weights(a.get_weights())
>>> b.get_weights()
[array([[1.],
[1.],
[1.]], dtype=float32), array([0.], dtype=float32)]
Returns:
Weights values as a list of numpy arrays.
"""
weights = self.weights
output_weights = []
for weight in weights:
if isinstance(weight, base_layer_utils.TrackableWeightHandler):
output_weights.extend(weight.get_tensors())
else:
output_weights.append(weight)
return backend.batch_get_value(output_weights)
@doc_controls.do_not_generate_docs
def get_updates_for(self, inputs):
"""Retrieves updates relevant to a specific set of inputs.
Arguments:
inputs: Input tensor or list/tuple of input tensors.
Returns:
List of update ops of the layer that depend on `inputs`.
"""
if inputs is None:
# Requesting unconditional updates.
return [u for u in self.updates if u._unconditional_update]
# Requesting input-conditional updates.
updates = [u for u in self.updates if not u._unconditional_update]
inputs = nest.flatten(inputs)
reachable = tf_utils.get_reachable_from_inputs(inputs, updates)
return [u for u in updates if u in reachable]
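# Illustrative sketch (not part of the original source): in graph mode a
# layer such as BatchNormalization registers moving-average update ops, and
# `get_updates_for` filters them by the inputs they depend on. Assuming a
# built `bn` layer and a symbolic tensor `x`:
#
#   y = bn(x, training=True)
#   bn.get_updates_for(None)  # unconditional updates only
#   bn.get_updates_for([x])   # updates created by the call on `x`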
@doc_controls.do_not_doc_inheritable
def get_losses_for(self, inputs):
"""Retrieves losses relevant to a specific set of inputs.
Arguments:
inputs: Input tensor or list/tuple of input tensors.
Returns:
List of loss tensors of the layer that depend on `inputs`.
"""
if inputs is None:
# Requesting unconditional losses.
return [l for l in self.losses if l._unconditional_loss]
# Requesting input-conditional losses.
losses = [l for l in self.losses if not l._unconditional_loss]
inputs = nest.flatten(inputs)
reachable = tf_utils.get_reachable_from_inputs(inputs, losses)
return [l for l in losses if l in reachable]
@doc_controls.do_not_doc_inheritable
def get_input_mask_at(self, node_index):
"""Retrieves the input mask tensor(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A mask tensor
(or list of tensors if the layer has multiple inputs).
"""
inputs = self.get_input_at(node_index)
if isinstance(inputs, list):
return [getattr(x, '_keras_mask', None) for x in inputs]
else:
return getattr(inputs, '_keras_mask', None)
@doc_controls.do_not_doc_inheritable
def get_output_mask_at(self, node_index):
"""Retrieves the output mask tensor(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A mask tensor
(or list of tensors if the layer has multiple outputs).
"""
output = self.get_output_at(node_index)
if isinstance(output, list):
return [getattr(x, '_keras_mask', None) for x in output]
else:
return getattr(output, '_keras_mask', None)
@property
@doc_controls.do_not_doc_inheritable
def input_mask(self):
"""Retrieves the input mask tensor(s) of a layer.
Only applicable if the layer has exactly one inbound node,
i.e. if it is connected to one incoming layer.
Returns:
Input mask tensor (potentially None) or list of input
mask tensors.
Raises:
AttributeError: if the layer is connected to
more than one incoming layers.
"""
inputs = self.input
if isinstance(inputs, list):
return [getattr(x, '_keras_mask', None) for x in inputs]
else:
return getattr(inputs, '_keras_mask', None)
@property
@doc_controls.do_not_doc_inheritable
def output_mask(self):
"""Retrieves the output mask tensor(s) of a layer.
Only applicable if the layer has exactly one inbound node,
i.e. if it is connected to one incoming layer.
Returns:
Output mask tensor (potentially None) or list of output
mask tensors.
Raises:
AttributeError: if the layer is connected to
more than one incoming layers.
"""
output = self.output
if isinstance(output, list):
return [getattr(x, '_keras_mask', None) for x in output]
else:
return getattr(output, '_keras_mask', None)
@doc_controls.do_not_doc_inheritable
def get_input_shape_at(self, node_index):
"""Retrieves the input shape(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A shape tuple
(or list of shape tuples if the layer has multiple inputs).
Raises:
RuntimeError: If called in Eager mode.
"""
return self._get_node_attribute_at_index(node_index, 'input_shapes',
'input shape')
@doc_controls.do_not_doc_inheritable
def get_output_shape_at(self, node_index):
"""Retrieves the output shape(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A shape tuple
(or list of shape tuples if the layer has multiple outputs).
Raises:
RuntimeError: If called in Eager mode.
"""
return self._get_node_attribute_at_index(node_index, 'output_shapes',
'output shape')
@doc_controls.do_not_doc_inheritable
def get_input_at(self, node_index):
"""Retrieves the input tensor(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A tensor (or list of tensors if the layer has multiple inputs).
Raises:
RuntimeError: If called in Eager mode.
"""
return self._get_node_attribute_at_index(node_index, 'input_tensors',
'input')
@doc_controls.do_not_doc_inheritable
def get_output_at(self, node_index):
"""Retrieves the output tensor(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A tensor (or list of tensors if the layer has multiple outputs).
Raises:
RuntimeError: If called in Eager mode.
"""
return self._get_node_attribute_at_index(node_index, 'output_tensors',
'output')
@property
def input(self):
"""Retrieves the input tensor(s) of a layer.
Only applicable if the layer has exactly one input,
i.e. if it is connected to one incoming layer.
Returns:
Input tensor or list of input tensors.
Raises:
RuntimeError: If called in Eager mode.
AttributeError: If no inbound nodes are found.
"""
if not self._inbound_nodes:
raise AttributeError('Layer ' + self.name +
' is not connected, no input to return.')
return self._get_node_attribute_at_index(0, 'input_tensors', 'input')
@property
def output(self):
"""Retrieves the output tensor(s) of a layer.
Only applicable if the layer has exactly one output,
i.e. if it is connected to one incoming layer.
Returns:
Output tensor or list of output tensors.
Raises:
AttributeError: if the layer is connected to more than one incoming
layers.
RuntimeError: if called in Eager mode.
"""
if not self._inbound_nodes:
raise AttributeError('Layer ' + self.name + ' has no inbound nodes.')
return self._get_node_attribute_at_index(0, 'output_tensors', 'output')
@property
@doc_controls.do_not_doc_inheritable
def input_shape(self):
"""Retrieves the input shape(s) of a layer.
Only applicable if the layer has exactly one input,
i.e. if it is connected to one incoming layer, or if all inputs
have the same shape.
Returns:
Input shape, as an integer shape tuple
(or list of shape tuples, one tuple per input tensor).
Raises:
AttributeError: if the layer has no defined input_shape.
RuntimeError: if called in Eager mode.
"""
if not self._inbound_nodes:
raise AttributeError('The layer has never been called '
'and thus has no defined input shape.')
all_input_shapes = set(
[str(node.input_shapes) for node in self._inbound_nodes])
if len(all_input_shapes) == 1:
return self._inbound_nodes[0].input_shapes
else:
raise AttributeError('The layer "' + str(self.name) +
'" has multiple inbound nodes, '
'with different input shapes. Hence '
'the notion of "input shape" is '
'ill-defined for the layer. '
'Use `get_input_shape_at(node_index)` '
'instead.')
def count_params(self):
"""Count the total number of scalars composing the weights.
Returns:
An integer count.
Raises:
ValueError: if the layer isn't yet built
(in which case its weights aren't yet defined).
"""
if not self.built:
if getattr(self, '_is_graph_network', False):
with tf_utils.maybe_init_scope(self):
self._maybe_build(self.inputs)
else:
raise ValueError('You tried to call `count_params` on ' + self.name +
', but the layer isn\'t built. '
'You can build it manually via: `' + self.name +
'.build(batch_input_shape)`.')
return layer_utils.count_params(self.weights)
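# Illustrative sketch (not part of the original source), assuming a standard
# tf.keras Dense layer: a Dense(10) built on 20 input features holds a
# (20, 10) kernel and a (10,) bias, so `count_params()` returns 210.
#
#   d = tf.keras.layers.Dense(10)
#   d.build((None, 20))
#   d.count_params()  # 20 * 10 + 10 == 210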
@property
@doc_controls.do_not_doc_inheritable
def output_shape(self):
"""Retrieves the output shape(s) of a layer.
Only applicable if the layer has one output,
or if all outputs have the same shape.
Returns:
Output shape, as an integer shape tuple
(or list of shape tuples, one tuple per output tensor).
Raises:
AttributeError: if the layer has no defined output shape.
RuntimeError: if called in Eager mode.
"""
if not self._inbound_nodes:
raise AttributeError('The layer has never been called '
'and thus has no defined output shape.')
all_output_shapes = set(
[str(node.output_shapes) for node in self._inbound_nodes])
if len(all_output_shapes) == 1:
return self._inbound_nodes[0].output_shapes
else:
raise AttributeError('The layer "%s"'
' has multiple inbound nodes, '
'with different output shapes. Hence '
'the notion of "output shape" is '
'ill-defined for the layer. '
'Use `get_output_shape_at(node_index)` '
'instead.' % self.name)
@property
@doc_controls.do_not_doc_inheritable
def inbound_nodes(self):
"""Deprecated, do NOT use! Only for compatibility with external Keras."""
return self._inbound_nodes
@property
@doc_controls.do_not_doc_inheritable
def outbound_nodes(self):
"""Deprecated, do NOT use! Only for compatibility with external Keras."""
return self._outbound_nodes
##############################################################################
# Methods & attributes below are public aliases of other methods. #
##############################################################################
@deprecation.deprecated(
date=None, instructions='Please use `layer.__call__` method instead.')
@doc_controls.do_not_doc_inheritable
def apply(self, inputs, *args, **kwargs):
"""Deprecated, do NOT use!
This is an alias of `self.__call__`.
Arguments:
inputs: Input tensor(s).
*args: additional positional arguments to be passed to `self.call`.
**kwargs: additional keyword arguments to be passed to `self.call`.
Returns:
Output tensor(s).
"""
return self.__call__(inputs, *args, **kwargs)
@deprecation.deprecated(
date=None, instructions='Please use `layer.add_weight` method instead.')
@doc_controls.do_not_doc_inheritable
def add_variable(self, *args, **kwargs):
"""Deprecated, do NOT use! Alias for `add_weight`."""
return self.add_weight(*args, **kwargs)
@property
@doc_controls.do_not_generate_docs
def variables(self):
"""Returns the list of all layer variables/weights.
Alias of `self.weights`.
Returns:
A list of variables.
"""
return self.weights
@property
@doc_controls.do_not_generate_docs
def trainable_variables(self):
return self.trainable_weights
@property
@doc_controls.do_not_generate_docs
def non_trainable_variables(self):
return self.non_trainable_weights
##############################################################################
# Methods & attributes below are all private and only used by the framework. #
##############################################################################
# TODO(scottzhu): Remove this?
@property
def _compute_dtype(self):
"""The layer's compute dtype.
Unless mixed-precision is used, this is the same as `Layer.dtype`.
If self._autocast is True, the layer will cast floating-point inputs to this.
Returns:
The layer's compute dtype.
"""
return self.dtype
def _name_scope(self):
return self.name
def _init_set_name(self, name, zero_based=True):
if not name:
self._name = backend.unique_object_name(
generic_utils.to_snake_case(self.__class__.__name__),
zero_based=zero_based)
else:
self._name = name
def _get_existing_metric(self, name=None):
match = [m for m in self._metrics if m.name == name]
if not match:
return
if len(match) > 1:
raise ValueError(
'Please provide different names for the metrics you have added. '
'We found {} metrics with the name: "{}"'.format(len(match), name))
return match[0]
def _eager_add_metric(self, value, aggregation=None, name=None):
# If the given metric is available in `metrics` list we just update state
# on it, otherwise we create a new metric instance and
# add it to the `metrics` list.
metric_obj = getattr(value, '_metric_obj', None)
# Tensors that come from a Metric object already updated the Metric state.
should_update_state = not metric_obj
name = metric_obj.name if metric_obj else name
with self._metrics_lock:
match = self._get_existing_metric(name)
if match:
metric_obj = match
elif metric_obj:
self._metrics.append(metric_obj)
else:
from tensorflow.python.keras import metrics as metrics_mod # pylint:disable=g-import-not-at-top
if aggregation is None:
raise ValueError(
'`aggregation` must be specified when passing a `Tensor` '
'to `add_metric`.')
assert aggregation is not None
metric_obj = metrics_mod.Mean(name=name, dtype=value.dtype)
self._metrics.append(metric_obj)
if should_update_state:
metric_obj(value)
return
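# Illustrative sketch (not part of the original source): this eager path is
# reached through `add_metric` inside a layer's `call`. A raw tensor needs an
# explicit `aggregation`, which wraps it in a `Mean` metric; the result of an
# existing `tf.keras.metrics.Metric` does not, e.g.
#
#   self.add_metric(tf.reduce_sum(inputs), name='sum_act', aggregation='mean')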
def _symbolic_add_metric(self, value, aggregation=None, name=None):
base_layer_utils.check_graph_consistency(value, method='add_metric')
match = self._get_existing_metric(name)
if aggregation is None:
# Iterate over the metrics and check if the given metric exists already.
# This can happen when a metric instance is created in subclassed model
# layer `__init__` and we have tracked that instance already in
# model.__setattr__.
if match:
result_tensor = value
metric_obj = match
elif hasattr(value, '_metric_obj'):
# We track the instance using the metadata on the result tensor.
result_tensor = value
metric_obj = result_tensor._metric_obj
self._metrics.append(metric_obj)
else:
raise ValueError(
'We do not support adding an aggregated metric result tensor that '
'is not the output of a `tf.keras.metrics.Metric` metric instance. '
'Without having access to the metric instance we cannot reset the '
'state of a metric after every epoch during training. You can '
'create a `tf.keras.metrics.Metric` instance and pass the result '
'here or pass an un-aggregated result with `aggregation` parameter '
'set as `mean`. For example: `self.add_metric(tf.reduce_sum(inputs)'
', name=\'mean_activation\', aggregation=\'mean\')`')
else:
# If a non-aggregated tensor is given as input (ie. `aggregation` is
# explicitly set to `mean`), we wrap the tensor in `Mean` metric.
if match:
result_tensor = match(value)
metric_obj = match
else:
metric_obj, result_tensor = base_layer_utils.create_mean_metric(
value, name)
self._metrics.append(metric_obj)
def _handle_weight_regularization(self, name, variable, regularizer):
"""Create lambdas which compute regularization losses."""
def _loss_for_variable(v):
"""Creates a regularization loss `Tensor` for variable `v`."""
with backend.name_scope(name + '/Regularizer'):
regularization = regularizer(v)
return regularization
if isinstance(variable, tf_variables.PartitionedVariable):
for v in variable:
self.add_loss(functools.partial(_loss_for_variable, v))
else:
self.add_loss(functools.partial(_loss_for_variable, variable))
def _handle_activity_regularization(self, inputs, outputs):
# Apply activity regularization.
# Note that it should be applied every time the layer creates a new
# output, since it is output-specific.
if self._activity_regularizer:
output_list = nest.flatten(outputs)
with backend.name_scope('ActivityRegularizer'):
for output in output_list:
activity_loss = self._activity_regularizer(output)
batch_size = math_ops.cast(
array_ops.shape(output)[0], activity_loss.dtype)
# Make activity regularization strength batch-agnostic.
mean_activity_loss = activity_loss / batch_size
base_layer_utils.check_graph_consistency(
mean_activity_loss, method='activity_regularizer')
self.add_loss(mean_activity_loss, inputs=inputs)
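# Worked example (not part of the original source): with an l2 activity
# regularizer and a hypothetical output of shape (32, 8),
#   activity_loss      = sum(output ** 2)      # grows with the 32 rows
#   mean_activity_loss = activity_loss / 32.0  # per-example strength
# so dividing by the batch size above keeps the regularization strength
# independent of the batch size used at call time.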
def _set_mask_metadata(self, inputs, outputs, previous_mask):
flat_outputs = nest.flatten(outputs)
mask_already_computed = (
getattr(self, '_compute_output_and_mask_jointly', False) or
all(getattr(x, '_keras_mask', None) is not None for x in flat_outputs))
# Only compute the mask if the Layer explicitly supports masking or has
# overridden `compute_mask`.
should_compute_mask = (
hasattr(self, 'compute_mask') and
(self.supports_masking or
not getattr(self.compute_mask, '_is_default', False)))
if mask_already_computed:
flat_masks = [getattr(x, '_keras_mask', None) for x in flat_outputs]
elif not should_compute_mask:
flat_masks = [None for _ in flat_outputs]
else:
output_masks = self.compute_mask(inputs, previous_mask)
# `compute_mask` can return a single `None` even when a Layer
# has multiple outputs.
if output_masks is None:
flat_masks = [None for _ in flat_outputs]
else:
flat_masks = nest.flatten(output_masks)
for output, mask in zip(flat_outputs, flat_masks):
try:
output._keras_mask = mask
except AttributeError:
# C Type such as np.ndarray.
pass
if tf_utils.are_all_symbolic_tensors(flat_outputs):
for output in flat_outputs:
if getattr(output, '_keras_mask', None) is not None:
# Do not track masks for `TensorFlowOpLayer` construction.
output._keras_mask._keras_history_checked = True
def _collect_input_masks(self, inputs, args, kwargs):
"""Checks if `mask` argument was passed, else gathers mask from inputs."""
if self._call_arg_was_passed('mask', args, kwargs):
return self._get_call_arg_value('mask', args, kwargs)
if not self._should_compute_mask:
return None
input_masks = nest.map_structure(lambda t: getattr(t, '_keras_mask', None),
inputs)
if generic_utils.is_all_none(input_masks):
return None
return input_masks
def _call_arg_was_passed(self, arg_name, args, kwargs, inputs_in_args=False):
if arg_name in kwargs:
return True
call_fn_args = self._call_fn_args
if not inputs_in_args:
# Ignore `inputs` arg.
call_fn_args = call_fn_args[1:]
if arg_name in dict(zip(call_fn_args, args)):
return True
return False
def _get_call_arg_value(self, arg_name, args, kwargs, inputs_in_args=False):
if arg_name in kwargs:
return kwargs[arg_name]
call_fn_args = self._call_fn_args
if not inputs_in_args:
# Ignore `inputs` arg.
call_fn_args = call_fn_args[1:]
args_dict = dict(zip(call_fn_args, args))
return args_dict[arg_name]
def _set_connectivity_metadata_(self, inputs, outputs, args, kwargs):
# If the layer returns tensors from its inputs, unmodified,
# we copy them to avoid loss of tensor metadata.
output_ls = nest.flatten(outputs)
inputs_ls = object_identity.ObjectIdentitySet(nest.flatten(inputs))
output_ls_copy = []
for x in output_ls:
if x in inputs_ls:
with backend.name_scope(self.name):
x = array_ops.identity(x)
output_ls_copy.append(x)
outputs = nest.pack_sequence_as(outputs, output_ls_copy)
# Ignore `inputs` arg.
arguments = dict(zip(self._call_fn_args[1:], args))
arguments.update(kwargs)
# Add an inbound node to the layer, so it can keep track of this call.
# This updates the layer history of the output tensor(s).
self._add_inbound_node(
input_tensors=inputs, output_tensors=outputs, arguments=arguments)
return inputs, outputs
def _add_inbound_node(self,
input_tensors,
output_tensors,
arguments=None):
"""Internal method to create an inbound node for the layer.
Arguments:
input_tensors: list of input tensors.
output_tensors: list of output tensors.
arguments: dictionary of keyword arguments that were passed to the
`call` method of the layer at the call that created the node.
"""
inbound_layers = nest.map_structure(lambda t: t._keras_history.layer,
input_tensors)
node_indices = nest.map_structure(lambda t: t._keras_history.node_index,
input_tensors)
tensor_indices = nest.map_structure(lambda t: t._keras_history.tensor_index,
input_tensors)
# Create node, add it to inbound nodes.
node_module.Node(
self,
inbound_layers=inbound_layers,
node_indices=node_indices,
tensor_indices=tensor_indices,
input_tensors=input_tensors,
output_tensors=output_tensors,
arguments=arguments)
# Update tensor history metadata.
# The metadata attribute consists of
# 1) a layer instance
# 2) a node index for the layer
# 3) a tensor index for the node.
# This allows layer reuse (multiple nodes per layer) and multi-output
# or multi-input layers (e.g. a layer can return multiple tensors,
# and each can be sent to a different layer).
for i, tensor in enumerate(nest.flatten(output_tensors)):
tensor._keras_history = KerasHistory(self,
len(self._inbound_nodes) - 1, i) # pylint: disable=protected-access
def _get_node_attribute_at_index(self, node_index, attr, attr_name):
"""Private utility to retrieves an attribute (e.g. inputs) from a node.
This is used to implement the methods:
- get_input_shape_at
- get_output_shape_at
- get_input_at
etc...
Arguments:
node_index: Integer index of the node from which
to retrieve the attribute.
attr: Exact node attribute name.
attr_name: Human-readable attribute name, for error messages.
Returns:
The layer's attribute `attr` at the node of index `node_index`.
Raises:
RuntimeError: If the layer has no inbound nodes, or if called in Eager
mode.
ValueError: If the index provided does not match any node.
"""
if not self._inbound_nodes:
raise RuntimeError('The layer has never been called '
'and thus has no defined ' + attr_name + '.')
if not len(self._inbound_nodes) > node_index:
raise ValueError('Asked to get ' + attr_name + ' at node ' +
str(node_index) + ', but the layer has only ' +
str(len(self._inbound_nodes)) + ' inbound nodes.')
values = getattr(self._inbound_nodes[node_index], attr)
if isinstance(values, list) and len(values) == 1:
return values[0]
else:
return values
def _maybe_build(self, inputs):
# Check input assumptions set before layer building, e.g. input rank.
if not self.built:
input_spec.assert_input_compatibility(
self.input_spec, inputs, self.name)
input_list = nest.flatten(inputs)
if input_list and self._dtype is None:
try:
self._dtype = input_list[0].dtype.base_dtype.name
except AttributeError:
pass
input_shapes = None
if all(hasattr(x, 'shape') for x in input_list):
input_shapes = nest.map_structure(lambda x: x.shape, inputs)
# Only call `build` if the user has manually overridden the build method.
if not hasattr(self.build, '_is_default'):
# Any setup work performed only once should happen in an `init_scope`
# to avoid creating symbolic Tensors that will later pollute any eager
# operations.
with tf_utils.maybe_init_scope(self):
self.build(input_shapes) # pylint:disable=not-callable
# We must also ensure that the layer is marked as built and that the build
# shape is stored, since user-defined build functions may not call
# `super().build()`.
LegacyBaseLayer.build(self, input_shapes)
# Optionally load weight values specified at layer instantiation.
if self._initial_weights is not None:
if ops.executing_eagerly_outside_functions():
with ops.init_scope():
# Using `init_scope` since we want variable assignment in
# `set_weights` to be treated like variable initialization.
self.set_weights(self._initial_weights)
else:
self.set_weights(self._initial_weights)
self._initial_weights = None
def _symbolic_call(self, inputs):
input_shapes = nest.map_structure(lambda x: x.shape, inputs)
output_shapes = self.compute_output_shape(input_shapes)
# Convert to TensorShape so that nest.map_structure will not map into
# individual dim of the shape.
output_shapes = tf_utils.convert_shapes(output_shapes, to_tuples=False)
def _make_placeholder_like(shape):
ph = backend.placeholder(shape=shape, dtype=self.dtype)
ph._keras_mask = None
return ph
return nest.map_structure(_make_placeholder_like, output_shapes)
def _get_trainable_state(self):
"""Get the `trainable` state of each sublayer.
Returns:
A dict mapping all sublayers to their `trainable` value.
"""
layers = trackable_layer_utils.filter_empty_layer_containers(self._layers)
# Keep track of each top-level layers' `trainable` as well as the
# state of all of its sublayers.
trainable_state = weakref.WeakKeyDictionary()
trainable_state[self] = self.trainable
for layer in layers:
trainable_state.update(layer._get_trainable_state())
return trainable_state
def _set_trainable_state(self, trainable_state):
"""Set `trainable` state for each sublayer."""
layers = trackable_layer_utils.filter_empty_layer_containers(self._layers)
if self in trainable_state:
self.trainable = trainable_state[self]
for layer in layers:
layer._set_trainable_state(trainable_state)
@property
def _obj_reference_counts(self):
"""A dictionary counting the number of attributes referencing an object."""
self._maybe_create_attribute('_obj_reference_counts_dict',
object_identity.ObjectIdentityDictionary())
return self._obj_reference_counts_dict
@trackable.no_automatic_dependency_tracking
def _maybe_create_attribute(self, name, default_value):
"""Create the attribute with the default value if it hasn't been created.
This is useful for fields that are used for tracking purposes, such as
_trainable_weights or _layers. Note that a user could create a layer
subclass and assign an internal field before invoking Layer.__init__();
in that case __setattr__() needs to create the tracking fields and
__init__() must not override them.
Args:
name: String, the name of the attribute.
default_value: Object, the default value of the attribute.
"""
if not hasattr(self, name):
super(LegacyBaseLayer, self).__setattr__(name, default_value)
def __delattr__(self, name):
# For any super.__delattr__() call, we will directly use the implementation
# in Trackable and skip the behavior in AutoTrackable. The Layer originally
# used Trackable as its base class; the change to using Module as the base
# class forced us to have AutoTrackable in the class hierarchy. Skipping
# the __delattr__ and __setattr__ in AutoTrackable keeps the status quo.
existing_value = getattr(self, name, None)
# If this value is replacing an existing object assigned to an attribute, we
# should clean it out to avoid leaking memory. First we check if there are
# other attributes referencing it.
reference_counts = self._obj_reference_counts
if existing_value not in reference_counts:
super(tracking.AutoTrackable, self).__delattr__(name)
return
reference_count = reference_counts[existing_value]
if reference_count > 1:
# There are other remaining references. We can't remove this object from
# _layers etc.
reference_counts[existing_value] = reference_count - 1
super(tracking.AutoTrackable, self).__delattr__(name)
return
else:
# This is the last remaining reference.
del reference_counts[existing_value]
super(tracking.AutoTrackable, self).__delattr__(name)
if (isinstance(existing_value, LegacyBaseLayer)
or trackable_layer_utils.has_weights(existing_value)):
super(tracking.AutoTrackable, self).__setattr__(
'_layers',
[l for l in self._layers if l is not existing_value])
self._attribute_sentinel.invalidate_all()
if isinstance(existing_value, tf_variables.Variable):
super(tracking.AutoTrackable, self).__setattr__(
'_trainable_weights',
[w for w in self._trainable_weights if w is not existing_value])
super(tracking.AutoTrackable, self).__setattr__(
'_non_trainable_weights',
[w for w in self._non_trainable_weights if w is not existing_value])
# Any time we change `_layers` (either by deleting the attribute or by
# reassigning it which will call __delattr__ from __setattr__) the topology
# of the subgraph of Layers may change. In that case we will need to
# recompute any attribute which depends on that subgraph.
if name == '_layers':
self._attribute_sentinel.invalidate_all()
def __setattr__(self, name, value):
if (name == '_self_setattr_tracking' or
not getattr(self, '_self_setattr_tracking', True) or
# Exclude @property.setters from tracking
hasattr(self.__class__, name)):
try:
super(tracking.AutoTrackable, self).__setattr__(name, value)
except AttributeError:
raise AttributeError(
('Can\'t set the attribute "{}", likely because it conflicts with '
'an existing read-only @property of the object. Please choose a '
'different name.').format(name))
return
# Keep track of trackable objects, for the needs of `Network.save_weights`.
value = data_structures.sticky_attribute_assignment(
trackable=self, value=value, name=name)
reference_counts = self._obj_reference_counts
reference_counts[value] = reference_counts.get(value, 0) + 1
# Clean out the old attribute, which clears _layers and _trainable_weights
# if necessary.
try:
self.__delattr__(name)
except AttributeError:
pass
# TODO(scottzhu): Need to track Module object as well for weight tracking.
# Be careful about metric if it becomes a Module in future.
# Append value to self._layers if relevant
if (getattr(self, '_auto_track_sub_layers', True) and
(isinstance(value, LegacyBaseLayer) or
trackable_layer_utils.has_weights(value))):
self._maybe_create_attribute('_layers', [])
# We need to check object identity to avoid de-duplicating empty
# container types which compare equal.
if not any((layer is value for layer in self._layers)):
self._layers.append(value)
if hasattr(value, '_attribute_sentinel'):
value._attribute_sentinel.add_parent(self._attribute_sentinel)
if hasattr(value, '_use_resource_variables'):
# Legacy layers (V1 tf.layers) must always use
# resource variables.
value._use_resource_variables = True
# Append value to list of trainable / non-trainable weights if relevant
# TODO(b/125122625): This won't pick up on any variables added to a
# list/dict after creation.
for val in nest.flatten(value):
# TODO(b/126450014): Remove `_UnreadVariable` check here when assign ops
# no longer return True for isinstance Variable checks.
if not isinstance(val, tf_variables.Variable):
continue
if isinstance(val, resource_variable_ops._UnreadVariable): # pylint: disable=protected-access
continue
# Users may add extra weights/variables
# simply by assigning them to attributes (invalid for graph networks)
self._maybe_create_attribute('_trainable_weights', [])
self._maybe_create_attribute('_non_trainable_weights', [])
if val.trainable:
if any(val is w for w in self._trainable_weights):
continue
self._trainable_weights.append(val)
else:
if any(val is w for w in self._non_trainable_weights):
continue
self._non_trainable_weights.append(val)
backend.track_variable(val)
# Skip the auto trackable from tf.Module to keep status quo. See the comment
# at __delattr__.
super(tracking.AutoTrackable, self).__setattr__(name, value)
def _gather_children_attribute(self, attribute):
assert attribute in {
'weights', 'trainable_weights', 'non_trainable_weights'
}
if hasattr(self, '_layers'):
nested_layers = trackable_layer_utils.filter_empty_layer_containers(
self._layers)
return list(
itertools.chain.from_iterable(
getattr(layer, attribute) for layer in nested_layers))
return []
def _gather_unique_layers(self):
"""Returns the current layer and all its children depth first deduped.
We are deduping after getting the layers to maintain the order.
"""
all_layers = self._gather_layers()
unique_layers, seen_layers = [], object_identity.ObjectIdentitySet()
for layer in all_layers:
if layer not in seen_layers:
unique_layers.append(layer)
# Track the Variable's identity to avoid __eq__ issues.
seen_layers.add(layer)
return unique_layers
def _gather_layers(self):
"""Returns the current layer and all its children depth first."""
all_layers = [self]
if hasattr(self, '_layers'):
child_layers = trackable_layer_utils.filter_empty_layer_containers(
self._layers)
for child_layer in child_layers:
all_layers.extend(child_layer._gather_layers())
return all_layers
@property
@tracking.cached_per_instance
def _attribute_sentinel(self):
return trackable_layer_utils.AttributeSentinel()
# This is a hack so that the is_layer (within
# training/trackable/layer_utils.py) check doesn't get the weights attr.
# TODO(b/110718070): Remove when fixed.
def _is_layer(self):
return True
def _init_call_fn_args(self):
# Clear cached call function arguments.
self.__class__._call_full_argspec.fget.cache.pop(self, None)
self.__class__._call_fn_args.fget.cache.pop(self, None)
self.__class__._call_accepts_kwargs.fget.cache.pop(self, None)
call_fn_args = self._call_fn_args
self._expects_training_arg = ('training' in call_fn_args or
self._call_accepts_kwargs)
self._expects_mask_arg = ('mask' in call_fn_args or
self._call_accepts_kwargs)
@property
@tracking.cached_per_instance
def _call_full_argspec(self):
# Argspec inspection is expensive and the call spec is used often, so it
# makes sense to cache the result.
return tf_inspect.getfullargspec(self.call)
@property
@tracking.cached_per_instance
def _call_fn_args(self):
all_args = self._call_full_argspec.args
# Scrub `self` that appears if a decorator was applied.
if all_args and all_args[0] == 'self':
return all_args[1:]
return all_args
@property
@tracking.cached_per_instance
def _call_accepts_kwargs(self):
return self._call_full_argspec.varkw is not None
@property
@tracking.cached_per_instance
def _should_compute_mask(self):
return ('mask' in self._call_fn_args or
getattr(self, 'compute_mask', None) is not None)
@property
def _eager_losses(self):
# A list of loss values containing activity regularizers and losses
# manually added through `add_loss` during eager execution. It is cleared
# after every batch.
# Because we plan on eventually allowing the same model instance to be
# trained in either eager or graph mode, we need to keep track of
# eager losses and symbolic losses via separate attributes.
if not hasattr(self._thread_local, '_eager_losses'):
self._thread_local._eager_losses = []
return self._thread_local._eager_losses
@_eager_losses.setter
def _eager_losses(self, losses):
self._thread_local._eager_losses = losses
def _dedup_weights(self, weights):
"""Dedupe weights while maintaining order as much as possible."""
output, seen_weights = [], object_identity.ObjectIdentitySet()
for w in weights:
if w not in seen_weights:
output.append(w)
# Track the Variable's identity to avoid __eq__ issues.
seen_weights.add(w)
return output
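# Illustrative sketch (not part of the original source): when a layer is
# shared, the same Variable can be reachable through several attributes, so
# deduplication must use object identity rather than `==`:
#
#   self._dedup_weights([v1, v2, v1])  # -> [v1, v2], order preserved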
# SavedModel properties. Please see keras/saving/saved_model for details.
def __getstate__(self):
# Override to support `copy.deepcopy` and pickling.
# Thread-local objects cannot be copied in Python 3, so pop these.
# Thread-local objects are used to cache losses in MirroredStrategy, and
# so shouldn't be copied.
state = self.__dict__.copy()
state.pop('_thread_local', None)
state.pop('_metrics_lock', None)
return state
def __setstate__(self, state):
state['_thread_local'] = threading.local()
state['_metrics_lock'] = threading.Lock()
# Bypass Trackable logic as `__dict__` already contains this info.
object.__setattr__(self, '__dict__', state)
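# Illustrative sketch (not part of the original source): because the
# thread-local cache and the metrics lock are popped in `__getstate__` and
# recreated in `__setstate__`, a built layer can be deep-copied or pickled,
# assuming its remaining attributes are themselves picklable:
#
#   import copy
#   layer_copy = copy.deepcopy(layer)  # gets a fresh _thread_local / _metrics_lock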
class TensorFlowOpLayer(LegacyBaseLayer):
"""Wraps a TensorFlow Operation in a Layer.
This class is used internally by the Functional API. When a user
uses a raw TensorFlow Operation on symbolic tensors originating
from an `Input` Layer, the resultant operation will be wrapped
with this Layer object in order to make the operation compatible
with the Keras API.
This Layer will create a new, identical operation (except for inputs
and outputs) every time it is called. If `run_eagerly` is `True`,
the op creation and calculation will happen inside an Eager function.
Instances of this Layer are created when `autolambda` is called, which
is whenever a Layer's `__call__` encounters symbolic inputs that do
not have Keras metadata, or when a Network's `__init__` encounters
outputs that do not have Keras metadata.
Attributes:
node_def: String, the serialized NodeDef of the Op this layer will wrap.
name: String, the name of the Layer.
constants: Dict of NumPy arrays, the values of any Tensors needed for this
Operation that do not originate from a Keras `Input` Layer. Since all
placeholders must come from Keras `Input` Layers, these Tensors must be
treated as constant in the Functional API.
trainable: Bool, whether this Layer is trainable. Currently Variables are
not supported, and so this parameter has no effect.
dtype: The default dtype of this Layer. Inherited from `Layer` and has no
effect on this class; however, it is used in `get_config`.
"""
@trackable.no_automatic_dependency_tracking
def __init__(self,
node_def,
name,
constants=None,
trainable=True,
dtype=None):
# Pass autocast=False, as if inputs are cast, input types might not match
# Operation type.
super(TensorFlowOpLayer, self).__init__(
name=_TF_OP_LAYER_NAME_PREFIX + name, trainable=trainable, dtype=dtype,
autocast=False)
if isinstance(node_def, dict):
self.node_def = json_format.ParseDict(node_def, node_def_pb2.NodeDef())
else:
if not isinstance(node_def, bytes):
node_def = node_def.encode('utf-8')
self.node_def = node_def_pb2.NodeDef.FromString(node_def)
# JSON serialization stringifies keys which are integer input indices.
self.constants = ({
int(index): constant for index, constant in constants.items()
} if constants is not None else {})
# Layer uses original op unless it is called on new inputs.
# This means `built` is not set in `__call__`.
self.built = True
def call(self, inputs):
if context.executing_eagerly():
return self._defun_call(inputs)
return self._make_op(inputs)
def _make_node_def(self, graph):
node_def = node_def_pb2.NodeDef()
node_def.CopyFrom(self.node_def)
# Used in TPUReplicateContext to indicate whether this node has been cloned
# and to not add TPU attributes.
node_def.attr['_cloned'].b = True
node_def.name = graph.unique_name(node_def.name)
return node_def
def _make_op(self, inputs):
inputs = nest.flatten(inputs)
graph = inputs[0].graph
node_def = self._make_node_def(graph)
with graph.as_default():
for index, constant in self.constants.items():
# Recreate constant in graph to add distribution context.
value = tensor_util.constant_value(constant)
if value is not None:
constant = constant_op.constant(value, name=node_def.input[index])
inputs.insert(index, constant)
c_op = ops._create_c_op(graph, node_def, inputs, control_inputs=[])
op = graph._create_op_from_tf_operation(c_op)
op._control_flow_post_processing()
# Record the gradient because custom-made ops don't go through the
# code-gen'd eager call path
op_type = compat.as_str(op.op_def.name)
attr_names = [compat.as_str(attr.name) for attr in op.op_def.attr]
attrs = []
for attr_name in attr_names:
attrs.append(attr_name)
attrs.append(op.get_attr(attr_name))
attrs = tuple(attrs)
execute.record_gradient(op_type, op.inputs, attrs, op.outputs)
if len(op.outputs) == 1:
return op.outputs[0]
return op.outputs
@function.defun
def _defun_call(self, inputs):
"""Wraps the op creation method in an Eager function for `run_eagerly`."""
return self._make_op(inputs)
def get_config(self):
config = super(TensorFlowOpLayer, self).get_config()
config.update({
# `__init__` prefixes the name. Revert to the constructor argument.
'name': config['name'][len(_TF_OP_LAYER_NAME_PREFIX):],
'node_def': json_format.MessageToDict(self.node_def),
'constants': {
i: backend.get_value(c) for i, c in self.constants.items()
}
})
return config
class AddLoss(LegacyBaseLayer):
"""Adds its inputs as a loss.
Attributes:
unconditional: Whether the loss is unconditional, i.e. should not be
conditioned on the layer's inputs.
"""
def __init__(self, unconditional, **kwargs):
# Pass autocast=False, as there is no reason to cast loss to a different
# dtype.
kwargs['autocast'] = False
super(AddLoss, self).__init__(**kwargs)
self.unconditional = unconditional
def call(self, inputs):
self.add_loss(inputs, inputs=(not self.unconditional))
return inputs
def get_config(self):
config = super(AddLoss, self).get_config()
config.update({'unconditional': self.unconditional})
return config
class AddMetric(LegacyBaseLayer):
"""Adds its inputs as a metric.
Attributes:
aggregation: 'mean' or None. How the inputs should be aggregated.
metric_name: The name to use for this metric.
"""
def __init__(self, aggregation=None, metric_name=None, **kwargs):
super(AddMetric, self).__init__(**kwargs)
self.aggregation = aggregation
self.metric_name = metric_name
def call(self, inputs):
self.add_metric(inputs, self.aggregation, self.metric_name)
return inputs
def get_config(self):
config = super(AddMetric, self).get_config()
config.update({
'aggregation': self.aggregation,
'metric_name': self.metric_name
})
return config
class KerasHistory(
collections.namedtuple('KerasHistory',
['layer', 'node_index', 'tensor_index'])):
"""Tracks the Layer call that created a Tensor, for Keras Graph Networks.
During construction of Keras Graph Networks, this metadata is added to
each Tensor produced as the output of a Layer, starting with an
`InputLayer`. This allows Keras to track how each Tensor was produced, and
this information is later retraced by the `keras.engine.Network` class to
reconstruct the Keras Graph Network.
Attributes:
layer: The Layer that produced the Tensor.
node_index: The specific call to the Layer that produced this Tensor. Layers
can be called multiple times in order to share weights. A new node is
created every time a Layer is called.
tensor_index: The output index for this Tensor. Always zero if the Layer
that produced this Tensor only has one output. Nested structures of
Tensors are deterministically assigned an index via `nest.flatten`.
"""
# Added to maintain memory and performance characteristics of `namedtuple`
# while subclassing.
__slots__ = ()
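# Illustrative sketch (not part of the original source): during Functional-API
# construction every symbolic output tensor carries one of these triples, e.g.
#
#   history = some_tensor._keras_history  # KerasHistory(layer, node_index, tensor_index)
#   history.layer         # the Layer that produced `some_tensor`
#   history.node_index    # which call of that layer created it
#   history.tensor_index  # which output of that call it is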
avg_line_length: 39.74219 | max_line_length: 115 | alphanum_fraction: 0.679433

hexsha: b2ea526e2e39ab0533c5f463ff094c54814cb304 | size: 5,613 | ext: py | lang: Python
max_stars_repo_path: release/scripts/addons/io_scene_3ds/__init__.py
max_stars_repo_name: noorbeast/BlenderSource
max_stars_repo_head_hexsha: 65ebecc5108388965678b04b43463b85f6c69c1d
max_stars_repo_licenses: ["Naumen", "Condor-1.1", "MS-PL"] | max_stars_count: 2
max_stars_repo_stars_event_min_datetime: 2019-03-20T13:10:46.000Z | max_stars_repo_stars_event_max_datetime: 2019-05-15T20:00:31.000Z
max_issues_repo_path: engine/2.80/scripts/addons/io_scene_3ds/__init__.py
max_issues_repo_name: byteinc/Phasor
max_issues_repo_head_hexsha: f7d23a489c2b4bcc3c1961ac955926484ff8b8d9
max_issues_repo_licenses: ["Unlicense"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: engine/2.80/scripts/addons/io_scene_3ds/__init__.py
max_forks_repo_name: byteinc/Phasor
max_forks_repo_head_hexsha: f7d23a489c2b4bcc3c1961ac955926484ff8b8d9
max_forks_repo_licenses: ["Unlicense"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
bl_info = {
"name": "Autodesk 3DS format",
"author": "Bob Holcomb, Campbell Barton",
"version": (1, 0, 0),
"blender": (2, 74, 0),
"location": "File > Import-Export",
"description": "Import-Export 3DS, meshes, uvs, materials, textures, "
"cameras & lamps",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/Autodesk_3DS",
"support": 'OFFICIAL',
"category": "Import-Export"}
if "bpy" in locals():
import importlib
if "import_3ds" in locals():
importlib.reload(import_3ds)
if "export_3ds" in locals():
importlib.reload(export_3ds)
import bpy
from bpy.props import (
BoolProperty,
EnumProperty,
FloatProperty,
StringProperty,
)
from bpy_extras.io_utils import (
ImportHelper,
ExportHelper,
orientation_helper,
axis_conversion,
)
@orientation_helper(axis_forward='Y', axis_up='Z')
class Import3DS(bpy.types.Operator, ImportHelper):
"""Import from 3DS file format (.3ds)"""
bl_idname = "import_scene.autodesk_3ds"
bl_label = 'Import 3DS'
bl_options = {'UNDO'}
filename_ext = ".3ds"
filter_glob: StringProperty(default="*.3ds", options={'HIDDEN'})
constrain_size: FloatProperty(
name="Size Constraint",
description="Scale the model by 10 until it reaches the "
"size constraint (0 to disable)",
min=0.0, max=1000.0,
soft_min=0.0, soft_max=1000.0,
default=10.0,
)
use_image_search: BoolProperty(
name="Image Search",
description="Search subdirectories for any associated images "
"(Warning, may be slow)",
default=True,
)
use_apply_transform: BoolProperty(
name="Apply Transform",
description="Workaround for object transformations "
"importing incorrectly",
default=True,
)
def execute(self, context):
from . import import_3ds
keywords = self.as_keywords(ignore=("axis_forward",
"axis_up",
"filter_glob",
))
global_matrix = axis_conversion(from_forward=self.axis_forward,
from_up=self.axis_up,
).to_4x4()
keywords["global_matrix"] = global_matrix
return import_3ds.load(self, context, **keywords)
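    # Illustrative note (not part of the original add-on): `axis_conversion`
    # builds the 4x4 matrix that maps the chosen forward/up axes into
    # Blender's default convention, e.g.
    #
    #   axis_conversion(from_forward='Y', from_up='Z').to_4x4()
    #
    # The exporter below reuses the same helper with `to_forward`/`to_up`.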
@orientation_helper(axis_forward='Y', axis_up='Z')
class Export3DS(bpy.types.Operator, ExportHelper):
"""Export to 3DS file format (.3ds)"""
bl_idname = "export_scene.autodesk_3ds"
bl_label = 'Export 3DS'
filename_ext = ".3ds"
filter_glob: StringProperty(
default="*.3ds",
options={'HIDDEN'},
)
use_selection: BoolProperty(
name="Selection Only",
description="Export selected objects only",
default=False,
)
def execute(self, context):
from . import export_3ds
keywords = self.as_keywords(ignore=("axis_forward",
"axis_up",
"filter_glob",
"check_existing",
))
global_matrix = axis_conversion(to_forward=self.axis_forward,
to_up=self.axis_up,
).to_4x4()
keywords["global_matrix"] = global_matrix
return export_3ds.save(self, context, **keywords)
# Add to a menu
def menu_func_export(self, context):
self.layout.operator(Export3DS.bl_idname, text="3D Studio (.3ds)")
def menu_func_import(self, context):
self.layout.operator(Import3DS.bl_idname, text="3D Studio (.3ds)")
# Note: bpy.utils.register_module() was removed in Blender 2.80; register the
# classes explicitly instead.
classes = (
    Import3DS,
    Export3DS,
)
def register():
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.TOPBAR_MT_file_import.append(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
def unregister():
    bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
    for cls in classes:
        bpy.utils.unregister_class(cls)
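# Illustrative usage sketch (not part of the original add-on): once registered,
# the operators can also be driven from scripts, assuming valid paths:
#
#   bpy.ops.import_scene.autodesk_3ds(filepath="/path/to/model.3ds")
#   bpy.ops.export_scene.autodesk_3ds(filepath="/path/to/out.3ds", use_selection=True)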
# NOTES:
# why add 1 extra vertex? and remove it when done? -
# "Answer - eekadoodle - would need to re-order UV's without this since face
# order isnt always what we give blender, BMesh will solve :D"
#
# disabled scaling to size, this requires exposing bb (easy) and understanding
# how it works (needs some time)
if __name__ == "__main__":
register()
avg_line_length: 32.824561 | max_line_length: 78 | alphanum_fraction: 0.597363

hexsha: 4400d3348fb2784826efc89e17d2b7f7a21aef0d | size: 8,080 | ext: py | lang: Python
max_stars_repo_path: gQuant/plugins/hrp_plugin/greenflow_hrp_plugin/distanceNode.py
max_stars_repo_name: t-triobox/gQuant
max_stars_repo_head_hexsha: 6ee3ba104ce4c6f17a5755e7782298902d125563
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: gQuant/plugins/hrp_plugin/greenflow_hrp_plugin/distanceNode.py
max_issues_repo_name: t-triobox/gQuant
max_issues_repo_head_hexsha: 6ee3ba104ce4c6f17a5755e7782298902d125563
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: gQuant/plugins/hrp_plugin/greenflow_hrp_plugin/distanceNode.py
max_forks_repo_name: t-triobox/gQuant
max_forks_repo_head_hexsha: 6ee3ba104ce4c6f17a5755e7782298902d125563
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
"""
////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) NVIDIA Corporation. All rights reserved.
//
// NVIDIA Sample Code
//
// Please refer to the NVIDIA end user license agreement (EULA) associated
// with this source code for terms and conditions that govern your use of
// this software. Any use, reproduction, disclosure, or distribution of
// this software and related documentation outside the terms of the EULA
// is strictly prohibited.
//
////////////////////////////////////////////////////////////////////////////
"""
from greenflow.dataframe_flow import ConfSchema, PortsSpecSchema
from greenflow.dataframe_flow.template_node_mixin import TemplateNodeMixin
from greenflow.dataframe_flow import Node
from .kernels import compute_cov_distance
import cupy
import cudf
class DistanceNode(TemplateNodeMixin, Node):
def init(self):
TemplateNodeMixin.init(self)
self.delayed_process = True
self.infer_meta = False
self.INPUT_PORT_NAME = 'in'
self.COV_DF = 'cov_df'
self.MEAN_DF = 'mean_df'
self.STD_DF = 'std_df'
self.CORR_DF = 'corr_df'
self.DISTANCE_DF = 'distance_df'
port_type = PortsSpecSchema.port_type
port_inports = {
self.INPUT_PORT_NAME: {
port_type: [
"pandas.DataFrame", "cudf.DataFrame",
"dask_cudf.DataFrame", "dask.dataframe.DataFrame"
]
}
}
port_outports = {
self.MEAN_DF: {
port_type: "${port:in}"
},
self.STD_DF: {
port_type: "${port:in}"
},
self.COV_DF: {
port_type: "${port:in}"
},
self.CORR_DF: {
port_type: "${port:in}"
},
self.DISTANCE_DF: {
port_type: "${port:in}"
}
}
self.template_ports_setup(in_ports=port_inports,
out_ports=port_outports)
def update(self):
TemplateNodeMixin.update(self)
meta_outports = self.template_meta_setup().outports
meta_inports = self.template_meta_setup().inports
sub_dict = {
'year': 'int16',
'month': 'int16',
'sample_id': 'int64',
}
required = {
"date": "datetime64[ns]",
}
required.update(sub_dict)
meta_inports[self.INPUT_PORT_NAME] = required
json_cov = {}
json_dis = {}
json_mean = {}
json_corr = {}
json_std = {}
input_meta = self.get_input_meta()
if self.INPUT_PORT_NAME in input_meta:
assets = len(input_meta[self.INPUT_PORT_NAME]) - 4
for i in range(assets*assets):
json_cov[i] = 'float64'
for i in range(assets):
json_mean[i] = 'float64'
json_std[i] = 'float64'
for i in range(assets*(assets-1)//2):
json_dis[i] = 'float64'
json_corr[i] = 'float64'
json_cov.update(sub_dict)
json_dis.update(sub_dict)
json_mean.update(sub_dict)
json_std.update(sub_dict)
json_corr.update(sub_dict)
meta_outports[self.MEAN_DF] = json_mean
meta_outports[self.STD_DF] = json_std
meta_outports[self.COV_DF] = json_cov
meta_outports[self.CORR_DF] = json_corr
meta_outports[self.DISTANCE_DF] = json_dis
self.template_meta_setup(
in_ports=meta_inports,
out_ports=meta_outports
)
def conf_schema(self):
json = {
"title": "Compute the Distance Matrix and Cov df",
"type": "object",
"properties": {
"window": {
'type': "integer",
"title": "Window size",
"description": """the number of months used to compute the
distance and variance"""
}
},
"required": ["window"],
}
ui = {
}
return ConfSchema(json=json, ui=ui)
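    # Illustrative sketch (not part of the original source): a configuration
    # such as {"window": 12} makes `process` compute, for every `sample_id`,
    # rolling 12-month means, standard deviations, covariance, correlation and
    # distance matrices on whichever output ports are connected.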
def process(self, inputs):
df = inputs[self.INPUT_PORT_NAME]
all_sample_ids = df['sample_id'].unique()
total_samples = len(all_sample_ids)
window = self.conf['window']
means, cov, distance, all_dates = compute_cov_distance(total_samples,
df,
window=window)
total_samples, num_months, assets, assets = cov.shape
months_id = all_dates.dt.year*12 + (all_dates.dt.month-1)
months_id = months_id - months_id.min()
mid = (cupy.arange(months_id.max() + 1) +
(all_dates.dt.month - 1)[0])[window:]
minyear = all_dates.dt.year.min()
if len(mid) == 0:
mid = cupy.array([0])
months = mid % 12
years = mid // 12 + minyear
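        # Worked example (comment added, not part of the original source): for
        # a dataset starting in January 2018, the row for 2019-03 maps to
        #   months_id = (2019*12 + 2) - (2018*12 + 0) = 14,
        # so `mid % 12` recovers the 0-based calendar month (2 -> March) and
        # `mid // 12 + minyear` recovers the year (2019) of each windowed sample.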
output = {}
# print(num_months, len(mid))
if self.outport_connected(self.MEAN_DF):
df_mean = cudf.DataFrame(
means.reshape(total_samples*num_months, -1))
df_mean['year'] = cupy.concatenate(
[years]*total_samples).astype(cupy.int16)
df_mean['month'] = cupy.concatenate(
[months]*total_samples).astype(cupy.int16)
df_mean['sample_id'] = cupy.repeat(cupy.arange(
total_samples) + all_sample_ids.min(), len(mid))
output.update({self.MEAN_DF: df_mean})
if self.outport_connected(self.STD_DF):
data_ma = cov.reshape(total_samples*num_months, assets, assets)
diagonzied = cupy.diagonal(data_ma, 0, 1, 2) # get var
diagonzied = cupy.sqrt(diagonzied) # get std
df_std = cudf.DataFrame(diagonzied)
df_std['year'] = cupy.concatenate(
[years]*total_samples).astype(cupy.int16)
df_std['month'] = cupy.concatenate(
[months]*total_samples).astype(cupy.int16)
df_std['sample_id'] = cupy.repeat(cupy.arange(
total_samples) + all_sample_ids.min(), len(mid))
output.update({self.STD_DF: df_std})
if self.outport_connected(self.COV_DF):
df_cov = cudf.DataFrame(cov.reshape(total_samples*num_months, -1))
df_cov['year'] = cupy.concatenate(
[years]*total_samples).astype(cupy.int16)
df_cov['month'] = cupy.concatenate(
[months]*total_samples).astype(cupy.int16)
df_cov['sample_id'] = cupy.repeat(cupy.arange(
total_samples) + all_sample_ids.min(), len(mid))
output.update({self.COV_DF: df_cov})
if self.outport_connected(self.CORR_DF):
dis_ma = distance.reshape(total_samples*num_months, -1)
dis_ma = 1 - 2.0 * dis_ma
df_corr = cudf.DataFrame(dis_ma)
df_corr['year'] = cupy.concatenate(
[years]*total_samples).astype(cupy.int16)
df_corr['month'] = cupy.concatenate(
[months]*total_samples).astype(cupy.int16)
df_corr['sample_id'] = cupy.repeat(cupy.arange(
total_samples) + all_sample_ids.min(), len(mid))
output.update({self.CORR_DF: df_corr})
if self.outport_connected(self.DISTANCE_DF):
df_dis = cudf.DataFrame(distance.reshape(total_samples*num_months,
-1))
df_dis['year'] = cupy.concatenate(
[years]*total_samples).astype(cupy.int16)
df_dis['month'] = cupy.concatenate(
[months]*total_samples).astype(cupy.int16)
df_dis['sample_id'] = cupy.repeat(cupy.arange(
total_samples) + all_sample_ids.min(), len(mid))
output.update({self.DISTANCE_DF: df_dis})
return output
avg_line_length: 39.223301 | max_line_length: 78 | alphanum_fraction: 0.541337

hexsha: 09c357f445613cc7901f50edb7a22d6b13b263f1 | size: 2,694 | ext: py | lang: Python
max_stars_repo_path: tests/test_aws.py
max_stars_repo_name: procore/patroni
max_stars_repo_head_hexsha: 70bae1b267db4cf76d3ade299a3bef6741ec2b89
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tests/test_aws.py
max_issues_repo_name: procore/patroni
max_issues_repo_head_hexsha: 70bae1b267db4cf76d3ade299a3bef6741ec2b89
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/test_aws.py
max_forks_repo_name: procore/patroni
max_forks_repo_head_hexsha: 70bae1b267db4cf76d3ade299a3bef6741ec2b89
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
import unittest
import requests
import boto.ec2
from collections import namedtuple
from patroni.scripts.aws import AWSConnection
from requests.exceptions import RequestException
class MockEc2Connection:
def __init__(self, error=False):
self.error = error
def get_all_volumes(self, filters):
if self.error:
raise Exception("get_all_volumes")
oid = namedtuple('Volume', 'id')
return [oid(id='a'), oid(id='b')]
def create_tags(self, objects, tags):
if self.error or len(objects) == 0:
raise Exception("create_tags")
return True
class MockResponse:
def __init__(self, content):
self.content = content
self.ok = True
def json(self):
return self.content
class TestAWSConnection(unittest.TestCase):
def __init__(self, method_name='runTest'):
super(TestAWSConnection, self).__init__(method_name)
def set_error(self):
self.error = True
def set_json_error(self):
self.json_error = True
def boto_ec2_connect_to_region(self, region):
return MockEc2Connection(self.error)
def requests_get(self, url, **kwargs):
if self.error:
raise RequestException("foo")
result = namedtuple('Request', 'ok content')
result.ok = True
if url.split('/')[-1] == 'document' and not self.json_error:
result = {"instanceId": "012345", "region": "eu-west-1"}
else:
result = 'foo'
return MockResponse(result)
def setUp(self):
self.error = False
self.json_error = False
requests.get = self.requests_get
boto.ec2.connect_to_region = self.boto_ec2_connect_to_region
self.conn = AWSConnection('test')
def test_aws_available(self):
self.assertTrue(self.conn.aws_available())
def test_on_role_change(self):
self.assertTrue(self.conn._tag_ebs('master'))
self.assertTrue(self.conn._tag_ec2('master'))
self.assertTrue(self.conn.on_role_change('master'))
def test_non_aws(self):
self.set_error()
conn = AWSConnection('test')
self.assertFalse(conn.aws_available())
self.assertFalse(conn._tag_ebs('master'))
self.assertFalse(conn._tag_ec2('master'))
def test_aws_bizare_response(self):
self.set_json_error()
conn = AWSConnection('test')
self.assertFalse(conn.aws_available())
def test_aws_tag_ebs_error(self):
self.set_error()
self.assertFalse(self.conn._tag_ebs("master"))
def test_aws_tag_ec2_error(self):
self.set_error()
self.assertFalse(self.conn._tag_ec2("master"))
avg_line_length: 28.357895 | max_line_length: 68 | alphanum_fraction: 0.646993

hexsha: 23463df23fe604082778cc1f1d2d07bde5e8c012 | size: 992 | ext: py | lang: Python
max_stars_repo_path: acshaproxy/registry/events.py
max_stars_repo_name: cncolder/acs-haproxy-src
max_stars_repo_head_hexsha: 11631299110aef4db4929d29442c8a5744b3be41
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: acshaproxy/registry/events.py
max_issues_repo_name: cncolder/acs-haproxy-src
max_issues_repo_head_hexsha: 11631299110aef4db4929d29442c8a5744b3be41
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: acshaproxy/registry/events.py
max_forks_repo_name: cncolder/acs-haproxy-src
max_forks_repo_head_hexsha: 11631299110aef4db4929d29442c8a5744b3be41
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
import etcd_info
import logging
import sys
logger = logging.getLogger("haproxy")
class Events(object):
def __init__(self):
self.message_handler = None
def on_message(self, handler):
self.message_handler = handler
def run_forever(self, *args, **kwargs):
etcd_client = etcd_info.get_etcd_client()
wait_index = 0
while True:
logger.info("watch with index: %s" % wait_index)
result, wait_index = etcd_info.watch_prefix(etcd_client, etcd_info.get_services_base_uri(), wait_index)
logger.info("got result")
if result is not None:
# read the info
try:
services_info_reg, _ = etcd_info.get_services_info(etcd_client)
except Exception:
logger.warn("Unexpected error: %s" % sys.exc_info()[0])
else: # find the etcd action and log it
self.message_handler(services_info_reg)
avg_line_length: 33.066667 | max_line_length: 115 | alphanum_fraction: 0.604839

hexsha: 4f50d8638289f72d2945505f1ea94028a24506f3 | size: 3,308 | ext: py | lang: Python
max_stars_repo_path: tests/test_parsing.py
max_stars_repo_name: shammellee/pendulum
max_stars_repo_head_hexsha: bb179c8fb6ef92b7bfc471a46338abbfac9fafca
max_stars_repo_licenses: ["MIT"] | max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2018-11-25T03:10:22.000Z | max_stars_repo_stars_event_max_datetime: 2018-11-25T03:10:22.000Z
max_issues_repo_path: tests/test_parsing.py
max_issues_repo_name: shammellee/pendulum
max_issues_repo_head_hexsha: bb179c8fb6ef92b7bfc471a46338abbfac9fafca
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/test_parsing.py
max_forks_repo_name: shammellee/pendulum
max_forks_repo_head_hexsha: bb179c8fb6ef92b7bfc471a46338abbfac9fafca
max_forks_repo_licenses: ["MIT"] | max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2020-07-24T17:37:18.000Z | max_forks_repo_forks_event_max_datetime: 2020-07-24T17:37:18.000Z
import pendulum
from .conftest import assert_datetime, assert_date, assert_time, assert_duration
def test_parse():
text = "2016-10-16T12:34:56.123456+01:30"
dt = pendulum.parse(text)
assert isinstance(dt, pendulum.DateTime)
assert_datetime(dt, 2016, 10, 16, 12, 34, 56, 123456)
assert "+01:30" == dt.tz.name
assert 5400 == dt.offset
text = "2016-10-16"
dt = pendulum.parse(text)
assert isinstance(dt, pendulum.DateTime)
assert_datetime(dt, 2016, 10, 16, 0, 0, 0, 0)
assert 0 == dt.offset
with pendulum.test(pendulum.datetime(2015, 11, 12)):
text = "12:34:56.123456"
dt = pendulum.parse(text)
assert isinstance(dt, pendulum.DateTime)
assert_datetime(dt, 2015, 11, 12, 12, 34, 56, 123456)
assert 0 == dt.offset
def test_parse_with_timezone():
text = "2016-10-16T12:34:56.123456"
dt = pendulum.parse(text, tz="Europe/Paris")
assert_datetime(dt, 2016, 10, 16, 12, 34, 56, 123456)
assert "Europe/Paris" == dt.tz.name
assert 7200 == dt.offset
def test_parse_exact():
text = "2016-10-16T12:34:56.123456+01:30"
dt = pendulum.parse(text, exact=True)
assert isinstance(dt, pendulum.DateTime)
assert_datetime(dt, 2016, 10, 16, 12, 34, 56, 123456)
assert 5400 == dt.offset
text = "2016-10-16"
dt = pendulum.parse(text, exact=True)
assert isinstance(dt, pendulum.Date)
assert_date(dt, 2016, 10, 16)
text = "12:34:56.123456"
dt = pendulum.parse(text, exact=True)
assert isinstance(dt, pendulum.Time)
assert_time(dt, 12, 34, 56, 123456)
text = "13:00"
dt = pendulum.parse(text, exact=True)
assert isinstance(dt, pendulum.Time)
assert_time(dt, 13, 0, 0)
def test_parse_duration():
text = "P2Y3M4DT5H6M7S"
duration = pendulum.parse(text)
assert isinstance(duration, pendulum.Duration)
assert_duration(duration, 2, 3, 0, 4, 5, 6, 7)
text = "P2W"
duration = pendulum.parse(text)
assert isinstance(duration, pendulum.Duration)
assert_duration(duration, 0, 0, 2, 0, 0, 0, 0)
def test_parse_interval():
text = "2008-05-11T15:30:00Z/P1Y2M10DT2H30M"
period = pendulum.parse(text)
assert isinstance(period, pendulum.Period)
assert_datetime(period.start, 2008, 5, 11, 15, 30, 0, 0)
assert period.start.offset == 0
assert_datetime(period.end, 2009, 7, 21, 18, 0, 0, 0)
assert period.end.offset == 0
text = "P1Y2M10DT2H30M/2008-05-11T15:30:00Z"
period = pendulum.parse(text)
assert isinstance(period, pendulum.Period)
assert_datetime(period.start, 2007, 3, 1, 13, 0, 0, 0)
assert period.start.offset == 0
assert_datetime(period.end, 2008, 5, 11, 15, 30, 0, 0)
assert period.end.offset == 0
text = "2007-03-01T13:00:00Z/2008-05-11T15:30:00Z"
period = pendulum.parse(text)
assert isinstance(period, pendulum.Period)
assert_datetime(period.start, 2007, 3, 1, 13, 0, 0, 0)
assert period.start.offset == 0
assert_datetime(period.end, 2008, 5, 11, 15, 30, 0, 0)
assert period.end.offset == 0
def test_parse_now():
dt = pendulum.parse("now")
assert dt.timezone_name == "America/Toronto"
mock_now = pendulum.yesterday()
with pendulum.test(mock_now):
assert pendulum.parse("now") == mock_now
| 25.251908
| 80
| 0.657195
|
89edce94017f16c10255d0376d77c40812c9b29d
| 2,258
|
py
|
Python
|
micro_ros_diagnostic_bridge/launch/diagnostic_bridge.launch.py
|
Kannut/micro_ros_diagnostics
|
d57343e8d6a937eb4ba597f0d3872a1fd0037bb7
|
[
"Apache-2.0"
] | null | null | null |
micro_ros_diagnostic_bridge/launch/diagnostic_bridge.launch.py
|
Kannut/micro_ros_diagnostics
|
d57343e8d6a937eb4ba597f0d3872a1fd0037bb7
|
[
"Apache-2.0"
] | null | null | null |
micro_ros_diagnostic_bridge/launch/diagnostic_bridge.launch.py
|
Kannut/micro_ros_diagnostics
|
d57343e8d6a937eb4ba597f0d3872a1fd0037bb7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021 - for information on the respective copyright owner
# see the NOTICE file and/or the repository https://github.com/micro-ROS/system_modes.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import launch
import launch.actions
from launch.substitutions import LaunchConfiguration
import launch_ros.actions
logger = launch.substitutions.LaunchConfiguration('log_level')
def generate_launch_description():
return launch.LaunchDescription([
launch.actions.DeclareLaunchArgument(
'lookup_table',
description='Path to lookup table'),
launch.actions.DeclareLaunchArgument(
'log_level',
default_value=['info'],
description='Logging level'),
launch.actions.DeclareLaunchArgument(
'input_topic',
default_value=['diagnostics_uros'],
description='Remap for input topic'),
launch.actions.DeclareLaunchArgument(
'output_topic',
default_value=['diagnostics'],
description='Remap for output topic'),
launch.actions.DeclareLaunchArgument(
'namespace',
default_value=[''],
description='Namespace'),
launch_ros.actions.Node(
package='micro_ros_diagnostic_bridge',
executable='diagnostic_bridge',
namespace=LaunchConfiguration('namespace'),
parameters=[{'lookup_table': LaunchConfiguration('lookup_table')}],
remappings=[
('diagnostics_uros', LaunchConfiguration('input_topic')),
('diagnostics', LaunchConfiguration('output_topic'))
],
output='screen',
arguments=['--ros-args', '--log-level', logger])
])
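# A minimal invocation sketch (the file path below is illustrative; the argument
# names match the DeclareLaunchArgument entries above):
#
#   ros2 launch micro_ros_diagnostic_bridge diagnostic_bridge.launch.py \
#       lookup_table:=/path/to/lookup_table.yaml log_level:=debug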
| 38.931034
| 86
| 0.665633
|
6f2db62368d53484c84a67d93065296c22ed1ef6
| 1,055
|
py
|
Python
|
src/storages/memory.py
|
evestidor/svc-stock-manager
|
56cea7f49e735df565e4a90d31e210b74160a73e
|
[
"MIT"
] | null | null | null |
src/storages/memory.py
|
evestidor/svc-stock-manager
|
56cea7f49e735df565e4a90d31e210b74160a73e
|
[
"MIT"
] | 9
|
2019-12-04T23:17:07.000Z
|
2022-02-10T09:44:02.000Z
|
src/storages/memory.py
|
evestidor/svc-stock-manager
|
56cea7f49e735df565e4a90d31e210b74160a73e
|
[
"MIT"
] | null | null | null |
from typing import List
from src.domain import Stock
from src.exceptions import (
StockAlreadyExists,
StockDoesNotExist,
)
from src.interfaces import StockStorage
class StockMemoryStorage(StockStorage):
AlreadyExists = StockAlreadyExists
DoesNotExist = StockDoesNotExist
def __init__(self):
self._db = {}
def add(self, stock: Stock) -> Stock:
stock = self._clone_stock(stock)
if self._does_stock_exist(stock):
raise self.AlreadyExists
self._db[stock.symbol] = stock
return stock
def list(self) -> List[Stock]:
return list(self._db.values())
def update_price(self, stock: Stock) -> Stock:
try:
self._db[stock.symbol].price = stock.price
except KeyError as e:
raise self.DoesNotExist from e
return self._db[stock.symbol]
def _clone_stock(self, stock: Stock) -> Stock:
return Stock(**stock.__dict__)
def _does_stock_exist(self, stock: Stock) -> bool:
return stock.symbol in self._db
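# A minimal usage sketch, kept as comments because the Stock constructor lives in
# src.domain and is not shown here (keyword construction is an assumption):
#
#     storage = StockMemoryStorage()
#     storage.add(Stock(symbol="ACME", price=10.0))
#     storage.update_price(Stock(symbol="ACME", price=10.5))
#     print(storage.list())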
| 25.119048
| 54
| 0.654976
|
922834be1b3b25e3b0c48b078ab247cfea39ef28
| 422
|
py
|
Python
|
tests/protected/A.py
|
LLNL/PYB11Generator
|
ca4714eabf4591dc9bc49327aa20e159d8ea4b94
|
[
"BSD-3-Clause"
] | 7
|
2020-10-12T21:31:12.000Z
|
2022-01-05T16:56:20.000Z
|
tests/protected/A.py
|
LLNL/PYB11Generator
|
ca4714eabf4591dc9bc49327aa20e159d8ea4b94
|
[
"BSD-3-Clause"
] | 1
|
2019-06-30T05:19:07.000Z
|
2019-06-30T05:19:07.000Z
|
tests/protected/A.py
|
jmikeowen/PYB11Generator
|
ca4714eabf4591dc9bc49327aa20e159d8ea4b94
|
[
"BSD-3-Clause"
] | 1
|
2021-08-03T01:26:04.000Z
|
2021-08-03T01:26:04.000Z
|
from PYB11Generator import *
@PYB11template("T1", "T2")
class A:
def pyinit(self):
"A default constructor"
return
@PYB11pure_virtual
@PYB11protected
def func(self,
val1 = "const %(T1)s",
val2 = "const %(T2)s"):
return "void"
x = PYB11readwrite()
y = PYB11readwrite()
A_int_double = PYB11TemplateClass(A, template_parameters=("int", "double"))
| 20.095238
| 75
| 0.592417
|
6a3678abdbe3fbf5bc7c1433d0cde86974cf32e7
| 2,683
|
py
|
Python
|
3rdparty/pytorch/caffe2/python/operator_test/index_hash_ops_test.py
|
WoodoLee/TorchCraft
|
999f68aab9e7d50ed3ae138297226dc95fefc458
|
[
"MIT"
] | 15
|
2019-08-10T02:36:38.000Z
|
2021-07-14T13:45:32.000Z
|
3rdparty/pytorch/caffe2/python/operator_test/index_hash_ops_test.py
|
WoodoLee/TorchCraft
|
999f68aab9e7d50ed3ae138297226dc95fefc458
|
[
"MIT"
] | 7
|
2019-10-21T03:08:51.000Z
|
2022-03-11T23:54:28.000Z
|
pytorch/caffe2/python/operator_test/index_hash_ops_test.py
|
raghavnauhria/whatmt
|
c20483a437c82936cb0fb8080925e37b9c4bba87
|
[
"MIT"
] | 6
|
2020-10-16T13:28:31.000Z
|
2021-08-25T12:08:34.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
class TestIndexHashOps(serial.SerializedTestCase):
@serial.given(
indices=st.sampled_from([
np.int32, np.int64
]).flatmap(lambda dtype: hu.tensor(min_dim=1, max_dim=1, dtype=dtype)),
seed=st.integers(min_value=0, max_value=10),
modulo=st.integers(min_value=100000, max_value=200000),
**hu.gcs_cpu_only
)
def test_index_hash_ops(self, indices, seed, modulo, gc, dc):
op = core.CreateOperator("IndexHash",
["indices"], ["hashed_indices"],
seed=seed, modulo=modulo)
def index_hash(indices):
dtype = np.array(indices).dtype
assert dtype == np.int32 or dtype == np.int64
hashed_indices = []
for index in indices:
hashed = dtype.type(0xDEADBEEF * seed)
indices_bytes = np.array([index], dtype).view(np.int8)
for b in indices_bytes:
hashed = dtype.type(hashed * 65537 + b)
hashed = (modulo + hashed % modulo) % modulo
hashed_indices.append(hashed)
return [hashed_indices]
self.assertDeviceChecks(dc, op, [indices], [0])
self.assertReferenceChecks(gc, op, [indices], index_hash)
def test_shape_and_type_inference(self):
with hu.temp_workspace("shape_type_inf_int64"):
net = core.Net('test_net')
net.ConstantFill(
[], "values", shape=[64], dtype=core.DataType.INT64,
)
net.IndexHash(['values'], ['values_output'])
(shapes, types) = workspace.InferShapesAndTypes([net], {})
self.assertEqual(shapes["values_output"], [64])
self.assertEqual(types["values_output"], core.DataType.INT64)
with hu.temp_workspace("shape_type_inf_int32"):
net = core.Net('test_net')
net.ConstantFill(
[], "values", shape=[2, 32], dtype=core.DataType.INT32,
)
net.IndexHash(['values'], ['values_output'])
(shapes, types) = workspace.InferShapesAndTypes([net], {})
self.assertEqual(shapes["values_output"], [2, 32])
self.assertEqual(types["values_output"], core.DataType.INT32)
| 40.651515
| 79
| 0.613492
|
8a0d2f1017f244421162550f476db5d9a4274ed7
| 23,813
|
py
|
Python
|
src/pruning_layers/layers.py
|
ivclab/PackExpander
|
81b8e832018f60fc678883f3025c39cb1d289e27
|
[
"MIT"
] | 25
|
2019-03-28T09:02:44.000Z
|
2022-02-11T15:30:50.000Z
|
src/pruning_layers/layers.py
|
ivclab/PackExpander
|
81b8e832018f60fc678883f3025c39cb1d289e27
|
[
"MIT"
] | 2
|
2020-10-29T06:16:16.000Z
|
2021-01-04T02:23:04.000Z
|
src/pruning_layers/layers.py
|
ivclab/PackExpander
|
81b8e832018f60fc678883f3025c39cb1d289e27
|
[
"MIT"
] | 7
|
2020-03-26T05:39:30.000Z
|
2021-07-30T09:12:42.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensorflow layers with added variables for parameter masking.
Branched from tensorflow/contrib/layers/python/layers/layers.py
"""
# pylint: disable=missing-docstring
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import initializers
from tensorflow.contrib.layers.python.layers import utils
import sys, os
sys.path.append(os.path.dirname(__file__))
# from core_layers import MaskedConv2D, MaskedSeparableConv2D, MaskedFullyConnected
from core_layers import MaskedConv2D, MaskedSeparableConv2D, MaskedFullyConnected
from tensorflow.python.framework import ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as tf_variables
import tensorflow as tf
from tensorflow.python.layers import core as core_layers
def _model_variable_getter(getter,
name,
shape=None,
dtype=None,
initializer=None,
regularizer=None,
trainable=True,
collections=None,
caching_device=None,
partitioner=None,
rename=None,
use_resource=None,
**_):
"""Getter that uses model_variable for compatibility with core layers."""
short_name = name.split('/')[-1]
if rename and short_name in rename:
name_components = name.split('/')
name_components[-1] = rename[short_name]
name = '/'.join(name_components)
return variables.model_variable(
name,
shape=shape,
dtype=dtype,
initializer=initializer,
regularizer=regularizer,
collections=collections,
trainable=trainable,
caching_device=caching_device,
partitioner=partitioner,
custom_getter=getter,
use_resource=use_resource)
def _build_variable_getter(rename=None):
"""Build a model variable getter that respects scope getter and renames."""
# VariableScope will nest the getters
def layer_variable_getter(getter, *args, **kwargs):
kwargs['rename'] = rename
return _model_variable_getter(getter, *args, **kwargs)
return layer_variable_getter
def _add_variable_to_collections(variable, collections_set, collections_name):
"""Adds variable (or all its parts) to all collections with that name."""
collections = utils.get_variable_collections(collections_set,
collections_name) or []
variables_list = [variable]
if isinstance(variable, tf_variables.PartitionedVariable):
variables_list = [v for v in variable]
for collection in collections:
for var in variables_list:
if var not in ops.get_collection(collection):
ops.add_to_collection(collection, var)
@add_arg_scope
def masked_convolution(inputs,
num_outputs,
kernel_size,
stride=1,
padding='SAME',
data_format=None,
rate=1,
activation_fn=nn.relu,
normalizer_fn=None,
normalizer_params=None,
weights_initializer=initializers.xavier_initializer(),
weights_regularizer=None,
biases_initializer=init_ops.zeros_initializer(),
biases_regularizer=None,
reuse=None,
variables_collections=None,
outputs_collections=None,
trainable=True,
scope=None,
task_id=1):
"""Adds an 2D convolution followed by an optional batch_norm layer.
The layer creates a mask variable on top of the weight variable. The input to
the convolution operation is the elementwise multiplication of the mask
  variable and the weight variable.
It is required that 1 <= N <= 3.
`convolution` creates a variable called `weights`, representing the
convolutional kernel, that is convolved (actually cross-correlated) with the
`inputs` to produce a `Tensor` of activations. If a `normalizer_fn` is
provided (such as `batch_norm`), it is then applied. Otherwise, if
`normalizer_fn` is None and a `biases_initializer` is provided then a `biases`
  variable would be created and added to the activations. Finally, if
`activation_fn` is not `None`, it is applied to the activations as well.
Performs atrous convolution with input stride/dilation rate equal to `rate`
if a value > 1 for any dimension of `rate` is specified. In this case
`stride` values != 1 are not supported.
Args:
inputs: A Tensor of rank N+2 of shape
`[batch_size] + input_spatial_shape + [in_channels]` if data_format does
not start with "NC" (default), or
`[batch_size, in_channels] + input_spatial_shape` if data_format starts
with "NC".
num_outputs: Integer, the number of output filters.
kernel_size: A sequence of N positive integers specifying the spatial
      dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
stride: A sequence of N positive integers specifying the stride at which to
compute output. Can be a single integer to specify the same value for all
spatial dimensions. Specifying any `stride` value != 1 is incompatible
with specifying any `rate` value != 1.
padding: One of `"VALID"` or `"SAME"`.
data_format: A string or None. Specifies whether the channel dimension of
the `input` and output is the last dimension (default, or if `data_format`
does not start with "NC"), or the second dimension (if `data_format`
starts with "NC"). For N=1, the valid values are "NWC" (default) and
"NCW". For N=2, the valid values are "NHWC" (default) and "NCHW".
For N=3, the valid values are "NDHWC" (default) and "NCDHW".
rate: A sequence of N positive integers specifying the dilation rate to use
for atrous convolution. Can be a single integer to specify the same
value for all spatial dimensions. Specifying any `rate` value != 1 is
incompatible with specifying any `stride` value != 1.
activation_fn: Activation function. The default value is a ReLU function.
Explicitly set it to None to skip it and maintain a linear activation.
normalizer_fn: Normalization function to use instead of `biases`. If
`normalizer_fn` is provided then `biases_initializer` and
`biases_regularizer` are ignored and `biases` are not created nor added.
default set to None for no normalizer function
normalizer_params: Normalization function parameters.
weights_initializer: An initializer for the weights.
weights_regularizer: Optional regularizer for the weights.
biases_initializer: An initializer for the biases. If None skip biases.
biases_regularizer: Optional regularizer for the biases.
reuse: Whether or not the layer and its variables should be reused. To be
able to reuse the layer scope must be given.
variables_collections: Optional list of collections for all the variables or
a dictionary containing a different list of collection per variable.
outputs_collections: Collection to add the outputs.
trainable: If `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
scope: Optional scope for `variable_scope`.
Returns:
A tensor representing the output of the operation.
Raises:
ValueError: If `data_format` is invalid.
ValueError: Both 'rate' and `stride` are not uniformly 1.
"""
if data_format not in [None, 'NWC', 'NCW', 'NHWC', 'NCHW', 'NDHWC', 'NCDHW']:
raise ValueError('Invalid data_format: %r' % (data_format,))
layer_variable_getter = _build_variable_getter({
'bias': 'biases',
'kernel': 'weights'
})
with variable_scope.variable_scope(scope,
'Conv', [inputs], reuse=reuse,
custom_getter=layer_variable_getter) as sc:
inputs = ops.convert_to_tensor(inputs)
input_rank = inputs.get_shape().ndims
if input_rank == 3:
raise ValueError('Sparse Convolution not supported for input with rank',
input_rank)
elif input_rank == 4:
layer_class = MaskedConv2D
elif input_rank == 5:
raise ValueError('Sparse Convolution not supported for input with rank',
input_rank)
else:
raise ValueError('Sparse Convolution not supported for input with rank',
input_rank)
if data_format is None or data_format == 'NHWC':
df = 'channels_last'
elif data_format == 'NCHW':
df = 'channels_first'
else:
raise ValueError('Unsupported data format', data_format)
layer = layer_class(
filters=num_outputs,
kernel_size=kernel_size,
strides=stride,
padding=padding,
data_format=df,
dilation_rate=rate,
activation=None,
use_bias=not normalizer_fn and biases_initializer,
kernel_initializer=weights_initializer,
bias_initializer=biases_initializer,
kernel_regularizer=weights_regularizer,
bias_regularizer=biases_regularizer,
activity_regularizer=None,
trainable=trainable,
name=sc.name,
dtype=inputs.dtype.base_dtype,
task_id=task_id,
_scope=sc,
_reuse=reuse)
outputs = layer.apply(inputs)
# Add variables to collections.
_add_variable_to_collections(layer.kernel, variables_collections, 'weights')
if layer.use_bias:
_add_variable_to_collections(layer.bias, variables_collections, 'biases')
if normalizer_fn is not None:
normalizer_params = normalizer_params or {}
with tf.variable_scope('task_{}'.format(task_id)): # Because there are multi-task problems
outputs = normalizer_fn(outputs, **normalizer_params)
if activation_fn is not None:
outputs = activation_fn(outputs)
return utils.collect_named_outputs(outputs_collections,
sc.original_name_scope, outputs)
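# A minimal TF1-style usage sketch for the wrapper above (the input shape, scope
# name, and task_id are illustrative assumptions):
#
#     images = tf.placeholder(tf.float32, [None, 224, 224, 3])
#     net = masked_convolution(images, num_outputs=64, kernel_size=3,
#                              scope='conv1', task_id=1)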
@add_arg_scope
def masked_separable_convolution2d(
inputs,
num_outputs,
kernel_size,
depth_multiplier,
stride=1,
padding='SAME',
data_format=None,
rate=1,
activation_fn=nn.relu,
normalizer_fn=None,
normalizer_params=None,
weights_initializer=initializers.xavier_initializer(),
weights_regularizer=None,
biases_initializer=init_ops.zeros_initializer(),
biases_regularizer=None,
reuse=None,
variables_collections=None,
outputs_collections=None,
trainable=True,
scope=None,
task_id=1):
if data_format not in [None, 'NHWC', 'NCHW']:
raise ValueError('Invalid data_format: %r' % (data_format,))
layer_variable_getter = _build_variable_getter({
'bias': 'biases',
'depthwise_kernel': 'depthwise_weights',
'pointwise_kernel': 'pointwise_weights'
})
with variable_scope.variable_scope(
scope,
'SeparableConv2d', [inputs],
reuse=reuse,
custom_getter=layer_variable_getter) as sc:
inputs = ops.convert_to_tensor(inputs)
if data_format is None or data_format == 'NHWC':
df = 'channels_last'
elif data_format == 'NCHW':
df = 'channels_first'
else:
raise ValueError('Unsupported data format', data_format)
if num_outputs is not None:
layer = MaskedSeparableConv2D(
filters=num_outputs,
kernel_size=kernel_size,
strides=stride,
padding=padding,
data_format=df,
dilation_rate=utils.two_element_tuple(rate),
activation=None,
depth_multiplier=depth_multiplier,
use_bias=not normalizer_fn and biases_initializer,
depthwise_initializer=weights_initializer,
pointwise_initializer=weights_initializer,
depthwise_regularizer=weights_regularizer,
pointwise_regularizer=weights_regularizer,
bias_initializer=biases_initializer,
bias_regularizer=biases_regularizer,
activity_regularizer=None,
trainable=trainable,
name=sc.name,
dtype=inputs.dtype.base_dtype,
task_id=task_id,
_scope=sc,
_reuse=reuse)
outputs = layer.apply(inputs)
# Add variables to collections.
_add_variable_to_collections(layer.depthwise_kernel,
variables_collections, 'weights')
_add_variable_to_collections(layer.pointwise_kernel,
variables_collections, 'weights')
if layer.use_bias:
_add_variable_to_collections(layer.bias, variables_collections, 'biases')
if normalizer_fn is not None:
normalizer_params = normalizer_params or {}
with tf.variable_scope('task_{}'.format(task_id)): # Because there are multi-task problems
outputs = normalizer_fn(outputs, **normalizer_params)
else:
raise ValueError('Num Outputs is None, Need to apply depthwise conv2d')
if activation_fn is not None:
outputs = activation_fn(outputs)
return utils.collect_named_outputs(outputs_collections,
sc.original_name_scope, outputs)
masked_conv2d = masked_convolution
masked_separable_conv2d = masked_separable_convolution2d
@add_arg_scope
def masked_fully_connected(
inputs,
num_outputs,
activation_fn=nn.relu,
normalizer_fn=None,
normalizer_params=None,
weights_initializer=initializers.xavier_initializer(),
weights_regularizer=None,
biases_initializer=init_ops.zeros_initializer(),
biases_regularizer=None,
reuse=None,
variables_collections=None,
outputs_collections=None,
trainable=True,
scope=None,
task_id=1):
"""Adds a sparse fully connected layer. The weight matrix is masked.
`fully_connected` creates a variable called `weights`, representing a fully
connected weight matrix, which is multiplied by the `inputs` to produce a
`Tensor` of hidden units. If a `normalizer_fn` is provided (such as
`batch_norm`), it is then applied. Otherwise, if `normalizer_fn` is
None and a `biases_initializer` is provided then a `biases` variable would be
  created and added to the hidden units. Finally, if `activation_fn` is not `None`,
it is applied to the hidden units as well.
Note: that if `inputs` have a rank greater than 2, then `inputs` is flattened
prior to the initial matrix multiply by `weights`.
Args:
inputs: A tensor of at least rank 2 and static value for the last dimension;
i.e. `[batch_size, depth]`, `[None, None, None, channels]`.
num_outputs: Integer or long, the number of output units in the layer.
activation_fn: Activation function. The default value is a ReLU function.
Explicitly set it to None to skip it and maintain a linear activation.
normalizer_fn: Normalization function to use instead of `biases`. If
`normalizer_fn` is provided then `biases_initializer` and
`biases_regularizer` are ignored and `biases` are not created nor added.
default set to None for no normalizer function
normalizer_params: Normalization function parameters.
weights_initializer: An initializer for the weights.
weights_regularizer: Optional regularizer for the weights.
biases_initializer: An initializer for the biases. If None skip biases.
biases_regularizer: Optional regularizer for the biases.
reuse: Whether or not the layer and its variables should be reused. To be
able to reuse the layer scope must be given.
variables_collections: Optional list of collections for all the variables or
a dictionary containing a different list of collections per variable.
outputs_collections: Collection to add the outputs.
trainable: If `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
scope: Optional scope for variable_scope.
Returns:
The tensor variable representing the result of the series of operations.
Raises:
ValueError: If x has rank less than 2 or if its last dimension is not set.
"""
if not isinstance(num_outputs, six.integer_types):
raise ValueError('num_outputs should be int or long, got %s.' %
(num_outputs,))
layer_variable_getter = _build_variable_getter({
'bias': 'biases',
'kernel': 'weights'
})
with variable_scope.variable_scope(
scope,
'FC', [inputs],
reuse=reuse,
custom_getter=layer_variable_getter) as sc:
inputs = ops.convert_to_tensor(inputs)
layer = MaskedFullyConnected(
units=num_outputs,
activation=None,
use_bias=not normalizer_fn and biases_initializer,
kernel_initializer=weights_initializer,
bias_initializer=biases_initializer,
kernel_regularizer=weights_regularizer,
bias_regularizer=biases_regularizer,
activity_regularizer=None,
trainable=trainable,
name=sc.name,
dtype=inputs.dtype.base_dtype,
task_id=task_id,
_scope=sc,
_reuse=reuse)
outputs = layer.apply(inputs)
# Add variables to collections.
_add_variable_to_collections(layer.kernel, variables_collections, 'weights')
if layer.bias is not None:
_add_variable_to_collections(layer.bias, variables_collections, 'biases')
# Apply normalizer function / layer.
if normalizer_fn is not None:
if not normalizer_params:
normalizer_params = {}
with tf.variable_scope('task_{}'.format(task_id)): # Because there are multi-task problems
outputs = normalizer_fn(outputs, **normalizer_params)
# outputs = normalizer_fn(outputs, **normalizer_params)
if activation_fn is not None:
outputs = activation_fn(outputs)
return utils.collect_named_outputs(outputs_collections,
sc.original_name_scope, outputs)
@add_arg_scope
def customized_slim_fully_connected(
inputs,
num_outputs,
activation_fn=nn.relu,
normalizer_fn=None,
normalizer_params=None,
weights_initializer=initializers.xavier_initializer(),
weights_regularizer=None,
biases_initializer=init_ops.zeros_initializer(),
biases_regularizer=None,
reuse=None,
variables_collections=None,
outputs_collections=None,
trainable=True,
scope=None,
    task_id=1):
  """Adds a standard fully connected layer (the weight matrix is not masked).
`fully_connected` creates a variable called `weights`, representing a fully
connected weight matrix, which is multiplied by the `inputs` to produce a
`Tensor` of hidden units. If a `normalizer_fn` is provided (such as
`batch_norm`), it is then applied. Otherwise, if `normalizer_fn` is
None and a `biases_initializer` is provided then a `biases` variable would be
  created and added to the hidden units. Finally, if `activation_fn` is not `None`,
it is applied to the hidden units as well.
Note: that if `inputs` have a rank greater than 2, then `inputs` is flattened
prior to the initial matrix multiply by `weights`.
Args:
inputs: A tensor of at least rank 2 and static value for the last dimension;
i.e. `[batch_size, depth]`, `[None, None, None, channels]`.
num_outputs: Integer or long, the number of output units in the layer.
activation_fn: Activation function. The default value is a ReLU function.
Explicitly set it to None to skip it and maintain a linear activation.
normalizer_fn: Normalization function to use instead of `biases`. If
`normalizer_fn` is provided then `biases_initializer` and
`biases_regularizer` are ignored and `biases` are not created nor added.
default set to None for no normalizer function
normalizer_params: Normalization function parameters.
weights_initializer: An initializer for the weights.
weights_regularizer: Optional regularizer for the weights.
biases_initializer: An initializer for the biases. If None skip biases.
biases_regularizer: Optional regularizer for the biases.
reuse: Whether or not the layer and its variables should be reused. To be
able to reuse the layer scope must be given.
variables_collections: Optional list of collections for all the variables or
a dictionary containing a different list of collections per variable.
outputs_collections: Collection to add the outputs.
trainable: If `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
scope: Optional scope for variable_scope.
Returns:
The tensor variable representing the result of the series of operations.
Raises:
ValueError: If x has rank less than 2 or if its last dimension is not set.
"""
if not isinstance(num_outputs, six.integer_types):
raise ValueError('num_outputs should be int or long, got %s.' %
(num_outputs,))
layer_variable_getter = _build_variable_getter({
'bias': 'biases',
'kernel': 'weights'
})
with variable_scope.variable_scope(
scope,
'FC', [inputs],
reuse=reuse,
custom_getter=layer_variable_getter) as sc:
inputs = ops.convert_to_tensor(inputs)
layer = core_layers.Dense(
units=num_outputs,
activation=None,
use_bias=not normalizer_fn and biases_initializer,
kernel_initializer=weights_initializer,
bias_initializer=biases_initializer,
kernel_regularizer=weights_regularizer,
bias_regularizer=biases_regularizer,
activity_regularizer=None,
trainable=trainable,
name=sc.name,
dtype=inputs.dtype.base_dtype,
_scope=sc,
_reuse=reuse)
outputs = layer.apply(inputs)
# Add variables to collections.
_add_variable_to_collections(layer.kernel, variables_collections, 'weights')
if layer.bias is not None:
_add_variable_to_collections(layer.bias, variables_collections, 'biases')
# Apply normalizer function / layer.
if normalizer_fn is not None:
if not normalizer_params:
normalizer_params = {}
with tf.variable_scope('task_{}'.format(task_id)): # Because there are multi-task problems
outputs = normalizer_fn(outputs, **normalizer_params)
# outputs = normalizer_fn(outputs, **normalizer_params)
if activation_fn is not None:
outputs = activation_fn(outputs)
return utils.collect_named_outputs(outputs_collections,
sc.original_name_scope, outputs)
| 40.705983
| 98
| 0.693571
|
5ab135d7a54e1f290b344fb5f89ff7ffe33a4fa4
| 2,410
|
py
|
Python
|
src/db-up/azext_db_up/vendored_sdks/azure_mgmt_rdbms/postgresql/models/server_properties_for_create.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 207
|
2017-11-29T06:59:41.000Z
|
2022-03-31T10:00:53.000Z
|
src/db-up/azext_db_up/vendored_sdks/azure_mgmt_rdbms/postgresql/models/server_properties_for_create.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 4,061
|
2017-10-27T23:19:56.000Z
|
2022-03-31T23:18:30.000Z
|
src/db-up/azext_db_up/vendored_sdks/azure_mgmt_rdbms/postgresql/models/server_properties_for_create.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 802
|
2017-10-11T17:36:26.000Z
|
2022-03-31T22:24:32.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ServerPropertiesForCreate(Model):
"""The properties used to create a new server.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: ServerPropertiesForDefaultCreate,
ServerPropertiesForRestore, ServerPropertiesForGeoRestore
All required parameters must be populated in order to send to Azure.
:param version: Server version. Possible values include: '9.5', '9.6',
'10', '10.0', '10.2'
:type version: str or ~azure.mgmt.rdbms.postgresql.models.ServerVersion
:param ssl_enforcement: Enable ssl enforcement or not when connect to
server. Possible values include: 'Enabled', 'Disabled'
:type ssl_enforcement: str or
~azure.mgmt.rdbms.postgresql.models.SslEnforcementEnum
:param storage_profile: Storage profile of a server.
:type storage_profile: ~azure.mgmt.rdbms.postgresql.models.StorageProfile
:param create_mode: Required. Constant filled by server.
:type create_mode: str
"""
_validation = {
'create_mode': {'required': True},
}
_attribute_map = {
'version': {'key': 'version', 'type': 'str'},
'ssl_enforcement': {'key': 'sslEnforcement', 'type': 'SslEnforcementEnum'},
'storage_profile': {'key': 'storageProfile', 'type': 'StorageProfile'},
'create_mode': {'key': 'createMode', 'type': 'str'},
}
_subtype_map = {
'create_mode': {'Default': 'ServerPropertiesForDefaultCreate', 'PointInTimeRestore': 'ServerPropertiesForRestore', 'GeoRestore': 'ServerPropertiesForGeoRestore'}
}
def __init__(self, **kwargs):
super(ServerPropertiesForCreate, self).__init__(**kwargs)
self.version = kwargs.get('version', None)
self.ssl_enforcement = kwargs.get('ssl_enforcement', None)
self.storage_profile = kwargs.get('storage_profile', None)
self.create_mode = None
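# As the docstring notes, callers normally instantiate one of the sub-classes
# rather than this base model; a rough sketch (the field names below come from
# the wider azure-mgmt-rdbms package and are not defined in this file):
#
#     props = ServerPropertiesForDefaultCreate(
#         administrator_login='admin',
#         administrator_login_password='<password>',
#     )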
| 41.551724
| 169
| 0.660166
|
0466350039a51495a96dad604cc3828f2127b41f
| 152
|
py
|
Python
|
feeder/api/models/__init__.py
|
jordan-hamilton/petnet-feeder-service
|
66c3192b0e66f4eefb5fd55cceb1219fa1ddb914
|
[
"MIT"
] | 47
|
2020-04-23T20:28:27.000Z
|
2020-10-07T19:49:10.000Z
|
feeder/api/models/__init__.py
|
jordan-hamilton/petnet-feeder-service
|
66c3192b0e66f4eefb5fd55cceb1219fa1ddb914
|
[
"MIT"
] | 101
|
2021-01-23T05:23:33.000Z
|
2022-03-28T13:38:13.000Z
|
feeder/api/models/__init__.py
|
ericchapman80/petnet-api-hacking
|
23cff84317d7380d7d1c0a2718cc153e83920906
|
[
"MIT"
] | 9
|
2020-04-25T17:22:44.000Z
|
2020-10-07T04:36:56.000Z
|
from pydantic import BaseModel
class BasePaginatedList(BaseModel):
size: int = 0
page: int = 0
totalSize: int = 0
totalPages: int = 1
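# Minimal usage sketch (the values are illustrative):
#     BasePaginatedList(size=25, page=0, totalSize=100, totalPages=4)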
| 16.888889
| 35
| 0.664474
|
8661420fa46026edb9d594ae0fb58b437eae34d4
| 254
|
py
|
Python
|
python/12p1.py
|
dsumike/adventofcode
|
cd5e484fa162bada67625c3779580d77e87d1daa
|
[
"MIT"
] | null | null | null |
python/12p1.py
|
dsumike/adventofcode
|
cd5e484fa162bada67625c3779580d77e87d1daa
|
[
"MIT"
] | null | null | null |
python/12p1.py
|
dsumike/adventofcode
|
cd5e484fa162bada67625c3779580d77e87d1daa
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import re
# Part 1
# ignore that this is JSON, look for all ints and add them together
with open('../input/12.txt') as fileobj:
json = fileobj.readline()
total = sum([int(x) for x in re.findall(r'-?[0-9]+', json)])
print(total)
| 19.538462
| 67
| 0.669291
|
f3747ad21d103d033483025376c35ddb48b4a17b
| 8,228
|
py
|
Python
|
official/nlp/gpt/src/gpt.py
|
mindspore-ai/models
|
9127b128e2961fd698977e918861dadfad00a44c
|
[
"Apache-2.0"
] | 77
|
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
official/nlp/gpt/src/gpt.py
|
mindspore-ai/models
|
9127b128e2961fd698977e918861dadfad00a44c
|
[
"Apache-2.0"
] | 3
|
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
official/nlp/gpt/src/gpt.py
|
mindspore-ai/models
|
9127b128e2961fd698977e918861dadfad00a44c
|
[
"Apache-2.0"
] | 24
|
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""GPT model"""
import numpy as np
import mindspore.nn as nn
from mindspore.common.parameter import Parameter
import mindspore.common.dtype as mstype
from mindspore.common.initializer import TruncatedNormal, initializer
from mindspore.ops import operations as P
from mindspore.ops import functional as F
from mindspore.nn.transformer.layers import _LayerNorm
from mindspore.nn.transformer.transformer import AttentionMask, TransformerEncoder
class EmbeddingLookup(nn.Cell):
"""
The embedding lookup table for vocabulary
Args:
config(GPTConfig): the config of network
Inputs:
input_ids: the tokenized inputs with datatype int32
Returns:
output: Tensor, the embedding vector for the input with shape (batch_size, seq_length, embedding_size)
self.embedding_table: Tensor, the embedding table for the vocabulary
"""
def __init__(self, config):
super(EmbeddingLookup, self).__init__()
self.vocab_size = config.vocab_size
self.embedding_size = config.embedding_size
self.embedding_table = Parameter(initializer(TruncatedNormal(0.02), [self.vocab_size, self.embedding_size]))
self.gather = P.Gather()
self.shape = (-1, config.seq_length, config.embedding_size)
def construct(self, input_ids):
output = self.gather(self.embedding_table, input_ids, 0)
return output, self.embedding_table
class GPT_Model(nn.Cell):
"""
The backbone of GPT network
Args:
config(GPTConfig): the config of network
Inputs:
input_ids: the tokenized inputs with datatype int32
input_mask: the mask indicating whether each position is a valid input
layer_past: the previous feature map
Returns:
output_state: Tensor, the output logit of backbone
present_layer: Tensor, the current feature map
embedding_table: Tensor, the embedding table for the vocabulary
"""
def __init__(self, config):
super(GPT_Model, self).__init__()
self.get_attention_mask = AttentionMask(seq_length=config.seq_length)
self.word_embedding = EmbeddingLookup(config)
self.position_embedding = nn.Embedding(config.seq_length, config.embedding_size,
embedding_table=TruncatedNormal(0.02))
self.blocks = nn.CellList()
self.encoder = TransformerEncoder(batch_size=config.batch_size,
num_layers=config.num_layers,
hidden_size=config.embedding_size,
ffn_hidden_size=config.embedding_size * 4,
seq_length=config.seq_length,
num_heads=config.num_heads,)
self.layernorm = _LayerNorm((config.embedding_size,)).to_float(config.compute_dtype)
self.use_past = config.use_past
self.past = tuple([None]*config.num_layers)
self.num_layers = config.num_layers
def construct(self, input_ids, input_mask, layer_past=None):
"""GPT model"""
if not self.use_past:
layer_past = self.past
input_embedding, embedding_table = self.word_embedding(input_ids)
batch_size, seq_length = F.shape(input_ids)
input_position = F.tuple_to_array(F.make_range(seq_length))
input_position = P.Tile()(input_position, (batch_size, 1))
position_embedding = self.position_embedding(input_position)
hidden_states = input_embedding + position_embedding
hidden_states = P.Cast()(hidden_states, mstype.float16)
attention_mask = self.get_attention_mask(input_mask)
hidden_states, present_layer = self.encoder(hidden_states, attention_mask)
output_state = self.layernorm(hidden_states)
return output_state, present_layer, embedding_table
class GPT_Head(nn.Cell):
"""
Head for GPT to get the logits of each token in the vocab
Args:
config(GPTConfig): the config of network
Inputs:
state: the output of the backbone
embedding_table: the embedding table of the vocabulary
Returns:
logits: Tensor, the logits of the corresponding inputs
"""
def __init__(self, config):
super(GPT_Head, self).__init__()
self.matmul = P.MatMul(transpose_b=True)
self.embedding_size = config.embedding_size
self.log_softmax = P.LogSoftmax(axis=-1)
self.dtype = config.compute_dtype
self.cast = P.Cast()
def construct(self, state, embedding_table):
state = P.Reshape()(state, (-1, self.embedding_size))
logits = self.matmul(state, self.cast(embedding_table, self.dtype))
return logits
class GPT(nn.Cell):
"""
    The GPT network, consisting of two parts: the backbone and the head
Args:
config(GPTConfig): the config of network
Inputs:
input_ids: the tokenized inputs
input_mask: the mask indicating whether each position is a valid input
past: the previous feature map
Returns:
        logits: Tensor, the logits of the corresponding inputs with shape (batch_size, seq_length, vocab_size)
"""
def __init__(self, config):
super(GPT, self).__init__()
self.backbone = GPT_Model(config)
self.head = GPT_Head(config)
def construct(self, input_ids, input_mask, past=None):
output_states, _, embedding_table = self.backbone(input_ids, input_mask, past)
logits = self.head(output_states, embedding_table)
return logits
class GPTWithLoss(nn.Cell):
"""
GPT training loss
Args:
network: backbone network of GPT2/3
loss: loss function, e.g., crossentropy
eos_token: the end_of_sentence token
Inputs:
input_ids: the tokenized inputs
past: the previous feature map
Returns:
output: Tensor, the loss of the network
"""
def __init__(self, network, loss, eos_token=50256):
super(GPTWithLoss, self).__init__(auto_prefix=False)
self.network = network
self.loss = loss
self.eos_token = eos_token
def construct(self, input_ids, past=None):
tokens = input_ids[:, :-1]
input_mask = F.cast(F.not_equal(tokens, self.eos_token), mstype.float32)
logits = self.network(tokens, input_mask, past)
labels = input_ids[:, 1:]
labels = P.Reshape()(labels, (-1,))
input_mask = P.Reshape()(input_mask, (-1,))
output = self.loss(logits, labels, input_mask)
return output
class EvalNet(nn.Cell):
"""
GPT evaluation net
Args:
backbone: backbone network of GPT2/3
generate: enable generate mode
Inputs:
        input_ids: the tokenized inputs
Returns:
outputs: Tensor, corresponding output for different tasks
"""
def __init__(self, backbone, generate=False):
super(EvalNet, self).__init__(auto_prefix=False)
self.backbone = backbone
self.argmax = P.Argmax()
self.generate = generate
self.cast = P.Cast()
def construct(self, input_ids, input_mask):
"""evaluation net"""
input_mask = self.cast(input_mask, mstype.float32)
logits = self.backbone(input_ids, input_mask)
outputs = None
if self.generate:
outputs = nn.LogSoftmax()(logits)
outputs = F.tensor_pow(np.e, outputs)
else:
outputs = self.argmax(logits)
return outputs
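# A rough wiring sketch; GPTConfig and the cross-entropy loss live in sibling
# modules not shown here, so those names are assumptions:
#
#     config = GPTConfig(...)                  # batch_size, seq_length, vocab_size, ...
#     model = GPTWithLoss(GPT(config), loss=CrossEntropyLoss(config))
#     loss = model(input_ids)                  # int32 ids of shape (batch_size, seq_length + 1)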
| 36.087719
| 116
| 0.658605
|
a4139b1a0de6890a85e78e3da3d7aaee35ed43ad
| 16,523
|
py
|
Python
|
code/python/FactSetEstimates/v2/fds/sdk/FactSetEstimates/model/rolling_detail_request.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/FactSetEstimates/v2/fds/sdk/FactSetEstimates/model/rolling_detail_request.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/FactSetEstimates/v2/fds/sdk/FactSetEstimates/model/rolling_detail_request.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
FactSet Estimates
    Gain access to 20+ years of comprehensive estimates and statistics of over 250+ estimated metrics, including financial statement items, product segments, geosegments, and industry metrics. FactSet's consensus estimates are aggregated from a wide base of over 800+ contributors and cover over 19,000 active companies across 90+ countries. Data returned can be accessed at quarterly, fiscal-year, and calendar-year frequencies. FactSet Estimates updates on a real-time basis intraday (every 5 minutes). Update times vary between earnings season and non-earnings season, but the goal is to have the data available to the client within a few hours after FactSet receives updated information. Update times can often be much faster, as FactSet has always been known as one of the fastest estimate providers in the market. # noqa: E501
The version of the OpenAPI document: 2.3.0
Contact: api@factset.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.FactSetEstimates.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.FactSetEstimates.exceptions import ApiAttributeError
def lazy_import():
from fds.sdk.FactSetEstimates.model.frequency import Frequency
from fds.sdk.FactSetEstimates.model.ids import Ids
from fds.sdk.FactSetEstimates.model.metrics import Metrics
from fds.sdk.FactSetEstimates.model.periodicity_detail import PeriodicityDetail
globals()['Frequency'] = Frequency
globals()['Ids'] = Ids
globals()['Metrics'] = Metrics
globals()['PeriodicityDetail'] = PeriodicityDetail
class RollingDetailRequest(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'ids': (Ids,), # noqa: E501
'metrics': (Metrics,), # noqa: E501
'start_date': (str,), # noqa: E501
'end_date': (str,), # noqa: E501
'frequency': (Frequency,), # noqa: E501
'include_all': (bool,), # noqa: E501
'relative_fiscal_start': (int,), # noqa: E501
'relative_fiscal_end': (int,), # noqa: E501
'periodicity': (PeriodicityDetail,), # noqa: E501
'currency': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'ids': 'ids', # noqa: E501
'metrics': 'metrics', # noqa: E501
'start_date': 'startDate', # noqa: E501
'end_date': 'endDate', # noqa: E501
'frequency': 'frequency', # noqa: E501
'include_all': 'includeAll', # noqa: E501
'relative_fiscal_start': 'relativeFiscalStart', # noqa: E501
'relative_fiscal_end': 'relativeFiscalEnd', # noqa: E501
'periodicity': 'periodicity', # noqa: E501
'currency': 'currency', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, ids, metrics, *args, **kwargs): # noqa: E501
"""RollingDetailRequest - a model defined in OpenAPI
Args:
ids (Ids):
metrics (Metrics):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
            start_date (str): The start date requested for a given date range in **YYYY-MM-DD** format. If left blank, the API will default to previous close. Future dates (T+1) are not accepted in this endpoint. . [optional] # noqa: E501
end_date (str): The end date requested for a given date range in **YYYY-MM-DD** format. If left blank, the API will default to previous close. Future dates (T+1) are not accepted in this endpoint. . [optional] # noqa: E501
frequency (Frequency): [optional] # noqa: E501
include_all (bool): Include All filter is used to identify included and excluded broker details from the consensus By default the service would return only the brokers included in the consensus- * **TRUE** = Returns all the brokers included and excluded in the consensus * **FALSE** = Returns only the broker details included in the consensus . [optional] if omitted the server will use the default value of False # noqa: E501
relative_fiscal_start (int): Relative fiscal period, expressed as an integer, used to filter results.. [optional] # noqa: E501
relative_fiscal_end (int): Relative fiscal period, expressed as an integer, used to filter results.. [optional] # noqa: E501
periodicity (PeriodicityDetail): [optional] # noqa: E501
currency (str): Currency code for adjusting the data. For a list of currency ISO codes, visit [Online Assistant Page #1470](https://oa.apps.factset.com/pages/1470).. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.ids = ids
self.metrics = metrics
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, ids, metrics, *args, **kwargs): # noqa: E501
"""RollingDetailRequest - a model defined in OpenAPI
Args:
ids (Ids):
metrics (Metrics):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
            start_date (str): The start date requested for a given date range in **YYYY-MM-DD** format. If left blank, the API will default to previous close. Future dates (T+1) are not accepted in this endpoint. . [optional] # noqa: E501
end_date (str): The end date requested for a given date range in **YYYY-MM-DD** format. If left blank, the API will default to previous close. Future dates (T+1) are not accepted in this endpoint. . [optional] # noqa: E501
frequency (Frequency): [optional] # noqa: E501
include_all (bool): Include All filter is used to identify included and excluded broker details from the consensus By default the service would return only the brokers included in the consensus- * **TRUE** = Returns all the brokers included and excluded in the consensus * **FALSE** = Returns only the broker details included in the consensus . [optional] if omitted the server will use the default value of False # noqa: E501
relative_fiscal_start (int): Relative fiscal period, expressed as an integer, used to filter results.. [optional] # noqa: E501
relative_fiscal_end (int): Relative fiscal period, expressed as an integer, used to filter results.. [optional] # noqa: E501
periodicity (PeriodicityDetail): [optional] # noqa: E501
currency (str): Currency code for adjusting the data. For a list of currency ISO codes, visit [Online Assistant Page #1470](https://oa.apps.factset.com/pages/1470).. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.ids = ids
self.metrics = metrics
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| 52.789137
| 859
| 0.60794
|
1daadd23d0fdab21106af0d26bb81a3cdcd1ae26
| 3,308
|
py
|
Python
|
tests/parser/syntax/test_list.py
|
williamremor/vyper
|
4d33dc4140f7d0c339876afb6af7b417bd0ed8e0
|
[
"MIT"
] | 1
|
2018-08-31T02:32:57.000Z
|
2018-08-31T02:32:57.000Z
|
tests/parser/syntax/test_list.py
|
williamremor/vyper
|
4d33dc4140f7d0c339876afb6af7b417bd0ed8e0
|
[
"MIT"
] | null | null | null |
tests/parser/syntax/test_list.py
|
williamremor/vyper
|
4d33dc4140f7d0c339876afb6af7b417bd0ed8e0
|
[
"MIT"
] | null | null | null |
import pytest
from pytest import raises
from vyper import compiler
from vyper.exceptions import TypeMismatchException, StructureException
fail_list = [
"""
@public
def foo():
x: num[3] = [1, 2, 3]
x = 4
""",
"""
@public
def foo():
x: num[3] = [1, 2, 3]
x = [4, 5, 6, 7]
""",
"""
@public
def foo() -> num[2]:
return [3, 5, 7]
""",
"""
@public
def foo() -> num[2]:
return [3]
""",
"""
y: num[3]
@public
def foo(x: num[3]):
self.y = x[0]
""",
"""
y: num[3]
@public
def foo(x: num[3]):
self.y[0] = x
""",
"""
y: num[4]
@public
def foo(x: num[3]):
self.y = x
""",
"""
bar: num[3]
@public
def foo():
self.bar = [1, 2, 0x1234567890123456789012345678901234567890]
""",
("""
bar: num[3]
@public
def foo():
self.bar = []
""", StructureException),
"""
b: num[5]
@public
def foo():
x = self.b[0][1]
""",
"""
bar: num[3]
@public
def foo():
self.bar = [1, [2], 3]
""",
"""
bar: num[3][3]
@public
def foo():
self.bar = 5
""",
"""
bar: num[3][3]
@public
def foo():
self.bar = [2, 5]
""",
"""
bar: num[3]
@public
def foo():
self.bar = [1, 2, 3, 4]
""",
"""
bar: num[3]
@public
def foo():
self.bar = [1, 2]
""",
"""
b: num[5]
@public
def foo():
self.b[0] = 7.5
""",
"""
b: num[5]
@public
def foo():
x = self.b[0].cow
""",
"""
@public
def foo()->bool[2]:
a: decimal[2]
a[0] = 1
return a
""",
"""
@public
def foo()->bool[2]:
a: bool[1000]
a[0] = 1
return a
""",
"""
@public
def test() -> num:
a = [1, 2, 3.0]
return a[0]
""",
"""
@public
def test() -> num:
a = [1, 2, true]
return a[0]
"""
]
@pytest.mark.parametrize('bad_code', fail_list)
def test_block_fail(bad_code):
if isinstance(bad_code, tuple):
with raises(bad_code[1]):
compiler.compile(bad_code[0])
else:
with raises(TypeMismatchException):
compiler.compile(bad_code)
valid_list = [
"""
@public
def foo():
x: num[3] = [1, 2, 3]
x = [4, 5, 6]
""",
"""
@public
def foo() -> num[2][2]:
return [[1,2],[3,4]]
""",
"""
@public
def foo() -> decimal[2][2]:
return [[1,2],[3,4]]
""",
"""
@public
def foo() -> decimal[2][2]:
return [[1.0, 2.0], [3.5, 4.0]]
""",
"""
@public
def foo(x: num[3]) -> num:
return x[0]
""",
"""
y: num[3]
@public
def foo(x: num[3]):
self.y = x
""",
"""
y: decimal[3]
@public
def foo(x: num[3]):
self.y = x
""",
"""
y: decimal[2][2]
@public
def foo(x: num[2][2]):
self.y = x
""",
"""
y: decimal[2]
@public
def foo(x: num[2][2]):
self.y = x[1]
""",
"""
@public
def foo() -> num[2]:
return [3,5]
""",
"""
bar: decimal[3]
@public
def foo():
self.bar = [1.0, 2.1, 3.0]
""",
"""
x: num[1][2][3][4][5]
""",
"""
bar: num[3]
@public
def foo():
self.bar = [1, 2, 3]
""",
"""
b: num[5]
@public
def foo():
a: num[5]
self.b[0] = a[0]
""",
"""
b: decimal[5]
@public
def foo():
self.b[0] = 7
"""
]
@pytest.mark.parametrize('good_code', valid_list)
def test_list_success(good_code):
assert compiler.compile(good_code) is not None
| 13.447154
| 70
| 0.457981
|
b73e87fa77f0eac281ca110683c746fa70efc4d1
| 1,059
|
py
|
Python
|
importers/pnc.py
|
dumbbillyhardy/beancount-importers
|
d1d250cfa8484ed42416eaef10e13f0510f035c7
|
[
"MIT"
] | 1
|
2020-08-05T22:53:58.000Z
|
2020-08-05T22:53:58.000Z
|
importers/pnc.py
|
dumbbillyhardy/beancount-importers
|
d1d250cfa8484ed42416eaef10e13f0510f035c7
|
[
"MIT"
] | null | null | null |
importers/pnc.py
|
dumbbillyhardy/beancount-importers
|
d1d250cfa8484ed42416eaef10e13f0510f035c7
|
[
"MIT"
] | 1
|
2020-08-05T22:04:58.000Z
|
2020-08-05T22:04:58.000Z
|
from .baseAccount import BaseAccount
from dateutil.parser import parse
from titlecase import titlecase
class CashBuilderImporter(BaseAccount):
def getDate(self, row):
return parse(row['Date']).date()
def getDesc(self, row):
return titlecase(row['Description'])
def getAmt(self, row):
return '-'+row['Amount'][2:] if row['Amount'][0] == '-' else row['Amount'][1:]
def isPayment(self, row):
return row['Description'].find("ONLINE CREDIT CARD PMT") != -1
class VirtualWalletImporter(BaseAccount):
def skip(self, row):
if 'Skip' in row and row['Skip'] == 'true':
return True
pncCat = row['Category']
return pncCat == "Paychecks" or pncCat == "Credit Card Payments"
def getDate(self, row):
return parse(row['Date']).date()
def getDesc(self, row):
return titlecase(row['Description'])
def getAmt(self, row):
withdrawal = row['Withdrawals'][1:]
deposit = row['Deposits'][1:]
return '-'+deposit if deposit else withdrawal
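# Behaviour sketch for the amount parsing above, using made-up CSV rows
# (the column names match the code; the dollar values are examples):
#
#   CashBuilderImporter.getAmt   {'Amount': '-$12.34'}                      -> '-12.34'
#   CashBuilderImporter.getAmt   {'Amount': '$56.00'}                       -> '56.00'
#   VirtualWalletImporter.getAmt {'Withdrawals': '$10.00', 'Deposits': ''}  -> '10.00'
#   VirtualWalletImporter.getAmt {'Withdrawals': '', 'Deposits': '$25.00'}  -> '-25.00'
#
# i.e. the leading currency symbol is stripped and deposits come back negated.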
| 33.09375
| 86
| 0.624174
|
1caae31a7efdcdfb0dab427173604d72c98a998a
| 584
|
py
|
Python
|
src/manipulation_dreambed/PySMACROSInterface.py
|
kth-ros-pkg/manipulation_dreambed
|
0551762dba4d1b142053785692967548d06a9582
|
[
"BSD-3-Clause"
] | null | null | null |
src/manipulation_dreambed/PySMACROSInterface.py
|
kth-ros-pkg/manipulation_dreambed
|
0551762dba4d1b142053785692967548d06a9582
|
[
"BSD-3-Clause"
] | null | null | null |
src/manipulation_dreambed/PySMACROSInterface.py
|
kth-ros-pkg/manipulation_dreambed
|
0551762dba4d1b142053785692967548d06a9582
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
"""
This file is part of the RobDREAM (http://robdream.eu/) deliverable D4.2.
@author: Silvia Cruciani (cruciani@kth.se)
"""
import rospy
import yaml
from manipulation_dreambed.srv import DreamBedEvaluation
def func(**kwargs):
rospy.wait_for_service('dream_bed_evaluation')
try:
dream_bed_evaluation = rospy.ServiceProxy('dream_bed_evaluation', DreamBedEvaluation)
resp1 = dream_bed_evaluation(yaml.dump(kwargs))
return resp1.value
except rospy.ServiceException, e:
print "Service call failed: %s" % repr(e)
| 30.736842
| 93
| 0.717466
|
99be556e67d5d6c9b6324ba18b2d0ee395a04e83
| 605
|
py
|
Python
|
examples/7_tessy_as_script.py
|
K4rian/tessy
|
c7e63818238f03a10efa24ebeb1cb0c032598a63
|
[
"MIT"
] | null | null | null |
examples/7_tessy_as_script.py
|
K4rian/tessy
|
c7e63818238f03a10efa24ebeb1cb0c032598a63
|
[
"MIT"
] | null | null | null |
examples/7_tessy_as_script.py
|
K4rian/tessy
|
c7e63818238f03a10efa24ebeb1cb0c032598a63
|
[
"MIT"
] | null | null | null |
import sys
import tessy
"""
7. Tessy as an invokable script
This example demonstrates how to write a simple invokable script using tessy and
the "image_to_string" function.
"""
if __name__ == "__main__":
lang = None
image_file = None
if len(sys.argv) == 2:
image_file = sys.argv[1]
elif len(sys.argv) == 4 and sys.argv[1] == "-l":
lang = sys.argv[2]
image_file = sys.argv[3]
else:
sys.stderr.write("Usage: python 7_tessy_as_script.py [-l lang] image_file")
exit(0)
tessy.init()
print(tessy.image_to_string(image_file, lang=lang))
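# Example invocations (sketch; the file name and the "eng" language code are
# assumptions -- any image and any installed Tesseract language pack work):
#
#   python 7_tessy_as_script.py receipt.png
#   python 7_tessy_as_script.py -l eng receipt.png
#
# The recognized text is written to stdout by the final print() call above.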
| 21.607143
| 83
| 0.638017
|
18ed0c84f2ef518167b3b2f5462b250989838ac1
| 4,988
|
py
|
Python
|
setup.py
|
kamakiri01/qulacs
|
1e3e6ac26390abdfe5abe7f4d52349bcfd68e20c
|
[
"MIT"
] | 260
|
2018-10-13T15:58:26.000Z
|
2022-03-17T11:03:58.000Z
|
setup.py
|
kamakiri01/qulacs
|
1e3e6ac26390abdfe5abe7f4d52349bcfd68e20c
|
[
"MIT"
] | 182
|
2018-10-14T02:29:27.000Z
|
2022-03-06T20:23:18.000Z
|
setup.py
|
kamakiri01/qulacs
|
1e3e6ac26390abdfe5abe7f4d52349bcfd68e20c
|
[
"MIT"
] | 88
|
2018-10-10T03:46:29.000Z
|
2022-02-27T21:56:05.000Z
|
import os
import re
import sys
import platform
import subprocess
from setuptools import setup, find_packages, Extension
from setuptools.command.build_ext import build_ext
from distutils.version import LooseVersion
_VERSION = '0.3.0'
project_name = 'Qulacs'
def _is_valid_compiler(cmd):
try:
out = subprocess.check_output([cmd, '-dumpfullversion', '-dumpversion']).decode()
version = LooseVersion(out)
return version >= LooseVersion('7.0.0')
except:
return False
class CMakeExtension(Extension):
def __init__(self, name, sourcedir=''):
Extension.__init__(self, name, sources=[])
self.sourcedir = os.path.abspath(sourcedir)
class CMakeBuild(build_ext):
user_options = build_ext.user_options + [
('opt-flags=', 'o', 'optimization flags for compiler')
]
def initialize_options(self):
build_ext.initialize_options(self)
self.opt_flags = None
def finalize_options(self):
build_ext.finalize_options(self)
def run(self):
try:
out = subprocess.check_output(['cmake', '--version'])
except OSError:
raise RuntimeError("CMake must be installed to build the following extensions: " +
", ".join(e.name for e in self.extensions))
cmake_version = LooseVersion(re.search(r'version\s*([\d.]+)', out.decode()).group(1))
for ext in self.extensions:
self.build_extension(ext)
def build_extension(self, ext):
extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
'-DCMAKE_RUNTIME_OUTPUT_DIRECTORY=' + extdir,
'-DPYTHON_EXECUTABLE=' + sys.executable,
'-DPYTHON_SETUP_FLAG:STR=Yes']
cfg = 'Debug' if self.debug else 'Release'
build_args = ['--config', cfg]
if platform.system() == "Windows":
cmake_args += ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]
cmake_args += ['-DCMAKE_RUNTIME_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]
if sys.maxsize > 2**32:
cmake_args += ['-A', 'x64']
build_args += ['--', '/m']
else:
env_gcc = os.getenv('C_COMPILER')
if env_gcc:
gcc_candidates = [env_gcc]
else:
gcc_candidates = ['gcc', 'gcc-9', 'gcc-8', 'gcc-7']
gcc = next(iter(filter(_is_valid_compiler, gcc_candidates)), None)
env_gxx = os.getenv('CXX_COMPILER')
if env_gxx:
gxx_candidates = [env_gxx]
else:
gxx_candidates = ['g++', 'g++-9', 'g++-8', 'g++-7']
gxx = next(iter(filter(_is_valid_compiler, gxx_candidates)), None)
if gcc is None or gxx is None:
raise RuntimeError("gcc/g++ >= 7.0.0 must be installed to build the following extensions: " +
", ".join(e.name for e in self.extensions))
cmake_args += ['-DCMAKE_C_COMPILER=' + gcc]
cmake_args += ['-DCMAKE_CXX_COMPILER=' + gxx]
cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]
build_args += ['--', '-j2']
if self.opt_flags is not None:
opt_flags = self.opt_flags
elif os.getenv('QULACS_OPT_FLAGS'):
opt_flags = os.getenv('QULACS_OPT_FLAGS')
else:
opt_flags = None
if opt_flags:
cmake_args += ['-DOPT_FLAGS=' + opt_flags]
env = os.environ.copy()
env['CXXFLAGS'] = '{} -DVERSION_INFO=\\"{}\\"'.format(env.get('CXXFLAGS', ''),
self.distribution.get_version())
if not os.path.exists(self.build_temp):
os.makedirs(self.build_temp)
subprocess.check_call(['cmake', ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env)
subprocess.check_call(['cmake', '--build', '.', '--target', 'python'] + build_args, cwd=self.build_temp)
setup(
name=project_name,
version=_VERSION,
author='QunaSys',
author_email='qulacs@qunasys.com',
url='http://www.qulacs.org',
description='Quantum circuit simulator for research',
long_description='',
packages=find_packages(exclude=['test*']),
include_package_data=True,
ext_modules=[CMakeExtension('qulacs')],
cmdclass=dict(build_ext=CMakeBuild),
zip_safe=False,
test_suite = 'test',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Communications :: Email',
],
)
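# Build-time usage sketch (the flag values are examples only): the custom
# --opt-flags option declared on CMakeBuild, or the QULACS_OPT_FLAGS
# environment variable, ends up forwarded to CMake as -DOPT_FLAGS=...
#
#   python setup.py build_ext --opt-flags="-O2 -mtune=native"
#   QULACS_OPT_FLAGS="-O2 -mtune=native" pip install .
#
# An explicit --opt-flags takes precedence over the environment variable,
# as implemented in build_extension() above.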
| 36.676471
| 112
| 0.584603
|
93095c13380d9e4a486d692c3e5463f04d54a037
| 1,076
|
py
|
Python
|
SpiMediaGallery/main/importers/project_application_importer.py
|
Swiss-Polar-Institute/spi-media-gallery
|
2f66f938cbe1a7a25a5971d42abb1b0b5deca31e
|
[
"MIT"
] | 5
|
2020-02-21T20:38:50.000Z
|
2022-02-19T11:00:46.000Z
|
SpiMediaGallery/main/importers/project_application_importer.py
|
Swiss-Polar-Institute/spi-media-gallery
|
2f66f938cbe1a7a25a5971d42abb1b0b5deca31e
|
[
"MIT"
] | 23
|
2019-10-01T17:13:39.000Z
|
2022-01-21T20:02:26.000Z
|
SpiMediaGallery/main/importers/project_application_importer.py
|
Swiss-Polar-Institute/spi-media-gallery
|
2f66f938cbe1a7a25a5971d42abb1b0b5deca31e
|
[
"MIT"
] | 2
|
2022-02-03T08:52:51.000Z
|
2022-02-03T08:58:00.000Z
|
from django.core.cache import cache
from .project_application_api_client import ProjectApplicationApiClient
from ..management.commands.generate_virtual_tags import GenerateTags
from ..management.commands.resize_media import Resizer
def import_resize_update_tags_from_project_application():
# Creates the new Medium objects
media_importer = ProjectApplicationApiClient()
imported_media = media_importer.import_new_media()
tags_deleted_media = media_importer.delete_deleted_media()
# Resizes the photos
bucket_name_resized = 'processed'
sizes_type = ['T', 'S', 'L']
media_type = 'Photos'
resizer = Resizer(bucket_name_resized, sizes_type, media_type)
resizer.resize_media()
# Resizes the videos
sizes_type = ['S', 'L']
media_type = 'Videos'
resizer = Resizer(bucket_name_resized, sizes_type, media_type)
resizer.resize_media()
# Tags cleanup
generator = GenerateTags()
generator.delete_tags_if_orphaned(tags_deleted_media)
# Clears the cache: so the homepage tags are updated
cache.clear()
| 31.647059
| 71
| 0.757435
|
a3aef05533ea82fbc26faf81c1c2d882740c3f89
| 181
|
py
|
Python
|
appmon/forms.py
|
jboegeholz/flask_appmon
|
6a3ae2c7c889f8212293ef9e1cfc5c9e9efe6cf6
|
[
"MIT"
] | null | null | null |
appmon/forms.py
|
jboegeholz/flask_appmon
|
6a3ae2c7c889f8212293ef9e1cfc5c9e9efe6cf6
|
[
"MIT"
] | 1
|
2016-10-02T20:16:21.000Z
|
2016-10-02T20:16:21.000Z
|
appmon/forms.py
|
jboegeholz/flask_appmon
|
6a3ae2c7c889f8212293ef9e1cfc5c9e9efe6cf6
|
[
"MIT"
] | null | null | null |
from wtforms import Form, StringField
class ApplicationForm(Form):
app_name = StringField("Application Name")
client = StringField("Client")
port = StringField("Port")
| 25.857143
| 46
| 0.729282
|
598b957e807ca223c54b16b5e82a5f66f9e9f541
| 12,898
|
py
|
Python
|
google/cloud/dataflow/__init__.py
|
LaudateCorpus1/python-dataflow-client
|
888664b70baefe7acb7e46c82a06c24e7ba06af2
|
[
"Apache-2.0"
] | 7
|
2021-11-04T03:22:01.000Z
|
2022-02-05T14:28:50.000Z
|
google/cloud/dataflow/__init__.py
|
LaudateCorpus1/python-dataflow-client
|
888664b70baefe7acb7e46c82a06c24e7ba06af2
|
[
"Apache-2.0"
] | 31
|
2021-06-26T01:14:50.000Z
|
2022-03-17T03:01:49.000Z
|
google/cloud/dataflow/__init__.py
|
LaudateCorpus1/python-dataflow-client
|
888664b70baefe7acb7e46c82a06c24e7ba06af2
|
[
"Apache-2.0"
] | 4
|
2021-08-31T11:41:16.000Z
|
2022-01-29T08:09:21.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.cloud.dataflow_v1beta3.services.flex_templates_service.client import (
FlexTemplatesServiceClient,
)
from google.cloud.dataflow_v1beta3.services.flex_templates_service.async_client import (
FlexTemplatesServiceAsyncClient,
)
from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.client import (
JobsV1Beta3Client,
)
from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.async_client import (
JobsV1Beta3AsyncClient,
)
from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.client import (
MessagesV1Beta3Client,
)
from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.async_client import (
MessagesV1Beta3AsyncClient,
)
from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.client import (
MetricsV1Beta3Client,
)
from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.async_client import (
MetricsV1Beta3AsyncClient,
)
from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.client import (
SnapshotsV1Beta3Client,
)
from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.async_client import (
SnapshotsV1Beta3AsyncClient,
)
from google.cloud.dataflow_v1beta3.services.templates_service.client import (
TemplatesServiceClient,
)
from google.cloud.dataflow_v1beta3.services.templates_service.async_client import (
TemplatesServiceAsyncClient,
)
from google.cloud.dataflow_v1beta3.types.environment import AutoscalingSettings
from google.cloud.dataflow_v1beta3.types.environment import DebugOptions
from google.cloud.dataflow_v1beta3.types.environment import Disk
from google.cloud.dataflow_v1beta3.types.environment import Environment
from google.cloud.dataflow_v1beta3.types.environment import Package
from google.cloud.dataflow_v1beta3.types.environment import SdkHarnessContainerImage
from google.cloud.dataflow_v1beta3.types.environment import TaskRunnerSettings
from google.cloud.dataflow_v1beta3.types.environment import WorkerPool
from google.cloud.dataflow_v1beta3.types.environment import WorkerSettings
from google.cloud.dataflow_v1beta3.types.environment import AutoscalingAlgorithm
from google.cloud.dataflow_v1beta3.types.environment import DefaultPackageSet
from google.cloud.dataflow_v1beta3.types.environment import FlexResourceSchedulingGoal
from google.cloud.dataflow_v1beta3.types.environment import JobType
from google.cloud.dataflow_v1beta3.types.environment import ShuffleMode
from google.cloud.dataflow_v1beta3.types.environment import TeardownPolicy
from google.cloud.dataflow_v1beta3.types.environment import WorkerIPAddressConfiguration
from google.cloud.dataflow_v1beta3.types.jobs import BigQueryIODetails
from google.cloud.dataflow_v1beta3.types.jobs import BigTableIODetails
from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsRequest
from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsResponse
from google.cloud.dataflow_v1beta3.types.jobs import CreateJobRequest
from google.cloud.dataflow_v1beta3.types.jobs import DatastoreIODetails
from google.cloud.dataflow_v1beta3.types.jobs import DisplayData
from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageState
from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageSummary
from google.cloud.dataflow_v1beta3.types.jobs import FailedLocation
from google.cloud.dataflow_v1beta3.types.jobs import FileIODetails
from google.cloud.dataflow_v1beta3.types.jobs import GetJobRequest
from google.cloud.dataflow_v1beta3.types.jobs import Job
from google.cloud.dataflow_v1beta3.types.jobs import JobExecutionInfo
from google.cloud.dataflow_v1beta3.types.jobs import JobExecutionStageInfo
from google.cloud.dataflow_v1beta3.types.jobs import JobMetadata
from google.cloud.dataflow_v1beta3.types.jobs import ListJobsRequest
from google.cloud.dataflow_v1beta3.types.jobs import ListJobsResponse
from google.cloud.dataflow_v1beta3.types.jobs import PipelineDescription
from google.cloud.dataflow_v1beta3.types.jobs import PubSubIODetails
from google.cloud.dataflow_v1beta3.types.jobs import SdkVersion
from google.cloud.dataflow_v1beta3.types.jobs import SnapshotJobRequest
from google.cloud.dataflow_v1beta3.types.jobs import SpannerIODetails
from google.cloud.dataflow_v1beta3.types.jobs import Step
from google.cloud.dataflow_v1beta3.types.jobs import TransformSummary
from google.cloud.dataflow_v1beta3.types.jobs import UpdateJobRequest
from google.cloud.dataflow_v1beta3.types.jobs import JobState
from google.cloud.dataflow_v1beta3.types.jobs import JobView
from google.cloud.dataflow_v1beta3.types.jobs import KindType
from google.cloud.dataflow_v1beta3.types.messages import AutoscalingEvent
from google.cloud.dataflow_v1beta3.types.messages import JobMessage
from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesRequest
from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesResponse
from google.cloud.dataflow_v1beta3.types.messages import StructuredMessage
from google.cloud.dataflow_v1beta3.types.messages import JobMessageImportance
from google.cloud.dataflow_v1beta3.types.metrics import GetJobExecutionDetailsRequest
from google.cloud.dataflow_v1beta3.types.metrics import GetJobMetricsRequest
from google.cloud.dataflow_v1beta3.types.metrics import GetStageExecutionDetailsRequest
from google.cloud.dataflow_v1beta3.types.metrics import JobExecutionDetails
from google.cloud.dataflow_v1beta3.types.metrics import JobMetrics
from google.cloud.dataflow_v1beta3.types.metrics import MetricStructuredName
from google.cloud.dataflow_v1beta3.types.metrics import MetricUpdate
from google.cloud.dataflow_v1beta3.types.metrics import ProgressTimeseries
from google.cloud.dataflow_v1beta3.types.metrics import StageExecutionDetails
from google.cloud.dataflow_v1beta3.types.metrics import StageSummary
from google.cloud.dataflow_v1beta3.types.metrics import WorkerDetails
from google.cloud.dataflow_v1beta3.types.metrics import WorkItemDetails
from google.cloud.dataflow_v1beta3.types.metrics import ExecutionState
from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotRequest
from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotResponse
from google.cloud.dataflow_v1beta3.types.snapshots import GetSnapshotRequest
from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsRequest
from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsResponse
from google.cloud.dataflow_v1beta3.types.snapshots import PubsubSnapshotMetadata
from google.cloud.dataflow_v1beta3.types.snapshots import Snapshot
from google.cloud.dataflow_v1beta3.types.snapshots import SnapshotState
from google.cloud.dataflow_v1beta3.types.streaming import ComputationTopology
from google.cloud.dataflow_v1beta3.types.streaming import CustomSourceLocation
from google.cloud.dataflow_v1beta3.types.streaming import DataDiskAssignment
from google.cloud.dataflow_v1beta3.types.streaming import KeyRangeDataDiskAssignment
from google.cloud.dataflow_v1beta3.types.streaming import KeyRangeLocation
from google.cloud.dataflow_v1beta3.types.streaming import MountedDataDisk
from google.cloud.dataflow_v1beta3.types.streaming import PubsubLocation
from google.cloud.dataflow_v1beta3.types.streaming import StateFamilyConfig
from google.cloud.dataflow_v1beta3.types.streaming import (
StreamingApplianceSnapshotConfig,
)
from google.cloud.dataflow_v1beta3.types.streaming import StreamingComputationRanges
from google.cloud.dataflow_v1beta3.types.streaming import StreamingSideInputLocation
from google.cloud.dataflow_v1beta3.types.streaming import StreamingStageLocation
from google.cloud.dataflow_v1beta3.types.streaming import StreamLocation
from google.cloud.dataflow_v1beta3.types.streaming import TopologyConfig
from google.cloud.dataflow_v1beta3.types.templates import ContainerSpec
from google.cloud.dataflow_v1beta3.types.templates import CreateJobFromTemplateRequest
from google.cloud.dataflow_v1beta3.types.templates import DynamicTemplateLaunchParams
from google.cloud.dataflow_v1beta3.types.templates import FlexTemplateRuntimeEnvironment
from google.cloud.dataflow_v1beta3.types.templates import GetTemplateRequest
from google.cloud.dataflow_v1beta3.types.templates import GetTemplateResponse
from google.cloud.dataflow_v1beta3.types.templates import InvalidTemplateParameters
from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateParameter
from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateRequest
from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateResponse
from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateParameters
from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateRequest
from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateResponse
from google.cloud.dataflow_v1beta3.types.templates import ParameterMetadata
from google.cloud.dataflow_v1beta3.types.templates import RuntimeEnvironment
from google.cloud.dataflow_v1beta3.types.templates import RuntimeMetadata
from google.cloud.dataflow_v1beta3.types.templates import SDKInfo
from google.cloud.dataflow_v1beta3.types.templates import TemplateMetadata
from google.cloud.dataflow_v1beta3.types.templates import ParameterType
__all__ = (
"FlexTemplatesServiceClient",
"FlexTemplatesServiceAsyncClient",
"JobsV1Beta3Client",
"JobsV1Beta3AsyncClient",
"MessagesV1Beta3Client",
"MessagesV1Beta3AsyncClient",
"MetricsV1Beta3Client",
"MetricsV1Beta3AsyncClient",
"SnapshotsV1Beta3Client",
"SnapshotsV1Beta3AsyncClient",
"TemplatesServiceClient",
"TemplatesServiceAsyncClient",
"AutoscalingSettings",
"DebugOptions",
"Disk",
"Environment",
"Package",
"SdkHarnessContainerImage",
"TaskRunnerSettings",
"WorkerPool",
"WorkerSettings",
"AutoscalingAlgorithm",
"DefaultPackageSet",
"FlexResourceSchedulingGoal",
"JobType",
"ShuffleMode",
"TeardownPolicy",
"WorkerIPAddressConfiguration",
"BigQueryIODetails",
"BigTableIODetails",
"CheckActiveJobsRequest",
"CheckActiveJobsResponse",
"CreateJobRequest",
"DatastoreIODetails",
"DisplayData",
"ExecutionStageState",
"ExecutionStageSummary",
"FailedLocation",
"FileIODetails",
"GetJobRequest",
"Job",
"JobExecutionInfo",
"JobExecutionStageInfo",
"JobMetadata",
"ListJobsRequest",
"ListJobsResponse",
"PipelineDescription",
"PubSubIODetails",
"SdkVersion",
"SnapshotJobRequest",
"SpannerIODetails",
"Step",
"TransformSummary",
"UpdateJobRequest",
"JobState",
"JobView",
"KindType",
"AutoscalingEvent",
"JobMessage",
"ListJobMessagesRequest",
"ListJobMessagesResponse",
"StructuredMessage",
"JobMessageImportance",
"GetJobExecutionDetailsRequest",
"GetJobMetricsRequest",
"GetStageExecutionDetailsRequest",
"JobExecutionDetails",
"JobMetrics",
"MetricStructuredName",
"MetricUpdate",
"ProgressTimeseries",
"StageExecutionDetails",
"StageSummary",
"WorkerDetails",
"WorkItemDetails",
"ExecutionState",
"DeleteSnapshotRequest",
"DeleteSnapshotResponse",
"GetSnapshotRequest",
"ListSnapshotsRequest",
"ListSnapshotsResponse",
"PubsubSnapshotMetadata",
"Snapshot",
"SnapshotState",
"ComputationTopology",
"CustomSourceLocation",
"DataDiskAssignment",
"KeyRangeDataDiskAssignment",
"KeyRangeLocation",
"MountedDataDisk",
"PubsubLocation",
"StateFamilyConfig",
"StreamingApplianceSnapshotConfig",
"StreamingComputationRanges",
"StreamingSideInputLocation",
"StreamingStageLocation",
"StreamLocation",
"TopologyConfig",
"ContainerSpec",
"CreateJobFromTemplateRequest",
"DynamicTemplateLaunchParams",
"FlexTemplateRuntimeEnvironment",
"GetTemplateRequest",
"GetTemplateResponse",
"InvalidTemplateParameters",
"LaunchFlexTemplateParameter",
"LaunchFlexTemplateRequest",
"LaunchFlexTemplateResponse",
"LaunchTemplateParameters",
"LaunchTemplateRequest",
"LaunchTemplateResponse",
"ParameterMetadata",
"RuntimeEnvironment",
"RuntimeMetadata",
"SDKInfo",
"TemplateMetadata",
"ParameterType",
)
| 45.900356
| 88
| 0.8323
|
b3fcab36690d67eed49f78b55179704187a10b42
| 3,316
|
py
|
Python
|
golden_ratio.py
|
HLee2020/Phys250_Final_Project
|
f09eeb66668db9cf407b780901ad995e1567a75a
|
[
"MIT"
] | null | null | null |
golden_ratio.py
|
HLee2020/Phys250_Final_Project
|
f09eeb66668db9cf407b780901ad995e1567a75a
|
[
"MIT"
] | null | null | null |
golden_ratio.py
|
HLee2020/Phys250_Final_Project
|
f09eeb66668db9cf407b780901ad995e1567a75a
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from numpy import random as rng
from ant_walks import *
from ant_measurements import *
def golden_ratio(lattice, x_init, y_init, steps, state, orient):
x = x_init
y = y_init
x_list = [x]
y_list = [y]
lattice_y = len(lattice)
    lattice_x = len(lattice[0])
edge = True
count = 0
while count <= steps and edge == True:
if state == 0:
if lattice[y][x] == -1:
lattice[y][x] *= -1
if orient == 0:
orient = 3
else:
orient -= 1
edge = edge_check(lattice, x, y, orient)
x, y = orientated_step(orient, x, y)
x_list.append(x)
y_list.append(y)
state = 1
else:
if orient == 0:
orient = 3
else:
orient -= 1
edge = edge_check(lattice, x, y, orient)
x, y = orientated_step(orient, x, y)
x_list.append(x)
y_list.append(y)
state = 1
else:
if lattice[y][x] == -1:
lattice[y][x] *= -1
if orient == 3:
orient = 0
else:
orient += 1
edge = edge_check(lattice, x, y, orient)
x, y = orientated_step(orient, x, y)
x_list.append(x)
y_list.append(y)
state = 1
else:
lattice[y][x] *= -1
edge = edge_check(lattice, x, y, orient)
x, y = orientated_step(orient, x, y)
x_list.append(x)
y_list.append(y)
state = 0
# plt.imshow(lattice)
# plt.pause(0.00001)
# plt.clf()
count += 1
return (lattice, x, y, count, state, x_list, y_list)
def plotlattice_golden(lattice, x, y, count, state):
plt.imshow(lattice, origin="lower", cmap='Blues', vmin=-1, vmax=1)
plt.xlabel("X Coordinate")
plt.ylabel("Y Coordinate")
plt.title("Golden Ratio Variation of Langton's Ant, Iteration Number: {}".format(count))
plt.colorbar()
plt.savefig("ant_walk_golden_"+str(count)+".svg")
plt.show()
lattice = normal_lattice(90, 90, -1)
lattice, x, y, count, state, x_list, y_list = golden_ratio(lattice, 40, 40, 5000, 0, 0)
plotlattice_golden(lattice, x, y, count, state)
#
# lattice = normal_lattice(350, 350, -1)
# lattice, x, y, count, state, x_list, y_list = golden_ratio(lattice, 150, 150, 4000, 0, 0)
# plotlattice_golden(lattice, x, y, count, state)
# lattice, x, y, count, state, x_list, y_list = spiral(lattice, 25, 25, 1000, 0, 0)
# plotlattice_spiral(lattice, x, y, count, state)
# lattice, x, y, count, state, x_list, y_list = spiral(lattice, 101, 101, 2000, 0, 0)
# plotlattice_spiral(lattice, x, y, count, state)
# distance_list = [distance(150, 150, i, j) for i,j in zip(x_list, y_list)]
# plt.plot(range(len(distance_list)), distance_list)
# plt.title("Distance to Number of Steps, Golden Ratio Pattern with "+str(count)+" Steps")
# plt.ylabel("Distance")
# plt.xlabel("Number of Steps")
# plt.savefig("golden_ratio_distance_"+str(count)+".svg")
# plt.show()
| 35.276596
| 92
| 0.535887
|
07495119d70118d8a2b0b7cfe7a99b7e37690c1b
| 3,714
|
py
|
Python
|
sahara/service/edp/oozie.py
|
hortonworksqe/sahara
|
b8edeaf2b6a475728bf9fd2ddc3a860dc6c23270
|
[
"Apache-2.0"
] | 1
|
2016-04-13T17:07:05.000Z
|
2016-04-13T17:07:05.000Z
|
sahara/service/edp/oozie.py
|
hortonworksqe/sahara
|
b8edeaf2b6a475728bf9fd2ddc3a860dc6c23270
|
[
"Apache-2.0"
] | null | null | null |
sahara/service/edp/oozie.py
|
hortonworksqe/sahara
|
b8edeaf2b6a475728bf9fd2ddc3a860dc6c23270
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import re
from six.moves.urllib import parse as urlparse
import sahara.exceptions as ex
class OozieClient(object):
def __init__(self, url, oozie_server):
self.job_url = url + "/v1/job/%s"
self.jobs_url = url + "/v1/jobs"
self.oozie_server = oozie_server
self.port = urlparse.urlparse(url).port
def _get_http_session(self, info=None):
return self.oozie_server.remote().get_http_client(self.port, info=info)
def add_job(self, job_config, job_execution):
session = self._get_http_session(job_execution.extra.get('neutron'))
resp = session.post(self.jobs_url, data=job_config, headers={
"Content-Type": "application/xml;charset=UTF-8"
})
_check_status_code(resp, 201)
return get_json(resp)['id']
def run_job(self, job_execution, job_id):
session = self._get_http_session(job_execution.extra.get('neutron'))
resp = session.put(self.job_url % job_id + "?action=start")
_check_status_code(resp, 200)
def kill_job(self, job_execution):
session = self._get_http_session(job_execution.extra.get('neutron'))
resp = session.put(self.job_url % job_execution.oozie_job_id +
"?action=kill")
_check_status_code(resp, 200)
def get_job_status(self, job_execution):
session = self._get_http_session(job_execution.extra.get('neutron'))
resp = session.get(self.job_url % job_execution.oozie_job_id +
"?show=info")
_check_status_code(resp, 200)
return get_json(resp)
def get_job_logs(self, job_execution):
session = self._get_http_session(job_execution.extra.get('neutron'))
resp = session.get(self.job_url % job_execution.oozie_job_id +
"?show=log")
_check_status_code(resp, 200)
return resp.text
def get_jobs(self, offset, size, **filter):
url = self.jobs_url + "?offset=%s&len=%s" % (offset, size)
if len(filter) > 0:
f = ";".join([k + "=" + v for k, v in filter.items()])
url += "&filter=" + urlparse.quote(f)
session = self._get_http_session()
resp = session.get(url)
_check_status_code(resp, 200)
return get_json(resp)
def _check_status_code(resp, expected_code):
if resp.status_code != expected_code:
resp_text = resp.text
# cleaning tomcat error message
message = resp_text.split("<HR size=\"1\" noshade=\"noshade\">")[1]
message = message.replace("</p><p>", "\n")
message = re.sub('<[^<]+?>', ' ', message)
raise OozieException(message)
def get_json(response):
"""Provides backward compatibility for old versions of requests library."""
json_field_or_function = getattr(response, 'json', None)
if callable(json_field_or_function):
return response.json()
else:
return json.loads(response.content)
class OozieException(ex.SaharaException):
def __init__(self, message):
self.message = message
self.code = "OOZIE_EXCEPTION"
| 36.058252
| 79
| 0.655358
|
4706269e1cfa50e3bfcc7d8304e8cc1aa022e702
| 3,137
|
py
|
Python
|
homeassistant/components/evohome/water_heater.py
|
deporpeps/home-assistant
|
797196dce97d1418e1217fbe645e7da1c94ce6c5
|
[
"Apache-2.0"
] | 1
|
2022-01-15T18:27:59.000Z
|
2022-01-15T18:27:59.000Z
|
homeassistant/components/evohome/water_heater.py
|
deporpeps/home-assistant
|
797196dce97d1418e1217fbe645e7da1c94ce6c5
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/evohome/water_heater.py
|
deporpeps/home-assistant
|
797196dce97d1418e1217fbe645e7da1c94ce6c5
|
[
"Apache-2.0"
] | null | null | null |
"""Support for WaterHeater devices of (EMEA/EU) Honeywell TCC systems."""
import logging
from typing import List
import requests.exceptions
import evohomeclient2
from homeassistant.components.water_heater import (
SUPPORT_OPERATION_MODE, WaterHeaterDevice)
from homeassistant.const import PRECISION_WHOLE, STATE_OFF, STATE_ON
from homeassistant.util.dt import parse_datetime
from . import _handle_exception, EvoDevice
from .const import DOMAIN, EVO_STRFTIME, EVO_FOLLOW, EVO_TEMPOVER, EVO_PERMOVER
_LOGGER = logging.getLogger(__name__)
HA_STATE_TO_EVO = {STATE_ON: 'On', STATE_OFF: 'Off'}
EVO_STATE_TO_HA = {v: k for k, v in HA_STATE_TO_EVO.items()}
HA_OPMODE_TO_DHW = {STATE_ON: EVO_FOLLOW, STATE_OFF: EVO_PERMOVER}
def setup_platform(hass, hass_config, add_entities,
discovery_info=None) -> None:
"""Create the DHW controller."""
broker = hass.data[DOMAIN]['broker']
_LOGGER.debug(
"Found %s, id: %s",
broker.tcs.hotwater.zone_type, broker.tcs.hotwater.zoneId)
evo_dhw = EvoDHW(broker, broker.tcs.hotwater)
add_entities([evo_dhw], update_before_add=True)
class EvoDHW(EvoDevice, WaterHeaterDevice):
"""Base for a Honeywell evohome DHW controller (aka boiler)."""
def __init__(self, evo_broker, evo_device) -> None:
"""Initialize the evohome DHW controller."""
super().__init__(evo_broker, evo_device)
self._name = 'DHW controller'
self._icon = 'mdi:thermometer-lines'
self._precision = PRECISION_WHOLE
self._state_attributes = [
'dhwId', 'activeFaults', 'stateStatus', 'temperatureStatus',
'setpoints']
self._supported_features = SUPPORT_OPERATION_MODE
self._operation_list = list(HA_OPMODE_TO_DHW)
@property
def current_operation(self) -> str:
"""Return the current operating mode (On, or Off)."""
return EVO_STATE_TO_HA[self._evo_device.stateStatus['state']]
@property
def operation_list(self) -> List[str]:
"""Return the list of available operations."""
return self._operation_list
@property
def current_temperature(self) -> float:
"""Return the current temperature."""
return self._evo_device.temperatureStatus['temperature']
def set_operation_mode(self, operation_mode: str) -> None:
"""Set new operation mode for a DHW controller."""
op_mode = HA_OPMODE_TO_DHW[operation_mode]
state = '' if op_mode == EVO_FOLLOW else HA_STATE_TO_EVO[STATE_OFF]
until = None # EVO_FOLLOW, EVO_PERMOVER
if op_mode == EVO_TEMPOVER:
self._setpoints = self.get_setpoints()
if self._setpoints:
until = parse_datetime(self._setpoints['next']['from'])
until = until.strftime(EVO_STRFTIME)
data = {'Mode': op_mode, 'State': state, 'UntilTime': until}
try:
self._evo_device._set_dhw(data) # pylint: disable=protected-access
except (requests.exceptions.RequestException,
evohomeclient2.AuthenticationError) as err:
_handle_exception(err)
| 34.472527
| 79
| 0.683456
|
9b57a65c36677d25905724d3e79650ca56c1c076
| 374
|
py
|
Python
|
dirmon/setup.py
|
ytreister/stoq-plugins-public
|
1325ba0f61a313a1b9dbb71f2843ee698fb7ab9d
|
[
"Apache-2.0"
] | 72
|
2015-11-20T12:52:10.000Z
|
2021-06-08T16:57:53.000Z
|
dirmon/setup.py
|
ytreister/stoq-plugins-public
|
1325ba0f61a313a1b9dbb71f2843ee698fb7ab9d
|
[
"Apache-2.0"
] | 87
|
2016-08-25T15:09:59.000Z
|
2022-02-07T14:34:45.000Z
|
dirmon/setup.py
|
ytreister/stoq-plugins-public
|
1325ba0f61a313a1b9dbb71f2843ee698fb7ab9d
|
[
"Apache-2.0"
] | 37
|
2015-11-20T12:51:59.000Z
|
2021-12-13T18:13:32.000Z
|
from setuptools import setup, find_packages
setup(
name="dirmon",
version="3.0.0",
author="Marcus LaFerrera (@mlaferrera)",
url="https://github.com/PUNCH-Cyber/stoq-plugins-public",
license="Apache License 2.0",
description="Monitor a directory for newly created files for processing",
packages=find_packages(),
include_package_data=True,
)
| 28.769231
| 77
| 0.71123
|
f009d2a98c3be5c7a2718d4552d8c8b8976b1af4
| 4,548
|
py
|
Python
|
benchmarks/Evolution/both/evo_json/convert_py_json/convert_trait.py
|
nuprl/retic_performance
|
621211c2f40251ce5364c33e72e4067e34a32013
|
[
"MIT"
] | 3
|
2018-08-03T02:41:29.000Z
|
2021-03-19T03:18:47.000Z
|
benchmarks/Evolution/both/evo_json/convert_py_json/convert_trait.py
|
nuprl/retic_performance
|
621211c2f40251ce5364c33e72e4067e34a32013
|
[
"MIT"
] | 3
|
2018-02-04T17:53:56.000Z
|
2018-11-10T17:06:57.000Z
|
benchmarks/Evolution/both/evo_json/convert_py_json/convert_trait.py
|
nuprl/retic_performance
|
621211c2f40251ce5364c33e72e4067e34a32013
|
[
"MIT"
] | 1
|
2018-08-04T00:14:12.000Z
|
2018-08-04T00:14:12.000Z
|
from evo_json.data_def import *
from evo_json.constants import *
from evolution.data_defs import is_list_of_valid_elem, is_list_of
from evolution.constants import CARNIVORE_MIN_FOOD_TOKENS, \
CARNIVORE_MAX_FOOD_TOKENS
__author__ = 'Edwin Cowart, Kevin McDonough'
"""----------- PyJSON Trait <-> TraitCard -----------"""
def is_pj_trait(value):
""" Is the given value a PJ_Trait?
:param value: The value being checked
:type value: Any
:return: True if value is a PJ_TRAIT, False otherwise
:rtype: Boolean
"""
return isinstance(value, str) and (value in trait_dictionary.keys())
def convert_from_py_trait(pj_trait):
""" Convert the given PyJSON Trait to a TraitCard
:param pj_trait: The PyJSON Trait being converted
:type pj_trait: PJ_Trait
:return: The resulting TraitCard
:rtype: TraitCard
"""
if not is_pj_trait(pj_trait):
raise ValueError(
"convert_from_py_trait: Invalid PyJSON Trait: " + repr(pj_trait))
return trait_dictionary[pj_trait]()
def convert_to_py_trait(trait_card):
""" Convert the given TraitCard to a PyJSON Trait
:param trait_card: The TraitCard being converted
:type trait_card: TraitCard
:return: The resulting PyJSON Trait
:rtype: PJ_Trait
"""
if not isinstance(trait_card, TraitCard):
raise ValueError("convert_to_py_trait: Invalid TraitCard")
for key, trait_card_subtype in trait_dictionary.items():
if isinstance(trait_card, trait_card_subtype):
return key
# Should not be Reached
raise ValueError(
"convert_to_py_trait: Invalid TraitCard, must be subtype which is one of:" +
"\"" + "\", \"".join(trait_dictionary.values()) + "\"")
def convert_from_pj_card(py_json):
"""
    Converts the given PJ Card to a TraitCard
:param py_json: The Card to be converted
:type py_json: PyJSON
:return: The equivalent TraitCard
:rtype: TraitCard
"""
if not isinstance(py_json, list) or len(py_json) != 2:
raise ValueError("Invalid food card.")
if not (CARNIVORE_MIN_FOOD_TOKENS <=
py_json[0] <= CARNIVORE_MAX_FOOD_TOKENS):
raise ValueError("Food value out of range.")
base_card = convert_from_py_trait(py_json[1])
base_card.num_tokens_as_food_card = py_json[0]
return base_card
def convert_to_pj_card(trait_card):
"""
Convert the given TraitCard to a PyJSON Card
:param trait_card: The TraitCard to be converted
:type trait_card: TraitCard
:return: Card
:rtype: PyJSON
"""
return [trait_card.num_tokens_as_food_card, convert_to_py_trait(trait_card)]
"""----------- PyJSON LOT <-> List[TraitCard] -----------"""
"""
PJ_LOT = List[PJ_Trait]
"""
def is_pj_lot(value):
""" Is the given value a Py_LOT
:param value: The value being checked
:type value: Any
:return: True if the value is a Py_LOT, False otherwise
:rtype: Boolean
"""
return isinstance(value, list)
def convert_from_pj_lot(py_lot):
""" Convert the given PyJSON LOT to a List of TraitCard
:param py_lot: The PyJSON LOT being converted
:type py_lot: PJ_LOT
:return: The resulting List of TraitCard
:rtype: [TraitCard, ...]
"""
if not is_pj_lot(py_lot):
raise ValueError("convert_from_lot: Invalid PyJSON LOT")
return [convert_from_py_trait(py_trait) for py_trait in py_lot]
def convert_to_pj_lot(trait_card_list):
""" Convert the given List of TraitCard to a
:param trait_card_list: The List of TraitCard being converted
:type trait_card_list: [TraitCard, ...]
:return: The resulting LOT
:rtype: PJ_LOT
"""
if not is_list_of(trait_card_list, TraitCard):
raise ValueError("convert_to_py_lot: Invalid List[TraitCard]")
return [convert_to_py_trait(trait_card) for trait_card in trait_card_list]
def convert_from_pj_loc(pj_loc):
"""
Convert the given LOC to a list of TraitCards
:param pj_loc: List of Cards to convert to TraitCards
:type pj_loc: PyJSON
:return: the list of trait cards
:rtype: [TraitCard, ...]
"""
if not isinstance(pj_loc, list):
raise ValueError("List of cards must be a list")
return [convert_from_pj_card(card) for card in pj_loc]
def convert_to_pj_loc(lotc):
"""
Convert the given list of TraitCards to an LOC
:param lotc: The TraitCards to be converted
:type lotc: [TraitCard, ...]
:return: The LOC
:rtype: PyJSON
"""
return [convert_to_pj_card(tc) for tc in lotc]
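# Worked example (the "carnivore" trait name is an assumption about the keys
# of trait_dictionary, which is defined elsewhere in evo_json):
#
#   convert_from_pj_card([3, "carnivore"])
#       -> the matching TraitCard subtype with num_tokens_as_food_card == 3
#   convert_to_pj_card(card)
#       -> [3, "carnivore"] again, via convert_to_py_trait()
#
# Food values outside [CARNIVORE_MIN_FOOD_TOKENS, CARNIVORE_MAX_FOOD_TOKENS]
# raise ValueError("Food value out of range.").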
| 29.72549
| 84
| 0.684257
|
0d2376884ec0b7ce93579203037e71c319d40689
| 993
|
py
|
Python
|
v2.5.7/toontown/pets/PetManager.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-01T15:46:43.000Z
|
2021-07-23T16:26:48.000Z
|
v1.0.0.test/toontown/pets/PetManager.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 1
|
2019-06-29T03:40:05.000Z
|
2021-06-13T01:15:16.000Z
|
v1.0.0.test/toontown/pets/PetManager.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-28T21:18:46.000Z
|
2021-02-25T06:37:25.000Z
|
from panda3d.core import *
from toontown.toonbase import ToontownGlobals
from direct.task import Task
def acquirePetManager():
if not hasattr(base, 'petManager'):
PetManager()
base.petManager.incRefCount()
def releasePetManager():
base.petManager.decRefCount()
class PetManager:
CollTaskName = 'petFloorCollisions'
def __init__(self):
base.petManager = self
self.refCount = 0
self.cTrav = CollisionTraverser('petFloorCollisions')
taskMgr.add(self._doCollisions, PetManager.CollTaskName, priority=ToontownGlobals.PetFloorCollPriority)
def _destroy(self):
taskMgr.remove(PetManager.CollTaskName)
del self.cTrav
def _doCollisions(self, task):
self.cTrav.traverse(render)
return Task.cont
def incRefCount(self):
self.refCount += 1
def decRefCount(self):
self.refCount -= 1
if self.refCount == 0:
self._destroy()
del base.petManager
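# Usage sketch of the reference-counting pattern above: each consumer brackets
# its use of the shared collision traverser with acquire/release, and the
# manager tears itself down when the last consumer lets go.
#
#   acquirePetManager()      # creates base.petManager on first call
#   ...                      # pets use base.petManager.cTrav while active
#   releasePetManager()      # destroys it once the refcount reaches zero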
| 25.461538
| 111
| 0.673716
|
7ade9cd63dfc275197fcba50fb864a9154cd20b6
| 410
|
py
|
Python
|
apps/core/migrations/0003_longer_service_name.py
|
sparcs-kaist/sparcssso
|
9aeedc02652dadacb44c6a4ba06901f6d2372223
|
[
"MIT"
] | 18
|
2015-07-06T06:20:14.000Z
|
2022-03-20T23:45:40.000Z
|
apps/core/migrations/0003_longer_service_name.py
|
sparcs-kaist/sparcssso
|
9aeedc02652dadacb44c6a4ba06901f6d2372223
|
[
"MIT"
] | 170
|
2015-07-07T08:42:03.000Z
|
2022-03-24T17:31:17.000Z
|
apps/core/migrations/0003_longer_service_name.py
|
sparcs-kaist/sparcssso
|
9aeedc02652dadacb44c6a4ba06901f6d2372223
|
[
"MIT"
] | 11
|
2015-07-07T20:42:19.000Z
|
2022-01-12T22:39:59.000Z
|
# Generated by Django 2.1.8 on 2021-03-07 14:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0002_emaildomain'),
]
operations = [
migrations.AlterField(
model_name='service',
name='name',
field=models.CharField(max_length=40, primary_key=True, serialize=False),
),
]
| 21.578947
| 85
| 0.607317
|
5c8bc7233d38d4519b8bea2f6b1a7f8902e217ec
| 8,773
|
py
|
Python
|
snake.py
|
LemonPi314/terminal-snake
|
ee7d875bf3f2e97b685cc0bf21340b2109b39162
|
[
"MIT"
] | null | null | null |
snake.py
|
LemonPi314/terminal-snake
|
ee7d875bf3f2e97b685cc0bf21340b2109b39162
|
[
"MIT"
] | null | null | null |
snake.py
|
LemonPi314/terminal-snake
|
ee7d875bf3f2e97b685cc0bf21340b2109b39162
|
[
"MIT"
] | null | null | null |
import time
import json
from random import choice
from dataclasses import dataclass
from pyco import terminal, cursor
from pyco.color import Fore, Back, RESET
from pyco.utils import getch, kbhit
ESC = '\x1b'
@dataclass
class XYPair:
x: int
y: int
def copy(self):
obj = type(self).__new__(self.__class__)
obj.__dict__.update(self.__dict__)
return obj
def __iter__(self):
for attr in [attr for attr in dir(self) if not callable(getattr(self, attr)) and not attr.startswith("__")]:
yield getattr(self, attr)
class Point(XYPair):
pass
class Dimensions(XYPair):
pass
class Field:
def __init__(self, game, config: dict):
self.game = game
self.config = config
self.snake = Snake(game, self, config)
self.scaling = self.config['field']['scaling']
self.size = Dimensions(self.config['field']['width'], self.config['field']['height'])
self.real_size = Dimensions(self.size.x * self.scaling * 2, self.size.y * self.scaling)
self.enable_border = self.config['field']['enable_border']
self.snake_coords: list[Point] = []
self.field = [[{} for y in range(self.size.y)] for x in range(self.size.x)]
self.create_food()
self.clear()
print(f'{ESC}[8;{self.real_size.y};{self.real_size.x}t', end='')
terminal.set_window_title("Terminal Snake Game")
terminal.clear_screen()
cursor.hide()
def clear(self):
for y in range(self.size.y):
for x in range(self.size.x):
if x == self.food_pos.x and y == self.food_pos.y:
continue
else:
self.field[x][y] = self.config['field']['empty']
if self.enable_border:
if (x == 0 and y == 0) or (x == 0 and y == self.size.y - 1) or (x == self.size.x - 1 and y == 0) or (x == self.size.x - 1 and y == self.size.y - 1):
self.field[x][y] = self.config['field']['border']['corner']
elif x == 0 or x == self.size.x - 1:
self.field[x][y] = self.config['field']['border']['vertical']
elif y == 0 or y == self.size.y - 1:
self.field[x][y] = self.config['field']['border']['horizontal']
def update(self):
self.clear()
for x, y in self.snake_coords:
self.field[x][y] = self.config['snake']['body']
head = self.snake_coords[-1]
self.field[head.x][head.y] = self.config['snake']['head']
tail = self.snake_coords[0]
self.field[tail.x][tail.y] = self.config['snake']['tail']
for y in range(self.size.y):
row = ''
for x in range(self.size.x):
row += (getattr(Fore, self.field[x][y]['fore_color']) + getattr(Back, self.field[x][y]['back_color']) + self.field[x][y]['char'] + RESET) * 2 * self.scaling
for i in range(1, self.scaling + 1):
self.game.add_string((0, (y * self.scaling) + i), row)
terminal.set_window_title(f"Terminal Snake Game - Score: {self.snake.length}")
def create_food(self):
xr = list(range(self.size.x))
yr = list(range(self.size.y))
while xr and yr:
point = Point(choice(xr), choice(yr))
if point not in self.snake_coords:
self.food_pos = point
self.field[self.food_pos.x][self.food_pos.y] = self.config['field']['food']
return
xr.remove(point.x)
yr.remove(point.y)
class Snake:
def __init__(self, game, field: Field, config: dict):
self.game = game
self.field = field
self.config = config['snake']
self.pos = Point(self.config['pos']['x'], self.config['pos']['y'])
self.length = self.config['length']
self.direction = self.config['direction'].upper()
self.speed = 1 / self.config['speed']
self.is_alive = True
self.last_move_time = -self.speed
self.last_direction_time = 0
if self.direction == 'R' or self.direction == 'L':
self.coords = [Point(self.pos.x + i if self.direction == 'R' else -i, self.pos.y) for i in range(self.length)]
elif self.direction == 'U' or self.direction == 'D':
self.coords = [Point(self.pos.x, self.pos.y + i if self.direction == 'D' else -i) for i in range(self.length)]
def change_direction(self, direction: str):
if direction == 'R' and self.direction == 'L': return
if direction == 'L' and self.direction == 'R': return
if direction == 'U' and self.direction == 'D': return
if direction == 'D' and self.direction == 'U': return
if self.last_direction_time > self.last_move_time: return
self.last_direction_time = time.perf_counter()
self.direction = direction
def loop(self, point: Point):
looped = True
if point.x > self.field.size.x - 1:
point.x = 0
elif point.x < 0:
point.x = self.field.size.x - 1
elif point.y > self.field.size.y - 1:
point.y = 0
elif point.y < 0:
point.y = self.field.size.y - 1
else:
looped = False
if looped and self.config['walls_kill']:
self.is_alive = False
return point
def grow(self):
self.length += 1
a = self.coords[0]
b = self.coords[1]
tail = a.copy()
if a.x < b.x:
tail.x -= 1
elif a.y < b.y:
tail.y -= 1
elif a.x > b.x:
tail.x += 1
elif a.y > b.y:
tail.y += 1
tail = self.loop(tail)
self.coords.insert(0, tail)
def move(self):
if time.perf_counter() - self.last_move_time >= self.speed:
head = self.coords[-1].copy()
if self.direction == 'R':
head.x += 1
elif self.direction == 'L':
head.x -= 1
elif self.direction == 'U':
head.y -= 1
elif self.direction == 'D':
head.y += 1
body = self.coords[:-1]
self.is_alive = head not in body
self.coords.append(self.loop(head))
self.coords.pop(0)
self.field.snake_coords = self.coords
if not self.is_alive:
self.game.end(Fore.BRIGHT_RED + "GAME OVER", Fore.WHITE + "PRESS 'ESC' TO EXIT OR ANY OTHER KEY TO PLAY AGAIN")
if self.field.food_pos in self.coords:
self.grow()
self.field.create_food()
if self.length >= self.field.size.x * self.field.size.y:
self.game.end(Fore.BRIGHT_GREEN + "YOU WIN", Fore.WHITE + "PRESS 'ESC' TO EXIT OR ANY OTHER KEY TO PLAY AGAIN")
self.last_move_time = time.perf_counter()
class Game:
def __init__(self, config_path: str = 'config.json'):
with open(config_path, 'r') as config_file:
self.config = json.load(config_file)
self.keybinds = self.config['keybinds']
self.running = True
def play(self):
self.running = True
while self.running:
self.field = Field(self, self.config)
while self.field.snake.is_alive and self.running:
key = self.get_key()
if key is not None:
key = key.upper()
# key = self.keybinds.get(key, key)
if key in self.keybinds.values():
if key == self.keybinds.get('exit'):
self.exit()
elif key == self.keybinds.get('pause'):
getch()
else:
self.field.snake.change_direction(key)
self.field.snake.move()
self.field.update()
def end(self, *messages: str):
# terminal.bell()
for i, message in enumerate(messages):
self.add_string(((self.field.real_size.x - len(message)) // 2, ((self.field.real_size.y - len(messages)) // 2) + i), message + RESET)
key = getch().upper()
if key == '\x1b':
self.exit()
else:
self.play()
def exit(self):
terminal.clear_screen()
self.running = False
def add_string(self, pos: tuple[int], string: str):
cursor.set_position(*pos)
print(string, end='')
def get_key(self) -> str:
if kbhit():
key = getch()
if key in ['\000', '\x00', '\xe0']:
key = getch()
return key
if __name__ == '__main__':
game = Game()
game.play()
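# Sketch of the config.json layout this script expects, reconstructed from the
# lookups above (the key names come from the code; every concrete value is an
# assumption):
#
# {
#   "field": {
#     "width": 20, "height": 15, "scaling": 1, "enable_border": true,
#     "empty":  {"char": " ", "fore_color": "WHITE", "back_color": "BLACK"},
#     "food":   {"char": "o", "fore_color": "BRIGHT_RED", "back_color": "BLACK"},
#     "border": {
#       "corner":     {"char": "+", "fore_color": "WHITE", "back_color": "BLACK"},
#       "vertical":   {"char": "|", "fore_color": "WHITE", "back_color": "BLACK"},
#       "horizontal": {"char": "-", "fore_color": "WHITE", "back_color": "BLACK"}
#     }
#   },
#   "snake": {
#     "head": {"char": "#", "fore_color": "BRIGHT_GREEN", "back_color": "BLACK"},
#     "body": {"char": "#", "fore_color": "GREEN", "back_color": "BLACK"},
#     "tail": {"char": "#", "fore_color": "GREEN", "back_color": "BLACK"},
#     "pos": {"x": 5, "y": 5}, "length": 3, "direction": "R",
#     "speed": 8, "walls_kill": false
#   },
#   "keybinds": {"exit": "\u001b", "pause": "P", "up": "U", "down": "D", "left": "L", "right": "R"}
# }
#
# Note: with the remapping line in Game.play() commented out, the keybind
# values are compared against the pressed key itself, so the direction values
# must be keys whose uppercase form is exactly "U", "D", "L" or "R".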
| 37.173729
| 172
| 0.531175
|
410a610700d1d69201cead69cfc85774d2359403
| 8,386
|
py
|
Python
|
fcos_core/modeling/rpn/rpn.py
|
Hwang64/CrabNet
|
bf414e2d51f433ab3560a973feff104907779d21
|
[
"BSD-2-Clause"
] | 2
|
2021-03-17T01:52:44.000Z
|
2021-03-18T02:41:32.000Z
|
fcos_core/modeling/rpn/rpn.py
|
Hwang64/CrabNet
|
bf414e2d51f433ab3560a973feff104907779d21
|
[
"BSD-2-Clause"
] | null | null | null |
fcos_core/modeling/rpn/rpn.py
|
Hwang64/CrabNet
|
bf414e2d51f433ab3560a973feff104907779d21
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
import torch.nn.functional as F
from torch import nn
from fcos_core.modeling import registry
from fcos_core.modeling.box_coder import BoxCoder
from fcos_core.modeling.rpn.retinanet.retinanet import build_retinanet
from fcos_core.modeling.rpn.fcos.fcos import build_fcos
from fcos_core.modeling.rpn.atss.atss import build_atss
from fcos_core.modeling.rpn.retinanet.retinanet_reg import build_retinanet_reg
from fcos_core.modeling.rpn.fcos.fcos_reg import build_fcos_reg
from fcos_core.modeling.rpn.atss.atss_reg import build_atss_reg
from .loss import make_rpn_loss_evaluator
from .anchor_generator import make_anchor_generator
from .inference import make_rpn_postprocessor
class RPNHeadConvRegressor(nn.Module):
"""
A simple RPN Head for classification and bbox regression
"""
def __init__(self, cfg, in_channels, num_anchors):
"""
Arguments:
cfg : config
in_channels (int): number of channels of the input feature
num_anchors (int): number of anchors to be predicted
"""
super(RPNHeadConvRegressor, self).__init__()
self.cls_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1, stride=1)
self.bbox_pred = nn.Conv2d(
in_channels, num_anchors * 4, kernel_size=1, stride=1
)
for l in [self.cls_logits, self.bbox_pred]:
torch.nn.init.normal_(l.weight, std=0.01)
torch.nn.init.constant_(l.bias, 0)
def forward(self, x):
assert isinstance(x, (list, tuple))
logits = [self.cls_logits(y) for y in x]
bbox_reg = [self.bbox_pred(y) for y in x]
return logits, bbox_reg
class RPNHeadFeatureSingleConv(nn.Module):
"""
Adds a simple RPN Head with one conv to extract the feature
"""
def __init__(self, cfg, in_channels):
"""
Arguments:
cfg : config
in_channels (int): number of channels of the input feature
"""
super(RPNHeadFeatureSingleConv, self).__init__()
self.conv = nn.Conv2d(
in_channels, in_channels, kernel_size=3, stride=1, padding=1
)
for l in [self.conv]:
torch.nn.init.normal_(l.weight, std=0.01)
torch.nn.init.constant_(l.bias, 0)
self.out_channels = in_channels
def forward(self, x):
assert isinstance(x, (list, tuple))
x = [F.relu(self.conv(z)) for z in x]
return x
@registry.RPN_HEADS.register("SingleConvRPNHead")
class RPNHead(nn.Module):
"""
Adds a simple RPN Head with classification and regression heads
"""
def __init__(self, cfg, in_channels, num_anchors):
"""
Arguments:
cfg : config
in_channels (int): number of channels of the input feature
num_anchors (int): number of anchors to be predicted
"""
super(RPNHead, self).__init__()
self.conv = nn.Conv2d(
in_channels, in_channels, kernel_size=3, stride=1, padding=1
)
self.cls_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1, stride=1)
self.bbox_pred = nn.Conv2d(
in_channels, num_anchors * 4, kernel_size=1, stride=1
)
for l in [self.conv, self.cls_logits, self.bbox_pred]:
torch.nn.init.normal_(l.weight, std=0.01)
torch.nn.init.constant_(l.bias, 0)
def forward(self, x):
logits = []
bbox_reg = []
for feature in x:
t = F.relu(self.conv(feature))
logits.append(self.cls_logits(t))
bbox_reg.append(self.bbox_pred(t))
return logits, bbox_reg
class RPNModule(torch.nn.Module):
"""
Module for RPN computation. Takes feature maps from the backbone and RPN
proposals and losses. Works for both FPN and non-FPN.
"""
def __init__(self, cfg, in_channels):
super(RPNModule, self).__init__()
self.cfg = cfg.clone()
anchor_generator = make_anchor_generator(cfg)
rpn_head = registry.RPN_HEADS[cfg.MODEL.RPN.RPN_HEAD]
head = rpn_head(
cfg, in_channels, anchor_generator.num_anchors_per_location()[0]
)
rpn_box_coder = BoxCoder(weights=(1.0, 1.0, 1.0, 1.0))
box_selector_train = make_rpn_postprocessor(cfg, rpn_box_coder, is_train=True)
box_selector_test = make_rpn_postprocessor(cfg, rpn_box_coder, is_train=False)
loss_evaluator = make_rpn_loss_evaluator(cfg, rpn_box_coder)
self.anchor_generator = anchor_generator
self.head = head
self.box_selector_train = box_selector_train
self.box_selector_test = box_selector_test
self.loss_evaluator = loss_evaluator
def forward(self, images, features, targets=None):
"""
Arguments:
images (ImageList): images for which we want to compute the predictions
features (list[Tensor]): features computed from the images that are
used for computing the predictions. Each tensor in the list
correspond to different feature levels
targets (list[BoxList]): ground-truth boxes present in the image (optional)
Returns:
boxes (list[BoxList]): the predicted boxes from the RPN, one BoxList per
image.
losses (dict[Tensor]): the losses for the model during training. During
testing, it is an empty dict.
"""
objectness, rpn_box_regression = self.head(features)
anchors = self.anchor_generator(images, features)
if self.training:
return self._forward_train(anchors, objectness, rpn_box_regression, targets)
else:
return self._forward_test(anchors, objectness, rpn_box_regression)
def _forward_train(self, anchors, objectness, rpn_box_regression, targets):
if self.cfg.MODEL.RPN_ONLY:
# When training an RPN-only model, the loss is determined by the
# predicted objectness and rpn_box_regression values and there is
# no need to transform the anchors into predicted boxes; this is an
# optimization that avoids the unnecessary transformation.
boxes = anchors
else:
# For end-to-end models, anchors must be transformed into boxes and
# sampled into a training batch.
with torch.no_grad():
boxes = self.box_selector_train(
anchors, objectness, rpn_box_regression, targets
)
loss_objectness, loss_rpn_box_reg = self.loss_evaluator(
anchors, objectness, rpn_box_regression, targets
)
losses = {
"loss_objectness": loss_objectness,
"loss_rpn_box_reg": loss_rpn_box_reg,
}
return boxes, losses
def _forward_test(self, anchors, objectness, rpn_box_regression):
boxes = self.box_selector_test(anchors, objectness, rpn_box_regression)
if self.cfg.MODEL.RPN_ONLY:
# For end-to-end models, the RPN proposals are an intermediate state
# and we don't bother to sort them in decreasing score order. For RPN-only
# models, the proposals are the final output and we return them in
# high-to-low confidence order.
inds = [
box.get_field("objectness").sort(descending=True)[1] for box in boxes
]
boxes = [box[ind] for box, ind in zip(boxes, inds)]
return boxes, {}
def build_rpn(cfg, in_channels, extra=False):
"""
Builds the proposal/detection head selected by the config: ATSS, FCOS, RetinaNet, or the default RPN.
When ``extra`` is True, the ATSS/FCOS branches also return a companion regression-only head.
"""
if cfg.MODEL.ATSS_ON:
if not extra: return build_atss(cfg, in_channels)
if extra: return build_atss(cfg, in_channels), build_atss_reg(cfg, in_channels)
if cfg.MODEL.FCOS_ON:
if not extra: return build_fcos(cfg, in_channels)
if extra: return build_fcos(cfg, in_channels), build_fcos_reg(cfg, in_channels)
#if extra == True: return build_fcos(cfg, in_channels), build_retinanet_reg(cfg, in_channels)
if cfg.MODEL.RETINANET_ON:
return build_retinanet(cfg, in_channels)
return RPNModule(cfg, in_channels)
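# Minimal usage sketch (added for illustration, not part of the original module).
# It assumes a fcos_core config object `cfg` and a backbone built via
# fcos_core.modeling.backbone.build_backbone; both names are assumptions here.
#
#     backbone = build_backbone(cfg)
#     rpn = build_rpn(cfg, backbone.out_channels)
#     # training:  boxes, losses = rpn(images, features, targets)
#     # inference: boxes, _ = rpn(images, features)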
| 38.118182
| 101
| 0.646196
|
393063e704a04acdf7cc87444883a69d9559bae2
| 14,052
|
py
|
Python
|
test/python/WMCore_t/Services_t/DBS_t/DBSReader_t.py
|
haozturk/WMCore
|
b34e8c89b4a30de78b7d0cf4730d4c70af5830b4
|
[
"Apache-2.0"
] | null | null | null |
test/python/WMCore_t/Services_t/DBS_t/DBSReader_t.py
|
haozturk/WMCore
|
b34e8c89b4a30de78b7d0cf4730d4c70af5830b4
|
[
"Apache-2.0"
] | null | null | null |
test/python/WMCore_t/Services_t/DBS_t/DBSReader_t.py
|
haozturk/WMCore
|
b34e8c89b4a30de78b7d0cf4730d4c70af5830b4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
_DBSReader_t_
Unit test for the DBS helper class.
"""
import unittest
from WMCore.Services.DBS.DBS3Reader import getDataTiers, DBS3Reader as DBSReader
from WMCore.Services.DBS.DBSErrors import DBSReaderError
from WMQuality.Emulators.EmulatedUnitTestCase import EmulatedUnitTestCase
# A small dataset that should always exist
DATASET = '/HighPileUp/Run2011A-v1/RAW'
BLOCK = '/HighPileUp/Run2011A-v1/RAW#fabf118a-cbbf-11e0-80a9-003048caaace'
FILE = '/store/data/Run2011A/HighPileUp/RAW/v1/000/173/657/B293AF24-BFCB-E011-8F85-BCAEC5329701.root'
# A RECO dataset that has parents (also small)
DATASET_WITH_PARENTS = '/Cosmics/ComissioningHI-PromptReco-v1/RECO'
BLOCK_WITH_PARENTS = DATASET_WITH_PARENTS + '#7020873e-0dcd-11e1-9b6c-003048caaace'
PARENT_DATASET = '/Cosmics/ComissioningHI-v1/RAW'
PARENT_BLOCK = PARENT_DATASET + '#929366bc-0c31-11e1-b764-003048caaace'
PARENT_FILE = '/store/data/ComissioningHI/Cosmics/RAW/v1/000/181/369/662EAD44-300C-E111-A709-BCAEC518FF62.root'
class DBSReaderTest(EmulatedUnitTestCase):
def setUp(self):
"""
_setUp_
Initialize the DBS reader endpoint used by these tests.
"""
self.endpoint = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader'
self.dbs = None
super(DBSReaderTest, self).setUp()
return
def tearDown(self):
"""
_tearDown_
"""
super(DBSReaderTest, self).tearDown()
return
def testListDatatiers(self):
"""
listDatatiers returns all datatiers available
"""
self.dbs = DBSReader(self.endpoint)
results = self.dbs.listDatatiers()
self.assertTrue('RAW' in results)
self.assertTrue('GEN-SIM-RECO' in results)
self.assertTrue('GEN-SIM' in results)
self.assertFalse('RAW-ALAN' in results)
return
def testGetDataTiers(self):
"""
Test the getDataTiers function
"""
results = getDataTiers(self.endpoint)
self.assertTrue('RAW' in results)
self.assertTrue('GEN-SIM-RECO' in results)
self.assertTrue('GEN-SIM' in results)
self.assertFalse('RAW-ALAN' in results)
# dbsUrl is mandatory
with self.assertRaises(TypeError):
_ = getDataTiers()
return
def testListPrimaryDatasets(self):
"""
listPrimaryDatasets returns known primary datasets
"""
self.dbs = DBSReader(self.endpoint)
results = self.dbs.listPrimaryDatasets('Jet*')
self.assertTrue('Jet' in results)
self.assertTrue('JetMET' in results)
self.assertTrue('JetMETTau' in results)
self.assertFalse(self.dbs.listPrimaryDatasets('DoesntExist'))
return
def testMatchProcessedDatasets(self):
"""
matchProcessedDatasets returns known processed datasets
"""
self.dbs = DBSReader(self.endpoint)
dataset = self.dbs.matchProcessedDatasets('Jet', 'RAW', 'Run2011A-v1')
self.assertEqual(1, len(dataset))
self.assertEqual(['/Jet/Run2011A-v1/RAW'], dataset[0]['PathList'])
self.assertEqual('Run2011A-v1', dataset[0]['Name'])
self.assertFalse(self.dbs.matchProcessedDatasets('Jet', 'RAW', 'Run2011A-v666'))
def testlistRuns(self):
"""listRuns returns known runs"""
self.dbs = DBSReader(self.endpoint)
runs = self.dbs.listRuns(dataset=DATASET)
self.assertEqual(46, len(runs))
self.assertTrue(174074 in runs)
runs = self.dbs.listRuns(block=BLOCK)
self.assertEqual(1, len(runs))
self.assertEqual([173657], runs)
def testlistRunLumis(self):
"""listRunLumis returns known runs and lumicounts (None for DBS3)"""
self.dbs = DBSReader(self.endpoint)
runs = self.dbs.listRunLumis(dataset=DATASET)
self.assertEqual(46, len(runs))
self.assertTrue(173692 in runs)
self.assertEqual(runs[173692], None)
runs = self.dbs.listRunLumis(block=BLOCK)
self.assertEqual(1, len(runs))
self.assertTrue(173657 in runs)
self.assertEqual(runs[173657], None)
def testListProcessedDatasets(self):
"""listProcessedDatasets returns known processed datasets"""
self.dbs = DBSReader(self.endpoint)
datasets = self.dbs.listProcessedDatasets('Jet', 'RAW')
self.assertTrue('Run2011A-v1' in datasets)
self.assertTrue('Run2011B-v1' in datasets)
self.assertFalse(self.dbs.listProcessedDatasets('Jet', 'blah'))
self.assertFalse(self.dbs.listProcessedDatasets('blah', 'RAW'))
def testlistDatasetFiles(self):
"""listDatasetFiles returns files in dataset"""
self.dbs = DBSReader(self.endpoint)
files = self.dbs.listDatasetFiles(DATASET)
self.assertEqual(49, len(files))
self.assertTrue(FILE in files)
def testlistDatasetFileDetails(self):
"""testlistDatasetFilesDetails returns lumis, events, and parents of a dataset"""
TESTFILE = '/store/data/Run2011A/HighPileUp/RAW/v1/000/173/658/56484BAB-CBCB-E011-AF00-BCAEC518FF56.root'
self.dbs = DBSReader(self.endpoint)
details = self.dbs.listDatasetFileDetails(DATASET)
self.assertEqual(len(details), 49)
self.assertTrue(TESTFILE in details)
self.assertEqual(details[TESTFILE]['NumberOfEvents'], 545)
self.assertEqual(details[TESTFILE]['file_size'], 286021145)
self.assertEqual(details[TESTFILE]['BlockName'],
'/HighPileUp/Run2011A-v1/RAW#dd6e0796-cbcc-11e0-80a9-003048caaace')
self.assertEqual(details[TESTFILE]['Md5'], 'NOTSET')
self.assertEqual(details[TESTFILE]['md5'], 'NOTSET')
self.assertEqual(details[TESTFILE]['Adler32'], 'a41a1446')
self.assertEqual(details[TESTFILE]['adler32'], 'a41a1446')
self.assertEqual(details[TESTFILE]['Checksum'], '22218315')
self.assertEqual(details[TESTFILE]['check_sum'], '22218315')
self.assertTrue(173658 in details[TESTFILE]['Lumis'])
self.assertEqual(sorted(details[TESTFILE]['Lumis'][173658]),
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
50,
51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
74,
75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
98,
99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111])
def testGetDBSSummaryInfo(self):
"""getDBSSummaryInfo returns summary of dataset and block"""
self.dbs = DBSReader(self.endpoint)
dataset = self.dbs.getDBSSummaryInfo(DATASET)
self.assertEqual(dataset['path'], DATASET)
self.assertEqual(dataset['block'], '')
self.assertEqual(dataset['NumberOfEvents'], 22075)
self.assertEqual(dataset['NumberOfBlocks'], 46)
self.assertEqual(dataset['FileSize'], 4001680824)
self.assertEqual(dataset['file_size'], 4001680824)
self.assertEqual(dataset['NumberOfFiles'], 49)
self.assertEqual(dataset['NumberOfLumis'], 7223)
block = self.dbs.getDBSSummaryInfo(DATASET, BLOCK)
self.assertEqual(block['path'], DATASET)
self.assertEqual(block['block'], BLOCK)
self.assertEqual(block['NumberOfEvents'], 377)
self.assertEqual(block['NumberOfBlocks'], 1)
self.assertEqual(block['FileSize'], 150780132)
self.assertEqual(block['file_size'], 150780132)
self.assertEqual(block['NumberOfFiles'], 2)
self.assertEqual(block['NumberOfLumis'], 94)
with self.assertRaises(DBSReaderError):
self.dbs.getDBSSummaryInfo(DATASET + 'blah')
with self.assertRaises(DBSReaderError):
self.dbs.getDBSSummaryInfo(DATASET, BLOCK + 'asas')
def testListFileBlocks(self):
"""listFileBlocks returns block names in dataset"""
self.dbs = DBSReader(self.endpoint)
blocks = self.dbs.listFileBlocks(DATASET)
self.assertTrue(BLOCK in blocks)
# block is closed
block = self.dbs.listFileBlocks(DATASET, blockName=BLOCK, onlyClosedBlocks=True)[0]
self.assertEqual(block, BLOCK)
self.assertTrue(BLOCK in block)
def testListOpenFileBlocks(self):
"""listOpenFileBlocks finds open blocks"""
# hard to find a dataset with open blocks, so don't bother
self.dbs = DBSReader(self.endpoint)
self.assertFalse(self.dbs.listOpenFileBlocks(DATASET))
def testBlockExists(self):
"""blockExists returns existence of blocks"""
self.dbs = DBSReader(self.endpoint)
self.assertTrue(self.dbs.blockExists(BLOCK))
self.assertRaises(DBSReaderError, self.dbs.blockExists, DATASET + '#somethingelse')
def testListFilesInBlock(self):
"""listFilesInBlock returns files in block"""
self.dbs = DBSReader(self.endpoint)
self.assertTrue(FILE in [x['LogicalFileName'] for x in self.dbs.listFilesInBlock(BLOCK)])
self.assertRaises(DBSReaderError, self.dbs.listFilesInBlock, DATASET + '#blah')
def testListFilesInBlockWithParents(self):
"""listFilesInBlockWithParents gets files with parents for a block"""
self.dbs = DBSReader(self.endpoint)
files = self.dbs.listFilesInBlockWithParents(
'/Cosmics/Commissioning2015-PromptReco-v1/RECO#004ac3ba-d09e-11e4-afad-001e67ac06a0')
self.assertEqual(4, len(files))
self.assertEqual('/Cosmics/Commissioning2015-PromptReco-v1/RECO#004ac3ba-d09e-11e4-afad-001e67ac06a0',
files[0]['block_name'])
self.assertEqual('/Cosmics/Commissioning2015-PromptReco-v1/RECO#004ac3ba-d09e-11e4-afad-001e67ac06a0',
files[0]['BlockName'])
self.assertEqual(
'/store/data/Commissioning2015/Cosmics/RAW/v1/000/238/545/00000/1043E89F-2DCF-E411-9CAE-02163E013751.root',
files[0]['ParentList'][0]['LogicalFileName'])
self.assertRaises(DBSReaderError, self.dbs.listFilesInBlockWithParents, BLOCK + 'asas')
def testLfnsInBlock(self):
"""lfnsInBlock returns lfns in block"""
self.dbs = DBSReader(self.endpoint)
self.assertTrue(FILE in [x['logical_file_name'] for x in self.dbs.lfnsInBlock(BLOCK)])
self.assertRaises(DBSReaderError, self.dbs.lfnsInBlock, BLOCK + 'asas')
def testListFileBlockLocation(self):
"""listFileBlockLocation returns block location"""
WRONG_BLOCK = BLOCK[:-4] + 'abcd'
BLOCK2 = '/HighPileUp/Run2011A-v1/RAW#6021175e-cbfb-11e0-80a9-003048caaace'
DBS_BLOCK = '/GenericTTbar/hernan-140317_231446_crab_JH_ASO_test_T2_ES_CIEMAT_5000_100_140318_0014-' + \
'ea0972193530f531086947d06eb0f121/USER#fb978442-a61b-413a-b4f4-526e6cdb142e'
DBS_BLOCK2 = '/GenericTTbar/hernan-140317_231446_crab_JH_ASO_test_T2_ES_CIEMAT_5000_100_140318_0014-' + \
'ea0972193530f531086947d06eb0f121/USER#0b04d417-d734-4ef2-88b0-392c48254dab'
self.dbs = DBSReader('https://cmsweb.cern.ch/dbs/prod/phys03/DBSReader/')
self.assertEqual(self.dbs.listFileBlockLocation(BLOCK), [])
# This block is only found on DBS
self.assertItemsEqual(self.dbs.listFileBlockLocation(DBS_BLOCK), [u'T2_ES_CIEMAT'])
# doesn't raise on non-existent block
self.assertEqual(self.dbs.listFileBlockLocation(WRONG_BLOCK), [])
# test bulk call:
## two blocks in phedex
self.assertEqual(2, len(self.dbs.listFileBlockLocation([BLOCK, BLOCK2])))
## one block in phedex one does not exist
self.assertEqual(2, len(self.dbs.listFileBlockLocation([BLOCK, WRONG_BLOCK])))
## one in phedex one in dbs
self.assertEqual(2, len(self.dbs.listFileBlockLocation([BLOCK, DBS_BLOCK])))
## two in dbs
self.assertEqual(2, len(self.dbs.listFileBlockLocation([DBS_BLOCK, DBS_BLOCK2])))
## one in DBS and one does not exist
self.assertEqual(2, len(self.dbs.listFileBlockLocation([DBS_BLOCK, WRONG_BLOCK])))
def testGetFileBlock(self):
"""getFileBlock returns block"""
self.dbs = DBSReader(self.endpoint)
block = self.dbs.getFileBlock(BLOCK)
self.assertEqual(len(block), 3)
self.assertEqual(2, len(block['Files']))
self.assertRaises(DBSReaderError, self.dbs.getFileBlock, BLOCK + 'asas')
def testGetFileBlockWithParents(self):
"""getFileBlockWithParents returns block and parents"""
self.dbs = DBSReader(self.endpoint)
block = self.dbs.getFileBlockWithParents(BLOCK_WITH_PARENTS)
self.assertEqual(len(block), 3)
self.assertEqual(PARENT_FILE, block['Files'][0]['ParentList'][0]['LogicalFileName'])
self.assertRaises(DBSReaderError, self.dbs.getFileBlockWithParents, BLOCK + 'asas')
def testListBlockParents(self):
"""listBlockParents returns block parents"""
self.dbs = DBSReader(self.endpoint)
parents = self.dbs.listBlockParents(BLOCK_WITH_PARENTS)
self.assertItemsEqual([PARENT_BLOCK], parents)
self.assertFalse(self.dbs.listBlockParents(PARENT_BLOCK))
def testBlockIsOpen(self):
"""blockIsOpen checks if a block is open"""
self.dbs = DBSReader(self.endpoint)
self.assertFalse(self.dbs.blockIsOpen(BLOCK))
def testBlockToDatasetPath(self):
"""blockToDatasetPath extracts path from block name"""
self.dbs = DBSReader(self.endpoint)
self.assertEqual(self.dbs.blockToDatasetPath(BLOCK), DATASET)
self.assertRaises(DBSReaderError, self.dbs.blockToDatasetPath, BLOCK + 'asas')
if __name__ == '__main__':
unittest.main()
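# Usage note (added for illustration): with test/python on PYTHONPATH, a single
# case can be run with the standard unittest runner, e.g.
#
#     python -m unittest WMCore_t.Services_t.DBS_t.DBSReader_t.DBSReaderTest.testListDatatiers
#
# (the module path above is inferred from the repository layout).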
| 45.329032
| 123
| 0.661472
|
44e84d57cb44e3b582fd702ffb12e3ff7bbf30eb
| 3,170
|
py
|
Python
|
asn/neuronModel.py
|
lynnsoerensen/Arousal_DCNN
|
0f4ae01dac06722fa5fecfd43e13b722cdf09199
|
[
"MIT"
] | 2
|
2020-12-30T00:38:14.000Z
|
2021-07-01T18:30:13.000Z
|
asn/neuronModel.py
|
lynnsoerensen/Arousal_DCNN
|
0f4ae01dac06722fa5fecfd43e13b722cdf09199
|
[
"MIT"
] | null | null | null |
asn/neuronModel.py
|
lynnsoerensen/Arousal_DCNN
|
0f4ae01dac06722fa5fecfd43e13b722cdf09199
|
[
"MIT"
] | 1
|
2021-10-03T08:49:35.000Z
|
2021-10-03T08:49:35.000Z
|
import numpy as np
class ASN:
""" Adaptive spiking neuron class, double-checked on Jan 24, 2018 """
def __init__(self, mf=0.1, bias=0):
# Params
# membrane filter
self.tau_phi = 2.5
self.dPhi = np.exp(-1 / self.tau_phi)
# threshold decay filter
self.tau_gamma = 15.0
self.dGamma = np.exp(-1 / self.tau_gamma)
# refractory decay filter
self.tau_eta = 50.0
self.dEta = np.exp(-1 / self.tau_eta)
self.dBeta = self.dEta
self.m_f = mf # **2 would be the old matlab code
self.theta0 = self.m_f # Resting threshold
self.S_bias = bias
self.S = self.S_bias # filtered activation, initialized with bias
self.S_dyn = 0
self.theta = self.theta0 # Start value of threshold
self.theta_dyn = 0 # dynamic part of the threshold
self.S_hat = 0 # refractory response, internal approximation
self.current_next = 0 # incoming current in next neuron
self.S_next = 0 # and filtered by the membrane potential.
self.I = 0
self.spike = 0
def update(self, current, spike_train=True):
"""inject current for one moment at a time"""
# Membrane filter
if spike_train:
self.I = self.I * self.dBeta + current
else:
self.I = current
self.S_dyn = (1 - self.dPhi) * self.I + self.dPhi * self.S_dyn
self.S = self.S_bias + self.S_dyn
# Decay
self.S_hat = self.S_hat * self.dEta
self.current_next = self.current_next * self.dEta
# Spike?
if self.S - self.S_hat > 0.5 * self.theta:
self.spike = 1 # Code spike
# Update refractory response
self.S_hat = self.S_hat + self.theta
# Update threshold
# self.theta_dyn = self.theta_dyn + self.m_f*self.theta/self.theta0 #based on the matlab code
self.theta_dyn = self.theta_dyn + self.m_f * self.theta # adaptive part based on the paper
self.current_next = self.current_next + 1
else:
self.spike = 0
# Decay
self.theta_dyn = self.theta_dyn * self.dGamma
self.theta = self.theta0 + self.theta_dyn
# Signal in next neuron
self.S_next = (1 - self.dPhi) * self.current_next + self.dPhi * self.S_next
def call(self, input, spike_train=True, mf=0.1, bias=0):
timesteps = input.shape[1]
batch_size = input.shape[0]
S = np.zeros(input.shape)
S_next = np.zeros(input.shape)
spikes = np.zeros(input.shape)
S_hat = np.zeros(input.shape)
theta = np.zeros(input.shape)
for b in range(batch_size):
self.__init__(mf=mf, bias=bias)
for t in range(timesteps): # loop over timesteps
self.update(input[b, t, :], spike_train=spike_train)
S[b, t, 0] = self.S
S_next[b, t, 0] = self.S_next
spikes[b, t, 0] = self.spike
S_hat[b, t, 0] = self.S_hat
theta[b, t, 0] = self.theta
return S_next, spikes, S, S_hat, theta
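# Minimal usage sketch (added for illustration; the values are placeholders):
# drive one adaptive spiking neuron with a constant input of 1.0 for 500
# timesteps, using the (batch, timesteps, 1) layout expected by ASN.call.
#
#     inp = np.ones((1, 500, 1))
#     neuron = ASN(mf=0.1, bias=0)
#     S_next, spikes, S, S_hat, theta = neuron.call(inp, spike_train=False)
#     print(int(spikes.sum()), "spikes emitted")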
| 35.617978
| 105
| 0.570978
|
aba4244b7274b765aa0af9774b2318ce20e75f22
| 201
|
py
|
Python
|
AtCoder/ABC/B/page-13/088B.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/B/page-13/088B.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/B/page-13/088B.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
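# Explanatory note (added): the values are taken in descending order, with `a`
# accumulating the odd picks and `b` the even picks; the printed result is a - b.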
n = int(input())
lst = list(map(int, input().split()))
lst.sort(reverse=True)
a = 0
b = 0
for i in range(1, n+1):
if i % 2 == 0:
b += lst[i-1]
else:
a += lst[i-1]
print(a-b)
| 13.4
| 37
| 0.477612
|
a6fb653b8551c87c741d8c6d6cac5633b1d54c9a
| 11,644
|
py
|
Python
|
tests/test_cli.py
|
mdengler/pipgrip
|
03f5b9df70d8035b26d35e1dfa5bae6857fc240d
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_cli.py
|
mdengler/pipgrip
|
03f5b9df70d8035b26d35e1dfa5bae6857fc240d
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_cli.py
|
mdengler/pipgrip
|
03f5b9df70d8035b26d35e1dfa5bae6857fc240d
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
from click.testing import CliRunner
import pipgrip.pipper
from pipgrip.cli import flatten, main
from pipgrip.pipper import _download_wheel
self_wheel = _download_wheel(".", None, None, None, "./tests/assets")
# fmt: off
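# Note (added): the mocks below map exact requirement strings to wheel files
# bundled under ./tests/assets and to fixed version lists, so the CLI tests run
# deterministically and offline instead of hitting a real package index.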
def mock_download_wheel(package, *args, **kwargs):
wheelhouse = {
".": self_wheel,
"setuptools>=38.3": "./tests/assets/setuptools-44.0.0-py2.py3-none-any.whl",
"pkginfo>=1.4.2": "./tests/assets/pkginfo-1.5.0.1-py2.py3-none-any.whl",
"packaging>=17": "./tests/assets/packaging-20.0-py2.py3-none-any.whl",
"click": "./tests/assets/Click-7.0-py2.py3-none-any.whl",
"anytree": "./tests/assets/anytree-2.7.3-py2.py3-none-any.whl",
"six": "./tests/assets/six-1.13.0-py2.py3-none-any.whl",
"wheel": "./tests/assets/wheel-0.33.6-py2.py3-none-any.whl",
"pyparsing>=2.0.2": "./tests/assets/pyparsing-2.4.6-py2.py3-none-any.whl",
"requests==2.22.0": "./tests/assets/requests-2.22.0-py2.py3-none-any.whl",
"urllib3<1.25.0|>1.25.0,<1.25.1|>1.25.1,<1.26,>=1.21.1": "./tests/assets/urllib3-1.25-py2.py3-none-any.whl",
"urllib3==1.25.7": "./tests/assets/urllib3-1.25.7-py2.py3-none-any.whl",
"idna<2.9,>=2.5": "./tests/assets/idna-2.8-py2.py3-none-any.whl",
"chardet<3.1.0,>=3.0.2": "./tests/assets/chardet-3.0.4-py2.py3-none-any.whl",
"certifi>=2017.4.17": "./tests/assets/certifi-2019.11.28-py2.py3-none-any.whl",
"keras==2.2.2": "./tests/assets/Keras-2.2.2-py2.py3-none-any.whl",
"six>=1.9.0": "./tests/assets/six-1.13.0-py2.py3-none-any.whl",
"scipy>=0.14": "./tests/assets/scipy-1.2.2-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl",
"pyyaml": "./tests/assets/PyYAML-5.3-cp27-cp27m-macosx_10_14_x86_64.whl",
"numpy>=1.9.1": "./tests/assets/numpy-1.16.6-cp27-cp27m-macosx_10_9_x86_64.whl",
"keras-preprocessing==1.0.2": "./tests/assets/Keras_Preprocessing-1.0.2-py2.py3-none-any.whl",
"keras-preprocessing": "./tests/assets/Keras_Preprocessing-1.1.0-py2.py3-none-any.whl",
"keras-applications==1.0.4": "./tests/assets/Keras_Applications-1.0.4-py2.py3-none-any.whl",
"h5py": "./tests/assets/h5py-2.10.0-cp27-cp27m-macosx_10_6_intel.whl",
"pip>=7.1.0": "./tests/assets/pip-20.0.2-py2.py3-none-any.whl",
}
return wheelhouse[package]
def mock_get_available_versions(package, *args, **kwargs):
versions = {
"setuptools": ["44.0.0"],
"pkginfo": ["1.5.0.1"],
"packaging": ["20.0"],
"click": ["7.0"],
"anytree": ["2.7.3"],
"six": ["1.13.0"],
"wheel": ["0.33.6"],
"pyparsing": ["2.4.6"],
"requests": ["2.22.0"],
"urllib3": ["1.25.7"],
"idna": ["2.8"],
"chardet": ["3.0.4"],
"certifi": ["2019.11.28"],
"keras": ["2.2.2", "2.2.3", "2.2.4", "2.2.5", "2.3.0", "2.3.1"],
"scipy": ["1.2.2"],
"pyyaml": ["5.3"],
"numpy": ["1.16.6"],
"keras-preprocessing": ["1.0.0", "1.0.1", "1.0.2", "1.0.3", "1.0.4", "1.0.5", "1.0.6", "1.0.8", "1.0.9", "1.1.0"],
"keras-applications": ["1.0.0", "1.0.1", "1.0.2", "1.0.4", "1.0.5", "1.0.6", "1.0.7", "1.0.8"],
"h5py": ["2.10.0"],
"pip": ["20.0.2"],
}
return versions[package]
# fmt: on
def invoke_patched(func, arguments, monkeypatch):
def default_environment():
return {
"implementation_name": "cpython",
"implementation_version": "3.7.5",
"os_name": "posix",
"platform_machine": "x86_64",
"platform_release": "5.0.0-1027-azure",
"platform_system": "Linux",
"platform_version": "#29~18.04.1-Ubuntu SMP Mon Nov 25 21:18:57 UTC 2019",
"python_full_version": "3.7.5",
"platform_python_implementation": "CPython",
"python_version": "3.7",
"sys_platform": "linux",
}
monkeypatch.setattr(
pipgrip.pipper, "_download_wheel", mock_download_wheel,
)
monkeypatch.setattr(
pipgrip.pipper, "_get_available_versions", mock_get_available_versions,
)
monkeypatch.setattr(
pipgrip.pipper, "default_environment", default_environment,
)
runner = CliRunner()
return runner.invoke(main, arguments)
@pytest.mark.parametrize(
"arguments, expected",
[
(
["."],
[
".",
"anytree==2.7.3",
"six==1.13.0",
"click==7.0",
"packaging==20.0",
"pyparsing==2.4.6",
"pkginfo==1.5.0.1",
"setuptools==44.0.0",
"wheel==0.33.6",
"pip==20.0.2",
],
),
(
["requests==2.22.0"],
[
"requests==2.22.0",
"chardet==3.0.4",
"idna==2.8",
"urllib3==1.25.7",
"certifi==2019.11.28",
],
),
( # cyclic
["keras==2.2.2"],
[
"keras==2.2.2",
"h5py==2.10.0",
"numpy==1.16.6",
"six==1.13.0",
"keras-applications==1.0.4",
"keras-preprocessing==1.0.2",
"scipy==1.2.2",
"pyyaml==5.3",
],
),
(
["--tree", "keras==2.2.2"],
[ # generated on py2.7 - ipython - %paste a - print a
u"keras==2.2.2 (2.2.2)",
u"\u251c\u2500\u2500 h5py (2.10.0)",
u"\u2502 \u251c\u2500\u2500 numpy>=1.7 (1.16.6)",
u"\u2502 \u2514\u2500\u2500 six (1.13.0)",
u"\u251c\u2500\u2500 keras-applications==1.0.4 (1.0.4)",
u"\u2502 \u251c\u2500\u2500 h5py (2.10.0)",
u"\u2502 \u2502 \u251c\u2500\u2500 numpy>=1.7 (1.16.6)",
u"\u2502 \u2502 \u2514\u2500\u2500 six (1.13.0)",
u"\u2502 \u251c\u2500\u2500 keras>=2.1.6 (2.2.2, cyclic)",
u"\u2502 \u2514\u2500\u2500 numpy>=1.9.1 (1.16.6)",
u"\u251c\u2500\u2500 keras-preprocessing==1.0.2 (1.0.2)",
u"\u2502 \u251c\u2500\u2500 keras>=2.1.6 (2.2.2, cyclic)",
u"\u2502 \u251c\u2500\u2500 numpy>=1.9.1 (1.16.6)",
u"\u2502 \u251c\u2500\u2500 scipy>=0.14 (1.2.2)",
u"\u2502 \u2502 \u2514\u2500\u2500 numpy>=1.8.2 (1.16.6)",
u"\u2502 \u2514\u2500\u2500 six>=1.9.0 (1.13.0)",
u"\u251c\u2500\u2500 numpy>=1.9.1 (1.16.6)",
u"\u251c\u2500\u2500 pyyaml (5.3)",
u"\u251c\u2500\u2500 scipy>=0.14 (1.2.2)",
u"\u2502 \u2514\u2500\u2500 numpy>=1.8.2 (1.16.6)",
u"\u2514\u2500\u2500 six>=1.9.0 (1.13.0)",
],
),
(
["keras_preprocessing"],
["keras-preprocessing==1.1.0", "six==1.13.0", "numpy==1.16.6"],
),
(
["-r", "tests/test_reqs.txt"],
["keras-preprocessing==1.1.0", "six==1.13.0", "numpy==1.16.6"],
),
],
ids=(
"pipgrip pipgrip",
"requests",
"keras (cyclic)",
"--tree keras (cyclic)",
"keras_preprocessing (underscore)",
"-r",
),
)
def test_solutions(arguments, expected, monkeypatch):
result = invoke_patched(main, arguments, monkeypatch)
if result.exit_code:
raise result.exception
assert set(result.output.strip().split("\n")) == set(expected)
@pytest.mark.parametrize(
"arguments",
[
(["-h"]),
(["-v", "click"]),
(["-vv", "click"]),
(
[
"-vvv",
"--json",
"click",
"--index-url=https://pypi.org/simple",
"--extra-index-url=https://pypi.org/simple",
]
),
(["--no-cache-dir", "--lock", "--pipe", "click"]),
(
[
"--lock",
"--tree",
"--max-depth=1",
"--pre",
"--cache-dir=/tmp/abc",
"keras==2.2.2",
]
),
(["-r", "tests/test_reqs.txt"]),
(["-r", "tests/test_reqs.txt", "-r", "tests/test_reqs.txt"]),
(["urllib3==1.25.7", "urllib3<1.25.0|>1.25.0,<1.25.1|>1.25.1,<1.26,>=1.21.1"]),
# (["keras-preprocessing==1.0.2", "keras==2.2.2"]), # fix RecursionError
],
)
def test_correct_options(arguments, monkeypatch):
result = invoke_patched(main, arguments, monkeypatch)
if result.exit_code:
raise result.exception
@pytest.mark.parametrize(
"arguments",
[
(["--json", "--pipe", "click"]),
(["click", "--tree", "--max-depth=-2"]),
(["click", "--max-depth=1"]),
(["-e", "click"]),
(["--reverse-tree", "click"]),
(["click", "-r", "tests/test_reqs.txt"]),
(["../."]),
],
)
def test_incorrect_options(arguments, monkeypatch):
result = invoke_patched(main, arguments, monkeypatch)
if not result.exit_code:
raise RuntimeError("Unexpected result:\n{}".format(result.stdout))
def test_flatten():
a = {
("aiobotocore", "0.11.1"): {
("aiohttp", "3.6.2"): {
("async-timeout", "3.0.1"): {},
("attrs", "19.3.0"): {},
("chardet", "3.0.4"): {},
("multidict", "4.7.3"): {},
("yarl", "1.4.2"): {("idna", "2.8"): {}, ("multidict", "4.7.3"): {}},
},
("async-generator", "1.10"): {},
("awscli", "1.16.278"): {
("pyyaml", "5.1.2"): {},
("botocore", "1.13.14"): {
("docutils", "0.15.2"): {},
("jmespath", "0.9.4"): {},
("python-dateutil", "2.8.0"): {("six", "1.13.0"): {}},
("urllib3", "1.25.7"): {},
},
("colorama", "0.4.1"): {},
("docutils", "0.15.2"): {},
("rsa", "3.4.2"): {("pyasn1", "0.4.8"): {}},
("s3transfer", "0.2.1"): {
("botocore", "1.13.14"): {
("docutils", "0.15.2"): {},
("jmespath", "0.9.4"): {},
("python-dateutil", "2.8.0"): {("six", "1.13.0"): {}},
("urllib3", "1.25.7"): {},
}
},
},
("botocore", "1.13.14"): {
("docutils", "0.15.2"): {},
("jmespath", "0.9.4"): {},
("python-dateutil", "2.8.0"): {("six", "1.13.0"): {}},
("urllib3", "1.25.7"): {},
},
("wrapt", "1.11.2"): {},
}
}
assert flatten(a) == {
"aiobotocore": "0.11.1",
"aiohttp": "3.6.2",
"async-generator": "1.10",
"async-timeout": "3.0.1",
"attrs": "19.3.0",
"awscli": "1.16.278",
"botocore": "1.13.14",
"chardet": "3.0.4",
"colorama": "0.4.1",
"docutils": "0.15.2",
"idna": "2.8",
"jmespath": "0.9.4",
"multidict": "4.7.3",
"pyasn1": "0.4.8",
"python-dateutil": "2.8.0",
"pyyaml": "5.1.2",
"rsa": "3.4.2",
"s3transfer": "0.2.1",
"six": "1.13.0",
"urllib3": "1.25.7",
"wrapt": "1.11.2",
"yarl": "1.4.2",
}
| 36.965079
| 161
| 0.455514
|
bda21e067273d2e1c5db8dc65fdb916b20107941
| 111,879
|
py
|
Python
|
telethon/telegram_client.py
|
islam-200555/Telethon
|
85103bcf6de8024c902ede98f0b9bf0f7f47a0aa
|
[
"MIT"
] | null | null | null |
telethon/telegram_client.py
|
islam-200555/Telethon
|
85103bcf6de8024c902ede98f0b9bf0f7f47a0aa
|
[
"MIT"
] | null | null | null |
telethon/telegram_client.py
|
islam-200555/Telethon
|
85103bcf6de8024c902ede98f0b9bf0f7f47a0aa
|
[
"MIT"
] | null | null | null |
import asyncio
import getpass
import hashlib
import io
import itertools
import logging
import os
import re
import sys
import time
import warnings
from collections import UserList
from datetime import datetime, timedelta
from io import BytesIO
from mimetypes import guess_type
from async_generator import async_generator, yield_
from .crypto import CdnDecrypter
from .tl import TLObject
from .tl.custom import InputSizedFile
from .tl.functions.updates import GetDifferenceRequest
from .tl.functions.upload import (
SaveBigFilePartRequest, SaveFilePartRequest, GetFileRequest
)
from .tl.types.updates import (
DifferenceSlice, DifferenceEmpty, Difference, DifferenceTooLong
)
from .tl.types.upload import FileCdnRedirect
try:
import socks
except ImportError:
socks = None
try:
import hachoir
import hachoir.metadata
import hachoir.parser
except ImportError:
hachoir = None
from . import TelegramBareClient
from . import helpers, utils, events
from .errors import (
RPCError, UnauthorizedError, PhoneCodeEmptyError, PhoneCodeExpiredError,
PhoneCodeHashEmptyError, PhoneCodeInvalidError, LocationInvalidError,
SessionPasswordNeededError, FileMigrateError, PhoneNumberUnoccupiedError,
PhoneNumberOccupiedError, UsernameNotOccupiedError
)
from .network import ConnectionTcpFull
from .tl.custom import Draft, Dialog
from .tl.functions.account import (
GetPasswordRequest, UpdatePasswordSettingsRequest
)
from .tl.functions.auth import (
CheckPasswordRequest, LogOutRequest, SendCodeRequest, SignInRequest,
SignUpRequest, ResendCodeRequest, ImportBotAuthorizationRequest
)
from .tl.functions.contacts import (
GetContactsRequest, ResolveUsernameRequest
)
from .tl.functions.messages import (
GetDialogsRequest, GetHistoryRequest, SendMediaRequest,
SendMessageRequest, GetChatsRequest, GetAllDraftsRequest,
CheckChatInviteRequest, ReadMentionsRequest, SendMultiMediaRequest,
UploadMediaRequest, EditMessageRequest, GetFullChatRequest,
ForwardMessagesRequest, SearchRequest
)
from .tl.functions import channels
from .tl.functions import messages
from .tl.functions.users import (
GetUsersRequest
)
from .tl.functions.channels import (
GetChannelsRequest, GetFullChannelRequest, GetParticipantsRequest
)
from .tl.types import (
DocumentAttributeAudio, DocumentAttributeFilename,
InputMediaUploadedDocument, InputMediaUploadedPhoto, InputPeerEmpty,
Message, MessageMediaContact, MessageMediaDocument, MessageMediaPhoto,
InputUserSelf, UserProfilePhoto, ChatPhoto, UpdateMessageID,
UpdateNewChannelMessage, UpdateNewMessage, UpdateShortSentMessage,
PeerUser, InputPeerUser, InputPeerChat, InputPeerChannel, MessageEmpty,
ChatInvite, ChatInviteAlready, PeerChannel, Photo, InputPeerSelf,
InputSingleMedia, InputMediaPhoto, InputPhoto, InputFile, InputFileBig,
InputDocument, InputMediaDocument, Document, MessageEntityTextUrl,
InputMessageEntityMentionName, DocumentAttributeVideo,
UpdateEditMessage, UpdateEditChannelMessage, UpdateShort, Updates,
MessageMediaWebPage, ChannelParticipantsSearch, PhotoSize, PhotoCachedSize,
PhotoSizeEmpty, MessageService, ChatParticipants, User, WebPage,
ChannelParticipantsBanned, ChannelParticipantsKicked,
InputMessagesFilterEmpty
)
from .tl.types.messages import DialogsSlice
from .tl.types.account import PasswordInputSettings, NoPassword
from .extensions import markdown, html
__log__ = logging.getLogger(__name__)
class TelegramClient(TelegramBareClient):
"""
Initializes the Telegram client with the specified API ID and Hash.
Args:
session (`str` | `telethon.sessions.abstract.Session`, `None`):
The file name of the session file to be used if a string is
given (it may be a full path), or the Session instance to be
used otherwise. If it's ``None``, the session will not be saved,
and you should call :meth:`.log_out()` when you're done.
Note that if you pass a string it will be a file in the current
working directory, although you can also pass absolute paths.
The session file contains enough information for you to login
without re-sending the code, so if you have to enter the code
more than once, maybe you're changing the working directory,
renaming or removing the file, or using random names.
api_id (`int` | `str`):
The API ID you obtained from https://my.telegram.org.
api_hash (`str`):
The API hash you obtained from https://my.telegram.org.
connection (`telethon.network.connection.common.Connection`, optional):
The connection instance to be used when creating a new connection
to the servers. If it's a type, the `proxy` argument will be used.
Defaults to `telethon.network.connection.tcpfull.ConnectionTcpFull`.
use_ipv6 (`bool`, optional):
Whether to connect to the servers through IPv6 or not.
By default this is ``False`` as IPv6 support is not
too widespread yet.
proxy (`tuple` | `dict`, optional):
A tuple consisting of ``(socks.SOCKS5, 'host', port)``.
See https://github.com/Anorov/PySocks#usage-1 for more.
update_workers (`int`, optional):
If specified, represents how many extra threads should
be spawned to handle incoming updates, and updates will
be kept in memory until they are processed. Note that
you must set this to at least ``0`` if you want to be
able to process updates through :meth:`updates.poll()`.
timeout (`int` | `float` | `timedelta`, optional):
The timeout to be used when receiving responses from
the network. Defaults to 5 seconds.
spawn_read_thread (`bool`, optional):
Whether to use an extra background thread or not. Defaults
to ``True`` so receiving items from the network happens
instantly, as soon as they arrive. Can still be disabled
if you want to run the library without any additional thread.
report_errors (`bool`, optional):
Whether to report RPC errors or not. Defaults to ``True``,
see :ref:`api-status` for more information.
Kwargs:
Some extra parameters are required when establishing the first
connection. These are (along with their default values):
.. code-block:: python
device_model = platform.node()
system_version = platform.system()
app_version = TelegramClient.__version__
lang_code = 'en'
system_lang_code = lang_code
"""
# region Initialization
def __init__(self, session, api_id, api_hash,
*,
connection=ConnectionTcpFull,
use_ipv6=False,
proxy=None,
timeout=timedelta(seconds=10),
loop=None,
report_errors=True,
**kwargs):
super().__init__(
session, api_id, api_hash,
connection=connection,
use_ipv6=use_ipv6,
proxy=proxy,
timeout=timeout,
loop=loop,
report_errors=report_errors,
**kwargs
)
self._event_builders = []
self._events_pending_resolve = []
# Some fields to easy signing in. Let {phone: hash} be
# a dictionary because the user may change their mind.
self._phone_code_hash = {}
self._phone = None
# Sometimes we need to know who we are, cache the self peer
self._self_input_peer = None
# endregion
# region Telegram requests functions
# region Authorization requests
async def send_code_request(self, phone, force_sms=False):
"""
Sends a code request to the specified phone number.
Args:
phone (`str` | `int`):
The phone to which the code will be sent.
force_sms (`bool`, optional):
Whether to force sending as SMS.
Returns:
An instance of :tl:`SentCode`.
"""
phone = utils.parse_phone(phone) or self._phone
phone_hash = self._phone_code_hash.get(phone)
if not phone_hash:
result = await self(SendCodeRequest(phone, self.api_id, self.api_hash))
self._phone_code_hash[phone] = phone_hash = result.phone_code_hash
else:
force_sms = True
self._phone = phone
if force_sms:
result = await self(ResendCodeRequest(phone, phone_hash))
self._phone_code_hash[phone] = result.phone_code_hash
return result
async def start(self,
phone=lambda: input('Please enter your phone: '),
password=lambda: getpass.getpass(
'Please enter your password: '),
bot_token=None, force_sms=False, code_callback=None,
first_name='New User', last_name=''):
"""
Convenience method to interactively connect and sign in if required,
also taking into consideration that 2FA may be enabled in the account.
Example usage:
>>> import asyncio
>>> rc = asyncio.get_event_loop().run_until_complete
>>> client = rc(TelegramClient(session, api_id, api_hash).start(phone))
Please enter the code you received: 12345
Please enter your password: *******
(You are now logged in)
Args:
phone (`str` | `int` | `callable`):
The phone (or callable without arguments to get it)
to which the code will be sent.
password (`callable`, optional):
The password for 2 Factor Authentication (2FA).
This is only required if it is enabled in your account.
bot_token (`str`):
Bot Token obtained by `@BotFather <https://t.me/BotFather>`_
to log in as a bot. Cannot be specified with ``phone`` (only
one of either allowed).
force_sms (`bool`, optional):
Whether to force sending the code request as SMS.
This only makes sense when signing in with a `phone`.
code_callback (`callable`, optional):
A callable that will be used to retrieve the Telegram
login code. Defaults to `input()`.
first_name (`str`, optional):
The first name to be used if signing up. This has no
effect if the account already exists and you sign in.
last_name (`str`, optional):
Similar to the first name, but for the last. Optional.
Returns:
This `TelegramClient`, so initialization
can be chained with ``.start()``.
"""
if code_callback is None:
def code_callback():
return input('Please enter the code you received: ')
elif not callable(code_callback):
raise ValueError(
'The code_callback parameter needs to be a callable '
'function that returns the code you received by Telegram.'
)
if not phone and not bot_token:
raise ValueError('No phone number or bot token provided.')
if phone and bot_token and not callable(phone):
raise ValueError('Both a phone and a bot token provided, '
'must only provide one of either')
if not self.is_connected():
await self.connect()
if self.is_user_authorized():
await self._check_events_pending_resolve()
return self
if bot_token:
await self.sign_in(bot_token=bot_token)
return self
# Turn the callable into a valid phone number
while callable(phone):
phone = utils.parse_phone(phone()) or phone
me = None
attempts = 0
max_attempts = 3
two_step_detected = False
sent_code = await self.send_code_request(phone, force_sms=force_sms)
sign_up = not sent_code.phone_registered
while attempts < max_attempts:
try:
if sign_up:
me = await self.sign_up(code_callback(), first_name, last_name)
else:
# Raises SessionPasswordNeededError if 2FA enabled
me = await self.sign_in(phone, code_callback())
break
except SessionPasswordNeededError:
two_step_detected = True
break
except PhoneNumberOccupiedError:
sign_up = False
except PhoneNumberUnoccupiedError:
sign_up = True
except (PhoneCodeEmptyError, PhoneCodeExpiredError,
PhoneCodeHashEmptyError, PhoneCodeInvalidError):
print('Invalid code. Please try again.', file=sys.stderr)
attempts += 1
else:
raise RuntimeError(
'{} consecutive sign-in attempts failed. Aborting'
.format(max_attempts)
)
if two_step_detected:
if not password:
raise ValueError(
"Two-step verification is enabled for this account. "
"Please provide the 'password' argument to 'start()'."
)
# TODO If callable given make it retry on invalid
if callable(password):
password = password()
me = await self.sign_in(phone=phone, password=password)
# We won't reach here if any step failed (exit by exception)
signed, name = 'Signed in successfully as', utils.get_display_name(me)
try:
print(signed, name)
except UnicodeEncodeError:
# Some terminals don't support certain characters
print(signed, name.encode('utf-8', errors='ignore')
.decode('ascii', errors='ignore'))
await self._check_events_pending_resolve()
return self
async def sign_in(self, phone=None, code=None,
password=None, bot_token=None, phone_code_hash=None):
"""
Starts or completes the sign in process with the given phone number
or code that Telegram sent.
Args:
phone (`str` | `int`):
The phone to send the code to if no code was provided,
or to override the phone that was previously used with
these requests.
code (`str` | `int`):
The code that Telegram sent. Note that if you have sent this
code through the application itself it will immediately
expire. If you want to send the code, obfuscate it somehow.
If you're not doing any of this you can ignore this note.
password (`str`):
2FA password, should be used if a previous call raised
SessionPasswordNeededError.
bot_token (`str`):
Used to sign in as a bot. Not all requests will be available.
This should be the hash the @BotFather gave you.
phone_code_hash (`str`):
The hash returned by .send_code_request. This can be set to None
to use the last hash known.
Returns:
The signed in user, or the information about
:meth:`send_code_request`.
"""
if self.is_user_authorized():
await self._check_events_pending_resolve()
return self.get_me()
if phone and not code and not password:
return await self.send_code_request(phone)
elif code:
phone = utils.parse_phone(phone) or self._phone
phone_code_hash = \
phone_code_hash or self._phone_code_hash.get(phone, None)
if not phone:
raise ValueError(
'Please make sure to call send_code_request first.'
)
if not phone_code_hash:
raise ValueError('You also need to provide a phone_code_hash.')
# May raise PhoneCodeEmptyError, PhoneCodeExpiredError,
# PhoneCodeHashEmptyError or PhoneCodeInvalidError.
result = await self(SignInRequest(phone, phone_code_hash, str(code)))
elif password:
salt = (await self(GetPasswordRequest())).current_salt
result = await self(CheckPasswordRequest(
helpers.get_password_hash(password, salt)
))
elif bot_token:
result = await self(ImportBotAuthorizationRequest(
flags=0, bot_auth_token=bot_token,
api_id=self.api_id, api_hash=self.api_hash
))
else:
raise ValueError(
'You must provide a phone and a code the first time, '
'and a password only if an RPCError was raised before.'
)
self._self_input_peer = utils.get_input_peer(
result.user, allow_self=False
)
await self._set_connected_and_authorized()
return result.user
async def sign_up(self, code, first_name, last_name=''):
"""
Signs up to Telegram if you don't have an account yet.
You must call .send_code_request(phone) first.
Args:
code (`str` | `int`):
The code sent by Telegram
first_name (`str`):
The first name to be used by the new account.
last_name (`str`, optional):
Optional last name.
Returns:
The new created :tl:`User`.
"""
if self.is_user_authorized():
await self._check_events_pending_resolve()
return await self.get_me()
result = await self(SignUpRequest(
phone_number=self._phone,
phone_code_hash=self._phone_code_hash.get(self._phone, ''),
phone_code=str(code),
first_name=first_name,
last_name=last_name
))
self._self_input_peer = utils.get_input_peer(
result.user, allow_self=False
)
await self._set_connected_and_authorized()
return result.user
async def log_out(self):
"""
Logs out Telegram and deletes the current ``*.session`` file.
Returns:
``True`` if the operation was successful.
"""
try:
await self(LogOutRequest())
except RPCError:
return False
self.disconnect()
await self.session.delete()
self._authorized = False
return True
async def get_me(self, input_peer=False):
"""
Gets "me" (the self user) which is currently authenticated,
or None if the request fails (hence, not authenticated).
Args:
input_peer (`bool`, optional):
Whether to return the :tl:`InputPeerUser` version or the normal
:tl:`User`. This can be useful if you just need to know the ID
of yourself.
Returns:
Your own :tl:`User`.
"""
if input_peer and self._self_input_peer:
return self._self_input_peer
try:
me = (await self(GetUsersRequest([InputUserSelf()])))[0]
if not self._self_input_peer:
self._self_input_peer = utils.get_input_peer(
me, allow_self=False
)
return self._self_input_peer if input_peer else me
except UnauthorizedError:
return None
# endregion
# region Dialogs ("chats") requests
@async_generator
async def iter_dialogs(self, limit=None, offset_date=None, offset_id=0,
offset_peer=InputPeerEmpty(), _total=None):
"""
Returns an iterator over the dialogs, yielding 'limit' at most.
Dialogs are the open "chats" or conversations with other people,
groups you have joined, or channels you are subscribed to.
Args:
limit (`int` | `None`):
How many dialogs to be retrieved as maximum. Can be set to
``None`` to retrieve all dialogs. Note that this may take
whole minutes if you have hundreds of dialogs, as Telegram
will tell the library to slow down through a
``FloodWaitError``.
offset_date (`datetime`, optional):
The offset date to be used.
offset_id (`int`, optional):
The message ID to be used as an offset.
offset_peer (:tl:`InputPeer`, optional):
The peer to be used as an offset.
_total (`list`, optional):
A single-item list to pass the total parameter by reference.
Yields:
Instances of `telethon.tl.custom.dialog.Dialog`.
"""
limit = float('inf') if limit is None else int(limit)
if limit == 0:
if not _total:
return
# Special case, get a single dialog and determine count
dialogs = await self(GetDialogsRequest(
offset_date=offset_date,
offset_id=offset_id,
offset_peer=offset_peer,
limit=1
))
_total[0] = getattr(dialogs, 'count', len(dialogs.dialogs))
return
seen = set()
req = GetDialogsRequest(
offset_date=offset_date,
offset_id=offset_id,
offset_peer=offset_peer,
limit=0
)
while len(seen) < limit:
req.limit = min(limit - len(seen), 100)
r = await self(req)
if _total:
_total[0] = getattr(r, 'count', len(r.dialogs))
messages = {m.id: m for m in r.messages}
entities = {utils.get_peer_id(x): x
for x in itertools.chain(r.users, r.chats)}
# Happens when there are pinned dialogs
if len(r.dialogs) > limit:
r.dialogs = r.dialogs[:limit]
for d in r.dialogs:
peer_id = utils.get_peer_id(d.peer)
if peer_id not in seen:
seen.add(peer_id)
await yield_(Dialog(self, d, entities, messages))
if len(r.dialogs) < req.limit or not isinstance(r, DialogsSlice):
# Less than we requested means we reached the end, or
# we didn't get a DialogsSlice which means we got all.
break
req.offset_date = r.messages[-1].date
req.offset_peer = entities[utils.get_peer_id(r.dialogs[-1].peer)]
req.offset_id = r.messages[-1].id
req.exclude_pinned = True
async def get_dialogs(self, *args, **kwargs):
"""
Same as :meth:`iter_dialogs`, but returns a list instead
with an additional ``.total`` attribute on the list.
"""
total = [0]
kwargs['_total'] = total
dialogs = UserList()
async for dialog in self.iter_dialogs(*args, **kwargs):
dialogs.append(dialog)
dialogs.total = total[0]
return dialogs
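# Usage sketch (added for illustration; assumes an already-started client):
#
#     async for dialog in client.iter_dialogs(limit=10):
#         print(dialog.name)
#
#     dialogs = await client.get_dialogs(limit=10)
#     print(dialogs.total, 'dialogs in total')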
@async_generator
async def iter_drafts(self): # TODO: Ability to provide a `filter`
"""
Iterator over all open draft messages.
Instances of `telethon.tl.custom.draft.Draft` are yielded.
You can call `telethon.tl.custom.draft.Draft.set_message`
to change the message or `telethon.tl.custom.draft.Draft.delete`
among other things.
"""
for update in (await self(GetAllDraftsRequest())).updates:
await yield_(Draft._from_update(self, update))
async def get_drafts(self):
"""
Same as :meth:`iter_drafts`, but returns a list instead.
"""
return list(await self.iter_drafts())
@staticmethod
def _get_response_message(request, result):
"""
Extracts the response message, given a request and an Updates result.
The request may also be the ID of the message to match.
"""
# Telegram seems to send updateMessageID first, then updateNewMessage,
# however let's not rely on that just in case.
if isinstance(request, int):
msg_id = request
else:
msg_id = None
for update in result.updates:
if isinstance(update, UpdateMessageID):
if update.random_id == request.random_id:
msg_id = update.id
break
if isinstance(result, UpdateShort):
updates = [result.update]
elif isinstance(result, Updates):
updates = result.updates
else:
return
for update in updates:
if isinstance(update, (UpdateNewChannelMessage, UpdateNewMessage)):
if update.message.id == msg_id:
return update.message
elif (isinstance(update, UpdateEditMessage) and
not isinstance(request.peer, InputPeerChannel)):
if request.id == update.message.id:
return update.message
elif (isinstance(update, UpdateEditChannelMessage) and
utils.get_peer_id(request.peer) ==
utils.get_peer_id(update.message.to_id)):
if request.id == update.message.id:
return update.message
async def _parse_message_text(self, message, parse_mode):
"""
Returns a (parsed message, entities) tuple depending on ``parse_mode``.
"""
if not parse_mode:
return message, []
if isinstance(parse_mode, str):
parse_mode = parse_mode.lower()
if parse_mode in {'md', 'markdown'}:
message, msg_entities = markdown.parse(message)
elif parse_mode.startswith('htm'):
message, msg_entities = html.parse(message)
else:
raise ValueError('Unknown parsing mode: {}'.format(parse_mode))
elif callable(parse_mode):
message, msg_entities = parse_mode(message)
else:
raise TypeError('Invalid parsing mode type: {}'.format(parse_mode))
for i, e in enumerate(msg_entities):
if isinstance(e, MessageEntityTextUrl):
m = re.match(r'^@|\+|tg://user\?id=(\d+)', e.url)
if m:
try:
msg_entities[i] = InputMessageEntityMentionName(
e.offset, e.length, await self.get_input_entity(
int(m.group(1)) if m.group(1) else e.url
)
)
except (ValueError, TypeError):
# Make no replacement
pass
return message, msg_entities
async def send_message(self, entity, message='', reply_to=None,
parse_mode='md', link_preview=True, file=None,
force_document=False, clear_draft=False):
"""
Sends the given message to the specified entity (user/chat/channel).
The default parse mode is the same as the official applications
(a custom flavour of markdown). ``**bold**, `code` or __italic__``
are available. In addition you can send ``[links](https://example.com)``
and ``[mentions](@username)`` (or using IDs like in the Bot API:
``[mention](tg://user?id=123456789)``) and ``pre`` blocks with three
backticks.
Sending a ``/start`` command with a parameter (like ``?start=data``)
is also done through this method. Simply send ``'/start data'`` to
the bot.
Args:
entity (`entity`):
To who will it be sent.
message (`str` | :tl:`Message`):
The message to be sent, or another message object to resend.
The maximum length for a message is 35,000 bytes or 4,096
characters. Longer messages will not be sliced automatically,
and you should slice them manually if the text to send is
longer than said length.
reply_to (`int` | :tl:`Message`, optional):
Whether to reply to a message or not. If an integer is provided,
it should be the ID of the message that it should reply to.
parse_mode (`str`, optional):
Can be 'md' or 'markdown' for markdown-like parsing (default),
or 'htm' or 'html' for HTML-like parsing. If ``None`` or any
other false-y value is provided, the message will be sent with
no formatting.
If a ``callable`` is passed, it should be a function accepting
a `str` as an input and return as output a tuple consisting
of ``(parsed message str, [MessageEntity instances])``.
See :tl:`MessageEntity` for allowed message entities.
link_preview (`bool`, optional):
Should the link preview be shown?
file (`file`, optional):
Sends a message with a file attached (e.g. a photo,
video, audio or document). The ``message`` may be empty.
force_document (`bool`, optional):
Whether to send the given file as a document or not.
clear_draft (`bool`, optional):
Whether the existing draft should be cleared or not.
Has no effect when sending a file.
Returns:
The sent :tl:`Message`.
"""
if file is not None:
return await self.send_file(
entity, file, caption=message, reply_to=reply_to,
parse_mode=parse_mode, force_document=force_document
)
elif not message:
raise ValueError(
'The message cannot be empty unless a file is provided'
)
entity = await self.get_input_entity(entity)
if isinstance(message, Message):
if (message.media
and not isinstance(message.media, MessageMediaWebPage)):
return await self.send_file(entity, message.media,
caption=message.message,
entities=message.entities)
if reply_to is not None:
reply_id = self._get_message_id(reply_to)
elif utils.get_peer_id(entity) == utils.get_peer_id(message.to_id):
reply_id = message.reply_to_msg_id
else:
reply_id = None
request = SendMessageRequest(
peer=entity,
message=message.message or '',
silent=message.silent,
reply_to_msg_id=reply_id,
reply_markup=message.reply_markup,
entities=message.entities,
no_webpage=not isinstance(message.media, MessageMediaWebPage),
clear_draft=clear_draft
)
message = message.message
else:
message, msg_ent =\
await self._parse_message_text(message, parse_mode)
request = SendMessageRequest(
peer=entity,
message=message,
entities=msg_ent,
no_webpage=not link_preview,
reply_to_msg_id=self._get_message_id(reply_to),
clear_draft=clear_draft
)
result = await self(request)
if isinstance(result, UpdateShortSentMessage):
return Message(
id=result.id,
to_id=entity,
message=message,
date=result.date,
out=result.out,
media=result.media,
entities=result.entities
)
return self._get_response_message(request, result)
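# Usage sketch (added for illustration; 'username' is a placeholder entity):
#
#     msg = await client.send_message('username', 'Hello, **world**!')
#     await client.send_message('username', 'and a reply', reply_to=msg.id)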
async def forward_messages(self, entity, messages, from_peer=None):
"""
Forwards the given message(s) to the specified entity.
Args:
entity (`entity`):
To which entity the message(s) will be forwarded.
messages (`list` | `int` | :tl:`Message`):
The message(s) to forward, or their integer IDs.
from_peer (`entity`):
If the given messages are integer IDs and not instances
of the ``Message`` class, this *must* be specified in
order for the forward to work.
Returns:
The list of forwarded :tl:`Message`, or a single one if a list
wasn't provided as input.
"""
single = not utils.is_list_like(messages)
if single:
messages = (messages,)
if not from_peer:
try:
# On private chats (to_id = PeerUser), if the message is
# not outgoing, we actually need to use "from_id" to get
# the conversation on which the message was sent.
from_peer = next(
m.from_id if not m.out and isinstance(m.to_id, PeerUser)
else m.to_id for m in messages if isinstance(m, Message)
)
except StopIteration:
raise ValueError(
'from_chat must be given if integer IDs are used'
)
req = ForwardMessagesRequest(
from_peer=from_peer,
id=[m if isinstance(m, int) else m.id for m in messages],
to_peer=entity
)
result = await self(req)
random_to_id = {}
id_to_message = {}
for update in result.updates:
if isinstance(update, UpdateMessageID):
random_to_id[update.random_id] = update.id
elif isinstance(update, (UpdateNewMessage, UpdateNewChannelMessage)):
id_to_message[update.message.id] = update.message
result = [id_to_message[random_to_id[rnd]] for rnd in req.random_id]
return result[0] if single else result
async def edit_message(self, entity, message=None, text=None,
parse_mode='md', link_preview=True):
"""
Edits the given message ID (to change its contents or disable preview).
Args:
entity (`entity` | :tl:`Message`):
From which chat to edit the message. This can also be
the message to be edited, and the entity will be inferred
from it, so the next parameter will be assumed to be the
message text.
message (`int` | :tl:`Message` | `str`):
The ID of the message (or :tl:`Message` itself) to be edited.
If the `entity` was a :tl:`Message`, then this message will be
treated as the new text.
text (`str`, optional):
The new text of the message. Does nothing if the `entity`
was a :tl:`Message`.
parse_mode (`str`, optional):
Can be 'md' or 'markdown' for markdown-like parsing (default),
or 'htm' or 'html' for HTML-like parsing. If ``None`` or any
other false-y value is provided, the message will be sent with
no formatting.
link_preview (`bool`, optional):
Should the link preview be shown?
Examples:
>>> import asyncio
>>> async def main():
... client = await TelegramClient(...).start()
... message = await client.send_message('username', 'hello')
...
... await client.edit_message('username', message, 'hello!')
... # or
... await client.edit_message('username', message.id, 'Hello')
... # or
... await client.edit_message(message, 'Hello!')
...
>>> loop = asyncio.get_event_loop()
>>> loop.run_until_complete(main())
Raises:
``MessageAuthorRequiredError`` if you're not the author of the
message but tried editing it anyway.
``MessageNotModifiedError`` if the contents of the message were
not modified at all.
Returns:
The edited :tl:`Message`.
"""
if isinstance(entity, Message):
text = message # Shift the parameters to the right
message = entity
entity = entity.to_id
text, msg_entities = await self._parse_message_text(text, parse_mode)
request = EditMessageRequest(
peer=await self.get_input_entity(entity),
id=self._get_message_id(message),
message=text,
no_webpage=not link_preview,
entities=msg_entities
)
result = await self(request)
return self._get_response_message(request, result)
async def delete_messages(self, entity, message_ids, revoke=True):
"""
Deletes a message from a chat, optionally "for everyone".
Args:
entity (`entity`):
From who the message will be deleted. This can actually
be ``None`` for normal chats, but **must** be present
for channels and megagroups.
message_ids (`list` | `int` | :tl:`Message`):
The IDs (or ID) or messages to be deleted.
revoke (`bool`, optional):
Whether the message should be deleted for everyone or not.
                Defaults to ``True``, which deletes the message for everyone
                (the opposite of the official clients' default behaviour).
This has no effect on channels or megagroups.
Returns:
The :tl:`AffectedMessages`.
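        Example:
            A minimal sketch; the chat name and the IDs 123 and 456 are
            placeholders (run inside a coroutine):
            >>> await client.delete_messages('some chat', [123, 456])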
"""
if not utils.is_list_like(message_ids):
message_ids = (message_ids,)
message_ids = [
m.id if isinstance(m, (Message, MessageService, MessageEmpty))
else int(m) for m in message_ids
]
if entity is None:
return await self(messages.DeleteMessagesRequest(message_ids, revoke=revoke))
entity = await self.get_input_entity(entity)
if isinstance(entity, InputPeerChannel):
return await self(channels.DeleteMessagesRequest(entity, message_ids))
else:
return await self(messages.DeleteMessagesRequest(message_ids, revoke=revoke))
@async_generator
async def iter_messages(self, entity, limit=20, offset_date=None,
offset_id=0, max_id=0, min_id=0, add_offset=0,
search=None, filter=None, from_user=None,
batch_size=100, wait_time=None, ids=None,
_total=None):
"""
Iterator over the message history for the specified entity.
If either `search`, `filter` or `from_user` are provided,
:tl:`messages.Search` will be used instead of :tl:`messages.getHistory`.
Args:
entity (`entity`):
The entity from whom to retrieve the message history.
limit (`int` | `None`, optional):
                Number of messages to be retrieved. Due to limitations with
                the API, retrieving more than 3000 messages will take longer
                than half a minute (or even more, based on previous calls).
The limit may also be ``None``, which would eventually return
the whole history.
offset_date (`datetime`):
Offset date (messages *previous* to this date will be
retrieved). Exclusive.
offset_id (`int`):
Offset message ID (only messages *previous* to the given
ID will be retrieved). Exclusive.
max_id (`int`):
All the messages with a higher (newer) ID or equal to this will
be excluded.
min_id (`int`):
All the messages with a lower (older) ID or equal to this will
be excluded.
add_offset (`int`):
Additional message offset (all of the specified offsets +
this offset = older messages).
search (`str`):
The string to be used as a search query.
filter (:tl:`MessagesFilter` | `type`):
The filter to use when returning messages. For instance,
:tl:`InputMessagesFilterPhotos` would yield only messages
containing photos.
from_user (`entity`):
Only messages from this user will be returned.
batch_size (`int`):
Messages will be returned in chunks of this size (100 is
the maximum). While it makes no sense to modify this value,
you are still free to do so.
wait_time (`int`):
Wait time between different :tl:`GetHistoryRequest`. Use this
parameter to avoid hitting the ``FloodWaitError`` as needed.
If left to ``None``, it will default to 1 second only if
the limit is higher than 3000.
ids (`int`, `list`):
A single integer ID (or several IDs) for the message that
should be returned. This parameter takes precedence over
the rest (which will be ignored if this is set). This can
for instance be used to get the message with ID 123 from
a channel. Note that if the message doesn't exist, ``None``
will appear in its place, so that zipping the list of IDs
with the messages can match one-to-one.
_total (`list`, optional):
A single-item list to pass the total parameter by reference.
Yields:
Instances of :tl:`Message` with extra attributes:
* ``.sender`` = entity of the sender.
* ``.fwd_from.sender`` = if fwd_from, who sent it originally.
* ``.fwd_from.channel`` = if fwd_from, original channel.
* ``.to`` = entity to which the message was sent.
Notes:
Telegram's flood wait limit for :tl:`GetHistoryRequest` seems to
be around 30 seconds per 3000 messages, therefore a sleep of 1
second is the default for this limit (or above). You may need
            a higher limit, so you're free to set the ``batch_size`` that
you think may be good.
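        Example:
            A short sketch with a placeholder chat name, assuming an
            already connected ``client`` (run inside a coroutine):
            >>> async for message in client.iter_messages('some chat', limit=10):
            ...     print(message.id, message.message)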
"""
entity = await self.get_input_entity(entity)
if ids:
if not utils.is_list_like(ids):
ids = (ids,)
async for x in self._iter_ids(entity, ids, total=_total):
await yield_(x)
return
# Telegram doesn't like min_id/max_id. If these IDs are low enough
# (starting from last_id - 100), the request will return nothing.
#
# We can emulate their behaviour locally by setting offset = max_id
# and simply stopping once we hit a message with ID <= min_id.
offset_id = max(offset_id, max_id)
if offset_id and min_id:
if offset_id - min_id <= 1:
return
limit = float('inf') if limit is None else int(limit)
if search is not None or filter or from_user:
if filter is None:
filter = InputMessagesFilterEmpty()
request = SearchRequest(
peer=entity,
q=search or '',
filter=filter() if isinstance(filter, type) else filter,
min_date=None,
max_date=offset_date,
offset_id=offset_id,
add_offset=add_offset,
limit=1,
max_id=0,
min_id=0,
hash=0,
                from_id=(await self.get_input_entity(from_user)
                         if from_user else None)
)
else:
request = GetHistoryRequest(
peer=entity,
limit=1,
offset_date=offset_date,
offset_id=offset_id,
min_id=0,
max_id=0,
add_offset=add_offset,
hash=0
)
if limit == 0:
if not _total:
return
# No messages, but we still need to know the total message count
result = await self(request)
_total[0] = getattr(result, 'count', len(result.messages))
return
if wait_time is None:
wait_time = 1 if limit > 3000 else 0
have = 0
last_id = float('inf')
batch_size = min(max(batch_size, 1), 100)
while have < limit:
start = time.time()
# Telegram has a hard limit of 100
request.limit = min(limit - have, batch_size)
r = await self(request)
if _total:
_total[0] = getattr(r, 'count', len(r.messages))
entities = {utils.get_peer_id(x): x
for x in itertools.chain(r.users, r.chats)}
for message in r.messages:
if message.id <= min_id:
return
if isinstance(message, MessageEmpty) or message.id >= last_id:
continue
                # There have been reports that on bad connections this method
# was returning duplicated IDs sometimes. Using ``last_id``
# is an attempt to avoid these duplicates, since the message
# IDs are returned in descending order.
last_id = message.id
self._make_message_friendly(message, entities)
await yield_(message)
have += 1
if len(r.messages) < request.limit:
break
request.offset_id = r.messages[-1].id
if isinstance(request, GetHistoryRequest):
request.offset_date = r.messages[-1].date
else:
request.max_date = r.messages[-1].date
await asyncio.sleep(max(wait_time - (time.time() - start), 0))
@staticmethod
def _make_message_friendly(message, entities):
"""
Add a few extra attributes to the :tl:`Message` to be friendlier.
        Every message gets both ``message`` and ``action`` attributes
        (``None`` when not present), so service messages and normal
        messages can be handled uniformly.
"""
# TODO Create an actual friendlier class
message.message = getattr(message, 'message', None)
message.action = getattr(message, 'action', None)
message.to = entities[utils.get_peer_id(message.to_id)]
message.sender = (
None if not message.from_id else
entities[utils.get_peer_id(message.from_id)]
)
if getattr(message, 'fwd_from', None):
message.fwd_from.sender = (
None if not message.fwd_from.from_id else
entities[utils.get_peer_id(message.fwd_from.from_id)]
)
message.fwd_from.channel = (
None if not message.fwd_from.channel_id else
entities[utils.get_peer_id(
PeerChannel(message.fwd_from.channel_id)
)]
)
@async_generator
async def _iter_ids(self, entity, ids, total):
"""
Special case for `iter_messages` when it should only fetch some IDs.
"""
if total:
total[0] = len(ids)
if isinstance(entity, InputPeerChannel):
r = await self(channels.GetMessagesRequest(entity, ids))
else:
r = await self(messages.GetMessagesRequest(ids))
entities = {utils.get_peer_id(x): x
for x in itertools.chain(r.users, r.chats)}
# Telegram seems to return the messages in the order in which
# we asked them for, so we don't need to check it ourselves.
for message in r.messages:
if isinstance(message, MessageEmpty):
await yield_(None)
else:
self._make_message_friendly(message, entities)
await yield_(message)
async def get_messages(self, *args, **kwargs):
"""
Same as :meth:`iter_messages`, but returns a list instead
with an additional ``.total`` attribute on the list.
If the `limit` is not set, it will be 1 by default unless both
`min_id` **and** `max_id` are set (as *named* arguments), in
which case the entire range will be returned.
        This is because any fixed integer limit would be rather arbitrary:
        it's common to want just one message, but when a range is specified
        it makes sense to return that range in its entirety.
If `ids` is present in the *named* arguments and is not a list,
a single :tl:`Message` will be returned for convenience instead
of a list.
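        Example:
            A minimal sketch with a placeholder chat name, assuming the
            chat has at least one message (run inside a coroutine):
            >>> msgs = await client.get_messages('some chat')  # limit=1
            >>> print(msgs.total, msgs[0].message)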
"""
total = [0]
kwargs['_total'] = total
if len(args) == 1 and 'limit' not in kwargs:
if 'min_id' in kwargs and 'max_id' in kwargs:
kwargs['limit'] = None
else:
kwargs['limit'] = 1
msgs = UserList()
async for msg in self.iter_messages(*args, **kwargs):
msgs.append(msg)
msgs.total = total[0]
if 'ids' in kwargs and not utils.is_list_like(kwargs['ids']):
return msgs[0]
return msgs
async def get_message_history(self, *args, **kwargs):
"""Deprecated, see :meth:`get_messages`."""
warnings.warn(
'get_message_history is deprecated, use get_messages instead'
)
return await self.get_messages(*args, **kwargs)
async def send_read_acknowledge(self, entity, message=None, max_id=None,
clear_mentions=False):
"""
Sends a "read acknowledge" (i.e., notifying the given peer that we've
read their messages, also known as the "double check").
This effectively marks a message as read (or more than one) in the
given conversation.
Args:
entity (`entity`):
The chat where these messages are located.
message (`list` | :tl:`Message`):
Either a list of messages or a single message.
max_id (`int`):
                Overrides ``message``; the acknowledgement will be sent
                up to this message ID instead.
clear_mentions (`bool`):
Whether the mention badge should be cleared (so that
there are no more mentions) or not for the given entity.
If no message is provided, this will be the only action
taken.
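        Example:
            A minimal sketch with a placeholder chat name (run inside
            a coroutine with an already connected ``client``):
            >>> msgs = await client.get_messages('some chat', limit=5)
            >>> await client.send_read_acknowledge('some chat', msgs)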
"""
if max_id is None:
if message:
if utils.is_list_like(message):
max_id = max(msg.id for msg in message)
else:
max_id = message.id
elif not clear_mentions:
raise ValueError(
'Either a message list or a max_id must be provided.')
entity = await self.get_input_entity(entity)
if clear_mentions:
await self(ReadMentionsRequest(entity))
if max_id is None:
return True
if max_id is not None:
if isinstance(entity, InputPeerChannel):
return await self(channels.ReadHistoryRequest(entity, max_id=max_id))
else:
return await self(messages.ReadHistoryRequest(entity, max_id=max_id))
return False
@staticmethod
def _get_message_id(message):
"""Sanitizes the 'reply_to' parameter a user may send"""
if message is None:
return None
if isinstance(message, int):
return message
try:
if message.SUBCLASS_OF_ID == 0x790009e3:
# hex(crc32(b'Message')) = 0x790009e3
return message.id
except AttributeError:
pass
raise TypeError('Invalid message type: {}'.format(type(message)))
@async_generator
async def iter_participants(self, entity, limit=None, search='',
filter=None, aggressive=False, _total=None):
"""
Iterator over the participants belonging to the specified chat.
Args:
entity (`entity`):
The entity from which to retrieve the participants list.
limit (`int`):
Limits amount of participants fetched.
search (`str`, optional):
Look for participants with this string in name/username.
filter (:tl:`ChannelParticipantsFilter`, optional):
                The filter to be used, if you want e.g. only admins.
                Note that you might not have permissions for some filters.
This has no effect for normal chats or users.
aggressive (`bool`, optional):
Aggressively looks for all participants in the chat in
order to get more than 10,000 members (a hard limit
imposed by Telegram). Note that this might take a long
time (over 5 minutes), but is able to return over 90,000
participants on groups with 100,000 members.
This has no effect for groups or channels with less than
10,000 members, or if a ``filter`` is given.
_total (`list`, optional):
A single-item list to pass the total parameter by reference.
Yields:
The :tl:`User` objects returned by :tl:`GetParticipantsRequest`
with an additional ``.participant`` attribute which is the
matched :tl:`ChannelParticipant` type for channels/megagroups
or :tl:`ChatParticipants` for normal chats.
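        Example:
            A short sketch with a placeholder group name (run inside a
            coroutine with an already connected ``client``):
            >>> async for user in client.iter_participants('some group', limit=50):
            ...     print(user.id, user.first_name)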
"""
if isinstance(filter, type):
if filter in (ChannelParticipantsBanned, ChannelParticipantsKicked,
ChannelParticipantsSearch):
# These require a `q` parameter (support types for convenience)
filter = filter('')
else:
filter = filter()
entity = await self.get_input_entity(entity)
if search and (filter or not isinstance(entity, InputPeerChannel)):
# We need to 'search' ourselves unless we have a PeerChannel
search = search.lower()
def filter_entity(ent):
return search in utils.get_display_name(ent).lower() or\
                    search in (getattr(ent, 'username', '') or '').lower()
else:
def filter_entity(ent):
return True
limit = float('inf') if limit is None else int(limit)
if isinstance(entity, InputPeerChannel):
if _total or (aggressive and not filter):
total = (await self(GetFullChannelRequest(
entity
))).full_chat.participants_count
if _total:
_total[0] = total
else:
total = 0
if limit == 0:
return
seen = set()
if total > 10000 and aggressive and not filter:
requests = [GetParticipantsRequest(
channel=entity,
filter=ChannelParticipantsSearch(search + chr(x)),
offset=0,
limit=200,
hash=0
) for x in range(ord('a'), ord('z') + 1)]
else:
requests = [GetParticipantsRequest(
channel=entity,
filter=filter or ChannelParticipantsSearch(search),
offset=0,
limit=200,
hash=0
)]
while requests:
# Only care about the limit for the first request
# (small amount of people, won't be aggressive).
#
# Most people won't care about getting exactly 12,345
# members so it doesn't really matter not to be 100%
# precise with being out of the offset/limit here.
requests[0].limit = min(limit - requests[0].offset, 200)
if requests[0].offset > limit:
break
results = await self(requests)
for i in reversed(range(len(requests))):
participants = results[i]
if not participants.users:
requests.pop(i)
else:
requests[i].offset += len(participants.participants)
users = {user.id: user for user in participants.users}
for participant in participants.participants:
user = users[participant.user_id]
if not filter_entity(user) or user.id in seen:
continue
seen.add(participant.user_id)
user = users[participant.user_id]
user.participant = participant
await yield_(user)
if len(seen) >= limit:
return
elif isinstance(entity, InputPeerChat):
# TODO We *could* apply the `filter` here ourselves
full = await self(GetFullChatRequest(entity.chat_id))
if not isinstance(full.full_chat.participants, ChatParticipants):
# ChatParticipantsForbidden won't have ``.participants``
                if _total:
                    _total[0] = 0
return
if _total:
_total[0] = len(full.full_chat.participants.participants)
have = 0
users = {user.id: user for user in full.users}
for participant in full.full_chat.participants.participants:
user = users[participant.user_id]
if not filter_entity(user):
continue
have += 1
if have > limit:
break
else:
user = users[participant.user_id]
user.participant = participant
await yield_(user)
else:
if _total:
_total[0] = 1
if limit != 0:
user = await self.get_entity(entity)
if filter_entity(user):
user.participant = None
await yield_(user)
async def get_participants(self, *args, **kwargs):
"""
Same as :meth:`iter_participants`, but returns a list instead
with an additional ``.total`` attribute on the list.
"""
total = [0]
kwargs['_total'] = total
participants = UserList()
async for participant in self.iter_participants(*args, **kwargs):
participants.append(participant)
participants.total = total[0]
return participants
# endregion
# region Uploading files
async def send_file(self, entity, file, caption='',
force_document=False, progress_callback=None,
reply_to=None,
attributes=None,
thumb=None,
allow_cache=True,
parse_mode='md',
voice_note=False,
video_note=False,
**kwargs):
"""
Sends a file to the specified entity.
Args:
entity (`entity`):
Who will receive the file.
file (`str` | `bytes` | `file` | `media`):
The path of the file, byte array, or stream that will be sent.
Note that if a byte array or a stream is given, a filename
or its type won't be inferred, and it will be sent as an
"unnamed application/octet-stream".
Furthermore the file may be any media (a message, document,
photo or similar) so that it can be resent without the need
to download and re-upload it again.
If a list or similar is provided, the files in it will be
sent as an album in the order in which they appear, sliced
in chunks of 10 if more than 10 are given.
caption (`str`, optional):
Optional caption for the sent media message.
force_document (`bool`, optional):
If left to ``False`` and the file is a path that ends with
the extension of an image file or a video file, it will be
sent as such. Otherwise always as a document.
progress_callback (`callable`, optional):
A callback function accepting two parameters:
``(sent bytes, total)``.
reply_to (`int` | :tl:`Message`):
Same as `reply_to` from `send_message`.
attributes (`list`, optional):
Optional attributes that override the inferred ones, like
:tl:`DocumentAttributeFilename` and so on.
thumb (`str` | `bytes` | `file`, optional):
Optional thumbnail (for videos).
allow_cache (`bool`, optional):
Whether to allow using the cached version stored in the
database or not. Defaults to ``True`` to avoid re-uploads.
Must be ``False`` if you wish to use different attributes
or thumb than those that were used when the file was cached.
parse_mode (`str`, optional):
The parse mode for the caption message.
voice_note (`bool`, optional):
If ``True`` the audio will be sent as a voice note.
                If you previously sent the same file without this setting,
                set `allow_cache` to ``False`` for it to take effect.
video_note (`bool`, optional):
If ``True`` the video will be sent as a video note,
also known as a round video message.
                If you previously sent the same file without this setting,
                set `allow_cache` to ``False`` for it to take effect.
Notes:
If the ``hachoir3`` package (``hachoir`` module) is installed,
it will be used to determine metadata from audio and video files.
Returns:
The :tl:`Message` (or messages) containing the sent file,
or messages if a list of them was passed.
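        Example:
            A minimal sketch; the chat name and file path are placeholders
            (run inside a coroutine):
            >>> await client.send_file('some chat', '/tmp/photo.jpg',
            ...                        caption='An example upload')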
"""
# First check if the user passed an iterable, in which case
# we may want to send as an album if all are photo files.
if utils.is_list_like(file):
# TODO Fix progress_callback
images = []
if force_document:
documents = file
else:
documents = []
for x in file:
if utils.is_image(x):
images.append(x)
else:
documents.append(x)
result = []
while images:
result += await self._send_album(
entity, images[:10], caption=caption,
progress_callback=progress_callback, reply_to=reply_to,
parse_mode=parse_mode
)
images = images[10:]
for x in documents:
result.append(await self.send_file(
entity, x, allow_cache=allow_cache,
caption=caption, force_document=force_document,
progress_callback=progress_callback, reply_to=reply_to,
attributes=attributes, thumb=thumb, voice_note=voice_note,
video_note=video_note, **kwargs
))
return result
entity = await self.get_input_entity(entity)
reply_to = self._get_message_id(reply_to)
# Not document since it's subject to change.
# Needed when a Message is passed to send_message and it has media.
if 'entities' in kwargs:
msg_entities = kwargs['entities']
else:
caption, msg_entities =\
await self._parse_message_text(caption, parse_mode)
if not isinstance(file, (str, bytes, io.IOBase)):
# The user may pass a Message containing media (or the media,
# or anything similar) that should be treated as a file. Try
# getting the input media for whatever they passed and send it.
try:
media = utils.get_input_media(file)
except TypeError:
pass # Can't turn whatever was given into media
else:
request = SendMediaRequest(entity, media,
reply_to_msg_id=reply_to,
message=caption,
entities=msg_entities)
return self._get_response_message(request, await self(request))
as_image = utils.is_image(file) and not force_document
use_cache = InputPhoto if as_image else InputDocument
file_handle = await self.upload_file(
file, progress_callback=progress_callback,
use_cache=use_cache if allow_cache else None
)
if isinstance(file_handle, use_cache):
# File was cached, so an instance of use_cache was returned
if as_image:
media = InputMediaPhoto(file_handle)
else:
media = InputMediaDocument(file_handle)
elif as_image:
media = InputMediaUploadedPhoto(file_handle)
else:
mime_type = None
if isinstance(file, str):
# Determine mime-type and attributes
# Take the first element by using [0] since it returns a tuple
mime_type = guess_type(file)[0]
attr_dict = {
DocumentAttributeFilename:
DocumentAttributeFilename(os.path.basename(file))
}
if utils.is_audio(file) and hachoir:
m = hachoir.metadata.extractMetadata(
hachoir.parser.createParser(file)
)
attr_dict[DocumentAttributeAudio] = DocumentAttributeAudio(
voice=voice_note,
title=m.get('title') if m.has('title') else None,
performer=m.get('author') if m.has('author') else None,
duration=int(m.get('duration').seconds
if m.has('duration') else 0)
)
if not force_document and utils.is_video(file):
if hachoir:
m = hachoir.metadata.extractMetadata(
hachoir.parser.createParser(file)
)
doc = DocumentAttributeVideo(
round_message=video_note,
w=m.get('width') if m.has('width') else 0,
h=m.get('height') if m.has('height') else 0,
duration=int(m.get('duration').seconds
if m.has('duration') else 0)
)
else:
doc = DocumentAttributeVideo(0, 1, 1,
round_message=video_note)
attr_dict[DocumentAttributeVideo] = doc
else:
attr_dict = {
DocumentAttributeFilename: DocumentAttributeFilename(
os.path.basename(
getattr(file, 'name', None) or 'unnamed'))
}
if voice_note:
if DocumentAttributeAudio in attr_dict:
attr_dict[DocumentAttributeAudio].voice = True
else:
attr_dict[DocumentAttributeAudio] = \
DocumentAttributeAudio(0, voice=True)
# Now override the attributes if any. As we have a dict of
# {cls: instance}, we can override any class with the list
# of attributes provided by the user easily.
if attributes:
for a in attributes:
attr_dict[type(a)] = a
# Ensure we have a mime type, any; but it cannot be None
# 'The "octet-stream" subtype is used to indicate that a body
# contains arbitrary binary data.'
if not mime_type:
mime_type = 'application/octet-stream'
input_kw = {}
if thumb:
input_kw['thumb'] = await self.upload_file(thumb)
media = InputMediaUploadedDocument(
file=file_handle,
mime_type=mime_type,
attributes=list(attr_dict.values()),
**input_kw
)
# Once the media type is properly specified and the file uploaded,
# send the media message to the desired entity.
request = SendMediaRequest(entity, media, reply_to_msg_id=reply_to,
message=caption, entities=msg_entities)
msg = self._get_response_message(request, await self(request))
if msg and isinstance(file_handle, InputSizedFile):
# There was a response message and we didn't use cached
# version, so cache whatever we just sent to the database.
md5, size = file_handle.md5, file_handle.size
if as_image:
to_cache = utils.get_input_photo(msg.media.photo)
else:
to_cache = utils.get_input_document(msg.media.document)
await self.session.cache_file(md5, size, to_cache)
return msg
def send_voice_note(self, *args, **kwargs):
"""Deprecated, see :meth:`send_file`."""
warnings.warn('send_voice_note is deprecated, use '
'send_file(..., voice_note=True) instead')
        kwargs['voice_note'] = True
return self.send_file(*args, **kwargs)
async def _send_album(self, entity, files, caption='',
progress_callback=None, reply_to=None,
parse_mode='md'):
"""Specialized version of .send_file for albums"""
# We don't care if the user wants to avoid cache, we will use it
# anyway. Why? The cached version will be exactly the same thing
# we need to produce right now to send albums (uploadMedia), and
# cache only makes a difference for documents where the user may
# want the attributes used on them to change.
#
# In theory documents can be sent inside the albums but they appear
# as different messages (not inside the album), and the logic to set
# the attributes/avoid cache is already written in .send_file().
entity = await self.get_input_entity(entity)
if not utils.is_list_like(caption):
caption = (caption,)
captions = []
for caption in reversed(caption): # Pop from the end (so reverse)
captions.append(await self._parse_message_text(caption or '',
parse_mode))
reply_to = self._get_message_id(reply_to)
# Need to upload the media first, but only if they're not cached yet
media = []
for file in files:
# fh will either be InputPhoto or a modified InputFile
fh = await self.upload_file(file, use_cache=InputPhoto)
if not isinstance(fh, InputPhoto):
input_photo = utils.get_input_photo((await self(UploadMediaRequest(
entity, media=InputMediaUploadedPhoto(fh)
))).photo)
await self.session.cache_file(fh.md5, fh.size, input_photo)
fh = input_photo
if captions:
caption, msg_entities = captions.pop()
else:
caption, msg_entities = '', None
media.append(InputSingleMedia(InputMediaPhoto(fh), message=caption,
entities=msg_entities))
# Now we can construct the multi-media request
result = await self(SendMultiMediaRequest(
entity, reply_to_msg_id=reply_to, multi_media=media
))
return [
self._get_response_message(update.id, result)
for update in result.updates
if isinstance(update, UpdateMessageID)
]
async def upload_file(self, file, part_size_kb=None, file_name=None,
use_cache=None, progress_callback=None):
"""
Uploads the specified file and returns a handle (an instance of
:tl:`InputFile` or :tl:`InputFileBig`, as required) which can be
        later used before it expires (handles are usable for less than a day).
Uploading a file will simply return a "handle" to the file stored
remotely in the Telegram servers, which can be later used on. This
will **not** upload the file to your own chat or any chat at all.
Args:
file (`str` | `bytes` | `file`):
The path of the file, byte array, or stream that will be sent.
Note that if a byte array or a stream is given, a filename
or its type won't be inferred, and it will be sent as an
"unnamed application/octet-stream".
part_size_kb (`int`, optional):
                Chunk size when uploading files. The larger, the fewer
requests will be made (up to 512KB maximum).
file_name (`str`, optional):
The file name which will be used on the resulting InputFile.
If not specified, the name will be taken from the ``file``
and if this is not a ``str``, it will be ``"unnamed"``.
use_cache (`type`, optional):
The type of cache to use (currently either :tl:`InputDocument`
or :tl:`InputPhoto`). If present and the file is small enough
to need the MD5, it will be checked against the database,
and if a match is found, the upload won't be made. Instead,
an instance of type ``use_cache`` will be returned.
progress_callback (`callable`, optional):
A callback function accepting two parameters:
``(sent bytes, total)``.
Returns:
:tl:`InputFileBig` if the file size is larger than 10MB,
`telethon.tl.custom.input_sized_file.InputSizedFile`
(subclass of :tl:`InputFile`) otherwise.
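        Example:
            A minimal sketch with a placeholder path; the returned handle
            may later be passed to `send_file` (run inside a coroutine):
            >>> handle = await client.upload_file('/tmp/document.pdf')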
"""
if isinstance(file, (InputFile, InputFileBig)):
return file # Already uploaded
if isinstance(file, str):
file_size = os.path.getsize(file)
elif isinstance(file, bytes):
file_size = len(file)
else:
file = file.read()
file_size = len(file)
# File will now either be a string or bytes
if not part_size_kb:
part_size_kb = utils.get_appropriated_part_size(file_size)
if part_size_kb > 512:
raise ValueError('The part size must be less or equal to 512KB')
part_size = int(part_size_kb * 1024)
if part_size % 1024 != 0:
raise ValueError(
'The part size must be evenly divisible by 1024')
# Set a default file name if None was specified
file_id = helpers.generate_random_long()
if not file_name:
if isinstance(file, str):
file_name = os.path.basename(file)
else:
file_name = str(file_id)
# Determine whether the file is too big (over 10MB) or not
# Telegram does make a distinction between smaller or larger files
is_large = file_size > 10 * 1024 * 1024
hash_md5 = hashlib.md5()
if not is_large:
# Calculate the MD5 hash before anything else.
# As this needs to be done always for small files,
# might as well do it before anything else and
# check the cache.
if isinstance(file, str):
with open(file, 'rb') as stream:
file = stream.read()
hash_md5.update(file)
if use_cache:
cached = await self.session.get_file(
hash_md5.digest(), file_size, cls=use_cache
)
if cached:
return cached
part_count = (file_size + part_size - 1) // part_size
__log__.info('Uploading file of %d bytes in %d chunks of %d',
file_size, part_count, part_size)
with open(file, 'rb') if isinstance(file, str) else BytesIO(file) \
as stream:
for part_index in range(part_count):
                # Read the file in chunks of size part_size
part = stream.read(part_size)
# The SavePartRequest is different depending on whether
# the file is too large or not (over or less than 10MB)
if is_large:
request = SaveBigFilePartRequest(file_id, part_index,
part_count, part)
else:
request = SaveFilePartRequest(file_id, part_index, part)
result = await self(request)
if result:
__log__.debug('Uploaded %d/%d', part_index + 1,
part_count)
if progress_callback:
progress_callback(stream.tell(), file_size)
else:
raise RuntimeError(
'Failed to upload file part {}.'.format(part_index))
if is_large:
return InputFileBig(file_id, part_count, file_name)
else:
return InputSizedFile(
file_id, part_count, file_name, md5=hash_md5, size=file_size
)
# endregion
# region Downloading media requests
async def download_profile_photo(self, entity, file=None, download_big=True):
"""
Downloads the profile photo of the given entity (user/chat/channel).
Args:
entity (`entity`):
From who the photo will be downloaded.
file (`str` | `file`, optional):
The output file path, directory, or stream-like object.
If the path exists and is a file, it will be overwritten.
download_big (`bool`, optional):
Whether to use the big version of the available photos.
Returns:
``None`` if no photo was provided, or if it was Empty. On success
the file path is returned since it may differ from the one given.
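        Example:
            A minimal sketch with a placeholder username; ``path`` will be
            ``None`` if the entity has no photo (run inside a coroutine):
            >>> path = await client.download_profile_photo('username')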
"""
# hex(crc32(x.encode('ascii'))) for x in
# ('User', 'Chat', 'UserFull', 'ChatFull')
ENTITIES = (0x2da17977, 0xc5af5d94, 0x1f4661b9, 0xd49a2697)
# ('InputPeer', 'InputUser', 'InputChannel')
INPUTS = (0xc91c90b6, 0xe669bf46, 0x40f202fd)
if not isinstance(entity, TLObject) or entity.SUBCLASS_OF_ID in INPUTS:
entity = await self.get_entity(entity)
possible_names = []
if entity.SUBCLASS_OF_ID not in ENTITIES:
photo = entity
else:
if not hasattr(entity, 'photo'):
# Special case: may be a ChatFull with photo:Photo
# This is different from a normal UserProfilePhoto and Chat
if not hasattr(entity, 'chat_photo'):
return None
return await self._download_photo(
entity.chat_photo, file, date=None, progress_callback=None)
for attr in ('username', 'first_name', 'title'):
possible_names.append(getattr(entity, attr, None))
photo = entity.photo
if isinstance(photo, (UserProfilePhoto, ChatPhoto)):
loc = photo.photo_big if download_big else photo.photo_small
else:
try:
loc = utils.get_input_location(photo)
except TypeError:
return None
file = self._get_proper_filename(
file, 'profile_photo', '.jpg',
possible_names=possible_names
)
try:
await self.download_file(loc, file)
return file
except LocationInvalidError:
# See issue #500, Android app fails as of v4.6.0 (1155).
# The fix seems to be using the full channel chat photo.
ie = await self.get_input_entity(entity)
if isinstance(ie, InputPeerChannel):
full = await self(GetFullChannelRequest(ie))
return await self._download_photo(
full.full_chat.chat_photo, file,
date=None, progress_callback=None
)
else:
# Until there's a report for chats, no need to.
return None
async def download_media(self, message, file=None, progress_callback=None):
"""
Downloads the given media, or the media from a specified Message.
Note that if the download is too slow, you should consider installing
``cryptg`` (through ``pip install cryptg``) so that decrypting the
received data is done in C instead of Python (much faster).
message (:tl:`Message` | :tl:`Media`):
The media or message containing the media that will be downloaded.
file (`str` | `file`, optional):
The output file path, directory, or stream-like object.
If the path exists and is a file, it will be overwritten.
progress_callback (`callable`, optional):
A callback function accepting two parameters:
``(received bytes, total)``.
Returns:
``None`` if no media was provided, or if it was Empty. On success
the file path is returned since it may differ from the one given.
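        Example:
            A minimal sketch, assuming ``message`` is a previously fetched
            message that carries media (run inside a coroutine):
            >>> path = await client.download_media(message)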
"""
# TODO This won't work for messageService
if isinstance(message, Message):
date = message.date
media = message.media
else:
date = datetime.now()
media = message
if isinstance(media, MessageMediaWebPage):
if isinstance(media.webpage, WebPage):
media = media.webpage.document or media.webpage.photo
if isinstance(media, (MessageMediaPhoto, Photo,
PhotoSize, PhotoCachedSize)):
return await self._download_photo(
media, file, date, progress_callback
)
elif isinstance(media, (MessageMediaDocument, Document)):
return await self._download_document(
media, file, date, progress_callback
)
elif isinstance(media, MessageMediaContact):
return self._download_contact(
media, file
)
async def _download_photo(self, photo, file, date, progress_callback):
"""Specialized version of .download_media() for photos"""
# Determine the photo and its largest size
if isinstance(photo, MessageMediaPhoto):
photo = photo.photo
if isinstance(photo, Photo):
for size in reversed(photo.sizes):
if not isinstance(size, PhotoSizeEmpty):
photo = size
break
else:
return
if not isinstance(photo, (PhotoSize, PhotoCachedSize)):
return
file = self._get_proper_filename(file, 'photo', '.jpg', date=date)
if isinstance(photo, PhotoCachedSize):
# No need to download anything, simply write the bytes
if isinstance(file, str):
helpers.ensure_parent_dir_exists(file)
f = open(file, 'wb')
else:
f = file
try:
f.write(photo.bytes)
finally:
if isinstance(file, str):
f.close()
return file
await self.download_file(photo.location, file, file_size=photo.size,
progress_callback=progress_callback)
return file
async def _download_document(self, document, file, date, progress_callback):
"""Specialized version of .download_media() for documents."""
if isinstance(document, MessageMediaDocument):
document = document.document
if not isinstance(document, Document):
return
file_size = document.size
kind = 'document'
possible_names = []
for attr in document.attributes:
if isinstance(attr, DocumentAttributeFilename):
possible_names.insert(0, attr.file_name)
elif isinstance(attr, DocumentAttributeAudio):
kind = 'audio'
if attr.performer and attr.title:
possible_names.append('{} - {}'.format(
attr.performer, attr.title
))
elif attr.performer:
possible_names.append(attr.performer)
elif attr.title:
possible_names.append(attr.title)
elif attr.voice:
kind = 'voice'
file = self._get_proper_filename(
file, kind, utils.get_extension(document),
date=date, possible_names=possible_names
)
await self.download_file(document, file, file_size=file_size,
progress_callback=progress_callback)
return file
@staticmethod
def _download_contact(mm_contact, file):
"""Specialized version of .download_media() for contacts.
Will make use of the vCard 4.0 format.
"""
first_name = mm_contact.first_name
last_name = mm_contact.last_name
phone_number = mm_contact.phone_number
if isinstance(file, str):
file = TelegramClient._get_proper_filename(
file, 'contact', '.vcard',
possible_names=[first_name, phone_number, last_name]
)
f = open(file, 'w', encoding='utf-8')
else:
f = file
try:
# Remove these pesky characters
first_name = first_name.replace(';', '')
last_name = (last_name or '').replace(';', '')
f.write('BEGIN:VCARD\n')
f.write('VERSION:4.0\n')
f.write('N:{};{};;;\n'.format(first_name, last_name))
f.write('FN:{} {}\n'.format(first_name, last_name))
f.write('TEL;TYPE=cell;VALUE=uri:tel:+{}\n'.format(phone_number))
f.write('END:VCARD\n')
finally:
# Only close the stream if we opened it
if isinstance(file, str):
f.close()
return file
@staticmethod
def _get_proper_filename(file, kind, extension,
date=None, possible_names=None):
"""Gets a proper filename for 'file', if this is a path.
'kind' should be the kind of the output file (photo, document...)
'extension' should be the extension to be added to the file if
the filename doesn't have any yet
'date' should be when this file was originally sent, if known
'possible_names' should be an ordered list of possible names
If no modification is made to the path, any existing file
will be overwritten.
If any modification is made to the path, this method will
ensure that no existing file will be overwritten.
"""
if file is not None and not isinstance(file, str):
# Probably a stream-like object, we cannot set a filename here
return file
if file is None:
file = ''
elif os.path.isfile(file):
# Make no modifications to valid existing paths
return file
if os.path.isdir(file) or not file:
try:
name = None if possible_names is None else next(
x for x in possible_names if x
)
except StopIteration:
name = None
if not name:
if not date:
date = datetime.now()
name = '{}_{}-{:02}-{:02}_{:02}-{:02}-{:02}'.format(
kind,
date.year, date.month, date.day,
date.hour, date.minute, date.second,
)
file = os.path.join(file, name)
directory, name = os.path.split(file)
name, ext = os.path.splitext(name)
if not ext:
ext = extension
result = os.path.join(directory, name + ext)
if not os.path.isfile(result):
return result
i = 1
while True:
result = os.path.join(directory, '{} ({}){}'.format(name, i, ext))
if not os.path.isfile(result):
return result
i += 1
async def download_file(self, input_location, file=None, part_size_kb=None,
file_size=None, progress_callback=None):
"""
Downloads the given input location to a file.
Args:
input_location (:tl:`FileLocation` | :tl:`InputFileLocation`):
The file location from which the file will be downloaded.
See `telethon.utils.get_input_location` source for a complete
list of supported types.
file (`str` | `file`, optional):
The output file path, directory, or stream-like object.
If the path exists and is a file, it will be overwritten.
If the file path is ``None``, then the result will be
saved in memory and returned as `bytes`.
part_size_kb (`int`, optional):
                Chunk size when downloading files. The larger, the fewer
requests will be made (up to 512KB maximum).
file_size (`int`, optional):
The file size that is about to be downloaded, if known.
Only used if ``progress_callback`` is specified.
progress_callback (`callable`, optional):
A callback function accepting two parameters:
``(downloaded bytes, total)``. Note that the
``total`` is the provided ``file_size``.
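        Example:
            A rough sketch, assuming ``msg`` is a previously fetched message
            carrying a document; with ``file=None`` the raw bytes are
            returned (run inside a coroutine):
            >>> data = await client.download_file(msg.media.document)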
"""
if not part_size_kb:
if not file_size:
part_size_kb = 64 # Reasonable default
else:
part_size_kb = utils.get_appropriated_part_size(file_size)
part_size = int(part_size_kb * 1024)
# https://core.telegram.org/api/files says:
# > part_size % 1024 = 0 (divisible by 1KB)
#
# But https://core.telegram.org/cdn (more recent) says:
# > limit must be divisible by 4096 bytes
# So we just stick to the 4096 limit.
if part_size % 4096 != 0:
raise ValueError(
'The part size must be evenly divisible by 4096.')
in_memory = file is None
if in_memory:
f = io.BytesIO()
elif isinstance(file, str):
# Ensure that we'll be able to download the media
helpers.ensure_parent_dir_exists(file)
f = open(file, 'wb')
else:
f = file
# The used client will change if FileMigrateError occurs
client = self
cdn_decrypter = None
input_location = utils.get_input_location(input_location)
__log__.info('Downloading file in chunks of %d bytes', part_size)
try:
offset = 0
while True:
try:
if cdn_decrypter:
result = await cdn_decrypter.get_file()
else:
result = await client(GetFileRequest(
input_location, offset, part_size
))
if isinstance(result, FileCdnRedirect):
__log__.info('File lives in a CDN')
cdn_decrypter, result = \
await CdnDecrypter.prepare_decrypter(
client,
await self._get_cdn_client(result),
result
)
except FileMigrateError as e:
__log__.info('File lives in another DC')
client = await self._get_exported_client(e.new_dc)
continue
offset += part_size
# If we have received no data (0 bytes), the file is over
# So there is nothing left to download and write
if not result.bytes:
# Return some extra information, unless it's a CDN file
if in_memory:
f.flush()
return f.getvalue()
else:
return getattr(result, 'type', '')
f.write(result.bytes)
__log__.debug('Saved %d more bytes', len(result.bytes))
if progress_callback:
progress_callback(f.tell(), file_size)
finally:
if client != self:
client.disconnect()
if cdn_decrypter:
try:
cdn_decrypter.client.disconnect()
except:
pass
if isinstance(file, str) or in_memory:
f.close()
# endregion
# endregion
# region Event handling
def on(self, event):
"""
Decorator helper method around add_event_handler().
Args:
event (`_EventBuilder` | `type`):
The event builder class or instance to be used,
for instance ``events.NewMessage``.
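        Example:
            A minimal sketch of registering a handler; ``events`` refers to
            the ``telethon.events`` module:
            >>> @client.on(events.NewMessage)
            ... async def handler(event):
            ...     print(event.original_update)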
"""
def decorator(f):
if self._loop.is_running():
asyncio.ensure_future(self.add_event_handler(f, event))
else:
self._loop.run_until_complete(self.add_event_handler(f, event))
return f
return decorator
async def _check_events_pending_resolve(self):
if self._events_pending_resolve:
for event in self._events_pending_resolve:
await event.resolve(self)
self._events_pending_resolve.clear()
async def _on_handler(self, update):
for builder, callback in self._event_builders:
event = builder.build(update)
if event:
event._client = self
event.original_update = update
try:
await callback(event)
except events.StopPropagation:
__log__.debug(
"Event handler '{}' stopped chain of "
"propagation for event {}."
.format(callback.__name__, type(event).__name__)
)
break
async def add_event_handler(self, callback, event=None):
"""
Registers the given callback to be called on the specified event.
Args:
callback (`callable`):
The callable function accepting one parameter to be used.
event (`_EventBuilder` | `type`, optional):
The event builder class or instance to be used,
for instance ``events.NewMessage``.
If left unspecified, `telethon.events.raw.Raw` (the
:tl:`Update` objects with no further processing) will
be passed instead.
"""
self.update_handler = self._on_handler
if isinstance(event, type):
event = event()
elif not event:
event = events.Raw()
if self.is_user_authorized():
await event.resolve(self)
await self._check_events_pending_resolve()
else:
self._events_pending_resolve.append(event)
self._event_builders.append((event, callback))
def remove_event_handler(self, callback, event=None):
"""
Inverse operation of :meth:`add_event_handler`.
If no event is given, all events for this callback are removed.
Returns how many callbacks were removed.
"""
found = 0
if event and not isinstance(event, type):
event = type(event)
i = len(self._event_builders)
while i:
i -= 1
ev, cb = self._event_builders[i]
if cb == callback and (not event or isinstance(ev, event)):
del self._event_builders[i]
found += 1
return found
def list_event_handlers(self):
"""
Lists all added event handlers, returning a list of pairs
consisting of (callback, event).
"""
return [(callback, event) for event, callback in self._event_builders]
async def add_update_handler(self, handler):
"""Deprecated, see :meth:`add_event_handler`."""
warnings.warn(
'add_update_handler is deprecated, use the @client.on syntax '
'or add_event_handler(callback, events.Raw) instead (see '
'https://telethon.rtfd.io/en/latest/extra/basic/working-'
'with-updates.html)'
)
return await self.add_event_handler(handler, events.Raw)
def remove_update_handler(self, handler):
return self.remove_event_handler(handler)
def list_update_handlers(self):
return [callback for callback, _ in self.list_event_handlers()]
async def catch_up(self):
state = self.session.get_update_state(0)
if not state or not state.pts:
return
self.session.catching_up = True
try:
while True:
d = await self(GetDifferenceRequest(state.pts, state.date, state.qts))
if isinstance(d, DifferenceEmpty):
state.date = d.date
state.seq = d.seq
break
elif isinstance(d, (DifferenceSlice, Difference)):
if isinstance(d, Difference):
state = d.state
elif d.intermediate_state.pts > state.pts:
state = d.intermediate_state
else:
# TODO Figure out why other applications can rely on
# using always the intermediate_state to eventually
# reach a DifferenceEmpty, but that leads to an
# infinite loop here (so check against old pts to stop)
break
self._updates_handler(Updates(
users=d.users,
chats=d.chats,
date=state.date,
seq=state.seq,
updates=d.other_updates + [UpdateNewMessage(m, 0, 0)
for m in d.new_messages]
))
elif isinstance(d, DifferenceTooLong):
break
finally:
self.session.set_update_state(0, state)
self.session.catching_up = False
# endregion
# region Small utilities to make users' life easier
async def get_entity(self, entity):
"""
Turns the given entity into a valid Telegram user or chat.
entity (`str` | `int` | :tl:`Peer` | :tl:`InputPeer`):
The entity (or iterable of entities) to be transformed.
If it's a string which can be converted to an integer or starts
with '+' it will be resolved as if it were a phone number.
            If it doesn't start with '+', or if it starts with '@', it will
            be resolved from the username. If no exact match is returned,
an error will be raised.
If the entity is an integer or a Peer, its information will be
returned through a call to self.get_input_peer(entity).
If the entity is neither, and it's not a TLObject, an
error will be raised.
Returns:
:tl:`User`, :tl:`Chat` or :tl:`Channel` corresponding to the
input entity. A list will be returned if more than one was given.
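        Example:
            A minimal sketch with placeholder inputs (run inside a
            coroutine with an already connected ``client``):
            >>> user = await client.get_entity('username')
            >>> users = await client.get_entity(['username', '+34600000000'])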
"""
single = not utils.is_list_like(entity)
if single:
entity = (entity,)
# Group input entities by string (resolve username),
# input users (get users), input chat (get chats) and
# input channels (get channels) to get the most entities
# in the less amount of calls possible.
inputs = []
for x in entity:
inputs.append(x if isinstance(x, str)
else await self.get_input_entity(x))
users = [x for x in inputs
if isinstance(x, (InputPeerUser, InputPeerSelf))]
chats = [x.chat_id for x in inputs if isinstance(x, InputPeerChat)]
channels = [x for x in inputs if isinstance(x, InputPeerChannel)]
if users:
# GetUsersRequest has a limit of 200 per call
tmp = []
while users:
curr, users = users[:200], users[200:]
tmp.extend(await self(GetUsersRequest(curr)))
users = tmp
if chats: # TODO Handle chats slice?
chats = (await self(GetChatsRequest(chats))).chats
if channels:
channels = (await self(GetChannelsRequest(channels))).chats
# Merge users, chats and channels into a single dictionary
id_entity = {
utils.get_peer_id(x): x
for x in itertools.chain(users, chats, channels)
}
# We could check saved usernames and put them into the users,
# chats and channels list from before. While this would reduce
# the amount of ResolveUsername calls, it would fail to catch
# username changes.
result = []
for x in inputs:
if isinstance(x, str):
result.append(await self._get_entity_from_string(x))
elif not isinstance(x, InputPeerSelf):
result.append(id_entity[utils.get_peer_id(x)])
else:
result.append(next(u for u in id_entity.values()
if isinstance(u, User) and u.is_self))
return result[0] if single else result
async def _get_entity_from_string(self, string):
"""
        Gets a full entity from the given string, which may be a phone
        number or a username, and processes all found entities in the session.
The string may also be a user link, or a channel/chat invite link.
This method has the side effect of adding the found users to the
session database, so it can be queried later without API calls,
if this option is enabled on the session.
Returns the found entity, or raises TypeError if not found.
"""
phone = utils.parse_phone(string)
if phone:
for user in (await self(GetContactsRequest(0))).users:
if user.phone == phone:
return user
else:
username, is_join_chat = utils.parse_username(string)
if is_join_chat:
invite = await self(CheckChatInviteRequest(username))
if isinstance(invite, ChatInvite):
raise ValueError(
'Cannot get entity from a channel (or group) '
'that you are not part of. Join the group and retry'
)
elif isinstance(invite, ChatInviteAlready):
return invite.chat
elif username:
if username in ('me', 'self'):
return await self.get_me()
try:
result = await self(ResolveUsernameRequest(username))
except UsernameNotOccupiedError as e:
raise ValueError('No user has "{}" as username'
.format(username)) from e
for entity in itertools.chain(result.users, result.chats):
                    if (getattr(entity, 'username', None) or '')\
                            .lower() == username:
return entity
try:
# Nobody with this username, maybe it's an exact name/title
return await self.get_entity(
await self.session.get_input_entity(string))
except ValueError:
pass
raise ValueError(
'Cannot find any entity corresponding to "{}"'.format(string)
)
async def get_input_entity(self, peer):
"""
Turns the given peer into its input entity version. Most requests
use this kind of InputUser, InputChat and so on, so this is the
most suitable call to make for those cases.
entity (`str` | `int` | :tl:`Peer` | :tl:`InputPeer`):
            The integer ID of a user, or otherwise either of a
:tl:`PeerUser`, :tl:`PeerChat` or :tl:`PeerChannel`, for
which to get its ``Input*`` version.
If this ``Peer`` hasn't been seen before by the library, the top
dialogs will be loaded and their entities saved to the session
file (unless this feature was disabled explicitly).
If in the end the access hash required for the peer was not found,
a ValueError will be raised.
Returns:
:tl:`InputPeerUser`, :tl:`InputPeerChat` or :tl:`InputPeerChannel`
or :tl:`InputPeerSelf` if the parameter is ``'me'`` or ``'self'``.
If you need to get the ID of yourself, you should use
            `get_me` with ``input_peer=True`` instead.
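        Example:
            A minimal sketch with a placeholder username (run inside a
            coroutine):
            >>> peer = await client.get_input_entity('username')
            >>> me = await client.get_input_entity('me')  # InputPeerSelf()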
"""
if peer in ('me', 'self'):
return InputPeerSelf()
try:
# First try to get the entity from cache, otherwise figure it out
return await self.session.get_input_entity(peer)
except ValueError:
pass
if isinstance(peer, str):
return utils.get_input_peer(await self._get_entity_from_string(peer))
if not isinstance(peer, int) and (not isinstance(peer, TLObject)
or peer.SUBCLASS_OF_ID != 0x2d45687):
# Try casting the object into an input peer. Might TypeError.
# Don't do it if a not-found ID was given (instead ValueError).
# Also ignore Peer (0x2d45687 == crc32(b'Peer'))'s, lacking hash.
return utils.get_input_peer(peer)
raise ValueError(
'Could not find the input entity for "{}". Please read https://'
'telethon.readthedocs.io/en/latest/extra/basic/entities.html to'
' find out more details.'
.format(peer)
)
async def edit_2fa(self, current_password=None, new_password=None, hint='',
email=None):
"""
Changes the 2FA settings of the logged in user, according to the
passed parameters. Take note of the parameter explanations.
Has no effect if both current and new password are omitted.
current_password (`str`, optional):
The current password, to authorize changing to ``new_password``.
Must be set if changing existing 2FA settings.
Must **not** be set if 2FA is currently disabled.
Passing this by itself will remove 2FA (if correct).
new_password (`str`, optional):
The password to set as 2FA.
If 2FA was already enabled, ``current_password`` **must** be set.
Leaving this blank or ``None`` will remove the password.
hint (`str`, optional):
Hint to be displayed by Telegram when it asks for 2FA.
Leaving unspecified is highly discouraged.
Has no effect if ``new_password`` is not set.
email (`str`, optional):
Recovery and verification email. Raises ``EmailUnconfirmedError``
if value differs from current one, and has no effect if
``new_password`` is not set.
Returns:
``True`` if successful, ``False`` otherwise.
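        Example:
            A minimal sketch with placeholder passwords (run inside a
            coroutine):
            >>> await client.edit_2fa(new_password='hunter2', hint='a hint')
            >>> await client.edit_2fa(current_password='hunter2',
            ...                       new_password='hunter3')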
"""
if new_password is None and current_password is None:
return False
pass_result = await self(GetPasswordRequest())
if isinstance(pass_result, NoPassword) and current_password:
current_password = None
salt_random = os.urandom(8)
salt = pass_result.new_salt + salt_random
if not current_password:
current_password_hash = salt
else:
current_password = pass_result.current_salt +\
current_password.encode() + pass_result.current_salt
current_password_hash = hashlib.sha256(current_password).digest()
if new_password: # Setting new password
new_password = salt + new_password.encode('utf-8') + salt
new_password_hash = hashlib.sha256(new_password).digest()
new_settings = PasswordInputSettings(
new_salt=salt,
new_password_hash=new_password_hash,
hint=hint
)
if email: # If enabling 2FA or changing email
new_settings.email = email # TG counts empty string as None
return await self(UpdatePasswordSettingsRequest(
current_password_hash, new_settings=new_settings
))
else: # Removing existing password
return await self(UpdatePasswordSettingsRequest(
current_password_hash,
new_settings=PasswordInputSettings(
new_salt=bytes(),
new_password_hash=bytes(),
hint=hint
)
))
# endregion
| 39.659341 | 89 | 0.560418 |
205bc91dab311149173a0d90819b22ef604f45bc | 3,500 | py | Python | tempest/api/object_storage/test_container_acl.py | afaheem88/tempest_neutron | 20276dbee68087e576f4977633380fc6cd3fc1cd | ["Apache-2.0"] | null | null | null | tempest/api/object_storage/test_container_acl.py | afaheem88/tempest_neutron | 20276dbee68087e576f4977633380fc6cd3fc1cd | ["Apache-2.0"] | null | null | null | tempest/api/object_storage/test_container_acl.py | afaheem88/tempest_neutron | 20276dbee68087e576f4977633380fc6cd3fc1cd | ["Apache-2.0"] | null | null | null |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import base
from tempest import clients
from tempest.common.utils import data_utils
from tempest import test
class ObjectTestACLs(base.BaseObjectTest):
@classmethod
def resource_setup(cls):
super(ObjectTestACLs, cls).resource_setup()
cls.data.setup_test_user()
test_os = clients.Manager(cls.data.test_credentials)
cls.test_auth_data = test_os.auth_provider.auth_data
def setUp(self):
super(ObjectTestACLs, self).setUp()
self.container_name = data_utils.rand_name(name='TestContainer')
self.container_client.create_container(self.container_name)
def tearDown(self):
self.delete_containers([self.container_name])
super(ObjectTestACLs, self).tearDown()
@test.attr(type='smoke')
def test_read_object_with_rights(self):
# attempt to read object using authorized user
# update X-Container-Read metadata ACL
cont_headers = {'X-Container-Read':
self.data.test_tenant + ':' + self.data.test_user}
resp_meta, body = self.container_client.update_container_metadata(
self.container_name, metadata=cont_headers,
metadata_prefix='')
self.assertHeaders(resp_meta, 'Container', 'POST')
# create object
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(self.container_name,
object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# Trying to read the object with rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
resp, _ = self.object_client.get_object(
self.container_name, object_name)
self.assertHeaders(resp, 'Object', 'GET')
@test.attr(type='smoke')
def test_write_object_with_rights(self):
# attempt to write object using authorized user
# update X-Container-Write metadata ACL
cont_headers = {'X-Container-Write':
self.data.test_tenant + ':' + self.data.test_user}
resp_meta, body = self.container_client.update_container_metadata(
self.container_name, metadata=cont_headers,
metadata_prefix='')
self.assertHeaders(resp_meta, 'Container', 'POST')
# Trying to write the object with rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(
self.container_name,
object_name, 'data', headers={})
self.assertHeaders(resp, 'Object', 'PUT')
| 42.168675
| 78
| 0.668571
|
b11cb699166972e5702315de3e7c144262951e55
| 1,406
|
py
|
Python
|
cogdl/wrappers/data_wrapper/graph_classification/patchy_san_dw.py
|
li-ziang/cogdl
|
60022d3334e3abae2d2a505e6e049a26acf10f39
|
[
"MIT"
] | 6
|
2020-07-09T02:48:41.000Z
|
2021-06-16T09:04:14.000Z
|
cogdl/wrappers/data_wrapper/graph_classification/patchy_san_dw.py
|
li-ziang/cogdl
|
60022d3334e3abae2d2a505e6e049a26acf10f39
|
[
"MIT"
] | null | null | null |
cogdl/wrappers/data_wrapper/graph_classification/patchy_san_dw.py
|
li-ziang/cogdl
|
60022d3334e3abae2d2a505e6e049a26acf10f39
|
[
"MIT"
] | 1
|
2020-05-19T11:45:45.000Z
|
2020-05-19T11:45:45.000Z
|
import torch
from .graph_classification_dw import GraphClassificationDataWrapper
from cogdl.models.nn.patchy_san import get_single_feature
class PATCHY_SAN_DataWrapper(GraphClassificationDataWrapper):
@staticmethod
def add_args(parser):
GraphClassificationDataWrapper.add_args(parser)
parser.add_argument("--num-sample", default=30, type=int, help="Number of chosen vertexes")
parser.add_argument("--num-neighbor", default=10, type=int, help="Number of neighbor in constructing features")
parser.add_argument("--stride", default=1, type=int, help="Stride of chosen vertexes")
def __init__(self, dataset, num_sample, num_neighbor, stride, *args, **kwargs):
super(PATCHY_SAN_DataWrapper, self).__init__(dataset, *args, **kwargs)
self.sample = num_sample
self.dataset = dataset
self.neighbor = num_neighbor
self.stride = stride
def pre_transform(self):
dataset = self.dataset
num_features = dataset.num_features
num_classes = dataset.num_classes
for i, data in enumerate(dataset):
new_feature = get_single_feature(
dataset[i], num_features, num_classes, self.sample, self.neighbor, self.stride
)
dataset[i].x = torch.from_numpy(new_feature)
self.dataset = dataset
super(PATCHY_SAN_DataWrapper, self).pre_transform()
| 42.606061
| 119
| 0.699147
|
6df51b54b0413191306c4392bb2186dd4add5d9d
| 7,425
|
py
|
Python
|
vietnam_thor/app.py
|
vishalbelsare/teaching
|
dc1f7d6b259ff13ea4747e12ece0b2c66982532b
|
[
"MIT"
] | 26
|
2019-07-24T16:54:19.000Z
|
2022-03-24T13:32:51.000Z
|
vietnam_thor/app.py
|
vishalbelsare/teaching
|
dc1f7d6b259ff13ea4747e12ece0b2c66982532b
|
[
"MIT"
] | 1
|
2020-07-28T22:52:23.000Z
|
2020-07-28T22:52:23.000Z
|
vietnam_thor/app.py
|
vishalbelsare/teaching
|
dc1f7d6b259ff13ea4747e12ece0b2c66982532b
|
[
"MIT"
] | 26
|
2020-04-16T14:57:16.000Z
|
2021-11-23T00:24:12.000Z
|
"""
Web app with Panel for THOR data.
Created May 2021
Copyright (C) Damien Farrell
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import os, io, random
import string
import numpy as np
import pandas as pd
import pylab as plt
import seaborn as sns
from collections import OrderedDict
import datetime as dt
import geopandas as gpd
from bokeh.layouts import column
from bokeh.models import ColumnDataSource, Slider, CustomJS, DatePicker
from bokeh.plotting import figure
from bokeh.themes import Theme
from bokeh.io import show, output_notebook
from bokeh.models import (DataTable, GeoJSONDataSource, ColumnDataSource, HoverTool, renderers,
Label, LabelSet, CustomJS, MultiSelect, Dropdown, Div)
from bokeh.tile_providers import CARTODBPOSITRON, get_provider
import panel as pn
import panel.widgets as pnw
pn.extension()
def wgs84_to_web_mercator(df, lon="LON", lat="LAT"):
"""convert mat long to web mercartor"""
k = 6378137
df.loc[:,"x"] = df[lon] * (k * np.pi/180.0)
df.loc[:,"y"] = np.log(np.tan((90 + df[lat]) * np.pi/360.0)) * k
return df
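# Illustrative note (not part of the original script): longitude is scaled linearly and
# latitude through a log/tan term, so e.g. (LAT=16, LON=108) maps to roughly
# x ~ 1.2e7 m, y ~ 1.8e6 m. A hypothetical check:
#   demo = wgs84_to_web_mercator(pd.DataFrame({"LON": [108.0], "LAT": [16.0]}))
#   demo[["x", "y"]]  # web-mercator metre columns added alongside LON/LAT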
s=pd.read_csv('thor_data_vietnam_small.csv', low_memory=False,index_col=0)
s['MSNDATE'] = pd.to_datetime(s.MSNDATE, format='%Y/%m/%d',errors='coerce')
s['YEAR'] = s.MSNDATE.dt.year.fillna(0).astype(int)
s=s[s.YEAR>0]
s = wgs84_to_web_mercator(s, lon="TGTLONDDD_DDD_WGS84", lat="TGTLATDD_DDD_WGS84")
x = s[~s.TGTLATDD_DDD_WGS84.isnull()].copy()
x = x[~x.TGTCOUNTRY.isin(['PHILLIPINES','UNKNOWN','WESTPAC WATERS'])]
colormap={'NORTH VIETNAM':'brown','SOUTH VIETNAM':'orange','LAOS':'red',
'CAMBODIA':'green','THAILAND':'blue','UNKNOWN':'gray'}
providers = ['CARTODBPOSITRON','STAMEN_TERRAIN','OSM','ESRI_IMAGERY']
cats = ['TGTCOUNTRY','WEAPONTYPE','MFUNC_DESC']
def draw_map(df=None, long=None, lat=None, height=500, colorby='TGTCOUNTRY',
point_size=5,
tile_provider='CARTODBPOSITRON'):
tile_provider = get_provider(tile_provider)
tools = "pan,wheel_zoom,box_zoom,hover,tap,lasso_select,reset,save"
sizing_mode='stretch_both'
# range bounds supplied in web mercator coordinates
k = 6378137
pad = 700000
    if lat is None:
        lat = 16
    if long is None:
        long = 108
x = long * (k * np.pi/180.0)
y = np.log(np.tan((90 + lat) * np.pi/360.0)) * k
p = figure(x_range=(x-pad, x+pad), y_range=(y-pad, y+pad),
x_axis_type="mercator", y_axis_type="mercator", tools=tools,
plot_width=height, plot_height=height, sizing_mode=sizing_mode)
p.add_tile(tile_provider)
if df is None:
return
df.loc[:,'color'] = [colormap[i] if i in colormap else 'gray' for i in df[colorby]]
#df['size'] = 10
source = ColumnDataSource(df)
p.circle(x='x', y='y', size=point_size, alpha=0.7, color='color', source=source)#, legend_group=colorby)
p.toolbar.logo = None
p.title.text = "date"
hover = p.select(dict(type=HoverTool))
hover.tooltips = OrderedDict([
("TGTCOUNTRY", "@TGTCOUNTRY"),
("MSNDATE", "@MSNDATE{%F}"),
("TAKEOFFLOCATION", "@TAKEOFFLOCATION"),
("WEAPONTYPE", "@WEAPONTYPE"),
("MFUNC_DESC", "@MFUNC_DESC")
])
hover.formatters={'@MSNDATE': 'datetime'}
return p
def dashboard():
cols = list(x.columns)
colorby='TGTCOUNTRY'
map_pane=pn.pane.Bokeh(width=700)
df_pane = pn.pane.DataFrame(width=600,height=600)
date_picker = pnw.DatePicker(name='Pick Date',width=200)
from datetime import date
date_picker.value=date(1965, 1, 1)
date_slider = pnw.DateSlider(name='Date', start=dt.datetime(1965, 1, 1),
end=dt.datetime(1973, 10, 31), value=dt.datetime(1968, 1, 1))
tile_select = pnw.Select(name='tile layer',options=providers,width=200)
filterby_select = pnw.Select(name='filter by',value='',options=['']+cols[1:4],width=200)
value_select = pnw.Select(name='value',value='',options=[],width=200)
find_btn = pnw.Button(name='find in region',button_type='primary',width=200)
def update_tile(event=None):
p = map_pane.object
p.renderers = [x for x in p.renderers if not str(x).startswith('TileRenderer')]
rend = renderers.TileRenderer(tile_source= get_provider(tile_select.value))
p.renderers.insert(0, rend)
def update_filter(event):
col=filterby_select.value
if col=='':
value_select.options = []
else:
value_select.options = sorted(list(x[col].dropna().unique()))
def find_in_region(event):
#get points in selected map area
p = map_pane.object
source = p.renderers[1].data_source
d = x[(x.x>p.x_range.start) & (x.x<p.x_range.end) & (x.y>p.y_range.start) & (x.y<p.y_range.end)]
#add any filter
d = do_filter(d)
if len(d)==0:
return
elif len(d)>25000:
p.title.text = 'too many points!'
else:
d.loc[:,'color'] = [colormap[i] if i in colormap else 'gray' for i in d[colorby]]
source.data = dict(d)
p.title.text = 'selected %s points' %len(d)
map_pane.param.trigger('object')
return
def do_filter(d):
col = filterby_select.value
val = value_select.value
if col != '':
d = d[d[col]==val]
return d
def update_date(event):
date_slider.value = date_picker.value
def update_map(event=None, date=None):
p = map_pane.object
source = p.renderers[1].data_source
        if date is None:
date = str(date_slider.value)
d = x[x.MSNDATE==date]
d = do_filter(d)
if len(d)==0:
return
d.loc[:,'color'] = [colormap[i] if i in colormap else 'gray' for i in d[colorby]]
source.data = dict(d)
p.title.text = date
sdate='1968-01-01'
d = x[x.MSNDATE==sdate]
map_pane.object=draw_map(d)
date_slider.param.watch(update_map,'value')
date_picker.param.watch(update_date,'value')
tile_select.param.watch(update_tile,'value')
filterby_select.param.watch(update_filter,'value')
value_select.param.watch(update_map,'value')
find_btn.on_click(find_in_region)
dashboard = pn.Column(date_slider,pn.Row(pn.Column(date_picker,tile_select,filterby_select,value_select,find_btn),map_pane))
return dashboard
bootstrap = pn.template.BootstrapTemplate(title='THOR SE ASIA data view',
header_color='blue') #favicon='static/logo.png'
pn.config.sizing_mode = 'stretch_width'
app=dashboard()
bootstrap.main.append(app)
bootstrap.servable()
| 37.690355
| 128
| 0.646465
|
5d4bdac389bb1f270d74efb6c876258d46077110
| 79
|
py
|
Python
|
torch/utils/model_dump/__main__.py
|
Hacky-DH/pytorch
|
80dc4be615854570aa39a7e36495897d8a040ecc
|
[
"Intel"
] | 60,067
|
2017-01-18T17:21:31.000Z
|
2022-03-31T21:37:45.000Z
|
torch/utils/model_dump/__main__.py
|
Hacky-DH/pytorch
|
80dc4be615854570aa39a7e36495897d8a040ecc
|
[
"Intel"
] | 66,955
|
2017-01-18T17:21:38.000Z
|
2022-03-31T23:56:11.000Z
|
torch/utils/model_dump/__main__.py
|
Hacky-DH/pytorch
|
80dc4be615854570aa39a7e36495897d8a040ecc
|
[
"Intel"
] | 19,210
|
2017-01-18T17:45:04.000Z
|
2022-03-31T23:51:56.000Z
|
#!/usr/bin/env python3
import sys
from . import main
sys.exit(main(sys.argv))
| 13.166667
| 24
| 0.721519
|
10e1dd9832c0c616596400be1c0d10e07f0034e9
| 2,535
|
py
|
Python
|
django_tricks/utils/decorators.py
|
mariocesar/django-tricks
|
4653d663bc489aa8b30375b41b6adb0f73720b74
|
[
"0BSD"
] | 2
|
2017-07-31T19:52:34.000Z
|
2021-07-26T02:45:23.000Z
|
django_tricks/utils/decorators.py
|
mariocesar/django-tricks
|
4653d663bc489aa8b30375b41b6adb0f73720b74
|
[
"0BSD"
] | 1
|
2021-03-25T21:43:16.000Z
|
2021-03-25T21:43:16.000Z
|
django_tricks/utils/decorators.py
|
mariocesar/django-tricks
|
4653d663bc489aa8b30375b41b6adb0f73720b74
|
[
"0BSD"
] | null | null | null |
import sys
from django.core.cache import cache
from django.utils.six import wraps
class ServiceReturn:
def __init__(self, name, ret_value, err=None, exc_info=None):
self.name = name
self.ret_value = ret_value
self.exc_info = exc_info
self.err = err
def __repr__(self):
return '<ServiceReturn %s [%s]>' % (self.name, 'success' if self.successful else 'failed')
def __nonzero__(self):
return self.successful
def __bool__(self):
return self.successful
def __iter__(self):
return iter(self.ret_value)
def __getattr__(self, item):
return getattr(self.ret_value, item)
@property
def successful(self):
return self.err is None
@property
def failed(self):
return self.exc_info is not None
def raise_for_status(self):
if self.exc_info:
raise self.err
def service(func):
"""wrap functions'return value with ServiceReturn, catching exceptions and storing
the return value and successful status."""
name = func.__qualname__
@wraps(func)
def inner(*args, **kwargs) -> ServiceReturn:
# Do argument validation if annotations are available
annotations = func.__annotations__
if annotations:
for argname, argtype in annotations.items():
                if argname in kwargs and type(kwargs[argname]) is not argtype:
                    raise ValueError('"%s" argument has the wrong type. '
                                     'Expected %s, found %s' % (argname, argtype, type(kwargs[argname])))
try:
ret = func(*args, **kwargs)
except Exception as err:
exc_info = sys.exc_info()
return ServiceReturn(name, ret_value=None, err=err, exc_info=exc_info)
return ServiceReturn(name, ret_value=ret)
return inner
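# Hedged usage sketch, not from the original module -- the names below are illustrative:
#
#   @service
#   def divide(a: int, b: int):
#       return a / b
#
#   ok = divide(a=4, b=2)
#   assert ok and ok.ret_value == 2.0        # truthy ServiceReturn on success
#   bad = divide(a=1, b=0)
#   assert bad.failed and not bad            # ZeroDivisionError captured in bad.err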
def cacheresult(func, prefix=None, keyname=None):
"""Saves up in the cache the function's return value each time it is called.
Uses the name of the method and their arguments to build the cache key name."""
    keyname = '%s%s' % (prefix or '', keyname or func.__qualname__)
@wraps(func)
def inner(this, *args, **kwargs):
if args or kwargs:
            cachekey = '%s%s%s' % (keyname, hash(args), hash(frozenset(kwargs.items())))
else:
cachekey = keyname
res = cache.get(cachekey)
if res is None:
res = func(this, *args, **kwargs)
cache.set(cachekey, res)
return res
return inner
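# Hedged usage sketch (assumes a configured Django cache backend; expensive_query is a
# hypothetical helper):
#
#   class Report:
#       @cacheresult
#       def totals(self, year):
#           return expensive_query(year)
#
# The first call stores the result under a key built from the qualified name and the
# hashed arguments; repeated calls with the same arguments return the cached value.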
| 27.258065
| 102
| 0.61499
|
45f1ececed2f95831172490cdfaeef9c086369c0
| 9,630
|
py
|
Python
|
runway/cfngin/lookups/handlers/file.py
|
avosper-intellaegis/runway
|
757d4e7db269ec16479b044ac82a69f25fa2a450
|
[
"Apache-2.0"
] | null | null | null |
runway/cfngin/lookups/handlers/file.py
|
avosper-intellaegis/runway
|
757d4e7db269ec16479b044ac82a69f25fa2a450
|
[
"Apache-2.0"
] | null | null | null |
runway/cfngin/lookups/handlers/file.py
|
avosper-intellaegis/runway
|
757d4e7db269ec16479b044ac82a69f25fa2a450
|
[
"Apache-2.0"
] | null | null | null |
"""File lookup."""
# pylint: disable=arguments-differ
# pyright: reportIncompatibleMethodOverride=none
from __future__ import annotations
import base64
import collections.abc
import json
import re
from typing import Any, Callable, Dict, List, Mapping, Sequence, Union, overload
import yaml
from troposphere import Base64, GenericHelperFn
from typing_extensions import Literal
from ....lookups.handlers.base import LookupHandler
from ...utils import read_value_from_path
TYPE_NAME = "file"
_PARAMETER_PATTERN = re.compile(r"{{([::|\w]+)}}")
ParameterizedObjectTypeDef = Union[bytes, str, Mapping[str, Any], Sequence[Any], Any]
ParameterizedObjectReturnTypeDef = Union[
Dict[str, "ParameterizedObjectReturnTypeDef"],
GenericHelperFn,
List["ParameterizedObjectReturnTypeDef"],
]
class FileLookup(LookupHandler):
"""File lookup."""
@classmethod
def handle(cls, value: str, **_: Any) -> Any:
r"""Translate a filename into the file contents.
Args:
value: Parameter(s) given to this lookup.
Fields should use the following format::
<codec>:<path>
Example::
# We've written a file to /some/path:
$ echo "hello there" > /some/path
# With CFNgin we would reference the contents of this file with the
# following
conf_key: ${file plain:file://some/path}
# The above would resolve to
conf_key: hello there
            # Or, if we wanted a base64 encoded copy of the file data
conf_key: ${file base64:file://some/path}
# The above would resolve to
conf_key: aGVsbG8gdGhlcmUK
Supported codecs:
**plain**
Plain Text
**base64**
Encode the plain text file at the given path with base64 prior to
returning it.
**parameterized**
The same as plain, but additionally supports referencing template
parameters to create userdata that's supplemented with information
from the template, as is commonly needed in EC2 UserData.
For example, given a template parameter of BucketName, the file
could contain the following text::
#!/bin/sh
aws s3 sync s3://{{BucketName}}/somepath /somepath
Then you could use something like this in the YAML config
file::
UserData: ${file parameterized:/path/to/file}
Resulting in the UserData parameter being defined as::
{ "Fn::Join" : ["", [
"#!/bin/sh\\naws s3 sync s3://",
{"Ref" : "BucketName"},
"/somepath /somepath"
]] }
**parameterized-b64**
The same as parameterized, with the results additionally wrapped
in ``{ "Fn::Base64": ... }`` , which is what you actually need
for EC2 UserData.
When using parameterized-b64 for UserData, you should use a
variable defined as such:
.. code-block:: python
from troposphere import AWSHelperFn
"UserData": {
"type": AWSHelperFn,
"description": "Instance user data",
"default": Ref("AWS::NoValue")
}
Then assign UserData in a LaunchConfiguration or Instance to
``self.variables["UserData"]``. Note that we use AWSHelperFn
as the type because the parameterized-b64 codec returns either a
Base64 or a GenericHelperFn troposphere object.
**json**
Decode the file as JSON and return the resulting object
**json-parameterized**
Same as ``json``, but applying templating rules from
``parameterized`` to every object *value*. Note that
object *keys* are not modified. Example (an external
PolicyDocument)::
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"some:Action"
],
"Resource": "{{MyResource}}"
}
]
}
**yaml**
Decode the file as YAML and return the resulting object.
All strings are returned as ``unicode`` even in Python 2.
**yaml-parameterized**
Same as ``json-parameterized``, but using YAML. Example::
Version: 2012-10-17
Statement:
- Effect: Allow
Action:
- "some:Action"
Resource: "{{MyResource}}"
"""
try:
codec, path = value.split(":", 1)
except ValueError:
raise TypeError(
'File value must be of the format "<codec>:<path>" (got %s)' % (value)
) from None
return CODECS[codec](read_value_from_path(path))
def _parameterize_string(raw: str) -> GenericHelperFn:
"""Substitute placeholders in a string using CloudFormation references.
Args:
raw: String to be processed. Byte strings are not supported; decode them
before passing them to this function.
Returns:
An expression with placeholders from the input replaced, suitable to be
passed to Troposphere to be included in CloudFormation template.
This will be the input string without modification if no substitutions
are found, and a composition of CloudFormation calls otherwise.
"""
parts: List[Any] = []
s_index = 0
for match in _PARAMETER_PATTERN.finditer(raw):
parts.append(raw[s_index : match.start()])
parts.append({"Ref": match.group(1)})
s_index = match.end()
if not parts:
return GenericHelperFn(raw)
parts.append(raw[s_index:])
return GenericHelperFn({"Fn::Join": ["", parts]})
@overload
def parameterized_codec(
raw: Union[bytes, str], b64: Literal[False] = ...
) -> GenericHelperFn:
...
@overload
def parameterized_codec(raw: Union[bytes, str], b64: Literal[True] = ...) -> Base64:
...
def parameterized_codec(raw: Union[bytes, str], b64: bool = False) -> Any:
"""Parameterize a string, possibly encoding it as Base64 afterwards.
Args:
raw: String to be processed. Byte strings will be interpreted as UTF-8.
b64: Whether to wrap the output in a Base64 CloudFormation call.
Returns:
:class:`troposphere.AWSHelperFn`: Output to be included in a
CloudFormation template.
"""
if isinstance(raw, bytes):
raw = raw.decode("utf-8")
result = _parameterize_string(raw)
# Note, since we want a raw JSON object (not a string) output in the
# template, we wrap the result in GenericHelperFn (not needed if we're
# using Base64)
return Base64(result.data) if b64 else result
@overload
def _parameterize_obj(obj: Union[bytes, str]) -> GenericHelperFn:
...
@overload
def _parameterize_obj(obj: Mapping[str, Any]) -> ParameterizedObjectReturnTypeDef:
...
@overload
def _parameterize_obj(obj: List[Any]) -> ParameterizedObjectReturnTypeDef:
...
def _parameterize_obj(
obj: ParameterizedObjectTypeDef,
) -> ParameterizedObjectReturnTypeDef:
"""Recursively parameterize all strings contained in an object.
    Parametrizes all values of a Mapping, all items of a Sequence, and any
    unicode string; other objects are passed through unmodified.
Byte strings will be interpreted as UTF-8.
Args:
obj: Data to parameterize.
Return:
A parameterized object to be included in a CloudFormation template.
Mappings are converted to `dict`, Sequences are converted to `list`,
and strings possibly replaced by compositions of function calls.
"""
if isinstance(obj, bytes):
return _parameterize_string(obj.decode("utf8"))
if isinstance(obj, str):
return _parameterize_string(obj)
if isinstance(obj, collections.abc.Mapping):
return {key: _parameterize_obj(value) for key, value in obj.items()}
if isinstance(obj, collections.abc.Sequence):
return [_parameterize_obj(item) for item in obj]
return obj
class SafeUnicodeLoader(yaml.SafeLoader):
"""Safe unicode loader."""
def construct_yaml_str(self, node: Any) -> Any:
"""Construct yaml str."""
return self.construct_scalar(node)
def yaml_codec(raw: str, parameterized: bool = False) -> Any:
"""YAML codec."""
data = yaml.load(raw, Loader=SafeUnicodeLoader)
return _parameterize_obj(data) if parameterized else data
def json_codec(raw: str, parameterized: bool = False) -> Any:
"""JSON codec."""
data = json.loads(raw)
return _parameterize_obj(data) if parameterized else data
CODECS: Dict[str, Callable[..., Any]] = {
"plain": lambda x: x,
"base64": lambda x: base64.b64encode(x.encode("utf8")).decode("utf-8"),
"parameterized": lambda x: parameterized_codec(x, False),
"parameterized-b64": lambda x: parameterized_codec(x, True),
"yaml": lambda x: yaml_codec(x, parameterized=False),
"yaml-parameterized": lambda x: yaml_codec(x, parameterized=True),
"json": lambda x: json_codec(x, parameterized=False),
"json-parameterized": lambda x: json_codec(x, parameterized=True),
}
| 31.782178
| 86
| 0.607269
|
99723e54d9d16e1e81a0e906336f7675d8545697
| 2,237
|
py
|
Python
|
tests/unit/ui/test_utils.py
|
varuntiwari27/rally
|
948fba0e8fe8214dd3716451d2a52e014a4115be
|
[
"Apache-2.0"
] | 1
|
2021-09-29T02:16:09.000Z
|
2021-09-29T02:16:09.000Z
|
tests/unit/ui/test_utils.py
|
noah8713/rally-ovs
|
2434787c2cf4ca267108966c4ddc55ded3c333d9
|
[
"Apache-2.0"
] | 1
|
2020-07-14T11:29:31.000Z
|
2020-07-14T11:29:31.000Z
|
tests/unit/ui/test_utils.py
|
noah8713/rally-ovs
|
2434787c2cf4ca267108966c4ddc55ded3c333d9
|
[
"Apache-2.0"
] | 1
|
2020-07-02T01:33:48.000Z
|
2020-07-02T01:33:48.000Z
|
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jinja2
import mock
from rally.ui import utils
from tests.unit import test
class ModuleTestCase(test.TestCase):
def test_get_mako_template(self):
try:
import mako
except ImportError:
self.skip("No mako module. Skipping test.")
template = utils.get_mako_template("ci/index.mako")
self.assertIsInstance(template, mako.template.Template)
def test_get_jinja_template(self):
template = utils.get_jinja_template("base.html")
self.assertIsInstance(template,
jinja2.environment.Template)
self.assertEqual("base.html", template.name)
self.assertIn("include_raw_file", template.globals)
def test_get_jinja_template_raises(self):
self.assertRaises(jinja2.exceptions.TemplateNotFound,
utils.get_jinja_template, "nonexistent")
@mock.patch("rally.ui.utils.get_mako_template")
def test_get_template_mako(self, mock_get_mako_template):
mock_get_mako_template.return_value = "fake_template"
template = utils.get_template("template.mako")
self.assertEqual("fake_template", template)
mock_get_mako_template.assert_called_once_with("template.mako")
@mock.patch("rally.ui.utils.get_jinja_template")
def test_get_template_jinja(self, mock_get_jinja_template):
mock_get_jinja_template.return_value = "fake_template"
template = utils.get_template("template.html")
self.assertEqual("fake_template", template)
mock_get_jinja_template.assert_called_once_with("template.html")
| 39.245614
| 78
| 0.71122
|
f74d3cd803994be5b0ec6f093a396dab53fd0640
| 2,594
|
py
|
Python
|
f_d_c.py
|
ashu2004a/face_recognition
|
a0585d81daa7f0588e6269df8e26fdf7f18c27be
|
[
"CC0-1.0"
] | 1
|
2021-04-25T14:48:23.000Z
|
2021-04-25T14:48:23.000Z
|
f_d_c.py
|
ashu2004a/face_recognition
|
a0585d81daa7f0588e6269df8e26fdf7f18c27be
|
[
"CC0-1.0"
] | null | null | null |
f_d_c.py
|
ashu2004a/face_recognition
|
a0585d81daa7f0588e6269df8e26fdf7f18c27be
|
[
"CC0-1.0"
] | null | null | null |
'''
Author: Ashutosh Panigrahi
Year: 2021
Version: 0.0.1
'''
#This piece of code detect the face (image in png/jpg or else given) given.
import __future__
import click
import os
import re
import face_recognition.api as fcrec
import multiprocessing
import sys
import itertools
def print_result(filename, location):
top, right, bottom, left = location
print("{},{},{},{},{}".format(filename, top, right, bottom, left))
def test_image(image_to_check, model, upsample):
unknown_image = fcrec.load_image_file(image_to_check)
face_locations = fcrec.face_locations(unknown_image, number_of_times_to_upsample=upsample, model=model)
for face_location in face_locations:
print_result(image_to_check, face_location)
def image_files_in_folder(folder):
return [os.path.join(folder, f) for f in os.listdir(folder) if re.match(r'.*\.(jpg|jpeg|png)', f, flags=re.I)]
def process_images_in_process_pool(images_to_check, number_of_cpus, model, upsample):
if number_of_cpus == -1:
processes = None
else:
processes = number_of_cpus
context = multiprocessing
if "forkserver" in multiprocessing.get_all_start_methods():
context = multiprocessing.get_context("forkserver")
pool = context.Pool(processes=processes)
function_parameters = zip(
images_to_check,
itertools.repeat(model),
itertools.repeat(upsample),
)
pool.starmap(test_image, function_parameters)
@click.command()
@click.argument('image_to_check')
@click.option('--cpus', default=1, help='number of CPU cores to use in parallel. -1 means "use all in system"')
@click.option('--model', default="hog", help='Which face detection model to use. Options are "hog" or "cnn".')
@click.option('--upsample', default=0, help='How many times to upsample the image looking for faces. Higher numbers find smaller faces.')
def main(image_to_check, cpus, model, upsample):
# Multi-core processing only supported on Python 3.4 or greater
if (sys.version_info < (3, 4)) and cpus != 1:
click.echo("WARNING: Multi-processing support requires Python 3.4 or greater. Falling back to single-threaded processing!")
cpus = 1
if os.path.isdir(image_to_check):
if cpus == 1:
[test_image(image_file, model, upsample) for image_file in image_files_in_folder(image_to_check)]
else:
process_images_in_process_pool(image_files_in_folder(image_to_check), cpus, model, upsample)
else:
test_image(image_to_check, model, upsample)
if __name__ == "__main__":
main()
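# Illustrative invocation (paths and values are hypothetical):
#   python f_d_c.py ./images --cpus -1 --model cnn --upsample 1
# Each detected face is printed as "filename,top,right,bottom,left".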
| 32.835443
| 137
| 0.712799
|
a6abdd5461a47150ac3bec97bfdc07a72023f47d
| 3,886
|
py
|
Python
|
tests/test_observable/test_takewithtime.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_takewithtime.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_takewithtime.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
import unittest
from reactivex import operators as ops
from reactivex.testing import ReactiveTest, TestScheduler
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestTakeWithTime(unittest.TestCase):
def test_take_zero(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1), on_next(220, 2), on_completed(230)
)
def create():
return xs.pipe(ops.take_with_time(0))
res = scheduler.start(create)
assert res.messages == [on_completed(200)]
assert xs.subscriptions == [subscribe(200, 200)]
def test_take_some(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1), on_next(220, 2), on_next(230, 3), on_completed(240)
)
def create():
return xs.pipe(ops.take_with_time(25))
res = scheduler.start(create)
assert res.messages == [on_next(210, 1), on_next(220, 2), on_completed(225)]
assert xs.subscriptions == [subscribe(200, 225)]
def test_take_late(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1), on_next(220, 2), on_completed(230)
)
def create():
return xs.pipe(ops.take_with_time(50))
res = scheduler.start(create)
assert res.messages == [on_next(210, 1), on_next(220, 2), on_completed(230)]
assert xs.subscriptions == [subscribe(200, 230)]
def test_take_Error(self):
scheduler = TestScheduler()
ex = "ex"
xs = scheduler.create_hot_observable(on_error(210, ex))
def create():
return xs.pipe(ops.take_with_time(50))
res = scheduler.start(create)
assert res.messages == [on_error(210, ex)]
assert xs.subscriptions == [subscribe(200, 210)]
def test_take_never(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable()
def create():
return xs.pipe(ops.take_with_time(50))
res = scheduler.start(create)
assert res.messages == [on_completed(250)]
assert xs.subscriptions == [subscribe(200, 250)]
def test_take_twice1(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1),
on_next(220, 2),
on_next(230, 3),
on_next(240, 4),
on_next(250, 5),
on_next(260, 6),
on_completed(270),
)
def create():
return xs.pipe(
ops.take_with_time(55),
ops.take_with_time(35),
)
res = scheduler.start(create)
assert res.messages == [
on_next(210, 1),
on_next(220, 2),
on_next(230, 3),
on_completed(235),
]
assert xs.subscriptions == [subscribe(200, 235)]
def test_take_twice2(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1),
on_next(220, 2),
on_next(230, 3),
on_next(240, 4),
on_next(250, 5),
on_next(260, 6),
on_completed(270),
)
def create():
return xs.pipe(
ops.take_with_time(35),
ops.take_with_time(55),
)
res = scheduler.start(create)
assert res.messages == [
on_next(210, 1),
on_next(220, 2),
on_next(230, 3),
on_completed(235),
]
assert xs.subscriptions == [subscribe(200, 235)]
| 28.15942
| 84
| 0.575142
|
0c5d8b3b1bb500a47d6a92d1b54714dfde7b4220
| 1,339
|
py
|
Python
|
Codewars/7kyu/ranking-position/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/7kyu/ranking-position/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/7kyu/ranking-position/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
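# The ranking() solution itself is not part of this Codewars test file. A minimal sketch
# that satisfies the assertions below (standard competition ranking: ties share a
# position and the following position is skipped) might look like this -- illustrative only:
#
#   def ranking(people):
#       ordered = sorted(people, key=lambda p: (-p['points'], p['name']))
#       ranked = []
#       for i, p in enumerate(ordered):
#           if i and p['points'] == ranked[-1]['points']:
#               position = ranked[-1]['position']
#           else:
#               position = i + 1
#           ranked.append(dict(p, position=position))
#       return ranked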
input_ = [
{ 'name': 'John', 'points': 100 },
{ 'name': 'Bob', 'points': 130 },
{ 'name': 'Mary', 'points': 120 },
{ 'name': 'Kate', 'points': 120 }
]
output = [
{ 'name': 'Bob', 'points': 130, 'position': 1 },
{ 'name': 'Kate', 'points': 120, 'position': 2 },
{ 'name': 'Mary', 'points': 120, 'position': 2 },
{ 'name': 'John', 'points': 100, 'position': 4 }
]
Test.assert_equals(ranking(input_), output)
input_ = [
{ 'name': 'Bob', 'points': 130 },
{ 'name': 'Mary', 'points': 120 },
{ 'name': 'John', 'points': 100 }
]
output = [
{ 'name': 'Bob', 'points': 130, 'position': 1 },
{ 'name': 'Mary', 'points': 120, 'position': 2 },
{ 'name': 'John', 'points': 100, 'position': 3 }
]
Test.assert_equals(ranking(input_), output)
input_ = [
{ 'name': 'Bob', 'points': 100 },
{ 'name': 'Mary', 'points': 100 },
{ 'name': 'John', 'points': 100 }
]
output = [
{ 'name': 'Bob', 'points': 100, 'position': 1 },
{ 'name': 'John', 'points': 100, 'position': 1 },
{ 'name': 'Mary', 'points': 100, 'position': 1 }
]
Test.assert_equals(ranking(input_), output)
input_ = [{ 'name': 'Joe', 'points': 100 }]
output = [{ 'name': 'Joe', 'points': 100, 'position': 1 }]
Test.assert_equals(ranking(input_), output)
Test.assert_equals(ranking([]), [])
| 29.108696
| 58
| 0.51531
|
0d4017a0c2d13cddc203b9f5d163e6c518c35565
| 10,597
|
py
|
Python
|
src/python/pants/engine/build_files.py
|
lgirault/pants
|
7731c9783d8011089f59d47c41d49fdd6433bb4f
|
[
"Apache-2.0"
] | 94
|
2015-01-15T21:24:20.000Z
|
2022-02-16T16:55:43.000Z
|
src/python/pants/engine/build_files.py
|
lgirault/pants
|
7731c9783d8011089f59d47c41d49fdd6433bb4f
|
[
"Apache-2.0"
] | 5
|
2020-07-18T01:04:43.000Z
|
2021-05-10T08:40:56.000Z
|
src/python/pants/engine/build_files.py
|
lgirault/pants
|
7731c9783d8011089f59d47c41d49fdd6433bb4f
|
[
"Apache-2.0"
] | 47
|
2015-02-25T02:20:07.000Z
|
2022-03-21T00:59:16.000Z
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from builtins import next
from os.path import dirname, join
import six
from future.utils import raise_from
from twitter.common.collections import OrderedSet
from pants.base.project_tree import Dir
from pants.base.specs import SingleAddress, Spec, Specs
from pants.build_graph.address import Address, BuildFileAddress
from pants.build_graph.address_lookup_error import AddressLookupError
from pants.engine.addressable import AddressableDescriptor, BuildFileAddresses
from pants.engine.fs import Digest, FilesContent, PathGlobs, Snapshot
from pants.engine.mapper import AddressFamily, AddressMap, AddressMapper, ResolveError
from pants.engine.objects import Locatable, SerializableFactory, Validatable
from pants.engine.parser import HydratedStruct
from pants.engine.rules import RootRule, rule
from pants.engine.selectors import Get
from pants.engine.struct import Struct
from pants.util.collections_abc_backport import MutableMapping, MutableSequence
from pants.util.objects import TypeConstraintError
logger = logging.getLogger(__name__)
class ResolvedTypeMismatchError(ResolveError):
"""Indicates a resolved object was not of the expected type."""
def _key_func(entry):
key, value = entry
return key
@rule(AddressFamily, [AddressMapper, Dir])
def parse_address_family(address_mapper, directory):
"""Given an AddressMapper and a directory, return an AddressFamily.
The AddressFamily may be empty, but it will not be None.
"""
patterns = tuple(join(directory.path, p) for p in address_mapper.build_patterns)
path_globs = PathGlobs(include=patterns,
exclude=address_mapper.build_ignore_patterns)
snapshot = yield Get(Snapshot, PathGlobs, path_globs)
files_content = yield Get(FilesContent, Digest, snapshot.directory_digest)
if not files_content:
raise ResolveError('Directory "{}" does not contain any BUILD files.'.format(directory.path))
address_maps = []
for filecontent_product in files_content:
address_maps.append(AddressMap.parse(filecontent_product.path,
filecontent_product.content,
address_mapper.parser))
yield AddressFamily.create(directory.path, address_maps)
def _raise_did_you_mean(address_family, name, source=None):
names = [a.target_name for a in address_family.addressables]
possibilities = '\n '.join(':{}'.format(target_name) for target_name in sorted(names))
resolve_error = ResolveError('"{}" was not found in namespace "{}". '
'Did you mean one of:\n {}'
.format(name, address_family.namespace, possibilities))
if source:
raise_from(resolve_error, source)
else:
raise resolve_error
@rule(HydratedStruct, [AddressMapper, Address])
def hydrate_struct(address_mapper, address):
"""Given an AddressMapper and an Address, resolve a Struct from a BUILD file.
Recursively collects any embedded addressables within the Struct, but will not walk into a
dependencies field, since those should be requested explicitly by rules.
"""
address_family = yield Get(AddressFamily, Dir(address.spec_path))
struct = address_family.addressables.get(address)
addresses = address_family.addressables
if not struct or address not in addresses:
_raise_did_you_mean(address_family, address.target_name)
# TODO: This is effectively: "get the BuildFileAddress for this Address".
# see https://github.com/pantsbuild/pants/issues/6657
address = next(build_address for build_address in addresses if build_address == address)
inline_dependencies = []
def maybe_append(outer_key, value):
if isinstance(value, six.string_types):
if outer_key != 'dependencies':
inline_dependencies.append(Address.parse(value,
relative_to=address.spec_path,
subproject_roots=address_mapper.subproject_roots))
elif isinstance(value, Struct):
collect_inline_dependencies(value)
def collect_inline_dependencies(item):
for key, value in sorted(item._asdict().items(), key=_key_func):
if not AddressableDescriptor.is_addressable(item, key):
continue
if isinstance(value, MutableMapping):
for _, v in sorted(value.items(), key=_key_func):
maybe_append(key, v)
elif isinstance(value, MutableSequence):
for v in value:
maybe_append(key, v)
else:
maybe_append(key, value)
# Recursively collect inline dependencies from the fields of the struct into `inline_dependencies`.
collect_inline_dependencies(struct)
# And then hydrate the inline dependencies.
hydrated_inline_dependencies = yield [Get(HydratedStruct, Address, a) for a in inline_dependencies]
dependencies = [d.value for d in hydrated_inline_dependencies]
def maybe_consume(outer_key, value):
if isinstance(value, six.string_types):
if outer_key == 'dependencies':
# Don't recurse into the dependencies field of a Struct, since those will be explicitly
# requested by tasks. But do ensure that their addresses are absolute, since we're
# about to lose the context in which they were declared.
value = Address.parse(value,
relative_to=address.spec_path,
subproject_roots=address_mapper.subproject_roots)
else:
value = dependencies[maybe_consume.idx]
maybe_consume.idx += 1
elif isinstance(value, Struct):
value = consume_dependencies(value)
return value
# NB: Some pythons throw an UnboundLocalError for `idx` if it is a simple local variable.
maybe_consume.idx = 0
# 'zip' the previously-requested dependencies back together as struct fields.
def consume_dependencies(item, args=None):
hydrated_args = args or {}
for key, value in sorted(item._asdict().items(), key=_key_func):
if not AddressableDescriptor.is_addressable(item, key):
hydrated_args[key] = value
continue
if isinstance(value, MutableMapping):
container_type = type(value)
hydrated_args[key] = container_type((k, maybe_consume(key, v))
for k, v in sorted(value.items(), key=_key_func))
elif isinstance(value, MutableSequence):
container_type = type(value)
hydrated_args[key] = container_type(maybe_consume(key, v) for v in value)
else:
hydrated_args[key] = maybe_consume(key, value)
return _hydrate(type(item), address.spec_path, **hydrated_args)
yield HydratedStruct(consume_dependencies(struct, args={'address': address}))
def _hydrate(item_type, spec_path, **kwargs):
# If the item will be Locatable, inject the spec_path.
if issubclass(item_type, Locatable):
kwargs['spec_path'] = spec_path
try:
item = item_type(**kwargs)
except TypeConstraintError as e:
raise ResolvedTypeMismatchError(e)
# Let factories replace the hydrated object.
if isinstance(item, SerializableFactory):
item = item.create()
# Finally make sure objects that can self-validate get a chance to do so.
if isinstance(item, Validatable):
item.validate()
return item
@rule(BuildFileAddresses, [AddressMapper, Specs])
def addresses_from_address_families(address_mapper, specs):
"""Given an AddressMapper and list of Specs, return matching BuildFileAddresses.
:raises: :class:`ResolveError` if:
- there were no matching AddressFamilies, or
- the Spec matches no addresses for SingleAddresses.
:raises: :class:`AddressLookupError` if no targets are matched for non-SingleAddress specs.
"""
# Capture a Snapshot covering all paths for these Specs, then group by directory.
snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
dirnames = {dirname(f) for f in snapshot.files}
address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]
address_family_by_directory = {af.namespace: af for af in address_families}
matched_addresses = OrderedSet()
for spec in specs:
# NB: if a spec is provided which expands to some number of targets, but those targets match
# --exclude-target-regexp, we do NOT fail! This is why we wait to apply the tag and exclude
# patterns until we gather all the targets the spec would have matched without them.
try:
addr_families_for_spec = spec.matching_address_families(address_family_by_directory)
except Spec.AddressFamilyResolutionError as e:
raise raise_from(ResolveError(e), e)
try:
all_addr_tgt_pairs = spec.address_target_pairs_from_address_families(addr_families_for_spec)
except Spec.AddressResolutionError as e:
raise raise_from(AddressLookupError(e), e)
except SingleAddress._SingleAddressResolutionError as e:
_raise_did_you_mean(e.single_address_family, e.name, source=e)
matched_addresses.update(
addr for (addr, tgt) in all_addr_tgt_pairs
if specs.matcher.matches_target_address_pair(addr, tgt)
)
# NB: This may be empty, as the result of filtering by tag and exclude patterns!
yield BuildFileAddresses(tuple(matched_addresses))
def _spec_to_globs(address_mapper, specs):
"""Given a Specs object, return a PathGlobs object for the build files that it matches."""
patterns = set()
for spec in specs:
patterns.update(spec.make_glob_patterns(address_mapper))
return PathGlobs(include=patterns, exclude=address_mapper.build_ignore_patterns)
def create_graph_rules(address_mapper):
"""Creates tasks used to parse Structs from BUILD files.
  :param address_mapper: The AddressMapper instance exposed to the rules via a singleton rule.
"""
@rule(AddressMapper, [])
def address_mapper_singleton():
return address_mapper
return [
address_mapper_singleton,
# BUILD file parsing.
hydrate_struct,
parse_address_family,
# Spec handling: locate directories that contain build files, and request
# AddressFamilies for each of them.
addresses_from_address_families,
# Root rules representing parameters that might be provided via root subjects.
RootRule(Address),
RootRule(BuildFileAddress),
RootRule(BuildFileAddresses),
RootRule(Specs),
]
| 40.446565
| 101
| 0.733038
|
efd5dd58e7f9cef8933ebf8f712c7a6e44637acc
| 3,247
|
py
|
Python
|
test/test_TimeseriesStats.py
|
gunny26/datalogger4
|
ab1b6bd295e41f57530a9f1c0977f5dcd1eabf2a
|
[
"Apache-2.0"
] | null | null | null |
test/test_TimeseriesStats.py
|
gunny26/datalogger4
|
ab1b6bd295e41f57530a9f1c0977f5dcd1eabf2a
|
[
"Apache-2.0"
] | null | null | null |
test/test_TimeseriesStats.py
|
gunny26/datalogger4
|
ab1b6bd295e41f57530a9f1c0977f5dcd1eabf2a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
import unittest
import logging
import datetime
import gzip
import os
import json
# own modules
from datalogger4.Timeseries import Timeseries
from datalogger4.TimeseriesStats import TimeseriesStats
class Test(unittest.TestCase):
def setUp(self):
self.basedir = "/var/rrd"
self.datestring = "2015-11-30"
self.testfile = "testdata/ts_KHUnc3J2d2Vic3FsMi50aWxhay5jYycsKQ==.csv.gz"
with gzip.open(self.testfile, "rt") as infile:
ts = Timeseries.load(infile)
self.tsstat = TimeseriesStats(ts)
def test__eq__(self):
with gzip.open(self.testfile, "rt") as infile:
ts = Timeseries.load(infile)
tsstat = TimeseriesStats(ts)
assert tsstat == self.tsstat
def test__getitem__(self):
assert self.tsstat[("opened_tables", "min")] == 3581.000000
assert self.tsstat["opened_tables"] == {'min': 3581.0, 'max': 3601.0, 'avg': 3594.0, 'sum': 1035072.0, 'std': 7.340905181848413, 'median': 3599.0, 'count': 288, 'first': 3581.0, 'last': 3601.0, 'mean': 3594.0, 'inc': 20.0, 'dec': 0.0, 'diff': 20.0}
def test__str__(self):
print(self.tsstat)
def test_keys(self):
assert list(self.tsstat.keys()) == ['com_select', 'uptime', 'com_insert', 'slow_queries', 'bytes_sent', 'com_update', 'connections', 'com_delete', 'qcache_hits', 'questions', 'opened_tables', 'aborted_connects', 'bytes_received', 'created_tmp_tables', 'created_tmp_disk_tables', 'aborted_clients']
def test_values(self):
assert list(self.tsstat.values())[0]["min"] == 89169365.0
def test_items(self):
for key, value in self.tsstat.items():
assert isinstance(key, str)
assert isinstance(value, dict)
def test_stats(self):
assert isinstance(self.tsstat.stats, dict)
def test_funcnames(self):
assert list(self.tsstat.funcnames) == ['avg', 'count', 'dec', 'diff', 'first', 'inc', 'last', 'max', 'mean', 'median', 'min', 'std', 'sum']
def test_get_stats(self):
assert isinstance(self.tsstat.get_stats(), dict)
def test_get_stat(self):
assert self.tsstat.get_stat("min")['com_select'] == 89169365.0
def test_dump(self):
with open("testdata/tsstat_testdump.json", "wt") as outfile:
self.tsstat.dump(outfile)
with open("testdata/tsstat_testdump.json", "rt") as infile:
tsstat = TimeseriesStats.load(infile)
assert self.tsstat == tsstat
def test_load(self):
with open("testdata/tsstat_KHUnc3J2d2Vic3FsMi50aWxhay5jYycsKQ==.json", "rt") as infile:
tsstat = TimeseriesStats.load(infile)
assert tsstat[("com_select", "min")] == 0.000000
def test_to_json(self):
assert "\"diff\": 10961584.0" in self.tsstat.to_json()
def test_to_data(self):
data = self.tsstat.to_data()
assert data["aborted_clients"]["count"] == 288
data_str = json.dumps(data, indent=4)
assert isinstance(data_str, str)
def test_from_json(self):
tsstat = TimeseriesStats.from_json(self.tsstat.to_json())
assert tsstat == self.tsstat
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
unittest.main()
| 36.483146
| 305
| 0.647367
|
6a2fa229c6af2e24d45c083bcab9df06b3111d70
| 23,985
|
py
|
Python
|
tests/reflectometry/data_test.py
|
arm61/ess
|
f3d3fd47d334420b0b838aec720efc38c186a420
|
[
"BSD-3-Clause"
] | null | null | null |
tests/reflectometry/data_test.py
|
arm61/ess
|
f3d3fd47d334420b0b838aec720efc38c186a420
|
[
"BSD-3-Clause"
] | null | null | null |
tests/reflectometry/data_test.py
|
arm61/ess
|
f3d3fd47d334420b0b838aec720efc38c186a420
|
[
"BSD-3-Clause"
] | null | null | null |
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2021 Scipp contributors (https://github.com/scipp)
# flake8: noqa: E501
"""
Tests for data module
"""
# author: Andrew R. McCluskey (arm61)
import os
import unittest
import numpy as np
from numpy.testing import assert_almost_equal, assert_equal
import scipp as sc
from ess.reflectometry import data
from ..tools.io import file_location
np.random.seed(1)
N = 9
VALUES = np.ones(N)
DETECTORS = np.random.randint(1, 5, size=(N))
DATA = sc.DataArray(
data=sc.Variable(
dims=["event"],
unit=sc.units.counts,
values=VALUES,
dtype=sc.dtype.float32,
),
coords={
"detector_id":
sc.Variable(dims=["event"], values=DETECTORS, dtype=sc.dtype.int32)
},
)
DETECTOR_ID = sc.Variable(dims=["detector_id"],
values=np.arange(1, 5),
dtype=sc.dtype.int32)
BINNED = sc.bin(DATA, groups=[DETECTOR_ID])
PIXELS = np.array([[1, 1, 1], [1, 2, 1], [2, 1, 1], [2, 2, 1]])
X = sc.Variable(
dims=["detector_id"],
values=PIXELS[:, 0],
dtype=sc.dtype.float64,
unit=sc.units.m,
)
Y = sc.Variable(
dims=["detector_id"],
values=PIXELS[:, 1],
dtype=sc.dtype.float64,
unit=sc.units.m,
)
Z = sc.Variable(
dims=["detector_id"],
values=PIXELS[:, 2],
dtype=sc.dtype.float64,
unit=sc.units.m,
)
BINNED.coords["position"] = sc.geometry.position(X, Y, Z)
BINNED.attrs['instrument_name'] = sc.scalar(value='AMOR')
BINNED.attrs['experiment_title'] = sc.scalar(value='test')
class TestData(unittest.TestCase):
# Commented out until the sample.nxs file has a home
# def test_refldata_file(self):
# file_path = (os.path.dirname(os.path.realpath(__file__)) +
# os.path.sep + "sample.nxs")
# p = data.ReflData(file_path)
# assert_equal(isinstance(p.data, sc._scipp.core.DataArray), True)
# assert_equal(p.data_file, file_path)
def test_refldata_init(self):
"""
Testing the default initialisation of the ReflData objects.
"""
p = data.ReflData(BINNED.copy())
assert_equal(isinstance(p.data, sc._scipp.core.DataArray), True)
assert_equal(isinstance(p.data.data, sc._scipp.core.Variable), True)
assert_almost_equal(p.data.coords["position"].fields.x.values,
X.values)
assert_almost_equal(p.data.coords["position"].fields.y.values,
Y.values)
assert_almost_equal(p.data.coords["position"].fields.z.values,
Z.values)
assert_almost_equal(
np.sort(
p.data.bins.constituents["data"].coords["detector_id"].values),
np.sort(DETECTORS),
)
assert_almost_equal(
np.sort(p.data.bins.constituents["data"].values),
np.sort(VALUES),
decimal=5,
)
assert_almost_equal(
np.sort(p.data.bins.constituents["data"].variances),
np.sort(np.ones_like(VALUES)),
decimal=5,
)
assert_almost_equal(p.sample_angle_offset.values, 0)
assert_equal(p.sample_angle_offset.unit, sc.units.deg)
def test_refldata_init_sample_angle_offset(self):
"""
Testing the ReflData initialisation with a non-default sample_angle_offset.
"""
p = data.ReflData(BINNED.copy(), sample_angle_offset=2 * sc.units.deg)
assert_almost_equal(p.sample_angle_offset.values, 2)
assert_equal(p.sample_angle_offset.unit, sc.units.deg)
def test_refldata_event(self):
p = data.ReflData(BINNED.copy())
assert_equal(isinstance(p.event, sc._scipp.core.DataArray), True)
assert_almost_equal(np.sort(p.event.coords["detector_id"].values),
np.sort(DETECTORS))
assert_almost_equal(np.sort(p.event.values),
np.sort(VALUES),
decimal=5)
assert_almost_equal(
np.sort(p.event.variances),
np.sort(np.ones_like(VALUES)),
decimal=5,
)
def test_q_bin(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=np.linspace(1, 10, N),
unit=sc.Unit('1/angstrom'),
)
p.event.coords["sigma_qz_by_qz"] = sc.Variable(
dims=["event"],
values=np.linspace(0.1, 1.0, N),
unit=sc.Unit('1/angstrom'),
dtype=sc.dtype.float64,
)
p.event.coords["tof"] = sc.Variable(dims=["event"], values=DETECTORS)
bins = np.linspace(0, 11, 4)
b = p.q_bin(bins)
assert_almost_equal(b.coords["qz"].values, bins)
assert_almost_equal(b.coords["sigma_qz_by_qz"].values,
np.linspace(0.325, 1.0, 3))
assert_almost_equal(b.data.values, np.array([3.0, 3.0, 3.]) / 9.)
assert_almost_equal(b.data.variances, np.array([3.0, 3.0, 3.]) / 81.)
def test_q_bin_None(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=np.linspace(1, 10, N),
unit=sc.Unit('1/angstrom'),
)
p.event.coords["sigma_qz_by_qz"] = sc.Variable(
dims=["event"],
values=np.linspace(0.1, 1.0, N),
unit=sc.Unit('1/angstrom'),
dtype=sc.dtype.float64,
)
p.event.coords["tof"] = sc.Variable(dims=["event"], values=DETECTORS)
b = p.q_bin()
assert_equal(b.shape, [199])
def test_q_bin_different_unit(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=np.linspace(1, 10, N),
unit=(1 / sc.units.m).unit,
)
p.event.coords["sigma_qz_by_qz"] = sc.Variable(
dims=["event"],
values=np.linspace(0.1, 1.0, N),
unit=(1 / sc.units.m).unit,
dtype=sc.dtype.float64,
)
p.event.coords["tof"] = sc.Variable(dims=["event"], values=DETECTORS)
bins = np.linspace(0, 11, 4)
b = p.q_bin(bins, unit=(1 / sc.units.m).unit)
assert_almost_equal(b.coords["qz"].values, bins)
assert_almost_equal(b.coords["sigma_qz_by_qz"].values,
np.linspace(0.325, 1.0, 3))
assert_almost_equal(b.data.values, np.array([3.0, 3.0, 3.]) / 9.)
assert_almost_equal(b.data.variances, np.array([3.0, 3.0, 3.]) / 81.)
def test_q_bin_no_qz(self):
p = data.ReflData(BINNED.copy())
p.event.coords["sigma_qz_by_qz"] = sc.Variable(
dims=["event"],
values=np.linspace(0.1, 1.0, N),
unit=(1 / sc.units.m).unit,
dtype=sc.dtype.float64,
)
p.event.coords["tof"] = sc.Variable(dims=["event"], values=DETECTORS)
bins = np.linspace(0, 11, 4)
with self.assertRaises(sc.NotFoundError):
_ = p.q_bin(bins)
def test_q_bin_no_qzresolution(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=np.linspace(1, 10, N),
unit=sc.Unit('1/angstrom'),
)
p.event.coords["tof"] = sc.Variable(dims=["event"], values=DETECTORS)
bins = np.linspace(0, 11, 4)
b = p.q_bin(bins)
assert_almost_equal(b.coords["qz"].values, bins)
assert_almost_equal(b.data.values, np.array([3.0, 3.0, 3.]) / 9.)
assert_almost_equal(b.data.variances, np.array([3.0, 3.0, 3.]) / 81.)
def test_q_bin_no_tof(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=np.linspace(1, 10, N),
unit=sc.Unit('1/angstrom'),
)
p.event.coords["sigma_qz_by_qz"] = sc.Variable(
dims=["event"],
values=np.linspace(0.1, 1.0, N),
unit=sc.Unit('1/angstrom'),
dtype=sc.dtype.float64,
)
bins = np.linspace(0, 11, 4)
with self.assertRaises(sc.NotFoundError):
_ = p.q_bin(bins)
def test_wavelength_theta_bin(self):
p = data.ReflData(BINNED.copy())
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
binned = p.wavelength_theta_bin()
assert_equal(binned.shape, [49, 49])
def test_q_theta_bin(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.Unit('1/angstrom'),
)
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
binned = p.q_theta_bin()
assert_equal(binned.shape, [49, 49])
def test_wavelength_q_bin(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.Unit('1/angstrom'),
)
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
binned = p.wavelength_q_bin()
assert_equal(binned.shape, [49, 49])
def test_tof_to_wavelength(self):
p = data.ReflData(BINNED.copy())
p.event.coords["tof"] = sc.Variable(dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64)
p.data.attrs["source_position"] = sc.geometry.position(
0.0 * sc.units.m, 0.0 * sc.units.m, -15.0 * sc.units.m)
p.data.attrs["sample_position"] = sc.geometry.position(
0.0 * sc.units.m, 0.0 * sc.units.m, 0.0 * sc.units.m)
p.find_wavelength()
assert_almost_equal(
p.event.coords["wavelength"].values,
[
0.0004729,
0.0009457,
0.0002267,
0.0002267,
0.0009069,
0.0004396,
0.0008791,
0.0004396,
0.0008791,
],
)
def test_find_theta_gravity(self):
p = data.ReflData(BINNED.copy())
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
p.data.attrs["sample_position"] = sc.geometry.position(
0.0 * sc.units.m, 0.0 * sc.units.m, 0.0 * sc.units.m)
p.find_theta()
assert_almost_equal(
p.event.coords["theta"].values,
[
44.9999641, 44.9998564, 63.4349452, 63.4349452, 63.4348914,
63.4349345, 63.4348914, 63.4349345, 63.4348914
],
)
assert_almost_equal(
p.event.coords["sigma_theta_by_theta"].values,
[
0.0013517,
0.0013517,
0.0009589,
0.0009589,
0.0009589,
0.0009589,
0.0009589,
0.0009589,
0.0009589,
],
)
assert_almost_equal(
p.data.attrs["sigma_gamma"].values,
[0.0608281, 0.0608281, 0.0608281, 0.0608281],
)
def test_find_theta_no_gravity(self):
p = data.ReflData(BINNED.copy(), gravity=False)
with self.assertRaises(NotImplementedError):
p.find_theta()
def test_find_qz(self):
p = data.ReflData(BINNED.copy())
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
p.find_qz()
assert_almost_equal(
p.event.coords["qz"].values,
[
0.21928,
0.21914643,
0.21931341,
0.21931341,
0.21914643,
0.21928,
0.21914643,
0.21928,
0.21914643,
],
)
assert_almost_equal(p.event.coords["sigma_qz_by_qz"].values,
np.zeros(9))
def test_find_qz_with_resolution(self):
p = data.ReflData(BINNED.copy())
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
p.event.coords["sigma_theta_by_theta"] = sc.Variable(
dims=["event"], values=DETECTORS * 0.1, dtype=sc.dtype.float64)
p.data.coords["sigma_lambda_by_lamdba"] = sc.Variable(
dims=["detector_id"],
values=np.arange(1, 5) * 0.1,
dtype=sc.dtype.float64,
)
p.find_qz()
assert_almost_equal(
p.event.coords["qz"].values,
[
0.21928,
0.21914643,
0.21931341,
0.21931341,
0.21914643,
0.21928,
0.21914643,
0.21928,
0.21914643,
],
)
assert_almost_equal(
p.event.coords["sigma_qz_by_qz"].values,
[
0.2236068,
0.4123106,
0.2236068,
0.2236068,
0.4472136,
0.4472136,
0.5656854,
0.4472136,
0.5656854,
],
)
def test_illumination(self):
p = data.ReflData(BINNED.copy())
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS * 0.1,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
p.beam_width = 50e-3 * sc.units.m
p.sample_size = 0.10 * sc.units.m
p.illumination()
assert_almost_equal(
p.event.coords["illumination"].values,
[
0.7549526,
0.9799233,
0.4389162,
0.4389162,
0.9799233,
0.7549526,
0.9799233,
0.7549526,
0.9799233,
],
)
assert_almost_equal(
p.event.data.values,
1 / np.array(
[
0.7549526,
0.9799233,
0.4389162,
0.4389162,
0.9799233,
0.7549526,
0.9799233,
0.7549526,
0.9799233,
],
dtype=np.float32,
),
)
assert_almost_equal(
p.event.data.variances,
1 / np.array(
[
0.7549526,
0.9799233,
0.4389162,
0.4389162,
0.9799233,
0.7549526,
0.9799233,
0.7549526,
0.9799233,
],
dtype=np.float32,
)**2,
decimal=6,
)
def test_detector_masking_defaults(self):
p = data.ReflData(BINNED.copy())
p.detector_masking()
assert_equal(p.data.masks["x_mask"].values, [False] * 4)
assert_equal(p.data.masks["y_mask"].values, [False] * 4)
assert_equal(p.data.masks["z_mask"].values, [False] * 4)
def test_detector_masking_x_min(self):
p = data.ReflData(BINNED.copy())
p.detector_masking(x_min=2 * sc.units.m)
assert_equal(p.data.masks["x_mask"].values, [True, True, False, False])
assert_equal(p.data.masks["y_mask"].values, [False] * 4)
assert_equal(p.data.masks["z_mask"].values, [False] * 4)
def test_detector_masking_x_max(self):
p = data.ReflData(BINNED.copy())
p.detector_masking(x_max=1 * sc.units.m)
assert_equal(p.data.masks["x_mask"].values, [False, False, True, True])
assert_equal(p.data.masks["y_mask"].values, [False] * 4)
assert_equal(p.data.masks["z_mask"].values, [False] * 4)
def test_detector_masking_y_min(self):
p = data.ReflData(BINNED.copy())
p.detector_masking(y_min=2 * sc.units.m)
assert_equal(p.data.masks["y_mask"].values, [True, False, True, False])
assert_equal(p.data.masks["x_mask"].values, [False] * 4)
assert_equal(p.data.masks["z_mask"].values, [False] * 4)
def test_detector_masking_y_max(self):
p = data.ReflData(BINNED.copy())
p.detector_masking(y_max=1 * sc.units.m)
assert_equal(p.data.masks["y_mask"].values, [False, True, False, True])
assert_equal(p.data.masks["x_mask"].values, [False] * 4)
assert_equal(p.data.masks["z_mask"].values, [False] * 4)
def test_detector_masking_z_min(self):
p = data.ReflData(BINNED.copy())
p.detector_masking(z_min=2 * sc.units.m)
assert_equal(p.data.masks["z_mask"].values, [True, True, True, True])
assert_equal(p.data.masks["y_mask"].values, [False] * 4)
assert_equal(p.data.masks["x_mask"].values, [False] * 4)
def test_detector_masking_z_max(self):
p = data.ReflData(BINNED.copy())
p.detector_masking(z_max=1 * sc.units.m)
assert_equal(p.data.masks["z_mask"].values,
[False, False, False, False])
assert_equal(p.data.masks["y_mask"].values, [False] * 4)
assert_equal(p.data.masks["x_mask"].values, [False] * 4)
def test_theta_masking(self):
p = data.ReflData(BINNED.copy())
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
p.theta_masking(theta_min=2 * sc.units.deg, theta_max=4 * sc.units.deg)
assert_equal(
p.data.masks["theta"].values,
[True, False, True],
)
def test_theta_masking_no_min(self):
p = data.ReflData(BINNED.copy())
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
p.theta_masking(theta_max=4 * sc.units.deg)
assert_equal(
p.data.masks["theta"].values,
[True, False, True],
)
def test_theta_masking_no_max(self):
p = data.ReflData(BINNED.copy())
p.event.coords["theta"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.deg,
)
p.theta_masking(theta_min=2 * sc.units.deg)
assert_equal(
p.data.masks["theta"].values,
[True, False, True],
)
def test_wavelength_masking(self):
p = data.ReflData(BINNED.copy())
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
p.wavelength_masking(
wavelength_min=2 * sc.units.angstrom,
wavelength_max=4 * sc.units.angstrom,
)
assert_equal(
p.data.masks["wavelength"].values,
[True, False, True],
)
def test_wavelength_masking_no_min(self):
p = data.ReflData(BINNED.copy())
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
p.wavelength_masking(wavelength_max=4 * sc.units.angstrom)
assert_equal(
p.data.masks["wavelength"].values,
[True, False, True],
)
def test_wavelength_masking_no_max(self):
p = data.ReflData(BINNED.copy())
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS,
dtype=sc.dtype.float64,
unit=sc.units.angstrom,
)
p.wavelength_masking(wavelength_min=2 * sc.units.angstrom)
assert_equal(
p.data.masks["wavelength"].values,
[True, False, True],
)
def test_write(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=np.linspace(1, 10, N),
unit=sc.Unit('1/angstrom'),
)
p.event.coords["sigma_qz_by_qz"] = sc.Variable(
dims=["event"],
values=np.linspace(0.1, 1.0, N),
unit=sc.Unit('1/angstrom'),
dtype=sc.dtype.float64,
)
p.event.coords["tof"] = sc.Variable(dims=["event"], values=DETECTORS)
with file_location("test1.txt") as file_path:
p.write_reflectometry(file_path)
written_data = np.loadtxt(file_path, unpack=True)
assert_equal(written_data.shape, (4, 199))
def test_write_bins(self):
p = data.ReflData(BINNED.copy())
p.event.coords["qz"] = sc.Variable(
dims=["event"],
values=np.linspace(1, 10, N),
unit=sc.Unit('1/angstrom'),
)
p.event.coords["sigma_qz_by_qz"] = sc.Variable(
dims=["event"],
values=np.linspace(0.1, 1.0, N),
unit=sc.Unit('1/angstrom'),
dtype=sc.dtype.float64,
)
p.event.coords["tof"] = sc.Variable(dims=["event"], values=DETECTORS)
bins = np.linspace(0, 11, 4)
with file_location("test2.txt") as file_path:
p.write_reflectometry(file_path, {"bins": bins})
written_data = np.loadtxt(file_path, unpack=True)
assert_almost_equal(written_data[0], bins[:-1] + np.diff(bins))
assert_almost_equal(written_data[1], np.array([3, 3, 3]) / 9)
assert_almost_equal(written_data[2],
np.sqrt(np.array([3, 3, 3]) / 81))
assert_almost_equal(written_data[3], np.linspace(0.325, 1.0, 3))
def test_write_wavelength_theta(self):
p = data.ReflData(BINNED.copy())
p.event.coords["wavelength"] = sc.Variable(
dims=["event"],
values=DETECTORS.astype(float),
unit=sc.units.angstrom)
p.event.coords["theta"] = sc.Variable(dims=["event"],
values=DETECTORS.astype(float),
unit=sc.units.deg)
bins = np.linspace(0, 100, 10)
with file_location("test1.txt") as file_path:
p.write_wavelength_theta(file_path, (bins, bins))
written_data = np.loadtxt(file_path, unpack=True)
assert_equal(written_data.shape, (11, 9))
| 34.76087
| 83
| 0.525578
|
2481d3cf96dc57e73f536d5817da17eef8b9dfa2
| 8,607
|
py
|
Python
|
src/external/elf-loader/extract-system-config.py
|
Tiamat-Tech/shadow
|
482458a2ff0573d4b5eec0c588852af46aead24a
|
[
"BSD-3-Clause"
] | 1
|
2021-04-17T01:00:35.000Z
|
2021-04-17T01:00:35.000Z
|
src/external/elf-loader/extract-system-config.py
|
Tiamat-Tech/shadow
|
482458a2ff0573d4b5eec0c588852af46aead24a
|
[
"BSD-3-Clause"
] | 18
|
2020-12-15T07:11:46.000Z
|
2022-02-15T00:07:52.000Z
|
src/external/elf-loader/extract-system-config.py
|
Tiamat-Tech/shadow
|
482458a2ff0573d4b5eec0c588852af46aead24a
|
[
"BSD-3-Clause"
] | 1
|
2021-09-21T22:20:32.000Z
|
2021-09-21T22:20:32.000Z
|
#!/usr/bin/env python3
import sys
import re
import getopt
import os
import codecs
from elftools.elf.elffile import ELFFile
# Should be inheriting FileNotFoundError but we use Exception to ensure python2 compatibility
class CouldNotFindFile(Exception):
pass
class DebugData:
def __init__(self, debug_filename):
with open(debug_filename, 'rb') as f:
elffile = ELFFile(f)
assert elffile.has_dwarf_info(), debug_filename + ' has no DWARF info'
self.dwarfinfo = elffile.get_dwarf_info()
def get_struct_size(self, struct_die, required=True):
if struct_die is not None:
return struct_die.attributes['DW_AT_byte_size'].value
assert not required
return None
def get_member_offset (self, die, member_name, required=True):
member = self._find_member_in_struct(die, member_name.encode('UTF-8'))
if member is not None and 'DW_AT_data_member_location' in member.attributes:
return member.attributes['DW_AT_data_member_location'].value
assert not required
return None
def get_struct_die(self, struct_name):
return self._get_X('DW_TAG_structure_type', struct_name)
def get_type_die(self, type_name):
typedef = self._get_X('DW_TAG_typedef', type_name)
ref = typedef.attributes['DW_AT_type'].value
for CU in self.dwarfinfo.iter_CUs():
for die in CU.iter_DIEs():
if die.offset == ref:
return die
return None
def _find_in_DIE(self, die, tag_name, struct_name):
if die.tag == tag_name and \
'DW_AT_name' in die.attributes and \
die.attributes['DW_AT_name'].value == struct_name:
return die
if die.has_children:
for child in die.iter_children():
result = self._find_in_DIE(child, tag_name, struct_name)
if result is not None:
return result
return None
def _get_X(self, tag_name, item_name):
for CU in self.dwarfinfo.iter_CUs():
for die in CU.iter_DIEs():
item = self._find_in_DIE(die, tag_name, item_name.encode('UTF-8'))
if item is not None:
return item
return None
def _find_member_in_struct(self, struct, member_name):
for die in struct.iter_children():
if die.tag == 'DW_TAG_member' and \
'DW_AT_name' in die.attributes and \
die.attributes['DW_AT_name'].value == member_name:
return die
return None
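# Debug symbols for the dynamic loader are found either in distribution
# specific debug directories (see search_debug_file below) or via the GNU
# build-id note, which maps to
# /usr/lib/debug/.build-id/<first 2 hex chars>/<remaining hex chars>.debug.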
def find_build_id(path):
if not os.path.exists(path):
return None
with open(path, 'rb') as f:
elffile = ELFFile(f)
section = elffile.get_section_by_name('.note.gnu.build-id')
build_id = ''
try:
note = next(section.iter_notes())
build_id = note['n_desc']
except AttributeError:
# older versions of pyelftools don't support notes,
# so parse the section data directly
build_id = codecs.getencoder('hex')(section.data()[-20:])[0].decode('UTF-8')
return "/usr/lib/debug/.build-id/{}/{}.debug".format(build_id[0:2], build_id[2:])
def check_file_regex(directory, file_regex):
if not os.path.exists(directory):
return None
lines = os.listdir(directory)
regex = re.compile(file_regex)
for line in lines:
result = regex.search(line)
if result:
return directory + result.group()
return None
def search_debug_file():
debug_files = [ ('/usr/lib64/debug/lib64/', r'ld-[0-9.]+\.so.debug'),
('/usr/lib/debug/lib64/', r'ld-linux-x86-64\.so\.2\.debug'),
('/usr/lib/debug/', r'ld-linux-x86-64\.so\.2'),
('/usr/lib/debug/lib/', r'ld-linux\.so\.2\.debug'),
('/usr/lib/debug/', r'ld-linux\.so\.2'),
# ubuntu 09.10-10.10
('/usr/lib/debug/lib/', r'ld-[0-9.]+\.so'),
# ubuntu 11.04/11.10
('/usr/lib/debug/lib/i386-linux-gnu/', r'ld-[0-9.]+\.so'),
('/usr/lib/debug/lib/x86_64-linux-gnu/', r'ld-[0-9.]+\.so'),
# ubuntu >12.04
('/usr/lib/debug/lib/x86_64-linux-gnu/', r'ld-[0-9.]+\.so'),
('/usr/lib/debug/lib/i386-linux-gnu/', r'ld-[0-9.]+\.so'),
# arch / manjaro
('/usr/lib/debug/usr/lib32/', r'ld-[0-9.]+\.so\.debug'),
('/usr/lib/debug/usr/lib/', r'ld-[0-9.]+\.so\.debug'),
]
build_ids = [ # debian
('/lib/x86_64-linux-gnu/', r'ld-[0-9.]+\.so'),
('/lib/i386-linux-gnu/', r'ld-[0-9.]+\.so'),
# solus
('/usr/lib/', r'ld-linux-x86-64\.so\.2'),
]
for file_tuple in debug_files:
file = check_file_regex(file_tuple[0], file_tuple[1])
if file and os.path.isfile(file):
return file
for file_tuple in build_ids:
library = check_file_regex(file_tuple[0], file_tuple[1])
if not library:
continue
file = find_build_id(library)
if file and os.path.isfile(file):
return file
raise CouldNotFindFile('Debug file not found')
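# Build a colon-separated library search path from the entries in
# /etc/ld.so.conf.d/*, skipping commented lines; the result is embedded in
# CONFIG_SYSTEM_LDSO_LIBRARY_PATH further down.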
def list_lib_path():
paths = []
re_lib = re.compile ('(?<=^#)')
if not os.path.isdir("/etc/ld.so.conf.d/"):
return ''
for filename in os.listdir("/etc/ld.so.conf.d/"):
try:
for line in open("/etc/ld.so.conf.d/" + filename, 'r'):
if re_lib.search(line) is not None:
continue
paths.append(line.rstrip())
except:
continue
return ':'.join(paths)
def define(outfile, name, value):
if value is not None:
outfile.write('#define {} {}\n'.format(name, value))
def usage():
print('''Usage: ./extract-system-config.py [OPTIONS]
Options:
\t-h, --help\tdisplay this help text
\t-c, --config=[FILE]\twrite output to file (default: stdout)
\t-d, --debug=[FILE]\tread debug symbols from file (default: search common locations)
\t-b, --builddir=[DIR]\tbuild directory for inclusion in library path''')
def main(argv):
config_filename = ''
debug_filename = ''
build_dir = ''
try:
opts, args = getopt.getopt(argv, 'hc:d:b:',
['help', 'config=', 'debug=', 'builddir='])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ('-h', '--help'):
usage()
sys.exit()
elif opt in ('-c', '--config'):
config_filename = arg
elif opt in ('-d', '--debug'):
debug_filename = arg
elif opt in ('-b', '--builddir'):
build_dir = arg
if config_filename != '':
config = open(config_filename, 'w')
else:
config = sys.stdout
if debug_filename == '':
debug_filename = search_debug_file()
find_build_id(debug_filename)
debug = DebugData(debug_filename)
rtld_global_die = debug.get_struct_die('rtld_global')
rtld_global_ro_die = debug.get_struct_die('rtld_global_ro')
tcb_die = debug.get_type_die('tcbhead_t')
define(config, 'CONFIG_RTLD_GLOBAL_SIZE', debug.get_struct_size(rtld_global_die))
# field was removed in glibc 2.25
define(config, 'CONFIG_DL_ERROR_CATCH_TSD_OFFSET', debug.get_member_offset (rtld_global_die, '_dl_error_catch_tsd', False))
define(config, 'CONFIG_RTLD_GLOBAL_RO_SIZE', debug.get_struct_size(rtld_global_ro_die))
define(config, 'CONFIG_RTLD_DL_PAGESIZE_OFFSET', debug.get_member_offset(rtld_global_ro_die, '_dl_pagesize'))
define(config, 'CONFIG_RTLD_DL_CLKTCK_OFFSET', debug.get_member_offset(rtld_global_ro_die, '_dl_clktck'))
define(config, 'CONFIG_TCB_SIZE', debug.get_struct_size(debug.get_struct_die('pthread')))
define(config, 'CONFIG_TCB_TCB_OFFSET', debug.get_member_offset(tcb_die, 'tcb'))
define(config, 'CONFIG_TCB_DTV_OFFSET', debug.get_member_offset(tcb_die, 'dtv'))
define(config, 'CONFIG_TCB_SELF_OFFSET',debug.get_member_offset(tcb_die, 'self'))
define(config, 'CONFIG_TCB_SYSINFO_OFFSET', debug.get_member_offset(tcb_die, 'sysinfo'))
define(config, 'CONFIG_TCB_STACK_GUARD', debug.get_member_offset(tcb_die, 'stack_guard'))
define(config, 'CONFIG_SYSTEM_LDSO_LIBRARY_PATH', '"' + list_lib_path() + ':' + build_dir + '\"')
if __name__ == "__main__":
main(sys.argv[1:])
| 41.781553
| 127
| 0.595562
|
c57a2422172ba631d7064511ac500f526f10b498
| 21,241
|
py
|
Python
|
dvc/repo/experiments/__init__.py
|
rfdickerson/dvc
|
54a48e3256ffc18f47e51fc23a5a3e1488d2b57e
|
[
"Apache-2.0"
] | 1
|
2021-04-05T19:16:36.000Z
|
2021-04-05T19:16:36.000Z
|
dvc/repo/experiments/__init__.py
|
mgkwill/dvc
|
de768375ed72ce171daad91c38beb7af387c2cf2
|
[
"Apache-2.0"
] | null | null | null |
dvc/repo/experiments/__init__.py
|
mgkwill/dvc
|
de768375ed72ce171daad91c38beb7af387c2cf2
|
[
"Apache-2.0"
] | null | null | null |
import logging
import os
import re
import tempfile
from collections.abc import Mapping
from concurrent.futures import (
ProcessPoolExecutor,
ThreadPoolExecutor,
as_completed,
)
from contextlib import contextmanager
from functools import partial, wraps
from typing import Iterable, Optional
from funcy import cached_property, first
from dvc.exceptions import DownloadError, DvcException, UploadError
from dvc.path_info import PathInfo
from dvc.progress import Tqdm
from dvc.repo.experiments.executor import ExperimentExecutor, LocalExecutor
from dvc.scm.git import Git
from dvc.stage.serialize import to_lockfile
from dvc.tree.repo import RepoTree
from dvc.utils import dict_sha256, env2bool, relpath
from dvc.utils.fs import remove
logger = logging.getLogger(__name__)
def scm_locked(f):
# Lock the experiments workspace so that we don't try to perform two
# different sequences of git operations at once
@wraps(f)
def wrapper(exp, *args, **kwargs):
with exp.scm_lock:
return f(exp, *args, **kwargs)
return wrapper
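# An experiment is identified by the hash of the combined lockfile entries of
# its stages; _commit() below reuses an existing experiment branch when the
# same baseline rev and hash have already been committed.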
def hash_exp(stages):
exp_data = {}
for stage in stages:
exp_data.update(to_lockfile(stage))
return dict_sha256(exp_data)
class UnchangedExperimentError(DvcException):
def __init__(self, rev):
super().__init__(f"Experiment identical to baseline '{rev[:7]}'.")
self.rev = rev
class BaselineMismatchError(DvcException):
def __init__(self, rev, expected):
if hasattr(rev, "hexsha"):
rev = rev.hexsha
rev_str = f"{rev[:7]}" if rev is not None else "dangling commit"
super().__init__(
f"Experiment derived from '{rev_str}', expected '{expected[:7]}'."
)
self.rev = rev
self.expected_rev = expected
class Experiments:
"""Class that manages experiments in a DVC repo.
Args:
repo (dvc.repo.Repo): repo instance that these experiments belong to.
"""
EXPERIMENTS_DIR = "experiments"
PACKED_ARGS_FILE = "repro.dat"
STASH_MSG_PREFIX = "dvc-exp-"
STASH_EXPERIMENT_RE = re.compile(
r"(?:On \(.*\): )dvc-exp-(?P<baseline_rev>[0-9a-f]+)$"
)
BRANCH_RE = re.compile(
r"^(?P<baseline_rev>[a-f0-9]{7})-(?P<exp_sha>[a-f0-9]+)$"
)
def __init__(self, repo):
from dvc.lock import make_lock
if not (
env2bool("DVC_TEST")
or repo.config["core"].get("experiments", False)
):
raise NotImplementedError
self.repo = repo
self.scm_lock = make_lock(
os.path.join(self.repo.tmp_dir, "exp_scm_lock"),
tmp_dir=self.repo.tmp_dir,
)
@cached_property
def exp_dir(self):
return os.path.join(self.repo.dvc_dir, self.EXPERIMENTS_DIR)
@cached_property
def scm(self):
"""Experiments clone scm instance."""
if os.path.exists(self.exp_dir):
return Git(self.exp_dir)
return self._init_clone()
@cached_property
def dvc_dir(self):
return relpath(self.repo.dvc_dir, self.repo.scm.root_dir)
@cached_property
def exp_dvc_dir(self):
return os.path.join(self.exp_dir, self.dvc_dir)
@cached_property
def exp_dvc(self):
"""Return clone dvc Repo instance."""
from dvc.repo import Repo
return Repo(self.exp_dvc_dir)
@contextmanager
def chdir(self):
cwd = os.getcwd()
os.chdir(self.exp_dvc.root_dir)
yield self.exp_dvc.root_dir
os.chdir(cwd)
@cached_property
def args_file(self):
return os.path.join(self.exp_dvc.tmp_dir, self.PACKED_ARGS_FILE)
@property
def stash_reflog(self):
if "refs/stash" in self.scm.repo.refs:
return self.scm.repo.refs["refs/stash"].log()
return []
@property
def stash_revs(self):
revs = {}
for i, entry in enumerate(self.stash_reflog):
m = self.STASH_EXPERIMENT_RE.match(entry.message)
if m:
revs[entry.newhexsha] = (i, m.group("baseline_rev"))
return revs
def _init_clone(self):
src_dir = self.repo.scm.root_dir
logger.debug("Initializing experiments clone")
git = Git.clone(src_dir, self.exp_dir)
self._config_clone()
return git
def _config_clone(self):
dvc_dir = relpath(self.repo.dvc_dir, self.repo.scm.root_dir)
local_config = os.path.join(self.exp_dir, dvc_dir, "config.local")
cache_dir = self.repo.cache.local.cache_dir
logger.debug("Writing experiments local config '%s'", local_config)
with open(local_config, "w") as fobj:
fobj.write(f"[cache]\n dir = {cache_dir}")
def _scm_checkout(self, rev):
self.scm.repo.git.reset(hard=True)
if self.scm.repo.head.is_detached:
self._checkout_default_branch()
if not Git.is_sha(rev) or not self.scm.has_rev(rev):
self.scm.pull()
logger.debug("Checking out experiment commit '%s'", rev)
self.scm.checkout(rev)
def _checkout_default_branch(self):
# switch to default branch
git_repo = self.scm.repo
origin_refs = git_repo.remotes["origin"].refs
ref = origin_refs["HEAD"].reference
branch_name = ref.name.split("/")[-1]
if branch_name in git_repo.heads:
branch = git_repo.heads[branch_name]
else:
branch = git_repo.create_head(branch_name, ref)
branch.set_tracking_branch(ref)
branch.checkout()
def _stash_exp(self, *args, params: Optional[dict] = None, **kwargs):
"""Stash changes from the current (parent) workspace as an experiment.
Args:
params: Optional dictionary of parameter values to be used.
Values take priority over any parameters specified in the
user's workspace.
"""
rev = self.scm.get_rev()
# patch user's workspace into experiments clone
tmp = tempfile.NamedTemporaryFile(delete=False).name
try:
self.repo.scm.repo.git.diff(
patch=True, full_index=True, binary=True, output=tmp
)
if os.path.getsize(tmp):
logger.debug("Patching experiment workspace")
self.scm.repo.git.apply(tmp)
elif not params:
# experiment matches original baseline
raise UnchangedExperimentError(rev)
finally:
remove(tmp)
# update experiment params from command line
if params:
self._update_params(params)
# save additional repro command line arguments
self._pack_args(*args, **kwargs)
# save experiment as a stash commit w/message containing baseline rev
# (stash commits are merge commits and do not contain a parent commit
# SHA)
msg = f"{self.STASH_MSG_PREFIX}{rev}"
self.scm.repo.git.stash("push", "-m", msg)
return self.scm.resolve_rev("stash@{0}")
def _pack_args(self, *args, **kwargs):
ExperimentExecutor.pack_repro_args(self.args_file, *args, **kwargs)
self.scm.add(self.args_file)
def _unpack_args(self, tree=None):
return ExperimentExecutor.unpack_repro_args(self.args_file, tree=tree)
def _update_params(self, params: dict):
"""Update experiment params files with the specified values."""
from dvc.utils.serialize import MODIFIERS
logger.debug("Using experiment params '%s'", params)
# recursive dict update
def _update(dict_, other):
for key, value in other.items():
if isinstance(value, Mapping):
dict_[key] = _update(dict_.get(key, {}), value)
else:
dict_[key] = value
return dict_
for params_fname in params:
path = PathInfo(self.exp_dvc.root_dir) / params_fname
suffix = path.suffix.lower()
modify_data = MODIFIERS[suffix]
with modify_data(path, tree=self.exp_dvc.tree) as data:
_update(data, params[params_fname])
def _commit(self, exp_hash, check_exists=True, branch=True):
"""Commit stages as an experiment and return the commit SHA."""
if not self.scm.is_dirty():
raise UnchangedExperimentError(self.scm.get_rev())
rev = self.scm.get_rev()
exp_name = f"{rev[:7]}-{exp_hash}"
if branch:
if check_exists and exp_name in self.scm.list_branches():
logger.debug("Using existing experiment branch '%s'", exp_name)
return self.scm.resolve_rev(exp_name)
self.scm.checkout(exp_name, create_new=True)
logger.debug("Commit new experiment branch '%s'", exp_name)
self.scm.repo.git.add(A=True)
self.scm.commit(f"Add experiment {exp_name}")
return self.scm.get_rev()
def reproduce_one(self, queue=False, **kwargs):
"""Reproduce and checkout a single experiment."""
stash_rev = self.new(**kwargs)
if queue:
logger.info(
"Queued experiment '%s' for future execution.", stash_rev[:7]
)
return [stash_rev]
results = self.reproduce([stash_rev], keep_stash=False)
exp_rev = first(results)
if exp_rev is not None:
self.checkout_exp(exp_rev)
return results
def reproduce_queued(self, **kwargs):
results = self.reproduce(**kwargs)
if results:
revs = [f"{rev[:7]}" for rev in results]
logger.info(
"Successfully reproduced experiment(s) '%s'.\n"
"Use `dvc exp checkout <exp_rev>` to apply the results of "
"a specific experiment to your workspace.",
", ".join(revs),
)
return results
@scm_locked
def new(self, *args, **kwargs):
"""Create a new experiment.
Experiment will be reproduced and checked out into the user's
workspace.
"""
rev = self.repo.scm.get_rev()
self._scm_checkout(rev)
try:
stash_rev = self._stash_exp(*args, **kwargs)
except UnchangedExperimentError as exc:
logger.info("Reproducing existing experiment '%s'.", rev[:7])
raise exc
logger.debug(
"Stashed experiment '%s' for future execution.", stash_rev[:7]
)
return stash_rev
@scm_locked
def reproduce(
self,
revs: Optional[Iterable] = None,
keep_stash: Optional[bool] = True,
**kwargs,
):
"""Reproduce the specified experiments.
Args:
revs: If revs is not specified, all stashed experiments will be
reproduced.
keep_stash: If True, stashed experiments will be preserved if they
fail to reproduce successfully.
"""
stash_revs = self.stash_revs
# to_run contains mapping of:
# input_rev: (stash_index, baseline_rev)
# where input_rev contains the changes to execute (usually a stash
# commit) and baseline_rev is the baseline to compare output against.
# The final experiment commit will be branched from baseline_rev.
if revs is None:
to_run = {
rev: baseline_rev
for rev, (_, baseline_rev) in stash_revs.items()
}
else:
to_run = {
rev: stash_revs[rev][1] if rev in stash_revs else rev
for rev in revs
}
logger.debug(
"Reproducing experiment revs '%s'",
", ".join((rev[:7] for rev in to_run)),
)
# setup executors - unstash experiment, generate executor, upload
# contents of (unstashed) exp workspace to the executor tree
executors = {}
for rev, baseline_rev in to_run.items():
self._scm_checkout(baseline_rev)
self.scm.repo.git.stash("apply", rev)
repro_args, repro_kwargs = self._unpack_args()
executor = LocalExecutor(
baseline_rev,
repro_args=repro_args,
repro_kwargs=repro_kwargs,
dvc_dir=self.dvc_dir,
cache_dir=self.repo.cache.local.cache_dir,
)
self._collect_input(executor)
executors[rev] = executor
exec_results = self._reproduce(executors, **kwargs)
if keep_stash:
# only drop successfully run stashed experiments
to_drop = sorted(
(
stash_revs[rev][0]
for rev in exec_results
if rev in stash_revs
),
reverse=True,
)
else:
# drop all stashed experiments
to_drop = sorted(
(stash_revs[rev][0] for rev in to_run if rev in stash_revs),
reverse=True,
)
for index in to_drop:
self.scm.repo.git.stash("drop", index)
result = {}
for _, exp_result in exec_results.items():
result.update(exp_result)
return result
def _reproduce(self, executors: dict, jobs: Optional[int] = 1) -> dict:
"""Run dvc repro for the specified ExperimentExecutors in parallel.
Returns dict containing successfully executed experiments.
"""
result = {}
with ProcessPoolExecutor(max_workers=jobs) as workers:
futures = {}
for rev, executor in executors.items():
future = workers.submit(
executor.reproduce,
executor.dvc_dir,
cwd=executor.dvc.root_dir,
**executor.repro_kwargs,
)
futures[future] = (rev, executor)
for future in as_completed(futures):
rev, executor = futures[future]
exc = future.exception()
if exc is None:
exp_hash = future.result()
self._scm_checkout(executor.baseline_rev)
try:
self._collect_output(executor)
except DownloadError:
logger.error(
"Failed to collect output for experiment '%s'",
rev,
)
continue
finally:
if os.path.exists(self.args_file):
remove(self.args_file)
try:
exp_rev = self._commit(exp_hash)
except UnchangedExperimentError:
logger.debug(
"Experiment '%s' identical to baseline '%s'",
rev,
executor.baseline_rev,
)
exp_rev = executor.baseline_rev
logger.info("Reproduced experiment '%s'.", exp_rev[:7])
result[rev] = {exp_rev: exp_hash}
else:
logger.exception(
"Failed to reproduce experiment '%s'", rev
)
executor.cleanup()
return result
def _collect_input(self, executor: ExperimentExecutor):
"""Copy (upload) input from the experiments workspace to the executor
tree.
"""
logger.debug("Collecting input for '%s'", executor.tmp_dir)
repo_tree = RepoTree(self.exp_dvc)
self._process(
executor.tree,
self.exp_dvc.tree,
executor.collect_files(self.exp_dvc.tree, repo_tree),
)
def _collect_output(self, executor: ExperimentExecutor):
"""Copy (download) output from the executor tree into experiments
workspace.
"""
logger.debug("Collecting output from '%s'", executor.tmp_dir)
self._process(
self.exp_dvc.tree,
executor.tree,
executor.collect_output(),
download=True,
)
@staticmethod
def _process(dest_tree, src_tree, collected_files, download=False):
from dvc.cache.local import _log_exceptions
from_infos = []
to_infos = []
names = []
for from_info in collected_files:
from_infos.append(from_info)
fname = from_info.relative_to(src_tree.path_info)
names.append(str(fname))
to_infos.append(dest_tree.path_info / fname)
total = len(from_infos)
if download:
func = partial(
_log_exceptions(src_tree.download, "download"),
dir_mode=dest_tree.dir_mode,
file_mode=dest_tree.file_mode,
)
desc = "Downloading"
else:
func = partial(_log_exceptions(dest_tree.upload, "upload"))
desc = "Uploading"
with Tqdm(total=total, unit="file", desc=desc) as pbar:
func = pbar.wrap_fn(func)
# TODO: parallelize this, currently --jobs for repro applies to
# number of repro executors not download threads
with ThreadPoolExecutor(max_workers=1) as dl_executor:
fails = sum(dl_executor.map(func, from_infos, to_infos, names))
if fails:
if download:
raise DownloadError(fails)
raise UploadError(fails)
@scm_locked
def checkout_exp(self, rev):
"""Checkout an experiment to the user's workspace."""
from git.exc import GitCommandError
from dvc.repo.checkout import checkout as dvc_checkout
self._check_baseline(rev)
self._scm_checkout(rev)
tmp = tempfile.NamedTemporaryFile(delete=False).name
self.scm.repo.head.commit.diff(
"HEAD~1", patch=True, full_index=True, binary=True, output=tmp
)
dirty = self.repo.scm.is_dirty()
if dirty:
logger.debug("Stashing workspace changes.")
self.repo.scm.repo.git.stash("push", "--include-untracked")
try:
if os.path.getsize(tmp):
logger.debug("Patching local workspace")
self.repo.scm.repo.git.apply(tmp, reverse=True)
need_checkout = True
else:
need_checkout = False
except GitCommandError:
raise DvcException("failed to apply experiment changes.")
finally:
remove(tmp)
if dirty:
self._unstash_workspace()
if need_checkout:
dvc_checkout(self.repo)
def _check_baseline(self, exp_rev):
baseline_sha = self.repo.scm.get_rev()
exp_commit = self.scm.repo.rev_parse(exp_rev)
parent = first(exp_commit.parents)
if parent is not None and parent.hexsha == baseline_sha:
return
raise BaselineMismatchError(parent, baseline_sha)
def _unstash_workspace(self):
# Essentially we want `git stash pop` with `-X ours` merge strategy
# to prefer the applied experiment changes over stashed workspace
# changes. git stash doesn't support merge strategy parameters, but we
# can do it ourselves with checkout/reset.
from git.exc import GitCommandError
logger.debug("Unstashing workspace changes.")
git_repo = self.repo.scm.repo.git
# stage workspace changes, then apply stashed changes on top
git_repo.add(A=True)
try:
git_repo.stash("apply", "stash@{0}")
except GitCommandError:
# stash apply will return error code on merge conflicts,
# prefer workspace changes over stash changes
git_repo.checkout("--ours", "--", ".")
# unstage changes and drop the stash entry
git_repo.reset("HEAD")
git_repo.stash("drop", "stash@{0}")
@scm_locked
def get_baseline(self, rev):
"""Return the baseline rev for an experiment rev."""
from git.exc import GitCommandError
rev = self.scm.resolve_rev(rev)
try:
name = self.scm.repo.git.name_rev(rev, name_only=True)
except GitCommandError:
return None
if not name:
return None
if name in ("undefined", "stash"):
_, baseline = self.stash_revs.get(rev, (None, None))
return baseline
m = self.BRANCH_RE.match(name)
if m:
return self.scm.resolve_rev(m.group("baseline_rev"))
return None
def checkout(self, *args, **kwargs):
from dvc.repo.experiments.checkout import checkout
return checkout(self.repo, *args, **kwargs)
def diff(self, *args, **kwargs):
from dvc.repo.experiments.diff import diff
return diff(self.repo, *args, **kwargs)
def show(self, *args, **kwargs):
from dvc.repo.experiments.show import show
return show(self.repo, *args, **kwargs)
| 34.538211
| 79
| 0.583588
|
c6f2aeb530373c41986cc3d3272ba0215c659647
| 1,739
|
py
|
Python
|
IO/Xdmf3/Testing/Python/ManyFiles.py
|
satya-arjunan/vtk8
|
ee7ced57de6d382a2d12693c01e2fcdac350b25f
|
[
"BSD-3-Clause"
] | 1,755
|
2015-01-03T06:55:00.000Z
|
2022-03-29T05:23:26.000Z
|
IO/Xdmf3/Testing/Python/ManyFiles.py
|
satya-arjunan/vtk8
|
ee7ced57de6d382a2d12693c01e2fcdac350b25f
|
[
"BSD-3-Clause"
] | 29
|
2015-04-23T20:58:30.000Z
|
2022-03-02T16:16:42.000Z
|
IO/Xdmf3/Testing/Python/ManyFiles.py
|
satya-arjunan/vtk8
|
ee7ced57de6d382a2d12693c01e2fcdac350b25f
|
[
"BSD-3-Clause"
] | 1,044
|
2015-01-05T22:48:27.000Z
|
2022-03-31T02:38:26.000Z
|
"""
This test verifies that vtk's Xdmf reader will read a sampling of small
to moderate size data files that cover a spectrum of file format features.
"""
from __future__ import print_function
import sys
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
smallFiles = [
#from XDMF
"2DCoRectMesh.xmf",
"2DRectMesh.xmf",
"2DSMesh.xmf",
"3DCoRectMesh.xmf",
"3DRectMesh.xmf",
"3DSMesh.xmf",
"Graph.xmf",
"Hexahedron.xmf",
"HexahedronTimestep.xmf",
"Mixed.xmf",
"Tetrahedron.xmf",
"TetrahedronMultipleGrids.xmf",
"TetrahedronSpatial.xmf",
"TetrahedronSpatialTimestep.xmf",
"TetrahedronTimestep.xmf",
"Triangle.xmf",
"corect.xmf",
"hex20.xmf",
"hexahedron_xy.xmf",
"hexahedron_xyz.xmf",
"output.xmf",
"quadrilateral.xmf",
"rectTest.xmf",
"tensor.xmf",
"set.xmf"
]
largeFiles = [
#from ParaView
"Iron/Iron_Protein.ImageData.xmf",
"Iron/Iron_Protein.ImageData.Collection.xmf",
"Iron/Iron_Protein.RectilinearGrid.xmf",
"Iron/Iron_Protein.RectilinearGrid.Collection.xmf",
"Iron/Iron_Protein.StructuredGrid.xmf",
"Iron/Iron_Protein.StructuredGrid.Collection.xmf",
"Big/Scenario1_p1.xmf",
]
testfilenames = smallFiles
if "--do_big_files" in sys.argv:
testfilenames = smallFiles + largeFiles
if __name__ == "__main__":
for fname in testfilenames:
xr = vtk.vtkXdmf3Reader()
afname = "" + str(VTK_DATA_ROOT) + "/Data/XDMF/" + fname
print ("Trying %s" % afname)
xr.CanReadFile(afname)
xr.SetFileName(afname)
xr.UpdateInformation()
xr.Update()
ds = xr.GetOutputDataObject(0)
if not ds:
print("Got zero output from known good file")
sys.exit(vtk.VTK_ERROR)
#if res != vtk.VTK_OK:
# print "Could not read", afname
# sys.exit(vtk.VTK_ERROR)
| 23.186667
| 74
| 0.72973
|
2a68e244183dd5212631b8456f9129348ec95876
| 914
|
py
|
Python
|
api/weather/WeatherServiceTest.py
|
Gabriel737/CadorsMap
|
2bca28b8bda79caad1149bcedd1dc4953c84e13b
|
[
"MIT"
] | 1
|
2021-12-11T21:11:06.000Z
|
2021-12-11T21:11:06.000Z
|
api/weather/WeatherServiceTest.py
|
kaumil/cmpt_732
|
e753824d30fdc32c60f6699ac5b4d88d78f6fa33
|
[
"MIT"
] | null | null | null |
api/weather/WeatherServiceTest.py
|
kaumil/cmpt_732
|
e753824d30fdc32c60f6699ac5b4d88d78f6fa33
|
[
"MIT"
] | 1
|
2021-11-11T16:51:07.000Z
|
2021-11-11T16:51:07.000Z
|
from Weather import WeatherService
def main():
ws = WeatherService()
print('=== COORDS ===')
coords = ws._locate_coordinates('Stave Lake, BC, Canada')
print(coords.latitude, coords.longitude, '\n')
print('=== CYVR === ')
print(ws.retrieve_weather('2005-11-13', 'cyvr', 'richmond BC (CYVR)', 'BC', 'Canada'), '\n')
print('=== WEMINDJI QC (CYNC) === ')
print(ws.retrieve_weather('2019-11-13', 'cync', 'WEMINDJI QC (CYNC)', 'QC', 'Canada'), '\n')
print('=== KANGIQSUALUJJUAQ (GEORGES RIVER) QC (CYLU) === ')
print(ws.retrieve_weather('2018-11-13', 'cylu', 'KANGIQSUALUJJUAQ (GEORGES RIVER) QC', 'QC', 'Canada'), '\n')
print('=== 15NM W QUÉBEC / JEAN LESAGE INTL QC (CYQB) === ')
print(ws.retrieve_weather('2021-11-13', 'cyqb', '15NM W QUÉBEC / JEAN LESAGE INTL QC (CYQB)', 'QC', 'Canada'), '\n')
if __name__ == '__main__':
main()
| 38.083333
| 120
| 0.584245
|
caa7526d0b961c79b7aa3531481e0aec746aa73b
| 15,164
|
py
|
Python
|
temboardui/plugins/monitoring/__init__.py
|
l00ptr/temboard
|
f583d58e4d831f5395bc29e0a90634d38b0b4930
|
[
"PostgreSQL"
] | null | null | null |
temboardui/plugins/monitoring/__init__.py
|
l00ptr/temboard
|
f583d58e4d831f5395bc29e0a90634d38b0b4930
|
[
"PostgreSQL"
] | null | null | null |
temboardui/plugins/monitoring/__init__.py
|
l00ptr/temboard
|
f583d58e4d831f5395bc29e0a90634d38b0b4930
|
[
"PostgreSQL"
] | null | null | null |
# -*- coding: utf-8 -*-
import logging
import os
from textwrap import dedent
import tornado.web
import tornado.escape
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy import create_engine
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import ProgrammingError, IntegrityError
from sqlalchemy.sql import text
from psycopg2.extensions import AsIs
from temboardui.toolkit import taskmanager
from temboardui.application import (
get_roles_by_instance,
send_mail,
send_sms,
)
from temboardui.model.orm import (
Instances,
)
from .model.orm import (
Check,
CheckState,
Host,
Instance,
)
from .alerting import (
check_specs,
)
from .handlers import blueprint
logger = logging.getLogger(__name__)
workers = taskmanager.WorkerSet()
def configuration(config):
return {}
def get_routes(config):
plugin_path = os.path.dirname(os.path.realpath(__file__))
__import__(__name__ + '.handlers.alerting')
__import__(__name__ + '.handlers.monitoring')
routes = blueprint.rules + [
(r"/js/monitoring/(.*)",
tornado.web.StaticFileHandler, {'path': plugin_path + "/static/js"}),
]
return routes
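# The workers below are registered on the module-level taskmanager WorkerSet;
# the periodic ones are scheduled by monitoring_bootstrap() at the end of this
# module, the others are scheduled on demand via taskmanager.schedule_task().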
@workers.register(pool_size=1)
def aggregate_data_worker(app):
# Worker in charge of aggregate data
try:
dbconf = app.config.repository
dburi = 'postgresql://{user}:{pwd}@:{p}/{db}?host={h}'.format(
user=dbconf['user'],
pwd=dbconf['password'],
h=dbconf['host'],
p=dbconf['port'],
db=dbconf['dbname'])
engine = create_engine(dburi)
with engine.begin() as conn:
conn.execute("SET search_path TO monitoring")
logger.debug("Running SQL function monitoring.aggregate_data()")
res = conn.execute("SELECT * FROM aggregate_data()")
for row in res.fetchall():
logger.debug("table=%s insert=%s"
% (row['tblname'], row['nb_rows']))
return
except Exception as e:
        logger.error('Could not aggregate monitoring data')
logger.exception(e)
raise(e)
@workers.register(pool_size=1)
def history_tables_worker(app):
# Worker in charge of history tables
try:
dbconf = app.config.repository
dburi = 'postgresql://{user}:{pwd}@:{p}/{db}?host={h}'.format(
user=dbconf['user'],
pwd=dbconf['password'],
h=dbconf['host'],
p=dbconf['port'],
db=dbconf['dbname'])
engine = create_engine(dburi)
with engine.connect() as conn:
conn.execute("SET search_path TO monitoring")
logger.debug("Running SQL function monitoring.history_tables()")
res = conn.execute("SELECT * FROM history_tables()")
for row in res.fetchall():
logger.debug("table=%s insert=%s"
% (row['tblname'], row['nb_rows']))
conn.execute("COMMIT")
return
except Exception as e:
        logger.error('Could not update monitoring history tables')
logger.exception(e)
try:
conn.execute("ROLLBACK")
except Exception:
pass
raise(e)
@workers.register(pool_size=10)
def check_data_worker(app, host_id, instance_id, data):
# Worker in charge of checking preprocessed monitoring values
specs = check_specs
dbconf = app.config.repository
dburi = 'postgresql://{user}:{pwd}@:{p}/{db}?host={h}'.format(
user=dbconf['user'],
pwd=dbconf['password'],
h=dbconf['host'],
p=dbconf['port'],
db=dbconf['dbname']
)
engine = create_engine(dburi)
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
worker_session = Session()
keys = dict()
for raw in data:
datetime = raw.get('datetime')
name = raw.get('name')
key = raw.get('key')
value = raw.get('value')
warning = raw.get('warning')
critical = raw.get('critical')
# Proceed with thresholds comparison
spec = specs.get(name)
state = 'UNDEF'
if not spec:
continue
if not (spec.get('operator')(value, warning)
or spec.get('operator')(value, critical)):
state = 'OK'
if spec.get('operator')(value, warning):
state = 'WARNING'
if spec.get('operator')(value, critical):
state = 'CRITICAL'
# Try to find enabled check for this host_id with the same name
try:
c = worker_session.query(Check).filter(
Check.name == unicode(name),
Check.host_id == host_id,
Check.instance_id == instance_id,
Check.enabled == bool(True),
).one()
except NoResultFound:
continue
# Update/insert check current state
try:
cs = worker_session.query(CheckState).filter(
CheckState.check_id == c.check_id,
CheckState.key == unicode(key)
).one()
# State has changed since last time
if cs.state != state:
taskmanager.schedule_task(
'notify_state_change',
listener_addr=os.path.join(app.config.temboard.home,
'.tm.socket'),
options={
'check_id': c.check_id,
'key': key,
'value': value,
'state': state,
'prev_state': cs.state
},
expire=0,
)
cs.state = unicode(state)
worker_session.merge(cs)
except NoResultFound:
cs = CheckState(check_id=c.check_id, key=unicode(key),
state=unicode(state))
worker_session.add(cs)
worker_session.flush()
# Append state change if any to history
worker_session.execute("SELECT monitoring.append_state_changes(:d, :i,"
":s, :k, :v, :w, :c)",
{'d': datetime, 'i': c.check_id, 's': cs.state,
'k': cs.key, 'v': value, 'w': warning,
'c': critical})
if c.check_id not in keys:
keys[c.check_id] = list()
keys[c.check_id].append(cs.key)
worker_session.commit()
worker_session.expunge_all()
# Purge CheckState
for check_id, ks in keys.items():
worker_session.execute("DELETE FROM monitoring.check_states WHERE "
"check_id = :check_id AND NOT (key = ANY(:ks))",
{'check_id': check_id, 'ks': ks})
worker_session.commit()
# check_ids for the given instance
req = worker_session.query(Check).filter(Check.instance_id == instance_id)
check_ids = [check.check_id for check in req]
# Set to UNDEF every unchecked check for the given instance
# This may happen when postgres is unavailable for example
worker_session.execute("UPDATE monitoring.check_states "
"SET state = 'UNDEF' "
"WHERE check_id = ANY(:all_check_ids) AND "
"NOT check_id = ANY(:check_ids_to_keep)",
{
'all_check_ids': check_ids,
'check_ids_to_keep': keys.keys()
})
worker_session.commit()
worker_session.close()
@workers.register(pool_size=1)
def purge_data_worker(app):
"""Background worker in charge of purging monitoring data. Purge policy
    is based on the purge_after parameter from the monitoring section.
    purge_after defines the number of days of data to keep, counted from now.
    The default value means there is no purge policy.
"""
logger.setLevel(app.config.logging.level)
logger.info("Starting monitoring data purge worker.")
if not app.config.monitoring.purge_after:
logger.info("No purge policy, end.")
return
dburi = 'postgresql://{user}:{pwd}@:{p}/{db}?host={h}'.format(
user=app.config.repository['user'],
pwd=app.config.repository['password'],
h=app.config.repository['host'],
p=app.config.repository['port'],
db=app.config.repository['dbname'],
)
engine = create_engine(dburi)
with engine.connect() as conn:
# Get tablename list to purge from metric_tables_config()
res = conn.execute(
dedent("""
SELECT
tablename
FROM (
SELECT
tablename_prefix||'_'||suffix AS tablename
FROM
json_object_keys(monitoring.metric_tables_config()) AS tablename_prefix,
UNNEST(ARRAY['30m_current', '6h_current', 'current', 'history']) AS suffix
) AS q
WHERE EXISTS (
SELECT 1
FROM
pg_catalog.pg_tables AS pgt
WHERE
pgt.tablename=q.tablename
AND pgt.schemaname = 'monitoring'
)
ORDER BY tablename;
""") # noqa
)
tablenames = [r['tablename'] for r in res.fetchall()]
tablenames.extend(['state_changes', 'check_changes'])
purge_query_base = "DELETE FROM :tablename WHERE "
for tablename in tablenames:
# With history tables, we have to deal with tstzrange
if tablename.endswith("_history"):
query = purge_query_base + \
"NOT (history_range && tstzrange(NOW() " + \
"- ':nday days'::INTERVAL, NOW()))"
else:
query = purge_query_base + \
"datetime < (NOW() - ':nday days'::INTERVAL)"
logger.debug("Purging table %s", tablename)
t = conn.begin()
try:
res_delete = conn.execute(
text(query),
tablename=AsIs("monitoring.%s" % tablename),
nday=app.config.monitoring.purge_after,
)
t.commit()
except (ProgrammingError, IntegrityError) as e:
logger.exception(e)
logger.error("Could not delete data from table %s", tablename)
t.rollback()
continue
if res_delete.rowcount > 0:
logger.info("Table %s purged, %s rows deleted",
tablename, res_delete.rowcount)
logger.info("End of monitoring data purge worker.")
@workers.register(pool_size=1)
def notify_state_change(app, check_id, key, value, state, prev_state):
# check if at least one notifications transport is configured
# if it's not the case pass
notifications_conf = app.config.notifications
smtp_host = notifications_conf.smtp_host
smtp_port = notifications_conf.smtp_port
smtp_tls = notifications_conf.smtp_tls
smtp_login = notifications_conf.smtp_login
smtp_password = notifications_conf.smtp_password
smtp_from_addr = notifications_conf.smtp_from_addr
if not smtp_host and \
not notifications_conf.get('twilio_account_sid', None):
logger.info("No SMTP nor SMS service configured, "
"notification not sent")
return
# Worker in charge of sending notifications
dbconf = app.config.repository
dburi = 'postgresql://{user}:{pwd}@:{p}/{db}?host={h}'.format(
user=dbconf['user'],
pwd=dbconf['password'],
h=dbconf['host'],
p=dbconf['port'],
db=dbconf['dbname']
)
engine = create_engine(dburi)
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
worker_session = Session()
check = worker_session.query(Check).filter(
Check.check_id == check_id,
).join(Instance).join(Host).one()
port = check.instance.port
hostname = check.instance.host.hostname
instance = worker_session.query(Instances).filter(
Instances.pg_port == port,
Instances.hostname == hostname,
).one()
# don't notify if notifications are disabled for this instance
if not instance.notify:
return
specs = check_specs
spec = specs.get(check.name)
message = ''
if state != 'OK':
message = spec.get('message').format(
key=key,
check=check.name,
value=value,
threshold=getattr(check, state.lower()),
)
description = spec.get('description')
subject = '[temBoard] {state} {hostname} - {description}' \
.format(hostname=hostname, state=state, description=description)
link = 'https://%s:%d/server/%s/%d/alerting/%s' % (
app.config.temboard.address,
app.config.temboard.port,
instance.agent_address,
instance.agent_port,
check.name)
direction = '➚' if prev_state == 'OK' or state == 'CRITICAL' else '➘'
body = '''
Instance: {hostname}:{port}
Description: {description}
Status: {direction} {state} (prev. {prev_state})
{message}
{link}
'''.format(
hostname=hostname,
port=instance.agent_port,
description=description,
direction=direction,
state=state,
prev_state=prev_state,
message=message,
link=link,
)
roles = get_roles_by_instance(worker_session,
instance.agent_address,
instance.agent_port)
emails = [role.role_email for role in roles if role.role_email]
if len(emails):
send_mail(smtp_host, smtp_port, subject, body, emails, smtp_tls,
smtp_login, smtp_password, smtp_from_addr)
phones = [role.role_phone for role in roles if role.role_phone]
if len(phones):
send_sms(app.config.notifications, body, phones)
@taskmanager.bootstrap()
def monitoring_bootstrap(context):
yield taskmanager.Task(
worker_name='aggregate_data_worker',
id='aggregate_data',
redo_interval=30 * 60, # Repeat each 30m,
options={},
)
yield taskmanager.Task(
worker_name='history_tables_worker',
id='history_tables',
redo_interval=3 * 60 * 60, # Repeat each 3h
options={},
)
yield taskmanager.Task(
worker_name='purge_data_worker',
id='purge_data',
redo_interval=24 * 60 * 60, # Repeat each 24h
options={},
)
| 34.385488
| 98
| 0.558692
|
012a2c3dd63d7741689260ea0029462b87006cc9
| 150
|
py
|
Python
|
yatube/core/context_processors/year.py
|
LHLHLHE/yatube_project
|
8f0ee4f8715133dd8c268caa3f49f80a0648c859
|
[
"MIT"
] | null | null | null |
yatube/core/context_processors/year.py
|
LHLHLHE/yatube_project
|
8f0ee4f8715133dd8c268caa3f49f80a0648c859
|
[
"MIT"
] | null | null | null |
yatube/core/context_processors/year.py
|
LHLHLHE/yatube_project
|
8f0ee4f8715133dd8c268caa3f49f80a0648c859
|
[
"MIT"
] | null | null | null |
import datetime
def year(request):
"""Добавляет переменную с текущим годом."""
return {
'year': datetime.datetime.now().year,
}
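# Typically enabled by adding 'core.context_processors.year.year' (dotted path
# assumed from this file's location) to OPTIONS['context_processors'] in the
# Django TEMPLATES setting.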
| 16.666667
| 47
| 0.62
|
b09070a26468a2b80936043135028accbe8c5000
| 171
|
py
|
Python
|
client/__init__.py
|
mycelium-ethereum/upshot-adapter
|
f4bed3f4c4e746062465f8e705bb3988044cf432
|
[
"MIT"
] | null | null | null |
client/__init__.py
|
mycelium-ethereum/upshot-adapter
|
f4bed3f4c4e746062465f8e705bb3988044cf432
|
[
"MIT"
] | null | null | null |
client/__init__.py
|
mycelium-ethereum/upshot-adapter
|
f4bed3f4c4e746062465f8e705bb3988044cf432
|
[
"MIT"
] | null | null | null |
from dotenv import load_dotenv
load_dotenv()
import os
from client.Webhook import webhook
from client.Upshot import Upshot
upshot = Upshot(os.getenv("UPSHOT_API_KEY"))
| 19
| 44
| 0.807018
|
a71ee19552d1acd72fe1bd577ef83283c6936eec
| 49
|
py
|
Python
|
btd6_memory_info/generated/SteamNative/RemoteStorageFileWriteAsyncComplete_t/remote_storage_file_write_async_complete_t.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
btd6_memory_info/generated/SteamNative/RemoteStorageFileWriteAsyncComplete_t/remote_storage_file_write_async_complete_t.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
btd6_memory_info/generated/SteamNative/RemoteStorageFileWriteAsyncComplete_t/remote_storage_file_write_async_complete_t.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
class RemoteStorageFileWriteAsyncComplete_t: pass
| 49
| 49
| 0.938776
|
4d930c0f2ec564695065f9344177d99faa68824a
| 1,719
|
py
|
Python
|
fabfile.py
|
KunihikoKido/aws-lambda-es-dumpdata
|
40cb4f1852de0d7fc2c6e446d053fe50385f262f
|
[
"MIT"
] | 3
|
2017-03-13T02:51:06.000Z
|
2017-09-01T08:03:53.000Z
|
fabfile.py
|
KunihikoKido/aws-lambda-es-suggest
|
f80fa289c0856e307622336a33ed5c1badfe75fb
|
[
"MIT"
] | null | null | null |
fabfile.py
|
KunihikoKido/aws-lambda-es-suggest
|
f80fa289c0856e307622336a33ed5c1badfe75fb
|
[
"MIT"
] | 2
|
2018-08-10T15:36:17.000Z
|
2019-11-13T20:09:11.000Z
|
# -*- coding: utf-8 -*-
import os
from fabric.api import local
from fabric.api import task
from fabric_aws_lambda import SetupTask
from fabric_aws_lambda import InvokeTask
from fabric_aws_lambda import MakeZipTask
from fabric_aws_lambda import AWSLambdaInvokeTask
from fabric_aws_lambda import AWSLambdaGetConfigTask
from fabric_aws_lambda import AWSLambdaUpdateCodeTask
BASE_PATH = os.path.dirname(__file__)
LIB_PATH = os.path.join(BASE_PATH, 'lib')
INSTALL_PREFIX = os.path.join(BASE_PATH, 'local')
REQUIREMENTS_TXT = os.path.join(BASE_PATH, 'requirements.txt')
LAMBDA_FUNCTION_NAME = os.path.basename(BASE_PATH)
LAMBDA_HANDLER = 'lambda_handler'
LAMBDA_FILE = os.path.join(BASE_PATH, 'lambda_function.py')
EVENT_FILE = os.path.join(BASE_PATH, 'event.json')
ZIP_FILE = os.path.join(BASE_PATH, 'lambda_function.zip')
ZIP_EXCLUDE_FILE = os.path.join(BASE_PATH, 'exclude.lst')
@task
def clean():
for target in [ZIP_FILE, LIB_PATH, INSTALL_PREFIX]:
local('rm -rf {}'.format(target))
task_setup = SetupTask(
requirements=REQUIREMENTS_TXT,
lib_path=LIB_PATH,
install_prefix=INSTALL_PREFIX
)
task_invoke = InvokeTask(
lambda_file=LAMBDA_FILE,
lambda_handler=LAMBDA_HANDLER,
event_file=EVENT_FILE,
lib_path=LIB_PATH
)
task_makezip = MakeZipTask(
zip_file=ZIP_FILE,
exclude_file=ZIP_EXCLUDE_FILE,
lib_path=LIB_PATH
)
task_aws_invoke = AWSLambdaInvokeTask(
function_name=LAMBDA_FUNCTION_NAME,
payload='file://{}'.format(EVENT_FILE)
)
task_aws_getconfig = AWSLambdaGetConfigTask(
function_name=LAMBDA_FUNCTION_NAME,
)
task_aws_updatecode = AWSLambdaUpdateCodeTask(
function_name=LAMBDA_FUNCTION_NAME,
zip_file='fileb://{}'.format(ZIP_FILE)
)
| 26.045455
| 62
| 0.778941
|
a018d2d7818a31d1e835209f54a458acc22837b9
| 3,104
|
py
|
Python
|
clone/views.py
|
karobia001/Insta-clone
|
fc512c8f73776bd4df9fb518e628c8dd42e0fac4
|
[
"MIT"
] | null | null | null |
clone/views.py
|
karobia001/Insta-clone
|
fc512c8f73776bd4df9fb518e628c8dd42e0fac4
|
[
"MIT"
] | 3
|
2020-06-06T00:09:24.000Z
|
2021-06-10T22:17:03.000Z
|
clone/views.py
|
karobia001/Insta-clone
|
fc512c8f73776bd4df9fb518e628c8dd42e0fac4
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.decorators import login_required
from .forms import NewStatusForm, NewCommentForm
from django.shortcuts import render,redirect
from .models import Image, Profile, Comment
# Create your views here.
@login_required(login_url='/accounts/register/')
def timeline(request):
current_user = request.user
images = Image.objects.order_by('-pub_date')
profiles = Profile.objects.order_by('-last_update')
comment = Comment.objects.order_by('-date')
return render(request, 'index.html', {"images":images, "profiles":profiles, "user_profile":user_profile, "comment":comment})
@login_required(login_url='/accounts/login/')
def profile(request):
current_user = request.user
# profile = Profile.objects.get(user_id=current_user.id)
images = Image.objects.filter(profile_id=current_user.id)
return render(request, 'profile.html', {"images":images})
@login_required(login_url='/accounts/login/')
def new_post(request, username):
current_user = request.user
p = Profile.objects.filter(id=current_user.id).first()
if request.method == 'POST':
form = NewStatusForm(request.POST,request.FILES)
if form.is_valid():
post = form.save(commit=False)
            post.imageuploader_profile = p
            post.save()
            return redirect('/')
else:
form =NewStatusForm
return render(request, 'new_status.html', {"form":form,'p':p})
#User Profile
@login_required(login_url='/accounts/login/')
def user_profile(request, user_id):
profile = Profile.objects.get(id=user_id)
images = Image.objects.all().filter(user_id=user_id)
return render(request, "profile.html", {"[profile":profile, "images":images})
@login_required(login_url='/accounts/login/')
def single_image(request, image_id):
image = Image.objects.get(id = image_id)
return render(request, "single_image.html",{"image":image})
def find_profile(request):
if 'image' in request.GET and request.GET["image"]:
search_term = request.GET.get("image")
searched_images = Image.search_by_user(search_term)
message = f"{search_term}"
return render(request, 'user_profile.html',{"message":message,"image": searched_images})
else:
message = "You haven't searched for any term yet"
return render(request, 'single_image.html',{"message":message})
@login_required(login_url='/accounts/login/')
def single_image_like(request, image_id):
image = Image.objects.get(id=image_id)
image.likes = image.likes + 1
image.save()
return redirect('allTimelines')
@login_required(login_url='/accounts/login/')
def new_comment(request, username):
current_user =request.user
username = current_user.username
if request.method =='POST':
form = NewCommentForm(request.POST, request.FILES)
if form.is_valid():
comment = form.save()
comment.user = request.user
comment.save()
return redirect('allTimelines')
else:
form = NewCommentForm()
return render(request, 'new_comment.html', {"form":form})
| 37.853659
| 128
| 0.692332
|
688dc9499ee05ec79a009558993ac6cb9864c9f0
| 1,793
|
py
|
Python
|
integration_tests/projects/002_jaffle_shop/fal_scripts/models/orders_forecast.py
|
emekdahl/fal
|
8a6670a2a1940cdd3e838073894d643e1050b654
|
[
"Apache-2.0"
] | 360
|
2021-11-10T18:43:03.000Z
|
2022-03-31T14:57:19.000Z
|
integration_tests/projects/002_jaffle_shop/fal_scripts/models/orders_forecast.py
|
emekdahl/fal
|
8a6670a2a1940cdd3e838073894d643e1050b654
|
[
"Apache-2.0"
] | 64
|
2021-11-11T22:05:53.000Z
|
2022-03-30T18:14:05.000Z
|
integration_tests/projects/002_jaffle_shop/fal_scripts/models/orders_forecast.py
|
emekdahl/fal
|
8a6670a2a1940cdd3e838073894d643e1050b654
|
[
"Apache-2.0"
] | 9
|
2021-12-17T02:49:36.000Z
|
2022-03-31T14:57:21.000Z
|
"""Forecast and upload order data
Packages:
- prophet
"""
import pandas as pd
from prophet import Prophet
import sqlalchemy.types as types
def make_forecast(dataframe: pd.DataFrame, periods: int = 30):
"""Make forecast on metric data."""
model = Prophet(daily_seasonality=False, yearly_seasonality=False)
model.fit(dataframe)
future = model.make_future_dataframe(periods=periods)
prediction = model.predict(future)
return model, prediction
def plot_forecast(model: Prophet, forecast: pd.DataFrame, filename: str):
from prophet.plot import plot_plotly
fig = plot_plotly(model, forecast)
fig.write_image(f"{context.current_model.name}_{filename}.jpg")
df: pd.DataFrame = ref("orders_daily")
print(df)
df_count = df[["order_date", "order_count"]]
df_count = df_count.rename(columns={"order_date": "ds", "order_count": "y"})
model_count, forecast_count = make_forecast(df_count, 50)
# plot_forecast(model_count, forecast_count, "count")
df_amount = df[["order_date", "order_amount"]]
df_amount = df_amount.rename(columns={"order_date": "ds", "order_amount": "y"})
model_amount, forecast_amount = make_forecast(df_amount, 50)
# plot_forecast(model_amount, forecast_amount, "amount")
joined_forecast = forecast_count.join(
forecast_amount.set_index("ds"),
on="ds",
lsuffix="_count",
rsuffix="_amount",
)
print(joined_forecast.dtypes)
# HACK: have to figure out how to write dates (or datetimes) to the database
# TODO: The types.DATE did not work when testing for `dtype={"ds": types.DATE}`
joined_forecast["ds"] = joined_forecast["ds"].map(lambda x: x.strftime("%Y-%m-%d"))
# Generates a table with a BUNCH of columns
# It will use the current model as target, no need to pass it
write_to_model(joined_forecast, mode="overwrite")
| 31.45614
| 83
| 0.737312
|
510e701e19f0d454f8cf705c6d49aca211a5ef13
| 7,386
|
py
|
Python
|
tests/test_branch_utils.py
|
QualiTorque/torque-cli
|
20ee82b4c68fd7e5b659209b72a686e76471ab52
|
[
"Apache-2.0"
] | null | null | null |
tests/test_branch_utils.py
|
QualiTorque/torque-cli
|
20ee82b4c68fd7e5b659209b72a686e76471ab52
|
[
"Apache-2.0"
] | null | null | null |
tests/test_branch_utils.py
|
QualiTorque/torque-cli
|
20ee82b4c68fd7e5b659209b72a686e76471ab52
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from datetime import datetime
from unittest.mock import Mock, patch
import torque.commands.sb
import torque.services.waiter
from torque.branch import branch_utils
from torque.commands.base import BaseCommand
from torque.constants import DEFAULT_TIMEOUT, FINAL_SB_STATUSES, UNCOMMITTED_BRANCH_NAME
from torque.exceptions import BadBlueprintRepo
class TestStashLogicFunctions(unittest.TestCase):
def setUp(self):
self.switch = branch_utils.switch_to_temp_branch
self.revert = branch_utils.revert_from_local_temp_branch
self.check_repo_for_errors = branch_utils.check_repo_for_errors
self.wait_before_delete = torque.services.waiter.Waiter.wait_for_sandbox_to_launch
self.debug_output_about_repo_examination = branch_utils.debug_output_about_repo_examination
self.command = BaseCommand([])
self.initialize_mock_vars()
def initialize_mock_vars(self):
self.repo = Mock()
self.sb_manager = Mock()
self.sandbox = Mock()
self.sb_manager.get.return_value = self.sandbox
self.sandbox_id = Mock()
self.temp_branch = "mock_temp_branch"
self.blueprint_name = "mock_blueprint_name"
@patch.object(branch_utils, "create_remote_branch")
@patch.object(branch_utils, "commit_to_local_temp_branch")
@patch.object(branch_utils, "preserve_uncommitted_code")
@patch.object(branch_utils, "create_local_temp_branch")
@patch.object(branch_utils, "stash_local_changes")
def test_switch_to_temp_branch_dirtyrepo(
self,
stash_local_changes,
create_local_temp_branch,
preserve_uncommitted_code,
commit_to_local_temp_branch,
create_remote_branch,
):
# Arrange:
self.repo = Mock()
self.repo.is_dirty = Mock(return_value=True)
defined_branch_in_file = "defined_branch_in_file"
# Act:
uncommitted_branch_name = self.switch(self.repo, defined_branch_in_file)
# Assert:
create_remote_branch.assert_called_once_with(self.repo, uncommitted_branch_name)
commit_to_local_temp_branch.assert_called_once_with(self.repo)
preserve_uncommitted_code.assert_called_once_with(self.repo)
create_local_temp_branch.assert_called_once_with(self.repo, uncommitted_branch_name)
stash_local_changes.assert_called_once_with(self.repo)
self.assertTrue(uncommitted_branch_name.startswith(UNCOMMITTED_BRANCH_NAME))
@patch.object(branch_utils, "create_remote_branch")
@patch.object(branch_utils, "commit_to_local_temp_branch")
@patch.object(branch_utils, "preserve_uncommitted_code")
@patch.object(branch_utils, "create_local_temp_branch")
@patch.object(branch_utils, "stash_local_changes")
def test_switch_to_temp_branch_cleanrepo(
self,
stash_local_changes,
create_local_temp_branch,
preserve_uncommitted_code,
commit_to_local_temp_branch,
create_remote_branch,
):
# Arrange:
self.repo = Mock()
self.repo.is_dirty = Mock(return_value=False)
self.repo.untracked_files = True
defined_branch_in_file = "defined_branch_in_file"
# Act:
uncommitted_branch_name = self.switch(self.repo, defined_branch_in_file)
# Assert:
create_remote_branch.assert_called_once_with(self.repo, uncommitted_branch_name)
commit_to_local_temp_branch.assert_called_once_with(self.repo)
preserve_uncommitted_code.assert_called_once_with(self.repo)
create_local_temp_branch.assert_called_once_with(self.repo, uncommitted_branch_name)
stash_local_changes.assert_called_once_with(self.repo)
self.assertTrue(uncommitted_branch_name.startswith(UNCOMMITTED_BRANCH_NAME))
@patch.object(branch_utils, "checkout_remote_branch")
@patch.object(branch_utils, "revert_from_uncommitted_code")
def test_revert_from_temp_branch(self, revert_from_uncommitted_code, checkout_remote_branch):
# Arrange:
self.repo = Mock()
active_branch = "active_branch"
# Act:
self.revert(self.repo, active_branch, True)
# Assert:
checkout_remote_branch.assert_called_once_with(
self.repo,
active_branch,
)
revert_from_uncommitted_code.assert_called_once_with(self.repo)
def test_check_repo_for_errors(self):
# Arrange:
self.repo = Mock()
self.repo.is_repo_detached = Mock(return_value=True)
# Act & Assert:
self.assertRaises(BadBlueprintRepo, self.check_repo_for_errors, self.repo)
def test_debug_output_about_repo_examination(self):
# Arrange:
self.repo = Mock()
mock_blueprint = Mock()
self.repo.is_repo_detached = Mock(return_value=False)
# Act:
self.debug_output_about_repo_examination(self.repo, mock_blueprint)
# Assert:
self.repo.is_dirty.assert_called_once()
self.repo.is_current_branch_exists_on_remote()
self.repo.is_current_branch_synced()
@patch("time.sleep", return_value=None)
@patch("torque.services.waiter.can_temp_branch_be_deleted")
def test_wait_for_sandbox_to_launch_final_stage(self, can_temp, time_sleep):
# Arrange:
self.initialize_mock_vars()
can_temp.return_value = False
context_branch = Mock()
# Act & assert:
for final_stage in FINAL_SB_STATUSES:
self.sandbox.sandbox_status = final_stage
start_time = datetime.now()
self.wait_before_delete(
self.command,
self.sb_manager,
self.sandbox_id,
DEFAULT_TIMEOUT,
context_branch,
False,
)
assert (datetime.now() - start_time).seconds < 1
@patch("time.sleep", return_value=None)
@patch("torque.services.waiter.can_temp_branch_be_deleted")
def test_wait_for_sandbox_to_launch_can_be_deleted(self, can_temp, time_sleep):
# Arrange:
self.initialize_mock_vars()
mock_non_final_stage = "mock_non_final_stage"
can_temp.return_value = True
self.sandbox.sandbox_status = mock_non_final_stage
context_branch = Mock()
# Act:
timeout_reached = self.wait_before_delete(
self.command,
self.sb_manager,
self.sandbox_id,
1,
context_branch,
False,
)
# Assert:
self.assertFalse(timeout_reached)
@patch("torque.services.waiter.DEFAULT_TIMEOUT", 0.01)
@patch("time.sleep", return_value=None)
@patch("torque.services.waiter.can_temp_branch_be_deleted")
def test_wait_before_temp_branch_delete_cannot_be_deleted(
self,
can_temp,
time_sleep,
):
# Arrange:
self.initialize_mock_vars()
mock_non_final_stage = "mock_non_final_stage"
can_temp.return_value = False
self.sandbox.sandbox_status = mock_non_final_stage
context_branch = Mock()
# Act:
timeout_reached = self.wait_before_delete(
self.command,
self.sb_manager,
self.sandbox_id,
0,
context_branch,
False,
)
# Assert:
self.assertTrue(timeout_reached)
| 37.115578
| 99
| 0.691579
|
4adeaf7f937b4b5db8adb199cd10f9cb42174cec
| 2,254
|
py
|
Python
|
pennylane_aqt/api_client.py
|
XanaduAI/pennylane-aqt
|
1570a0cf422967dfe0d170f099a636a3024bd353
|
[
"Apache-2.0"
] | 5
|
2020-06-01T05:13:23.000Z
|
2020-07-12T17:59:14.000Z
|
pennylane_aqt/api_client.py
|
XanaduAI/pennylane-aqt
|
1570a0cf422967dfe0d170f099a636a3024bd353
|
[
"Apache-2.0"
] | 3
|
2020-06-03T12:52:44.000Z
|
2020-06-23T18:08:30.000Z
|
pennylane_aqt/api_client.py
|
XanaduAI/pennylane-aqt
|
1570a0cf422967dfe0d170f099a636a3024bd353
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
API Client
==========
**Module name:** :mod:`pennylane_aqt.api_client`
.. currentmodule:: pennylane_aqt.api_client
Tools to interface with online APIs.
Functions
---------
.. autosummary::
submit
verify_valid_status
Code details
~~~~~~~~~~~~
"""
import urllib
import requests
SUPPORTED_HTTP_REQUESTS = ["PUT", "POST"]
VALID_STATUS_CODES = [200, 201, 202]
DEFAULT_TIMEOUT = 1.0
def verify_valid_status(response):
"""
Check an HTTP response for a valid status code and raise an exception if
the code is invalid.
Args:
    response (requests.models.Response): the response to check
Raises:
    requests.HTTPError: if the status code is not valid
"""
if response.status_code not in VALID_STATUS_CODES:
raise requests.HTTPError(response, response.text)
def submit(request_type, url, request, headers):
"""Submit a request to AQT's API.
Args:
request_type (str): the type of HTTP request ("PUT" or "POST")
url (str): the API's online URL
request (str): JSON-formatted payload
headers (dict): HTTP request header
Returns:
requests.models.Response: the response from the API
"""
if request_type not in SUPPORTED_HTTP_REQUESTS:
raise ValueError("""Invalid HTTP request method provided. Options are "PUT" or "POST".""")
if request_type == "PUT":
return requests.put(url, request, headers=headers, timeout=DEFAULT_TIMEOUT)
if request_type == "POST":
return requests.post(url, request, headers=headers, timeout=DEFAULT_TIMEOUT)
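A brief usage sketch of the two helpers above; the endpoint URL, header, and payload are placeholders for illustration, not real AQT values.
# Illustrative sketch only -- placeholder endpoint, token, and payload.
import json

payload = json.dumps({"data": [], "repetitions": 1})
headers = {"Authorization": "<access-token placeholder>"}
response = submit("PUT", "https://example.invalid/api", payload, headers)
verify_valid_status(response)  # raises requests.HTTPError unless the status is 200/201/202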
| 28.531646
| 98
| 0.703194
|
13ff58be813f7ad399c32cc9d408f919ed019e38
| 1,153
|
py
|
Python
|
modules/signatures/windows/rat_rbot.py
|
Yuanmessi/Bold-Falcon
|
00fcaba0b3d9c462b9d20ecb256ff85db5d119e2
|
[
"BSD-3-Clause"
] | 24
|
2021-06-21T07:35:37.000Z
|
2022-03-22T03:33:59.000Z
|
modules/signatures/windows/rat_rbot.py
|
Yuanmessi/Bold-Falcon
|
00fcaba0b3d9c462b9d20ecb256ff85db5d119e2
|
[
"BSD-3-Clause"
] | 3
|
2021-07-01T08:09:05.000Z
|
2022-01-28T03:38:36.000Z
|
modules/signatures/windows/rat_rbot.py
|
Yuanmessi/Bold-Falcon
|
00fcaba0b3d9c462b9d20ecb256ff85db5d119e2
|
[
"BSD-3-Clause"
] | 6
|
2021-06-22T05:32:57.000Z
|
2022-02-11T02:05:45.000Z
|
# Copyright (C) 2010-2015 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# This signature was contributed by RedSocks - http://redsocks.nl
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
class RBot(Signature):
name = "rat_rbot"
description = "Creates known RBot files, registry keys and/or mutexes"
severity = 3
categories = ["rat"]
families = ["rbot"]
authors = ["RedSocks"]
minimum = "2.0"
mutexes_re = [
".*\\[ri0t\\ v5\\]",
".*dRbot",
".*P3NBot",
".*WinServ",
".*bb1",
]
files_re = [
"C:\\\\WINDOWS\\\\(system32|syswow64)\\\\sysfcg32.exe",
]
def on_complete(self):
for indicator in self.mutexes_re:
match = self.check_mutex(pattern=indicator, regex=True)
if match:
self.mark_ioc("mutex", match)
for indicator in self.files_re:
match = self.check_file(pattern=indicator, regex=True)
if match:
self.mark_ioc("file", match)
return self.has_marks()
| 28.121951
| 74
| 0.5915
|
d5580f9a74dce5251565eeff3abc57d2886c23fb
| 5,170
|
py
|
Python
|
highlighter_path.py
|
Phidica/sublime-fish
|
cd95616df5c73da5ce2dfe548892b4571040f9dd
|
[
"MIT"
] | 31
|
2017-11-15T00:21:27.000Z
|
2022-03-22T16:01:34.000Z
|
highlighter_path.py
|
eugenesvk/sublime-fish
|
6697fdd7becc3e5991484d4365b036b9fbb1d99d
|
[
"MIT"
] | 17
|
2017-11-11T15:07:27.000Z
|
2022-03-29T14:46:14.000Z
|
highlighter_path.py
|
eugenesvk/sublime-fish
|
6697fdd7becc3e5991484d4365b036b9fbb1d99d
|
[
"MIT"
] | 2
|
2021-03-19T11:41:34.000Z
|
2021-09-19T14:37:13.000Z
|
import os.path
import logging
import re
import sublime, sublime_plugin
from fish.highlighter_base import BaseHighlighter
class PathHighlighter(sublime_plugin.ViewEventListener, BaseHighlighter):
def __init__(self, view):
sublime_plugin.ViewEventListener.__init__(self, view)
BaseHighlighter.__init__(self, view)
self.viewDir = None
# Override default properties of the template
self.selectors = [
'meta.function-call.parameter.argument.path.fish',
'meta.function-call.operator.redirection.path.fish',
]
# def __del__(self):
# BaseHighlighter.__del__(self)
@classmethod
def is_applicable(self, settings):
try:
return 'Packages/fish/fish' in settings.get('syntax') and 'path' in settings.get('enabled_highlighters')
except TypeError: # In weird cases get() comes back NoneType
return False
@classmethod
def applies_to_primary_view_only(self):
return False
# Using _async functions means regions may flash onscreen as they are changed,
# however the advantage is that input is not blocked. In very big files
# this is essential
# Review full file at load
def on_load_async(self):
self.logger.debug("on_load")
self._update_markup()
# Review full file at save
def on_post_save_async(self):
self.logger.debug("on_post_save")
self._update_markup()
# Review current line after each modification
# We still iterate over every currently drawn region to test if it should be
# erased, however we only test new regions that are on the current line,
# preventing a potentially large number of disk operations (testing if
# files exist)
def on_modified_async(self):
self.logger.debug("on_modified")
self._update_markup(local = True)
def on_text_command(self, command_name, args):
if command_name == 'run_highlighter_test_trigger' \
and self.view.find(r'^#! HIGHLIGHTER TEST PATH', 0).begin() == 0:
self._run_test()
def _should_markup(self):
# If view is not backed by a file on disk then we have no directory reference
filePath = self.view.file_name()
if filePath is None:
self.logger.info("Refusing to mark up unsaved buffer")
return False
# First time we have a file path, note the directory
# Assumes directory cannot change while view is open (is this true?)
if self.viewDir is None:
self.viewDir = os.path.dirname(filePath)
self.logger.info("Active directory = {}".format(self.viewDir))
return True
def _test_draw_region(self, region, selector, regionID):
text = self.view.substr(region)
self.logger.debug("Region {} text = {}".format(region, text))
if text.startswith('~'):
testPath = os.path.expanduser(text)
else:
# Attempt to extract the quoted text, but don't try anything smart
# like stripping whitespace as the quotes are there to preserve it
if '"' in text or "'" in text:
try:
# I think this is safe, because to get here the text can't contain spaces unless the whole thing is quoted, so there isn't any way to do anything malicious
# Alternatively we could getFishOutput(['echo', text]), but that puts a dependency on fish being installed
newText = eval(text)
text = newText
if text == "":
return None
except (SyntaxError, NameError):
return None
# Keep an absolute path, otherwise assume we have a relative one
if text.startswith('/'):
testPath = text
elif sublime.platform() == 'windows':
# Fish can handle native Windows paths, but we have to take a little care
if re.match(r'([A-Za-z]:)?(\\\\|/)', text):
# This is an absolute path with an optional drive and double backslash or single slash.
# If a drive wasn't given, os.path will insert the drive of the viewDir
testPath = os.path.join(self.viewDir, text)
elif re.match(r'\\', text):
# This is only one backslash, which to fish looks like a character escape and not a path
return None
elif any(c in text for c in [':', '*', '?']):
# True Windows files can't contain these symbols. In Cygwin/MSYS2 they could, but we're agnostic of those subsystems
return None
else:
# Otherwise just a regular relative path
testPath = os.path.join(self.viewDir, text)
else:
testPath = os.path.join(self.viewDir, text)
self.logger.debug("Test path = {}".format(testPath))
if not os.path.exists(testPath):
return None
# We want the drawn region to be marked up like the actual text (eg, a
# string, etc), so draw using the complete scope of the last character.
# The only reason not to use the first character is that it might be a tilde
drawScope = self.view.scope_name(region.end() - 1)
# Whitespace will not be underlined, see https://github.com/SublimeTextIssues/Core/issues/137
drawStyle = sublime.DRAW_NO_FILL | sublime.DRAW_NO_OUTLINE | sublime.DRAW_SOLID_UNDERLINE
return ('path', drawScope, drawStyle)
def _build_status(self):
return None
| 37.737226
| 165
| 0.681431
|
72cb7bfb4edd5f8bfec073bf042c5b707fd9ed54
| 764
|
py
|
Python
|
docs/00.Python_Advanced/Django_note/mysite1/mysite1/urls.py
|
mheanng/PythonNote
|
e3e5ede07968fab0a45f6ac4db96e62092c17026
|
[
"Apache-2.0"
] | null | null | null |
docs/00.Python_Advanced/Django_note/mysite1/mysite1/urls.py
|
mheanng/PythonNote
|
e3e5ede07968fab0a45f6ac4db96e62092c17026
|
[
"Apache-2.0"
] | null | null | null |
docs/00.Python_Advanced/Django_note/mysite1/mysite1/urls.py
|
mheanng/PythonNote
|
e3e5ede07968fab0a45f6ac4db96e62092c17026
|
[
"Apache-2.0"
] | null | null | null |
"""mysite1 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| 34.727273
| 79
| 0.704188
|
7a59e65296759895caa2944c0a2902c1d12946b5
| 1,952
|
py
|
Python
|
setup.py
|
skterry/KAI
|
b8ccf003bf892692bdf00f4bd112e2a7a2938a5b
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
skterry/KAI
|
b8ccf003bf892692bdf00f4bd112e2a7a2938a5b
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
skterry/KAI
|
b8ccf003bf892692bdf00f4bd112e2a7a2938a5b
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# NOTE: The configuration for the package, including the name, version, and
# other information are set in the setup.cfg file.
import os
import sys
from setuptools import setup
# First provide helpful messages if contributors try and run legacy commands
# for tests or docs.
TEST_HELP = """
Note: running tests is no longer done using 'python setup.py test'. Instead
you will need to run:
tox -e test
If you don't already have tox installed, you can install it with:
pip install tox
If you only want to run part of the test suite, you can also use pytest
directly with::
pip install -e .[test]
pytest
For more information, see:
http://docs.astropy.org/en/latest/development/testguide.html#running-tests
"""
if 'test' in sys.argv:
print(TEST_HELP)
sys.exit(1)
DOCS_HELP = """
Note: building the documentation is no longer done using
'python setup.py build_docs'. Instead you will need to run:
tox -e build_docs
If you don't already have tox installed, you can install it with:
pip install tox
You can also build the documentation with Sphinx directly using::
pip install -e .[docs]
cd docs
make html
For more information, see:
http://docs.astropy.org/en/latest/install.html#builddocs
"""
if 'build_docs' in sys.argv or 'build_sphinx' in sys.argv:
print(DOCS_HELP)
sys.exit(1)
VERSION_TEMPLATE = """
# Note that we need to fall back to the hard-coded version if either
# setuptools_scm can't be imported or setuptools_scm can't determine the
# version, so we catch the generic 'Exception'.
try:
from setuptools_scm import get_version
version = get_version(root='..', relative_to=__file__)
except Exception:
version = '{version}'
""".lstrip()
setup(use_scm_version={'write_to': os.path.join('kai', 'version.py'),
'write_to_template': VERSION_TEMPLATE})
| 24.708861
| 76
| 0.71875
|
7088ea152946ef33d247df89e52221b9a94e8d5c
| 88
|
py
|
Python
|
odxtools/version.py
|
andlaus/odxtools
|
8c4d806d7d23b9f87e571edffb3e90b7005688f4
|
[
"MIT"
] | null | null | null |
odxtools/version.py
|
andlaus/odxtools
|
8c4d806d7d23b9f87e571edffb3e90b7005688f4
|
[
"MIT"
] | null | null | null |
odxtools/version.py
|
andlaus/odxtools
|
8c4d806d7d23b9f87e571edffb3e90b7005688f4
|
[
"MIT"
] | null | null | null |
# SPDX-License-Identifier: MIT
# Copyright (c) 2022 MBition GmbH
__version__ = '1.0.1'
| 17.6
| 33
| 0.715909
|
ba76eedca41d8c5534d97efdd5858bd369bc9d61
| 10,956
|
py
|
Python
|
sdk/python/pulumi_azure_native/web/v20210201/web_app_metadata_slot.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/web/v20210201/web_app_metadata_slot.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/web/v20210201/web_app_metadata_slot.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = ['WebAppMetadataSlotArgs', 'WebAppMetadataSlot']
@pulumi.input_type
class WebAppMetadataSlotArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
slot: pulumi.Input[str],
kind: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a WebAppMetadataSlot resource.
:param pulumi.Input[str] name: Name of the app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] slot: Name of the deployment slot. If a slot is not specified, the API will update the metadata for the production slot.
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] properties: Settings.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "slot", slot)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if properties is not None:
pulumi.set(__self__, "properties", properties)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
Name of the app.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
Name of the resource group to which the resource belongs.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def slot(self) -> pulumi.Input[str]:
"""
Name of the deployment slot. If a slot is not specified, the API will update the metadata for the production slot.
"""
return pulumi.get(self, "slot")
@slot.setter
def slot(self, value: pulumi.Input[str]):
pulumi.set(self, "slot", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Settings.
"""
return pulumi.get(self, "properties")
@properties.setter
def properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "properties", value)
class WebAppMetadataSlot(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
slot: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
String dictionary resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] name: Name of the app.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] properties: Settings.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] slot: Name of the deployment slot. If a slot is not specified, the API will update the metadata for the production slot.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WebAppMetadataSlotArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
String dictionary resource.
:param str resource_name: The name of the resource.
:param WebAppMetadataSlotArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WebAppMetadataSlotArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
slot: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = WebAppMetadataSlotArgs.__new__(WebAppMetadataSlotArgs)
__props__.__dict__["kind"] = kind
if name is None and not opts.urn:
raise TypeError("Missing required property 'name'")
__props__.__dict__["name"] = name
__props__.__dict__["properties"] = properties
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if slot is None and not opts.urn:
raise TypeError("Missing required property 'slot'")
__props__.__dict__["slot"] = slot
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:web/v20210201:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20150801:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20150801:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20160801:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20160801:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20180201:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20180201:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20181101:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20181101:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20190801:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20190801:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20200601:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200601:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20200901:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200901:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20201001:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20201001:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20201201:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20201201:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20210101:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20210101:WebAppMetadataSlot"), pulumi.Alias(type_="azure-native:web/v20210115:WebAppMetadataSlot"), pulumi.Alias(type_="azure-nextgen:web/v20210115:WebAppMetadataSlot")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(WebAppMetadataSlot, __self__).__init__(
'azure-native:web/v20210201:WebAppMetadataSlot',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'WebAppMetadataSlot':
"""
Get an existing WebAppMetadataSlot resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = WebAppMetadataSlotArgs.__new__(WebAppMetadataSlotArgs)
__props__.__dict__["kind"] = None
__props__.__dict__["name"] = None
__props__.__dict__["properties"] = None
__props__.__dict__["type"] = None
return WebAppMetadataSlot(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output[Mapping[str, str]]:
"""
Settings.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
| 46.621277
| 1,771
| 0.655988
|
799e473531e6a234ff4b1d9d7e665426aa1dff2e
| 1,295
|
py
|
Python
|
dataset/codesearchnet_feng/__init__.py
|
nashid/naturalcc
|
e9d9b4a296b61199fc35779b062db2205935a608
|
[
"MIT"
] | 1
|
2021-12-21T05:52:37.000Z
|
2021-12-21T05:52:37.000Z
|
dataset/codesearchnet_feng/__init__.py
|
hrshy0629/naturalcc
|
9c3329dd8387c8242deb52bf590ebe3ac795f8de
|
[
"MIT"
] | null | null | null |
dataset/codesearchnet_feng/__init__.py
|
hrshy0629/naturalcc
|
9c3329dd8387c8242deb52bf590ebe3ac795f8de
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from ncc import (
__NCC_DIR__,
__BPE_DIR__, __TREE_SITTER_LIBS_DIR__,
)
from ncc.data.constants import (
RAW, ATTRIBUTES, MODES,
)
DATASET_NAME = 'codesearchnet_feng'
DATASET_DIR = os.path.join(__NCC_DIR__, DATASET_NAME)
RAW_DIR = os.path.join(DATASET_DIR, RAW)
ATTRIBUTES_DIR = os.path.join(DATASET_DIR, ATTRIBUTES)
DEDUPE_DIR = os.path.join(DATASET_DIR, 'dedupe')
BPE_DIR = __BPE_DIR__
LIBS_DIR = __TREE_SITTER_LIBS_DIR__
LANGUAGES = ['ruby', 'python', 'java', 'go', 'php', 'javascript']
RECURSION_DEPTH = 999 # dfs recursion limitation
# path modality
PATH_NUM = 300 # path modality number
# sbt modality
MAX_SUB_TOKEN_LEN = 5 # we only consider the first 5 sub-tokens from tokenizer
NODE_TMP = 'TMP'
MEANINGLESS_TOKENS = set(['(', ')', '[', ']', '{', '}', ';', '@', '#', ':', '()', '<>', '{}'])
COMMENT_END_TOKENS = set(['{', '[', '('])
MAX_COMMENT_TOKEN_LIST_LEN = 25
MAX_CODE_TOKEN_LEN = 70
NO_METHOD = '<NO_METHOD>'
__all__ = [
"DATASET_NAME",
"RAW_DIR", "ATTRIBUTES_DIR", "DEDUPE_DIR",
"BPE_DIR", "LIBS_DIR",
"LANGUAGES", "MODES",
"RECURSION_DEPTH", "PATH_NUM", "MAX_SUB_TOKEN_LEN",
"MEANINGLESS_TOKENS", "COMMENT_END_TOKENS",
"MAX_CODE_TOKEN_LEN",
"MAX_COMMENT_TOKEN_LIST_LEN",
"NO_METHOD",
]
| 26.979167
| 94
| 0.677992
|
9b2e58eb7277626552563a9421b74eadecb150db
| 1,064
|
py
|
Python
|
sampleConfigs/generateConfigFiles.py
|
pnnl/ARTS
|
4fbb49611a0efdfa94bddb510289206f7b0acd5d
|
[
"ECL-2.0",
"Apache-2.0"
] | 9
|
2019-05-16T23:07:05.000Z
|
2020-10-21T16:15:01.000Z
|
sampleConfigs/generateConfigFiles.py
|
pnnl/ARTS
|
4fbb49611a0efdfa94bddb510289206f7b0acd5d
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sampleConfigs/generateConfigFiles.py
|
pnnl/ARTS
|
4fbb49611a0efdfa94bddb510289206f7b0acd5d
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2019-09-10T17:08:10.000Z
|
2021-03-15T18:27:00.000Z
|
#!/usr/bin/env python3
import configparser
import itertools as it
import sys
if(len(sys.argv) == 1):
print(sys.argv[0], "NUM_GPU LAUNCHER")
exit(0)
if(len(sys.argv) == 3):
launcher = sys.argv[2]
else:
launcher = "ssh"
r2 = [str(x) for x in range(2)]
r3 = [str(x) for x in range(3)]
options = {}
options["freeDbAfterGpuRun"] = r2
options["runGpuGcIdle"] = r2
options["runGpuGcPreEdt"] = r2
options["deleteZerosGpuGc"] = r2
options["gpuFit"] = r3
options["gpuLocality"] = r3
options["gpuP2P"] = r2
config = configparser.ConfigParser()
config.optionxform=str
cfgTemplate = "arts.cfg"
config.read(cfgTemplate)
conf = config[config.sections()[0]]
conf["scheduler"] = "3"
conf["launcher"] = launcher
conf["gpu"] = sys.argv[1]
keys = sorted(options)
combinations = list(it.product(*(options[key] for key in keys)))
i = 1
for comb in combinations:
cfgFile = "test"+str(i)+".cfg"
for key, ith in zip(keys, list(range(len(comb)))):
conf[key] = comb[ith]
with open(cfgFile, 'w') as cFile:
config.write(cFile)
i += 1
| 20.862745
| 64
| 0.655075
|
b1818048f49d777e08e17b6d3ac0a3780b6917c1
| 1,260
|
py
|
Python
|
api/permissions.py
|
ferrumie/multi-pay
|
3e6a6fc63de823c10b5c52ade73b8357ed8bfd48
|
[
"MIT"
] | null | null | null |
api/permissions.py
|
ferrumie/multi-pay
|
3e6a6fc63de823c10b5c52ade73b8357ed8bfd48
|
[
"MIT"
] | null | null | null |
api/permissions.py
|
ferrumie/multi-pay
|
3e6a6fc63de823c10b5c52ade73b8357ed8bfd48
|
[
"MIT"
] | null | null | null |
from rest_framework import permissions
from django.core.cache import cache
class IsOwner(permissions.BasePermission):
"""
Object-level permission to only allow owners to have access to the requests
"""
def has_permission(self, request, view):
return request.user and request.user.is_authenticated
def has_object_permission(self, request, view, obj):
return obj.user == request.user
class IsIdempotent(permissions.BasePermission):
'''
Permission to ensure the idempotency of the post requests
Raise an error if duplicate request is detected
'''
message = 'Duplicate request detected.'
# TODO: make this work
# needed to ensure idempotency for the post requests
def has_permission(self, request, view):
if request.method != 'POST':
return True
ival = request.META.get('HTTP_X_IDEMPOTENCY_KEY')
if ival is None:
return True
ival = ival[:128]
key = 'idemp-{}-{}'.format(request.user.pk, ival)
is_idempotent = bool(cache.add(key, 'yes',
1000))
# if not is_idempotent:
# logger.info(u'Duplicate request (non-idempotent): %s', key)
return is_idempotent
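A hedged sketch of wiring these permissions into a DRF view; the viewset name is hypothetical, only the permission classes above are real. A client keeps a retried POST idempotent by resending the same `X-Idempotency-Key` header, which Django surfaces to the permission as `HTTP_X_IDEMPOTENCY_KEY` in `request.META`.
# Illustrative sketch only -- the viewset is hypothetical; the permission
# classes are the ones defined above.
from rest_framework import viewsets

class PaymentViewSet(viewsets.ModelViewSet):
    # queryset / serializer_class omitted: this sketch only shows permission wiring
    permission_classes = [IsOwner, IsIdempotent]

# A retried request reuses the same header value, e.g.
#   X-Idempotency-Key: 3f2b9c0e
# and the second attempt is rejected as a duplicate by IsIdempotent.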
| 32.307692
| 79
| 0.644444
|
3be2a3d6fea6e7074d23cdd89e597457fde0f944
| 9,243
|
py
|
Python
|
huaweicloud-sdk-as/huaweicloudsdkas/v1/model/lifecycle_hook_list.py
|
githubmilesma/huaweicloud-sdk-python-v3
|
9d9449ed68a609ca65f0aa50b5b2a1c28445bf03
|
[
"Apache-2.0"
] | 1
|
2021-04-16T07:59:28.000Z
|
2021-04-16T07:59:28.000Z
|
huaweicloud-sdk-as/huaweicloudsdkas/v1/model/lifecycle_hook_list.py
|
Lencof/huaweicloud-sdk-python-v3
|
d13dc4e2830a83e295be6e4de021999b3376e34e
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-as/huaweicloudsdkas/v1/model/lifecycle_hook_list.py
|
Lencof/huaweicloud-sdk-python-v3
|
d13dc4e2830a83e295be6e4de021999b3376e34e
|
[
"Apache-2.0"
] | 1
|
2022-01-17T02:24:18.000Z
|
2022-01-17T02:24:18.000Z
|
# coding: utf-8
import pprint
import re
import six
class LifecycleHookList:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'lifecycle_hook_name': 'str',
'lifecycle_hook_type': 'str',
'default_result': 'str',
'default_timeout': 'int',
'notification_topic_urn': 'str',
'notification_topic_name': 'str',
'notification_metadata': 'str',
'create_time': 'datetime'
}
attribute_map = {
'lifecycle_hook_name': 'lifecycle_hook_name',
'lifecycle_hook_type': 'lifecycle_hook_type',
'default_result': 'default_result',
'default_timeout': 'default_timeout',
'notification_topic_urn': 'notification_topic_urn',
'notification_topic_name': 'notification_topic_name',
'notification_metadata': 'notification_metadata',
'create_time': 'create_time'
}
def __init__(self, lifecycle_hook_name=None, lifecycle_hook_type=None, default_result=None, default_timeout=None, notification_topic_urn=None, notification_topic_name=None, notification_metadata=None, create_time=None):
"""LifecycleHookList - a model defined in huaweicloud sdk"""
self._lifecycle_hook_name = None
self._lifecycle_hook_type = None
self._default_result = None
self._default_timeout = None
self._notification_topic_urn = None
self._notification_topic_name = None
self._notification_metadata = None
self._create_time = None
self.discriminator = None
if lifecycle_hook_name is not None:
self.lifecycle_hook_name = lifecycle_hook_name
if lifecycle_hook_type is not None:
self.lifecycle_hook_type = lifecycle_hook_type
if default_result is not None:
self.default_result = default_result
if default_timeout is not None:
self.default_timeout = default_timeout
if notification_topic_urn is not None:
self.notification_topic_urn = notification_topic_urn
if notification_topic_name is not None:
self.notification_topic_name = notification_topic_name
if notification_metadata is not None:
self.notification_metadata = notification_metadata
if create_time is not None:
self.create_time = create_time
@property
def lifecycle_hook_name(self):
"""Gets the lifecycle_hook_name of this LifecycleHookList.
Lifecycle hook name.
:return: The lifecycle_hook_name of this LifecycleHookList.
:rtype: str
"""
return self._lifecycle_hook_name
@lifecycle_hook_name.setter
def lifecycle_hook_name(self, lifecycle_hook_name):
"""Sets the lifecycle_hook_name of this LifecycleHookList.
Lifecycle hook name.
:param lifecycle_hook_name: The lifecycle_hook_name of this LifecycleHookList.
:type: str
"""
self._lifecycle_hook_name = lifecycle_hook_name
@property
def lifecycle_hook_type(self):
"""Gets the lifecycle_hook_type of this LifecycleHookList.
Lifecycle hook type: INSTANCE_TERMINATING or INSTANCE_LAUNCHING.
:return: The lifecycle_hook_type of this LifecycleHookList.
:rtype: str
"""
return self._lifecycle_hook_type
@lifecycle_hook_type.setter
def lifecycle_hook_type(self, lifecycle_hook_type):
"""Sets the lifecycle_hook_type of this LifecycleHookList.
Lifecycle hook type: INSTANCE_TERMINATING or INSTANCE_LAUNCHING.
:param lifecycle_hook_type: The lifecycle_hook_type of this LifecycleHookList.
:type: str
"""
self._lifecycle_hook_type = lifecycle_hook_type
@property
def default_result(self):
"""Gets the default_result of this LifecycleHookList.
Default callback action of the lifecycle hook: ABANDON or CONTINUE.
:return: The default_result of this LifecycleHookList.
:rtype: str
"""
return self._default_result
@default_result.setter
def default_result(self, default_result):
"""Sets the default_result of this LifecycleHookList.
Default callback action of the lifecycle hook: ABANDON or CONTINUE.
:param default_result: The default_result of this LifecycleHookList.
:type: str
"""
self._default_result = default_result
@property
def default_timeout(self):
"""Gets the default_timeout of this LifecycleHookList.
Lifecycle hook timeout, in seconds.
:return: The default_timeout of this LifecycleHookList.
:rtype: int
"""
return self._default_timeout
@default_timeout.setter
def default_timeout(self, default_timeout):
"""Sets the default_timeout of this LifecycleHookList.
Lifecycle hook timeout, in seconds.
:param default_timeout: The default_timeout of this LifecycleHookList.
:type: int
"""
self._default_timeout = default_timeout
@property
def notification_topic_urn(self):
"""Gets the notification_topic_urn of this LifecycleHookList.
Unique resource identifier (URN) of the topic in the SMN service.
:return: The notification_topic_urn of this LifecycleHookList.
:rtype: str
"""
return self._notification_topic_urn
@notification_topic_urn.setter
def notification_topic_urn(self, notification_topic_urn):
"""Sets the notification_topic_urn of this LifecycleHookList.
Unique resource identifier (URN) of the topic in the SMN service.
:param notification_topic_urn: The notification_topic_urn of this LifecycleHookList.
:type: str
"""
self._notification_topic_urn = notification_topic_urn
@property
def notification_topic_name(self):
"""Gets the notification_topic_name of this LifecycleHookList.
Resource name of the topic in the SMN service.
:return: The notification_topic_name of this LifecycleHookList.
:rtype: str
"""
return self._notification_topic_name
@notification_topic_name.setter
def notification_topic_name(self, notification_topic_name):
"""Sets the notification_topic_name of this LifecycleHookList.
Resource name of the topic in the SMN service.
:param notification_topic_name: The notification_topic_name of this LifecycleHookList.
:type: str
"""
self._notification_topic_name = notification_topic_name
@property
def notification_metadata(self):
"""Gets the notification_metadata of this LifecycleHookList.
Customized notification message.
:return: The notification_metadata of this LifecycleHookList.
:rtype: str
"""
return self._notification_metadata
@notification_metadata.setter
def notification_metadata(self, notification_metadata):
"""Sets the notification_metadata of this LifecycleHookList.
Customized notification message.
:param notification_metadata: The notification_metadata of this LifecycleHookList.
:type: str
"""
self._notification_metadata = notification_metadata
@property
def create_time(self):
"""Gets the create_time of this LifecycleHookList.
Time when the lifecycle hook was created, in UTC.
:return: The create_time of this LifecycleHookList.
:rtype: datetime
"""
return self._create_time
@create_time.setter
def create_time(self, create_time):
"""Sets the create_time of this LifecycleHookList.
Time when the lifecycle hook was created, in UTC.
:param create_time: The create_time of this LifecycleHookList.
:type: datetime
"""
self._create_time = create_time
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LifecycleHookList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
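A short, hedged construction example for the model above; the field values are invented for illustration.
# Illustrative sketch only -- made-up field values.
hook = LifecycleHookList(
    lifecycle_hook_name="scale-in-hook",
    lifecycle_hook_type="INSTANCE_TERMINATING",
    default_result="ABANDON",
    default_timeout=3600,
)
print(hook.to_dict())   # unset attributes come back as None
print(hook)             # __repr__ pretty-prints the same dict via pprint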
| 30.81
| 223
| 0.646543
|
f0768f1af5082d30570467e49cdea80c2029a2b1
| 875
|
py
|
Python
|
astropy/stats/__init__.py
|
jayvdb/astropy
|
bc6d8f106dd5b60bf57a8e6e29c4e2ae2178991f
|
[
"BSD-3-Clause"
] | 445
|
2019-01-26T13:50:26.000Z
|
2022-03-18T05:17:38.000Z
|
astropy/stats/__init__.py
|
jayvdb/astropy
|
bc6d8f106dd5b60bf57a8e6e29c4e2ae2178991f
|
[
"BSD-3-Clause"
] | 242
|
2019-01-29T15:48:27.000Z
|
2022-03-31T22:09:21.000Z
|
astropy/stats/__init__.py
|
jayvdb/astropy
|
bc6d8f106dd5b60bf57a8e6e29c4e2ae2178991f
|
[
"BSD-3-Clause"
] | 31
|
2019-03-10T09:51:27.000Z
|
2022-02-14T23:11:12.000Z
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains statistical tools provided for or used by Astropy.
While the `scipy.stats` package contains a wide range of statistical
tools, it is a general-purpose package, and is missing some that are
particularly useful to astronomy or are used in an atypical way in
astronomy. This package is intended to provide such functionality, but
*not* to replace `scipy.stats` if its implementation satisfies
astronomers' needs.
"""
from .funcs import * # noqa
from .biweight import * # noqa
from .sigma_clipping import * # noqa
from .jackknife import * # noqa
from .circstats import * # noqa
from .bayesian_blocks import * # noqa
from .histogram import * # noqa
from .info_theory import * # noqa
from .lombscargle import * # noqa
from .spatial import * # noqa
from .bls import * # noqa
| 35
| 75
| 0.752
|
9973462c7f138d73d5b5ea9d49ad48b85bca8d3e
| 280
|
py
|
Python
|
back/markdownchat/chat/models.py
|
ivanseibel/markdown-chat
|
3afe687183a49ba5426dc1636fe1f49de5b6f304
|
[
"MIT"
] | 1
|
2022-01-20T17:50:03.000Z
|
2022-01-20T17:50:03.000Z
|
back/markdownchat/chat/models.py
|
ivanseibel/markdown-chat
|
3afe687183a49ba5426dc1636fe1f49de5b6f304
|
[
"MIT"
] | 1
|
2021-09-06T22:38:10.000Z
|
2021-09-06T22:38:10.000Z
|
back/markdownchat/chat/models.py
|
ivanseibel/markdown-chat
|
3afe687183a49ba5426dc1636fe1f49de5b6f304
|
[
"MIT"
] | null | null | null |
from django.db import models
class SignedUser(models.Model):
room = models.CharField(blank=False, null=False, max_length=50)
username = models.CharField(blank=False, null=False, max_length=50)
channel_name = models.CharField(blank=False, null=False, max_length=100)
| 35
| 76
| 0.760714
|
27171fbc12333917b03d104c4331137fe31e932d
| 2,173
|
py
|
Python
|
src/mfactcheck/pipelines/multi_sent.py
|
D-Roberts/multilingual_nli_ECIR
|
e7466f5e6c6b6246ae37c1c951003c13f56d84c8
|
[
"Apache-2.0"
] | 2
|
2021-02-13T22:57:33.000Z
|
2021-03-29T09:41:08.000Z
|
src/mfactcheck/pipelines/multi_sent.py
|
D-Roberts/multilingual_nli_ECIR
|
e7466f5e6c6b6246ae37c1c951003c13f56d84c8
|
[
"Apache-2.0"
] | null | null | null |
src/mfactcheck/pipelines/multi_sent.py
|
D-Roberts/multilingual_nli_ECIR
|
e7466f5e6c6b6246ae37c1c951003c13f56d84c8
|
[
"Apache-2.0"
] | null | null | null |
"""Pipeline onnx client Sentence Selector cpu"""
import collections
import csv
import json
import os
import numpy as np
from mfactcheck.multi_retriever.sentences.data import (
SentenceProcessor,
get_eval_data,
get_topk_sentences_eval,
)
from mfactcheck.multi_retriever.sentences.config_util import _get_sent_configs
from .base import Pipeline
from mfactcheck.utils.log_helper import LogHelper
LogHelper.setup()
logger = LogHelper.get_logger(os.path.splitext(os.path.basename(__file__))[0])
class MultiSentPipeline(Pipeline):
def __init__(self, module="sent", args=None, args_parser=_get_sent_configs):
super().__init__(module, args, args_parser)
self.processor = SentenceProcessor()
self.label_list = self.processor.get_labels()
self.num_labels = len(self.label_list)
self.label_verification_list = self.processor.get_labels_verification()
def __call__(self):
"""Classify verification label given input claim-sentence pairs"""
eval_features, self.num_eg = get_eval_data(
logger,
db_path=self.args.db_path,
data_dir=self.args.data_dir,
output_file_name=self.args.predict_sentence_file_name,
processor=self.processor,
tokenizer=self.tokenizer,
output_mode="classification",
label_list=self.label_list,
label_verification_list=self.label_verification_list,
max_seq_length=self.args.max_seq_length,
dataset=self.args.dataset,
do_doc_process=self.args.do_doc_process,
add_ro=self.args.add_ro,
doc_file=self.args.test_doc_file,
ro_doc_file=self.args.dev_ro_doc_file,
api=self.args.api,
)
logits, _, new_guids, guids_map = super().__call__(eval_features, self.num_eg)
# topk selector: get dataset for nli module (dev, test)
get_topk_sentences_eval(
zip(new_guids, logits),
guids_map,
os.path.join(self.args.data_dir, self.args.predict_sentence_file_name),
self.args.predict_rte_file,
self.args.sent_k,
)
| 35.622951
| 86
| 0.684307
|
637ef0ff4fb778956845f201b2fc7d19c7351f75
| 7,714
|
py
|
Python
|
calm/dsl/config/config.py
|
opywan/calm-dsl
|
1d89436d039a39265a0ae806022be5b52e757ac0
|
[
"Apache-2.0"
] | null | null | null |
calm/dsl/config/config.py
|
opywan/calm-dsl
|
1d89436d039a39265a0ae806022be5b52e757ac0
|
[
"Apache-2.0"
] | null | null | null |
calm/dsl/config/config.py
|
opywan/calm-dsl
|
1d89436d039a39265a0ae806022be5b52e757ac0
|
[
"Apache-2.0"
] | 1
|
2020-04-07T12:21:13.000Z
|
2020-04-07T12:21:13.000Z
|
import configparser
import errno
import os
from jinja2 import Environment, PackageLoader
from calm.dsl.tools import get_logging_handle
from .schema import validate_config, validate_init_config
LOG = get_logging_handle(__name__)
_CONFIG = None
_CONFIG_FILE = None
_INIT = None
def make_file_dir(path, is_dir=False):
"""creates the file directory if not present"""
# Create parent directory if not present
if not os.path.exists(os.path.dirname(os.path.realpath(path))):
try:
LOG.debug("Creating directory for file {}".format(path))
os.makedirs(os.path.dirname(os.path.realpath(path)))
LOG.debug("Success")
except OSError as exc:
if exc.errno != errno.EEXIST:
raise Exception("[{}] - {}".format(exc["code"], exc["error"]))
if is_dir and (not os.path.exists(path)):
os.makedirs(path)
def get_init_file():
"""Returns the init file location"""
init_file = os.path.join(os.path.expanduser("~"), ".calm", "init.ini")
make_file_dir(init_file)
return init_file
def get_default_config_file():
"""Returns default location of config file"""
user_config_file = os.path.join(os.path.expanduser("~"), ".calm", "config.ini")
make_file_dir(user_config_file)
return user_config_file
def get_default_db_file():
"""Returns default location of db file"""
dsl_db_file = os.path.join(os.path.expanduser("~"), ".calm", "dsl.db")
make_file_dir(dsl_db_file)
return dsl_db_file
def get_default_local_dir():
"""Returns the default location for local dir"""
local_dir = os.path.join(os.path.expanduser("~"), ".calm", ".local")
make_file_dir(local_dir, is_dir=True)
return local_dir
def get_user_config_file():
"""Returns the config file location"""
global _CONFIG_FILE
cwd = os.getcwd()
if not _CONFIG_FILE:
config_file = None
if "config.ini" in os.listdir(cwd):
config_file = os.path.join(cwd, "config.ini")
elif os.path.exists(get_init_file()):
init_obj = get_init_data()
config_file = None
if "CONFIG" in init_obj:
config_file = init_obj["CONFIG"].get("location", None)
_CONFIG_FILE = config_file or get_default_config_file()
return _CONFIG_FILE
def get_init_data():
"""Returns the init config data"""
global _INIT
if not _INIT:
update_init_obj()
return _INIT
def update_init_obj():
"""updates the global init obj"""
global _INIT
config = configparser.ConfigParser()
config.optionxform = str
init_file = get_init_file()
config.read(init_file)
# Validate init config
if not validate_init_config(config):
raise ValueError(
"Invalid init config file: {}. Please run: calm init dsl".format(init_file)
)
_INIT = config
def update_init_config(config_file, db_file, local_dir):
"""updates the init file data"""
global _CONFIG_FILE
# create required directories
make_file_dir(config_file)
make_file_dir(db_file)
make_file_dir(local_dir, is_dir=True)
init_file = get_init_file()
LOG.debug("Rendering init template")
text = _render_init_template(config_file, db_file, local_dir)
LOG.debug("Success")
# UPDATE global _CONFIG_FILE object
_CONFIG_FILE = config_file
# Write config
LOG.debug("Writing configuration to '{}'".format(init_file))
with open(init_file, "w") as fd:
fd.write(text)
LOG.debug("Success")
# Update existing init object
update_init_obj()
def _render_init_template(
config_file, db_file, local_dir, schema_file="init.ini.jinja2"
):
"""renders the init template"""
loader = PackageLoader(__name__, "")
env = Environment(loader=loader)
template = env.get_template(schema_file)
text = template.render(
config_file=config_file, db_file=db_file, local_dir=local_dir,
)
return text.strip() + os.linesep
def _render_config_template(
ip,
port,
username,
password,
project_name,
log_level,
schema_file="config.ini.jinja2",
):
"""renders the config template"""
loader = PackageLoader(__name__, "")
env = Environment(loader=loader)
template = env.get_template(schema_file)
text = template.render(
ip=ip,
port=port,
username=username,
password=password,
project_name=project_name,
log_level=log_level,
)
return text.strip() + os.linesep
def init_config(
ip, port, username, password, project_name, log_level, config_file=None
):
"""Writes the configuration to config file / default config file"""
# Default user config file
user_config_file = get_user_config_file()
config_file = config_file or user_config_file
# Render config template
LOG.debug("Rendering configuration template")
text = _render_config_template(
ip, port, username, password, project_name, log_level
)
LOG.debug("Success")
# Write config
LOG.debug("Writing configuration to '{}'".format(config_file))
with open(config_file, "w") as fd:
fd.write(text)
LOG.debug("Success")
def get_config(config_file=None):
"""Returns the config object"""
global _CONFIG, _CONFIG_FILE
if config_file:
_CONFIG_FILE = config_file
if (not _CONFIG) or (config_file):
# Create config object
user_config_file = get_user_config_file()
config = configparser.ConfigParser()
config.optionxform = str # Maintaining case sensitivity for field names
config.read(user_config_file)
# Check presence of file
if not os.path.exists(user_config_file):
raise FileNotFoundError(
"File {} not found. Please run: calm init dsl".format(user_config_file)
)
# Validate the config file
if not validate_config(config):
raise ValueError(
"Invalid config file: {}. Please run: calm init dsl".format(
user_config_file
)
)
_CONFIG = config
return _CONFIG
def set_config(
host,
port,
username,
password,
project_name,
db_location,
log_level,
config_file,
local_dir,
):
"""writes the configuration to config file"""
config = get_config()
init_obj = get_init_data()
host = host or config["SERVER"]["pc_ip"]
username = username or config["SERVER"]["pc_username"]
port = port or config["SERVER"]["pc_port"]
password = password or config["SERVER"]["pc_password"]
project_name = project_name or config["PROJECT"]["name"]
log_level = log_level or config["LOG"]["level"]
logging_levels = LOG.get_logging_levels()
if log_level not in logging_levels:
raise ValueError("Invalid log level. Select from {}".format(logging_levels))
make_file_dir(config_file)
init_config(
host,
port,
username,
password,
project_name,
log_level,
config_file=config_file,
)
db_location = db_location or init_obj["DB"]["location"]
local_dir = local_dir or init_obj["LOCAL_DIR"]["location"]
config_file = config_file or init_obj["CONFIG"]["location"]
# Update init config
update_init_config(
config_file=config_file, db_file=db_location, local_dir=local_dir
)
def print_config():
"""prints the configuration"""
config_file = get_user_config_file()
LOG.debug("Fetching configuration from '{}'".format(config_file))
print("")
with open(config_file) as fd:
print(fd.read())
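A hedged sketch of reading the resulting configuration; it assumes `calm init dsl` has already written the config and init files, and the section and key names are the ones the accessors above use.
# Illustrative sketch only -- assumes `calm init dsl` has already created
# ~/.calm/config.ini and ~/.calm/init.ini.
config = get_config()
print(config["SERVER"]["pc_ip"], config["SERVER"]["pc_port"])
print(config["PROJECT"]["name"], config["LOG"]["level"])

init_obj = get_init_data()
print(init_obj["DB"]["location"], init_obj["LOCAL_DIR"]["location"])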
| 25.627907
| 88
| 0.654913
|
f07b798ca8f38e3b115da93fe94db93857bce477
| 3,214
|
py
|
Python
|
mysite/settings.py
|
bheavner/django_girls
|
3456eea7c488584af98710aaec6411c965491d8a
|
[
"MIT"
] | null | null | null |
mysite/settings.py
|
bheavner/django_girls
|
3456eea7c488584af98710aaec6411c965491d8a
|
[
"MIT"
] | null | null | null |
mysite/settings.py
|
bheavner/django_girls
|
3456eea7c488584af98710aaec6411c965491d8a
|
[
"MIT"
] | null | null | null |
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.2.24.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '1-hu_pe)vvw(a8!7x2h68o5r%vbp^wop8uz1=63alvav18cnaj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', '.pythonanywhere.com']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog.apps.BlogConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Los_Angeles'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| 26.130081
| 91
| 0.698818
|
3bb5a13ae7c2cea9ef71a4042839d87cd1925bb5
| 12,342
|
py
|
Python
|
docs/source/conf.py
|
tjb900/distributed
|
f6bf1b7105f508f7435df898ee3ac00a2edefd65
|
[
"BSD-3-Clause"
] | null | null | null |
docs/source/conf.py
|
tjb900/distributed
|
f6bf1b7105f508f7435df898ee3ac00a2edefd65
|
[
"BSD-3-Clause"
] | null | null | null |
docs/source/conf.py
|
tjb900/distributed
|
f6bf1b7105f508f7435df898ee3ac00a2edefd65
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Dask.distributed documentation build configuration file, created by
# sphinx-quickstart on Tue Oct 6 14:42:44 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.autosummary',
'sphinx.ext.extlinks',
'sphinx.ext.intersphinx',
'numpydoc',
]
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Dask.distributed'
copyright = u'2016, Anaconda, Inc.'
author = u'Anaconda, Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import distributed
version = distributed.__version__
# The full version, including alpha/beta/rc tags.
release = distributed.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# Taken from docs.readthedocs.io:
# on_rtd is whether we are on readthedocs.io
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'distributeddoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'distributed.tex', u'Dask.distributed Documentation',
u'Matthew Rocklin', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'Dask.distributed', u'Dask.distributed Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Dask.distributed', u'Dask.distributed Documentation',
author, 'Dask.distributed', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Link to GitHub issues and pull requests using :pr:`1234` and :issue:`1234`
# syntax
extlinks = {
'issue': ('https://github.com/dask/distributed/issues/%s', 'GH#'),
'pr': ('https://github.com/dask/distributed/pull/%s', 'GH#')
}
# Configuration for intersphinx: refer to the Python standard library
# and the Numpy documentation.
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None),
'numpy': ('http://docs.scipy.org/doc/numpy', None),
}
def setup(app):
app.add_stylesheet("http://dask.pydata.org/en/latest/_static/style.css")
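# Hedged aside (not part of the original conf.py): the `extlinks` mapping above
# lets the .rst sources write :issue:`1234` / :pr:`1234`, which Sphinx expands
# by interpolating the target into the URL and prefixing the link text. The
# helper below is only an illustrative sketch of that expansion; expand_extlink
# is a hypothetical name, not a Sphinx API.
def expand_extlink(role, target):
    base_url, prefix = extlinks[role]
    return base_url % target, '{0}{1}'.format(prefix, target)
# e.g. expand_extlink('issue', '1234')
# -> ('https://github.com/dask/distributed/issues/1234', 'GH#1234')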
| 31.727506
| 80
| 0.718603
|
67dd463ce455c8bae0c5f475f6942f60905663cc
| 21,643
|
py
|
Python
|
sdk/python/pulumi_google_native/networkconnectivity/v1/policy_based_route_iam_policy.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/networkconnectivity/v1/policy_based_route_iam_policy.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/networkconnectivity/v1/policy_based_route_iam_policy.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['PolicyBasedRouteIamPolicyArgs', 'PolicyBasedRouteIamPolicy']
@pulumi.input_type
class PolicyBasedRouteIamPolicyArgs:
def __init__(__self__, *,
policy_based_route_id: pulumi.Input[str],
audit_configs: Optional[pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]]] = None,
bindings: Optional[pulumi.Input[Sequence[pulumi.Input['BindingArgs']]]] = None,
etag: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
update_mask: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[int]] = None):
"""
The set of arguments for constructing a PolicyBasedRouteIamPolicy resource.
:param pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]] audit_configs: Specifies cloud audit logging configuration for this policy.
:param pulumi.Input[Sequence[pulumi.Input['BindingArgs']]] bindings: Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:alice@example.com`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
:param pulumi.Input[str] etag: `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
:param pulumi.Input[str] update_mask: OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the mask will be modified. If no mask is provided, the following default mask is used: `paths: "bindings, etag"`
:param pulumi.Input[int] version: Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
pulumi.set(__self__, "policy_based_route_id", policy_based_route_id)
if audit_configs is not None:
pulumi.set(__self__, "audit_configs", audit_configs)
if bindings is not None:
pulumi.set(__self__, "bindings", bindings)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if project is not None:
pulumi.set(__self__, "project", project)
if update_mask is not None:
pulumi.set(__self__, "update_mask", update_mask)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter(name="policyBasedRouteId")
def policy_based_route_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "policy_based_route_id")
@policy_based_route_id.setter
def policy_based_route_id(self, value: pulumi.Input[str]):
pulumi.set(self, "policy_based_route_id", value)
@property
@pulumi.getter(name="auditConfigs")
def audit_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]]]:
"""
Specifies cloud audit logging configuration for this policy.
"""
return pulumi.get(self, "audit_configs")
@audit_configs.setter
def audit_configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]]]):
pulumi.set(self, "audit_configs", value)
@property
@pulumi.getter
def bindings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BindingArgs']]]]:
"""
Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:alice@example.com`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
"""
return pulumi.get(self, "bindings")
@bindings.setter
def bindings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BindingArgs']]]]):
pulumi.set(self, "bindings", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
`etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="updateMask")
def update_mask(self) -> Optional[pulumi.Input[str]]:
"""
OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the mask will be modified. If no mask is provided, the following default mask is used: `paths: "bindings, etag"`
"""
return pulumi.get(self, "update_mask")
@update_mask.setter
def update_mask(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "update_mask", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "version", value)
class PolicyBasedRouteIamPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
audit_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AuditConfigArgs']]]]] = None,
bindings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BindingArgs']]]]] = None,
etag: Optional[pulumi.Input[str]] = None,
policy_based_route_id: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
update_mask: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
Note - this resource's API doesn't support deletion. When deleted, the resource will persist
on Google Cloud even though it will be deleted from Pulumi state.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AuditConfigArgs']]]] audit_configs: Specifies cloud audit logging configuration for this policy.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BindingArgs']]]] bindings: Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:alice@example.com`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
:param pulumi.Input[str] etag: `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
:param pulumi.Input[str] update_mask: OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the mask will be modified. If no mask is provided, the following default mask is used: `paths: "bindings, etag"`
:param pulumi.Input[int] version: Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PolicyBasedRouteIamPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
Note - this resource's API doesn't support deletion. When deleted, the resource will persist
on Google Cloud even though it will be deleted from Pulumi state.
:param str resource_name: The name of the resource.
:param PolicyBasedRouteIamPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PolicyBasedRouteIamPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
audit_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AuditConfigArgs']]]]] = None,
bindings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BindingArgs']]]]] = None,
etag: Optional[pulumi.Input[str]] = None,
policy_based_route_id: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
update_mask: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[int]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PolicyBasedRouteIamPolicyArgs.__new__(PolicyBasedRouteIamPolicyArgs)
__props__.__dict__["audit_configs"] = audit_configs
__props__.__dict__["bindings"] = bindings
__props__.__dict__["etag"] = etag
if policy_based_route_id is None and not opts.urn:
raise TypeError("Missing required property 'policy_based_route_id'")
__props__.__dict__["policy_based_route_id"] = policy_based_route_id
__props__.__dict__["project"] = project
__props__.__dict__["update_mask"] = update_mask
__props__.__dict__["version"] = version
super(PolicyBasedRouteIamPolicy, __self__).__init__(
'google-native:networkconnectivity/v1:PolicyBasedRouteIamPolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'PolicyBasedRouteIamPolicy':
"""
Get an existing PolicyBasedRouteIamPolicy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = PolicyBasedRouteIamPolicyArgs.__new__(PolicyBasedRouteIamPolicyArgs)
__props__.__dict__["audit_configs"] = None
__props__.__dict__["bindings"] = None
__props__.__dict__["etag"] = None
__props__.__dict__["version"] = None
return PolicyBasedRouteIamPolicy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="auditConfigs")
def audit_configs(self) -> pulumi.Output[Sequence['outputs.AuditConfigResponse']]:
"""
Specifies cloud audit logging configuration for this policy.
"""
return pulumi.get(self, "audit_configs")
@property
@pulumi.getter
def bindings(self) -> pulumi.Output[Sequence['outputs.BindingResponse']]:
"""
Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:alice@example.com`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
"""
return pulumi.get(self, "bindings")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
`etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def version(self) -> pulumi.Output[int]:
"""
Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
return pulumi.get(self, "version")
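# Hedged usage sketch (not part of the generated SDK module): a minimal Pulumi
# program that grants a role on an existing policy-based route. The project id,
# route id, role and member are illustrative assumptions, and the example is
# left commented because it is meant to run under `pulumi up`, not on import.
#
#     import pulumi_google_native.networkconnectivity.v1 as networkconnectivity
#
#     iam_policy = networkconnectivity.PolicyBasedRouteIamPolicy(
#         'example-pbr-iam',
#         policy_based_route_id='my-route',
#         project='my-project',
#         bindings=[networkconnectivity.BindingArgs(
#             role='roles/viewer',
#             members=['user:alice@example.com'],
#         )],
#     )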
| 81.059925
| 1,118
| 0.711777
|
b0187997bf5c72498496acc700c720ed20fef4cf
| 124,922
|
py
|
Python
|
salt/minion.py
|
ahammond/salt
|
945b21b70dbe708716d7b009a2005ef0acf76e6b
|
[
"Apache-2.0"
] | null | null | null |
salt/minion.py
|
ahammond/salt
|
945b21b70dbe708716d7b009a2005ef0acf76e6b
|
[
"Apache-2.0"
] | null | null | null |
salt/minion.py
|
ahammond/salt
|
945b21b70dbe708716d7b009a2005ef0acf76e6b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Routines to set up a minion
'''
# Import python libs
from __future__ import absolute_import, print_function, with_statement
import os
import re
import sys
import copy
import time
import types
import signal
import fnmatch
import logging
import threading
import traceback
import contextlib
import multiprocessing
from random import randint, shuffle
from stat import S_IMODE
# Import Salt Libs
# pylint: disable=import-error,no-name-in-module,redefined-builtin
import salt.ext.six as six
if six.PY3:
import ipaddress
else:
import salt.ext.ipaddress as ipaddress
from salt.ext.six.moves import range
# pylint: enable=no-name-in-module,redefined-builtin
# Import third party libs
try:
import zmq
# TODO: cleanup
import zmq.eventloop.ioloop
# support pyzmq 13.0.x, TODO: remove once we force people to 14.0.x
if not hasattr(zmq.eventloop.ioloop, 'ZMQIOLoop'):
zmq.eventloop.ioloop.ZMQIOLoop = zmq.eventloop.ioloop.IOLoop
LOOP_CLASS = zmq.eventloop.ioloop.ZMQIOLoop
HAS_ZMQ = True
except ImportError:
import tornado.ioloop
LOOP_CLASS = tornado.ioloop.IOLoop
HAS_ZMQ = False
HAS_RANGE = False
try:
import seco.range
HAS_RANGE = True
except ImportError:
pass
HAS_PSUTIL = False
try:
import salt.utils.psutil_compat as psutil
HAS_PSUTIL = True
except ImportError:
pass
HAS_RESOURCE = False
try:
import resource
HAS_RESOURCE = True
except ImportError:
pass
try:
import zmq.utils.monitor
HAS_ZMQ_MONITOR = True
except ImportError:
HAS_ZMQ_MONITOR = False
# pylint: enable=import-error
# Import salt libs
import salt
import salt.client
import salt.crypt
import salt.loader
import salt.beacons
import salt.engines
import salt.payload
import salt.syspaths
import salt.utils
import salt.utils.context
import salt.utils.jid
import salt.pillar
import salt.utils.args
import salt.utils.event
import salt.utils.minion
import salt.utils.minions
import salt.utils.schedule
import salt.utils.error
import salt.utils.zeromq
import salt.defaults.exitcodes
import salt.cli.daemons
import salt.log.setup
from salt.config import DEFAULT_MINION_OPTS
from salt.defaults import DEFAULT_TARGET_DELIM
from salt.executors import FUNCTION_EXECUTORS
from salt.utils.debug import enable_sigusr1_handler
from salt.utils.event import tagify
from salt.utils.odict import OrderedDict
from salt.utils.process import (default_signals,
SignalHandlingMultiprocessingProcess,
ProcessManager)
from salt.exceptions import (
CommandExecutionError,
CommandNotFoundError,
SaltInvocationError,
SaltReqTimeoutError,
SaltClientError,
SaltSystemExit,
SaltDaemonNotRunning,
SaltException,
)
import tornado.gen # pylint: disable=F0401
import tornado.ioloop # pylint: disable=F0401
log = logging.getLogger(__name__)
# To set up a minion:
# 1. Read in the configuration
# 2. Generate the function mapping dict
# 3. Authenticate with the master
# 4. Store the AES key
# 5. Connect to the publisher
# 6. Handle publications
def resolve_dns(opts, fallback=True):
'''
Resolves the master_ip and master_uri options
'''
ret = {}
check_dns = True
if (opts.get('file_client', 'remote') == 'local' and
not opts.get('use_master_when_local', False)):
check_dns = False
if check_dns is True:
# Because I import salt.log below I need to re-import salt.utils here
import salt.utils
try:
if opts['master'] == '':
raise SaltSystemExit
ret['master_ip'] = \
salt.utils.dns_check(opts['master'], True, opts['ipv6'])
except SaltClientError:
if opts['retry_dns']:
while True:
import salt.log
msg = ('Master hostname: \'{0}\' not found. Retrying in {1} '
'seconds').format(opts['master'], opts['retry_dns'])
if salt.log.setup.is_console_configured():
log.error(msg)
else:
print('WARNING: {0}'.format(msg))
time.sleep(opts['retry_dns'])
try:
ret['master_ip'] = salt.utils.dns_check(
opts['master'], True, opts['ipv6']
)
break
except SaltClientError:
pass
else:
if fallback:
ret['master_ip'] = '127.0.0.1'
else:
raise
except SaltSystemExit:
unknown_str = 'unknown address'
master = opts.get('master', unknown_str)
if master == '':
master = unknown_str
if opts.get('__role') == 'syndic':
                err = 'Master address: \'{0}\' could not be resolved. Invalid or unresolvable address. Set \'syndic_master\' value in minion config.'.format(master)
            else:
                err = 'Master address: \'{0}\' could not be resolved. Invalid or unresolvable address. Set \'master\' value in minion config.'.format(master)
log.error(err)
raise SaltSystemExit(code=42, msg=err)
else:
ret['master_ip'] = '127.0.0.1'
if 'master_ip' in ret and 'master_ip' in opts:
if ret['master_ip'] != opts['master_ip']:
log.warning('Master ip address changed from {0} to {1}'.format(opts['master_ip'],
ret['master_ip'])
)
ret['master_uri'] = 'tcp://{ip}:{port}'.format(ip=ret['master_ip'],
port=opts['master_port'])
return ret
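# Hedged illustration (not part of the original module): with a resolvable
# master and the default return port, resolve_dns yields a dict shaped like
# the one below; the hostname and address are illustrative values only.
#
#     >>> resolve_dns({'file_client': 'remote', 'master': 'salt.example.com',
#     ...              'ipv6': False, 'retry_dns': 30, 'master_port': 4506})
#     {'master_ip': '203.0.113.10',
#      'master_uri': 'tcp://203.0.113.10:4506'}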
def prep_ip_port(opts):
ret = {}
if opts['master_uri_format'] == 'ip_only':
ret['master'] = opts['master']
else:
ip_port = opts['master'].rsplit(":", 1)
if len(ip_port) == 1:
# e.g. master: mysaltmaster
ret['master'] = ip_port[0]
else:
# e.g. master: localhost:1234
# e.g. master: 127.0.0.1:1234
# e.g. master: ::1:1234
ret['master'] = ip_port[0]
ret['master_port'] = ip_port[1]
return ret
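# Hedged illustration (not part of the original module): prep_ip_port only
# splits an optional trailing port off opts['master'].
#
#     >>> prep_ip_port({'master_uri_format': 'default', 'master': '127.0.0.1:1234'})
#     {'master': '127.0.0.1', 'master_port': '1234'}
#     >>> prep_ip_port({'master_uri_format': 'ip_only', 'master': 'mysaltmaster'})
#     {'master': 'mysaltmaster'}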
def get_proc_dir(cachedir, **kwargs):
'''
Given the cache directory, return the directory that process data is
stored in, creating it if it doesn't exist.
The following optional Keyword Arguments are handled:
mode: which is anything os.makedir would accept as mode.
uid: the uid to set, if not set, or it is None or -1 no changes are
made. Same applies if the directory is already owned by this
uid. Must be int. Works only on unix/unix like systems.
gid: the gid to set, if not set, or it is None or -1 no changes are
made. Same applies if the directory is already owned by this
gid. Must be int. Works only on unix/unix like systems.
'''
fn_ = os.path.join(cachedir, 'proc')
mode = kwargs.pop('mode', None)
if mode is None:
mode = {}
else:
mode = {'mode': mode}
if not os.path.isdir(fn_):
# proc_dir is not present, create it with mode settings
os.makedirs(fn_, **mode)
d_stat = os.stat(fn_)
# if mode is not an empty dict then we have an explicit
# dir mode. So lets check if mode needs to be changed.
if mode:
mode_part = S_IMODE(d_stat.st_mode)
if mode_part != mode['mode']:
os.chmod(fn_, (d_stat.st_mode ^ mode_part) | mode['mode'])
if hasattr(os, 'chown'):
# only on unix/unix like systems
uid = kwargs.pop('uid', -1)
gid = kwargs.pop('gid', -1)
# if uid and gid are both -1 then go ahead with
# no changes at all
if (d_stat.st_uid != uid or d_stat.st_gid != gid) and \
[i for i in (uid, gid) if i != -1]:
os.chown(fn_, uid, gid)
return fn_
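# Hedged illustration (not part of the original module): the minion typically
# calls this with its cachedir, yielding '<cachedir>/proc' created with the
# requested mode; the path below is an example value.
#
#     >>> get_proc_dir('/var/cache/salt/minion', mode=0o700)
#     '/var/cache/salt/minion/proc'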
def parse_args_and_kwargs(func, args, data=None):
'''
Wrap load_args_and_kwargs
'''
salt.utils.warn_until(
'Carbon',
'salt.minion.parse_args_and_kwargs() has been renamed to '
'salt.minion.load_args_and_kwargs(). Please change this function call '
'before the Carbon release of Salt.'
)
return load_args_and_kwargs(func, args, data=data)
def load_args_and_kwargs(func, args, data=None, ignore_invalid=False):
'''
Detect the args and kwargs that need to be passed to a function call, and
check them against what was passed.
'''
argspec = salt.utils.args.get_function_argspec(func)
_args = []
_kwargs = {}
invalid_kwargs = []
for arg in args:
if isinstance(arg, six.string_types):
string_arg, string_kwarg = salt.utils.args.parse_input([arg], condition=False) # pylint: disable=W0632
if string_arg:
# Don't append the version that was just derived from parse_cli
# above, that would result in a 2nd call to
# salt.utils.cli.yamlify_arg(), which could mangle the input.
_args.append(arg)
elif string_kwarg:
salt.utils.warn_until(
'Carbon',
'The list of function args and kwargs should be parsed '
'by salt.utils.args.parse_input() before calling '
'salt.minion.load_args_and_kwargs().'
)
if argspec.keywords or next(six.iterkeys(string_kwarg)) in argspec.args:
# Function supports **kwargs or is a positional argument to
# the function.
_kwargs.update(string_kwarg)
else:
# **kwargs not in argspec and parsed argument name not in
# list of positional arguments. This keyword argument is
# invalid.
for key, val in six.iteritems(string_kwarg):
invalid_kwargs.append('{0}={1}'.format(key, val))
continue
# if the arg is a dict with __kwarg__ == True, then its a kwarg
elif isinstance(arg, dict) and arg.pop('__kwarg__', False) is True:
for key, val in six.iteritems(arg):
if argspec.keywords or key in argspec.args:
# Function supports **kwargs or is a positional argument to
# the function.
_kwargs[key] = val
else:
# **kwargs not in argspec and parsed argument name not in
# list of positional arguments. This keyword argument is
# invalid.
invalid_kwargs.append('{0}={1}'.format(key, val))
continue
else:
_args.append(arg)
if invalid_kwargs and not ignore_invalid:
salt.utils.invalid_kwargs(invalid_kwargs)
if argspec.keywords and isinstance(data, dict):
# this function accepts **kwargs, pack in the publish data
for key, val in six.iteritems(data):
_kwargs['__pub_{0}'.format(key)] = val
return _args, _kwargs
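# Hedged illustration (not part of the original module): publications carry
# keyword arguments as dicts flagged with __kwarg__, which this function
# unpacks against the target function's signature; _fake below is a
# hypothetical stand-in for an execution module function.
#
#     >>> def _fake(name, timeout=5, **kwargs):
#     ...     pass
#     >>> load_args_and_kwargs(_fake, ['web01', {'__kwarg__': True, 'timeout': 10}])
#     (['web01'], {'timeout': 10})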
def eval_master_func(opts):
'''
Evaluate master function if master type is 'func'
and save it result in opts['master']
'''
if '__master_func_evaluated' not in opts:
# split module and function and try loading the module
mod, fun = opts['master'].split('.')
try:
master_mod = salt.loader.raw_mod(opts, mod, fun)
# we take whatever the module returns as master address
opts['master'] = master_mod[mod + '.' + fun]()
opts['__master_func_evaluated'] = True
except TypeError:
log.error("Failed to evaluate master address from module '{0}'".format(
opts['master']))
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
log.info('Evaluated master from module: {0}'.format(master_mod))
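# Hedged illustration (not part of the original module): with
#     master_type: func
#     master: my_master_picker.pick
# in the minion config, eval_master_func loads the custom module via
# salt.loader.raw_mod and replaces opts['master'] with the function's return
# value. my_master_picker is a hypothetical module along these lines:
#
#     def pick():
#         # return whichever master this minion should talk to
#         return 'salt1.example.com'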
class MinionBase(object):
def __init__(self, opts):
self.opts = opts
@staticmethod
def process_schedule(minion, loop_interval):
try:
if hasattr(minion, 'schedule'):
minion.schedule.eval()
else:
log.error('Minion scheduler not initialized. Scheduled jobs will not be run.')
return
# Check if scheduler requires lower loop interval than
# the loop_interval setting
if minion.schedule.loop_interval < loop_interval:
loop_interval = minion.schedule.loop_interval
log.debug(
'Overriding loop_interval because of scheduled jobs.'
)
except Exception as exc:
log.error(
'Exception {0} occurred in scheduled job'.format(exc)
)
return loop_interval
def process_beacons(self, functions):
'''
Evaluate all of the configured beacons, grab the config again in case
the pillar or grains changed
'''
if 'config.merge' in functions:
b_conf = functions['config.merge']('beacons')
if b_conf:
return self.beacons.process(b_conf) # pylint: disable=no-member
return []
@tornado.gen.coroutine
def eval_master(self,
opts,
timeout=60,
safe=True,
failed=False):
'''
Evaluates and returns a tuple of the current master address and the pub_channel.
In standard mode, just creates a pub_channel with the given master address.
With master_type=func evaluates the current master address from the given
module and then creates a pub_channel.
With master_type=failover takes the list of masters and loops through them.
The first one that allows the minion to create a pub_channel is then
returned. If this function is called outside the minions initialization
phase (for example from the minions main event-loop when a master connection
loss was detected), 'failed' should be set to True. The current
(possibly failed) master will then be removed from the list of masters.
'''
# return early if we are not connecting to a master
if opts['master_type'] == 'disable':
log.warning('Master is set to disable, skipping connection')
self.connected = False
raise tornado.gen.Return((None, None))
# check if master_type was altered from its default
elif opts['master_type'] != 'str' and opts['__role'] != 'syndic':
# check for a valid keyword
if opts['master_type'] == 'func':
eval_master_func(opts)
# if failover is set, master has to be of type list
elif opts['master_type'] == 'failover':
if isinstance(opts['master'], list):
log.info('Got list of available master addresses:'
' {0}'.format(opts['master']))
if opts['master_shuffle']:
if opts['master_failback']:
secondary_masters = opts['master'][1:]
shuffle(secondary_masters)
opts['master'][1:] = secondary_masters
else:
shuffle(opts['master'])
opts['auth_tries'] = 0
if opts['master_failback'] and opts['master_failback_interval'] == 0:
opts['master_failback_interval'] = opts['master_alive_interval']
# if opts['master'] is a str and we have never created opts['master_list']
elif isinstance(opts['master'], str) and ('master_list' not in opts):
# We have a string, but a list was what was intended. Convert.
# See issue 23611 for details
opts['master'] = [opts['master']]
elif opts['__role'] == 'syndic':
log.info('Syndic setting master_syndic to \'{0}\''.format(opts['master']))
# if failed=True, the minion was previously connected
# we're probably called from the minions main-event-loop
# because a master connection loss was detected. remove
# the possibly failed master from the list of masters.
elif failed:
log.info('Moving possibly failed master {0} to the end of'
' the list of masters'.format(opts['master']))
if opts['master'] in opts['master_list']:
# create new list of master with the possibly failed
# one moved to the end
failed_master = opts['master']
opts['master'] = [x for x in opts['master_list'] if opts['master'] != x]
opts['master'].append(failed_master)
else:
opts['master'] = opts['master_list']
else:
msg = ('master_type set to \'failover\' but \'master\' '
'is not of type list but of type '
'{0}'.format(type(opts['master'])))
log.error(msg)
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
                # If failover is set, the minion has to fail over on DNS errors instead of retrying DNS resolution.
# See issue 21082 for details
if opts['retry_dns']:
msg = ('\'master_type\' set to \'failover\' but \'retry_dns\' is not 0. '
'Setting \'retry_dns\' to 0 to failover to the next master on DNS errors.')
log.critical(msg)
opts['retry_dns'] = 0
else:
msg = ('Invalid keyword \'{0}\' for variable '
'\'master_type\''.format(opts['master_type']))
log.error(msg)
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
        # FIXME: if SMinion doesn't define io_loop, it can't switch master; see #29088
# Specify kwargs for the channel factory so that SMinion doesn't need to define an io_loop
# (The channel factories will set a default if the kwarg isn't passed)
factory_kwargs = {'timeout': timeout, 'safe': safe}
if getattr(self, 'io_loop', None):
factory_kwargs['io_loop'] = self.io_loop # pylint: disable=no-member
tries = opts.get('master_tries', 1)
attempts = 0
# if we have a list of masters, loop through them and be
# happy with the first one that allows us to connect
if isinstance(opts['master'], list):
conn = False
# shuffle the masters and then loop through them
local_masters = copy.copy(opts['master'])
last_exc = None
while True:
attempts += 1
if tries > 0:
log.debug('Connecting to master. Attempt {0} '
'of {1}'.format(attempts, tries)
)
else:
log.debug('Connecting to master. Attempt {0} '
'(infinite attempts)'.format(attempts)
)
for master in local_masters:
opts['master'] = master
opts.update(prep_ip_port(opts))
opts.update(resolve_dns(opts))
self.opts = opts
# on first run, update self.opts with the whole master list
# to enable a minion to re-use old masters if they get fixed
if 'master_list' not in opts:
opts['master_list'] = local_masters
try:
pub_channel = salt.transport.client.AsyncPubChannel.factory(opts, **factory_kwargs)
yield pub_channel.connect()
conn = True
break
except SaltClientError as exc:
last_exc = exc
msg = ('Master {0} could not be reached, trying '
'next master (if any)'.format(opts['master']))
log.info(msg)
continue
if not conn:
if attempts == tries:
# Exhausted all attempts. Return exception.
self.connected = False
msg = ('No master could be reached or all masters '
'denied the minions connection attempt.')
log.error(msg)
# If the code reaches this point, 'last_exc'
# should already be set.
raise last_exc # pylint: disable=E0702
else:
self.tok = pub_channel.auth.gen_token('salt')
self.connected = True
raise tornado.gen.Return((opts['master'], pub_channel))
# single master sign in
else:
while True:
attempts += 1
if tries > 0:
log.debug('Connecting to master. Attempt {0} '
'of {1}'.format(attempts, tries)
)
else:
log.debug('Connecting to master. Attempt {0} '
'(infinite attempts)'.format(attempts)
)
opts.update(prep_ip_port(opts))
opts.update(resolve_dns(opts))
try:
pub_channel = salt.transport.client.AsyncPubChannel.factory(self.opts, **factory_kwargs)
yield pub_channel.connect()
self.tok = pub_channel.auth.gen_token('salt')
self.connected = True
raise tornado.gen.Return((opts['master'], pub_channel))
except SaltClientError as exc:
if attempts == tries:
# Exhausted all attempts. Return exception.
self.connected = False
raise exc
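# Hedged illustration (not part of the original module): the failover branch of
# eval_master above corresponds to minion configuration along these lines (all
# values illustrative):
#
#     master_type: failover
#     master:
#       - salt1.example.com
#       - salt2.example.com
#     master_alive_interval: 30
#     master_tries: -1
#     retry_dns: 0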
class SMinion(MinionBase):
'''
Create an object that has loaded all of the minion module functions,
grains, modules, returners etc. The SMinion allows developers to
generate all of the salt minion functions and present them with these
functions for general use.
'''
def __init__(self, opts):
# Late setup of the opts grains, so we can log from the grains module
opts['grains'] = salt.loader.grains(opts)
super(SMinion, self).__init__(opts)
# Clean out the proc directory (default /var/cache/salt/minion/proc)
if (self.opts.get('file_client', 'remote') == 'remote'
or self.opts.get('use_master_when_local', False)):
if HAS_ZMQ:
zmq.eventloop.ioloop.install()
io_loop = LOOP_CLASS.current()
io_loop.run_sync(
lambda: self.eval_master(self.opts, failed=True)
)
self.gen_modules(initial_load=True)
# If configured, cache pillar data on the minion
if self.opts['file_client'] == 'remote' and self.opts.get('minion_pillar_cache', False):
import yaml
pdir = os.path.join(self.opts['cachedir'], 'pillar')
if not os.path.isdir(pdir):
os.makedirs(pdir, 0o700)
ptop = os.path.join(pdir, 'top.sls')
if self.opts['environment'] is not None:
penv = self.opts['environment']
else:
penv = 'base'
cache_top = {penv: {self.opts['id']: ['cache']}}
with salt.utils.fopen(ptop, 'wb') as fp_:
fp_.write(yaml.dump(cache_top))
os.chmod(ptop, 0o600)
cache_sls = os.path.join(pdir, 'cache.sls')
with salt.utils.fopen(cache_sls, 'wb') as fp_:
fp_.write(yaml.dump(self.opts['pillar']))
os.chmod(cache_sls, 0o600)
def gen_modules(self, initial_load=False):
'''
Load all of the modules for the minion
'''
if self.opts.get('master_type') != 'disable':
self.opts['pillar'] = salt.pillar.get_pillar(
self.opts,
self.opts['grains'],
self.opts['id'],
self.opts['environment'],
pillarenv=self.opts.get('pillarenv'),
).compile_pillar()
self.utils = salt.loader.utils(self.opts)
self.functions = salt.loader.minion_mods(self.opts, utils=self.utils,
include_errors=True)
self.serializers = salt.loader.serializers(self.opts)
self.returners = salt.loader.returners(self.opts, self.functions)
self.proxy = salt.loader.proxy(self.opts, self.functions, self.returners, None)
# TODO: remove
self.function_errors = {} # Keep the funcs clean
self.states = salt.loader.states(self.opts,
self.functions,
self.utils,
self.serializers)
self.rend = salt.loader.render(self.opts, self.functions)
self.matcher = Matcher(self.opts, self.functions)
self.functions['sys.reload_modules'] = self.gen_modules
self.executors = salt.loader.executors(self.opts)
class MasterMinion(object):
'''
Create a fully loaded minion function object for generic use on the
master. What makes this class different is that the pillar is
omitted, otherwise everything else is loaded cleanly.
'''
def __init__(
self,
opts,
returners=True,
states=True,
rend=True,
matcher=True,
whitelist=None):
self.opts = salt.config.minion_config(opts['conf_file'])
self.opts.update(opts)
self.whitelist = whitelist
self.opts['grains'] = salt.loader.grains(opts)
self.opts['pillar'] = {}
self.mk_returners = returners
self.mk_states = states
self.mk_rend = rend
self.mk_matcher = matcher
self.gen_modules(initial_load=True)
def gen_modules(self, initial_load=False):
'''
Load all of the modules for the minion
'''
self.utils = salt.loader.utils(self.opts)
self.functions = salt.loader.minion_mods(
self.opts,
utils=self.utils,
whitelist=self.whitelist,
initial_load=initial_load)
self.serializers = salt.loader.serializers(self.opts)
if self.mk_returners:
self.returners = salt.loader.returners(self.opts, self.functions)
if self.mk_states:
self.states = salt.loader.states(self.opts,
self.functions,
self.utils,
self.serializers)
if self.mk_rend:
self.rend = salt.loader.render(self.opts, self.functions)
if self.mk_matcher:
self.matcher = Matcher(self.opts, self.functions)
self.functions['sys.reload_modules'] = self.gen_modules
class MinionManager(MinionBase):
'''
Create a multi minion interface, this creates as many minions as are
defined in the master option and binds each minion object to a respective
master.
'''
# timeout for one of the minions to auth with a master
MINION_CONNECT_TIMEOUT = 5
def __init__(self, opts):
super(MinionManager, self).__init__(opts)
self.auth_wait = self.opts['acceptance_wait_time']
self.max_auth_wait = self.opts['acceptance_wait_time_max']
self.minions = []
if HAS_ZMQ:
zmq.eventloop.ioloop.install()
self.io_loop = LOOP_CLASS.current()
def _bind(self):
# start up the event publisher, so we can see events during startup
self.event_publisher = salt.utils.event.AsyncEventPublisher(
self.opts,
io_loop=self.io_loop,
)
self.event = salt.utils.event.get_event('minion', opts=self.opts, io_loop=self.io_loop)
self.event.subscribe('')
self.event.set_event_handler(self.handle_event)
@tornado.gen.coroutine
def handle_event(self, package):
yield [minion.handle_event(package) for minion in self.minions]
def _spawn_minions(self):
'''
Spawn all the coroutines which will sign in to masters
'''
masters = self.opts['master']
if self.opts['master_type'] == 'failover' or not isinstance(self.opts['master'], list):
masters = [masters]
for master in masters:
s_opts = copy.deepcopy(self.opts)
s_opts['master'] = master
s_opts['multimaster'] = True
s_opts['auth_timeout'] = self.MINION_CONNECT_TIMEOUT
minion = Minion(s_opts,
self.MINION_CONNECT_TIMEOUT,
False,
io_loop=self.io_loop,
loaded_base_name='salt.loader.{0}'.format(s_opts['master']),
)
self.minions.append(minion)
self.io_loop.spawn_callback(self._connect_minion, minion)
@tornado.gen.coroutine
def _connect_minion(self, minion):
'''
Create a minion, and asynchronously connect it to a master
'''
last = 0 # never have we signed in
auth_wait = minion.opts['acceptance_wait_time']
while True:
try:
yield minion.connect_master()
minion.tune_in(start=False)
break
except SaltClientError as exc:
log.error('Error while bringing up minion for multi-master. Is master at {0} responding?'.format(minion.opts['master']))
last = time.time()
if auth_wait < self.max_auth_wait:
auth_wait += self.auth_wait
yield tornado.gen.sleep(auth_wait) # TODO: log?
except Exception as e:
log.critical('Unexpected error while connecting to {0}'.format(minion.opts['master']), exc_info=True)
# Multi Master Tune In
def tune_in(self):
'''
Bind to the masters
This loop will attempt to create connections to masters it hasn't connected
to yet, but once the initial connection is made it is up to ZMQ to do the
reconnect (don't know of an API to get the state here in salt)
'''
self._bind()
# Fire off all the minion coroutines
self._spawn_minions()
# serve forever!
self.io_loop.start()
@property
def restart(self):
for minion in self.minions:
if minion.restart:
return True
return False
def stop(self, signum):
for minion in self.minions:
minion.process_manager.stop_restarting()
minion.process_manager.send_signal_to_processes(signum)
# kill any remaining processes
minion.process_manager.kill_children()
minion.destroy()
def destroy(self):
for minion in self.minions:
minion.destroy()
class Minion(MinionBase):
'''
This class instantiates a minion, runs connections for a minion,
and loads all of the functions into the minion
'''
def __init__(self, opts, timeout=60, safe=True, loaded_base_name=None, io_loop=None): # pylint: disable=W0231
'''
Pass in the options dict
'''
# this means that the parent class doesn't know *which* master we connect to
super(Minion, self).__init__(opts)
self.timeout = timeout
self.safe = safe
self._running = None
self.win_proc = []
self.loaded_base_name = loaded_base_name
self.connected = False
self.restart = False
# Flag meaning minion has finished initialization including first connect to the master.
# True means the Minion is fully functional and ready to handle events.
self.ready = False
if io_loop is None:
if HAS_ZMQ:
zmq.eventloop.ioloop.install()
self.io_loop = LOOP_CLASS.current()
else:
self.io_loop = io_loop
# Warn if ZMQ < 3.2
if HAS_ZMQ:
try:
zmq_version_info = zmq.zmq_version_info()
except AttributeError:
# PyZMQ <= 2.1.9 does not have zmq_version_info, fall back to
# using zmq.zmq_version() and build a version info tuple.
zmq_version_info = tuple(
[int(x) for x in zmq.zmq_version().split('.')] # pylint: disable=no-member
)
if zmq_version_info < (3, 2):
log.warning(
'You have a version of ZMQ less than ZMQ 3.2! There are '
'known connection keep-alive issues with ZMQ < 3.2 which '
'may result in loss of contact with minions. Please '
'upgrade your ZMQ!'
)
        # Late setup of the opts grains, so we can log from the grains
# module. If this is a proxy, however, we need to init the proxymodule
# before we can get the grains. We do this for proxies in the
# post_master_init
if not salt.utils.is_proxy():
self.opts['grains'] = salt.loader.grains(opts)
log.info('Creating minion process manager')
self.process_manager = ProcessManager(name='MinionProcessManager')
self.io_loop.spawn_callback(self.process_manager.run, async=True)
# We don't have the proxy setup yet, so we can't start engines
# Engines need to be able to access __proxy__
if not salt.utils.is_proxy():
self.io_loop.spawn_callback(salt.engines.start_engines, self.opts,
self.process_manager)
# Install the SIGINT/SIGTERM handlers if not done so far
if signal.getsignal(signal.SIGINT) is signal.SIG_DFL:
# No custom signal handling was added, install our own
signal.signal(signal.SIGINT, self._handle_signals)
if signal.getsignal(signal.SIGTERM) is signal.SIG_DFL:
# No custom signal handling was added, install our own
            signal.signal(signal.SIGTERM, self._handle_signals)
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
self._running = False
# escalate the signals to the process manager
self.process_manager.stop_restarting()
self.process_manager.send_signal_to_processes(signum)
# kill any remaining processes
self.process_manager.kill_children()
sys.exit(0)
def sync_connect_master(self, timeout=None):
'''
Block until we are connected to a master
'''
self._sync_connect_master_success = False
log.debug("sync_connect_master")
def on_connect_master_future_done(future):
self._sync_connect_master_success = True
self.io_loop.stop()
self._connect_master_future = self.connect_master()
# finish connecting to master
self._connect_master_future.add_done_callback(on_connect_master_future_done)
if timeout:
self.io_loop.call_later(timeout, self.io_loop.stop)
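        # Run the IO loop until the connect future completes (the callback
        # above stops the loop) or the optional timeout stops it first.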
try:
self.io_loop.start()
except KeyboardInterrupt:
self.destroy()
# I made the following 3 line oddity to preserve traceback.
# Please read PR #23978 before changing, hopefully avoiding regressions.
# Good luck, we're all counting on you. Thanks.
future_exception = self._connect_master_future.exc_info()
if future_exception:
# This needs to be re-raised to preserve restart_on_error behavior.
            six.reraise(*future_exception)
if timeout and self._sync_connect_master_success is False:
raise SaltDaemonNotRunning('Failed to connect to the salt-master')
@tornado.gen.coroutine
def connect_master(self):
'''
Return a future which will complete when you are connected to a master
'''
master, self.pub_channel = yield self.eval_master(self.opts, self.timeout, self.safe)
yield self._post_master_init(master)
# TODO: better name...
@tornado.gen.coroutine
def _post_master_init(self, master):
'''
        Function to finish init after connecting to a master.
        This is primarily loading modules, pillars, etc. (since they need
        to know which master they connected to).
'''
if self.connected:
self.opts['master'] = master
# Initialize pillar before loader to make pillar accessible in modules
self.opts['pillar'] = yield salt.pillar.get_async_pillar(
self.opts,
self.opts['grains'],
self.opts['id'],
self.opts['environment'],
pillarenv=self.opts.get('pillarenv')
).compile_pillar()
self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
self.serial = salt.payload.Serial(self.opts)
self.mod_opts = self._prep_mod_opts()
self.matcher = Matcher(self.opts, self.functions)
self.beacons = salt.beacons.Beacon(self.opts, self.functions)
uid = salt.utils.get_uid(user=self.opts.get('user', None))
self.proc_dir = get_proc_dir(self.opts['cachedir'], uid=uid)
self.schedule = salt.utils.schedule.Schedule(
self.opts,
self.functions,
self.returners,
cleanup=['__master_alive'])
# add default scheduling jobs to the minions scheduler
if self.opts['mine_enabled'] and 'mine.update' in self.functions:
self.schedule.add_job({
'__mine_interval':
{
'function': 'mine.update',
'minutes': self.opts['mine_interval'],
'jid_include': True,
'maxrunning': 2,
'return_job': self.opts.get('mine_return_job', False)
}
}, persist=True)
log.info('Added mine.update to scheduler')
else:
self.schedule.delete_job('__mine_interval', persist=True)
# add master_alive job if enabled
if (self.opts['transport'] != 'tcp' and
self.opts['master_alive_interval'] > 0 and
self.connected):
self.schedule.add_job({
'__master_alive_{0}'.format(self.opts['master']):
{
'function': 'status.master',
'seconds': self.opts['master_alive_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master'],
'connected': True}
}
}, persist=True)
if self.opts['master_failback'] and \
'master_list' in self.opts and \
self.opts['master'] != self.opts['master_list'][0]:
self.schedule.add_job({
'__master_failback':
{
'function': 'status.ping_master',
'seconds': self.opts['master_failback_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master_list'][0]}
}
}, persist=True)
else:
self.schedule.delete_job('__master_failback', persist=True)
else:
self.schedule.delete_job('__master_alive_{0}'.format(self.opts['master']), persist=True)
self.schedule.delete_job('__master_failback', persist=True)
self.grains_cache = self.opts['grains']
self.ready = True
def _return_retry_timer(self):
'''
Based on the minion configuration, either return a randomized timer or
just return the value of the return_retry_timer.
'''
msg = 'Minion return retry timer set to {0} seconds'
if self.opts.get('return_retry_timer_max'):
try:
random_retry = randint(self.opts['return_retry_timer'], self.opts['return_retry_timer_max'])
log.debug(msg.format(random_retry) + ' (randomized)')
return random_retry
except ValueError:
# Catch wiseguys using negative integers here
log.error(
                    'Invalid value (return_retry_timer: {0} or return_retry_timer_max: {1}); '
                    'both must be positive integers'.format(
self.opts['return_retry_timer'],
self.opts['return_retry_timer_max'],
)
)
log.debug(msg.format(DEFAULT_MINION_OPTS['return_retry_timer']))
return DEFAULT_MINION_OPTS['return_retry_timer']
else:
log.debug(msg.format(self.opts.get('return_retry_timer')))
return self.opts.get('return_retry_timer')
def _prep_mod_opts(self):
'''
Returns a copy of the opts with key bits stripped out
'''
mod_opts = {}
for key, val in six.iteritems(self.opts):
if key == 'logger':
continue
mod_opts[key] = val
return mod_opts
def _load_modules(self, force_refresh=False, notify=False, grains=None):
'''
Return the functions and the returners loaded up from the loader
module
'''
# if this is a *nix system AND modules_max_memory is set, lets enforce
# a memory limit on module imports
# this feature ONLY works on *nix like OSs (resource module doesn't work on windows)
modules_max_memory = False
if self.opts.get('modules_max_memory', -1) > 0 and HAS_PSUTIL and HAS_RESOURCE:
log.debug('modules_max_memory set, enforcing a maximum of {0}'.format(self.opts['modules_max_memory']))
modules_max_memory = True
old_mem_limit = resource.getrlimit(resource.RLIMIT_AS)
rss, vms = psutil.Process(os.getpid()).memory_info()
mem_limit = rss + vms + self.opts['modules_max_memory']
resource.setrlimit(resource.RLIMIT_AS, (mem_limit, mem_limit))
elif self.opts.get('modules_max_memory', -1) > 0:
if not HAS_PSUTIL:
log.error('Unable to enforce modules_max_memory because psutil is missing')
if not HAS_RESOURCE:
log.error('Unable to enforce modules_max_memory because resource is missing')
# This might be a proxy minion
if hasattr(self, 'proxy'):
proxy = self.proxy
else:
proxy = None
if grains is None:
self.opts['grains'] = salt.loader.grains(self.opts, force_refresh, proxy=proxy)
self.utils = salt.loader.utils(self.opts)
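        # In multimaster mode the loader is handed its own deep copy of opts
        # rather than the shared dict.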
if self.opts.get('multimaster', False):
s_opts = copy.deepcopy(self.opts)
functions = salt.loader.minion_mods(s_opts, utils=self.utils, proxy=proxy,
loaded_base_name=self.loaded_base_name, notify=notify)
else:
functions = salt.loader.minion_mods(self.opts, utils=self.utils, notify=notify, proxy=proxy)
returners = salt.loader.returners(self.opts, functions)
errors = {}
if '_errors' in functions:
errors = functions['_errors']
functions.pop('_errors')
# we're done, reset the limits!
if modules_max_memory is True:
resource.setrlimit(resource.RLIMIT_AS, old_mem_limit)
executors = salt.loader.executors(self.opts, functions)
return functions, returners, errors, executors
def _send_req_sync(self, load, timeout):
channel = salt.transport.Channel.factory(self.opts)
return channel.send(load, timeout=timeout)
@tornado.gen.coroutine
def _send_req_async(self, load, timeout):
channel = salt.transport.client.AsyncReqChannel.factory(self.opts)
ret = yield channel.send(load, timeout=timeout)
raise tornado.gen.Return(ret)
def _fire_master(self, data=None, tag=None, events=None, pretag=None, timeout=60, sync=True):
'''
        Fire an event on the master, or drop the message if unable to send.
'''
load = {'id': self.opts['id'],
'cmd': '_minion_event',
'pretag': pretag,
'tok': self.tok}
if events:
load['events'] = events
elif data and tag:
load['data'] = data
load['tag'] = tag
elif not data and tag:
load['data'] = {}
load['tag'] = tag
else:
return
def timeout_handler(*_):
log.info('fire_master failed: master could not be contacted. Request timed out.')
return True
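        # Send the event either synchronously (blocking, with explicit error
        # handling) or asynchronously on the IO loop, where exceptions such as
        # request timeouts are routed to timeout_handler above.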
if sync:
try:
self._send_req_sync(load, timeout)
except salt.exceptions.SaltReqTimeoutError:
log.info('fire_master failed: master could not be contacted. Request timed out.')
except Exception:
log.info('fire_master failed: {0}'.format(traceback.format_exc()))
return False
else:
with tornado.stack_context.ExceptionStackContext(timeout_handler):
self._send_req_async(load, timeout, callback=lambda f: None) # pylint: disable=unexpected-keyword-arg
return True
def _handle_decoded_payload(self, data):
'''
Override this method if you wish to handle the decoded data
differently.
'''
if 'user' in data:
log.info(
'User {0[user]} Executing command {0[fun]} with jid '
'{0[jid]}'.format(data)
)
else:
log.info(
'Executing command {0[fun]} with jid {0[jid]}'.format(data)
)
log.debug('Command details {0}'.format(data))
if isinstance(data['fun'], six.string_types):
if data['fun'] == 'sys.reload_modules':
self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
self.schedule.functions = self.functions
self.schedule.returners = self.returners
        # We stash an instance reference to allow for the socket
# communication in Windows. You can't pickle functions, and thus
# python needs to be able to reconstruct the reference on the other
# side.
instance = self
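        # Run the job either in a separate process (the default) or in a
        # thread, depending on the 'multiprocessing' option.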
multiprocessing_enabled = self.opts.get('multiprocessing', True)
if multiprocessing_enabled:
if sys.platform.startswith('win'):
# let python reconstruct the minion on the other side if we're
# running on windows
instance = None
with default_signals(signal.SIGINT, signal.SIGTERM):
process = SignalHandlingMultiprocessingProcess(
target=self._target, args=(instance, self.opts, data, self.connected)
)
else:
process = threading.Thread(
target=self._target,
args=(instance, self.opts, data, self.connected),
name=data['jid']
)
if multiprocessing_enabled:
with default_signals(signal.SIGINT, signal.SIGTERM):
# Reset current signals before starting the process in
# order not to inherit the current signal handlers
process.start()
else:
process.start()
# TODO: remove the windows specific check?
if multiprocessing_enabled and not salt.utils.is_windows():
# we only want to join() immediately if we are daemonizing a process
process.join()
else:
self.win_proc.append(process)
def ctx(self):
'''Return a single context manager for the minion's data
'''
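        # contextlib.nested only exists on Python 2; on Python 3 an ExitStack
        # is used instead.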
if six.PY2:
return contextlib.nested(
self.functions.context_dict.clone(),
self.returners.context_dict.clone(),
self.executors.context_dict.clone(),
)
else:
exitstack = contextlib.ExitStack()
exitstack.push(self.functions.context_dict.clone())
exitstack.push(self.returners.context_dict.clone())
exitstack.push(self.executors.context_dict.clone())
return exitstack
@classmethod
def _target(cls, minion_instance, opts, data, connected):
if not minion_instance:
minion_instance = cls(opts)
minion_instance.connected = connected
if not hasattr(minion_instance, 'functions'):
functions, returners, function_errors, executors = (
minion_instance._load_modules(grains=opts['grains'])
)
minion_instance.functions = functions
minion_instance.returners = returners
minion_instance.function_errors = function_errors
minion_instance.executors = executors
if not hasattr(minion_instance, 'serial'):
minion_instance.serial = salt.payload.Serial(opts)
if not hasattr(minion_instance, 'proc_dir'):
uid = salt.utils.get_uid(user=opts.get('user', None))
minion_instance.proc_dir = (
get_proc_dir(opts['cachedir'], uid=uid)
)
with tornado.stack_context.StackContext(minion_instance.ctx):
if isinstance(data['fun'], tuple) or isinstance(data['fun'], list):
Minion._thread_multi_return(minion_instance, opts, data)
else:
Minion._thread_return(minion_instance, opts, data)
@classmethod
def _thread_return(cls, minion_instance, opts, data):
'''
        This method should be used as a threading target; it starts the
        actual minion-side execution.
'''
# this seems awkward at first, but it's a workaround for Windows
# multiprocessing communication.
if sys.platform.startswith('win') and \
opts['multiprocessing'] and \
not salt.log.setup.is_logging_configured():
# We have to re-init the logging system for Windows
salt.log.setup.setup_console_logger(log_level=opts.get('log_level', 'info'))
if opts.get('log_file'):
salt.log.setup.setup_logfile_logger(opts['log_file'], opts.get('log_level_logfile', 'info'))
fn_ = os.path.join(minion_instance.proc_dir, data['jid'])
if opts['multiprocessing'] and not salt.utils.is_windows():
# Shutdown the multiprocessing before daemonizing
salt.log.setup.shutdown_multiprocessing_logging()
salt.utils.daemonize_if(opts)
# Reconfigure multiprocessing logging after daemonizing
salt.log.setup.setup_multiprocessing_logging()
salt.utils.appendproctitle('{0}._thread_return {1}'.format(cls.__name__, data['jid']))
sdata = {'pid': os.getpid()}
sdata.update(data)
log.info('Starting a new job with PID {0}'.format(sdata['pid']))
with salt.utils.fopen(fn_, 'w+b') as fp_:
fp_.write(minion_instance.serial.dumps(sdata))
ret = {'success': False}
function_name = data['fun']
if function_name in minion_instance.functions:
try:
if minion_instance.connected and minion_instance.opts['pillar'].get('minion_blackout', False):
# this minion is blacked out. Only allow saltutil.refresh_pillar
if function_name != 'saltutil.refresh_pillar' and \
function_name not in minion_instance.opts['pillar'].get('minion_blackout_whitelist', []):
raise SaltInvocationError('Minion in blackout mode. Set \'minion_blackout\' '
'to False in pillar to resume operations. Only '
'saltutil.refresh_pillar allowed in blackout mode.')
func = minion_instance.functions[function_name]
args, kwargs = load_args_and_kwargs(
func,
data['arg'],
data)
minion_instance.functions.pack['__context__']['retcode'] = 0
executors = data.get('module_executors') or opts.get('module_executors', ['direct_call.get'])
if isinstance(executors, six.string_types):
executors = [executors]
elif not isinstance(executors, list) or not executors:
raise SaltInvocationError("Wrong executors specification: {0}. String or non-empty list expected".
format(executors))
if opts.get('sudo_user', '') and executors[-1] != 'sudo.get':
if executors[-1] in FUNCTION_EXECUTORS:
executors[-1] = 'sudo.get' # replace
else:
executors.append('sudo.get') # append
log.trace('Executors list {0}'.format(executors)) # pylint: disable=no-member
# Get executors
def get_executor(name):
executor_class = minion_instance.executors.get(name)
if executor_class is None:
raise SaltInvocationError("Executor '{0}' is not available".format(name))
return executor_class
# Get the last one that is function executor
executor = get_executor(executors.pop())(opts, data, func, args, kwargs)
# Instantiate others from bottom to the top
for executor_name in reversed(executors):
executor = get_executor(executor_name)(opts, data, executor)
return_data = executor.execute()
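                # Generator returns are streamed: each chunk is fired to the
                # master as a 'prog' job event and aggregated into the final
                # return value.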
if isinstance(return_data, types.GeneratorType):
ind = 0
iret = {}
for single in return_data:
if isinstance(single, dict) and isinstance(iret, dict):
iret.update(single)
else:
if not iret:
iret = []
iret.append(single)
tag = tagify([data['jid'], 'prog', opts['id'], str(ind)], 'job')
event_data = {'return': single}
minion_instance._fire_master(event_data, tag)
ind += 1
ret['return'] = iret
else:
ret['return'] = return_data
ret['retcode'] = minion_instance.functions.pack['__context__'].get(
'retcode',
0
)
ret['success'] = True
except CommandNotFoundError as exc:
msg = 'Command required for \'{0}\' not found'.format(
function_name
)
log.debug(msg, exc_info=True)
ret['return'] = '{0}: {1}'.format(msg, exc)
ret['out'] = 'nested'
except CommandExecutionError as exc:
log.error(
'A command in \'{0}\' had a problem: {1}'.format(
function_name,
exc
),
exc_info_on_loglevel=logging.DEBUG
)
ret['return'] = 'ERROR: {0}'.format(exc)
ret['out'] = 'nested'
except SaltInvocationError as exc:
log.error(
'Problem executing \'{0}\': {1}'.format(
function_name,
exc
),
exc_info_on_loglevel=logging.DEBUG
)
ret['return'] = 'ERROR executing \'{0}\': {1}'.format(
function_name, exc
)
ret['out'] = 'nested'
except TypeError as exc:
msg = 'Passed invalid arguments to {0}: {1}\n{2}'.format(function_name, exc, func.__doc__, )
log.warning(msg, exc_info_on_loglevel=logging.DEBUG)
ret['return'] = msg
ret['out'] = 'nested'
except Exception:
msg = 'The minion function caused an exception'
log.warning(msg, exc_info_on_loglevel=logging.DEBUG)
salt.utils.error.fire_exception(salt.exceptions.MinionError(msg), opts, job=data)
ret['return'] = '{0}: {1}'.format(msg, traceback.format_exc())
ret['out'] = 'nested'
else:
ret['return'] = minion_instance.functions.missing_fun_string(function_name)
mod_name = function_name.split('.')[0]
if mod_name in minion_instance.function_errors:
ret['return'] += ' Possible reasons: \'{0}\''.format(
minion_instance.function_errors[mod_name]
)
ret['success'] = False
ret['retcode'] = 254
ret['out'] = 'nested'
ret['jid'] = data['jid']
ret['fun'] = data['fun']
ret['fun_args'] = data['arg']
if 'master_id' in data:
ret['master_id'] = data['master_id']
if 'metadata' in data:
if isinstance(data['metadata'], dict):
ret['metadata'] = data['metadata']
else:
log.warning('The metadata parameter must be a dictionary. Ignoring.')
if minion_instance.connected:
minion_instance._return_pub(
ret,
timeout=minion_instance._return_retry_timer()
)
# TODO: make a list? Seems odd to split it this late :/
if data['ret'] and isinstance(data['ret'], six.string_types):
if 'ret_config' in data:
ret['ret_config'] = data['ret_config']
if 'ret_kwargs' in data:
ret['ret_kwargs'] = data['ret_kwargs']
ret['id'] = opts['id']
for returner in set(data['ret'].split(',')):
try:
minion_instance.returners['{0}.returner'.format(
returner
)](ret)
except Exception as exc:
log.error(
'The return failed for job {0} {1}'.format(
data['jid'],
exc
)
)
log.error(traceback.format_exc())
@classmethod
def _thread_multi_return(cls, minion_instance, opts, data):
'''
        This method should be used as a threading target; it starts the
        actual minion-side execution.
'''
salt.utils.appendproctitle('{0}._thread_multi_return {1}'.format(cls.__name__, data['jid']))
# this seems awkward at first, but it's a workaround for Windows
# multiprocessing communication.
if sys.platform.startswith('win') and \
opts['multiprocessing'] and \
not salt.log.is_logging_configured():
# We have to re-init the logging system for Windows
salt.log.setup_console_logger(log_level=opts.get('log_level', 'info'))
if opts.get('log_file'):
salt.log.setup_logfile_logger(opts['log_file'], opts.get('log_level_logfile', 'info'))
ret = {
'return': {},
'success': {},
}
for ind in range(0, len(data['fun'])):
ret['success'][data['fun'][ind]] = False
try:
if minion_instance.connected and minion_instance.opts['pillar'].get('minion_blackout', False):
# this minion is blacked out. Only allow saltutil.refresh_pillar
if data['fun'][ind] != 'saltutil.refresh_pillar' and \
data['fun'][ind] not in minion_instance.opts['pillar'].get('minion_blackout_whitelist', []):
raise SaltInvocationError('Minion in blackout mode. Set \'minion_blackout\' '
'to False in pillar to resume operations. Only '
'saltutil.refresh_pillar allowed in blackout mode.')
func = minion_instance.functions[data['fun'][ind]]
args, kwargs = load_args_and_kwargs(
func,
data['arg'][ind],
data)
ret['return'][data['fun'][ind]] = func(*args, **kwargs)
ret['success'][data['fun'][ind]] = True
except Exception as exc:
trb = traceback.format_exc()
log.warning(
'The minion function caused an exception: {0}'.format(
exc
)
)
ret['return'][data['fun'][ind]] = trb
ret['jid'] = data['jid']
ret['fun'] = data['fun']
ret['fun_args'] = data['arg']
if 'metadata' in data:
ret['metadata'] = data['metadata']
if minion_instance.connected:
minion_instance._return_pub(
ret,
timeout=minion_instance._return_retry_timer()
)
if data['ret']:
if 'ret_config' in data:
ret['ret_config'] = data['ret_config']
if 'ret_kwargs' in data:
ret['ret_kwargs'] = data['ret_kwargs']
for returner in set(data['ret'].split(',')):
ret['id'] = opts['id']
try:
minion_instance.returners['{0}.returner'.format(
returner
)](ret)
except Exception as exc:
log.error(
'The return failed for job {0} {1}'.format(
data['jid'],
exc
)
)
def _return_pub(self, ret, ret_cmd='_return', timeout=60, sync=True):
'''
Return the data from the executed command to the master server
'''
jid = ret.get('jid', ret.get('__jid__'))
fun = ret.get('fun', ret.get('__fun__'))
if self.opts['multiprocessing']:
fn_ = os.path.join(self.proc_dir, jid)
if os.path.isfile(fn_):
try:
os.remove(fn_)
except (OSError, IOError):
# The file is gone already
pass
log.info('Returning information for job: {0}'.format(jid))
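        # Syndic returns keep the original job metadata and nest the minion
        # returns under 'return'; regular returns just flatten the ret dict
        # into the load.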
if ret_cmd == '_syndic_return':
load = {'cmd': ret_cmd,
'id': self.opts['id'],
'jid': jid,
'fun': fun,
'arg': ret.get('arg'),
'tgt': ret.get('tgt'),
'tgt_type': ret.get('tgt_type'),
'load': ret.get('__load__')}
if '__master_id__' in ret:
load['master_id'] = ret['__master_id__']
load['return'] = {}
for key, value in six.iteritems(ret):
if key.startswith('__'):
continue
load['return'][key] = value
else:
load = {'cmd': ret_cmd,
'id': self.opts['id']}
for key, value in six.iteritems(ret):
load[key] = value
if 'out' in ret:
if isinstance(ret['out'], six.string_types):
load['out'] = ret['out']
else:
log.error('Invalid outputter {0}. This is likely a bug.'
.format(ret['out']))
else:
try:
oput = self.functions[fun].__outputter__
except (KeyError, AttributeError, TypeError):
pass
else:
if isinstance(oput, six.string_types):
load['out'] = oput
if self.opts['cache_jobs']:
# Local job cache has been enabled
salt.utils.minion.cache_jobs(self.opts, load['jid'], ret)
if not self.opts['pub_ret']:
return ''
def timeout_handler(*_):
msg = ('The minion failed to return the job information for job '
'{0}. This is often due to the master being shut down or '
'overloaded. If the master is running consider increasing '
'the worker_threads value.').format(jid)
log.warning(msg)
return True
if sync:
try:
ret_val = self._send_req_sync(load, timeout=timeout)
except SaltReqTimeoutError:
timeout_handler()
return ''
else:
with tornado.stack_context.ExceptionStackContext(timeout_handler):
ret_val = self._send_req_async(load, timeout=timeout, callback=lambda f: None) # pylint: disable=unexpected-keyword-arg
log.trace('ret_val = {0}'.format(ret_val)) # pylint: disable=no-member
return ret_val
def _state_run(self):
'''
Execute a state run based on information set in the minion config file
'''
if self.opts['startup_states']:
if self.opts.get('master_type', 'str') == 'disable' and \
self.opts.get('file_client', 'remote') == 'remote':
log.warning('Cannot run startup_states when \'master_type\' is '
'set to \'disable\' and \'file_client\' is set to '
'\'remote\'. Skipping.')
else:
data = {'jid': 'req', 'ret': self.opts.get('ext_job_cache', '')}
if self.opts['startup_states'] == 'sls':
data['fun'] = 'state.sls'
data['arg'] = [self.opts['sls_list']]
elif self.opts['startup_states'] == 'top':
data['fun'] = 'state.top'
data['arg'] = [self.opts['top_file']]
else:
data['fun'] = 'state.highstate'
data['arg'] = []
self._handle_decoded_payload(data)
def _refresh_grains_watcher(self, refresh_interval_in_minutes):
'''
Create a loop that will fire a pillar refresh to inform a master about a change in the grains of this minion
:param refresh_interval_in_minutes:
:return: None
'''
if '__update_grains' not in self.opts.get('schedule', {}):
if 'schedule' not in self.opts:
self.opts['schedule'] = {}
self.opts['schedule'].update({
'__update_grains':
{
'function': 'event.fire',
'args': [{}, 'grains_refresh'],
'minutes': refresh_interval_in_minutes
}
})
def _fire_master_minion_start(self):
# Send an event to the master that the minion is live
self._fire_master(
'Minion {0} started at {1}'.format(
self.opts['id'],
time.asctime()
),
'minion_start'
)
# dup name spaced event
self._fire_master(
'Minion {0} started at {1}'.format(
self.opts['id'],
time.asctime()
),
tagify([self.opts['id'], 'start'], 'minion'),
)
def module_refresh(self, force_refresh=False, notify=False):
'''
Refresh the functions and returners.
'''
log.debug('Refreshing modules. Notify={0}'.format(notify))
self.functions, self.returners, _, self.executors = self._load_modules(force_refresh, notify=notify)
self.schedule.functions = self.functions
self.schedule.returners = self.returners
# TODO: only allow one future in flight at a time?
@tornado.gen.coroutine
def pillar_refresh(self, force_refresh=False):
'''
Refresh the pillar
'''
log.debug('Refreshing pillar')
try:
self.opts['pillar'] = yield salt.pillar.get_async_pillar(
self.opts,
self.opts['grains'],
self.opts['id'],
self.opts['environment'],
pillarenv=self.opts.get('pillarenv'),
).compile_pillar()
except SaltClientError:
# Do not exit if a pillar refresh fails.
log.error('Pillar data could not be refreshed. '
'One or more masters may be down!')
self.module_refresh(force_refresh)
def manage_schedule(self, tag, data):
'''
        Manage the minion's schedule: add, modify, delete, enable, disable,
        run or save jobs based on the event data.
'''
func = data.get('func', None)
name = data.get('name', None)
schedule = data.get('schedule', None)
where = data.get('where', None)
persist = data.get('persist', None)
if func == 'delete':
self.schedule.delete_job(name, persist)
elif func == 'add':
self.schedule.add_job(schedule, persist)
elif func == 'modify':
self.schedule.modify_job(name, schedule, persist, where)
elif func == 'enable':
self.schedule.enable_schedule()
elif func == 'disable':
self.schedule.disable_schedule()
elif func == 'enable_job':
self.schedule.enable_job(name, persist, where)
elif func == 'run_job':
self.schedule.run_job(name)
elif func == 'disable_job':
self.schedule.disable_job(name, persist, where)
elif func == 'reload':
self.schedule.reload(schedule)
elif func == 'list':
self.schedule.list(where)
elif func == 'save_schedule':
self.schedule.save_schedule()
def manage_beacons(self, tag, data):
'''
Manage Beacons
'''
func = data.get('func', None)
name = data.get('name', None)
beacon_data = data.get('beacon_data', None)
if func == 'add':
self.beacons.add_beacon(name, beacon_data)
elif func == 'modify':
self.beacons.modify_beacon(name, beacon_data)
elif func == 'delete':
self.beacons.delete_beacon(name)
elif func == 'enable':
self.beacons.enable_beacons()
elif func == 'disable':
self.beacons.disable_beacons()
elif func == 'enable_beacon':
self.beacons.enable_beacon(name)
elif func == 'disable_beacon':
self.beacons.disable_beacon(name)
elif func == 'list':
self.beacons.list_beacons()
def environ_setenv(self, tag, data):
'''
Set the salt-minion main process environment according to
the data contained in the minion event data
'''
environ = data.get('environ', None)
if environ is None:
return False
false_unsets = data.get('false_unsets', False)
clear_all = data.get('clear_all', False)
import salt.modules.environ as mod_environ
return mod_environ.setenv(environ, false_unsets, clear_all)
def _pre_tune(self):
'''
Set the minion running flag and issue the appropriate warnings if
the minion cannot be started or is already running
'''
if self._running is None:
self._running = True
elif self._running is False:
log.error(
'This {0} was scheduled to stop. Not running '
'{0}.tune_in()'.format(self.__class__.__name__)
)
return
elif self._running is True:
log.error(
'This {0} is already running. Not running '
'{0}.tune_in()'.format(self.__class__.__name__)
)
return
try:
log.info(
'{0} is starting as user \'{1}\''.format(
self.__class__.__name__,
salt.utils.get_user()
)
)
except Exception as err:
# Only windows is allowed to fail here. See #3189. Log as debug in
# that case. Else, error.
log.log(
salt.utils.is_windows() and logging.DEBUG or logging.ERROR,
'Failed to get the user who is starting {0}'.format(
self.__class__.__name__
),
exc_info=err
)
def _mine_send(self, tag, data):
'''
Send mine data to the master
'''
channel = salt.transport.Channel.factory(self.opts)
data['tok'] = self.tok
try:
ret = channel.send(data)
return ret
except SaltReqTimeoutError:
log.warning('Unable to send mine data to master.')
return None
@tornado.gen.coroutine
def handle_event(self, package):
'''
Handle an event from the epull_sock (all local minion events)
'''
if not self.ready:
raise tornado.gen.Return()
tag, data = salt.utils.event.SaltEvent.unpack(package)
log.debug('Minion of "{0}" is handling event tag \'{1}\''.format(self.opts['master'], tag))
if tag.startswith('module_refresh'):
self.module_refresh(notify=data.get('notify', False))
elif tag.startswith('pillar_refresh'):
yield self.pillar_refresh()
elif tag.startswith('manage_schedule'):
self.manage_schedule(tag, data)
elif tag.startswith('manage_beacons'):
self.manage_beacons(tag, data)
elif tag.startswith('grains_refresh'):
if self.grains_cache != self.opts['grains']:
self.pillar_refresh(force_refresh=True)
self.grains_cache = self.opts['grains']
elif tag.startswith('environ_setenv'):
self.environ_setenv(tag, data)
elif tag.startswith('_minion_mine'):
self._mine_send(tag, data)
elif tag.startswith('fire_master'):
log.debug('Forwarding master event tag={tag}'.format(tag=data['tag']))
self._fire_master(data['data'], data['tag'], data['events'], data['pretag'])
elif tag.startswith('__master_disconnected') or tag.startswith('__master_failback'):
# if the master disconnect event is for a different master, raise an exception
if tag.startswith('__master_disconnected') and data['master'] != self.opts['master']:
# not mine master, ignore
return
if tag.startswith('__master_failback'):
# if the master failback event is not for the top master, raise an exception
if data['master'] != self.opts['master_list'][0]:
raise SaltException('Bad master \'{0}\' when mine failback is \'{1}\''.format(
data['master'], self.opts['master']))
# if the master failback event is for the current master, raise an exception
                elif data['master'] == self.opts['master']:
raise SaltException('Already connected to \'{0}\''.format(data['master']))
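            # The event applies to our current master (or is a valid failback
            # request): drop the connection state and, in failover mode, tear
            # down the pub channel and go look for a new master.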
if self.connected:
# we are not connected anymore
self.connected = False
# modify the scheduled job to fire only on reconnect
if self.opts['transport'] != 'tcp':
schedule = {
'function': 'status.master',
'seconds': self.opts['master_alive_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master'],
'connected': False}
}
self.schedule.modify_job(name='__master_alive_{0}'.format(self.opts['master']),
schedule=schedule)
log.info('Connection to master {0} lost'.format(self.opts['master']))
if self.opts['master_type'] == 'failover':
log.info('Trying to tune in to next master from master-list')
if hasattr(self, 'pub_channel'):
self.pub_channel.on_recv(None)
if hasattr(self.pub_channel, 'auth'):
self.pub_channel.auth.invalidate()
if hasattr(self.pub_channel, 'close'):
self.pub_channel.close()
del self.pub_channel
# if eval_master finds a new master for us, self.connected
# will be True again on successful master authentication
try:
master, self.pub_channel = yield self.eval_master(
opts=self.opts,
failed=True)
except SaltClientError:
pass
if self.connected:
self.opts['master'] = master
# re-init the subsystems to work with the new master
log.info('Re-initialising subsystems for new '
'master {0}'.format(self.opts['master']))
self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
self.pub_channel.on_recv(self._handle_payload)
self._fire_master_minion_start()
log.info('Minion is ready to receive requests!')
# update scheduled job to run with the new master addr
if self.opts['transport'] != 'tcp':
schedule = {
'function': 'status.master',
'seconds': self.opts['master_alive_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master'],
'connected': True}
}
self.schedule.modify_job(name='__master_alive_{0}'.format(self.opts['master']),
schedule=schedule)
if self.opts['master_failback'] and 'master_list' in self.opts:
if self.opts['master'] != self.opts['master_list'][0]:
schedule = {
'function': 'status.ping_master',
'seconds': self.opts['master_failback_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master_list'][0]}
}
self.schedule.modify_job(name='__master_failback',
schedule=schedule)
else:
self.schedule.delete_job(name='__master_failback', persist=True)
else:
self.restart = True
self.io_loop.stop()
elif tag.startswith('__master_connected'):
# handle this event only once. otherwise it will pollute the log
if not self.connected:
log.info('Connection to master {0} re-established'.format(self.opts['master']))
self.connected = True
# modify the __master_alive job to only fire,
# if the connection is lost again
if self.opts['transport'] != 'tcp':
schedule = {
'function': 'status.master',
'seconds': self.opts['master_alive_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master'],
'connected': True}
}
self.schedule.modify_job(name='__master_alive_{0}'.format(self.opts['master']),
schedule=schedule)
elif tag.startswith('__schedule_return'):
self._return_pub(data, ret_cmd='_return', sync=False)
elif tag.startswith('_salt_error'):
if self.connected:
log.debug('Forwarding salt error event tag={tag}'.format(tag=tag))
self._fire_master(data, tag)
def _fallback_cleanups(self):
'''
Fallback cleanup routines, attempting to fix leaked processes, threads, etc.
'''
# Add an extra fallback in case a forked process leaks through
multiprocessing.active_children()
# Cleanup Windows threads
if not salt.utils.is_windows():
return
for thread in self.win_proc:
if not thread.is_alive():
thread.join()
try:
self.win_proc.remove(thread)
del thread
except (ValueError, NameError):
pass
# Main Minion Tune In
def tune_in(self, start=True):
'''
Lock onto the publisher. This is the main event loop for the minion
        :rtype: None
'''
self._pre_tune()
log.debug('Minion \'{0}\' trying to tune in'.format(self.opts['id']))
if start:
self.sync_connect_master()
if self.connected:
self._fire_master_minion_start()
log.info('Minion is ready to receive requests!')
# Make sure to gracefully handle SIGUSR1
enable_sigusr1_handler()
# Make sure to gracefully handle CTRL_LOGOFF_EVENT
salt.utils.enable_ctrl_logoff_handler()
# On first startup execute a state run if configured to do so
self._state_run()
loop_interval = self.opts['loop_interval']
try:
if self.opts['grains_refresh_every']: # If exists and is not zero. In minutes, not seconds!
if self.opts['grains_refresh_every'] > 1:
log.debug(
'Enabling the grains refresher. Will run every {0} minutes.'.format(
self.opts['grains_refresh_every'])
)
else: # Clean up minute vs. minutes in log message
log.debug(
'Enabling the grains refresher. Will run every {0} minute.'.format(
self.opts['grains_refresh_every'])
)
self._refresh_grains_watcher(
abs(self.opts['grains_refresh_every'])
)
except Exception as exc:
log.error(
'Exception occurred in attempt to initialize grain refresh routine during minion tune-in: {0}'.format(
exc)
)
self.periodic_callbacks = {}
# schedule the stuff that runs every interval
ping_interval = self.opts.get('ping_interval', 0) * 60
if ping_interval > 0 and self.connected:
def ping_master():
try:
if not self._fire_master('ping', 'minion_ping'):
if not self.opts.get('auth_safemode', True):
log.error('** Master Ping failed. Attempting to restart minion**')
delay = self.opts.get('random_reauth_delay', 5)
log.info('delaying random_reauth_delay {0}s'.format(delay))
# regular sys.exit raises an exception -- which isn't sufficient in a thread
os._exit(salt.defaults.exitcodes.SALT_KEEPALIVE)
except Exception:
                    log.warning('Attempt to ping master failed.', exc_info_on_loglevel=logging.DEBUG)
self.periodic_callbacks['ping'] = tornado.ioloop.PeriodicCallback(ping_master, ping_interval * 1000, io_loop=self.io_loop)
self.periodic_callbacks['cleanup'] = tornado.ioloop.PeriodicCallback(self._fallback_cleanups, loop_interval * 1000, io_loop=self.io_loop)
def handle_beacons():
# Process Beacons
beacons = None
try:
beacons = self.process_beacons(self.functions)
except Exception:
log.critical('The beacon errored: ', exc_info=True)
if beacons and self.connected:
self._fire_master(events=beacons)
self.periodic_callbacks['beacons'] = tornado.ioloop.PeriodicCallback(handle_beacons, loop_interval * 1000, io_loop=self.io_loop)
# TODO: actually listen to the return and change period
def handle_schedule():
self.process_schedule(self, loop_interval)
if hasattr(self, 'schedule'):
self.periodic_callbacks['schedule'] = tornado.ioloop.PeriodicCallback(handle_schedule, 1000, io_loop=self.io_loop)
# start all the other callbacks
for periodic_cb in six.itervalues(self.periodic_callbacks):
periodic_cb.start()
# add handler to subscriber
if hasattr(self, 'pub_channel') and self.pub_channel is not None:
self.pub_channel.on_recv(self._handle_payload)
elif self.opts.get('master_type') != 'disable':
log.error('No connection to master found. Scheduled jobs will not run.')
if start:
try:
self.io_loop.start()
if self.restart:
self.destroy()
except (KeyboardInterrupt, RuntimeError): # A RuntimeError can be re-raised by Tornado on shutdown
self.destroy()
def _handle_payload(self, payload):
if payload is not None and payload['enc'] == 'aes':
if self._target_load(payload['load']):
self._handle_decoded_payload(payload['load'])
elif self.opts['zmq_filtering']:
                # In the filtering enabled case, we'd like to know when the minion sees something it shouldn't
log.trace('Broadcast message received not for this minion, Load: {0}'.format(payload['load']))
# If it's not AES, and thus has not been verified, we do nothing.
# In the future, we could add support for some clearfuncs, but
# the minion currently has no need.
def _target_load(self, load):
# Verify that the publication is valid
if 'tgt' not in load or 'jid' not in load or 'fun' not in load \
or 'arg' not in load:
return False
# Verify that the publication applies to this minion
# It's important to note that the master does some pre-processing
# to determine which minions to send a request to. So for example,
# a "salt -G 'grain_key:grain_val' test.ping" will invoke some
# pre-processing on the master and this minion should not see the
# publication if the master does not determine that it should.
if 'tgt_type' in load:
match_func = getattr(self.matcher,
'{0}_match'.format(load['tgt_type']), None)
if match_func is None:
return False
if load['tgt_type'] in ('grain', 'grain_pcre', 'pillar'):
delimiter = load.get('delimiter', DEFAULT_TARGET_DELIM)
if not match_func(load['tgt'], delimiter=delimiter):
return False
elif not match_func(load['tgt']):
return False
else:
if not self.matcher.glob_match(load['tgt']):
return False
return True
def destroy(self):
'''
Tear down the minion
'''
self._running = False
if hasattr(self, 'schedule'):
del self.schedule
if hasattr(self, 'pub_channel') and self.pub_channel is not None:
self.pub_channel.on_recv(None)
if hasattr(self.pub_channel, 'close'):
self.pub_channel.close()
del self.pub_channel
if hasattr(self, 'periodic_callbacks'):
for cb in six.itervalues(self.periodic_callbacks):
cb.stop()
def __del__(self):
self.destroy()
class Syndic(Minion):
'''
    Make a Syndic minion. This minion will use the minion keys on the
    master to authenticate with a higher-level master.
'''
def __init__(self, opts, **kwargs):
self._syndic_interface = opts.get('interface')
self._syndic = True
# force auth_safemode True because Syndic don't support autorestart
opts['auth_safemode'] = True
opts['loop_interval'] = 1
super(Syndic, self).__init__(opts, **kwargs)
self.mminion = salt.minion.MasterMinion(opts)
self.jid_forward_cache = set()
self.jids = {}
self.raw_events = []
self.pub_future = None
def _handle_decoded_payload(self, data):
'''
Override this method if you wish to handle the decoded data
differently.
'''
# TODO: even do this??
data['to'] = int(data.get('to', self.opts['timeout'])) - 1
# Only forward the command if it didn't originate from ourselves
if data.get('master_id', 0) != self.opts.get('master_id', 1):
self.syndic_cmd(data)
def syndic_cmd(self, data):
'''
Take the now clear load and forward it on to the client cmd
'''
# Set up default tgt_type
if 'tgt_type' not in data:
data['tgt_type'] = 'glob'
kwargs = {}
# optionally add a few fields to the publish data
for field in ('master_id', # which master the job came from
'user', # which user ran the job
):
if field in data:
kwargs[field] = data[field]
def timeout_handler(*args):
log.warning('Unable to forward pub data: {0}'.format(args[1]))
return True
with tornado.stack_context.ExceptionStackContext(timeout_handler):
self.local.pub_async(data['tgt'],
data['fun'],
data['arg'],
data['tgt_type'],
data['ret'],
data['jid'],
data['to'],
io_loop=self.io_loop,
callback=lambda _: None,
**kwargs)
def fire_master_syndic_start(self):
# Send an event to the master that the minion is live
self._fire_master(
'Syndic {0} started at {1}'.format(
self.opts['id'],
time.asctime()
),
'syndic_start',
sync=False,
)
self._fire_master(
'Syndic {0} started at {1}'.format(
self.opts['id'],
time.asctime()
),
tagify([self.opts['id'], 'start'], 'syndic'),
sync=False,
)
# TODO: clean up docs
def tune_in_no_block(self):
'''
Executes the tune_in sequence but omits extra logging and the
management of the event bus assuming that these are handled outside
the tune_in sequence
'''
# Instantiate the local client
self.local = salt.client.get_local_client(
self.opts['_minion_conf_file'], io_loop=self.io_loop)
# add handler to subscriber
self.pub_channel.on_recv(self._process_cmd_socket)
def _process_cmd_socket(self, payload):
if payload is not None and payload['enc'] == 'aes':
log.trace('Handling payload')
self._handle_decoded_payload(payload['load'])
# If it's not AES, and thus has not been verified, we do nothing.
# In the future, we could add support for some clearfuncs, but
# the syndic currently has no need.
@tornado.gen.coroutine
def _return_pub_multi(self, values):
for value in values:
yield self._return_pub(value,
'_syndic_return',
timeout=self._return_retry_timer(),
sync=False)
@tornado.gen.coroutine
def reconnect(self):
if hasattr(self, 'pub_channel'):
self.pub_channel.on_recv(None)
if hasattr(self.pub_channel, 'close'):
self.pub_channel.close()
del self.pub_channel
# if eval_master finds a new master for us, self.connected
# will be True again on successful master authentication
master, self.pub_channel = yield self.eval_master(opts=self.opts)
if self.connected:
self.opts['master'] = master
self.pub_channel.on_recv(self._process_cmd_socket)
log.info('Minion is ready to receive requests!')
raise tornado.gen.Return(self)
def destroy(self):
'''
Tear down the syndic minion
'''
# We borrowed the local clients poller so give it back before
# it's destroyed. Reset the local poller reference.
super(Syndic, self).destroy()
if hasattr(self, 'local'):
del self.local
if hasattr(self, 'forward_events'):
self.forward_events.stop()
# TODO: need a way of knowing if the syndic connection is busted
class SyndicManager(MinionBase):
'''
    Make a MultiMaster syndic minion. This minion will handle relaying jobs
    and returns from all minions connected to it to the list of masters it is
    connected to.
    Modes (controlled by `syndic_mode`):
        sync: synchronize all events and publishes from higher-level masters
        cluster: only sync job publishes and returns
    Note: jobs will be returned best-effort to the requesting master. This
    also means (since we are using zmq) that if a job was fired and the master
    disconnects between the publish and the return, the return will end up in
    a zmq buffer in this Syndic headed to that original master.
    In addition, since these classes all seem to use a mix of blocking and
    non-blocking calls (with varying timeouts along the way), this daemon does
    not handle failure well; it will (under most circumstances) stall for ~15s
    trying to forward events to the down master.
'''
# time to connect to upstream master
SYNDIC_CONNECT_TIMEOUT = 5
SYNDIC_EVENT_TIMEOUT = 5
def __init__(self, opts, io_loop=None):
opts['loop_interval'] = 1
super(SyndicManager, self).__init__(opts)
self.mminion = salt.minion.MasterMinion(opts)
# sync (old behavior), cluster (only returns and publishes)
self.syndic_mode = self.opts.get('syndic_mode', 'sync')
self.syndic_failover = self.opts.get('syndic_failover', 'random')
self.auth_wait = self.opts['acceptance_wait_time']
self.max_auth_wait = self.opts['acceptance_wait_time_max']
self._has_master = threading.Event()
self.jid_forward_cache = set()
if io_loop is None:
if HAS_ZMQ:
zmq.eventloop.ioloop.install()
self.io_loop = LOOP_CLASS.current()
else:
self.io_loop = io_loop
# List of events
self.raw_events = []
# Dict of rets: {master_id: {event_tag: job_ret, ...}, ...}
self.job_rets = {}
# List of delayed job_rets which was unable to send for some reason and will be resend to
# any available master
self.delayed = []
# Active pub futures: {master_id: (future, [job_ret, ...]), ...}
self.pub_futures = {}
def _spawn_syndics(self):
'''
        Spawn all the coroutines which will sign the syndics in to their masters
'''
self._syndics = OrderedDict() # mapping of opts['master'] -> syndic
masters = self.opts['master']
if not isinstance(masters, list):
masters = [masters]
for master in masters:
s_opts = copy.copy(self.opts)
s_opts['master'] = master
self._syndics[master] = self._connect_syndic(s_opts)
@tornado.gen.coroutine
def _connect_syndic(self, opts):
'''
Create a syndic, and asynchronously connect it to a master
'''
last = 0 # never have we signed in
auth_wait = opts['acceptance_wait_time']
while True:
log.debug('Syndic attempting to connect to {0}'.format(opts['master']))
try:
syndic = Syndic(opts,
timeout=self.SYNDIC_CONNECT_TIMEOUT,
safe=False,
io_loop=self.io_loop,
)
yield syndic.connect_master()
# set up the syndic to handle publishes (specifically not event forwarding)
syndic.tune_in_no_block()
# Send an event to the master that the minion is live
syndic.fire_master_syndic_start()
log.info('Syndic successfully connected to {0}'.format(opts['master']))
break
except SaltClientError as exc:
log.error('Error while bringing up syndic for multi-syndic. Is master at {0} responding?'.format(opts['master']))
last = time.time()
if auth_wait < self.max_auth_wait:
auth_wait += self.auth_wait
yield tornado.gen.sleep(auth_wait) # TODO: log?
except KeyboardInterrupt:
raise
except: # pylint: disable=W0702
log.critical('Unexpected error while connecting to {0}'.format(opts['master']), exc_info=True)
raise tornado.gen.Return(syndic)
def _mark_master_dead(self, master):
'''
Mark a master as dead. This will start the sign-in routine
'''
# if its connected, mark it dead
if self._syndics[master].done():
syndic = self._syndics[master].result() # pylint: disable=no-member
self._syndics[master] = syndic.reconnect()
else:
log.info('Attempting to mark {0} as dead, although it is already marked dead'.format(master)) # TODO: debug?
def _call_syndic(self, func, args=(), kwargs=None, master_id=None):
'''
Wrapper to call a given func on a syndic, best effort to get the one you asked for
'''
if kwargs is None:
kwargs = {}
for master, syndic_future in self.iter_master_options(master_id):
if not syndic_future.done() or syndic_future.exception():
log.error('Unable to call {0} on {1}, that syndic is not connected'.format(func, master))
continue
try:
getattr(syndic_future.result(), func)(*args, **kwargs)
return
except SaltClientError:
log.error('Unable to call {0} on {1}, trying another...'.format(func, master))
self._mark_master_dead(master)
continue
log.critical('Unable to call {0} on any masters!'.format(func))
def _return_pub_syndic(self, values, master_id=None):
'''
        Wrapper to call '_return_pub_multi' on a syndic, best effort to get the one you asked for
'''
func = '_return_pub_multi'
for master, syndic_future in self.iter_master_options(master_id):
if not syndic_future.done() or syndic_future.exception():
log.error('Unable to call {0} on {1}, that syndic is not connected'.format(func, master))
continue
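            # pub_futures tracks the last in-flight send per master; only start
            # a new send once the previous one has finished, and re-queue its
            # payload via self.delayed if it failed.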
future, data = self.pub_futures.get(master, (None, None))
if future is not None:
if not future.done():
if master == master_id:
# Targeted master previous send not done yet, call again later
return False
else:
# Fallback master is busy, try the next one
continue
elif future.exception():
# Previous execution on this master returned an error
log.error('Unable to call {0} on {1}, trying another...'.format(func, master))
self._mark_master_dead(master)
del self.pub_futures[master]
# Add not sent data to the delayed list and try the next master
self.delayed.extend(data)
continue
future = getattr(syndic_future.result(), func)(values)
self.pub_futures[master] = (future, values)
return True
# Loop done and didn't exit: wasn't sent, try again later
return False
def iter_master_options(self, master_id=None):
'''
Iterate (in order) over your options for master
'''
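        # Yield the requested master first (when it is known), then the
        # remaining masters, shuffled when syndic_failover is 'random'.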
masters = list(self._syndics.keys())
if self.opts['syndic_failover'] == 'random':
shuffle(masters)
if master_id not in self._syndics:
master_id = masters.pop(0)
else:
masters.remove(master_id)
while True:
yield master_id, self._syndics[master_id]
if len(masters) == 0:
break
master_id = masters.pop(0)
# Syndic Tune In
def tune_in(self):
'''
Lock onto the publisher. This is the main event loop for the syndic
'''
self._spawn_syndics()
# Instantiate the local client
self.local = salt.client.get_local_client(
self.opts['_minion_conf_file'], io_loop=self.io_loop)
self.local.event.subscribe('')
log.debug('SyndicManager \'{0}\' trying to tune in'.format(self.opts['id']))
# register the event sub to the poller
self.job_rets = {}
self.raw_events = []
self.local.event.set_event_handler(self._process_event)
# forward events every syndic_event_forward_timeout
self.forward_events = tornado.ioloop.PeriodicCallback(self._forward_events,
self.opts['syndic_event_forward_timeout'] * 1000,
io_loop=self.io_loop)
self.forward_events.start()
# Make sure to gracefully handle SIGUSR1
enable_sigusr1_handler()
self.io_loop.start()
def _process_event(self, raw):
# TODO: cleanup: Move down into event class
mtag, data = self.local.event.unpack(raw, self.local.event.serial)
event = {'data': data, 'tag': mtag}
log.trace('Got event {0}'.format(event['tag'])) # pylint: disable=no-member
tag_parts = event['tag'].split('/')
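        # Job return events are tagged 'salt/job/<jid>/ret/<minion id>'; only
        # those (plus, in sync mode, other raw events) are collected for
        # forwarding upstream.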
if len(tag_parts) >= 4 and tag_parts[1] == 'job' and \
salt.utils.jid.is_jid(tag_parts[2]) and tag_parts[3] == 'ret' and \
'return' in event['data']:
if 'jid' not in event['data']:
# Not a job return
return
if self.syndic_mode == 'cluster' and event['data'].get('master_id', 0) == self.opts.get('master_id', 1):
log.debug('Return received with matching master_id, not forwarding')
return
master = event['data'].get('master_id')
jdict = self.job_rets.setdefault(master, {}).setdefault(event['tag'], {})
if not jdict:
jdict['__fun__'] = event['data'].get('fun')
jdict['__jid__'] = event['data']['jid']
jdict['__load__'] = {}
fstr = '{0}.get_load'.format(self.opts['master_job_cache'])
# Only need to forward each load once. Don't hit the disk
# for every minion return!
if event['data']['jid'] not in self.jid_forward_cache:
jdict['__load__'].update(
self.mminion.returners[fstr](event['data']['jid'])
)
self.jid_forward_cache.add(event['data']['jid'])
if len(self.jid_forward_cache) > self.opts['syndic_jid_forward_cache_hwm']:
# Pop the oldest jid from the cache
tmp = sorted(list(self.jid_forward_cache))
tmp.pop(0)
self.jid_forward_cache = set(tmp)
if master is not None:
# __'s to make sure it doesn't print out on the master cli
jdict['__master_id__'] = master
jdict[event['data']['id']] = event['data']['return']
else:
# TODO: config to forward these? If so we'll have to keep track of who
# has seen them
# if we are the top level masters-- don't forward all the minion events
if self.syndic_mode == 'sync':
# Add generic event aggregation here
if 'retcode' not in event['data']:
self.raw_events.append(event)
def _forward_events(self):
log.trace('Forwarding events') # pylint: disable=no-member
if self.raw_events:
events = self.raw_events
self.raw_events = []
self._call_syndic('_fire_master',
kwargs={'events': events,
'pretag': tagify(self.opts['id'], base='syndic'),
'timeout': self.SYNDIC_EVENT_TIMEOUT,
'sync': False,
},
)
if self.delayed:
res = self._return_pub_syndic(self.delayed)
if res:
self.delayed = []
for master in list(six.iterkeys(self.job_rets)):
values = self.job_rets[master].values()
res = self._return_pub_syndic(values, master_id=master)
if res:
del self.job_rets[master]
class Matcher(object):
'''
    Used to return the value for matching calls from the master
'''
def __init__(self, opts, functions=None):
self.opts = opts
self.functions = functions
def confirm_top(self, match, data, nodegroups=None):
'''
Takes the data passed to a top file environment and determines if the
data matches this minion
'''
matcher = 'compound'
if not data:
log.error('Received bad data when setting the match from the top '
'file')
return False
for item in data:
if isinstance(item, dict):
if 'match' in item:
matcher = item['match']
if hasattr(self, matcher + '_match'):
funcname = '{0}_match'.format(matcher)
if matcher == 'nodegroup':
return getattr(self, funcname)(match, nodegroups)
return getattr(self, funcname)(match)
else:
log.error('Attempting to match with unknown matcher: {0}'.format(
matcher
))
return False
def glob_match(self, tgt):
'''
Returns true if the passed glob matches the id
'''
if not isinstance(tgt, six.string_types):
return False
return fnmatch.fnmatch(self.opts['id'], tgt)
def pcre_match(self, tgt):
'''
Returns true if the passed pcre regex matches
'''
return bool(re.match(tgt, self.opts['id']))
def list_match(self, tgt):
'''
Determines if this host is on the list
'''
if isinstance(tgt, six.string_types):
tgt = tgt.split(',')
return bool(self.opts['id'] in tgt)
def grain_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
'''
Reads in the grains glob match
'''
log.debug('grains target: {0}'.format(tgt))
if delimiter not in tgt:
log.error('Got insufficient arguments for grains match '
'statement from master')
return False
return salt.utils.subdict_match(
self.opts['grains'], tgt, delimiter=delimiter
)
def grain_pcre_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
'''
Matches a grain based on regex
'''
log.debug('grains pcre target: {0}'.format(tgt))
if delimiter not in tgt:
log.error('Got insufficient arguments for grains pcre match '
'statement from master')
return False
return salt.utils.subdict_match(self.opts['grains'], tgt,
delimiter=delimiter, regex_match=True)
def data_match(self, tgt):
'''
Match based on the local data store on the minion
'''
if self.functions is None:
utils = salt.loader.utils(self.opts)
self.functions = salt.loader.minion_mods(self.opts, utils=utils)
comps = tgt.split(':')
if len(comps) < 2:
return False
val = self.functions['data.getval'](comps[0])
if val is None:
# The value is not defined
return False
if isinstance(val, list):
# We are matching a single component to a single list member
for member in val:
if fnmatch.fnmatch(str(member).lower(), comps[1].lower()):
return True
return False
if isinstance(val, dict):
if comps[1] in val:
return True
return False
return bool(fnmatch.fnmatch(
val,
comps[1],
))
def pillar_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
'''
Reads in the pillar glob match
'''
log.debug('pillar target: {0}'.format(tgt))
if delimiter not in tgt:
log.error('Got insufficient arguments for pillar match '
'statement from master')
return False
return salt.utils.subdict_match(
self.opts['pillar'], tgt, delimiter=delimiter
)
def pillar_pcre_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
'''
Reads in the pillar pcre match
'''
log.debug('pillar PCRE target: {0}'.format(tgt))
if delimiter not in tgt:
log.error('Got insufficient arguments for pillar PCRE match '
'statement from master')
return False
return salt.utils.subdict_match(
self.opts['pillar'], tgt, delimiter=delimiter, regex_match=True
)
def pillar_exact_match(self, tgt, delimiter=':'):
'''
Reads in the pillar match, no globbing, no PCRE
'''
log.debug('pillar target: {0}'.format(tgt))
if delimiter not in tgt:
log.error('Got insufficient arguments for pillar match '
'statement from master')
return False
return salt.utils.subdict_match(self.opts['pillar'],
tgt,
delimiter=delimiter,
exact_match=True)
def ipcidr_match(self, tgt):
'''
Matches based on IP address or CIDR notation
'''
try:
# Target is an address?
tgt = ipaddress.ip_address(tgt)
except: # pylint: disable=bare-except
try:
# Target is a network?
tgt = ipaddress.ip_network(tgt)
except: # pylint: disable=bare-except
log.error('Invalid IP/CIDR target: {0}'.format(tgt))
return []
proto = 'ipv{0}'.format(tgt.version)
grains = self.opts['grains']
if proto not in grains:
match = False
elif isinstance(tgt, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
match = str(tgt) in grains[proto]
else:
match = salt.utils.network.in_subnet(tgt, grains[proto])
return match
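    # Illustrative sketch (values are hypothetical, not from this codebase): with
    # grains == {'ipv4': ['10.0.0.5', '127.0.0.1']} the matcher behaves as follows:
    #   ipcidr_match('10.0.0.5')    -> True  (exact address present in grains['ipv4'])
    #   ipcidr_match('10.0.0.0/24') -> True  (salt.utils.network.in_subnet() check)
    #   ipcidr_match('not-an-ip')   -> []    (invalid target is logged and rejected)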
def range_match(self, tgt):
'''
Matches based on range cluster
'''
if HAS_RANGE:
range_ = seco.range.Range(self.opts['range_server'])
try:
return self.opts['grains']['fqdn'] in range_.expand(tgt)
except seco.range.RangeException as exc:
log.debug('Range exception in compound match: {0}'.format(exc))
return False
return False
def compound_match(self, tgt):
'''
Runs the compound target check
'''
if not isinstance(tgt, six.string_types) and not isinstance(tgt, (list, tuple)):
log.error('Compound target received that is neither string, list nor tuple')
return False
log.debug('compound_match: {0} ? {1}'.format(self.opts['id'], tgt))
ref = {'G': 'grain',
'P': 'grain_pcre',
'I': 'pillar',
'J': 'pillar_pcre',
'L': 'list',
'N': None, # Nodegroups should already be expanded
'S': 'ipcidr',
'E': 'pcre'}
if HAS_RANGE:
ref['R'] = 'range'
results = []
opers = ['and', 'or', 'not', '(', ')']
if isinstance(tgt, six.string_types):
words = tgt.split()
else:
words = tgt
for word in words:
target_info = salt.utils.minions.parse_target(word)
# Easy check first
if word in opers:
if results:
if results[-1] == '(' and word in ('and', 'or'):
log.error('Invalid beginning operator after "(": {0}'.format(word))
return False
if word == 'not':
if not results[-1] in ('and', 'or', '('):
results.append('and')
results.append(word)
else:
# seq start with binary oper, fail
if word not in ['(', 'not']:
log.error('Invalid beginning operator: {0}'.format(word))
return False
results.append(word)
elif target_info and target_info['engine']:
if 'N' == target_info['engine']:
# Nodegroups should already be expanded/resolved to other engines
log.error('Detected nodegroup expansion failure of "{0}"'.format(word))
return False
engine = ref.get(target_info['engine'])
if not engine:
# If an unknown engine is called at any time, fail out
log.error('Unrecognized target engine "{0}" for'
' target expression "{1}"'.format(
target_info['engine'],
word,
)
)
return False
engine_args = [target_info['pattern']]
engine_kwargs = {}
if target_info['delimiter']:
engine_kwargs['delimiter'] = target_info['delimiter']
results.append(
str(getattr(self, '{0}_match'.format(engine))(*engine_args, **engine_kwargs))
)
else:
# The match is not explicitly defined, evaluate it as a glob
results.append(str(self.glob_match(word)))
results = ' '.join(results)
log.debug('compound_match {0} ? "{1}" => "{2}"'.format(self.opts['id'], tgt, results))
try:
return eval(results) # pylint: disable=W0123
except Exception:
log.error('Invalid compound target: {0} for results: {1}'.format(tgt, results))
return False
return False
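    # Illustrative sketch (target string is hypothetical): for a compound target
    # such as 'G@os:Ubuntu and web*' each word is dispatched to the matcher named
    # in the ref table above ('G' -> grain_match) or treated as a glob, the string
    # results ('True'/'False') are joined back together, and the resulting boolean
    # expression (e.g. 'True and False') is evaluated to produce the final answer.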
def nodegroup_match(self, tgt, nodegroups):
'''
This is a compatibility matcher and is NOT called when using
nodegroups for remote execution, but is called when the nodegroups
matcher is used in states
'''
if tgt in nodegroups:
return self.compound_match(
salt.utils.minions.nodegroup_comp(tgt, nodegroups)
)
return False
class ProxyMinion(Minion):
'''
This class instantiates a 'proxy' minion--a minion that does not manipulate
the host it runs on, but instead manipulates a device that cannot run a minion.
'''
# TODO: better name...
@tornado.gen.coroutine
def _post_master_init(self, master):
'''
Function to finish init after connecting to a master
This is primarily loading modules, pillars, etc. (since they need
to know which master they connected to)
'''
log.debug("subclassed _post_master_init")
self.opts['master'] = master
self.opts['pillar'] = yield salt.pillar.get_async_pillar(
self.opts,
self.opts['grains'],
self.opts['id'],
self.opts['environment'],
pillarenv=self.opts.get('pillarenv'),
).compile_pillar()
if 'proxy' not in self.opts['pillar'] and 'proxy' not in self.opts:
log.error('No proxy key found in pillar or opts for id '+self.opts['id']+'.')
log.error('Check your pillar configuration and contents. Salt-proxy aborted.')
self._running = False
raise SaltSystemExit(code=-1)
if 'proxy' not in self.opts:
self.opts['proxy'] = self.opts['pillar']['proxy']
fq_proxyname = self.opts['proxy']['proxytype']
# Need to load the modules so they get all the dunder variables
self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
# we can then sync any proxymodules down from the master
# we do a sync_all here in case proxy code was installed by
# SPM or was manually placed in /srv/salt/_modules etc.
self.functions['saltutil.sync_all'](saltenv='base')
# Then load the proxy module
self.proxy = salt.loader.proxy(self.opts)
# Check config 'add_proxymodule_to_opts' Remove this in Carbon.
if self.opts['add_proxymodule_to_opts']:
self.opts['proxymodule'] = self.proxy
# And re-load the modules so the __proxy__ variable gets injected
self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
self.functions.pack['__proxy__'] = self.proxy
self.proxy.pack['__salt__'] = self.functions
self.proxy.pack['__ret__'] = self.returners
self.proxy.pack['__pillar__'] = self.opts['pillar']
# Start engines here instead of in the Minion superclass __init__
# This is because we need to inject the __proxy__ variable but
# it is not setup until now.
self.io_loop.spawn_callback(salt.engines.start_engines, self.opts,
self.process_manager, proxy=self.proxy)
if ('{0}.init'.format(fq_proxyname) not in self.proxy
or '{0}.shutdown'.format(fq_proxyname) not in self.proxy):
log.error('Proxymodule {0} is missing an init() or a shutdown() or both.'.format(fq_proxyname))
log.error('Check your proxymodule. Salt-proxy aborted.')
self._running = False
raise SaltSystemExit(code=-1)
proxy_init_fn = self.proxy[fq_proxyname+'.init']
proxy_init_fn(self.opts)
self.opts['grains'] = salt.loader.grains(self.opts, proxy=self.proxy)
self.serial = salt.payload.Serial(self.opts)
self.mod_opts = self._prep_mod_opts()
self.matcher = Matcher(self.opts, self.functions)
self.beacons = salt.beacons.Beacon(self.opts, self.functions)
uid = salt.utils.get_uid(user=self.opts.get('user', None))
self.proc_dir = get_proc_dir(self.opts['cachedir'], uid=uid)
self.schedule = salt.utils.schedule.Schedule(
self.opts,
self.functions,
self.returners)
# add default scheduling jobs to the minions scheduler
if self.opts['mine_enabled'] and 'mine.update' in self.functions:
self.schedule.add_job({
'__mine_interval':
{
'function': 'mine.update',
'minutes': self.opts['mine_interval'],
'jid_include': True,
'maxrunning': 2,
'return_job': self.opts.get('mine_return_job', False)
}
}, persist=True)
log.info('Added mine.update to scheduler')
else:
self.schedule.delete_job('__mine_interval', persist=True)
# add master_alive job if enabled
if (self.opts['transport'] != 'tcp' and
self.opts['master_alive_interval'] > 0):
self.schedule.add_job({
'__master_alive_{0}'.format(self.opts['master']):
{
'function': 'status.master',
'seconds': self.opts['master_alive_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master'],
'connected': True}
}
}, persist=True)
if self.opts['master_failback'] and \
'master_list' in self.opts and \
self.opts['master'] != self.opts['master_list'][0]:
self.schedule.add_job({
'__master_failback':
{
'function': 'status.ping_master',
'seconds': self.opts['master_failback_interval'],
'jid_include': True,
'maxrunning': 1,
'kwargs': {'master': self.opts['master_list'][0]}
}
}, persist=True)
else:
self.schedule.delete_job('__master_failback', persist=True)
else:
self.schedule.delete_job('__master_alive_{0}'.format(self.opts['master']), persist=True)
self.schedule.delete_job('__master_failback', persist=True)
# Sync the grains here so the proxy can communicate them to the master
self.functions['saltutil.sync_grains'](saltenv='base')
self.grains_cache = self.opts['grains']
| 41.52992
| 165
| 0.547542
|
92b826b9a5dc45ee8cb6d5a711e2282de26858a1
| 6,344
|
py
|
Python
|
venv/lib/python3.5/site-packages/awscli/paramfile.py
|
seancarnahan/CodeDeployGitHubDemo
|
464d25e3ed2ecf5b16ecc2f519190faf5f24b910
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.5/site-packages/awscli/paramfile.py
|
seancarnahan/CodeDeployGitHubDemo
|
464d25e3ed2ecf5b16ecc2f519190faf5f24b910
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.5/site-packages/awscli/paramfile.py
|
seancarnahan/CodeDeployGitHubDemo
|
464d25e3ed2ecf5b16ecc2f519190faf5f24b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
import os
from botocore.vendored import requests
from awscli.compat import six
from awscli.compat import compat_open
logger = logging.getLogger(__name__)
# These are special cased arguments that do _not_ get the
# special param file processing. This is typically because it
# refers to an actual URI of some sort and we don't want to actually
# download the content (i.e. TemplateURL in cloudformation).
PARAMFILE_DISABLED = set([
'apigateway.put-integration.uri',
'appstream2.create-stack.redirect-url',
'appstream2.update-stack.redirect-url',
'cloudformation.create-stack.template-url',
'cloudformation.update-stack.template-url',
'cloudformation.create-stack-set.template-url',
'cloudformation.update-stack-set.template-url',
'cloudformation.create-change-set.template-url',
'cloudformation.validate-template.template-url',
'cloudformation.estimate-template-cost.template-url',
'cloudformation.get-template-summary.template-url',
'cloudformation.create-stack.stack-policy-url',
'cloudformation.update-stack.stack-policy-url',
'cloudformation.set-stack-policy.stack-policy-url',
# aws cloudformation package --template-file
'custom.package.template-file',
# aws cloudformation deploy --template-file
'custom.deploy.template-file',
'cloudformation.update-stack.stack-policy-during-update-url',
# We will want to change the event name to ``s3`` as opposed to
# custom in the near future along with ``s3`` to ``s3api``.
'custom.cp.website-redirect',
'custom.mv.website-redirect',
'custom.sync.website-redirect',
'guardduty.create-ip-set.location',
'guardduty.update-ip-set.location',
'guardduty.create-threat-intel-set.location',
'guardduty.update-threat-intel-set.location',
'comprehend.detect-dominant-language.text',
'comprehend.batch-detect-dominant-language.text-list',
'comprehend.detect-entities.text',
'comprehend.batch-detect-entities.text-list',
'comprehend.detect-key-phrases.text',
'comprehend.batch-detect-key-phrases.text-list',
'comprehend.detect-sentiment.text',
'comprehend.batch-detect-sentiment.text-list',
'iam.create-open-id-connect-provider.url',
'machinelearning.predict.predict-endpoint',
'rds.copy-db-cluster-snapshot.pre-signed-url',
'rds.create-db-cluster.pre-signed-url',
'rds.copy-db-snapshot.pre-signed-url',
'rds.create-db-instance-read-replica.pre-signed-url',
'sqs.add-permission.queue-url',
'sqs.change-message-visibility.queue-url',
'sqs.change-message-visibility-batch.queue-url',
'sqs.delete-message.queue-url',
'sqs.delete-message-batch.queue-url',
'sqs.delete-queue.queue-url',
'sqs.get-queue-attributes.queue-url',
'sqs.list-dead-letter-source-queues.queue-url',
'sqs.receive-message.queue-url',
'sqs.remove-permission.queue-url',
'sqs.send-message.queue-url',
'sqs.send-message-batch.queue-url',
'sqs.set-queue-attributes.queue-url',
'sqs.purge-queue.queue-url',
'sqs.list-queue-tags.queue-url',
'sqs.tag-queue.queue-url',
'sqs.untag-queue.queue-url',
's3.copy-object.website-redirect-location',
's3.create-multipart-upload.website-redirect-location',
's3.put-object.website-redirect-location',
# Double check that this has been renamed!
'sns.subscribe.notification-endpoint',
'iot.create-job.document-source',
'translate.translate-text.text',
'workdocs.create-notification-subscription.notification-endpoint',
])
class ResourceLoadingError(Exception):
pass
def get_paramfile(path):
"""Load parameter based on a resource URI.
It is possible to pass parameters to operations by referring
to files or URI's. If such a reference is detected, this
function attempts to retrieve the data from the file or URI
and returns it. If there are any errors or if the ``path``
does not appear to refer to a file or URI, a ``None`` is
returned.
:type path: str
:param path: The resource URI, e.g. file://foo.txt. This value
may also be a non resource URI, in which case ``None`` is returned.
:return: The loaded value associated with the resource URI.
If the provided ``path`` is not a resource URI, then a
value of ``None`` is returned.
"""
data = None
if isinstance(path, six.string_types):
for prefix, function_spec in PREFIX_MAP.items():
if path.startswith(prefix):
function, kwargs = function_spec
data = function(prefix, path, **kwargs)
return data
def get_file(prefix, path, mode):
file_path = os.path.expandvars(os.path.expanduser(path[len(prefix):]))
try:
with compat_open(file_path, mode) as f:
return f.read()
except UnicodeDecodeError:
raise ResourceLoadingError(
'Unable to load paramfile (%s), text contents could '
'not be decoded. If this is a binary file, please use the '
'fileb:// prefix instead of the file:// prefix.' % file_path)
except (OSError, IOError) as e:
raise ResourceLoadingError('Unable to load paramfile %s: %s' % (
path, e))
def get_uri(prefix, uri):
try:
r = requests.get(uri)
if r.status_code == 200:
return r.text
else:
raise ResourceLoadingError(
"received non 200 status code of %s" % (
r.status_code))
except Exception as e:
raise ResourceLoadingError('Unable to retrieve %s: %s' % (uri, e))
PREFIX_MAP = {
'file://': (get_file, {'mode': 'r'}),
'fileb://': (get_file, {'mode': 'rb'}),
'http://': (get_uri, {}),
'https://': (get_uri, {}),
}
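# Illustrative sketch of the prefix handling above (paths and URLs are placeholders):
#   get_paramfile('file://params.json')     -> decoded text contents of params.json
#   get_paramfile('fileb://payload.bin')    -> raw bytes of payload.bin
#   get_paramfile('https://example.com/x')  -> response body, if the server returns 200
#   get_paramfile('plain-value')            -> None (not a resource URI)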
| 36.045455
| 78
| 0.684899
|
c8dad103de46f16ab2b864d45430a57e5a2b6ec1
| 512
|
py
|
Python
|
office365/onedrive/publicationFacet.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | 544
|
2016-08-04T17:10:16.000Z
|
2022-03-31T07:17:20.000Z
|
office365/onedrive/publicationFacet.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | 438
|
2016-10-11T12:24:22.000Z
|
2022-03-31T19:30:35.000Z
|
office365/onedrive/publicationFacet.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | 202
|
2016-08-22T19:29:40.000Z
|
2022-03-30T20:26:15.000Z
|
from office365.runtime.client_value import ClientValue
class PublicationFacet(ClientValue):
def __init__(self, level=None, versionId=None):
"""
:param str level: The state of publication for this document. Either published or checkout. Read-only.
:param str versionId: The unique identifier for the version that is visible to the current caller. Read-only.
"""
super(PublicationFacet, self).__init__()
self.level = level
self.versionId = versionId
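# Hypothetical usage sketch (values are placeholders, not taken from the API docs):
#   facet = PublicationFacet(level="published", versionId="1.0")
#   facet.level, facet.versionId  # -> ("published", "1.0")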
| 34.133333
| 117
| 0.697266
|
bc3e8ee66802f3d83d0357198a79c98d5772455d
| 2,094
|
py
|
Python
|
test/vanilla/Expected/AcceptanceTests/Report/report/_configuration.py
|
Azure/autorest.azure-functions-python
|
b0896d8aec6b0fd6f0bcb12ea8e0489652dc2783
|
[
"MIT"
] | 4
|
2020-10-22T20:35:38.000Z
|
2021-12-21T07:29:01.000Z
|
test/vanilla/Expected/AcceptanceTests/Report/report/_configuration.py
|
Azure/autorest.azure-functions-python
|
b0896d8aec6b0fd6f0bcb12ea8e0489652dc2783
|
[
"MIT"
] | 3
|
2020-09-09T15:16:15.000Z
|
2021-12-20T15:25:18.000Z
|
test/vanilla/Expected/AcceptanceTests/Report/report/_configuration.py
|
Azure/autorest.azure-functions-python
|
b0896d8aec6b0fd6f0bcb12ea8e0489652dc2783
|
[
"MIT"
] | 2
|
2020-11-10T07:16:23.000Z
|
2020-12-30T11:03:14.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
class AutoRestReportServiceConfiguration(Configuration):
"""Configuration for AutoRestReportService.
Note that all parameters used to create this instance are saved as instance
attributes.
"""
def __init__(
self,
**kwargs # type: Any
):
# type: (...) -> None
super(AutoRestReportServiceConfiguration, self).__init__(**kwargs)
kwargs.setdefault('sdk_moniker', 'autorestreportservice/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
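# Hypothetical usage sketch: any of the policies above can be overridden through
# kwargs; unspecified ones fall back to the azure.core defaults built in _configure.
#   config = AutoRestReportServiceConfiguration(
#       retry_policy=policies.RetryPolicy(retry_total=3),  # retry_total kwarg assumed
#   )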
| 41.058824
| 106
| 0.666667
|
4b05e19162a5cc7888db88a7abe8158bf229a328
| 3,894
|
py
|
Python
|
src/sas/sasgui/guiframe/custom_pstats.py
|
andyfaff/sasview
|
c00a797ab9c4ddc60f0fa8a64ae8a2067c225921
|
[
"BSD-3-Clause"
] | null | null | null |
src/sas/sasgui/guiframe/custom_pstats.py
|
andyfaff/sasview
|
c00a797ab9c4ddc60f0fa8a64ae8a2067c225921
|
[
"BSD-3-Clause"
] | null | null | null |
src/sas/sasgui/guiframe/custom_pstats.py
|
andyfaff/sasview
|
c00a797ab9c4ddc60f0fa8a64ae8a2067c225921
|
[
"BSD-3-Clause"
] | null | null | null |
import cProfile, pstats, os
def func_std_string(func_name): # match what old profile produced
if func_name[:2] == ('~', 0):
# special case for built-in functions
name = func_name[2]
if name.startswith('<') and name.endswith('>'):
return '{%s}' % name[1:-1]
else:
return name
else:
return "%s:%d(%s)" % func_name
def f8(x):
return "%8.3f" % x
class CustomPstats(pstats.Stats):
def __init__(self, *args, **kwds):
pstats.Stats.__init__(self, *args, **kwds)
def write_stats(self, *amount):
msg = ''
for filename in self.files:
msg += str(filename) + '\n'
#if self.files: msg += str(self.stream) + '\n'
indent = ' ' * 8
for func in self.top_level:
            msg += str(indent) + str(pstats.func_get_function_name(func)) + "\n"
        msg += str(indent) + str(self.total_calls) + " function calls" + '\n'
if self.total_calls != self.prim_calls:
msg += "(%d primitive calls)" % self.prim_calls + '\n'
msg += "in %.3f CPU seconds" % self.total_tt + '\n'
#msg += str(self.stream) + '\n'
width = self.max_name_len
if self.fcn_list:
list = self.fcn_list[:]
temp_msg = " Ordered by: " + self.sort_type + '\n'
else:
list = self.stats.keys()
temp_msg = " Random listing order was used\n"
for selection in amount:
list, temp_msg = self.eval_print_amount(selection, list, temp_msg)
count = len(list)
if not list:
width, list = 0, list
else:
msg += str(temp_msg) + '\n'
if count < len(self.stats):
width = 0
for func in list:
if len(func_std_string(func)) > width:
width = len(func_std_string(func))
width, list = width+2, list
if list:
msg += ' ncalls tottime percall cumtime percall'
msg += ' filename:lineno(function)' + "\n"
for func in list:
cc, nc, tt, ct, callers = self.stats[func]
c = str(nc)
if nc != cc:
c = c + '/' + str(cc)
msg += str( c.rjust(9))
msg += str(f8(tt))
if nc == 0:
msg += str(' '*8)
else:
msg += str(f8(tt/nc))
msg += str(f8(ct))
if cc == 0:
                    msg += str(' ' * 8)
else:
msg += str(f8(ct/cc))
msg += " " + str(func_std_string(func)) + '\n'
msg += str(self.stream) + '\n'
#msg += str(self.stream) + '\n'
return self, msg
def profile( fn, name='profile.txt',*args, **kw):
import cProfile, pstats, os
global call_result
def call():
global call_result
call_result = fn(*args, **kw)
cProfile.runctx('call()', dict(call=call), {}, 'profile.txt')
stats = CustomPstats('profile.txt')
    # sort by cumulative time
stats.sort_stats('time')
stats.print_stats(50)
"""
filename = 'profile_cum_' + name
_, msg = stats.write_stats(50)
f = file(filename, 'wb')
f.write(msg)
f.close()
#sort by time
stats.sort_stats('time')
_, msg = stats.write_stats(50)
filename = 'profile_time_' + name
f = file(filename, 'wb')
f.write(msg)
f.close()
# sort by number of calls
stats.sort_stats('call')
_, msg = stats.write_stats(50)
filename = 'profile_call_' + name
f = file(filename, 'wb')
f.write(msg)
f.close()
os.unlink('profile.txt')
"""
return call_result
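# Hypothetical usage sketch: profile an arbitrary callable and keep its return value
# (the callable and the name argument below are placeholders).
#   sorted_values = profile(sorted, 'sort_profile.txt', range(1000), reverse=True)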
| 32.722689
| 78
| 0.479712
|
148f50dcdae20bfd954ff3ddba4a6b72597c8597
| 2,650
|
py
|
Python
|
utils/visualize_data.py
|
PointCloudYC/se-pseudogrid
|
65005d82fda1a31b8c945e02e378df102ba0fee0
|
[
"MIT"
] | 2
|
2021-11-30T06:38:23.000Z
|
2021-12-17T01:38:32.000Z
|
utils/visualize_data.py
|
PointCloudYC/se-pseudogrid
|
65005d82fda1a31b8c945e02e378df102ba0fee0
|
[
"MIT"
] | null | null | null |
utils/visualize_data.py
|
PointCloudYC/se-pseudogrid
|
65005d82fda1a31b8c945e02e378df102ba0fee0
|
[
"MIT"
] | null | null | null |
"""
Date: April 12, 2021
Author: YIN Chao
Functionality:
- read PSNet-5 dataset processed files (data/xxx/processed)
- write each point cloud to ply files with xyzrgb and xyzLabel format
- show each point cloud in open3d
"""
import sys
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import matplotlib as mpl
# mpl.rc('axes', labelsize=14)
# mpl.rc('xtick', labelsize=12)
# mpl.rc('ytick', labelsize=12)
import seaborn as sns
from util import write_ply
from visualize import Plot
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(ROOT_DIR)
VISUALIZE_IN_OPEN3D = True
SAVE_PLY = False
DATA_DIR = 'data/PSNet/PSNet_reduced_5/processed'
point_clouds = ['Area_3']
# point_clouds = ['Area_1','Area_2','Area_3','Area_4']
# point_clouds = ['Area_1','Area_4']
# read pkl data
for cloud_name in point_clouds:
    filename = os.path.join(DATA_DIR, f'{cloud_name}.pkl')
with open(filename, 'rb') as f:
(cloud_points, cloud_colors, cloud_classes) = pickle.load(f)
print(f"{filename} loaded successfully")
# save xyzrgb and xyzLabel to ply file
if SAVE_PLY:
xyzRgb_name = os.path.join(DATA_DIR, 'cloudcompare', f'{cloud_name}_xyzRgb.ply')
xyzLabel_name = os.path.join(DATA_DIR, 'cloudcompare', f'{cloud_name}_xyzLabel.ply')
# save xyz + rgb to ply
write_ply(xyzRgb_name,
[cloud_points, cloud_colors],
['x', 'y', 'z', 'r', 'g', 'b'])
print(f"{cloud_name}_xyzRgb.ply saved successfully")
# save xyz + label to ply
write_ply(xyzLabel_name,
[cloud_points, cloud_classes],
['x', 'y', 'z', 'gt'])
print(f"{cloud_name}_xyzLabel.ply saved successfully")
    # HACK: to show the figure with the same rendering style, add 1 tank point since area_1 and area_4 do not have tank points (semantic label class is 4)
    HACKING_RENDER_STYLE = True
if VISUALIZE_IN_OPEN3D:
xyzrgb = np.concatenate([cloud_points, cloud_colors], axis=-1)
Plot.draw_pc(xyzrgb) # visualize raw point clouds
if HACKING_RENDER_STYLE:
cloud_points = np.vstack((cloud_points,cloud_points[0]))
cloud_classes = np.vstack((cloud_classes,np.array(4))) # 4 is tank class's id
Plot.draw_pc_sem_ins(cloud_points, cloud_classes) # visualize ground-truth
# Plot.draw_pc_sem_ins(points, preds) # visualize prediction
| 37.323944
| 153
| 0.649434
|
3833a24c6865041f4357cf79429f866d000e22ef
| 10,528
|
py
|
Python
|
force_wfmanager/notifications/tests/test_ui_notification.py
|
force-h2020/force-wfmanager
|
bcd488cd37092cacd9d0c81b544ee8c1654d1d92
|
[
"BSD-2-Clause"
] | 1
|
2019-08-19T16:02:20.000Z
|
2019-08-19T16:02:20.000Z
|
force_wfmanager/notifications/tests/test_ui_notification.py
|
force-h2020/force-wfmanager
|
bcd488cd37092cacd9d0c81b544ee8c1654d1d92
|
[
"BSD-2-Clause"
] | 396
|
2017-07-18T15:19:55.000Z
|
2021-05-03T06:23:06.000Z
|
force_wfmanager/notifications/tests/test_ui_notification.py
|
force-h2020/force-wfmanager
|
bcd488cd37092cacd9d0c81b544ee8c1654d1d92
|
[
"BSD-2-Clause"
] | 2
|
2019-03-05T16:23:10.000Z
|
2020-04-16T08:59:11.000Z
|
# (C) Copyright 2010-2020 Enthought, Inc., Austin, TX
# All rights reserved.
import unittest
from testfixtures import LogCapture
from threading import Event
from force_bdss.api import (
BaseDriverEvent,
MCOStartEvent,
MCOProgressEvent,
MCOFinishEvent,
DataValue,
)
from force_wfmanager.notifications.ui_notification import UINotification
from force_wfmanager.notifications.ui_notification_factory import (
UINotificationFactory,
)
from force_wfmanager.notifications.ui_notification_model import (
UINotificationModel,
)
try:
import mock
except ImportError:
from unittest import mock
import zmq
UI_MIXIN = (
"force_bdss.ui_hooks.ui_notification_mixins.UIEventNotificationMixin."
)
class TestUINotification(unittest.TestCase):
def setUp(self):
factory = mock.Mock(spec=UINotificationFactory)
self.model = UINotificationModel(factory)
self.model.identifier = "an_id"
listener = UINotification(factory)
self.sync_socket = mock.Mock(spec=zmq.Socket)
self.sync_socket.recv_multipart = mock.Mock()
self.sync_socket.recv_multipart.side_effect = [
[x.encode("utf-8") for x in ["HELLO", "an_id", "1"]],
[x.encode("utf-8") for x in ["GOODBYE", "an_id"]],
]
self.pub_socket = mock.Mock(spec=zmq.Socket)
self.sub_socket = mock.Mock(spec=zmq.Socket)
self.sub_socket.recv_multipart.side_effect = [
[x.encode("utf-8") for x in ["MSG", "an_id", "1"]],
[x.encode("utf-8") for x in ["MGS", "PAUSE_BDSS", "1"]],
[x.encode("utf-8") for x in ["MGS", "PAUSE_BDSS", "1"]],
[x.encode("utf-8") for x in ["MGS", "RESUME_BDSS", "1"]],
[x.encode("utf-8") for x in ["MGS", "STOP_BDSS", "1"]],
]
self.context = mock.Mock(spec=zmq.Context)
self.context.socket.side_effect = [
self.pub_socket,
self.sub_socket,
self.sync_socket,
]
listener.__class__._create_context = mock.Mock(
return_value=self.context
)
self.listener = listener
def test_deliver(self):
listener = self.listener
listener.initialize(self.model)
self.assertEqual(
self.sync_socket.send_multipart.call_args[0][0],
[x.encode("utf-8") for x in ["HELLO", "an_id", "1"]],
)
listener.deliver(MCOStartEvent())
self.assertEqual(
self.pub_socket.send_multipart.call_args[0][0][0:2],
[x.encode("utf-8") for x in ["MESSAGE", "an_id"]],
)
listener.deliver(
MCOProgressEvent(
optimal_point=[
DataValue(value=1),
DataValue(value=2),
DataValue(value=3),
],
optimal_kpis=[DataValue(value=4), DataValue(value=5)],
)
)
self.assertEqual(
self.pub_socket.send_multipart.call_args[0][0][0:2],
[x.encode("utf-8") for x in ["MESSAGE", "an_id"]],
)
listener.deliver(MCOFinishEvent())
self.assertEqual(
self.pub_socket.send_multipart.call_args[0][0][0:2],
[x.encode("utf-8") for x in ["MESSAGE", "an_id"]],
)
with self.assertRaisesRegex(
TypeError, "Event is not a BaseDriverEvent"
):
listener.deliver("not an event")
with mock.patch.object(
listener._pub_socket, "send_multipart"
) as mock_send:
listener.deliver(BaseDriverEvent())
mock_send.assert_not_called()
def test_finalize(self):
self.listener.initialize(self.model)
self.assertTrue(self.listener._poller_running)
self.listener.finalize()
self.assertTrue(self.context.term.called)
self.assertTrue(self.sync_socket.close.called)
self.assertTrue(self.pub_socket.close.called)
self.assertIsNone(self.listener._context)
self.assertIsNone(self.listener._sync_socket)
self.assertIsNone(self.listener._pub_socket)
self.assertFalse(self.listener._poller_running)
def test_initialize(self):
listener = self.listener
listener.initialize(self.model)
self.assertEqual(
self.sync_socket.send_multipart.call_args[0][0],
[x.encode("utf-8") for x in ["HELLO", "an_id", "1"]],
)
def test_polling(self):
self.sync_socket.poll.return_value = 0
listener = self.listener
with LogCapture() as capture:
listener.initialize(self.model)
capture.check(
(
"force_wfmanager.notifications.ui_notification",
"INFO",
"Could not connect to UI server after 1000 ms. "
"Continuing without UI notification.",
)
)
self.assertIsNone(listener._context)
def test_wrong_init_recv(self):
listener = self.listener
self.sync_socket.recv_multipart.side_effect = [
[x.encode("utf-8") for x in ["HELLO", "not_the_right_id", "1"]],
[x.encode("utf-8") for x in ["GOODBYE", "an_id"]],
]
with LogCapture() as capture:
listener.initialize(self.model)
capture.check(
(
"force_wfmanager.notifications.ui_notification",
"ERROR",
"Unexpected reply in sync negotiation with UI server. "
"'['HELLO', 'not_the_right_id', '1']'",
)
)
self.assertIsNone(listener._context)
def test_deliver_without_context(self):
self.listener.deliver(MCOStartEvent())
self.assertFalse(self.pub_socket.send_multipart.called)
def test_finalize_without_context(self):
self.listener.finalize()
self.assertFalse(self.sync_socket.send_multipart.called)
def test_finalize_no_response(self):
self.sync_socket.poll.side_effect = [1, 0]
listener = self.listener
listener.initialize(self.model)
with LogCapture() as capture:
listener.finalize()
capture.check(
(
"force_wfmanager.notifications.ui_notification",
"ERROR",
"Could not close connection to UI server "
"after 1000 ms.",
)
)
self.assertIsNone(listener._context)
def test_wrong_finalize_recv(self):
listener = self.listener
self.sync_socket.poll.side_effect = [1, 1]
self.sync_socket.recv_multipart.side_effect = [
[x.encode("utf-8") for x in ["HELLO", "an_id", "1"]],
[x.encode("utf-8") for x in ["GOODBYE", "not_the_right_id"]],
]
listener.initialize(self.model)
with LogCapture() as capture:
listener.finalize()
capture.check(
(
"force_wfmanager.notifications.ui_notification",
"ERROR",
"Unexpected reply in goodbye sync "
"negotiation with UI server. "
"'['GOODBYE', 'not_the_right_id']'",
)
)
self.assertIsNone(listener._context)
def test_double_clear_sockets(self):
listener = self.listener
listener._close_and_clear_sockets()
self.assertIsNone(listener._context)
listener._close_and_clear_sockets()
self.assertIsNone(listener._context)
def test_run_poller(self):
stop_event = Event()
pause_event = Event()
with mock.patch(
"zmq.Poller.poll", return_value={self.sub_socket: None}
):
with mock.patch(UI_MIXIN + "send_stop") as mock_stop, mock.patch(
UI_MIXIN + "send_pause"
) as mock_pause, mock.patch(
UI_MIXIN + "send_resume"
) as mock_resume:
self.listener.set_stop_event(stop_event)
self.listener.set_pause_event(pause_event)
self.listener.run_poller(self.sub_socket)
self.assertEqual(1, mock_stop.call_count)
self.assertEqual(2, mock_pause.call_count)
self.assertEqual(1, mock_resume.call_count)
self.assertFalse(self.listener._poller_running)
# Test for different socket poll
pub_socket = self.pub_socket
sub_socket = self.sub_socket
class DummyPoller:
def __init__(self):
self.counter = 5
def poll(self):
self.counter -= 1
if self.counter != 0:
return {pub_socket: None}
return {sub_socket: None}
def register(self, socket):
self.socket = socket
self.sub_socket.recv_multipart.side_effect = [
[x.encode("utf-8") for x in ["MGS", "STOP_BDSS", "1"]]
]
with mock.patch("zmq.Poller", return_value=DummyPoller()):
with mock.patch(UI_MIXIN + "send_stop") as mock_stop, mock.patch(
UI_MIXIN + "send_pause"
) as mock_pause, mock.patch(
UI_MIXIN + "send_resume"
) as mock_resume:
self.listener.set_stop_event(stop_event)
self.listener.set_pause_event(pause_event)
self.listener.run_poller(self.sub_socket)
self.assertEqual(1, mock_stop.call_count)
self.assertEqual(0, mock_pause.call_count)
self.assertEqual(0, mock_resume.call_count)
self.assertFalse(self.listener._poller_running)
self.sub_socket.recv_multipart.side_effect = [
[x.encode("utf-8") for x in ["MGS", "STOP_BDSS"]],
[x.encode("utf-8") for x in ["MGS", "STOP_BDSS", "1"]],
]
with mock.patch(
"zmq.Poller.poll", return_value={self.sub_socket: None}
):
with LogCapture() as capture:
self.listener.run_poller(self.sub_socket)
capture.check(
(
"force_wfmanager.notifications.ui_notification",
"ERROR",
"Incompatible data received: expected (msg, identifier, "
"data), but got ['MGS', 'STOP_BDSS'] instead.",
)
)
| 34.631579
| 77
| 0.571524
|
06e396964bf27809f5c2302cfacefd53a65561d0
| 4,755
|
py
|
Python
|
src/repobee_plug/_exthooks.py
|
Rickard-Martensson/repobee
|
5c460e3388bc5b0843789fea747c800f692689c7
|
[
"MIT"
] | null | null | null |
src/repobee_plug/_exthooks.py
|
Rickard-Martensson/repobee
|
5c460e3388bc5b0843789fea747c800f692689c7
|
[
"MIT"
] | 11
|
2021-07-26T02:09:47.000Z
|
2022-03-29T02:15:24.000Z
|
src/repobee_plug/_exthooks.py
|
orquestradigital-actions/repobee
|
8b7bd49932837148422e08a18e33e9f62cd8987f
|
[
"MIT"
] | null | null | null |
"""Hookspecs for repobee extension hooks.
Extension hooks add something to the functionality of repobee, but are not
necessary for its operation. Currently, all extension hooks are related to
cloning repos.
.. module:: exthooks
:synopsis: Hookspecs for repobee extension hooks.
"""
import argparse
import configparser
from typing import Optional
from repobee_plug.cli.args import ConfigurableArguments
from repobee_plug.platform import PlatformAPI
from repobee_plug.hook import hookspec, Result
from repobee_plug.deprecation import deprecate
from repobee_plug.config import Config
from repobee_plug.localreps import StudentRepo, TemplateRepo
#########################
# Hooks for repos clone #
#########################
@hookspec
def post_clone(repo: StudentRepo, api: PlatformAPI) -> Optional[Result]:
"""Operate on a student repository after it has been cloned.
Args:
repo: A local representation of a student repo. The ``path``
attribute is always set to a valid directory containing the
repo.
api: An instance of the platform API.
Returns:
Optionally returns a Result for reporting the outcome of the hook.
May also return None, in which case no reporting will be performed
for the hook.
"""
@deprecate(remove_by_version="3.0.0", replacement="handle_parsed_args")
@hookspec
def clone_parser_hook(clone_parser: argparse.ArgumentParser) -> None:
"""Do something with the clone repos subparser before it is used used to
parse CLI options. The typical task is to add options to it.
.. danger::
This hook no longer has any effect, it is only kept for testing
purposes.
Args:
clone_parser: The ``clone`` subparser.
"""
#########################
# Hooks for repos setup #
#########################
@hookspec
def pre_setup(repo: TemplateRepo, api: PlatformAPI) -> Optional[Result]:
"""Operate on a template repository before it is distributed to
students.
.. note::
Structural changes to the master repo are not currently supported.
Changes to the repository during the callback will not be reflected
in the generated repositories. Support for preprocessing is not
planned as it is technically difficult to implement.
Args:
repo: Representation of a local template repo.
api: An instance of the platform API.
Returns:
Optionally returns a Result for reporting the outcome of the hook.
May also return None, in which case no reporting will be performed
for the hook.
"""
@hookspec
def post_setup(
repo: StudentRepo, api: PlatformAPI, newly_created: bool
) -> Optional[Result]:
"""Operate on a student repo after the setup command has executed.
Args:
repo: A student repository.
api: An instance of the platform API.
newly_created: False if the student repo already existed.
Returns:
Optionally returns a Result for reporting the outcome of the hook.
May also return None, in which case no reporting will be performed
for the hook.
"""
############################
# Hooks for config/parsing #
############################
@hookspec
def get_configurable_args() -> ConfigurableArguments:
"""Get the configurable arguments for a plugin.
.. danger::
This is not a public hook, don't implement this manually!
Returns:
The configurable arguments of a plugin.
"""
@deprecate(remove_by_version="3.8.0", replacement="handle_config")
@hookspec
def config_hook(config_parser: configparser.ConfigParser) -> None:
"""Hook into the config file parsing.
.. deprecated:: 3.6.0
Use :py:func:`handle_config` instead.
Args:
config_parser: The config parser after config has been read.
"""
@hookspec
def handle_config(config: Config) -> None:
"""Handle the config.
This hook is allowed both to read the config, and to modify it before it's
passed to the core RepoBee application.
.. warning::
The :py:class:`Config` class is currently not stable and its behavior
may change.
Args:
config: RepoBee's config.
"""
@hookspec
def handle_parsed_args(args: argparse.Namespace) -> None:
"""Handle the parsed args from the parser, before any processing is
applied.
Args:
args: The full namespace returned by
:py:func:`argparse.ArgumentParser.parse_args`
"""
@hookspec
def handle_processed_args(args: argparse.Namespace) -> None:
"""Handle the parsed command line arguments after RepoBee has applied
processing.
Args:
args: A processed version of the parsed CLI arguments.
"""
| 27.485549
| 78
| 0.674869
|
8cec3edfa3bf148bb1e00c4d2309fa53a35e9c28
| 3,546
|
py
|
Python
|
packages/SwingSet/misc-tools/vat-map.py
|
danwt/agoric-sdk
|
e0f7ee8884648973c63bbcae587ea36e51f58a64
|
[
"Apache-2.0"
] | null | null | null |
packages/SwingSet/misc-tools/vat-map.py
|
danwt/agoric-sdk
|
e0f7ee8884648973c63bbcae587ea36e51f58a64
|
[
"Apache-2.0"
] | null | null | null |
packages/SwingSet/misc-tools/vat-map.py
|
danwt/agoric-sdk
|
e0f7ee8884648973c63bbcae587ea36e51f58a64
|
[
"Apache-2.0"
] | null | null | null |
import sys, json, gzip, re, time
from collections import defaultdict
from pprint import pprint
# python vat-map.py SLOGFILE[.gz] > vat-map.json
#
# Determines a name for each vatID, also the manager type and number of
# deliveries. The resulting vat-map.json can be read by other tools to put a
# description on each vat. Static vats get a name from their bundle, e.g.
# "bank" or "mints" or "zoe". Dynamic contract vats find the agoric-sdk
# -relative filename of the entry point of the contract, e.g.
# "packages/treasury/src/stablecointMachine.js", and emit an abbreviation
# (for known contracts) or the full string.
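# Illustrative sketch of the emitted JSON shape (values are placeholders):
#   { "v1": {"mtype": "local", "deliveries": 123, "name": "bank"} }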
unknown_vats = set() # vatID
vats = {} # vatID -> { name, managerType, cranks }
unnamed_zcf_vats = set()
abbreviations = {
"packages/treasury/src/stablecoinMachine.js": "treasury",
"packages/pegasus/src/pegasus.js": "pegasus",
"packages/zoe/src/contracts/multipoolAutoswap/multipoolAutoswap.js": "amm",
"packages/treasury/src/liquidateMinimum.js": "liquidate",
}
EPRE = re.compile(r'const entrypoint = "([^"]+)"')
def entrypoint_of_bundle(vatID, bundle):
mf = bundle["moduleFormat"]
if mf == "nestedEvaluate":
source = bundle["source"]
mo = EPRE.search(source)
return mo.group(1)
else:
print("unknown moduleFormat='%s' in vat %s" % (mf, vatID))
return None
fn = sys.argv[1]
opener = gzip.open if fn.endswith(".gz") else open
with opener(sys.argv[1]) as f:
for line in f:
if isinstance(line, bytes):
line = line.decode("utf-8")
if not line.strip():
continue
data = json.loads(line.strip())
type = data["type"]
if type == "create-vat":
vatID = data["vatID"]
if not data["dynamic"]:
vats[vatID] = { "name": data["name"], "managerType": data["managerType"],
"cranks": 0 }
else:
name = None
bundle = data["vatSourceBundle"]
entrypoint = entrypoint_of_bundle(vatID, bundle)
if entrypoint == "packages/zoe/contractFacet.js":
unnamed_zcf_vats.add(vatID)
name = "<zcf>"
else:
name = abbreviations.get(entrypoint, entrypoint)
vats[vatID] = { "name": name, "managerType": data["managerType"],
"cranks": 0 }
if type == "deliver":
vatID = data["vatID"]
if vatID in unnamed_zcf_vats:
kd = data["kd"]
if kd[0] == "message":
method = kd[2]["method"]
if method == "executeContract":
bundle = json.loads(kd[2]["args"]["body"])[0]
entrypoint = entrypoint_of_bundle(vatID, bundle)
name = abbreviations.get(entrypoint, entrypoint)
vats[vatID]["name"] = name
unnamed_zcf_vats.remove(vatID)
vats[vatID]["cranks"] += 1
print("{")
for count,vatID in enumerate(sorted(vats, key=lambda vatID: int(vatID[1:]))):
d = vats[vatID]
name = d.get("name", "<unknown>")
comma = "," if count < len(vats)-1 else ""
print('%6s: {"mtype": %12s, "deliveries": %10d, "name": %-22s}%s' % (
'"%s"' % vatID, '"%s"' % d["managerType"], d["cranks"], '"%s"' % name, comma))
print("}")
#for vatID in sorted(unknown_vats, key=lambda vatID: int(vatID[1:])):
# print("%4s: unknown" % (vatID,))
| 38.543478
| 89
| 0.562606
|
75feee6b224cf948bb6f00521dc2ba60e310ecec
| 22,318
|
py
|
Python
|
mindmeld/converter/rasa.py
|
dyoshiha/mindmeld
|
95f0e8482594f00040766a2ee687e9c9338f5a74
|
[
"Apache-2.0"
] | null | null | null |
mindmeld/converter/rasa.py
|
dyoshiha/mindmeld
|
95f0e8482594f00040766a2ee687e9c9338f5a74
|
[
"Apache-2.0"
] | null | null | null |
mindmeld/converter/rasa.py
|
dyoshiha/mindmeld
|
95f0e8482594f00040766a2ee687e9c9338f5a74
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Cisco Systems, Inc. and others. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Rasacoverter class used to convert Rasa projects
into Mindmeld projects"""
from keyword import iskeyword
import re
import os
import copy
import logging
import yaml
from mindmeld.converter.converter import Converter
logger = logging.getLogger(__name__)
class RasaConverter(Converter):
"""The class is a sub class of the abstract Converter class. This class
contains the methods required to convert a Rasa project into a MindMeld project
"""
def __init__(self, rasa_project_directory, mindmeld_project_directory):
if os.path.exists(os.path.dirname(rasa_project_directory)):
self.rasa_project_directory = rasa_project_directory
self.mindmeld_project_directory = mindmeld_project_directory
else:
msg = "`{rasa_project_directory}` does not exist. Please verify.".format(
rasa_project_directory=rasa_project_directory)
raise FileNotFoundError(msg)
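    # Hedged usage sketch (directory paths are placeholders):
    #   converter = RasaConverter('path/to/rasa_project', 'path/to/mindmeld_project')
    #   converter.convert_project()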
def _create_intents_directories(self, mindmeld_project_directory, intents):
"""Note: Because Rasa does not support multiple domains at this time. All intents
are placed under a domain named 'general'."""
GENERAL_DOMAIN_LOCATION = '/domains/general/'
for intent in intents:
self.create_directory(mindmeld_project_directory + GENERAL_DOMAIN_LOCATION + intent)
def _create_entities_directories(self, mindmeld_project_directory, entities):
for entity in entities:
entity_path = mindmeld_project_directory + "/entities/" + entity
self.create_directory(entity_path)
with open(entity_path + "/gazetteer.txt", "w") as f:
f.close()
with open(entity_path + "/mapping.json", "w") as f:
# skeleton mapping file that a user must fill in
f.write("{\n \"entities\":[]\n}")
f.close()
@staticmethod
def _is_line_intent_definiton(line):
return (line[0:10] == "## intent:")
@staticmethod
def _get_intent_from_line(line):
return line.split(' ')[1].split(':')[1].rstrip()
def _create_intent_training_file(self, intent_directory):
self.create_directory(intent_directory)
with open(intent_directory + "/train.txt", "w") as f:
f.close()
@staticmethod
def _does_intent_ex_contain_entity(intent_example):
return len(re.findall(r"\[.*\]\(.*\)", intent_example)) > 0
def _write_intent_with_extinty(self, intent_f, intent_example):
mindmend_intent_example = intent_example
pattern = re.compile(r'\[\w*\]\(\w*\)')
for match in pattern.findall(intent_example):
mindmeld_entity = match.replace("[", "{").replace("]", "|") \
.replace("(", "").replace(")", "}")
mindmend_intent_example = mindmend_intent_example.replace(match, mindmeld_entity)
# add this to the respective entity gazetteer file as well
self.create_entity_files(mindmeld_entity)
intent_f.write(mindmend_intent_example)
@staticmethod
def _remove_comments_from_line(line):
start_of_comment = line.find("<!---")
end_of_comment = line.find("-->")
line_without_comment = line.replace(line[start_of_comment:end_of_comment+3], '')
line_without_comment = line_without_comment.rstrip()
return line_without_comment
def _add_example_to_training_file(self, current_intent_path, line):
with open(current_intent_path + "/train.txt", "a") as intent_f:
intent_example = line[2:]
intent_example = RasaConverter._remove_comments_from_line(intent_example) + "\n"
if RasaConverter._does_intent_ex_contain_entity(intent_example):
self._write_intent_with_extinty(intent_f, intent_example)
else:
intent_f.write(intent_example)
def _read_domain_file(self):
for file_ending in ["yaml", "yml"]:
file_name = self.rasa_project_directory + "/domain." + file_ending
if os.path.isfile(file_name):
try:
with open(file_name, "r") as stream:
domain_data_loaded = yaml.safe_load(stream)
return domain_data_loaded
except IOError as e:
logger.error("Can not open domain.yml file at %s",
file_name)
logger.error(e)
logger.error("Could not find domain.yml file in project directory")
raise FileNotFoundError
def _read_entities(self):
domain_file = self._read_domain_file()
if 'entities' in domain_file:
return domain_file['entities']
else:
return []
def _read_slots(self):
domain_file = self._read_domain_file()
if 'slots' in domain_file:
return domain_file['slots']
else:
return []
def _read_intents(self):
domain_file = self._read_domain_file()
return domain_file['intents']
def _read_actions(self):
domain_file = self._read_domain_file()
return domain_file['actions']
def _read_templates(self):
domain_file = self._read_domain_file()
if 'templates' in domain_file:
return domain_file['templates']
else:
return []
def create_entity_files(self, mm_entry):
entity = mm_entry.strip('{}').split("|")
gazetteer_location = self.mindmeld_project_directory + "/entities/" + \
entity[1] + "/gazetteer.txt"
try:
with open(gazetteer_location, "a") as f:
f.write(entity[0] + "\n")
f.close()
except FileNotFoundError as e:
self._create_entities_directories(self.mindmeld_project_directory, [entity[1]])
with open(gazetteer_location, "a") as f:
f.write(entity[0] + "\n")
f.close()
logger.error("Domain file may not contain entity %s", entity[1])
logger.error(e)
@staticmethod
def _is_valid_function_name(name):
return name.isidentifier() and not iskeyword(name)
@staticmethod
def _is_story_name(stories_line):
return stories_line[0:3] == '## '
def _get_story_name(self, stories_line):
if "<!--" in stories_line:
return self._remove_comments_from_line(
stories_line.replace("## ", "")).rstrip()
else:
return stories_line.replace("## ", "").rstrip()
@staticmethod
def _is_intent(stories_line):
return stories_line[0:2] == '* '
@staticmethod
def _is_action(stories_line):
return stories_line[0:3] == ' - '
@staticmethod
def _does_intent_have_entity(stories_line):
return len(re.findall(r"\{.*\}", stories_line)) > 0
@staticmethod
def _clean_up_entities_list(entities_with_values):
# trim off { }
entities_with_values = entities_with_values[1:-1]
# split data entities if there are multiples and clean white space
entities_list = entities_with_values.split(",")
for i, entity in enumerate(entities_list):
entities_list[i] = entity.replace("\"", '')
entities_list[i] = entities_list[i].lstrip()
return entities_list
def _get_intent_with_entity(self, stories_line):
if RasaConverter._does_intent_have_entity(stories_line):
entities_with_values = re.search(r"\{.*\}", stories_line)
entities_with_values = entities_with_values.group(0)
entities_list = self._clean_up_entities_list(entities_with_values)
start_of_entity = stories_line.find(entities_with_values)
intent = self._remove_comments_from_line(
stories_line[2:start_of_entity]).rstrip()
return intent, entities_list
else:
intent = self._remove_comments_from_line(stories_line[2:]).rstrip()
entities_list = []
return intent, entities_list
def _get_stories(self):
if os.path.isfile(self.rasa_project_directory + "/data/stories.md"):
try:
with open(self.rasa_project_directory + "/data/stories.md", "r") as f:
stories_dictionary = {}
current_story_name = ''
steps = []
current_step = {}
current_intent = ''
current_actions = []
stories_lines = f.readlines()
max_lines = len(stories_lines)
for line_num, line in enumerate(stories_lines):
if self._is_story_name(line):
current_story_name = self._get_story_name(line)
continue
if self._is_intent(line):
current_intent, current_entities = self \
._get_intent_with_entity(line)
current_step["intent"] = copy.deepcopy(current_intent)
current_step["entities"] = copy.deepcopy(current_entities)
continue
if self._is_action(line):
current_actions.append(
RasaConverter._remove_comments_from_line(line[3:]).rstrip())
if ((line_num + 1) < max_lines) and RasaConverter._is_action(
stories_lines[line_num + 1]):
continue
current_step["actions"] = copy.deepcopy(current_actions)
current_actions.clear()
steps.append(copy.deepcopy(current_step))
current_step.clear()
elif len(line.strip()) == 0:
if current_story_name != '':
stories_dictionary[current_story_name] = copy.deepcopy(steps)
steps.clear()
current_story_name = ''
if line_num == (max_lines - 1):
stories_dictionary[current_story_name] = copy.deepcopy(steps)
steps.clear()
current_story_name = ''
f.close()
return stories_dictionary
except IOError as e:
logger.error("Can not open stories.md file at %s",
self.rasa_project_directory + "/data/stories.md")
logger.error(e)
else:
logger.error("Could not find stories.md file in %s",
self.rasa_project_directory + "/data/stories.md")
raise FileNotFoundError
def create_mindmeld_directory(self, mindmeld_project_path):
self.create_directory(mindmeld_project_path)
self.create_directory(mindmeld_project_path + "/data")
self.create_directory(mindmeld_project_path + "/domains")
self.create_directory(mindmeld_project_path + "/domains/general")
self.create_directory(mindmeld_project_path + "/entities")
def create_mindmeld_training_data(self):
"""Method to transfer and reformat the training data in a Rasa Project
"""
# read intents listed in domain.yml
intents = self._read_intents()
# create intents subdirectories
self._create_intents_directories(self.mindmeld_project_directory, intents)
# read entities in domain.yml
entities = self._read_entities()
# create entities subdirectories if entities is not empty
if entities:
self._create_entities_directories(self.mindmeld_project_directory, entities)
# try and open data files from rasa project
nlu_data_loc = self.rasa_project_directory + "/data/nlu_data.md"
try:
with open(nlu_data_loc, "r") as nlu_data_md_file:
nlu_data_lines = nlu_data_md_file.readlines()
except FileNotFoundError:
logger.error("Cannot open nlu_data.md file at %s", nlu_data_loc)
# iterate through each line
current_intent = ''
current_intent_path = ''
for line in nlu_data_lines:
if (self._is_line_intent_definiton(line)):
current_intent = RasaConverter._get_intent_from_line(line)
current_intent_path = self.mindmeld_project_directory \
+ "/domains/general/" + current_intent
                # create data text file for intent examples
self._create_intent_training_file(current_intent_path)
else:
if (line[0] == '-'):
self._add_example_to_training_file(current_intent_path, line)
def _write_init_header(self):
string = '''from mindmeld import Application
from . import custom_features # noqa: F401
app = Application(__name__)
__all__ = ['app']
'''
f = open(self.mindmeld_project_directory + "/__init__.py", "w+")
f.write(string)
return f
@staticmethod
def _get_app_handle(intent, entities):
has_entity_string = ', has_entity='
has_entities_string = ', has_entities=['
entities_string = ""
if len(entities) > 1:
entities_string = has_entities_string
for entity_value in entities:
entity_string = entity_value.split(":")[0]
if entity_value == entities[-1]:
entities_string += "'" + entity_string + "']"
else:
entities_string += "'" + entity_string + "', "
elif len(entities) == 1:
for entity_value in entities:
entity_string = entity_value.split(":")[0]
entities_string += has_entity_string + "'" + entity_string + "'"
handle_string = "@app.handle(intent='" + intent + "'" + entities_string + ")\n"
return handle_string
def _write_function_declaration(self, action, f):
if self._is_valid_function_name(action):
function_declartion_string = "def {}(request, responder):\n".format(action)
f.write(function_declartion_string)
else:
logger.error("Action {action} is not a valid name for a python function")
raise SyntaxError
@staticmethod
def _write_function_body_prompt(prompts, f):
entities_list = []
prompts_list = []
# check if prompts contain any entities
for prompt in prompts:
entities = re.findall(r"\{.*\}", prompt)
entities_list = []
newprompt = prompt
for i, entity in enumerate(entities, start=0):
newprompt = prompt.replace(entity, '{' + str(i) + '}')
entities_list.append(entity.replace("{", "").replace("}", ""))
entities_args = ', '.join(map(str, entities_list))
prompts_list.append('"' + newprompt + '".format({})'.format(entities_args))
for entity in entities_list:
newentity = entity.replace("{", "").replace("}", "")
entities_string = " {}_s = [e['text'] for e in ".format(newentity) + \
"request.entities if e['type'] == '{}']\n".format(newentity)
entity_string = " {0} = {0}_s[0]\n".format(newentity)
f.write(entities_string)
f.write(entity_string)
prompts_string = " prompts = [{}]\n".format(', '.join(prompts_list))
f.write(prompts_string)
@staticmethod
def _write_default_function():
pass
@staticmethod
def _get_text_prompts_list(action_templates):
prompts = []
for template in action_templates:
if 'text' in template:
prompts.append(template['text'])
return prompts
@staticmethod
def _write_responder_lines(f):
responder_string = " responder.reply(prompts)\n responder.listen()\n"
f.write(responder_string)
def _read_file_lines(self):
with open(self.mindmeld_project_directory + "/__init__.py", "r+") as f:
return f.readlines()
def _write_functions(self, actions, templates, f):
for action in actions:
self._write_function_declaration(action, f)
if action in templates:
# Get list of templates per action
action_templates = templates[action]
prompts_list = RasaConverter._get_text_prompts_list(action_templates)
self._write_function_body_prompt(prompts_list, f)
self._write_responder_lines(f)
else:
if (action[0:6] == 'action'):
f.write(" # This is a custom action from rasa\n")
f.write(" pass\n")
else:
# If no templates, write a blank function
f.write(" # No templates were provided for action\n")
f.write(" pass\n")
if action != actions[-1]:
f.write('\n')
f.write('\n')
@staticmethod
def _attach_handle_to_function(handle, action, file_lines):
for i, line in enumerate(file_lines):
if (len(re.findall("def {}".format(action), line)) > 0):
insert_line = i
while (file_lines[i - 1].strip() != ''):
if (file_lines[i - 1] == handle):
return
i = i - 1
file_lines.insert(insert_line, handle)
@staticmethod
def _attach_actions_to_function(actions, file_lines):
current_line = 0
for i, line in enumerate(file_lines):
            if len(re.findall("def {action}", line)) > 0:
current_line = i
break
        while file_lines[current_line].strip() != "":
            current_line += 1
        assert file_lines[current_line].strip() == ""
file_lines[current_line:current_line] = actions
def create_mindmeld_init(self):
f = self._write_init_header()
actions = self._read_actions()
templates = self._read_templates()
# Write all functions for each action
self._write_functions(actions, templates, f)
f.close()
# Read contents of current file
file_lines = self._read_file_lines()
stories_dictionary = self._get_stories()
# Loop through all stories and create intent-action relationship
for item in stories_dictionary.items():
# Loop through steps for each story
for step in item[1]:
# Get intent, any entities, and actions
intent = step['intent']
entities = step['entities']
actions = step['actions']
# attach handle to correct function
app_handle_string = RasaConverter._get_app_handle(intent, entities)
self._attach_handle_to_function(app_handle_string, actions[0], file_lines)
# check if more than 1 action per intent
if len(actions) > 1:
self._attach_actions_to_function(actions[1:], file_lines)
# write all lines back to file
with open(self.mindmeld_project_directory + "/__init__.py", "w") as f:
f.writelines(file_lines)
@staticmethod
def create_custom_features(mindmeld_project_directory, main_file_loc):
with open(main_file_loc + '/rasa_custom_features.txt', 'r') as f:
string = f.read()
with open(mindmeld_project_directory + "/custom_features.py", "w") as f:
f.write(string)
def convert_project(self):
"""Main function that will convert a Rasa project into a Mindmeld project.
        The Rasa project consists of three major files that contain much of the data
that is converted into the Mindmeld project:
/domain.yml - Contains all of the intents, entities, actions, and templates
used in the rasa project
/data/stories.md - Contains the stories which are used to match intents and
actions together
/data/nlu_data.md - Contains the training data for each intent. Some of the
training data may contain entities
        Limitations:
- Rasa has the ability to have custom actions, which is not supported by
the converter.
- Rasa has the ability to handle multiple intents per query, while Mindmeld
does not.
- Rasa training data may be json format, which is not currently supported.
- Rasa has a feature called Rasa forms which is not currently supported.
        - Rasa's configuration files are not transferred; instead, generic Mindmeld
configuration files are copied over.
"""
# Create project directory with sub folders
self.create_mindmeld_directory(self.mindmeld_project_directory)
        # Transfer over training data from the Rasa project and reformat for the Mindmeld project
self.create_mindmeld_training_data()
file_loc = os.path.dirname(os.path.realpath(__file__))
self.create_main(self.mindmeld_project_directory, file_loc)
self.create_mindmeld_init()
self.create_config(self.mindmeld_project_directory, file_loc)
self.create_custom_features(self.mindmeld_project_directory, file_loc)
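# A minimal usage sketch added for illustration (not part of the original
# module). It assumes the converter is constructed with the source Rasa
# project path and the target Mindmeld project path; the real constructor
# signature is defined earlier in this file and may differ.
#
#   converter = RasaConverter('path/to/rasa_project', 'path/to/mindmeld_project')
#   converter.convert_project()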
| 43.420233
| 97
| 0.60086
|
19e33eaabc8002583190512848d1b97981cece8b
| 2,557
|
py
|
Python
|
dizoo/multiagent_particle/envs/multiagent/multi_discrete.py
|
LuciusMos/DI-engine
|
b040b1c36afce038effec9eb483f625131573824
|
[
"Apache-2.0"
] | 464
|
2021-07-08T07:26:33.000Z
|
2022-03-31T12:35:16.000Z
|
dizoo/multiagent_particle/envs/multiagent/multi_discrete.py
|
LuciusMos/DI-engine
|
b040b1c36afce038effec9eb483f625131573824
|
[
"Apache-2.0"
] | 177
|
2021-07-09T08:22:55.000Z
|
2022-03-31T07:35:22.000Z
|
dizoo/multiagent_particle/envs/multiagent/multi_discrete.py
|
LuciusMos/DI-engine
|
b040b1c36afce038effec9eb483f625131573824
|
[
"Apache-2.0"
] | 92
|
2021-07-08T12:16:37.000Z
|
2022-03-31T09:24:41.000Z
|
# An old version of OpenAI Gym's multi_discrete.py. (Was getting affected by Gym updates)
# (https://github.com/openai/gym/blob/1fb81d4e3fb780ccf77fec731287ba07da35eb84/gym/spaces/multi_discrete.py)
import numpy as np
import gym
#from gym.spaces import prng
class MultiDiscrete(gym.Space):
"""
- The multi-discrete action space consists of a series of discrete action spaces with different parameters
- It can be adapted to both a Discrete action space or a continuous (Box) action space
- It is useful to represent game controllers or keyboards where each key can be represented as a discrete
action space
- It is parametrized by passing an array of arrays containing [min, max] for each discrete action space
where the discrete action space can take any integers from `min` to `max` (both inclusive)
    Note: A value of 0 always needs to represent the NOOP action.
e.g. Nintendo Game Controller
- Can be conceptualized as 3 discrete action spaces:
1) Arrow Keys: Discrete 5 - NOOP[0], UP[1], RIGHT[2], DOWN[3], LEFT[4] - params: min: 0, max: 4
2) Button A: Discrete 2 - NOOP[0], Pressed[1] - params: min: 0, max: 1
3) Button B: Discrete 2 - NOOP[0], Pressed[1] - params: min: 0, max: 1
- Can be initialized as
MultiDiscrete([ [0,4], [0,1], [0,1] ])
"""
def __init__(self, array_of_param_array):
self.low = np.array([x[0] for x in array_of_param_array])
self.high = np.array([x[1] for x in array_of_param_array])
self.num_discrete_space = self.low.shape[0]
    def sample(self):
        """ Returns an array with one sample from each discrete action space """
        # For each row: floor(random * (max - min + 1) + min)
#random_array = prng.np_random.rand(self.num_discrete_space)
np_random = np.random.RandomState()
random_array = np_random.rand(self.num_discrete_space)
return [int(x) for x in np.floor(np.multiply((self.high - self.low + 1.), random_array) + self.low)]
def contains(self, x):
return len(x) == self.num_discrete_space and (np.array(x) >= self.low).all() and (np.array(x) <=
self.high).all()
@property
def shape(self):
return self.num_discrete_space
def __repr__(self):
return "MultiDiscrete" + str(self.num_discrete_space)
def __eq__(self, other):
return np.array_equal(self.low, other.low) and np.array_equal(self.high, other.high)
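# A small usage sketch added for illustration (not part of the original file),
# mirroring the Nintendo controller example from the class docstring.
if __name__ == '__main__':
    space = MultiDiscrete([[0, 4], [0, 1], [0, 1]])
    action = space.sample()          # e.g. [2, 0, 1]
    assert space.contains(action)    # sampled values stay within [low, high]
    assert space.shape == 3          # one entry per discrete sub-space
    print(space)                     # prints "MultiDiscrete3"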
| 47.351852
| 110
| 0.650763
|
c30fa4d213ccd71b1af04ff23006d7e4c6ff3605
| 199,221
|
py
|
Python
|
Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
|
AdvancedThreatAnalytics/content
|
0ae9a9f8e94f227161c808341619044489484dae
|
[
"MIT"
] | null | null | null |
Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
|
AdvancedThreatAnalytics/content
|
0ae9a9f8e94f227161c808341619044489484dae
|
[
"MIT"
] | null | null | null |
Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
|
AdvancedThreatAnalytics/content
|
0ae9a9f8e94f227161c808341619044489484dae
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import demistomock as demisto
import copy
import json
import re
import os
import sys
import requests
from pytest import raises, mark
import pytest
import warnings
from CommonServerPython import xml2json, json2xml, entryTypes, formats, tableToMarkdown, underscoreToCamelCase, \
flattenCell, date_to_timestamp, datetime, camelize, pascalToSpace, argToList, \
remove_nulls_from_dictionary, is_error, get_error, hash_djb2, fileResult, is_ip_valid, get_demisto_version, \
IntegrationLogger, parse_date_string, IS_PY3, DebugLogger, b64_encode, parse_date_range, return_outputs, \
argToBoolean, ipv4Regex, ipv4cidrRegex, ipv6cidrRegex, ipv6Regex, batch, FeedIndicatorType, \
encode_string_results, safe_load_json, remove_empty_elements, aws_table_to_markdown, is_demisto_version_ge, \
appendContext, auto_detect_indicator_type, handle_proxy, get_demisto_version_as_str, get_x_content_info_headers, \
url_to_clickable_markdown, WarningsHandler, DemistoException, SmartGetDict
import CommonServerPython
try:
from StringIO import StringIO
except ImportError:
# Python 3
from io import StringIO # noqa
INFO = {'b': 1,
'a': {
'safd': 3,
'b': [
{'c': {'d': 432}, 'd': 2},
{'c': {'f': 1}},
{'b': 1234},
{'c': {'d': 4567}},
{'c': {'d': 11}},
{'c': {'d': u'asdf'}}],
'c': {'d': 10},
}
}
@pytest.fixture()
def clear_version_cache():
"""
Clear the version cache at end of the test (in case we mocked demisto.serverVersion)
"""
yield
get_demisto_version._version = None
@pytest.fixture(autouse=True)
def handle_calling_context(mocker):
mocker.patch.object(CommonServerPython, 'get_integration_name', return_value='Test')
def test_xml():
import json
xml = b"<work><employee><id>100</id><name>foo</name></employee><employee><id>200</id><name>goo</name>" \
b"</employee></work>"
jsonExpected = '{"work": {"employee": [{"id": "100", "name": "foo"}, {"id": "200", "name": "goo"}]}}'
jsonActual = xml2json(xml)
assert jsonActual == jsonExpected, "expected\n" + jsonExpected + "\n to equal \n" + jsonActual
jsonDict = json.loads(jsonActual)
assert jsonDict['work']['employee'][0]['id'] == "100", 'id of first employee must be 100'
assert jsonDict['work']['employee'][1]['name'] == "goo", 'name of second employee must be goo'
xmlActual = json2xml(jsonActual)
assert xmlActual == xml, "expected:\n{}\nto equal:\n{}".format(xml, xmlActual)
def toEntry(table):
return {
'Type': entryTypes['note'],
'Contents': table,
'ContentsFormat': formats['table'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': table
}
DATA = [
{
'header_1': 'a1',
'header_2': 'b1',
'header_3': 'c1'
},
{
'header_1': 'a2',
'header_2': 'b2',
'header_3': 'c2'
},
{
'header_1': 'a3',
'header_2': 'b3',
'header_3': 'c3'
}
]
TABLE_TO_MARKDOWN_ONLY_DATA_PACK = [
(
DATA,
'''### tableToMarkdown test
|header_1|header_2|header_3|
|---|---|---|
| a1 | b1 | c1 |
| a2 | b2 | c2 |
| a3 | b3 | c3 |
'''
),
(
[
{
'header_1|with_pipe': 'a1',
'header_2': 'b1',
},
{
'header_1|with_pipe': 'a2',
'header_2': 'b2',
}
],
'''### tableToMarkdown test
|header_1\\|with_pipe|header_2|
|---|---|
| a1 | b1 |
| a2 | b2 |
'''
)
]
DATA_WITH_URLS = [(
[
{
'header_1': 'a1',
'url1': 'b1',
'url2': 'c1'
},
{
'header_1': 'a2',
'url1': 'b2',
'url2': 'c2'
},
{
'header_1': 'a3',
'url1': 'b3',
'url2': 'c3'
}
],
'''### tableToMarkdown test
|header_1|url1|url2|
|---|---|---|
| a1 | [b1](b1) | [c1](c1) |
| a2 | [b2](b2) | [c2](c2) |
| a3 | [b3](b3) | [c3](c3) |
'''
)]
COMPLEX_DATA_WITH_URLS = [(
[
{'data':
{'id': '1',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': 'url'
}
]
},
'links': ['link']
}
},
{'data':
{'id': '2',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': 'url'
}
]
},
'links': ['link']
}
}
],
[
{'data':
{'id': '1',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': '[url](url)'
}
]
},
'links': ['[link](link)']
}
},
{'data':
{'id': '2',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': '[url](url)'
}
]
},
'links': ['[link](link)']
}
}
])]
@pytest.mark.parametrize('data, expected_table', TABLE_TO_MARKDOWN_ONLY_DATA_PACK)
def test_tbl_to_md_only_data(data, expected_table):
# sanity
table = tableToMarkdown('tableToMarkdown test', data)
assert table == expected_table
def test_tbl_to_md_header_transform_underscoreToCamelCase():
# header transform
table = tableToMarkdown('tableToMarkdown test with headerTransform', DATA,
headerTransform=underscoreToCamelCase)
expected_table = '''### tableToMarkdown test with headerTransform
|Header1|Header2|Header3|
|---|---|---|
| a1 | b1 | c1 |
| a2 | b2 | c2 |
| a3 | b3 | c3 |
'''
assert table == expected_table
def test_tbl_to_md_multiline():
# escaping characters: multiline + md-chars
data = copy.deepcopy(DATA)
for i, d in enumerate(data):
d['header_2'] = 'b%d.1\nb%d.2' % (i + 1, i + 1,)
d['header_3'] = 'c%d|1' % (i + 1,)
table = tableToMarkdown('tableToMarkdown test with multiline', data)
expected_table = '''### tableToMarkdown test with multiline
|header_1|header_2|header_3|
|---|---|---|
| a1 | b1.1<br>b1.2 | c1\\|1 |
| a2 | b2.1<br>b2.2 | c2\\|1 |
| a3 | b3.1<br>b3.2 | c3\\|1 |
'''
assert table == expected_table
def test_tbl_to_md_url():
# url + empty data
data = copy.deepcopy(DATA)
for i, d in enumerate(data):
d['header_3'] = '[url](https:\\demisto.com)'
d['header_2'] = None
table_url_missing_info = tableToMarkdown('tableToMarkdown test with url and missing info', data)
expected_table_url_missing_info = '''### tableToMarkdown test with url and missing info
|header_1|header_2|header_3|
|---|---|---|
| a1 | | [url](https:\\demisto.com) |
| a2 | | [url](https:\\demisto.com) |
| a3 | | [url](https:\\demisto.com) |
'''
assert table_url_missing_info == expected_table_url_missing_info
def test_tbl_to_md_single_column():
# single column table
table_single_column = tableToMarkdown('tableToMarkdown test with single column', DATA, ['header_1'])
expected_table_single_column = '''### tableToMarkdown test with single column
|header_1|
|---|
| a1 |
| a2 |
| a3 |
'''
assert table_single_column == expected_table_single_column
def test_is_ip_valid():
valid_ip_v6 = "FE80:0000:0000:0000:0202:B3FF:FE1E:8329"
valid_ip_v6_b = "FE80::0202:B3FF:FE1E:8329"
invalid_ip_v6 = "KKKK:0000:0000:0000:0202:B3FF:FE1E:8329"
valid_ip_v4 = "10.10.10.10"
invalid_ip_v4 = "10.10.10.9999"
invalid_not_ip_with_ip_structure = "1.1.1.1.1.1.1.1.1.1.1.1.1.1.1"
not_ip = "Demisto"
assert not is_ip_valid(valid_ip_v6)
assert is_ip_valid(valid_ip_v6, True)
assert is_ip_valid(valid_ip_v6_b, True)
assert not is_ip_valid(invalid_ip_v6, True)
assert not is_ip_valid(not_ip, True)
assert is_ip_valid(valid_ip_v4)
assert not is_ip_valid(invalid_ip_v4)
assert not is_ip_valid(invalid_not_ip_with_ip_structure)
def test_tbl_to_md_list_values():
# list values
data = copy.deepcopy(DATA)
for i, d in enumerate(data):
d['header_3'] = [i + 1, 'second item']
d['header_2'] = 'hi'
table_list_field = tableToMarkdown('tableToMarkdown test with list field', data)
expected_table_list_field = '''### tableToMarkdown test with list field
|header_1|header_2|header_3|
|---|---|---|
| a1 | hi | 1,<br>second item |
| a2 | hi | 2,<br>second item |
| a3 | hi | 3,<br>second item |
'''
assert table_list_field == expected_table_list_field
def test_tbl_to_md_empty_fields():
# all fields are empty
data = [
{
'a': None,
'b': None,
'c': None,
} for _ in range(3)
]
table_all_none = tableToMarkdown('tableToMarkdown test with all none fields', data)
expected_table_all_none = '''### tableToMarkdown test with all none fields
|a|b|c|
|---|---|---|
| | | |
| | | |
| | | |
'''
assert table_all_none == expected_table_all_none
# all fields are empty - removed
table_all_none2 = tableToMarkdown('tableToMarkdown test with all none fields2', data, removeNull=True)
expected_table_all_none2 = '''### tableToMarkdown test with all none fields2
**No entries.**
'''
assert table_all_none2 == expected_table_all_none2
def test_tbl_to_md_header_not_on_first_object():
# header not on first object
data = copy.deepcopy(DATA)
data[1]['extra_header'] = 'sample'
table_extra_header = tableToMarkdown('tableToMarkdown test with extra header', data,
headers=['header_1', 'header_2', 'extra_header'])
expected_table_extra_header = '''### tableToMarkdown test with extra header
|header_1|header_2|extra_header|
|---|---|---|
| a1 | b1 | |
| a2 | b2 | sample |
| a3 | b3 | |
'''
assert table_extra_header == expected_table_extra_header
def test_tbl_to_md_no_header():
# no header
table_no_headers = tableToMarkdown('tableToMarkdown test with no headers', DATA,
headers=['no', 'header', 'found'], removeNull=True)
expected_table_no_headers = '''### tableToMarkdown test with no headers
**No entries.**
'''
assert table_no_headers == expected_table_no_headers
def test_tbl_to_md_dict_value():
# dict value
data = copy.deepcopy(DATA)
data[1]['extra_header'] = {'sample': 'qwerty', 'sample2': 'asdf'}
table_dict_record = tableToMarkdown('tableToMarkdown test with dict record', data,
headers=['header_1', 'header_2', 'extra_header'])
expected_dict_record = '''### tableToMarkdown test with dict record
|header_1|header_2|extra_header|
|---|---|---|
| a1 | b1 | |
| a2 | b2 | sample: qwerty<br>sample2: asdf |
| a3 | b3 | |
'''
assert table_dict_record == expected_dict_record
def test_tbl_to_md_string_header():
# string header (instead of list)
table_string_header = tableToMarkdown('tableToMarkdown string header', DATA, 'header_1')
expected_string_header_tbl = '''### tableToMarkdown string header
|header_1|
|---|
| a1 |
| a2 |
| a3 |
'''
assert table_string_header == expected_string_header_tbl
def test_tbl_to_md_list_of_strings_instead_of_dict():
# list of string values instead of list of dict objects
table_string_array = tableToMarkdown('tableToMarkdown test with string array', ['foo', 'bar', 'katz'], ['header_1'])
expected_string_array_tbl = '''### tableToMarkdown test with string array
|header_1|
|---|
| foo |
| bar |
| katz |
'''
assert table_string_array == expected_string_array_tbl
def test_tbl_to_md_list_of_strings_instead_of_dict_and_string_header():
# combination: string header + string values list
table_string_array_string_header = tableToMarkdown('tableToMarkdown test with string array and string header',
['foo', 'bar', 'katz'], 'header_1')
expected_string_array_string_header_tbl = '''### tableToMarkdown test with string array and string header
|header_1|
|---|
| foo |
| bar |
| katz |
'''
assert table_string_array_string_header == expected_string_array_string_header_tbl
def test_tbl_to_md_dict_with_special_character():
data = {
'header_1': u'foo',
'header_2': [u'\xe2.rtf']
}
table_with_character = tableToMarkdown('tableToMarkdown test with special character', data)
expected_string_with_special_character = '''### tableToMarkdown test with special character
|header_1|header_2|
|---|---|
| foo | â.rtf |
'''
assert table_with_character == expected_string_with_special_character
def test_tbl_to_md_header_with_special_character():
data = {
'header_1': u'foo'
}
table_with_character = tableToMarkdown('tableToMarkdown test with special character Ù', data)
expected_string_with_special_character = '''### tableToMarkdown test with special character Ù
|header_1|
|---|
| foo |
'''
assert table_with_character == expected_string_with_special_character
@pytest.mark.parametrize('data, expected_table', DATA_WITH_URLS)
def test_tbl_to_md_clickable_url(data, expected_table):
table = tableToMarkdown('tableToMarkdown test', data, url_keys=['url1', 'url2'])
assert table == expected_table
def test_tbl_keep_headers_list():
headers = ['header_1', 'header_2']
data = {
'header_1': 'foo'
}
table = tableToMarkdown('tableToMarkdown test', data, removeNull=True, headers=headers)
assert 'header_2' not in table
assert headers == ['header_1', 'header_2']
@pytest.mark.parametrize('data, expected_data', COMPLEX_DATA_WITH_URLS)
def test_url_to_clickable_markdown(data, expected_data):
table = url_to_clickable_markdown(data, url_keys=['url', 'links'])
assert table == expected_data
def test_flatten_cell():
# sanity
utf8_to_flatten = b'abcdefghijklmnopqrstuvwxyz1234567890!'.decode('utf8')
flatten_text = flattenCell(utf8_to_flatten)
expected_string = 'abcdefghijklmnopqrstuvwxyz1234567890!'
assert flatten_text == expected_string
    # list of utf8 and string to flatten
str_a = b'abcdefghijklmnopqrstuvwxyz1234567890!'
utf8_b = str_a.decode('utf8')
list_to_flatten = [str_a, utf8_b]
flatten_text2 = flattenCell(list_to_flatten)
expected_flatten_string = 'abcdefghijklmnopqrstuvwxyz1234567890!,\nabcdefghijklmnopqrstuvwxyz1234567890!'
assert flatten_text2 == expected_flatten_string
# special character test
special_char = u'会'
list_of_special = [special_char, special_char]
flattenCell(list_of_special)
flattenCell(special_char)
# dictionary test
dict_to_flatten = {'first': u'会'}
expected_flatten_dict = u'{\n "first": "\u4f1a"\n}'
assert flattenCell(dict_to_flatten) == expected_flatten_dict
def test_hash_djb2():
assert hash_djb2("test") == 2090756197, "Invalid value of hash_djb2"
def test_camelize():
non_camalized = [{'chookity_bop': 'asdasd'}, {'ab_c': 'd e', 'fgh_ijk': 'lm', 'nop': 'qr_st'}]
expected_output_upper_camel = [{'ChookityBop': 'asdasd'}, {'AbC': 'd e', 'Nop': 'qr_st', 'FghIjk': 'lm'}]
expected_output_lower_camel = [{'chookityBop': 'asdasd'}, {'abC': 'd e', 'nop': 'qr_st', 'fghIjk': 'lm'}]
assert camelize(non_camalized, '_') == expected_output_upper_camel
assert camelize(non_camalized, '_', upper_camel=True) == expected_output_upper_camel
assert camelize(non_camalized, '_', upper_camel=False) == expected_output_lower_camel
non_camalized2 = {'ab_c': 'd e', 'fgh_ijk': 'lm', 'nop': 'qr_st'}
expected_output2_upper_camel = {'AbC': 'd e', 'Nop': 'qr_st', 'FghIjk': 'lm'}
expected_output2_lower_camel = {'abC': 'd e', 'nop': 'qr_st', 'fghIjk': 'lm'}
assert camelize(non_camalized2, '_') == expected_output2_upper_camel
assert camelize(non_camalized2, '_', upper_camel=True) == expected_output2_upper_camel
assert camelize(non_camalized2, '_', upper_camel=False) == expected_output2_lower_camel
def test_camelize_string():
from CommonServerPython import camelize_string
non_camalized = ['chookity_bop', 'ab_c', 'fgh_ijk', 'nop']
expected_output_upper_camel = ['ChookityBop', 'AbC', 'FghIjk', 'Nop']
expected_output_lower_camel = ['chookityBop', 'abC', 'fghIjk', 'nop']
for i in range(len(non_camalized)):
assert camelize_string(non_camalized[i], '_') == expected_output_upper_camel[i]
assert camelize_string(non_camalized[i], '_', upper_camel=True) == expected_output_upper_camel[i]
assert camelize_string(non_camalized[i], '_', upper_camel=False) == expected_output_lower_camel[i]
def test_underscoreToCamelCase():
from CommonServerPython import underscoreToCamelCase
non_camalized = ['chookity_bop', 'ab_c', 'fgh_ijk', 'nop']
expected_output_upper_camel = ['ChookityBop', 'AbC', 'FghIjk', 'Nop']
expected_output_lower_camel = ['chookityBop', 'abC', 'fghIjk', 'nop']
for i in range(len(non_camalized)):
assert underscoreToCamelCase(non_camalized[i]) == expected_output_upper_camel[i]
assert underscoreToCamelCase(non_camalized[i], upper_camel=True) == expected_output_upper_camel[i]
assert underscoreToCamelCase(non_camalized[i], upper_camel=False) == expected_output_lower_camel[i]
# Note this test will fail when run locally (in pycharm/vscode) as it assumes the machine (docker image) has UTC timezone set
def test_date_to_timestamp():
assert date_to_timestamp('2018-11-06T08:56:41') == 1541494601000
assert date_to_timestamp(datetime.strptime('2018-11-06T08:56:41', "%Y-%m-%dT%H:%M:%S")) == 1541494601000
def test_pascalToSpace():
use_cases = [
('Validate', 'Validate'),
('validate', 'Validate'),
('TCP', 'TCP'),
('eventType', 'Event Type'),
('eventID', 'Event ID'),
('eventId', 'Event Id'),
('IPAddress', 'IP Address'),
]
for s, expected in use_cases:
assert pascalToSpace(s) == expected, 'Error on {} != {}'.format(pascalToSpace(s), expected)
def test_safe_load_json():
valid_json_str = '{"foo": "bar"}'
expected_valid_json_result = {u'foo': u'bar'}
assert expected_valid_json_result == safe_load_json(valid_json_str)
def test_remove_empty_elements():
test_dict = {
"foo": "bar",
"baz": {},
"empty": [],
"nested_dict": {
"empty_list": [],
"hummus": "pita"
},
"nested_list": {
"more_empty_list": []
}
}
expected_result = {
"foo": "bar",
"nested_dict": {
"hummus": "pita"
}
}
assert expected_result == remove_empty_elements(test_dict)
@pytest.mark.parametrize('header,raw_input,expected_output', [
('AWS DynamoDB DescribeBackup', {
'BackupDescription': {
"Foo": "Bar",
"Baz": "Bang",
"TestKey": "TestValue"
}
}, '''### AWS DynamoDB DescribeBackup\n|Baz|Foo|TestKey|\n|---|---|---|\n| Bang | Bar | TestValue |\n'''),
('Empty Results', {'key': []}, '### Empty Results\n**No entries.**\n')
])
def test_aws_table_to_markdown(header, raw_input, expected_output):
"""
Given
- A header and a dict with two levels
- A header and a dict with one key pointing to an empty list
When
- Creating a markdown table using the aws_table_to_markdown function
Ensure
- The header appears as a markdown header and the dictionary is translated to a markdown table
- The header appears as a markdown header and "No entries" text appears instead of a markdown table"
"""
assert aws_table_to_markdown(raw_input, header) == expected_output
def test_argToList():
expected = ['a', 'b', 'c']
test1 = ['a', 'b', 'c']
test2 = 'a,b,c'
test3 = '["a","b","c"]'
test4 = 'a;b;c'
test5 = 1
test6 = '1'
test7 = True
results = [argToList(test1), argToList(test2), argToList(test2, ','), argToList(test3), argToList(test4, ';')]
for result in results:
assert expected == result, 'argToList test failed, {} is not equal to {}'.format(str(result), str(expected))
assert argToList(test5) == [1]
assert argToList(test6) == ['1']
assert argToList(test7) == [True]
def test_remove_nulls():
temp_dictionary = {"a": "b", "c": 4, "e": [], "f": {}, "g": None, "h": "", "i": [1], "k": ()}
expected_dictionary = {"a": "b", "c": 4, "i": [1]}
remove_nulls_from_dictionary(temp_dictionary)
assert expected_dictionary == temp_dictionary, \
"remove_nulls_from_dictionary test failed, {} is not equal to {}".format(str(temp_dictionary),
str(expected_dictionary))
def test_is_error_true():
execute_command_results = [
{
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
]
assert is_error(execute_command_results)
def test_is_error_none():
assert not is_error(None)
def test_is_error_single_entry():
execute_command_results = {
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
assert is_error(execute_command_results)
def test_is_error_false():
execute_command_results = [
{
"Type": entryTypes["note"],
"ContentsFormat": formats["text"],
"Contents": "this is regular note"
}
]
assert not is_error(execute_command_results)
def test_not_error_entry():
execute_command_results = "invalid command results as string"
assert not is_error(execute_command_results)
def test_get_error():
execute_command_results = [
{
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
]
error = get_error(execute_command_results)
assert error == "this is error message"
def test_get_error_single_entry():
execute_command_results = {
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
error = get_error(execute_command_results)
assert error == "this is error message"
def test_get_error_need_raise_error_on_non_error_input():
execute_command_results = [
{
"Type": entryTypes["note"],
"ContentsFormat": formats["text"],
"Contents": "this is not an error"
}
]
try:
get_error(execute_command_results)
except ValueError as exception:
assert "execute_command_result has no error entry. before using get_error use is_error" in str(exception)
return
assert False
@mark.parametrize('data,data_expected', [
("this is a test", b"this is a test"),
(u"עברית", u"עברית".encode('utf-8')),
(b"binary data\x15\x00", b"binary data\x15\x00"),
]) # noqa: E124
def test_fileResult(mocker, request, data, data_expected):
mocker.patch.object(demisto, 'uniqueFile', return_value="test_file_result")
mocker.patch.object(demisto, 'investigation', return_value={'id': '1'})
file_name = "1_test_file_result"
def cleanup():
try:
os.remove(file_name)
except OSError:
pass
request.addfinalizer(cleanup)
res = fileResult("test.txt", data)
assert res['File'] == "test.txt"
with open(file_name, 'rb') as f:
assert f.read() == data_expected
# Error that always returns a unicode string as its str representation
class SpecialErr(Exception):
def __str__(self):
return u"מיוחד"
def test_logger():
from CommonServerPython import LOG
LOG(u'€')
LOG(Exception(u'€'))
LOG(SpecialErr(12))
def test_logger_write(mocker):
mocker.patch.object(demisto, 'params', return_value={
'credentials': {'password': 'my_password'},
})
mocker.patch.object(demisto, 'info')
ilog = IntegrationLogger()
ilog.write("This is a test with my_password")
ilog.print_log()
# assert that the print doesn't contain my_password
# call_args is tuple (args list, kwargs). we only need the args
args = demisto.info.call_args[0]
assert 'This is a test' in args[0]
assert 'my_password' not in args[0]
assert '<XX_REPLACED>' in args[0]
def test_logger_init_key_name(mocker):
mocker.patch.object(demisto, 'params', return_value={
'key': {'password': 'my_password'},
'secret': 'my_secret'
})
mocker.patch.object(demisto, 'info')
ilog = IntegrationLogger()
ilog.write("This is a test with my_password and my_secret")
ilog.print_log()
# assert that the print doesn't contain my_password
# call_args is tuple (args list, kwargs). we only need the args
args = demisto.info.call_args[0]
assert 'This is a test' in args[0]
assert 'my_password' not in args[0]
assert 'my_secret' not in args[0]
assert '<XX_REPLACED>' in args[0]
def test_logger_replace_strs(mocker):
mocker.patch.object(demisto, 'params', return_value={
'apikey': 'my_apikey',
})
ilog = IntegrationLogger()
ilog.add_replace_strs('special_str', '') # also check that empty string is not added by mistake
ilog('my_apikey is special_str and b64: ' + b64_encode('my_apikey'))
assert ('' not in ilog.replace_strs)
assert ilog.messages[0] == '<XX_REPLACED> is <XX_REPLACED> and b64: <XX_REPLACED>'
TEST_SSH_KEY_ESC = '-----BEGIN OPENSSH PRIVATE KEY-----\\nb3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAACFw' \
'AAAAdzc2gtcn\\n-----END OPENSSH PRIVATE KEY-----'
TEST_SSH_KEY = '-----BEGIN OPENSSH PRIVATE KEY-----\nb3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAACFw' \
'AAAAdzc2gtcn\n-----END OPENSSH PRIVATE KEY-----'
TEST_PASS_JSON_CHARS = 'json_chars'
SENSITIVE_PARAM = {
'app': None,
'authentication': {
'credential': '',
'credentials': {
'id': '',
'locked': False,
'modified': '0001-01-01T00: 00: 00Z',
'name': '',
'password': 'cred_pass',
'sortValues': None,
'sshkey': TEST_SSH_KEY,
'sshkeyEsc': TEST_SSH_KEY_ESC,
'sshkeyPass': 'ssh_key_secret_pass',
'user': '',
'vaultInstanceId': '',
'version': 0,
'workgroup': ''
},
'identifier': 'admin',
'password': 'ident_pass',
'passwordChanged': False
},
'password': TEST_PASS_JSON_CHARS + '\\"',
}
def test_logger_replace_strs_credentials(mocker):
mocker.patch.object(demisto, 'params', return_value=SENSITIVE_PARAM)
basic_auth = b64_encode('{}:{}'.format(SENSITIVE_PARAM['authentication']['identifier'], SENSITIVE_PARAM['authentication']['password']))
ilog = IntegrationLogger()
# log some secrets
ilog('my cred pass: cred_pass. my ssh key: ssh_key_secret. my ssh key: {}.'
'my ssh key: {}. my ssh pass: ssh_key_secret_pass. ident: ident_pass.'
' basic auth: {}'.format(TEST_SSH_KEY, TEST_SSH_KEY_ESC, basic_auth))
for s in ('cred_pass', TEST_SSH_KEY, TEST_SSH_KEY_ESC, 'ssh_key_secret_pass', 'ident_pass', basic_auth):
assert s not in ilog.messages[0]
def test_debug_logger_replace_strs(mocker):
mocker.patch.object(demisto, 'params', return_value=SENSITIVE_PARAM)
debug_logger = DebugLogger()
debug_logger.int_logger.set_buffering(True)
debug_logger.log_start_debug()
msg = debug_logger.int_logger.messages[0]
assert 'debug-mode started' in msg
assert 'Params:' in msg
for s in ('cred_pass', 'ssh_key_secret', 'ssh_key_secret_pass', 'ident_pass', TEST_SSH_KEY,
TEST_SSH_KEY_ESC, TEST_PASS_JSON_CHARS):
assert s not in msg
def test_build_curl_post_noproxy():
"""
Given:
- HTTP client log messages of POST query
- Proxy is not used and insecure is not checked
When
- Building curl query
Then
- Ensure curl is generated as expected
"""
ilog = IntegrationLogger()
ilog.build_curl("send: b'POST /api HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"data\": \"value\"}'")
assert ilog.curl == [
'curl -X POST https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"data": "value"}\''
]
def test_build_curl_post_xml():
"""
Given:
- HTTP client log messages of POST query with XML body
- Proxy is not used and insecure is not checked
When
- Building curl query
Then
- Ensure curl is generated as expected
"""
ilog = IntegrationLogger()
ilog.build_curl("send: b'POST /api HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'<?xml version=\"1.0\" encoding=\"utf-8\"?>'")
assert ilog.curl == [
'curl -X POST https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'<?xml version="1.0" encoding="utf-8"?>\''
]
def test_build_curl_get_withproxy(mocker):
"""
Given:
- HTTP client log messages of GET query
- Proxy used and insecure checked
When
- Building curl query
Then
- Ensure curl is generated as expected
"""
mocker.patch.object(demisto, 'params', return_value={
'proxy': True,
'insecure': True
})
os.environ['https_proxy'] = 'http://proxy'
ilog = IntegrationLogger()
ilog.build_curl("send: b'GET /api HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"data\": \"value\"}'")
assert ilog.curl == [
'curl -X GET https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--proxy http://proxy -k -d \'{"data": "value"}\''
]
def test_build_curl_multiple_queries():
"""
Given:
- HTTP client log messages of POST and GET queries
- Proxy is not used and insecure is not checked
When
- Building curl query
Then
- Ensure two curl queries are generated as expected
"""
ilog = IntegrationLogger()
ilog.build_curl("send: b'POST /api/post HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"postdata\": \"value\"}'")
ilog.build_curl("send: b'GET /api/get HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"getdata\": \"value\"}'")
assert ilog.curl == [
'curl -X POST https://demisto.com/api/post -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"postdata": "value"}\'',
'curl -X GET https://demisto.com/api/get -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"getdata": "value"}\''
]
def test_is_mac_address():
from CommonServerPython import is_mac_address
mac_address_false = 'AA:BB:CC:00:11'
mac_address_true = 'AA:BB:CC:00:11:22'
assert (is_mac_address(mac_address_false) is False)
assert (is_mac_address(mac_address_true))
def test_return_error_command(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
outputs = {'output': 'error'}
expected_error = {
'Type': entryTypes['error'],
'ContentsFormat': formats['text'],
'Contents': err_msg,
"EntryContext": outputs
}
# Test command that is not fetch-incidents
mocker.patch.object(demisto, 'command', return_value="test-command")
mocker.patch.object(sys, 'exit')
mocker.spy(demisto, 'results')
return_error(err_msg, '', outputs)
assert str(demisto.results.call_args) == "call({})".format(expected_error)
def test_return_error_fetch_incidents(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test fetch-incidents
mocker.patch.object(demisto, 'command', return_value="fetch-incidents")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_fetch_credentials(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test fetch-credentials
mocker.patch.object(demisto, 'command', return_value="fetch-credentials")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_fetch_indicators(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test fetch-indicators
mocker.patch.object(demisto, 'command', return_value="fetch-indicators")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_long_running_execution(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test long-running-execution
mocker.patch.object(demisto, 'command', return_value="long-running-execution")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_script(mocker, monkeypatch):
from CommonServerPython import return_error
mocker.patch.object(sys, 'exit')
mocker.spy(demisto, 'results')
monkeypatch.delattr(demisto, 'command')
err_msg = "Testing unicode Ё"
outputs = {'output': 'error'}
expected_error = {
'Type': entryTypes['error'],
'ContentsFormat': formats['text'],
'Contents': err_msg,
"EntryContext": outputs
}
assert not hasattr(demisto, 'command')
return_error(err_msg, '', outputs)
assert str(demisto.results.call_args) == "call({})".format(expected_error)
def test_exception_in_return_error(mocker):
from CommonServerPython import return_error, IntegrationLogger
expected = {'EntryContext': None, 'Type': 4, 'ContentsFormat': 'text', 'Contents': 'Message'}
mocker.patch.object(demisto, 'results')
mocker.patch.object(IntegrationLogger, '__call__', return_value='Message')
with raises(SystemExit, match='0'):
return_error("Message", error=ValueError("Error!"))
results = demisto.results.call_args[0][0]
assert expected == results
# IntegrationLogger = LOG (2 times if exception supplied)
assert IntegrationLogger.__call__.call_count == 2
def test_return_error_get_modified_remote_data(mocker):
from CommonServerPython import return_error
mocker.patch.object(demisto, 'command', return_value='get-modified-remote-data')
mocker.patch.object(demisto, 'results')
err_msg = 'Test Error'
with raises(SystemExit):
return_error(err_msg)
assert demisto.results.call_args[0][0]['Contents'] == 'skip update. error: ' + err_msg
def test_return_error_get_modified_remote_data_not_implemented(mocker):
from CommonServerPython import return_error
mocker.patch.object(demisto, 'command', return_value='get-modified-remote-data')
mocker.patch.object(demisto, 'results')
err_msg = 'Test Error'
with raises(SystemExit):
try:
raise NotImplementedError('Command not implemented')
except:
return_error(err_msg)
assert demisto.results.call_args[0][0]['Contents'] == err_msg
def test_indicator_type_by_server_version_under_6_1(mocker, clear_version_cache):
"""
Given
    - demisto version mocked to 6.1.0 (below 6.2)
    When
    - getting the indicator type by server version
Then
- Do not remove the STIX indicator type prefix.
"""
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.1.0',
}
)
assert FeedIndicatorType.indicator_type_by_server_version("STIX Attack Pattern") == "STIX Attack Pattern"
def test_indicator_type_by_server_version_6_2(mocker, clear_version_cache):
"""
Given
    - demisto version mocked to 6.2.0
    When
    - getting the indicator type by server version
    Then
    - Return the STIX indicator type without the STIX prefix
"""
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.2.0',
}
)
assert FeedIndicatorType.indicator_type_by_server_version("STIX Attack Pattern") == "Attack Pattern"
def test_assign_params():
from CommonServerPython import assign_params
res = assign_params(a='1', b=True, c=None, d='')
assert res == {'a': '1', 'b': True}
class TestBuildDBotEntry(object):
def test_build_dbot_entry(self):
from CommonServerPython import build_dbot_entry
res = build_dbot_entry('user@example.com', 'Email', 'Vendor', 1)
assert res == {'DBotScore': {'Indicator': 'user@example.com', 'Type': 'email', 'Vendor': 'Vendor', 'Score': 1}}
def test_build_dbot_entry_no_malicious(self):
from CommonServerPython import build_dbot_entry
res = build_dbot_entry('user@example.com', 'Email', 'Vendor', 3, build_malicious=False)
assert res == {'DBotScore': {'Indicator': 'user@example.com', 'Type': 'email', 'Vendor': 'Vendor', 'Score': 3}}
def test_build_dbot_entry_malicious(self):
from CommonServerPython import build_dbot_entry, outputPaths
res = build_dbot_entry('user@example.com', 'Email', 'Vendor', 3, 'Malicious email')
assert res == {
"DBotScore": {
"Vendor": "Vendor",
"Indicator": "user@example.com",
"Score": 3,
"Type": "email"
},
outputPaths['email']: {
"Malicious": {
"Vendor": "Vendor",
"Description": "Malicious email"
},
"Address": "user@example.com"
}
}
def test_build_malicious_dbot_entry_file(self):
from CommonServerPython import build_malicious_dbot_entry, outputPaths
res = build_malicious_dbot_entry('md5hash', 'MD5', 'Vendor', 'Google DNS')
assert res == {
outputPaths['file']:
{"Malicious": {"Vendor": "Vendor", "Description": "Google DNS"}, "MD5": "md5hash"}}
def test_build_malicious_dbot_entry(self):
from CommonServerPython import build_malicious_dbot_entry, outputPaths
res = build_malicious_dbot_entry('8.8.8.8', 'ip', 'Vendor', 'Google DNS')
assert res == {outputPaths['ip']: {
'Address': '8.8.8.8', 'Malicious': {'Vendor': 'Vendor', 'Description': 'Google DNS'}}}
def test_build_malicious_dbot_entry_wrong_indicator_type(self):
from CommonServerPython import build_malicious_dbot_entry, DemistoException
with raises(DemistoException, match='Wrong indicator type'):
build_malicious_dbot_entry('8.8.8.8', 'notindicator', 'Vendor', 'Google DNS')
def test_illegal_dbot_score(self):
from CommonServerPython import build_dbot_entry, DemistoException
with raises(DemistoException, match='illegal DBot score'):
build_dbot_entry('1', 'ip', 'Vendor', 8)
def test_illegal_indicator_type(self):
from CommonServerPython import build_dbot_entry, DemistoException
with raises(DemistoException, match='illegal indicator type'):
build_dbot_entry('1', 'NOTHING', 'Vendor', 2)
def test_file_indicators(self):
from CommonServerPython import build_dbot_entry, outputPaths
res = build_dbot_entry('md5hash', 'md5', 'Vendor', 3)
assert res == {
"DBotScore": {
"Indicator": "md5hash",
"Type": "file",
"Vendor": "Vendor",
"Score": 3
},
outputPaths['file']: {
"MD5": "md5hash",
"Malicious": {
"Vendor": "Vendor",
"Description": None
}
}
}
class TestCommandResults:
def test_outputs_without_outputs_prefix(self):
"""
Given
        - outputs as a list without outputs_prefix
        When
        - Returning results
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import CommandResults
with pytest.raises(ValueError, match='outputs_prefix'):
CommandResults(outputs=[])
def test_dbot_score_is_in_to_context_ip(self):
"""
Given
- IP indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and IP output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = '1.1.1.1'
raw_response = {'id': indicator_id}
indicator = Common.IP(
indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.IP,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='Indicator!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.IP.CONTEXT_PATH in entry_context
def test_dbot_score_is_in_to_context_file(self):
"""
Given
- File indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and File output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = '63347f5d946164a23faca26b78a91e1c'
raw_response = {'id': indicator_id}
indicator = Common.File(
md5=indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.FILE,
'Indicator',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='output!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.File.CONTEXT_PATH in entry_context
def test_dbot_score_is_in_to_context_domain(self):
"""
Given
- domain indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and File output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = 'example.com'
raw_response = {'id': indicator_id}
indicator = Common.Domain(
indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.DOMAIN,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='output!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.Domain.CONTEXT_PATH in entry_context
def test_dbot_score_is_in_to_context_url(self):
"""
Given
- domain indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and File output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = 'https://example.com'
raw_response = {'id': indicator_id}
indicator = Common.URL(
indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.URL,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='output!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.URL.CONTEXT_PATH in entry_context
def test_multiple_outputs_keys(self):
"""
Given
- File has 3 unique keys. sha256, md5 and sha1
When
- creating CommandResults with outputs_key_field=[sha1, sha256, md5]
Then
- entrycontext DT expression contains all 3 unique fields
"""
from CommonServerPython import CommandResults
files = [
{
'sha256': '111',
'sha1': '111',
'md5': '111'
},
{
'sha256': '222',
'sha1': '222',
'md5': '222'
}
]
results = CommandResults(outputs_prefix='File', outputs_key_field=['sha1', 'sha256', 'md5'], outputs=files)
assert list(results.to_context()['EntryContext'].keys())[0] == \
'File(val.sha1 && val.sha1 == obj.sha1 && val.sha256 && val.sha256 == obj.sha256 && val.md5 && val.md5 == obj.md5)'
def test_output_prefix_includes_dt(self):
"""
Given
- Returning File with only outputs_prefix which includes DT in it
- outputs key fields are not provided
When
- creating CommandResults
Then
- EntryContext key should contain only the outputs_prefix
"""
from CommonServerPython import CommandResults
files = [{"key": "value"}] # if outputs is empty list, no results are returned
results = CommandResults(outputs_prefix='File(val.sha1 == obj.sha1 && val.md5 == obj.md5)',
outputs_key_field='', outputs=files)
assert list(results.to_context()['EntryContext'].keys())[0] == \
'File(val.sha1 == obj.sha1 && val.md5 == obj.md5)'
def test_readable_only_context(self):
"""
Given:
- Markdown entry to CommandResults
When:
- Returning results
Then:
- Validate HumanReadable exists
"""
from CommonServerPython import CommandResults
markdown = '## Something'
context = CommandResults(readable_output=markdown).to_context()
assert context.get('HumanReadable') == markdown
def test_empty_outputs(self):
"""
Given:
- Outputs as None
When:
- Returning results
Then:
- Validate EntryContext key value
"""
from CommonServerPython import CommandResults
res = CommandResults(
outputs_prefix='FoundIndicators',
outputs_key_field='value',
outputs=None
)
context = res.to_context()
assert {} == context.get('EntryContext')
def test_empty_list_outputs(self):
"""
Given:
- Outputs with empty list
When:
- Returning results
Then:
- Validate EntryContext key value
"""
from CommonServerPython import CommandResults
res = CommandResults(
outputs_prefix='FoundIndicators',
outputs_key_field='value',
outputs=[]
)
context = res.to_context()
assert {} == context.get('EntryContext')
def test_return_command_results(self, clear_version_cache):
from CommonServerPython import Common, CommandResults, EntryFormat, EntryType, DBotScoreType
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score,
asn='some asn',
hostname='test.com',
geo_country=None,
geo_description=None,
geo_latitude=None,
geo_longitude=None,
positive_engines=None,
detection_engines=None
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[ip]
)
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': None,
'HumanReadable': None,
'EntryContext': {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8',
'ASN': 'some asn',
'Hostname': 'test.com'
}
],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Vendor': 'Test',
'Score': 1,
'Type': 'ip'
}
]
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_multiple_indicators(self, clear_version_cache):
from CommonServerPython import Common, CommandResults, EntryFormat, EntryType, DBotScoreType
dbot_score1 = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip1 = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score1,
asn='some asn',
hostname='test.com',
geo_country=None,
geo_description=None,
geo_latitude=None,
geo_longitude=None,
positive_engines=None,
detection_engines=None
)
dbot_score2 = Common.DBotScore(
indicator='5.5.5.5',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip2 = Common.IP(
ip='5.5.5.5',
dbot_score=dbot_score2,
asn='some asn',
hostname='test.com',
geo_country=None,
geo_description=None,
geo_latitude=None,
geo_longitude=None,
positive_engines=None,
detection_engines=None
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[ip1, ip2]
)
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': None,
'HumanReadable': None,
'EntryContext': {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8',
'ASN': 'some asn',
'Hostname': 'test.com'
},
{
'Address': '5.5.5.5',
'ASN': 'some asn',
'Hostname': 'test.com'
}
],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Vendor': 'Test',
'Score': 1,
'Type': 'ip'
},
{
'Indicator': '5.5.5.5',
'Vendor': 'Test',
'Score': 1,
'Type': 'ip'
}
]
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_return_list_of_items(self, clear_version_cache):
from CommonServerPython import CommandResults, EntryFormat, EntryType
tickets = [
{
'ticket_id': 1,
'title': 'foo'
},
{
'ticket_id': 2,
'title': 'goo'
}
]
results = CommandResults(
outputs_prefix='Jira.Ticket',
outputs_key_field='ticket_id',
outputs=tickets
)
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': tickets,
'HumanReadable': tableToMarkdown('Results', tickets),
'EntryContext': {
'Jira.Ticket(val.ticket_id && val.ticket_id == obj.ticket_id)': tickets
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_return_list_of_items_the_old_way(self):
from CommonServerPython import CommandResults, EntryFormat, EntryType
tickets = [
{
'ticket_id': 1,
'title': 'foo'
},
{
'ticket_id': 2,
'title': 'goo'
}
]
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs={
'Jira.Ticket(val.ticket_id == obj.ticket_id)': tickets
},
raw_response=tickets
)
assert sorted(results.to_context()) == sorted({
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': tickets,
'HumanReadable': None,
'EntryContext': {
'Jira.Ticket(val.ticket_id == obj.ticket_id)': tickets
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
})
def test_create_dbot_score_with_invalid_score(self):
from CommonServerPython import Common, DBotScoreType
try:
Common.DBotScore(
indicator='8.8.8.8',
integration_name='Virus Total',
score=100,
indicator_type=DBotScoreType.IP
)
assert False
except TypeError:
assert True
def test_create_dbot_score_with_invalid_reliability(self):
"""
Given:
- an invalid reliability value.
When
- creating a DBotScore entry
Then
- an error should be raised
"""
from CommonServerPython import Common, DBotScoreType
try:
Common.DBotScore(
indicator='8.8.8.8',
integration_name='Virus Total',
score=0,
indicator_type=DBotScoreType.IP,
reliability='Not a reliability'
)
assert False
except TypeError:
assert True
def test_create_dbot_score_with_valid_reliability(self):
"""
Given:
- a valid reliability value
When
- creating a DBotScore entry
Then
- the proper entry is created
"""
from CommonServerPython import Common, DBotScoreType, DBotScoreReliability, CommandResults
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
score=Common.DBotScore.GOOD,
indicator_type=DBotScoreType.IP,
reliability=DBotScoreReliability.B,
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score,
)
results = CommandResults(
indicator=ip,
)
assert results.to_context()['EntryContext'] == {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8'
}
],
'DBotScore(val.Indicator && val.Indicator == '
'obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Type': 'ip',
'Vendor': 'Test',
'Score': 1,
'Reliability': 'B - Usually reliable'
}
]
}
def test_indicator_timeline_with_list_of_indicators(self):
"""
Given:
- a list of an indicator
When
- creating an IndicatorTimeline object
- creating a CommandResults objects using the IndicatorTimeline object
Then
- the IndicatorTimeline receives the appropriate category and message
"""
from CommonServerPython import CommandResults, IndicatorsTimeline
indicators = ['8.8.8.8']
timeline = IndicatorsTimeline(indicators=indicators, category='test', message='message')
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs=None,
raw_response=indicators,
indicators_timeline=timeline
)
assert sorted(results.to_context().get('IndicatorTimeline')) == sorted([
{'Value': '8.8.8.8', 'Category': 'test', 'Message': 'message'}
])
def test_indicator_timeline_running_from_an_integration(self, mocker):
"""
Given:
- a list of an indicator
When
- mocking the demisto.params()
- creating an IndicatorTimeline object
- creating a CommandResults objects using the IndicatorTimeline object
Then
- the IndicatorTimeline receives the appropriate category and message
"""
from CommonServerPython import CommandResults, IndicatorsTimeline
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
indicators = ['8.8.8.8']
timeline = IndicatorsTimeline(indicators=indicators)
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs=None,
raw_response=indicators,
indicators_timeline=timeline
)
assert sorted(results.to_context().get('IndicatorTimeline')) == sorted([
{'Value': '8.8.8.8', 'Category': 'Integration Update'}
])
def test_single_indicator(self, mocker):
"""
Given:
- a single indicator
When
- mocking the demisto.params()
- creating an Common.IP object
- creating a CommandResults objects using the indicator member
Then
- The CommandResults.to_context() returns single result of standard output IP and DBotScore
"""
from CommonServerPython import CommandResults, Common, DBotScoreType
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score
)
results = CommandResults(
indicator=ip
)
assert results.to_context()['EntryContext'] == {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8'
}
],
'DBotScore(val.Indicator && val.Indicator == '
'obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Type': 'ip',
'Vendor': 'Test',
'Score': 1
}
]
}
def test_single_indicator_with_indicators(self, mocker):
"""
Given:
- a single indicator and a list of indicators
When
- mocking the demisto.params()
- creating an Common.IP object
- creating a CommandResults objects using the indicator member AND indicators member
Then
            - The CommandResults.__init__() should raise a ValueError with an appropriate error message
"""
from CommonServerPython import CommandResults, Common, DBotScoreType
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Virus Total',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score
)
with pytest.raises(ValueError) as e:
CommandResults(
indicator=ip,
indicators=[ip]
)
assert e.value.args[0] == 'indicators is DEPRECATED, use only indicator'
def test_indicator_with_no_auto_extract(self):
"""
Given:
            - a list containing a single indicator
- ignore_auto_extract set to True
When
- creating a CommandResults object with an indicator
- using Ignore Auto Extract
Then
- the IgnoreAutoExtract field is set to True
"""
from CommonServerPython import CommandResults
indicators = ['8.8.8.8']
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs=None,
raw_response=indicators,
indicators_timeline=None,
ignore_auto_extract=True
)
assert results.to_context().get('IgnoreAutoExtract') is True
def test_entry_as_note(self):
"""
Given:
- mark_as_note set to True
When:
- creating a CommandResults object
Then:
- the Note field is set to True
"""
from CommonServerPython import CommandResults
results = CommandResults(
outputs_prefix='Test',
outputs_key_field='value',
outputs=None,
mark_as_note=True
)
assert results.to_context().get('Note') is True
class TestBaseClient:
from CommonServerPython import BaseClient
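    # shared expected JSON body and BaseClient instance reused by the request tests in this class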
text = {"status": "ok"}
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201))
RETRIES_POSITIVE_TEST = [
'get',
'put',
'post'
]
@pytest.mark.skip(reason="Test - too long, only manual")
@pytest.mark.parametrize('method', RETRIES_POSITIVE_TEST)
def test_http_requests_with_retry_sanity(self, method):
"""
Given
- A base client
When
        - Making an http request call with retries configured to a number higher than 0
        Then
        - Ensure a successful request returns the response as expected
"""
url = 'http://httpbin.org/{}'.format(method)
res = self.client._http_request(method,
'',
full_url=url,
retries=1,
status_list_to_retry=[401])
assert res['url'] == url
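    # (http method, status code that should surface as a DemistoException after retries are exhausted)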
RETRIES_NEGATIVE_TESTS_INPUT = [
('get', 400), ('get', 401), ('get', 500),
('put', 400), ('put', 401), ('put', 500),
('post', 400), ('post', 401), ('post', 500),
]
@pytest.mark.skip(reason="Test - too long, only manual")
@pytest.mark.parametrize('method, status', RETRIES_NEGATIVE_TESTS_INPUT)
def test_http_requests_with_retry_negative_sanity(self, method, status):
"""
Given
- A base client
When
        - Making an http request call with retries configured to a number higher than 0
        Then
        - An unsuccessful request raises a DemistoException regardless of the bad status code.
"""
from CommonServerPython import DemistoException
with raises(DemistoException, match='{}'.format(status)):
self.client._http_request(method,
'',
full_url='http://httpbin.org/status/{}'.format(status),
retries=3,
status_list_to_retry=[400, 401, 500])
def test_http_request_json(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
res = self.client._http_request('get', 'event')
assert res == self.text
def test_http_request_json_negative(self, requests_mock):
from CommonServerPython import DemistoException
text = 'notjson'
requests_mock.get('http://example.com/api/v2/event', text=text)
with raises(DemistoException, match="Failed to parse json") as exception:
self.client._http_request('get', 'event')
assert exception.value.res
assert exception.value.res.text == text
def test_http_request_text(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
res = self.client._http_request('get', 'event', resp_type='text')
assert res == json.dumps(self.text)
def test_http_request_content(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', content=str.encode(json.dumps(self.text)))
res = self.client._http_request('get', 'event', resp_type='content')
assert json.loads(res) == self.text
def test_http_request_response(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event')
res = self.client._http_request('get', 'event', resp_type='response')
assert isinstance(res, requests.Response)
def test_http_request_proxy_false(self):
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = 'http://testproxy:8899'
os.environ['https_proxy'] = 'https://testproxy:8899'
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=False, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert not m.last_request.proxies
assert m.called is True
def test_http_request_proxy_true(self):
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = 'http://testproxy:8899'
os.environ['https_proxy'] = 'https://testproxy:8899'
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert m.last_request.proxies == {
'http': 'http://testproxy:8899',
'https': 'https://testproxy:8899'
}
assert m.called is True
def test_http_request_proxy_without_http_prefix(self):
"""
Given
- proxy param is set to true
- proxy configs are without http/https prefix
When
- run an http get request
Then
            - the request will run and use proxy configs with an http:// prefix added.
"""
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = 'testproxy:8899'
os.environ['https_proxy'] = 'testproxy:8899'
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert m.last_request.proxies == {
'http': 'http://testproxy:8899',
'https': 'http://testproxy:8899'
}
assert m.called is True
def test_http_request_proxy_empty_proxy(self):
"""
Given
- proxy param is set to true
- proxy configs are empty
When
- run an http get request
Then
            - the request will run with empty proxy configs and no http:// prefix will be added
"""
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = ''
os.environ['https_proxy'] = ''
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert m.last_request.proxies == {}
assert m.called is True
def test_http_request_verify_false(self):
from CommonServerPython import BaseClient
import requests_mock
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=False)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify is False
assert m.called is True
def test_http_request_not_ok(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', status_code=500)
        with raises(DemistoException, match=r"\[500\]"):
self.client._http_request('get', 'event')
def test_http_request_not_ok_but_ok(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', status_code=500)
res = self.client._http_request('get', 'event', resp_type='response', ok_codes=(500,))
assert res.status_code == 500
def test_http_request_not_ok_with_json(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', status_code=500, content=str.encode(json.dumps(self.text)))
with raises(DemistoException, match="Error in API call"):
self.client._http_request('get', 'event')
def test_http_request_not_ok_with_json_parsing(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', status_code=500, content=str.encode(json.dumps(self.text)))
with raises(DemistoException) as exception:
self.client._http_request('get', 'event')
message = str(exception.value)
response_json_error = json.loads(message.split('\n')[1])
assert response_json_error == self.text
def test_http_request_timeout(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectTimeout)
with raises(DemistoException, match="Connection Timeout Error"):
self.client._http_request('get', 'event')
def test_http_request_ssl_error(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.SSLError)
with raises(DemistoException, match="SSL Certificate Verification Failed"):
self.client._http_request('get', 'event', resp_type='response')
    def test_http_request_ssl_error_insecure(self, requests_mock):
        requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.SSLError('test ssl'))
        client = self.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
with raises(requests.exceptions.SSLError, match="^test ssl$"):
client._http_request('get', 'event', resp_type='response')
def test_http_request_proxy_error(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ProxyError)
with raises(DemistoException, match="Proxy Error"):
self.client._http_request('get', 'event', resp_type='response')
def test_http_request_connection_error(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectionError)
with raises(DemistoException, match="Verify that the server URL parameter"):
self.client._http_request('get', 'event', resp_type='response')
def test_text_exception_parsing(self, requests_mock):
from CommonServerPython import DemistoException
reason = 'Bad Request'
text = 'additional text'
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
reason=reason,
text=text)
with raises(DemistoException, match='- {}\n{}'.format(reason, text)):
self.client._http_request('get', 'event', resp_type='text')
def test_json_exception_parsing(self, requests_mock):
from CommonServerPython import DemistoException
reason = 'Bad Request'
json_response = {'error': 'additional text'}
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
reason=reason,
json=json_response)
with raises(DemistoException, match='- {}\n.*{}'.format(reason, json_response["error"])):
self.client._http_request('get', 'event', resp_type='text')
def test_exception_response_json_parsing_when_ok_code_is_invalid(self, requests_mock):
from CommonServerPython import DemistoException
json_response = {'error': 'additional text'}
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
json=json_response)
try:
self.client._http_request('get', 'event', ok_codes=(200,))
except DemistoException as e:
resp_json = e.res.json()
assert e.res.status_code == 400
assert resp_json.get('error') == 'additional text'
def test_exception_response_text_parsing_when_ok_code_is_invalid(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
text='{"error": "additional text"}')
try:
self.client._http_request('get', 'event', ok_codes=(200,))
except DemistoException as e:
resp_json = json.loads(e.res.text)
assert e.res.status_code == 400
assert resp_json.get('error') == 'additional text'
def test_is_valid_ok_codes_empty(self):
from requests import Response
from CommonServerPython import BaseClient
new_client = BaseClient('http://example.com/api/v2/')
response = Response()
response.status_code = 200
assert new_client._is_status_code_valid(response, None)
def test_is_valid_ok_codes_from_function(self):
from requests import Response
response = Response()
response.status_code = 200
assert self.client._is_status_code_valid(response, (200, 201))
def test_is_valid_ok_codes_from_self(self):
from requests import Response
response = Response()
response.status_code = 200
assert self.client._is_status_code_valid(response, None)
def test_is_valid_ok_codes_empty_false(self):
from requests import Response
response = Response()
response.status_code = 400
assert not self.client._is_status_code_valid(response, None)
def test_is_valid_ok_codes_from_function_false(self):
from requests import Response
response = Response()
response.status_code = 400
assert not self.client._is_status_code_valid(response, (200, 201))
def test_is_valid_ok_codes_from_self_false(self):
from requests import Response
response = Response()
response.status_code = 400
assert not self.client._is_status_code_valid(response)
def test_parse_date_string():
# test unconverted data remains: Z
assert parse_date_string('2019-09-17T06:16:39Z') == datetime(2019, 9, 17, 6, 16, 39)
# test unconverted data remains: .22Z
assert parse_date_string('2019-09-17T06:16:39.22Z') == datetime(2019, 9, 17, 6, 16, 39, 220000)
# test time data without ms does not match format with ms
assert parse_date_string('2019-09-17T06:16:39Z', '%Y-%m-%dT%H:%M:%S.%f') == datetime(2019, 9, 17, 6, 16, 39)
# test time data with timezone Z does not match format with timezone +05:00
assert parse_date_string('2019-09-17T06:16:39Z', '%Y-%m-%dT%H:%M:%S+05:00') == datetime(2019, 9, 17, 6, 16, 39)
# test time data with timezone +05:00 does not match format with timezone Z
assert parse_date_string('2019-09-17T06:16:39+05:00', '%Y-%m-%dT%H:%M:%SZ') == datetime(2019, 9, 17, 6, 16, 39)
# test time data with timezone -05:00 and with ms does not match format with timezone +02:00 without ms
assert parse_date_string(
'2019-09-17T06:16:39.4040+05:00', '%Y-%m-%dT%H:%M:%S+02:00'
) == datetime(2019, 9, 17, 6, 16, 39, 404000)
def test_override_print(mocker):
mocker.patch.object(demisto, 'info')
int_logger = IntegrationLogger()
int_logger.set_buffering(False)
int_logger.print_override("test", "this")
assert demisto.info.call_count == 1
assert demisto.info.call_args[0][0] == "test this"
demisto.info.reset_mock()
int_logger.print_override("test", "this", file=sys.stderr)
assert demisto.info.call_count == 1
assert demisto.info.call_args[0][0] == "test this"
buf = StringIO()
# test writing to custom file (not stdout/stderr)
int_logger.print_override("test", "this", file=buf)
assert buf.getvalue() == 'test this\n'
def test_http_client_debug(mocker):
if not IS_PY3:
pytest.skip("test not supported in py2")
return
mocker.patch.object(demisto, 'info')
debug_log = DebugLogger()
from http.client import HTTPConnection
HTTPConnection.debuglevel = 1
    # not opening the HTTPConnection with 'with' because it's not supported in all python versions
con = HTTPConnection("google.com")
con.request('GET', '/')
with con.getresponse() as r:
r.read()
con.close()
assert demisto.info.call_count > 5
assert debug_log is not None
def test_http_client_debug_int_logger_sensitive_query_params(mocker):
if not IS_PY3:
pytest.skip("test not supported in py2")
return
mocker.patch.object(demisto, 'params', return_value={'APIKey': 'dummy'})
mocker.patch.object(demisto, 'info')
debug_log = DebugLogger()
from http.client import HTTPConnection
HTTPConnection.debuglevel = 1
con = HTTPConnection("google.com")
con.request('GET', '?apikey=dummy')
    # not opening the HTTPConnection with 'with' because it's not supported in all python versions
with con.getresponse() as r:
r.read()
con.close()
assert debug_log
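    # the raw API key must never appear in the debug log; when the apikey query param is logged it should be masked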
for arg in demisto.info.call_args_list:
assert 'dummy' not in arg[0][0]
if 'apikey' in arg[0][0]:
assert 'apikey=<XX_REPLACED>' in arg[0][0]
class TestParseDateRange:
@staticmethod
def test_utc_time_sanity():
utc_now = datetime.utcnow()
utc_start_time, utc_end_time = parse_date_range('2 days', utc=True)
# testing UTC date time and range of 2 days
assert utc_now.replace(microsecond=0) == utc_end_time.replace(microsecond=0)
assert abs(utc_start_time - utc_end_time).days == 2
@staticmethod
def test_local_time_sanity():
local_now = datetime.now()
local_start_time, local_end_time = parse_date_range('73 minutes', utc=False)
# testing local datetime and range of 73 minutes
assert local_now.replace(microsecond=0) == local_end_time.replace(microsecond=0)
assert abs(local_start_time - local_end_time).seconds / 60 == 73
@staticmethod
def test_with_trailing_spaces():
utc_now = datetime.utcnow()
utc_start_time, utc_end_time = parse_date_range('2 days ', utc=True)
# testing UTC date time and range of 2 days
assert utc_now.replace(microsecond=0) == utc_end_time.replace(microsecond=0)
assert abs(utc_start_time - utc_end_time).days == 2
@staticmethod
def test_case_insensitive():
utc_now = datetime.utcnow()
utc_start_time, utc_end_time = parse_date_range('2 Days', utc=True)
# testing UTC date time and range of 2 days
assert utc_now.replace(microsecond=0) == utc_end_time.replace(microsecond=0)
assert abs(utc_start_time - utc_end_time).days == 2
@staticmethod
def test_error__invalid_input_format(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('2 Days ago', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
results = demisto.results.call_args[0][0]
assert 'date_range must be "number date_range_unit"' in results['Contents']
@staticmethod
def test_error__invalid_time_value_not_a_number(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('ten Days', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
results = demisto.results.call_args[0][0]
assert 'The time value is invalid' in results['Contents']
@staticmethod
def test_error__invalid_time_value_not_an_integer(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('1.5 Days', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
results = demisto.results.call_args[0][0]
assert 'The time value is invalid' in results['Contents']
@staticmethod
def test_error__invalid_time_unit(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('2 nights', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
results = demisto.results.call_args[0][0]
assert 'The unit of date_range is invalid' in results['Contents']
def test_encode_string_results():
s = "test"
assert s == encode_string_results(s)
s2 = u"בדיקה"
if IS_PY3:
res = str(s2)
else:
res = s2.encode("utf8")
assert encode_string_results(s2) == res
not_string = [1, 2, 3]
assert not_string == encode_string_results(not_string)
class TestReturnOutputs:
def test_return_outputs(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
return_outputs(md, outputs, raw_response)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
def test_return_outputs_only_md(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
return_outputs(md)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert md == results['HumanReadable']
assert 'text' == results['ContentsFormat']
def test_return_outputs_raw_none(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
return_outputs(md, outputs, None)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert outputs == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
def test_return_outputs_timeline(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
timeline = [{'Value': 'blah', 'Message': 'test', 'Category': 'test'}]
return_outputs(md, outputs, raw_response, timeline)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
assert timeline == results['IndicatorTimeline']
def test_return_outputs_timeline_without_category(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
timeline = [{'Value': 'blah', 'Message': 'test'}]
return_outputs(md, outputs, raw_response, timeline)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
assert 'Category' in results['IndicatorTimeline'][0].keys()
assert results['IndicatorTimeline'][0]['Category'] == 'Integration Update'
def test_return_outputs_ignore_auto_extract(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
ignore_auto_extract = True
return_outputs(md, outputs, raw_response, ignore_auto_extract=ignore_auto_extract)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
assert ignore_auto_extract == results['IgnoreAutoExtract']
def test_return_outputs_text_raw_response(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
raw_response = 'string'
return_outputs(md, raw_response=raw_response)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'text' == results['ContentsFormat']
def test_argToBoolean():
assert argToBoolean('true') is True
assert argToBoolean('yes') is True
assert argToBoolean('TrUe') is True
assert argToBoolean(True) is True
assert argToBoolean('false') is False
assert argToBoolean('no') is False
assert argToBoolean(False) is False
batch_params = [
# full batch case
([1, 2, 3], 1, [[1], [2], [3]]),
# empty case
([], 1, []),
# out of index case
([1, 2, 3], 5, [[1, 2, 3]]),
# out of index in end with batches
([1, 2, 3, 4, 5], 2, [[1, 2], [3, 4], [5]]),
([1] * 100, 2, [[1, 1]] * 50)
]
@pytest.mark.parametrize('iterable, sz, expected', batch_params)
def test_batch(iterable, sz, expected):
for i, item in enumerate(batch(iterable, sz)):
assert expected[i] == item
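# (regex pattern, candidate string, expected full-match result)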
regexes_test = [
(ipv4Regex, '192.168.1.1', True),
(ipv4Regex, '192.168.1.1/24', False),
(ipv4Regex, '192.168.a.1', False),
(ipv4Regex, '192.168..1.1', False),
(ipv4Regex, '192.256.1.1', False),
(ipv4Regex, '192.256.1.1.1', False),
(ipv4cidrRegex, '192.168.1.1/32', True),
(ipv4cidrRegex, '192.168.1.1.1/30', False),
(ipv4cidrRegex, '192.168.1.b/30', False),
(ipv4cidrRegex, '192.168.1.12/381', False),
(ipv6Regex, '2001:db8:a0b:12f0::1', True),
(ipv6Regex, '2001:db8:a0b:12f0::1/11', False),
(ipv6Regex, '2001:db8:a0b:12f0::1::1', False),
(ipv6Regex, '2001:db8:a0b:12f0::98aa5', False),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1/64', True),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1/256', False),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1::1/25', False),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1aaasds::1/1', False)
]
@pytest.mark.parametrize('pattern, string, expected', regexes_test)
def test_regexes(pattern, string, expected):
# (str, str, bool) -> None
# emulates re.fullmatch from py3.4
assert expected is bool(re.match("(?:" + pattern + r")\Z", string))
IP_TO_INDICATOR_TYPE_PACK = [
('192.168.1.1', FeedIndicatorType.IP),
('192.168.1.1/32', FeedIndicatorType.CIDR),
('2001:db8:a0b:12f0::1', FeedIndicatorType.IPv6),
('2001:db8:a0b:12f0::1/64', FeedIndicatorType.IPv6CIDR),
]
@pytest.mark.parametrize('ip, indicator_type', IP_TO_INDICATOR_TYPE_PACK)
def test_ip_to_indicator(ip, indicator_type):
assert FeedIndicatorType.ip_to_indicator_type(ip) is indicator_type
data_test_b64_encode = [
(u'test', 'dGVzdA=='),
('test', 'dGVzdA=='),
(b'test', 'dGVzdA=='),
('', ''),
('%', 'JQ=='),
(u'§', 'wqc='),
(u'§t`e§s`t§', 'wqd0YGXCp3NgdMKn'),
]
@pytest.mark.parametrize('_input, expected_output', data_test_b64_encode)
def test_b64_encode(_input, expected_output):
output = b64_encode(_input)
assert output == expected_output, 'b64_encode({}) returns: {} instead: {}'.format(_input, output, expected_output)
def test_traceback_in_return_error_debug_mode_on(mocker):
mocker.patch.object(demisto, 'command', return_value="test-command")
mocker.spy(demisto, 'results')
mocker.patch('CommonServerPython.is_debug_mode', return_value=True)
from CommonServerPython import return_error
try:
raise Exception("This is a test string")
except Exception:
with pytest.raises(SystemExit):
return_error("some text")
assert "This is a test string" in str(demisto.results.call_args)
assert "Traceback" in str(demisto.results.call_args)
assert "some text" in str(demisto.results.call_args)
def test_traceback_in_return_error_debug_mode_off(mocker):
mocker.patch.object(demisto, 'command', return_value="test-command")
mocker.spy(demisto, 'results')
mocker.patch('CommonServerPython.is_debug_mode', return_value=False)
from CommonServerPython import return_error
try:
raise Exception("This is a test string")
except Exception:
with pytest.raises(SystemExit):
return_error("some text")
assert "This is a test string" not in str(demisto.results.call_args)
assert "Traceback" not in str(demisto.results.call_args)
assert "some text" in str(demisto.results.call_args)
# append_context unit test
CONTEXT_MOCK = {
'str_key': 'str_value',
'dict_key': {
'key1': 'val1',
'key2': 'val2'
},
'int_key': 1,
'list_key_str': ['val1', 'val2'],
'list_key_list': ['val1', 'val2'],
'list_key_dict': ['val1', 'val2']
}
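# expected context after appending the matching DATA_MOCK_* values below via appendContext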
UPDATED_CONTEXT = {
'str_key': 'str_data,str_value',
'dict_key': {
'key1': 'val1',
'key2': 'val2',
'data_key': 'data_val'
},
'int_key': [1, 2],
'list_key_str': ['val1', 'val2', 'str_data'],
'list_key_list': ['val1', 'val2', 'val1', 'val2'],
'list_key_dict': ['val1', 'val2', {'data_key': 'data_val'}]
}
DATA_MOCK_STRING = "str_data"
DATA_MOCK_LIST = ['val1', 'val2']
DATA_MOCK_DICT = {
'data_key': 'data_val'
}
DATA_MOCK_INT = 2
STR_KEY = "str_key"
DICT_KEY = "dict_key"
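# (existing context, data to append, context key, expected result message or 'TypeError' for an invalid append)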
APPEND_CONTEXT_INPUT = [
(CONTEXT_MOCK, DATA_MOCK_STRING, STR_KEY, "key = {}, val = {}".format(STR_KEY, UPDATED_CONTEXT[STR_KEY])),
(CONTEXT_MOCK, DATA_MOCK_LIST, STR_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_DICT, STR_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_STRING, DICT_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_LIST, DICT_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_DICT, DICT_KEY, "key = {}, val = {}".format(DICT_KEY, UPDATED_CONTEXT[DICT_KEY])),
(CONTEXT_MOCK, DATA_MOCK_STRING, 'list_key_str',
"key = {}, val = {}".format('list_key_str', UPDATED_CONTEXT['list_key_str'])),
(CONTEXT_MOCK, DATA_MOCK_LIST, 'list_key_list',
"key = {}, val = {}".format('list_key_list', UPDATED_CONTEXT['list_key_list'])),
(CONTEXT_MOCK, DATA_MOCK_DICT, 'list_key_dict',
"key = {}, val = {}".format('list_key_dict', UPDATED_CONTEXT['list_key_dict'])),
(CONTEXT_MOCK, DATA_MOCK_INT, 'int_key', "key = {}, val = {}".format('int_key', UPDATED_CONTEXT['int_key'])),
]
def get_set_context(key, val):
from CommonServerPython import return_error
return_error("key = {}, val = {}".format(key, val))
@pytest.mark.parametrize('context_mock, data_mock, key, expected_answer', APPEND_CONTEXT_INPUT)
def test_append_context(mocker, context_mock, data_mock, key, expected_answer):
from CommonServerPython import demisto
mocker.patch.object(demisto, 'get', return_value=context_mock.get(key))
mocker.patch.object(demisto, 'setContext', side_effect=get_set_context)
mocker.patch.object(demisto, 'results')
if "TypeError" not in expected_answer:
with raises(SystemExit, match='0'):
appendContext(key, data_mock)
assert expected_answer in demisto.results.call_args[0][0]['Contents']
else:
        with raises(TypeError) as e:
            appendContext(key, data_mock)
        assert expected_answer == type(e.value).__name__
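# (indicator value, expected auto-detected indicator type; None means no type should be detected)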
INDICATOR_VALUE_AND_TYPE = [
('3fec1b14cea32bbcd97fad4507b06888', "File"),
('1c8893f75089a27ca6a8d49801d7aa6b64ea0c6167fe8b1becfe9bc13f47bdc1', 'File'),
('castaneda-thornton.com', 'Domain'),
('192.0.0.1', 'IP'),
('test@gmail.com', 'Email'),
('e775eb1250137c0b83d4e7c4549c71d6f10cae4e708ebf0b5c4613cbd1e91087', 'File'),
('test@yahoo.com', 'Email'),
('http://test.com', 'URL'),
('11.111.11.11/11', 'CIDR'),
('CVE-0000-0000', 'CVE'),
('dbot@demisto.works', 'Email'),
('37b6d02m-63e0-495e-kk92-7c21511adc7a@SB2APC01FT091.outlook.com', 'Email'),
('dummy@recipient.com', 'Email'),
('image003.gif@01CF4D7F.1DF62650', 'Email'),
('bruce.wayne@pharmtech.zz', 'Email'),
('joe@gmail.com', 'Email'),
('koko@demisto.com', 'Email'),
('42a5e275559a1651b3df8e15d3f5912499f0f2d3d1523959c56fc5aea6371e59', 'File'),
('10676cf66244cfa91567fbc1a937f4cb19438338b35b69d4bcc2cf0d3a44af5e', 'File'),
('52483514f07eb14570142f6927b77deb7b4da99f', 'File'),
('c8092abd8d581750c0530fa1fc8d8318', 'File'),
('fe80:0000:0000:0000:91ba:7558:26d3:acde', 'IPv6'),
('fd60:e22:f1b9::2', 'IPv6'),
('2001:db8:0000:0000:0000:0000:0000:0000', 'IPv6'),
('112.126.94.107', 'IP'),
('a', None),
('*castaneda-thornton.com', 'DomainGlob'),
(
'53e6baa124f54462786f1122e98e38ff1be3de82fe2a96b1849a8637043fd847eec7e0f53307bddf7a066565292d500c36c941f1f3bb9dcac807b2f4a0bfce1b',
'File')
]
@pytest.mark.parametrize('indicator_value, indicatory_type', INDICATOR_VALUE_AND_TYPE)
def test_auto_detect_indicator_type(indicator_value, indicatory_type):
"""
Given
- Indicator value
- Indicator type
When
- Trying to detect the type of an indicator.
Then
- Run the auto_detect_indicator_type and validate that the indicator type the function returns is as expected.
"""
if sys.version_info.major == 3 and sys.version_info.minor == 8:
assert auto_detect_indicator_type(indicator_value) == indicatory_type
else:
try:
auto_detect_indicator_type(indicator_value)
except Exception as e:
assert str(e) == "Missing tldextract module, In order to use the auto detect function please" \
" use a docker image with it installed such as: demisto/jmespath"
def test_auto_detect_indicator_type_tldextract(mocker):
"""
Given
tldextract version is lower than 3.0.0
When
Trying to detect the type of an indicator.
Then
        Run auto_detect_indicator_type and validate that tldextract is called with the `cache_file` arg and not `cache_dir`
"""
if sys.version_info.major == 3 and sys.version_info.minor >= 8:
import tldextract as tlde
tlde.__version__ = '2.2.7'
mocker.patch.object(tlde, 'TLDExtract')
auto_detect_indicator_type('8')
res = tlde.TLDExtract.call_args
assert 'cache_file' in res[1].keys()
def test_handle_proxy(mocker):
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
handle_proxy()
assert os.getenv('REQUESTS_CA_BUNDLE') is None
os.environ['REQUESTS_CA_BUNDLE'] = '/test2.pem'
mocker.patch.object(demisto, 'params', return_value={})
handle_proxy()
assert os.environ['REQUESTS_CA_BUNDLE'] == '/test2.pem' # make sure no change
mocker.patch.object(demisto, 'params', return_value={'unsecure': True})
handle_proxy()
assert os.getenv('REQUESTS_CA_BUNDLE') is None
def test_handle_proxy_without_http_prefix():
"""
Given
proxy is configured in environment vars without http/https prefixes
When
run handle_proxy()
Then
the function will return proxies with http:// prefix
"""
os.environ['HTTP_PROXY'] = 'testproxy:8899'
os.environ['HTTPS_PROXY'] = 'testproxy:8899'
proxies = handle_proxy(checkbox_default_value=True)
assert proxies['http'] == 'http://testproxy:8899'
assert proxies['https'] == 'http://testproxy:8899'
def test_handle_proxy_with_http_prefix():
"""
Given
proxy is configured in environment vars with http/https prefixes
When
run handle_proxy()
Then
the function will return proxies unchanged
"""
os.environ['HTTP_PROXY'] = 'http://testproxy:8899'
os.environ['HTTPS_PROXY'] = 'https://testproxy:8899'
proxies = handle_proxy(checkbox_default_value=True)
assert proxies['http'] == 'http://testproxy:8899'
assert proxies['https'] == 'https://testproxy:8899'
def test_handle_proxy_with_socks5_prefix():
"""
Given
proxy is configured in environment vars with socks5 (socks proxy) prefixes
When
run handle_proxy()
Then
the function will return proxies unchanged
"""
os.environ['HTTP_PROXY'] = 'socks5://testproxy:8899'
os.environ['HTTPS_PROXY'] = 'socks5://testproxy:8899'
proxies = handle_proxy(checkbox_default_value=True)
assert proxies['http'] == 'socks5://testproxy:8899'
assert proxies['https'] == 'socks5://testproxy:8899'
@pytest.mark.parametrize(argnames="dict_obj, keys, expected, default_return_value",
argvalues=[
({'a': '1'}, ['a'], '1', None),
({'a': {'b': '2'}}, ['a', 'b'], '2', None),
({'a': {'b': '2'}}, ['a', 'c'], 'test', 'test'),
])
def test_safe_get(dict_obj, keys, expected, default_return_value):
from CommonServerPython import dict_safe_get
assert expected == dict_safe_get(dict_object=dict_obj,
keys=keys,
default_return_value=default_return_value)
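# Sample mirror and conversation payloads (stored as JSON strings in the integration context) used by the versioned-context tests below.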
MIRRORS = '''
[{
"channel_id":"GKQ86DVPH",
"channel_name": "incident-681",
"channel_topic": "incident-681",
"investigation_id":"681",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GKB19PA3V",
"channel_name": "group2",
"channel_topic": "cooltopic",
"investigation_id":"684",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GKB19PA3V",
"channel_name": "group2",
"channel_topic": "cooltopic",
"investigation_id":"692",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GKNEJU4P9",
"channel_name": "group3",
"channel_topic": "incident-713",
"investigation_id":"713",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GL8GHC0LV",
"channel_name": "group5",
"channel_topic": "incident-734",
"investigation_id":"734",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
}]
'''
CONVERSATIONS = '''[{
"id": "C012AB3CD",
"name": "general",
"is_channel": true,
"is_group": false,
"is_im": false,
"created": 1449252889,
"creator": "U012A3CDE",
"is_archived": false,
"is_general": true,
"unlinked": 0,
"name_normalized": "general",
"is_shared": false,
"is_ext_shared": false,
"is_org_shared": false,
"pending_shared": [],
"is_pending_ext_shared": false,
"is_member": true,
"is_private": false,
"is_mpim": false,
"topic": {
"value": "Company-wide announcements and work-based matters",
"creator": "",
"last_set": 0
},
"purpose": {
"value": "This channel is for team-wide communication and announcements. All team members are in this channel.",
"creator": "",
"last_set": 0
},
"previous_names": [],
"num_members": 4
},
{
"id": "C061EG9T2",
"name": "random",
"is_channel": true,
"is_group": false,
"is_im": false,
"created": 1449252889,
"creator": "U061F7AUR",
"is_archived": false,
"is_general": false,
"unlinked": 0,
"name_normalized": "random",
"is_shared": false,
"is_ext_shared": false,
"is_org_shared": false,
"pending_shared": [],
"is_pending_ext_shared": false,
"is_member": true,
"is_private": false,
"is_mpim": false,
"topic": {
"value": "Non-work banter and water cooler conversation",
"creator": "",
"last_set": 0
},
"purpose": {
"value": "A place for non-work-related flimflam.",
"creator": "",
"last_set": 0
},
"previous_names": [],
"num_members": 4
}]'''
OBJECTS_TO_KEYS = {
'mirrors': 'investigation_id',
'questions': 'entitlement',
'users': 'id'
}
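# Helpers emulating the server-side versioned integration context store used by the tests below.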
def set_integration_context_versioned(integration_context, version=-1, sync=False):
global INTEGRATION_CONTEXT_VERSIONED
try:
if not INTEGRATION_CONTEXT_VERSIONED:
INTEGRATION_CONTEXT_VERSIONED = {'context': '{}', 'version': 0}
except NameError:
INTEGRATION_CONTEXT_VERSIONED = {'context': '{}', 'version': 0}
current_version = INTEGRATION_CONTEXT_VERSIONED['version']
if version != -1 and version <= current_version:
raise ValueError('DB Insert version {} does not match version {}'.format(current_version, version))
INTEGRATION_CONTEXT_VERSIONED = {'context': integration_context, 'version': current_version + 1}
def get_integration_context_versioned(refresh=False):
return INTEGRATION_CONTEXT_VERSIONED
def test_merge_lists():
from CommonServerPython import merge_lists
# Set
original = [{'id': '1', 'updated': 'n'}, {'id': '2', 'updated': 'n'}, {'id': '11', 'updated': 'n'}]
updated = [{'id': '1', 'updated': 'y'}, {'id': '3', 'updated': 'y'}, {'id': '11', 'updated': 'n', 'remove': True}]
expected = [{'id': '1', 'updated': 'y'}, {'id': '2', 'updated': 'n'}, {'id': '3', 'updated': 'y'}]
# Arrange
result = merge_lists(original, updated, 'id')
# Assert
assert len(result) == len(expected)
for obj in result:
assert obj in expected
@pytest.mark.parametrize('version, expected',
[
({'version': '5.5.0'}, False),
({'version': '6.0.0'}, True),
]
)
def test_is_versioned_context_available(mocker, version, expected):
from CommonServerPython import is_versioned_context_available
# Set
mocker.patch.object(demisto, 'demistoVersion', return_value=version)
# Arrange
result = is_versioned_context_available()
get_demisto_version._version = None
# Assert
assert expected == result
def test_update_context_merge(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
mocker.patch.object(CommonServerPython, 'is_versioned_context_available', return_value=True)
new_mirror = {
'channel_id': 'new_group',
'channel_name': 'incident-999',
'channel_topic': 'incident-999',
'investigation_id': '999',
'mirror_type': 'all',
'mirror_direction': 'both',
'mirror_to': 'group',
'auto_close': True,
'mirrored': False
}
mirrors = json.loads(MIRRORS)
mirrors.extend([new_mirror])
# Arrange
context, version = CommonServerPython.update_integration_context({'mirrors': [new_mirror]}, OBJECTS_TO_KEYS, True)
new_mirrors = json.loads(context['mirrors'])
# Assert
assert len(mirrors) == len(new_mirrors)
for mirror in mirrors:
assert mirror in new_mirrors
assert version == get_integration_context_versioned()['version']
def test_update_context_no_merge(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
mocker.patch.object(CommonServerPython, 'is_versioned_context_available', return_value=True)
new_conversation = {
'id': 'A0123456',
'name': 'general'
}
conversations = json.loads(CONVERSATIONS)
conversations.extend([new_conversation])
# Arrange
context, version = CommonServerPython.update_integration_context({'conversations': conversations}, OBJECTS_TO_KEYS,
True)
new_conversations = json.loads(context['conversations'])
# Assert
assert conversations == new_conversations
assert version == get_integration_context_versioned()['version']
@pytest.mark.parametrize('versioned_available', [True, False])
def test_get_latest_integration_context(mocker, versioned_available):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
mocker.patch.object(CommonServerPython, 'is_versioned_context_available', return_value=versioned_available)
mocker.patch.object(demisto, 'getIntegrationContext',
return_value={'mirrors': MIRRORS, 'conversations': CONVERSATIONS})
# Arrange
context, ver = CommonServerPython.get_integration_context_with_version(True)
# Assert
assert context == get_integration_context_versioned()['context']
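    # when versioned context is unavailable, get_integration_context_with_version is expected to return -1 as the version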
    assert ver == (get_integration_context_versioned()['version'] if versioned_available else -1)
def test_set_latest_integration_context(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS,
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
int_context = get_integration_context_versioned()
mocker.patch.object(CommonServerPython, 'update_integration_context',
side_effect=[(int_context['context'], int_context['version']),
(int_context['context'], int_context['version'] + 1)])
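    # the first set_integration_context attempt raises to force a retry; the second attempt succeeds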
mocker.patch.object(CommonServerPython, 'set_integration_context', side_effect=[ValueError, int_context['context']])
# Arrange
CommonServerPython.set_to_integration_context_with_retries({}, OBJECTS_TO_KEYS)
int_context_calls = CommonServerPython.set_integration_context.call_count
int_context_args_1 = CommonServerPython.set_integration_context.call_args_list[0][0]
int_context_args_2 = CommonServerPython.set_integration_context.call_args_list[1][0]
# Assert
assert int_context_calls == 2
assert int_context_args_1 == (int_context['context'], True, int_context['version'])
assert int_context_args_2 == (int_context['context'], True, int_context['version'] + 1)
def test_set_latest_integration_context_es(mocker):
import CommonServerPython
# Set
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
es_inv_context_version_first = {'version': 5, 'sequenceNumber': 807, 'primaryTerm': 1}
es_inv_context_version_second = {'version': 7, 'sequenceNumber': 831, 'primaryTerm': 1}
mocker.patch.object(CommonServerPython, 'update_integration_context',
side_effect=[({}, es_inv_context_version_first),
({}, es_inv_context_version_second)])
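    # as above: the first set attempt fails, and the retry should use the refreshed version metadata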
mocker.patch.object(CommonServerPython, 'set_integration_context', side_effect=[ValueError, {}])
# Arrange
CommonServerPython.set_to_integration_context_with_retries({})
int_context_calls = CommonServerPython.set_integration_context.call_count
int_context_args_1 = CommonServerPython.set_integration_context.call_args_list[0][0]
int_context_args_2 = CommonServerPython.set_integration_context.call_args_list[1][0]
# Assert
assert int_context_calls == 2
assert int_context_args_1[1:] == (True, es_inv_context_version_first)
assert int_context_args_2[1:] == (True, es_inv_context_version_second)
def test_set_latest_integration_context_fail(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS,
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
int_context = get_integration_context_versioned()
mocker.patch.object(CommonServerPython, 'update_integration_context', return_value=(
int_context['context'], int_context['version']
))
mocker.patch.object(CommonServerPython, 'set_integration_context', side_effect=ValueError)
# Arrange
with pytest.raises(Exception):
CommonServerPython.set_to_integration_context_with_retries({}, OBJECTS_TO_KEYS)
int_context_calls = CommonServerPython.set_integration_context.call_count
# Assert
assert int_context_calls == CommonServerPython.CONTEXT_UPDATE_RETRY_TIMES
def test_get_x_content_info_headers(mocker):
test_license = 'TEST_LICENSE_ID'
test_brand = 'TEST_BRAND'
mocker.patch.object(
demisto,
'getLicenseID',
return_value=test_license
)
mocker.patch.object(
demisto,
'callingContext',
new_callable=mocker.PropertyMock(return_value={'context': {
'IntegrationBrand': test_brand,
'IntegrationInstance': 'TEST_INSTANCE',
}})
)
headers = get_x_content_info_headers()
assert headers['X-Content-LicenseID'] == test_license
assert headers['X-Content-Name'] == test_brand
def test_return_results_multiple_command_results(mocker):
"""
Given:
- List of 2 CommandResult
When:
- Calling return_results()
Then:
- demisto.results() is called 2 times (with the list items)
"""
from CommonServerPython import CommandResults, return_results
demisto_results_mock = mocker.patch.object(demisto, 'results')
mock_command_results = []
for i in range(2):
mock_output = {'MockContext': i}
mock_command_results.append(CommandResults(outputs_prefix='Mock', outputs=mock_output))
return_results(mock_command_results)
assert demisto_results_mock.call_count == 2
def test_return_results_multiple_dict_results(mocker):
"""
Given:
- List of 2 dictionaries
When:
- Calling return_results()
Then:
- demisto.results() is called 1 time (with the list as an argument)
"""
from CommonServerPython import return_results
demisto_results_mock = mocker.patch.object(demisto, 'results')
mock_command_results = [{'MockContext': 0}, {'MockContext': 1}]
return_results(mock_command_results)
args, _ = demisto_results_mock.call_args_list[0]
assert demisto_results_mock.call_count == 1
assert [{'MockContext': 0}, {'MockContext': 1}] in args
def test_return_results_mixed_results(mocker):
"""
Given:
- List containing a CommandResult object and two dictionaries (representing a demisto result entries)
When:
- Calling return_results()
Then:
- Assert that demisto.results() is called 2 times .
- Assert that the first call was with the CommandResult object.
- Assert that the second call was with the two demisto results dicts.
"""
from CommonServerPython import CommandResults, return_results
demisto_results_mock = mocker.patch.object(demisto, 'results')
mock_command_results_object = CommandResults(outputs_prefix='Mock', outputs={'MockContext': 0})
mock_demisto_results_entry = [{'MockContext': 1}, {'MockContext': 2}]
return_results([mock_command_results_object] + mock_demisto_results_entry)
assert demisto_results_mock.call_count == 2
assert demisto_results_mock.call_args_list[0][0][0] == mock_command_results_object.to_context()
assert demisto_results_mock.call_args_list[1][0][0] == mock_demisto_results_entry
class TestExecuteCommand:
@staticmethod
def test_sanity(mocker):
"""
Given:
- A successful command with a single entry as output.
When:
- Calling execute_command.
Then:
- Assert that only the Contents value is returned.
"""
from CommonServerPython import execute_command, EntryType
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=[{'Type': EntryType.NOTE,
'Contents': {'hello': 'world'}}])
res = execute_command('command', {'arg1': 'value'})
execute_command_args = demisto_execute_mock.call_args_list[0][0]
assert demisto_execute_mock.call_count == 1
assert execute_command_args[0] == 'command'
assert execute_command_args[1] == {'arg1': 'value'}
assert res == {'hello': 'world'}
@staticmethod
def test_multiple_results(mocker):
"""
Given:
- A successful command with several entries as output.
When:
- Calling execute_command.
Then:
- Assert that the "Contents" values of all entries are returned.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
{'Type': EntryType.NOTE, 'Contents': {'entry': '2'}},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
res = execute_command('command', {'arg1': 'value'})
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 3
assert res[0] == {'hello': 'world'}
assert res[1] == {}
assert res[2] == {'entry': '2'}
@staticmethod
def test_raw_results(mocker):
"""
Given:
- A successful command with several entries as output.
When:
- Calling execute_command.
Then:
- Assert that the entire entries are returned.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
'text',
1337,
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
res = execute_command('command', {'arg1': 'value'}, extract_contents=False)
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 4
assert res[0] == {'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}}
assert res[1] == {'Type': EntryType.NOTE, 'Context': 'no contents here'}
assert res[2] == 'text'
assert res[3] == 1337
@staticmethod
def test_failure(mocker):
"""
Given:
- A command that fails.
When:
- Calling execute_command.
Then:
- Assert that the original error is returned to War-Room (using demisto.results).
- Assert an error is returned to the War-Room.
- Function ends the run using SystemExit.
"""
from CommonServerPython import execute_command, EntryType
error_entries = [
{'Type': EntryType.ERROR, 'Contents': 'error number 1'},
{'Type': EntryType.NOTE, 'Contents': 'not an error'},
{'Type': EntryType.ERROR, 'Contents': 'error number 2'},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=error_entries)
demisto_results_mock = mocker.patch.object(demisto, 'results')
with raises(SystemExit, match='0'):
execute_command('bad', {'arg1': 'value'})
assert demisto_execute_mock.call_count == 1
assert demisto_results_mock.call_count == 1
# first call, args (not kwargs), first argument
error_text = demisto_results_mock.call_args_list[0][0][0]['Contents']
assert 'Failed to execute bad.' in error_text
assert 'error number 1' in error_text
assert 'error number 2' in error_text
assert 'not an error' not in error_text
@staticmethod
def test_failure_integration(monkeypatch):
from CommonServerPython import execute_command, EntryType
monkeypatch.delattr(demisto, 'executeCommand')
with raises(DemistoException, match=r'Cannot run demisto.executeCommand\(\) from integrations.'):
execute_command('bad', {'arg1': 'value'})
@staticmethod
def test_multiple_results_fail_on_error_false(mocker):
"""
Given:
- A successful command with several entries as output.
- fail_on_error set to False.
When:
- Calling execute_command.
Then:
- Assert that the status of the execution is True for successful run.
- Assert that the "Contents" values of all entries are returned.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
{'Type': EntryType.NOTE, 'Contents': {'entry': '2'}},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
status, res = execute_command('command', {'arg1': 'value'}, fail_on_error=False)
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 3
assert status
assert res[0] == {'hello': 'world'}
assert res[1] == {}
assert res[2] == {'entry': '2'}
@staticmethod
def test_raw_results_fail_on_error_false(mocker):
"""
Given:
- A successful command with several entries as output.
- fail_on_error set to False.
When:
- Calling execute_command.
Then:
- Assert that the status of the execution is True for successful run.
- Assert that the entire entries are returned.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
'text',
1337,
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
status, res = execute_command('command', {'arg1': 'value'}, extract_contents=False, fail_on_error=False)
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 4
assert status
assert res[0] == {'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}}
assert res[1] == {'Type': EntryType.NOTE, 'Context': 'no contents here'}
assert res[2] == 'text'
assert res[3] == 1337
@staticmethod
def test_failure_fail_on_error_false(mocker):
"""
Given:
- A command that fails.
- fail_on_error set to False.
When:
- Calling execute_command.
Then:
- Assert that the status of the execution is False for failed run.
- Assert that the original errors are returned as a value, and not to the war-room.
"""
from CommonServerPython import execute_command, EntryType
error_entries = [
{'Type': EntryType.ERROR, 'Contents': 'error number 1'},
{'Type': EntryType.NOTE, 'Contents': 'not an error'},
{'Type': EntryType.ERROR, 'Contents': 'error number 2'},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=error_entries)
demisto_results_mock = mocker.patch.object(demisto, 'results')
status, error_text = execute_command('bad', {'arg1': 'value'}, fail_on_error=False)
assert demisto_execute_mock.call_count == 1
assert demisto_results_mock.call_count == 0
assert not status
assert 'error number 1' in error_text
assert 'error number 2' in error_text
assert 'not an error' not in error_text
def test_arg_to_int__valid_numbers():
"""
Given
valid numbers
When
converting them to int
Then
ensure proper int returned
"""
from CommonServerPython import arg_to_number
result = arg_to_number(
arg='5',
arg_name='foo')
assert result == 5
result = arg_to_number(
arg='2.0',
arg_name='foo')
assert result == 2
result = arg_to_number(
arg=3,
arg_name='foo')
assert result == 3
result = arg_to_number(
arg=4,
arg_name='foo',
required=True)
assert result == 4
result = arg_to_number(
arg=5,
required=True)
assert result == 5
def test_arg_to_int__invalid_numbers():
"""
Given
invalid numbers
When
converting them to int
Then
raise ValueError
"""
from CommonServerPython import arg_to_number
try:
arg_to_number(
arg='aa',
arg_name='foo')
assert False
except ValueError as e:
assert 'Invalid number' in str(e)
def test_arg_to_int_required():
"""
Given
argument foo which with value None
When
converting the arg to number via required flag as True
Then
ensure ValueError raised
"""
from CommonServerPython import arg_to_number
# required set to false
result = arg_to_number(
arg=None,
arg_name='foo',
required=False)
assert result is None
try:
arg_to_number(
arg=None,
arg_name='foo',
required=True)
assert False
except ValueError as e:
assert 'Missing' in str(e)
try:
arg_to_number(
arg='',
arg_name='foo',
required=True)
assert False
except ValueError as e:
assert 'Missing' in str(e)
try:
arg_to_number(arg='goo')
assert False
except ValueError as e:
assert '"goo" is not a valid number' in str(e)
def test_arg_to_timestamp_valid_inputs():
"""
Given
valid dates provided
When
converting dates into timestamp
Then
ensure returned int which represents timestamp in milliseconds
"""
if sys.version_info.major == 2:
# skip for python 2 - date
assert True
return
from CommonServerPython import arg_to_datetime
from datetime import datetime, timezone
# hard coded date
result = arg_to_datetime(
arg='2020-11-10T21:43:43Z',
arg_name='foo'
)
assert result == datetime(2020, 11, 10, 21, 43, 43, tzinfo=timezone.utc)
# relative dates also work
result = arg_to_datetime(
arg='2 hours ago',
arg_name='foo'
)
assert result > datetime(2020, 11, 10, 21, 43, 43)
# relative dates also work
result = arg_to_datetime(
arg=1581982463,
arg_name='foo'
)
assert int(result.timestamp()) == 1581982463
result = arg_to_datetime(
arg='2 hours ago'
)
assert result > datetime(2020, 11, 10, 21, 43, 43)
def test_arg_to_timestamp_invalid_inputs():
"""
Given
invalid date like 'aaaa' or '2010-32-01'
When
        converting the date to a datetime object
Then
ensure ValueError is raised
"""
from CommonServerPython import arg_to_datetime
if sys.version_info.major == 2:
# skip for python 2 - date
assert True
return
try:
arg_to_datetime(
arg=None,
arg_name='foo',
required=True)
assert False
except ValueError as e:
assert 'Missing' in str(e)
try:
arg_to_datetime(
arg='aaaa',
arg_name='foo')
assert False
except ValueError as e:
assert 'Invalid date' in str(e)
try:
arg_to_datetime(
arg='2010-32-01',
arg_name='foo')
assert False
except ValueError as e:
assert 'Invalid date' in str(e)
try:
arg_to_datetime(
arg='2010-32-01')
assert False
except ValueError as e:
assert '"2010-32-01" is not a valid date' in str(e)
def test_warnings_handler(mocker):
mocker.patch.object(demisto, 'info')
# need to initialize WarningsHandler as pytest over-rides the handler
with pytest.warns(RuntimeWarning) as r:
warnings.warn("without handler", RuntimeWarning)
handler = WarningsHandler() # noqa
warnings.warn("This is a test", RuntimeWarning)
assert len(r) == 1
assert str(r[0].message) == "without handler"
# call_args is tuple (args list, kwargs). we only need the args
msg = demisto.info.call_args[0][0]
assert 'This is a test' in msg
assert 'python warning' in msg
def test_get_schedule_metadata():
"""
Given
- case 1: no parent entry
- case 2: parent entry with schedule metadata
- case 3: parent entry without schedule metadata
When
querying the schedule metadata
Then
ensure scheduled_metadata is returned correctly
- case 1: no data (empty dict)
- case 2: schedule metadata with all details
- case 3: empty schedule metadata (dict with polling: false)
"""
from CommonServerPython import get_schedule_metadata
# case 1
context = {'ParentEntry': None}
actual_scheduled_metadata = get_schedule_metadata(context=context)
assert actual_scheduled_metadata == {}
# case 2
parent_entry = {
'polling': True,
'pollingCommand': 'foo',
'pollingArgs': {'name': 'foo'},
'timesRan': 5,
'startDate': '2021-04-28T14:20:56.03728+03:00',
'endingDate': '2021-04-28T14:25:35.976244+03:00'
}
context = {
'ParentEntry': parent_entry
}
actual_scheduled_metadata = get_schedule_metadata(context=context)
assert actual_scheduled_metadata.get('is_polling') is True
assert actual_scheduled_metadata.get('polling_command') == parent_entry.get('pollingCommand')
assert actual_scheduled_metadata.get('polling_args') == parent_entry.get('pollingArgs')
assert actual_scheduled_metadata.get('times_ran') == (parent_entry.get('timesRan') + 1)
    assert actual_scheduled_metadata.get('start_date') == parent_entry.get('startDate')
    assert actual_scheduled_metadata.get('end_date') == parent_entry.get('endingDate')
# case 3
parent_entry = {
'polling': False
}
context = {
'ParentEntry': parent_entry
}
actual_scheduled_metadata = get_schedule_metadata(context=context)
assert actual_scheduled_metadata == {'is_polling': False, 'times_ran': 1}
class TestCommonTypes:
def test_create_domain(self):
from CommonServerPython import CommandResults, Common, EntryType, EntryFormat, DBotScoreType
dbot_score = Common.DBotScore(
indicator='somedomain.com',
integration_name='Test',
indicator_type=DBotScoreType.DOMAIN,
score=Common.DBotScore.GOOD
)
domain = Common.Domain(
domain='somedomain.com',
dbot_score=dbot_score,
dns='dns.somedomain',
detection_engines=10,
positive_detections=5,
organization='Some Organization',
admin_phone='18000000',
admin_email='admin@test.com',
registrant_name='Mr Registrant',
registrar_name='Mr Registrar',
registrar_abuse_email='registrar@test.com',
creation_date='2019-01-01T00:00:00',
updated_date='2019-01-02T00:00:00',
expiration_date=None,
domain_status='ACTIVE',
name_servers=[
'PNS31.CLOUDNS.NET',
'PNS32.CLOUDNS.NET'
],
sub_domains=[
'sub-domain1.somedomain.com',
'sub-domain2.somedomain.com',
'sub-domain3.somedomain.com'
],
tags=['tag1', 'tag2'],
malware_family=['malware_family1', 'malware_family2'],
feed_related_indicators=[Common.FeedRelatedIndicators(
value='8.8.8.8',
indicator_type="IP",
description='test'
)],
domain_idn_name='domain_idn_name',
port='port',
internal="False",
category='category',
campaign='campaign',
traffic_light_protocol='traffic_light_protocol',
threat_types=[Common.ThreatTypes(threat_category='threat_category',
threat_category_confidence='threat_category_confidence')],
community_notes=[Common.CommunityNotes(note='note', timestamp='2019-01-01T00:00:00')],
publications=[Common.Publications(title='title', source='source', timestamp='2019-01-01T00:00:00',
link='link')],
geo_location='geo_location',
geo_country='geo_country',
geo_description='geo_description',
tech_country='tech_country',
tech_name='tech_name',
tech_organization='tech_organization',
tech_email='tech_email',
billing='billing'
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[domain]
)
assert results.to_context() == {
'Type': 1,
'ContentsFormat': 'json',
'Contents': None,
'HumanReadable': None,
'EntryContext': {
'Domain(val.Name && val.Name == obj.Name)': [
{
"Name": "somedomain.com",
"DNS": "dns.somedomain",
"DetectionEngines": 10,
"PositiveDetections": 5,
"Registrar": {
"Name": "Mr Registrar",
"AbuseEmail": "registrar@test.com",
"AbusePhone": None
},
"Registrant": {
"Name": "Mr Registrant",
"Email": None,
"Phone": None,
"Country": None
},
"Admin": {
"Name": None,
"Email": "admin@test.com",
"Phone": "18000000",
"Country": None
},
"Organization": "Some Organization",
"Subdomains": [
"sub-domain1.somedomain.com",
"sub-domain2.somedomain.com",
"sub-domain3.somedomain.com"
],
"DomainStatus": "ACTIVE",
"CreationDate": "2019-01-01T00:00:00",
"UpdatedDate": "2019-01-02T00:00:00",
"NameServers": [
"PNS31.CLOUDNS.NET",
"PNS32.CLOUDNS.NET"
],
"Tags": ["tag1", "tag2"],
"FeedRelatedIndicators": [{"value": "8.8.8.8", "type": "IP", "description": "test"}],
"MalwareFamily": ["malware_family1", "malware_family2"],
"DomainIDNName": "domain_idn_name",
"Port": "port",
"Internal": "False",
"Category": "category",
"Campaign": "campaign",
"TrafficLightProtocol": "traffic_light_protocol",
"ThreatTypes": [{
"threatcategory": "threat_category",
"threatcategoryconfidence": "threat_category_confidence"
}],
"CommunityNotes": [{
"note": "note",
"timestamp": "2019-01-01T00:00:00"
}],
"Publications": [{
"source": "source",
"title": "title",
"link": "link",
"timestamp": "2019-01-01T00:00:00"
}],
"Geo": {
"Location": "geo_location",
"Country": "geo_country",
"Description": "geo_description"
},
"Tech": {
"Country": "tech_country",
"Name": "tech_name",
"Organization": "tech_organization",
"Email": "tech_email"
},
"Billing": "billing",
"WHOIS": {
"Registrar": {
"Name": "Mr Registrar",
"AbuseEmail": "registrar@test.com",
"AbusePhone": None
},
"Registrant": {
"Name": "Mr Registrant",
"Email": None,
"Phone": None,
"Country": None
},
"Admin": {
"Name": None,
"Email": "admin@test.com",
"Phone": "18000000",
"Country": None
},
"DomainStatus": "ACTIVE",
"CreationDate": "2019-01-01T00:00:00",
"UpdatedDate": "2019-01-02T00:00:00",
"NameServers": [
"PNS31.CLOUDNS.NET",
"PNS32.CLOUDNS.NET"
]
}
}
],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': 'somedomain.com',
'Type': 'domain',
'Vendor': 'Test',
'Score': 1
}
]
},
'IndicatorTimeline': [],
'IgnoreAutoExtract': False,
'Note': False,
'Relationships': []
}
def test_create_certificate(self):
"""
Given:
- an X509 Certificate with its properties
When
- creating a CommandResults with the Certificate Standard Context
Then
- the proper output Context is created
"""
from CommonServerPython import CommandResults, Common, EntryType, EntryFormat, DBotScoreType
dbot_score = Common.DBotScore(
indicator='bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d',
integration_name='Test',
indicator_type=DBotScoreType.CERTIFICATE,
score=Common.DBotScore.NONE
)
cert_extensions = [
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.AUTHORITYKEYIDENTIFIER,
authority_key_identifier=Common.CertificateExtension.AuthorityKeyIdentifier(
key_identifier="0f80611c823161d52f28e78d4638b42ce1c6d9e2"
),
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.SUBJECTKEYIDENTIFIER,
digest="b34972bb12121b8851cd5564ff9656dcbca3f288",
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.SUBJECTALTERNATIVENAME,
subject_alternative_names=[
Common.GeneralName(
gn_type="dNSName",
gn_value="*.paloaltonetworks.com"
),
Common.GeneralName(
gn_type="dNSName",
gn_value="paloaltonetworks.com"
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.KEYUSAGE,
digital_signature=True,
key_encipherment=True,
critical=True
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.EXTENDEDKEYUSAGE,
usages=[
"serverAuth",
"clientAuth"
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.CRLDISTRIBUTIONPOINTS,
distribution_points=[
Common.CertificateExtension.DistributionPoint(
full_name=[
Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://crl3.digicert.com/ssca-sha2-g7.crl"
)
]
),
Common.CertificateExtension.DistributionPoint(
full_name=[
Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://crl4.digicert.com/ssca-sha2-g7.crl"
)
]
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.CERTIFICATEPOLICIES,
certificate_policies=[
Common.CertificateExtension.CertificatePolicy(
policy_identifier="2.16.840.1.114412.1.1",
policy_qualifiers=["https://www.digicert.com/CPS"]
),
Common.CertificateExtension.CertificatePolicy(
policy_identifier="2.23.140.1.2.2"
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.AUTHORITYINFORMATIONACCESS,
authority_information_access=[
Common.CertificateExtension.AuthorityInformationAccess(
access_method="OCSP",
access_location=Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://ocsp.digicert.com"
)
),
Common.CertificateExtension.AuthorityInformationAccess(
access_method="caIssuers",
access_location=Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://cacerts.digicert.com/DigiCertSHA2SecureServerCA.crt"
)
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.BASICCONSTRAINTS,
basic_constraints=Common.CertificateExtension.BasicConstraints(
ca=False
),
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.PRESIGNEDCERTIFICATETIMESTAMPS,
signed_certificate_timestamps=[
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="PreCertificate"
),
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="PreCertificate"
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.SIGNEDCERTIFICATETIMESTAMPS,
signed_certificate_timestamps=[
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="X509Certificate"
),
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="X509Certificate"
)
],
critical=False
)
]
certificate = Common.Certificate(
subject_dn='CN=*.paloaltonetworks.com,O=Palo Alto Networks\\, Inc.,L=Santa Clara,ST=California,C=US',
dbot_score=dbot_score,
serial_number='19290688218337824112020565039390569720',
issuer_dn='CN=DigiCert SHA2 Secure Server CA,O=DigiCert Inc,C=US',
validity_not_before='2020-10-23T00:00:00.000Z',
validity_not_after='2021-11-21T23:59:59.000Z',
sha256='bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d',
sha1='2392ea5cd4c2a61e51547570634ef887ab1942e9',
md5='22769ae413997b86da4a0934072d9ed0',
publickey=Common.CertificatePublicKey(
algorithm=Common.CertificatePublicKey.Algorithm.RSA,
length=2048,
modulus='00:00:00:00',
exponent=65537
),
spki_sha256='94b716aeda21cd661949cfbf3f55457a277da712cdce0ab31989a4f288fad9b9',
signature_algorithm='sha256',
signature='SIGNATURE',
extensions=cert_extensions
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[certificate]
)
CONTEXT_PATH = "Certificate(val.MD5 && val.MD5 == obj.MD5 || val.SHA1 && val.SHA1 == obj.SHA1 || " \
"val.SHA256 && val.SHA256 == obj.SHA256 || val.SHA512 && val.SHA512 == obj.SHA512)"
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': None,
'HumanReadable': None,
'EntryContext': {
CONTEXT_PATH: [{
"SubjectDN": "CN=*.paloaltonetworks.com,O=Palo Alto Networks\\, Inc.,L=Santa Clara,ST=California,C=US",
"SubjectAlternativeName": [
{
"Type": "dNSName",
"Value": "*.paloaltonetworks.com"
},
{
"Type": "dNSName",
"Value": "paloaltonetworks.com"
}
],
"Name": [
"*.paloaltonetworks.com",
"paloaltonetworks.com"
],
"IssuerDN": "CN=DigiCert SHA2 Secure Server CA,O=DigiCert Inc,C=US",
"SerialNumber": "19290688218337824112020565039390569720",
"ValidityNotBefore": "2020-10-23T00:00:00.000Z",
"ValidityNotAfter": "2021-11-21T23:59:59.000Z",
"SHA256": "bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d",
"SHA1": "2392ea5cd4c2a61e51547570634ef887ab1942e9",
"MD5": "22769ae413997b86da4a0934072d9ed0",
"PublicKey": {
"Algorithm": "RSA",
"Length": 2048,
"Modulus": "00:00:00:00",
"Exponent": 65537
},
"SPKISHA256": "94b716aeda21cd661949cfbf3f55457a277da712cdce0ab31989a4f288fad9b9",
"Signature": {
"Algorithm": "sha256",
"Signature": "SIGNATURE"
},
"Extension": [
{
"OID": "2.5.29.35",
"Name": "authorityKeyIdentifier",
"Critical": False,
"Value": {
"KeyIdentifier": "0f80611c823161d52f28e78d4638b42ce1c6d9e2"
}
},
{
"OID": "2.5.29.14",
"Name": "subjectKeyIdentifier",
"Critical": False,
"Value": {
"Digest": "b34972bb12121b8851cd5564ff9656dcbca3f288"
}
},
{
"OID": "2.5.29.17",
"Name": "subjectAltName",
"Critical": False,
"Value": [
{
"Type": "dNSName",
"Value": "*.paloaltonetworks.com"
},
{
"Type": "dNSName",
"Value": "paloaltonetworks.com"
}
]
},
{
"OID": "2.5.29.15",
"Name": "keyUsage",
"Critical": True,
"Value": {
"DigitalSignature": True,
"KeyEncipherment": True
}
},
{
"OID": "2.5.29.37",
"Name": "extendedKeyUsage",
"Critical": False,
"Value": {
"Usages": [
"serverAuth",
"clientAuth"
]
}
},
{
"OID": "2.5.29.31",
"Name": "cRLDistributionPoints",
"Critical": False,
"Value": [
{
"FullName": [
{
"Type": "uniformResourceIdentifier",
"Value": "http://crl3.digicert.com/ssca-sha2-g7.crl"
}
]
},
{
"FullName": [
{
"Type": "uniformResourceIdentifier",
"Value": "http://crl4.digicert.com/ssca-sha2-g7.crl"
}
]
}
]
},
{
"OID": "2.5.29.32",
"Name": "certificatePolicies",
"Critical": False,
"Value": [
{
"PolicyIdentifier": "2.16.840.1.114412.1.1",
"PolicyQualifiers": [
"https://www.digicert.com/CPS"
]
},
{
"PolicyIdentifier": "2.23.140.1.2.2"
}
]
},
{
"OID": "1.3.6.1.5.5.7.1.1",
"Name": "authorityInfoAccess",
"Critical": False,
"Value": [
{
"AccessMethod": "OCSP",
"AccessLocation": {
"Type": "uniformResourceIdentifier",
"Value": "http://ocsp.digicert.com"
}
},
{
"AccessMethod": "caIssuers",
"AccessLocation": {
"Type": "uniformResourceIdentifier",
"Value": "http://cacerts.digicert.com/DigiCertSHA2SecureServerCA.crt"
}
}
]
},
{
"OID": "2.5.29.19",
"Name": "basicConstraints",
"Critical": False,
"Value": {
"CA": False
}
},
{
"OID": "1.3.6.1.4.1.11129.2.4.2",
"Name": "signedCertificateTimestampList",
"Critical": False,
"Value": [
{
"Version": 0,
"LogId": "f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "PreCertificate"
},
{
"Version": 0,
"LogId": "5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "PreCertificate"
}
]
},
{
"OID": "1.3.6.1.4.1.11129.2.4.5",
"Name": "signedCertificateTimestampList",
"Critical": False,
"Value": [
{
"Version": 0,
"LogId": "f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "X509Certificate"
},
{
"Version": 0,
"LogId": "5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "X509Certificate"
}
]
}
]
}],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [{
"Indicator": "bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d",
"Type": "certificate",
"Vendor": "Test",
"Score": 0
}]
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_email_indicator_type(self, mocker):
"""
Given:
- a single email indicator entry
When
- creating an Common.EMAIL object
Then
- The context created matches the data entry
"""
from CommonServerPython import Common, DBotScoreType
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
dbot_score = Common.DBotScore(
indicator='user@example.com',
integration_name='Test',
indicator_type=DBotScoreType.EMAIL,
score=Common.DBotScore.GOOD
)
dbot_context = {'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)':
{'Indicator': 'user@example.com', 'Type': 'email', 'Vendor': 'Test', 'Score': 1}}
assert dbot_context == dbot_score.to_context()
email_context = Common.EMAIL(
domain='example.com',
address='user@example.com',
dbot_score=dbot_score
)
assert email_context.to_context()[email_context.CONTEXT_PATH] == {'Address': 'user@example.com', 'Domain': 'example.com'}
class TestIndicatorsSearcher:
def mock_search_after_output(self, fromDate='', toDate='', query='', size=0, value='', page=0, searchAfter='',
populateFields=None):
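        # Mocks demisto.searchIndicators: returns one indicator per call while advancing
        # searchAfter from 0 up to 6 (7 indicators in total), then signals the end of the
        # search with searchAfter=None; requests for page >= 17 return no indicators at all.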
if not searchAfter:
searchAfter = 0
iocs = [{'value': 'mock{}'.format(searchAfter)}]
if searchAfter < 6:
searchAfter += 1
else:
# mock the end of indicators
searchAfter = None
if page >= 17:
            # simulate requesting a page beyond the available results, so no indicators are returned
iocs = []
searchAfter = None
return {'searchAfter': searchAfter, 'iocs': iocs, 'total': 7}
def test_search_indicators_by_page(self, mocker):
"""
Given:
- Searching indicators couple of times
        - Server version is less than 6.1.0
When:
- Mocking search indicators using paging
Then:
- The page number is rising
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_paging = IndicatorsSearcher()
search_indicators_obj_paging._can_use_search_after = False
for n in range(5):
search_indicators_obj_paging.search_indicators_by_version()
assert search_indicators_obj_paging._page == 5
def test_search_indicators_by_search_after(self, mocker):
"""
Given:
- Searching indicators couple of times
        - Server version is equal to or higher than 6.1.0
When:
- Mocking search indicators using the searchAfter parameter
Then:
- The search after param is rising
- The page param is rising
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_search_after = IndicatorsSearcher()
search_indicators_obj_search_after._can_use_search_after = True
try:
for n in range(5):
search_indicators_obj_search_after.search_indicators_by_version()
except Exception as e:
print(e)
assert search_indicators_obj_search_after._search_after_param == 5
assert search_indicators_obj_search_after._page == 5
def test_search_all_indicators_by_search_after(self, mocker):
"""
Given:
- Searching indicators couple of times
        - Server version is equal to or higher than 6.1.0
When:
- Mocking search indicators using the searchAfter parameter until there are no more indicators
so search_after is None
Then:
- The search after param is None
- The page param is rising
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_search_after = IndicatorsSearcher()
search_indicators_obj_search_after._can_use_search_after = True
for n in range(7):
search_indicators_obj_search_after.search_indicators_by_version()
assert search_indicators_obj_search_after._search_after_param is None
assert search_indicators_obj_search_after._page == 7
def test_search_indicators_in_certain_page(self, mocker):
"""
Given:
- Searching indicators in a specific page that is not 0
        - Server version is equal to or higher than 6.1.0
When:
- Mocking search indicators in this specific page
so search_after is None
Then:
        - The search after param is None
- The page param is 17
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_search_after = IndicatorsSearcher(page=17)
search_indicators_obj_search_after._can_use_search_after = True
search_indicators_obj_search_after.search_indicators_by_version()
assert search_indicators_obj_search_after._search_after_param is None
assert search_indicators_obj_search_after._page == 17
def test_iterator(self, mocker):
"""
Given:
- Searching indicators from page 10
- Total available indicators == 7
When:
- Searching indicators using iterator (whether search_after is supported or not)
- Searching indicators a 2nd time using the same search object
Then:
- Get 7 indicators
- Advance page to 17
        - _is_search_done returns True when search_after is supported
        - _is_search_done returns True when search_after is not supported as well, since all results were consumed
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators = IndicatorsSearcher(page=10)
search_indicators._can_use_search_after = True
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 7
assert search_indicators.page == 17
assert search_indicators._is_search_done() is True
search_indicators._can_use_search_after = False
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 7
assert search_indicators.page == 17
assert search_indicators._is_search_done() is True
def test_iterator__empty_page(self, mocker):
"""
Given:
- Searching indicators from page 18
- Total available indicators from page 10-16 == 7
- No available indicators from page 17
When:
- Searching indicators using iterator (search_after is not supported)
Then:
- Get 0 indicators
- page doesn't advance (set to 18)
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators = IndicatorsSearcher(page=18)
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 0
assert search_indicators.page == 18
def test_iterator__limit(self, mocker):
"""
Given:
- Searching indicators from page 10
- Total available indicators == 7
- Limit is set to 5
When:
- Searching indicators using iterator (whether search_after is supported or not)
- Searching indicators a 2nd time using the same search object
Then:
- Get 5 indicators
        - Advance page to 15 when search_after is supported (_is_search_done is supported)
        - Advance page to 15 when search_after is not supported (_is_search_done is not supported)
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators = IndicatorsSearcher(page=10, limit=5)
search_indicators._can_use_search_after = True
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 5
assert search_indicators.page == 15
search_indicators._can_use_search_after = False
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 5
assert search_indicators.page == 15
class TestAutoFocusKeyRetriever:
def test_instantiate_class_with_param_key(self, mocker, clear_version_cache):
"""
Given:
- giving the api_key parameter
When:
- Mocking getAutoFocusApiKey
- Mocking server version to be 6.2.0
Then:
- The Auto Focus API Key is the one given to the class
"""
from CommonServerPython import AutoFocusKeyRetriever
mocker.patch.object(demisto, 'getAutoFocusApiKey', return_value='test')
mocker.patch.object(demisto, 'demistoVersion', return_value={'version': '6.2.0', 'buildNumber': '62000'})
auto_focus_key_retriever = AutoFocusKeyRetriever(api_key='1234')
assert auto_focus_key_retriever.key == '1234'
def test_instantiate_class_pre_6_2_failed(self, mocker, clear_version_cache):
"""
Given:
- not giving the api_key parameter
When:
- Mocking getAutoFocusApiKey
- Mocking server version to be 6.1.0
Then:
        - Validate that an exception with an appropriate error message is raised.
"""
from CommonServerPython import AutoFocusKeyRetriever
mocker.patch.object(demisto, 'getAutoFocusApiKey', return_value='test')
mocker.patch.object(demisto, 'demistoVersion', return_value={'version': '6.1.0', 'buildNumber': '61000'})
with raises(DemistoException, match='For versions earlier than 6.2.0, configure an API Key.'):
AutoFocusKeyRetriever(api_key='')
def test_instantiate_class_without_param_key(self, mocker, clear_version_cache):
"""
Given:
- not giving the api_key parameter
When:
- Mocking getAutoFocusApiKey
- Mocking server version to be 6.2.0
Then:
- The Auto Focus API Key is the one given by the getAutoFocusApiKey method
"""
from CommonServerPython import AutoFocusKeyRetriever
mocker.patch.object(demisto, 'getAutoFocusApiKey', return_value='test')
mocker.patch.object(demisto, 'demistoVersion', return_value={'version': '6.2.0', 'buildNumber': '62000'})
auto_focus_key_retriever = AutoFocusKeyRetriever(api_key='')
assert auto_focus_key_retriever.key == 'test'
class TestEntityRelationship:
"""Global vars for all of the tests"""
name = 'related-to'
reverse_name = 'related-to'
relationship_type = 'IndicatorToIndicator'
entity_a = 'test1'
entity_a_family = 'Indicator'
entity_a_type = 'Domain'
entity_b = 'test2'
entity_b_family = 'Indicator'
entity_b_type = 'Domain'
source_reliability = 'F - Reliability cannot be judged'
def test_entity_relations_context(self):
"""
Given
- an EntityRelationship object.
When
- running to_context function of the object
Then
- Validate that the expected context is created
"""
from CommonServerPython import EntityRelationship
relationship = EntityRelationship(name='related-to',
relationship_type='IndicatorToIndicator',
entity_a='test1',
entity_a_family='Indicator',
entity_a_type='Domain',
entity_b='test2',
entity_b_family='Indicator',
entity_b_type='Domain',
source_reliability='F - Reliability cannot be judged',
brand='test')
expected_context = {
"Relationship": 'related-to',
"EntityA": 'test1',
"EntityAType": 'Domain',
"EntityB": 'test2',
"EntityBType": 'Domain',
}
assert relationship.to_context() == expected_context
def test_entity_relations_to_entry(self):
"""
Given
- an EntityRelationship object.
When
- running to_entry function of the object
Then
        - Validate that the expected entry is created
"""
from CommonServerPython import EntityRelationship
relationship = EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type,
source_reliability=TestEntityRelationship.source_reliability
)
expected_entry = {
"name": TestEntityRelationship.name,
"reverseName": TestEntityRelationship.reverse_name,
"type": TestEntityRelationship.relationship_type,
"entityA": TestEntityRelationship.entity_a,
"entityAFamily": TestEntityRelationship.entity_a_family,
"entityAType": TestEntityRelationship.entity_a_type,
"entityB": TestEntityRelationship.entity_b,
"entityBFamily": TestEntityRelationship.entity_b_family,
"entityBType": TestEntityRelationship.entity_b_type,
"fields": {},
"reliability": TestEntityRelationship.source_reliability
}
assert relationship.to_entry() == expected_entry
def test_entity_relations_to_indicator(self):
"""
Given
- an EntityRelationship object.
When
- running to_indicator function of the object
Then
        - Validate that the expected indicator representation is created
"""
from CommonServerPython import EntityRelationship
relationship = EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type,
)
expected_to_indicator = {
"name": TestEntityRelationship.name,
"reverseName": TestEntityRelationship.reverse_name,
"type": TestEntityRelationship.relationship_type,
"entityA": TestEntityRelationship.entity_a,
"entityAFamily": TestEntityRelationship.entity_a_family,
"entityAType": TestEntityRelationship.entity_a_type,
"entityB": TestEntityRelationship.entity_b,
"entityBFamily": TestEntityRelationship.entity_b_family,
"entityBType": TestEntityRelationship.entity_b_type,
"fields": {},
}
assert relationship.to_indicator() == expected_to_indicator
def test_invalid_name_init(self):
"""
Given
        - an EntityRelationship object which has an invalid relationship name.
        When
        - Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name='ilegal',
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid relationship: ilegal" in str(exception)
def test_invalid_relation_type_init(self):
"""
Given
        - an EntityRelationship object which has an invalid relationship type.
        When
        - Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type='TestRelationshipType',
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid relationship type: TestRelationshipType" in str(exception)
def test_invalid_a_family_init(self):
"""
Given
        - an EntityRelationship object which has an invalid family type for the source entity.
        When
        - Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family='IndicatorIlegal',
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid entity A Family type: IndicatorIlegal" in str(exception)
def test_invalid_a_type_init(self):
"""
Given
        - an EntityRelationship object which has an invalid type for the source entity.
        When
        - Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type='DomainTest',
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid entity A type: DomainTest" in str(exception)
def test_invalid_b_family_init(self):
"""
Given
        - an EntityRelationship object which has an invalid family type for the destination entity.
        When
        - Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family='IndicatorIlegal',
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid entity B Family type: IndicatorIlegal" in str(exception)
def test_invalid_b_type_init(self):
"""
Given
        - an EntityRelationship object which has an invalid type for the destination entity.
        When
        - Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type='DomainTest'
)
except ValueError as exception:
assert "Invalid entity B type: DomainTest" in str(exception)
class TestSetAndGetLastRun:
def test_get_last_run_in_6_2_when_get_last_run_has_results(self, mocker):
"""
Given: 6.2.0 environment and getLastRun returns results
When: Fetch indicators
Then: Returning all indicators from demisto.getLastRun object
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mocker.patch.object(demisto, 'getLastRun', return_value={1: "first indicator"})
result = get_feed_last_run()
assert result == {1: "first indicator"}
def test_get_last_run_in_6_1_when_get_integration_context_has_results(self, mocker):
"""
Given: 6.1.0 environment and getIntegrationContext return results
When: Fetch indicators
This can happen when updating XSOAR version to 6.2.0 while a feed instance is already set.
Then: Returning all indicators from demisto.getIntegrationContext object
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.1.0"})
mocker.patch.object(demisto, 'getIntegrationContext', return_value={1: "first indicator"})
result = get_feed_last_run()
assert result == {1: "first indicator"}
def test_get_last_run_in_6_2_when_get_last_run_has_no_results(self, mocker):
"""
Given: 6.2.0 environment and getLastRun and getIntegrationContext are empty
When: Fetch indicators
Then: function will return empty dict
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mocker.patch.object(demisto, 'getIntegrationContext', return_value={})
mocker.patch.object(demisto, 'getLastRun', return_value={})
result = get_feed_last_run()
assert result == {}
def test_get_last_run_in_6_2_when_get_last_is_empty_and_get_integration_is_not(self, mocker):
"""
Given: 6.2.0 environment and getLastRun is empty and getIntegrationContext has results.
When: Fetch indicators
        Then: Return the indicators from getIntegrationContext, migrate them to setLastRun, and clear the integration context
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mocker.patch.object(demisto, 'getIntegrationContext', return_value={1: "first indicator"})
mocker.patch.object(demisto, 'getLastRun', return_value={})
set_last_run = mocker.patch.object(demisto, 'setLastRun', return_value={})
set_integration_context = mocker.patch.object(demisto, 'setIntegrationContext', return_value={})
result = get_feed_last_run()
assert result == {1: "first indicator"}
set_last_run.assert_called_with({1: "first indicator"})
set_integration_context.assert_called_with({})
def test_set_last_run_in_6_2(self, mocker):
"""
Given: 6.2.0 environment
When: Fetch indicators
Then: Using demisto.setLastRun to save results
"""
import demistomock as demisto
from CommonServerPython import set_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
set_last_run = mocker.patch.object(demisto, 'setLastRun', return_value={})
set_integration_context = mocker.patch.object(demisto, 'setIntegrationContext', return_value={})
set_feed_last_run({1: "first indicator"})
assert set_integration_context.called is False
set_last_run.assert_called_with({1: "first indicator"})
def test_set_last_run_in_6_1(self, mocker):
"""
Given: 6.1.0 environment
When: Fetch indicators
Then: Using demisto.setIntegrationContext to save results
"""
import demistomock as demisto
from CommonServerPython import set_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.1.0"})
set_last_run = mocker.patch.object(demisto, 'setLastRun', return_value={})
set_integration_context = mocker.patch.object(demisto, 'setIntegrationContext', return_value={})
set_feed_last_run({1: "first indicator"})
set_integration_context.assert_called_with({1: "first indicator"})
assert set_last_run.called is False
class TestIsDemistoServerGE:
@classmethod
@pytest.fixture(scope='function', autouse=True)
def clear_cache(cls):
get_demisto_version._version = None
def test_get_demisto_version(self, mocker):
# verify expected server version and build returned in case Demisto class has attribute demistoVersion
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '5.0.0',
'buildNumber': '50000'
}
)
assert get_demisto_version() == {
'version': '5.0.0',
'buildNumber': '50000'
}
# call again to check cache
assert get_demisto_version() == {
'version': '5.0.0',
'buildNumber': '50000'
}
# call count should be 1 as we cached
assert demisto.demistoVersion.call_count == 1
# test is_demisto_version_ge
assert is_demisto_version_ge('5.0.0')
assert is_demisto_version_ge('4.5.0')
assert not is_demisto_version_ge('5.5.0')
assert get_demisto_version_as_str() == '5.0.0-50000'
def test_is_demisto_version_ge_4_5(self, mocker):
get_version_patch = mocker.patch('CommonServerPython.get_demisto_version')
get_version_patch.side_effect = AttributeError('simulate missing demistoVersion')
assert not is_demisto_version_ge('5.0.0')
assert not is_demisto_version_ge('6.0.0')
with raises(AttributeError, match='simulate missing demistoVersion'):
is_demisto_version_ge('4.5.0')
def test_is_demisto_version_ge_dev_version(self, mocker):
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.0.0',
'buildNumber': '50000'
}
)
assert is_demisto_version_ge('6.0.0', '1-dev')
@pytest.mark.parametrize('version, build', [
('6.0.0', '49999'),
('6.0.0', '50000'),
('6.0.0', '6'), # Added with the fix of https://github.com/demisto/etc/issues/36876
('5.5.0', '50001')
])
def test_is_demisto_version_build_ge(self, mocker, version, build):
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.0.0',
'buildNumber': '50000'
}
)
assert is_demisto_version_ge(version, build)
@pytest.mark.parametrize('version, build', [
('6.0.0', '50001'),
('6.1.0', '49999')
])
def test_is_demisto_version_build_ge_negative(self, mocker, version, build):
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.0.0',
'buildNumber': '50000'
}
)
assert not is_demisto_version_ge(version, build)
def test_smart_get_dict():
d = {'t1': None, "t2": 1}
# before we remove the dict will return null which is unexpected by a lot of users
assert d.get('t1', 2) is None
s = SmartGetDict(d)
assert s.get('t1', 2) == 2
assert s.get('t2') == 1
assert s.get('t3') is None
class TestCustomIndicator:
def test_custom_indicator_init_success(self):
"""
Given: Data needed for creating a custom indicator
When: Data is valid
Then: Create a valid custom indicator
"""
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
indicator = Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
assert indicator.CONTEXT_PATH == 'prefix(val.value && val.value == obj.value)'
assert indicator.param == 'value'
assert indicator.value == 'test_value'
def test_custom_indicator_init_existing_type(self):
"""
Given: Data needed for creating a custom indicator
When: Type already exists
        Then: raise a ValueError
"""
with pytest.raises(ValueError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('ip', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
def test_custom_indicator_init_no_prefix(self):
"""
Given: Data needed for Custom indicator
When: Prefix provided is None
Then: Raise ValueError
"""
with pytest.raises(ValueError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, None)
def test_custom_indicator_init_no_dbot_score(self):
"""
Given: Data needed for Custom indicator
When: Dbotscore is not a DBotScore object
Then: Raise ValueError
"""
with pytest.raises(ValueError):
from CommonServerPython import Common
dbot_score = ''
Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
def test_custom_indicator_to_context(self):
"""
Given: Data needed for Custom indicator
When: there's a call to to_context
Then: create a valid context
"""
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
indicator = Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
context = indicator.to_context()
assert context['DBotScore(val.Indicator &&'
' val.Indicator == obj.Indicator &&'
' val.Vendor == obj.Vendor && val.Type == obj.Type)']['Indicator'] == 'test'
assert context['prefix(val.value && val.value == obj.value)']['Value'] == 'test_value'
assert context['prefix(val.value && val.value == obj.value)']['param'] == 'value'
def test_custom_indicator_no_params(self):
"""
Given: Data needed for creating a custom indicator
When: params are None
Then: Raise an error
"""
with pytest.raises(TypeError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('test', 'test_value', dbot_score, None, 'prefix')
def test_custom_indicator_no_value(self):
"""
Given: Data needed for creating a custom indicator
When: value is None
Then: Raise an error
"""
with pytest.raises(ValueError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('test', None, dbot_score, {'param': 'value'}, 'prefix')
| 36.77021 | 139 | 0.581851 |
dd3433c0903ab701ff475e35d88a96a291420282 | 3,706 | py | Python | IO/summationPub.py | storage4grid/PROFESS-PROFEV | adf4e26488225206c249938c9eecc394a06f9677 | ["Apache-2.0"] | null | null | null | IO/summationPub.py | storage4grid/PROFESS-PROFEV | adf4e26488225206c249938c9eecc394a06f9677 | ["Apache-2.0"] | null | null | null | IO/summationPub.py | storage4grid/PROFESS-PROFEV | adf4e26488225206c249938c9eecc394a06f9677 | ["Apache-2.0"] | null | null | null |
"""
Created on Sep 19 15:45 2019
@author: nishit
"""
import configparser
import threading
from abc import abstractmethod
from queue import Queue
from random import randrange
import time
from IO.MQTTClient import MQTTClient
from IO.dataReceiver import DataReceiver
from utils_intern.messageLogger import MessageLogger
logger = MessageLogger.get_logger_parent()
class SummationPub():
def Stop(self):
self.rec.exit()
self.pub.exit()
@abstractmethod
def data_formater(self, data):
pass
@abstractmethod
def sum_data(self):
pass
def __init__(self, receiver_params, config):
self.q = Queue(maxsize=0)
self.pub = Publisher(config, self.q)
self.rec = Receiver(True, receiver_params, config, self.data_formater, id="none")
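# Minimal usage sketch (illustrative only; the subclass and variable names below are
# assumptions, not part of this module). A concrete subclass must implement
# data_formater and sum_data; __init__ then wires up a Receiver and a Publisher thread:
#
#     class MySummationPub(SummationPub):
#         def data_formater(self, data):
#             return data if isinstance(data, dict) else {}
#
#         def sum_data(self):
#             pass
#
#     pub = MySummationPub(receiver_params, config)   # starts the receive/publish machinery
#     ...
#     pub.Stop()                                      # shuts down both the receiver and the publisher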
class Receiver(DataReceiver):
def __init__(self, internal, topic_params, config, data_formater, id):
self.data_formater = data_formater
super().__init__(internal, topic_params, config, id=id, prepare_topic_qos=False, sub_pub=True)
def on_msg_received(self, payload):
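        # Reformat the raw payload via the subclass-provided data_formater, merge the resulting
        # keys into the shared data buffer, and record when the last update arrived.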
try:
logger.info("msg rec : " + str(payload))
data = self.data_formater(payload)
if len(data) == 0:
logger.info("No keys found in received data")
self.data.update(data)
self.data_update = True
self.last_time = time.time()
except Exception as e:
logger.error(e)
class Publisher():
def __init__(self, config, q):
self.stopRequest = threading.Event()
self.config = config
self.q = q
self.mqtt_client = self.init_mqtt()
self.consumer_thread = threading.Thread(target=self.consumer)
self.consumer_thread.start()
def init_mqtt(self):
try:
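            # A dedicated publish broker ("pub.mqtt.host") takes precedence; otherwise fall back
            # to the shared "mqtt.host" setting.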
if "pub.mqtt.host" in dict(self.config.items("IO")):
host = self.config.get("IO", "pub.mqtt.host")
else:
host = self.config.get("IO", "mqtt.host")
port = self.config.get("IO", "mqtt.port")
client_id = "client_publish" + str(randrange(100000)) + str(time.time()).replace(".", "")
mqtt = MQTTClient(str(host), port, client_id,
username=self.config.get("IO", "mqtt.username", fallback=None),
password=self.config.get("IO", "mqtt.password", fallback=None),
ca_cert_path=self.config.get("IO", "mqtt.ca.cert.path", fallback=None),
                              set_insecure=self.config.getboolean("IO", "mqtt.insecure.flag", fallback=False))  # getboolean avoids bool('False') evaluating to True
return mqtt
except Exception as e:
logger.error(e)
raise e
def consumer(self):
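        # Drain the shared queue and publish each item; when the queue is empty, sleep briefly
        # instead of busy-waiting. The loop exits once stopRequest is set by exit().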
        while not self.stopRequest.is_set():
if not self.q.empty():
try:
logger.debug("Queue size: " + str(self.q.qsize()))
data = self.q.get()
if data is not None:
self.publish_data(data)
except Exception as e:
logger.error("Error in consuming queue " + str(e))
else:
time.sleep(2)
def publish_data(self, data):
try:
topic = data["topic"]
data = data["data"]
self.mqtt_client.publish(topic=topic, message=data, waitForAck=True, qos=1)
logger.debug("Results published on this topic: " + topic)
except Exception as e:
logger.error("Error pub data " + str(e))
def exit(self):
self.stopRequest.set()
self.mqtt_client.MQTTExit()
self.consumer_thread.join()
| 33.089286 | 109 | 0.57987 |
2598bc676520755448378dc0b5ba76d4e9886e1c | 67,095 | py | Python | Lib/tkinter/test/test_ttk/test_widgets.py | shawwn/cpython | 0ff8a3b374286d2218fc18f47556a5ace202dad3 | ["0BSD"] | 1,886 | 2021-05-03T23:58:43.000Z | 2022-03-31T19:15:58.000Z | Lib/tkinter/test/test_ttk/test_widgets.py | shawwn/cpython | 0ff8a3b374286d2218fc18f47556a5ace202dad3 | ["0BSD"] | 70 | 2021-05-04T23:25:35.000Z | 2022-03-31T18:42:08.000Z | Lib/tkinter/test/test_ttk/test_widgets.py | shawwn/cpython | 0ff8a3b374286d2218fc18f47556a5ace202dad3 | ["0BSD"] | 52 | 2021-05-04T21:26:03.000Z | 2022-03-08T18:02:56.000Z |
import unittest
import tkinter
from tkinter import ttk, TclError
from test.support import requires
import sys
from tkinter.test.test_ttk.test_functions import MockTclObj
from tkinter.test.support import (AbstractTkTest, tcl_version, get_tk_patchlevel,
simulate_mouse_click)
from tkinter.test.widget_tests import (add_standard_options, noconv,
AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests,
setUpModule)
requires('gui')
class StandardTtkOptionsTests(StandardOptionsTests):
def test_class(self):
widget = self.create()
self.assertEqual(widget['class'], '')
errmsg='attempt to change read-only option'
if get_tk_patchlevel() < (8, 6, 0, 'beta', 3):
errmsg='Attempt to change read-only option'
self.checkInvalidParam(widget, 'class', 'Foo', errmsg=errmsg)
widget2 = self.create(class_='Foo')
self.assertEqual(widget2['class'], 'Foo')
def test_padding(self):
widget = self.create()
self.checkParam(widget, 'padding', 0, expected=('0',))
self.checkParam(widget, 'padding', 5, expected=('5',))
self.checkParam(widget, 'padding', (5, 6), expected=('5', '6'))
self.checkParam(widget, 'padding', (5, 6, 7),
expected=('5', '6', '7'))
self.checkParam(widget, 'padding', (5, 6, 7, 8),
expected=('5', '6', '7', '8'))
self.checkParam(widget, 'padding', ('5p', '6p', '7p', '8p'))
self.checkParam(widget, 'padding', (), expected='')
def test_style(self):
widget = self.create()
self.assertEqual(widget['style'], '')
errmsg = 'Layout Foo not found'
if hasattr(self, 'default_orient'):
errmsg = ('Layout %s.Foo not found' %
getattr(self, 'default_orient').title())
self.checkInvalidParam(widget, 'style', 'Foo',
errmsg=errmsg)
widget2 = self.create(class_='Foo')
self.assertEqual(widget2['class'], 'Foo')
# XXX
pass
class WidgetTest(AbstractTkTest, unittest.TestCase):
"""Tests methods available in every ttk widget."""
def setUp(self):
super().setUp()
self.widget = ttk.Button(self.root, width=0, text="Text")
self.widget.pack()
self.widget.wait_visibility()
def test_identify(self):
self.widget.update_idletasks()
self.assertEqual(self.widget.identify(
int(self.widget.winfo_width() / 2),
int(self.widget.winfo_height() / 2)
), "label")
self.assertEqual(self.widget.identify(-1, -1), "")
self.assertRaises(tkinter.TclError, self.widget.identify, None, 5)
self.assertRaises(tkinter.TclError, self.widget.identify, 5, None)
self.assertRaises(tkinter.TclError, self.widget.identify, 5, '')
def test_widget_state(self):
# XXX not sure about the portability of all these tests
self.assertEqual(self.widget.state(), ())
self.assertEqual(self.widget.instate(['!disabled']), True)
# changing from !disabled to disabled
self.assertEqual(self.widget.state(['disabled']), ('!disabled', ))
# no state change
self.assertEqual(self.widget.state(['disabled']), ())
# change back to !disable but also active
self.assertEqual(self.widget.state(['!disabled', 'active']),
('!active', 'disabled'))
# no state changes, again
self.assertEqual(self.widget.state(['!disabled', 'active']), ())
self.assertEqual(self.widget.state(['active', '!disabled']), ())
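        # instate() can also invoke a callback with extra positional/keyword arguments
        # when the widget matches the given state spec, as exercised below.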
def test_cb(arg1, **kw):
return arg1, kw
self.assertEqual(self.widget.instate(['!disabled'],
test_cb, "hi", **{"msg": "there"}),
('hi', {'msg': 'there'}))
# attempt to set invalid statespec
currstate = self.widget.state()
self.assertRaises(tkinter.TclError, self.widget.instate,
['badstate'])
self.assertRaises(tkinter.TclError, self.widget.instate,
['disabled', 'badstate'])
# verify that widget didn't change its state
self.assertEqual(currstate, self.widget.state())
# ensuring that passing None as state doesn't modify current state
self.widget.state(['active', '!disabled'])
self.assertEqual(self.widget.state(), ('active', ))
class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests):
_conv_pixels = noconv
@add_standard_options(StandardTtkOptionsTests)
class FrameTest(AbstractToplevelTest, unittest.TestCase):
OPTIONS = (
'borderwidth', 'class', 'cursor', 'height',
'padding', 'relief', 'style', 'takefocus',
'width',
)
def create(self, **kwargs):
return ttk.Frame(self.root, **kwargs)
@add_standard_options(StandardTtkOptionsTests)
class LabelFrameTest(AbstractToplevelTest, unittest.TestCase):
OPTIONS = (
'borderwidth', 'class', 'cursor', 'height',
'labelanchor', 'labelwidget',
'padding', 'relief', 'style', 'takefocus',
'text', 'underline', 'width',
)
def create(self, **kwargs):
return ttk.LabelFrame(self.root, **kwargs)
def test_labelanchor(self):
widget = self.create()
self.checkEnumParam(widget, 'labelanchor',
'e', 'en', 'es', 'n', 'ne', 'nw', 's', 'se', 'sw', 'w', 'wn', 'ws',
errmsg='Bad label anchor specification {}')
self.checkInvalidParam(widget, 'labelanchor', 'center')
def test_labelwidget(self):
widget = self.create()
label = ttk.Label(self.root, text='Mupp', name='foo')
self.checkParam(widget, 'labelwidget', label, expected='.foo')
label.destroy()
class AbstractLabelTest(AbstractWidgetTest):
def checkImageParam(self, widget, name):
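        # Verifies that an image-valued option accepts a PhotoImage object, an image name,
        # and state-specific (image, state, image) sequences, and rejects unknown image names.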
image = tkinter.PhotoImage(master=self.root, name='image1')
image2 = tkinter.PhotoImage(master=self.root, name='image2')
self.checkParam(widget, name, image, expected=('image1',))
self.checkParam(widget, name, 'image1', expected=('image1',))
self.checkParam(widget, name, (image,), expected=('image1',))
self.checkParam(widget, name, (image, 'active', image2),
expected=('image1', 'active', 'image2'))
self.checkParam(widget, name, 'image1 active image2',
expected=('image1', 'active', 'image2'))
self.checkInvalidParam(widget, name, 'spam',
errmsg='image "spam" doesn\'t exist')
def test_compound(self):
widget = self.create()
self.checkEnumParam(widget, 'compound',
'none', 'text', 'image', 'center',
'top', 'bottom', 'left', 'right')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state', 'active', 'disabled', 'normal')
def test_width(self):
widget = self.create()
self.checkParams(widget, 'width', 402, -402, 0)
@add_standard_options(StandardTtkOptionsTests)
class LabelTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'anchor', 'background', 'borderwidth',
'class', 'compound', 'cursor', 'font', 'foreground',
'image', 'justify', 'padding', 'relief', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'width', 'wraplength',
)
_conv_pixels = noconv
def create(self, **kwargs):
return ttk.Label(self.root, **kwargs)
def test_font(self):
widget = self.create()
self.checkParam(widget, 'font',
'-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*')
@add_standard_options(StandardTtkOptionsTests)
class ButtonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor', 'default',
'image', 'padding', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'width',
)
def create(self, **kwargs):
return ttk.Button(self.root, **kwargs)
def test_default(self):
widget = self.create()
self.checkEnumParam(widget, 'default', 'normal', 'active', 'disabled')
def test_invoke(self):
success = []
btn = ttk.Button(self.root, command=lambda: success.append(1))
btn.invoke()
self.assertTrue(success)
@add_standard_options(StandardTtkOptionsTests)
class CheckbuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor',
'image',
'offvalue', 'onvalue',
'padding', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'variable', 'width',
)
def create(self, **kwargs):
return ttk.Checkbutton(self.root, **kwargs)
def test_offvalue(self):
widget = self.create()
self.checkParams(widget, 'offvalue', 1, 2.3, '', 'any string')
def test_onvalue(self):
widget = self.create()
self.checkParams(widget, 'onvalue', 1, 2.3, '', 'any string')
def test_invoke(self):
success = []
def cb_test():
success.append(1)
return "cb test called"
cbtn = ttk.Checkbutton(self.root, command=cb_test)
# the variable automatically created by ttk.Checkbutton is actually
# undefined till we invoke the Checkbutton
self.assertEqual(cbtn.state(), ('alternate', ))
self.assertRaises(tkinter.TclError, cbtn.tk.globalgetvar,
cbtn['variable'])
res = cbtn.invoke()
self.assertEqual(res, "cb test called")
self.assertEqual(cbtn['onvalue'],
cbtn.tk.globalgetvar(cbtn['variable']))
self.assertTrue(success)
cbtn['command'] = ''
res = cbtn.invoke()
self.assertFalse(str(res))
self.assertLessEqual(len(success), 1)
self.assertEqual(cbtn['offvalue'],
cbtn.tk.globalgetvar(cbtn['variable']))
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class EntryTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'background', 'class', 'cursor',
'exportselection', 'font', 'foreground',
'invalidcommand', 'justify',
'show', 'state', 'style', 'takefocus', 'textvariable',
'validate', 'validatecommand', 'width', 'xscrollcommand',
)
def setUp(self):
super().setUp()
self.entry = self.create()
def create(self, **kwargs):
return ttk.Entry(self.root, **kwargs)
def test_invalidcommand(self):
widget = self.create()
self.checkCommandParam(widget, 'invalidcommand')
def test_show(self):
widget = self.create()
self.checkParam(widget, 'show', '*')
self.checkParam(widget, 'show', '')
self.checkParam(widget, 'show', ' ')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state',
'disabled', 'normal', 'readonly')
def test_validate(self):
widget = self.create()
self.checkEnumParam(widget, 'validate',
'all', 'key', 'focus', 'focusin', 'focusout', 'none')
def test_validatecommand(self):
widget = self.create()
self.checkCommandParam(widget, 'validatecommand')
def test_bbox(self):
self.assertIsBoundingBox(self.entry.bbox(0))
self.assertRaises(tkinter.TclError, self.entry.bbox, 'noindex')
self.assertRaises(tkinter.TclError, self.entry.bbox, None)
def test_identify(self):
self.entry.pack()
self.entry.wait_visibility()
self.entry.update_idletasks()
# bpo-27313: macOS Cocoa widget differs from X, allow either
if sys.platform == 'darwin':
self.assertIn(self.entry.identify(5, 5),
("textarea", "Combobox.button") )
else:
self.assertEqual(self.entry.identify(5, 5), "textarea")
self.assertEqual(self.entry.identify(-1, -1), "")
self.assertRaises(tkinter.TclError, self.entry.identify, None, 5)
self.assertRaises(tkinter.TclError, self.entry.identify, 5, None)
self.assertRaises(tkinter.TclError, self.entry.identify, 5, '')
def test_validation_options(self):
success = []
test_invalid = lambda: success.append(True)
self.entry['validate'] = 'none'
self.entry['validatecommand'] = lambda: False
self.entry['invalidcommand'] = test_invalid
self.entry.validate()
self.assertTrue(success)
self.entry['invalidcommand'] = ''
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['invalidcommand'] = test_invalid
self.entry['validatecommand'] = lambda: True
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['validatecommand'] = ''
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['validatecommand'] = True
self.assertRaises(tkinter.TclError, self.entry.validate)
def test_validation(self):
validation = []
def validate(to_insert):
if not 'a' <= to_insert.lower() <= 'z':
validation.append(False)
return False
validation.append(True)
return True
self.entry['validate'] = 'key'
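# Tk substitutes '%S' with the string being inserted (or deleted), so the callback sees each keystroke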
self.entry['validatecommand'] = self.entry.register(validate), '%S'
self.entry.insert('end', 1)
self.entry.insert('end', 'a')
self.assertEqual(validation, [False, True])
self.assertEqual(self.entry.get(), 'a')
def test_revalidation(self):
def validate(content):
for letter in content:
if not 'a' <= letter.lower() <= 'z':
return False
return True
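# Tk substitutes '%P' with the prospective new value of the entry, so the whole contents are re-checked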
self.entry['validatecommand'] = self.entry.register(validate), '%P'
self.entry.insert('end', 'avocado')
self.assertEqual(self.entry.validate(), True)
self.assertEqual(self.entry.state(), ())
self.entry.delete(0, 'end')
self.assertEqual(self.entry.get(), '')
self.entry.insert('end', 'a1b')
self.assertEqual(self.entry.validate(), False)
self.assertEqual(self.entry.state(), ('invalid', ))
self.entry.delete(1)
self.assertEqual(self.entry.validate(), True)
self.assertEqual(self.entry.state(), ())
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class ComboboxTest(EntryTest, unittest.TestCase):
OPTIONS = (
'background', 'class', 'cursor', 'exportselection',
'font', 'foreground', 'height', 'invalidcommand',
'justify', 'postcommand', 'show', 'state', 'style',
'takefocus', 'textvariable',
'validate', 'validatecommand', 'values',
'width', 'xscrollcommand',
)
def setUp(self):
super().setUp()
self.combo = self.create()
def create(self, **kwargs):
return ttk.Combobox(self.root, **kwargs)
def test_height(self):
widget = self.create()
self.checkParams(widget, 'height', 100, 101.2, 102.6, -100, 0, '1i')
def _show_drop_down_listbox(self):
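# Simulate a click on the drop-down arrow near the right edge of the combobox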
width = self.combo.winfo_width()
self.combo.event_generate('<ButtonPress-1>', x=width - 5, y=5)
self.combo.event_generate('<ButtonRelease-1>', x=width - 5, y=5)
self.combo.update_idletasks()
def test_virtual_event(self):
success = []
self.combo['values'] = [1]
self.combo.bind('<<ComboboxSelected>>',
lambda evt: success.append(True))
self.combo.pack()
self.combo.wait_visibility()
height = self.combo.winfo_height()
self._show_drop_down_listbox()
self.combo.update()
self.combo.event_generate('<Return>')
self.combo.update()
self.assertTrue(success)
def test_postcommand(self):
success = []
self.combo['postcommand'] = lambda: success.append(True)
self.combo.pack()
self.combo.wait_visibility()
self._show_drop_down_listbox()
self.assertTrue(success)
# testing postcommand removal
self.combo['postcommand'] = ''
self._show_drop_down_listbox()
self.assertEqual(len(success), 1)
def test_values(self):
def check_get_current(getval, currval):
self.assertEqual(self.combo.get(), getval)
self.assertEqual(self.combo.current(), currval)
self.assertEqual(self.combo['values'],
() if tcl_version < (8, 5) else '')
check_get_current('', -1)
self.checkParam(self.combo, 'values', 'mon tue wed thur',
expected=('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.combo, 'values', ('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.combo, 'values', (42, 3.14, '', 'any string'))
self.checkParam(self.combo, 'values', '')
self.combo['values'] = ['a', 1, 'c']
self.combo.set('c')
check_get_current('c', 2)
self.combo.current(0)
check_get_current('a', 0)
self.combo.set('d')
check_get_current('d', -1)
# testing values with empty string
self.combo.set('')
self.combo['values'] = (1, 2, '', 3)
check_get_current('', 2)
# testing values with empty string set through configure
self.combo.configure(values=[1, '', 2])
self.assertEqual(self.combo['values'],
('1', '', '2') if self.wantobjects else
'1 {} 2')
# testing values with spaces
self.combo['values'] = ['a b', 'a\tb', 'a\nb']
self.assertEqual(self.combo['values'],
('a b', 'a\tb', 'a\nb') if self.wantobjects else
'{a b} {a\tb} {a\nb}')
# testing values with special characters
self.combo['values'] = [r'a\tb', '"a"', '} {']
self.assertEqual(self.combo['values'],
(r'a\tb', '"a"', '} {') if self.wantobjects else
r'a\\tb {"a"} \}\ \{')
# out of range
self.assertRaises(tkinter.TclError, self.combo.current,
len(self.combo['values']))
# it expects an integer (or something that can be converted to int)
self.assertRaises(tkinter.TclError, self.combo.current, '')
# testing creating combobox with empty string in values
combo2 = ttk.Combobox(self.root, values=[1, 2, ''])
self.assertEqual(combo2['values'],
('1', '2', '') if self.wantobjects else '1 2 {}')
combo2.destroy()
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class PanedWindowTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'height',
'orient', 'style', 'takefocus', 'width',
)
def setUp(self):
super().setUp()
self.paned = self.create()
def create(self, **kwargs):
return ttk.PanedWindow(self.root, **kwargs)
def test_orient(self):
widget = self.create()
self.assertEqual(str(widget['orient']), 'vertical')
errmsg='attempt to change read-only option'
if get_tk_patchlevel() < (8, 6, 0, 'beta', 3):
errmsg='Attempt to change read-only option'
self.checkInvalidParam(widget, 'orient', 'horizontal',
errmsg=errmsg)
widget2 = self.create(orient='horizontal')
self.assertEqual(str(widget2['orient']), 'horizontal')
def test_add(self):
# attempt to add a child that is not a direct child of the paned window
label = ttk.Label(self.paned)
child = ttk.Label(label)
self.assertRaises(tkinter.TclError, self.paned.add, child)
label.destroy()
child.destroy()
# another attempt
label = ttk.Label(self.root)
child = ttk.Label(label)
self.assertRaises(tkinter.TclError, self.paned.add, child)
child.destroy()
label.destroy()
good_child = ttk.Label(self.root)
self.paned.add(good_child)
# re-adding a child is not accepted
self.assertRaises(tkinter.TclError, self.paned.add, good_child)
other_child = ttk.Label(self.paned)
self.paned.add(other_child)
self.assertEqual(self.paned.pane(0), self.paned.pane(1))
self.assertRaises(tkinter.TclError, self.paned.pane, 2)
good_child.destroy()
other_child.destroy()
self.assertRaises(tkinter.TclError, self.paned.pane, 0)
def test_forget(self):
self.assertRaises(tkinter.TclError, self.paned.forget, None)
self.assertRaises(tkinter.TclError, self.paned.forget, 0)
self.paned.add(ttk.Label(self.root))
self.paned.forget(0)
self.assertRaises(tkinter.TclError, self.paned.forget, 0)
def test_insert(self):
self.assertRaises(tkinter.TclError, self.paned.insert, None, 0)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, None)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, 0)
child = ttk.Label(self.root)
child2 = ttk.Label(self.root)
child3 = ttk.Label(self.root)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, child)
self.paned.insert('end', child2)
self.paned.insert(0, child)
self.assertEqual(self.paned.panes(), (str(child), str(child2)))
self.paned.insert(0, child2)
self.assertEqual(self.paned.panes(), (str(child2), str(child)))
self.paned.insert('end', child3)
self.assertEqual(self.paned.panes(),
(str(child2), str(child), str(child3)))
# reinserting a child should move it to its current position
panes = self.paned.panes()
self.paned.insert('end', child3)
self.assertEqual(panes, self.paned.panes())
# moving child3 to child2's position should result in child2 ending up
# in child's previous position and child ending up in child3's previous
# position
self.paned.insert(child2, child3)
self.assertEqual(self.paned.panes(),
(str(child3), str(child2), str(child)))
def test_pane(self):
self.assertRaises(tkinter.TclError, self.paned.pane, 0)
child = ttk.Label(self.root)
self.paned.add(child)
self.assertIsInstance(self.paned.pane(0), dict)
self.assertEqual(self.paned.pane(0, weight=None),
0 if self.wantobjects else '0')
# newer form for querying a single option
self.assertEqual(self.paned.pane(0, 'weight'),
0 if self.wantobjects else '0')
self.assertEqual(self.paned.pane(0), self.paned.pane(str(child)))
self.assertRaises(tkinter.TclError, self.paned.pane, 0,
badoption='somevalue')
def test_sashpos(self):
self.assertRaises(tkinter.TclError, self.paned.sashpos, None)
self.assertRaises(tkinter.TclError, self.paned.sashpos, '')
self.assertRaises(tkinter.TclError, self.paned.sashpos, 0)
child = ttk.Label(self.paned, text='a')
self.paned.add(child, weight=1)
self.assertRaises(tkinter.TclError, self.paned.sashpos, 0)
child2 = ttk.Label(self.paned, text='b')
self.paned.add(child2)
self.assertRaises(tkinter.TclError, self.paned.sashpos, 1)
self.paned.pack(expand=True, fill='both')
self.paned.wait_visibility()
curr_pos = self.paned.sashpos(0)
self.paned.sashpos(0, 1000)
self.assertNotEqual(curr_pos, self.paned.sashpos(0))
self.assertIsInstance(self.paned.sashpos(0), int)
@add_standard_options(StandardTtkOptionsTests)
class RadiobuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor',
'image',
'padding', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'value', 'variable', 'width',
)
def create(self, **kwargs):
return ttk.Radiobutton(self.root, **kwargs)
def test_value(self):
widget = self.create()
self.checkParams(widget, 'value', 1, 2.3, '', 'any string')
def test_invoke(self):
success = []
def cb_test():
success.append(1)
return "cb test called"
myvar = tkinter.IntVar(self.root)
cbtn = ttk.Radiobutton(self.root, command=cb_test,
variable=myvar, value=0)
cbtn2 = ttk.Radiobutton(self.root, command=cb_test,
variable=myvar, value=1)
if self.wantobjects:
conv = lambda x: x
else:
conv = int
res = cbtn.invoke()
self.assertEqual(res, "cb test called")
self.assertEqual(conv(cbtn['value']), myvar.get())
self.assertEqual(myvar.get(),
conv(cbtn.tk.globalgetvar(cbtn['variable'])))
self.assertTrue(success)
cbtn2['command'] = ''
res = cbtn2.invoke()
self.assertEqual(str(res), '')
self.assertLessEqual(len(success), 1)
self.assertEqual(conv(cbtn2['value']), myvar.get())
self.assertEqual(myvar.get(),
conv(cbtn.tk.globalgetvar(cbtn['variable'])))
self.assertEqual(str(cbtn['variable']), str(cbtn2['variable']))
class MenubuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'compound', 'cursor', 'direction',
'image', 'menu', 'padding', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'width',
)
def create(self, **kwargs):
return ttk.Menubutton(self.root, **kwargs)
def test_direction(self):
widget = self.create()
self.checkEnumParam(widget, 'direction',
'above', 'below', 'left', 'right', 'flush')
def test_menu(self):
widget = self.create()
menu = tkinter.Menu(widget, name='menu')
self.checkParam(widget, 'menu', menu, conv=str)
menu.destroy()
@add_standard_options(StandardTtkOptionsTests)
class ScaleTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'cursor', 'from', 'length',
'orient', 'style', 'takefocus', 'to', 'value', 'variable',
)
_conv_pixels = noconv
default_orient = 'horizontal'
def setUp(self):
super().setUp()
self.scale = self.create()
self.scale.pack()
self.scale.update()
def create(self, **kwargs):
return ttk.Scale(self.root, **kwargs)
def test_from(self):
widget = self.create()
self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=False)
def test_length(self):
widget = self.create()
self.checkPixelsParam(widget, 'length', 130, 131.2, 135.6, '5i')
def test_to(self):
widget = self.create()
self.checkFloatParam(widget, 'to', 300, 14.9, 15.1, -10, conv=False)
def test_value(self):
widget = self.create()
self.checkFloatParam(widget, 'value', 300, 14.9, 15.1, -10, conv=False)
def test_custom_event(self):
failure = [1, 1, 1] # will need to be empty
funcid = self.scale.bind('<<RangeChanged>>', lambda evt: failure.pop())
self.scale['from'] = 10
self.scale['from_'] = 10
self.scale['to'] = 3
self.assertFalse(failure)
failure = [1, 1, 1]
self.scale.configure(from_=2, to=5)
self.scale.configure(from_=0, to=-2)
self.scale.configure(to=10)
self.assertFalse(failure)
def test_get(self):
if self.wantobjects:
conv = lambda x: x
else:
conv = float
scale_width = self.scale.winfo_width()
self.assertEqual(self.scale.get(scale_width, 0), self.scale['to'])
self.assertEqual(conv(self.scale.get(0, 0)), conv(self.scale['from']))
self.assertEqual(self.scale.get(), self.scale['value'])
self.scale['value'] = 30
self.assertEqual(self.scale.get(), self.scale['value'])
self.assertRaises(tkinter.TclError, self.scale.get, '', 0)
self.assertRaises(tkinter.TclError, self.scale.get, 0, '')
def test_set(self):
if self.wantobjects:
conv = lambda x: x
else:
conv = float
# set restricts the max/min values according to the current range
max = conv(self.scale['to'])
new_max = max + 10
self.scale.set(new_max)
self.assertEqual(conv(self.scale.get()), max)
min = conv(self.scale['from'])
self.scale.set(min - 1)
self.assertEqual(conv(self.scale.get()), min)
# changing the variable directly doesn't impose this limitation, though
var = tkinter.DoubleVar(self.root)
self.scale['variable'] = var
var.set(max + 5)
self.assertEqual(conv(self.scale.get()), var.get())
self.assertEqual(conv(self.scale.get()), max + 5)
del var
# the same happens with the value option
self.scale['value'] = max + 10
self.assertEqual(conv(self.scale.get()), max + 10)
self.assertEqual(conv(self.scale.get()), conv(self.scale['value']))
# nevertheless, note that the max/min values we can get specifying
# x, y coords are the ones according to the current range
self.assertEqual(conv(self.scale.get(0, 0)), min)
self.assertEqual(conv(self.scale.get(self.scale.winfo_width(), 0)), max)
self.assertRaises(tkinter.TclError, self.scale.set, None)
@add_standard_options(StandardTtkOptionsTests)
class ProgressbarTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'orient', 'length',
'mode', 'maximum', 'phase',
'style', 'takefocus', 'value', 'variable',
)
_conv_pixels = noconv
default_orient = 'horizontal'
def create(self, **kwargs):
return ttk.Progressbar(self.root, **kwargs)
def test_length(self):
widget = self.create()
self.checkPixelsParam(widget, 'length', 100.1, 56.7, '2i')
def test_maximum(self):
widget = self.create()
self.checkFloatParam(widget, 'maximum', 150.2, 77.7, 0, -10, conv=False)
def test_mode(self):
widget = self.create()
self.checkEnumParam(widget, 'mode', 'determinate', 'indeterminate')
def test_phase(self):
# XXX
pass
def test_value(self):
widget = self.create()
self.checkFloatParam(widget, 'value', 150.2, 77.7, 0, -10,
conv=False)
@unittest.skipIf(sys.platform == 'darwin',
'ttk.Scrollbar is special on MacOSX')
@add_standard_options(StandardTtkOptionsTests)
class ScrollbarTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'cursor', 'orient', 'style', 'takefocus',
)
default_orient = 'vertical'
def create(self, **kwargs):
return ttk.Scrollbar(self.root, **kwargs)
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class NotebookTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'height', 'padding', 'style', 'takefocus', 'width',
)
def setUp(self):
super().setUp()
self.nb = self.create(padding=0)
self.child1 = ttk.Label(self.root)
self.child2 = ttk.Label(self.root)
self.nb.add(self.child1, text='a')
self.nb.add(self.child2, text='b')
def create(self, **kwargs):
return ttk.Notebook(self.root, **kwargs)
def test_tab_identifiers(self):
self.nb.forget(0)
self.nb.hide(self.child2)
self.assertRaises(tkinter.TclError, self.nb.tab, self.child1)
self.assertEqual(self.nb.index('end'), 1)
self.nb.add(self.child2)
self.assertEqual(self.nb.index('end'), 1)
self.nb.select(self.child2)
self.assertTrue(self.nb.tab('current'))
self.nb.add(self.child1, text='a')
self.nb.pack()
self.nb.wait_visibility()
if sys.platform == 'darwin':
tb_idx = "@20,5"
else:
tb_idx = "@5,5"
self.assertEqual(self.nb.tab(tb_idx), self.nb.tab('current'))
for i in range(5, 100, 5):
try:
if self.nb.tab('@%d, 5' % i, text=None) == 'a':
break
except tkinter.TclError:
pass
else:
self.fail("Tab with text 'a' not found")
def test_add_and_hidden(self):
self.assertRaises(tkinter.TclError, self.nb.hide, -1)
self.assertRaises(tkinter.TclError, self.nb.hide, 'hi')
self.assertRaises(tkinter.TclError, self.nb.hide, None)
self.assertRaises(tkinter.TclError, self.nb.add, None)
self.assertRaises(tkinter.TclError, self.nb.add, ttk.Label(self.root),
unknown='option')
tabs = self.nb.tabs()
self.nb.hide(self.child1)
self.nb.add(self.child1)
self.assertEqual(self.nb.tabs(), tabs)
child = ttk.Label(self.root)
self.nb.add(child, text='c')
tabs = self.nb.tabs()
curr = self.nb.index('current')
# verify that the tab gets re-added at its previous position
child2_index = self.nb.index(self.child2)
self.nb.hide(self.child2)
self.nb.add(self.child2)
self.assertEqual(self.nb.tabs(), tabs)
self.assertEqual(self.nb.index(self.child2), child2_index)
self.assertEqual(str(self.child2), self.nb.tabs()[child2_index])
# but the tab next to it (not hidden) is the one selected now
self.assertEqual(self.nb.index('current'), curr + 1)
def test_forget(self):
self.assertRaises(tkinter.TclError, self.nb.forget, -1)
self.assertRaises(tkinter.TclError, self.nb.forget, 'hi')
self.assertRaises(tkinter.TclError, self.nb.forget, None)
tabs = self.nb.tabs()
child1_index = self.nb.index(self.child1)
self.nb.forget(self.child1)
self.assertNotIn(str(self.child1), self.nb.tabs())
self.assertEqual(len(tabs) - 1, len(self.nb.tabs()))
self.nb.add(self.child1)
self.assertEqual(self.nb.index(self.child1), 1)
self.assertNotEqual(child1_index, self.nb.index(self.child1))
def test_index(self):
self.assertRaises(tkinter.TclError, self.nb.index, -1)
self.assertRaises(tkinter.TclError, self.nb.index, None)
self.assertIsInstance(self.nb.index('end'), int)
self.assertEqual(self.nb.index(self.child1), 0)
self.assertEqual(self.nb.index(self.child2), 1)
self.assertEqual(self.nb.index('end'), 2)
def test_insert(self):
# moving tabs
tabs = self.nb.tabs()
self.nb.insert(1, tabs[0])
self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0]))
self.nb.insert(self.child1, self.child2)
self.assertEqual(self.nb.tabs(), tabs)
self.nb.insert('end', self.child1)
self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0]))
self.nb.insert('end', 0)
self.assertEqual(self.nb.tabs(), tabs)
# bad moves
self.assertRaises(tkinter.TclError, self.nb.insert, 2, tabs[0])
self.assertRaises(tkinter.TclError, self.nb.insert, -1, tabs[0])
# new tab
child3 = ttk.Label(self.root)
self.nb.insert(1, child3)
self.assertEqual(self.nb.tabs(), (tabs[0], str(child3), tabs[1]))
self.nb.forget(child3)
self.assertEqual(self.nb.tabs(), tabs)
self.nb.insert(self.child1, child3)
self.assertEqual(self.nb.tabs(), (str(child3), ) + tabs)
self.nb.forget(child3)
self.assertRaises(tkinter.TclError, self.nb.insert, 2, child3)
self.assertRaises(tkinter.TclError, self.nb.insert, -1, child3)
# bad inserts
self.assertRaises(tkinter.TclError, self.nb.insert, 'end', None)
self.assertRaises(tkinter.TclError, self.nb.insert, None, 0)
self.assertRaises(tkinter.TclError, self.nb.insert, None, None)
def test_select(self):
self.nb.pack()
self.nb.wait_visibility()
success = []
tab_changed = []
self.child1.bind('<Unmap>', lambda evt: success.append(True))
self.nb.bind('<<NotebookTabChanged>>',
lambda evt: tab_changed.append(True))
self.assertEqual(self.nb.select(), str(self.child1))
self.nb.select(self.child2)
self.assertTrue(success)
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.update()
self.assertTrue(tab_changed)
def test_tab(self):
self.assertRaises(tkinter.TclError, self.nb.tab, -1)
self.assertRaises(tkinter.TclError, self.nb.tab, 'notab')
self.assertRaises(tkinter.TclError, self.nb.tab, None)
self.assertIsInstance(self.nb.tab(self.child1), dict)
self.assertEqual(self.nb.tab(self.child1, text=None), 'a')
# newer form for querying a single option
self.assertEqual(self.nb.tab(self.child1, 'text'), 'a')
self.nb.tab(self.child1, text='abc')
self.assertEqual(self.nb.tab(self.child1, text=None), 'abc')
self.assertEqual(self.nb.tab(self.child1, 'text'), 'abc')
def test_tabs(self):
self.assertEqual(len(self.nb.tabs()), 2)
self.nb.forget(self.child1)
self.nb.forget(self.child2)
self.assertEqual(self.nb.tabs(), ())
def test_traversal(self):
self.nb.pack()
self.nb.wait_visibility()
self.nb.select(0)
simulate_mouse_click(self.nb, 5, 5)
self.nb.focus_force()
self.nb.event_generate('<Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.focus_force()
self.nb.event_generate('<Shift-Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child1))
self.nb.focus_force()
self.nb.event_generate('<Shift-Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.tab(self.child1, text='a', underline=0)
self.nb.enable_traversal()
self.nb.focus_force()
simulate_mouse_click(self.nb, 5, 5)
if sys.platform == 'darwin':
self.nb.event_generate('<Option-a>')
else:
self.nb.event_generate('<Alt-a>')
self.assertEqual(self.nb.select(), str(self.child1))
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class SpinboxTest(EntryTest, unittest.TestCase):
OPTIONS = (
'background', 'class', 'command', 'cursor', 'exportselection',
'font', 'foreground', 'format', 'from', 'increment',
'invalidcommand', 'justify', 'show', 'state', 'style',
'takefocus', 'textvariable', 'to', 'validate', 'validatecommand',
'values', 'width', 'wrap', 'xscrollcommand',
)
def setUp(self):
super().setUp()
self.spin = self.create()
self.spin.pack()
def create(self, **kwargs):
return ttk.Spinbox(self.root, **kwargs)
def _click_increment_arrow(self):
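# The spin arrows sit at the right edge; clicking just above the vertical midpoint hits the increment (up) arrow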
width = self.spin.winfo_width()
height = self.spin.winfo_height()
x = width - 5
y = height//2 - 5
self.spin.event_generate('<ButtonPress-1>', x=x, y=y)
self.spin.event_generate('<ButtonRelease-1>', x=x, y=y)
self.spin.update_idletasks()
def _click_decrement_arrow(self):
width = self.spin.winfo_width()
height = self.spin.winfo_height()
x = width - 5
y = height//2 + 4
self.spin.event_generate('<ButtonPress-1>', x=x, y=y)
self.spin.event_generate('<ButtonRelease-1>', x=x, y=y)
self.spin.update_idletasks()
def test_command(self):
success = []
self.spin['command'] = lambda: success.append(True)
self.spin.update()
self._click_increment_arrow()
self.spin.update()
self.assertTrue(success)
self._click_decrement_arrow()
self.assertEqual(len(success), 2)
# testing command removal
self.spin['command'] = ''
self.spin.update_idletasks()
self._click_increment_arrow()
self._click_decrement_arrow()
self.spin.update()
self.assertEqual(len(success), 2)
def test_to(self):
self.spin['from'] = 0
self.spin['to'] = 5
self.spin.set(4)
self.spin.update()
self._click_increment_arrow() # 5
self.assertEqual(self.spin.get(), '5')
self._click_increment_arrow() # 5
self.assertEqual(self.spin.get(), '5')
def test_from(self):
self.spin['from'] = 1
self.spin['to'] = 10
self.spin.set(2)
self.spin.update()
self._click_decrement_arrow() # 1
self.assertEqual(self.spin.get(), '1')
self._click_decrement_arrow() # 1
self.assertEqual(self.spin.get(), '1')
def test_increment(self):
self.spin['from'] = 0
self.spin['to'] = 10
self.spin['increment'] = 4
self.spin.set(1)
self.spin.update()
self._click_increment_arrow() # 5
self.assertEqual(self.spin.get(), '5')
self.spin['increment'] = 2
self.spin.update()
self._click_decrement_arrow() # 3
self.assertEqual(self.spin.get(), '3')
def test_format(self):
self.spin.set(1)
self.spin['format'] = '%10.3f'
self.spin.update()
self._click_increment_arrow()
value = self.spin.get()
self.assertEqual(len(value), 10)
self.assertEqual(value.index('.'), 6)
self.spin['format'] = ''
self.spin.update()
self._click_increment_arrow()
value = self.spin.get()
self.assertTrue('.' not in value)
self.assertEqual(len(value), 1)
def test_wrap(self):
self.spin['to'] = 10
self.spin['from'] = 1
self.spin.set(1)
self.spin['wrap'] = True
self.spin.update()
self._click_decrement_arrow()
self.assertEqual(self.spin.get(), '10')
self._click_increment_arrow()
self.assertEqual(self.spin.get(), '1')
self.spin['wrap'] = False
self.spin.update()
self._click_decrement_arrow()
self.assertEqual(self.spin.get(), '1')
def test_values(self):
self.assertEqual(self.spin['values'],
() if tcl_version < (8, 5) else '')
self.checkParam(self.spin, 'values', 'mon tue wed thur',
expected=('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.spin, 'values', ('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.spin, 'values', (42, 3.14, '', 'any string'))
self.checkParam(self.spin, 'values', '')
self.spin['values'] = ['a', 1, 'c']
# test incrementing / decrementing values
self.spin.set('a')
self.spin.update()
self._click_increment_arrow()
self.assertEqual(self.spin.get(), '1')
self._click_decrement_arrow()
self.assertEqual(self.spin.get(), 'a')
# testing values with empty string set through configure
self.spin.configure(values=[1, '', 2])
self.assertEqual(self.spin['values'],
('1', '', '2') if self.wantobjects else
'1 {} 2')
# testing values with spaces
self.spin['values'] = ['a b', 'a\tb', 'a\nb']
self.assertEqual(self.spin['values'],
('a b', 'a\tb', 'a\nb') if self.wantobjects else
'{a b} {a\tb} {a\nb}')
# testing values with special characters
self.spin['values'] = [r'a\tb', '"a"', '} {']
self.assertEqual(self.spin['values'],
(r'a\tb', '"a"', '} {') if self.wantobjects else
r'a\\tb {"a"} \}\ \{')
# testing creating spinbox with empty string in values
spin2 = ttk.Spinbox(self.root, values=[1, 2, ''])
self.assertEqual(spin2['values'],
('1', '2', '') if self.wantobjects else '1 2 {}')
spin2.destroy()
@add_standard_options(StandardTtkOptionsTests)
class TreeviewTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'columns', 'cursor', 'displaycolumns',
'height', 'padding', 'selectmode', 'show',
'style', 'takefocus', 'xscrollcommand', 'yscrollcommand',
)
def setUp(self):
super().setUp()
self.tv = self.create(padding=0)
def create(self, **kwargs):
return ttk.Treeview(self.root, **kwargs)
def test_columns(self):
widget = self.create()
self.checkParam(widget, 'columns', 'a b c',
expected=('a', 'b', 'c'))
self.checkParam(widget, 'columns', ('a', 'b', 'c'))
self.checkParam(widget, 'columns', '')
def test_displaycolumns(self):
widget = self.create()
widget['columns'] = ('a', 'b', 'c')
self.checkParam(widget, 'displaycolumns', 'b a c',
expected=('b', 'a', 'c'))
self.checkParam(widget, 'displaycolumns', ('b', 'a', 'c'))
self.checkParam(widget, 'displaycolumns', '#all',
expected=('#all',))
self.checkParam(widget, 'displaycolumns', (2, 1, 0))
self.checkInvalidParam(widget, 'displaycolumns', ('a', 'b', 'd'),
errmsg='Invalid column index d')
self.checkInvalidParam(widget, 'displaycolumns', (1, 2, 3),
errmsg='Column index 3 out of bounds')
self.checkInvalidParam(widget, 'displaycolumns', (1, -2),
errmsg='Column index -2 out of bounds')
def test_height(self):
widget = self.create()
self.checkPixelsParam(widget, 'height', 100, -100, 0, '3c', conv=False)
self.checkPixelsParam(widget, 'height', 101.2, 102.6, conv=noconv)
def test_selectmode(self):
widget = self.create()
self.checkEnumParam(widget, 'selectmode',
'none', 'browse', 'extended')
def test_show(self):
widget = self.create()
self.checkParam(widget, 'show', 'tree headings',
expected=('tree', 'headings'))
self.checkParam(widget, 'show', ('tree', 'headings'))
self.checkParam(widget, 'show', ('headings', 'tree'))
self.checkParam(widget, 'show', 'tree', expected=('tree',))
self.checkParam(widget, 'show', 'headings', expected=('headings',))
def test_bbox(self):
self.tv.pack()
self.assertEqual(self.tv.bbox(''), '')
self.tv.wait_visibility()
self.tv.update()
item_id = self.tv.insert('', 'end')
children = self.tv.get_children()
self.assertTrue(children)
bbox = self.tv.bbox(children[0])
self.assertIsBoundingBox(bbox)
# compare width in bboxes
self.tv['columns'] = ['test']
self.tv.column('test', width=50)
bbox_column0 = self.tv.bbox(children[0], 0)
root_width = self.tv.column('#0', width=None)
if not self.wantobjects:
root_width = int(root_width)
self.assertEqual(bbox_column0[0], bbox[0] + root_width)
# verify that bbox of a closed item is the empty string
child1 = self.tv.insert(item_id, 'end')
self.assertEqual(self.tv.bbox(child1), '')
def test_children(self):
# no children yet, should get an empty tuple
self.assertEqual(self.tv.get_children(), ())
item_id = self.tv.insert('', 'end')
self.assertIsInstance(self.tv.get_children(), tuple)
self.assertEqual(self.tv.get_children()[0], item_id)
# add item_id and child3 as children of child2
child2 = self.tv.insert('', 'end')
child3 = self.tv.insert('', 'end')
self.tv.set_children(child2, item_id, child3)
self.assertEqual(self.tv.get_children(child2), (item_id, child3))
# child3 has child2 as parent, thus trying to set child2 as a child
# of child3 should result in an error
self.assertRaises(tkinter.TclError,
self.tv.set_children, child3, child2)
# remove child2 children
self.tv.set_children(child2)
self.assertEqual(self.tv.get_children(child2), ())
# remove root's children
self.tv.set_children('')
self.assertEqual(self.tv.get_children(), ())
def test_column(self):
# return a dict with all options/values
self.assertIsInstance(self.tv.column('#0'), dict)
# return a single value of the given option
if self.wantobjects:
self.assertIsInstance(self.tv.column('#0', width=None), int)
# set a new value for an option
self.tv.column('#0', width=10)
# testing new way to get option value
self.assertEqual(self.tv.column('#0', 'width'),
10 if self.wantobjects else '10')
self.assertEqual(self.tv.column('#0', width=None),
10 if self.wantobjects else '10')
# check read-only option
self.assertRaises(tkinter.TclError, self.tv.column, '#0', id='X')
self.assertRaises(tkinter.TclError, self.tv.column, 'invalid')
invalid_kws = [
{'unknown_option': 'some value'}, {'stretch': 'wrong'},
{'anchor': 'wrong'}, {'width': 'wrong'}, {'minwidth': 'wrong'}
]
for kw in invalid_kws:
self.assertRaises(tkinter.TclError, self.tv.column, '#0',
**kw)
def test_delete(self):
self.assertRaises(tkinter.TclError, self.tv.delete, '#0')
item_id = self.tv.insert('', 'end')
item2 = self.tv.insert(item_id, 'end')
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
self.tv.delete(item_id)
self.assertFalse(self.tv.get_children())
# reattach should fail
self.assertRaises(tkinter.TclError,
self.tv.reattach, item_id, '', 'end')
# test multiple item delete
item1 = self.tv.insert('', 'end')
item2 = self.tv.insert('', 'end')
self.assertEqual(self.tv.get_children(), (item1, item2))
self.tv.delete(item1, item2)
self.assertFalse(self.tv.get_children())
def test_detach_reattach(self):
item_id = self.tv.insert('', 'end')
item2 = self.tv.insert(item_id, 'end')
# calling detach without items is valid, although it does nothing
prev = self.tv.get_children()
self.tv.detach() # this should do nothing
self.assertEqual(prev, self.tv.get_children())
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
# detach item with children
self.tv.detach(item_id)
self.assertFalse(self.tv.get_children())
# reattach item with children
self.tv.reattach(item_id, '', 'end')
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
# move a child to the root
self.tv.move(item2, '', 'end')
self.assertEqual(self.tv.get_children(), (item_id, item2))
self.assertEqual(self.tv.get_children(item_id), ())
# bad values
self.assertRaises(tkinter.TclError,
self.tv.reattach, 'nonexistent', '', 'end')
self.assertRaises(tkinter.TclError,
self.tv.detach, 'nonexistent')
self.assertRaises(tkinter.TclError,
self.tv.reattach, item2, 'otherparent', 'end')
self.assertRaises(tkinter.TclError,
self.tv.reattach, item2, '', 'invalid')
# multiple detach
self.tv.detach(item_id, item2)
self.assertEqual(self.tv.get_children(), ())
self.assertEqual(self.tv.get_children(item_id), ())
def test_exists(self):
self.assertEqual(self.tv.exists('something'), False)
self.assertEqual(self.tv.exists(''), True)
self.assertEqual(self.tv.exists({}), False)
# the following will make a tk.call equivalent to
# tk.call(treeview, "exists") which should result in an error
# in the tcl interpreter since tk requires an item.
self.assertRaises(tkinter.TclError, self.tv.exists, None)
def test_focus(self):
# nothing is focused right now
self.assertEqual(self.tv.focus(), '')
item1 = self.tv.insert('', 'end')
self.tv.focus(item1)
self.assertEqual(self.tv.focus(), item1)
self.tv.delete(item1)
self.assertEqual(self.tv.focus(), '')
# try focusing inexistent item
self.assertRaises(tkinter.TclError, self.tv.focus, 'hi')
def test_heading(self):
# check a dict is returned
self.assertIsInstance(self.tv.heading('#0'), dict)
# check a value is returned
self.tv.heading('#0', text='hi')
self.assertEqual(self.tv.heading('#0', 'text'), 'hi')
self.assertEqual(self.tv.heading('#0', text=None), 'hi')
# invalid option
self.assertRaises(tkinter.TclError, self.tv.heading, '#0',
background=None)
# invalid value
self.assertRaises(tkinter.TclError, self.tv.heading, '#0',
anchor=1)
def test_heading_callback(self):
def simulate_heading_click(x, y):
simulate_mouse_click(self.tv, x, y)
self.tv.update()
success = [] # no success for now
self.tv.pack()
self.tv.wait_visibility()
self.tv.heading('#0', command=lambda: success.append(True))
self.tv.column('#0', width=100)
self.tv.update()
# assuming that the coords (5, 5) fall into heading #0
simulate_heading_click(5, 5)
if not success:
self.fail("The command associated to the treeview heading wasn't "
"invoked.")
success = []
commands = self.tv.master._tclCommands
self.tv.heading('#0', command=str(self.tv.heading('#0', command=None)))
self.assertEqual(commands, self.tv.master._tclCommands)
simulate_heading_click(5, 5)
if not success:
self.fail("The command associated to the treeview heading wasn't "
"invoked.")
# XXX The following raises an error in a tcl interpreter, but not in
# Python
#self.tv.heading('#0', command='I dont exist')
#simulate_heading_click(5, 5)
def test_index(self):
# item 'what' doesn't exist
self.assertRaises(tkinter.TclError, self.tv.index, 'what')
self.assertEqual(self.tv.index(''), 0)
item1 = self.tv.insert('', 'end')
item2 = self.tv.insert('', 'end')
c1 = self.tv.insert(item1, 'end')
c2 = self.tv.insert(item1, 'end')
self.assertEqual(self.tv.index(item1), 0)
self.assertEqual(self.tv.index(c1), 0)
self.assertEqual(self.tv.index(c2), 1)
self.assertEqual(self.tv.index(item2), 1)
self.tv.move(item2, '', 0)
self.assertEqual(self.tv.index(item2), 0)
self.assertEqual(self.tv.index(item1), 1)
# check that index still works even after its parent and siblings
# have been detached
self.tv.detach(item1)
self.assertEqual(self.tv.index(c2), 1)
self.tv.detach(c1)
self.assertEqual(self.tv.index(c2), 0)
# but it fails after item has been deleted
self.tv.delete(item1)
self.assertRaises(tkinter.TclError, self.tv.index, c2)
def test_insert_item(self):
# parent 'none' doesn't exist
self.assertRaises(tkinter.TclError, self.tv.insert, 'none', 'end')
# open values
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
open='')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
open='please')
self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=True)))
self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=False)))
# invalid index
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'middle')
# trying to duplicate item id is invalid
itemid = self.tv.insert('', 'end', 'first-item')
self.assertEqual(itemid, 'first-item')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
'first-item')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
MockTclObj('first-item'))
# unicode values
value = '\xe1ba'
item = self.tv.insert('', 'end', values=(value, ))
self.assertEqual(self.tv.item(item, 'values'),
(value,) if self.wantobjects else value)
self.assertEqual(self.tv.item(item, values=None),
(value,) if self.wantobjects else value)
self.tv.item(item, values=self.root.splitlist(self.tv.item(item, values=None)))
self.assertEqual(self.tv.item(item, values=None),
(value,) if self.wantobjects else value)
self.assertIsInstance(self.tv.item(item), dict)
# erase item values
self.tv.item(item, values='')
self.assertFalse(self.tv.item(item, values=None))
# item tags
item = self.tv.insert('', 'end', tags=[1, 2, value])
self.assertEqual(self.tv.item(item, tags=None),
('1', '2', value) if self.wantobjects else
'1 2 %s' % value)
self.tv.item(item, tags=[])
self.assertFalse(self.tv.item(item, tags=None))
self.tv.item(item, tags=(1, 2))
self.assertEqual(self.tv.item(item, tags=None),
('1', '2') if self.wantobjects else '1 2')
# values with spaces
item = self.tv.insert('', 'end', values=('a b c',
'%s %s' % (value, value)))
self.assertEqual(self.tv.item(item, values=None),
('a b c', '%s %s' % (value, value)) if self.wantobjects else
'{a b c} {%s %s}' % (value, value))
# text
self.assertEqual(self.tv.item(
self.tv.insert('', 'end', text="Label here"), text=None),
"Label here")
self.assertEqual(self.tv.item(
self.tv.insert('', 'end', text=value), text=None),
value)
# test for values which are not None
itemid = self.tv.insert('', 'end', 0)
self.assertEqual(itemid, '0')
itemid = self.tv.insert('', 'end', 0.0)
self.assertEqual(itemid, '0.0')
# this is because False resolves to 0 and an item with iid 0 is already present
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', False)
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', '')
def test_selection(self):
self.assertRaises(TypeError, self.tv.selection, 'spam')
# item 'none' doesn't exist
self.assertRaises(tkinter.TclError, self.tv.selection_set, 'none')
self.assertRaises(tkinter.TclError, self.tv.selection_add, 'none')
self.assertRaises(tkinter.TclError, self.tv.selection_remove, 'none')
self.assertRaises(tkinter.TclError, self.tv.selection_toggle, 'none')
item1 = self.tv.insert('', 'end')
item2 = self.tv.insert('', 'end')
c1 = self.tv.insert(item1, 'end')
c2 = self.tv.insert(item1, 'end')
c3 = self.tv.insert(item1, 'end')
self.assertEqual(self.tv.selection(), ())
self.tv.selection_set(c1, item2)
self.assertEqual(self.tv.selection(), (c1, item2))
self.tv.selection_set(c2)
self.assertEqual(self.tv.selection(), (c2,))
self.tv.selection_add(c1, item2)
self.assertEqual(self.tv.selection(), (c1, c2, item2))
self.tv.selection_add(item1)
self.assertEqual(self.tv.selection(), (item1, c1, c2, item2))
self.tv.selection_add()
self.assertEqual(self.tv.selection(), (item1, c1, c2, item2))
self.tv.selection_remove(item1, c3)
self.assertEqual(self.tv.selection(), (c1, c2, item2))
self.tv.selection_remove(c2)
self.assertEqual(self.tv.selection(), (c1, item2))
self.tv.selection_remove()
self.assertEqual(self.tv.selection(), (c1, item2))
self.tv.selection_toggle(c1, c3)
self.assertEqual(self.tv.selection(), (c3, item2))
self.tv.selection_toggle(item2)
self.assertEqual(self.tv.selection(), (c3,))
self.tv.selection_toggle()
self.assertEqual(self.tv.selection(), (c3,))
self.tv.insert('', 'end', id='with spaces')
self.tv.selection_set('with spaces')
self.assertEqual(self.tv.selection(), ('with spaces',))
self.tv.insert('', 'end', id='{brace')
self.tv.selection_set('{brace')
self.assertEqual(self.tv.selection(), ('{brace',))
self.tv.insert('', 'end', id='unicode\u20ac')
self.tv.selection_set('unicode\u20ac')
self.assertEqual(self.tv.selection(), ('unicode\u20ac',))
self.tv.insert('', 'end', id=b'bytes\xe2\x82\xac')
self.tv.selection_set(b'bytes\xe2\x82\xac')
self.assertEqual(self.tv.selection(), ('bytes\xe2\x82\xac',))
self.tv.selection_set()
self.assertEqual(self.tv.selection(), ())
# Old interface
self.tv.selection_set((c1, item2))
self.assertEqual(self.tv.selection(), (c1, item2))
self.tv.selection_add((c1, item1))
self.assertEqual(self.tv.selection(), (item1, c1, item2))
self.tv.selection_remove((item1, c3))
self.assertEqual(self.tv.selection(), (c1, item2))
self.tv.selection_toggle((c1, c3))
self.assertEqual(self.tv.selection(), (c3, item2))
def test_set(self):
self.tv['columns'] = ['A', 'B']
item = self.tv.insert('', 'end', values=['a', 'b'])
self.assertEqual(self.tv.set(item), {'A': 'a', 'B': 'b'})
self.tv.set(item, 'B', 'a')
self.assertEqual(self.tv.item(item, values=None),
('a', 'a') if self.wantobjects else 'a a')
self.tv['columns'] = ['B']
self.assertEqual(self.tv.set(item), {'B': 'a'})
self.tv.set(item, 'B', 'b')
self.assertEqual(self.tv.set(item, column='B'), 'b')
self.assertEqual(self.tv.item(item, values=None),
('b', 'a') if self.wantobjects else 'b a')
self.tv.set(item, 'B', 123)
self.assertEqual(self.tv.set(item, 'B'),
123 if self.wantobjects else '123')
self.assertEqual(self.tv.item(item, values=None),
(123, 'a') if self.wantobjects else '123 a')
self.assertEqual(self.tv.set(item),
{'B': 123} if self.wantobjects else {'B': '123'})
# inexistent column
self.assertRaises(tkinter.TclError, self.tv.set, item, 'A')
self.assertRaises(tkinter.TclError, self.tv.set, item, 'A', 'b')
# inexistent item
self.assertRaises(tkinter.TclError, self.tv.set, 'notme')
def test_tag_bind(self):
events = []
item1 = self.tv.insert('', 'end', tags=['call'])
item2 = self.tv.insert('', 'end', tags=['call'])
self.tv.tag_bind('call', '<ButtonPress-1>',
lambda evt: events.append(1))
self.tv.tag_bind('call', '<ButtonRelease-1>',
lambda evt: events.append(2))
self.tv.pack()
self.tv.wait_visibility()
self.tv.update()
pos_y = set()
found = set()
for i in range(0, 100, 10):
if len(found) == 2: # item1 and item2 already found
break
item_id = self.tv.identify_row(i)
if item_id and item_id not in found:
pos_y.add(i)
found.add(item_id)
self.assertEqual(len(pos_y), 2) # item1 and item2 y pos
for y in pos_y:
simulate_mouse_click(self.tv, 0, y)
# by now there should be 4 things in the events list, since each
# item had a bind for two events that were simulated above
self.assertEqual(len(events), 4)
for evt in zip(events[::2], events[1::2]):
self.assertEqual(evt, (1, 2))
def test_tag_configure(self):
# Just testing parameter passing for now
self.assertRaises(TypeError, self.tv.tag_configure)
self.assertRaises(tkinter.TclError, self.tv.tag_configure,
'test', sky='blue')
self.tv.tag_configure('test', foreground='blue')
self.assertEqual(str(self.tv.tag_configure('test', 'foreground')),
'blue')
self.assertEqual(str(self.tv.tag_configure('test', foreground=None)),
'blue')
self.assertIsInstance(self.tv.tag_configure('test'), dict)
def test_tag_has(self):
item1 = self.tv.insert('', 'end', text='Item 1', tags=['tag1'])
item2 = self.tv.insert('', 'end', text='Item 2', tags=['tag2'])
self.assertRaises(TypeError, self.tv.tag_has)
self.assertRaises(TclError, self.tv.tag_has, 'tag1', 'non-existing')
self.assertTrue(self.tv.tag_has('tag1', item1))
self.assertFalse(self.tv.tag_has('tag1', item2))
self.assertFalse(self.tv.tag_has('tag2', item1))
self.assertTrue(self.tv.tag_has('tag2', item2))
self.assertFalse(self.tv.tag_has('tag3', item1))
self.assertFalse(self.tv.tag_has('tag3', item2))
self.assertEqual(self.tv.tag_has('tag1'), (item1,))
self.assertEqual(self.tv.tag_has('tag2'), (item2,))
self.assertEqual(self.tv.tag_has('tag3'), ())
@add_standard_options(StandardTtkOptionsTests)
class SeparatorTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'orient', 'style', 'takefocus',
# 'state'?
)
default_orient = 'horizontal'
def create(self, **kwargs):
return ttk.Separator(self.root, **kwargs)
@add_standard_options(StandardTtkOptionsTests)
class SizegripTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'style', 'takefocus',
# 'state'?
)
def create(self, **kwargs):
return ttk.Sizegrip(self.root, **kwargs)
tests_gui = (
ButtonTest, CheckbuttonTest, ComboboxTest, EntryTest,
FrameTest, LabelFrameTest, LabelTest, MenubuttonTest,
NotebookTest, PanedWindowTest, ProgressbarTest,
RadiobuttonTest, ScaleTest, ScrollbarTest, SeparatorTest,
SizegripTest, SpinboxTest, TreeviewTest, WidgetTest,
)
if __name__ == "__main__":
unittest.main()
| 35.860502
| 87
| 0.592414
|
dd29b750f19b1ed740fe1b21974595b1d6080a37
| 8,366
|
py
|
Python
|
reverse/A7105/a7105.py
|
guillaume-rico/HUBSAN-X4-H107D
|
cb291d53e4f68a27db834aea7e651f053cd64112
|
[
"MIT"
] | 4
|
2016-10-11T01:09:33.000Z
|
2020-12-31T12:48:25.000Z
|
reverse/A7105/a7105.py
|
guillaume-rico/HUBSAN-X4-H107D
|
cb291d53e4f68a27db834aea7e651f053cd64112
|
[
"MIT"
] | null | null | null |
reverse/A7105/a7105.py
|
guillaume-rico/HUBSAN-X4-H107D
|
cb291d53e4f68a27db834aea7e651f053cd64112
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import sys
import array
# Original work of alvarop: https://github.com/alvarop/protox
# This module (based on protox) allows for the decoding of A7105 SPI packets
#
# Strobe packets are marked with an S. Register read/writes are marked R/W
#
# Sample output:
# R RSSI = 0.886V
# R RSSI = 0.872V
# W PLL Register I [82]
# S PLL Mode
# S Standby Mode
# S FIFO Write pointer reset
# W FIFO DATA Register [01 82 C0 AC D8 41 00 00 00 00 00 00 00 00 00 F8]
# S TX Mode
#
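# Minimal usage sketch (the byte strings below are made-up examples, not captured traffic):
#   decodeSPIPacket('a0')      -> 'S Standby Mode'
#   decodeSPIPacket('5d e3')   -> 'R RSSI = 1.064V'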
#
# Constants
#
STROBE_BIT = 0x80
READ_BIT = 0x40
fXTAL = 16000000.0
#
# Strobe string definitions
#
strobes = {}
strobes[0x80] = 'Sleep Mode'
strobes[0x90] = 'Idle Mode'
strobes[0xA0] = 'Standby Mode'
strobes[0xB0] = 'PLL Mode'
strobes[0xC0] = 'RX Mode'
strobes[0xD0] = 'TX Mode'
strobes[0xE0] = 'FIFO Write pointer reset'
strobes[0xF0] = 'FIFO Read pointer reset'
#
# Register string definitions
# Entries can be overwritten by a function to do further processing
#
regs = {}
regs[0x00] = 'Mode Register'
regs[0x01] = 'Mode Control Register'
regs[0x02] = 'Calibration Control Register'
regs[0x03] = 'FIFO Register I'
regs[0x04] = 'FIFO Register II'
regs[0x05] = 'FIFO DATA Register'
regs[0x06] = 'ID DATA Register'
regs[0x07] = 'RC OSC Register I'
regs[0x08] = 'RC OSC Register II'
regs[0x09] = 'RC OSC Register III'
regs[0x0A] = 'CKO Pin Control Register'
regs[0x0B] = 'GIO1 Pin Control Register I'
regs[0x0C] = 'GIO2 Pin Control Register II'
regs[0x0D] = 'Clock Register'
regs[0x0E] = 'Data Rate Register'
regs[0x0F] = 'PLL Register I'
regs[0x10] = 'PLL Register II'
regs[0x11] = 'PLL Register III'
regs[0x12] = 'PLL Register IV'
regs[0x13] = 'PLL Register V'
regs[0x14] = 'TX Register I'
regs[0x15] = 'TX Register II'
regs[0x16] = 'Delay Register I'
regs[0x17] = 'Delay Register II'
regs[0x18] = 'RX Register'
regs[0x19] = 'RX Gain Register I'
regs[0x1A] = 'RX Gain Register II'
regs[0x1B] = 'RX Gain Register III'
regs[0x1C] = 'RX Gain Register IV'
regs[0x1D] = 'RSSI Threshold Register'
regs[0x1E] = 'ADC Control Register'
regs[0x1F] = 'Code Register I'
regs[0x20] = 'Code Register II'
regs[0x21] = 'Code Register III'
regs[0x22] = 'IF Calibration Register I'
regs[0x23] = 'IF Calibration Register II'
regs[0x24] = 'VCO current Calibration Register'
regs[0x25] = 'VCO Single band Calibration Register I'
regs[0x26] = 'VCO Single band Calibration Register II'
regs[0x27] = 'Battery detect Register'
regs[0x28] = 'TX test Register'
regs[0x29] = 'Rx DEM test Register I'
regs[0x2A] = 'Rx DEM test Register II'
regs[0x2B] = 'Charge Pump Current Register'
regs[0x2C] = 'Crystal test Register'
regs[0x2D] = 'PLL test Register'
regs[0x2E] = 'VCO test Register I'
regs[0x2F] = 'VCO test Register II'
regs[0x30] = 'IFAT Register'
regs[0x31] = 'RScale Register'
regs[0x32] = 'Filter test Register'
#
# Keep the last value written to each register here (for better context)
# FIFO and ID registers won't work well since they are multi-byte, but that's ok
#
tmpRegs = array.array('B', [0] * (0x32 + 1))  # one slot for each register 0x00-0x32
#
# Custom packet processing functions
#
def processRSSI(packet):
rString = ''
if (packet[0] & READ_BIT) != 0:
rString += 'RSSI = ' + format((1.2 * packet[1]) / 256.0, "0.3f") + 'V'
else:
rString += 'RSSI Threshold Register [' + format(packet[1], '02X') + ']'
return rString
def processMode(packet):
rString = ''
if (packet[0] & READ_BIT) != 0:
rString = 'MODE: '
# FEC Flag
if ((packet[1] >> 6) & 0x1) == 1:
rString += 'FECERR '
else:
rString += 'FECOK '
# CRC Flag
if ((packet[1] >> 5) & 0x1) == 1:
rString += 'CRCERR '
else:
rString += 'CRCOK '
# RF Chip Enable Status
if ((packet[1] >> 4) & 0x1) == 1:
rString += 'RFEN '
else:
rString += 'RFDIS '
# Internal Crystal Oscillator status
if ((packet[1] >> 3) & 0x1) == 1:
rString += 'XEN '
else:
rString += 'XDIS '
# PLL Enabled
if ((packet[1] >> 2) & 0x1) == 1:
rString += 'PLLEN '
else:
rString += 'PLLDIS '
# TRX State Enabled
if ((packet[1] >> 1) & 0x1) == 1:
rString += 'TRXEN '
else:
rString += 'TRXDIS '
# TRX Status
if ((packet[1] >> 0) & 0x1) == 1:
rString += 'TX '
else:
rString += 'RX '
else:
rString += 'Device Reset'
return rString
#
# Use the previously stored PLL1-PLL5 values to compute the actual
# radio frequency and return channel number plus frequency in MHz
#
def processPLL1(packet):
rString = ''
if (packet[0] & READ_BIT) != 0:
rString += 'PLL Register I = ' + format(packet[1], '02x')
else:
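# On a write, recover the divider fields cached from earlier writes to
# PLL Registers II-V (0x10-0x13) and compute the LO frequency:
#   fLO = (DBL+1) * fXTAL/(RRC+1) * (BIP + BFP/2^16) + CHN * fCHSP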
DBL = (tmpRegs[0x10] >> 7) & 0x01
RRC = (tmpRegs[0x10] >> 5) & 0x03
BIP = ((tmpRegs[0x10] & 0x01) << 8) + tmpRegs[0x11]
BFP = (tmpRegs[0x12] << 8) + tmpRegs[0x13]
CHR = (tmpRegs[0x10] >> 1) & 0x0F
CHN = tmpRegs[0x0F]
fLO_BASE = (DBL + 1) * (fXTAL/(RRC + 1)) * (BIP + BFP/pow(2,16))
fCHSP = fXTAL * (DBL + 1) / 4.0 / (CHR + 1)
fOFFSET = CHN * fCHSP
fLO = fLO_BASE + fOFFSET
rString += 'CH ' + str(CHN) + ' (' + str(fLO/1000000) + 'MHz)'
return rString
#
# Decode protoX packets
# Only decoding 0x20 (control) packets right now
#
def processFIFO(packet):
rString = ''
if(packet[1] == 0x20):
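# Byte 1 is the packet type; 0x20 carries the stick/control data (the only type decoded here)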
# Throttle
rString += 'T: ' + format(packet[3]/255.0 * 100, '2.0f') + ' '
# Rudder (Yaw left/right)
rString += 'R: ' + format((packet[5] - 128)/127.0 * 100, '2.0f') + ' '
# Elevator (Pitch forward/back)
rString += 'E: ' + format(-(packet[7] - 128)/127.0 * 100, '2.0f') + ' '
# Aileron (Roll left/right)
rString += 'A: ' + format(-(packet[9] - 128)/127.0 * 100, '2.0f') + ' '
if((packet[10] & 0x08)):
rString += 'Flips Enabled'
else :
rString += 'Flips Disabled'
# Whole string
rString = rString + ' ' + " | ".join(format(x, '#04x') for x in packet)
else:
rString += 'FIFO '
rString = rString + " | ".join(format(x, '#04x') for x in packet)
rString = rString.strip()
return rString
# Overwrite register functions
regs[0x00] = processMode
regs[0x05] = processFIFO
regs[0x0F] = processPLL1
regs[0x1D] = processRSSI
#
# Generic packet processing functions
#
def processPacket(packetString):
packet = array.array('B')
rString = ''
# Build a byte array from the hex string for actual processing
for byte in packetString.split():
packet.append(int(byte,16))
# Strip read bit so we can use the regs[] array
reg = packet[0] & ~READ_BIT
if (packet[0] & READ_BIT) != 0:
rString += 'R '
else:
rString += 'W '
# Store register value for later use
tmpRegs[reg] = packet[1]
if reg in regs:
#
# Check if we have a special packet processing function for this register
# otherwise just print the name/description and values
#
if hasattr(regs[reg], '__call__'):
rString += regs[reg](packet)
else:
rString += regs[reg] + ' ['
for byteIndex in range(1, len(packet)):
rString += format(packet[byteIndex], '02X') + ' '
rString = rString.strip()
rString += ']'
else:
rString += '- unknown register! ' + format(reg, '02X')
return rString
def processStrobe(strobe):
return ('S ' + strobes[strobe])
#
# 'public' functions
#
#
# decodeSPIPacket
#
# packetString should be a string of space separated bytes
# Examples:
# '26 3b'
# '05 01 82 96 d5 18 f6 00 00 00 00 00 00 00 00 00 04 '
#
# The return value is a string with the decoded packet
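# For example, '26 3b' decodes to:
#   'W VCO Single band Calibration Register II [3B]'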
#
def decodeSPIPacket(packetString):
if len(packetString) == 0:
return ''
firstByte = int(packetString.split()[0], 16)
if (firstByte & STROBE_BIT) != 0:
return processStrobe(firstByte)
else:
return processPacket(packetString)
if __name__ == '__main__':
# TODO - process lines from a previously processed file?
print('Standalone operation not yet implemented...')
| 26.308176
| 81
| 0.584031
|
f1f3b1f89ecf72014da867141d7efdcec433972b
| 241
|
py
|
Python
|
straal/__init__.py
|
piotr-szpetkowski/straal-python
|
f72fe0aed734bf5439c4ddee9bc9bad12f619c33
|
[
"Apache-2.0"
] | null | null | null |
straal/__init__.py
|
piotr-szpetkowski/straal-python
|
f72fe0aed734bf5439c4ddee9bc9bad12f619c33
|
[
"Apache-2.0"
] | null | null | null |
straal/__init__.py
|
piotr-szpetkowski/straal-python
|
f72fe0aed734bf5439c4ddee9bc9bad12f619c33
|
[
"Apache-2.0"
] | null | null | null |
from straal.base import get_api_key, get_base_url, init
from straal.cards import Card
from straal.customers import Customer
from straal.filters import filters
__all__ = ["get_api_key", "get_base_url", "init", "Card", "Customer", "filters"]
| 34.428571
| 80
| 0.780083
|
ae954749a1e3ef3477e0530c4d433610d05be343
| 2,361
|
py
|
Python
|
pymc3_ext/tests/test_updates.py
|
wlad111/pymc3
|
43432834be5bbca72caa32d40a848515eea554a8
|
[
"Apache-2.0"
] | null | null | null |
pymc3_ext/tests/test_updates.py
|
wlad111/pymc3
|
43432834be5bbca72caa32d40a848515eea554a8
|
[
"Apache-2.0"
] | null | null | null |
pymc3_ext/tests/test_updates.py
|
wlad111/pymc3
|
43432834be5bbca72caa32d40a848515eea554a8
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import numpy as np
import theano
from theano.configparser import change_flags
from pymc3_ext.variational.updates import (
sgd,
momentum,
nesterov_momentum,
adagrad,
rmsprop,
adadelta,
adam,
adamax,
adagrad_window
)
_a = theano.shared(1.)
_b = _a*2
_m = theano.shared(np.empty((10, ), theano.config.floatX))
_n = _m.sum()
_m2 = theano.shared(np.empty((10, 10, 10), theano.config.floatX))
_n2 = _b + _n + _m2.sum()
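# Small shared-variable graphs reused by the parametrized cases below:
# a scalar loss (_b), a vector-sum loss (_n) and a mixed loss over all three parameters (_n2)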
@pytest.mark.parametrize(
'opt',
[sgd, momentum, nesterov_momentum,
adagrad, rmsprop, adadelta, adam,
adamax, adagrad_window],
ids=['sgd', 'momentum', 'nesterov_momentum',
'adagrad', 'rmsprop', 'adadelta', 'adam',
'adamax', 'adagrad_window']
)
@pytest.mark.parametrize(
'getter',
[lambda t: t, # all params -> ok
lambda t: (None, t[1]), # missing loss -> fail
lambda t: (t[0], None), # missing params -> fail
lambda t: (None, None)], # all missing -> partial
ids=['all_params',
'missing_loss',
'missing_params',
'all_missing']
)
@pytest.mark.parametrize(
'kwargs',
[dict(), dict(learning_rate=1e-2)],
ids=['without_args', 'with_args']
)
@pytest.mark.parametrize(
'loss_and_params',
[(_b, [_a]), (_n, [_m]), (_n2, [_a, _m, _m2])],
ids=['scalar', 'matrix', 'mixed']
)
def test_updates_fast(opt, loss_and_params, kwargs, getter):
with change_flags(compute_test_value='ignore'):
loss, param = getter(loss_and_params)
args = dict()
args.update(**kwargs)
args.update(dict(loss_or_grads=loss, params=param))
if loss is None and param is None:
updates = opt(**args)
# Here we should get new callable
assert callable(updates)
# And be able to get updates
updates = opt(_b, [_a])
assert isinstance(updates, dict)
# case when both are None is above
elif loss is None or param is None:
            # Here the user has provided an incomplete set of [params + loss_or_grads],
            # so we raise a ValueError
with pytest.raises(ValueError):
opt(**args)
else:
# Usual call to optimizer, old behaviour
updates = opt(**args)
assert isinstance(updates, dict)
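# A minimal sketch (not part of the original test suite) of the call pattern that the
# 'all_missing' case above exercises: calling an optimizer with neither loss_or_grads
# nor params yields a partially-applied callable that can be invoked later with the
# actual loss and parameters. The helper name below is made up for illustration.
def _example_partial_usage():
    with change_flags(compute_test_value='ignore'):
        # No loss or params yet - we only fix the hyperparameters.
        opt_partial = adam(loss_or_grads=None, params=None, learning_rate=1e-2)
        assert callable(opt_partial)
        # Supply the loss and parameter list later to obtain the update dict.
        updates = opt_partial(_b, [_a])
        assert isinstance(updates, dict)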
| 29.886076
| 98
| 0.597205
|
29c7172609ed4f24032a25aaf60f35878cd61506
| 15,778
|
py
|
Python
|
tests/python/pants_test/engine/legacy/test_changed_integration.py
|
mateor/pants
|
e01cee8959da269c0b526138760847901e4d4a48
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/engine/legacy/test_changed_integration.py
|
mateor/pants
|
e01cee8959da269c0b526138760847901e4d4a48
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/engine/legacy/test_changed_integration.py
|
mateor/pants
|
e01cee8959da269c0b526138760847901e4d4a48
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import shutil
import subprocess
import unittest
from contextlib import contextmanager
from textwrap import dedent
from pants.base.build_environment import get_buildroot
from pants.util.contextutil import environment_as, temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open, touch
from pants_test.base_test import TestGenerator
from pants_test.pants_run_integration_test import PantsRunIntegrationTest, ensure_engine
from pants_test.testutils.git_util import initialize_repo
def lines_to_set(str_or_list):
if isinstance(str_or_list, list):
return set(str_or_list)
else:
return set(x for x in str(str_or_list).split('\n') if x)
@contextmanager
def mutated_working_copy(files_to_mutate, to_append='\n '):
"""Given a list of files, append whitespace to each of them to trigger a git diff - then reset."""
assert to_append, 'to_append may not be empty'
for f in files_to_mutate:
with open(f, 'ab') as fh:
fh.write(to_append)
try:
yield
finally:
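    # Undo the mutation: seek back past the appended bytes and truncate, restoring each
    # file to its original contents even if the wrapped block raised.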
seek_point = len(to_append) * -1
for f in files_to_mutate:
with open(f, 'ab') as fh:
fh.seek(seek_point, os.SEEK_END)
fh.truncate()
@contextmanager
def create_isolated_git_repo():
# Isolated Git Repo Structure:
# worktree
# |--README
# |--pants.ini
# |--3rdparty
# |--BUILD
# |--src
# |--resources
# |--org/pantsbuild/resourceonly
# |--BUILD
# |--README.md
# |--java
# |--org/pantsbuild/helloworld
# |--BUILD
# |--helloworld.java
# |--python
# |--python_targets
# |--BUILD
# |--test_binary.py
# |--test_library.py
# |--test_unclaimed_src.py
# |--sources
# |--BUILD
# |--sources.py
# |--sources.txt
# |--tests
# |--scala
# |--org/pantsbuild/cp-directories
# |--BUILD
# |--ClasspathDirectoriesSpec.scala
with temporary_dir(root_dir=get_buildroot()) as worktree:
with safe_open(os.path.join(worktree, 'README'), 'w') as fp:
fp.write('Just a test tree.')
# Create an empty pants config file.
touch(os.path.join(worktree, 'pants.ini'))
# Copy .gitignore to new repo.
shutil.copyfile('.gitignore', os.path.join(worktree, '.gitignore'))
with initialize_repo(worktree=worktree, gitdir=os.path.join(worktree, '.git')) as git:
# Resource File
resource_file = os.path.join(worktree, 'src/resources/org/pantsbuild/resourceonly/README.md')
with safe_open(resource_file, 'w') as fp:
fp.write('Just resource.')
resource_build_file = os.path.join(worktree, 'src/resources/org/pantsbuild/resourceonly/BUILD')
with safe_open(resource_build_file, 'w') as fp:
fp.write(dedent("""
resources(
name='resource',
sources=['README.md'],
)
"""))
git.add(resource_file, resource_build_file)
git.commit('Check in a resource target.')
# Java Program
src_file = os.path.join(worktree, 'src/java/org/pantsbuild/helloworld/helloworld.java')
with safe_open(src_file, 'w') as fp:
fp.write(dedent("""
package org.pantsbuild.helloworld;
class HelloWorld {
public static void main(String[] args) {
            System.out.println("Hello, World!\\n");
}
}
"""))
src_build_file = os.path.join(worktree, 'src/java/org/pantsbuild/helloworld/BUILD')
with safe_open(src_build_file, 'w') as fp:
fp.write(dedent("""
jvm_binary(
dependencies=[
'{}',
],
source='helloworld.java',
main='org.pantsbuild.helloworld.HelloWorld',
)
""".format('src/resources/org/pantsbuild/resourceonly:resource')))
git.add(src_file, src_build_file)
git.commit('hello world java program with a dependency on a resource file.')
# Scala Program
scala_src_dir = os.path.join(worktree, 'tests/scala/org/pantsbuild/cp-directories')
safe_mkdir(os.path.dirname(scala_src_dir))
shutil.copytree('testprojects/tests/scala/org/pantsbuild/testproject/cp-directories', scala_src_dir)
git.add(scala_src_dir)
git.commit('Check in a scala test target.')
# Python library and binary
python_src_dir = os.path.join(worktree, 'src/python/python_targets')
safe_mkdir(os.path.dirname(python_src_dir))
shutil.copytree('testprojects/src/python/python_targets', python_src_dir)
git.add(python_src_dir)
git.commit('Check in python targets.')
# A `python_library` with `resources=['file.name']`.
python_src_dir = os.path.join(worktree, 'src/python/sources')
safe_mkdir(os.path.dirname(python_src_dir))
shutil.copytree('testprojects/src/python/sources', python_src_dir)
git.add(python_src_dir)
git.commit('Check in a python library with resource dependency.')
# Copy 3rdparty/BUILD.
_3rdparty_build = os.path.join(worktree, '3rdparty/BUILD')
safe_mkdir(os.path.dirname(_3rdparty_build))
shutil.copyfile('3rdparty/BUILD', _3rdparty_build)
git.add(_3rdparty_build)
git.commit('Check in 3rdparty/BUILD.')
with environment_as(PANTS_BUILDROOT_OVERRIDE=worktree):
yield worktree
class ChangedIntegrationTest(PantsRunIntegrationTest, TestGenerator):
TEST_MAPPING = {
# A `jvm_binary` with `source='file.name'`.
'src/java/org/pantsbuild/helloworld/helloworld.java': dict(
none=['src/java/org/pantsbuild/helloworld:helloworld'],
direct=['src/java/org/pantsbuild/helloworld:helloworld'],
transitive=['src/java/org/pantsbuild/helloworld:helloworld']
),
# A `python_binary` with `source='file.name'`.
'src/python/python_targets/test_binary.py': dict(
none=['src/python/python_targets:test'],
direct=['src/python/python_targets:test'],
transitive=['src/python/python_targets:test']
),
# A `python_library` with `sources=['file.name']`.
'src/python/python_targets/test_library.py': dict(
none=['src/python/python_targets:test_library'],
direct=['src/python/python_targets:test',
'src/python/python_targets:test_library',
'src/python/python_targets:test_library_direct_dependee'],
transitive=['src/python/python_targets:test',
'src/python/python_targets:test_library',
'src/python/python_targets:test_library_direct_dependee',
'src/python/python_targets:test_library_transitive_dependee',
'src/python/python_targets:test_library_transitive_dependee_2',
'src/python/python_targets:test_library_transitive_dependee_3',
'src/python/python_targets:test_library_transitive_dependee_4']
),
# A `resources` target with `sources=['file.name']` referenced by a `java_library` target.
'src/resources/org/pantsbuild/resourceonly/README.md': dict(
none=['src/resources/org/pantsbuild/resourceonly:resource'],
direct=['src/java/org/pantsbuild/helloworld:helloworld',
'src/resources/org/pantsbuild/resourceonly:resource'],
transitive=['src/java/org/pantsbuild/helloworld:helloworld',
'src/resources/org/pantsbuild/resourceonly:resource'],
),
# A `python_library` with `resources=['file.name']`.
'src/python/sources/sources.txt': dict(
none=['src/python/sources:sources'],
direct=['src/python/sources:sources'],
transitive=['src/python/sources:sources']
),
# A `scala_library` with `sources=['file.name']`.
'tests/scala/org/pantsbuild/cp-directories/ClasspathDirectoriesSpec.scala': dict(
none=['tests/scala/org/pantsbuild/cp-directories:cp-directories'],
direct=['tests/scala/org/pantsbuild/cp-directories:cp-directories'],
transitive=['tests/scala/org/pantsbuild/cp-directories:cp-directories']
),
# An unclaimed source file.
'src/python/python_targets/test_unclaimed_src.py': dict(
none=[],
direct=[],
transitive=[]
)
}
@classmethod
def generate_tests(cls):
"""Generates tests on the class for better reporting granularity than an opaque for loop test."""
def safe_filename(f):
return f.replace('/', '_').replace('.', '_')
for filename, dependee_mapping in cls.TEST_MAPPING.items():
for dependee_type in dependee_mapping.keys():
# N.B. The parameters here are used purely to close over the respective loop variables.
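        # (Default-argument values are evaluated at function definition time; without them,
        # every generated test would see only the final values of `filename` and
        # `dependee_type`, since closures look those names up at call time.)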
def inner_integration_coverage_test(self, filename=filename, dependee_type=dependee_type):
with create_isolated_git_repo() as worktree:
# Mutate the working copy so we can do `--changed-parent=HEAD` deterministically.
with mutated_working_copy([os.path.join(worktree, filename)]):
stdout = self.assert_changed_new_equals_old(
['--changed-include-dependees={}'.format(dependee_type), '--changed-parent=HEAD'],
test_list=True
)
self.assertEqual(
lines_to_set(self.TEST_MAPPING[filename][dependee_type]),
lines_to_set(stdout)
)
cls.add_test(
'test_changed_coverage_{}_{}'.format(dependee_type, safe_filename(filename)),
inner_integration_coverage_test
)
def assert_changed_new_equals_old(self, extra_args, success=True, test_list=False):
args = ['-q', 'changed'] + extra_args
changed_run = self.do_command(*args, success=success, enable_v2_engine=False)
engine_changed_run = self.do_command(*args, success=success, enable_v2_engine=True)
self.assertEqual(
lines_to_set(changed_run.stdout_data), lines_to_set(engine_changed_run.stdout_data)
)
if test_list:
# In the v2 engine, `--changed-*` options can alter the specs of any goal - test with `list`.
list_args = ['-q', 'list'] + extra_args
engine_list_run = self.do_command(*list_args, success=success, enable_v2_engine=True)
self.assertEqual(
lines_to_set(changed_run.stdout_data), lines_to_set(engine_list_run.stdout_data)
)
# If we get to here without asserting, we know all copies of stdout are identical - return one.
return changed_run.stdout_data
@ensure_engine
def test_changed_options_scope_shadowing(self):
"""Tests that the `test-changed` scope overrides `changed` scope."""
changed_src = 'src/python/python_targets/test_library.py'
expected_target = self.TEST_MAPPING[changed_src]['none'][0]
expected_set = {expected_target}
not_expected_set = set(self.TEST_MAPPING[changed_src]['transitive']).difference(expected_set)
with create_isolated_git_repo() as worktree:
with mutated_working_copy([os.path.join(worktree, changed_src)]):
pants_run = self.run_pants([
'-ldebug', # This ensures the changed target name shows up in the pants output.
'test-changed',
'--test-changed-changes-since=HEAD',
'--test-changed-include-dependees=none', # This option should be used.
'--changed-include-dependees=transitive' # This option should be stomped on.
])
self.assert_success(pants_run)
for expected_item in expected_set:
self.assertIn(expected_item, pants_run.stdout_data)
for not_expected_item in not_expected_set:
if expected_target.startswith(not_expected_item):
continue # Ignore subset matches.
self.assertNotIn(not_expected_item, pants_run.stdout_data)
@ensure_engine
def test_changed_options_scope_positional(self):
changed_src = 'src/python/python_targets/test_library.py'
expected_set = set(self.TEST_MAPPING[changed_src]['transitive'])
with create_isolated_git_repo() as worktree:
with mutated_working_copy([os.path.join(worktree, changed_src)]):
pants_run = self.run_pants([
'-ldebug', # This ensures the changed target names show up in the pants output.
'test-changed',
'--changes-since=HEAD',
'--include-dependees=transitive'
])
self.assert_success(pants_run)
for expected_item in expected_set:
self.assertIn(expected_item, pants_run.stdout_data)
@ensure_engine
def test_test_changed_exclude_target(self):
changed_src = 'src/python/python_targets/test_library.py'
exclude_target_regexp = r'_[0-9]'
excluded_set = {'src/python/python_targets:test_library_transitive_dependee_2',
'src/python/python_targets:test_library_transitive_dependee_3',
'src/python/python_targets:test_library_transitive_dependee_4'}
expected_set = set(self.TEST_MAPPING[changed_src]['transitive']) - excluded_set
with create_isolated_git_repo() as worktree:
with mutated_working_copy([os.path.join(worktree, changed_src)]):
pants_run = self.run_pants([
'-ldebug', # This ensures the changed target names show up in the pants output.
'--exclude-target-regexp={}'.format(exclude_target_regexp),
'test-changed',
'--changes-since=HEAD',
'--include-dependees=transitive'
])
self.assert_success(pants_run)
for expected_item in expected_set:
self.assertIn(expected_item, pants_run.stdout_data)
for excluded_item in excluded_set:
self.assertNotIn(excluded_item, pants_run.stdout_data)
@ensure_engine
def test_changed_changed_since_and_files(self):
with create_isolated_git_repo():
stdout = self.assert_changed_new_equals_old(['--changed-since=HEAD^^', '--files'])
# The output should be the files added in the last 2 commits.
self.assertEqual(
lines_to_set(stdout),
{'src/python/sources/BUILD',
'src/python/sources/sources.py',
'src/python/sources/sources.txt',
'3rdparty/BUILD'}
)
@ensure_engine
def test_changed_diffspec_and_files(self):
with create_isolated_git_repo():
git_sha = subprocess.check_output(['git', 'rev-parse', 'HEAD^^']).strip()
stdout = self.assert_changed_new_equals_old(['--changed-diffspec={}'.format(git_sha), '--files'])
# The output should be the files added in the last 2 commits.
self.assertEqual(
lines_to_set(stdout),
{'src/python/python_targets/BUILD',
'src/python/python_targets/test_binary.py',
'src/python/python_targets/test_library.py',
'src/python/python_targets/test_unclaimed_src.py'}
)
# Following 4 tests do not run in isolated repo because they don't mutate working copy.
def test_changed(self):
self.assert_changed_new_equals_old([])
@unittest.skip("Pending fix for https://github.com/pantsbuild/pants/issues/4010")
def test_changed_with_changes_since(self):
self.assert_changed_new_equals_old(['--changes-since=HEAD^^'])
@unittest.skip("Pending fix for https://github.com/pantsbuild/pants/issues/4010")
def test_changed_with_changes_since_direct(self):
self.assert_changed_new_equals_old(['--changes-since=HEAD^^', '--include-dependees=direct'])
@unittest.skip("Pending fix for https://github.com/pantsbuild/pants/issues/4010")
def test_changed_with_changes_since_transitive(self):
self.assert_changed_new_equals_old(['--changes-since=HEAD^^', '--include-dependees=transitive'])
ChangedIntegrationTest.generate_tests()
| 40.352941
| 106
| 0.67949
|
7fdf0f90f7d73d2f4eea3c057ff0a56276356bed
| 3,075
|
py
|
Python
|
examples/src/main/python/forwarding.py
|
observernet/obsrj
|
8a9f630115d1df1055b68d97951ee35e9d841acc
|
[
"Apache-2.0"
] | null | null | null |
examples/src/main/python/forwarding.py
|
observernet/obsrj
|
8a9f630115d1df1055b68d97951ee35e9d841acc
|
[
"Apache-2.0"
] | null | null | null |
examples/src/main/python/forwarding.py
|
observernet/obsrj
|
8a9f630115d1df1055b68d97951ee35e9d841acc
|
[
"Apache-2.0"
] | 1
|
2021-05-23T09:23:27.000Z
|
2021-05-23T09:23:27.000Z
|
# An example of how to use Jython to implement the "Getting Started" tutorial app, which receives coins and simply
# sends them on (minus a fee).
__author__ = "richard 'ragmondo' green"
import sys
# Change this to point to where you have a copy of the bitcoinj.jar
sys.path.append(r"/path/to/bitcoinj-core-0.12-bundled.jar")
# This is the address to forward all payments to. Change this (unless you want to send me some testnet coins)
my_address_text = "mzEjmna15T7DXj4HC9MBEG2UJzgFfEYtFo"
# 0 for instant send, 1 for a more realistic example
# if the wallet has no btc in it, then set to 1.
# if it has a confirmed balance in it, then you can set it to 0.
confirm_wait = 1
from org.obsrj.core import *
import org.obsrj.crypto.KeyCrypterException
import org.obsrj.params.MainNetParams
from org.obsrj.kits import WalletAppKit
from com.google.common.util.concurrent import FutureCallback
from com.google.common.util.concurrent import Futures
import java.io.File
import sys
def loud_exceptions(*args):
def _trace(func):
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception, e:
print "** python exception ",e
raise
except java.lang.Exception,e:
print "** java exception",e
raise
return wrapper
if len(args) == 1 and callable(args[0]):
return _trace(args[0])
else:
return _trace
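# The decorator above works both bare (as used below: @loud_exceptions) and with
# parentheses (@loud_exceptions(...)): the callable() check distinguishes being handed
# the decorated function directly from being handed decorator arguments.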
@loud_exceptions
def forwardCoins(tx,w,pg,addr):
v = tx.getValueSentToMe(w)
amountToSend = v.subtract(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE)
sr = w.sendCoins(pg, addr, amountToSend)
class SenderListener(AbstractWalletEventListener):
def __init__(self,pg,address):
        super(SenderListener, self).__init__()
self.peerGroup = pg
self.address = address
@loud_exceptions
def onCoinsReceived(self, w, tx, pb, nb):
print "tx received", tx
v = tx.getValueSentToMe(w)
class myFutureCallback(FutureCallback):
@loud_exceptions
def onSuccess(selfx, txn):
forwardCoins(tx,w,self.peerGroup, self.address)
print "creating %s confirm callback..." % (confirm_wait)
Futures.addCallback(tx.getConfidence().getDepthFuture(confirm_wait), myFutureCallback())
if __name__ == "__main__":
params = org.obsrj.params.TestNet3Params.get()
my_address = Address(params,my_address_text)
filePrefix = "forwarding-service-testnet"
f = java.io.File(".")
kit = WalletAppKit(params, f, filePrefix);
print "starting and initialising (please wait).."
kit.startAsync()
kit.awaitRunning()
pg = kit.peerGroup()
wallet = kit.wallet()
sendToAddress = kit.wallet().currentReceiveKey().toAddress(params)
print "send test coins to ", sendToAddress, "qrcode - http://qrickit.com/api/qr?d=%s" % (sendToAddress) # no affiliation with qrickit..
sl = SenderListener(pg,my_address)
wallet.addEventListener(sl)
print "finished initialising .. now in main event loop"
| 33.791209
| 139
| 0.682602
|