max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
credsweeper/file_handler/patch_provider.py | iuriimet/CredSweeper | 0 | 12763851 | from typing import Dict, List, Optional
from credsweeper.file_handler.diff_content_provider import DiffContentProvider
from credsweeper.file_handler.files_provider import FilesProvider
from credsweeper.utils import Util
class PatchProvider(FilesProvider):
    """Provide data from a list of `.patch` files.

    Allows to scan for data that has changed between git commits,
    rather than the entire project.

    Attributes:
        paths: file paths list to scan. All files should be in `.patch` format
        change_type: string, type of analyses changes in patch (added or deleted)
        skip_ignored: boolean variable, Checking the directory to the list
            of ignored directories from the gitignore file
    """

    def __init__(self,
                 paths: List[str],
                 change_type: Optional[str] = None,
                 skip_ignored: Optional[bool] = None) -> None:
        """Initialize Files Patch Provider for patch files from 'paths'.

        Args:
            paths: file paths list to scan. All files should be in `.patch` format
            change_type: string, type of analyses changes in patch (added or deleted)
            skip_ignored: boolean variable, Checking the directory to the list
                of ignored directories from the gitignore file
        """
        self.paths = paths
        self.change_type = change_type
        # Bug fix: the attribute is documented above but was never stored,
        # so the value passed by callers silently disappeared.
        self.skip_ignored = skip_ignored

    def load_patch_data(self) -> List[List[str]]:
        """Read every patch file listed in `paths` and return the raw contents."""
        return [Util.read_file(file_path) for file_path in self.paths]

    def get_files_sequence(self, raw_patches: List[str]) -> List[DiffContentProvider]:
        """Split each raw patch into per-file diffs wrapped in DiffContentProvider.

        Args:
            raw_patches: raw patch contents as returned by `load_patch_data`

        Return:
            one DiffContentProvider per changed file across all patches
        """
        files = []
        for raw_patch in raw_patches:
            files_data = Util.patch2files_diff(raw_patch, self.change_type)
            for file_path, file_diff in files_data.items():
                files.append(DiffContentProvider(file_path=file_path, change_type=self.change_type, diff=file_diff))
        return files

    def get_scannable_files(self, config: Dict) -> List[DiffContentProvider]:
        """Get files to scan. Output based on the `paths` field.

        Args:
            config: dict of credsweeper configuration

        Return:
            file objects for analysing
        """
        diff_data = self.load_patch_data()
        return self.get_files_sequence(diff_data)
| 2.578125 | 3 |
xpxchain/models/mosaic/mosaic_properties.py | Sharmelen/python-xpx-chain-sdk | 1 | 12763852 | <reponame>Sharmelen/python-xpx-chain-sdk<filename>xpxchain/models/mosaic/mosaic_properties.py
"""
mosaic_properties
=================
Properties of an asset.
License
-------
Copyright 2019 NEM
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import annotations
import typing
from ..blockchain.network_type import OptionalNetworkType
from ... import util
__all__ = ['MosaicProperties']
# DTO for mosaic properties: a sequence of {'id': int, 'value': UInt64DTO} dicts.
DTO2Type = typing.Sequence[dict]

# Property identifiers shared by the DTO and catbuffer encodings.
FLAGS_ID = 0
DIVISIBILITY_ID = 1
DURATION_ID = 2
# Serialized size of one property entry: uint8 id + uint64 value.
PROPERTY_SIZE = 9
# Maps property IDs to the matching MosaicProperties keyword-argument names.
PROPERTIES = {
    FLAGS_ID: 'flags',
    DIVISIBILITY_ID: 'divisibility',
    DURATION_ID: 'duration',
}
def to_flags(
    supply_mutable: bool,
    transferable: bool,
    levy_mutable: bool
) -> int:
    """Pack the three mosaic property booleans into one bit mask.

    Bit 0 is supply-mutable, bit 1 is transferable, bit 2 is levy-mutable.
    """
    mask = (
        (1 if supply_mutable else 0)
        | (2 if transferable else 0)
        | (4 if levy_mutable else 0)
    )
    return mask
def property_to_catbuffer(id: int, value: int) -> bytes:
    """Serialize a (property ID, value) pair into catbuffer bytes."""
    id_bytes = util.u8_to_catbuffer(id)
    value_bytes = util.u64_to_catbuffer(value)
    return id_bytes + value_bytes
def property_from_catbuffer(catbuffer: bytes) -> typing.Tuple[int, int]:
    """Deserialize catbuffer bytes into a (property ID, value) pair."""
    prop_id = util.u8_from_catbuffer(catbuffer[:1])
    prop_value = util.u64_from_catbuffer(catbuffer[1:])
    return (prop_id, prop_value)
@util.inherit_doc
@util.dataclass(frozen=True)
class MosaicProperties(util.DTO):
    """
    Properties of an asset.

    Note: The `MosaicDefinitionTransaction` uses a different DTO
    format for `MosaicProperties` than specified here.

    :param flags: Flags for the properties of the mosaic.
    :param divisibility: Decimal places mosaic can be divided into [0-6].
    :param duration: Number of blocks the mosaic will be available.

    DTO Format:
        .. code-block:: yaml

            MosaicPropertiesDTO: UInt64DTO[]
    """

    # Bit mask (bit 0: supply mutable, bit 1: transferable, bit 2: levy mutable).
    flags: int
    divisibility: int
    duration: int

    def __init__(
        self,
        flags: int,
        divisibility: int = 0,
        duration: int = 0,
    ):
        """Validate and store the property values on the frozen dataclass."""
        if flags < 0 or flags > 7:
            raise ValueError('Invalid flags, not in range [0-7].')
        if divisibility < 0 or divisibility > 6:
            raise ValueError('Invalid divisibility, not in range [0-6].')
        self._set('flags', flags)
        self._set('divisibility', divisibility)
        self._set('duration', duration)

    @property
    def supply_mutable(self) -> bool:
        """Mosaic allows a supply change later on. Default false."""
        return (self.flags & 1) == 1

    @property
    def transferable(self) -> bool:
        """Allow transfer of funds from non-creator accounts. Default true."""
        return (self.flags & 2) == 2

    @property
    def levy_mutable(self) -> bool:
        """Get if levy is mutable. Default false."""
        return (self.flags & 4) == 4

    @classmethod
    def create(cls, **kwds):
        """
        Create mosaic properties with default parameters.

        :param supply_mutable: Mosaic allows supply change later.
        :param transferable: Allow transfer of funds from accounts other than creator.
        :param levy_mutable: If levy is mutable.
        :param divisibility: Decimal place mosaic can be divided into.
        :param duration: Number of blocks the mosaic will be available.
        """
        supply_mutable = typing.cast(bool, kwds.get('supply_mutable', False))
        transferable = typing.cast(bool, kwds.get('transferable', True))
        levy_mutable = typing.cast(bool, kwds.get('levy_mutable', False))
        divisibility = kwds.get('divisibility', 0)
        duration = kwds.get('duration', 0)
        flags = to_flags(supply_mutable, transferable, levy_mutable)
        return cls(flags, divisibility, duration)

    @classmethod
    def validate_dto(cls, data: typing.Sequence[dict]) -> bool:
        """Validate the data-transfer object."""
        # Duration is optional, so 2 or 3 entries are acceptable; each
        # entry must be a 2-item mapping ({'id': ..., 'value': ...}).
        return (
            len(data) in {2, 3}
            and all(len(i) == 2 for i in data)
        )

    def to_dto(
        self,
        network_type: OptionalNetworkType = None,
    ) -> typing.Sequence[dict]:
        # For indefinite mosaics, the duration is optional (default 0).
        # Consistency fix: use the module-level ID constants instead of
        # magic 0/1/2 literals, matching MosaicDefinitionProperties.to_dto.
        return [
            {'id': FLAGS_ID, 'value': util.u64_to_dto(self.flags)},
            {'id': DIVISIBILITY_ID, 'value': util.u64_to_dto(self.divisibility)},
            {'id': DURATION_ID, 'value': util.u64_to_dto(self.duration)},
        ]

    @classmethod
    def create_from_dto(
        cls,
        data: typing.Sequence[dict],
        network_type: OptionalNetworkType = None,
    ) -> MosaicProperties:
        if not cls.validate_dto(data):
            raise ValueError('Invalid data-transfer object.')

        # For indefinite mosaics, the duration is optional (default 0).
        # Robustness fix: default flags/divisibility too, because
        # validate_dto only checks entry counts, not which IDs are
        # present — previously a DTO missing one of them crashed with
        # an UnboundLocalError instead of producing a usable value.
        flags = 0
        divisibility = 0
        duration = 0
        for prop in data[0:3]:
            prop_id = util.u8_from_dto(prop["id"])
            if prop_id == FLAGS_ID:
                flags = util.u64_from_dto(prop["value"])
            elif prop_id == DIVISIBILITY_ID:
                divisibility = util.u64_from_dto(prop["value"])
            elif prop_id == DURATION_ID:
                duration = util.u64_from_dto(prop["value"])
            else:
                raise ValueError('Invalid data-transfer object.')
        return cls(flags, divisibility, duration)

    # TESTING

    # Private, internal helper for testing only.
    def _permute_(self, cb) -> MosaicProperties:
        """Permute data inside self."""
        flags = 0x7 - self.flags
        divisibility = 6 - self.divisibility
        duration = cb(self.duration)
        return MosaicProperties(flags, divisibility, duration)
@util.inherit_doc
@util.dataclass(frozen=True)
class MosaicDefinitionProperties(util.Model):
    """Internal class to provide transaction support for mosaic properties."""

    model: MosaicProperties

    def catbuffer_size(self) -> int:
        """Get the mosaic properties size as catbuffer."""
        # Get the number of optional properties (only duration is optional).
        count = 0
        if self.model.duration != 0:
            count += 1

        # uint8_t count
        # uint8_t flags
        # uint8_t divisibility
        # typedef MosaicProperty { id: uint8_t, value: uint64_t }
        # MosaicProperty[count] properties
        count_size = util.U8_BYTES
        flags_size = util.U8_BYTES
        divisibility_size = util.U8_BYTES
        property_size = util.U64_BYTES + util.U8_BYTES
        return count_size + flags_size + divisibility_size + count * property_size

    @classmethod
    def validate_dto(cls, data: DTO2Type) -> bool:
        """Validate the data-transfer object."""
        required_keys = {'id', 'value'}
        return (
            len(data) >= 2
            and all((
                cls.validate_dto_required(entry, required_keys)
                and cls.validate_dto_all(entry, required_keys)
            ) for entry in data)
        )

    def to_dto(
        self,
        network_type: OptionalNetworkType = None,
    ):
        # A newer version of DTO, which is used in MosaicDefinitionTransactions.
        # We need to keep the two versions separate.
        data = [
            {'id': FLAGS_ID, 'value': util.u64_to_dto(self.model.flags)},
            {'id': DIVISIBILITY_ID, 'value': util.u64_to_dto(self.model.divisibility)},
        ]
        # The duration entry is optional; omit it for indefinite mosaics.
        if self.model.duration != 0:
            data.append({
                'id': DURATION_ID,
                'value': util.u64_to_dto(self.model.duration)
            })
        return data

    @classmethod
    def create_from_dto(
        cls,
        data: DTO2Type,
        network_type: OptionalNetworkType = None,
    ):
        # There's a data inconsistency in replies from the node:
        # /mosaic routes contain an 'id' field, while MosaicDefinition
        # transaction replies use a 'key' field.  Normalize to 'id' on
        # shallow copies — the previous implementation popped 'key' from
        # the caller's dicts, mutating the input in place.
        normalized = []
        for item in data:
            if ('key' in item) and ('id' not in item):
                item = dict(item)
                item['id'] = item.pop('key')
            normalized.append(item)

        if not cls.validate_dto(normalized):
            raise ValueError('Invalid data-transfer object.')

        # A newer version of DTO, which is used in MosaicDefinitionTransactions.
        # We need to keep the two versions separate.
        kwds = {}
        for item in normalized:
            kwds[PROPERTIES[item['id']]] = util.u64_from_dto(item['value'])
        return cls(MosaicProperties(**kwds))

    def to_catbuffer(
        self,
        network_type: OptionalNetworkType = None,
        fee_strategy: typing.Optional[util.FeeCalculationStrategy] = util.FeeCalculationStrategy.MEDIUM,
    ) -> bytes:
        # Serialize the required properties.
        flags = util.u8_to_catbuffer(self.model.flags)
        divisibility = util.u8_to_catbuffer(self.model.divisibility)

        # Serialize the optional properties.
        counter = 0
        optional = b''
        if self.model.duration != 0:
            counter += 1
            optional += property_to_catbuffer(DURATION_ID, self.model.duration)
        count = util.u8_to_catbuffer(counter)

        return count + flags + divisibility + optional

    @classmethod
    def create_from_catbuffer_pair(
        cls,
        data: bytes,
        network_type: OptionalNetworkType = None,
    ):
        # Read the array count, property flags and divisibility.
        count = util.u8_from_catbuffer(data[:1])
        flags = util.u8_from_catbuffer(data[1:2])
        divisibility = util.u8_from_catbuffer(data[2:3])

        # Ensure the buffer is long enough for the data, and iteratively
        # read the remaining properties.
        step = PROPERTY_SIZE
        property_size = step * count
        size = 3 + property_size
        kwds = {}
        for i in range(0, property_size, step):
            start = 3 + i
            stop = start + step
            id, value = property_from_catbuffer(data[start:stop])
            kwds[PROPERTIES[id]] = value

        # Instantiate our class and return pair.
        inst = cls(MosaicProperties(flags, divisibility, **kwds))
        remaining = data[size:]
        return inst, remaining
| 1.960938 | 2 |
numero_feliz.py | neriphy/numeros_felices | 0 | 12763853 | #Programa para evaluar si un numero es feliz
numero_a_evaluar = input("Introduce el numero a evaluar: ")
n = numero_a_evaluar
suma = 2
primer_digito = 0
segundo_digito = 0
tercer_digito = 0
cuarto_digito = 0
while suma > 1:
primer_digito = numero_a_evaluar[0]
primer_digito = int(primer_digito)
print(primer_digito)
try:
segundo_digito = numero_a_evaluar[1]
segundo_digito = int(segundo_digito)
print(segundo_digito)
except IndexError as Numeromenorandigits:
pass
try:
tercer_digito = numero_a_evaluar[2]
tercer_digito = int(tercer_digito)
print(tercer_digito)
except IndexError as Numeromenorandigits:
pass
try:
cuarto_digito = numero_a_evaluar[3]
cuarto_digito = int(cuarto_digito)
print(cuarto_digito)
except IndexError as Numeromenorandigits:
pass
suma = primer_digito ** 2 + segundo_digito ** 2 + tercer_digito ** 2
print (suma)
numero_a_evaluar = suma
numero_a_evaluar = str(numero_a_evaluar)
if suma == 1:
print(n,"es un numero feliz")
| 4.125 | 4 |
tf_video/main.py | jegork/tf-video-preprocessing | 1 | 12763854 | <reponame>jegork/tf-video-preprocessing
from keras import backend
import tensorflow as tf
from keras.utils.control_flow_util import smart_cond
from keras.layers.preprocessing.preprocessing_utils import ensure_tensor
import numpy as np
from .utils import *
from keras.engine.base_layer import BaseRandomLayer
# Flip-mode identifiers accepted by VideoRandomFlip.
HORIZONTAL = "horizontal"
VERTICAL = "vertical"
HORIZONTAL_AND_VERTICAL = "horizontal_and_vertical"
# Height/width axes counted from the end, so they work for both
# (frames, h, w, c) and (batch, frames, h, w, c) video tensors.
H_AXIS = -3
W_AXIS = -2
def transform(
    video,
    transforms,
    fill_mode="reflect",
    fill_value=0.0,
    interpolation="bilinear",
    output_shape=None,
):
    """Apply projective transforms via tf.raw_ops.ImageProjectiveTransformV3.

    `video` is passed straight through as the op's `images` input, so it is
    expected as a rank-4 tensor (callers flatten batch and frame axes first);
    `transforms` holds one projective transform per frame.  `output_shape`
    defaults to the input height/width.
    """
    if output_shape is None:
        output_shape = tf.shape(video)[1:3]
        if not tf.executing_eagerly():
            # In graph mode, prefer a static shape value when one is known.
            output_shape_value = tf.get_static_value(output_shape)
            if output_shape_value is not None:
                output_shape = output_shape_value
    output_shape = tf.convert_to_tensor(output_shape, tf.int32, name="output_shape")
    if not output_shape.get_shape().is_compatible_with([2]):
        raise ValueError(
            "output_shape must be a 1-D Tensor of 2 elements: "
            "new_height, new_width, instead got "
            "{}".format(output_shape)
        )
    fill_value = tf.convert_to_tensor(fill_value, tf.float32, name="fill_value")
    # The raw op expects upper-case mode names.
    return tf.raw_ops.ImageProjectiveTransformV3(
        images=video,
        output_shape=output_shape,
        fill_value=fill_value,
        transforms=transforms,
        fill_mode=fill_mode.upper(),
        interpolation=interpolation.upper(),
    )
def check_fill_mode_and_interpolation(fill_mode, interpolation):
    """Raise NotImplementedError for unsupported fill/interpolation options."""
    supported_fill_modes = ("reflect", "wrap", "constant", "nearest")
    supported_interpolations = ("nearest", "bilinear")
    if fill_mode not in supported_fill_modes:
        raise NotImplementedError(
            "Unknown `fill_mode` {}. Only `reflect`, `wrap`, "
            "`constant` and `nearest` are supported.".format(fill_mode)
        )
    if interpolation not in supported_interpolations:
        raise NotImplementedError(
            "Unknown `interpolation` {}. Only `nearest` and "
            "`bilinear` are supported.".format(interpolation)
        )
class VideoRandomZoom(BaseRandomLayer):
    """Randomly zoom videos during training.

    One zoom factor is drawn per video and tiled across the frame axis,
    so every frame of a given video receives the same zoom.  Accepts
    rank-5 (batch, frames, h, w, c) or rank-4 (frames, h, w, c) input.
    """

    def __init__(
        self,
        height_factor,
        width_factor=None,
        fill_mode="reflect",
        interpolation="bilinear",
        seed=None,
        fill_value=0.0,
        **kwargs
    ):
        super(VideoRandomZoom, self).__init__(seed=seed, force_generator=True, **kwargs)
        # A scalar factor f means sampling from [-f, f]; a pair gives
        # explicit lower/upper bounds.
        self.height_factor = height_factor
        if isinstance(height_factor, (tuple, list)):
            self.height_lower = height_factor[0]
            self.height_upper = height_factor[1]
        else:
            self.height_lower = -height_factor
            self.height_upper = height_factor
        if abs(self.height_lower) > 1.0 or abs(self.height_upper) > 1.0:
            raise ValueError(
                "`height_factor` must have values between [-1, 1], "
                "got {}".format(height_factor)
            )
        # When width_factor is None, the height zoom is reused for width
        # (aspect-ratio-preserving zoom).
        self.width_factor = width_factor
        if width_factor is not None:
            if isinstance(width_factor, (tuple, list)):
                self.width_lower = width_factor[0]
                self.width_upper = width_factor[1]
            else:
                self.width_lower = (
                    -width_factor
                )  # pylint: disable=invalid-unary-operand-type
                self.width_upper = width_factor
            if self.width_lower < -1.0 or self.width_upper < -1.0:
                raise ValueError(
                    "`width_factor` must have values larger than -1, "
                    "got {}".format(width_factor)
                )
        check_fill_mode_and_interpolation(fill_mode, interpolation)
        self.fill_mode = fill_mode
        self.fill_value = fill_value
        self.interpolation = interpolation
        self.seed = seed

    def call(self, inputs, training=True):
        if training is None:
            training = backend.learning_phase()
        inputs = ensure_tensor(inputs, self.compute_dtype)
        original_shape = inputs.shape
        unbatched = inputs.shape.rank == 4
        # The transform op only accepts rank 4 inputs, so if we have an unbatched
        # image, we need to temporarily expand dims to a batch.

        def random_zoomed_inputs():
            """Zoomed inputs with random ops."""
            inputs_shape = tf.shape(inputs)
            if unbatched:
                batch_size = 1
                frame_size = inputs_shape[0]
            else:
                batch_size = inputs_shape[0]
                frame_size = inputs_shape[1]
            img_hd = tf.cast(inputs_shape[H_AXIS], tf.float32)
            img_wd = tf.cast(inputs_shape[W_AXIS], tf.float32)
            # Sample one factor per video, then repeat it frame_size times
            # so every frame of a video shares the same zoom.
            height_zoom = self._random_generator.random_uniform(
                shape=[batch_size, 1],
                minval=1.0 + self.height_lower,
                maxval=1.0 + self.height_upper,
            )
            height_zoom = tf.reshape(tf.tile(height_zoom, [1, frame_size]), [-1, 1])
            if self.width_factor is not None:
                width_zoom = self._random_generator.random_uniform(
                    shape=[batch_size, 1],
                    minval=1.0 + self.width_lower,
                    maxval=1.0 + self.width_upper,
                )
                width_zoom = tf.reshape(tf.tile(width_zoom, [1, frame_size]), [-1, 1])
            else:
                width_zoom = height_zoom
            zooms = tf.cast(
                tf.concat([width_zoom, height_zoom], axis=1), dtype=tf.float32
            )
            if not unbatched:
                # Flatten (batch, frames) into one frame axis so the rank-4
                # transform op can process all frames at once, then restore.
                flat_inputs = tf.reshape(inputs, [ batch_size * frame_size, inputs_shape[2], inputs_shape[3], inputs_shape[4] ])
                transformed = transform(
                    flat_inputs,
                    get_zoom_matrix(zooms, img_hd, img_wd),
                    fill_mode=self.fill_mode,
                    fill_value=self.fill_value,
                    interpolation=self.interpolation,
                )
                return tf.reshape(transformed, inputs_shape)
            else:
                return transform(
                    inputs,
                    get_zoom_matrix(zooms, img_hd, img_wd),
                    fill_mode=self.fill_mode,
                    fill_value=self.fill_value,
                    interpolation=self.interpolation,
                )

        output = smart_cond(training, random_zoomed_inputs, lambda: inputs)
        output.set_shape(original_shape)
        return output

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {
            "height_factor": self.height_factor,
            "width_factor": self.width_factor,
            "fill_mode": self.fill_mode,
            "fill_value": self.fill_value,
            "interpolation": self.interpolation,
            "seed": self.seed,
        }
        base_config = super(VideoRandomZoom, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class VideoRandomRotation(BaseRandomLayer):
    """Randomly rotate videos during training.

    One rotation angle is drawn per video (factor is expressed as a
    fraction of 2*pi) and applied to every frame of that video.
    """

    def __init__(
        self,
        factor,
        fill_mode="reflect",
        interpolation="bilinear",
        seed=None,
        fill_value=0.0,
        **kwargs
    ):
        super(VideoRandomRotation, self).__init__(
            seed=seed, force_generator=True, **kwargs
        )
        # A scalar factor f means sampling from [-f, f] (fractions of 2*pi).
        self.factor = factor
        if isinstance(factor, (tuple, list)):
            self.lower = factor[0]
            self.upper = factor[1]
        else:
            self.lower = -factor
            self.upper = factor
        if self.upper < self.lower:
            raise ValueError(
                "Factor cannot have negative values, " "got {}".format(factor)
            )
        check_fill_mode_and_interpolation(fill_mode, interpolation)
        self.fill_mode = fill_mode
        self.fill_value = fill_value
        self.interpolation = interpolation
        self.seed = seed

    def call(self, inputs, training=True):
        if training is None:
            training = backend.learning_phase()
        inputs = ensure_tensor(inputs, self.compute_dtype)
        original_shape = inputs.shape
        unbatched = inputs.shape.rank == 4
        # The transform op only accepts rank 4 inputs, so if we have an unbatched
        # image, we need to temporarily expand dims to a batch.

        def random_rotated_inputs():
            """Rotated inputs with random ops."""
            inputs_shape = tf.shape(inputs)
            if unbatched:
                batch_size = 1
            else:
                batch_size = inputs_shape[0]
            img_hd = tf.cast(inputs_shape[H_AXIS], tf.float32)
            img_wd = tf.cast(inputs_shape[W_AXIS], tf.float32)
            # Factors are fractions of a full turn, hence the 2*pi scaling.
            min_angle = self.lower * 2.0 * np.pi
            max_angle = self.upper * 2.0 * np.pi
            angles = self._random_generator.random_uniform(
                shape=[batch_size], minval=min_angle, maxval=max_angle
            )
            if unbatched:
                return transform(
                    inputs,
                    get_rotation_matrix(angles, img_hd, img_wd),
                    fill_mode=self.fill_mode,
                    fill_value=self.fill_value,
                    interpolation=self.interpolation,
                )
            else:
                # Repeat each video's angle once per frame, then flatten the
                # (batch, frames) axes so the rank-4 transform op applies.
                angles = tf.reshape(tf.tile(tf.reshape(angles, [-1, 1]), [1, inputs_shape[1]]), [-1])
                flat_inputs = tf.reshape(inputs, [batch_size * inputs_shape[1], inputs_shape[2], inputs_shape[3], inputs_shape[4]])
                transformed = transform(
                    flat_inputs,
                    get_rotation_matrix(angles, img_hd, img_wd),
                    fill_mode=self.fill_mode,
                    fill_value=self.fill_value,
                    interpolation=self.interpolation,
                )
                return tf.reshape(transformed, inputs_shape)

        output = smart_cond(training, random_rotated_inputs, lambda: inputs)
        output.set_shape(original_shape)
        return output

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {
            "factor": self.factor,
            "fill_mode": self.fill_mode,
            "fill_value": self.fill_value,
            "interpolation": self.interpolation,
            "seed": self.seed,
        }
        base_config = super(VideoRandomRotation, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class VideoRandomCrop(BaseRandomLayer):
    """Randomly crop every frame to (height, width) during training.

    A single crop offset is drawn per call and applied to every element
    mapped over the leading axis; when not training, or when the input is
    smaller than the target, frames are resized instead of cropped.
    """

    def __init__(self, height, width, seed=None, **kwargs):
        super(VideoRandomCrop, self).__init__(
            **kwargs, autocast=False, seed=seed, force_generator=True
        )
        self.height = height
        self.width = width
        self.seed = seed

    def call(self, inputs, training=True):
        if training is None:
            training = backend.learning_phase()
        inputs = ensure_tensor(inputs, dtype=self.compute_dtype)
        input_shape = tf.shape(inputs)
        # Room available for a random offset along each spatial axis.
        h_diff = input_shape[H_AXIS] - self.height
        w_diff = input_shape[W_AXIS] - self.width

        def random_crop():
            dtype = input_shape.dtype
            # Draw raw integers and fold them into the valid offset range.
            rands = self._random_generator.random_uniform([2], 0, dtype.max, dtype)
            h_start = rands[0] % (h_diff + 1)
            w_start = rands[1] % (w_diff + 1)
            return tf.map_fn(
                lambda x: tf.image.crop_to_bounding_box(
                    x, h_start, w_start, self.height, self.width
                ),
                inputs,
            )

        def resize():
            # NOTE(review): `smart_resize` is usually exposed as
            # keras.preprocessing.image.smart_resize; confirm that
            # tf.image.smart_resize resolves in the targeted TF version.
            _resize = lambda x: tf.cast(
                tf.image.smart_resize(x, [self.height, self.width]), self.compute_dtype
            )
            return tf.map_fn(_resize, inputs)

        # Crop only when training and the input is at least as large as the
        # target in both axes; otherwise fall back to resizing.
        return tf.cond(
            tf.reduce_all((training, h_diff >= 0, w_diff >= 0)), random_crop, resize
        )

    def compute_output_shape(self, input_shape):
        input_shape = tf.TensorShape(input_shape).as_list()
        input_shape[H_AXIS] = self.height
        input_shape[W_AXIS] = self.width
        return tf.TensorShape(input_shape)

    def get_config(self):
        config = {
            "height": self.height,
            "width": self.width,
            "seed": self.seed,
        }
        base_config = super(VideoRandomCrop, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class VideoRandomContrast(BaseRandomLayer):
    """Randomly adjust contrast of videos during training.

    The contrast factor is sampled from [1 - lower, 1 + upper] and applied
    by the (stateless_)random_contrast helpers imported from .utils.
    """

    def __init__(self, factor, seed=None, **kwargs):
        super(VideoRandomContrast, self).__init__(
            seed=seed, force_generator=True, **kwargs
        )
        # A scalar factor f means sampling from [1 - f, 1 + f].
        self.factor = factor
        if isinstance(factor, (tuple, list)):
            self.lower = factor[0]
            self.upper = factor[1]
        else:
            self.lower = self.upper = factor
        if self.lower < 0.0 or self.upper < 0.0 or self.lower > 1.0:
            raise ValueError(
                "Factor cannot have negative values or greater than 1.0,"
                " got {}".format(factor)
            )
        self.seed = seed
        # NOTE(review): BaseRandomLayer's __init__ already sets up
        # _random_generator when force_generator=True, so this assignment
        # looks redundant — confirm before removing.
        self._random_generator = backend.RandomGenerator(seed, force_generator=True)

    def call(self, inputs, training=True):
        if training is None:
            training = backend.learning_phase()
        inputs = ensure_tensor(inputs, self.compute_dtype)

        def random_contrasted_inputs():
            # Prefer the stateless op when a seed can be produced; fall back
            # to the legacy stateful op otherwise.
            seed = self._random_generator.make_seed_for_stateless_op()
            if seed is not None:
                return stateless_random_contrast(
                    inputs, 1.0 - self.lower, 1.0 + self.upper, seed=seed
                )
            else:
                return random_contrast(
                    inputs,
                    1.0 - self.lower,
                    1.0 + self.upper,
                    seed=self._random_generator.make_legacy_seed(),
                )

        output = smart_cond(training, random_contrasted_inputs, lambda: inputs)
        output.set_shape(inputs.shape)
        return output

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {
            "factor": self.factor,
            "seed": self.seed,
        }
        base_config = super(VideoRandomContrast, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class VideoRandomFlip(BaseRandomLayer):
    """Randomly flip videos horizontally and/or vertically during training.

    `mode` selects which axes may flip; the actual flipping is delegated to
    the (stateless_)random_flip_* helpers imported from .utils.
    """

    def __init__(self, mode=HORIZONTAL_AND_VERTICAL, seed=None, **kwargs):
        super(VideoRandomFlip, self).__init__(seed=seed, force_generator=True, **kwargs)
        self.mode = mode
        # Translate the mode string into two independent flip switches.
        if mode == HORIZONTAL:
            self.horizontal = True
            self.vertical = False
        elif mode == VERTICAL:
            self.horizontal = False
            self.vertical = True
        elif mode == HORIZONTAL_AND_VERTICAL:
            self.horizontal = True
            self.vertical = True
        else:
            raise ValueError(
                "VideoRandomFlip layer {name} received an unknown mode "
                "argument {arg}".format(name=self.name, arg=mode)
            )
        self.seed = seed
        # NOTE(review): BaseRandomLayer's __init__ already sets up
        # _random_generator when force_generator=True, so this assignment
        # looks redundant — confirm before removing.
        self._random_generator = backend.RandomGenerator(seed, force_generator=True)

    def call(self, inputs, training=True):
        if training is None:
            training = backend.learning_phase()
        inputs = ensure_tensor(inputs, self.compute_dtype)

        def random_flipped_inputs():
            # Apply each enabled flip in turn; both may fire in one call.
            flipped_outputs = inputs
            if self.horizontal:
                seed = self._random_generator.make_seed_for_stateless_op()
                if seed is not None:
                    flipped_outputs = stateless_random_flip_left_right(
                        flipped_outputs, seed=seed
                    )
                else:
                    flipped_outputs = random_flip_left_right(
                        flipped_outputs, self._random_generator.make_legacy_seed()
                    )
            if self.vertical:
                seed = self._random_generator.make_seed_for_stateless_op()
                if seed is not None:
                    flipped_outputs = stateless_random_flip_up_down(
                        flipped_outputs, seed=seed
                    )
                else:
                    flipped_outputs = random_flip_up_down(
                        flipped_outputs, self._random_generator.make_legacy_seed()
                    )
            return flipped_outputs

        output = smart_cond(training, random_flipped_inputs, lambda: inputs)
        output.set_shape(inputs.shape)
        return output

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {
            "mode": self.mode,
            "seed": self.seed,
        }
        base_config = super(VideoRandomFlip, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
| 2.546875 | 3 |
src/xdg/__init__.py | srstevenson/xdg | 70 | 12763855 | # Copyright © 2016-2021 <NAME> <<EMAIL>>
#
# Permission to use, copy, modify, and/or distribute this software for
# any purpose with or without fee is hereby granted, provided that the
# above copyright notice and this permission notice appear in all
# copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
# AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR
# PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
"""XDG Base Directory Specification variables.
xdg_cache_home(), xdg_config_home(), xdg_data_home(), and xdg_state_home()
return pathlib.Path objects containing the value of the environment variable
named XDG_CACHE_HOME, XDG_CONFIG_HOME, XDG_DATA_HOME, and XDG_STATE_HOME
respectively, or the default defined in the specification if the environment
variable is unset, empty, or contains a relative path rather than absolute
path.
xdg_config_dirs() and xdg_data_dirs() return a list of pathlib.Path
objects containing the value, split on colons, of the environment
variable named XDG_CONFIG_DIRS and XDG_DATA_DIRS respectively, or the
default defined in the specification if the environment variable is
unset or empty. Relative paths are ignored, as per the specification.
xdg_runtime_dir() returns a pathlib.Path object containing the value of
the XDG_RUNTIME_DIR environment variable, or None if the environment
variable is not set, or contains a relative path rather than absolute path.
"""
# pylint: disable=fixme
import os
from pathlib import Path
from typing import List, Optional
# Public API: the accessor functions plus the deprecated module-level
# constants that are evaluated once at import time.
__all__ = [
    "xdg_cache_home",
    "xdg_config_dirs",
    "xdg_config_home",
    "xdg_data_dirs",
    "xdg_data_home",
    "xdg_runtime_dir",
    "xdg_state_home",
    "XDG_CACHE_HOME",
    "XDG_CONFIG_DIRS",
    "XDG_CONFIG_HOME",
    "XDG_DATA_DIRS",
    "XDG_DATA_HOME",
    "XDG_RUNTIME_DIR",
]
def _path_from_env(variable: str, default: Path) -> Path:
    """Read an environment variable as a path.

    The variable's value is returned as a path; when the variable is
    unset, empty, or holds a relative rather than absolute path, the
    default is returned instead.

    Parameters
    ----------
    variable : str
        Name of the environment variable.
    default : Path
        Default value.

    Returns
    -------
    Path
        Value from environment or default.
    """
    candidate = os.environ.get(variable)
    if not candidate or not os.path.isabs(candidate):
        return default
    return Path(candidate)
def _paths_from_env(variable: str, default: List[Path]) -> List[Path]:
    """Read an environment variable as a list of paths.

    The variable's value is split on colons and returned as paths.
    Relative entries are dropped, as the specification requires; when
    the variable is unset, empty, or yields no absolute paths, the
    default is returned.

    Parameters
    ----------
    variable : str
        Name of the environment variable.
    default : List[Path]
        Default value.

    Returns
    -------
    List[Path]
        Value from environment or default.
    """
    raw = os.environ.get(variable)
    if not raw:
        return default
    absolute = [Path(part) for part in raw.split(":") if os.path.isabs(part)]
    return absolute or default
def xdg_cache_home() -> Path:
    """Return a Path corresponding to XDG_CACHE_HOME."""
    fallback = Path.home() / ".cache"
    return _path_from_env("XDG_CACHE_HOME", fallback)
def xdg_config_dirs() -> List[Path]:
    """Return a list of Paths corresponding to XDG_CONFIG_DIRS."""
    fallback = [Path("/etc/xdg")]
    return _paths_from_env("XDG_CONFIG_DIRS", fallback)
def xdg_config_home() -> Path:
    """Return a Path corresponding to XDG_CONFIG_HOME."""
    fallback = Path.home() / ".config"
    return _path_from_env("XDG_CONFIG_HOME", fallback)
def xdg_data_dirs() -> List[Path]:
    """Return a list of Paths corresponding to XDG_DATA_DIRS."""
    # Same two entries the spec lists as the default search path.
    fallback = [Path("/usr/local/share/"), Path("/usr/share/")]
    return _paths_from_env("XDG_DATA_DIRS", fallback)
def xdg_data_home() -> Path:
    """Return a Path corresponding to XDG_DATA_HOME."""
    fallback = Path.home() / ".local" / "share"
    return _path_from_env("XDG_DATA_HOME", fallback)
def xdg_runtime_dir() -> Optional[Path]:
    """Return a Path corresponding to XDG_RUNTIME_DIR.

    Per the specification, None is returned when the XDG_RUNTIME_DIR
    environment variable is unset, empty, or holds a relative path.
    """
    raw = os.getenv("XDG_RUNTIME_DIR")
    if not raw or not os.path.isabs(raw):
        return None
    return Path(raw)
def xdg_state_home() -> Path:
    """Return a Path corresponding to XDG_STATE_HOME."""
    fallback = Path.home() / ".local" / "state"
    return _path_from_env("XDG_STATE_HOME", fallback)
# The following variables are deprecated, but remain for backward compatibility.
# They capture the environment once at import time; prefer calling the
# functions above to observe later changes to the environment.
XDG_CACHE_HOME = xdg_cache_home()
XDG_CONFIG_DIRS = xdg_config_dirs()
XDG_CONFIG_HOME = xdg_config_home()
XDG_DATA_DIRS = xdg_data_dirs()
XDG_DATA_HOME = xdg_data_home()
XDG_RUNTIME_DIR = xdg_runtime_dir()
| 1.851563 | 2 |
apiempleados/admin.py | acroooo/registroempleados-spa | 0 | 12763856 | from django.contrib import admin
from .models import Empleado
# Register your models here.
class EmpleadoAdmin(admin.ModelAdmin):
    """Admin options for the Empleado model.

    Bug fix: the column list was assigned to an attribute named ``lista``,
    which Django's ModelAdmin ignores; ``list_display`` is the attribute
    the admin changelist actually reads.
    """
    list_display = ['nombre_completo', 'email', 'contacto', 'direccion']
# Expose the Empleado model in the Django admin using the EmpleadoAdmin options.
admin.site.register(Empleado, EmpleadoAdmin)
| 1.609375 | 2 |
qiskit/transpiler/propertyset.py | jagunnels/qiskit-sdk-py | 0 | 12763857 | <reponame>jagunnels/qiskit-sdk-py
# -*- coding: utf-8 -*-
# Copyright 2018, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
""" A property set is maintained by the PassManager to keep information
about the current state of the circuit """
class PropertySet(dict):
    """A dictionary that yields ``None`` for missing keys instead of raising."""

    def __missing__(self, key):
        # dict.__getitem__ invokes __missing__ on a failed lookup, so
        # ps["absent"] behaves like ps.get("absent") rather than raising.
        return None
| 1.539063 | 2 |
tests/test_command.py | acerv/etcdgo | 0 | 12763858 | """
Unittests for command module.
"""
import os
import pytest
from click.testing import CliRunner
import etcd3
import etcdgo.command
@pytest.fixture
def runner(mocker):
    """
    Click runner client.

    Patches the etcd3 client and etcdgo Config constructors so no real
    etcd connection is made, then (inside an isolated filesystem) yields
    a callback that invokes the etcdgo CLI with host/port/basefolder
    options followed by the given subcommand arguments.
    """
    mocker.patch('etcd3.Etcd3Client.__init__', return_value=None)
    mocker.patch('etcdgo.config.Config.__init__', return_value=None)

    runner = CliRunner()
    with runner.isolated_filesystem():
        def _callback(
                cmd,
                hostname="localhost",
                port="2349",
                basefolder="/cmdline_test"):
            # Global connection options must precede the subcommand.
            ret = runner.invoke(etcdgo.command.cli, [
                "-h",
                hostname,
                "-p",
                port,
                "-f",
                basefolder,
            ] + cmd)
            return ret

        yield _callback
def test_cli_help(runner):
    """
    `--help` should exit cleanly without raising.
    """
    result = runner(['--help'])
    assert result.exit_code == 0
    assert not result.exception
def test_push_config_type_error(request, runner):
    """
    Pushing a configuration with an unsupported file extension must fail.
    """
    label = request.node.name
    with open("other.txt", "w") as fhandle:
        fhandle.write("data = test")

    # push configuration file with an unsupported extension
    result = runner(['push', label, 'other.txt'])
    assert result.exit_code == 1
    assert str(result.exception) == "'.txt' extension type is not supported."
def test_push_empty_config_error(request, runner):
    """
    Pushing with an empty configuration path must fail.
    """
    result = runner(['push', 'myconfig', ''])
    assert str(result.exception) == "config can't be empty."
    assert result.exit_code == 1
def test_push_empty_label_error(request, runner):
    """
    Pushing a configuration with an empty label must fail.
    """
    result = runner(['push', '', 'other.ini'])
    assert str(result.exception) == "label can't be empty."
    assert result.exit_code == 1
def test_push_config_not_exist_error(request, runner):
    """
    Pushing a configuration file that does not exist must fail.
    """
    label = request.node.name
    # 'other.ini' was never created in the isolated filesystem
    result = runner(['push', label, 'other.ini'])
    assert str(result.exception) == "configuration file doesn't exist."
    assert result.exit_code == 1
def test_push_error(request, mocker, runner):
    """
    Exceptions raised while pushing a configuration surface through the CLI.
    """
    label = request.node.name
    with open("myconfig.ini", "w") as handle:
        handle.write("[config]\ndata = test")
    mocker.patch(
        "etcdgo.config.Config.push",
        side_effect=Exception("test exception"))
    result = runner(['push', label, 'myconfig.ini'])
    assert str(result.exception) == "test exception"
    assert result.exit_code == 1
def test_push(request, mocker, runner):
    """
    A valid configuration is pushed with the expected label and file name.
    """
    label = request.node.name
    with open("myconfig.ini", "w") as handle:
        handle.write("[config]\ndata = test")
    mocker.patch("etcdgo.config.Config.push")
    result = runner(['push', label, 'myconfig.ini'])
    assert not result.exception
    assert result.exit_code == 0
    etcdgo.config.Config.push.assert_called_with(label, "myconfig.ini")
def test_pull_empty_label_error(request, runner):
    """
    Pulling a configuration with an empty label must fail.
    """
    result = runner(['pull', ''])
    assert str(result.exception) == "label can't be empty."
    assert result.exit_code == 1
def test_pull_output_type_error(request, runner):
    """
    Pulling with an empty or unsupported output type must fail.
    """
    result = runner(['pull', 'mystuff', '--output-type', ''])
    assert str(result.exception) == "output_type can't be empty."
    assert result.exit_code == 1
    result = runner(['pull', 'mystuff', '--output-type', 'txt'])
    assert str(result.exception) == "'txt' format is not supported"
    assert result.exit_code == 1
def test_pull_error(request, mocker, runner):
    """
    Exceptions raised while pulling a configuration surface through the CLI.
    """
    label = request.node.name
    mocker.patch(
        "etcdgo.config.Config.dump",
        side_effect=Exception("test exception"))
    result = runner(['pull', label])
    assert str(result.exception) == "test exception"
    assert result.exit_code == 1
def test_pull_ini(request, mocker, runner):
    """
    Pulling an ini configuration prints it to stdout.
    """
    label = request.node.name
    mocker.patch(
        "etcdgo.config.Config.dump",
        return_value="[config]\ntest = data")
    result = runner(['pull', label, '--output-type', 'ini'])
    assert not result.exception
    assert result.exit_code == 0
    assert result.output == "[config]\ntest = data\n"
def test_pull_json(request, mocker, runner):
    """
    Pulling a json configuration prints it to stdout.
    """
    label = request.node.name
    mocker.patch(
        "etcdgo.config.Config.dump",
        return_value="{\n'test':'data'\n}")
    result = runner(['pull', label, '--output-type', 'json'])
    assert not result.exception
    assert result.exit_code == 0
    assert result.output == "{\n'test':'data'\n}\n"
def test_pull_yaml(request, mocker, runner):
    """
    Pulling a yaml configuration prints it to stdout.
    """
    key = request.node.name
    mocker.patch(
        "etcdgo.config.Config.dump",
        return_value="people:\n gianni: patoc\n gigi: bufera\n")
    # Request the 'yaml' output type: the original test asked for 'json'
    # (copy/paste slip), which left the yaml code path unexercised.
    ret = runner(['pull', key, '--output-type', 'yaml'])
    assert not ret.exception
    assert ret.exit_code == 0
    assert ret.output == "people:\n gianni: patoc\n gigi: bufera\n\n"
| 2.53125 | 3 |
tests/benchmark_scripts/toxic_train.py | stanford-futuredata/Willump | 45 | 12763859 | # Original source here: https://www.kaggle.com/tunguz/logistic-regression-with-words-and-char-n-grams
import argparse
import gc
import pickle
import time
import pandas as pd
import scipy.sparse
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import train_test_split
from toxic_utils import *
from willump.evaluation.willump_executor import willump_execute
# The six toxicity labels present in the Kaggle dataset; only "toxic" is trained below.
class_names = ['toxic', 'severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate']
base_path = "tests/test_resources/toxic_comment_classification/"
# Replace missing comments with a single space so the vectorizers never see NaN.
train = pd.read_csv(base_path + 'train.csv').fillna(' ')
train_text = train['comment_text'].values
parser = argparse.ArgumentParser()
parser.add_argument("-k", "--top_k", type=int, help="Top-K to return")
args = parser.parse_args()
# Populated by Willump with the approximation cascades learned during training.
training_cascades = {}
@willump_execute(training_cascades=training_cascades, willump_train_function=willump_train_function,
                 willump_predict_function=willump_predict_function, willump_score_function=willump_score_function,
                 willump_predict_proba_function=willump_predict_proba_function, top_k=args.top_k)
def vectorizer_transform(input_text, word_vect, char_vect, train_target):
    # Featurize raw comments with word- and char-level TF-IDF, stack the two
    # sparse matrices side by side (CSR), and train a classifier on the result.
    # NOTE(review): Willump compiles this function from its source, so the body
    # is kept in the simple straight-line form the tracer expects.
    word_features = word_vect.transform(input_text)
    char_features = char_vect.transform(input_text)
    combined_features = scipy.sparse.hstack([word_features, char_features], format="csr")
    clf = willump_train_function(combined_features, train_target)
    return clf
start_time = time.time()
# Reuse previously fitted vectorizers when cached; fitting them is expensive.
try:
    word_vectorizer, char_vectorizer = pickle.load(open(base_path + "vectorizer.pk", "rb"))
except FileNotFoundError:
    # Word-level TF-IDF over the training comments (top 10k terms).
    word_vectorizer = TfidfVectorizer(
        lowercase=False,
        analyzer='word',
        stop_words='english',
        ngram_range=(1, 1),
        encoding="ascii",
        decode_error="strict",
        max_features=10000)
    word_vectorizer.fit(train_text)
    # Character n-gram TF-IDF (2-6 character grams, top 50k features).
    char_vectorizer = TfidfVectorizer(
        lowercase=False,
        analyzer='char',
        stop_words='english',
        ngram_range=(2, 6),
        encoding="ascii",
        decode_error="strict",
        max_features=50000)
    char_vectorizer.fit(train_text)
    pickle.dump((word_vectorizer, char_vectorizer), open(base_path + "vectorizer.pk", "wb"))
class_name = "toxic"
train_target = train[class_name]
del train
gc.collect()
train_text, _, train_target, _ = train_test_split(train_text, train_target, test_size=0.2, random_state=42)
# First call: Willump profiles/compiles and trains the cascades.
classifier = vectorizer_transform(train_text, word_vectorizer, char_vectorizer, train_target)
gc.collect()
t0 = time.time()
# Second call runs the Willump-optimized version; timed separately below.
vectorizer_transform(train_text, word_vectorizer, char_vectorizer, train_target)
gc.collect()
print("Willump Train Time: %fs" % (time.time() - t0))
print("Total Train Time: %fs" % (time.time() - start_time))
pickle.dump(classifier, open(base_path + "model.pk", "wb"))
pickle.dump(training_cascades, open(base_path + "training_cascades.pk", "wb"))
world/character/tests.py | nigelmathes/GAME | 0 | 12763860 | import os
from django.test import TestCase
from actions_api.combat import Combat
from character.models import Character, StatusEffects
class CombatTests(TestCase):
    """Integration tests for Combat: round resolution, matchup table, enhanced
    attacks, and status-effect application/consumption against fixture data."""
    # NOTE(review): machine-specific absolute fixture path -- this only loads on
    # the original author's workstation; consider a project-relative path.
    fixtures = [os.path.join("/Users", "Nigel", "HOBBY_PROJECTS", "GAME", "world", "full_backup.json")]
    def test_combat_round(self):
        """
        A round of combat is done successfully
        Player wins with:
            Attack (1) beats Area (0)
        """
        # Arrange
        player = Character.objects.get(pk=1)
        target = Character.objects.get(pk=2)
        expected_player_hp = player.hit_points
        expected_target_hp = target.hit_points - 100
        expected_player_ex = 50
        expected_target_ex = 100
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="attack",
                                target_attack_type="area")
        # Act
        # Perform a round of combat
        player, target = object_to_test.do_combat_round()
        # Assert
        self.assertEqual(player.hit_points, expected_player_hp)
        self.assertEqual(target.hit_points, expected_target_hp)
        self.assertEqual(player.ex_meter, expected_player_ex)
        self.assertEqual(target.ex_meter, expected_target_ex)
    def test_matchups(self):
        """
        Test that the following holds true:
        Area beats Disrupt and Dodge
        Attack beats Disrupt and Area
        Block beats Attack and Area
        Disrupt beats Block and Dodge
        Dodge beats Attack and Block
        """
        # Arrange
        player_attacks = ['area', 'attack', 'block', 'disrupt', 'dodge']
        target_attacks = ['area', 'attack', 'block', 'disrupt', 'dodge']
        # Rows index the player's attack, columns the target's; values are the
        # HP each side keeps above the 400 baseline used in the assertions.
        expected_player_hps = [[0, 0, 0, 100, 100],
                               [100, 0, 0, 100, 0],
                               [100, 100, 0, 0, 0],
                               [0, 0, 100, 0, 100],
                               [0, 100, 100, 0, 0]]
        expected_target_hps = [[0, 100, 100, 0, 0],
                               [0, 0, 100, 0, 100],
                               [0, 0, 0, 100, 100],
                               [100, 100, 0, 0, 0],
                               [100, 0, 0, 100, 0]]
        # Act
        # Perform a round of combat
        for i, player_attack in enumerate(player_attacks):
            for j, target_attack in enumerate(target_attacks):
                player = Character.objects.get(pk=1)
                target = Character.objects.get(pk=2)
                object_to_test = Combat(player=player,
                                        target=target,
                                        player_attack_type=player_attack,
                                        target_attack_type=target_attack)
                player, target = object_to_test.do_combat_round()
                # Assert - The 400 is a kluge because I don't want to remake the list above
                self.assertEqual(player.hit_points, 400 + expected_player_hps[i][j])
                self.assertEqual(target.hit_points, 400 + expected_target_hps[i][j])
    # TODO: Make a healing ability and alter this test
    def test_healing(self):
        """ A round of combat with healing is done successfully """
        # Arrange
        player = Character.objects.get(pk=1)
        target = Character.objects.get(pk=2)
        expected_player_hp = player.hit_points - 100
        expected_target_hp = target.hit_points
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="disrupt",
                                target_attack_type="area")
        # Act
        # Perform a round of combat
        player, target = object_to_test.do_combat_round()
        # Assert
        self.assertEqual(player.hit_points, expected_player_hp)
        self.assertEqual(target.hit_points, expected_target_hp)
    def test_enhancement(self):
        """ Enhanced winning attacks inflict the expected status effects """
        # Arrange - add statuses here
        expected_statuses = [[2, "prone", 1],
                             [2, "disorient", 1]]
        ability_combos = [["disrupt", "block"],
                          ["area", "disrupt"]]
        # Act
        for expected_status, ability_combo in zip(expected_statuses, ability_combos):
            player = Character.objects.get(pk=1)
            target = Character.objects.get(pk=2)
            expected_player_hp = player.hit_points
            expected_target_hp = target.hit_points - 100
            # Perform a round of combat
            object_to_test = Combat(player=player,
                                    target=target,
                                    player_attack_type=ability_combo[0],
                                    target_attack_type=ability_combo[1],
                                    player_enhanced=True)
            # Act
            # Perform a round of combat
            player, target = object_to_test.do_combat_round()
            status_effects_list = list(StatusEffects.objects.all().values_list())
            # Assert
            self.assertEqual(player.hit_points, expected_player_hp)
            self.assertEqual(target.hit_points, expected_target_hp)
            # values_list() columns: [0]=id, [1]=character_id, [2]=name, [3]=duration
            self.assertEqual(expected_status[0], status_effects_list[0][1])
            self.assertEqual(expected_status[1], status_effects_list[0][2])
            self.assertEqual(expected_status[2], status_effects_list[0][3])
    def test_check_and_apply_status(self):
        """ Check that check_and_apply_status() updates the combat rules """
        # Arrange
        # Expected ruleset after the status takes effect: "area" additionally
        # beats "block" (compare with the unmodified ruleset in other tests).
        player = Character.objects.get(pk=1)
        target = Character.objects.get(pk=2)
        expected_rules = {"area": {"beats": ["disrupt", "dodge", "block"],
                                   "loses": ["attack"]},
                          "attack": {"beats": ["disrupt", "area"],
                                     "loses": ["block", "dodge"]},
                          "block": {"beats": ["area", "attack"],
                                    "loses": ["disrupt", "dodge"]},
                          "disrupt": {"beats": ["block", "dodge"],
                                      "loses": ["attack", "area"]},
                          "dodge": {"beats": ["attack", "block"],
                                    "loses": ["area", "disrupt"]}}
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="disrupt",
                                target_attack_type="block",
                                player_enhanced=True)
        # Inflict a status effect
        _ = object_to_test.do_combat_round()
        # Act
        # Check and apply the status effect
        _ = object_to_test.check_and_apply_status()
        # Assert
        self.assertDictEqual(object_to_test.rules, expected_rules)
    def test_consume_status(self):
        """
        Check that check_and_apply_status() applies a status,
        updates the duration, and culls the database of 0 duration statuses
        """
        # Arrange
        player = Character.objects.get(pk=1)
        target = Character.objects.get(pk=2)
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="disrupt",
                                target_attack_type="block",
                                player_enhanced=True)
        # Inflict a status effect
        _ = object_to_test.do_combat_round()
        check_status_before_apply = StatusEffects.objects.filter(character_id=target.pk)
        self.assertTrue(check_status_before_apply.exists())
        # Act
        # Check and apply the status effect
        _ = object_to_test.check_and_apply_status()
        check_status_after_apply = StatusEffects.objects.filter(character_id=target.pk)
        self.assertFalse(check_status_after_apply.exists())
    def test_new_rules_combat_resolution(self):
        """ Check that new combat rules are resolved correctly"""
        # Arrange
        player = Character.objects.get(pk=1)
        target = Character.objects.get(pk=2)
        expected_outcome = "player_wins"
        # Altered ruleset: "area" additionally beats "block" after the status.
        expected_rules = {"area": {"beats": ["disrupt", "dodge", "block"],
                                   "loses": ["attack"]},
                          "attack": {"beats": ["disrupt", "area"],
                                     "loses": ["block", "dodge"]},
                          "block": {"beats": ["area", "attack"],
                                    "loses": ["disrupt", "dodge"]},
                          "disrupt": {"beats": ["block", "dodge"],
                                      "loses": ["attack", "area"]},
                          "dodge": {"beats": ["attack", "block"],
                                    "loses": ["area", "disrupt"]}}
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="disrupt",
                                target_attack_type="block",
                                player_enhanced=True)
        # Act
        # Inflict a status effect
        _ = object_to_test.do_combat_round()
        # Check and apply the status effect
        new_rules, _ = object_to_test.check_and_apply_status()
        # Change the attack type to something that applies to the altered ruleset
        object_to_test.player_attack_type = "area"
        # Calculate the winner with the new rules
        result = object_to_test.calculate_winner()
        # Assert
        self.assertEqual(result, expected_outcome)
        self.assertDictEqual(new_rules, expected_rules)
        self.assertDictEqual(object_to_test.rules, expected_rules)
    def test_double_damage(self):
        """ Check that an extra effect is added for the double damage status"""
        # Arrange
        player = Character.objects.get(pk=3)
        target = Character.objects.get(pk=2)
        # Baseline (unmodified) ruleset -- double damage must not alter rules.
        expected_rules = {"area": {"beats": ["disrupt", "dodge"],
                                   "loses": ["attack", "block"]},
                          "attack": {"beats": ["disrupt", "area"],
                                     "loses": ["block", "dodge"]},
                          "block": {"beats": ["area", "attack"],
                                    "loses": ["disrupt", "dodge"]},
                          "disrupt": {"beats": ["block", "dodge"],
                                      "loses": ["attack", "area"]},
                          "dodge": {"beats": ["attack", "block"],
                                    "loses": ["area", "disrupt"]}}
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="attack",
                                target_attack_type="area",
                                player_enhanced=True)
        # Act
        _ = object_to_test.do_combat_round()
        # Assert target lost 200 HP instead of 100
        self.assertEqual(player.hit_points, 500)
        self.assertEqual(target.hit_points, 300)
        # Assert no status effects exist
        check_player_status = StatusEffects.objects.filter(character_id=player.pk)
        self.assertFalse(check_player_status.exists())
        check_target_status = StatusEffects.objects.filter(character_id=target.pk)
        self.assertFalse(check_target_status.exists())
        # Assert rules didn't change
        self.assertDictEqual(object_to_test.rules, expected_rules)
    def test_percent_hp_damage(self):
        """
        Check that an extra effect is added for the poison status
        and the damage is correct
        """
        # Arrange
        player = Character.objects.get(pk=4) # Chemist; Moira_IRL
        target = Character.objects.get(pk=2) # Cloistered; Crunchbucket
        # Baseline (unmodified) ruleset -- poison must not alter rules.
        expected_rules = {"area": {"beats": ["disrupt", "dodge"],
                                   "loses": ["attack", "block"]},
                          "attack": {"beats": ["disrupt", "area"],
                                     "loses": ["block", "dodge"]},
                          "block": {"beats": ["area", "attack"],
                                    "loses": ["disrupt", "dodge"]},
                          "disrupt": {"beats": ["block", "dodge"],
                                      "loses": ["attack", "area"]},
                          "dodge": {"beats": ["attack", "block"],
                                    "loses": ["area", "disrupt"]}}
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="attack",
                                target_attack_type="area",
                                player_enhanced=True)
        # Act
        # Inflict a status effect and check HP's
        _ = object_to_test.do_combat_round()
        self.assertEqual(player.hit_points, 500)
        self.assertEqual(target.hit_points, 400)
        # Check status effect applied to target
        check_status = StatusEffects.objects.get(character_id=target.pk)
        self.assertEqual(check_status.name, 'poison')
        self.assertEqual(check_status.duration, 2)
        # Attack a second time, player loses, make sure poison still applies
        object_to_test = Combat(player=player,
                                target=target,
                                player_attack_type="area",
                                target_attack_type="attack",
                                player_enhanced=False)
        _ = object_to_test.do_combat_round()
        # Check status effect decreased in duration by 1 and was not removed
        check_status = StatusEffects.objects.get(character_id=target.pk)
        self.assertEqual(check_status.name, 'poison')
        self.assertEqual(check_status.duration, 1)
        # Assert the player took 100 damage from losing round and target
        # took 50 damage from poison
        self.assertEqual(player.hit_points, 400)
        self.assertEqual(target.hit_points, 350)
        # Assert rules didn't change
        self.assertDictEqual(object_to_test.rules, expected_rules)
| 2.703125 | 3 |
python-cim/samples/hash_term.py | dnides/flare-wmi | 0 | 12763861 | <reponame>dnides/flare-wmi
import logging
from cim import CIM
from cim import Index
def main(type_, path, *s):
    """Print the repository hash for each given term.

    type_ selects the CIM mapping flavor ("xp" or "win7"), path points at the
    CIM repository, and every remaining argument is a term to hash.
    """
    if type_ != "xp" and type_ != "win7":
        raise RuntimeError("Invalid mapping type: {:s}".format(type_))
    repo = CIM(type_, path)
    index = Index(repo.cim_type, repo.logical_index_store)
    for term in s:
        # Terms are hashed from their UTF-16LE encoding, matching the index format.
        digest = index.hash(term.encode("utf-16le"))
        print("XX_%s\t%s" % (str(digest), str(term)))
if __name__ == "__main__":
    # CLI entry point: args are mapping type ("xp"|"win7"), repository path,
    # then one or more terms to hash.
    logging.basicConfig(level=logging.INFO)
    import sys
    main(*sys.argv[1:])
| 2.34375 | 2 |
src/category.py | tarun-bisht/security-camera | 1 | 12763862 | <filename>src/category.py
def read_label_pbtxt(label_path: str) -> dict:
    """Load class labels from a plain-text file, one label per line.

    Returns a mapping of zero-based row index to ``{"id": index, "name": label}``,
    with surrounding whitespace stripped from each label.
    """
    with open(label_path, "r") as handle:
        return {
            index: {"id": index, "name": line.strip()}
            for index, line in enumerate(handle.readlines())
        }
| 2.875 | 3 |
tencentcloud/bmvpc/v20180625/bmvpc_client.py | snowxmas/tencentcloud-sdk-python | 46 | 12763863 | <filename>tencentcloud/bmvpc/v20180625/bmvpc_client.py
# -*- coding: utf8 -*-
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.bmvpc.v20180625 import models
class BmvpcClient(AbstractClient):
    # Fixed API version and service endpoint for the BM VPC (Bare Metal
    # private network) product; consumed by AbstractClient.call().
    _apiVersion = '2018-06-25'
    _endpoint = 'bmvpc.tencentcloudapi.com'
def AcceptVpcPeerConnection(self, request):
"""接受黑石对等连接
:param request: Request instance for AcceptVpcPeerConnection.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.AcceptVpcPeerConnectionRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.AcceptVpcPeerConnectionResponse`
"""
try:
params = request._serialize()
body = self.call("AcceptVpcPeerConnection", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.AcceptVpcPeerConnectionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def AsyncRegisterIps(self, request):
"""批量注册虚拟IP,异步接口。通过接口来查询任务进度。每次请求最多注册256个IP
:param request: Request instance for AsyncRegisterIps.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.AsyncRegisterIpsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.AsyncRegisterIpsResponse`
"""
try:
params = request._serialize()
body = self.call("AsyncRegisterIps", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.AsyncRegisterIpsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def BindEipsToNatGateway(self, request):
"""NAT网关绑定EIP接口,可将EIP绑定到NAT网关,该EIP作为访问外网的源IP地址,将流量发送到Internet
:param request: Request instance for BindEipsToNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.BindEipsToNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.BindEipsToNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("BindEipsToNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.BindEipsToNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def BindIpsToNatGateway(self, request):
"""可用于将子网的部分IP绑定到NAT网关
:param request: Request instance for BindIpsToNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.BindIpsToNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.BindIpsToNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("BindIpsToNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.BindIpsToNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def BindSubnetsToNatGateway(self, request):
"""NAT网关绑定子网后,该子网内全部IP可出公网
:param request: Request instance for BindSubnetsToNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.BindSubnetsToNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.BindSubnetsToNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("BindSubnetsToNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.BindSubnetsToNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateCustomerGateway(self, request):
"""本接口(CreateCustomerGateway)用于创建对端网关。
:param request: Request instance for CreateCustomerGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateCustomerGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateCustomerGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("CreateCustomerGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateCustomerGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateDockerSubnetWithVlan(self, request):
"""创建黑石Docker子网, 如果不指定VlanId,将会分配2000--2999范围的VlanId; 子网会关闭分布式网关
:param request: Request instance for CreateDockerSubnetWithVlan.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateDockerSubnetWithVlanRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateDockerSubnetWithVlanResponse`
"""
try:
params = request._serialize()
body = self.call("CreateDockerSubnetWithVlan", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateDockerSubnetWithVlanResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateHostedInterface(self, request):
"""本接口(CreateHostedInterface)用于黑石托管机器加入带VLANID不为5的子网。
1) 不能加入vlanId 为5的子网,只能加入VLANID范围为2000-2999的子网。
2) 每台托管机器最多可以加入20个子网。
3) 每次调用最多能支持传入10台托管机器。
:param request: Request instance for CreateHostedInterface.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateHostedInterfaceRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateHostedInterfaceResponse`
"""
try:
params = request._serialize()
body = self.call("CreateHostedInterface", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateHostedInterfaceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateInterfaces(self, request):
"""物理机加入子网
:param request: Request instance for CreateInterfaces.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateInterfacesRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateInterfacesResponse`
"""
try:
params = request._serialize()
body = self.call("CreateInterfaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateInterfacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateNatGateway(self, request):
"""创建NAT网关接口,可针对网段方式、子网全部IP、子网部分IP这三种方式创建NAT网关
:param request: Request instance for CreateNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("CreateNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateRoutePolicies(self, request):
"""创建黑石路由表的路由规则
:param request: Request instance for CreateRoutePolicies.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateRoutePoliciesRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateRoutePoliciesResponse`
"""
try:
params = request._serialize()
body = self.call("CreateRoutePolicies", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateRoutePoliciesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateSubnet(self, request):
"""创建黑石私有网络的子网
访问管理: 用户可以对VpcId进行授权操作。例如设置资源为["qcs::bmvpc:::unVpc/vpc-xxxxx"]
:param request: Request instance for CreateSubnet.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateSubnetRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateSubnetResponse`
"""
try:
params = request._serialize()
body = self.call("CreateSubnet", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateSubnetResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateVirtualSubnetWithVlan(self, request):
"""创建黑石虚拟子网, 虚拟子网用于在黑石上创建虚拟网络,与黑石子网要做好规划。虚拟子网会分配2000-2999的VlanId。
:param request: Request instance for CreateVirtualSubnetWithVlan.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateVirtualSubnetWithVlanRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateVirtualSubnetWithVlanResponse`
"""
try:
params = request._serialize()
body = self.call("CreateVirtualSubnetWithVlan", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateVirtualSubnetWithVlanResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateVpc(self, request):
"""创建黑石私有网络
:param request: Request instance for CreateVpc.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateVpcRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateVpcResponse`
"""
try:
params = request._serialize()
body = self.call("CreateVpc", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateVpcResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateVpcPeerConnection(self, request):
"""创建对等连接
:param request: Request instance for CreateVpcPeerConnection.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.CreateVpcPeerConnectionRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.CreateVpcPeerConnectionResponse`
"""
try:
params = request._serialize()
body = self.call("CreateVpcPeerConnection", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateVpcPeerConnectionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteCustomerGateway(self, request):
"""本接口(DeleteCustomerGateway)用于删除对端网关。
:param request: Request instance for DeleteCustomerGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteCustomerGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteCustomerGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteCustomerGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteCustomerGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteHostedInterface(self, request):
"""本接口用于托管机器从VLANID不为5的子网中移除。
1) 不能从vlanId 为5的子网中移除。
2) 每次调用最多能支持传入10台物理机。
:param request: Request instance for DeleteHostedInterface.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteHostedInterfaceRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteHostedInterfaceResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteHostedInterface", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteHostedInterfaceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteHostedInterfaces(self, request):
"""托管机器移除子网批量接口,传入一台托管机器和多个子网,批量移除这些子网。异步接口,接口返回TaskId。
:param request: Request instance for DeleteHostedInterfaces.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteHostedInterfacesRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteHostedInterfacesResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteHostedInterfaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteHostedInterfacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteInterfaces(self, request):
"""物理机移除子网批量接口,传入一台物理机和多个子网,批量移除这些子网。异步接口,接口返回TaskId。
:param request: Request instance for DeleteInterfaces.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteInterfacesRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteInterfacesResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteInterfaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteInterfacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteNatGateway(self, request):
"""删除NAT网关
:param request: Request instance for DeleteNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteRoutePolicy(self, request):
"""删除黑石路由表路由规则
:param request: Request instance for DeleteRoutePolicy.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteRoutePolicyRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteRoutePolicyResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteRoutePolicy", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteRoutePolicyResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteSubnet(self, request):
"""本接口(DeleteSubnet)用于删除黑石私有网络子网。
删除子网前,请清理该子网下所有资源,包括物理机、负载均衡、黑石数据库、弹性IP、NAT网关等资源
:param request: Request instance for DeleteSubnet.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteSubnetRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteSubnetResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteSubnet", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteSubnetResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteVirtualIp(self, request):
"""退还虚拟IP。此接口只能退还虚拟IP,物理机IP不能退还。
:param request: Request instance for DeleteVirtualIp.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVirtualIpRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVirtualIpResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteVirtualIp", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteVirtualIpResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteVpc(self, request):
"""本接口(DeleteVpc)用于删除黑石私有网络(VPC)。
删除私有网络前,请清理该私有网络下所有资源,包括子网、负载均衡、弹性 IP、对等连接、NAT 网关、专线通道、SSLVPN 等资源。
:param request: Request instance for DeleteVpc.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpcRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpcResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteVpc", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteVpcResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteVpcPeerConnection(self, request):
"""删除黑石对等连接
:param request: Request instance for DeleteVpcPeerConnection.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpcPeerConnectionRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpcPeerConnectionResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteVpcPeerConnection", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteVpcPeerConnectionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteVpnConnection(self, request):
"""本接口(DeleteVpnConnection)用于删除VPN通道。
:param request: Request instance for DeleteVpnConnection.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpnConnectionRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpnConnectionResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteVpnConnection", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteVpnConnectionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteVpnGateway(self, request):
"""本接口(DeleteVpnGateway)用于删除VPN网关。
:param request: Request instance for DeleteVpnGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpnGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeleteVpnGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteVpnGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteVpnGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeregisterIps(self, request):
"""注销私有网络IP为空闲
:param request: Request instance for DeregisterIps.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DeregisterIpsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DeregisterIpsResponse`
"""
try:
params = request._serialize()
body = self.call("DeregisterIps", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeregisterIpsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeCustomerGateways(self, request):
"""本接口(DescribeCustomerGateways)用于查询对端网关列表。
:param request: Request instance for DescribeCustomerGateways.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeCustomerGatewaysRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeCustomerGatewaysResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeCustomerGateways", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeCustomerGatewaysResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeNatGateways(self, request):
"""获取NAT网关信息,包括NAT网关 ID、网关名称、私有网络、网关并发连接上限、绑定EIP列表等
:param request: Request instance for DescribeNatGateways.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeNatGatewaysRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeNatGatewaysResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeNatGateways", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeNatGatewaysResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeNatSubnets(self, request):
"""可获取NAT网关绑定的子网信息
:param request: Request instance for DescribeNatSubnets.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeNatSubnetsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeNatSubnetsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeNatSubnets", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeNatSubnetsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeRoutePolicies(self, request):
"""本接口(DescribeRoutePolicies)用于查询路由表条目。
:param request: Request instance for DescribeRoutePolicies.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeRoutePoliciesRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeRoutePoliciesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeRoutePolicies", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeRoutePoliciesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeRouteTables(self, request):
"""本接口(DescribeRouteTables)用于查询路由表。
:param request: Request instance for DescribeRouteTables.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeRouteTablesRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeRouteTablesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeRouteTables", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeRouteTablesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeSubnetAvailableIps(self, request):
"""获取子网内可用IP列表
:param request: Request instance for DescribeSubnetAvailableIps.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetAvailableIpsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetAvailableIpsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSubnetAvailableIps", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSubnetAvailableIpsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeSubnetByDevice(self, request):
"""物理机可以加入物理机子网,虚拟子网,DOCKER子网,通过此接口可以查询物理机加入的子网。
:param request: Request instance for DescribeSubnetByDevice.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetByDeviceRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetByDeviceResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSubnetByDevice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSubnetByDeviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeSubnetByHostedDevice(self, request):
"""托管可以加入物理机子网,虚拟子网,DOCKER子网,通过此接口可以查询托管加入的子网。
:param request: Request instance for DescribeSubnetByHostedDevice.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetByHostedDeviceRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetByHostedDeviceResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSubnetByHostedDevice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSubnetByHostedDeviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeSubnets(self, request):
"""本接口(DescribeSubnets)用于查询黑石子网列表。
:param request: Request instance for DescribeSubnets.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeSubnetsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSubnets", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSubnetsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeTaskStatus(self, request):
"""根据任务ID,获取任务的执行状态
:param request: Request instance for DescribeTaskStatus.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeTaskStatusRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeTaskStatusResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeTaskStatus", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeTaskStatusResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeVpcPeerConnections(self, request):
"""获取对等连接列表
:param request: Request instance for DescribeVpcPeerConnections.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcPeerConnectionsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcPeerConnectionsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeVpcPeerConnections", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeVpcPeerConnectionsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeVpcQuota(self, request):
"""本接口(DescribeVpcQuota)用于查询用户VPC相关配额限制。
:param request: Request instance for DescribeVpcQuota.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcQuotaRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcQuotaResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeVpcQuota", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeVpcQuotaResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeVpcResource(self, request):
"""查询黑石私有网络关联资源
:param request: Request instance for DescribeVpcResource.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcResourceRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcResourceResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeVpcResource", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeVpcResourceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeVpcView(self, request):
"""本接口(DescribeVpcView)用于查询VPC网络拓扑视图。
:param request: Request instance for DescribeVpcView.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcViewRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcViewResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeVpcView", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeVpcViewResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeVpcs(self, request):
"""本接口(DescribeVpcs)用于查询私有网络列表。
本接口不传参数时,返回默认排序下的前20条VPC信息。
:param request: Request instance for DescribeVpcs.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpcsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeVpcs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeVpcsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeVpnConnections(self, request):
"""本接口(DescribeVpnConnections)查询VPN通道列表。
:param request: Request instance for DescribeVpnConnections.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpnConnectionsRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpnConnectionsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeVpnConnections", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeVpnConnectionsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeVpnGateways(self, request):
"""本接口(DescribeVpnGateways)用于查询VPN网关列表。
:param request: Request instance for DescribeVpnGateways.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpnGatewaysRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DescribeVpnGatewaysResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeVpnGateways", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeVpnGatewaysResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DownloadCustomerGatewayConfiguration(self, request):
"""本接口(DownloadCustomerGatewayConfiguration)用于下载VPN通道配置。
:param request: Request instance for DownloadCustomerGatewayConfiguration.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.DownloadCustomerGatewayConfigurationRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.DownloadCustomerGatewayConfigurationResponse`
"""
try:
params = request._serialize()
body = self.call("DownloadCustomerGatewayConfiguration", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DownloadCustomerGatewayConfigurationResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyCustomerGatewayAttribute(self, request):
"""本接口(ModifyCustomerGatewayAttribute)用于修改对端网关信息。
:param request: Request instance for ModifyCustomerGatewayAttribute.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyCustomerGatewayAttributeRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyCustomerGatewayAttributeResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyCustomerGatewayAttribute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyCustomerGatewayAttributeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyRoutePolicy(self, request):
"""修改自定义路由
:param request: Request instance for ModifyRoutePolicy.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyRoutePolicyRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyRoutePolicyResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyRoutePolicy", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyRoutePolicyResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyRouteTable(self, request):
"""修改路由表
:param request: Request instance for ModifyRouteTable.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyRouteTableRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyRouteTableResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyRouteTable", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyRouteTableResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifySubnetAttribute(self, request):
"""修改子网属性
:param request: Request instance for ModifySubnetAttribute.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifySubnetAttributeRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifySubnetAttributeResponse`
"""
try:
params = request._serialize()
body = self.call("ModifySubnetAttribute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifySubnetAttributeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifySubnetDHCPRelay(self, request):
"""修改子网DHCP Relay属性
:param request: Request instance for ModifySubnetDHCPRelay.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifySubnetDHCPRelayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifySubnetDHCPRelayResponse`
"""
try:
params = request._serialize()
body = self.call("ModifySubnetDHCPRelay", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifySubnetDHCPRelayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyVpcAttribute(self, request):
"""本接口(ModifyVpcAttribute)用于修改VPC的标识名称和控制VPC的监控起停。
:param request: Request instance for ModifyVpcAttribute.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpcAttributeRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpcAttributeResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyVpcAttribute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyVpcAttributeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyVpcPeerConnection(self, request):
"""修改黑石对等连接
:param request: Request instance for ModifyVpcPeerConnection.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpcPeerConnectionRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpcPeerConnectionResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyVpcPeerConnection", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyVpcPeerConnectionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyVpnConnectionAttribute(self, request):
"""本接口(ModifyVpnConnectionAttribute)用于修改VPN通道。
:param request: Request instance for ModifyVpnConnectionAttribute.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnConnectionAttributeRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnConnectionAttributeResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyVpnConnectionAttribute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyVpnConnectionAttributeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyVpnGatewayAttribute(self, request):
"""本接口(ModifyVpnGatewayAttribute)用于修改VPN网关属性。
:param request: Request instance for ModifyVpnGatewayAttribute.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnGatewayAttributeRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnGatewayAttributeResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyVpnGatewayAttribute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyVpnGatewayAttributeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def RejectVpcPeerConnection(self, request):
"""拒绝黑石对等连接申请
:param request: Request instance for RejectVpcPeerConnection.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.RejectVpcPeerConnectionRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.RejectVpcPeerConnectionResponse`
"""
try:
params = request._serialize()
body = self.call("RejectVpcPeerConnection", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RejectVpcPeerConnectionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ResetVpnConnection(self, request):
"""本接口(ResetVpnConnection)用于重置VPN通道。
:param request: Request instance for ResetVpnConnection.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ResetVpnConnectionRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ResetVpnConnectionResponse`
"""
try:
params = request._serialize()
body = self.call("ResetVpnConnection", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ResetVpnConnectionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UnbindEipsFromNatGateway(self, request):
"""NAT网关解绑该EIP后,NAT网关将不会使用该EIP作为访问外网的源IP地址
:param request: Request instance for UnbindEipsFromNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.UnbindEipsFromNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.UnbindEipsFromNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("UnbindEipsFromNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UnbindEipsFromNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UnbindIpsFromNatGateway(self, request):
"""NAT网关解绑IP接口,可将子网的部分IP从NAT网关中解绑
:param request: Request instance for UnbindIpsFromNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.UnbindIpsFromNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.UnbindIpsFromNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("UnbindIpsFromNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UnbindIpsFromNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UnbindSubnetsFromNatGateway(self, request):
"""NAT网关解绑子网接口,可将子网解绑NAT网关
:param request: Request instance for UnbindSubnetsFromNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.UnbindSubnetsFromNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.UnbindSubnetsFromNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("UnbindSubnetsFromNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UnbindSubnetsFromNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpgradeNatGateway(self, request):
"""升级NAT网关接口,可NAT网关修改为小型NAT网关、中型NAT网关、以及大型NAT网关
:param request: Request instance for UpgradeNatGateway.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.UpgradeNatGatewayRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.UpgradeNatGatewayResponse`
"""
try:
params = request._serialize()
body = self.call("UpgradeNatGateway", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpgradeNatGatewayResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) | 2.15625 | 2 |
boating/tests.py | iago1460/pedal-boating | 0 | 12763864 | import datetime
from django.test import TestCase
from boating.choices import MONDAY, SATURDAY, SUNDAY
from boating.models import Booking, OpeningTimes, HirePoint, Boat
from boating.views import place_booking
class HirePointMixin(object):
    """Test mixin creating two hire points with opening times.

    ``hire_point1`` opens Mon-Fri 09:00-20:00; ``hire_point2`` opens
    Sat-Sun 07:00-23:00.
    """
    hire_point1 = None
    hire_point2 = None
    def setUp(self):
        hire_point1 = HirePoint.objects.create(name='HirePoint 1', description='Mon-Fri')
        hire_point2 = HirePoint.objects.create(name='HirePoint 2', description='Weekend')
        # Weekday opening hours; range() excludes SATURDAY, so this covers
        # Monday through Friday only.
        for day in range(MONDAY, SATURDAY):
            OpeningTimes.objects.create(
                hire_point=hire_point1, day=day, from_hour=datetime.time(hour=9), to_hour=datetime.time(hour=20)
            )
        # Weekend opening hours for the second hire point.
        for day in [SATURDAY, SUNDAY]:
            OpeningTimes.objects.create(
                hire_point=hire_point2, day=day, from_hour=datetime.time(hour=7), to_hour=datetime.time(hour=23)
            )
        self.hire_point1 = hire_point1
        self.hire_point2 = hire_point2
class HirePointTestCase(HirePointMixin, TestCase):
    """Checks opening/closing times and open-state queries for both hire points."""
    def test_opening_hours(self):
        # datetime.date(2016, 2, day) Monday is day one on February
        for day in range(MONDAY, SATURDAY):
            # hire_point 1
            self.assertEqual(
                self.hire_point1.get_start_time(datetime.date(2016, 2, day)),
                datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(9, 0, 0))
            )
            self.assertEqual(
                self.hire_point1.get_closing_time(datetime.date(2016, 2, day)),
                datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(20, 0, 0))
            )
            # hire_point 2 is closed on weekdays, so no times are returned.
            self.assertIsNone(self.hire_point2.get_start_time(datetime.date(2016, 2, day)))
            self.assertIsNone(self.hire_point2.get_closing_time(datetime.date(2016, 2, day)))
        for day in [SATURDAY, SUNDAY]:
            # hire_point 1
            self.assertIsNone(self.hire_point1.get_start_time(datetime.date(2016, 2, day)))
            self.assertIsNone(self.hire_point1.get_closing_time(datetime.date(2016, 2, day)))
            # hire_point 2
            self.assertEqual(
                self.hire_point2.get_start_time(datetime.date(2016, 2, day)),
                datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(7, 0, 0))
            )
            self.assertEqual(
                self.hire_point2.get_closing_time(datetime.date(2016, 2, day)),
                datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(23, 0, 0))
            )
    def test_is_open(self):
        # datetime.date(2016, 2, day) Monday is day one on February
        for day in range(MONDAY, SATURDAY):
            # hire_point 1: boundaries are inclusive (open exactly at 09:00
            # and at 20:00, closed one second outside either bound).
            self.assertFalse(
                self.hire_point1.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(8, 59, 59))
                )
            )
            self.assertTrue(
                self.hire_point1.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(9, 0, 0))
                )
            )
            self.assertTrue(
                self.hire_point1.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(20, 0, 0))
                )
            )
            self.assertFalse(
                self.hire_point1.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(20, 0, 1))
                )
            )
            # hire_point 2
            self.assertFalse(
                self.hire_point2.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(12, 0, 0))
                )
            )
        for day in [SATURDAY, SUNDAY]:
            # hire_point 1
            self.assertFalse(
                self.hire_point1.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(12, 0, 0))
                )
            )
            # hire_point 2
            self.assertFalse(
                self.hire_point2.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(6, 59, 59))
                )
            )
            self.assertTrue(
                self.hire_point2.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(7, 0, 0))
                )
            )
            self.assertTrue(
                self.hire_point2.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(23, 0, 0))
                )
            )
            self.assertFalse(
                self.hire_point2.is_open(
                    datetime.datetime.combine(datetime.date(2016, 2, day), datetime.time(23, 0, 1))
                )
            )
class BookingTestCase(HirePointMixin, TestCase):
    """Exercises boat availability and booking placement for hire_point1.

    Fixture: four boats (2, 4, 4 and 6 seats) plus two pre-existing one-hour
    bookings that each occupy the 2-seat boat (index 0) at 10:00-11:00 on
    Feb 1 and Feb 2, 2016.
    """
    boats_in_hire_point1 = None
    bookings_in_hire_point1 = None
    def setUp(self):
        super(BookingTestCase, self).setUp()
        boats_in_hire_point1 = []
        for seats in [2, 4, 4, 6]:
            boats_in_hire_point1.append(
                Boat.objects.create(hire_point=self.hire_point1, seats=seats)
            )
        self.boats_in_hire_point1 = boats_in_hire_point1
        bookings_in_hire_point1 = []
        booking1 = Booking.objects.create(
            name='Client1', number_of_people=1, hire_point=self.hire_point1,
            start_time=datetime.datetime(2016, 2, 1, 10, 0, 0), end_time=datetime.datetime(2016, 2, 1, 11, 0, 0)
        )
        booking1.boats.add(boats_in_hire_point1[0])
        bookings_in_hire_point1.append(booking1)
        booking2 = Booking.objects.create(
            name='Client2', number_of_people=1, hire_point=self.hire_point1,
            start_time=datetime.datetime(2016, 2, 2, 10, 0, 0), end_time=datetime.datetime(2016, 2, 2, 11, 0, 0)
        )
        booking2.boats.add(boats_in_hire_point1[0])
        bookings_in_hire_point1.append(booking2)
        self.bookings_in_hire_point1 = bookings_in_hire_point1
    def _check_boat(self, hire_point, people, start_time, end_time, assert_list):
        # Slide a 30-minute availability window in 15-minute steps across
        # [start_time, end_time) and assert the same boat list at every step.
        min_step = datetime.timedelta(minutes=15)
        time = start_time
        while time < end_time:
            boats_available = hire_point.is_available(people=people, start_time=time, duration=min_step * 2)
            time += min_step
            self.assertListEqual(boats_available, assert_list)
    def test_available_boats(self):
        hire_point = self.hire_point1
        for people in range(1, 3):
            # NOTE(review): start_time == end_time below, so _check_boat's
            # while loop never executes and nothing is asserted here —
            # likely the intended end time differs; confirm with the author.
            start_time = datetime.datetime(2016, 2, 1, 9, 45, 0)
            end_time = datetime.datetime(2016, 2, 1, 9, 45, 0)
            assert_list = [self.boats_in_hire_point1[1]]
            self._check_boat(hire_point, people, start_time, end_time, assert_list)
            start_time = datetime.datetime(2016, 2, 1, 11, 0, 0)
            end_time = datetime.datetime(2016, 2, 1, 20, 0, 0)
            assert_list = [self.boats_in_hire_point1[0]]
            self._check_boat(hire_point, people, start_time, end_time, assert_list)
        people = 5
        # NOTE(review): same degenerate start == end range here; see above.
        start_time = datetime.datetime(2016, 2, 1, 9, 45, 0)
        end_time = datetime.datetime(2016, 2, 1, 9, 45, 0)
        assert_list = [self.boats_in_hire_point1[1], self.boats_in_hire_point1[2]]
        self._check_boat(hire_point, people, start_time, end_time, assert_list)
        start_time = datetime.datetime(2016, 2, 1, 11, 0, 0)
        end_time = datetime.datetime(2016, 2, 1, 20, 0, 0)
        assert_list = [self.boats_in_hire_point1[0], self.boats_in_hire_point1[1]]
        self._check_boat(hire_point, people, start_time, end_time, assert_list)
    def test_availability(self):
        hire_point = self.hire_point1
        date = datetime.date(2016, 2, 1)
        duration = datetime.timedelta(minutes=30)
        people = 5
        slots, boats = hire_point.get_available_slots(date, people, duration)
        self.assertEqual(len(slots), len(boats))
        for index, slot in enumerate(slots):
            if slot <= datetime.datetime(2016, 2, 1, 9, 30, 0):
                self.assertListEqual(boats[index], [self.boats_in_hire_point1[0], self.boats_in_hire_point1[1]])
            elif slot < datetime.datetime(2016, 2, 1, 11, 0, 0):
                # Boat 0 is taken by the fixture booking during 10:00-11:00.
                self.assertListEqual(boats[index], [self.boats_in_hire_point1[1], self.boats_in_hire_point1[2]])
            else:
                self.assertListEqual(boats[index], [self.boats_in_hire_point1[0], self.boats_in_hire_point1[1]])
    def test_booking(self):
        hire_point = self.hire_point1
        start_time = datetime.datetime(2016, 2, 2, 9, 45, 0)
        duration = datetime.timedelta(minutes=30)
        people = 9
        name = '<NAME>'
        booking = place_booking(hire_point, name, start_time, duration, people)
        self.assertSequenceEqual(booking.boats.all(), self.boats_in_hire_point1[1:4])
        start_time = datetime.datetime(2016, 2, 2, 10, 45, 0)
        booking = place_booking(hire_point, name, start_time, duration, people)
        self.assertSequenceEqual(booking.boats.all(), self.boats_in_hire_point1[1:4])
        date = start_time.date()
        people = 3
        slots, boats = hire_point.get_available_slots(date, people, duration)
        self.assertEqual(len(slots), len(boats))
        for index, slot in enumerate(slots):
            if slot < datetime.datetime(2016, 2, 2, 9, 30, 0):
                self.assertListEqual(boats[index], [self.boats_in_hire_point1[0], self.boats_in_hire_point1[1]])
            elif slot < datetime.datetime(2016, 2, 2, 10, 15, 0):
                raise RuntimeError('Cannot be any schedule available')
            elif slot == datetime.datetime(2016, 2, 2, 10, 15, 0):
                self.assertListEqual(boats[index], [self.boats_in_hire_point1[1]])
            elif slot < datetime.datetime(2016, 2, 2, 11, 15, 0):
                raise RuntimeError('Cannot be any schedule available')
            else:
                self.assertListEqual(boats[index], [self.boats_in_hire_point1[0], self.boats_in_hire_point1[1]])
| 2.796875 | 3 |
paddlex_restful/restful/project/train/classification.py | chccc1994/PaddleX | 8 | 12763865 | <gh_stars>1-10
# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path as osp
from paddleslim import L1NormFilterPruner
def build_transforms(params):
    """Build the train and eval preprocessing pipelines for classification.

    All augmentation knobs (crop size, flip probabilities, color jitter
    ranges, normalization statistics) are read from ``params``.

    Args:
        params: training-parameter object carrying augmentation settings.

    Returns:
        tuple: ``(train_transforms, eval_transforms)``.
    """
    from paddlex import transforms as T
    crop_size = params.image_shape[0]
    color_jitter = T.RandomDistort(
        brightness_range=params.brightness_range,
        brightness_prob=params.brightness_prob,
        contrast_range=params.contrast_range,
        contrast_prob=params.contrast_prob,
        saturation_range=params.saturation_range,
        saturation_prob=params.saturation_prob,
        hue_range=params.hue_range,
        hue_prob=params.hue_prob)
    train_transforms = T.Compose([
        T.RandomCrop(
            crop_size=crop_size,
            scaling=[.88, 1.],
            aspect_ratio=[3. / 4, 4. / 3]),
        T.RandomHorizontalFlip(prob=params.horizontal_flip_prob),
        T.RandomVerticalFlip(prob=params.vertical_flip_prob),
        color_jitter,
        T.Normalize(mean=params.image_mean, std=params.image_std),
    ])
    eval_transforms = T.Compose([
        T.ResizeByShort(short_size=int(crop_size * 1.143)),
        T.CenterCrop(crop_size=crop_size),
        T.Normalize(mean=params.image_mean, std=params.image_std),
    ])
    return train_transforms, eval_transforms
def build_datasets(dataset_path, train_transforms, eval_transforms):
    """Create ImageNet-format train and eval datasets rooted at *dataset_path*.

    Expects ``train_list.txt``, ``val_list.txt`` and ``labels.txt`` inside
    the dataset directory.

    Returns:
        tuple: ``(train_dataset, eval_dataset)``.
    """
    import paddlex as pdx

    def in_dataset(filename):
        # All list files live directly under the dataset root.
        return osp.join(dataset_path, filename)

    train_dataset = pdx.datasets.ImageNet(
        data_dir=dataset_path,
        file_list=in_dataset('train_list.txt'),
        label_list=in_dataset('labels.txt'),
        transforms=train_transforms,
        shuffle=True)
    eval_dataset = pdx.datasets.ImageNet(
        data_dir=dataset_path,
        file_list=in_dataset('val_list.txt'),
        label_list=in_dataset('labels.txt'),
        transforms=eval_transforms)
    return train_dataset, eval_dataset
def build_optimizer(parameters, step_each_epoch, params):
    """Build a Momentum optimizer with the LR schedule named by ``params.lr_policy``.

    Supported policies: ``'Cosine'``, ``'Linear'`` and ``'Piecewise'``; any
    other value leaves the learning rate constant.

    Args:
        parameters: model parameters to optimize.
        step_each_epoch: number of optimizer steps per epoch.
        params: training-parameter object (learning_rate, num_epochs,
            lr_policy and, for 'Piecewise', lr_decay_epochs).

    Returns:
        paddle.optimizer.Momentum: the configured optimizer.
    """
    import paddle
    from paddle.regularizer import L2Decay
    base_lr = params.learning_rate
    total_steps = step_each_epoch * params.num_epochs
    lr_schedule = base_lr
    if params.lr_policy == 'Cosine':
        lr_schedule = paddle.optimizer.lr.CosineAnnealingDecay(
            learning_rate=base_lr, T_max=total_steps)
    elif params.lr_policy == 'Linear':
        lr_schedule = paddle.optimizer.lr.PolynomialDecay(
            learning_rate=base_lr,
            decay_steps=total_steps,
            end_lr=0.0,
            power=1.0)
    elif params.lr_policy == 'Piecewise':
        gamma = 0.1
        boundaries = [step_each_epoch * epoch for epoch in params.lr_decay_epochs]
        # One LR value per segment: len(boundaries) + 1 values in total.
        values = [
            base_lr * (gamma**segment)
            for segment in range(len(params.lr_decay_epochs) + 1)
        ]
        lr_schedule = paddle.optimizer.lr.PiecewiseDecay(
            boundaries=boundaries, values=values)
    return paddle.optimizer.Momentum(
        learning_rate=lr_schedule,
        momentum=.9,
        weight_decay=L2Decay(1e-04),
        parameters=parameters)
def train(task_path, dataset_path, params):
    """Train an image classifier, optionally pruning it first.

    Builds transforms/datasets from *dataset_path*, instantiates the model
    named by ``params.model``, optionally applies L1-norm filter pruning when
    ``params.sensitivities_path`` is set, then runs ``model.train`` writing
    checkpoints under ``<task_path>/output``.
    """
    import paddlex as pdx
    pdx.log_level = 3
    train_transforms, eval_transforms = build_transforms(params)
    train_dataset, eval_dataset = build_datasets(
        dataset_path=dataset_path,
        train_transforms=train_transforms,
        eval_transforms=eval_transforms)
    step_each_epoch = train_dataset.num_samples // params.batch_size
    save_interval_epochs = params.save_interval_epochs
    save_dir = osp.join(task_path, 'output')
    pretrain_weights = params.pretrain_weights
    # When pretrain_weights is a local directory, point at the checkpoint
    # file inside it rather than the directory itself.
    if pretrain_weights is not None and osp.exists(pretrain_weights):
        pretrain_weights = osp.join(pretrain_weights, 'model.pdparams')
    classifier = getattr(pdx.cls, params.model)
    sensitivities_path = params.sensitivities_path
    pruned_flops = params.pruned_flops
    model = classifier(num_classes=len(train_dataset.labels))
    if sensitivities_path is not None:
        # Pruning path: weights must be loaded BEFORE pruning, and
        # pretrain_weights is cleared so model.train does not reload them
        # over the pruned network.
        # load weights
        model.net_initialize(pretrain_weights=pretrain_weights)
        pretrain_weights = None
        # prune
        # NOTE(review): assumes eval samples are HWC with 3 channels —
        # inputs is built as [batch=1, C=3, H, W]; confirm for other inputs.
        inputs = [1, 3] + list(eval_dataset[0]['image'].shape[:2])
        model.pruner = L1NormFilterPruner(
            model.net, inputs=inputs, sen_file=sensitivities_path)
        model.prune(pruned_flops=pruned_flops)
    optimizer = build_optimizer(model.net.parameters(), step_each_epoch,
                                params)
    model.train(
        num_epochs=params.num_epochs,
        train_dataset=train_dataset,
        train_batch_size=params.batch_size,
        eval_dataset=eval_dataset,
        save_interval_epochs=save_interval_epochs,
        log_interval_steps=2,
        save_dir=save_dir,
        pretrain_weights=pretrain_weights,
        optimizer=optimizer,
        use_vdl=True,
        resume_checkpoint=params.resume_checkpoint)
| 1.71875 | 2 |
vampytest/core/result/__init__.py | HuyaneMatsu/vampytest | 1 | 12763866 | <filename>vampytest/core/result/__init__.py
from .failures import *
from .result import *
from .result_group import *
# The star-imports above also bind the submodule names (`failures`, `result`,
# `result_group`) on this package, so they are usable here to re-export the
# combined public API.
__all__ = (
    *failures.__all__,
    *result.__all__,
    *result_group.__all__,
)
| 1.046875 | 1 |
tests/spells/test_length_of.py | awesome-archive/geomancer | 216 | 12763867 | # -*- coding: utf-8 -*-
# Import modules
import pytest
from google.cloud import bigquery
from tests.spells.base_test_spell import BaseTestSpell, SpellDB
# Import from package
from geomancer.backend.settings import SQLiteConfig
from geomancer.spells import LengthOf
# Fixture parameters: one LengthOf spell measuring residential-road length
# within 50 units, evaluated against a local SQLite OSM extract.
params = [
    SpellDB(
        spell=LengthOf(
            on="residential",
            within=50,
            source_table="gis_osm_roads_free_1",
            feature_name="len_residential",
            options=SQLiteConfig(),
        ),
        dburl="sqlite:///tests/data/source.sqlite",
    )
]
@pytest.mark.slow
class TestLengthOf(BaseTestSpell):
    """Runs the shared BaseTestSpell suite against each LengthOf configuration."""
    @pytest.fixture(params=params, ids=["roads-sqlite"])
    def spelldb(self, request):
        return request.param
| 2.28125 | 2 |
test/test_pwm_setup.py | mrtnschltr/CHIP_IO | 295 | 12763868 | <reponame>mrtnschltr/CHIP_IO<filename>test/test_pwm_setup.py
import pytest
import os
import time
import CHIP_IO.PWM as PWM
import CHIP_IO.OverlayManager as OM
import CHIP_IO.Utilities as UT
def setup_module(module):
    """Load the PWM0 overlay once per module, except on CHIP Pro hardware."""
    if not UT.is_chip_pro():
        OM.load("PWM0")
def teardown_module(module):
    """Release PWM resources and unload the overlay loaded by setup_module."""
    PWM.cleanup()
    if not UT.is_chip_pro():
        OM.unload("PWM0")
class TestPwmSetup:
    """Tests for CHIP_IO.PWM start/duty-cycle/frequency validation on PWM0.

    Fix: ``test_pwm_freq_non_setup_key`` was defined twice, so the second
    definition silently shadowed the first and the RuntimeError case never
    ran. The second test (invalid key "P9_15") is renamed to
    ``test_pwm_freq_invalid_key``, matching the duty-cycle test pair naming.
    """
    def setup_method(self, test_method):
        # Small delay between tests so sysfs PWM state settles.
        time.sleep(0.5)
    def test_start_pwm(self):
        PWM.start("PWM0", 0)
        pwm_test = '/sys/class/pwm/pwmchip0/pwm0/'
        assert os.path.exists(pwm_test) == True
        duty = open(pwm_test + 'duty_cycle').readline().strip()
        period = open(pwm_test + 'period').readline().strip()
        assert int(duty) == 0
        assert int(period) == 500000
    def test_start_pwm_with_polarity_one(self):
        PWM.cleanup()
        PWM.start("PWM0", 0, 2000, 1)
        pwm_test = '/sys/class/pwm/pwmchip0/pwm0/'
        duty = open(pwm_test + 'duty_cycle').readline().strip()
        period = open(pwm_test + 'period').readline().strip()
        polarity = open(pwm_test + 'polarity').readline().strip()
        assert int(duty) == 0
        assert int(period) == 500000
        assert str(polarity) == "inversed"
    def test_start_pwm_with_polarity_default(self):
        PWM.cleanup()
        PWM.start("PWM0", 0, 2000, 0)
        pwm_test = '/sys/class/pwm/pwmchip0/pwm0/'
        duty = open(pwm_test + 'duty_cycle').readline().strip()
        period = open(pwm_test + 'period').readline().strip()
        polarity = open(pwm_test + 'polarity').readline().strip()
        assert int(duty) == 0
        assert int(period) == 500000
        assert str(polarity) == "normal"
    def test_start_pwm_with_polarity_zero(self):
        PWM.cleanup()
        PWM.start("PWM0", 0, 2000, 0)
        pwm_test = '/sys/class/pwm/pwmchip0/pwm0/'
        duty = open(pwm_test + 'duty_cycle').readline().strip()
        period = open(pwm_test + 'period').readline().strip()
        polarity = open(pwm_test + 'polarity').readline().strip()
        assert int(duty) == 0
        assert int(period) == 500000
        assert str(polarity) == "normal"
    def test_pwm_start_invalid_pwm_key(self):
        with pytest.raises(ValueError):
            PWM.start("P8_25", -1)
    def test_pwm_start_invalid_duty_cycle_negative(self):
        with pytest.raises(ValueError):
            PWM.start("PWM0", -1)
    def test_pwm_start_valid_duty_cycle_min(self):
        #testing an exception isn't thrown
        PWM.cleanup()
        PWM.start("PWM0", 0)
        PWM.cleanup()
    def test_pwm_start_valid_duty_cycle_max(self):
        #testing an exception isn't thrown
        PWM.start("PWM0", 100)
        PWM.cleanup()
    def test_pwm_start_invalid_duty_cycle_high(self):
        with pytest.raises(ValueError):
            PWM.start("PWM0", 101)
    def test_pwm_start_invalid_duty_cycle_string(self):
        with pytest.raises(TypeError):
            PWM.start("PWM0", "1")
    def test_pwm_start_invalid_frequency_negative(self):
        with pytest.raises(ValueError):
            PWM.start("PWM0", 0, -1)
    def test_pwm_start_invalid_frequency_string(self):
        with pytest.raises(TypeError):
            PWM.start("PWM0", 0, "1")
    def test_pwm_start_negative_polarity(self):
        with pytest.raises(ValueError):
            PWM.start("PWM0", 0, 100, -1)
    def test_pwm_start_invalid_positive_polarity(self):
        with pytest.raises(ValueError):
            PWM.start("PWM0", 0, 100, 2)
    def test_pwm_start_invalid_polarity_type(self):
        with pytest.raises(TypeError):
            PWM.start("PWM0", 0, 100, "1")
    @pytest.mark.xfail(reason="pwm cleanup is doing weirdness for this test")
    def test_pwm_duty_modified(self):
        PWM.start("PWM0", 0)
        pwm_test = '/sys/class/pwm/pwmchip0/pwm0/'
        assert os.path.exists(pwm_test) == True
        duty = open(pwm_test + 'duty_cycle').readline().strip()
        period = open(pwm_test + 'period').readline().strip()
        assert int(duty) == 0
        assert int(period) == 500000
        PWM.set_duty_cycle("PWM0", 100)
        duty = open(pwm_test + 'duty_cycle').readline().strip()
        period = open(pwm_test + 'period').readline().strip()
        assert int(duty) == 500000
        assert int(period) == 500000
    def test_pwm_duty_cycle_non_setup_key(self):
        with pytest.raises(ValueError):
            PWM.cleanup()
            PWM.set_duty_cycle("PWM0", 100)
    def test_pwm_duty_cycle_invalid_key(self):
        with pytest.raises(ValueError):
            PWM.set_duty_cycle("P9_15", 100)
    def test_pwm_duty_cycle_invalid_value_high(self):
        PWM.start("PWM0", 0)
        with pytest.raises(ValueError):
            PWM.set_duty_cycle("PWM0", 101)
        PWM.cleanup()
    def test_pwm_duty_cycle_invalid_value_negative(self):
        PWM.start("PWM0", 0)
        with pytest.raises(ValueError):
            PWM.set_duty_cycle("PWM0", -1)
        PWM.cleanup()
    def test_pwm_duty_cycle_invalid_value_string(self):
        PWM.start("PWM0", 0)
        with pytest.raises(TypeError):
            PWM.set_duty_cycle("PWM0", "a")
        PWM.cleanup()
    def test_pwm_frequency_invalid_value_negative(self):
        PWM.start("PWM0", 0)
        with pytest.raises(ValueError):
            PWM.set_frequency("PWM0", -1)
        PWM.cleanup()
    def test_pwm_frequency_invalid_value_string(self):
        PWM.start("PWM0", 0)
        with pytest.raises(TypeError):
            PWM.set_frequency("PWM0", "11")
        PWM.cleanup()
    def test_pwm_freq_non_setup_key(self):
        with pytest.raises(RuntimeError):
            PWM.set_frequency("PWM0", 100)
    def test_pwm_freq_invalid_key(self):
        # Renamed from a duplicate test_pwm_freq_non_setup_key so both
        # the non-setup and invalid-key cases actually run.
        with pytest.raises(ValueError):
            PWM.set_frequency("P9_15", 100)
| 2.296875 | 2 |
yatube/posts/migrations/0011_auto_20211118_2131.py | LHLHLHE/yatube_project | 0 | 12763869 | # Generated by Django 2.2.16 on 2021-11-18 18:31
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated migration: replaces Comment.created with an indexed
    # pub_date field (mirroring Post), orders comments newest-first, and
    # adds a db index to Post.pub_date.
    dependencies = [
        ('posts', '0010_comment'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='comment',
            options={'ordering': ('-pub_date',), 'verbose_name': 'Комментарий', 'verbose_name_plural': 'Комментарии'},
        ),
        migrations.RemoveField(
            model_name='comment',
            name='created',
        ),
        migrations.AddField(
            model_name='comment',
            name='pub_date',
            # The default is only used to backfill existing rows during the
            # migration; preserve_default=False drops it afterwards.
            field=models.DateTimeField(auto_now_add=True, db_index=True, default=django.utils.timezone.now, verbose_name='Дата публикации'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='post',
            name='pub_date',
            field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='Дата публикации'),
        ),
    ]
| 1.632813 | 2 |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_group_cfg.py | Maikor/ydk-py | 0 | 12763870 | """ Cisco_IOS_XR_group_cfg
This module contains IOS\-XR group YANG data
for flexible cli groups
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Groups(Entity):
    """
    config groups

    .. attribute:: group

        Group config definition

        **type**\: list of :py:class:`Group <ydk.models.cisco_ios_xr.Cisco_IOS_XR_group_cfg.Groups.Group>`
    """
    # NOTE: generated by ydk-gen from the Cisco-IOS-XR-group-cfg YANG model;
    # do not edit the metadata wiring below by hand.
    _prefix = 'group-cfg'
    _revision = '2016-04-29'
    def __init__(self):
        super(Groups, self).__init__()
        self._top_entity = None
        self.yang_name = "groups"
        self.yang_parent_name = "Cisco-IOS-XR-group-cfg"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("group", ("group", Groups.Group))])
        self._leafs = OrderedDict()
        self.group = YList(self)
        self._segment_path = lambda: "Cisco-IOS-XR-group-cfg:groups"
        self._is_frozen = True
    def __setattr__(self, name, value):
        self._perform_setattr(Groups, [], name, value)
    class Group(Entity):
        """
        Group config definition

        .. attribute:: group_name  (key)

            Group name

            **type**\: str

            **length:** 0..32
        """
        _prefix = 'group-cfg'
        _revision = '2016-04-29'
        def __init__(self):
            super(Groups.Group, self).__init__()
            self.yang_name = "group"
            self.yang_parent_name = "groups"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = ['group_name']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('group_name', (YLeaf(YType.str, 'group-name'), ['str'])),
            ])
            self.group_name = None
            # Keyed list entry: the XPath segment embeds the group-name key.
            self._segment_path = lambda: "group" + "[group-name='" + str(self.group_name) + "']"
            self._absolute_path = lambda: "Cisco-IOS-XR-group-cfg:groups/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(Groups.Group, ['group_name'], name, value)
    def clone_ptr(self):
        self._top_entity = Groups()
        return self._top_entity
class ApplyGroups(Entity):
    """
    apply groups

    .. attribute:: apply_group

        apply\-group name

        **type**\: str

        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
    """
    # NOTE: generated by ydk-gen from the Cisco-IOS-XR-group-cfg YANG model;
    # do not edit the metadata wiring below by hand.
    _prefix = 'group-cfg'
    _revision = '2016-04-29'
    def __init__(self):
        super(ApplyGroups, self).__init__()
        self._top_entity = None
        self.yang_name = "apply-groups"
        self.yang_parent_name = "Cisco-IOS-XR-group-cfg"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('apply_group', (YLeaf(YType.str, 'apply-group'), ['str'])),
        ])
        self.apply_group = None
        self._segment_path = lambda: "Cisco-IOS-XR-group-cfg:apply-groups"
        self._is_frozen = True
    def __setattr__(self, name, value):
        self._perform_setattr(ApplyGroups, ['apply_group'], name, value)
    def clone_ptr(self):
        self._top_entity = ApplyGroups()
        return self._top_entity
| 1.929688 | 2 |
suites/API/HistoryApi/GetRelativeAccountHistory.py | echoprotocol/pytests | 1 | 12763871 | <reponame>echoprotocol/pytests
# -*- coding: utf-8 -*-
from common.base_test import BaseTest
import lemoncheesecake.api as lcc
from lemoncheesecake.matching import (
check_that, equal_to, has_length, is_, is_list, is_str, require_that, require_that_in
)
# Suite metadata consumed by the lemoncheesecake runner.
SUITE = {
    "description": "Method 'get_relative_account_history'"
}


@lcc.prop("main", "type")
@lcc.prop("positive", "type")
@lcc.prop("negative", "type")
@lcc.tags("api", "history_api", "get_relative_account_history")
@lcc.suite("Check work of method 'get_relative_account_history'", rank=1)
class GetRelativeAccountHistory(BaseTest):
    """Smoke check for the History API method 'get_relative_account_history'."""

    def __init__(self):
        super().__init__()
        # API identifiers are resolved in setup_suite().
        self.__database_api_identifier = None
        self.__registration_api_identifier = None
        self.__history_api_identifier = None
        self.echo_acc0 = None

    def setup_suite(self):
        """Resolve API identifiers and the default test account."""
        super().setup_suite()
        lcc.set_step("Setup for {}".format(self.__class__.__name__))
        self.__database_api_identifier = self.get_identifier("database")
        self.__registration_api_identifier = self.get_identifier("registration")
        self.__history_api_identifier = self.get_identifier("history")
        lcc.log_info(
            "API identifiers are: database='{}', registration='{}', "
            "history='{}'".format(
                self.__database_api_identifier, self.__registration_api_identifier, self.__history_api_identifier
            )
        )
        self.echo_acc0 = self.get_account_id(
            self.accounts[0], self.__database_api_identifier, self.__registration_api_identifier
        )
        lcc.log_info("Echo account is '{}'".format(self.echo_acc0))

    @lcc.test("Simple work of method 'get_relative_account_history'")
    def method_main_check(self):
        """Request a single history entry and validate its structure."""
        stop, start = 0, 0
        limit = 1
        lcc.set_step("Get relative account history")
        params = [self.echo_acc0, stop, limit, start]
        response_id = self.send_request(
            self.get_request("get_relative_account_history", params), self.__history_api_identifier
        )
        response = self.get_response(response_id)
        lcc.log_info(
            "Call method 'get_relative_account_history' with: account='{}', stop='{}', limit='{}', start='{}' "
            "parameters".format(self.echo_acc0, stop, limit, start)
        )
        lcc.set_step("Check response from method 'get_relative_account_history'")
        results = response["result"]
        # Exactly `limit` entries are expected back.
        check_that("'number of history results'", results, has_length(limit))
        for result in results:
            self.object_validator.validate_operation_history_object(self, result)
@lcc.prop("positive", "type")
@lcc.tags("api", "history_api", "get_relative_account_history")
@lcc.suite("Positive testing of method 'get_relative_account_history'", rank=2)
class PositiveTesting(BaseTest):
def __init__(self):
super().__init__()
self.__database_api_identifier = None
self.__registration_api_identifier = None
self.__history_api_identifier = None
self.echo_acc0 = None
self.echo_acc1 = None
def get_relative_account_history(self, account, stop, limit, start, negative=False):
lcc.log_info("Get relative '{}' account history".format(account))
params = [account, stop, limit, start]
response_id = self.send_request(
self.get_request("get_relative_account_history", params), self.__history_api_identifier
)
return self.get_response(response_id, negative=negative)
def setup_suite(self):
super().setup_suite()
self._connect_to_echopy_lib()
lcc.set_step("Setup for {}".format(self.__class__.__name__))
self.__database_api_identifier = self.get_identifier("database")
self.__registration_api_identifier = self.get_identifier("registration")
self.__history_api_identifier = self.get_identifier("history")
lcc.log_info(
"API identifiers are: database='{}', registration='{}', "
"history='{}'".format(
self.__database_api_identifier, self.__registration_api_identifier, self.__history_api_identifier
)
)
self.echo_acc0 = self.get_account_id(
self.accounts[0], self.__database_api_identifier, self.__registration_api_identifier
)
self.echo_acc1 = self.get_account_id(
self.accounts[1], self.__database_api_identifier, self.__registration_api_identifier
)
lcc.log_info("Echo accounts are: #1='{}', #2='{}'".format(self.echo_acc0, self.echo_acc1))
def teardown_suite(self):
self._disconnect_to_echopy_lib()
super().teardown_suite()
@lcc.test("Check new account history")
@lcc.depends_on("API.HistoryApi.GetRelativeAccountHistory.GetRelativeAccountHistory.method_main_check")
def new_account_history(self, get_random_valid_account_name):
new_account = get_random_valid_account_name
stop, start = 0, 0
limit = 100
lcc.set_step("Create and get new account")
new_account = self.get_account_id(
new_account, self.__database_api_identifier, self.__registration_api_identifier
)
lcc.log_info("New Echo account created, account_id='{}'".format(new_account))
lcc.set_step("Get new account history")
response = self.get_relative_account_history(new_account, stop, limit, start)
lcc.set_step("Check new account history")
expected_number_of_operations = 1
require_that("'new account history'", response["result"], has_length(expected_number_of_operations))
check_that(
"'id single operation'", response["result"][0]["op"][0], is_(self.echo.config.operation_ids.ACCOUNT_CREATE)
)
@lcc.test("Check limit number of operations to retrieve")
@lcc.depends_on("API.HistoryApi.GetRelativeAccountHistory.GetRelativeAccountHistory.method_main_check")
def limit_operations_to_retrieve(self, get_random_valid_account_name, get_random_integer_up_to_hundred):
new_account = get_random_valid_account_name
stop, start = 0, 0
min_limit = 1
max_limit = 100
default_account_create_operation = 1
operation_count = get_random_integer_up_to_hundred
lcc.set_step("Create and get new account")
new_account = self.get_account_id(
new_account, self.__database_api_identifier, self.__registration_api_identifier
)
lcc.log_info("New Echo account created, account_id='{}'".format(new_account))
lcc.set_step("Perform operations using a new account. Operation count equal to limit")
self.utils.perform_transfer_operations(
self,
new_account,
self.echo_acc0,
self.__database_api_identifier,
operation_count=operation_count,
only_in_history=True
)
lcc.log_info("Fill account history with '{}' number of transfer operations".format(operation_count))
lcc.set_step("Check that count of new account history with the maximum limit is equal to operation_count")
response = self.get_relative_account_history(new_account, stop, max_limit, start)
check_that(
"'number of history results'", response["result"],
has_length(operation_count + default_account_create_operation)
)
lcc.set_step("Check minimum list length account history")
response = self.get_relative_account_history(new_account, stop, min_limit, start)
check_that("'number of history results'", response["result"], has_length(min_limit))
lcc.set_step("Perform operations using a new account to create max_limit operations")
operation_count = max_limit - operation_count - default_account_create_operation
self.utils.perform_transfer_operations(
self,
new_account,
self.echo_acc0,
self.__database_api_identifier,
operation_count=operation_count,
only_in_history=True
)
lcc.log_info("Fill account history with '{}' number of transfer operations".format(operation_count))
lcc.set_step("Check that count of new account history with the limit = max_limit is equal to max_limit")
response = self.get_relative_account_history(new_account, stop, max_limit, start)
check_that("'number of history results'", response["result"], has_length(max_limit))
@lcc.test("Check stop and start IDs of the operations in account history")
@lcc.depends_on("API.HistoryApi.GetRelativeAccountHistory.GetRelativeAccountHistory.method_main_check")
def stop_and_start_operations(self, get_random_integer, get_random_integer_up_to_hundred):
transfer_amount_1 = get_random_integer
transfer_amount_2 = get_random_integer_up_to_hundred
stop = 0
start = 0
operations = []
operation_ids = []
lcc.set_step("Perform one operation")
broadcast_result = self.utils.perform_transfer_operations(
self,
self.echo_acc0,
self.echo_acc1,
self.__database_api_identifier,
transfer_amount=transfer_amount_1,
only_in_history=True
)
operations.append(broadcast_result["trx"]["operations"][0])
lcc.log_info("Fill account history with '{}' number of transfer operations".format(len(operations)))
limit = len(operations)
lcc.set_step("Get account history. Limit: '{}'".format(limit))
response = self.get_relative_account_history(self.echo_acc0, stop, limit, start)
lcc.set_step("Check account history to see added operation and store operation id")
require_that("'account history'", response["result"][0]["op"], is_list(operations[0]))
lcc.set_step("Perform another operations")
broadcast_result = self.utils.perform_transfer_operations(
self,
self.echo_acc0,
self.echo_acc1,
self.__database_api_identifier,
transfer_amount=transfer_amount_2,
only_in_history=True
)
operations.append(broadcast_result["trx"]["operations"][0])
lcc.log_info("Fill account history with '{}' number of transfer operations".format(len(operations)))
limit = len(operations)
stop = 1
lcc.set_step("Get account history. Stop: '{}', limit: '{}'".format(stop, limit))
response = self.get_relative_account_history(self.echo_acc0, stop, limit, start)
lcc.set_step("Check account history to see added operations and store operation ids")
operations.reverse()
for i in range(limit):
require_that("'account history'", response["result"][i]["op"], is_list(operations[i]))
operation_ids.append(response["result"][i]["id"])
start = 10000
stop = 0
lcc.set_step("Get account history. Stop: '{}', limit: '{}' and start: '{}'".format(stop, limit, start))
response = self.get_relative_account_history(self.echo_acc0, stop, limit, start)
lcc.set_step("Check account history to see operations from the selected ids interval")
for i in range(limit):
lcc.log_info("Check operation #{}:".format(i))
require_that_in(response["result"][i], ["id"], is_str(operation_ids[i]), ["op"], is_list(operations[i]))
@lcc.prop("negative", "type")
@lcc.tags("api", "history_api", "get_relative_account_history")
@lcc.suite("Negative testing of method 'get_relative_account_history'", rank=3)
class NegativeTesting(BaseTest):
def __init__(self):
super().__init__()
self.__database_api_identifier = None
self.echo_acc0 = None
def setup_suite(self):
super().setup_suite()
self._connect_to_echopy_lib()
lcc.set_step("Setup for {}".format(self.__class__.__name__))
self.__database_api_identifier = self.get_identifier("database")
self.__registration_api_identifier = self.get_identifier("registration")
self.__history_api_identifier = self.get_identifier("history")
lcc.log_info(
"API identifiers are: database='{}', registration='{}', "
"history='{}'".format(
self.__database_api_identifier, self.__registration_api_identifier, self.__history_api_identifier
)
)
self.echo_acc0 = self.get_account_id(
self.accounts[0], self.__database_api_identifier, self.__registration_api_identifier
)
def teardown_suite(self):
self._disconnect_to_echopy_lib()
super().teardown_suite()
@lcc.test("Check negative int value in get_relative_account_history")
@lcc.depends_on("API.HistoryApi.GetRelativeAccountHistory.GetRelativeAccountHistory.method_main_check")
def check_negative_int_value_in_get_relative_account_history(self):
error_message = "Assert Exception: result >= 0: Invalid cast from negative number to unsigned"
stop, start = 0, 0
limit = -1
lcc.set_step("Get 'get_relative_account_history' with negative limit")
params = [self.echo_acc0, stop, limit, start]
response_id = self.send_request(
self.get_request("get_relative_account_history", params), self.__history_api_identifier
)
message = self.get_response(response_id, negative=True)["error"]["message"]
check_that("error_message", message, equal_to(error_message), quiet=True)
stop, start = -1, 0
limit = 1
lcc.set_step("Get 'get_relative_account_history' with negative stop")
params = [self.echo_acc0, stop, limit, start]
response_id = self.send_request(
self.get_request("get_relative_account_history", params), self.__history_api_identifier
)
message = self.get_response(response_id, negative=True)["error"]["message"]
check_that("error_message", message, equal_to(error_message), quiet=True)
stop, start = 0, -1
limit = 1
lcc.set_step("Get 'get_relative_account_history' with negative start")
params = [self.echo_acc0, stop, limit, start]
response_id = self.send_request(
self.get_request("get_relative_account_history", params), self.__history_api_identifier
)
message = self.get_response(response_id, negative=True)["error"]["message"]
check_that("error_message", message, equal_to(error_message), quiet=True)
| 2.25 | 2 |
practice/practice_1.3/test_search.py | Electro98/aads | 7 | 12763872 | """Тесты для модуля search"""
import unittest
import search # pylint: disable=E0401
# Test tables for search.search().  Each case is a tuple of
# (string, sub_string, case_sensitivity, method, count, expected) where
# `expected` is a tuple of match indices, None when there is no match, or —
# for multi-substring queries — a dict mapping each substring to its indices.
TEST_SEARCH_ONE_SYMBOL = [
    ('', 'a', False, 'first', 1, None),
    ('', 'a', True, 'first', 1, None),
    ('', 'a', False, 'last', 1, None),
    ('', 'a', True, 'last', 1, None),
    ('a', 'a', False, 'first', 1, (0, )),
    ('a', 'a', True, 'first', 1, (0, )),
    ('a', 'a', False, 'last', 1, (0, )),
    ('a', 'a', True, 'last', 1, (0, )),
    ('aaa', 'a', False, 'first', 1, (0, 1, 2)),
    ('aaa', 'a', True, 'first', 1, (0, 1, 2)),
    ('aaa', 'a', False, 'last', 1, (2, 1, 0)),
    ('aaa', 'a', True, 'last', 1, (2, 1, 0)),
    ('bca', 'c', False, 'first', 1, (1, )),
    ('bca', 'c', True, 'first', 1, (1, )),
    ('bca', 'c', False, 'last', 1, (1, )),
    ('bca', 'c', True, 'last', 1, (1, )),
]

# Multi-character substrings, including case-(in)sensitive matches.
TEST_SEARCH_MANY_SYMBOL = [
    ('', 'abc', False, 'first', 1, None),
    ('', 'abc', True, 'first', 1, None),
    ('', 'abc', False, 'last', 1, None),
    ('', 'abc', True, 'last', 1, None),
    ('a', 'abc', False, 'first', 1, None),
    ('a', 'abc', True, 'first', 1, None),
    ('a', 'abc', False, 'last', 1, None),
    ('a', 'abc', True, 'last', 1, None),
    ('abc', 'abc', False, 'first', 1, (0, )),
    ('abc', 'abc', True, 'first', 1, (0, )),
    ('abc', 'abc', False, 'last', 1, (0, )),
    ('abc', 'abc', True, 'last', 1, (0, )),
    ('abcabc', 'abc', False, 'first', 1, (0, 3)),
    ('abcabc', 'abc', True, 'first', 1, (0, 3)),
    ('abcabc', 'abc', False, 'last', 1, (3, 0)),
    ('abcabc', 'abc', True, 'last', 1, (3, 0)),
    ('aabcbccaabc', 'abc', False, 'first', 1, (1, 8)),
    ('aabcbccaabc', 'abc', True, 'first', 1, (1, 8)),
    ('aAbCbccaabc', 'AbC', False, 'first', 1, (1, 8)),
    ('aabcbccaAbC', 'AbC', True, 'first', 1, (8, )),
    ('aabcbccaabc', 'abc', False, 'last', 1, (8, 1)),
    ('aabcbccaabc', 'abc', True, 'last', 1, (8, 1)),
    ('aAbCbccaabc', 'AbC', False, 'last', 1, (8, 1)),
    ('aabcbccaAbC', 'AbC', True, 'last', 1, (8, )),
]

# Several substrings searched at once; expected value is a per-substring dict.
TEST_SEARCH_FEW_SUBSTR = [
    ('', ('abc', 'a'), False, 'first', 1, None),
    ('', ('abc', 'a'), True, 'first', 1, None),
    ('', ('abc', 'a'), False, 'last', 1, None),
    ('', ('abc', 'a'), True, 'last', 1, None),
    ('a', ('abc', 'a'), False, 'first', 1, {'abc': None, 'a': (0, )}),
    ('a', ('abc', 'a'), True, 'first', 1, {'abc': None, 'a': (0, )}),
    ('a', ('abc', 'a'), False, 'last', 1, {'abc': None, 'a': (0, )}),
    ('a', ('abc', 'a'), True, 'last', 1, {'abc': None, 'a': (0, )}),
    ('ababbababa', ('aba', 'bba'), False, 'first', 1, {'aba': (0, 5, 7), 'bba': (3, )}),
    ('ababbababa', ('aba', 'bba'), True, 'first', 1, {'aba': (0, 5, 7), 'bba': (3, )}),
    ('ababbababa', ('aba', 'bba'), False, 'last', 1, {'aba': (7, 5, 0), 'bba': (3, )}),
    ('ababbababa', ('aba', 'bba'), True, 'last', 1, {'aba': (7, 5, 0), 'bba': (3, )}),
]
class TestSearch(unittest.TestCase):
    """Test case for the ``search`` module."""

    def _check_cases(self, cases):
        """Run ``search.search`` over a table of cases.

        Each case is a tuple of
        (string, sub_string, case_sensitivity, method, count, expected).
        Using a shared helper removes the loop that was copy-pasted into
        every test method; subTest now also reports the failing inputs.
        """
        for string, sub_string, case_sensitivity, method, count, expected in cases:
            with self.subTest(string=string, sub_string=sub_string,
                              case_sensitivity=case_sensitivity, method=method):
                self.assertEqual(
                    search.search(
                        string, sub_string, case_sensitivity, method, count
                    ),
                    expected
                )

    def test_binary_search_one_symbol(self):
        """search() with a single-character substring."""
        self._check_cases(TEST_SEARCH_ONE_SYMBOL)

    def test_binary_search_many_symbol(self):
        """search() with a multi-character substring."""
        self._check_cases(TEST_SEARCH_MANY_SYMBOL)

    def test_binary_search_few_substr(self):
        """search() with several substrings at once."""
        self._check_cases(TEST_SEARCH_FEW_SUBSTR)
| 2.65625 | 3 |
setup.py | jcaxmacher/git-remote-cvm | 0 | 12763873 | <reponame>jcaxmacher/git-remote-cvm<gh_stars>0
#!/usr/bin/env python
import os
import setuptools
__version__ = '1.15'
def read(fname):
    """Return the contents of *fname*, resolved relative to this file.

    Uses a context manager so the file handle is closed deterministically
    (the original left it open until garbage collection).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as handle:
        return handle.read()
# Package metadata; installs `git-remote-cvm` as a console script so git can
# discover it as a remote helper for the "cvm::" URL prefix.
setuptools.setup(
    name = 'git-remote-cvm',
    packages = ['git_remote_cvm'],
    version = __version__,
    description = 'Git remote prefix to simplify pushing to and pulling from CodeCommit using the CVM.',
    long_description = read('README.rst'),
    author = 'Amazon Web Services and <NAME>',
    url = 'https://github.com/jcaxmacher/git-remote-cvm',
    license = 'Apache License 2.0',
    install_requires = ['botocore >= 1.10.4'],
    entry_points = {
        'console_scripts': [
            'git-remote-cvm = git_remote_cvm:main',
        ],
    },
    classifiers = [
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Utilities',
        'License :: OSI Approved :: Apache Software License',
    ],
)
| 1.429688 | 1 |
robotidy/transformers/ext_SplitDocumentationLines.py | josflorap/robotframework-tidy | 0 | 12763874 | from robot.api.parsing import ModelTransformer, Token
class ext_SplitDocumentationLines(ModelTransformer):
    """Robotidy transformer that wraps over-long documentation lines.

    Lines whose total token length exceeds ``line_length`` are split at the
    last space that fits and the remainder is moved onto a continuation
    ("...") line.
    """

    def __init__(self, line_length: int = 200):
        # Maximum allowed rendered length of a documentation line.
        self.line_length = line_length
        # True while inside the suite-level "Documentation" setting; the
        # continuation marker is indented differently for test/keyword-level
        # "[Documentation]" lines.
        self.inSettings = False

    def visit_Documentation(self, node):
        """Split each over-long documentation line of *node* in place."""
        # Detect whether this node is the suite setting or a [Documentation]
        # entry inside a test/keyword.
        for token in node.tokens:
            if token.value == 'Documentation':
                self.inSettings = True
            elif token.value == '[Documentation]':
                self.inSettings = False
        size = len(node.tokens)
        # Indices just past every EOL token, i.e. the line boundaries.
        idx_list = [idx + 1 for idx, val in
                    enumerate(node.tokens) if val.type == Token.EOL]
        # Partition the token stream into per-line chunks.
        # NOTE(review): idx_list[-1] raises IndexError when the node has no
        # EOL tokens at all — confirm documentation nodes always end in EOL.
        res = [node.tokens[i: j] for i, j in
               zip([0] + idx_list, idx_list +
                   ([size] if idx_list[-1] != size else []))]
        for k in res:
            # Rendered length of this line = sum of its token values.
            length = 0
            for l in k:
                length += len(l.value)
            if length > self.line_length:
                # assumes exactly one ARGUMENT token per line — TODO confirm.
                argument = [t.value for t in k if t.type == Token.ARGUMENT]
                # Longest prefix of the argument that keeps the line within
                # line_length, then backtrack to the last whole word.
                string_with_max_chars = argument[0][:(len(argument) - length + self.line_length - 1)].rstrip()
                string_to_remain = argument[0][:string_with_max_chars.rfind(' ')]
                string_to_jump = argument[0][string_with_max_chars.rfind(' '):].lstrip()
                for t in k:
                    # Skip option-like arguments starting with '-'.
                    if t.type == Token.ARGUMENT and not self.inSettings and t.value[0] != '-':
                        t.value = string_to_remain + '\n' + ' ... ' + string_to_jump
                    elif t.type == Token.ARGUMENT and self.inSettings and t.value[0] != '-':
                        t.value = string_to_remain + '\n' + '... ' + string_to_jump
        return node
useroptions/apps.py | VisheshJain112/predective-ai | 0 | 12763875 | from django.apps import AppConfig
class UseroptionsConfig(AppConfig):
    """Django application configuration for the ``useroptions`` app."""

    # Dotted module path used by Django's app registry.
    name = 'useroptions'
tests/py/test_fake_data.py | kant/gratipay.com | 517 | 12763876 | from __future__ import print_function, unicode_literals
from gratipay import fake_data
from gratipay.testing import Harness
from gratipay.cli.fake_data import main
class TestFakeData(Harness):
    """Tests for the fake-data generator CLI and helpers."""

    def test_fake_data_cli(self):
        """main() inserts the requested number of rows into each table."""
        num_participants = 6
        num_tips = 25
        num_teams = 5
        num_packages = 5
        main(self.db, num_participants, num_tips, num_teams, num_packages)
        participants = self.db.all("SELECT * FROM participants")
        teams = self.db.all("SELECT * FROM teams")
        packages = self.db.all("SELECT * FROM packages")
        payment_instructions = self.db.all("SELECT * FROM payment_instructions")
        assert len(participants) == num_participants
        assert len(teams) == num_teams + 1  # +1 for the fake Gratipay team.
        assert len(packages) == num_packages
        assert len(payment_instructions) == num_tips

    def test_fake_participant_identity(self):
        """fake_participant_identity() records exactly one identity country."""
        crusher = self.make_participant('crusher', email_address='<EMAIL>')
        country_id = fake_data.fake_participant_identity(crusher)
        assert [x.country.id for x in crusher.list_identity_metadata()] == [country_id]

    def test_fake_team_doesnt_fail_for_name_with_apostrophe(self):
        """Team slugs must not collapse apostrophes into hyphens."""
        crusher = self.make_participant('crusher', email_address='<EMAIL>')
        team = fake_data.fake_team(self.db, crusher, "D'Amorebury")
        assert team.name != "d-amorebury"
| 2.328125 | 2 |
droos_bot/__init__.py | yshalsager/droos_bot | 5 | 12763877 | <gh_stars>1-10
""" Bot initialization """
import json
import logging
from logging.handlers import TimedRotatingFileHandler
from pathlib import Path
from sys import stderr, stdout
from telegram import ParseMode
from telegram.ext import Defaults, Dispatcher, PicklePersistence, Updater
from droos_bot.gsheet.spreadsheet import Spreadsheet
# Filesystem layout: the package directory and the repository root.
WORK_DIR = Path(__package__)
PARENT_DIR = WORK_DIR.parent

# Bot configuration, read from config.json in the repository root.
CONFIG = json.loads((PARENT_DIR / "config.json").read_text(encoding="utf-8"))
BOT_TOKEN = CONFIG["tg_bot_token"]
TG_BOT_ADMINS = CONFIG["tg_bot_admins"]

# Logging: daily-rotating file (3 backups) plus stdout (INFO+) and
# stderr (WARNING+) stream handlers sharing one format.
LOG_FILE = PARENT_DIR / "last_run.log"
LOG_FORMAT = "%(asctime)s [%(levelname)s] %(name)s [%(module)s.%(funcName)s:%(lineno)d]: %(message)s"
FORMATTER: logging.Formatter = logging.Formatter(LOG_FORMAT)
handler = TimedRotatingFileHandler(LOG_FILE, when="d", interval=1, backupCount=3)
logging.basicConfig(filename=str(LOG_FILE), filemode="w", format=LOG_FORMAT)
OUT = logging.StreamHandler(stdout)
ERR = logging.StreamHandler(stderr)
OUT.setFormatter(FORMATTER)
ERR.setFormatter(FORMATTER)
OUT.setLevel(logging.INFO)
ERR.setLevel(logging.WARNING)
LOGGER = logging.getLogger()
LOGGER.addHandler(OUT)
LOGGER.addHandler(ERR)
LOGGER.addHandler(handler)
LOGGER.setLevel(logging.INFO)

# Telegram bot wiring: pickled persistent state, Markdown V2 defaults,
# async handler execution.
persistence = PicklePersistence(filename=f"{PARENT_DIR}/bot.pickle")
defaults = Defaults(
    parse_mode=ParseMode.MARKDOWN_V2, run_async=True, disable_web_page_preview=True
)
updater: Updater = Updater(
    BOT_TOKEN, persistence=persistence, use_context=True, defaults=defaults
)
dispatcher: Dispatcher = updater.dispatcher  # type: ignore

# Google Sheets data source backing the bot's content.
sheet = Spreadsheet(
    f"{PARENT_DIR}/service_account.json", CONFIG["sheet_id"], CONFIG["sheet_name"]
)
| 2.09375 | 2 |
apis_server/serializers/project_serializer.py | ilyde-platform/ilyde-apis | 0 | 12763878 | # coding: utf-8
#
# Copyright (c) 2020-2021 Hopenly srl.
#
# This file is part of Ilyde.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from apis_server.serializers.base_model_ import Model
from apis_server import util
class ProjectSerializer(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.

    Represents an Ilyde project: name, description, visibility, template,
    lifecycle state, ownership/membership and timestamps.
    """

    def __init__(self, name=None, description=None, visibility=None, template=None, id=None, state=None, owner=None, members=None, repo_bucket=None, create_at=None, last_update=None):  # noqa: E501
        """ProjectSerializer - a model defined in OpenAPI

        :param name: project name (required; setter rejects None)
        :type name: str
        :param description: project description (required; setter rejects None)
        :type description: str
        :param visibility: one of PRIVATE / PUBLIC
        :type visibility: str
        :param template: one of GENERIC
        :type template: str
        :param id: server-assigned identifier
        :type id: str
        :param state: one of OPEN / CLOSED
        :type state: str
        :param owner: user id of the project owner
        :type owner: str
        :param members: user ids with access to the project
        :type members: List[str]
        :param repo_bucket: storage bucket backing the project repository
        :type repo_bucket: str
        :param create_at: creation timestamp (string-encoded)
        :type create_at: str
        :param last_update: last-update timestamp (string-encoded)
        :type last_update: str
        """
        # Type map and wire-name map consumed by the generic (de)serializer.
        self.openapi_types = {
            'name': str,
            'description': str,
            'visibility': str,
            'template': str,
            'id': str,
            'state': str,
            'owner': str,
            'members': List[str],
            'repo_bucket': str,
            'create_at': str,
            'last_update': str
        }

        self.attribute_map = {
            'name': 'name',
            'description': 'description',
            'visibility': 'visibility',
            'template': 'template',
            'id': 'id',
            'state': 'state',
            'owner': 'owner',
            'members': 'members',
            'repo_bucket': 'repo_bucket',
            'create_at': 'create_at',
            'last_update': 'last_update'
        }

        # NOTE: assignments go straight to the private backing fields, so the
        # constructor deliberately bypasses setter validation (this matches
        # the original generated behaviour).
        self._name = name
        self._description = description
        self._visibility = visibility
        self._template = template
        self._id = id
        self._state = state
        self._owner = owner
        self._members = members
        self._repo_bucket = repo_bucket
        self._create_at = create_at
        self._last_update = last_update

    @classmethod
    def from_dict(cls, dikt):
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The Project of this ProjectSerializer.  # noqa: E501
        :rtype: ProjectSerializer
        """
        return util.deserialize_model(dikt, cls)

    # --- validation helpers (deduplicate the generated per-setter checks) ---

    @staticmethod
    def _required(field, value):
        """Return *value*, raising ValueError when it is None.

        Error message is identical to the generated per-setter check.
        """
        if value is None:
            raise ValueError("Invalid value for `{0}`, must not be `None`".format(field))  # noqa: E501
        return value

    @staticmethod
    def _one_of(field, value, allowed_values):
        """Return *value*, raising ValueError when it is not an allowed value.

        Error message is identical to the generated per-setter check.
        """
        if value not in allowed_values:
            raise ValueError(
                "Invalid value for `{0}` ({1}), must be one of {2}"
                .format(field, value, allowed_values)
            )
        return value

    @property
    def name(self):
        """str: the project name (required)."""
        return self._name

    @name.setter
    def name(self, name):
        self._name = self._required('name', name)

    @property
    def description(self):
        """str: the project description (required)."""
        return self._description

    @description.setter
    def description(self, description):
        self._description = self._required('description', description)

    @property
    def visibility(self):
        """str: project visibility, one of PRIVATE / PUBLIC."""
        return self._visibility

    @visibility.setter
    def visibility(self, visibility):
        self._visibility = self._one_of('visibility', visibility, ["PRIVATE", "PUBLIC"])

    @property
    def template(self):
        """str: project template, currently only GENERIC."""
        return self._template

    @template.setter
    def template(self, template):
        self._template = self._one_of('template', template, ["GENERIC"])

    @property
    def id(self):
        """str: server-assigned project identifier."""
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def state(self):
        """str: lifecycle state, one of OPEN / CLOSED."""
        return self._state

    @state.setter
    def state(self, state):
        self._state = self._one_of('state', state, ["OPEN", "CLOSED"])

    @property
    def owner(self):
        """str: user id of the project owner."""
        return self._owner

    @owner.setter
    def owner(self, owner):
        self._owner = owner

    @property
    def members(self):
        """List[str]: user ids that are members of the project."""
        return self._members

    @members.setter
    def members(self, members):
        self._members = members

    @property
    def repo_bucket(self):
        """str: storage bucket backing the project repository."""
        return self._repo_bucket

    @repo_bucket.setter
    def repo_bucket(self, repo_bucket):
        self._repo_bucket = repo_bucket

    @property
    def create_at(self):
        """str: creation timestamp."""
        return self._create_at

    @create_at.setter
    def create_at(self, create_at):
        self._create_at = create_at

    @property
    def last_update(self):
        """str: last-update timestamp."""
        return self._last_update

    @last_update.setter
    def last_update(self, last_update):
        self._last_update = last_update
programCO2.py | DanielSalazar1/Assignments | 0 | 12763879 | <gh_stars>0
co2 = input("Please input air quality value: ")
co2 = int(co2)
if co2 > 399 and co2 < 698:
print("Excelent")
elif co2 > 699 and co2 < 898:
print("Good")
elif co2 > 899 and co2 < 1098:
print("Fair")
elif co2 > 1099 and co2 < 1598:
print("Mediocre, contaminated indoor air")
elif co2 > 1599 and co2 < 2101:
print("Bad, heavily contaminated indoor air")
| 3.640625 | 4 |
mp4.py | avmmodules/download_videos_yt | 0 | 12763880 | <filename>mp4.py
'''
Description:
    Download YouTube videos with python.
Author: AlejandroV
Version: 1.0
Video: https://youtu.be/gDiSwTOHcXk
'''
from pytube import YouTube

# Your YouTube video URL — fill this in before running (an empty string
# will make the YouTube() constructor fail).
url = ""

yt = YouTube(url)
# Pick the highest-resolution progressive (combined video+audio) mp4 stream
# and download it into the current working directory.
yt.streams.filter(progressive=True, file_extension='mp4').order_by('resolution').desc().first().download()
examples/flask_simple/flask_simple.py | Mjolnir-Software/dynoscale-python | 0 | 12763881 | import logging
import os
import sys
import colorlog
from flask import Flask
from dynoscale.wsgi import DynoscaleWsgiApp
# Configure logging: colored, timestamped output on stdout; the dynoscale
# logger is raised to DEBUG so its activity is visible in the demo.
handler = colorlog.StreamHandler(stream=sys.stdout)
handler.setFormatter(
    colorlog.ColoredFormatter(
        fmt="%(asctime)s.%(msecs)03d %(log_color)s%(levelname)-8s%(reset)s %(processName)s %(threadName)10s"
        " %(name)s: %(message)s",
        datefmt="%H:%M:%S",
    )
)
logging.getLogger("").handlers = [handler]
logging.getLogger("dynoscale").setLevel(logging.DEBUG)

app = Flask(__name__)
app.logger.setLevel(logging.DEBUG)


@app.route("/")
def index():
    """Serve the demo landing page."""
    app.logger.info('################### index requested')
    return "<h1 style='color:blue'>Hello from Flask!</h1>"


# THE LINE BELOW IS ALL YOU NEED TO USE DYNOSCALE
dynoscale_app = DynoscaleWsgiApp(app.wsgi_app)
# YUP, WE KNOW, CAN'T GET MUCH SIMPLER THAN THAT :)

if __name__ == "__main__":
    # Pass "wrap" on the command line to route requests through Dynoscale
    # when running the development server directly.
    if 'wrap' in sys.argv:
        app.wsgi_app = dynoscale_app
    port = os.getenv('PORT', 3000)
    app.run(host='0.0.0.0', port=port, debug=True)
| 2.390625 | 2 |
tests/test_db.py | ctsit/metab_import | 0 | 12763882 | <filename>tests/test_db.py<gh_stars>0
import sqlite3
import unittest
from m3c import db
class TestDb(unittest.TestCase):
    """Tests for m3c.db person lookup against an in-memory SQLite database."""

    def setUp(self):
        """Create a throwaway database seeded with one name row."""
        self.conn = sqlite3.connect(":memory:")
        self.conn.executescript("""
            CREATE TABLE names (
                person_id INT,
                first_name TEXT,
                last_name TEXT,
                withheld BOOLEAN
            );
            INSERT INTO names VALUES (7, "James", "Bond", 0);
        """)

    def tearDown(self):
        self.conn.close()

    def test_case_insensitive_name_matching(self):
        """get_person() must match names regardless of letter case."""
        cursor = self.conn.cursor()
        actual = list(db.get_person(cursor, "james", "bond", False))
        expected = [7]
        self.assertListEqual(expected, actual)


if __name__ == "__main__":
    unittest.main()
| 3.203125 | 3 |
saveModule.py | bison--/Minecraft | 0 | 12763883 | <gh_stars>0
import main # we need the blocktypes from the main program
import json
import os
from time import gmtime, strftime
class saveModule(object):
    """Persist the block world to disk and restore it again.

    Block texture tuples (from the ``main`` module) are translated to and
    from readable words so the save file stays human-readable.
    """

    def __init__(self):
        # "translate" the block texture tuples into readable words for saving
        self.coordDictSave = { str(main.GRASS):'GRASS', str(main.SAND):'SAND', str(main.BRICK):'BRICK', str(main.STONE):'STONE' }
        # "translate" the words back into tuples for loading
        self.coordDictLoad = { 'GRASS':main.GRASS, 'SAND':main.SAND, 'BRICK':main.BRICK, 'STONE':main.STONE }
        self.saveGameFile = 'savegame.sav'

    def printStuff(self, txt):
        """Print txt prefixed with a UTC timestamp."""
        print(strftime("%d-%m-%Y %H:%M:%S|", gmtime()) + str(txt))

    def hasSaveGame(self):
        """Return True if a save file exists on disk."""
        # os.path.exists already yields a bool; no if/else needed.
        return os.path.exists(self.saveGameFile)

    def loadWorld(self, model):
        """Read the save file and re-add every stored block to ``model``."""
        self.printStuff('start loading...')
        # Context manager guarantees the file handle is closed on errors too.
        with open(self.saveGameFile, 'r') as fh:
            worldMod = fh.read()
        for blockLine in worldMod.split('\n'):
            # skip the trailing empty entry produced by the final newline
            if blockLine != '':
                coords, blockType = blockLine.split('=>')
                # json only yields lists, but block coords must be tuples;
                # translate the readable word back into the texture coords
                model.add_block(tuple(json.loads(coords)), self.coordDictLoad[blockType], False)
        self.printStuff('loading completed')

    def saveWorld(self, model):
        """Serialize every block of ``model`` into the save file."""
        self.printStuff('start saving...')
        # Collect the lines and join once: repeated string += is quadratic.
        lines = []
        for block in model.world:
            # coords as json; block type via the translation dictionary
            lines.append(json.dumps(block) + '=>' + self.coordDictSave[str(model.world[block])] + '\n')
        with open(self.saveGameFile, 'w') as fh:
            fh.write(''.join(lines))
        self.printStuff('saving completed')
| 2.96875 | 3 |
django/db/models/query_utils.py | egenerat/gae-django | 3 | 12763884 | """
Various data structures used in query construction.
Factored out from django.db.models.query to avoid making the main module very
large and/or so that they can be used by other modules without getting into
circular import difficulties.
"""
import weakref
from django.utils.copycompat import deepcopy
from django.db.backends import util
from django.utils import tree
from django.utils.datastructures import SortedDict
class InvalidQuery(Exception):
    """Raised when the query passed to raw isn't a safe query to use with raw."""
    pass
class QueryWrapper(object):
    """Opaque SQL fragment plus its parameters.

    Lets callers pass raw ``(sql, params)`` data into places such as a
    where-clause without any further processing.
    """

    def __init__(self, sql, params):
        self.data = (sql, params)

    def as_sql(self, qn=None, connection=None):
        # Already stored in (sql, params) form; return it unchanged.
        return self.data
class Q(tree.Node):
    """
    Encapsulates filters as objects that can then be combined logically (using
    & and |).
    """
    # Connection types
    AND = 'AND'
    OR = 'OR'
    default = AND

    def __init__(self, *args, **kwargs):
        # list(...) around kwargs.items() keeps this working on Python 3,
        # where dict.items() returns a view that cannot be concatenated
        # with a list directly (it also still works on Python 2).
        super(Q, self).__init__(children=list(args) + list(kwargs.items()))

    def _combine(self, other, conn):
        """Return a new Q joining self and other with the given connector."""
        if not isinstance(other, Q):
            raise TypeError(other)
        obj = type(self)()
        obj.add(self, conn)
        obj.add(other, conn)
        return obj

    def __or__(self, other):
        return self._combine(other, self.OR)

    def __and__(self, other):
        return self._combine(other, self.AND)

    def __invert__(self):
        """Return a negated copy of this Q."""
        obj = type(self)()
        obj.add(self, self.AND)
        obj.negate()
        return obj
class DeferredAttribute(object):
    """
    A wrapper for a deferred-loading field. When the value is read from this
    object the first time, the query is executed.
    """
    def __init__(self, field_name, model):
        self.field_name = field_name
        # Weak reference so the descriptor does not keep the model class alive.
        self.model_ref = weakref.ref(model)
        self.loaded = False
    def __get__(self, instance, owner):
        """
        Retrieves and caches the value from the datastore on the first lookup.
        Returns the cached value.
        """
        from django.db.models.fields import FieldDoesNotExist
        assert instance is not None
        cls = self.model_ref()
        data = instance.__dict__
        # Sentinel default `self` means "not loaded yet": any real stored
        # value (including None) under field_name short-circuits the fetch.
        if data.get(self.field_name, self) is self:
            # self.field_name is the attname of the field, but only() takes the
            # actual name, so we need to translate it here.
            try:
                cls._meta.get_field_by_name(self.field_name)
                name = self.field_name
            except FieldDoesNotExist:
                name = [f.name for f in cls._meta.fields
                        if f.attname == self.field_name][0]
            # We use only() instead of values() here because we want the
            # various data coersion methods (to_python(), etc.) to be called
            # here.
            val = getattr(
                cls._base_manager.filter(pk=instance.pk).only(name).using(
                    instance._state.db).get(),
                self.field_name
            )
            data[self.field_name] = val
        return data[self.field_name]
    def __set__(self, instance, value):
        """
        Deferred loading attributes can be set normally (which means there will
        never be a database lookup involved.
        """
        instance.__dict__[self.field_name] = value
def select_related_descend(field, restricted, requested, reverse=False):
    """
    Decide whether select_related() should descend through ``field``.

    Used by both the query construction code
    (sql.query.fill_related_selections()) and the model instance creation code
    (query.get_cached_row()).

    Arguments:
     * field - the field to be checked
     * restricted - a boolean field, indicating if the field list has been
       manually restricted using a requested clause)
     * requested - The select_related() dictionary.
     * reverse - boolean, True if we are checking a reverse select related
    """
    # Only relational fields can be descended, and a forward parent link
    # is never followed.
    if not field.rel:
        return False
    if field.rel.parent_link and not reverse:
        return False
    if restricted:
        # With an explicit select_related() dict the field must be listed:
        # by its related_query_name for reverse relations, by name otherwise.
        wanted = field.related_query_name() if reverse else field.name
        if wanted not in requested:
            return False
    elif field.null:
        # Unrestricted descent skips nullable relations.
        return False
    return True
# This function is needed because data descriptors must be defined on a class
# object, not an instance, to have any effect.
def deferred_class_factory(model, attrs):
    """
    Returns a class object that is a copy of "model" with the specified "attrs"
    being replaced with DeferredAttribute objects. The "pk_value" ties the
    deferred attributes to a particular instance of the model.
    """
    class Meta:
        # The generated class behaves as a proxy of the original model.
        proxy = True
        app_label = model._meta.app_label
    # The app_cache wants a unique name for each model, otherwise the new class
    # won't be created (we get an old one back). Therefore, we generate the
    # name using the passed in attrs. It's OK to reuse an existing class
    # object if the attrs are identical.
    name = "%s_Deferred_%s" % (model.__name__, '_'.join(sorted(list(attrs))))
    # Keep the generated name within database identifier length limits.
    name = util.truncate_name(name, 80, 32)
    overrides = dict([(attr, DeferredAttribute(attr, model))
                      for attr in attrs])
    overrides["Meta"] = Meta
    overrides["__module__"] = model.__module__
    overrides["_deferred"] = True
    return type(name, (model,), overrides)
# The above function is also used to unpickle model instances with deferred
# fields.
deferred_class_factory.__safe_for_unpickling__ = True
| 2.796875 | 3 |
neural_mmo/forge/ethyr/torch/policy/__init__.py | fangqyi/sandbox-society | 1 | 12763885 | <reponame>fangqyi/sandbox-society<filename>neural_mmo/forge/ethyr/torch/policy/__init__.py
from neural_mmo.forge.ethyr.torch.policy.attention import Attention, SelfAttention
from neural_mmo.forge.ethyr.torch.policy.recurrent import BatchFirstLSTM
from neural_mmo.forge.ethyr.torch.policy.embed import BiasedInput, MixedDTypeInput
from neural_mmo.forge.ethyr.torch.policy.embed import Input
| 1.1875 | 1 |
python/fledge/services/core/api/certificate_store.py | DDC-NDRS/fledge-iot_fledge | 69 | 12763886 | # -*- coding: utf-8 -*-
# FLEDGE_BEGIN
# See: http://fledge-iot.readthedocs.io/
# FLEDGE_END
import os
import json
from aiohttp import web
from fledge.common import logger
from fledge.common.web.middleware import has_permission
from fledge.services.core import connect
from fledge.common.configuration_manager import ConfigurationManager
from fledge.common.common import _FLEDGE_ROOT, _FLEDGE_DATA
__author__ = "<NAME>"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
_help = """
-------------------------------------------------------------------------------
| GET POST | /fledge/certificate |
| DELETE | /fledge/certificate/{name} |
-------------------------------------------------------------------------------
"""
FORBIDDEN_MSG = 'Resource you were trying to reach is absolutely forbidden for some reason'
_logger = logger.setup(__name__)
async def get_certs(request):
    """ Get the list of certs

    :Example:
        curl -X GET http://localhost:8081/fledge/certificate
    """
    key_valid_extensions = ('.key', '.pem')
    short_cert_name_valid_extensions = ('.cert', '.cer', '.csr', '.crl', '.crt', '.der', '.p12', '.pfx')
    certs = []
    keys = []
    # Collect short-named certs and keys from the root certs tree, skipping
    # the dedicated pem/ and json/ sub-directories handled below.
    for root, _dirs, files in os.walk(_get_certs_dir('/etc/certs')):
        if root.endswith(("pem", "json")):
            continue
        for name in files:
            if name.endswith(short_cert_name_valid_extensions):
                certs.append(name)
            if name.endswith(key_valid_extensions):
                keys.append(name)
    # JSON and PEM certificates live in their own sub-directories.
    certs.extend(f for f in os.listdir(_get_certs_dir('/etc/certs/json')) if f.endswith('.json'))
    certs.extend(f for f in os.listdir(_get_certs_dir('/etc/certs/pem')) if f.endswith('.pem'))
    return web.json_response({"certs": certs, "keys": keys})
async def upload(request):
    """ Upload a certificate
    :Example:
        curl -F "cert=@filename.pem" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.json" http://localhost:8081/fledge/certificate
        curl -F "key=@filename.pem" -F "cert=@filename.pem" http://localhost:8081/fledge/certificate
        curl -F "key=@filename.key" -F "cert=@filename.json" http://localhost:8081/fledge/certificate
        curl -F "key=@filename.key" -F "cert=@filename.cert" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.cert" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.cer" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.csr" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.crl" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.crt" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.der" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.p12" http://localhost:8081/fledge/certificate
        curl -F "cert=@filename.pfx" http://localhost:8081/fledge/certificate
        curl -F "key=@filename.key" -F "cert=@filename.cert" -F "overwrite=1" http://localhost:8081/fledge/certificate
    """
    data = await request.post()
    # contains the name of the file in string format
    key_file = data.get('key')
    cert_file = data.get('cert')
    allow_overwrite = data.get('overwrite', '0')
    # accepted values for overwrite are '0 and 1'
    should_overwrite = False
    if allow_overwrite in ('0', '1'):
        should_overwrite = True if int(allow_overwrite) == 1 else False
    else:
        raise web.HTTPBadRequest(reason="Accepted value for overwrite is 0 or 1")
    if not cert_file:
        raise web.HTTPBadRequest(reason="Cert file is missing")
    cert_filename = cert_file.filename
    # default installed auth cert keys can be deleted, for matching/debugging disallow overwrite
    if cert_filename in ['admin.cert', 'admin.key', 'user.cert', 'user.key', 'fledge.key', 'fledge.cert', 'ca.key',
                         'ca.cert']:
        if request.is_auth_optional:
            # Anonymous access may never replace the default installed certs.
            _logger.warning(FORBIDDEN_MSG)
            raise web.HTTPForbidden(reason=FORBIDDEN_MSG, body=json.dumps({"message": FORBIDDEN_MSG}))
        else:
            if not request.user_is_admin:
                msg = "admin role permissions required to overwrite the default installed auth/TLS certificates."
                _logger.warning(msg)
                raise web.HTTPForbidden(reason=msg, body=json.dumps({"message": msg}))
    # note.. We are not checking if HTTPS enabled or auth mechanism?
    # Here, in secured instance, we are simply disallowing non-admin user to overwrite/import configured TLS/CA certificates
    if request.user and not request.user_is_admin:
        cf_mgr = ConfigurationManager(connect.get_storage_async())
        cat = await cf_mgr.get_category_all_items(category_name='rest_api')
        configured_ca_and_tls_certs = [cat['certificateName']['value'], cat['authCertificateName']['value']]
        if cert_filename and cert_filename.rpartition('.')[0] in configured_ca_and_tls_certs:  # we better disallow any extension with those names instead of [1]/endswith .cert
            msg = 'Certificate with name {} is configured to be used, ' \
                  'An `admin` role permissions required to add/overwrite.'.format(cert_filename)
            _logger.warning(msg)
            raise web.HTTPForbidden(reason=msg, body=json.dumps({"message": msg}))
    key_valid_extensions = ('.key', '.pem')
    cert_valid_extensions = ('.cert', '.cer', '.csr', '.crl', '.crt', '.der', '.json', '.pem', '.p12', '.pfx')
    key_filename = None
    if key_file:
        key_filename = key_file.filename
        if not key_filename.endswith(key_valid_extensions):
            raise web.HTTPBadRequest(reason="Accepted file extensions are {} for key file".format(key_valid_extensions))
    if not cert_filename.endswith(cert_valid_extensions):
        raise web.HTTPBadRequest(reason="Accepted file extensions are {} for cert file".format(cert_valid_extensions))
    # pem and json certificates are stored in their own sub-directories.
    certs_dir = _get_certs_dir('/etc/certs/')
    if cert_filename.endswith('.pem'):
        certs_dir = _get_certs_dir('/etc/certs/pem')
    if cert_filename.endswith('.json'):
        certs_dir = _get_certs_dir('/etc/certs/json')
    is_found = True if len(_find_file(cert_filename, certs_dir)) else False
    if is_found and should_overwrite is False:
        raise web.HTTPBadRequest(reason="Certificate with the same name already exists! "
                                        "To overwrite, set the overwrite flag")
    if key_file:
        key_file_found = True if len(_find_file(key_filename, _get_certs_dir('/etc/certs/'))) else False
        if key_file_found and should_overwrite is False:
            raise web.HTTPBadRequest(reason="Key cert with the same name already exists. "
                                            "To overwrite, set the overwrite flag")
    # All validations passed: write the uploaded bytes to disk.
    if cert_file:
        cert_file_data = data['cert'].file
        cert_file_content = cert_file_data.read()
        cert_file_path = str(certs_dir) + '/{}'.format(cert_filename)
        with open(cert_file_path, 'wb') as f:
            f.write(cert_file_content)
    if key_file:
        key_file_data = data['key'].file
        key_file_content = key_file_data.read()
        key_file_path = str(_get_certs_dir('/etc/certs/')) + '/{}'.format(key_filename)
        with open(key_file_path, 'wb') as f:
            f.write(key_file_content)
    # in order to bring this new cert usage into effect, make sure to
    # update config for category rest_api
    # and restart for TLS
    msg = "{} has been uploaded successfully".format(cert_filename)
    if key_file:
        msg = "{} and {} have been uploaded successfully".format(key_filename, cert_filename)
    return web.json_response({"result": msg})
@has_permission("admin")
async def delete_certificate(request):
    """ Delete a certificate
    :Example:
        curl -X DELETE http://localhost:8081/fledge/certificate/user.key
        curl -X DELETE http://localhost:8081/fledge/certificate/user.cert
        curl -X DELETE http://localhost:8081/fledge/certificate/filename.cer
        curl -X DELETE http://localhost:8081/fledge/certificate/filename.csr
        curl -X DELETE http://localhost:8081/fledge/certificate/filename.crl
        curl -X DELETE http://localhost:8081/fledge/certificate/filename.crt
        curl -sX DELETE http://localhost:8081/fledge/certificate/filename.der
        curl -X DELETE http://localhost:8081/fledge/certificate/filename.p12
        curl -X DELETE http://localhost:8081/fledge/certificate/filename.pfx
        curl -X DELETE http://localhost:8081/fledge/certificate/fledge.json?type=cert
        curl -X DELETE http://localhost:8081/fledge/certificate/fledge.pem?type=cert
        curl -X DELETE http://localhost:8081/fledge/certificate/fledge.pem
        curl -X DELETE http://localhost:8081/fledge/certificate/fledge.pem?type=key
    """
    file_name = request.match_info.get('name', None)
    valid_extensions = ('.cert', '.cer', '.csr', '.crl', '.crt', '.der', '.json', '.key', '.pem', '.p12', '.pfx')
    if not file_name.endswith(valid_extensions):
        msg = "Accepted file extensions are {}".format(valid_extensions)
        raise web.HTTPBadRequest(reason=msg, body=json.dumps({"message": msg}))
    # Default installed certificates get extra protection.
    if file_name in ['admin.cert', 'user.cert', 'fledge.key', 'fledge.cert', 'ca.key', 'ca.cert']:
        if request.is_auth_optional:
            # Anonymous (auth optional) requests may never delete them.
            _logger.warning(FORBIDDEN_MSG)
            raise web.HTTPForbidden(reason=FORBIDDEN_MSG, body=json.dumps({"message": FORBIDDEN_MSG}))
        cf_mgr = ConfigurationManager(connect.get_storage_async())
        cat = await cf_mgr.get_category_all_items(category_name='rest_api')
        configured_ca_and_tls_certs = [cat['certificateName']['value'], cat['authCertificateName']['value']]
        if file_name and file_name.rpartition('.')[0] in configured_ca_and_tls_certs:
            # check if cert_name is currently set for 'certificateName' or authCertificateName in config for 'rest_api'
            msg = 'Certificate with name {} is configured for use, you can not delete but overwrite if required.'.format(
                file_name)
            raise web.HTTPConflict(reason=msg, body=json.dumps({"message": msg}))
    _type = None
    if 'type' in request.query and request.query['type'] != '':
        _type = request.query['type']
        if _type not in ['cert', 'key']:
            msg = "Only cert and key are allowed for the value of type param"
            raise web.HTTPBadRequest(reason=msg, body=json.dumps({"message": msg}))
    certs_dir = _get_certs_dir('/etc/certs/')
    is_found = False
    cert_path = list()
    if _type and _type == 'cert':
        short_cert_name_valid_extensions = ('.cert', '.cer', '.csr', '.crl', '.crt', '.der', '.p12', '.pfx')
        if not file_name.endswith(short_cert_name_valid_extensions):
            # pem/json certificates live in their own sub-directories
            if os.path.isfile(certs_dir + 'pem/' + file_name):
                is_found = True
                cert_path = [certs_dir + 'pem/' + file_name]
            if os.path.isfile(certs_dir + 'json/' + file_name):
                is_found = True
                cert_path = [certs_dir + 'json/' + file_name]
        else:
            if os.path.isfile(certs_dir + file_name):
                is_found = True
                cert_path = [certs_dir + file_name]
    if _type and _type == 'key':
        if os.path.isfile(certs_dir + file_name):
            is_found = True
            cert_path = [certs_dir + file_name]
    if _type is None:
        # No type given: collect every match from root, pem/ and json/ dirs.
        for root, dirs, files in os.walk(certs_dir):
            if root.endswith('json'):
                for f in files:
                    if file_name == f:
                        is_found = True
                        cert_path.append(certs_dir + 'json/' + file_name)
                        files.remove(f)
            if root.endswith('pem'):
                for f in files:
                    if file_name == f:
                        is_found = True
                        cert_path.append(certs_dir + 'pem/' + file_name)
                        files.remove(f)
            for f in files:
                if file_name == f:
                    is_found = True
                    cert_path.append(certs_dir + file_name)
    if not is_found:
        msg = 'Certificate with name {} does not exist'.format(file_name)
        raise web.HTTPNotFound(reason=msg, body=json.dumps({"message": msg}))
    # Remove file
    for fp in cert_path:
        os.remove(fp)
    return web.json_response({'result': "{} has been deleted successfully".format(file_name)})
def _get_certs_dir(_path):
    """Return the certs directory for ``_path``, creating it when missing."""
    base = _FLEDGE_DATA if _FLEDGE_DATA else _FLEDGE_ROOT + '/data'
    dir_path = base + _path
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    return os.path.expanduser(dir_path)
def _find_file(name, path):
fl = list()
for root, dirs, files in os.walk(path):
if name in files:
fl.append(os.path.join(root, name))
return fl
| 1.953125 | 2 |
ghapi/_nbdev.py | jrieke/ghapi | 0 | 12763887 | # AUTOGENERATED BY NBDEV! DO NOT EDIT!
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
index = {"GH_HOST": "00_core.ipynb",
"GhApi": "00_core.ipynb",
"date2gh": "00_core.ipynb",
"gh2date": "00_core.ipynb",
"print_summary": "00_core.ipynb",
"GhApi.delete_release": "00_core.ipynb",
"GhApi.upload_file": "00_core.ipynb",
"GhApi.create_release": "00_core.ipynb",
"GhApi.list_tags": "00_core.ipynb",
"GhApi.list_branches": "00_core.ipynb",
"EMPTY_TREE_SHA": "00_core.ipynb",
"GhApi.create_branch_empty": "00_core.ipynb",
"GhApi.delete_tag": "00_core.ipynb",
"GhApi.delete_branch": "00_core.ipynb",
"GhApi.get_branch": "00_core.ipynb",
"GhApi.list_files": "00_core.ipynb",
"GhApi.get_content": "00_core.ipynb",
"GhApi.update_contents": "00_core.ipynb",
"GhApi.enable_pages": "00_core.ipynb",
"contexts": "01_actions.ipynb",
"env_github": "01_actions.ipynb",
"user_repo": "01_actions.ipynb",
"Event": "01_actions.ipynb",
"create_workflow_files": "01_actions.ipynb",
"fill_workflow_templates": "01_actions.ipynb",
"env_contexts": "01_actions.ipynb",
"def_pipinst": "01_actions.ipynb",
"create_workflow": "01_actions.ipynb",
"gh_create_workflow": "01_actions.ipynb",
"example_payload": "01_actions.ipynb",
"github_token": "01_actions.ipynb",
"actions_output": "01_actions.ipynb",
"actions_debug": "01_actions.ipynb",
"actions_warn": "01_actions.ipynb",
"actions_error": "01_actions.ipynb",
"actions_group": "01_actions.ipynb",
"actions_endgroup": "01_actions.ipynb",
"actions_mask": "01_actions.ipynb",
"set_git_user": "01_actions.ipynb",
"Scope": "02_auth.ipynb",
"scope_str": "02_auth.ipynb",
"GhDeviceAuth": "02_auth.ipynb",
"GhDeviceAuth.url_docs": "02_auth.ipynb",
"GhDeviceAuth.open_browser": "02_auth.ipynb",
"GhDeviceAuth.auth": "02_auth.ipynb",
"GhDeviceAuth.wait": "02_auth.ipynb",
"paged": "03_page.ipynb",
"parse_link_hdr": "03_page.ipynb",
"GhApi.last_page": "03_page.ipynb",
"pages": "03_page.ipynb",
"GhApi.list_events": "04_event.ipynb",
"GhApi.list_events_parallel": "04_event.ipynb",
"GhEvent": "04_event.ipynb",
"GhApi.fetch_events": "04_event.ipynb",
"load_sample_events": "04_event.ipynb",
"save_sample_events": "04_event.ipynb",
"full_type": "04_event.ipynb",
"evt_emojis": "04_event.ipynb",
"description": "04_event.ipynb",
"emoji": "04_event.ipynb",
"text": "04_event.ipynb",
"described_evts": "04_event.ipynb",
"ghapi": "10_cli.ipynb",
"ghpath": "10_cli.ipynb",
"ghraw": "10_cli.ipynb",
"completion_ghapi": "10_cli.ipynb",
"GH_OPENAPI_URL": "90_build_lib.ipynb",
"build_funcs": "90_build_lib.ipynb",
"GhMeta": "90_build_lib.ipynb"}
modules = ["core.py",
"actions.py",
"auth.py",
"page.py",
"event.py",
"cli.py",
"build_lib.py"]
doc_url = "https://ghapi.fast.ai/"
git_url = "https://github.com/fastai/ghapi/tree/master/"
def custom_doc_links(name): return None
| 1.140625 | 1 |
def wypisz(par1, par2):
    """Print the two arguments separated by a single space."""
    message = '{0} {1}'.format(par1, par2)
    print(message)
def sprawdz(arg1, arg2):
    """Return True if arg1 is strictly greater than arg2."""
    # The comparison already yields a bool; the if/else was redundant.
    return arg1 > arg2
| 3.25 | 3 |
zuoye/5.py | ASAv123456789/ASAv | 0 | 12763889 | <reponame>ASAv123456789/ASAv
# Read two comma-separated numbers, e.g. "1000, 4.25".
# (The original `ce,nll=input("<<")` is a Python 2 idiom: under Python 3,
# input() returns a string, so the unpack and the arithmetic both break.)
ce, nll = (float(part) for part in input("<<").split(","))
# presumably ce is a balance and nll an annual rate in percent,
# so nll/1200 is the monthly fractional rate — TODO confirm intent
lx = ce * (nll / 1200)
print("The interest is", lx)
| 2.828125 | 3 |
tests/cli/test_test.py | flupke/jenskipper | 4 | 12763890 | <filename>tests/cli/test_test.py
import re
from jenskipper.cli import test
def test_test(requests_mock):
    """End-to-end check of the `test` command against a fully mocked Jenkins.
    The command copies the job under a temporary name, triggers a build,
    polls the queue and build status, then disables and deletes the temp job.
    """
    requests_mock.get('/api/json', json={'useCrumbs': False})
    # Temporary job names look like default_job<INFIX>.<8 hex chars>.
    name_pattern = r'default_job%s\.[0-9a-f]{8}' % test.TEMP_JOBS_INFIX
    requests_mock.post(re.compile('/job/%s/config.xml' % name_pattern))
    queue_path = '/queue/default_job'
    # 201 + Location header mimics Jenkins queueing a build.
    requests_mock.post(
        re.compile('/job/%s/build' % name_pattern),
        status_code=201,
        headers={'location': queue_path}
    )
    build_url = '/build/default_job'
    requests_mock.get(queue_path + '/api/json',
                      json={'executable': {'url': build_url}})
    requests_mock.get(build_url + '/api/json', json={'result': 'SUCCESS'})
    requests_mock.post(re.compile('/job/%s/disable' % name_pattern))
    requests_mock.post(re.compile('/job/%s/doDelete' % name_pattern))
    exit_code = test.test(['default_job'], standalone_mode=False)
    # With standalone_mode off, click returns None on success.
    assert exit_code is None
| 2.203125 | 2 |
capture-images.py | andrewxue98/sourdough | 0 | 12763891 | from time import sleep
from picamera import PiCamera
import datetime
import os

import RPi.GPIO as GPIO

# BCM pin numbers used as outputs (presumably lights/relays — TODO confirm).
pin1 = 4
pin2 = 17

dir = 'data'
if not os.path.exists(dir):
    os.mkdir(dir)

# Configure the GPIO pins once, before the capture loop (the original
# needlessly repeated this setup on every iteration).
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(pin1, GPIO.OUT)
GPIO.output(pin1, GPIO.HIGH)
GPIO.setup(pin2, GPIO.OUT)
GPIO.output(pin2, GPIO.HIGH)

camera = PiCamera()
try:
    # Capture one frame every 5 minutes until the cut-off date.
    while datetime.datetime.now() < datetime.datetime(2020, 12, 3, 12):
        dt = datetime.datetime.now()
        month, day, hour, minute = dt.month, dt.day, dt.hour, dt.minute
        filename = f"sourdough-date-{month}-{day}-time-{hour}-{minute}.jpg"
        # Bug fix: the output path must include the generated filename.
        filepath = f"{dir}/{filename}"
        camera.capture(filepath)
        print(f"Saved image at {filepath}...")
        sleep(300)
finally:
    # Always release the camera, even when a capture fails; the original
    # bare `except: raise Exception(...)` discarded the real error and
    # skipped the close() call on failure.
    camera.close()
| 3.078125 | 3 |
worker/fb.py | wdr-data/sportsfreund | 5 | 12763892 | import os
import json
import logging
import gevent
import requests
from feeds.models.subscription import Subscription
from lib.attachment import Attachment
from lib.facebook import upload_attachment, guess_attachment_type
from lib.response import Replyable, SenderTypes
from lib.sent_tracking import UserSentTracking
from worker import BaseTask
logger = logging.getLogger(__name__)
PAGE_TOKEN = os.environ.get('FB_PAGE_TOKEN', 'na')
class FacebookError(Exception):
    """Error returned by the Facebook Graph API.

    Wraps the ``error`` object of a Graph API response, exposing its
    message, type, code, subcode and trace id.
    """

    def __init__(self, error):
        message = error.get('message')
        # Bug fix: the original called `super()` without invoking __init__,
        # so Exception.args was never populated (breaking repr/pickling).
        super().__init__(message)
        self.message = message
        self.type = error.get('type')
        self.code = error.get('code')
        self.subcode = error.get('error_subcode')
        self.fbtrace = error.get('fbtrace_id')

    def __str__(self):
        return self.message
class Send(BaseTask):
    """Task that delivers one message payload to the Facebook Graph API."""
    def run(self, params):
        """Sends a payload via the graph API"""
        payload = params['payload']
        id = params['sending_id']
        # Wait (up to ~5s) until the previous message to this recipient was
        # sent, so messages for one user go out in sequence-number order.
        for i in range(10):
            tracking = UserSentTracking.by_id(payload['recipient']['id'])
            if not ('last_sent' in tracking):
                break
            if tracking.last_sent + 1 == id:
                break
            gevent.sleep(0.5)
        logger.debug("JSON Payload: " + json.dumps(payload))
        headers = {'Content-Type': 'application/json'}
        r = requests.post("https://graph.facebook.com/v2.6/me/messages",
                          params={'access_token': PAGE_TOKEN},
                          data=json.dumps(payload),
                          headers=headers)
        response = r.content.decode()
        logger.debug(response)
        error = json.loads(response).get('error')
        if error:
            if int(error.get('code', 0)) == 551:  # Unavailable
                # Recipient unavailable: remove their subscriptions instead
                # of raising and retrying forever.
                Subscription.collection.delete_many({'psid': payload['recipient']['id']})
            else:
                raise FacebookError(error)
        else:
            # Record the sequence number so the next queued message may go out.
            UserSentTracking.set_sent(payload['recipient']['id'], id)
class SendAttachment(BaseTask):
    """Task that uploads (or reuses) an attachment and sends it to a user."""
    def run(self, params):
        """Sends a payload via the graph API"""
        recipient_id = params['recipient_id']
        event = Replyable({'sender': {'id': recipient_id}}, SenderTypes.FACEBOOK)
        url = params['url']
        type = params['type']
        try:
            # Reuse the cached Facebook attachment id for this URL if any.
            attachment = Attachment.query(url=url)[0]
            attachment_id = attachment.attachment_id
        except IndexError:
            # Not cached yet: upload to Facebook and remember the new id.
            attachment_id = upload_attachment(url, type)
            if attachment_id is None:
                raise ValueError('Uploading attachment with URL %s failed' % url)
            Attachment.create(url=url, attachment_id=attachment_id)
        event.send_attachment_by_id(attachment_id, type or guess_attachment_type(url))
| 2.015625 | 2 |
tests/unit/beacons/test_memusage.py | Noah-Huppert/salt | 19 | 12763893 | <reponame>Noah-Huppert/salt<filename>tests/unit/beacons/test_memusage.py
# coding: utf-8
# Python libs
from __future__ import absolute_import
from collections import namedtuple
# Salt libs
import salt.beacons.memusage as memusage
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
# Salt testing libs
from tests.support.unit import TestCase
STUB_MEMORY_USAGE = namedtuple(
"vmem", "total available percent used free active inactive buffers cached shared"
)(
15722012672,
9329594368,
40.7,
5137018880,
4678086656,
6991405056,
2078953472,
1156378624,
4750528512,
898908160,
)
class MemUsageBeaconTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test case for salt.beacons.memusage
    """
    def setup_loader_modules(self):
        # The beacon needs no special loader setup.
        return {}
    def test_non_list_config(self):
        # Configuration must be a list of dicts, not a bare dict.
        config = {}
        ret = memusage.validate(config)
        self.assertEqual(
            ret, (False, "Configuration for memusage beacon must be a list.")
        )
    def test_empty_config(self):
        # A list without a `percent` entry is rejected.
        config = [{}]
        ret = memusage.validate(config)
        self.assertEqual(
            ret, (False, "Configuration for memusage beacon requires percent.")
        )
    def test_memusage_match(self):
        # Stubbed usage is 40.7%, so a 40% threshold fires the beacon.
        with patch("psutil.virtual_memory", MagicMock(return_value=STUB_MEMORY_USAGE)):
            config = [{"percent": "40%"}, {"interval": 30}]
            ret = memusage.validate(config)
            self.assertEqual(ret, (True, "Valid beacon configuration"))
            ret = memusage.beacon(config)
            self.assertEqual(ret, [{"memusage": 40.7}])
    def test_memusage_nomatch(self):
        # Below a 70% threshold the beacon must not report a reading.
        with patch("psutil.virtual_memory", MagicMock(return_value=STUB_MEMORY_USAGE)):
            config = [{"percent": "70%"}]
            ret = memusage.validate(config)
            self.assertEqual(ret, (True, "Valid beacon configuration"))
            ret = memusage.beacon(config)
            self.assertNotEqual(ret, [{"memusage": 50}])
| 2.09375 | 2 |
ml_ops/tests/diabetes/test_publish_model.py | GolamRashed/azure-databricks-mlops-mlflow | 33 | 12763894 | <reponame>GolamRashed/azure-databricks-mlops-mlflow
import logging
import unittest
from unittest.mock import MagicMock
from diabetes_mlops.publish_model import run
class TestEvaluateMethods(unittest.TestCase):
    """Tests for diabetes_mlops.publish_model.run."""
    logger = logging.getLogger(__name__)
    # NOTE(review): basicConfig runs as a side effect of defining this class.
    logging.basicConfig(
        format="%(asctime)s %(module)s %(levelname)s: %(message)s",
        datefmt="%m/%d/%Y %I:%M:%S %p",
        level=logging.INFO,
    )
    def test_publish_model(self):
        # run() should accept any (client, model) pair without raising.
        self.logger.info("unittest test_publish_model")
        run(MagicMock(), MagicMock())
        assert True
    def test_publish_model_exception(self):
        # None arguments are expected to make run() raise.
        self.logger.info("unittest test_publish_model exception")
        with self.assertRaises(Exception):
            run(None, None)
        assert True
if __name__ == "__main__":
unittest.main()
| 2.5625 | 3 |
0034. Find First and Last Position of Element in Sorted Array/Solution.py | furutuki/LeetCodeSolution | 0 | 12763895 | from typing import List
class Solution:
    def searchRange(self, nums: List[int], target: int) -> List[int]:
        """Return [first, last] index of target in sorted nums, or [-1, -1].

        Uses two binary searches (O(log n)). The original expanded linearly
        around a hit, which degrades to O(n) for long runs of target and
        violates the problem's O(log n) requirement.
        """
        from bisect import bisect_left, bisect_right
        first = bisect_left(nums, target)
        if first == len(nums) or nums[first] != target:
            return [-1, -1]
        # bisect_right points one past the last occurrence.
        return [first, bisect_right(nums, target) - 1]
s = Solution()
print(s.searchRange([5,7,7,8,8,10], 8))
print(s.searchRange([5,7,7,8,8,10], 6))
print(s.searchRange([5,7,7,8,8,10], 5))
print(s.searchRange([5,7,7,8,8,10], 10))
print(s.searchRange([5,7,7,8,8,10], 7))
print(s.searchRange([5,7,7,8,8,10], 287))
| 3.234375 | 3 |
sklearn_dummies/__init__.py | gsmafra/sklearn-dummies | 1 | 12763896 | <gh_stars>1-10
from .base import DataFrameDummies, NPArrayDummies
| 0.976563 | 1 |
tensorflow/python/training/saving/functional_saver.py | handongke/tensorflow | 36 | 12763897 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Saves and restore variables inside traced @tf.functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.eager import def_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.training.saving import saveable_object
from tensorflow.python.util import nest
class Saver(object):
"""A minimal utility class for saving and restoring checkpoints.
Note that this is a low-level utility which stores Tensors in the keys
specified by `SaveableObject`s. Higher-level utilities for object-based
checkpointing are built on top of it.
"""
  def __init__(self, saveable_objects):
    """Specify a list of `SaveableObject`s to save and restore.

    Args:
      saveable_objects: A list of `SaveableObject`s.

    Raises:
      ValueError: If any element is not a `SaveableObject`.
    """
    saveable_objects = list(saveable_objects)
    # Validate eagerly so a bad element fails at construction time rather
    # than during save/restore tracing.
    for saveable in saveable_objects:
      if not isinstance(saveable, saveable_object.SaveableObject):
        raise ValueError(
            "Saver expected a list of SaveableObjects, got %s." % (saveable,))
    self._saveable_objects = saveable_objects
def to_proto(self):
"""Serializes to a SaverDef referencing the current graph."""
filename_tensor = array_ops.placeholder(
shape=[], dtype=dtypes.string, name="saver_filename")
# TODO(allenl): Add save and restore function names to the proto directly.
signature = (tensor_spec.TensorSpec(shape=(), dtype=dtypes.string),)
# Autograph is off because of reference cycles which must be collected when
# a function is created and destroyed (as in tf.saved_model.save). It's also
# not necessary, so having it off may be slightly faster.
#
# TODO(b/121302372): We should be able to decorate save() and restore()
# unconditionally.
save_tensor = def_function.function(
self.save, input_signature=signature, autograph=False)(filename_tensor)
restore_op = def_function.function(
self.restore, input_signature=signature, autograph=False)(
filename_tensor).op
return saver_pb2.SaverDef(
filename_tensor_name=filename_tensor.name,
save_tensor_name=save_tensor.name,
restore_op_name=restore_op.name,
version=saver_pb2.SaverDef.V2)
def save(self, file_prefix):
"""Save the saveable objects to a checkpoint with `file_prefix`.
Args:
file_prefix: A string or scalar string Tensor containing the prefix to
save under.
Returns:
A scalar string Tensor containing `file_prefix` with control dependencies
on the save ops.
"""
tensor_names = []
tensors = []
tensor_slices = []
for saveable in self._saveable_objects:
for spec in saveable.specs:
tensor_names.append(spec.name)
tensors.append(spec.tensor)
tensor_slices.append(spec.slice_spec)
with ops.device("cpu:0"):
with ops.control_dependencies([io_ops.save_v2(
file_prefix, tensor_names, tensor_slices, tensors)]):
return array_ops.identity(file_prefix)
def restore(self, file_prefix):
"""Restore the saveable objects from a checkpoint with `file_prefix`.
Args:
file_prefix: A string or scalar string Tensor containing the prefix for
files to read from.
Returns:
A scalar string Tensor containing `file_prefix` with control dependencies
on the restore ops.
"""
restore_ops = restore_from_saveable_objects(
file_prefix, self._saveable_objects)
with ops.device("cpu:0"):
with ops.control_dependencies(restore_ops):
return array_ops.identity(file_prefix)
def restore_from_saveable_objects(file_prefix, saveable_objects):
    """Reads from a checkpoint and returns restore ops for `saveable_objects`s."""
    restore_specs = []
    # Per-saveable list of spec names; used below to re-nest the flat list of
    # restored tensors back into one group per saveable.
    tensor_structure = []
    for saveable in saveable_objects:
        saveable_tensor_structure = []
        tensor_structure.append(saveable_tensor_structure)
        for spec in saveable.specs:
            saveable_tensor_structure.append(spec.name)
            restore_specs.append((spec.name, spec.slice_spec, spec.dtype))
    tensor_names, tensor_slices, tensor_dtypes = zip(*restore_specs)
    # Checkpoint I/O is pinned to the CPU device.
    with ops.device("cpu:0"):
        restored_tensors = io_ops.restore_v2(
            file_prefix, tensor_names, tensor_slices, tensor_dtypes)
    structured_restored_tensors = nest.pack_sequence_as(
        tensor_structure, restored_tensors)
    restore_ops = []
    # NOTE(review): the loop variable deliberately rebinds (shadows) the flat
    # `restored_tensors` above; each iteration sees one saveable's tensor group.
    for saveable, restored_tensors in zip(saveable_objects,
                                          structured_restored_tensors):
        restore_ops.append(saveable.restore(restored_tensors,
                                            restored_shapes=None))
    return restore_ops
| 1.929688 | 2 |
PythonAdvance/re_example.py | JiaLei123/PythonCamp | 0 | 12763898 | <filename>PythonAdvance/re_example.py
import re

# Pre-compile the pattern once: one or more ASCII digits.
pattern = re.compile(r'[0-9]+')

# 'hello world! hello' contains no digits, so this is an empty list.
match = pattern.findall('hello world! hello')
# Fixed: `print pattern.findall(...)` was Python 2 statement syntax and is a
# SyntaxError under Python 3; use the print() function like the rest of the file.
print(pattern.findall('station 1000 100 and 7'))
| 3.25 | 3 |
data-service/src/main/resources/apiserver.py | cgiraldo/platform-data-mgmnt | 4 | 12763899 | """
Copyright (c) 2016 Cisco and/or its affiliates.
This software is licensed to you under the terms of the Apache License, Version 2.0
(the "License").
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
The code, technical concepts, and all information contained herein, are the property of
Cisco Technology, Inc.and/or its affiliated entities, under various laws including copyright,
international treaties, patent, and/or contract.
Any use of the material herein must be in accordance with the terms of the License.
All rights not expressly granted by the License are reserved.
Unless required by applicable law or agreed to separately in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied.
Purpose: Implements Restful API service for managing platform dataset
"""
import logging
import signal
import socket
import sys
import tornado.httpserver
import tornado.ioloop
from tornado.options import options, parse_config_file
from tornado_json.application import Application
from tornado_json.routes import get_routes
import config
import dataservice
from dataservice import HDBDataStore
from endpoint import Platform
# Setting options.logging to None tells tornado to skip its own logging setup;
# logging is configured explicitly in main().
options.logging = None
# Global HTTP server handle: assigned in main(), stopped in shutdown().
APISERVER = None
def sig_handler(sig, frame):
    """
    call back handler for sighup and sigterm
    :param sig: signal number delivered by the OS
    :param frame: current stack frame at delivery time (unused)
    :return:
    """
    logging.warning(
        "Received shutdown signal for dataset dataset with signal:%s and frame:%s", sig, frame)
    # Defer the actual shutdown to the IO loop: signal handlers should not
    # manipulate tornado state directly.
    tornado.ioloop.IOLoop.instance().add_callback(shutdown)
def shutdown():
    """shuts down the server: stop accepting connections, then stop the IO loop"""
    logging.info('Stopping http server')
    APISERVER.stop()
    io_loop = tornado.ioloop.IOLoop.instance()
    io_loop.stop()
def main():
    """Main entry point for the data-management API service.

    Loads configuration, discovers the platform's HDFS/HBASE endpoints, binds
    the HTTP server to the first free configured port and runs the IO loop.
    """
    # pylint: disable=global-statement
    global APISERVER
    config.define_options()
    err_msg = ''
    # Attempt to load config from config file
    try:
        parse_config_file("server.conf")
    except IOError:
        err_msg = ("{} doesn't exist or couldn't be opened. Using defaults."
                   .format(options.conf_file_path))
    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        level=logging.getLevelName(options.log_level),
                        stream=sys.stderr)
    logging.info(options.as_dict())
    # Logging was not configured before parse_config_file, so report any
    # config-load failure only now.
    if err_msg:
        logging.error(err_msg)
    platform = Platform.factory(options.hadoop_distro)
    endpoints = platform.discover(options)
    if not endpoints:
        logging.error("Failed to discover API endpoints of cluster")
    db_store = HDBDataStore(endpoints['HDFS'].geturl(), endpoints['HBASE'].geturl(),
                            options.thrift_port,
                            options.datasets_table,
                            options.data_repo)
    routes = get_routes(dataservice)
    logging.info("Service Routes %s", routes)
    settings = dict()
    APISERVER = tornado.httpserver.HTTPServer(
        Application(routes=routes, settings=settings, db_conn=db_store))
    # Try each configured port until one binds successfully.
    for port in options.ports:
        try:
            logging.debug("Attempting to bind for dataset dataset on port:%d and address %s",
                          port, options.bind_address)
            APISERVER.listen(port, options.bind_address)
            logging.info("Awesomeness is listening on:%s", port)
            break
        except socket.error:
            # logging.warn is a deprecated alias for logging.warning.
            logging.warning("Not able to bind on port:%d", port)
    else:
        logging.warning("No free port available to bind dataset")
    signal.signal(signal.SIGTERM, sig_handler)
    signal.signal(signal.SIGINT, sig_handler)
    # keep collecting dataset
    tornado.ioloop.PeriodicCallback(db_store.collect, options.sync_period).start()
    # db_conn2.collect()
    tornado.ioloop.IOLoop.instance().start()
# Run the service when executed as a script.
if __name__ == '__main__':
    main()
| 1.390625 | 1 |
src/lib/test_environment/spawn_test_container.py | SimonDudanski/integration-test-docker-environment | 0 | 12763900 | <filename>src/lib/test_environment/spawn_test_container.py
import pathlib
from typing import List
import luigi
import netaddr
from docker.transport import unixconn
from ...lib.base.docker_base_task import DockerBaseTask
from ...lib.base.json_pickle_parameter import JsonPickleParameter
from ...lib.data.container_info import ContainerInfo
from ...lib.data.docker_network_info import DockerNetworkInfo
from ...lib.test_environment.analyze_test_container import DockerTestContainerBuild
from ...lib.test_environment.create_export_directory import CreateExportDirectory
class SpawnTestContainer(DockerBaseTask):
    """Luigi task that creates (or reuses) the docker test container of a test
    environment, attaches it to the environment network and copies the tests
    into it."""

    environment_name = luigi.Parameter()
    test_container_name = luigi.Parameter()
    network_info = JsonPickleParameter(
        DockerNetworkInfo, significant=False)  # type: DockerNetworkInfo
    ip_address_index_in_subnet = luigi.IntParameter(significant=False)
    attempt = luigi.IntParameter(1)
    reuse_test_container = luigi.BoolParameter(False, significant=False)
    no_test_container_cleanup_after_end = luigi.BoolParameter(False, significant=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.ip_address_index_in_subnet < 0:
            raise Exception(
                "ip_address_index_in_subnet needs to be greater than 0 got %s"
                % self.ip_address_index_in_subnet)

    def register_required(self):
        # The test-container image and the export directory must exist first.
        self.test_container_image_future = self.register_dependency(DockerTestContainerBuild())
        self.export_directory_future = self.register_dependency(CreateExportDirectory())

    def run_task(self):
        subnet = netaddr.IPNetwork(self.network_info.subnet)
        # Offset by 2: the first addresses of the subnet are not handed out here.
        ip_address = str(subnet[2 + self.ip_address_index_in_subnet])
        container_info = None
        if self.network_info.reused and self.reuse_test_container:
            container_info = self._try_to_reuse_test_container(ip_address, self.network_info)
        if container_info is None:
            container_info = self._create_test_container(ip_address, self.network_info)
        # (Fixed: removed an unused containers.get() lookup that was here.)
        self._copy_tests()
        self.return_object(container_info)

    def _copy_tests(self):
        """Refresh /tests inside the container from the mounted /tests_src."""
        self.logger.warning("Copy tests in test container %s.", self.test_container_name)
        test_container = \
            self._client.containers.get(self.test_container_name)
        try:
            # Best effort: a previous copy may not exist yet.
            # (Fixed: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            test_container.exec_run(cmd="rm -r /tests")
        except Exception:
            pass
        test_container.exec_run(cmd="cp -r /tests_src /tests")

    def _try_to_reuse_test_container(self, ip_address: str,
                                     network_info: DockerNetworkInfo) -> ContainerInfo:
        """Return ContainerInfo for an already running container, or None on failure."""
        self.logger.info("Try to reuse test container %s",
                         self.test_container_name)
        container_info = None
        try:
            network_aliases = self._get_network_aliases()
            container_info = self.create_container_info(ip_address, network_aliases, network_info)
        except Exception as e:
            self.logger.warning("Tried to reuse test container %s, but got Exeception %s. "
                                "Fallback to create new database.", self.test_container_name, e)
        return container_info

    def _create_test_container(self, ip_address,
                               network_info: DockerNetworkInfo) -> ContainerInfo:
        """Start a fresh test container attached to the environment network."""
        self._remove_container(self.test_container_name)
        test_container_image_info = \
            self.get_values_from_futures(self.test_container_image_future)["test-container"]
        # A later task which uses the test_container needs the exported container,
        # but to access exported container from inside the test_container,
        # we need to mount the release directory into the test_container.
        exports_host_path = pathlib.Path(self._get_export_directory()).absolute()
        tests_host_path = pathlib.Path("./tests").absolute()
        volumes = {
            exports_host_path: {
                "bind": "/exports",
                "mode": "rw"
            },
            tests_host_path: {
                "bind": "/tests_src",
                "mode": "rw"
            }
        }
        # When the docker client talks over a unix socket, pass that socket
        # through so the test container can use docker itself.
        docker_unix_sockets = [i for i in self._client.api.adapters.values() if isinstance(i, unixconn.UnixHTTPAdapter)]
        if len(docker_unix_sockets) > 0:
            host_docker_socker_path = docker_unix_sockets[0].socket_path
            volumes[host_docker_socker_path] = {
                "bind": "/var/run/docker.sock",
                "mode": "rw"
            }
        test_container = \
            self._client.containers.create(
                image=test_container_image_info.get_target_complete_name(),
                name=self.test_container_name,
                network_mode=None,
                command="sleep infinity",
                detach=True,
                volumes=volumes,
                labels={"test_environment_name": self.environment_name, "container_type": "test_container"})
        docker_network = self._client.networks.get(network_info.network_name)
        network_aliases = self._get_network_aliases()
        docker_network.connect(test_container, ipv4_address=ip_address, aliases=network_aliases)
        test_container.start()
        container_info = self.create_container_info(ip_address, network_aliases, network_info)
        return container_info

    def _get_network_aliases(self):
        """Aliases under which the container is reachable on the docker network."""
        network_aliases = ["test_container", self.test_container_name]
        return network_aliases

    def create_container_info(self, ip_address: str, network_aliases: List[str],
                              network_info: DockerNetworkInfo) -> ContainerInfo:
        """Build a ContainerInfo; raises if the container is not running."""
        test_container = self._client.containers.get(self.test_container_name)
        if test_container.status != "running":
            raise Exception(f"Container {self.test_container_name} not running")
        container_info = ContainerInfo(container_name=self.test_container_name,
                                       ip_address=ip_address,
                                       network_aliases=network_aliases,
                                       network_info=network_info)
        return container_info

    def _get_export_directory(self):
        return self.get_values_from_future(self.export_directory_future)

    def _remove_container(self, container_name: str):
        """Force-remove the named container; missing containers are ignored."""
        try:
            container = self._client.containers.get(container_name)
            container.remove(force=True)
            self.logger.info("Removed container %s", container_name)
        except Exception:
            # Container does not exist or is already gone - nothing to do.
            pass

    def cleanup_task(self):
        if not self.no_test_container_cleanup_after_end:
            self._remove_container(self.test_container_name)
| 1.992188 | 2 |
highlightjs/tests.py | bumfo/django-highlightjs | 0 | 12763901 | <filename>highlightjs/tests.py
from __future__ import unicode_literals
from django.test import TestCase
from django.utils.html import escape
from django.template import Template, Context
from django.test.utils import override_settings
from . import settings
class HighlightjsTemplateTagTests(TestCase):
    """Rendering tests for the {% highlightjs_* %} template tags."""

    @staticmethod
    def _render(tag_markup, context=None):
        """Render `tag_markup` with the highlightjs tag library loaded."""
        source = "{% load highlightjs %}" + tag_markup
        return Template(source).render(Context(context or {}))

    def test_highlightjs_css_url_tag(self):
        rendered = self._render("{% highlightjs_css_url %}")
        self.assertEqual(rendered, settings.css_url())

    def test_highlightjs_javascript_tag(self):
        rendered = self._render("{% highlightjs_javascript %}")
        self.assertIn(settings.highlightjs_url(), rendered)

    @override_settings(HIGHLIGHTJS={'include_jquery': False})
    def test_highlightjs_javascript_tag_without_jquery(self):
        # Re-read the overridden Django settings before rendering.
        settings.update_settings()
        rendered = self._render("{% highlightjs_javascript %}")
        self.assertNotIn(settings.highlightjs_jquery_url(), rendered)

    @override_settings(HIGHLIGHTJS={'include_jquery': True})
    def test_highlightjs_javascript_tag_with_jquery(self):
        settings.update_settings()
        rendered = self._render("{% highlightjs_javascript %}")
        self.assertIn(settings.highlightjs_jquery_url(), rendered)

    def test_highlightjs_this_tag(self):
        # Sample code containing characters that require HTML escaping.
        code = "friends = ['john', 'pat', 'gary', 'michael']" \
            + "for i, name in enumerate(friends):" \
            + " print 'iteration {iteration} is {name}'.format(iteration=i, name=name)"
        rendered = self._render("{% highlightjs_this code %}", {'code': code})
        self.assertIn(escape(code), rendered)
        self.assertIn('<pre>', rendered)
| 2.1875 | 2 |
# Read two integers from the user and report their sum.
valores = [int(input(prompt)) for prompt in ('Digite um valor:', 'Digite outro valor:')]
valor1, valor2 = valores

soma = sum(valores)
print(f'A soma entre os valores {valor1} e {valor2} é igual a {soma}!')
| 3.890625 | 4 |
Analytics/resources/Widgets/get_widgets.py | thanosbnt/SharingCitiesDashboard | 4 | 12763903 | <gh_stars>1-10
from http import HTTPStatus
from flask_jwt_extended import jwt_required
from flask_restful import Resource
from flask_restful import abort
from flask_restful import reqparse
from models.widget import WidgetModel
class GetWidgets(Resource):
    """Endpoint returning the widgets stored for a given user.

    Parameters can be passed using a POST request that contains a JSON with
    the following fields:
    :param userID: Unique user identification number (required)
    :param limit: the max count of widgets to be returned (optional, default 1)
    :type userID: int
    :type limit: int
    """

    def __init__(self) -> None:
        """Configure the request parser for the POST arguments."""
        self.reqparser_get = reqparse.RequestParser()
        self.reqparser_get.add_argument('userID', required=True, help='A user_id is required',
                                        location=['form', 'json'])
        self.reqparser_get.add_argument('limit', default=1, required=False, type=int,
                                        help='unable to parse limit', location=['form', 'json'])
        super().__init__()

    @jwt_required
    def post(self) -> ([WidgetModel], int):
        """Return up to `limit` widgets of `userID` plus an HTTP status code."""
        args = self.reqparser_get.parse_args()
        # Fetch the widget rows belonging to the given user, capped at `limit`.
        matches = WidgetModel.query.filter_by(user_id=args["userID"]).limit(args["limit"]).all()
        if not matches:
            # Nothing stored for this user: empty payload, explicit 200.
            return [], 200
        # Re-load each widget by id and serialize it for the response body.
        payload = [WidgetModel.get_widget_by_id(match.id).json() for match in matches]
        return payload, HTTPStatus.OK.value
| 2.828125 | 3 |
slime_mind/engine/plant.py | teofrastusb/That-Game | 0 | 12763904 | import random
import uuid
class Plant():
def __init__(self, config):
self.id = str(uuid.uuid4())
self.x = None
self.y = None
self.conf = config
self.level = 1
self.max_hp = config.getint('max_hp')
self.current_hp = self.max_hp
self.max_level = config.getint('max_level')
self.hp_increment = config.getint('hp_increment')
def update(self):
self.level_up()
def level_up(self):
# Check if the plant levels up
do_level_up = 0
level_up_chance = random.randint(0,self.conf.getint('level_up_chance_one'))
if level_up_chance == 0:
level_up_chance = random.randint(0,self.max_level + self.conf.getint('level_up_chance_two'))
if level_up_chance > self.level and self.level < self.max_level:
do_level_up = 1
# If the plant levels up do the following
if do_level_up:
self.level += 1
# Change max hp on level up
self.max_hp += self.hp_increment
# Add hp on level up, not exceeding max
self.current_hp = min(self.current_hp + self.max_hp//2, self.max_hp)
def reset_level(self):
self.level = 1
self.max_hp = self.conf.getint('max_hp')
self.current_hp = self.max_hp
def can_seed(self):
return self.level == self.max_level
def __dict__(self):
return {
'type': 'PLANT',
'id': self.id,
'x': self.x,
'y': self.y,
'level': self.level,
'current_hp': self.current_hp,
'max_hp': self.max_hp,
'can_seed': self.can_seed()
}
| 3.265625 | 3 |
tests/test_ref_list.py | kyzas/scake | 0 | 12763905 | <reponame>kyzas/scake
# -*- coding: utf-8 -*-
from scake import Scake
class Foo():
    """Minimal callable fixture: calling an instance yields its stored value."""

    def __init__(self, x):
        # Remember the payload so __call__ can hand it back.
        self.x = x

    def __call__(self):
        """Return the value given at construction time."""
        return self.x
class Wrapper():
    """Callable fixture like Foo, but it rejects tuple/list payloads."""

    def __init__(self, x):
        # Sequences are a configuration error for this fixture.
        assert not isinstance(x, (tuple, list))
        self.x = x

    def __call__(self):
        """Return the wrapped value."""
        return self.x
def test_ref_list_4():
    """Deeply nested '=/...' references (including references inside lists)
    must resolve to the instantiated objects after Scake.run()."""
    config = {
        'transforms': {
            't1': {
                '$Foo': {
                    'x': 5,
                }
            },
            't2': {
                '$Foo': {
                    'x': 10,
                }
            },
            't3': {
                '$Foo': {
                    'x': 17,
                }
            },
            'offical_list': [
                '=/wrapper',
                '=/transforms/t3',
            ]
        },
        'v0': {
            'v1': {
                'v2': [
                    '=/transforms/t1',
                    '=/transforms/t2',
                ],
            }
        },
        'seq_obj': {
            '$Foo': {
                'x': '=/v0/v1/v2',
            }
        },
        'wrapper': {
            '$Wrapper': {
                'x': '=/seq_obj',
            }
        },
        'compose': {
            '$Foo': {
                'x': '=/transforms/offical_list',
            }
        },
        'aug': {
            'train': {
                'main': '=/compose',
            }
        },
        'dataset': {
            '$Foo': {
                'x': '=/aug/train/main',
            }
        },
        'dataloader': {
            '$Foo': {
                'x': '=/dataset',
            }
        }
    }
    s = Scake(config, class_mapping=globals())
    s.run(debug=True)
    # The first element of compose's list should be the resolved Wrapper,
    # whose payload in turn is the resolved seq_obj Foo.
    compose = s['/compose']
    offical_list = compose.x
    wrapper = offical_list[0]
    assert isinstance(wrapper.x, Foo)
def test_ref_list_3():
    """A reference embedded inside a literal list must be resolved in place."""
    config = {
        'v0': {
            'v1': {
                'v2': [10, 20, 30],
            }
        },
        'foo': {
            '$Foo': {
                'x': '=/v0/v1/v2',
            }
        },
        'out': {
            '$Foo': {
                'x': [1, '=/foo', 2],
            }
        }
    }
    s = Scake(config, class_mapping=globals())
    s.run(debug=True)
    assert isinstance((s['/out'].x)[1], Foo)
def test_ref_list_2():
    """A reference as an element of a top-level list resolves to the object."""
    config = {
        'v0': {
            'v1': {
                'v2': [10, 20, 30],
            }
        },
        'foo': {
            '$Foo': {
                'x': '=/v0/v1/v2',
            }
        },
        'out': [
            '=/foo',
            77,
            88
        ],
    }
    s = Scake(config, class_mapping=globals())
    s.run(debug=True)
    assert isinstance(s['/out'][0], Foo)
def test_ref_list_1():
    """A list-valued reference resolves to the same list both through the
    object attribute and through the aliased top-level key."""
    config = {
        'v0': {
            'v1': {
                'v2': [10, 20, 30],
            }
        },
        'foo': {
            '$Foo': {
                'x': '=/v0/v1/v2',
            }
        },
        'out': '=/foo',
    }
    s = Scake(config, class_mapping=globals())
    s.run(debug=True)
    assert s['/foo'].x == [10, 20, 30]
    assert s['/out'].x == [10, 20, 30]
def test_ref_list_obj():
    """An object reference inside a list survives aliasing of the whole list."""
    config = {
        'foo': {
            '$Foo': {
                'x': 10,
            }
        },
        'f0': [1, '=/foo', 3],
        'f1': '=/f0',
    }
    s = Scake(config, class_mapping=globals())
    s.run(debug=True)
    assert isinstance(s['/f0'][1], Foo)
    assert isinstance(s['/f1'][1], Foo)
def test_ref_list_simple():
    """A plain list alias resolves to an equal list."""
    config = {
        'f0': [1, 2, 3],
        'f1': '=/f0',
    }
    s = Scake(config, class_mapping=globals())
    s.run(debug=True)
    assert s['/f1'] == [1, 2, 3]
| 2.203125 | 2 |
bin/tRNA-lengths.py | dlysiak/tsRNAsearch_add-new-species | 0 | 12763906 | #!/usr/bin/env python
"""
Write tRNA lengths to file
"""
__author__ = "<NAME>"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
import sys

# Read the tRNA GFF annotation given as the first CLI argument.
# (Fixed: files are now closed via `with`, and dict.iteritems() - which only
# exists on Python 2 - was replaced by items().)
with open(sys.argv[1], "r") as gff:
    readlines = gff.readlines()

my_list = list()
my_dict = dict()

for line in readlines:
    splitline = line.strip().split("\t")
    feature_end = int(splitline[4])
    # Amino-acid identifier: first three characters after the "-" in column 1.
    feature_id = (splitline[0].split('-')[1])[:3]
    # Add all tRNA lengths to a dictionary (e.g. Gly: 78, 75, 76)
    my_dict.setdefault(feature_id, []).append(feature_end)

# One "<id>\t<max length>" output line per amino acid, sorted alphabetically.
for k, v in my_dict.items():
    my_list.append(k + "\t" + str(max(v)) + "\n")
my_list.sort()

with open(sys.argv[2], 'w') as f:
    for item in my_list:
        f.write(item)
| 2.875 | 3 |
torch2trt/converters/narrow.py | NVIDIA-AI-IOT-private/torch2trt | 0 | 12763907 | import tensorrt as trt
from torch2trt.torch2trt import *
from torch2trt.module_test import add_module_test
@tensorrt_converter('torch.Tensor.narrow')
@tensorrt_converter('torch.narrow')
def convert_narrow(ctx):
    """Convert torch.narrow / Tensor.narrow into a TensorRT slice layer.

    Builds full-rank start/shape/stride vectors, then overrides the narrowed
    dimension with the requested start offset and length.
    (Fixed: removed a dead `start = get_arg(...)` assignment that was
    immediately overwritten below.)
    """
    inputs = get_arg(ctx, 'input', pos=0, default=None)
    output = ctx.method_return
    shape = list(inputs.shape)
    start = [0] * len(shape)
    stride = [1] * len(shape)
    # Normalize a negative dim argument to its positive index.
    dim = ctx.method_args[1] if get_arg(ctx, 'dim', pos=1, default=0) >= 0 else len(shape) + get_arg(ctx, 'dim', pos=1, default=0)
    start[dim] = ctx.method_args[2]  # start offset along `dim`
    shape[dim] = ctx.method_args[3]  # number of elements to keep
    # not consider batch dimension
    input_trt = trt_(ctx.network, inputs)
    layer = ctx.network.add_slice(input=input_trt, start=start, shape=shape, stride=stride)
    output._trt = layer.get_output(0)
class Narrow(torch.nn.Module):
    """Module wrapper around torch.narrow, used by the converter tests."""

    def __init__(self, dim, start, length):
        super(Narrow, self).__init__()
        # Slice spec: keep `length` elements along `dim`, starting at `start`.
        self.dim = dim
        self.start = start
        self.length = length

    def forward(self, x):
        """Return the narrowed view of `x`."""
        return torch.narrow(x, self.dim, self.start, self.length)
@add_module_test(torch.float32, torch.device('cuda'), [(1,3,224,224)])
def test_narrow1():
    """Module test: keep 2 of the 3 elements along dimension 1."""
    return Narrow(1,0,2)
@add_module_test(torch.float32, torch.device('cuda'), [(1,3,224,224)])
def test_narrow2():
    """Module test: keep the first 50 elements along dimension 2."""
    return Narrow(2,0,50)
| 2.328125 | 2 |
aiostar/tasks/ParallelTask.py | douding123986/aiostar | 5 | 12763908 | """
Name : ParallelTask
Author : blu
Time : 2022/3/7 08:19
Desc : 并行任务基类
"""
from aiostar.tasks import BaseTask
class ParallelTask(BaseTask):
    """Base class for tasks executed in parallel.

    Stores the callable and its positional arguments for later dispatch.
    """

    def __init__(self, fun, *args):
        # The callable to execute and the arguments to pass it.
        self.fun = fun
        self.args = args
| 2.140625 | 2 |
view/console.py | germank/training-monitor | 0 | 12763909 | <reponame>germank/training-monitor<filename>view/console.py
import cmd
import logging
from signalslot import Signal
class CommandConsole(cmd.Cmd):
    """Interactive command console; commands are published as signals so the
    application can react without the console knowing the receivers."""

    def __init__(self):
        cmd.Cmd.__init__(self)
        # Emitted with the target directory when the user runs "save".
        self.save_command = Signal(['output_dir'])
        # Emitted when the user quits (via "quit" or end-of-file).
        self.quit_command = Signal()

    def do_save(self, line):
        """Emit save_command; the rest of the line names the output directory."""
        self.save_command.emit(output_dir=line)

    def do_EOF(self, line):
        """Treat end-of-file (Ctrl-D) exactly like the quit command."""
        return self.do_quit(line)

    def do_quit(self, line):
        """Emit quit_command and end the cmd loop (returning True stops it)."""
        self.quit_command.emit()
        return True
| 2.453125 | 2 |
from parlaparser.parser import Parser

# Instantiate the parser and run the full parse when this module is executed.
parser = Parser()
parser.parse()
| 1.320313 | 1 |
circuit_recognizer/circuit_recognizer/connect/edge.py | symbench/scm | 1 | 12763911 | <filename>circuit_recognizer/circuit_recognizer/connect/edge.py
class Edge:
    """Directed connection between two nodes, carrying the line that drew it."""

    def __init__(self, src, dst, line):
        self.src = src
        self.dst = dst
        self.line = line
        # Degenerate self-loops are rejected.
        assert src != dst, f"Source and destination are the same!"

    def reverse(self):
        """Return the opposite-direction edge; the line is reversed as well."""
        return Edge(self.dst, self.src, self.line.reverse())

    def to_dict(self):
        """Serialize endpoints by id (None when an endpoint is missing)."""
        def ident(node):
            # Truthiness (not `is None`) mirrors how endpoints may be unset.
            return node.id if node else None

        return {"src": ident(self.src), "dst": ident(self.dst)}

    def __str__(self):
        labels = [node.id if node else "None" for node in (self.src, self.dst)]
        return f"{labels[0]} -> {labels[1]}"
| 3.234375 | 3 |
from numpy import *

# 4x5 matrix holding the integers 0..19.
a = arange(20).reshape(4,5)
print(a)
# Number of dimensions (2 for this matrix).
print(a.ndim)
print("wymiary {}, typy {}".format(a.dtype.name, a.itemsize))
# 4-D array of ones.
b = ones((2,3,5,6))
print(b.ndim)
# NOTE: empty_like allocates WITHOUT initializing - contents are arbitrary,
# so everything derived from `c` below is non-deterministic.
c = empty_like(a)
a = a-c
d = a>(-45)
print(d)
#as array
# print(c*a)
# as matrix
# Rebind d to a random 2x3x6 array of floats in [0, 1).
d = random.random((2,3,6))
# print("minimum {}, maximum{}".format(d.min(),d.max()))
print(d)
# Reductions along axis 0 (min) and axis 1 (sum).
print("min kol {}, sum wiersze {}".format(d.min(axis=0), d.sum(axis=1)))
| 3.09375 | 3 |
plugin/vimtools/tests/test_delete.py | dsw7/VimTools | 0 | 12763913 | <reponame>dsw7/VimTools
from helpers import VimToolsTestCase
class TestDel(VimToolsTestCase):
    """Tests for the :Del command, which deletes the given line numbers."""

    def setUp(self):
        # Three-line fixture buffer used by the tests.
        self.input_string = """\
foo bar 1
foo bar 2
foo bar 3
"""

    def test_del(self):
        """:Del 1 2 removes the first two lines, leaving only the third."""
        expected_string = """\
foo bar 3
"""
        command = [":Del 1 2"]
        self.assert_files_equal(command, self.input_string, expected_string)
| 2.5625 | 3 |
import platform

# Operating system name
print(platform.system())
# Processor information
print(platform.processor())
# OS release information
print(platform.release())
# Network (host) name of this machine
print(platform.node())
# Python version
print(platform.python_version())
# Python compiler information
print(platform.python_compiler())
| 1.726563 | 2 |
setup.py | saulzar/node_expressions | 1 | 12763915 | <filename>setup.py
from setuptools import setup, find_namespace_packages

# Packaging metadata for the node-expressions Blender node-graph API.
setup(
    name="node-expressions",
    version="0.1.3",
    author="<NAME>",
    author_email="<EMAIL>",
    description="API for using expressions to create node graphs in blender",
    url="https://github.com/saulzar/node_expressions",
    packages=find_namespace_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    install_requires = [
        "fuzzywuzzy"
    ],
    python_requires='>=3.7',
)
| 1.25 | 1 |
scripts/owner/what_do_i_own.py | wwjiang007/fuchsia-1 | 210 | 12763916 | <filename>scripts/owner/what_do_i_own.py
#!/usr/bin/env python2.7
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import re
import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Repository root: two directory levels above this script.
FUCHSIA_ROOT = os.path.dirname(  # $root
    os.path.dirname(  # scripts
        SCRIPT_DIR))  # owner

# Example usage:
# $ what_do_i_own.py me@mydomain file_i_own file_i_dont
# file_i_own
def main():
    """Filter the given paths down to those the owner appears responsible for,
    based on the nearest OWNERS file (direct entries or per-file rules).

    Note: this script targets Python 2 (print statement, dict.iteritems).
    """
    parser = argparse.ArgumentParser(
        description='Filters `paths` for those owned by `owner`')
    parser.add_argument('owner')
    parser.add_argument('paths', nargs='+')
    args = parser.parse_args()
    owner = args.owner
    abspaths = [os.path.abspath(path) for path in args.paths]
    # Matches "per-file <name> = <owner>" lines inside an OWNERS file.
    perfile_exp = re.compile('^\s*per-file ([^\s=]*)\s*=\s*' + owner)
    # Find all OWNERS files
    path_to_owners = {}
    for path in abspaths:
        dir = path if os.path.isdir(path) else os.path.dirname(path)
        dir = os.path.abspath(dir)
        # Walk upwards until an OWNERS file is found or we leave the checkout.
        while (os.path.exists(dir) and
               os.path.commonprefix([dir, FUCHSIA_ROOT]) == FUCHSIA_ROOT):
            owners_path = os.path.join(dir, 'OWNERS')
            if os.path.exists(owners_path):
                path_to_owners[path] = owners_path
                break
            dir = os.path.dirname(dir)
    # Parse all OWNERS files
    owned = set()
    for path, owners in path_to_owners.iteritems():
        with open(owners) as f:
            for line in f.readlines():
                # Direct ownership line.
                if line.strip().startswith(owner):
                    owned.add(path)
                    continue
                # per-file rule: owned only if the named file was requested.
                match = perfile_exp.match(line)
                if match:
                    filename = os.path.abspath(
                        os.path.join(os.path.dirname(owners), match.group(1)))
                    if filename in abspaths:
                        owned.add(path)
    # Print owned files
    for owned_path in sorted(owned):
        print os.path.relpath(owned_path)
    return 0
# Exit with main()'s return code when run as a script.
if __name__ == '__main__':
    sys.exit(main())
| 2.484375 | 2 |
def to_url_representation(path: str) -> str:
    """Encode a path so it can be embedded in URLs and query strings.

    Underscores are escaped to "-_-" before slashes become "__", keeping the
    two escape sequences unambiguous for the reverse mapping.
    """
    escaped_underscores = path.replace("_", "-_-")
    return escaped_underscores.replace("/", "__")
def from_url_representation(url_rep: str) -> str:
    """Decode a path previously encoded by ``to_url_representation``."""
    with_slashes = url_rep.replace("__", "/")
    return with_slashes.replace("-_-", "_")
| 3.265625 | 3 |
tests/test_pds.py | gamis/flo | 0 | 12763918 | <reponame>gamis/flo<gh_stars>0
import pandas as pd
import pytest
from pandas.testing import assert_series_equal, assert_frame_equal
from flo import e_
from flo.pds import monkey_patch_pandas
# Install flo's extension methods (.only, .without, ...) on pandas objects
# before the tests below use them.
monkey_patch_pandas()
@pytest.mark.parametrize('series,value,expected',
                         [(pd.Series([1, 2, 3, 4, 3]), 3, pd.Series([3, 3], index=[2, 4])),
                          (pd.Series([1, 2, 3, float('nan'), 3]), 3, pd.Series([3, 3], index=[2, 4], dtype=float)),
                          (pd.Series([1, 2, 3, float('nan'), 3]), None, pd.Series([float('nan')], index=[3])),
                          (pd.Series(["foo", "bar", None, "baz"]), "bar", pd.Series(["bar"], index=[1])),
                          (pd.Series(["foo", "bar", None, "baz"]), None, pd.Series([None], index=[2])),
                          (pd.Series(["foo"]), "you", pd.Series([], dtype=object))
                          ])
def test_only(series: pd.Series, value, expected: pd.Series):
    """`only(value)` keeps just the entries equal to `value` (None matches NaN/None)."""
    assert_series_equal(series.only(value), expected)
    assert_frame_equal(pd.DataFrame(dict(foo=series)).only(foo=value), pd.DataFrame(dict(foo=expected)))
@pytest.mark.parametrize('series,value,expected',
                         [(pd.Series([1, 2, 3, 4, 3]), 3, pd.Series([1, 2, 4], index=[0, 1, 3])),
                          (pd.Series([1, 2, 3, float('nan'), 3]), 3,
                           pd.Series([1, 2, float('nan')], index=[0, 1, 3], dtype=float)),
                          (pd.Series([1, 2, 3, float('nan'), 3]), None,
                           pd.Series([1, 2, 3, 3], index=[0, 1, 2, 4], dtype=float)),
                          (pd.Series(["foo", "bar", None, "baz"]), "bar",
                           pd.Series(["foo", None, "baz"], index=[0, 2, 3])),
                          (pd.Series(["foo", "bar", None, "baz"]), None,
                           pd.Series(["foo", "bar", "baz"], index=[0, 1, 3])),
                          (pd.Series(["foo"]), "you", pd.Series(["foo"]))
                          ])
def test_ds_without(series: pd.Series, value, expected: pd.Series):
    """`without(value)` drops entries equal to `value` (None drops NaN/None)."""
    assert_series_equal(series.without(value), expected)
    assert_frame_equal(pd.DataFrame(dict(foo=series)).without(foo=value), pd.DataFrame(dict(foo=expected)))
@pytest.mark.parametrize('series,condition,expected',
                         [(pd.Series([1, 2, 3, 4, 3]), lambda e: e % 2 == 0, pd.Series([2, 4], index=[1, 3])),
                          (pd.Series(["foo", "bar", "baz"]), e_.startswith('b'),
                           pd.Series(["bar", "baz"], index=[1, 2]))
                          ])
def test_ds_only_if(series: pd.Series, condition, expected: pd.Series):
    """`only_if(pred)` keeps entries for which the predicate holds."""
    assert_series_equal(series.only_if(condition), expected)
    assert_frame_equal(pd.DataFrame(dict(foo=series)).only_if(foo=condition), pd.DataFrame(dict(foo=expected)))
@pytest.mark.parametrize('series,value,expected',
                         [(pd.Series([1, 2, 3, 4, 3]), {3}, pd.Series([3, 3], index=[2, 4])),
                          (pd.Series([1, 2, 3, 4, 3]), {7, 3, 18}, pd.Series([3, 3], index=[2, 4])),
                          (pd.Series([1, 2, 3, 4, 3]), {3, 4}, pd.Series([3, 4, 3], index=[2, 3, 4])),
                          (pd.Series([1, 2, 3, float('nan'), 3]), {3}, pd.Series([3, 3], index=[2, 4], dtype=float)),
                          (pd.Series([1, 2, 3, float('nan'), 3]), {float('nan')}, pd.Series([float('nan')], index=[3])),
                          (pd.Series(["foo", "bar", None, "baz"]), {"bar"}, pd.Series(["bar"], index=[1])),
                          (pd.Series(["foo", "bar", None, "baz"]), {None}, pd.Series([None], index=[2])),
                          (pd.Series(["foo"]), {"you"}, pd.Series([], dtype=object)),
                          (pd.Series(["foo"]), set(), pd.Series([], dtype=object))
                          ])
def test_ds_only_in(series: pd.Series, value, expected: pd.Series):
    """`only_in(values)` keeps entries contained in the given set."""
    assert_series_equal(series.only_in(value), expected)
    assert_frame_equal(pd.DataFrame(dict(foo=series)).only_in(foo=value), pd.DataFrame(dict(foo=expected)))
@pytest.mark.parametrize('series,value,expected',
                         [(pd.Series([1, 2, 3, 4, 3]), {3}, pd.Series([1, 2, 4], index=[0, 1, 3])),
                          (pd.Series([1, 2, 3, float('nan'), 3]), {3},
                           pd.Series([1, 2, float('nan')], index=[0, 1, 3], dtype=float)),
                          (pd.Series([1, 2, 3, float('nan'), 3]), {float('nan')},
                           pd.Series([1, 2, 3, 3], index=[0, 1, 2, 4], dtype=float)),
                          (pd.Series(["foo", "bar", None, "baz"]), {"bar"},
                           pd.Series(["foo", None, "baz"], index=[0, 2, 3])),
                          (pd.Series(["foo", "bar", None, "baz"]), {None},
                           pd.Series(["foo", "bar", "baz"], index=[0, 1, 3])),
                          (pd.Series(["foo"]), {"you"}, pd.Series(["foo"]))
                          ])
def test_ds_not_in(series: pd.Series, value, expected: pd.Series):
    """not_in drops rows whose value is a member of *value* (incl. NaN/None)."""
    assert_series_equal(series.not_in(value), expected)
    # The DataFrame variant takes per-column exclusion sets as kwargs.
    assert_frame_equal(pd.DataFrame(dict(foo=series)).not_in(foo=value), pd.DataFrame(dict(foo=expected)))
@pytest.mark.parametrize('series,method,value,expected',
                         [(pd.Series([1, 2, 3, 4, 3]), "gt_", 3, pd.Series([4], index=[3])),
                          (pd.Series([1, 2, 3, 4, 3]), "geq_", 3, pd.Series([3, 4, 3], index=[2, 3, 4])),
                          (pd.Series([1, 2, 3, 4, 3]), "lt_", 3, pd.Series([1, 2], index=[0, 1])),
                          (pd.Series([1, 2, 3, 4, 3]), "leq_", 3,
                           pd.Series([1, 2, 3, 3], index=[0, 1, 2, 4])),
                          ])
def test_ds_ops(series: pd.Series, method: str, value, expected: pd.Series):
    """gt_/geq_/lt_/leq_ filter rows by comparison against *value*.

    NOTE(review): these are project-added extension methods on
    pd.Series/pd.DataFrame, not stock pandas API.
    """
    assert_series_equal(getattr(pd.Series, method)(series, value), expected)
    assert_frame_equal(getattr(pd.DataFrame, method)(pd.DataFrame(dict(foo=series)), foo=value), pd.DataFrame(dict(foo=expected)))
@pytest.mark.parametrize('series,kwargs,expected',
                         [(pd.Series([1, 2, 3, 4, 3]), {}, pd.Series([2, 1, 1, 1], index=[3, 1, 2, 4])),
                          ])
def test_ds_count_distinct(series: pd.Series, kwargs, expected: pd.Series):
    """count_distinct / count_by return per-value frequencies.

    Renamed from ``test_ds_ops``: the module already defines a
    ``test_ds_ops`` above, so this second definition shadowed it and
    pytest silently skipped the first test's parametrized cases.
    """
    assert_series_equal(series.count_distinct(**kwargs), expected)
    # count_by names its index after the grouped column; align before comparing.
    expected.index.rename('foo', inplace=True)
    assert_series_equal(pd.DataFrame(dict(foo=series)).count_by('foo', **kwargs), expected)
@pytest.mark.parametrize('left,right,left_inclusive,right_inclusive,element,expected',
                         [(2, 10, True, False, 3, [3]),
                          (2, 10, True, False, 2, [2]),
                          (2, 10, True, False, 10, []),
                          (2, 10, True, True, 10, [10]),
                          (2, 10, False, False, 2, [])
                          ])
def test_between(left, right, left_inclusive, right_inclusive, element, expected) -> None:
    """only_between honours inclusive/exclusive bounds on both endpoints."""
    assert_series_equal(pd.Series([element]).only_between(left, right, left_inclusive=left_inclusive, right_inclusive=right_inclusive), pd.Series(expected, dtype='int64'))
    # The DataFrame variant accepts the interval in several spellings:
    # a [left, right] list (both inclusive), "[l,r)" / "(l,r]" interval
    # strings, or a (left, right) tuple (both exclusive).
    if left_inclusive:
        if right_inclusive:
            interval = [left, right]
        else:
            interval = f"[{left},{right})"
    else:
        if right_inclusive:
            interval = f"({left},{right}]"
        else:
            interval = (left, right)
    assert_frame_equal(pd.DataFrame(dict(foo=[element])).only_between(foo=interval), pd.DataFrame(dict(foo=expected), dtype='int64'))
| 2.34375 | 2 |
CLI.py | smiteshz/YLYLDadJokes | 1 | 12763919 | import requests as req
# ASCII banner (verbatim).
print( " _ _ _____ _ ___ ___ ___ ___ ")
print( " | | | | / ____| | | |__ \ / _ \ / _ \ / _ \ ")
print( " | | ___ | | _____ | | __ ___ _ __ ___ _ __ __ _| |_ ___ _ __ ) | | | | | | | | | |")
print( " _ | |/ _ \| |/ / _ \ | | |_ |/ _ \ '_ \ / _ \ '__/ _\`| __/ _ \| '__| / /| | | | | | | | | |")
print( " | |__| | (_) | < __/ | |__| | __/ | | | __/ | | (_| | || (_) | | / /_| |_| | |_| | |_| |")
print( " \____/ \___/|_|\_\___| \_____|\___|_| |_|\___|_| \__,_|\__\___/|_| |____|\___/ \___/ \___/ ")
print( " ")

RANDOM_URL = "https://icanhazdadjoke.com/"
SEARCH_URL = "https://icanhazdadjoke.com/search"
# The icanhazdadjoke API returns JSON only when asked for it explicitly.
HEADERS = {"Accept": "application/json"}

while True:
    search_q = input("What do you want to seach for?(Press Enter for any random joke) ")
    if search_q:
        # Bug fix: the search term used to be read but never sent anywhere.
        # Route it through the API's /search endpoint and show the first hit.
        res = req.get(
            SEARCH_URL,
            headers=HEADERS,
            params={"term": search_q, "limit": 1},
        ).json()
        results = res.get("results", [])
        if results:
            print(results[0]["joke"])
        else:
            print("No jokes found for '{}'".format(search_q))
    else:
        res = req.get(RANDOM_URL, headers=HEADERS).json()
        print(res["joke"])

    ch = input("One more ? (Press Enter for a joke or Type q to quit) ")
    # Any input other than q/Q (including plain Enter) continues the loop.
    if ch in ("q", "Q"):
        break
| 3.125 | 3 |
brainframe_qt/ui/dialogs/server_configuration/server_configuration.py | aotuai/brainframe-qt | 17 | 12763920 | <filename>brainframe_qt/ui/dialogs/server_configuration/server_configuration.py
import logging
from typing import Optional, Tuple
from PyQt5.QtCore import QObject
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QApplication, QCheckBox, QDialog, QDialogButtonBox, \
QGridLayout, QLabel, QLineEdit, QPushButton, QWidget
from PyQt5.uic import loadUi
from brainframe.api import BrainFrameAPI, bf_errors
from brainframe_qt.api_utils import api
from brainframe_qt.extensions import DialogActivity
from brainframe_qt.ui.dialogs.license_dialog.license_dialog import \
LicenseDialog
from brainframe_qt.ui.resources import QTAsyncWorker
from brainframe_qt.ui.resources.config import ServerSettings
from brainframe_qt.ui.resources.links.documentation import \
LICENSE_DOCS_LINK
from brainframe_qt.ui.resources.paths import qt_ui_paths
from brainframe_qt.ui.resources.ui_elements.widgets.dialogs import \
BrainFrameMessage
from brainframe_qt.util.licensing import LicenseState
from brainframe_qt.util.secret import decrypt, encrypt
class ServerConfigActivity(DialogActivity):
    """Dialog activity that opens the server configuration dialog."""

    # Presumably distinguishes built-in activities from plugin-provided
    # ones — confirm against DialogActivity's contract.
    _built_in = True

    def open(self, *, parent: QWidget):
        """Show the server configuration dialog modally under *parent*."""
        ServerConfigurationDialog.show_dialog(parent=parent)

    def window_title(self) -> str:
        """Translated window title for this activity."""
        return QApplication.translate("ServerConfigActivity",
                                      "Server Configuration")

    @staticmethod
    def icon() -> QIcon:
        # Icon is loaded from the compiled Qt resource bundle.
        return QIcon(":/icons/server_config")

    @staticmethod
    def short_name() -> str:
        """Translated short label used e.g. in navigation."""
        return QApplication.translate("ServerConfigActivity", "Server")
class ServerConfigurationDialog(QDialog):
def __init__(self, *, parent: QObject):
super().__init__(parent=parent)
loadUi(qt_ui_paths.server_configuration_ui, self)
self.server_config = ServerSettings()
self._init_ui()
def _init_ui(self):
self.grid_layout: QGridLayout
self.server_address_line_edit: QLineEdit
self.authentication_checkbox: QCheckBox
self.server_username_line_edit: QLineEdit
self.server_password_line_edit: QLineEdit
self.save_password_checkbox: QCheckBox
self.button_box: QDialogButtonBox
self.license_config_button: QPushButton
self.connection_status_label: QLabel
self.connection_report_label: QLabel
self.check_connection_button: QPushButton
self.connection_report_label.setOpenExternalLinks(True)
self.server_address_line_edit.setText(self.server_config.server_url)
self.server_username_line_edit.setText(self.server_config.server_username)
if self.server_config.server_password:
settings_password = self.server_config.server_password
if settings_password:
try:
decrypt_password = decrypt(settings_password)
self.server_password_line_edit.setText(decrypt_password)
except ValueError:
message = self.tr("Invalid password saved in QSettings. Clearing.")
logging.error(message)
del self.server_config.server_password
use_auth = bool(self.server_config.server_username)
self._show_authentication_fields(use_auth)
self.authentication_checkbox.setChecked(use_auth)
self.save_password_checkbox.setChecked(bool(self.server_password))
# noinspection PyUnresolvedReferences
self.authentication_checkbox.stateChanged.connect(
self._show_authentication_fields)
self.authentication_checkbox.stateChanged.connect(
self._verify)
# noinspection PyUnresolvedReferences
self.server_address_line_edit.textChanged.connect(self._verify)
# noinspection PyUnresolvedReferences
self.server_username_line_edit.textChanged.connect(self._verify)
# noinspection PyUnresolvedReferences
self.server_password_line_edit.textChanged.connect(self._verify)
self.license_config_button.clicked.connect(self._open_license_dialog)
self.check_connection_button.clicked.connect(self.check_connection)
self._verify()
if self.fields_filled:
self.check_connection()
@property
def authentication_enabled(self) -> bool:
return self.authentication_checkbox.isChecked()
@property
def credentials(self) -> Optional[Tuple[str, str]]:
if not self.authentication_enabled:
return None
return self.server_username, self.server_password
@property
def fields_filled(self) -> bool:
if not self.server_address:
return False
if self.authentication_enabled:
if not self.server_username:
return False
if not self.server_password:
return False
return True
@property
def save_password(self) -> bool:
if self.authentication_enabled \
and self.save_password_checkbox.isChecked():
return True
return False
@property
def server_address(self) -> str:
return self.server_address_line_edit.text()
@property
def server_password(self) -> Optional[str]:
if self.authentication_enabled:
return self.server_password_line_edit.text()
else:
return None
@property
def server_username(self) -> Optional[str]:
if self.authentication_enabled:
return self.server_username_line_edit.text()
else:
return None
    def accept(self):
        """Validate the entered server settings and close the dialog on success.

        Tries to reach the server with the new URL/credentials first; on
        failure the user may still press "Ignore" to save the settings anyway.
        """

        def _save_settings():
            # Persist the dialog fields into the QSettings-backed ServerSettings.
            self.server_config.server_url = self.server_address

            if self.authentication_enabled:
                self.server_config.server_username = self.server_username
                if self.save_password:
                    # Note that this is _not_ meant to be a form of security,
                    # simply to prevent the password from sitting in plain text
                    # on the client computer. The key is available in plain text
                    # within this repo.
                    encrypted = encrypt(self.server_password)
                    self.server_config.server_password = encrypted

            # Clear stale credentials when auth is disabled or the user chose
            # not to remember the password.
            if not self.authentication_enabled:
                del self.server_config.server_username
                del self.server_config.server_password
            if not self.save_password:
                del self.server_config.server_password

        try:
            api.set_url(self.server_address)
        except ValueError:
            title = self.tr("Invalid Schema")
            message = self.tr(
                "Unable to connect to a BrainFrame server with the provided "
                "URL schema. Supported schemas are {0} and {1}.") \
                .format("http://", "https://")
        else:
            api.set_credentials(self.credentials)
            try:
                api.version()
            except bf_errors.UnauthorizedError:
                title = self.tr("Server Authentication Error")
                message = self.tr(
                    "Unable to authenticate with the BrainFrame server. \n"
                    "Please recheck the entered credentials.")
            except bf_errors.ServerNotReadyError:
                title = self.tr("Connection Error")
                message = self.tr(
                    "Unable to connect to the BrainFrame server. \n"
                    "Please recheck the entered server address.")
            else:
                # Connection verified — persist and close normally.
                _save_settings()
                super().accept()
                return

        # Validation failed: warn, but offer to save the settings anyway.
        message = BrainFrameMessage.warning(
            parent=self,
            title=title,
            warning=message
        )
        message.add_button(standard_button=BrainFrameMessage.Ignore)

        result = message.exec()
        if result == BrainFrameMessage.Ignore:
            _save_settings()
            super().accept()
@classmethod
def show_dialog(cls, *, parent: QObject):
cls(parent=parent).exec()
def check_connection(self):
def on_success(license_state: LicenseState):
self.license_config_button.setEnabled(True)
license_link = "<br>"
license_link += self.tr(
"<a href='{license_docs_link}'>Download</a> a new one") \
.format(license_docs_link=LICENSE_DOCS_LINK)
if license_state is LicenseState.EXPIRED:
label_text = "❗"
report_text = self.tr("Expired License")
report_text += license_link
elif license_state is LicenseState.INVALID:
label_text = "❗"
report_text = self.tr("Invalid License")
report_text += license_link
elif license_state is LicenseState.MISSING:
label_text = "❗"
report_text = self.tr("Missing License")
report_text += license_link
elif license_state is LicenseState.VALID:
label_text = "✔️"
report_text = self.tr("Connection Successful")
else:
label_text = "❗"
report_text = self.tr("Unknown license state")
self.connection_status_label.setText(label_text)
self.connection_report_label.setText(report_text)
def on_error(exc: BaseException):
self.license_config_button.setDisabled(True)
if isinstance(exc, bf_errors.UnauthorizedError):
label_text = "❗"
report_text = self.tr("Invalid credentials")
elif isinstance(exc, bf_errors.ServerNotReadyError):
label_text = "❌"
report_text = self.tr("Unable to locate server")
else:
raise exc
self.connection_status_label.setText(label_text)
self.connection_report_label.setText(report_text)
QTAsyncWorker(self, self._check_connection,
on_success=on_success, on_error=on_error) \
.start()
def _check_connection(self) -> LicenseState:
# Create a temporary API object to check connection with
temp_api = BrainFrameAPI(self.server_address, self.credentials)
# Check connection
temp_api.version()
# Check license state
api_state = temp_api.get_license_info().state
license_state = LicenseState.from_api_state(api_state)
return license_state
def _open_license_dialog(self):
LicenseDialog.show_dialog(parent=self)
self.check_connection()
def _verify(self):
self.connection_status_label.setText("❓")
self.connection_report_label.setText("")
enabled = self.fields_filled
self.button_box.button(QDialogButtonBox.Ok).setEnabled(enabled)
self.check_connection_button.setEnabled(enabled)
def _show_authentication_fields(self, show: bool):
widgets = [self.server_username_label, self.server_username_line_edit,
self.server_password_label, self.server_password_line_edit,
self.save_password_checkbox]
for widget in widgets:
widget.setVisible(show)
| 1.820313 | 2 |
test.py | sfu-arch/uir-lib | 4 | 12763921 | <gh_stars>1-10
# Print every line of the sbt run log that contains the LOG marker.
with open('log', 'r') as logfile:
    for line in logfile:
        if 'LOG' in line:  # idiomatic membership test instead of __contains__
            print(line)

#pvahdatn@cs-arch-20:~/git/dandelion-lib$ sbt "testOnly dataflow.test03Tester" > log
| 2.1875 | 2 |
beckley/solver/solver.py | cypher-me/HAS-Qualifier-Challenges | 75 | 12763922 | from skyfield.api import EarthSatellite, Topos, load, load_file
import os,sys,stat,time
from pwnlib.tubes.remote import remote
from pwnlib.tubes.process import process
import re
##convert from Skyfield to Decimal
def c_to_d(coordinate):
    """Convert a Skyfield angle's string form to signed decimal degrees.

    Expects the ``str()`` rendering of a Skyfield angle, e.g.
    ``38deg 53' 24.0"`` or ``-77deg 02' 06.9"``.
    """
    coord_str = str(coordinate)
    coord_split = coord_str.split()
    degree = coord_split[0].split("deg", 2)[0]
    minute = coord_split[1].split("'", 2)[0]
    second = coord_split[2].split("\"", 2)[0]
    # Take the sign from the text itself: int("-0") == 0, so the old
    # `int(degree) < 0` test silently dropped the sign for angles
    # between 0 and -1 degree (e.g. -0deg 30').
    sign = -1 if degree.lstrip().startswith("-") else 1
    return sign * (abs(int(degree)) + float(minute) / 60 + float(second) / 3600)
#Establish Connection to challenge
if __name__ == "__main__":
Ticket = os.getenv("TICKET", "")
Host = os.getenv("HOST","172.17.0.1")
Port = int(os.getenv("PORT","3133"))
sock = remote(Host, Port)
if len(Ticket) >0:
sock.recv(128)
sock.send(Ticket + "\n")
##### Load TLE---------------------------------
text = """
REDACT
1 13337U 98067A 20087.38052801 -.00000452 00000-0 00000+0 0 9995
2 13337 51.6460 33.2488 0005270 61.9928 83.3154 15.48919755219337
"""
lines = text.strip().splitlines()
satellite = EarthSatellite(lines[1], lines[2], lines[0])
##### Set Time to match what is given in problem----------------------------
ts = load.timescale()
#Wait for process to start
time.sleep(4)
prompt = sock.recv(2000)
#Clear initial part of buffer
print prompt
#Store minutes and seconds
#Display for verification
timestr=re.search('at 21:(\d{2}):(\d{2})',prompt)
mins = timestr.group(1)
sec = timestr.group(2)
print "Time is 21:"+mins+":"+sec
#Find the URL for the cgi-bin server
ipsearch=re.search(b'http:\/\/([^\:]+):(\d+)\/.*',prompt)
ipaddr=ipsearch.group(1)
ipport=ipsearch.group(2)
#Load Time here
t = ts.utc(2020, 3, 26, 21, int(mins), int(sec))
##### Set point on ground------------------------------------------------
washmon = Topos('38.8891 N', '77.0354 W')
##### Generate position at time 't'---------------------------------------
geocentric = satellite.at(t)
subpoint = geocentric.subpoint()
##### Define Difference-------------------------------------------------
difference = satellite - washmon
##### Actually calculate azimuth----------------------------------------
topocentric = difference.at(t)
#Ask the topocentric position for its altitude and azimuth coordinates
##### Need to subtract 180# from AZ and
alt, az, distance = topocentric.altaz()
alt = (90-c_to_d(alt))
az =(c_to_d(az)+180)% 360
#In case need to verify actual distance from Washington Monument
#print(int(distance.km), 'km')
looklat=38.890
looklng=-77.03
lookrng=430000 #not exact just needs to be larger than ~250000 meters
lookhed=str(az)
looktlt=str(alt)
#FIX Docker HOSTS FILE FOR TESTING
os.system('echo \'172.17.0.1 test.com\' >> /etc/hosts')
#Setup Curl Request
url = ipaddr + ":" + ipport
cmd='curl http://'+url+'/cgi-bin/HSCKML.py?CAMERA='+str(looklng)+','+str(looklat)+','+str(lookrng)+','+str(looktlt)+','+str(lookhed)+' -H \'User-Agent: GoogleEarth/7.3.2.5815(X11;Linux (5.2.0.0);en;kml:2.2;client:Pro;type:default)\' -H \'Accept: application/vnd.google-earth.kml+xml, application/vnd.google-earth.kmz, image/*, */*\' -H \'Accept-Language: en-US, *\' -H \'Connection: keep-alive\''
print(cmd)
result = os.popen(cmd).readlines()
#Grep for flag{} in Curl output
for line in result:
m = re.search("flag{.*}", line)
if m:
print line.strip()
print m.group(0)
sys.stdout.flush()
| 2.625 | 3 |
setup.py | muellermartin/flipflop | 16 | 12763923 | <filename>setup.py
# Distribution metadata for the flipflop package.
# NOTE(review): despite the original "Bootstrap setuptools" comment, this
# file uses distutils.core.setup, not setuptools.
from distutils.core import setup

setup(
    name='flipflop',
    version='1.0',
    py_modules=['flipflop'],
    provides=['flipflop'],
    author='<NAME>',
    author_email='<EMAIL>',
    description='FastCGI wrapper for WSGI applications',
    url='https://github.com/Kozea/flipflop',
    license='BSD',
    classifiers=[
        "Development Status :: 4 - Beta",
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| 1.101563 | 1 |
src/robusta/core/reporting/consts.py | robusta-dev/robusta | 273 | 12763924 | from enum import Enum
SYNC_RESPONSE_SINK = "robusta-synchronized-response-sink"
class FindingType(Enum):
    """High-level category of a finding."""

    ISSUE = "issue"
    CONF_CHANGE = "configuration_change"
    HEALTH_CHECK = "health_check"
    REPORT = "report"
# Finding sources
class FindingSource(Enum):
    """Origin that produced a finding."""

    NONE = None  # empty default
    KUBERNETES_API_SERVER = "kubernetes_api_server"
    PROMETHEUS = "prometheus"
    MANUAL = "manual"
    CALLBACK = "callback"
# Finding subject types
class FindingSubjectType(Enum):
    """Kubernetes resource kind that a finding is about."""

    TYPE_NONE = None  # empty default
    TYPE_DEPLOYMENT = "deployment"
    TYPE_NODE = "node"
    TYPE_POD = "pod"
    TYPE_JOB = "job"
    TYPE_DAEMONSET = "daemonset"

    @staticmethod
    def from_kind(kind: str):
        """Map a lowercase kind string to its member; TYPE_NONE if unknown.

        The lookup table is built inside the method on purpose: a dict
        assigned in the class body would be treated as an Enum member.
        """
        by_kind = {
            "deployment": FindingSubjectType.TYPE_DEPLOYMENT,
            "node": FindingSubjectType.TYPE_NODE,
            "pod": FindingSubjectType.TYPE_POD,
            "job": FindingSubjectType.TYPE_JOB,
            "daemonset": FindingSubjectType.TYPE_DAEMONSET,
        }
        return by_kind.get(kind, FindingSubjectType.TYPE_NONE)
# Annotations
class SlackAnnotations:
    """Annotation keys (named for Slack rendering — confirm usage at sinks)."""

    UNFURL = "unfurl"
    ATTACHMENT = "attachment"
| 2.296875 | 2 |
protocol/arc_package_specifics_pb2.py | pablo-tx/chrome-sync-server | 8 | 12763925 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: arc_package_specifics.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='arc_package_specifics.proto',
package='sync_pb',
syntax='proto2',
serialized_pb=_b('\n\x1b\x61rc_package_specifics.proto\x12\x07sync_pb\"~\n\x13\x41rcPackageSpecifics\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x17\n\x0fpackage_version\x18\x02 \x01(\x05\x12\x1e\n\x16last_backup_android_id\x18\x03 \x01(\x03\x12\x18\n\x10last_backup_time\x18\x04 \x01(\x03\x42\x02H\x03')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_ARCPACKAGESPECIFICS = _descriptor.Descriptor(
name='ArcPackageSpecifics',
full_name='sync_pb.ArcPackageSpecifics',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='package_name', full_name='sync_pb.ArcPackageSpecifics.package_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='package_version', full_name='sync_pb.ArcPackageSpecifics.package_version', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_backup_android_id', full_name='sync_pb.ArcPackageSpecifics.last_backup_android_id', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_backup_time', full_name='sync_pb.ArcPackageSpecifics.last_backup_time', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=40,
serialized_end=166,
)
DESCRIPTOR.message_types_by_name['ArcPackageSpecifics'] = _ARCPACKAGESPECIFICS
ArcPackageSpecifics = _reflection.GeneratedProtocolMessageType('ArcPackageSpecifics', (_message.Message,), dict(
DESCRIPTOR = _ARCPACKAGESPECIFICS,
__module__ = 'arc_package_specifics_pb2'
# @@protoc_insertion_point(class_scope:sync_pb.ArcPackageSpecifics)
))
_sym_db.RegisterMessage(ArcPackageSpecifics)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\003'))
# @@protoc_insertion_point(module_scope)
| 1.226563 | 1 |
top_400/linked_list/237_delete_node_in_a_linked_list.py | Fernadoo/LeetCode | 0 | 12763926 | <filename>top_400/linked_list/237_delete_node_in_a_linked_list.py
'''
Write a function to delete a node (except the tail) in a singly
linked list, given only access to that node.
Given linked list -- head = [4,5,1,9], which looks like following:
Example 1:
Input: head = [4,5,1,9], node = 5
Output: [4,1,9]
Explanation: You are given the second node with value 5, the linked
list should become 4 -> 1 -> 9 after calling your function.
Example 2:
Input: head = [4,5,1,9], node = 1
Output: [4,5,9]
Explanation: You are given the third node with value 1, the linked
list should become 4 -> 5 -> 9 after calling your function.
Note:
* The linked list will have at least two elements.
* All of the nodes' values will be unique.
* The given node will not be the tail and it will always be a valid node of the linked list.
* Do not return anything from your function.
'''
# Definition for singly-linked list.
class ListNode(object):
    """Singly linked list node holding a value and a next pointer."""

    def __init__(self, x):
        self.val = x
        self.next = None

    def __str__(self):
        """Render the chain from this node as ``v1-> v2-> ...-> NULL``."""
        # Collect the values and join once instead of quadratic `+=`
        # string concatenation.
        parts = []
        curr = self
        while curr is not None:
            parts.append(str(curr.val))
            curr = curr.next
        parts.append("NULL")
        return "-> ".join(parts)
class Solution(object):
    """LeetCode 237: delete a given (non-tail) node without the list head."""

    def deleteNode(self, node):
        """
        :type node: ListNode
        :rtype: void Do not return anything, modify node in-place instead.
        """
        # Copy the successor's payload into `node`, then splice the
        # successor out of the chain.
        successor = node.next
        node.val = successor.val
        node.next = successor.next
# Ad-hoc smoke test: build 1 -> 2 -> 3 -> 4 and delete the node holding 2.
sol = Solution()
x_1, x_2, x_3, x_4 = ListNode(1), ListNode(2), ListNode(3), ListNode(4)
x_1.next = x_2
x_2.next = x_3
x_3.next = x_4
# print(x_1)
sol.deleteNode(x_2)
print(x_1) | 4.0625 | 4 |
app/main.py | cds-snc/sre-bot | 0 | 12763927 | import os
import logging
from slack_bolt.adapter.socket_mode import SocketModeHandler
from slack_bolt import App
from dotenv import load_dotenv
from commands import incident, sre
from commands.helpers import incident_helper, webhook_helper
from server import bot_middleware, server
from jobs import scheduled_tasks
server_app = server.handler
logging.basicConfig(level=logging.INFO)
load_dotenv()
def main():
    """Wire up the Slack bot, register all handlers, and connect via Socket Mode."""
    SLACK_TOKEN = os.environ.get("SLACK_TOKEN")
    APP_TOKEN = os.environ.get("APP_TOKEN")
    # PREFIX namespaces slash commands per environment; "" is treated as
    # the non-dev environment below.
    PREFIX = os.environ.get("PREFIX", "")

    bot = App(token=SLACK_TOKEN)

    # Add bot to server_app
    server_app.add_middleware(bot_middleware.BotMiddleware, bot=bot)

    # Register incident events
    bot.command(f"/{PREFIX}incident")(incident.open_modal)
    bot.view("incident_view")(incident.submit)
    bot.action("handle_incident_action_buttons")(
        incident.handle_incident_action_buttons
    )

    # Incident events
    bot.action("add_folder_metadata")(incident_helper.add_folder_metadata)
    bot.action("view_folder_metadata")(incident_helper.view_folder_metadata)
    bot.view("view_folder_metadata_modal")(incident_helper.list_folders)
    bot.view("add_metadata_view")(incident_helper.save_metadata)
    bot.action("delete_folder_metadata")(incident_helper.delete_folder_metadata)
    bot.action("archive_channel")(incident_helper.archive_channel_action)
    bot.view("view_save_incident_roles")(incident_helper.save_incident_roles)

    # Register SRE events
    bot.command(f"/{PREFIX}sre")(sre.sre_command)

    # Webhooks events
    bot.view("create_webhooks_view")(webhook_helper.create_webhook)
    bot.action("toggle_webhook")(webhook_helper.toggle_webhook)
    bot.action("reveal_webhook")(webhook_helper.reveal_webhook)

    SocketModeHandler(bot, APP_TOKEN).connect()

    # Run scheduled tasks if not in dev
    if PREFIX == "":
        scheduled_tasks.init(bot)
        stop_run_continuously = scheduled_tasks.run_continuously()
        # Stop the background scheduler thread when the server shuts down.
        server_app.add_event_handler("shutdown", lambda: stop_run_continuously.set())
server_app.add_event_handler("shutdown", lambda: stop_run_continuously.set())
server_app.add_event_handler("startup", main)
| 1.945313 | 2 |
plot.py | thomasg3/energy-price-aware-scheduling | 1 | 12763928 | <gh_stars>1-10
#!/usr/bin/env python
# make a visualisation of the results, filtered and with the optimal added to the plot or not.
import os
import argparse
import cPickle as pickle
import matplotlib.pyplot as plt
def main(name_filter, optimal_too):
    """Render one PNG per pickled run result found in results/runs.

    name_filter: only run files whose name contains this substring are
        plotted (renamed from `filter`, which shadowed the builtin; the
        entry point calls positionally, so callers are unaffected).
    optimal_too: when True, overlay the optimal schedule's power usage.
    """
    run_location = os.path.join("results", "runs")
    image_location = os.path.join("results", "img")
    if not os.path.isdir(image_location):
        os.makedirs(image_location)

    # Run results are pickles with a ".p" extension.
    files = [f for f in os.listdir(run_location)
             if f.split('.')[-1] == 'p' and name_filter in f]
    for index, file_name in enumerate(files):
        with open(os.path.join(run_location, file_name), 'r') as stream:
            result = pickle.load(stream)

        time_slots = range(len(result.prediction.actual_values))
        # Half-hour slot labels, thinned to one label per two hours.
        # NOTE(review): computed but never applied to an axis — confirm intent.
        time_slot_names = ["{:02d}:{:02d}".format(t // 2, (t % 2) * 30) for t in time_slots]
        time_slot_names = [t if i % 4 == 0 else "" for i, t in enumerate(time_slot_names)]

        power_usage = result.forecasted_schedule.power_usage()
        if optimal_too:
            optimal_usage = result.optimal_schedule.power_usage()

        # Prices on the left axis, schedule power usage on a twin right axis.
        fig, ax1 = plt.subplots()
        ax1.set_title(file_name.split('.')[0])
        ax1.plot(time_slots, result.prediction.actual_values, 'g', label="actual")
        ax1.plot(time_slots, result.prediction.prediction_values, 'r', label="prediction")
        ax2 = ax1.twinx()
        ax2.bar(time_slots, power_usage, alpha=0.2)
        if optimal_too:
            ax2.bar(time_slots, optimal_usage, color="g", alpha=0.2)

        image_name = file_name.split('.')[0] + '.png'
        image_full_name = os.path.join(image_location, image_name)
        # Touch the file first so savefig always has a target to overwrite.
        if not os.path.isfile(image_full_name):
            open(image_full_name, 'a').close()
        plt.savefig(image_full_name)
        plt.close()
        print("{}/{} Saved {}".format(index + 1, len(files), image_full_name))
if __name__ == '__main__':
    parser = argparse.ArgumentParser("Plot experiment results")
    # Substring filter applied to run filenames; "" (default) matches all runs.
    parser.add_argument("-r", "--result", help="filter the results to plot", default="")
    parser.add_argument("-o", help="plot optimal schedule in graph", action="store_true")
    args = parser.parse_args()
    main(args.result, args.o)
| 2.34375 | 2 |
A3/code/part4.py | ankurshaswat/COL780 | 1 | 12763929 | """
Move object from one visual marker to another
"""
import sys
import cv2
import numpy as np
import obj_loader
from utils import (calculate_dist_corners, get_camera_params,
get_matrix, load_ref_images, render, get_homographies_contour)
if __name__ == "__main__":
OBJ_PATH = sys.argv[1]
OBJ = obj_loader.OBJ(OBJ_PATH, swapyz=True)
REF_IMAGES, REF_DSC = load_ref_images()
VID_FEED = cv2.VideoCapture(-1)
CAM_MAT = get_camera_params()
REACHED_X, REACHED_Y = 0, 0
MATCH_DATA = [None, None]
CORNER_DATA = [None, None]
while True:
RET, FRAME = VID_FEED.read()
if not RET:
print("Unable to capture video")
sys.exit()
MATCH_DATA, CORNER_DATA = get_homographies_contour(FRAME, REF_IMAGES, MATCH_DATA, CORNER_DATA)
if cv2.waitKey(1) & 0xFF == ord("q"):
break
if MATCH_DATA[0] is not None and MATCH_DATA[1] is not None:
HOMOGRAPHY1 = MATCH_DATA[0]
HOMOGRAPHY2 = MATCH_DATA[1]
CORNER1 = CORNER_DATA[0]
CORNER2 = CORNER_DATA[1]
PROJ_MAT1, R, T = get_matrix(CAM_MAT, HOMOGRAPHY1)
DIST = calculate_dist_corners(CORNER1, CORNER2)
DIST_X = DIST[0]
DIST_Y = DIST[1]
STEP_X = DIST_X/40
STEP_Y = DIST_Y/40
if abs(REACHED_X) >= abs(DIST_X) or abs(REACHED_Y) >= abs(DIST_Y):
REACHED_X = 0
REACHED_Y = 0
else:
REACHED_X += STEP_X
REACHED_Y += STEP_Y
TRANS = np.array(
[[1, 0, REACHED_X], [0, 1, REACHED_Y], [0, 0, 1]])
PROJ_MAT1 = np.dot(TRANS, PROJ_MAT1)
FRAME = render(FRAME, OBJ, PROJ_MAT1, REF_IMAGES[0], False)
cv2.imshow("frame", FRAME)
else:
cv2.imshow("frame", FRAME)
VID_FEED.release()
cv2.destroyAllWindows()
| 2.71875 | 3 |
torchsolver/utils/counter.py | killf/torchsolver | 0 | 12763930 | <gh_stars>0
import torch
import numpy as np
__ALL__ = ["Counter"]
class Counter:
    """Accumulates named metric values and reports their running means."""

    def __init__(self):
        # Maps metric name -> list of recorded values.
        self.data = dict()

    def append(self, **kwargs):
        """Record one value per named metric, e.g. ``counter.append(loss=0.3)``."""
        for name, value in kwargs.items():
            self.set(name, value)

    def set(self, key, value):
        """Append a single value under *key*, detaching torch tensors to numpy."""
        if isinstance(value, torch.Tensor):
            value = value.cpu().numpy()
        self.data.setdefault(key, []).append(value)

    def get(self, key):
        """Return the mean of all values recorded for *key*, or 0 if none."""
        try:
            values = self.data[key]
        except KeyError:
            return 0
        return np.mean(values)

    def __getattr__(self, key):
        # Unknown attribute access falls back to the mean lookup, so
        # ``counter.loss`` is shorthand for ``counter.get("loss")``.
        return self.get(key)
| 2.46875 | 2 |
class SortStrategy:
    """Strategy interface: concrete sorters implement sort(dataset)."""

    def sort(self, dataset):
        """Return *dataset* sorted; must be overridden by subclasses.

        Raises instead of silently returning None (the old ``pass`` body)
        so a missing override fails loudly. All in-file callers use the
        concrete subclasses, which override this method.
        """
        raise NotImplementedError("subclasses must implement sort()")
class BubbleSortStrategy(SortStrategy):
    """Bubble-sort strategy (pattern demo).

    NOTE(review): placeholder — it only announces itself and returns
    *dataset* unchanged; no actual sorting is performed.
    """

    def sort(self, dataset):
        print('Sorting using bubble sort')
        return dataset
class QuickSortStrategy(SortStrategy):
    """Quick-sort strategy (pattern demo).

    NOTE(review): placeholder — it only announces itself and returns
    *dataset* unchanged; no actual sorting is performed.
    """

    def sort(self, dataset):
        print('Sorting using quick sort')
        return dataset
class Sorter:
    """Context object that delegates sorting to a pluggable strategy."""

    _sorter = None  # strategy instance; assigned per-instance in __init__

    def __init__(self, sorter):
        self._sorter = sorter

    def sort(self, dataset):
        """Sort *dataset* using the configured strategy."""
        return self._sorter.sort(dataset)
# Demo: the same dataset run through two interchangeable strategies.
dataset = [1, 5, 4, 3, 2, 8]

sorter = Sorter(BubbleSortStrategy())
sorter.sort(dataset)

sorter = Sorter(QuickSortStrategy())
sorter.sort(dataset) | 3.15625 | 3 |
coding/python/day03/02_read_json_multiline.py | dkapanidis/my-tutorials | 0 | 12763932 | from pyspark.sql import SparkSession
spark = SparkSession.builder.getOrCreate()

# multiLine=True is required for JSON records that span multiple lines.
df = spark.read.option("multiLine", True).json("people_multiline.json")
# show() prints the frame itself and returns None; wrapping it in print()
# produced a spurious trailing "None" line.
df.show()
1216. Valid Palindrome III .py | LiaoJJ/LeetCode | 1 | 12763933 | '''
BF C(n,k)*n O(n!)
LeetCode 516
DP: find the longest palindromic subsequence, O(n^2)
'''
class Solution:
    """LeetCode 1216: s is a k-palindrome iff removing at most k characters
    makes it a palindrome, i.e. len(s) - LPS(s) <= k, where LPS is the
    longest palindromic subsequence (cf. LeetCode 516).
    """

    def isValidPalindrome(self, s: str, k: int) -> bool:
        """Return True if *s* can become a palindrome by deleting <= k chars.

        O(n^2) time and space DP over the longest palindromic subsequence.
        """
        n = len(s)  # bug fix: was len(str) — called len() on the builtin type
        if n == 0:
            return True
        # dp[i][j] = length of the longest palindromic subsequence of s[i..j].
        dp = [[0] * n for _ in range(n)]
        # Iterate from n-1 (not n-2) so dp[n-1][n-1] is initialised too;
        # the old loop returned False for single-character strings with k=0.
        for i in range(n - 1, -1, -1):
            dp[i][i] = 1  # every single character is a palindrome
            for j in range(i + 1, n):
                if s[i] == s[j]:
                    dp[i][j] = dp[i + 1][j - 1] + 2
                else:
                    dp[i][j] = max(dp[i + 1][j], dp[i][j - 1])
        # Fixable with k deletions iff LPS(s) + k covers the whole string.
        return dp[0][n - 1] + k >= n
gen_model.py | munnellg/elltm | 3 | 12763934 | import sys
import logging
import gensim
from optparse import OptionParser
from docs import config
logging.basicConfig(format="%(asctime)s : %(levelname)s : %(message)s", level=logging.INFO)
# Build this program's option parser
def build_opt_parser():
    """Create the OptionParser describing this script's CLI options."""
    usage = "usage: %prog [options] <filename> [, <filename>, ...]"
    parser = OptionParser(usage=usage)
    parser.add_option("-o", "--model-out", dest="model_out",
                      default=config.default_model_out, metavar="FILE",
                      help="Output file for the model generated from corpus"
                      )
    parser.add_option("-m", "--model-type", dest="model_type",
                      default=config.default_model_type,
                      help="The type of model the program should produce. Valid inputs are lsa, lsi, lda"
                      )
    parser.add_option("-t", "--num-topics", dest="num_topics",
                      default=config.default_num_topics, type="int",
                      help="The number of topics under which the program should attempt to group the corpus"
                      )
    return parser
# Parse commandline arguments using OptionParser given
def parse_arguments(parser):
    """Parse argv via *parser*; normalise the model type and validate arg count.

    Falls back to the configured default model type (with a warning) when an
    unknown type is given, and prints help + exits when fewer than two
    positional arguments (dictionary file, corpus file) are supplied.
    """
    options, args = parser.parse_args()

    # Model-type comparison is case-insensitive.
    options.model_type = options.model_type.lower()

    valid_types = (config.code_lsa, config.code_lsi, config.code_lda)
    if options.model_type not in valid_types:
        logging.warning(
            "Invalid model type \"{}\". Reverting to default \"{}\"".format(
                options.model_type, config.default_model_type
            )
        )
        options.model_type = config.default_model_type

    # main() consumes args[0] (dictionary) and args[1] (corpus).
    if len(args) < 2:
        parser.print_help()
        exit()

    return options, args
# Generates and LDA model using dictionary and vectors passed as arguments. Writes model to file
def gen_lda_model(corpus, dictionary, model_out, num_topics):
    """Train an LDA model over *corpus*, save it to *model_out*, and return it.

    Extra hyper-parameters come from ``config.lda_settings``.
    """
    model = gensim.models.ldamodel.LdaModel(
        corpus=corpus, id2word=dictionary, num_topics=num_topics,
        **config.lda_settings
    )
    model.save(model_out)
    return model
# Generates and LSI model using dictionary and vectors passed as arguments. Writes model to file
def gen_lsi_model(corpus, dictionary, model_out, num_topics):
    """Train an LSI model over *corpus*, save it to *model_out*, and return it.

    Extra hyper-parameters come from ``config.lsi_settings``.
    """
    model = gensim.models.lsimodel.LsiModel(
        corpus=corpus, id2word=dictionary, num_topics=num_topics,
        **config.lsi_settings
    )
    model.save(model_out)
    return model
# Generates the type of model selected using the information passed as arguments
def gen_model(model_type, corpus, dictionary, model_out, num_topics):
    """Build the model selected by *model_type* (lsa/lsi -> LSI, lda -> LDA).

    Returns the trained model, or None for an unrecognised type (which
    parse_arguments already prevents).
    """
    if model_type in (config.code_lsi, config.code_lsa):
        builder = gen_lsi_model
    elif model_type == config.code_lda:
        builder = gen_lda_model
    else:
        return None
    return builder(corpus, dictionary, model_out, num_topics)
# Main function
def main():
    """Entry point: load dictionary and corpus from argv, build the model."""
    options, args = parse_arguments(build_opt_parser())
    # args[0]: serialized gensim Dictionary; args[1]: Matrix Market corpus.
    dictionary = gensim.corpora.Dictionary.load(args[0])
    corpus = gensim.corpora.MmCorpus(args[1])
    gen_model(
        options.model_type, corpus, dictionary,
        options.model_out, options.num_topics,
    )
if __name__ == "__main__":
main()
| 2.734375 | 3 |
neodroidvision/entry_points/cli.py | sintefneodroid/vision | 0 | 12763935 | <reponame>sintefneodroid/vision<filename>neodroidvision/entry_points/cli.py<gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "<NAME>"
__doc__ = r"""
Created on 09/02/2020
"""
import fire
from neodroidvision import get_version
from pyfiglet import Figlet
import draugr
sponsors = "SINTEF Ocean, Alexandra Institute, <NAME>"
margin_percentage = 0 / 6
terminal_width = draugr.get_terminal_size().columns
margin = int(margin_percentage * terminal_width)
width = terminal_width - 2 * margin
underline = "_" * width
indent = " " * margin
class NeodroidVisionCLI(object):
    """Command container exposed through ``fire.Fire`` in :func:`main`."""
    def run(self) -> None:
        """Placeholder command; currently a no-op."""
        pass
    @staticmethod
    def version() -> None:
        """
        Prints the version of this Neodroid installation.
        """
        draw_cli_header()
        print(f"Version: {get_version()}")
    @staticmethod
    def sponsors() -> None:
        """Print the module-level ``sponsors`` string."""
        print(sponsors)
def draw_cli_header(*, title="Neodroid Vision", font="big"):
    """Print *title* as a centered figlet banner, followed by a rule line."""
    renderer = Figlet(font=font, justify="center", width=terminal_width)
    print(f"{renderer.renderText(title)}{underline}\n")
def main(*, always_draw_header=False):
    """CLI entry point: optionally draw the banner, then dispatch via fire."""
    if always_draw_header:
        draw_cli_header()
    fire.Fire(NeodroidVisionCLI, name="neodroid-vision")
if __name__ == "__main__":
main()
| 2.203125 | 2 |
blog/context_processor/context.py | misterpingvinist/skeletpingvina.xyz | 0 | 12763936 | from ..models import About
from blog.forms import ContactForm
from blog.models import Category
def about(request):
    """Context processor exposing the About record, all categories and a
    blank contact form to every template."""
    # NOTE(review): assumes at least one About row exists — [0] raises
    # IndexError on an empty table; confirm seeding.
    about_obj = About.objects.all()[0]
    categories = Category.objects.all()
    return {
        'about_title': about_obj.title,
        'about_text': about_obj.text,
        'about_image': about_obj.image.url,
        'category': categories,
        'form_request': ContactForm(),
    }
| 2.0625 | 2 |
delivery/models.py | choudhary2001/ecommerce | 2 | 12763937 | <reponame>choudhary2001/ecommerce<gh_stars>1-10
from django.db import models
# Create your models here.
from django.contrib.auth.models import User
from django.utils.html import mark_safe
from django.db import models
from PIL import Image
# Create your models here.
class DeliveryBoy(models.Model):
    """Profile, contact, address and payout details for a delivery person.

    One-to-one with ``auth.User``; the linked User is also the primary key.
    """
    # (stored value, display label) pairs for ``state``; labels mirror values.
    STATE_CHOICES = (
        ("Andaman & Nicobar Islands",'Andaman & Nicobar Islands'),
        ("Andhra Pradesh",'Andhra Pradesh'),
        ("Arunachal Pradesh",'Arunachal Pradesh'),
        ("Assam",'Assam'),
        ("Bihar",'Bihar'),
        ("Chandigarh",'Chandigarh'),
        ("Chhattisgarh",'Chhattisgarh'),
        ("Dadra & Nagar Haveli",'Dadra & Nagar Haveli'),
        ("Daman and Diu",'Daman and Diu'),
        ("Delhi",'Delhi'),
        ("Goa",'Goa'),
        ("Gujarat",'Gujarat'),
        ("Haryana",'Haryana'),
        ("Himachal Pradesh",'Himachal Pradesh'),
        ("Jammu & Kashmir",'Jammu & Kashmir'),
        ("Jharkhand",'Jharkhand'),
        ("Karnataka",'Karnataka'),
        ("Kerala",'Kerala'),
        ("Lakshadweep",'Lakshadweep'),
        ("Madhya Pradesh",'Madhya Pradesh'),
        ("Maharashtra",'Maharashtra'),
        ("Manipur",'Manipur'),
        ("Meghalaya",'Meghalaya'),
        ("Mizoram",'Mizoram'),
        ("Nagaland",'Nagaland'),
        ("Odisha",'Odisha'),
        ("Puducherry",'Puducherry'),
        ("Punjab",'Punjab'),
        ("Rajasthan",'Rajasthan'),
        ("Sikkim",'Sikkim'),
        ("Tamil Nadu",'Tamil Nadu'),
        ("Telangana",'Telangana'),
        ("Tripura",'Tripura'),
        ("Uttarakhand",'Uttarakhand'),
        ("Uttar Pradesh",'Uttar Pradesh'),
        ("West Bengal",'West Bengal'),
    )
    # Identity and contact details
    first_name = models.CharField(max_length=255)
    last_name = models.CharField(max_length=255)
    username = models.CharField(max_length=300, blank=True)
    user = models.OneToOneField(User, related_name='deliveryboy' ,on_delete=models.CASCADE,primary_key=True)
    email = models.EmailField()
    photo = models.ImageField(default='user.png',upload_to='user_photos/')
    mobile = models.CharField(max_length=12,null=True)
    # Government ID (Aadhaar) details
    adhar_Number = models.CharField(max_length=15,null=True)
    adhar_photo = models.ImageField(upload_to='aadhar/')
    alternate_mobile = models.CharField(max_length=10,null=True,blank=True)
    # Postal address
    address = models.TextField()
    pincode = models.CharField(max_length=6, null=True)
    landmark = models.CharField(max_length=500, null=True, blank=True)
    locality = models.CharField(max_length=100, null=True, blank=True)
    city = models.CharField(max_length=100, null=True, blank=True)
    state = models.CharField(max_length=50,choices=STATE_CHOICES, null=True)
    # Bank details used for payouts
    account_Holder_Name = models.CharField(max_length=50, null=True)
    account_Number = models.CharField(max_length=20, null=True)
    ifsc_Code = models.CharField(max_length=11, null=True)
    def image_tag(self):
        """Return a small HTML <img> preview of ``photo`` (e.g. for the admin)."""
        return mark_safe('<img src="%s" width="50" height="50" />' % (self.photo.url))
    class Meta:
        # Default queryset ordering for delivery-boy listings.
        ordering = ['first_name']
    def __str__(self):
        # NOTE(review): renders as the Aadhaar number rather than the name —
        # confirm this is intended wherever the object is displayed.
        return self.adhar_Number
| 2.28125 | 2 |
meutils/aizoo/examples/pooling.py | Jie-Yuan/MeUtils | 3 | 12763938 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : inn.
# @File : pooling
# @Time : 2020/5/20 6:23 下午
# @Author : yuanjie
# @Email : <EMAIL>
# @Software : PyCharm
# @Description :
import tensorflow as tf
# 类似np
tf.keras.layers.GlobalMaxPooling1D
tf.keras.layers.GlobalAveragePooling1D
# 步长池化
tf.keras.layers.MaxPooling1D
tf.keras.layers.AveragePooling1D
| 1.90625 | 2 |
yanghui/116connect.py | technologyMz/ALL_IN_ML | 3 | 12763939 | """
# Definition for a Node.
"""
import collections
class Node:
    """Binary-tree node that also carries a ``next`` pointer to the node's
    right neighbour on the same level (LeetCode 116)."""

    def __init__(self, val: int = 0, left: 'Node' = None, right: 'Node' = None, next: 'Node' = None):
        self.val, self.left, self.right, self.next = val, left, right, next
class Solution:
    def connect_DFX(self, root):
        """Populate each node's ``next`` pointer with its right neighbour on
        the same level, linking the last node of every level to None.

        Standard level-order (BFS) traversal with an explicit per-level
        queue. Returns the (modified) root.
        """
        if not root:
            return root
        current_level = collections.deque([root])
        while current_level:
            next_level = collections.deque()
            while current_level:
                node = current_level.popleft()
                # The next node still queued on this level (if any) is this
                # node's right neighbour.
                node.next = current_level[0] if current_level else None
                # BUG FIX: the original appended both children whenever either
                # existed, enqueuing None for a lone child; guard each append.
                if node.left is not None:
                    next_level.append(node.left)
                if node.right is not None:
                    next_level.append(node.right)
            current_level = next_level
        # BUG FIX: the original fell off the end (implicitly returning None)
        # and then hit a stray print(due) referencing an undefined name.
        return root
# 构造二叉树t, BOTTOM-UP METHOD
# 1
# / \
# 2 3
# / \ / \
# 4 5 6 7
# / \ / \ / \ / \
# 8 9 10 11 12 13 14 15
# Build the 15-node perfect binary tree pictured above in level order:
# node k's children are nodes 2k+1 and 2k+2 (values 1..15).
nodes = [Node(value) for value in range(1, 16)]
for k in range(7):  # internal nodes are indices 0..6
    nodes[k].left = nodes[2 * k + 1]
    nodes[k].right = nodes[2 * k + 2]
tree1 = nodes[0]

solution = Solution()
merged_tree = solution.connect_DFX(tree1)
print(merged_tree)
| 3.921875 | 4 |
test.py | lhq1/legal-predicetion | 87 | 12763940 | import argparse
import os
import torch
from net.model import get_model
from net.file_reader import init_dataset, init_transformer
from net.work import test_file
from net.parser import ConfigParser
from net.loader import init
from net.utils import init_thulac
parser = argparse.ArgumentParser()
parser.add_argument('--model', '-m')
parser.add_argument('--gpu', '-g')
parser.add_argument('--config', '-c')
args = parser.parse_args()

configFilePath = args.config
if configFilePath is None:
    # BUG FIX: the original printed the usage hint but kept running, which
    # crashed later in ConfigParser(None); abort explicitly instead.
    print("python *.py\t--config/-c\tconfigfile")
    raise SystemExit(1)

# Use the GPU only when one was explicitly selected on the command line.
if args.gpu is None:
    usegpu = False
else:
    usegpu = True
    os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu

config = ConfigParser(configFilePath)
# Run data loading in-process (no worker subprocesses) during testing.
config.config.set("train", "train_num_process", 0)
def self_init():
    """Run the one-off global initialisation steps that depend on ``config``."""
    init(config)
    init_transformer(config)
    init_thulac(config)
self_init()
train_dataset, test_dataset = init_dataset(config)
print("Building net...")
model_name = config.get("net", "name")
net = get_model(model_name, config, usegpu)
print("Net building done.")
print("Loading model...")
# Restore the trained weights from the checkpoint given via --model.
net.load_state_dict(torch.load(args.model))
if torch.cuda.is_available() and usegpu:
    net = net.cuda()
print("Model loaded.")
print("Testing model...")
test_file(net, test_dataset, usegpu, config, 0)
print("Test done.")
# Tear down the datasets' background reader processes so the script can exit;
# the alive-state is printed before and after join for debugging.
for x in train_dataset.read_process:
    x.terminate()
    print(x, x.is_alive())
    x.join()
    print(x, x.is_alive())
for x in test_dataset.read_process:
    x.terminate()
    print(x, x.is_alive())
    x.join()
    print(x, x.is_alive())
| 2.28125 | 2 |
ibis/pandas/core.py | emilyreff7/ibis | 5 | 12763941 | """The pandas backend is a departure from the typical ibis backend in that it
doesn't compile to anything, and the execution of the ibis expression
is under the purview of ibis itself rather than executing SQL against a server.
Design
------
The pandas backend uses a technique called `multiple dispatch
<https://en.wikipedia.org/wiki/Multiple_dispatch>`_, implemented in a
third-party open source library called `multipledispatch
<https://github.com/mrocklin/multipledispatch>`_.
Multiple dispatch is a generalization of standard single-dispatch runtime
polymorphism to multiple arguments.
Compilation
-----------
This is a no-op because we execute ibis expressions directly.
Execution
---------
Execution is divided into different dispatched functions, each arising from
a different use case.
A top level function `execute` exists to provide the API for executing an ibis
expression against in-memory data.
The general flow of execution is:
::
If the current operation is in scope:
return it
Else:
execute the arguments of the current node
execute the current node with its executed arguments
Specifically, execute is comprised of a series of steps that happen at
different times during the loop.
1. ``pre_execute``
------------------
First, at the beginning of the main execution loop, ``pre_execute`` is called.
This function serves a similar purpose to ``data_preload``, the key difference
being that ``pre_execute`` is called *every time* there's a call to execute.
By default this function does nothing.
2. ``execute_node``
-------------------
Second, when an expression is ready to be evaluated we call
:func:`~ibis.pandas.core.execute` on the expressions arguments and then
:func:`~ibis.pandas.dispatch.execute_node` on the expression with its
now-materialized arguments.
3. ``post_execute``
-------------------
The final step--``post_execute``--is called immediately after the previous call
to ``execute_node`` and takes the instance of the
:class:`~ibis.expr.operations.Node` just computed and the result of the
computation.
The purpose of this function is to allow additional computation to happen in
the context of the current level of the execution loop. That may sound vague,
so let's look at an example.
Let's say you want to take a three day rolling average, and you want to include
3 days of data prior to the first date of the input. You don't want to see that
data in the result for a few reasons, one of which is that it would break the
contract of window functions: given N rows of input there are N rows of output.
Defining a ``post_execute`` rule for :class:`~ibis.expr.operations.WindowOp`
allows you to encode such logic. One might want to implement this using
:class:`~ibis.expr.operations.ScalarParameter`, in which case the ``scope``
passed to ``post_execute`` would be the bound values passed in at the time the
``execute`` method was called.
"""
import collections
import datetime
import functools
import numbers
from typing import Any, Mapping, Optional, Sequence
import numpy as np
import pandas as pd
import toolz
import ibis
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
import ibis.expr.types as ir
import ibis.expr.window as win
import ibis.pandas.aggcontext as agg_ctx
import ibis.util
from ibis.client import find_backends
from ibis.pandas.dispatch import (
execute,
execute_first,
execute_literal,
execute_node,
post_execute,
pre_execute,
)
integer_types = np.integer, int
floating_types = (numbers.Real,)
numeric_types = integer_types + floating_types
boolean_types = bool, np.bool_
fixed_width_types = numeric_types + boolean_types
date_types = (datetime.date,)
time_types = (datetime.time,)
timestamp_types = pd.Timestamp, datetime.datetime, np.datetime64
timedelta_types = pd.Timedelta, datetime.timedelta, np.timedelta64
temporal_types = date_types + time_types + timestamp_types + timedelta_types
scalar_types = fixed_width_types + temporal_types
simple_types = scalar_types + (str, type(None))
@functools.singledispatch
def is_computable_input(arg):
    """All inputs are not computable without a specific override."""
    # singledispatch fallback: only types registered below are computable.
    return False
@is_computable_input.register(ibis.client.Client)
@is_computable_input.register(ir.Expr)
@is_computable_input.register(dt.DataType)
@is_computable_input.register(type(None))
@is_computable_input.register(win.Window)
@is_computable_input.register(tuple)
def is_computable_input_arg(arg):
    """Return whether `arg` is a valid computable argument."""
    # Shared implementation for every registered "computable" type above
    # (scalar types are registered separately right after this definition).
    return True
# Register is_computable_input for each scalar type (int, float, date, etc).
# We use consume here to avoid leaking the iteration variable into the module.
ibis.util.consume(
is_computable_input.register(t)(is_computable_input_arg)
for t in scalar_types
)
def get_node(obj):
    """Return the underlying :class:`Node` for *obj* via ``obj.op()``,
    or *obj* itself when it has no ``op`` method."""
    op_method = getattr(obj, "op", None)
    if callable(op_method):
        return op_method()
    return obj
def dependencies(expr: ir.Expr):
    """Compute the dependencies of an expression.
    Parameters
    ----------
    expr
        An ibis expression
    Returns
    -------
    dict
        Mapping from hashable objects to ibis expression inputs.
    See Also
    --------
    is_computable_input
    dependents
    """
    stack = [expr]
    dependencies = collections.defaultdict(list)
    while stack:
        expr = stack.pop()
        node = get_node(expr)
        # Unhashable nodes cannot serve as dict keys, so they never enter
        # the mapping.
        if isinstance(node, collections.abc.Hashable):
            if not isinstance(node, ops.Node):
                # Non-Node values (plain Python objects) are leaves with no
                # inputs of their own.
                dependencies[node] = []
            if node not in dependencies:
                computable_inputs = [
                    arg for arg in node.inputs if is_computable_input(arg)
                ]
                # Depth-first walk: push the inputs for later processing and
                # record them, in order, as this node's dependencies.
                stack.extend(computable_inputs)
                dependencies[node].extend(computable_inputs)
    return dict(dependencies)
def dependents(dependencies):
    """Invert a dependency mapping.

    Parameters
    ----------
    dependencies
        A mapping from hashable objects to ibis expression inputs.

    Returns
    -------
    dict
        A mapping from hashable objects to expressions that depend on the
        keys.

    See Also
    --------
    dependencies
    """
    inverted = collections.defaultdict(list)
    # Seed every known node so nodes nothing depends on still map to [].
    inverted.update((node, []) for node in dependencies)
    for node, deps in dependencies.items():
        for dep in deps:
            inverted[get_node(dep)].append(node.to_expr())
    return dict(inverted)
def toposort(expr: ir.Expr):
    """Topologically sort the nodes that underly `expr`.
    Parameters
    ----------
    expr
        An ibis expression.
    Returns
    -------
    Tuple
        A tuple whose first element is the topologically sorted values required
        to compute `expr` and whose second element is the dependencies of
        `expr`.
    """
    # compute dependencies and dependents
    parents = dependencies(expr)
    children = dependents(parents)
    # Kahn's algorithm: count the number of dependencies each node has
    indegree = toolz.valmap(len, parents)
    # queue up the nodes with no dependencies
    queue = collections.deque(
        node for node, count in indegree.items() if not count
    )
    toposorted = []
    while queue:
        node = queue.popleft()
        # invariant: every element of the queue has indegree 0, i.e., no
        # dependencies
        assert not indegree[node]
        toposorted.append(node)
        # remove the node -> child edge for every child of node
        for child in map(get_node, children[node]):
            indegree[child] -= 1
            # if we removed the last edge, enqueue the child
            if not indegree[child]:
                queue.append(child)
    return toposorted, parents
@execute.register(ir.Expr)
def main_execute(
    expr: ir.Expr,
    scope: Optional[Mapping] = None,
    aggcontext: Optional[agg_ctx.AggregationContext] = None,
    clients: Sequence[ibis.client.Client] = (),
    params: Optional[Mapping] = None,
    **kwargs: Any
):
    """Execute an ibis expression against the pandas backend.
    Parameters
    ----------
    expr
        The expression to execute.
    scope
        Mapping from already-computed nodes to their results.
    aggcontext
        Aggregation context; defaults to ``agg_ctx.Summarize()``.
    clients
        Participating backends; discovered from `expr` when empty.
    params
        Mapping from scalar parameters (or their nodes) to bound values.
    """
    toposorted, dependencies = toposort(expr)
    # Normalise param keys to nodes so lookups below are uniform.
    params = toolz.keymap(get_node, params if params is not None else {})
    # Add to scope the objects that have no dependencies and are not ibis
    # nodes. We have to filter out nodes for cases--such as zero argument
    # UDFs--that do not have any dependencies yet still need to be evaluated.
    full_scope = toolz.merge(
        scope if scope is not None else {},
        {
            key: key
            for key, parents in dependencies.items()
            if not parents and not isinstance(key, ops.Node)
        },
        params,
    )
    if not clients:
        clients = list(find_backends(expr))
    if aggcontext is None:
        aggcontext = agg_ctx.Summarize()
    # give backends a chance to inject scope if needed
    execute_first_scope = execute_first(
        expr.op(), *clients, scope=full_scope, aggcontext=aggcontext, **kwargs
    )
    full_scope = toolz.merge(full_scope, execute_first_scope)
    nodes = [node for node in toposorted if node not in full_scope]
    # compute the nodes that are not currently in scope
    for node in nodes:
        # allow clients to pre compute nodes as they like
        pre_executed_scope = pre_execute(
            node, *clients, scope=full_scope, aggcontext=aggcontext, **kwargs
        )
        # merge the existing scope with whatever was returned from pre_execute
        execute_scope = toolz.merge(full_scope, pre_executed_scope)
        # if after pre_execute our node is in scope, then there's nothing to do
        # in this iteration
        if node in execute_scope:
            full_scope = execute_scope
        else:
            # If we're evaluating a literal then we can be a bit quicker about
            # evaluating the dispatch graph
            if isinstance(node, ops.Literal):
                executor = execute_literal
            else:
                executor = execute_node
            # Gather the inputs we've already computed that the current node
            # depends on
            execute_args = [
                full_scope[get_node(arg)] for arg in dependencies[node]
            ]
            # execute the node with its inputs
            execute_node_result = executor(
                node,
                *execute_args,
                aggcontext=aggcontext,
                scope=execute_scope,
                clients=clients,
                **kwargs,
            )
            # last chance to perform any additional computation on the result
            # before it gets added to scope for the next node
            full_scope[node] = post_execute(
                node,
                execute_node_result,
                clients=clients,
                aggcontext=aggcontext,
                scope=full_scope,
            )
    # the last node in the toposorted graph is the root and maps to the desired
    # result in scope
    last_node = toposorted[-1]
    result = full_scope[last_node]
    return result
| 3.21875 | 3 |
tests/unit/test_reports.py | flying-sheep/profimp | 56 | 12763942 | # Copyright 2015: <NAME>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
import mock
from profimp import reports
from tests.unit import test
class ReportsTestCase(test.TestCase):
    """Unit tests for profimp.reports (JSON/HTML rendering, normalization)."""
    @mock.patch("profimp.reports._normalize")
    def test_to_json(self, mock_normalize):
        """to_json() should normalize the trace dict and pretty-print it."""
        results = mock.MagicMock()
        results.to_dict.return_value = {"a": 1, "b": 20}
        mock_normalize.return_value = {"c": 10}
        self.assertEqual(json.dumps(mock_normalize.return_value, indent=2),
                         reports.to_json(results))
        mock_normalize.assert_called_once_with(results.to_dict.return_value)
    @mock.patch("profimp.reports.to_json")
    @mock.patch("profimp.reports.open", create=True)
    def test_html(self, mock_open, mock_to_json):
        """to_html() should splice the JSON report into the HTML template."""
        # First (and only) open() call yields the template with a {{DATA}}
        # placeholder.
        mock_open.side_effect = [
            mock.mock_open(read_data="1_{{DATA}}_2").return_value
        ]
        mock_to_json.return_value = "ABC"
        template_path = os.path.join(
            os.path.dirname(reports.__file__), "templates", "report.tpl")
        results = mock.MagicMock()
        self.assertEqual("1_ABC_2", reports.to_html(results))
        mock_to_json.assert_called_once_with(results)
        mock_open.assert_called_once_with(template_path)
    def test_normalize(self):
        """_normalize() should rebase timestamps to 0 and scale to ms."""
        results = {
            "started_at": 1,
            "finished_at": 10,
            "children": [
                {
                    "started_at": 2,
                    "finished_at": 3,
                    "children": [
                        {
                            "started_at": 2.5,
                            "finished_at": 2.6,
                            "children": []
                        }
                    ]
                },
                {
                    "started_at": 3,
                    "finished_at": 4,
                    "children": []
                },
            ]
        }
        expected_normalized_results = {
            "started_at": 0.0,
            "finished_at": 9000.0,
            "children": [
                {
                    "started_at": 1000.0,
                    "finished_at": 2000.0,
                    "children": [
                        {
                            "started_at": 1500.0,
                            "finished_at": 1600.0,
                            "children": []
                        }
                    ]
                },
                {
                    "started_at": 2000.0,
                    "finished_at": 3000.0,
                    "children": []
                },
            ]
        }
        self.assertEqual(expected_normalized_results,
                         reports._normalize(results))
| 2.421875 | 2 |
src/uncertainty.py | QianWanghhu/IES-FF | 0 | 12763943 | <gh_stars>0
#!/usr/bin/env python
import numpy as np
import pandas as pd
# Note: do save the results while running the original model.
from multiprocessing import Pool
import numpy as np
import os
import matplotlib.pyplot as plt
from functools import partial
import time
import copy
import pandas as pd
import pickle
from scipy import stats
# from scipy.optimize import root
from scipy.optimize import bisect
import matplotlib as mpl
from matplotlib import rc
import spotpy as sp
from funcs.read_data import variables_prep, file_settings
from funcs.modeling_funcs import vs_settings, \
modeling_settings, paralell_vs, obtain_initials, change_param_values
mpl.rcParams['font.size'] = 16
mpl.rcParams['lines.linewidth'] = 3
mpl.rcParams['text.usetex'] = False # use latex for all text handling
mpl.rcParams['savefig.bbox'] = 'tight'
mpl.rcParams['savefig.format'] = 'jpg' # gives best resolution plots
mpl.rcParams['axes.labelsize'] = 20
mpl.rcParams['axes.titlesize'] = 20
mpl.rcParams['xtick.labelsize'] = 20
mpl.rcParams['ytick.labelsize'] = 20
mpl.rcParams['legend.fontsize'] = 16
# print mpl.rcParams.keys()
mpl.rcParams['text.latex.preamble'] = \
r'\usepackage{siunitx}\usepackage{amsmath}\usepackage{amssymb}'
# Create the copy of models and veneer list
project_name = 'MW_BASE_RC10.rsproj'
veneer_name = 'vcmd45\\FlowMatters.Source.VeneerCmd.exe'
first_port=15000; num_copies = 1
_, things_to_record, _, _, _ = modeling_settings()
processes, ports = paralell_vs(first_port, num_copies, project_name, veneer_name)
vs_list = vs_settings(ports, things_to_record)
# obtain the initial values of parameters
initial_values = obtain_initials(vs_list[0])
def run_source_lsq(vars, vs_list=vs_list):
    """
    Script used to run_source and return the output file.
    The function is called by AdaptiveLejaPCE.

    vars: 2-D array of parameter samples, one sample per COLUMN.
    Returns a column vector of Viney-F objective values, one per sample.

    NOTE(review): the ``vs_list`` default is bound once at import time to the
    module-level Veneer servers -- confirm that is intended.
    """
    from funcs.modeling_funcs import modeling_settings, generate_observation_ensemble
    import spotpy as sp
    print('Read Parameters')
    parameters = pd.read_csv('../data/Parameters-PCE2.csv', index_col='Index')
    # Use annual or monthly loads
    def timeseries_sum(df, temp_scale = 'annual'):
        """
        Obtain the sum of timeseries of different temporal scale.
        temp_scale: str, 'annual' (default) or 'monthly'.
        """
        assert temp_scale in ['monthly', 'annual'], 'The temporal scale given is not supported.'
        if temp_scale == 'monthly':
            sum_126001A = df.resample('M').sum()
        else:
            # Annual sums built from July-June blocks of 12 monthly sums.
            month_126001A = df.resample('M').sum()
            sum_126001A = pd.DataFrame(index = np.arange(df.index[0].year, df.index[-1].year),
                columns=df.columns)
            for i in range(sum_126001A.shape[0]):
                sum_126001A.iloc[i, :] = month_126001A.iloc[i*12: (i+1)*12, :].sum()
        return sum_126001A
    # End timeseries_sum()
    def viney_F(evaluation, simulation):
        # Viney's F: NSE penalised by a power of the absolute log-bias.
        pb = sp.objectivefunctions.pbias(evaluation, simulation) / 100
        nse = sp.objectivefunctions.nashsutcliffe(evaluation, simulation)
        F = nse - 5 *( np.abs(np.log(1 + pb)))**2.5
        return F
    # Define functions for the objective functions
    def cal_obj(x_obs, x_mod, obj_type = 'nse'):
        # One objective value per model-output column in x_mod.
        obj_map = {'nse': sp.objectivefunctions.nashsutcliffe,
                'rmse': sp.objectivefunctions.rmse,
                'pbias': sp.objectivefunctions.pbias,
                'viney': viney_F
                }
        obj = []
        for k in range(x_mod.shape[1]):
            obj.append(obj_map[obj_type](x_obs, x_mod[:, k].reshape(x_mod.shape[0], 1)))
        # if obj[0] == 0: obj[0] = 1e-8
        obj = np.array(obj)
        if obj_type =='pbias':
            obj = obj / 100
        obj = obj.reshape(obj.shape[0], 1)
        print(obj)
        return obj
    # End cal_obj
    # rescale samples to the absolute range
    # NOTE(review): vars_copy is created but never used below -- confirm the
    # commented-out rescaling is intentionally disabled.
    vars_copy = copy.deepcopy(vars)
    # vars_copy[0, :] = vars_copy[0, :] * 100
    # vars_copy[1, :] = vars_copy[1, :] * 100
    # import observation if the output.txt requires the use of obs.
    date_range = pd.to_datetime(['2009/07/01', '2018/06/30'])
    observed_din = pd.read_csv(f'{file_settings()[1]}126001A.csv', index_col='Date')
    observed_din.index = pd.to_datetime(observed_din.index)
    observed_din = observed_din.loc[date_range[0]:date_range[1], :].filter(items=[observed_din.columns[0]]).apply(lambda x: 1000 * x)
    # loop over the vars and try to use parallel
    parameter_df = pd.DataFrame(index=np.arange(vars.shape[1]), columns=parameters.Name_short)
    for i in range(vars.shape[1]):
        parameter_df.iloc[i] = vars[:, i]
    # set the time period of the results
    retrieve_time = [pd.Timestamp('2009-07-01'), pd.Timestamp('2018-06-30')]
    # define the modeling period and the recording variables
    _, _, criteria, start_date, end_date = modeling_settings()
    initial_values = obtain_initials(vs_list[0])
    din = generate_observation_ensemble(vs_list,
        criteria, start_date, end_date, parameter_df, retrieve_time, initial_values)
    # obtain the sum at a given temporal scale
    din_126001A = timeseries_sum(din, temp_scale = 'annual')
    obs_din = timeseries_sum(observed_din, temp_scale = 'annual')
    din_126001A = pd.DataFrame(din_126001A,dtype='float').values
    obs_din = pd.DataFrame(obs_din,dtype='float').values
    obj = cal_obj(obs_din, din_126001A, obj_type = 'viney')
    print(f'Finish {obj.shape[0]} run')
    # calculate the objective NSE and PBIAS
    obj_nse = cal_obj(obs_din, din_126001A, obj_type = 'nse')
    obj_pbias = cal_obj(obs_din, din_126001A, obj_type = 'pbias')
    # Stack samples and objectives row-wise for the training archive.
    train_iteration = np.append(vars, obj_nse.T, axis=0)
    train_iteration = np.append(train_iteration, obj_pbias.T, axis=0)
    train_iteration = np.append(train_iteration, obj.T, axis=0)
    # save sampling results of NSE and PBIAS
    train_file = 'outlier_samples.txt'
    if os.path.exists(train_file):
        train_samples = np.loadtxt(train_file)
        train_samples = np.append(train_samples, train_iteration, axis=1)
        np.savetxt(train_file, train_samples)
    else:
        np.savetxt(train_file, train_iteration)
    # END if-else
    return obj
# END run_source_lsq()
# Obtain samples satisfying the criteria
# Call the function to run DIN model with selected samples
fdir = '../output/gp_run_0816/sampling-sa/fix_mean_subreg/'
samples = np.loadtxt(f'{fdir}samples_fix_2.txt')
values = np.loadtxt(f'{fdir}values_fix_2.txt')

# Plot the results comparing GP and the original model outputs
if os.path.exists(f'{fdir}values_fix_2_filter.txt') and \
    (os.path.exists(f'{fdir}outlier_samples.txt')):
    # Cached results: GP predictions and the last (viney) row of model runs.
    y_outlier_gp = np.loadtxt(f'{fdir}values_fix_2_filter.txt')
    y_outlier_true = np.loadtxt(f'{fdir}outlier_samples.txt')[-1, :]
else:
    # Keep only samples whose GP objective lies in (0.382, 1).
    index_filter = np.where((values > 0.382) & (values < 1))[0]
    samples_filter = samples[:, index_filter]
    values_filter = values[index_filter]
    np.savetxt(f'{fdir}samples_fix_2_filter.txt', samples_filter)
    np.savetxt(f'{fdir}values_fix_2_filter.txt', values_filter)
    # run the main model
    obj_model = run_source_lsq(samples_filter, vs_list=vs_list)
    # NOTE(review): this writes to the CWD while the cached branch above reads
    # from fdir, so the cache can never be hit -- confirm intended location.
    np.savetxt('outlier_samples.txt', obj_model)
    # BUG FIX: bind the arrays used below; previously they were defined only
    # in the if-branch, so this path crashed with a NameError.
    y_outlier_gp = values_filter
    y_outlier_true = obj_model.flatten()

y_residuals = y_outlier_true.flatten() - y_outlier_gp[0:100].flatten()
ax = plt.scatter(y_outlier_true, y_residuals)
# plt.plot(np.arange(0, 10, 1)/10, np.arange(0, 10, 1)/10, linestyle='--')
plt.xlabel('Model output')
plt.ylabel('GP output')
| 1.851563 | 2 |
scripts/separateComponents.py | tatumdmortimer/t7ss | 0 | 12763944 | <gh_stars>0
#!/usr/bin/env python
import argparse
from collections import defaultdict
from Bio import SeqIO
# This gets conserved gene names from orthomcl output, checks that those genes
# are on the same component from plasmidSPAdes assembly, and outputs a file
# listing gene names.
def get_args():
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(description='Check plasmid ESX')
    for name, help_text in (("group", "OrthoMCL groups file"),
                            ("plasmid", "File containing plasmid info")):
        parser.add_argument(name, help=help_text)
    return parser.parse_args()
def get_orthomcl_genes(ortho_file):
    """Get gene names for conserved ESX genes from OrthoMCL output.

    Returns a defaultdict mapping strain -> six-slot list of locus tags
    [eccA, eccB, eccC, eccD, eccE, mycP], '-' where a gene is absent.
    """
    # OrthoMCL group id -> slot index in the per-strain gene list.
    group_to_slot = {
        'core1161': 0,  # eccA
        'core1077': 1,  # eccB
        'core1132': 2,  # eccC
        'core1266': 3,  # eccD
        'core2180': 3,  # eccD (second cluster)
        'core1198': 4,  # eccE
        'core1159': 5,  # mycP
    }
    gene_dict = defaultdict(lambda: ['-', '-', '-', '-', '-', '-'])
    with open(ortho_file, 'r') as infile:
        for line in infile:
            group, _, members = line.strip().partition(':')
            slot = group_to_slot.get(group)
            if slot is None:
                continue
            # The first token after the colon is skipped, matching the
            # original parsing of the groups file.
            for member in members.split()[1:]:
                locus_tag = member.split('|')[1]
                # Strain name = locus tag minus its 6-char numeric suffix.
                strain = locus_tag[:-6]
                gene_dict[strain][slot] = locus_tag
    return gene_dict
def get_component(geneDict, plasmidFile):
    """Map each strain listed in *plasmidFile* to the set of assembly
    components whose GFF features include that strain's conserved ESX genes."""
    componentDict = {}
    with open(plasmidFile, 'r') as infile:
        for line in infile:
            line = line.strip().split()
            strain = line[0]
            esx_genes = geneDict[strain]
            components = set()
            gff = 'newAnnotations/{0}_plasmid/{0}_plasmid.gff'.format(strain)
            with open(gff, 'r') as gff_file:
                for line in gff_file:
                    if line[0] != '#':
                        # A '>' marks the start of the FASTA section, i.e.
                        # the end of the feature records.
                        if line[0] == '>':
                            break
                        line = line.strip().split()
                        # NOTE(review): assumes the component id is the
                        # second-to-last character of the sequence name --
                        # confirm against the assembler's naming scheme.
                        component = line[0][-2]
                        # Column 9 attribute string starts with "ID=<gene>;".
                        gene = line[8].split(';')[0][3:]
                        if gene in esx_genes:
                            components.add(component)
            componentDict[strain] = components
    return componentDict
def create_fastas(componentDict, plasmidFile):
    """Write per-component FASTA files for every strain in plasmidFile.

    For each component with ESX genes, extracts the matching amino-acid
    (.faa) and nucleotide (.ffn) records and writes them to
    ``<strain>_<component>.faa`` / ``.ffn`` in the current directory.
    """
    with open(plasmidFile) as plasmid_handle:
        for plasmid_line in plasmid_handle:
            strain = plasmid_line.strip().split()[0]
            base = 'newAnnotations/{0}_plasmid/{0}_plasmid'.format(strain)
            aa_records = SeqIO.to_dict(SeqIO.parse(base + '.faa', "fasta"))
            nuc_records = SeqIO.to_dict(SeqIO.parse(base + '.ffn', "fasta"))
            for component in componentDict[strain]:
                aa_subset = []
                nuc_subset = []
                with open(base + '.gff', 'r') as gff_handle:
                    for record in gff_handle:
                        if record[0] == '#':
                            continue
                        if record[0] == '>':
                            # FASTA section at the end of the GFF: stop parsing
                            break
                        fields = record.strip().split()
                        if fields[0][-2] != component:
                            continue
                        gene = fields[8].split(';')[0][3:]
                        try:
                            aa_subset.append(aa_records[gene])
                            nuc_subset.append(nuc_records[gene])
                        except KeyError:
                            # annotated gene absent from the FASTA dictionaries
                            print("{0} does not exist".format(gene))
                SeqIO.write(aa_subset, "{0}_{1}.faa".format(strain, component), "fasta")
                SeqIO.write(nuc_subset, "{0}_{1}.ffn".format(strain, component), "fasta")
# Entry point: parse arguments, gather conserved ESX gene names per strain,
# locate the assembly components carrying them, and write per-component
# FASTA files. Guarded so importing this module has no side effects.
if __name__ == '__main__':
    args = get_args()
    geneDict = get_orthomcl_genes(args.group)
    componentDict = get_component(geneDict, args.plasmid)
    create_fastas(componentDict, args.plasmid)
| 3.125 | 3 |
ssd/src/ssd_model.py | chuanfuye/object_detection | 0 | 12763945 | from src.res50_backbone import resnet50
from torch import nn, Tensor
import torch
from torch.jit.annotations import Optional, List, Dict, Tuple, Module
from src.utils import dboxes300_coco, Encoder, PostProcess
class Backbone(nn.Module):
    """ResNet-50 feature extractor truncated after conv4_x, with the first
    conv4 block's downsampling disabled so the SSD feature map stays large."""

    def __init__(self, pretrain_path=None):
        super(Backbone, self).__init__()
        net = resnet50()
        self.out_channels = [1024, 512, 512, 256, 256, 256]
        if pretrain_path is not None:
            net.load_state_dict(torch.load(pretrain_path))
        # keep the ResNet children up to and including conv4_x (layer3)
        self.feature_extractor = nn.Sequential(*list(net.children())[:7])
        conv4_block1 = self.feature_extractor[-1][0]
        # change conv4_block1's stride from 2 to 1 to preserve spatial size
        for module in (conv4_block1.conv1,
                       conv4_block1.conv2,
                       conv4_block1.downsample[0]):
            module.stride = (1, 1)

    def forward(self, x):
        return self.feature_extractor(x)
class SSD300(nn.Module):
    """SSD detector for 300x300 inputs built on top of a supplied backbone.

    The backbone must expose ``out_channels`` giving the channel count of
    each of the six prediction feature maps.
    """
    def __init__(self, backbone=None, num_classes=21):
        super(SSD300, self).__init__()
        if backbone is None:
            raise Exception("backbone is None")
        if not hasattr(backbone, "out_channels"):
            raise Exception("the backbone not has attribute: out_channel")
        self.feature_extractor = backbone
        self.num_classes = num_classes
        # out_channels = [1024, 512, 512, 256, 256, 256] for resnet50
        self._build_additional_features(self.feature_extractor.out_channels)
        # default boxes predicted per spatial location on each feature map
        self.num_defaults = [4, 6, 6, 6, 4, 4]
        location_extractors = []
        confidence_extractors = []
        # out_channels = [1024, 512, 512, 256, 256, 256] for resnet50
        for nd, oc in zip(self.num_defaults, self.feature_extractor.out_channels):
            # nd is number_default_boxes, oc is output_channel
            location_extractors.append(nn.Conv2d(oc, nd * 4, kernel_size=3, padding=1))
            confidence_extractors.append(nn.Conv2d(oc, nd * self.num_classes, kernel_size=3, padding=1))
        self.loc = nn.ModuleList(location_extractors)
        self.conf = nn.ModuleList(confidence_extractors)
        self._init_weights()
        default_box = dboxes300_coco()
        self.compute_loss = Loss(default_box)
        self.encoder = Encoder(default_box)
        self.postprocess = PostProcess(default_box)
    def _build_additional_features(self, input_size):
        """
        Append a series of extra convolution blocks to the backbone (resnet50)
        to produce the remaining SSD feature maps.
        :param input_size: channel counts of the six prediction feature maps
        :return: None (populates ``self.additional_blocks``)
        """
        additional_blocks = []
        # input_size = [1024, 512, 512, 256, 256, 256] for resnet50
        middle_channels = [256, 256, 128, 128, 128]
        for i, (input_ch, output_ch, middle_ch) in enumerate(zip(input_size[:-1], input_size[1:], middle_channels)):
            # first three extra blocks halve spatial size (stride 2); the rest keep it
            padding, stride = (1, 2) if i < 3 else (0, 1)
            layer = nn.Sequential(
                nn.Conv2d(input_ch, middle_ch, kernel_size=1, bias=False),
                nn.BatchNorm2d(middle_ch),
                nn.ReLU(inplace=True),
                nn.Conv2d(middle_ch, output_ch, kernel_size=3, padding=padding, stride=stride, bias=False),
                nn.BatchNorm2d(output_ch),
                nn.ReLU(inplace=True),
            )
            additional_blocks.append(layer)
        self.additional_blocks = nn.ModuleList(additional_blocks)
    def _init_weights(self):
        # Xavier-initialise every weight tensor (dim > 1) in the extra blocks
        # and the location/confidence heads; biases keep their defaults.
        layers = [*self.additional_blocks, *self.loc, *self.conf]
        for layer in layers:
            for param in layer.parameters():
                if param.dim() > 1:
                    nn.init.xavier_uniform_(param)
    # Shape the classifier to the view of bboxes
    def bbox_view(self, features, loc_extractor, conf_extractor):
        """Apply the per-feature-map heads and flatten the outputs into
        [batch, 4, num_anchors] and [batch, num_classes, num_anchors]."""
        locs = []
        confs = []
        for f, l, c in zip(features, loc_extractor, conf_extractor):
            # [batch, n*4, feat_size, feat_size] -> [batch, 4, -1]
            locs.append(l(f).view(f.size(0), 4, -1))
            # [batch, n*classes, feat_size, feat_size] -> [batch, classes, -1]
            confs.append(c(f).view(f.size(0), self.num_classes, -1))
        locs, confs = torch.cat(locs, 2).contiguous(), torch.cat(confs, 2).contiguous()
        return locs, confs
    def forward(self, image, targets=None):
        """Training mode returns {'total_losses': loss}; eval mode returns
        post-processed detections."""
        x = self.feature_extractor(image)
        # Feature Map 38x38x1024, 19x19x512, 10x10x512, 5x5x256, 3x3x256, 1x1x256
        detection_features = torch.jit.annotate(List[Tensor], [])  # [x]
        detection_features.append(x)
        for layer in self.additional_blocks:
            x = layer(x)
            detection_features.append(x)
        # Feature Map 38x38x4, 19x19x6, 10x10x6, 5x5x6, 3x3x4, 1x1x4
        locs, confs = self.bbox_view(detection_features, self.loc, self.conf)
        # For SSD 300, shall return nbatch x 8732 x {nlabels, nlocs} results
        # 38x38x4 + 19x19x6 + 10x10x6 + 5x5x6 + 3x3x4 + 1x1x4 = 8732
        if self.training:
            if targets is None:
                raise ValueError("In training mode, targets should be passed")
            # bboxes_out (Tensor 8732 x 4), labels_out (Tensor 8732)
            bboxes_out = targets['boxes']
            bboxes_out = bboxes_out.transpose(1, 2).contiguous()
            # print(bboxes_out.is_contiguous())
            labels_out = targets['labels']
            # print(labels_out.is_contiguous())
            # ploc, plabel, gloc, glabel
            loss = self.compute_loss(locs, confs, bboxes_out, labels_out)
            return {"total_losses": loss}
        # Add the predicted regression offsets onto the default boxes to get the
        # final boxes, then apply non-maximum suppression to drop overlaps.
        # results = self.encoder.decode_batch(locs, confs)
        results = self.postprocess(locs, confs)
        return results
class Loss(nn.Module):
    """
    Implements the loss as the sum of the followings:
    1. Confidence Loss: All labels, with hard negative mining
    2. Localization Loss: Only on positive labels
    Suppose input dboxes has the shape 8732x4
    """
    def __init__(self, dboxes):
        super(Loss, self).__init__()
        # Two factor are from following links
        # http://jany.st/post/2017-11-05-single-shot-detector-ssd-from-scratch-in-tensorflow.html
        self.scale_xy = 1.0 / dboxes.scale_xy  # 10
        self.scale_wh = 1.0 / dboxes.scale_wh  # 5
        self.location_loss = nn.SmoothL1Loss(reduction='none')
        # [num_anchors, 4] -> [4, num_anchors] -> [1, 4, num_anchors]
        self.dboxes = nn.Parameter(dboxes(order="xywh").transpose(0, 1).unsqueeze(dim=0),
                                   requires_grad=False)
        self.confidence_loss = nn.CrossEntropyLoss(reduction='none')
    def _location_vec(self, loc):
        # type: (Tensor)
        """
        Generate Location Vectors
        Compute the regression targets of the ground-truth boxes relative to
        the default boxes (anchors).
        :param loc: ground-truth boxes matched to each anchor, Nx4x8732
        :return: regression targets, Nx4x8732
        """
        gxy = self.scale_xy * (loc[:, :2, :] - self.dboxes[:, :2, :]) / self.dboxes[:, 2:, :]  # Nx2x8732
        gwh = self.scale_wh * (loc[:, 2:, :] / self.dboxes[:, 2:, :]).log()  # Nx2x8732
        return torch.cat((gxy, gwh), dim=1).contiguous()
    def forward(self, ploc, plabel, gloc, glabel):
        # type: (Tensor, Tensor, Tensor, Tensor)
        """
        ploc, plabel: Nx4x8732, Nxlabel_numx8732
            predicted location and labels
        gloc, glabel: Nx4x8732, Nx8732
            ground truth location and labels
        """
        # mask of positive (object) anchors, Tensor: [N, 8732]
        mask = glabel > 0
        # mask1 = torch.nonzero(glabel)
        # number of positive anchors per image, Tensor: [N]
        pos_num = mask.sum(dim=1)
        # ground-truth location regression targets, Tensor: [N, 4, 8732]
        vec_gd = self._location_vec(gloc)
        # sum on four coordinates, and mask
        # localisation loss, computed over positive anchors only
        loc_loss = self.location_loss(ploc, vec_gd).sum(dim=1)  # Tensor: [N, 8732]
        loc_loss = (mask.float() * loc_loss).sum(dim=1)  # Tenosr: [N]
        # hard negative mining Tenosr: [N, 8732]
        con = self.confidence_loss(plabel, glabel)
        # positive mask will never selected
        # zero out positives so they cannot be chosen as hard negatives
        con_neg = con.clone()
        con_neg[mask] = torch.tensor(0.0)
        # rank anchors by descending confidence loss; con_idx (Tensor: [N, 8732])
        _, con_idx = con_neg.sort(dim=1, descending=True)
        _, con_rank = con_idx.sort(dim=1)  # double argsort yields each anchor's rank
        # number of negative three times positive
        # keep 3 negatives per positive (hard negative mining, as in the SSD
        # paper), capped at the total anchor count 8732
        neg_num = torch.clamp(3 * pos_num, max=mask.size(1)).unsqueeze(-1)
        neg_mask = con_rank < neg_num  # Tensor [N, 8732]
        # final confidence loss: selected positives plus selected hard negatives
        con_loss = (con * (mask.float() + neg_mask.float())).sum(dim=1)  # Tensor [N]
        # avoid no object detected
        # guard against images that contain no ground-truth boxes
        total_loss = loc_loss + con_loss
        # eg. [15, 3, 5, 0] -> [1.0, 1.0, 1.0, 0.0]
        num_mask = (pos_num > 0).float()  # flags images with at least one positive
        pos_num = pos_num.float().clamp(min=1e-6)  # avoid division by zero
        ret = (total_loss * num_mask / pos_num).mean(dim=0)  # average over images that have positives
        return ret
| 2.3125 | 2 |
foldershare.py | xHasKx/foldershare | 0 | 12763946 | <filename>foldershare.py
#!/usr/bin/env python3
import os
import sys
import argparse
import traceback
from subprocess import Popen, PIPE
from http.server import HTTPServer, BaseHTTPRequestHandler
# using 1 MB chunks when sending tar.gz stream
CHUNKSIZE = 1*1024*1024


class ShareHandler(BaseHTTPRequestHandler):
    '''
    Shared folder content HTTP handler.

    Routes:
      /             -- small HTML help page with download instructions
      /files.tar.gz -- the current working directory streamed as tar.gz
      anything else -- 404
    '''

    def do_GET(self):
        '''
        HTTP GET method handler
        '''
        # default HTTP response attributes
        streaming = False
        code = 200
        headers = [('Connection', 'close')]
        body = None
        try:
            # processing request
            if self.path == '/':
                # show brief help page
                host = self.server.server_address[0]
                if host == '0.0.0.0':
                    host = 'localhost'
                port = self.server.server_address[1]
                headers.append(('Content-Type', 'text/html; charset=utf-8'))
                body = '''
            <!DOCTYPE html>
            <html>
            <head>
                <meta charset="utf-8" />
                <title>Shared Folder</title>
            </head>
            <body>
                <h3>
                    Shared folder content is available as
                    <a href="/files.tar.gz">/files.tar.gz</a> file
                </h3>
                <p>Command line download & extract example:</p>
                <pre>
            curl http://{host}:{port}/files.tar.gz | tar xz</pre>
                <p>or:</p>
                <pre>
            wget http://{host}:{port}/files.tar.gz -O - | tar xz</pre>
                <p>
                    Source code:
                    <a href="https://github.com/xHasKx/foldershare" target="_blank">
                        github.com/xHasKx/foldershare
                    </a>
                </p>
            </body>
            </html>
            '''.strip().format(host=host, port=port).encode('utf-8')
            elif self.path == "/files.tar.gz":
                # send files as tar.gz archive
                with Popen('tar cz *', stdout=PIPE, shell=True) as p:
                    # subprocess started, start sending HTTP response
                    self.send_response(200)
                    for hdr in headers:
                        self.send_header(hdr[0], hdr[1])
                    self.send_header('Content-Type', 'application/tar+gzip')
                    self.end_headers()
                    # start streaming mode to prevent actions in finally block
                    streaming = True
                    while True:
                        # read each stdout chunk
                        chunk = p.stdout.read(CHUNKSIZE)
                        if not chunk:
                            # end of file reached
                            break
                        # and write each chunk to http response
                        self.wfile.write(chunk)
            else:
                # all other requests
                code = 404
        except Exception:
            # handle any error - send HTTP 500 code
            # (was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception)
            code = 500
            headers.append(('Content-Type', 'text/plain'))
            body = b'Internal Server Error'
            # and show traceback on stdout
            traceback.print_exc()
        finally:
            if not streaming:
                # send generated HTTP answer if streaming mode is not started
                self.send_response(code)
                for hdr in headers:
                    self.send_header(hdr[0], hdr[1])
                self.end_headers()
                if body:
                    self.wfile.write(body)
def http_server(address, port):
    '''
    Run an HTTP server sharing the current folder on address:port until
    interrupted with Ctrl+C.
    '''
    httpd = HTTPServer((address, port), ShareHandler)
    print('Starting HTTP server on {}:{} to share folder {}'.format(
        address, port, os.getcwd()))
    print('Press Ctrl+C (possible several times) to stop it')
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print()
    finally:
        print('Stopped HTTP server')
def main():
    '''
    Entry Point: parse port/address from the command line and start sharing.
    '''
    cli = argparse.ArgumentParser(
        description='Share folder content over HTTP server as tar.gz file')
    cli.add_argument('port', metavar='port', type=int,
                     default=8080, nargs='?',
                     help='Port to start HTTP server, default is 8080')
    cli.add_argument('address', metavar='address', type=str,
                     default="", nargs='?',
                     help='Network address to start HTTP server, ' +
                          'default is all interfaces')
    args = cli.parse_args()
    # reject ports outside the valid TCP range before binding
    if not 0 < args.port <= 65535:
        raise Exception("Invalid port: not in valid range: " + str(args.port))
    # start http server to share current folder
    http_server(args.address, args.port)


if __name__ == '__main__':
    main()
| 2.9375 | 3 |
where/cleaners/removers/__init__.py | ingridfausk/where | 16 | 12763947 | <gh_stars>10-100
"""Framework for removing observations
Description:
------------
Each remover should be defined in a separate .py-file. The function inside the .py-file that should be called needs to
be decorated with the :func:`~midgard.dev.plugins.register` decorator as follows::
from midgard.dev import plugins
@plugins.register
def ignore_station(dset):
...
"""
# Standard library imports
from typing import Any, Dict
# External library imports
import numpy as np
# Midgard imports
from midgard.dev import plugins
# Where imports
from where.lib import config
from where.lib import log
def apply_removers(config_key: str, dset: "Dataset") -> None:
    """Run every remover listed under ``config_key`` and subset the dataset.

    Each remover returns a boolean keep-mask; the masks are AND-ed together
    and the dataset is reduced to the rows every remover kept.

    Args:
        config_key: The configuration key listing which removers to apply.
        dset:       Dataset containing analysis data.
    """
    pipeline = dset.vars["pipeline"]
    remover_names = config.tech[config_key].list
    log.info(f"Applying removers")
    keep_masks = plugins.call_all(
        package_name=__name__, plugins=remover_names, prefix=pipeline, dset=dset
    )
    combined_keep = np.ones(dset.num_obs, dtype=bool)
    for name, keep in keep_masks.items():
        log.info(f"Removing {sum(np.logical_not(keep)):5d} observations based on {name}")
        combined_keep = np.logical_and(combined_keep, keep)
    log.info(f"Keeping {sum(combined_keep)} of {dset.num_obs} observations")
    dset.subset(combined_keep)
    if dset.num_obs == 0:
        log.fatal("No observations are available.")
def apply_remover(remover: str, dset: "Dataset", **kwargs: Dict[Any, Any]) -> None:
    """Run a single named remover and subset the dataset to the kept rows.

    Args:
        remover: The remover name.
        dset:    Dataset containing analysis data.
        kwargs:  Input arguments to the remover.
    """
    log.info(f"Apply remover {remover!r}")
    keep = plugins.call(package_name=__name__, plugin_name=remover, dset=dset, **kwargs)
    log.info(f"Keeping {sum(keep)} of {dset.num_obs} observations")
    dset.subset(keep)
    if dset.num_obs == 0:
        log.fatal("No observations are available.")
| 2.390625 | 2 |
tests/runner/http/test_request.py | hf400159/apisix-python-plugin-runner | 0 | 12763948 | <reponame>hf400159/apisix-python-plugin-runner
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import socket
import logging
import apisix.runner.utils.common as runner_utils
from apisix.runner.server.logger import Logger as RunnerServerLogger
from apisix.runner.server.server import RPCRequest as RunnerRPCRequest
from apisix.runner.http.request import Request as RunnerHttpRequest
def default_request():
    """Build a RunnerRPCRequest backed by a fresh socket and an INFO logger."""
    return RunnerRPCRequest(socket.socket(), RunnerServerLogger(logging.INFO))
def test_request_unknown_handler():
    """The unknown-call handler always succeeds, whatever the request state."""
    request = RunnerHttpRequest(default_request())
    assert request.unknown_handler(runner_utils.new_builder())
def test_request_config_handler():
    """Config handler succeeds only with a non-zero conf token."""
    builder = runner_utils.new_builder()
    r = default_request()
    req = RunnerHttpRequest(r)
    # token 0 is rejected, so building the config response fails
    req.set_conf_token(0)
    ok = req.config_handler(builder)
    assert not ok
    # token 1 is accepted
    req.set_conf_token(1)
    ok = req.config_handler(builder)
    assert ok
def test_request_call_handler():
    """Call handler needs an id plus at least one populated field to encode."""
    builder = runner_utils.new_builder()
    r = default_request()
    req = RunnerHttpRequest(r)
    # fully empty request: encoding fails
    req.set_uri("")
    req.set_headers({})
    req.set_args({})
    ok = req.call_handler(builder)
    assert not ok
    # a header plus an id is enough
    req.set_header("X-Hello", "World")
    req.set_id(1)
    ok = req.call_handler(builder)
    assert ok
    # a uri plus an id also works
    req.set_uri("/hello")
    req.set_id(1)
    ok = req.call_handler(builder)
    assert ok
def test_request_handler():
    """Exercise every setter/getter pair on RunnerHttpRequest, checking that
    zero/empty values are rejected and valid values round-trip."""
    default_key = "hello"
    default_val = "world"
    default_empty_str = ""
    default_empty_dict = {}
    default_id = 1000
    default_token = 1
    default_uri = "/hello"
    default_method = "GET"
    default_ip = "127.0.0.1"
    r = default_request()
    req = RunnerHttpRequest(r)
    # request id: 0 rejected, positive ids round-trip
    assert not req.set_id(0)
    assert req.set_id(default_id)
    assert req.get_id() == default_id
    # conf token: 0 rejected
    assert not req.set_conf_token(0)
    assert req.set_conf_token(default_token)
    assert req.get_conf_token() == default_token
    # method: "hello" rejected, "GET" accepted
    assert not req.set_method(default_key)
    assert req.set_method(default_method)
    assert req.get_method() == default_method
    # uri: "hello" rejected, "/hello" accepted
    assert not req.set_uri(default_key)
    assert req.set_uri(default_uri)
    assert req.get_uri() == default_uri
    # headers: empty value / empty dict rejected
    assert not req.set_header(default_key, default_empty_str)
    assert req.set_header(default_key, default_val)
    assert req.get_header(default_key) == default_val
    assert not req.set_headers(default_empty_dict)
    assert req.set_headers({default_key: default_val})
    assert req.get_headers() == {default_key: default_val}
    # config entries: empty key rejected, empty value allowed
    assert not req.set_config(default_empty_str, default_empty_str)
    assert req.set_config(default_key, default_empty_str)
    assert req.set_config(default_key, default_val)
    assert req.get_config(default_key) == default_val
    assert not req.set_configs(default_empty_dict)
    assert req.set_configs({default_key: default_val})
    assert req.get_configs() == {default_key: default_val}
    # query args: empty value / empty dict rejected
    assert not req.set_arg(default_key, default_empty_str)
    assert req.set_arg(default_key, default_val)
    assert req.get_arg(default_key) == default_val
    assert not req.set_args(default_empty_dict)
    assert req.set_args({default_key: default_val})
    assert req.get_args() == {default_key: default_val}
    # remote address: empty string rejected
    assert not req.set_remote_addr(default_empty_str)
    assert req.set_remote_addr(default_ip)
    assert req.get_remote_addr() == default_ip
    # body: empty rejected and body stays empty until a real value is set
    assert not req.set_body(default_empty_str)
    assert req.get_body() == default_empty_str
    assert req.set_body(default_val)
    assert req.get_body() == default_val
    # vars: empty value rejected; get falls back to empty string
    assert not req.set_var(default_key, default_empty_str)
    assert req.get_var(default_key) == default_empty_str
    assert req.set_var(default_key, default_val)
    assert req.get_var(default_key) == default_val
| 1.9375 | 2 |
restaurants/views.py | MaryamKia/RestaurantsApp | 0 | 12763949 | from django.db.models import Q
import random
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.views import View
from django.views.generic import TemplateView, ListView, DetailView, CreateView, UpdateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from .forms import RestaurantCreateForm, RestaurantLocationCreateForm
from .models import RestaurantLocation
# Create your views here.
"""function based view"""
# def home_old(request):
# # response
# html_var = 'f strings'
# html_ = f"""<!doctype html>
# <html lang="en">
# <head>
# </head>
# <body>
# <h1>Hello World!</h1>
# <p>
# This is {html_var} coming through
# </p>
# </body>
# </html>"""
# # f strings
# return HttpResponse(html_)
#
#
# def home(request):
# num = None
# some_list = [
# random.randint(0, 100000000),
# random.randint(0, 100000000),
# random.randint(0, 100000000)
# ]
# condition_bool_item = False
# if condition_bool_item:
# num = random.randint(0, 100000000)
# context = {
# "num": num,
# "some_list": some_list
# }
# return render(request, "home.html", context)
#
#
# def about(request):
# context = {}
# return render(request, "about.html", context)
# def contact(request):
# context = {}
# return render(request, "contact.html", context)
"""class based view"""
# class ContactView(View):
# def get(self, request, arg, **kwargs):
# context = {}
# return render(request, "contact.html", context)
# class HomeView(TemplateView):
# template_name = 'home.html'
#
# def get_context_data(self, *args, **kwargs):
# context = super(HomeView, self).get_context_data(*args, **kwargs)
# num = None
# some_list = [
# random.randint(0, 100000000),
# random.randint(0, 100000000),
# random.randint(0, 100000000)
# ]
# condition_bool_item = True
# if condition_bool_item:
# num = random.randint(0, 100000000)
# context = {
# "num": num,
# "some_list": some_list
# }
# print(context)
# return context
#
#
# class AboutView(TemplateView):
# template_name = 'about.html'
#
#
# class ContactView(TemplateView):
# template_name = 'contact.html'
"""make a new view using model.py"""
# def restaurant_listview(request):
# template_name = 'restaurants/restaurantlocation_list.html'
# queryset = RestaurantLocation.objects.all()
# context = {
# 'object_list': queryset
# }
# return render(request, template_name, context)
class RestaurantListView(LoginRequiredMixin, ListView):
    """List the restaurants owned by the logged-in user.

    (Removed the commented-out slug/category filtering experiment that was
    left inside the class body.)
    """

    def get_queryset(self):
        # Scope the listing to the current user so owners only ever see
        # their own restaurants.
        return RestaurantLocation.objects.filter(owner=self.request.user)
class RestaurantDetailView(LoginRequiredMixin, DetailView):
    """Detail page for a single restaurant; only its owner can view it.

    (Removed the commented-out ``get_context_data``/``get_object`` debugging
    experiments that were left inside the class body.)
    """

    def get_queryset(self):
        # Filtering by owner makes other users' restaurants resolve to 404
        # instead of leaking their details.
        return RestaurantLocation.objects.filter(owner=self.request.user)
@login_required(login_url='/login/')
def restaurant_createview(request):
    """Function-based create view for RestaurantLocation.

    On a valid POST, saves the restaurant with the current user as owner and
    redirects to the list page; otherwise re-renders the form with its errors.
    """
    form = RestaurantLocationCreateForm(request.POST or None)
    errors = None
    if form.is_valid():
        # login_required already guarantees authentication; this inner check
        # is defensive and keeps the redirect behaviour explicit.
        if request.user.is_authenticated:
            instance = form.save(commit=False)
            instance.owner = request.user
            instance.save()
            return HttpResponseRedirect('/restaurants/')
        else:
            return HttpResponseRedirect('/login/')
    if form.errors:
        errors = form.errors
    template_name = 'restaurants/form.html'
    context = {'form': form, 'errors': errors}
    return render(request, template_name, context)
class RestaurantCreateView(LoginRequiredMixin, CreateView):
    """Class-based create view; the saved restaurant is owned by request.user."""
    form_class = RestaurantLocationCreateForm
    login_url = '/login/'
    template_name = 'form.html'
    # success_url = "/restaurants/"
    def form_valid(self, form):
        # attach the current user as owner before the parent class saves
        instance = form.save(commit=False)
        instance.owner = self.request.user
        return super(RestaurantCreateView, self).form_valid(form)
    def get_context_data(self, *args, **kwargs):
        context = super(RestaurantCreateView, self).get_context_data(**kwargs)
        context['title'] = 'Add Restaurant'
        return context
class RestaurantUpdateView(LoginRequiredMixin, UpdateView):
    """Edit an existing RestaurantLocation; restricted to its owner."""
    form_class = RestaurantLocationCreateForm
    template_name = 'restaurants/detail_update.html'
    # success_url = '/restaurants/'
    # def form_valid(self, form):
    #     instance = form.save(commit=False)
    #     instance.owner = self.request.user
    #     return super(RestaurantCreateView, self).form_valid(form)
    def get_context_data(self, **kwargs):
        # show the restaurant's name in the page title
        context = super(RestaurantUpdateView, self).get_context_data(**kwargs)
        name = self.get_object().name
        context['title'] = f'Update Restaurant: {name}'
        return context
    def get_queryset(self):
        # only the owner may update; other users get a 404
        return RestaurantLocation.objects.filter(owner=self.request.user)
| 2.171875 | 2 |
world_rowing/cli.py | matthewghgriffiths/rowing | 1 | 12763950 | #!/usr/bin/env python
import sys
import argparse
import datetime
import logging
from typing import (
Optional, Dict, List, cast, Union, Any, Tuple, Iterable
)
import pandas as pd
import cmd2
from world_rowing import api, dashboard
from world_rowing.utils import first
logger = logging.getLogger('world_rowing.cli')

# Shared parser for commands that take a year plus optional pre-made
# menu selections.
year_parser = argparse.ArgumentParser(
    description='Specify which year you want, defaults to current'
)
year_parser.add_argument(
    'year', type=int, help='year to retrieve',
    nargs='?', default=datetime.datetime.now().year
)
year_parser.add_argument(
    'choices', type=int,
    # fixed: this help text was a copy-paste of the year argument's help
    help='pre-selected menu options, applied in order',
    nargs='*', default=()
)

# Shared parser for commands that take a single count.
n_parser = argparse.ArgumentParser(
    description='Specify how many results you want'
)
n_parser.add_argument(
    'n', type=int, help='number to retrieve',
    nargs='?', default=5
)

# Parser for configuring logging output.
logging_parser = argparse.ArgumentParser(
    description='Specify the logging level you want to see'
)
logging_parser.add_argument(
    '--log_file', nargs='?',
    help='[optional] path to logfile',
)
logging_parser.add_argument(
    'level', choices=['DEBUG', 'INFO', 'CRITICAL', 'ERROR'],
    default='INFO', nargs='?',
    help='the logging level to record',
)
class RowingApp(cmd2.Cmd):
    """Interactive shell for browsing World Rowing results and livetracker data."""

    def __init__(self):
        super().__init__()
        # Seed current race/competition from the live API so commands have
        # sensible defaults.
        self.current_race = api.get_last_race_started().name
        self.current_competition = api.get_most_recent_competition().name
        self.save_folder = '.'
        self.block = False
        self.add_settable(
            cmd2.Settable('current_race', str, 'id of current race', self)
        )
        # NOTE(review): current_competition holds a pandas index label (set
        # above from `.name`), which need not be an int -- confirm the
        # declared `int` Settable type against actual API ids.
        self.add_settable(
            cmd2.Settable('current_competition', int,
                          'id of current competition', self)
        )
        self.add_settable(
            cmd2.Settable('save_folder', str, 'folder to save data', self)
        )
        self.add_settable(
            cmd2.Settable(
                'block', bool, 'give access to cli after plotting?', self)
        )
        self.intro = "Welcome try running `pgmts`, `race` or `livetracker`"
        self.prompt = 'rowing> '
        self.foreground_color = 'cyan'

    def select_with_choice(
            self,
            opts: Union[str, List[str], List[Tuple[Any, Optional[str]]]],
            prompt: str = 'Your choice? ',
            choice: Optional[int] = None,
    ):
        """Like ``cmd2.Cmd.select`` but accepts a pre-made 1-based ``choice``.

        If ``choice`` is a valid option index, the matching option is returned
        without prompting; otherwise the user is prompted interactively.
        """
        if isinstance(choice, int):
            local_opts: Union[List[str], List[Tuple[Any, Optional[str]]]]
            if isinstance(opts, str):
                local_opts = cast(List[Tuple[Any, Optional[str]]], list(
                    zip(opts.split(), opts.split())))
            else:
                local_opts = opts
            # normalise options to (value, description) pairs
            fulloptions: List[Tuple[Any, Optional[str]]] = []
            for opt in local_opts:
                if isinstance(opt, str):
                    fulloptions.append((opt, opt))
                else:
                    try:
                        fulloptions.append((opt[0], opt[1]))
                    except IndexError:
                        fulloptions.append((opt[0], opt[0]))
            try:
                option, description = fulloptions[choice - 1]
                self.poutput(f'selecting option {choice}. {description}')
                return str(option)
            except (ValueError, IndexError) as ex:
                # fall back to interactive selection on a bad index
                self.poutput(
                    f"'{choice}' isn't a valid choice. "
                    f"Pick a number between 1 and {len(fulloptions)}:"
                )
                return self.select(opts, prompt)

    def select_from_dataframe(
            self, df,
            column='DisplayName', name='row',
            choice: Optional[int] = None, prompt: Optional[str] = None
    ):
        """Select a row of ``df`` by displaying ``column``; returns the row."""
        selected_id = self.select_with_choice(
            list(df[column].items()),
            prompt or f"Select which {name} you want ",
            choice=choice
        )
        return df.loc[selected_id]

    @cmd2.with_argparser(logging_parser)
    def do_log(self, args):
        """Configure the root logging level and optional log file."""
        self.poutput(args)
        logging.basicConfig(
            filename=args.log_file,
            level=getattr(logging, args.level)
        )

    @cmd2.with_argparser(year_parser)
    def do_pgmts(self, args):
        """Show percentage-of-gold-medal-time statistics for a competition."""
        choices = iter(args.choices)
        competition = self.select_competition(
            args.year, choice=next(choices, None)
        )
        pgmts = api.get_competition_pgmts(competition.name)
        if pgmts.empty:
            self.poutput(
                f'no results could be loaded for {competition.DisplayName}')
            return
        group_boat_pgmts = pgmts.groupby('Boat')
        boat_pgmts = group_boat_pgmts\
            .first()\
            .sort_values('PGMT', ascending=False)
        self.poutput(
            f"loaded PGMTS for {len(pgmts)} results"
        )
        mode = self.select_with_choice(
            [
                'by result',
                'by boat class',
                'by final',
                'plot by boat class'
            ],
            'How to display PGMTs?',
            choice=next(choices, None)
        )
        if mode == 'by result':
            pgmts.PGMT = pgmts.PGMT.map("{:.2%}".format)
            self.poutput(pgmts.to_string())
        elif mode == 'by boat class':
            boat_pgmts.PGMT = boat_pgmts.PGMT.map("{:.2%}".format)
            self.poutput(boat_pgmts.to_string())
        elif mode == 'by final':
            final_pgmts = api.get_competition_pgmts(
                competition.name, finals_only=True)
            final_pgmts.PGMT = final_pgmts.PGMT.map("{:.2%}".format)
            self.poutput(final_pgmts.to_string())
        else:
            # step-plot the sorted PGMTs of each boat class
            import matplotlib.pyplot as plt
            plt.ion()
            f, ax = plt.subplots(figsize=(12, 8))
            ymin = 1
            ymax = 0
            n = 10
            for boat in boat_pgmts.index:
                pgmt = group_boat_pgmts.get_group(
                    boat).PGMT.sort_values(ascending=False)
                ax.step(
                    range(pgmt.size), pgmt.values,
                    label=boat, where='post'
                )
                ymin = min(ymin, pgmt.values[:n].min())
                ymax = max(ymax, pgmt.values[:n].max())
            ax.set_xlim(0, n)
            ax.set_ylim(
                ymin - 0.01,
                pgmts.PGMT.max() + .01)
            ax.legend()
            plt.show(block=self.block)

    @cmd2.with_argparser(n_parser)
    def do_upcoming(self, args):
        """Show the next ``n`` races due to start."""
        next_races = api.show_next_races(args.n)
        if next_races.empty:
            self.poutput(
                'Could not find any upcoming races'
            )
        else:
            self.poutput(next_races.to_string())

    @cmd2.with_argparser(year_parser)
    def do_view(self, args):
        """
        Show live tracker details for a specified race
        """
        race, event, competition = self.select_race(
            args.year, choices=args.choices)
        if race is None:
            # selection failed (competition without events/races)
            return
        self.dashboard(race.name)

    do_view_race = do_view
    race = do_view

    def dashboard(self, race_id):
        """Open the livetracker dashboard for ``race_id``."""
        import matplotlib.pyplot as plt
        dash = dashboard.Dashboard.from_race_id(
            race_id,
        )
        dash.live_ion_dashboard()
        plt.show(block=self.block)

    def do_livetracker(self, args):
        """
        Show live tracker details for the most recent race
        """
        import matplotlib.pyplot as plt
        dash = dashboard.Dashboard.load_last_race()
        dash.live_ion_dashboard()
        plt.show(block=self.block)

    def select_race(self, year: int, choices: Iterable[int] = ()):
        """Interactively pick a competition, event and race for ``year``.

        Returns ``(race, event, competition)``; ``race`` and ``event`` are
        ``None`` when the chosen competition has no events or races.
        """
        choices = iter(choices)
        competition = self.select_competition(year, next(choices, None))
        race, event = self.select_competition_race(
            competition.name, choices=choices)
        return race, event, competition

    def select_competition(self, year: int, choice: Optional[int] = None):
        """Pick one of the year's competitions; returns its dataframe row."""
        return self.select_from_dataframe(
            api.get_competitions(year), name='competition', choice=choice,
        )

    def select_competition_race(self, competition_id: str, choices: Iterable[int] = ()):
        """Pick an event and then a race within it; returns ``(race, event)``.

        Returns ``(None, None)`` when the competition has no events or races.
        (The original fell through with a bare ``return``, which crashed
        callers that unpack the result, and printed the f-string messages
        without the ``f`` prefix.)
        """
        events = api.get_competition_events(competition_id)
        if len(events) == 0:
            self.poutput(f"no races found for {competition_id}")
            return None, None
        choices = iter(choices)
        event = self.select_from_dataframe(
            events, name='event', choice=next(choices, None),
        )
        races = api.get_competition_races(competition_id)
        if len(races) == 0:
            self.poutput(f"no races found for {event.DisplayName}")
            return None, None
        race = self.select_from_dataframe(
            races.loc[races.eventId == event.name],
            name='race',
            choice=next(choices, None)
        )
        return race, event
def run():
    """Launch the interactive rowing shell."""
    app = RowingApp()
    app.cmdloop()
def main():
    """Console-script entry point: run the shell, then exit the process."""
    sys.exit(run())


if __name__ == '__main__':
    main()
| 3.125 | 3 |